mirror of https://github.com/espressif/esp-idf.git (synced 2024-10-05 20:47:46 -04:00)

tools: Fix the Python coding style

parent d453cce1b3
commit bfa9610f58
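In substance, the diff below converts the remaining Python 2 idioms and PEP 8 violations that flake8 flags in the tools/ tree: Python 2 print statements become print() calls (with "from __future__ import print_function" for Python 2 compatibility), "not x in y" becomes "x not in y", "!= None" becomes "is not None", bare "except:" becomes "except Exception:", spaces inside brackets and around "=" in keyword arguments are dropped, inline comments get the "  # comment" form, and two blank lines separate top-level definitions. A minimal before/after sketch of the recurring pattern (illustrative only, not a hunk from this commit):

    # before (Python 2 style, rejected by flake8)
    print "value = %d" % x
    if not key in table:
        table[key] = f(a = 1)

    # after
    from __future__ import print_function
    print("value = %d" % x)
    if key not in table:
        table[key] = f(a=1)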
.flake8 (66 lines changed)

@@ -149,6 +149,8 @@ exclude =
     components/expat/expat,
     components/unity/unity,
     examples/build_system/cmake/import_lib/main/lib/tinyxml2
+    # other third-party libraries
+    tools/kconfig_new/kconfiglib.py,
     # autogenerated scripts
     components/protocomm/python/constants_pb2.py,
     components/protocomm/python/sec0_pb2.py,
@@ -159,67 +161,5 @@ exclude =
     examples/provisioning/custom_config/components/custom_provisioning/python/custom_config_pb2.py,
     # temporary list (should be empty)
-    components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py,
-    tools/ci/apply_bot_filter.py,
-    tools/cmake/convert_to_cmake.py,
-    tools/esp_app_trace/apptrace_proc.py,
-    tools/esp_app_trace/logtrace_proc.py,
-    tools/esp_app_trace/pylibelf/__init__.py,
-    tools/esp_app_trace/pylibelf/constants/__init__.py,
-    tools/esp_app_trace/pylibelf/iterators/__init__.py,
-    tools/esp_app_trace/pylibelf/macros/__init__.py,
-    tools/esp_app_trace/pylibelf/types/__init__.py,
-    tools/esp_app_trace/pylibelf/util/__init__.py,
-    tools/esp_app_trace/pylibelf/util/syms/__init__.py,
-    tools/esp_prov/proto/__init__.py,
-    tools/esp_prov/prov/__init__.py,
-    tools/esp_prov/prov/custom_prov.py,
-    tools/esp_prov/prov/wifi_prov.py,
-    tools/esp_prov/security/__init__.py,
-    tools/esp_prov/security/security.py,
-    tools/esp_prov/security/security0.py,
-    tools/esp_prov/security/security1.py,
-    tools/esp_prov/transport/__init__.py,
-    tools/esp_prov/transport/transport.py,
-    tools/esp_prov/transport/transport_ble.py,
-    tools/esp_prov/transport/transport_console.py,
-    tools/esp_prov/transport/transport_softap.py,
-    tools/esp_prov/utils/__init__.py,
-    tools/esp_prov/utils/convenience.py,
-    tools/gen_esp_err_to_name.py,
-    tools/idf.py,
-    tools/idf_size.py,
-    tools/kconfig_new/confgen.py,
-    tools/kconfig_new/confserver.py,
-    tools/kconfig_new/gen_kconfig_doc.py,
-    tools/kconfig_new/kconfiglib.py,
-    tools/kconfig_new/test/test_confserver.py,
-    tools/ldgen/fragments.py,
-    tools/ldgen/generation.py,
-    tools/ldgen/ldgen.py,
-    tools/ldgen/pyparsing.py,
-    tools/ldgen/sdkconfig.py,
-    tools/ldgen/test/test_fragments.py,
-    tools/ldgen/test/test_generation.py,
+    tools/esp_app_trace/pylibelf,
-    tools/mass_mfg/mfg_gen.py,
-    tools/test_idf_monitor/run_test_idf_monitor.py,
-    tools/test_idf_size/test_idf_size.py,
-    tools/tiny-test-fw/CIAssignExampleTest.py,
-    tools/tiny-test-fw/CIAssignUnitTest.py,
-    tools/tiny-test-fw/DUT.py,
-    tools/tiny-test-fw/EnvConfig.py,
-    tools/tiny-test-fw/IDF/IDFApp.py,
-    tools/tiny-test-fw/IDF/IDFDUT.py,
-    tools/tiny-test-fw/Runner.py,
-    tools/tiny-test-fw/TinyFW.py,
-    tools/tiny-test-fw/Utility/CaseConfig.py,
-    tools/tiny-test-fw/Utility/LineChart.py,
-    tools/tiny-test-fw/Utility/PowerControl.py,
-    tools/tiny-test-fw/Utility/SearchCases.py,
-    tools/tiny-test-fw/Utility/__init__.py,
-    tools/tiny-test-fw/docs/conf.py,
-    tools/tiny-test-fw/example.py,
-    tools/unit-test-app/idf_ext.py,
-    tools/unit-test-app/tools/CreateSectionTable.py,
-    tools/unit-test-app/tools/UnitTestParser.py,
-    tools/unit-test-app/unit_test.py,
-    tools/windows/eclipse_make.py,
@@ -30,7 +30,7 @@ def parse_filter(filter_name):

 def process_filter(execute_by_default, filter_name, ci_name):
     execute = execute_by_default

     # bot message is case insensitive (processed with lower case). so we also convert ci_name to lower case.
     ci_name = ci_name.lower()

@@ -55,8 +55,8 @@ if __name__ == "__main__":
     if os.getenv("BOT_NEEDS_TRIGGER_BY_NAME", "0") == "1":
         execute_by_default = False

-    need_to_execute = process_filter(True, "BOT_STAGE_FILTER", os.getenv("CI_JOB_STAGE")) \
-        and process_filter(execute_by_default, "BOT_JOB_FILTER", os.getenv("CI_JOB_NAME"))
+    need_to_execute = process_filter(True, "BOT_STAGE_FILTER", os.getenv("CI_JOB_STAGE")) and process_filter(execute_by_default,
+                                                                                                             "BOT_JOB_FILTER", os.getenv("CI_JOB_NAME"))
     if need_to_execute:
         sys.exit(0)
     else:
@@ -8,10 +8,10 @@ import subprocess
 import re
 import os.path
 import glob
 import sys

 debug = False


 def get_make_variables(path, makefile="Makefile", expected_failure=False, variables={}):
     """
     Given the path to a Makefile of some kind, return a dictionary of all variables defined in this Makefile
@@ -20,9 +20,9 @@ def get_make_variables(path, makefile="Makefile", expected_failure=False, variab

     Overrides IDF_PATH= to avoid recursively evaluating the entire project Makefile structure.
     """
-    variable_setters = [ ("%s=%s" % (k,v)) for (k,v) in variables.items() ]
+    variable_setters = [("%s=%s" % (k,v)) for (k,v) in variables.items()]

-    cmdline = ["make", "-rpn", "-C", path, "-f", makefile ] + variable_setters
+    cmdline = ["make", "-rpn", "-C", path, "-f", makefile] + variable_setters
     if debug:
         print("Running %s..." % (" ".join(cmdline)))

@@ -54,15 +54,16 @@ def get_make_variables(path, makefile="Makefile", expected_failure=False, variab

     return result


 def get_component_variables(project_path, component_path):
     make_vars = get_make_variables(component_path,
                                    os.path.join(os.environ["IDF_PATH"],
                                                 "make",
                                                 "component_wrapper.mk"),
                                    expected_failure=True,
-                                   variables = {
-                                       "COMPONENT_MAKEFILE" : os.path.join(component_path, "component.mk"),
-                                       "COMPONENT_NAME" : os.path.basename(component_path),
+                                   variables={
+                                       "COMPONENT_MAKEFILE": os.path.join(component_path, "component.mk"),
+                                       "COMPONENT_NAME": os.path.basename(component_path),
                                        "PROJECT_PATH": project_path,
                                    })

@@ -70,7 +71,7 @@ def get_component_variables(project_path, component_path):
         # Convert to sources
         def find_src(obj):
             obj = os.path.splitext(obj)[0]
-            for ext in [ "c", "cpp", "S" ]:
+            for ext in ["c", "cpp", "S"]:
                 if os.path.exists(os.path.join(component_path, obj) + "." + ext):
                     return obj + "." + ext
             print("WARNING: Can't find source file for component %s COMPONENT_OBJS %s" % (component_path, obj))
@@ -86,7 +87,7 @@ def get_component_variables(project_path, component_path):
         component_srcs = list()
         for component_srcdir in make_vars.get("COMPONENT_SRCDIRS", ".").split(" "):
             component_srcdir_path = os.path.abspath(os.path.join(component_path, component_srcdir))


             srcs = list()
             srcs += glob.glob(os.path.join(component_srcdir_path, "*.[cS]"))
             srcs += glob.glob(os.path.join(component_srcdir_path, "*.cpp"))
@@ -96,7 +97,6 @@ def get_component_variables(project_path, component_path):
             component_srcs += srcs
         make_vars["COMPONENT_SRCS"] = " ".join(component_srcs)

-
     return make_vars

@@ -111,7 +111,7 @@ def convert_project(project_path):
         raise RuntimeError("This project already has a CMakeLists.txt file")

     project_vars = get_make_variables(project_path, expected_failure=True)
-    if not "PROJECT_NAME" in project_vars:
+    if "PROJECT_NAME" not in project_vars:
         raise RuntimeError("PROJECT_NAME does not appear to be defined in IDF project Makefile at %s" % project_path)

     component_paths = project_vars["COMPONENT_PATHS"].split(" ")
@@ -143,6 +143,7 @@ include($ENV{IDF_PATH}/tools/cmake/project.cmake)

     print("Converted project %s" % project_cmakelists)

+
 def convert_component(project_path, component_path):
     if debug:
         print("Converting %s..." % (component_path))
@@ -1,10 +1,12 @@
 #!/usr/bin/env python
 #

+from __future__ import print_function
 import argparse
 import struct
 import sys

+
 class bcolors:
     HEADER = '\033[95m'
     OKBLUE = '\033[94m'
@@ -14,13 +16,14 @@ class bcolors:
     ENDC = '\033[0m'
     BOLD = '\033[1m'
     UNDERLINE = '\033[4m'


+
 def main():
     ESP32_TRACE_BLOCK_HDR_SZ = 8
     ESP32_TRACE_BLOCK_TASK_IDX = 0
     ESP32_TRACE_BLOCK_TS_IDX = 1
     ESP32_TRACE_BLOCK_DATA_IDX = 2

     parser = argparse.ArgumentParser(description='ESP32 App Trace Parse Tool')

     parser.add_argument('file', help='Path to app trace file', type=str)
@@ -31,11 +34,11 @@ def main():

     args = parser.parse_args()

-    print "===================================================================="
+    print("====================================================================")
     try:
         ftrc = open(args.file, 'rb')
     except IOError as e:
-        print "Failed to open trace file (%s)!" % e
+        print("Failed to open trace file (%s)!" % e)
         sys.exit(2)

     passed = True
@@ -44,81 +47,84 @@ def main():
     last_ts = None
     tot_discont = 0
     while True:
-        #ftrc.seek(off)
+        # ftrc.seek(off)
         task = None
         ts = 0
         trc_buf = ftrc.read(args.block_len)
         if len(trc_buf) == 0:
-            # print 'EOF'
+            # print('EOF')
             break
         trc_data = struct.unpack('<LL%sB' % (len(trc_buf) - ESP32_TRACE_BLOCK_HDR_SZ), trc_buf)
         if len(trc_data):
-            # print "%x %x, len %d" % (trc_data[0], trc_data[1], len(trc_data) - 2)
-            # print trc_data[2:]
-            # sys.exit(0)
+            # print("%x %x, len %d" % (trc_data[0], trc_data[1], len(trc_data) - 2))
+            # print(trc_data[2:])
+            # sys.exit(0)
             task = trc_data[ESP32_TRACE_BLOCK_TASK_IDX]
             ts = trc_data[ESP32_TRACE_BLOCK_TS_IDX]
-            # print ts
+            # print(ts)
             if last_ts and last_ts >= ts:
-                # print "Global TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task, data_stats[task]['stamp'], off)
+                # print("Global TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task,
+                #       data_stats[task]['stamp'], off))
                 if args.print_details:
-                    print "Global TS discontinuity %x -> %x, task %x at %x" % (last_ts, ts, task, off)
-                # tot_discont += 1
-                # passed = False
+                    print("Global TS discontinuity %x -> %x, task %x at %x" % (last_ts, ts, task, off))
+                # tot_discont += 1
+                # passed = False
             last_ts = ts
-            if not task in data_stats:
-                print "%x: NEW TASK" % task
-                data_stats[task] = {'stamp' : trc_data[ESP32_TRACE_BLOCK_DATA_IDX], 'last_ts' : ts, 'count' : 1, 'discont_offs' : [], 'inv_stamps_offs' : []}
+            if task not in data_stats:
+                print("%x: NEW TASK" % task)
+                data_stats[task] = {'stamp': trc_data[ESP32_TRACE_BLOCK_DATA_IDX], 'last_ts': ts, 'count': 1, 'discont_offs': [], 'inv_stamps_offs': []}
             else:
                 if data_stats[task]['last_ts'] == ts:
-                    print "Task TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task, data_stats[task]['stamp'], off)
+                    print("Task TS discontinuity %x -> %x, task %x, stamp %x at %x" % (last_ts, ts, task, data_stats[task]['stamp'], off))
                     data_stats[task]['discont_offs'].append(off)
                     tot_discont += 1
                     passed = False
                 data_stats[task]['last_ts'] = ts
                 data_stats[task]['count'] += 1
             if len(trc_data) > ESP32_TRACE_BLOCK_DATA_IDX:
-                # print "DATA = %x %x %x %x" % (trc_data[-4], trc_data[-3], trc_data[-2], trc_data[-1])
+                # print("DATA = %x %x %x %x" % (trc_data[-4], trc_data[-3], trc_data[-2], trc_data[-1]))
                 if args.print_tasks:
-                    print "Task[%d] %x, ts %08x, stamp %x" % (off/args.block_len, task, ts, trc_data[ESP32_TRACE_BLOCK_DATA_IDX])
+                    print("Task[%d] %x, ts %08x, stamp %x" % (off / args.block_len, task, ts, trc_data[ESP32_TRACE_BLOCK_DATA_IDX]))
             else:
-                print "%x: NO DATA" % task
+                print("%x: NO DATA" % task)
         else:
-            print "Failed to unpack data!"
+            print("Failed to unpack data!")
             sys.exit(2)

         # check data
         for i in range(ESP32_TRACE_BLOCK_DATA_IDX, len(trc_data)):
             if trc_data[i] != data_stats[task]['stamp']:
                 if not args.no_errors:
-                    print "Invalid stamp %x->%x at %x, task %x" % (data_stats[task]['stamp'], trc_data[i], off + ESP32_TRACE_BLOCK_HDR_SZ + i, task)
+                    print("Invalid stamp %x->%x at %x, task %x" % (data_stats[task]['stamp'], trc_data[i], off + ESP32_TRACE_BLOCK_HDR_SZ + i, task))
                     passed = False
                 data_stats[task]['stamp'] = trc_data[i]
                 data_stats[task]['inv_stamps_offs'].append(off)
                 # break
         if len(trc_buf) < args.block_len:
-            print 'Last block (not full)'
+            print('Last block (not full)')
             break

-        if data_stats[task]['stamp'] != None:
+        if data_stats[task]['stamp'] is not None:
             data_stats[task]['stamp'] = (data_stats[task]['stamp'] + 1) & 0xFF
-            # print "stamp=%x" % data_stats[task][ESP32_TRACE_STAMP_IDX]
+            # print("stamp=%x" % data_stats[task][ESP32_TRACE_STAMP_IDX])
         off += args.block_len

     ftrc.close()
-    print "===================================================================="
-    print "Trace size %d bytes, discont %d\n" % (off, tot_discont)
+    print("====================================================================")
+    print("Trace size %d bytes, discont %d\n" % (off, tot_discont))
     for t in data_stats:
-        print "Task %x. Total count %d. Inv stamps %d. TS Discontinuities %d." % (t, data_stats[t]['count'], len(data_stats[t]['inv_stamps_offs']), len(data_stats[t]['discont_offs']))
+        print("Task %x. Total count %d. Inv stamps %d. TS Discontinuities %d." % (t, data_stats[t]['count'],
+              len(data_stats[t]['inv_stamps_offs']), len(data_stats[t]['discont_offs'])))
         if args.print_details:
-            print 'Invalid stamps offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['inv_stamps_offs']))
-            print 'TS Discontinuities offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['discont_offs']))
-            print "\n"
+            print('Invalid stamps offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['inv_stamps_offs'])))
+            print('TS Discontinuities offs: [{}]'.format(', '.join(hex(x) for x in data_stats[t]['discont_offs'])))
+            print("\n")

     if passed:
-        print "Data - OK"
+        print("Data - OK")
     else:
-        print "Data - FAILED!"
+        print("Data - FAILED!")


 if __name__ == '__main__':
     main()
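The file above is converted wholesale from Python 2 print statements to the print() function. A short sketch of the two forms that appear (illustrative, not taken from the diff):

    from __future__ import print_function  # gives Python 2 the Python 3 print()

    # Python 2's bare statement, "print x", becomes a call:
    print("done")
    # and the trailing-comma form, "print x,", which suppressed the newline, becomes:
    print("no newline", end='')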
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 #

+from __future__ import print_function
 import argparse
 import struct
 import sys
@@ -8,7 +9,8 @@ import pylibelf as elf
 import pylibelf.util as elfutil
 import pylibelf.iterators as elfiter
 import pylibelf.constants as elfconst
-from ctypes import *
+import ctypes

+
 class ESPLogTraceParserError(RuntimeError):
     def __init__(self, message):
@@ -44,7 +46,7 @@ def logtrace_parse(fname):
         if len(trc_buf) < ESP32_LOGTRACE_HDR_SZ:
             # print "EOF"
             if len(trc_buf) > 0:
-                print "Unprocessed %d bytes of log record header!" % len(trc_buf)
+                print("Unprocessed %d bytes of log record header!" % len(trc_buf))
                 # data_ok = False
             break
         try:
@@ -58,17 +60,17 @@ def logtrace_parse(fname):
         except IOError as e:
             raise ESPLogTraceParserError("Failed to read log record args (%s)!" % e)
         if len(trc_buf) < args_sz:
-            # print "EOF"
+            # print("EOF")
             if len(trc_buf) > 0:
-                print "Unprocessed %d bytes of log record args!" % len(trc_buf)
+                print("Unprocessed %d bytes of log record args!" % len(trc_buf))
                 # data_ok = False
             break
         try:
             log_args = struct.unpack('<%sL' % nargs, trc_buf)
         except struct.error as e:
             raise ESPLogTraceParserError("Failed to unpack log record args (%s)!" % e)
-        # print log_args
+        # print(log_args)
         recs.append(ESPLogTraceRecord(fmt_addr, list(log_args)))

     ftrc.close()
     # sorted(recs, key=lambda rec: rec.fmt_addr)
@@ -83,9 +85,9 @@ def logtrace_get_str_from_elf(felf, str_addr):
             continue
         if str_addr < hdr.sh_addr or str_addr >= hdr.sh_addr + hdr.sh_size:
             continue
-        # print "Found SECT: %x..%x @ %x" % (hdr.sh_addr, hdr.sh_addr + hdr.sh_size, str_addr - hdr.sh_addr)
+        # print("Found SECT: %x..%x @ %x" % (hdr.sh_addr, hdr.sh_addr + hdr.sh_size, str_addr - hdr.sh_addr))
         sec_data = elfiter.getOnlyData(sect).contents
-        buf = cast(sec_data.d_buf, POINTER(c_char))
+        buf = ctypes.cast(sec_data.d_buf, ctypes.POINTER(ctypes.c_char))
         for i in range(str_addr - hdr.sh_addr, hdr.sh_size):
             if buf[i] == "\0":
                 break
@@ -94,6 +96,7 @@ def logtrace_get_str_from_elf(felf, str_addr):
             return tgt_str
     return None

+
 def logtrace_formated_print(recs, elfname, no_err):
     try:
         felf = elfutil.open_elf(elfname)
@@ -105,30 +108,31 @@ def logtrace_formated_print(recs, elfname, no_err):
         i = 0
         prcnt_idx = 0
         while i < len(lrec.args):
-            prcnt_idx = fmt_str.find('%', prcnt_idx, -2) # TODO: check str ending with %
+            prcnt_idx = fmt_str.find('%', prcnt_idx, -2)  # TODO: check str ending with %
             if prcnt_idx == -1:
                 break
-            prcnt_idx += 1 # goto next char
+            prcnt_idx += 1  # goto next char
             if fmt_str[prcnt_idx] == 's':
                 # find string
                 arg_str = logtrace_get_str_from_elf(felf, lrec.args[i])
                 if arg_str:
                     lrec.args[i] = arg_str
             i += 1
-        # print "\nFmt = {%s}, args = %d/%s" % lrec
+        # print("\nFmt = {%s}, args = %d/%s" % lrec)
         fmt_str = fmt_str.replace('%p', '%x')
-        # print "=====> " + fmt_str % lrec.args
+        # print("=====> " + fmt_str % lrec.args)
         try:
-            print fmt_str % tuple(lrec.args),
-            # print ".",
+            print(fmt_str % tuple(lrec.args), end='')
+            # print(".", end='')
             pass
         except Exception as e:
             if not no_err:
-                print "Print error (%s)" % e
-                print "\nFmt = {%s}, args = %d/%s" % (fmt_str, len(lrec.args), lrec.args)
+                print("Print error (%s)" % e)
+                print("\nFmt = {%s}, args = %d/%s" % (fmt_str, len(lrec.args), lrec.args))

     elf.elf_end(felf)

+
 def main():

     parser = argparse.ArgumentParser(description='ESP32 Log Trace Parsing Tool')
@@ -141,23 +145,24 @@ def main():

     # parse trace file
     try:
-        print "Parse trace file '%s'..." % args.trace_file
-        lrecs = logtrace_parse(args.trace_file);
-        print "Parsing completed."
+        print("Parse trace file '%s'..." % args.trace_file)
+        lrecs = logtrace_parse(args.trace_file)
+        print("Parsing completed.")
     except ESPLogTraceParserError as e:
-        print "Failed to parse log trace (%s)!" % e
+        print("Failed to parse log trace (%s)!" % e)
         sys.exit(2)
     # print recs
     # get format strings and print info
-    print "===================================================================="
+    print("====================================================================")
     try:
-        logtrace_formated_print(lrecs, args.elf_file, args.no_errors);
+        logtrace_formated_print(lrecs, args.elf_file, args.no_errors)
     except ESPLogTraceParserError as e:
-        print "Failed to print log trace (%s)!" % e
+        print("Failed to print log trace (%s)!" % e)
         sys.exit(2)
-    print "\n====================================================================\n"
+    print("\n====================================================================\n")

-    print "Log records count: %d" % len(lrecs)
+    print("Log records count: %d" % len(lrecs))


 if __name__ == '__main__':
     main()
@@ -29,4 +29,5 @@ wifi_constants_pb2 = imp.load_source("wifi_constants_pb2", idf_path + "/componen
 wifi_config_pb2 = imp.load_source("wifi_config_pb2", idf_path + "/components/wifi_provisioning/python/wifi_config_pb2.py")

 # custom_provisioning component related python files generated from .proto files
-custom_config_pb2 = imp.load_source("custom_config_pb2", idf_path + "/examples/provisioning/custom_config/components/custom_provisioning/python/custom_config_pb2.py")
+custom_config_pb2 = imp.load_source("custom_config_pb2", idf_path +
+                                    "/examples/provisioning/custom_config/components/custom_provisioning/python/custom_config_pb2.py")
@@ -13,5 +13,5 @@
 # limitations under the License.
 #

-from .wifi_prov import *
-from .custom_prov import *
+from .wifi_prov import *  # noqa F403
+from .custom_prov import *  # noqa F403
@@ -21,9 +21,11 @@ from future.utils import tobytes
 import utils
 import proto


+
 def print_verbose(security_ctx, data):
     if (security_ctx.verbose):
-        print("++++ " + data + " ++++")
+        print("++++ " + data + " ++++")

+
 def custom_config_request(security_ctx, info, version):
     # Form protobuf request packet from custom-config data
@@ -34,6 +36,7 @@ def custom_config_request(security_ctx, info, version):
     print_verbose(security_ctx, "Client -> Device (CustomConfig cmd) " + utils.str_to_hexstr(enc_cmd))
     return enc_cmd

+
 def custom_config_response(security_ctx, response_data):
     # Interpret protobuf response packet
     decrypt = security_ctx.decrypt_data(tobytes(response_data))
@@ -21,9 +21,11 @@ from future.utils import tobytes
 import utils
 import proto


+
 def print_verbose(security_ctx, data):
     if (security_ctx.verbose):
-        print("++++ " + data + " ++++")
+        print("++++ " + data + " ++++")

+
 def config_get_status_request(security_ctx):
     # Form protobuf request packet for GetStatus command
@@ -35,6 +37,7 @@ def config_get_status_request(security_ctx):
     print_verbose(security_ctx, "Client -> Device (Encrypted CmdGetStatus) " + utils.str_to_hexstr(encrypted_cfg))
     return encrypted_cfg

+
 def config_get_status_response(security_ctx, response_data):
     # Interpret protobuf response packet from GetStatus command
     decrypted_message = security_ctx.decrypt_data(tobytes(response_data))
@@ -56,6 +59,7 @@ def config_get_status_response(security_ctx, response_data):
             print("++++ Failure reason: " + "Incorrect SSID ++++")
     return cmd_resp1.resp_get_status.sta_state

+
 def config_set_config_request(security_ctx, ssid, passphrase):
     # Form protobuf request packet for SetConfig command
     cmd = proto.wifi_config_pb2.WiFiConfigPayload()
@@ -66,6 +70,7 @@ def config_set_config_request(security_ctx, ssid, passphrase):
     print_verbose(security_ctx, "Client -> Device (SetConfig cmd) " + utils.str_to_hexstr(enc_cmd))
     return enc_cmd

+
 def config_set_config_response(security_ctx, response_data):
     # Interpret protobuf response packet from SetConfig command
     decrypt = security_ctx.decrypt_data(tobytes(response_data))
@@ -74,6 +79,7 @@ def config_set_config_response(security_ctx, response_data):
     print_verbose(security_ctx, "SetConfig status " + str(cmd_resp4.resp_set_config.status))
     return cmd_resp4.resp_set_config.status

+
 def config_apply_config_request(security_ctx):
     # Form protobuf request packet for ApplyConfig command
     cmd = proto.wifi_config_pb2.WiFiConfigPayload()
@@ -82,6 +88,7 @@ def config_apply_config_request(security_ctx):
     print_verbose(security_ctx, "Client -> Device (ApplyConfig cmd) " + utils.str_to_hexstr(enc_cmd))
     return enc_cmd

+
 def config_apply_config_response(security_ctx, response_data):
     # Interpret protobuf response packet from ApplyConfig command
     decrypt = security_ctx.decrypt_data(tobytes(response_data))
@@ -13,5 +13,5 @@
 # limitations under the License.
 #

-from .security0 import *
-from .security1 import *
+from .security0 import *  # noqa: F403, F401
+from .security1 import *  # noqa: F403, F401
@@ -15,7 +15,7 @@

 # Base class for protocomm security


 class Security:
     def __init__(self, security_session):
         self.security_session = security_session
@@ -19,9 +19,9 @@
 from __future__ import print_function
 from future.utils import tobytes

 import utils
 import proto
-from .security import *
+from .security import Security


 class Security0(Security):
     def __init__(self, verbose):
@@ -21,7 +21,7 @@ from future.utils import tobytes

 import utils
 import proto
-from .security import *
+from .security import Security

 from cryptography.hazmat.backends import default_backend
 from cryptography.hazmat.primitives import hashes
@@ -30,6 +30,7 @@ from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

 import session_pb2

+
 # Enum for state of protocomm_security1 FSM
 class security_state:
     REQUEST1 = 0
@@ -37,6 +38,7 @@ class security_state:
     RESPONSE2 = 2
     FINISHED = 3

+
 def xor(a, b):
     # XOR two inputs of type `bytes`
     ret = bytearray()
@@ -50,6 +52,7 @@ def xor(a, b):
     # Convert bytearray to bytes
     return bytes(ret)

+
 class Security1(Security):
     def __init__(self, pop, verbose):
         # Initialize state of the security1 FSM
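The xor() helper shown in the security1.py hunks above combines two byte strings byte by byte during the security1 key exchange. A Python 3 sketch of the same idea, assuming equal-length inputs (the hunks do not show how the real helper handles unequal lengths):

    def xor(a, b):
        # XOR two equal-length bytes objects into a new bytes object
        ret = bytearray(len(a))
        for i in range(len(a)):
            ret[i] = a[i] ^ b[i]
        return bytes(ret)

    assert xor(b'\x0f\xf0', b'\xff\xff') == b'\xf0\x0f'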
@@ -13,6 +13,6 @@
 # limitations under the License.
 #

-from .transport_console import *
-from .transport_softap import *
-from .transport_ble import *
+from .transport_console import *  # noqa: F403, F401
+from .transport_softap import *  # noqa: F403, F401
+from .transport_ble import *  # noqa: F403, F401
@@ -17,6 +17,7 @@

 import abc

+
 class Transport():

     @abc.abstractmethod
@@ -15,10 +15,11 @@

 from __future__ import print_function

-from .transport import *
+from .transport import Transport

 from . import ble_cli

+
 class Transport_BLE(Transport):
     def __init__(self, devname, service_uuid, nu_lookup):
         # Expect service UUID like '0000ffff-0000-1000-8000-00805f9b34fb'
@@ -32,7 +33,7 @@ class Transport_BLE(Transport):
         self.cli = ble_cli.get_client()

         # Use client to connect to BLE device and bind to service
-        if not self.cli.connect(devname = devname, iface = 'hci0', srv_uuid = service_uuid):
+        if not self.cli.connect(devname=devname, iface='hci0', srv_uuid=service_uuid):
             raise RuntimeError("Failed to initialize transport")

         # Check if expected characteristics are provided by the service
@@ -44,7 +45,7 @@ class Transport_BLE(Transport):
         # Make sure device is disconnected before application gets closed
         try:
             self.disconnect()
-        except:
+        except Exception:
             pass

     def disconnect(self):
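The "except:" to "except Exception:" change in the destructor above is flake8's E722 (bare except). A bare except also traps KeyboardInterrupt and SystemExit, which derive from BaseException rather than Exception, so the narrowed clause lets those propagate. A minimal sketch of the pattern (the hypothetical cleanup() stands in for self.disconnect()):

    def cleanup():
        raise RuntimeError("device already gone")

    try:
        cleanup()
    except Exception:  # unlike "except:", Ctrl-C still interrupts here
        pass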
@@ -18,11 +18,12 @@ from builtins import input

 import utils

-from .transport import *
+from .transport import Transport

+
 class Transport_Console(Transport):

-    def send_data(self, path, data, session_id = 0):
+    def send_data(self, path, data, session_id=0):
         print("Client->Device msg :", path, session_id, utils.str_to_hexstr(data))
         try:
             resp = input("Enter device->client msg : ")
@@ -18,7 +18,8 @@ from future.utils import tobytes

 import http.client

-from .transport import *
+from .transport import Transport

+
 class Transport_Softap(Transport):
     def __init__(self, url):
@@ -36,4 +37,4 @@ class Transport_Softap(Transport):
             raise RuntimeError("Server responded with error code " + str(response.status))

     def send_data(self, ep_name, data):
-        return self._send_post_request('/'+ ep_name, data)
+        return self._send_post_request('/' + ep_name, data)
@@ -13,4 +13,4 @@
 # limitations under the License.
 #

-from .convenience import *
+from .convenience import *  # noqa: F403, F401
@@ -15,15 +15,17 @@

 # Convenience functions for commonly used data type conversions

+
 def str_to_hexstr(string):
     # Form hexstr by appending ASCII codes (in hex) corresponding to
     # each character in the input string
     return ''.join('{:02x}'.format(ord(c)) for c in string)

+
 def hexstr_to_str(hexstr):
     # Prepend 0 (if needed) to make the hexstr length an even number
-    if len(hexstr)%2 == 1:
+    if len(hexstr) % 2 == 1:
         hexstr = '0' + hexstr
     # Interpret consecutive pairs of hex characters as 8 bit ASCII codes
     # and append characters corresponding to each code to form the string
-    return ''.join(chr(int(hexstr[2*i:2*i+2], 16)) for i in range(len(hexstr)//2))
+    return ''.join(chr(int(hexstr[2 * i: 2 * i + 2], 16)) for i in range(len(hexstr) // 2))
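The two helpers above are inverses of each other for ASCII input. A quick round trip, with values worked out from the function bodies shown (Python 3):

    assert str_to_hexstr('ab') == '6162'    # 'a' is 0x61, 'b' is 0x62
    assert hexstr_to_str('6162') == 'ab'
    assert hexstr_to_str('162') == '\x01b'  # odd length gets a leading '0' prepended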
@@ -40,17 +40,18 @@ import textwrap
 import functools

 # list files here which should not be parsed
-ignore_files = [ 'components/mdns/test_afl_fuzz_host/esp32_compat.h' ]
+ignore_files = ['components/mdns/test_afl_fuzz_host/esp32_compat.h']

 # add directories here which should not be parsed
-ignore_dirs = ( 'examples' )
+ignore_dirs = ('examples')

 # macros from here have higher priorities in case of collisions
-priority_headers = [ 'components/esp32/include/esp_err.h' ]
+priority_headers = ['components/esp32/include/esp_err.h']

-err_dict = collections.defaultdict(list) #identified errors are stored here; mapped by the error code
-rev_err_dict = dict() #map of error string to error code
-unproc_list = list() #errors with unknown codes which depend on other errors
+err_dict = collections.defaultdict(list) # identified errors are stored here; mapped by the error code
+rev_err_dict = dict() # map of error string to error code
+unproc_list = list() # errors with unknown codes which depend on other errors

+
 class ErrItem(object):
     """
@@ -62,13 +63,14 @@ class ErrItem(object):
     - rel_str - (optional) error string which is a base for the error
     - rel_off - (optional) offset in relation to the base error
     """
-    def __init__(self, name, file, include_as = None, comment = "", rel_str = "", rel_off = 0):
+    def __init__(self, name, file, include_as=None, comment="", rel_str="", rel_off=0):
         self.name = name
         self.file = file
         self.include_as = include_as
         self.comment = comment
         self.rel_str = rel_str
         self.rel_off = rel_off

     def __str__(self):
         ret = self.name + " from " + self.file
         if (self.rel_str != ""):
@@ -76,6 +78,7 @@ class ErrItem(object):
         if self.comment != "":
             ret += " // " + self.comment
         return ret

+
     def __cmp__(self, other):
         if self.file in priority_headers and other.file not in priority_headers:
             return -1
@@ -99,6 +102,7 @@ class ErrItem(object):
         else:
             return 0

+
 class InputError(RuntimeError):
     """
     Represents and error on the input
@@ -106,6 +110,7 @@ class InputError(RuntimeError):
     def __init__(self, p, e):
         super(InputError, self).__init__(p + ": " + e)

+
 def process(line, idf_path, include_as):
     """
     Process a line of text from file idf_path (relative to IDF project).
@@ -129,18 +134,18 @@ def process(line, idf_path, include_as):
     m = re.search(r'/\*!<(.+?(?=\*/))', todo_str)
     if m:
         comment = m.group(1).strip()
-        todo_str = todo_str[:m.start()].strip() # keep just the part before the comment
+        todo_str = todo_str[:m.start()].strip()  # keep just the part before the comment

     # identify possible parentheses ()
     m = re.search(r'\((.+)\)', todo_str)
     if m:
-        todo_str = m.group(1) #keep what is inside the parentheses
+        todo_str = m.group(1)  # keep what is inside the parentheses

     # identify BASE error code, e.g. from the form BASE + 0x01
     m = re.search(r'\s*(\w+)\s*\+(.+)', todo_str)
     if m:
-        related = m.group(1) # BASE
-        todo_str = m.group(2) # keep and process only what is after "BASE +"
+        related = m.group(1)  # BASE
+        todo_str = m.group(2)  # keep and process only what is after "BASE +"

     # try to match a hexadecimal number
     m = re.search(r'0x([0-9A-Fa-f]+)', todo_str)
     if m:
@@ -153,8 +158,8 @@ def process(line, idf_path, include_as):
         num = int(m.group(1), 10)
     elif re.match(r'\w+', todo_str):
         # It is possible that there is no number, e.g. #define ERROR BASE
-        related = todo_str # BASE error
-        num = 0 # (BASE + 0)
+        related = todo_str  # BASE error
+        num = 0  # (BASE + 0)
     else:
         raise InputError(idf_path, "Cannot parse line %s" % line)

@@ -168,6 +173,7 @@ def process(line, idf_path, include_as):
         # Store the information available now and compute the error code later
         unproc_list.append(ErrItem(words[1], idf_path, include_as, comment, related, num))

+
 def process_remaining_errors():
     """
     Create errors which could not be processed before because the error code
@@ -180,7 +186,6 @@ def process_remaining_errors():
     for item in unproc_list:
         if item.rel_str in rev_err_dict:
             base_num = rev_err_dict[item.rel_str]
-            base = err_dict[base_num][0]
             num = base_num + item.rel_off
             err_dict[num].append(ErrItem(item.name, item.file, item.include_as, item.comment))
             rev_err_dict[item.name] = num
@@ -189,6 +194,7 @@ def process_remaining_errors():

     del unproc_list[:]

+
 def path_to_include(path):
     """
     Process the path (relative to the IDF project) in a form which can be used
@@ -207,7 +213,8 @@ def path_to_include(path):
         # no include in the path -> use just the filename
         return os.path.basename(path)
     else:
-        return os.sep.join(spl_path[i+1:]) # subdirectories and filename in "include"
+        return os.sep.join(spl_path[i + 1:])  # subdirectories and filename in "include"

+
 def print_warning(error_list, error_code):
     """
@@ -217,6 +224,7 @@ def print_warning(error_list, error_code):
     for e in error_list:
         print(" " + str(e))

+
 def max_string_width():
     max = 0
     for k in err_dict:
@@ -226,6 +234,7 @@ def max_string_width():
             max = x
     return max

+
 def generate_c_output(fin, fout):
     """
     Writes the output to fout based on th error dictionary err_dict and
@@ -247,7 +256,7 @@ def generate_c_output(fin, fout):
     include_list = list(includes)
     include_list.sort()

-    max_width = max_string_width() + 17 + 1 # length of " ERR_TBL_IT()," with spaces is 17
+    max_width = max_string_width() + 17 + 1  # length of " ERR_TBL_IT()," with spaces is 17
     max_decdig = max(len(str(k)) for k in err_dict)

     for line in fin:
@@ -271,7 +280,7 @@ def generate_c_output(fin, fout):
                 fout.write("# ifdef %s\n" % e.name)
                 fout.write(table_line)
                 hexnum_length = 0
-                if k > 0: # negative number and zero should be only ESP_FAIL and ESP_OK
+                if k > 0:  # negative number and zero should be only ESP_FAIL and ESP_OK
                     hexnum = " 0x%x" % k
                     hexnum_length = len(hexnum)
                     fout.write(hexnum)
@@ -280,7 +289,7 @@ def generate_c_output(fin, fout):
                         fout.write(" %s" % e.comment)
                     else:
                         indent = " " * (len(table_line) + hexnum_length + 1)
-                        w = textwrap.wrap(e.comment, width=120, initial_indent = indent, subsequent_indent = indent)
+                        w = textwrap.wrap(e.comment, width=120, initial_indent=indent, subsequent_indent=indent)
                         # this couldn't be done with initial_indent because there is no initial_width option
                         fout.write(" %s" % w[0].strip())
                         for i in range(1, len(w)):
@@ -289,6 +298,7 @@ def generate_c_output(fin, fout):
         else:
             fout.write(line)

+
 def generate_rst_output(fout):
     for k in sorted(err_dict.keys()):
         v = err_dict[k][0]
@@ -301,6 +311,7 @@ def generate_rst_output(fout):
             fout.write(': {}'.format(v.comment))
         fout.write('\n\n')

+
 def main():
     if 'IDF_PATH' in os.environ:
         idf_path = os.environ['IDF_PATH']
@@ -348,5 +359,6 @@ def main():
     with open(args.c_input, 'r', encoding='utf-8') as fin, open(args.c_output, 'w', encoding='utf-8') as fout:
         generate_c_output(fin, fout)

+
 if __name__ == "__main__":
     main()
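Many hunks in this file only adjust comment spacing: flake8's E262 requires an inline comment to start with "# " (hash then a space) and E261 requires at least two spaces between code and the "#". Echoing one pair from the hunks above:

    todo_str = m.group(1) #keep what is inside the parentheses
    todo_str = m.group(1)  # keep what is inside the parentheses

The first line (the old code) trips both checks; the second is the corrected form.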
tools/idf.py (186 lines changed)

@@ -36,18 +36,20 @@ import re
 import shutil
 import json

+
 class FatalError(RuntimeError):
     """
     Wrapper class for runtime errors that aren't caused by bugs in idf.py or the build proces.s
     """
     pass

+
 # Use this Python interpreter for any subprocesses we launch
-PYTHON=sys.executable
+PYTHON = sys.executable

 # note: os.environ changes don't automatically propagate to child processes,
 # you have to pass env=os.environ explicitly anywhere that we create a process
-os.environ["PYTHON"]=sys.executable
+os.environ["PYTHON"] = sys.executable

 # Make flavors, across the various kinds of Windows environments & POSIX...
 if "MSYSTEM" in os.environ: # MSYS
@@ -60,13 +62,15 @@ else:
     MAKE_CMD = "make"
     MAKE_GENERATOR = "Unix Makefiles"

-GENERATORS = [
-    # ('generator name', 'build command line', 'version command line', 'verbose flag')
-    ("Ninja", [ "ninja" ], [ "ninja", "--version" ], "-v"),
-    (MAKE_GENERATOR, [ MAKE_CMD, "-j", str(multiprocessing.cpu_count()+2) ], [ "make", "--version" ], "VERBOSE=1"),
+GENERATORS = \
+    [
+        # ('generator name', 'build command line', 'version command line', 'verbose flag')
+        ("Ninja", ["ninja"], ["ninja", "--version"], "-v"),
+        (MAKE_GENERATOR, [MAKE_CMD, "-j", str(multiprocessing.cpu_count() + 2)], ["make", "--version"], "VERBOSE=1"),
     ]
-GENERATOR_CMDS = dict( (a[0], a[1]) for a in GENERATORS )
-GENERATOR_VERBOSE = dict( (a[0], a[3]) for a in GENERATORS )
+GENERATOR_CMDS = dict((a[0], a[1]) for a in GENERATORS)
+GENERATOR_VERBOSE = dict((a[0], a[3]) for a in GENERATORS)

+
 def _run_tool(tool_name, args, cwd):
     def quote_arg(arg):
@@ -83,6 +87,7 @@ def _run_tool(tool_name, args, cwd):
     except subprocess.CalledProcessError as e:
         raise FatalError("%s failed with exit code %d" % (tool_name, e.returncode))

+
 def check_environment():
     """
     Verify the environment contains the top-level tools we need to operate
@@ -96,7 +101,8 @@ def check_environment():
     if "IDF_PATH" in os.environ:
         set_idf_path = os.path.realpath(os.environ["IDF_PATH"])
         if set_idf_path != detected_idf_path:
-            print("WARNING: IDF_PATH environment variable is set to %s but idf.py path indicates IDF directory %s. Using the environment variable directory, but results may be unexpected..."
+            print("WARNING: IDF_PATH environment variable is set to %s but idf.py path indicates IDF directory %s. "
+                  "Using the environment variable directory, but results may be unexpected..."
                   % (set_idf_path, detected_idf_path))
     else:
         print("Setting IDF_PATH environment variable: %s" % detected_idf_path)
@@ -105,19 +111,21 @@ def check_environment():
     # check Python dependencies
     print("Checking Python dependencies...")
     try:
-        subprocess.check_call([ os.environ["PYTHON"],
-                                os.path.join(os.environ["IDF_PATH"], "tools", "check_python_dependencies.py")],
+        subprocess.check_call([os.environ["PYTHON"],
+                               os.path.join(os.environ["IDF_PATH"], "tools", "check_python_dependencies.py")],
                               env=os.environ)
     except subprocess.CalledProcessError:
         raise SystemExit(1)

+
 def executable_exists(args):
     try:
         subprocess.check_output(args)
         return True
-    except:
+    except Exception:
         return False

+
 def detect_cmake_generator():
     """
     Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found.
@@ -127,6 +135,7 @@ def detect_cmake_generator():
             return generator
     raise FatalError("To use idf.py, either the 'ninja' or 'GNU make' build tool must be available in the PATH")

+
 def _ensure_build_directory(args, always_run_cmake=False):
     """Check the build directory exists and that cmake has been run there.

@@ -158,15 +167,15 @@ def _ensure_build_directory(args, always_run_cmake=False):
         try:
             cmake_args = ["cmake", "-G", args.generator, "-DPYTHON_DEPS_CHECKED=1"]
             if not args.no_warnings:
-                cmake_args += [ "--warn-uninitialized" ]
+                cmake_args += ["--warn-uninitialized"]
             if args.no_ccache:
-                cmake_args += [ "-DCCACHE_DISABLE=1" ]
+                cmake_args += ["-DCCACHE_DISABLE=1"]
             if args.define_cache_entry:
                 cmake_args += ["-D" + d for d in args.define_cache_entry]
-            cmake_args += [ project_dir]
+            cmake_args += [project_dir]

             _run_tool("cmake", cmake_args, cwd=args.build_dir)
-        except:
+        except Exception:
             # don't allow partially valid CMakeCache.txt files,
             # to keep the "should I run cmake?" logic simple
             if os.path.exists(cache_path):
@@ -183,13 +192,13 @@ def _ensure_build_directory(args, always_run_cmake=False):
         args.generator = generator # reuse the previously configured generator, if none was given
     if generator != args.generator:
         raise FatalError("Build is configured for generator '%s' not '%s'. Run 'idf.py fullclean' to start again."
-            % (generator, args.generator))
+                         % (generator, args.generator))

     try:
         home_dir = cache["CMAKE_HOME_DIRECTORY"]
         if os.path.normcase(os.path.realpath(home_dir)) != os.path.normcase(os.path.realpath(project_dir)):
             raise FatalError("Build directory '%s' configured for project '%s' not '%s'. Run 'idf.py fullclean' to start again."
-                % (build_dir, os.path.realpath(home_dir), os.path.realpath(project_dir)))
+                             % (build_dir, os.path.realpath(home_dir), os.path.realpath(project_dir)))
     except KeyError:
         pass # if cmake failed part way, CMAKE_HOME_DIRECTORY may not be set yet

@@ -209,9 +218,10 @@ def parse_cmakecache(path):
             # groups are name, type, value
             m = re.match(r"^([^#/:=]+):([^:=]+)=(.+)\n$", line)
             if m:
-                    result[m.group(1)] = m.group(3)
+                result[m.group(1)] = m.group(3)
     return result

+
 def build_target(target_name, args):
     """
     Execute the target build system to build target 'target_name'
@@ -228,11 +238,11 @@ def build_target(target_name, args):
         # will point to files in another project, if these files are perfect duplicates of each other.)
         #
         # It would be nicer to set these from cmake, but there's no cross-platform way to set build-time environment
-        #os.environ["CCACHE_BASEDIR"] = args.build_dir
-        #os.environ["CCACHE_NO_HASHDIR"] = "1"
+        # os.environ["CCACHE_BASEDIR"] = args.build_dir
+        # os.environ["CCACHE_NO_HASHDIR"] = "1"
         pass
     if args.verbose:
-        generator_cmd += [ GENERATOR_VERBOSE[args.generator] ]
+        generator_cmd += [GENERATOR_VERBOSE[args.generator]]

     _run_tool(generator_cmd[0], generator_cmd + [target_name], args.build_dir)

@@ -241,17 +251,18 @@ def _get_esptool_args(args):
     esptool_path = os.path.join(os.environ["IDF_PATH"], "components/esptool_py/esptool/esptool.py")
     if args.port is None:
         args.port = get_default_serial_port()
-    result = [ PYTHON, esptool_path ]
-    result += [ "-p", args.port ]
-    result += [ "-b", str(args.baud) ]
+    result = [PYTHON, esptool_path]
+    result += ["-p", args.port]
+    result += ["-b", str(args.baud)]

     with open(os.path.join(args.build_dir, "flasher_args.json")) as f:
         flasher_args = json.load(f)

     extra_esptool_args = flasher_args["extra_esptool_args"]
-    result += [ "--after", extra_esptool_args["after"] ]
+    result += ["--after", extra_esptool_args["after"]]
     return result

+
 def flash(action, args):
     """
     Run esptool to flash the entire project, from an argfile generated by the build system
@@ -263,14 +274,16 @@ def flash(action, args):
         "flash": "flash_project_args",
     }[action]
     esptool_args = _get_esptool_args(args)
-    esptool_args += [ "write_flash", "@"+flasher_args_path ]
+    esptool_args += ["write_flash", "@" + flasher_args_path]
     _run_tool("esptool.py", esptool_args, args.build_dir)

+
 def erase_flash(action, args):
     esptool_args = _get_esptool_args(args)
-    esptool_args += [ "erase_flash" ]
+    esptool_args += ["erase_flash"]
     _run_tool("esptool.py", esptool_args, args.build_dir)

+
 def monitor(action, args):
     """
     Run idf_monitor.py to watch build output
@@ -285,19 +298,21 @@ def monitor(action, args):

     elf_file = os.path.join(args.build_dir, project_desc["app_elf"])
     if not os.path.exists(elf_file):
-        raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', and the binary on the device must match the one in the build directory exactly. Try 'idf.py flash monitor'." % elf_file)
+        raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', "
+                         "and the binary on the device must match the one in the build directory exactly. "
+                         "Try 'idf.py flash monitor'." % elf_file)
     idf_monitor = os.path.join(os.environ["IDF_PATH"], "tools/idf_monitor.py")
-    monitor_args = [PYTHON, idf_monitor ]
+    monitor_args = [PYTHON, idf_monitor]
     if args.port is not None:
-        monitor_args += [ "-p", args.port ]
-    monitor_args += [ "-b", project_desc["monitor_baud"] ]
-    monitor_args += [ elf_file ]
+        monitor_args += ["-p", args.port]
+    monitor_args += ["-b", project_desc["monitor_baud"]]
+    monitor_args += [elf_file]

-    idf_py = [ PYTHON ] + get_commandline_options() # commands to re-run idf.py
-    monitor_args += [ "-m", " ".join("'%s'" % a for a in idf_py) ]
+    idf_py = [PYTHON] + get_commandline_options() # commands to re-run idf.py
+    monitor_args += ["-m", " ".join("'%s'" % a for a in idf_py)]

     if "MSYSTEM" in os.environ:
-        monitor_args = [ "winpty" ] + monitor_args
+        monitor_args = ["winpty"] + monitor_args
     _run_tool("idf_monitor", monitor_args, args.project_dir)

@@ -307,9 +322,11 @@ def clean(action, args):
         return
     build_target("clean", args)

+
 def reconfigure(action, args):
     _ensure_build_directory(args, True)

+
 def fullclean(action, args):
     build_dir = args.build_dir
     if not os.path.isdir(build_dir):
@@ -320,8 +337,9 @@ def fullclean(action, args):
         return

     if not os.path.exists(os.path.join(build_dir, "CMakeCache.txt")):
-        raise FatalError("Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically delete files in this directory. Delete the directory manually to 'clean' it." % build_dir)
-    red_flags = [ "CMakeLists.txt", ".git", ".svn" ]
+        raise FatalError("Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically "
+                         "delete files in this directory. Delete the directory manually to 'clean' it." % build_dir)
+    red_flags = ["CMakeLists.txt", ".git", ".svn"]
     for red in red_flags:
         red = os.path.join(build_dir, red)
         if os.path.exists(red):
@@ -334,6 +352,7 @@ def fullclean(action, args):
         else:
             os.remove(f)

+
 def print_closing_message(args):
     # print a closing message of some kind
     #
@@ -362,7 +381,7 @@ def print_closing_message(args):
         else: # flashing the whole project
             cmd = " ".join(flasher_args["write_flash_args"]) + " "
             flash_items = sorted(((o,f) for (o,f) in flasher_args["flash_files"].items() if len(o) > 0),
-                key = lambda x: int(x[0], 0))
+                                 key=lambda x: int(x[0], 0))
             for o,f in flash_items:
                 cmd += o + " " + flasher_path(f) + " "

@@ -384,33 +403,35 @@ def print_closing_message(args):
     if "bootloader" in args.actions:
         print_flashing_message("Bootloader", "bootloader")

+
 ACTIONS = {
     # action name : ( function (or alias), dependencies, order-only dependencies )
-    "all" : ( build_target, [], [ "reconfigure", "menuconfig", "clean", "fullclean" ] ),
-    "build": ( "all", [], [] ), # build is same as 'all' target
-    "clean": ( clean, [], [ "fullclean" ] ),
-    "fullclean": ( fullclean, [], [] ),
-    "reconfigure": ( reconfigure, [], [ "menuconfig" ] ),
-    "menuconfig": ( build_target, [], [] ),
-    "defconfig": ( build_target, [], [] ),
-    "confserver": ( build_target, [], [] ),
-    "size": ( build_target, [ "app" ], [] ),
-    "size-components": ( build_target, [ "app" ], [] ),
-    "size-files": ( build_target, [ "app" ], [] ),
-    "bootloader": ( build_target, [], [] ),
-    "bootloader-clean": ( build_target, [], [] ),
-    "bootloader-flash": ( flash, [ "bootloader" ], [ "erase_flash"] ),
-    "app": ( build_target, [], [ "clean", "fullclean", "reconfigure" ] ),
-    "app-flash": ( flash, [ "app" ], [ "erase_flash"]),
-    "partition_table": ( build_target, [], [ "reconfigure" ] ),
-    "partition_table-flash": ( flash, [ "partition_table" ], [ "erase_flash" ]),
-    "flash": ( flash, [ "all" ], [ "erase_flash" ] ),
-    "erase_flash": ( erase_flash, [], []),
-    "monitor": ( monitor, [], [ "flash", "partition_table-flash", "bootloader-flash", "app-flash" ]),
-    "erase_otadata": ( build_target, [], []),
-    "read_otadata": ( build_target, [], []),
+    "all": (build_target, [], ["reconfigure", "menuconfig", "clean", "fullclean"]),
+    "build": ("all", [], []), # build is same as 'all' target
+    "clean": (clean, [], ["fullclean"]),
+    "fullclean": (fullclean, [], []),
+    "reconfigure": (reconfigure, [], ["menuconfig"]),
+    "menuconfig": (build_target, [], []),
+    "defconfig": (build_target, [], []),
+    "confserver": (build_target, [], []),
+    "size": (build_target, ["app"], []),
+    "size-components": (build_target, ["app"], []),
+    "size-files": (build_target, ["app"], []),
+    "bootloader": (build_target, [], []),
+    "bootloader-clean": (build_target, [], []),
+    "bootloader-flash": (flash, ["bootloader"], ["erase_flash"]),
+    "app": (build_target, [], ["clean", "fullclean", "reconfigure"]),
+    "app-flash": (flash, ["app"], ["erase_flash"]),
+    "partition_table": (build_target, [], ["reconfigure"]),
+    "partition_table-flash": (flash, ["partition_table"], ["erase_flash"]),
+    "flash": (flash, ["all"], ["erase_flash"]),
+    "erase_flash": (erase_flash, [], []),
+    "monitor": (monitor, [], ["flash", "partition_table-flash", "bootloader-flash", "app-flash"]),
+    "erase_otadata": (build_target, [], []),
+    "read_otadata": (build_target, [], []),
 }

+
 def get_commandline_options():
     """ Return all the command line options up to but not including the action """
     result = []
@@ -421,6 +442,7 @@ def get_commandline_options():
             result.append(a)
     return result

+
 def get_default_serial_port():
     """ Return a default serial port. esptool can do this (smarter), but it can create
     inconsistencies where esptool.py uses one port and idf_monitor uses another.
@@ -431,23 +453,25 @@ def get_default_serial_port():
     import serial.tools.list_ports

     ports = list(reversed(sorted(
-        p.device for p in serial.tools.list_ports.comports() )))
+        p.device for p in serial.tools.list_ports.comports())))
     try:
-        print ("Choosing default port %s (use '-p PORT' option to set a specific serial port)" % ports[0])
+        print("Choosing default port %s (use '-p PORT' option to set a specific serial port)" % ports[0])
         return ports[0]
     except IndexError:
         raise RuntimeError("No serial ports found. Connect a device, or use '-p PORT' option to set a specific port.")

+
 # Import the actions, arguments extension file
 if os.path.exists(os.path.join(os.getcwd(), "idf_ext.py")):
     sys.path.append(os.getcwd())
     try:
         from idf_ext import add_action_extensions, add_argument_extensions
-    except ImportError as e:
+    except ImportError:
         print("Error importing extension file idf_ext.py. Skipping.")
         print("Please make sure that it contains implementations (even if they're empty implementations) of")
         print("add_action_extensions and add_argument_extensions.")

+
 def main():
     if sys.version_info[0] != 2 or sys.version_info[1] != 7:
         print("Note: You are using Python %d.%d.%d. Python 3 support is new, please report any problems "
@@ -457,13 +481,13 @@ def main():
     # Add actions extensions
     try:
         add_action_extensions({
-                "build_target": build_target,
-                "reconfigure" : reconfigure,
-                "flash" : flash,
-                "monitor" : monitor,
-                "clean" : clean,
-                "fullclean" : fullclean
-            }, ACTIONS)
+            "build_target": build_target,
+            "reconfigure": reconfigure,
+            "flash": flash,
+            "monitor": monitor,
+            "clean": clean,
+            "fullclean": fullclean
+        }, ACTIONS)
     except NameError:
         pass

@@ -478,7 +502,8 @@ def main():
     parser.add_argument('-n', '--no-warnings', help="Disable Cmake warnings", action="store_true")
     parser.add_argument('-v', '--verbose', help="Verbose build output", action="store_true")
     parser.add_argument('-D', '--define-cache-entry', help="Create a cmake cache entry", nargs='+')
-    parser.add_argument('--no-ccache', help="Disable ccache. Otherwise, if ccache is available on the PATH then it will be used for faster builds.", action="store_true")
+    parser.add_argument('--no-ccache', help="Disable ccache. Otherwise, if ccache is available on the PATH then it will be used for faster builds.",
+                        action="store_true")
     parser.add_argument('actions', help="Actions (build targets or other operations)", nargs='+',
                         choices=ACTIONS.keys())

@@ -494,21 +519,23 @@ def main():

     # Advanced parameter checks
     if args.build_dir is not None and os.path.realpath(args.project_dir) == os.path.realpath(args.build_dir):
-        raise FatalError("Setting the build directory to the project directory is not supported. Suggest dropping --build-dir option, the default is a 'build' subdirectory inside the project directory.")
+        raise FatalError("Setting the build directory to the project directory is not supported. Suggest dropping "
+                         "--build-dir option, the default is a 'build' subdirectory inside the project directory.")
     if args.build_dir is None:
         args.build_dir = os.path.join(args.project_dir, "build")
     args.build_dir = os.path.realpath(args.build_dir)

     completed_actions = set()

     def execute_action(action, remaining_actions):
-        ( function, dependencies, order_dependencies ) = ACTIONS[action]
+        (function, dependencies, order_dependencies) = ACTIONS[action]
         # very simple dependency management, build a set of completed actions and make sure
         # all dependencies are in it
        for dep in dependencies:
-            if not dep in completed_actions:
+            if dep not in completed_actions:
                 execute_action(dep, remaining_actions)
         for dep in order_dependencies:
-            if dep in remaining_actions and not dep in completed_actions:
+            if dep in remaining_actions and dep not in completed_actions:
                 execute_action(dep, remaining_actions)

         if action in completed_actions:
@@ -527,11 +554,10 @@ def main():

     print_closing_message(args)

-

 if __name__ == "__main__":
     try:
         main()
     except FatalError as e:
         print(e)
         sys.exit(2)
@@ -22,23 +22,22 @@
#
from __future__ import print_function
from __future__ import unicode_literals
from builtins import dict
import argparse, sys, subprocess, re
import argparse
import re
import os.path
import pprint
import operator

DEFAULT_TOOLCHAIN_PREFIX = "xtensa-esp32-elf-"

CHIP_SIZES = {
"esp32" : {
"total_iram" : 0x20000,
"total_irom" : 0x330000,
"total_drom" : 0x800000,
"esp32": {
"total_iram": 0x20000,
"total_irom": 0x330000,
"total_drom": 0x800000,
# total dram is determined from objdump output
}
}


def scan_to_header(f, header_line):
""" Scan forward in a file until you reach 'header_line', then return """
for line in f:
@@ -46,11 +45,13 @@ def scan_to_header(f, header_line):
return
raise RuntimeError("Didn't find line '%s' in file" % header_line)


def load_map_data(map_file):
memory_config = load_memory_config(map_file)
sections = load_sections(map_file)
return memory_config, sections


def load_memory_config(map_file):
""" Memory Configuration section is the total size of each output section """
result = {}
@@ -64,19 +65,21 @@ def load_memory_config(map_file):
else:
return result # we're at the end of the Memory Configuration
section = {
"name" : m.group("name"),
"origin" : int(m.group("origin"), 16),
"length" : int(m.group("length"), 16),
"name": m.group("name"),
"origin": int(m.group("origin"), 16),
"length": int(m.group("length"), 16),
}
if section["name"] != "*default*":
result[section["name"]] = section
raise RuntimeError("End of file while scanning memory configuration?")

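The name/origin/length groups above match lines from the "Memory Configuration" table that GNU ld writes into MAP files; a small sketch of such a parse (the sample line and regex are illustrative, the tool's actual pattern sits outside this hunk):

    import re

    RE_MEMORY_SECTION = r"(?P<name>\S+) +0x(?P<origin>[\da-f]+) +0x(?P<length>[\da-f]+)"
    line = "iram0_0_seg      0x0000000040080000 0x0000000000020000 xr"
    m = re.match(RE_MEMORY_SECTION, line)
    print(m.group("name"), int(m.group("origin"), 16), int(m.group("length"), 16))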

def load_sections(map_file):
""" Load section size information from the MAP file.

Returns a dict of 'sections', where each key is a section name and the value
is a dict with details about this section, including a "sources" key which holds a list of source file line information for each symbol linked into the section.
is a dict with details about this section, including a "sources" key which holds a list of source file line
information for each symbol linked into the section.
"""
scan_to_header(map_file, "Linker script and memory map")
sections = {}
@@ -88,10 +91,10 @@ def load_sections(map_file):
m = re.match(RE_SECTION_HEADER, line)
if m is not None: # start of a new section
section = {
"name" : m.group("name"),
"address" : int(m.group("address"), 16),
"size" : int(m.group("size"), 16),
"sources" : [],
"name": m.group("name"),
"address": int(m.group("address"), 16),
"size": int(m.group("size"), 16),
"sources": [],
}
sections[section["name"]] = section
continue
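Assembled from the keys visible in this function, the structure load_sections() returns looks roughly like this (all values hypothetical):

    sections = {
        ".iram0.text": {
            "name": ".iram0.text",
            "address": 0x40080000,
            "size": 0x1234,
            "sources": [
                {"size": 0x10, "address": 0x40080000, "archive": "libfreertos.a",
                 "object_file": "tasks.o", "sym_name": "vTaskDelay",
                 "file": "libfreertos.a:tasks.o"},
            ],
        },
    }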
@@ -113,14 +116,14 @@ def load_sections(map_file):
archive = "(exe)"

source = {
"size" : int(m.group("size"), 16),
"address" : int(m.group("address"), 16),
"archive" : os.path.basename(archive),
"object_file" : os.path.basename(m.group("object_file")),
"sym_name" : sym_name,
"size": int(m.group("size"), 16),
"address": int(m.group("address"), 16),
"archive": os.path.basename(archive),
"object_file": os.path.basename(m.group("object_file")),
"sym_name": sym_name,
}
source["file"] = "%s:%s" % (source["archive"], source["object_file"])
section["sources"] += [ source ]
section["sources"] += [source]

# In some cases the section name appears on the previous line, back it up in here
RE_SYMBOL_ONLY_LINE = r"^ (?P<sym_name>\S*)$"
@@ -130,6 +133,7 @@ def load_sections(map_file):

return sections


def sizes_by_key(sections, key):
""" Takes a dict of sections (from load_sections) and returns
a dict keyed by 'key' with aggregate output size information.
@@ -147,6 +151,7 @@ def sizes_by_key(sections, key):
archive[section["name"]] += s["size"]
return result

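Only the inner accumulation line of sizes_by_key() is visible in the hunk above; a self-contained sketch of the aggregation it describes, with a hypothetical input (the surrounding loop structure is reconstructed, not copied from the file):

    def sizes_by_key(sections, key):
        result = {}
        for section in sections.values():
            for s in section["sources"]:
                # one bucket per value of 'key' (e.g. per archive), one counter per section name
                by_section = result.setdefault(s[key], {})
                by_section[section["name"]] = by_section.get(section["name"], 0) + s["size"]
        return result

    demo = {".iram0.text": {"name": ".iram0.text", "sources": [
        {"archive": "libfreertos.a", "size": 0x10},
        {"archive": "libfreertos.a", "size": 0x20}]}}
    print(sizes_by_key(demo, "archive"))  # {'libfreertos.a': {'.iram0.text': 48}}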

def main():
parser = argparse.ArgumentParser("idf_size - a tool to print IDF elf file sizes")

@@ -183,6 +188,7 @@ def main():
print("Symbols within the archive:", args.archive_details, "(Not all symbols may be reported)")
print_archive_symbols(sections, args.archive_details)


def print_summary(memory_config, sections):
def get_size(section):
try:
@@ -196,7 +202,7 @@ def print_summary(memory_config, sections):
used_data = get_size(".dram0.data")
used_bss = get_size(".dram0.bss")
used_dram = used_data + used_bss
used_iram = sum( get_size(s) for s in sections if s.startswith(".iram0") )
used_iram = sum(get_size(s) for s in sections if s.startswith(".iram0"))
flash_code = get_size(".flash.text")
flash_rodata = get_size(".flash.rodata")
total_size = used_data + used_iram + flash_code + flash_rodata
@@ -214,10 +220,10 @@ def print_summary(memory_config, sections):
print(" Flash rodata: %7d bytes" % flash_rodata)
print("Total image size:~%7d bytes (.bin may be padded larger)" % (total_size))


def print_detailed_sizes(sections, key, header):
sizes = sizes_by_key(sections, key)

sub_heading = None
headings = (header,
"DRAM .data",
"& .bss",
@@ -240,6 +246,7 @@ def print_detailed_sizes(sections, key, header):
def return_total_size(elem):
val = elem[1]
return val["total"]

def return_header(elem):
return elem[0]
s = sorted(list(result.items()), key=return_header)
@@ -255,6 +262,7 @@ def print_detailed_sizes(sections, key, header):
v["flash_rodata"],
v["total"]))


def print_archive_symbols(sections, archive):
interested_sections = [".dram0.data", ".dram0.bss", ".iram0.text", ".iram0.vectors", ".flash.text", ".flash.rodata"]
result = {}
@@ -267,7 +275,7 @@ def print_archive_symbols(sections, archive):
for s in section["sources"]:
if archive != s["archive"]:
continue
s["sym_name"] = re.sub("(.text.|.literal.|.data.|.bss.|.rodata.)", "", s["sym_name"]);
s["sym_name"] = re.sub("(.text.|.literal.|.data.|.bss.|.rodata.)", "", s["sym_name"])
result[section_name][s["sym_name"]] = result[section_name].get(s["sym_name"], 0) + s["size"]
for t in interested_sections:
print("\nSymbols from section:", t)
@@ -275,10 +283,10 @@ def print_archive_symbols(sections, archive):
s = sorted(list(result[t].items()), key=lambda k_v: k_v[0])
# do a secondary sort in order to have consistent order (for diff-ing the output)
for key,val in sorted(s, key=lambda k_v: k_v[1], reverse=True):
print(("%s(%d)"% (key.replace(t + ".", ""), val)), end=' ')
print(("%s(%d)" % (key.replace(t + ".", ""), val)), end=' ')
section_total += val
print("\nSection total:",section_total)


if __name__ == "__main__":
main()

@@ -30,13 +30,13 @@ import json

import gen_kconfig_doc
import kconfiglib
import pprint

__version__ = "0.1"

if not "IDF_CMAKE" in os.environ:
if "IDF_CMAKE" not in os.environ:
os.environ["IDF_CMAKE"] = ""


def main():
parser = argparse.ArgumentParser(description='confgen.py v%s - Config Generation Tool' % __version__, prog=os.path.basename(sys.argv[0]))

@@ -46,7 +46,8 @@ def main():
default=None)

parser.add_argument('--defaults',
help='Optional project defaults file, used if --config file doesn\'t exist. Multiple files can be specified using multiple --defaults arguments.',
help='Optional project defaults file, used if --config file doesn\'t exist. '
'Multiple files can be specified using multiple --defaults arguments.',
nargs='?',
default=[],
action='append')
@@ -70,15 +71,15 @@ def main():
args = parser.parse_args()

for fmt, filename in args.output:
if not fmt in OUTPUT_FORMATS.keys():
if fmt not in OUTPUT_FORMATS.keys():
print("Format '%s' not recognised. Known formats: %s" % (fmt, OUTPUT_FORMATS.keys()))
sys.exit(1)

try:
args.env = [ (name,value) for (name,value) in ( e.split("=",1) for e in args.env) ]
args.env = [(name,value) for (name,value) in (e.split("=",1) for e in args.env)]
except ValueError:
print("--env arguments must each contain =. To unset an environment variable, use 'ENV='")
sys.exit(1)
print("--env arguments must each contain =. To unset an environment variable, use 'ENV='")
sys.exit(1)

for name, value in args.env:
os.environ[name] = value
@@ -124,6 +125,7 @@ def write_config(config, filename):
"""
config.write_config(filename, header=CONFIG_HEADING)


def write_header(config, filename):
CONFIG_HEADING = """/*
* Automatically generated file. DO NOT EDIT.
@@ -133,6 +135,7 @@ def write_header(config, filename):
"""
config.write_autoconf(filename, header=CONFIG_HEADING)


def write_cmake(config, filename):
with open(filename, "w") as f:
write = f.write
@@ -143,6 +146,7 @@ def write_cmake(config, filename):
# Espressif IoT Development Framework (ESP-IDF) Configuration cmake include file
#
""")

def write_node(node):
sym = node.item
if not isinstance(sym, kconfiglib.Symbol):
@@ -158,8 +162,10 @@ def write_cmake(config, filename):
prefix, sym.name, val))
config.walk_menu(write_node)


def get_json_values(config):
config_dict = {}

def write_node(node):
sym = node.item
if not isinstance(sym, kconfiglib.Symbol):
@@ -167,7 +173,7 @@ def get_json_values(config):

val = sym.str_value # this calculates _write_to_conf, due to kconfiglib magic
if sym._write_to_conf:
if sym.type in [ kconfiglib.BOOL, kconfiglib.TRISTATE ]:
if sym.type in [kconfiglib.BOOL, kconfiglib.TRISTATE]:
val = (val != "n")
elif sym.type == kconfiglib.HEX:
val = int(val, 16)
@@ -177,11 +183,13 @@ def get_json_values(config):
config.walk_menu(write_node)
return config_dict


def write_json(config, filename):
config_dict = get_json_values(config)
with open(filename, "w") as f:
json.dump(config_dict, f, indent=4, sort_keys=True)


def write_json_menus(config, filename):
result = [] # root level items
node_lookup = {} # lookup from MenuNode to an item in result
@@ -190,7 +198,7 @@ def write_json_menus(config, filename):
try:
json_parent = node_lookup[node.parent]["children"]
except KeyError:
assert not node.parent in node_lookup # if fails, we have a parent node with no "children" entity (ie a bug)
assert node.parent not in node_lookup # if fails, we have a parent node with no "children" entity (ie a bug)
json_parent = result # root level node

# node.kconfig.y means node has no dependency,
@@ -206,11 +214,11 @@ def write_json_menus(config, filename):

new_json = None
if node.item == kconfiglib.MENU or is_menuconfig:
new_json = { "type" : "menu",
"title" : node.prompt[0],
"depends_on": depends,
"children": []
}
new_json = {"type": "menu",
"title": node.prompt[0],
"depends_on": depends,
"children": []
}
if is_menuconfig:
sym = node.item
new_json["name"] = sym.name
@@ -236,12 +244,12 @@ def write_json_menus(config, filename):
greatest_range = [int(min_range.str_value), int(max_range.str_value)]

new_json = {
"type" : kconfiglib.TYPE_TO_STR[sym.type],
"name" : sym.name,
"type": kconfiglib.TYPE_TO_STR[sym.type],
"name": sym.name,
"title": node.prompt[0] if node.prompt else None,
"depends_on" : depends,
"depends_on": depends,
"help": node.help,
"range" : greatest_range,
"range": greatest_range,
"children": [],
}
elif isinstance(node.item, kconfiglib.Choice):
@@ -250,7 +258,7 @@ def write_json_menus(config, filename):
"type": "choice",
"title": node.prompt[0],
"name": choice.name,
"depends_on" : depends,
"depends_on": depends,
"help": node.help,
"children": []
}
@@ -263,6 +271,7 @@ def write_json_menus(config, filename):
with open(filename, "w") as f:
f.write(json.dumps(result, sort_keys=True, indent=4))


def update_if_changed(source, destination):
with open(source, "r") as f:
source_contents = f.read()
@@ -276,14 +285,14 @@ def update_if_changed(source, destination):
f.write(source_contents)


OUTPUT_FORMATS = {
"config" : write_config,
"header" : write_header,
"cmake" : write_cmake,
"docs" : gen_kconfig_doc.write_docs,
"json" : write_json,
"json_menus" : write_json_menus,
}
OUTPUT_FORMATS = {"config": write_config,
"header": write_header,
"cmake": write_cmake,
"docs": gen_kconfig_doc.write_docs,
"json": write_json,
"json_menus": write_json_menus,
}

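Each OUTPUT_FORMATS key names a writer selected on the command line; judging from the `for fmt, filename in args.output` loop earlier in this file, an invocation would look something like the following (paths hypothetical, any other required flags omitted):

    python confgen.py --config sdkconfig --output header build/sdkconfig.h --output json build/config.json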

class FatalError(RuntimeError):
"""
@@ -291,6 +300,7 @@ class FatalError(RuntimeError):
"""
pass


if __name__ == '__main__':
try:
main()
@@ -12,12 +12,13 @@ import sys
import confgen
from confgen import FatalError, __version__


def main():
parser = argparse.ArgumentParser(description='confserver.py v%s - Config Generation Tool' % __version__, prog=os.path.basename(sys.argv[0]))

parser.add_argument('--config',
help='Project configuration settings',
required=True)
help='Project configuration settings',
required=True)

parser.add_argument('--kconfig',
help='KConfig file with config item definitions',
@@ -29,10 +30,10 @@ def main():
args = parser.parse_args()

try:
args.env = [ (name,value) for (name,value) in ( e.split("=",1) for e in args.env) ]
args.env = [(name,value) for (name,value) in (e.split("=",1) for e in args.env)]
except ValueError:
print("--env arguments must each contain =. To unset an environment variable, use 'ENV='")
sys.exit(1)
print("--env arguments must each contain =. To unset an environment variable, use 'ENV='")
sys.exit(1)

for name, value in args.env:
os.environ[name] = value
@@ -47,7 +48,7 @@ def run_server(kconfig, sdkconfig):

config_dict = confgen.get_json_values(config)
ranges_dict = get_ranges(config)
json.dump({"version": 1, "values" : config_dict, "ranges" : ranges_dict}, sys.stdout)
json.dump({"version": 1, "values": config_dict, "ranges": ranges_dict}, sys.stdout)
print("\n")

while True:
@@ -81,7 +82,7 @@ def run_server(kconfig, sdkconfig):

values_diff = diff(before, after)
ranges_diff = diff(before_ranges, after_ranges)
response = {"version" : 1, "values" : values_diff, "ranges" : ranges_diff}
response = {"version": 1, "values": values_diff, "ranges": ranges_diff}
if error:
for e in error:
print("Error: %s" % e, file=sys.stderr)
@@ -91,10 +92,10 @@ def run_server(kconfig, sdkconfig):


def handle_request(config, req):
if not "version" in req:
return [ "All requests must have a 'version'" ]
if "version" not in req:
return ["All requests must have a 'version'"]
if int(req["version"]) != 1:
return [ "Only version 1 requests supported" ]
return ["Only version 1 requests supported"]

error = []

@@ -103,7 +104,7 @@ def handle_request(config, req):
try:
config.load_config(req["load"])
except Exception as e:
error += [ "Failed to load from %s: %s" % (req["load"], e) ]
error += ["Failed to load from %s: %s" % (req["load"], e)]

if "set" in req:
handle_set(config, error, req["set"])
@@ -113,16 +114,17 @@ def handle_request(config, req):
print("Saving config to %s..." % req["save"], file=sys.stderr)
confgen.write_config(config, req["save"])
except Exception as e:
error += [ "Failed to save to %s: %s" % (req["save"], e) ]
error += ["Failed to save to %s: %s" % (req["save"], e)]

return error

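Putting the request keys handled above together ('version', 'load', 'set', 'save'), one request/response exchange over stdin/stdout looks roughly like this (symbol name and values hypothetical):

    > {"version": 1, "set": {"EXAMPLE_SYMBOL": true}, "save": "sdkconfig"}
    < {"version": 1, "values": {"EXAMPLE_SYMBOL": true}, "ranges": {}}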

def handle_set(config, error, to_set):
missing = [ k for k in to_set if not k in config.syms ]
missing = [k for k in to_set if k not in config.syms]
if missing:
error.append("The following config symbol(s) were not found: %s" % (", ".join(missing)))
# replace name keys with the full config symbol for each key:
to_set = dict((config.syms[k],v) for (k,v) in to_set.items() if not k in missing)
to_set = dict((config.syms[k],v) for (k,v) in to_set.items() if k not in missing)

# Work through the list of values to set, noting that
# some may not be immediately applicable (maybe they depend
@@ -130,14 +132,14 @@ def handle_set(config, error, to_set):
# knowing if any value is unsettable until then end

while len(to_set):
set_pass = [ (k,v) for (k,v) in to_set.items() if k.visibility ]
set_pass = [(k,v) for (k,v) in to_set.items() if k.visibility]
if not set_pass:
break # no visible keys left
for (sym,val) in set_pass:
if sym.type in (kconfiglib.BOOL, kconfiglib.TRISTATE):
if val == True:
if val is True:
sym.set_value(2)
elif val == False:
elif val is False:
sym.set_value(0)
else:
error.append("Boolean symbol %s only accepts true/false values" % sym.name)
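The 2 and 0 passed to set_value() above are kconfiglib's tristate encoding (0 = n, 1 = m, 2 = y); the boolean-to-tristate conversion being reformatted here boils down to this sketch:

    def bool_to_tristate(val):
        # kconfiglib tristate values: 0 means "n", 1 means "m", 2 means "y"
        if val is True:
            return 2
        if val is False:
            return 0
        raise ValueError("expected a JSON true/false value")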
@@ -150,7 +152,6 @@ def handle_set(config, error, to_set):
error.append("The following config symbol(s) were not visible so were not updated: %s" % (", ".join(s.name for s in to_set)))



def diff(before, after):
"""
Return a dictionary with the difference between 'before' and 'after' (either with the new value if changed,
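The body of diff() is outside the visible hunk; based on that docstring, a dictionary diff of this shape can be sketched as follows (None standing in for removed keys is an assumption):

    def diff(before, after):
        # changed or newly-added keys map to their new value; removed keys map to None
        result = dict((k, v) for (k, v) in after.items() if before.get(k) != v)
        result.update((k, None) for k in before if k not in after)
        return result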
@@ -164,6 +165,7 @@ def diff(before, after):

def get_ranges(config):
ranges_dict = {}

def handle_node(node):
sym = node.item
if not isinstance(sym, kconfiglib.Symbol):
@@ -182,4 +184,3 @@ if __name__ == '__main__':
except FatalError as e:
print("A fatal error occurred: %s" % e, file=sys.stderr)
sys.exit(2)

@@ -21,7 +21,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import re
import kconfiglib

@@ -33,7 +32,8 @@ HEADING_SYMBOLS = '#*=-^"+'

# Keep the heading level in sync with api-reference/kconfig.rst
INITIAL_HEADING_LEVEL = 3
MAX_HEADING_LEVEL = len(HEADING_SYMBOLS)-1
MAX_HEADING_LEVEL = len(HEADING_SYMBOLS) - 1


def write_docs(config, filename):
""" Note: writing .rst documentation ignores the current value
@@ -42,22 +42,25 @@ def write_docs(config, filename):
with open(filename, "w") as f:
config.walk_menu(lambda node: write_menu_item(f, node))


def node_is_menu(node):
try:
return node.item == kconfiglib.MENU or node.is_menuconfig
except AttributeError:
return False # not all MenuNodes have is_menuconfig for some reason


def get_breadcrumbs(node):
# this is a bit wasteful as it recalculates each time, but still...
result = []
node = node.parent
while node.parent:
if node.prompt:
result = [ ":ref:`%s`" % get_link_anchor(node) ] + result
result = [":ref:`%s`" % get_link_anchor(node)] + result
node = node.parent
return " > ".join(result)


def get_link_anchor(node):
try:
return "CONFIG_%s" % node.item.name
@@ -68,11 +71,12 @@ def get_link_anchor(node):
result = []
while node.parent:
if node.prompt:
result = [ re.sub(r"[^a-zA-z0-9]+", "-", node.prompt[0]) ] + result
result = [re.sub(r"[^a-zA-z0-9]+", "-", node.prompt[0])] + result
node = node.parent
result = "-".join(result).lower()
return result


def get_heading_level(node):
result = INITIAL_HEADING_LEVEL
node = node.parent
@@ -83,6 +87,7 @@ def get_heading_level(node):
node = node.parent
return result


def format_rest_text(text, indent):
# Format an indented text block for use with ReST
text = indent + text.replace('\n', '\n' + indent)
@@ -92,6 +97,7 @@ def format_rest_text(text, indent):
text += '\n'
return text


def node_should_write(node):
if not node.prompt:
return False # Don't do anything for invisible menu items
@@ -101,6 +107,7 @@ def node_should_write(node):

return True


def write_menu_item(f, node):
if not node_should_write(node):
return
@@ -112,7 +119,7 @@ def write_menu_item(f, node):

is_menu = node_is_menu(node)

## Heading
# Heading
if name:
title = 'CONFIG_%s' % name
else:
@@ -167,6 +174,6 @@ def write_menu_item(f, node):
child = child.next
f.write('\n')


if __name__ == '__main__':
print("Run this via 'confgen.py --output doc FILENAME'")

@@ -1,25 +1,16 @@
#!/usr/bin/env python
from __future__ import print_function
import os
import sys
import threading
import time
import json
import argparse
import shutil
import tempfile

import pexpect

sys.path.append("..")
import confserver

def create_server_thread(*args):
t = threading.Thread()

def parse_testcases():
with open("testcases.txt", "r") as f:
cases = [ l for l in f.readlines() if len(l.strip()) > 0 ]
cases = [l for l in f.readlines() if len(l.strip()) > 0]
# Each 3 lines in the file should be formatted as:
# * Description of the test change
# * JSON "changes" to send to the server
@@ -29,19 +20,20 @@ def parse_testcases():

for i in range(0, len(cases), 3):
desc = cases[i]
send = cases[i+1]
expect = cases[i+2]
send = cases[i + 1]
expect = cases[i + 2]
if not desc.startswith("* "):
raise RuntimeError("Unexpected description at line %d: '%s'" % (i+1, desc))
raise RuntimeError("Unexpected description at line %d: '%s'" % (i + 1, desc))
if not send.startswith("> "):
raise RuntimeError("Unexpected send at line %d: '%s'" % (i+2, send))
raise RuntimeError("Unexpected send at line %d: '%s'" % (i + 2, send))
if not expect.startswith("< "):
raise RuntimeError("Unexpected expect at line %d: '%s'" % (i+3, expect))
raise RuntimeError("Unexpected expect at line %d: '%s'" % (i + 3, expect))
desc = desc[2:]
send = json.loads(send[2:])
expect = json.loads(expect[2:])
yield (desc, send, expect)


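Given the "* ", "> " and "< " prefixes checked above, a testcases.txt entry is a three-line block along these lines (symbol and values hypothetical):

    * Set a boolean symbol
    > {"EXAMPLE_BOOL": true}
    < {"EXAMPLE_BOOL": true}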
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--logfile', type=argparse.FileType('w'), help='Optional session log of the interactions with confserver.py')
@@ -72,7 +64,7 @@ def main():

for (desc, send, expected) in cases:
print(desc)
req = { "version" : "1", "set" : send }
req = {"version": "1", "set": send}
req = json.dumps(req)
print("Sending: %s" % (req))
p.send("%s\n" % req)
@@ -84,13 +76,13 @@ def main():
read_vals = readback[expect_key]
exp_vals = expected[expect_key]
if read_vals != exp_vals:
expect_diff = dict((k,v) for (k,v) in exp_vals.items() if not k in read_vals or v != read_vals[k])
expect_diff = dict((k,v) for (k,v) in exp_vals.items() if k not in read_vals or v != read_vals[k])
raise RuntimeError("Test failed! Was expecting %s: %s" % (expect_key, json.dumps(expect_diff)))
print("OK")

print("Testing load/save...")
before = os.stat(temp_sdkconfig_path).st_mtime
p.send("%s\n" % json.dumps({ "version" : "1", "save" : temp_sdkconfig_path }))
p.send("%s\n" % json.dumps({"version": "1", "save": temp_sdkconfig_path}))
save_result = expect_json()
print("Save result: %s" % (json.dumps(save_result)))
assert len(save_result["values"]) == 0
@@ -98,7 +90,7 @@ def main():
after = os.stat(temp_sdkconfig_path).st_mtime
assert after > before

p.send("%s\n" % json.dumps({ "version" : "1", "load" : temp_sdkconfig_path }))
p.send("%s\n" % json.dumps({"version": "1", "load": temp_sdkconfig_path}))
load_result = expect_json()
print("Load result: %s" % (json.dumps(load_result)))
assert len(load_result["values"]) > 0 # loading same file should return all config items
@@ -111,6 +103,6 @@ def main():
except OSError:
pass


if __name__ == "__main__":
main()

@@ -15,19 +15,29 @@
#

import re
import collections
import sys
import os

from sdkconfig import SDKConfig
from pyparsing import *
from pyparsing import OneOrMore
from pyparsing import restOfLine
from pyparsing import alphanums
from pyparsing import Word
from pyparsing import alphas
from pyparsing import ParseBaseException
from pyparsing import Suppress
from pyparsing import Group
from pyparsing import Literal
from pyparsing import ZeroOrMore
from pyparsing import Optional
from pyparsing import originalTextFor
from common import LdGenFailure

"""
Fragment file internal representation. Parses and stores instances of the fragment definitions
contained within the file.
"""

class FragmentFileModel():
"""
Fragment file internal representation. Parses and stores instances of the fragment definitions
contained within the file.
"""

def __init__(self, fragment_file):
path = os.path.realpath(fragment_file.name)
@@ -54,13 +64,14 @@ class FragmentFileModel():
for fragment in self.fragments:
fragment.path = path

"""
Encapsulates a fragment as defined in the generator syntax. Sets values common to all fragment and performs processing
such as checking the validity of the fragment name and getting the entry values.
"""
class Fragment:

IDENTIFIER = Word(alphas+"_", alphanums+"_")
class Fragment:
"""
Encapsulates a fragment as defined in the generator syntax. Sets values common to all fragment and performs processing
such as checking the validity of the fragment name and getting the entry values.
"""

IDENTIFIER = Word(alphas + "_", alphanums + "_")
ENTITY = Word(alphanums + ".-_$")

def __init__(self, name, entries):
@@ -68,6 +79,7 @@ class Fragment:
self.name = name
self.entries = entries


class Sections(Fragment):

def __init__(self, name, entries):
@@ -113,10 +125,11 @@ class Sections(Fragment):

return sections

"""
Encapsulates a scheme fragment, which defines what target input sections are placed under.
"""

class Scheme(Fragment):
"""
Encapsulates a scheme fragment, which defines what target input sections are placed under.
"""

def __init__(self, name, items):
Fragment.__init__(self, name, items)
@@ -151,10 +164,11 @@ class Scheme(Fragment):

return scheme

"""
Encapsulates a mapping fragment, which defines what targets the input sections of mappable entties are placed under.
"""

class Mapping(Fragment):
"""
Encapsulates a mapping fragment, which defines what targets the input sections of mappable entties are placed under.
"""

# Name of the default condition entry
DEFAULT_CONDITION = "default"
@@ -192,10 +206,10 @@ class Mapping(Fragment):
for normal_group in self.entries[0]:
# Get the original string of the condition
condition = next(iter(normal_group.condition.asList())).strip()
mappings = self._create_mappings_set(normal_group[1])

mappings = self._create_mappings_set(normal_group[1])

processed.append((condition, mappings))


default_group = self.entries[1]

if len(default_group) > 1:
@@ -217,9 +231,6 @@ class Mapping(Fragment):
# Match header [mapping]
header = Suppress("[") + Suppress("mapping") + Suppress("]")

# Define possbile values for input archive and object file
filename = Word(alphanums + "-" + "_")

# There are three possible patterns for mapping entries:
# obj:symbol (scheme)
# obj (scheme)
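The pattern list is cut off by the hunk boundary here; in a fragment file those entry patterns correspond to a block like the following sketch (archive, object, symbol and scheme names hypothetical):

    [mapping]
    archive: libfreertos.a
    entries:
        croutine:prvCheckPendingReadyList (noflash)
        croutine (noflash)
        * (default)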
@@ -14,22 +14,20 @@
# limitations under the License.
#

import re
import collections
import itertools
import os
import subprocess
import fnmatch

from sdkconfig import SDKConfig
from fragments import FragmentFileModel, Sections, Scheme, Mapping, Fragment
from pyparsing import *
from fragments import Sections, Scheme, Mapping, Fragment
from pyparsing import Suppress, White, ParseException, Literal, Regex, Group, ZeroOrMore, Word, OneOrMore, nums, alphanums, alphas, Optional
from common import LdGenFailure

"""
Encapsulates a generated placement rule placed under a target
"""

class PlacementRule():
"""
Encapsulates a generated placement rule placed under a target
"""

DEFAULT_SPECIFICITY = 0
ARCHIVE_SPECIFICITY = 1
@@ -71,12 +69,12 @@ class PlacementRule():
(section, expansion) = section_data
if expansion:
metadata = self.__metadata(self.__container([]), self.__container([expansion]), self.__container(True))
self.sections[section] = metadata
self.sections[section] = metadata

def get_section_names(self):
return self.sections.keys()

def add_exclusion(self, other, sections_infos = None):
def add_exclusion(self, other, sections_infos=None):
# Utility functions for this method
def do_section_expansion(rule, section):
if section in rule.get_section_names():
@@ -116,7 +114,7 @@ class PlacementRule():
# most specific rule from the list, and if an even more specific rule is found,
# replace it entirely. Otherwise, keep appending.
exclusions = self.sections[section].excludes
exclusions_list = exclusions.content if exclusions.content != None else []
exclusions_list = exclusions.content if exclusions.content is not None else []
exclusions_to_remove = filter(lambda r: r.is_more_specific_rule_of(other), exclusions_list)

remaining_exclusions = [e for e in exclusions_list if e not in exclusions_to_remove]
@@ -132,8 +130,8 @@ class PlacementRule():
return False

# Compare archive, obj and target
for entity_index in range (1, other.specificity + 1):
if self[entity_index] != other[entity_index] and other[entity_index] != None:
for entity_index in range(1, other.specificity + 1):
if self[entity_index] != other[entity_index] and other[entity_index] is not None:
return False

return True
@@ -143,15 +141,15 @@ class PlacementRule():
return False

# Compare archive, obj and target
for entity_index in range (1, other.specificity + 1):
if self[entity_index] != other[entity_index] and other[entity_index] != None:
for entity_index in range(1, other.specificity + 1):
if self[entity_index] != other[entity_index] and other[entity_index] is not None:
return False

return True

def __getitem__(self, key):
if key == PlacementRule.ARCHIVE_SPECIFICITY:
return self.archive
return self.archive
elif key == PlacementRule.OBJECT_SPECIFICITY:
return self.obj
elif key == PlacementRule.SYMBOL_SPECIFICITY:
@@ -193,7 +191,7 @@ class PlacementRule():
sections_string = " ".join(sections_string)

archive = str(self.archive) if self.archive else ""
obj = (str(self.obj) + (".*" if self.obj else "")) if self.obj else ""
obj = (str(self.obj) + (".*" if self.obj else "")) if self.obj else ""

# Handle output string generation based on information available
if self.specificity == PlacementRule.DEFAULT_SPECIFICITY:
@@ -247,10 +245,11 @@ class PlacementRule():
yield self.symbol
raise StopIteration

"""
Implements generation of placement rules based on collected sections, scheme and mapping fragment.
"""

class GenerationModel:
"""
Implements generation of placement rules based on collected sections, scheme and mapping fragment.
"""

DEFAULT_SCHEME = "default"

@@ -273,7 +272,7 @@ class GenerationModel:

rule = PlacementRule(archive, obj, symbol, section_entries, target)

if not rule in rules:
if rule not in rules:
rules.append(rule)

def _build_scheme_dictionary(self):
@@ -403,7 +402,7 @@ class GenerationModel:
def _create_extra_rules(self, rules):
# This function generates extra rules for symbol specific rules. The reason for generating extra rules is to isolate,
# as much as possible, rules that require expansion. Particularly, object specific extra rules are generated.
rules_to_process = sorted(rules, key = lambda r: r.specificity)
rules_to_process = sorted(rules, key=lambda r: r.specificity)
symbol_specific_rules = list(filter(lambda r: r.specificity == PlacementRule.SYMBOL_SPECIFICITY, rules_to_process))

extra_rules = dict()
@@ -433,7 +432,8 @@ class GenerationModel:
extra_rule = extra_rules[extra_rules_key]

if section not in extra_rule.get_section_names():
new_rule = PlacementRule(extra_rule.archive, extra_rule.obj, extra_rule.symbol, list(extra_rule.get_section_names()) + [section] , extra_rule.target)
new_rule = PlacementRule(extra_rule.archive, extra_rule.obj, extra_rule.symbol,
list(extra_rule.get_section_names()) + [section], extra_rule.target)
extra_rules[extra_rules_key] = new_rule
except KeyError:
extra_rule = PlacementRule(symbol_specific_rule.archive, symbol_specific_rule.obj, None, [section], section_rule.target)
@@ -452,16 +452,16 @@ class GenerationModel:

# Sort the rules by means of how specific they are. Sort by specificity from lowest to highest
# * -> lib:* -> lib:obj -> lib:obj:symbol
sorted_rules = sorted(rules, key = lambda r: r.specificity)
sorted_rules = sorted(rules, key=lambda r: r.specificity)

# Now that the rules have been sorted, loop through each rule, and then loop
# through rules below it (higher indeces), adding exclusions whenever appropriate.
for general_rule in sorted_rules:
for specific_rule in reversed(sorted_rules):
if (specific_rule.specificity > general_rule.specificity and \
if (specific_rule.specificity > general_rule.specificity and
specific_rule.specificity != PlacementRule.SYMBOL_SPECIFICITY) or \
(specific_rule.specificity == PlacementRule.SYMBOL_SPECIFICITY and \
general_rule.specificity == PlacementRule.OBJECT_SPECIFICITY):
(specific_rule.specificity == PlacementRule.SYMBOL_SPECIFICITY and
general_rule.specificity == PlacementRule.OBJECT_SPECIFICITY):
general_rule.add_exclusion(specific_rule, sections_info)

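The specificity ladder in that comment (* -> lib:* -> lib:obj -> lib:obj:symbol) corresponds to the *_SPECIFICITY constants on PlacementRule, presumably 0 through 3; the sort above is then just an ordering on those integers, as in this toy example:

    rules = [("lib:obj:symbol", 3), ("*", 0), ("lib:obj", 2), ("lib:*", 1)]
    print(sorted(rules, key=lambda r: r[1]))
    # [('*', 0), ('lib:*', 1), ('lib:obj', 2), ('lib:obj:symbol', 3)]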
def add_fragments_from_file(self, fragment_file):
@@ -484,11 +484,12 @@ class GenerationModel:

dict_to_append_to[fragment.name] = fragment

"""
Encapsulates a linker script template file. Finds marker syntax and handles replacement to generate the
final output.
"""

class TemplateModel:
"""
Encapsulates a linker script template file. Finds marker syntax and handles replacement to generate the
final output.
"""

Marker = collections.namedtuple("Marker", "target indent rules")

@@ -526,7 +527,6 @@ class TemplateModel:
target = None
try:
target = member.target
indent = member.indent
rules = member.rules

del rules[:]
@@ -535,7 +535,7 @@ class TemplateModel:
except KeyError:
message = GenerationException.UNDEFINED_REFERENCE + " to target '" + target + "'."
raise GenerationException(message)
except AttributeError as a:
except AttributeError:
pass

def write(self, output_file):
@@ -557,11 +557,12 @@ class TemplateModel:
except AttributeError:
output_file.write(member)

"""
Exception for linker script generation failures such as undefined references/ failure to
evaluate conditions, duplicate mappings, etc.
"""

class GenerationException(LdGenFailure):
"""
Exception for linker script generation failures such as undefined references/ failure to
evaluate conditions, duplicate mappings, etc.
"""

UNDEFINED_REFERENCE = "Undefined reference"

@@ -575,11 +576,12 @@ class GenerationException(LdGenFailure):
else:
return self.message

"""
Encapsulates an output of objdump. Contains information about the static library sections
and names
"""

class SectionsInfo(dict):
"""
Encapsulates an output of objdump. Contains information about the static library sections
and names
"""

__info = collections.namedtuple("__info", "filename content")

@@ -607,8 +609,11 @@ class SectionsInfo(dict):
object = Fragment.ENTITY.setResultsName("object") + Literal(":").suppress() + Literal("file format elf32-xtensa-le").suppress()

# Sections table
header = Suppress(Literal("Sections:") + Literal("Idx") + Literal("Name") + Literal("Size") + Literal("VMA") + Literal("LMA") + Literal("File off") + Literal("Algn"))
entry = Word(nums).suppress() + Fragment.ENTITY + Suppress(OneOrMore(Word(alphanums, exact=8)) + Word(nums + "*") + ZeroOrMore(Word(alphas.upper()) + Optional(Literal(","))))
header = Suppress(Literal("Sections:") + Literal("Idx") + Literal("Name") + Literal("Size") + Literal("VMA") +
Literal("LMA") + Literal("File off") + Literal("Algn"))
entry = Word(nums).suppress() + Fragment.ENTITY + Suppress(OneOrMore(Word(alphanums, exact=8)) +
Word(nums + "*") + ZeroOrMore(Word(alphas.upper()) +
Optional(Literal(","))))

# Content is object file line + sections table
content = Group(object + header + Group(ZeroOrMore(entry)).setResultsName("sections"))
@@ -16,8 +16,6 @@
#

import argparse
import os
import traceback
import sys
import tempfile

@@ -26,41 +24,41 @@ from sdkconfig import SDKConfig
from generation import GenerationModel, TemplateModel, SectionsInfo
from common import LdGenFailure


def main():

argparser = argparse.ArgumentParser(description = "ESP-IDF linker script generator")
argparser = argparse.ArgumentParser(description="ESP-IDF linker script generator")

argparser.add_argument(
"--input", "-i",
help = "Linker template file",
type = argparse.FileType("r"))
help="Linker template file",
type=argparse.FileType("r"))

argparser.add_argument(
"--fragments", "-f",
type = argparse.FileType("r"),
help = "Input fragment files",
nargs = "+")
type=argparse.FileType("r"),
help="Input fragment files",
nargs="+")

argparser.add_argument(
"--sections", "-s",
type = argparse.FileType("r"),
help = "Library sections info",
)
type=argparse.FileType("r"),
help="Library sections info")

argparser.add_argument(
"--output", "-o",
help = "Output linker script",
type = str)
help="Output linker script",
type=str)

argparser.add_argument(
"--config", "-c",
help = "Project configuration",
type = argparse.FileType("r"))
help="Project configuration",
type=argparse.FileType("r"))

argparser.add_argument(
"--kconfig", "-k",
help = "IDF Kconfig file",
type = argparse.FileType("r"))
help="IDF Kconfig file",
type=argparse.FileType("r"))

argparser.add_argument(
"--env", "-e",
@@ -110,5 +108,6 @@ def main():
print("linker script generation failed for %s\nERROR: %s" % (input_file.name, e))
sys.exit(1)


if __name__ == "__main__":
main()

@@ -15,28 +15,30 @@
#

import os
from pyparsing import *
from pyparsing import Word, printables, Combine, Literal, hexnums, quotedString, Optional, nums, removeQuotes, oneOf, Group, infixNotation, opAssoc

import sys
parent_dir_name = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
kconfig_new_dir = os.path.abspath(parent_dir_name + "/kconfig_new")
sys.path.append(kconfig_new_dir)
import kconfiglib
try:
import kconfiglib
except ImportError:
parent_dir_name = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
kconfig_new_dir = os.path.abspath(parent_dir_name + "/kconfig_new")
sys.path.append(kconfig_new_dir)
import kconfiglib



"""
Encapsulates an sdkconfig file. Defines grammar of a configuration entry, and enables
evaluation of logical expressions involving those entries.
"""
class SDKConfig:
"""
Encapsulates an sdkconfig file. Defines grammar of a configuration entry, and enables
evaluation of logical expressions involving those entries.
"""

# A configuration entry is in the form CONFIG=VALUE. Definitions of components of that grammar
IDENTIFIER = Word(printables.upper())

HEX = Combine("0x" + Word(hexnums)).setParseAction(lambda t:int(t[0], 16))
DECIMAL = Combine(Optional(Literal("+") | Literal("-")) + Word(nums)).setParseAction(lambda t:int(t[0]))
LITERAL = Word(printables)
LITERAL = Word(printables)
QUOTED_LITERAL = quotedString.setParseAction(removeQuotes)

VALUE = HEX | DECIMAL | LITERAL | QUOTED_LITERAL
@@ -44,8 +46,8 @@ class SDKConfig:
# Operators supported by the expression evaluation
OPERATOR = oneOf(["=", "!=", ">", "<", "<=", ">="])

def __init__(self, kconfig_file, sdkconfig_file, env = []):
env = [ (name, value) for (name,value) in ( e.split("=",1) for e in env) ]
def __init__(self, kconfig_file, sdkconfig_file, env=[]):
env = [(name, value) for (name,value) in (e.split("=",1) for e in env)]

for name, value in env:
value = " ".join(value.split())
@@ -57,11 +59,11 @@ class SDKConfig:
def evaluate_expression(self, expression):
result = self.config.eval_string(expression)

if result == 0: # n
if result == 0:  # n
return False
elif result == 2: # y
elif result == 2:  # y
return True
else: # m
else:  # m
raise Exception("Unsupported config expression result.")

@staticmethod
@@ -77,10 +79,9 @@ class SDKConfig:

condition = Group(Optional("(").suppress() + test + Optional(")").suppress())

grammar = infixNotation(
condition, [
("!", 1, opAssoc.RIGHT),
("&&", 2, opAssoc.LEFT),
("||", 2, opAssoc.LEFT)])
grammar = infixNotation(condition, [
("!", 1, opAssoc.RIGHT),
("&&", 2, opAssoc.LEFT),
("||", 2, opAssoc.LEFT)])

return grammar
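With this grammar in place, evaluate_expression() lets mapping-fragment conditions be tested against a parsed sdkconfig; a hedged usage sketch (file names and symbols hypothetical):

    config = SDKConfig("Kconfig", "sdkconfig")
    print(config.evaluate_expression('EXAMPLE_LEVEL >= 2 && !EXAMPLE_FLAG'))  # True or False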
|
@ -17,12 +17,17 @@
|
||||
|
||||
import unittest
|
||||
import sys
|
||||
import os
|
||||
from pyparsing import ParseException
|
||||
from pyparsing import restOfLine
|
||||
|
||||
try:
|
||||
import fragments
|
||||
except ImportError:
|
||||
sys.path.append('../')
|
||||
import fragments
|
||||
|
||||
from sdkconfig import SDKConfig
|
||||
|
||||
sys.path.append('../')
|
||||
from fragments import *
|
||||
from pyparsing import *
|
||||
from sdkconfig import *
|
||||
|
||||
class FragmentTest(unittest.TestCase):
|
||||
|
||||
@ -31,10 +36,11 @@ class FragmentTest(unittest.TestCase):
|
||||
fragment = self.parser.parseString(text, parseAll=True)
|
||||
return fragment[0]
|
||||
|
||||
|
||||
class SectionsTest(FragmentTest):
|
||||
|
||||
def setUp(self):
|
||||
self.parser = Sections.get_fragment_grammar()
|
||||
self.parser = fragments.Sections.get_fragment_grammar()
|
||||
|
||||
def test_valid_entries(self):
|
||||
valid_entries = """
|
||||
@ -74,7 +80,7 @@ class SectionsTest(FragmentTest):
|
||||
"""
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(blank_entries)
|
||||
self.parse(blank_entries)
|
||||
|
||||
def test_invalid_names(self):
|
||||
with_spaces = """
|
||||
@ -93,13 +99,13 @@ class SectionsTest(FragmentTest):
|
||||
"""
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(with_spaces)
|
||||
self.parse(with_spaces)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(begins_with_number)
|
||||
self.parse(begins_with_number)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(with_special_character)
|
||||
self.parse(with_special_character)
|
||||
|
||||
def test_non_existent_entries(self):
|
||||
misspelled_entries_field = """
|
||||
@ -113,10 +119,10 @@ class SectionsTest(FragmentTest):
|
||||
"""
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(misspelled_entries_field)
|
||||
self.parse(misspelled_entries_field)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(missing_entries_field)
|
||||
self.parse(missing_entries_field)
|
||||
|
||||
def test_duplicate_entries(self):
|
||||
duplicate_entries = """
|
||||
@ -143,10 +149,11 @@ class SectionsTest(FragmentTest):
|
||||
|
||||
self.assertEqual(set(entries), expected)
|
||||
|
||||
|
||||
class SchemeTest(FragmentTest):
|
||||
|
||||
def setUp(self):
|
||||
self.parser = Scheme.get_fragment_grammar()
|
||||
self.parser = fragments.Scheme.get_fragment_grammar()
|
||||
|
||||
def test_valid_entries(self):
|
||||
valid_entries = """
|
||||
@ -202,10 +209,10 @@ class SchemeTest(FragmentTest):
|
||||
"""
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
scheme = self.parse(wrong_character)
|
||||
self.parse(wrong_character)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
scheme = self.parse(single_word)
|
||||
self.parse(single_word)
|
||||
|
||||
def test_blank_entries(self):
|
||||
blank_entries = """
|
||||
@ -214,7 +221,7 @@ class SchemeTest(FragmentTest):
|
||||
"""
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(blank_entries)
|
||||
self.parse(blank_entries)
|
||||
|
||||
def test_non_existent_entries(self):
|
||||
misspelled_entries_field = """
|
||||
@ -228,15 +235,16 @@ class SchemeTest(FragmentTest):
|
||||
"""
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(misspelled_entries_field)
|
||||
self.parse(misspelled_entries_field)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(missing_entries_field)
|
||||
self.parse(missing_entries_field)
|
||||
|
||||
|
||||
class MappingTest(FragmentTest):
|
||||
|
||||
def setUp(self):
|
||||
self.parser = Mapping.get_fragment_grammar()
|
||||
self.parser = fragments.Mapping.get_fragment_grammar()
|
||||
|
||||
def parse_expression(self, expression):
|
||||
parser = SDKConfig.get_expression_grammar()
|
||||
@ -264,12 +272,12 @@ class MappingTest(FragmentTest):
|
||||
entries = mapping.entries
|
||||
|
||||
expected = [("default", {
|
||||
("obj", "symbol", "noflash"),
|
||||
("obj", None, "noflash"),
|
||||
("obj", "symbol_2", "noflash"),
|
||||
("obj_2", None, "noflash"),
|
||||
("*", None, "noflash")
|
||||
} ) ]
|
||||
("obj", "symbol", "noflash"),
|
||||
("obj", None, "noflash"),
|
||||
("obj", "symbol_2", "noflash"),
|
||||
("obj_2", None, "noflash"),
|
||||
("*", None, "noflash")
|
||||
})]
|
||||
|
||||
self.assertEqual(entries, expected)
|
||||
|
||||
@ -360,43 +368,43 @@ class MappingTest(FragmentTest):
|
||||
"""
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(with_fragment_name)
|
||||
self.parse(with_fragment_name)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(missing_archive)
|
||||
self.parse(missing_archive)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(misspelled_archive)
|
||||
self.parse(misspelled_archive)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(missing_entries)
|
||||
self.parse(missing_entries)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(misspelled_entries)
|
||||
self.parse(misspelled_entries)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
sections = self.parse(missing_symbols)
|
||||
self.parse(missing_symbols)
|
||||
|
||||
with self.assertRaises(ParseException):
|
||||
            sections = self.parse(missing_scheme_1)
            self.parse(missing_scheme_1)

        with self.assertRaises(ParseException):
            sections = self.parse(missing_scheme_2)
            self.parse(missing_scheme_2)

        with self.assertRaises(ParseException):
            sections = self.parse(missing_entity)
            self.parse(missing_entity)

        with self.assertRaises(ParseException):
            sections = self.parse(wilcard_symbol)
            self.parse(wilcard_symbol)

        with self.assertRaises(ParseException):
            sections = self.parse(empty_object_with_symbol)
            self.parse(empty_object_with_symbol)

        with self.assertRaises(ParseException):
            sections = self.parse(wildcard_object_with_symbol)
            self.parse(wildcard_object_with_symbol)

        with self.assertRaises(ParseException):
            sections = self.parse(empty_definition)
            self.parse(empty_definition)

    def test_explicit_blank_default_w_others(self):
        expl_blnk_w_oth = """
@ -412,14 +420,13 @@ class MappingTest(FragmentTest):

        entries = mapping.entries

        expected = [ ( entries[0][0] , {
                        ("obj_a", None, "noflash"),
                     } ),
                     ("default", set() ) ]
        expected = [(entries[0][0], {
                    ("obj_a", None, "noflash"),
                    }),
                    ("default", set())]

        self.assertEqual(entries, expected)

    def test_implicit_blank_default_w_others(self):
        impl_blnk_w_oth = """
        [mapping]
@ -433,10 +440,10 @@ class MappingTest(FragmentTest):

        entries = mapping.entries

        expected = [ ( entries[0][0] , {
                        ("obj_a", None, "noflash"),
                     } ),
                     ("default", set() ) ]
        expected = [(entries[0][0], {
                    ("obj_a", None, "noflash"),
                    }),
                    ("default", set())]

        self.assertEqual(entries, expected)

@ -449,7 +456,7 @@ class MappingTest(FragmentTest):
        """
        mapping = self.parse(expl_blnk_def)
        entries = mapping.entries
        expected = [ ("default", set() ) ]
        expected = [("default", set())]

        self.assertEqual(entries, expected)

@ -462,7 +469,7 @@ class MappingTest(FragmentTest):
        """
        mapping = self.parse(impl_blnk_def)
        entries = mapping.entries
        expected = [ ("default", set() ) ]
        expected = [("default", set())]

        self.assertEqual(entries, expected)

@ -486,19 +493,19 @@ class MappingTest(FragmentTest):

        entries = mapping.entries

        expected = [ ( entries[0][0] , {
                        ("obj_a1", None, "noflash"),
                        ("obj_a2", None, "noflash"),
                     } ),
                     ( entries[1][0] , {
                        ("obj_b1", None, "noflash"),
                        ("obj_b2", None, "noflash"),
                        ("obj_b3", None, "noflash"),
                     } ),
                     ( entries[2][0] , {
                        ("obj_c1", None, "noflash"),
                     } ),
                     ("default", set() ) ]
        expected = [(entries[0][0], {
                    ("obj_a1", None, "noflash"),
                    ("obj_a2", None, "noflash"),
                    }),
                    (entries[1][0], {
                    ("obj_b1", None, "noflash"),
                    ("obj_b2", None, "noflash"),
                    ("obj_b3", None, "noflash"),
                    }),
                    (entries[2][0], {
                    ("obj_c1", None, "noflash"),
                    }),
                    ("default", set())]

        self.assertEqual(entries, expected)

@ -522,18 +529,18 @@ class MappingTest(FragmentTest):

        entries = mapping.entries

        expected = [ ( entries[0][0] , {
                        ("obj_a", None, "noflash")
                     } ),
                     ( entries[1][0] , set()),
                     ( entries[2][0] , {
                        ("obj_c", None, "noflash")
                     } ),
                     ( entries[3][0] , set()),
                     ( entries[4][0] , set()),
                     ( "default" , {
                        ("obj", None, "noflash")
                     } ) ]
        expected = [(entries[0][0], {
                    ("obj_a", None, "noflash")
                    }),
                    (entries[1][0], set()),
                    (entries[2][0], {
                    ("obj_c", None, "noflash")
                    }),
                    (entries[3][0], set()),
                    (entries[4][0], set()),
                    ("default", {
                    ("obj", None, "noflash")
                    })]

        self.assertEqual(entries, expected)

@ -548,8 +555,7 @@ class MappingTest(FragmentTest):
        """

        with self.assertRaises(ParseException):
            mapping = self.parse(blank_first_condition)

            self.parse(blank_first_condition)

    def test_nonlast_default(self):
        nonlast_default_1 = """
@ -587,13 +593,13 @@ class MappingTest(FragmentTest):
        """

        with self.assertRaises(ParseException):
            mapping = self.parse(nonlast_default_1)
            self.parse(nonlast_default_1)

        with self.assertRaises(ParseException):
            mapping = self.parse(nonlast_default_2)
            self.parse(nonlast_default_2)

        with self.assertRaises(ParseException):
            mapping = self.parse(nonlast_default_3)
            self.parse(nonlast_default_3)

    def test_duplicate_default(self):
        duplicate_default_1 = """
@ -623,10 +629,11 @@ class MappingTest(FragmentTest):
        """

        with self.assertRaises(ParseException):
            mapping = self.parse(duplicate_default_1)
            self.parse(duplicate_default_1)

        with self.assertRaises(ParseException):
            mapping = self.parse(duplicate_default_2)
            self.parse(duplicate_default_2)

if __name__ =="__main__":

if __name__ == "__main__":
    unittest.main()
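The pairs above all make the same fix: the sections = ... / mapping = ... bindings inside assertRaises blocks were never read, which flake8 reports as F841 (local variable assigned but never used), so the bare call is kept instead. A minimal self-contained sketch of the pattern, with a hypothetical parse() standing in for the fragment parser:

import unittest


class ParseException(Exception):
    pass


def parse(fragment):
    # Hypothetical stand-in for the fragment parser: reject empty input.
    if not fragment.strip():
        raise ParseException("empty fragment")
    return fragment


class NegativeParseTest(unittest.TestCase):
    def test_empty_fragment_rejected(self):
        # The return value is irrelevant when only the exception matters,
        # so no `result = ...` binding is needed (avoids flake8 F841).
        with self.assertRaises(ParseException):
            parse("")


if __name__ == "__main__":
    unittest.main()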
@ -17,11 +17,25 @@

import unittest
import sys
import os

sys.path.append('../')
from generation import *
from pyparsing import *
try:
    from generation import PlacementRule
except ImportError:
    sys.path.append('../')
    from generation import PlacementRule

from generation import GenerationException
from generation import SectionsInfo
from generation import TemplateModel
from generation import GenerationModel

from fragments import FragmentFileModel
from fragments import Mapping
from fragments import Sections
from fragments import Scheme

from sdkconfig import SDKConfig

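The hunk above replaces an unconditional sys.path.append plus wildcard imports with explicit names and a guarded import: try the normal import first, and only patch the search path when it fails. A minimal sketch of the pattern, using a hypothetical module name:

import sys

try:
    import mymodule  # hypothetical module used for illustration
except ImportError:
    # Only patch the search path if the plain import fails, e.g. when the
    # script is run from its own directory instead of being installed.
    sys.path.append('../')
    import mymodule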
class GenerationModelTest(unittest.TestCase):

@ -270,7 +284,6 @@ class GenerationModelTest(unittest.TestCase):

        self._compare_rules(expected, actual)


    def test_rule_generation_nominal_4(self):
        normal = """
        [mapping]
@ -524,8 +537,10 @@ class GenerationModelTest(unittest.TestCase):
        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["rodata"].entries, "dram0_data")

        rtc_text_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
        rtc_data_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
        rtc_data_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

        iram0_text_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["text"].entries, "iram0_text")
        dram0_data_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["rodata"].entries, "dram0_data")
@ -591,8 +606,10 @@ class GenerationModelTest(unittest.TestCase):
        dram0_bss_default = self._get_default("dram0_bss", expected)

        rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

        iram0_text_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
        dram0_data_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
@ -648,8 +665,10 @@ class GenerationModelTest(unittest.TestCase):
        dram0_bss_default = self._get_default("dram0_bss", expected)

        rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

        iram0_text_E2 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "iram0_text")
        dram0_data_E2 = PlacementRule("libfreertos.a", None, None, self.model.sections["rodata"].entries, "dram0_data")
@ -767,8 +786,10 @@ class GenerationModelTest(unittest.TestCase):
        dram0_bss_default = self._get_default("dram0_bss", expected)

        rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

        iram0_text_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
        dram0_data_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
@ -847,8 +868,10 @@ class GenerationModelTest(unittest.TestCase):
        rtc_bss_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

        rtc_text_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["text"].entries, "rtc_text")
        rtc_data_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
        rtc_data_E4 = PlacementRule("libfreertos.a", "event_groups", None,
                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E4 = PlacementRule("libfreertos.a", "event_groups", None,
                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

        iram0_text_E5 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "iram0_text")
        dram0_data_E5 = PlacementRule("libfreertos.a", None, None, self.model.sections["rodata"].entries, "dram0_data")
@ -918,8 +941,10 @@ class GenerationModelTest(unittest.TestCase):
        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["rodata"].entries, "dram0_data")

        rtc_text_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["text"].entries, "rtc_text")
        rtc_data_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")
        rtc_data_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate",
                                    self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
        rtc_bss_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate",
                                   self.model.sections["bss"].entries + self.model.sections["common"].entries, "rtc_bss")

        rtc_text_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "rtc_text")
        rtc_data_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["data"].entries + self.model.sections["rodata"].entries, "rtc_data")
@ -1041,7 +1066,7 @@ class GenerationModelTest(unittest.TestCase):
            croutine (noflash)
        """

        conflict_scheme = """
        conflict_scheme = """
        [scheme:conflict]
        entries:
            rodata -> dram0_data
@ -1052,9 +1077,9 @@ class GenerationModelTest(unittest.TestCase):
        self._add_mapping(conflict_mapping)

        with self.assertRaises(GenerationException):
            actual = self.model.generate_rules(self.sdkconfig, self.sections_info)
            self.model.generate_rules(self.sdkconfig, self.sections_info)

    def test_rule_generation_condition (self):
    def test_rule_generation_condition(self):
        generation_with_condition = """
        [mapping]
        archive: lib.a
@ -1083,7 +1108,7 @@ class GenerationModelTest(unittest.TestCase):
        flash_rodata_default = self._get_default("flash_rodata", expected)

        if perf_level < 4:
            for append_no in range (1, perf_level + 1):
            for append_no in range(1, perf_level + 1):
                iram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["text"].entries, "iram0_text")
                dram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["rodata"].entries, "dram0_data")

@ -1095,5 +1120,6 @@ class GenerationModelTest(unittest.TestCase):

        self._compare_rules(expected, actual)

if __name__ =="__main__":
    unittest.main()

if __name__ == "__main__":
    unittest.main()
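The PlacementRule hunks above are all line-length fixes (flake8 E501): each overlong call is split after an argument and the continuation aligned with the opening parenthesis. A small sketch of the same wrapping style, with a hypothetical make_rule() standing in for PlacementRule:

# Overlong single-line call, the kind flake8 reports as E501:
# rule = make_rule("libfreertos.a", "croutine", "prvCheckDelayedList", sections["data"] + sections["rodata"], "rtc_data")

def make_rule(archive, obj, symbol, entries, target):
    # Hypothetical helper used for illustration; it just bundles arguments.
    return (archive, obj, symbol, entries, target)


sections = {"data": ["*(.data)"], "rodata": ["*(.rodata)"]}

# Wrapped form: break after an argument, align under the opening parenthesis.
rule = make_rule("libfreertos.a", "croutine", "prvCheckDelayedList",
                 sections["data"] + sections["rodata"], "rtc_data")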
@ -26,17 +26,18 @@ import socket
import pty
import filecmp
import threading
import errno

test_list = (
             # Add new tests here. All files should be placed in IN_DIR. Columns are:
             # Input file        Filter string                              File with expected output    Timeout
             ('in1.txt', '', 'in1f1.txt', 60),
             ('in1.txt', '*:V', 'in1f1.txt', 60),
             ('in1.txt', 'hello_world', 'in1f2.txt', 60),
             ('in1.txt', '*:N', 'in1f3.txt', 60),
             ('in2.txt', 'boot mdf_device_handle:I mesh:E vfs:I', 'in2f1.txt', 240),
             ('in2.txt', 'vfs', 'in2f2.txt', 240),
             )
    # Add new tests here. All files should be placed in IN_DIR. Columns are:
    # Input file        Filter string                              File with expected output    Timeout
    ('in1.txt', '', 'in1f1.txt', 60),
    ('in1.txt', '*:V', 'in1f1.txt', 60),
    ('in1.txt', 'hello_world', 'in1f2.txt', 60),
    ('in1.txt', '*:N', 'in1f3.txt', 60),
    ('in2.txt', 'boot mdf_device_handle:I mesh:E vfs:I', 'in2f1.txt', 240),
    ('in2.txt', 'vfs', 'in2f2.txt', 240),
)

IN_DIR = 'tests/'     # tests are in this directory
OUT_DIR = 'outputs/'  # test results are written to this directory (kept only for debugging purposes)
@ -51,6 +52,7 @@ SOCKET_TIMEOUT = 30
# the test is restarted after failure (idf_monitor has to be killed):
RETRIES_PER_TEST = 5


def monitor_timeout(process):
    if process.poll() is None:
        # idf_monitor is still running
@ -64,6 +66,7 @@ def monitor_timeout(process):
        else:
            raise


class TestRunner(object):
    def __enter__(self):
        self.serversocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
@ -85,12 +88,13 @@ class TestRunner(object):
        clientsocket.setsockopt(socket.SOL_TCP, socket.TCP_NODELAY, 1)
        return clientsocket


def test_iteration(runner, test, startup_timeout):
    print('\nRunning test on {} with filter "{}" and expecting {}'.format(test[0], test[1], test[2]))
    try:
        with open(OUT_DIR + test[2], "w", encoding='utf-8') as o_f, open(ERR_OUT, "w", encoding='utf-8') as e_f:
            monitor_cmd = [sys.executable,
                IDF_MONITOR, '--port', 'socket://{}:{}'.format(HOST, runner.port), '--print_filter', test[1], ELF_FILE]
                           IDF_MONITOR, '--port', 'socket://{}:{}'.format(HOST, runner.port), '--print_filter', test[1], ELF_FILE]
            (master_fd, slave_fd) = pty.openpty()
            print('\t', ' '.join(monitor_cmd), sep='')
            print('\tstdout="{}" stderr="{}" stdin="{}"'.format(o_f.name, e_f.name, os.ttyname(slave_fd)))
@ -140,6 +144,7 @@ def test_iteration(runner, test, startup_timeout):
    else:
        raise RuntimeError("The contents of the files are different. Please examine the artifacts.")


def main():
    gstart = time.time()
    if not os.path.exists(OUT_DIR):
@ -169,5 +174,6 @@ def main():
    gend = time.time()
    print('Execution took {:.2f} seconds\n'.format(gend - gstart))


if __name__ == "__main__":
    main()
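Around monitor_timeout() the change only adds the two blank lines flake8 expects between top-level definitions (E302/E305); the function itself is the watchdog that kills a still-running idf_monitor process when a test exceeds its time budget. A minimal self-contained sketch of the same watchdog idea under assumed names, using threading.Timer to bound a child process's runtime:

import subprocess
import threading

MONITOR_TIMEOUT_SEC = 5  # hypothetical per-test time limit


def monitor_timeout(process):
    # Runs on the timer thread: if the child is still alive, kill it.
    if process.poll() is None:
        process.kill()


# POSIX 'sleep' is used purely for illustration.
proc = subprocess.Popen(['sleep', '60'])
timer = threading.Timer(MONITOR_TIMEOUT_SEC, monitor_timeout, [proc])
timer.start()
proc.wait()     # returns once the watchdog kills the child
timer.cancel()  # no-op here; prevents the kill if the child finished first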
@ -16,8 +16,12 @@

import sys

sys.path.append('..')
import idf_size
try:
    import idf_size
except ImportError:
    sys.path.append('..')
    import idf_size


if __name__ == "__main__":
    try:
@ -22,11 +22,15 @@ import sys
import re
import argparse

test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path:
    sys.path.insert(0, test_fw_path)
try:
    from Utility.CIAssignTest import AssignTest
except ImportError:
    test_fw_path = os.getenv("TEST_FW_PATH")
    if test_fw_path:
        sys.path.insert(0, test_fw_path)
    from Utility.CIAssignTest import AssignTest

from Utility.CIAssignTest import AssignTest, Group
from Utility.CIAssignTest import Group


class ExampleGroup(Group):
@ -9,11 +9,13 @@ import argparse

import yaml

test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path:
    sys.path.insert(0, test_fw_path)

from Utility import CIAssignTest
try:
    from Utility import CIAssignTest
except ImportError:
    test_fw_path = os.getenv("TEST_FW_PATH")
    if test_fw_path:
        sys.path.insert(0, test_fw_path)
    from Utility import CIAssignTest


class Group(CIAssignTest.Group):
@ -426,10 +426,10 @@ class BaseDUT(object):

        :param data: data which needs to be checked and maybe transformed
        """
        if type(data) is type(u''):
        if isinstance(data, type(u'')):
            try:
                data = data.encode('utf-8')
            except:
            except Exception:
                print(u'Cannot encode {} of type {}'.format(data, type(data)))
                raise
        return data
@ -529,9 +529,9 @@ class BaseDUT(object):
        :return: match groups if match succeed otherwise None
        """
        ret = None
        if type(pattern.pattern) is type(u''):
        if isinstance(pattern.pattern, type(u'')):
            pattern = re.compile(BaseDUT.u_to_bytearray(pattern.pattern))
        if type(data) is type(u''):
        if isinstance(data, type(u'')):
            data = BaseDUT.u_to_bytearray(data)
        match = pattern.search(data)
        if match:
@ -543,7 +543,7 @@ class BaseDUT(object):

    EXPECT_METHOD = [
        [type(re.compile("")), "_expect_re"],
        [type(b''), "_expect_str"], # Python 2 & 3 hook to work without 'from builtins import str' from future
        [type(b''), "_expect_str"],  # Python 2 & 3 hook to work without 'from builtins import str' from future
        [type(u''), "_expect_str"],
    ]

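Two fixes recur in the BaseDUT hunks: direct type comparisons such as type(data) is type(u'') become isinstance(data, type(u'')), and the bare except: becomes except Exception: (flake8 E722), so SystemExit and KeyboardInterrupt are no longer swallowed. The type(u'') spelling keeps the check working on both Python 2 and 3. A small sketch of both fixes together:

def to_bytes(data):
    # isinstance() also accepts subclasses, unlike a `type(x) is ...` check.
    if isinstance(data, type(u'')):
        try:
            data = data.encode('utf-8')
        except Exception:  # narrow enough to let SystemExit/KeyboardInterrupt propagate
            print(u'Cannot encode {} of type {}'.format(data, type(data)))
            raise
    return data


print(to_bytes(u'caf\xe9'))  # b'caf\xc3\xa9' on Python 3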
@ -67,7 +67,7 @@ class Config(object):
        try:
            value = self.configs[variable_name]
        except KeyError:
            #TODO: to support auto get variable here
            # TODO: to support auto get variable here
            value = None
        if value is None:
            raise ValueError("Failed to get variable")
@ -41,11 +41,11 @@ class IDFApp(App.BaseApp):
                   "or 'idf.py build' "
                   "for resolving the issue."
                   "").format(self.IDF_DOWNLOAD_CONFIG_FILE, self.IDF_FLASH_ARGS_FILE,
                                self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE)
                              self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE)
            raise AssertionError(msg)

        self.flash_files, self.flash_settings = self._parse_flash_download_config()
        self.partition_table = self._parse_partition_table()
        self.partition_table = self._parse_partition_table()

    @classmethod
    def get_sdk_path(cls):
@ -54,7 +54,6 @@ class IDFApp(App.BaseApp):
        assert os.path.exists(idf_path)
        return idf_path


    def get_binary_path(self, app_path):
        """
        get binary path according to input app_path.
@ -81,7 +80,7 @@ class IDFApp(App.BaseApp):
            # CMake version using build metadata file
            with open(os.path.join(self.binary_path, self.IDF_FLASH_ARGS_FILE), "r") as f:
                args = json.load(f)
                flash_files = [ (offs,file) for (offs,file) in args["flash_files"].items() if offs != "" ]
                flash_files = [(offs,file) for (offs,file) in args["flash_files"].items() if offs != ""]
                flash_settings = args["flash_settings"]
        else:
            # GNU Make version uses download.config arguments file
@ -92,13 +91,13 @@ class IDFApp(App.BaseApp):
            for idx in range(0, len(args), 2):  # process arguments in pairs
                if args[idx].startswith("--"):
                    # strip the -- from the command line argument
                    flash_settings[args[idx][2:]] = args[idx+1]
                    flash_settings[args[idx][2:]] = args[idx + 1]
                else:
                    # offs, filename
                    flash_files.append( (args[idx], args[idx+1]) )
                    flash_files.append((args[idx], args[idx + 1]))

        # make file offsets into integers, make paths absolute
        flash_files = [ (int(offs, 0), os.path.join(self.binary_path, path.strip())) for (offs, path) in flash_files ]
        flash_files = [(int(offs, 0), os.path.join(self.binary_path, path.strip())) for (offs, path) in flash_files]

        return (flash_files, flash_settings)

@ -17,11 +17,8 @@ import os
import os.path
import sys
import re
import subprocess
import functools
import random
import tempfile
import time

from serial.tools import list_ports

@ -94,7 +91,7 @@ class IDFDUT(DUT.SerialDUT):
            esp = esptool.ESP32ROM(port)
            esp.connect()
            return esp.read_mac()
        except RuntimeError as e:
        except RuntimeError:
            return None
        finally:
            esp._port.close()
@ -112,7 +109,7 @@ class IDFDUT(DUT.SerialDUT):
        """
        try:
            # note: opening here prevents us from having to seek back to 0 each time
            flash_files = [ (offs, open(path, "rb")) for (offs, path) in self.app.flash_files ]
            flash_files = [(offs, open(path, "rb")) for (offs, path) in self.app.flash_files]

            if erase_nvs:
                address = self.app.partition_table["nvs"]["offset"]
@ -120,7 +117,7 @@ class IDFDUT(DUT.SerialDUT):
                nvs_file = tempfile.TemporaryFile()
                nvs_file.write(b'\xff' * size)
                nvs_file.seek(0)
                flash_files.append( (int(address, 0), nvs_file) )
                flash_files.append((int(address, 0), nvs_file))

            # fake flasher args object, this is a hack until
            # esptool Python API is improved
@ -158,7 +155,7 @@ class IDFDUT(DUT.SerialDUT):
        :param: erase_nvs: whether erase NVS partition during flash
        :return: None
        """
        for baud_rate in [ 921600, 115200 ]:
        for baud_rate in [921600, 115200]:
            try:
                self._try_flash(erase_nvs, baud_rate)
                break
@ -183,7 +180,7 @@ class IDFDUT(DUT.SerialDUT):
        :return: None
        """
        raise NotImplementedError()  # TODO: implement this
        address = self.app.partition_table[partition]["offset"]
        # address = self.app.partition_table[partition]["offset"]
        size = self.app.partition_table[partition]["size"]
        # TODO can use esp.erase_region() instead of this, I think
        with open(".erase_partition.tmp", "wb") as f:
@ -231,7 +228,7 @@ class IDFDUT(DUT.SerialDUT):
        return [x for x in ports if not cls.INVALID_PORT_PATTERN.search(x)]

        # On MacOs with python3.6: type of espport is already utf8
        if type(espport) is type(u''):
        if isinstance(espport, type(u'')):
            port_hint = espport
        else:
            port_hint = espport.decode('utf8')
@ -53,7 +53,7 @@ class Runner(threading.Thread):
        for case in self.test_cases:
            result = case.run()
            self.test_result.append(result)


    def get_test_result(self):
        return self.test_result and all(self.test_result)

@ -59,9 +59,9 @@ def _convert_to_lower_case_bytes(item):
    """
    if isinstance(item, (tuple, list)):
        output = [_convert_to_lower_case_bytes(v) for v in item]
    elif type(item) == type(b''):
    elif isinstance(item, type(b'')):
        output = item.lower()
    elif type(item) == type(u''):
    elif isinstance(item, type(u'')):
        output = item.encode().lower()
    else:
        output = item
@ -15,7 +15,7 @@
import matplotlib
# fix can't draw figure with docker
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.pyplot as plt  # noqa: E402 - matplotlib.use('Agg') need to be before this


# candidate colors
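The # noqa: E402 marker above suppresses flake8's module-import-not-at-top warning for that single line: matplotlib.use('Agg') has to run before pyplot is imported, or drawing fails on headless machines such as Docker containers. A hedged sketch of the same pattern (assumes matplotlib is installed):

import matplotlib
matplotlib.use('Agg')             # select a non-GUI backend first
import matplotlib.pyplot as plt   # noqa: E402  (import intentionally after use())

fig, ax = plt.subplots()
ax.plot([0, 1, 2], [0, 1, 4])
fig.savefig('chart.png')          # renders without a display server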
@ -90,6 +90,6 @@ class Control(object):

    @classmethod
    def control_rest(cls, apc_ip, outlet, action):
        outlet_list = list(range(1, 9)) # has to be a list if we want to remove from it under Python 3
        outlet_list = list(range(1, 9))  # has to be a list if we want to remove from it under Python 3
        outlet_list.remove(outlet)
        cls.control(apc_ip, dict.fromkeys(outlet_list, action))
@ -44,7 +44,7 @@ class Search(object):
        except ImportError as e:
            print("ImportError: \r\n\tFile:" + file_name + "\r\n\tError:" + str(e))
        for i, test_function in enumerate(test_functions):
            print("\t{}. ".format(i+1) + test_function.case_info["name"])
            print("\t{}. ".format(i + 1) + test_function.case_info["name"])
        return test_functions

    @classmethod
@ -29,7 +29,7 @@ def console_log(data, color="white", end="\n"):
    if color not in _COLOR_CODES:
        color = "white"
    color_codes = _COLOR_CODES[color]
    if type(data) is type(b''):
    if isinstance(data, type(b'')):
        data = data.decode('utf-8', 'replace')
    print(color_codes + data, end=end)
    if color not in ["white", "W"]:
@ -155,6 +155,3 @@ texinfo_documents = [
     author, 'TinyTestFW', 'One line description of project.',
     'Miscellaneous'),
]
@ -17,13 +17,16 @@ import re
import os
import sys

# if we want to run test case outside `tiny-test-fw` folder,
# we need to insert tiny-test-fw path into sys path
test_fw_path = os.getenv("TEST_FW_PATH")
if test_fw_path and test_fw_path not in sys.path:
    sys.path.insert(0, test_fw_path)
try:
    import TinyFW
except ImportError:
    # if we want to run test case outside `tiny-test-fw` folder,
    # we need to insert tiny-test-fw path into sys path
    test_fw_path = os.getenv("TEST_FW_PATH")
    if test_fw_path and test_fw_path not in sys.path:
        sys.path.insert(0, test_fw_path)
    import TinyFW

import TinyFW
import IDF

@ -1,4 +1,3 @@
import sys
import glob
import tempfile
import os
@ -6,7 +5,6 @@ import os.path
import re
import shutil
import argparse
import json
import copy

PROJECT_NAME = "unit-test-app"
@ -16,12 +14,13 @@ PROJECT_PATH = os.getcwd()
# Each file in configs/ directory defines a configuration. The format is the
# same as sdkconfig file. Configuration is applied on top of sdkconfig.defaults
# file from the project directory
CONFIG_NAMES = os.listdir(os.path.join(PROJECT_PATH, "configs"))
CONFIG_NAMES = os.listdir(os.path.join(PROJECT_PATH, "configs"))

# Build (intermediate) and output (artifact) directories
BUILDS_DIR = os.path.join(PROJECT_PATH, "builds")
BINARIES_DIR = os.path.join(PROJECT_PATH, "output")


# Convert the values passed to the -T parameter to corresponding cache entry definitions
# TESTS_ALL and TEST_COMPONENTS
class TestComponentAction(argparse.Action):
@ -46,10 +45,11 @@ class TestComponentAction(argparse.Action):

        # Brute force add reconfigure at the very beginning
        existing_actions = getattr(namespace, "actions", [])
        if not "reconfigure" in existing_actions:
        if "reconfigure" not in existing_actions:
            existing_actions = ["reconfigure"] + existing_actions
        setattr(namespace, "actions", existing_actions)


class TestExcludeComponentAction(argparse.Action):
    def __call__(self, parser, namespace, values, option_string=None):
        # Create a new of cache definition entry, adding previous elements
@ -66,22 +66,24 @@ class TestExcludeComponentAction(argparse.Action):

        # Brute force add reconfigure at the very beginning
        existing_actions = getattr(namespace, "actions", [])
        if not "reconfigure" in existing_actions:
        if "reconfigure" not in existing_actions:
            existing_actions = ["reconfigure"] + existing_actions
        setattr(namespace, "actions", existing_actions)


def add_argument_extensions(parser):
    # For convenience, define a -T argument that gets converted to -D arguments
    parser.add_argument('-T', '--test-component', help="Specify the components to test", nargs='+', action=TestComponentAction)
    # For convenience, define a -T argument that gets converted to -D arguments
    parser.add_argument('-E', '--test-exclude-components', help="Specify the components to exclude from testing", nargs='+', action=TestExcludeComponentAction)


def add_action_extensions(base_functions, base_actions):

    def ut_apply_config(ut_apply_config_name, args):
        config_name = re.match(r"ut-apply-config-(.*)", ut_apply_config_name).group(1)

        def set_config_build_variables(prop, defval = None):
        def set_config_build_variables(prop, defval=None):
            property_value = re.findall(r"^%s=(.+)" % prop, config_file_content, re.MULTILINE)
            if (property_value):
                property_value = property_value[0]
@ -167,7 +169,7 @@ def add_action_extensions(base_functions, base_actions):
    # For local builds, use 'apply-config-NAME' target and then use normal 'all'
    # and 'flash' targets.
    def ut_build(ut_build_name, args):
        # Create a copy of the passed arguments to prevent arg modifications to accrue if
        # Create a copy of the passed arguments to prevent arg modifications to accrue if
        # all configs are being built
        build_args = copy.copy(args)

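The `if not "reconfigure" in ...` lines above are rewritten to the `not in` operator flake8 prefers (E713). The surrounding classes are argparse custom actions that stash the option's values and force a reconfigure step to run first; a stripped-down, hypothetical sketch of that shape using the same not-in idiom:

import argparse


class PrependReconfigureAction(argparse.Action):
    # Hypothetical reduction of TestComponentAction: store the values and
    # make sure a 'reconfigure' step runs before anything else.
    def __call__(self, parser, namespace, values, option_string=None):
        setattr(namespace, self.dest, values)
        actions = getattr(namespace, "actions", None) or []
        if "reconfigure" not in actions:  # E713: prefer `x not in y`
            actions = ["reconfigure"] + actions
        setattr(namespace, "actions", actions)


parser = argparse.ArgumentParser()
parser.add_argument('-T', '--test-component', nargs='+', action=PrependReconfigureAction)
args = parser.parse_args(['-T', 'driver', 'spi_flash'])
print(args.actions)         # ['reconfigure']
print(args.test_component)  # ['driver', 'spi_flash']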
@ -23,8 +23,8 @@ class Section(object):
        return False

    def __getitem__(self, item):
        """
        process slice.
        """
        process slice.
        convert absolute address to relative address in current section and return slice result
        """
        if isinstance(item, int):
@ -128,11 +128,11 @@ class SectionTable(object):
        key = {"address": address, "section": section}
        for section in self.table:
            if key in section:
                tmp = section[address:address+size]
                tmp = section[address:address + size]
                value = 0
                for i in range(size):
                    if endian == "LE":
                        value += ord(tmp[i]) << (i*8)
                        value += ord(tmp[i]) << (i * 8)
                    elif endian == "BE":
                        value += ord(tmp[i]) << ((size - i - 1) * 8)
                    else:
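The loop above assembles an unsigned integer from raw section bytes: for little-endian, byte i is shifted left by i * 8 bits; for big-endian, by (size - i - 1) * 8. A worked sketch of both orderings on the same four bytes (Python 3 bytes indexing stands in for the ord() calls of the Python 2-compatible original):

def bytes_to_uint(data, endian="LE"):
    value = 0
    size = len(data)
    for i in range(size):
        if endian == "LE":
            value += data[i] << (i * 8)               # byte 0 is least significant
        else:
            value += data[i] << ((size - i - 1) * 8)  # byte 0 is most significant
    return value


raw = bytes([0x78, 0x56, 0x34, 0x12])
print(hex(bytes_to_uint(raw, "LE")))  # 0x12345678
print(hex(bytes_to_uint(raw, "BE")))  # 0x78563412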
@ -29,7 +29,7 @@ class Parser(object):
    """ parse unit test cases from build files and create files for test bench """

    TAG_PATTERN = re.compile("([^=]+)(=)?(.+)?")
    DESCRIPTION_PATTERN = re.compile("\[([^]\[]+)\]")
    DESCRIPTION_PATTERN = re.compile("\[([^]\[]+)\]")  # noqa: W605 - regular expression
    CONFIG_PATTERN = re.compile(r"{([^}]+)}")
    TEST_GROUPS_PATTERN = re.compile(r"TEST_GROUPS=(.*)$")

@ -83,7 +83,7 @@ class Parser(object):
        name_addr = table.get_unsigned_int(section, test_addr, 4)
        desc_addr = table.get_unsigned_int(section, test_addr + 4, 4)
        file_name_addr = table.get_unsigned_int(section, test_addr + 12, 4)
        function_count = table.get_unsigned_int(section, test_addr+20, 4)
        function_count = table.get_unsigned_int(section, test_addr + 20, 4)
        name = table.get_string("any", name_addr)
        desc = table.get_string("any", desc_addr)
        file_name = table.get_string("any", file_name_addr)
@ -213,7 +213,6 @@ class Parser(object):

        return self.parse_tags_internal(configs, self.config_dependencies, self.CONFIG_PATTERN)


    def get_test_groups(self, config_file):
        """
        If the config file includes TEST_GROUPS variable, return its value as a list of strings.
@ -325,7 +324,7 @@ def test_parser():
    }
    sdkconfig = ["123", "789"]
    tags = parser.parse_tags_internal(sdkconfig, config_dependency, parser.CONFIG_PATTERN)
    assert sorted(tags) == ['a', 'd', 'f'] # sorted is required for older Python3, e.g. 3.4.8
    assert sorted(tags) == ['a', 'd', 'f']  # sorted is required for older Python3, e.g. 3.4.8


def main():
@ -26,7 +26,7 @@ import argparse
import threading

try:
        import TinyFW
    import TinyFW
except ImportError:
    # if we want to run test case outside `tiny-test-fw` folder,
    # we need to insert tiny-test-fw path into sys path
@ -374,7 +374,7 @@ class Handler(threading.Thread):
                Utility.console_log("No case detected!", color="orange")
            while not self.finish and not self.force_stop.isSet():
                try:
                    self.dut.expect_any((re.compile('\(' + str(self.child_case_index) + '\)\s"(\w+)"'),
                    self.dut.expect_any((re.compile('\(' + str(self.child_case_index) + '\)\s"(\w+)"'),  # noqa: W605 - regex
                                         get_child_case_name),
                                        (self.WAIT_SIGNAL_PATTERN, device_wait_action),  # wait signal pattern
                                        (self.SEND_SIGNAL_PATTERN, device_send_action),  # send signal pattern
@ -742,7 +742,7 @@ if __name__ == '__main__':
        test_env = Env.Env(**env_config)
        detect_update_unit_test_info(test_env, extra_data=list_of_dicts, app_bin=args.app_bin)

        for index in range(1, args.repeat+1):
        for index in range(1, args.repeat + 1):
            if args.repeat > 1:
                Utility.console_log("Repetition {}".format(index), color="green")
            for dic in list_of_dicts:
@ -3,11 +3,16 @@
# Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths
# to Windows paths, for Eclipse
from __future__ import print_function, division
import sys, subprocess, os.path, re
import sys
import subprocess
import os.path
import re

UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+')

paths = {}


def check_path(path):
    try:
        return paths[path]
@ -24,13 +29,15 @@ def check_path(path):
    paths[path] = winpath
    return winpath


def main():
    print("Running make in '%s'" % check_path(os.getcwd()))
    make = subprocess.Popen(["make"] + sys.argv[1:] + ["BATCH_BUILD=1"], stdout=subprocess.PIPE)
    for line in iter(make.stdout.readline, ''):
            line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
            print(line.rstrip())
        line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line)
        print(line.rstrip())
    sys.exit(make.wait())


if __name__ == "__main__":
    main()
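Taken together, the commit is a sweep of mechanical flake8 cleanups: whitespace and indentation (E1xx/E2xx), long lines (E501), membership idioms (E713), bare excepts (E722), unused assignments (F841), import restructuring, plus targeted # noqa opt-outs where a rule conflicts with what the code needs. As a hedged sketch, assuming flake8 >= 3.0 is installed, the same checks can be re-run from Python via its legacy API (or equivalently with `python -m flake8` on the command line):

from flake8.api import legacy as flake8

# The max_line_length override here is illustrative; when run from the repo
# root, the repository's own .flake8 configuration should be picked up.
style_guide = flake8.get_style_guide(max_line_length=120)
report = style_guide.check_files(['tools/'])
print('violations found:', report.total_errors)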