diff --git a/components/app_update/otatool.py b/components/app_update/otatool.py
index ca22001015..5033053562 100755
--- a/components/app_update/otatool.py
+++ b/components/app_update/otatool.py
@@ -16,23 +16,24 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from __future__ import print_function, division
+from __future__ import division, print_function
+
 import argparse
-import os
-import sys
 import binascii
-import tempfile
 import collections
+import os
 import struct
+import sys
+import tempfile
 
 try:
-    from parttool import PartitionName, PartitionType, ParttoolTarget, PARTITION_TABLE_OFFSET
+    from parttool import PARTITION_TABLE_OFFSET, PartitionName, PartitionType, ParttoolTarget
 except ImportError:
-    COMPONENTS_PATH = os.path.expandvars(os.path.join("$IDF_PATH", "components"))
-    PARTTOOL_DIR = os.path.join(COMPONENTS_PATH, "partition_table")
+    COMPONENTS_PATH = os.path.expandvars(os.path.join('$IDF_PATH', 'components'))
+    PARTTOOL_DIR = os.path.join(COMPONENTS_PATH, 'partition_table')
     sys.path.append(PARTTOOL_DIR)
-    from parttool import PartitionName, PartitionType, ParttoolTarget, PARTITION_TABLE_OFFSET
+    from parttool import PARTITION_TABLE_OFFSET, PartitionName, PartitionType, ParttoolTarget
 
 __version__ = '2.0'
 
@@ -48,7 +49,7 @@ def status(msg):
 
 class OtatoolTarget():
 
-    OTADATA_PARTITION = PartitionType("data", "ota")
+    OTADATA_PARTITION = PartitionType('data', 'ota')
 
     def __init__(self, port=None, baud=None, partition_table_offset=PARTITION_TABLE_OFFSET, partition_table_file=None,
                  spi_flash_sec_size=SPI_FLASH_SEC_SIZE, esptool_args=[], esptool_write_args=[],
@@ -61,14 +62,14 @@ class OtatoolTarget():
         temp_file.close()
         try:
             self.target.read_partition(OtatoolTarget.OTADATA_PARTITION, temp_file.name)
-            with open(temp_file.name, "rb") as f:
+            with open(temp_file.name, 'rb') as f:
                 self.otadata = f.read()
         finally:
             os.unlink(temp_file.name)
 
     def _check_otadata_partition(self):
         if not self.otadata:
-            raise Exception("No otadata partition found")
+            raise Exception('No otadata partition found')
 
     def erase_otadata(self):
         self._check_otadata_partition()
@@ -77,7 +78,7 @@ class OtatoolTarget():
 
     def _get_otadata_info(self):
         info = []
-        otadata_info = collections.namedtuple("otadata_info", "seq crc")
+        otadata_info = collections.namedtuple('otadata_info', 'seq crc')
 
         for i in range(2):
             start = i * (self.spi_flash_sec_size >> 1)
@@ -94,7 +95,7 @@
 
     def _get_partition_id_from_ota_id(self, ota_id):
         if isinstance(ota_id, int):
-            return PartitionType("app", "ota_" + str(ota_id))
+            return PartitionType('app', 'ota_' + str(ota_id))
         else:
             return PartitionName(ota_id)
 
@@ -106,7 +107,7 @@
 
         def is_otadata_info_valid(status):
             seq = status.seq % (1 << 32)
-            crc = hex(binascii.crc32(struct.pack("I", seq), 0xFFFFFFFF) % (1 << 32))
+            crc = hex(binascii.crc32(struct.pack('I', seq), 0xFFFFFFFF) % (1 << 32))
             return seq < (int('0xFFFFFFFF', 16) % (1 << 32)) and status.crc == crc
 
         partition_table = self.target.partition_table
@@ -124,7 +125,7 @@ class OtatoolTarget():
         ota_partitions = sorted(ota_partitions, key=lambda p: p.subtype)
 
         if not ota_partitions:
-            raise Exception("No ota app partitions found")
+            raise Exception('No ota app partitions found')
 
         # Look for the app partition to switch to
         ota_partition_next = None
@@ -137,7 +138,7 @@ class OtatoolTarget():
             ota_partition_next = list(ota_partition_next)[0]
         except IndexError:
Exception("Partition to switch to not found") + raise Exception('Partition to switch to not found') otadata_info = self._get_otadata_info() @@ -177,15 +178,15 @@ class OtatoolTarget(): ota_seq_next = target_seq # Create binary data from computed values - ota_seq_next = struct.pack("I", ota_seq_next) + ota_seq_next = struct.pack('I', ota_seq_next) ota_seq_crc_next = binascii.crc32(ota_seq_next, 0xFFFFFFFF) % (1 << 32) - ota_seq_crc_next = struct.pack("I", ota_seq_crc_next) + ota_seq_crc_next = struct.pack('I', ota_seq_crc_next) temp_file = tempfile.NamedTemporaryFile(delete=False) temp_file.close() try: - with open(temp_file.name, "wb") as otadata_next_file: + with open(temp_file.name, 'wb') as otadata_next_file: start = (1 if otadata_compute_base == 0 else 0) * (self.spi_flash_sec_size >> 1) otadata_next_file.write(self.otadata) @@ -217,14 +218,14 @@ def _read_otadata(target): otadata_info = target._get_otadata_info() - print(" {:8s} \t {:8s} | \t {:8s} \t {:8s}".format("OTA_SEQ", "CRC", "OTA_SEQ", "CRC")) - print("Firmware: 0x{:8x} \t0x{:8x} | \t0x{:8x} \t 0x{:8x}".format(otadata_info[0].seq, otadata_info[0].crc, + print(' {:8s} \t {:8s} | \t {:8s} \t {:8s}'.format('OTA_SEQ', 'CRC', 'OTA_SEQ', 'CRC')) + print('Firmware: 0x{:8x} \t0x{:8x} | \t0x{:8x} \t 0x{:8x}'.format(otadata_info[0].seq, otadata_info[0].crc, otadata_info[1].seq, otadata_info[1].crc)) def _erase_otadata(target): target.erase_otadata() - status("Erased ota_data partition contents") + status('Erased ota_data partition contents') def _switch_ota_partition(target, ota_id): @@ -233,68 +234,68 @@ def _switch_ota_partition(target, ota_id): def _read_ota_partition(target, ota_id, output): target.read_ota_partition(ota_id, output) - status("Read ota partition contents to file {}".format(output)) + status('Read ota partition contents to file {}'.format(output)) def _write_ota_partition(target, ota_id, input): target.write_ota_partition(ota_id, input) - status("Written contents of file {} to ota partition".format(input)) + status('Written contents of file {} to ota partition'.format(input)) def _erase_ota_partition(target, ota_id): target.erase_ota_partition(ota_id) - status("Erased contents of ota partition") + status('Erased contents of ota partition') def main(): if sys.version_info[0] < 3: - print("WARNING: Support for Python 2 is deprecated and will be removed in future versions.", file=sys.stderr) + print('WARNING: Support for Python 2 is deprecated and will be removed in future versions.', file=sys.stderr) elif sys.version_info[0] == 3 and sys.version_info[1] < 6: - print("WARNING: Python 3 versions older than 3.6 are not supported.", file=sys.stderr) + print('WARNING: Python 3 versions older than 3.6 are not supported.', file=sys.stderr) global quiet - parser = argparse.ArgumentParser("ESP-IDF OTA Partitions Tool") + parser = argparse.ArgumentParser('ESP-IDF OTA Partitions Tool') - parser.add_argument("--quiet", "-q", help="suppress stderr messages", action="store_true") - parser.add_argument("--esptool-args", help="additional main arguments for esptool", nargs="+") - parser.add_argument("--esptool-write-args", help="additional subcommand arguments for esptool write_flash", nargs="+") - parser.add_argument("--esptool-read-args", help="additional subcommand arguments for esptool read_flash", nargs="+") - parser.add_argument("--esptool-erase-args", help="additional subcommand arguments for esptool erase_region", nargs="+") + parser.add_argument('--quiet', '-q', help='suppress stderr messages', action='store_true') + 
+    parser.add_argument('--esptool-args', help='additional main arguments for esptool', nargs='+')
+    parser.add_argument('--esptool-write-args', help='additional subcommand arguments for esptool write_flash', nargs='+')
+    parser.add_argument('--esptool-read-args', help='additional subcommand arguments for esptool read_flash', nargs='+')
+    parser.add_argument('--esptool-erase-args', help='additional subcommand arguments for esptool erase_region', nargs='+')
 
     # There are two possible sources for the partition table: a device attached to the host
     # or a partition table CSV/binary file. These sources are mutually exclusive.
-    parser.add_argument("--port", "-p", help="port where the device to read the partition table from is attached")
+    parser.add_argument('--port', '-p', help='port where the device to read the partition table from is attached')
 
-    parser.add_argument("--baud", "-b", help="baudrate to use", type=int)
+    parser.add_argument('--baud', '-b', help='baudrate to use', type=int)
 
-    parser.add_argument("--partition-table-offset", "-o", help="offset to read the partition table from", type=str)
+    parser.add_argument('--partition-table-offset', '-o', help='offset to read the partition table from', type=str)
 
-    parser.add_argument("--partition-table-file", "-f", help="file (CSV/binary) to read the partition table from; \
-                        overrides device attached to specified port as the partition table source when defined")
+    parser.add_argument('--partition-table-file', '-f', help='file (CSV/binary) to read the partition table from; \
+                        overrides device attached to specified port as the partition table source when defined')
 
-    subparsers = parser.add_subparsers(dest="operation", help="run otatool -h for additional help")
+    subparsers = parser.add_subparsers(dest='operation', help='run otatool -h for additional help')
 
     spi_flash_sec_size = argparse.ArgumentParser(add_help=False)
-    spi_flash_sec_size.add_argument("--spi-flash-sec-size", help="value of SPI_FLASH_SEC_SIZE macro", type=str)
+    spi_flash_sec_size.add_argument('--spi-flash-sec-size', help='value of SPI_FLASH_SEC_SIZE macro', type=str)
 
     # Specify the supported operations
-    subparsers.add_parser("read_otadata", help="read otadata partition", parents=[spi_flash_sec_size])
-    subparsers.add_parser("erase_otadata", help="erase otadata partition")
+    subparsers.add_parser('read_otadata', help='read otadata partition', parents=[spi_flash_sec_size])
+    subparsers.add_parser('erase_otadata', help='erase otadata partition')
 
     slot_or_name_parser = argparse.ArgumentParser(add_help=False)
     slot_or_name_parser_args = slot_or_name_parser.add_mutually_exclusive_group()
-    slot_or_name_parser_args.add_argument("--slot", help="slot number of the ota partition", type=int)
-    slot_or_name_parser_args.add_argument("--name", help="name of the ota partition")
+    slot_or_name_parser_args.add_argument('--slot', help='slot number of the ota partition', type=int)
+    slot_or_name_parser_args.add_argument('--name', help='name of the ota partition')
 
-    subparsers.add_parser("switch_ota_partition", help="switch otadata partition", parents=[slot_or_name_parser, spi_flash_sec_size])
+    subparsers.add_parser('switch_ota_partition', help='switch otadata partition', parents=[slot_or_name_parser, spi_flash_sec_size])
 
-    read_ota_partition_subparser = subparsers.add_parser("read_ota_partition", help="read contents of an ota partition", parents=[slot_or_name_parser])
-    read_ota_partition_subparser.add_argument("--output", help="file to write the contents of the ota partition to")
+    read_ota_partition_subparser = subparsers.add_parser('read_ota_partition', help='read contents of an ota partition', parents=[slot_or_name_parser])
+    read_ota_partition_subparser.add_argument('--output', help='file to write the contents of the ota partition to')
 
-    write_ota_partition_subparser = subparsers.add_parser("write_ota_partition", help="write contents to an ota partition", parents=[slot_or_name_parser])
-    write_ota_partition_subparser.add_argument("--input", help="file whose contents to write to the ota partition")
+    write_ota_partition_subparser = subparsers.add_parser('write_ota_partition', help='write contents to an ota partition', parents=[slot_or_name_parser])
+    write_ota_partition_subparser.add_argument('--input', help='file whose contents to write to the ota partition')
 
-    subparsers.add_parser("erase_ota_partition", help="erase contents of an ota partition", parents=[slot_or_name_parser])
+    subparsers.add_parser('erase_ota_partition', help='erase contents of an ota partition', parents=[slot_or_name_parser])
 
     args = parser.parse_args()
 
@@ -309,34 +310,34 @@ def main():
     target_args = {}
 
     if args.port:
-        target_args["port"] = args.port
+        target_args['port'] = args.port
 
     if args.partition_table_file:
-        target_args["partition_table_file"] = args.partition_table_file
+        target_args['partition_table_file'] = args.partition_table_file
 
     if args.partition_table_offset:
-        target_args["partition_table_offset"] = int(args.partition_table_offset, 0)
+        target_args['partition_table_offset'] = int(args.partition_table_offset, 0)
 
     try:
        if args.spi_flash_sec_size:
-            target_args["spi_flash_sec_size"] = int(args.spi_flash_sec_size, 0)
+            target_args['spi_flash_sec_size'] = int(args.spi_flash_sec_size, 0)
    except AttributeError:
        pass
 
     if args.esptool_args:
-        target_args["esptool_args"] = args.esptool_args
+        target_args['esptool_args'] = args.esptool_args
 
     if args.esptool_write_args:
-        target_args["esptool_write_args"] = args.esptool_write_args
+        target_args['esptool_write_args'] = args.esptool_write_args
 
     if args.esptool_read_args:
-        target_args["esptool_read_args"] = args.esptool_read_args
+        target_args['esptool_read_args'] = args.esptool_read_args
 
     if args.esptool_erase_args:
-        target_args["esptool_erase_args"] = args.esptool_erase_args
+        target_args['esptool_erase_args'] = args.esptool_erase_args
 
     if args.baud:
-        target_args["baud"] = args.baud
+        target_args['baud'] = args.baud
 
     target = OtatoolTarget(**target_args)
 
@@ -347,10 +348,10 @@ def main():
 
     try:
         if args.name is not None:
-            ota_id = ["name"]
+            ota_id = ['name']
         else:
             if args.slot is not None:
-                ota_id = ["slot"]
+                ota_id = ['slot']
     except AttributeError:
         pass
 
@@ -358,8 +359,8 @@
         'read_otadata':(_read_otadata, []),
         'erase_otadata':(_erase_otadata, []),
         'switch_ota_partition':(_switch_ota_partition, ota_id),
-        'read_ota_partition':(_read_ota_partition, ["output"] + ota_id),
-        'write_ota_partition':(_write_ota_partition, ["input"] + ota_id),
+        'read_ota_partition':(_read_ota_partition, ['output'] + ota_id),
+        'write_ota_partition':(_write_ota_partition, ['input'] + ota_id),
         'erase_ota_partition':(_erase_ota_partition, ota_id)
     }
diff --git a/components/efuse/efuse_table_gen.py b/components/efuse/efuse_table_gen.py
index 8033108e42..5b66f73ab1 100755
--- a/components/efuse/efuse_table_gen.py
+++ b/components/efuse/efuse_table_gen.py
@@ -17,18 +17,19 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from __future__ import print_function, division
+from __future__ import division, print_function
+
 import argparse
+import hashlib
 import os
 import re
 import sys
-import hashlib
 
 __version__ = '1.0'
 
 quiet = False
 max_blk_len = 256
-idf_target = "esp32"
+idf_target = 'esp32'
 
 copyright = '''// Copyright 2017-2020 Espressif Systems (Shanghai) PTE LTD
 //
@@ -61,7 +62,7 @@ def critical(msg):
 class FuseTable(list):
     def __init__(self):
         super(FuseTable, self).__init__(self)
-        self.md5_digest_table = ""
+        self.md5_digest_table = ''
 
     @classmethod
     def from_csv(cls, csv_contents):
@@ -77,14 +78,14 @@ class FuseTable(list):
 
         for line_no in range(len(lines)):
             line = expand_vars(lines[line_no]).strip()
-            if line.startswith("#") or len(line) == 0:
+            if line.startswith('#') or len(line) == 0:
                 continue
             try:
                 res.append(FuseDefinition.from_csv(line))
             except InputError as e:
-                raise InputError("Error at line %d: %s" % (line_no + 1, e))
+                raise InputError('Error at line %d: %s' % (line_no + 1, e))
             except Exception:
-                critical("Unexpected error parsing line %d: %s" % (line_no + 1, line))
+                critical('Unexpected error parsing line %d: %s' % (line_no + 1, line))
                 raise
 
         # fix up missing bit_start
@@ -102,9 +103,9 @@ class FuseTable(list):
         # fix up missing field_name
         last_field = None
         for e in res:
-            if e.field_name == "" and last_field is None:
-                raise InputError("Error at line %d: %s missing field name" % (line_no + 1, e))
-            elif e.field_name == "" and last_field is not None:
+            if e.field_name == '' and last_field is None:
+                raise InputError('Error at line %d: %s missing field name' % (line_no + 1, e))
+            elif e.field_name == '' and last_field is not None:
                 e.field_name = last_field.field_name
             last_field = e
 
@@ -136,12 +137,12 @@ class FuseTable(list):
         fl_error = False
         for p in self:
             field_name = p.field_name + p.group
-            if field_name != "" and len(duplicates.intersection([field_name])) != 0:
+            if field_name != '' and len(duplicates.intersection([field_name])) != 0:
                 fl_error = True
-                print("Field at %s, %s, %s, %s have dublicate field_name" %
+                print('Field at %s, %s, %s, %s have dublicate field_name' %
                       (p.field_name, p.efuse_block, p.bit_start, p.bit_count))
         if fl_error is True:
-            raise InputError("Field names must be unique")
+            raise InputError('Field names must be unique')
 
     def verify(self, type_table=None):
         for p in self:
@@ -153,7 +154,7 @@ class FuseTable(list):
         last = None
         for p in sorted(self, key=lambda x:(x.efuse_block, x.bit_start)):
             if last is not None and last.efuse_block == p.efuse_block and p.bit_start < last.bit_start + last.bit_count:
-                raise InputError("Field at %s, %s, %s, %s overlaps %s, %s, %s, %s" %
+                raise InputError('Field at %s, %s, %s, %s overlaps %s, %s, %s, %s' %
                                  (p.field_name, p.efuse_block, p.bit_start, p.bit_count,
                                   last.field_name, last.efuse_block, last.bit_start, last.bit_count))
             last = p
 
@@ -161,7 +162,7 @@
     def calc_md5(self):
         txt_table = ''
         for p in self:
-            txt_table += "%s %s %d %s %s" % (p.field_name, p.efuse_block, p.bit_start, str(p.get_bit_count()), p.comment) + "\n"
+            txt_table += '%s %s %d %s %s' % (p.field_name, p.efuse_block, p.bit_start, str(p.get_bit_count()), p.comment) + '\n'
         self.md5_digest_table = hashlib.md5(txt_table.encode('utf-8')).hexdigest()
 
     def show_range_used_bits(self):
@@ -169,9 +170,9 @@
         rows = ''
         rows += 'Sorted efuse table:\n'
         num = 1
-        rows += "{0} \t{1:<30} \t{2} \t{3} \t{4}".format("#", "field_name", "efuse_block", "bit_start", "bit_count") + "\n"
+        rows += '{0} \t{1:<30} \t{2} \t{3} \t{4}'.format('#', 'field_name', 'efuse_block', 'bit_start', 'bit_count') + '\n'
         for p in sorted(self, key=lambda x:(x.efuse_block, x.bit_start)):
-            rows += "{0} \t{1:<30} \t{2} \t{3:^8} \t{4:^8}".format(num, p.field_name, p.efuse_block, p.bit_start, p.bit_count) + "\n"
+            rows += '{0} \t{1:<30} \t{2} \t{3:^8} \t{4:^8}'.format(num, p.field_name, p.efuse_block, p.bit_start, p.bit_count) + '\n'
             num += 1
 
         rows += '\nUsed bits in efuse table:\n'
@@ -204,30 +205,30 @@ class FuseTable(list):
 
     def to_header(self, file_name):
         rows = [copyright]
-        rows += ["#ifdef __cplusplus",
+        rows += ['#ifdef __cplusplus',
                  'extern "C" {',
-                 "#endif",
-                 "",
-                 "",
-                 "// md5_digest_table " + self.md5_digest_table,
-                 "// This file was generated from the file " + file_name + ".csv. DO NOT CHANGE THIS FILE MANUALLY.",
-                 "// If you want to change some fields, you need to change " + file_name + ".csv file",
-                 "// then run `efuse_common_table` or `efuse_custom_table` command it will generate this file.",
+                 '#endif',
+                 '',
+                 '',
+                 '// md5_digest_table ' + self.md5_digest_table,
+                 '// This file was generated from the file ' + file_name + '.csv. DO NOT CHANGE THIS FILE MANUALLY.',
+                 '// If you want to change some fields, you need to change ' + file_name + '.csv file',
+                 '// then run `efuse_common_table` or `efuse_custom_table` command it will generate this file.',
                  "// To show efuse_table run the command 'show_efuse_table'.",
-                 "",
-                 ""]
+                 '',
+                 '']
 
         last_field_name = ''
         for p in self:
             if (p.field_name != last_field_name):
-                rows += ["extern const esp_efuse_desc_t* " + "ESP_EFUSE_" + p.field_name + "[];"]
+                rows += ['extern const esp_efuse_desc_t* ' + 'ESP_EFUSE_' + p.field_name + '[];']
                 last_field_name = p.field_name
 
-        rows += ["",
-                 "#ifdef __cplusplus",
-                 "}",
-                 "#endif",
-                 ""]
+        rows += ['',
+                 '#ifdef __cplusplus',
+                 '}',
+                 '#endif',
+                 '']
         return '\n'.join(rows)
 
     def to_c_file(self, file_name, debug):
@@ -236,33 +237,33 @@ class FuseTable(list):
             '#include "esp_efuse.h"',
             '#include ',
             '#include "' + file_name + '.h"',
-            "",
-            "// md5_digest_table " + self.md5_digest_table,
-            "// This file was generated from the file " + file_name + ".csv. DO NOT CHANGE THIS FILE MANUALLY.",
-            "// If you want to change some fields, you need to change " + file_name + ".csv file",
-            "// then run `efuse_common_table` or `efuse_custom_table` command it will generate this file.",
+            '',
+            '// md5_digest_table ' + self.md5_digest_table,
+            '// This file was generated from the file ' + file_name + '.csv. DO NOT CHANGE THIS FILE MANUALLY.',
+            '// If you want to change some fields, you need to change ' + file_name + '.csv file',
+            '// then run `efuse_common_table` or `efuse_custom_table` command it will generate this file.',
             "// To show efuse_table run the command 'show_efuse_table'."]
 
-        rows += [""]
+        rows += ['']
 
-        if idf_target == "esp32":
-            rows += ["#define MAX_BLK_LEN CONFIG_EFUSE_MAX_BLK_LEN"]
+        if idf_target == 'esp32':
+            rows += ['#define MAX_BLK_LEN CONFIG_EFUSE_MAX_BLK_LEN']
 
-        rows += [""]
+        rows += ['']
 
-        last_free_bit_blk1 = self.get_str_position_last_free_bit_in_blk("EFUSE_BLK1")
-        last_free_bit_blk2 = self.get_str_position_last_free_bit_in_blk("EFUSE_BLK2")
-        last_free_bit_blk3 = self.get_str_position_last_free_bit_in_blk("EFUSE_BLK3")
+        last_free_bit_blk1 = self.get_str_position_last_free_bit_in_blk('EFUSE_BLK1')
+        last_free_bit_blk2 = self.get_str_position_last_free_bit_in_blk('EFUSE_BLK2')
+        last_free_bit_blk3 = self.get_str_position_last_free_bit_in_blk('EFUSE_BLK3')
 
-        rows += ["// The last free bit in the block is counted over the entire file."]
+        rows += ['// The last free bit in the block is counted over the entire file.']
 
         if last_free_bit_blk1 is not None:
-            rows += ["#define LAST_FREE_BIT_BLK1 " + last_free_bit_blk1]
+            rows += ['#define LAST_FREE_BIT_BLK1 ' + last_free_bit_blk1]
         if last_free_bit_blk2 is not None:
-            rows += ["#define LAST_FREE_BIT_BLK2 " + last_free_bit_blk2]
+            rows += ['#define LAST_FREE_BIT_BLK2 ' + last_free_bit_blk2]
         if last_free_bit_blk3 is not None:
-            rows += ["#define LAST_FREE_BIT_BLK3 " + last_free_bit_blk3]
+            rows += ['#define LAST_FREE_BIT_BLK3 ' + last_free_bit_blk3]
 
-        rows += [""]
+        rows += ['']
 
         if last_free_bit_blk1 is not None:
             rows += ['_Static_assert(LAST_FREE_BIT_BLK1 <= MAX_BLK_LEN, "The eFuse table does not match the coding scheme. '
                      'Edit the table and restart the efuse_common_table or efuse_custom_table command to regenerate the new files.");']
@@ -274,50 +275,50 @@
             rows += ['_Static_assert(LAST_FREE_BIT_BLK3 <= MAX_BLK_LEN, "The eFuse table does not match the coding scheme. '
                      'Edit the table and restart the efuse_common_table or efuse_custom_table command to regenerate the new files.");']
 
-        rows += [""]
+        rows += ['']
 
         last_name = ''
         for p in self:
             if (p.field_name != last_name):
                 if last_name != '':
-                    rows += ["};\n"]
-                rows += ["static const esp_efuse_desc_t " + p.field_name + "[] = {"]
+                    rows += ['};\n']
+                rows += ['static const esp_efuse_desc_t ' + p.field_name + '[] = {']
                 last_name = p.field_name
-            rows += [p.to_struct(debug) + ","]
-        rows += ["};\n"]
-        rows += ["\n\n\n"]
+            rows += [p.to_struct(debug) + ',']
+        rows += ['};\n']
+        rows += ['\n\n\n']
 
         last_name = ''
         for p in self:
             if (p.field_name != last_name):
                 if last_name != '':
-                    rows += [" NULL",
-                             "};\n"]
-                rows += ["const esp_efuse_desc_t* " + "ESP_EFUSE_" + p.field_name + "[] = {"]
+                    rows += [' NULL',
+                             '};\n']
+                rows += ['const esp_efuse_desc_t* ' + 'ESP_EFUSE_' + p.field_name + '[] = {']
                 last_name = p.field_name
-            index = str(0) if str(p.group) == "" else str(p.group)
-            rows += [" &" + p.field_name + "[" + index + "], \t\t// " + p.comment]
-        rows += [" NULL",
-                 "};\n"]
+            index = str(0) if str(p.group) == '' else str(p.group)
+            rows += [' &' + p.field_name + '[' + index + '], \t\t// ' + p.comment]
+        rows += [' NULL',
+                 '};\n']
 
         return '\n'.join(rows)
 
 
 class FuseDefinition(object):
     def __init__(self):
-        self.field_name = ""
-        self.group = ""
-        self.efuse_block = ""
+        self.field_name = ''
+        self.group = ''
+        self.efuse_block = ''
         self.bit_start = None
         self.bit_count = None
         self.define = None
-        self.comment = ""
+        self.comment = ''
 
     @classmethod
     def from_csv(cls, line):
         """ Parse a line from the CSV """
-        line_w_defaults = line + ",,,,"  # lazy way to support default fields
-        fields = [f.strip() for f in line_w_defaults.split(",")]
+        line_w_defaults = line + ',,,,'  # lazy way to support default fields
+        fields = [f.strip() for f in line_w_defaults.split(',')]
 
         res = FuseDefinition()
         res.field_name = fields[0]
@@ -330,12 +331,12 @@ class FuseDefinition(object):
         return res
 
     def parse_num(self, strval):
-        if strval == "":
+        if strval == '':
             return None  # Field will fill in default
         return self.parse_int(strval)
 
     def parse_bit_count(self, strval):
-        if strval == "MAX_BLK_LEN":
+        if strval == 'MAX_BLK_LEN':
             self.define = strval
             return self.get_max_bits_of_block()
         else:
@@ -345,18 +346,18 @@ class FuseDefinition(object):
         try:
             return int(v, 0)
         except ValueError:
-            raise InputError("Invalid field value %s" % v)
+            raise InputError('Invalid field value %s' % v)
 
     def parse_block(self, strval):
-        if strval == "":
+        if strval == '':
             raise InputError("Field 'efuse_block' can't be left empty.")
-        if idf_target == "esp32":
-            if strval not in ["EFUSE_BLK0", "EFUSE_BLK1", "EFUSE_BLK2", "EFUSE_BLK3"]:
+        if idf_target == 'esp32':
+            if strval not in ['EFUSE_BLK0', 'EFUSE_BLK1', 'EFUSE_BLK2', 'EFUSE_BLK3']:
                 raise InputError("Field 'efuse_block' should be one of EFUSE_BLK0..EFUSE_BLK3")
         else:
-            if strval not in ["EFUSE_BLK0", "EFUSE_BLK1", "EFUSE_BLK2", "EFUSE_BLK3", "EFUSE_BLK4",
-                              "EFUSE_BLK5", "EFUSE_BLK6", "EFUSE_BLK7", "EFUSE_BLK8", "EFUSE_BLK9",
-                              "EFUSE_BLK10"]:
+            if strval not in ['EFUSE_BLK0', 'EFUSE_BLK1', 'EFUSE_BLK2', 'EFUSE_BLK3', 'EFUSE_BLK4',
+                              'EFUSE_BLK5', 'EFUSE_BLK6', 'EFUSE_BLK7', 'EFUSE_BLK8', 'EFUSE_BLK9',
+                              'EFUSE_BLK10']:
                 raise InputError("Field 'efuse_block' should be one of EFUSE_BLK0..EFUSE_BLK10")
         return strval
 
@@ -365,32 +366,32 @@ class FuseDefinition(object):
         '''common_table: EFUSE_BLK0, EFUSE_BLK1, EFUSE_BLK2, EFUSE_BLK3
            custom_table: ----------, ----------, ----------, EFUSE_BLK3(some reserved in common_table)
         '''
-        if self.efuse_block == "EFUSE_BLK0":
+        if self.efuse_block == 'EFUSE_BLK0':
             return 256
         else:
             return max_blk_len
 
     def verify(self, type_table):
         if self.efuse_block is None:
-            raise ValidationError(self, "efuse_block field is not set")
+            raise ValidationError(self, 'efuse_block field is not set')
         if self.bit_count is None:
-            raise ValidationError(self, "bit_count field is not set")
+            raise ValidationError(self, 'bit_count field is not set')
 
         if type_table is not None:
-            if type_table == "custom_table":
-                if self.efuse_block != "EFUSE_BLK3":
-                    raise ValidationError(self, "custom_table should use only EFUSE_BLK3")
+            if type_table == 'custom_table':
+                if self.efuse_block != 'EFUSE_BLK3':
+                    raise ValidationError(self, 'custom_table should use only EFUSE_BLK3')
 
         max_bits = self.get_max_bits_of_block()
 
         if self.bit_start + self.bit_count > max_bits:
-            raise ValidationError(self, "The field is outside the boundaries(max_bits = %d) of the %s block" % (max_bits, self.efuse_block))
+            raise ValidationError(self, 'The field is outside the boundaries(max_bits = %d) of the %s block' % (max_bits, self.efuse_block))
 
     def get_full_name(self):
         def get_postfix(group):
-            postfix = ""
-            if group != "":
-                postfix = "_PART_" + group
+            postfix = ''
+            if group != '':
+                postfix = '_PART_' + group
             return postfix
 
         return self.field_name + get_postfix(self.group)
@@ -402,19 +403,19 @@ class FuseDefinition(object):
         return self.bit_count
 
     def to_struct(self, debug):
-        start = " {"
+        start = ' {'
         if debug is True:
-            start = " {" + '"' + self.field_name + '" ,'
+            start = ' {' + '"' + self.field_name + '" ,'
-        return ", ".join([start + self.efuse_block,
+        return ', '.join([start + self.efuse_block,
                           str(self.bit_start),
-                          str(self.get_bit_count()) + "}, \t // " + self.comment])
+                          str(self.get_bit_count()) + '}, \t // ' + self.comment])
 
 
 def process_input_file(file, type_table):
-    status("Parsing efuse CSV input file " + file.name + " ...")
+    status('Parsing efuse CSV input file ' + file.name + ' ...')
     input = file.read()
     table = FuseTable.from_csv(input)
-    status("Verifying efuse table...")
+    status('Verifying efuse table...')
     table.verify(type_table)
     return table
 
@@ -432,35 +433,35 @@ def create_output_files(name, output_table, debug):
     file_name = os.path.splitext(os.path.basename(name))[0]
     gen_dir = os.path.dirname(name)
 
-    dir_for_file_h = gen_dir + "/include"
+    dir_for_file_h = gen_dir + '/include'
     try:
         os.stat(dir_for_file_h)
     except Exception:
         os.mkdir(dir_for_file_h)
 
-    file_h_path = os.path.join(dir_for_file_h, file_name + ".h")
-    file_c_path = os.path.join(gen_dir, file_name + ".c")
+    file_h_path = os.path.join(dir_for_file_h, file_name + '.h')
+    file_c_path = os.path.join(gen_dir, file_name + '.c')
 
     # src files are the same
     if ckeck_md5_in_file(output_table.md5_digest_table, file_c_path) is False:
-        status("Creating efuse *.h file " + file_h_path + " ...")
+        status('Creating efuse *.h file ' + file_h_path + ' ...')
         output = output_table.to_header(file_name)
         with open(file_h_path, 'w') as f:
             f.write(output)
-        status("Creating efuse *.c file " + file_c_path + " ...")
+        status('Creating efuse *.c file ' + file_c_path + ' ...')
         output = output_table.to_c_file(file_name, debug)
         with open(file_c_path, 'w') as f:
             f.write(output)
     else:
-        print("Source files do not require updating correspond to csv file.")
+        print('Source files do not require updating correspond to csv file.')
 
 
 def main():
     if sys.version_info[0] < 3:
-        print("WARNING: Support for Python 2 is deprecated and will be removed in future versions.", file=sys.stderr)
+        print('WARNING: Support for Python 2 is deprecated and will be removed in future versions.', file=sys.stderr)
     elif sys.version_info[0] == 3 and sys.version_info[1] < 6:
-        print("WARNING: Python 3 versions older than 3.6 are not supported.", file=sys.stderr)
+        print('WARNING: Python 3 versions older than 3.6 are not supported.', file=sys.stderr)
     global quiet
     global max_blk_len
     global idf_target
@@ -468,8 +469,8 @@ def main():
     parser = argparse.ArgumentParser(description='ESP32 eFuse Manager')
     parser.add_argument('--idf_target', '-t', help='Target chip type', choices=['esp32', 'esp32s2', 'esp32s3', 'esp32c3'], default='esp32')
     parser.add_argument('--quiet', '-q', help="Don't print non-critical status messages to stderr", action='store_true')
-    parser.add_argument('--debug', help='Create header file with debug info', default=False, action="store_false")
-    parser.add_argument('--info', help='Print info about range of used bits', default=False, action="store_true")
+    parser.add_argument('--debug', help='Create header file with debug info', default=False, action='store_false')
+    parser.add_argument('--info', help='Print info about range of used bits', default=False, action='store_true')
     parser.add_argument('--max_blk_len', help='Max number of bits in BLOCKs', type=int, default=256)
     parser.add_argument('common_input', help='Path to common CSV file to parse.', type=argparse.FileType('r'))
     parser.add_argument('custom_input', help='Path to custom CSV file to parse.', type=argparse.FileType('r'), nargs='?', default=None)
@@ -479,18 +480,18 @@ def main():
 
     idf_target = args.idf_target
     max_blk_len = args.max_blk_len
-    print("Max number of bits in BLK %d" % (max_blk_len))
+    print('Max number of bits in BLK %d' % (max_blk_len))
     if max_blk_len not in [256, 192, 128]:
-        raise InputError("Unsupported block length = %d" % (max_blk_len))
+        raise InputError('Unsupported block length = %d' % (max_blk_len))
 
     quiet = args.quiet
     debug = args.debug
     info = args.info
 
-    common_table = process_input_file(args.common_input, "common_table")
+    common_table = process_input_file(args.common_input, 'common_table')
     two_table = common_table
     if args.custom_input is not None:
-        custom_table = process_input_file(args.custom_input, "custom_table")
+        custom_table = process_input_file(args.custom_input, 'custom_table')
         two_table += custom_table
         two_table.verify()
 
@@ -512,7 +513,7 @@ class InputError(RuntimeError):
 
 class ValidationError(InputError):
     def __init__(self, p, message):
-        super(ValidationError, self).__init__("Entry %s invalid: %s" % (p.field_name, message))
+        super(ValidationError, self).__init__('Entry %s invalid: %s' % (p.field_name, message))
 
 
 if __name__ == '__main__':
diff --git a/components/efuse/test_efuse_host/efuse_tests.py b/components/efuse/test_efuse_host/efuse_tests.py
index 1e4b9d3383..ec5a17acac 100755
--- a/components/efuse/test_efuse_host/efuse_tests.py
+++ b/components/efuse/test_efuse_host/efuse_tests.py
@@ -1,12 +1,13 @@
 #!/usr/bin/env python
-from __future__ import print_function, division
-import unittest
+from __future__ import division, print_function
+
 import sys
+import unittest
 
 try:
     import efuse_table_gen
 except ImportError:
-    sys.path.append("..")
+    sys.path.append('..')
     import efuse_table_gen
 
 
@@ -117,7 +118,7 @@ name2, EFUSE_BLK2, ,
 , EFUSE_BLK2, , 4,
 name1, EFUSE_BLK3, , 5,
 """
-        with self.assertRaisesRegex(efuse_table_gen.InputError, "Field names must be unique"):
+        with self.assertRaisesRegex(efuse_table_gen.InputError, 'Field names must be unique'):
             efuse_table_gen.FuseTable.from_csv(csv)
 
     def test_seq_bit_start5_fill(self):
@@ -154,7 +155,7 @@ name1, EFUSE_BLK3, 1,
 name2, EFUSE_BLK3, 5, 4, Use for test name 2
 """
         t = efuse_table_gen.FuseTable.from_csv(csv)
-        with self.assertRaisesRegex(efuse_table_gen.InputError, "overlap"):
+        with self.assertRaisesRegex(efuse_table_gen.InputError, 'overlap'):
             t.verify()
 
     def test_empty_field_name_fail(self):
@@ -163,7 +164,7 @@ name2, EFUSE_BLK3, 5,
 , EFUSE_BLK3, , 5,
 name2, EFUSE_BLK2, , 4,
 """
-        with self.assertRaisesRegex(efuse_table_gen.InputError, "missing field name"):
+        with self.assertRaisesRegex(efuse_table_gen.InputError, 'missing field name'):
             efuse_table_gen.FuseTable.from_csv(csv)
 
     def test_unique_field_name_fail(self):
@@ -172,7 +173,7 @@ name2, EFUSE_BLK2, ,
 name1, EFUSE_BLK3, 0, 5, Use for test name 1
 name1, EFUSE_BLK3, 5, 4, Use for test name 2
 """
-        with self.assertRaisesRegex(efuse_table_gen.InputError, "Field names must be unique"):
+        with self.assertRaisesRegex(efuse_table_gen.InputError, 'Field names must be unique'):
             efuse_table_gen.FuseTable.from_csv(csv)
 
     def test_bit_count_empty_fail(self):
@@ -181,7 +182,7 @@ name1, EFUSE_BLK3, 5,
 name1, EFUSE_BLK3, 0, , Use for test name 1
 name2, EFUSE_BLK3, 5, 4, Use for test name 2
 """
-        with self.assertRaisesRegex(efuse_table_gen.InputError, "empty"):
+        with self.assertRaisesRegex(efuse_table_gen.InputError, 'empty'):
             efuse_table_gen.FuseTable.from_csv(csv)
 
     def test_bit_start_num_fail(self):
@@ -190,7 +191,7 @@ name2, EFUSE_BLK3, 5,
 name1, EFUSE_BLK3, k, 5, Use for test name 1
 name2, EFUSE_BLK3, 5, 4, Use for test name 2
 """
-        with self.assertRaisesRegex(efuse_table_gen.InputError, "Invalid field value"):
+        with self.assertRaisesRegex(efuse_table_gen.InputError, 'Invalid field value'):
             efuse_table_gen.FuseTable.from_csv(csv)
 
     def test_join_entry(self):
@@ -257,7 +258,7 @@ name2, EFUSE_BLK3, 191,
 """
         efuse_table_gen.max_blk_len = 192
         t = efuse_table_gen.FuseTable.from_csv(csv)
-        with self.assertRaisesRegex(efuse_table_gen.InputError, "The field is outside the boundaries"):
+        with self.assertRaisesRegex(efuse_table_gen.InputError, 'The field is outside the boundaries'):
             t.verify()
 
     def test_field_blk1_size_is_more(self):
@@ -267,7 +268,7 @@ name1, EFUSE_BLK0, 0,
 name2, EFUSE_BLK1, 1, 256, Use for test name 2
 """
         t = efuse_table_gen.FuseTable.from_csv(csv)
-        with self.assertRaisesRegex(efuse_table_gen.InputError, "The field is outside the boundaries"):
+        with self.assertRaisesRegex(efuse_table_gen.InputError, 'The field is outside the boundaries'):
             t.verify()
 
 
@@ -311,8 +312,8 @@ name1, EFUSE_BLK3, 0,
 name2, EFUSE_BLK2, 5, 4, Use for test name 2
 """
         t = efuse_table_gen.FuseTable.from_csv(csv)
-        with self.assertRaisesRegex(efuse_table_gen.ValidationError, "custom_table should use only EFUSE_BLK3"):
-            t.verify("custom_table")
+        with self.assertRaisesRegex(efuse_table_gen.ValidationError, 'custom_table should use only EFUSE_BLK3'):
+            t.verify('custom_table')
 
     def test_common_and_custom_table_use_the_same_bits(self):
         csv_common = """
@@ -321,7 +322,7 @@ name1, EFUSE_BLK3, 0,
 name2, EFUSE_BLK2, 5, 4, Use for test name 2
 """
         common_table = efuse_table_gen.FuseTable.from_csv(csv_common)
-        common_table.verify("common_table")
+        common_table.verify('common_table')
         two_tables = common_table
 
         csv_custom = """
@@ -330,12 +331,12 @@ name3, EFUSE_BLK3, 20,
 name4, EFUSE_BLK3, 4, 1, Use for test name 2
 """
         custom_table = efuse_table_gen.FuseTable.from_csv(csv_custom)
-        custom_table.verify("custom_table")
+        custom_table.verify('custom_table')
         two_tables += custom_table
-        with self.assertRaisesRegex(efuse_table_gen.InputError, "overlaps"):
+        with self.assertRaisesRegex(efuse_table_gen.InputError, 'overlaps'):
             two_tables.verify()
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     unittest.main()
diff --git a/components/esp32s2/test/gen_digital_signature_tests.py b/components/esp32s2/test/gen_digital_signature_tests.py
index c3aed78ae3..9e50c10466 100644
--- a/components/esp32s2/test/gen_digital_signature_tests.py
+++ b/components/esp32s2/test/gen_digital_signature_tests.py
@@ -2,13 +2,13 @@
 
 import hashlib
 import hmac
-import struct
 import os
 import random
-from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
+import struct
 
-from cryptography.hazmat.primitives.asymmetric import rsa
 from cryptography.hazmat.backends import default_backend
+from cryptography.hazmat.primitives.asymmetric import rsa
+from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
 from cryptography.utils import int_to_bytes
 
 
@@ -19,9 +19,9 @@ def number_as_bignum_words(number):
     """
     result = []
     while number != 0:
-        result.append("0x%08x" % (number & 0xFFFFFFFF))
+        result.append('0x%08x' % (number & 0xFFFFFFFF))
         number >>= 32
-    return "{ " + ", ".join(result) + " }"
+    return '{ ' + ', '.join(result) + ' }'
 
 
 def number_as_bytes(number, pad_bits=None):
@@ -38,7 +38,7 @@ def bytes_as_char_array(b):
     """
     Given a sequence of bytes, format as a char array
     """
-    return "{ " + ", ".join("0x%02x" % x for x in b) + " }"
+    return '{ ' + ', '.join('0x%02x' % x for x in b) + ' }'
 
 
 NUM_HMAC_KEYS = 3
@@ -50,36 +50,36 @@
 
 hmac_keys = [os.urandom(32) for x in range(NUM_HMAC_KEYS)]
 messages = [random.randrange(0, 1 << 4096) for x in range(NUM_MESSAGES)]
 
-with open("digital_signature_test_cases.h", "w") as f:
-    f.write("/* File generated by gen_digital_signature_tests.py */\n\n")
+with open('digital_signature_test_cases.h', 'w') as f:
+    f.write('/* File generated by gen_digital_signature_tests.py */\n\n')
 
     # Write out HMAC keys
-    f.write("#define NUM_HMAC_KEYS %d\n\n" % NUM_HMAC_KEYS)
-    f.write("static const uint8_t test_hmac_keys[NUM_HMAC_KEYS][32] = {\n")
+    f.write('#define NUM_HMAC_KEYS %d\n\n' % NUM_HMAC_KEYS)
+    f.write('static const uint8_t test_hmac_keys[NUM_HMAC_KEYS][32] = {\n')
     for h in hmac_keys:
-        f.write(" %s,\n" % bytes_as_char_array(h))
-    f.write("};\n\n")
+        f.write(' %s,\n' % bytes_as_char_array(h))
+    f.write('};\n\n')
 
     # Write out messages
-    f.write("#define NUM_MESSAGES %d\n\n" % NUM_MESSAGES)
-    f.write("static const uint32_t test_messages[NUM_MESSAGES][4096/32] = {\n")
+    f.write('#define NUM_MESSAGES %d\n\n' % NUM_MESSAGES)
+    f.write('static const uint32_t test_messages[NUM_MESSAGES][4096/32] = {\n')
     for m in messages:
-        f.write(" // Message %d\n" % messages.index(m))
-        f.write(" %s,\n" % number_as_bignum_words(m))
-    f.write(" };\n")
-    f.write("\n\n\n")
+        f.write(' // Message %d\n' % messages.index(m))
+        f.write(' %s,\n' % number_as_bignum_words(m))
+    f.write(' };\n')
+    f.write('\n\n\n')
 
-    f.write("#define NUM_CASES %d\n\n" % NUM_CASES)
-    f.write("static const encrypt_testcase_t test_cases[NUM_CASES] = {\n")
+    f.write('#define NUM_CASES %d\n\n' % NUM_CASES)
+    f.write('static const encrypt_testcase_t test_cases[NUM_CASES] = {\n')
     for case in range(NUM_CASES):
-        f.write(" { /* Case %d */\n" % case)
+        f.write(' { /* Case %d */\n' % case)
 
         iv = os.urandom(16)
-        f.write(" .iv = %s,\n" % (bytes_as_char_array(iv)))
+        f.write(' .iv = %s,\n' % (bytes_as_char_array(iv)))
 
         hmac_key_idx = random.randrange(0, NUM_HMAC_KEYS)
-        aes_key = hmac.HMAC(hmac_keys[hmac_key_idx], b"\xFF" * 32, hashlib.sha256).digest()
+        aes_key = hmac.HMAC(hmac_keys[hmac_key_idx], b'\xFF' * 32, hashlib.sha256).digest()
 
         sizes = [4096, 3072, 2048, 1024, 512]
         key_size = sizes[case % len(sizes)]
@@ -100,13 +100,13 @@
             mprime &= 0xFFFFFFFF
             length = key_size // 32 - 1
 
-        f.write(" .p_data = {\n")
-        f.write(" .Y = %s,\n" % number_as_bignum_words(Y))
-        f.write(" .M = %s,\n" % number_as_bignum_words(M))
-        f.write(" .Rb = %s,\n" % number_as_bignum_words(rinv))
-        f.write(" .M_prime = 0x%08x,\n" % mprime)
-        f.write(" .length = %d, // %d bit\n" % (length, key_size))
-        f.write(" },\n")
+        f.write(' .p_data = {\n')
+        f.write(' .Y = %s,\n' % number_as_bignum_words(Y))
+        f.write(' .M = %s,\n' % number_as_bignum_words(M))
+        f.write(' .Rb = %s,\n' % number_as_bignum_words(rinv))
+        f.write(' .M_prime = 0x%08x,\n' % mprime)
+        f.write(' .length = %d, // %d bit\n' % (length, key_size))
+        f.write(' },\n')
 
         # calculate MD from preceding values and IV
 
@@ -114,7 +114,7 @@
         md_in = number_as_bytes(Y, 4096) + \
             number_as_bytes(M, 4096) + \
             number_as_bytes(rinv, 4096) + \
-            struct.pack("{}<".format(packet_data))
+    print('Packet_data>{}<'.format(packet_data))
     response = bytearray.fromhex(packet_data.decode())
-    print("Sending to socket:")
+    print('Sending to socket:')
     packet = ' '.join(format(x, '02x') for x in bytearray(response))
-    print("Packet>{}<".format(packet))
+    print('Packet>{}<'.format(packet))
     if client_address is not None:
         sock.sendto(response, ('127.0.0.1', 7777))
@@ -50,7 +50,7 @@ def sock_listener(dut1):
         try:
             payload, client_address = sock.recvfrom(1024)
             packet = ' '.join(format(x, '02x') for x in bytearray(payload))
-            print("Received from address {}, data {}".format(client_address, packet))
+            print('Received from address {}, data {}'.format(client_address, packet))
             dut1.write(str.encode(packet))
         except socket.timeout:
             pass
@@ -59,7 +59,7 @@ def sock_listener(dut1):
     sock = None
 
 
-@ttfw_idf.idf_example_test(env_tag="Example_WIFI")
+@ttfw_idf.idf_example_test(env_tag='Example_WIFI')
 def lwip_test_suite(env, extra_data):
     global stop_io_listener
     global stop_sock_listener
@@ -70,12 +70,12 @@ def lwip_test_suite(env, extra_data):
     3. Execute ttcn3 test suite
     4. Collect result from ttcn3
     """
-    dut1 = env.get_dut("net_suite", "examples/system/network_tests", dut_class=ttfw_idf.ESP32DUT)
+    dut1 = env.get_dut('net_suite', 'examples/system/network_tests', dut_class=ttfw_idf.ESP32DUT)
     # check and log bin size
-    binary_file = os.path.join(dut1.app.binary_path, "net_suite.bin")
+    binary_file = os.path.join(dut1.app.binary_path, 'net_suite.bin')
     bin_size = os.path.getsize(binary_file)
-    ttfw_idf.log_performance("net_suite", "{}KB".format(bin_size // 1024))
-    ttfw_idf.check_performance("net_suite", bin_size // 1024, dut1.TARGET)
+    ttfw_idf.log_performance('net_suite', '{}KB'.format(bin_size // 1024))
+    ttfw_idf.check_performance('net_suite', bin_size // 1024, dut1.TARGET)
     dut1.start_app()
     thread1 = Thread(target=sock_listener, args=(dut1, ))
     thread2 = Thread(target=io_listener, args=(dut1, ))
@@ -84,48 +84,48 @@
     TTCN_SRC = 'esp32_netsuite.ttcn'
     TTCN_CFG = 'esp32_netsuite.cfg'
     # System Paths
-    netsuite_path = os.getenv("NETSUITE_PATH")
-    netsuite_src_path = os.path.join(netsuite_path, "src")
+    netsuite_path = os.getenv('NETSUITE_PATH')
+    netsuite_src_path = os.path.join(netsuite_path, 'src')
     test_dir = os.path.dirname(os.path.realpath(__file__))
 
     # Building the suite
-    print("Rebuilding the test suite")
-    print("-------------------------")
+    print('Rebuilding the test suite')
+    print('-------------------------')
    # copy esp32 specific files to ttcn net-suite dir
     copyfile(os.path.join(test_dir, TTCN_SRC), os.path.join(netsuite_src_path, TTCN_SRC))
     copyfile(os.path.join(test_dir, TTCN_CFG), os.path.join(netsuite_src_path, TTCN_CFG))
     proc = subprocess.Popen(['bash', '-c', 'cd ' + netsuite_src_path + ' && source make.sh'],
                             cwd=netsuite_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     output = proc.stdout.read()
-    print("Note: First build step we expect failure (titan/net_suite build system not suitable for multijob make)")
+    print('Note: First build step we expect failure (titan/net_suite build system not suitable for multijob make)')
     print(output)
     proc = subprocess.Popen(['bash', '-c', 'cd ' + netsuite_src_path + ' && make'],
                             cwd=netsuite_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    print("Note: This time all dependencies shall be generated -- multijob make shall pass")
+    print('Note: This time all dependencies shall be generated -- multijob make shall pass')
     output = proc.stdout.read()
     print(output)
 
     # Executing the test suite
     thread1.start()
     thread2.start()
     time.sleep(2)
-    print("Executing the test suite")
-    print("------------------------")
+    print('Executing the test suite')
+    print('------------------------')
     proc = subprocess.Popen(['ttcn3_start', os.path.join(netsuite_src_path,'test_suite'), os.path.join(netsuite_src_path, TTCN_CFG)],
                             stdout=subprocess.PIPE)
     output = proc.stdout.read()
     print(output)
 
-    print("Collecting results")
-    print("------------------")
+    print('Collecting results')
+    print('------------------')
     verdict_stats = re.search('(Verdict statistics:.*)', output)
     if verdict_stats:
         verdict_stats = verdict_stats.group(1)
     else:
-        verdict_stats = b""
+        verdict_stats = b''
     verdict = re.search('Overall verdict: pass', output)
     if verdict:
-        print("Test passed!")
-        Utility.console_log(verdict_stats, "green")
+        print('Test passed!')
+        Utility.console_log(verdict_stats, 'green')
     else:
-        Utility.console_log(verdict_stats, "red")
+        Utility.console_log(verdict_stats, 'red')
         raise ValueError('Test failed with: {}'.format(verdict_stats))
     else:
         try:
@@ -137,8 +137,8 @@ def lwip_test_suite(env, extra_data):
             time.sleep(0.5)
     except KeyboardInterrupt:
         pass
-    print("Executing done, waiting for tests to finish")
-    print("-------------------------------------------")
+    print('Executing done, waiting for tests to finish')
+    print('-------------------------------------------')
     stop_io_listener.set()
     stop_sock_listener.set()
     thread1.join()
@@ -146,6 +146,6 @@ def lwip_test_suite(env, extra_data):
 
 
 if __name__ == '__main__':
-    print("Manual execution, please build and start ttcn in a separate console")
+    print('Manual execution, please build and start ttcn in a separate console')
     manual_test = True
     lwip_test_suite()
diff --git a/components/mbedtls/esp_crt_bundle/gen_crt_bundle.py b/components/mbedtls/esp_crt_bundle/gen_crt_bundle.py
index 4f558fb681..87e29e61fa 100755
--- a/components/mbedtls/esp_crt_bundle/gen_crt_bundle.py
+++ b/components/mbedtls/esp_crt_bundle/gen_crt_bundle.py
@@ -24,12 +24,12 @@
 
 from __future__ import with_statement
 
-import os
-import sys
-import struct
 import argparse
 import csv
+import os
 import re
+import struct
+import sys
 from io import open
 
 try:
@@ -80,22 +80,22 @@ class CertificateBundle:
     def add_from_file(self, file_path):
         try:
             if file_path.endswith('.pem'):
-                status("Parsing certificates from %s" % file_path)
+                status('Parsing certificates from %s' % file_path)
                 with open(file_path, 'r', encoding='utf-8') as f:
                     crt_str = f.read()
                     self.add_from_pem(crt_str)
                     return True
 
             elif file_path.endswith('.der'):
-                status("Parsing certificates from %s" % file_path)
+                status('Parsing certificates from %s' % file_path)
                 with open(file_path, 'rb') as f:
                     crt_str = f.read()
                     self.add_from_der(crt_str)
                     return True
 
         except ValueError:
-            critical("Invalid certificate in %s" % file_path)
-            raise InputError("Invalid certificate")
+            critical('Invalid certificate in %s' % file_path)
+            raise InputError('Invalid certificate')
 
         return False
 
@@ -119,13 +119,13 @@ class CertificateBundle:
                 crt += strg
 
         if(count == 0):
-            raise InputError("No certificate found")
+            raise InputError('No certificate found')
 
-        status("Successfully added %d certificates" % count)
+        status('Successfully added %d certificates' % count)
 
     def add_from_der(self, crt_str):
         self.certificates.append(x509.load_der_x509_certificate(crt_str, default_backend()))
-        status("Successfully added 1 certificate")
+        status('Successfully added 1 certificate')
 
     def create_bundle(self):
         # Sort certificates in order to do binary search when looking up certificates
@@ -162,7 +162,7 @@ class CertificateBundle:
             for row in csv_reader:
                 filter_set.add(row[1])
 
-        status("Parsing certificates from %s" % crts_path)
+        status('Parsing certificates from %s' % crts_path)
         crt_str = []
         with open(crts_path, 'r', encoding='utf-8') as f:
             crt_str = f.read()
@@ -202,14 +202,14 @@ def main():
 
     for path in args.input:
         if os.path.isfile(path):
-            if os.path.basename(path) == "cacrt_all.pem" and args.filter:
+            if os.path.basename(path) == 'cacrt_all.pem' and args.filter:
                 bundle.add_with_filter(path, args.filter)
             else:
                 bundle.add_from_file(path)
         elif os.path.isdir(path):
             bundle.add_from_path(path)
         else:
-            raise InputError("Invalid --input=%s, is neither file nor folder" % args.input)
+            raise InputError('Invalid --input=%s, is neither file nor folder' % args.input)
 
     status('Successfully added %d certificates in total' % len(bundle.certificates))
diff --git a/components/mbedtls/esp_crt_bundle/test_gen_crt_bundle/test_gen_crt_bundle.py b/components/mbedtls/esp_crt_bundle/test_gen_crt_bundle/test_gen_crt_bundle.py
index 167aa6384b..4ad3b72b97 100755
--- a/components/mbedtls/esp_crt_bundle/test_gen_crt_bundle/test_gen_crt_bundle.py
+++ b/components/mbedtls/esp_crt_bundle/test_gen_crt_bundle/test_gen_crt_bundle.py
@@ -1,13 +1,13 @@
 #!/usr/bin/env python
 
-import unittest
-import sys
 import os
+import sys
+import unittest
 
 try:
     import gen_crt_bundle
 except ImportError:
-    sys.path.append("..")
+    sys.path.append('..')
     import gen_crt_bundle
 
 
@@ -67,11 +67,11 @@ class GenCrtBundleTests(Py23TestCase):
 
     def test_invalid_crt_input(self):
         bundle = gen_crt_bundle.CertificateBundle()
-        with self.assertRaisesRegex(gen_crt_bundle.InputError, "Invalid certificate"):
+        with self.assertRaisesRegex(gen_crt_bundle.InputError, 'Invalid certificate'):
             bundle.add_from_file(test_crts_path + invalid_test_file)
 
-        with self.assertRaisesRegex(gen_crt_bundle.InputError, "No certificate found"):
-            bundle.add_from_pem("")
+        with self.assertRaisesRegex(gen_crt_bundle.InputError, 'No certificate found'):
+            bundle.add_from_pem('')
 
     def test_non_ascii_crt_input(self):
         bundle = gen_crt_bundle.CertificateBundle()
@@ -80,5 +80,5 @@ class GenCrtBundleTests(Py23TestCase):
         self.assertTrue(len(bundle.certificates))
 
 
-if __name__ == "__main__":
+if __name__ == '__main__':
     unittest.main()
diff --git a/components/mqtt/weekend_test/mqtt_publish_test.py b/components/mqtt/weekend_test/mqtt_publish_test.py
index bdc2e638f3..c9ea51c1a3 100644
--- a/components/mqtt/weekend_test/mqtt_publish_test.py
+++ b/components/mqtt/weekend_test/mqtt_publish_test.py
@@ -1,36 +1,35 @@
-from __future__ import print_function
-from __future__ import unicode_literals
-from builtins import str
-import re
-import sys
-import ssl
-import paho.mqtt.client as mqtt
-from threading import Thread, Event
-import time
-import string
+from __future__ import print_function, unicode_literals
+
 import random
+import re
+import ssl
+import string
+import sys
+import time
+from builtins import str
+from threading import Event, Thread
 
-from tiny_test_fw import DUT
+import paho.mqtt.client as mqtt
 import ttfw_idf
-
+from tiny_test_fw import DUT
 
 event_client_connected = Event()
 event_stop_client = Event()
 event_client_received_correct = Event()
-message_log = ""
+message_log = ''
 broker_host = {}
 broker_port = {}
-expected_data = ""
-subscribe_topic = ""
-publish_topic = ""
+expected_data = ''
+subscribe_topic = ''
+publish_topic = ''
 expected_count = 0
 
 
 # The callback for when the client receives a CONNACK response from the server.
 def on_connect(client, userdata, flags, rc):
-    print("Connected with result code " + str(rc))
+    print('Connected with result code ' + str(rc))
     event_client_connected.set()
-    client.subscribe("/topic/qos0")
+    client.subscribe('/topic/qos0')
 
 
 def mqtt_client_task(client):
@@ -52,8 +51,8 @@ def on_message(client, userdata, msg):
     payload = msg.payload.decode()
     if payload == expected_data:
         expected_count += 1
-        print("[{}] Received...".format(msg.mid))
-        message_log += "Received data:" + msg.topic + " " + payload + "\n"
+        print('[{}] Received...'.format(msg.mid))
+        message_log += 'Received data:' + msg.topic + ' ' + payload + '\n'
 
 
 def test_single_config(dut, transport, qos, repeat, published, queue=0):
@@ -63,49 +62,49 @@ def test_single_config(dut, transport, qos, repeat, published, queue=0):
     sample_string = ''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(16))
     event_client_connected.clear()
     expected_count = 0
-    message_log = ""
+    message_log = ''
     expected_data = sample_string * repeat
     print("PUBLISH TEST: transport:{}, qos:{}, sequence:{}, enqueue:{}, sample msg:'{}'".format(transport, qos, published, queue, expected_data))
     client = None
     try:
-        if transport in ["ws", "wss"]:
-            client = mqtt.Client(transport="websockets")
+        if transport in ['ws', 'wss']:
+            client = mqtt.Client(transport='websockets')
         else:
             client = mqtt.Client()
         client.on_connect = on_connect
         client.on_message = on_message
-        if transport in ["ssl", "wss"]:
+        if transport in ['ssl', 'wss']:
             client.tls_set(None, None, None, cert_reqs=ssl.CERT_NONE, tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None)
             client.tls_insecure_set(True)
-        print("Connecting...")
+        print('Connecting...')
         client.connect(broker_host[transport], broker_port[transport], 60)
     except Exception:
-        print("ENV_TEST_FAILURE: Unexpected error while connecting to broker {}: {}:".format(broker_host[transport], sys.exc_info()[0]))
+        print('ENV_TEST_FAILURE: Unexpected error while connecting to broker {}: {}:'.format(broker_host[transport], sys.exc_info()[0]))
         raise
     # Starting a py-client in a separate thread
     thread1 = Thread(target=mqtt_client_task, args=(client,))
     thread1.start()
-    print("Connecting py-client to broker {}:{}...".format(broker_host[transport], broker_port[transport]))
+    print('Connecting py-client to broker {}:{}...'.format(broker_host[transport], broker_port[transport]))
     if not event_client_connected.wait(timeout=30):
-        raise ValueError("ENV_TEST_FAILURE: Test script cannot connect to broker: {}".format(broker_host[transport]))
+        raise ValueError('ENV_TEST_FAILURE: Test script cannot connect to broker: {}'.format(broker_host[transport]))
     client.subscribe(subscribe_topic, qos)
-    dut.write(' '.join(str(x) for x in (transport, sample_string, repeat, published, qos, queue)), eol="\n")
+    dut.write(' '.join(str(x) for x in (transport, sample_string, repeat, published, qos, queue)), eol='\n')
     try:
         # waiting till subscribed to defined topic
-        dut.expect(re.compile(r"MQTT_EVENT_SUBSCRIBED"), timeout=30)
+        dut.expect(re.compile(r'MQTT_EVENT_SUBSCRIBED'), timeout=30)
         for i in range(published):
             client.publish(publish_topic, sample_string * repeat, qos)
-            print("Publishing...")
-        print("Checking esp-client received msg published from py-client...")
-        dut.expect(re.compile(r"Correct pattern received exactly x times"), timeout=60)
+            print('Publishing...')
+        print('Checking esp-client received msg published from py-client...')
+        dut.expect(re.compile(r'Correct pattern received exactly x times'), timeout=60)
         start = time.time()
         while expected_count < published and time.time() - start <= 60:
             time.sleep(1)
         # Note: tolerate that messages qos=1 to be received more than once
         if expected_count == published or (expected_count > published and qos == 1):
-            print("All data received from ESP32...")
+            print('All data received from ESP32...')
         else:
-            raise ValueError("Not all data received from ESP32: Expected:{}x{}, Received:{}x{}".format(expected_count, published, expected_data, message_log))
+            raise ValueError('Not all data received from ESP32: Expected:{}x{}, Received:{}x{}'.format(expected_count, published, expected_data, message_log))
     finally:
         event_stop_client.set()
         thread1.join()
@@ -113,7 +112,7 @@ def test_single_config(dut, transport, qos, repeat, published, queue=0):
     event_stop_client.clear()
 
 
-@ttfw_idf.idf_custom_test(env_tag="Example_WIFI")
+@ttfw_idf.idf_custom_test(env_tag='Example_WIFI')
 def test_weekend_mqtt_publish(env, extra_data):
     # Using broker url dictionary for different transport
     global broker_host
@@ -127,28 +126,28 @@ def test_weekend_mqtt_publish(env, extra_data):
     3. Test evaluates python client received correct qos0 message
     4. Test ESP32 client received correct qos0 message
     """
-    dut1 = env.get_dut("mqtt_publish_connect_test", "tools/test_apps/protocols/mqtt/publish_connect_test")
+    dut1 = env.get_dut('mqtt_publish_connect_test', 'tools/test_apps/protocols/mqtt/publish_connect_test')
     # Look for host:port in sdkconfig
     try:
         # python client subscribes to the topic to which esp client publishes and vice versa
-        publish_topic = dut1.app.get_sdkconfig()["CONFIG_EXAMPLE_SUBSCIBE_TOPIC"].replace('"','')
-        subscribe_topic = dut1.app.get_sdkconfig()["CONFIG_EXAMPLE_PUBLISH_TOPIC"].replace('"','')
-        broker_host["ssl"], broker_port["ssl"] = get_host_port_from_dut(dut1, "CONFIG_EXAMPLE_BROKER_SSL_URI")
-        broker_host["tcp"], broker_port["tcp"] = get_host_port_from_dut(dut1, "CONFIG_EXAMPLE_BROKER_TCP_URI")
-        broker_host["ws"], broker_port["ws"] = get_host_port_from_dut(dut1, "CONFIG_EXAMPLE_BROKER_WS_URI")
-        broker_host["wss"], broker_port["wss"] = get_host_port_from_dut(dut1, "CONFIG_EXAMPLE_BROKER_WSS_URI")
+        publish_topic = dut1.app.get_sdkconfig()['CONFIG_EXAMPLE_SUBSCIBE_TOPIC'].replace('"','')
+        subscribe_topic = dut1.app.get_sdkconfig()['CONFIG_EXAMPLE_PUBLISH_TOPIC'].replace('"','')
+        broker_host['ssl'], broker_port['ssl'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_SSL_URI')
+        broker_host['tcp'], broker_port['tcp'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_TCP_URI')
+        broker_host['ws'], broker_port['ws'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_WS_URI')
+        broker_host['wss'], broker_port['wss'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_WSS_URI')
     except Exception:
         print('ENV_TEST_FAILURE: Cannot find broker url in sdkconfig')
         raise
     dut1.start_app()
     try:
-        ip_address = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30)
-        print("Connected to AP with IP: {}".format(ip_address))
+        ip_address = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30)
+        print('Connected to AP with IP: {}'.format(ip_address))
     except DUT.ExpectTimeout:
         print('ENV_TEST_FAILURE: Cannot connect to AP')
         raise
     for qos in [0, 1, 2]:
-        for transport in ["tcp", "ssl", "ws", "wss"]:
+        for transport in ['tcp', 'ssl', 'ws', 'wss']:
             for q in [0, 1]:
                 if broker_host[transport] is None:
                     print('Skipping transport: {}...'.format(transport))
@@ -156,14 +155,14 @@
test with empty message test_single_config(dut1, transport, qos, 0, 5, q) # decide on broker what level of test will pass (local broker works the best) - if broker_host[transport].startswith("192.168") and qos > 0 and q == 0: + if broker_host[transport].startswith('192.168') and qos > 0 and q == 0: # medium size, medium repeated test_single_config(dut1, transport, qos, 5, 50, q) # long data test_single_config(dut1, transport, qos, 1000, 10, q) # short data, many repeats test_single_config(dut1, transport, qos, 2, 200, q) - elif transport in ["ws", "wss"]: + elif transport in ['ws', 'wss']: # more relaxed criteria for websockets! test_single_config(dut1, transport, qos, 2, 5, q) test_single_config(dut1, transport, qos, 50, 1, q) diff --git a/components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py b/components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py index f8ec9a0094..890f5222b4 100755 --- a/components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py +++ b/components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py @@ -19,40 +19,42 @@ # from __future__ import division, print_function -from future.moves.itertools import zip_longest -from builtins import int, range, bytes -from io import open -import sys + import argparse -import binascii -import random -import struct -import os import array -import zlib +import binascii import codecs import datetime import distutils.dir_util +import os +import random +import struct +import sys +import zlib +from builtins import bytes, int, range +from io import open + +from future.moves.itertools import zip_longest try: - from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes from cryptography.hazmat.backends import default_backend + from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes except ImportError: print('The cryptography package is not installed.' 
'Please refer to the Get Started section of the ESP-IDF Programming Guide for ' 'setting up the required packages.') raise -VERSION1_PRINT = "V1 - Multipage Blob Support Disabled" -VERSION2_PRINT = "V2 - Multipage Blob Support Enabled" +VERSION1_PRINT = 'V1 - Multipage Blob Support Disabled' +VERSION2_PRINT = 'V2 - Multipage Blob Support Enabled' def reverse_hexbytes(addr_tmp): addr = [] - reversed_bytes = "" + reversed_bytes = '' for i in range(0, len(addr_tmp), 2): addr.append(addr_tmp[i:i + 2]) - reversed_bytes = "".join(reversed(addr)) + reversed_bytes = ''.join(reversed(addr)) return reversed_bytes @@ -62,10 +64,10 @@ def reverse_hexbytes(addr_tmp): class Page(object): PAGE_PARAMS = { - "max_size": 4096, - "max_old_blob_size": 1984, - "max_new_blob_size": 4000, - "max_entries": 126 + 'max_size': 4096, + 'max_old_blob_size': 1984, + 'max_new_blob_size': 4000, + 'max_entries': 126 } # Item type codes @@ -98,7 +100,7 @@ class Page(object): self.entry_num = 0 self.bitmap_array = array.array('B') self.version = version - self.page_buf = bytearray(b'\xff') * Page.PAGE_PARAMS["max_size"] + self.page_buf = bytearray(b'\xff') * Page.PAGE_PARAMS['max_size'] if not is_rsrv_page: self.bitmap_array = self.create_bitmap_array() self.set_header(page_num, version) @@ -167,7 +169,7 @@ class Page(object): else: encr_key_input = codecs.decode(nvs_obj.encr_key, 'hex') - rel_addr = nvs_obj.page_num * Page.PAGE_PARAMS["max_size"] + Page.FIRST_ENTRY_OFFSET + rel_addr = nvs_obj.page_num * Page.PAGE_PARAMS['max_size'] + Page.FIRST_ENTRY_OFFSET if not isinstance(data_input, bytearray): byte_arr = bytearray(b'\xff') * 32 @@ -249,8 +251,8 @@ class Page(object): chunk_size = 0 # Get the size available in current page - tailroom = (Page.PAGE_PARAMS["max_entries"] - self.entry_num - 1) * Page.SINGLE_ENTRY_SIZE - assert tailroom >= 0, "Page overflow!!" + tailroom = (Page.PAGE_PARAMS['max_entries'] - self.entry_num - 1) * Page.SINGLE_ENTRY_SIZE + assert tailroom >= 0, 'Page overflow!!' # Split the binary data into two and store a chunk of available size onto curr page if tailroom < remaining_size: @@ -358,14 +360,14 @@ class Page(object): # Set size of data datalen = len(data) - if datalen > Page.PAGE_PARAMS["max_old_blob_size"]: + if datalen > Page.PAGE_PARAMS['max_old_blob_size']: if self.version == Page.VERSION1: - raise InputError(" Input File: Size (%d) exceeds max allowed length `%s` bytes for key `%s`." - % (datalen, Page.PAGE_PARAMS["max_old_blob_size"], key)) + raise InputError(' Input File: Size (%d) exceeds max allowed length `%s` bytes for key `%s`.' + % (datalen, Page.PAGE_PARAMS['max_old_blob_size'], key)) else: - if encoding == "string": - raise InputError(" Input File: Size (%d) exceeds max allowed length `%s` bytes for key `%s`." - % (datalen, Page.PAGE_PARAMS["max_old_blob_size"], key)) + if encoding == 'string': + raise InputError(' Input File: Size (%d) exceeds max allowed length `%s` bytes for key `%s`.' + % (datalen, Page.PAGE_PARAMS['max_old_blob_size'], key)) # Calculate no. 
of entries data will require rounded_size = (datalen + 31) & ~31 @@ -373,10 +375,10 @@ class Page(object): total_entry_count = data_entry_count + 1 # +1 for the entry header # Check if page is already full and new page is needed to be created right away - if self.entry_num >= Page.PAGE_PARAMS["max_entries"]: + if self.entry_num >= Page.PAGE_PARAMS['max_entries']: raise PageFullError() - elif (self.entry_num + total_entry_count) >= Page.PAGE_PARAMS["max_entries"]: - if not (self.version == Page.VERSION2 and encoding in ["hex2bin", "binary", "base64"]): + elif (self.entry_num + total_entry_count) >= Page.PAGE_PARAMS['max_entries']: + if not (self.version == Page.VERSION2 and encoding in ['hex2bin', 'binary', 'base64']): raise PageFullError() # Entry header @@ -385,7 +387,7 @@ class Page(object): entry_struct[0] = ns_index # Set Span if self.version == Page.VERSION2: - if encoding == "string": + if encoding == 'string': entry_struct[2] = data_entry_count + 1 # Set Chunk Index chunk_index = Page.CHUNK_ANY @@ -399,12 +401,12 @@ class Page(object): entry_struct[8:8 + len(key)] = key.encode() # set Type - if encoding == "string": + if encoding == 'string': entry_struct[1] = Page.SZ - elif encoding in ["hex2bin", "binary", "base64"]: + elif encoding in ['hex2bin', 'binary', 'base64']: entry_struct[1] = Page.BLOB - if self.version == Page.VERSION2 and (encoding in ["hex2bin", "binary", "base64"]): + if self.version == Page.VERSION2 and (encoding in ['hex2bin', 'binary', 'base64']): entry_struct = self.write_varlen_binary_data(entry_struct,ns_index,key,data, datalen,total_entry_count, encoding, nvs_obj) else: @@ -413,7 +415,7 @@ class Page(object): """ Low-level function to write data of primitive type into page buffer. """ def write_primitive_data(self, key, data, encoding, ns_index,nvs_obj): # Check if entry exceeds max number of entries allowed per page - if self.entry_num >= Page.PAGE_PARAMS["max_entries"]: + if self.entry_num >= Page.PAGE_PARAMS['max_entries']: raise PageFullError() entry_struct = bytearray(b'\xff') * 32 @@ -427,28 +429,28 @@ class Page(object): entry_struct[8:24] = key_array entry_struct[8:8 + len(key)] = key.encode() - if encoding == "u8": + if encoding == 'u8': entry_struct[1] = Page.U8 struct.pack_into(' absolute path given so outdir is ignored for this file." % filepath) + print('\nWarning: `%s` \n\t==> absolute path given so outdir is ignored for this file.' % filepath) # Set to empty as outdir is ignored here outdir = '' @@ -728,11 +730,11 @@ def encrypt(args): check_size(args.size) if (args.keygen is False) and (not args.inputkey): - sys.exit("Error. --keygen or --inputkey argument needed.") + sys.exit('Error. --keygen or --inputkey argument needed.') elif args.keygen and args.inputkey: - sys.exit("Error. --keygen and --inputkey both are not allowed.") + sys.exit('Error. --keygen and --inputkey both are not allowed.') elif not args.keygen and args.keyfile: - print("\nWarning:","--inputkey argument is given. --keyfile argument will be ignored...") + print('\nWarning:','--inputkey argument is given. 
--keyfile argument will be ignored...') if args.inputkey: # Check if key file has .bin extension @@ -835,7 +837,7 @@ def decrypt(args): start_entry_offset += nvs_read_bytes output_file.write(output_buf) - print("\nCreated NVS decrypted binary: ===>", args.output) + print('\nCreated NVS decrypted binary: ===>', args.output) def generate_key(args): @@ -850,7 +852,7 @@ def generate_key(args): if not args.keyfile: timestamp = datetime.datetime.now().strftime('%m-%d_%H-%M') - args.keyfile = "keys-" + timestamp + bin_ext + args.keyfile = 'keys-' + timestamp + bin_ext keys_outdir = os.path.join(args.outdir,keys_dir, '') # Create keys/ dir in if does not exist @@ -872,7 +874,7 @@ def generate_key(args): with open(output_keyfile, 'wb') as output_keys_file: output_keys_file.write(keys_buf) - print("\nCreated encryption keys: ===> ", output_keyfile) + print('\nCreated encryption keys: ===> ', output_keyfile) return key @@ -914,7 +916,7 @@ def generate(args, is_encr_enabled=False, encr_key=None): else: version_set = VERSION2_PRINT - print("\nCreating NVS binary with version:", version_set) + print('\nCreating NVS binary with version:', version_set) line = input_file.readline().strip() @@ -939,25 +941,25 @@ def generate(args, is_encr_enabled=False, encr_key=None): try: # Check key length - if len(data["key"]) > 15: - raise InputError("Length of key `{}` should be <= 15 characters.".format(data["key"])) - write_entry(nvs_obj, data["key"], data["type"], data["encoding"], data["value"]) + if len(data['key']) > 15: + raise InputError('Length of key `{}` should be <= 15 characters.'.format(data['key'])) + write_entry(nvs_obj, data['key'], data['type'], data['encoding'], data['value']) except InputError as e: print(e) filedir, filename = os.path.split(args.output) if filename: - print("\nWarning: NVS binary not created...") + print('\nWarning: NVS binary not created...') os.remove(args.output) if is_dir_new and not filedir == os.getcwd(): - print("\nWarning: Output dir not created...") + print('\nWarning: Output dir not created...') os.rmdir(filedir) sys.exit(-2) - print("\nCreated NVS binary: ===>", args.output) + print('\nCreated NVS binary: ===>', args.output) def main(): - parser = argparse.ArgumentParser(description="\nESP NVS partition generation utility", formatter_class=argparse.RawTextHelpFormatter) + parser = argparse.ArgumentParser(description='\nESP NVS partition generation utility', formatter_class=argparse.RawTextHelpFormatter) subparser = parser.add_subparsers(title='Commands', dest='command', help='\nRun nvs_partition_gen.py {command} -h for additional help\n\n') @@ -1022,7 +1024,7 @@ def main(): \nVersion 2 - Multipage blob support enabled.\ \nDefault: Version 2''') parser_encr.add_argument('--keygen', - action="store_true", + action='store_true', default=False, help='Generates key for encrypting NVS partition') parser_encr.add_argument('--keyfile', @@ -1057,5 +1059,5 @@ def main(): args.func(args) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/components/partition_table/gen_empty_partition.py b/components/partition_table/gen_empty_partition.py index f65f74d706..0e29baa681 100644 --- a/components/partition_table/gen_empty_partition.py +++ b/components/partition_table/gen_empty_partition.py @@ -17,8 +17,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
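For reference, the whole job of gen_empty_partition.py, whose diff begins here, is captured by the one changed statement in the hunk below: it emits size bytes of 0xFF, the erased state of NOR flash, so the resulting image reads back as an empty partition. A minimal standalone sketch of that behaviour, not part of the patch; the output file name and size are illustrative assumptions (the real tool can also stream to stdout):

# Sketch of what generate_blanked_file(size, output_path) produces:
# a file of the requested size filled with 0xFF (the erased-flash value).
def make_blank_image(size, output_path):
    with open(output_path, 'wb') as f:
        f.write(b'\xff' * size)

make_blank_image(0x6000, 'blank_nvs.bin')  # hypothetical: 24 KB blank image
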
-from __future__ import print_function, division -from __future__ import unicode_literals +from __future__ import division, print_function, unicode_literals + import argparse import sys @@ -28,7 +28,7 @@ quiet = False def generate_blanked_file(size, output_path): - output = b"\xFF" * size + output = b'\xFF' * size try: stdout_binary = sys.stdout.buffer # Python 3 except AttributeError: diff --git a/components/partition_table/gen_esp32part.py b/components/partition_table/gen_esp32part.py index 3406478cbe..d7b57a469b 100755 --- a/components/partition_table/gen_esp32part.py +++ b/components/partition_table/gen_esp32part.py @@ -20,19 +20,19 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import print_function, division -from __future__ import unicode_literals +from __future__ import division, print_function, unicode_literals + import argparse +import binascii +import errno +import hashlib import os import re import struct import sys -import hashlib -import binascii -import errno MAX_PARTITION_LENGTH = 0xC00 # 3K for partition data (96 entries) leaves 1K in a 4K sector for signature -MD5_PARTITION_BEGIN = b"\xEB\xEB" + b"\xFF" * 14 # The first 2 bytes are like magic numbers for MD5 sum +MD5_PARTITION_BEGIN = b'\xEB\xEB' + b'\xFF' * 14 # The first 2 bytes are like magic numbers for MD5 sum PARTITION_TABLE_SIZE = 0x1000 # Size of partition table MIN_PARTITION_SUBTYPE_APP_OTA = 0x10 @@ -44,26 +44,26 @@ APP_TYPE = 0x00 DATA_TYPE = 0x01 TYPES = { - "app": APP_TYPE, - "data": DATA_TYPE, + 'app': APP_TYPE, + 'data': DATA_TYPE, } # Keep this map in sync with esp_partition_subtype_t enum in esp_partition.h SUBTYPES = { APP_TYPE: { - "factory": 0x00, - "test": 0x20, + 'factory': 0x00, + 'test': 0x20, }, DATA_TYPE: { - "ota": 0x00, - "phy": 0x01, - "nvs": 0x02, - "coredump": 0x03, - "nvs_keys": 0x04, - "efuse": 0x05, - "esphttpd": 0x80, - "fat": 0x81, - "spiffs": 0x82, + 'ota': 0x00, + 'phy': 0x01, + 'nvs': 0x02, + 'coredump': 0x03, + 'nvs_keys': 0x04, + 'efuse': 0x05, + 'esphttpd': 0x80, + 'fat': 0x81, + 'spiffs': 0x82, }, } @@ -103,14 +103,14 @@ class PartitionTable(list): for line_no in range(len(lines)): line = expand_vars(lines[line_no]).strip() - if line.startswith("#") or len(line) == 0: + if line.startswith('#') or len(line) == 0: continue try: res.append(PartitionDefinition.from_csv(line, line_no + 1)) except InputError as e: - raise InputError("Error at line %d: %s" % (line_no + 1, e)) + raise InputError('Error at line %d: %s' % (line_no + 1, e)) except Exception: - critical("Unexpected error parsing CSV line %d: %s" % (line_no + 1, line)) + critical('Unexpected error parsing CSV line %d: %s' % (line_no + 1, line)) raise # fix up missing offsets & negative sizes @@ -118,10 +118,10 @@ class PartitionTable(list): for e in res: if e.offset is not None and e.offset < last_end: if e == res[0]: - raise InputError("CSV Error: First partition offset 0x%x overlaps end of partition table 0x%x" + raise InputError('CSV Error: First partition offset 0x%x overlaps end of partition table 0x%x' % (e.offset, last_end)) else: - raise InputError("CSV Error: Partitions overlap. Partition at line %d sets offset 0x%x. Previous partition ends 0x%x" + raise InputError('CSV Error: Partitions overlap. Partition at line %d sets offset 0x%x. 
Previous partition ends 0x%x' % (e.line_no, e.offset, last_end)) if e.offset is None: pad_to = 0x10000 if e.type == APP_TYPE else 4 @@ -186,19 +186,19 @@ class PartitionTable(list): # print sorted duplicate partitions by name if len(duplicates) != 0: - print("A list of partitions that have the same name:") + print('A list of partitions that have the same name:') for p in sorted(self, key=lambda x:x.name): if len(duplicates.intersection([p.name])) != 0: - print("%s" % (p.to_csv())) - raise InputError("Partition names must be unique") + print('%s' % (p.to_csv())) + raise InputError('Partition names must be unique') # check for overlaps last = None for p in sorted(self, key=lambda x:x.offset): if p.offset < offset_part_table + PARTITION_TABLE_SIZE: - raise InputError("Partition offset 0x%x is below 0x%x" % (p.offset, offset_part_table + PARTITION_TABLE_SIZE)) + raise InputError('Partition offset 0x%x is below 0x%x' % (p.offset, offset_part_table + PARTITION_TABLE_SIZE)) if last is not None and p.offset < last.offset + last.size: - raise InputError("Partition at 0x%x overlaps 0x%x-0x%x" % (p.offset, last.offset, last.offset + last.size - 1)) + raise InputError('Partition at 0x%x overlaps 0x%x-0x%x' % (p.offset, last.offset, last.offset + last.size - 1)) last = p def flash_size(self): @@ -218,7 +218,7 @@ class PartitionTable(list): for o in range(0,len(b),32): data = b[o:o + 32] if len(data) != 32: - raise InputError("Partition table length must be a multiple of 32 bytes") + raise InputError('Partition table length must be a multiple of 32 bytes') if data == b'\xFF' * 32: return result # got end marker if md5sum and data[:2] == MD5_PARTITION_BEGIN[:2]: # check only the magic number part @@ -229,26 +229,26 @@ class PartitionTable(list): else: md5.update(data) result.append(PartitionDefinition.from_binary(data)) - raise InputError("Partition table is missing an end-of-table marker") + raise InputError('Partition table is missing an end-of-table marker') def to_binary(self): - result = b"".join(e.to_binary() for e in self) + result = b''.join(e.to_binary() for e in self) if md5sum: result += MD5_PARTITION_BEGIN + hashlib.md5(result).digest() if len(result) >= MAX_PARTITION_LENGTH: - raise InputError("Binary partition table length (%d) longer than max" % len(result)) - result += b"\xFF" * (MAX_PARTITION_LENGTH - len(result)) # pad the sector, for signing + raise InputError('Binary partition table length (%d) longer than max' % len(result)) + result += b'\xFF' * (MAX_PARTITION_LENGTH - len(result)) # pad the sector, for signing return result def to_csv(self, simple_formatting=False): - rows = ["# ESP-IDF Partition Table", - "# Name, Type, SubType, Offset, Size, Flags"] + rows = ['# ESP-IDF Partition Table', + '# Name, Type, SubType, Offset, Size, Flags'] rows += [x.to_csv(simple_formatting) for x in self] - return "\n".join(rows) + "\n" + return '\n'.join(rows) + '\n' class PartitionDefinition(object): - MAGIC_BYTES = b"\xAA\x50" + MAGIC_BYTES = b'\xAA\x50' ALIGNMENT = { APP_TYPE: 0x10000, @@ -258,15 +258,15 @@ class PartitionDefinition(object): # dictionary maps flag name (as used in CSV flags list, property name) # to bit set in flags words in binary format FLAGS = { - "encrypted": 0 + 'encrypted': 0 } # add subtypes for the 16 OTA slot values ("ota_XX, etc.") for ota_slot in range(NUM_PARTITION_SUBTYPE_APP_OTA): - SUBTYPES[TYPES["app"]]["ota_%d" % ota_slot] = MIN_PARTITION_SUBTYPE_APP_OTA + ota_slot + SUBTYPES[TYPES['app']]['ota_%d' % ota_slot] = MIN_PARTITION_SUBTYPE_APP_OTA + ota_slot def 
__init__(self): - self.name = "" + self.name = '' self.type = None self.subtype = None self.offset = None @@ -276,8 +276,8 @@ class PartitionDefinition(object): @classmethod def from_csv(cls, line, line_no): """ Parse a line from the CSV """ - line_w_defaults = line + ",,,," # lazy way to support default fields - fields = [f.strip() for f in line_w_defaults.split(",")] + line_w_defaults = line + ',,,,' # lazy way to support default fields + fields = [f.strip() for f in line_w_defaults.split(',')] res = PartitionDefinition() res.line_no = line_no @@ -289,7 +289,7 @@ class PartitionDefinition(object): if res.size is None: raise InputError("Size field can't be empty") - flags = fields[5].split(":") + flags = fields[5].split(':') for flag in flags: if flag in cls.FLAGS: setattr(res, flag, True) @@ -305,7 +305,7 @@ class PartitionDefinition(object): def __repr__(self): def maybe_hex(x): - return "0x%x" % x if x is not None else "None" + return '0x%x' % x if x is not None else 'None' return "PartitionDefinition('%s', 0x%x, 0x%x, %s, %s)" % (self.name, self.type, self.subtype or 0, maybe_hex(self.offset), maybe_hex(self.size)) @@ -328,65 +328,65 @@ class PartitionDefinition(object): return self.offset >= other.offset def parse_type(self, strval): - if strval == "": + if strval == '': raise InputError("Field 'type' can't be left empty.") return parse_int(strval, TYPES) def parse_subtype(self, strval): - if strval == "": + if strval == '': return 0 # default return parse_int(strval, SUBTYPES.get(self.type, {})) def parse_address(self, strval): - if strval == "": + if strval == '': return None # PartitionTable will fill in default return parse_int(strval) def verify(self): if self.type is None: - raise ValidationError(self, "Type field is not set") + raise ValidationError(self, 'Type field is not set') if self.subtype is None: - raise ValidationError(self, "Subtype field is not set") + raise ValidationError(self, 'Subtype field is not set') if self.offset is None: - raise ValidationError(self, "Offset field is not set") + raise ValidationError(self, 'Offset field is not set') align = self.ALIGNMENT.get(self.type, 4) if self.offset % align: - raise ValidationError(self, "Offset 0x%x is not aligned to 0x%x" % (self.offset, align)) + raise ValidationError(self, 'Offset 0x%x is not aligned to 0x%x' % (self.offset, align)) if self.size % align and secure: - raise ValidationError(self, "Size 0x%x is not aligned to 0x%x" % (self.size, align)) + raise ValidationError(self, 'Size 0x%x is not aligned to 0x%x' % (self.size, align)) if self.size is None: - raise ValidationError(self, "Size field is not set") + raise ValidationError(self, 'Size field is not set') - if self.name in TYPES and TYPES.get(self.name, "") != self.type: + if self.name in TYPES and TYPES.get(self.name, '') != self.type: critical("WARNING: Partition has name '%s' which is a partition type, but does not match this partition's " - "type (0x%x). Mistake in partition table?" % (self.name, self.type)) + 'type (0x%x). Mistake in partition table?' % (self.name, self.type)) all_subtype_names = [] for names in (t.keys() for t in SUBTYPES.values()): all_subtype_names += names - if self.name in all_subtype_names and SUBTYPES.get(self.type, {}).get(self.name, "") != self.subtype: + if self.name in all_subtype_names and SUBTYPES.get(self.type, {}).get(self.name, '') != self.subtype: critical("WARNING: Partition has name '%s' which is a partition subtype, but this partition has " - "non-matching type 0x%x and subtype 0x%x. Mistake in partition table?" 
% (self.name, self.type, self.subtype)) + 'non-matching type 0x%x and subtype 0x%x. Mistake in partition table?' % (self.name, self.type, self.subtype)) - STRUCT_FORMAT = b"<2sBBLL16sL" + STRUCT_FORMAT = b'<2sBBLL16sL' @classmethod def from_binary(cls, b): if len(b) != 32: - raise InputError("Partition definition length must be exactly 32 bytes. Got %d bytes." % len(b)) + raise InputError('Partition definition length must be exactly 32 bytes. Got %d bytes.' % len(b)) res = cls() (magic, res.type, res.subtype, res.offset, res.size, res.name, flags) = struct.unpack(cls.STRUCT_FORMAT, b) - if b"\x00" in res.name: # strip null byte padding from name string - res.name = res.name[:res.name.index(b"\x00")] + if b'\x00' in res.name: # strip null byte padding from name string + res.name = res.name[:res.name.index(b'\x00')] res.name = res.name.decode() if magic != cls.MAGIC_BYTES: - raise InputError("Invalid magic bytes (%r) for partition definition" % magic) + raise InputError('Invalid magic bytes (%r) for partition definition' % magic) for flag,bit in cls.FLAGS.items(): if flags & (1 << bit): setattr(res, flag, True) flags &= ~(1 << bit) if flags != 0: - critical("WARNING: Partition definition had unknown flag(s) 0x%08x. Newer binary format?" % flags) + critical('WARNING: Partition definition had unknown flag(s) 0x%08x. Newer binary format?' % flags) return res def get_flags_list(self): @@ -404,22 +404,22 @@ class PartitionDefinition(object): def to_csv(self, simple_formatting=False): def addr_format(a, include_sizes): if not simple_formatting and include_sizes: - for (val, suffix) in [(0x100000, "M"), (0x400, "K")]: + for (val, suffix) in [(0x100000, 'M'), (0x400, 'K')]: if a % val == 0: - return "%d%s" % (a // val, suffix) - return "0x%x" % a + return '%d%s' % (a // val, suffix) + return '0x%x' % a def lookup_keyword(t, keywords): for k,v in keywords.items(): if simple_formatting is False and t == v: return k - return "%d" % t + return '%d' % t def generate_text_flags(): """ colon-delimited list of flags """ - return ":".join(self.get_flags_list()) + return ':'.join(self.get_flags_list()) - return ",".join([self.name, + return ','.join([self.name, lookup_keyword(self.type, TYPES), lookup_keyword(self.subtype, SUBTYPES.get(self.type, {})), addr_format(self.offset, False), @@ -432,17 +432,17 @@ def parse_int(v, keywords={}): k/m/K/M suffixes and 'keyword' value lookup. """ try: - for letter, multiplier in [("k", 1024), ("m", 1024 * 1024)]: + for letter, multiplier in [('k', 1024), ('m', 1024 * 1024)]: if v.lower().endswith(letter): return parse_int(v[:-1], keywords) * multiplier return int(v, 0) except ValueError: if len(keywords) == 0: - raise InputError("Invalid field value %s" % v) + raise InputError('Invalid field value %s' % v) try: return keywords[v.lower()] except KeyError: - raise InputError("Value '%s' is not valid. Known keywords: %s" % (v, ", ".join(keywords))) + raise InputError("Value '%s' is not valid. 
Known keywords: %s" % (v, ', '.join(keywords))) def main(): @@ -456,11 +456,11 @@ def main(): nargs='?', choices=['1MB', '2MB', '4MB', '8MB', '16MB']) parser.add_argument('--disable-md5sum', help='Disable md5 checksum for the partition table', default=False, action='store_true') parser.add_argument('--no-verify', help="Don't verify partition table fields", action='store_true') - parser.add_argument('--verify', '-v', help="Verify partition table fields (deprecated, this behaviour is " "enabled by default and this flag does nothing.", action='store_true') + parser.add_argument('--verify', '-v', help='Verify partition table fields (deprecated, this behaviour is ' 'enabled by default and this flag does nothing).', action='store_true') parser.add_argument('--quiet', '-q', help="Don't print non-critical status messages to stderr", action='store_true') parser.add_argument('--offset', '-o', help='Set the offset of the partition table', default='0x8000') - parser.add_argument('--secure', help="Require app partitions to be suitable for secure boot", action='store_true') + parser.add_argument('--secure', help='Require app partitions to be suitable for secure boot', action='store_true') parser.add_argument('input', help='Path to CSV or binary file to parse.', type=argparse.FileType('rb')) parser.add_argument('output', help='Path to output converted binary or CSV file. Will use stdout if omitted.', nargs='?', default='-') @@ -474,19 +474,19 @@ def main(): input = args.input.read() input_is_binary = input[0:2] == PartitionDefinition.MAGIC_BYTES if input_is_binary: - status("Parsing binary partition input...") + status('Parsing binary partition input...') table = PartitionTable.from_binary(input) else: input = input.decode() - status("Parsing CSV input...") + status('Parsing CSV input...') table = PartitionTable.from_csv(input) if not args.no_verify: - status("Verifying table...") + status('Verifying table...') table.verify() if args.flash_size: - size_mb = int(args.flash_size.replace("MB", "")) + size_mb = int(args.flash_size.replace('MB', '')) size = size_mb * 1024 * 1024 # flash memory uses honest megabytes! table_size = table.flash_size() if size < table_size: @@ -526,7 +526,7 @@ class InputError(RuntimeError): class ValidationError(InputError): def __init__(self, partition, message): super(ValidationError, self).__init__( - "Partition %s invalid: %s" % (partition.name, message)) + 'Partition %s invalid: %s' % (partition.name, message)) if __name__ == '__main__': diff --git a/components/partition_table/parttool.py b/components/partition_table/parttool.py index 107efb0394..93ee52668c 100755 --- a/components/partition_table/parttool.py +++ b/components/partition_table/parttool.py @@ -16,20 +16,21 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
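The parttool.py diff that follows is again pure quote and import normalization; nothing functional changes. Since the module also works as a library, here is a minimal usage sketch built only from calls visible in this patch (ParttoolTarget, PartitionName, read_partition); the serial port and output file name are assumptions, not from the diff:

# Hedged sketch: dump the partition named 'nvs' from an attached device.
# ParttoolTarget reads the partition table over serial via esptool, and
# read_partition() wraps esptool's read_flash for the resolved offset/size.
from parttool import ParttoolTarget, PartitionName

target = ParttoolTarget(port='/dev/ttyUSB0')             # assumed port name
target.read_partition(PartitionName('nvs'), 'nvs.bin')   # assumed output file
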
-from __future__ import print_function, division +from __future__ import division, print_function + import argparse import os -import sys -import subprocess -import tempfile import re -import gen_esp32part as gen +import subprocess +import sys +import tempfile +import gen_esp32part as gen __version__ = '2.0' -COMPONENTS_PATH = os.path.expandvars(os.path.join("$IDF_PATH", "components")) -ESPTOOL_PY = os.path.join(COMPONENTS_PATH, "esptool_py", "esptool", "esptool.py") +COMPONENTS_PATH = os.path.expandvars(os.path.join('$IDF_PATH', 'components')) +ESPTOOL_PY = os.path.join(COMPONENTS_PATH, 'esptool_py', 'esptool', 'esptool.py') PARTITION_TABLE_OFFSET = 0x8000 @@ -78,14 +79,14 @@ class ParttoolTarget(): def parse_esptool_args(esptool_args): results = list() for arg in esptool_args: - pattern = re.compile(r"(.+)=(.+)") + pattern = re.compile(r'(.+)=(.+)') result = pattern.match(arg) try: key = result.group(1) value = result.group(2) - results.extend(["--" + key, value]) + results.extend(['--' + key, value]) except AttributeError: - results.extend(["--" + arg]) + results.extend(['--' + arg]) return results self.esptool_args = parse_esptool_args(esptool_args) @@ -95,14 +96,14 @@ class ParttoolTarget(): if partition_table_file: partition_table = None - with open(partition_table_file, "rb") as f: + with open(partition_table_file, 'rb') as f: input_is_binary = (f.read(2) == gen.PartitionDefinition.MAGIC_BYTES) f.seek(0) if input_is_binary: partition_table = gen.PartitionTable.from_binary(f.read()) if partition_table is None: - with open(partition_table_file, "r") as f: + with open(partition_table_file, 'r') as f: f.seek(0) partition_table = gen.PartitionTable.from_csv(f.read()) else: @@ -110,8 +111,8 @@ class ParttoolTarget(): temp_file.close() try: - self._call_esptool(["read_flash", str(partition_table_offset), str(gen.MAX_PARTITION_LENGTH), temp_file.name]) - with open(temp_file.name, "rb") as f: + self._call_esptool(['read_flash', str(partition_table_offset), str(gen.MAX_PARTITION_LENGTH), temp_file.name]) + with open(temp_file.name, 'rb') as f: partition_table = gen.PartitionTable.from_binary(f.read()) finally: os.unlink(temp_file.name) @@ -125,18 +126,18 @@ class ParttoolTarget(): esptool_args = [sys.executable, ESPTOOL_PY] + self.esptool_args if self.port: - esptool_args += ["--port", self.port] + esptool_args += ['--port', self.port] if self.baud: - esptool_args += ["--baud", str(self.baud)] + esptool_args += ['--baud', str(self.baud)] esptool_args += args - print("Running %s..." % (" ".join(esptool_args))) + print('Running %s...' 
% (' '.join(esptool_args))) try: subprocess.check_call(esptool_args, stdout=out, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: - print("An exception: **", str(e), "** occurred in _call_esptool.", file=out) + print('An exception: **', str(e), '** occurred in _call_esptool.', file=out) raise e def get_partition_info(self, partition_id): @@ -149,37 +150,37 @@ class ParttoolTarget(): if not partition_id.part_list: partition = partition[0] else: # default boot partition - search = ["factory"] + ["ota_{}".format(d) for d in range(16)] + search = ['factory'] + ['ota_{}'.format(d) for d in range(16)] for subtype in search: - partition = next(self.partition_table.find_by_type("app", subtype), None) + partition = next(self.partition_table.find_by_type('app', subtype), None) if partition: break if not partition: - raise Exception("Partition does not exist") + raise Exception('Partition does not exist') return partition def erase_partition(self, partition_id): partition = self.get_partition_info(partition_id) - self._call_esptool(["erase_region", str(partition.offset), str(partition.size)] + self.esptool_erase_args) + self._call_esptool(['erase_region', str(partition.offset), str(partition.size)] + self.esptool_erase_args) def read_partition(self, partition_id, output): partition = self.get_partition_info(partition_id) - self._call_esptool(["read_flash", str(partition.offset), str(partition.size), output] + self.esptool_read_args) + self._call_esptool(['read_flash', str(partition.offset), str(partition.size), output] + self.esptool_read_args) def write_partition(self, partition_id, input): self.erase_partition(partition_id) partition = self.get_partition_info(partition_id) - with open(input, "rb") as input_file: + with open(input, 'rb') as input_file: content_len = len(input_file.read()) if content_len > partition.size: - raise Exception("Input file size exceeds partition size") + raise Exception('Input file size exceeds partition size') - self._call_esptool(["write_flash", str(partition.offset), input] + self.esptool_write_args) + self._call_esptool(['write_flash', str(partition.offset), input] + self.esptool_write_args) def _write_partition(target, partition_id, input): @@ -214,41 +215,41 @@ def _get_partition_info(target, partition_id, info): try: for p in partitions: info_dict = { - "name": '{}'.format(p.name), - "type": '{}'.format(p.type), - "subtype": '{}'.format(p.subtype), - "offset": '0x{:x}'.format(p.offset), - "size": '0x{:x}'.format(p.size), - "encrypted": '{}'.format(p.encrypted) + 'name': '{}'.format(p.name), + 'type': '{}'.format(p.type), + 'subtype': '{}'.format(p.subtype), + 'offset': '0x{:x}'.format(p.offset), + 'size': '0x{:x}'.format(p.size), + 'encrypted': '{}'.format(p.encrypted) } for i in info: infos += [info_dict[i]] except KeyError: - raise RuntimeError("Request for unknown partition info {}".format(i)) + raise RuntimeError('Request for unknown partition info {}'.format(i)) - print(" ".join(infos)) + print(' '.join(infos)) def main(): global quiet - parser = argparse.ArgumentParser("ESP-IDF Partitions Tool") + parser = argparse.ArgumentParser('ESP-IDF Partitions Tool') - parser.add_argument("--quiet", "-q", help="suppress stderr messages", action="store_true") - parser.add_argument("--esptool-args", help="additional main arguments for esptool", nargs="+") - parser.add_argument("--esptool-write-args", help="additional subcommand arguments when writing to flash", nargs="+") - parser.add_argument("--esptool-read-args", help="additional subcommand 
arguments when reading flash", nargs="+") - parser.add_argument("--esptool-erase-args", help="additional subcommand arguments when erasing regions of flash", nargs="+") + parser.add_argument('--quiet', '-q', help='suppress stderr messages', action='store_true') + parser.add_argument('--esptool-args', help='additional main arguments for esptool', nargs='+') + parser.add_argument('--esptool-write-args', help='additional subcommand arguments when writing to flash', nargs='+') + parser.add_argument('--esptool-read-args', help='additional subcommand arguments when reading flash', nargs='+') + parser.add_argument('--esptool-erase-args', help='additional subcommand arguments when erasing regions of flash', nargs='+') # By default the device attached to the specified port is queried for the partition table. If a partition table file # is specified, that is used instead. - parser.add_argument("--port", "-p", help="port where the target device of the command is connected to; the partition table is sourced from this device \ - when the partition table file is not defined") - parser.add_argument("--baud", "-b", help="baudrate to use", type=int) + parser.add_argument('--port', '-p', help='port where the target device of the command is connected to; the partition table is sourced from this device \ + when the partition table file is not defined') + parser.add_argument('--baud', '-b', help='baudrate to use', type=int) - parser.add_argument("--partition-table-offset", "-o", help="offset to read the partition table from", type=str) - parser.add_argument("--partition-table-file", "-f", help="file (CSV/binary) to read the partition table from; \ - overrides device attached to specified port as the partition table source when defined") + parser.add_argument('--partition-table-offset', '-o', help='offset to read the partition table from', type=str) + parser.add_argument('--partition-table-file', '-f', help='file (CSV/binary) to read the partition table from; \ + overrides device attached to specified port as the partition table source when defined') partition_selection_parser = argparse.ArgumentParser(add_help=False) @@ -256,30 +257,30 @@ def main(): # partition name or the first partition that matches the specified type/subtype partition_selection_args = partition_selection_parser.add_mutually_exclusive_group() - partition_selection_args.add_argument("--partition-name", "-n", help="name of the partition") - partition_selection_args.add_argument("--partition-type", "-t", help="type of the partition") - partition_selection_args.add_argument('--partition-boot-default', "-d", help='select the default boot partition \ - using the same fallback logic as the IDF bootloader', action="store_true") + partition_selection_args.add_argument('--partition-name', '-n', help='name of the partition') + partition_selection_args.add_argument('--partition-type', '-t', help='type of the partition') + partition_selection_args.add_argument('--partition-boot-default', '-d', help='select the default boot partition \ + using the same fallback logic as the IDF bootloader', action='store_true') - partition_selection_parser.add_argument("--partition-subtype", "-s", help="subtype of the partition") + partition_selection_parser.add_argument('--partition-subtype', '-s', help='subtype of the partition') - subparsers = parser.add_subparsers(dest="operation", help="run parttool -h for additional help") + subparsers = parser.add_subparsers(dest='operation', help='run parttool -h for additional help') # Specify the supported operations - 
read_part_subparser = subparsers.add_parser("read_partition", help="read partition from device and dump contents into a file", + read_part_subparser = subparsers.add_parser('read_partition', help='read partition from device and dump contents into a file', parents=[partition_selection_parser]) - read_part_subparser.add_argument("--output", help="file to dump the read partition contents to") + read_part_subparser.add_argument('--output', help='file to dump the read partition contents to') - write_part_subparser = subparsers.add_parser("write_partition", help="write contents of a binary file to partition on device", + write_part_subparser = subparsers.add_parser('write_partition', help='write contents of a binary file to partition on device', parents=[partition_selection_parser]) - write_part_subparser.add_argument("--input", help="file whose contents are to be written to the partition offset") + write_part_subparser.add_argument('--input', help='file whose contents are to be written to the partition offset') - subparsers.add_parser("erase_partition", help="erase the contents of a partition on the device", parents=[partition_selection_parser]) + subparsers.add_parser('erase_partition', help='erase the contents of a partition on the device', parents=[partition_selection_parser]) - print_partition_info_subparser = subparsers.add_parser("get_partition_info", help="get partition information", parents=[partition_selection_parser]) - print_partition_info_subparser.add_argument("--info", help="type of partition information to get", - choices=["name", "type", "subtype", "offset", "size", "encrypted"], default=["offset", "size"], nargs="+") - print_partition_info_subparser.add_argument('--part_list', help="Get a list of partitions suitable for a given type", action='store_true') + print_partition_info_subparser = subparsers.add_parser('get_partition_info', help='get partition information', parents=[partition_selection_parser]) + print_partition_info_subparser.add_argument('--info', help='type of partition information to get', + choices=['name', 'type', 'subtype', 'offset', 'size', 'encrypted'], default=['offset', 'size'], nargs='+') + print_partition_info_subparser.add_argument('--part_list', help='Get a list of partitions suitable for a given type', action='store_true') args = parser.parse_args() quiet = args.quiet @@ -295,40 +296,40 @@ def main(): partition_id = PartitionName(args.partition_name) elif args.partition_type: if not args.partition_subtype: - raise RuntimeError("--partition-subtype should be defined when --partition-type is defined") + raise RuntimeError('--partition-subtype should be defined when --partition-type is defined') partition_id = PartitionType(args.partition_type, args.partition_subtype, getattr(args, 'part_list', None)) elif args.partition_boot_default: partition_id = PARTITION_BOOT_DEFAULT else: - raise RuntimeError("Partition to operate on should be defined using --partition-name OR \ - partition-type,--partition-subtype OR partition-boot-default") + raise RuntimeError('Partition to operate on should be defined using --partition-name OR \ + partition-type,--partition-subtype OR partition-boot-default') # Prepare the device to perform operation on target_args = {} if args.port: - target_args["port"] = args.port + target_args['port'] = args.port if args.baud: - target_args["baud"] = args.baud + target_args['baud'] = args.baud if args.partition_table_file: - target_args["partition_table_file"] = args.partition_table_file + target_args['partition_table_file'] = 
args.partition_table_file if args.partition_table_offset: - target_args["partition_table_offset"] = int(args.partition_table_offset, 0) + target_args['partition_table_offset'] = int(args.partition_table_offset, 0) if args.esptool_args: - target_args["esptool_args"] = args.esptool_args + target_args['esptool_args'] = args.esptool_args if args.esptool_write_args: - target_args["esptool_write_args"] = args.esptool_write_args + target_args['esptool_write_args'] = args.esptool_write_args if args.esptool_read_args: - target_args["esptool_read_args"] = args.esptool_read_args + target_args['esptool_read_args'] = args.esptool_read_args if args.esptool_erase_args: - target_args["esptool_erase_args"] = args.esptool_erase_args + target_args['esptool_erase_args'] = args.esptool_erase_args target = ParttoolTarget(**target_args) @@ -336,9 +337,9 @@ def main(): common_args = {'target':target, 'partition_id':partition_id} parttool_ops = { 'erase_partition':(_erase_partition, []), - 'read_partition':(_read_partition, ["output"]), - 'write_partition':(_write_partition, ["input"]), - 'get_partition_info':(_get_partition_info, ["info"]) + 'read_partition':(_read_partition, ['output']), + 'write_partition':(_write_partition, ['input']), + 'get_partition_info':(_get_partition_info, ['info']) } (op, op_args) = parttool_ops[args.operation] diff --git a/components/partition_table/test_gen_esp32part_host/gen_esp32part_tests.py b/components/partition_table/test_gen_esp32part_host/gen_esp32part_tests.py index 2607a0deac..2af5031395 100755 --- a/components/partition_table/test_gen_esp32part_host/gen_esp32part_tests.py +++ b/components/partition_table/test_gen_esp32part_host/gen_esp32part_tests.py @@ -1,18 +1,19 @@ #!/usr/bin/env python -from __future__ import print_function, division -import unittest -import struct +from __future__ import division, print_function + import csv -import sys -import subprocess -import tempfile -import os import io +import os +import struct +import subprocess +import sys +import tempfile +import unittest try: import gen_esp32part except ImportError: - sys.path.append("..") + sys.path.append('..') import gen_esp32part SIMPLE_CSV = """ @@ -20,40 +21,40 @@ SIMPLE_CSV = """ factory,0,2,65536,1048576, """ -LONGER_BINARY_TABLE = b"" +LONGER_BINARY_TABLE = b'' # type 0x00, subtype 0x00, # offset 64KB, size 1MB -LONGER_BINARY_TABLE += b"\xAA\x50\x00\x00" + \ - b"\x00\x00\x01\x00" + \ - b"\x00\x00\x10\x00" + \ - b"factory\0" + (b"\0" * 8) + \ - b"\x00\x00\x00\x00" +LONGER_BINARY_TABLE += b'\xAA\x50\x00\x00' + \ + b'\x00\x00\x01\x00' + \ + b'\x00\x00\x10\x00' + \ + b'factory\0' + (b'\0' * 8) + \ + b'\x00\x00\x00\x00' # type 0x01, subtype 0x20, # offset 0x110000, size 128KB -LONGER_BINARY_TABLE += b"\xAA\x50\x01\x20" + \ - b"\x00\x00\x11\x00" + \ - b"\x00\x02\x00\x00" + \ - b"data" + (b"\0" * 12) + \ - b"\x00\x00\x00\x00" +LONGER_BINARY_TABLE += b'\xAA\x50\x01\x20' + \ + b'\x00\x00\x11\x00' + \ + b'\x00\x02\x00\x00' + \ + b'data' + (b'\0' * 12) + \ + b'\x00\x00\x00\x00' # type 0x10, subtype 0x00, # offset 0x150000, size 1MB -LONGER_BINARY_TABLE += b"\xAA\x50\x10\x00" + \ - b"\x00\x00\x15\x00" + \ - b"\x00\x10\x00\x00" + \ - b"second" + (b"\0" * 10) + \ - b"\x00\x00\x00\x00" +LONGER_BINARY_TABLE += b'\xAA\x50\x10\x00' + \ + b'\x00\x00\x15\x00' + \ + b'\x00\x10\x00\x00' + \ + b'second' + (b'\0' * 10) + \ + b'\x00\x00\x00\x00' # MD5 checksum -LONGER_BINARY_TABLE += b"\xEB\xEB" + b"\xFF" * 14 +LONGER_BINARY_TABLE += b'\xEB\xEB' + b'\xFF' * 14 LONGER_BINARY_TABLE += 
b'\xf9\xbd\x06\x1b\x45\x68\x6f\x86\x57\x1a\x2c\xd5\x2a\x1d\xa6\x5b' # empty partition -LONGER_BINARY_TABLE += b"\xFF" * 32 +LONGER_BINARY_TABLE += b'\xFF' * 32 def _strip_trailing_ffs(binary_table): """ Strip all FFs down to the last 32 bytes (terminating entry) """ - while binary_table.endswith(b"\xFF" * 64): + while binary_table.endswith(b'\xFF' * 64): binary_table = binary_table[0:len(binary_table) - 32] return binary_table @@ -75,7 +76,7 @@ class CSVParserTests(Py23TestCase): def test_simple_partition(self): table = gen_esp32part.PartitionTable.from_csv(SIMPLE_CSV) self.assertEqual(len(table), 1) - self.assertEqual(table[0].name, "factory") + self.assertEqual(table[0].name, 'factory') self.assertEqual(table[0].type, 0) self.assertEqual(table[0].subtype, 2) self.assertEqual(table[0].offset, 65536) @@ -86,7 +87,7 @@ class CSVParserTests(Py23TestCase): # Name,Type, SubType,Offset,Size ihavenotype, """ - with self.assertRaisesRegex(gen_esp32part.InputError, "type"): + with self.assertRaisesRegex(gen_esp32part.InputError, 'type'): gen_esp32part.PartitionTable.from_csv(csv) def test_type_subtype_names(self): @@ -115,15 +116,15 @@ myota_status, data, ota,, 0x100000 nomagic = gen_esp32part.PartitionTable.from_csv(csv_nomagicnumbers) nomagic.verify() - self.assertEqual(nomagic["myapp"].type, 0) - self.assertEqual(nomagic["myapp"].subtype, 0) - self.assertEqual(nomagic["myapp"], magic["myapp"]) - self.assertEqual(nomagic["myota_0"].type, 0) - self.assertEqual(nomagic["myota_0"].subtype, 0x10) - self.assertEqual(nomagic["myota_0"], magic["myota_0"]) - self.assertEqual(nomagic["myota_15"], magic["myota_15"]) - self.assertEqual(nomagic["mytest"], magic["mytest"]) - self.assertEqual(nomagic["myota_status"], magic["myota_status"]) + self.assertEqual(nomagic['myapp'].type, 0) + self.assertEqual(nomagic['myapp'].subtype, 0) + self.assertEqual(nomagic['myapp'], magic['myapp']) + self.assertEqual(nomagic['myota_0'].type, 0) + self.assertEqual(nomagic['myota_0'].subtype, 0x10) + self.assertEqual(nomagic['myota_0'], magic['myota_0']) + self.assertEqual(nomagic['myota_15'], magic['myota_15']) + self.assertEqual(nomagic['mytest'], magic['mytest']) + self.assertEqual(nomagic['myota_status'], magic['myota_status']) # self.assertEqual(nomagic.to_binary(), magic.to_binary()) @@ -176,7 +177,7 @@ second, data, 0x15, , 1M first, app, factory, 0x100000, 2M second, app, ota_0, 0x200000, 1M """ - with self.assertRaisesRegex(gen_esp32part.InputError, "overlap"): + with self.assertRaisesRegex(gen_esp32part.InputError, 'overlap'): t = gen_esp32part.PartitionTable.from_csv(csv) t.verify() @@ -185,7 +186,7 @@ second, app, ota_0, 0x200000, 1M first, app, factory, 0x100000, 1M first, app, ota_0, 0x200000, 1M """ - with self.assertRaisesRegex(gen_esp32part.InputError, "Partition names must be unique"): + with self.assertRaisesRegex(gen_esp32part.InputError, 'Partition names must be unique'): t = gen_esp32part.PartitionTable.from_csv(csv) t.verify() @@ -200,10 +201,10 @@ first, 0x30, 0xEE, 0x100400, 0x300000 self.assertEqual(len(tb), 64 + 32) self.assertEqual(b'\xAA\x50', tb[0:2]) # magic self.assertEqual(b'\x30\xee', tb[2:4]) # type, subtype - eo, es = struct.unpack("" + 'little': '<', + 'big': '>' } _len_dict = { - 1: "B", - 2: "H", - 4: "I", - 8: "Q" + 1: 'B', + 2: 'H', + 4: 'I', + 8: 'Q' } _type_dict = { @@ -137,7 +138,7 @@ class SpiffsObjLuPage(SpiffsPage): def to_binary(self): global test - img = b"" + img = b'' for (obj_id, page_type) in self.obj_ids: if page_type == SpiffsObjIndexPage: @@ -147,7 +148,7 @@ class 
SpiffsObjLuPage(SpiffsPage): assert(len(img) <= self.build_config.page_size) - img += b"\xFF" * (self.build_config.page_size - len(img)) + img += b'\xFF' * (self.build_config.page_size - len(img)) return img @@ -205,7 +206,7 @@ class SpiffsObjIndexPage(SpiffsPage): SPIFFS_PH_FLAG_USED_FINAL_INDEX) # Add padding before the object index page specific information - img += b"\xFF" * self.build_config.OBJ_DATA_PAGE_HEADER_LEN_ALIGNED_PAD + img += b'\xFF' * self.build_config.OBJ_DATA_PAGE_HEADER_LEN_ALIGNED_PAD # If this is the first object index page for the object, add filename, type # and size information @@ -216,7 +217,7 @@ class SpiffsObjIndexPage(SpiffsPage): self.size, SPIFFS_TYPE_FILE) - img += self.name.encode() + (b"\x00" * ((self.build_config.obj_name_len - len(self.name)) + self.build_config.meta_len)) + img += self.name.encode() + (b'\x00' * ((self.build_config.obj_name_len - len(self.name)) + self.build_config.meta_len)) # Finally, add the page index of data pages for page in self.pages: @@ -226,7 +227,7 @@ assert(len(img) <= self.build_config.page_size) - img += b"\xFF" * (self.build_config.page_size - len(img)) + img += b'\xFF' * (self.build_config.page_size - len(img)) return img @@ -252,7 +253,7 @@ class SpiffsObjDataPage(SpiffsPage): assert(len(img) <= self.build_config.page_size) - img += b"\xFF" * (self.build_config.page_size - len(img)) + img += b'\xFF' * (self.build_config.page_size - len(img)) return img @@ -296,7 +297,7 @@ class SpiffsBlock(): except AttributeError: # no next lookup page # Since the amount of lookup pages is pre-computed at every block instance, # this should never occur - raise RuntimeError("invalid attempt to add page to a block when there is no more space in lookup") + raise RuntimeError('invalid attempt to add page to a block when there is no more space in lookup') self.pages.append(page) @@ -335,7 +336,7 @@ class SpiffsBlock(): return self.remaining_pages <= 0 def to_binary(self, blocks_lim): - img = b"" + img = b'' if self.build_config.use_magic: for (idx, page) in enumerate(self.pages): @@ -348,14 +349,14 @@ class SpiffsBlock(): assert(len(img) <= self.build_config.block_size) - img += b"\xFF" * (self.build_config.block_size - len(img)) + img += b'\xFF' * (self.build_config.block_size - len(img)) return img class SpiffsFS(): def __init__(self, img_size, build_config): if img_size % build_config.block_size != 0: - raise RuntimeError("image size should be a multiple of block size") + raise RuntimeError('image size should be a multiple of block size') self.img_size = img_size self.build_config = build_config @@ -367,7 +368,7 @@ class SpiffsFS(): def _create_block(self): if self.is_full(): - raise SpiffsFullError("the image size has been exceeded") + raise SpiffsFullError('the image size has been exceeded') block = SpiffsBlock(len(self.blocks), self.blocks_lim, self.build_config) self.blocks.append(block) @@ -385,7 +386,7 @@ class SpiffsFS(): name = img_path - with open(file_path, "rb") as obj: + with open(file_path, 'rb') as obj: contents = obj.read() stream = io.BytesIO(contents) @@ -434,7 +435,7 @@ class SpiffsFS(): self.cur_obj_id += 1 def to_binary(self): - img = b"" + img = b'' for block in self.blocks: img += block.to_binary(self.blocks_lim) bix = len(self.blocks) @@ -447,78 +448,78 @@ class SpiffsFS(): bix += 1 else: # Just fill the remaining space with 0xFF - img += "\xFF" * (self.img_size - len(img)) + img += b'\xFF' * (self.img_size - len(img)) return img def main(): if sys.version_info[0] < 3: - print("WARNING: 
Support for Python 2 is deprecated and will be removed in future versions.", file=sys.stderr) + print('WARNING: Support for Python 2 is deprecated and will be removed in future versions.', file=sys.stderr) elif sys.version_info[0] == 3 and sys.version_info[1] < 6: - print("WARNING: Python 3 versions older than 3.6 are not supported.", file=sys.stderr) - parser = argparse.ArgumentParser(description="SPIFFS Image Generator", + print('WARNING: Python 3 versions older than 3.6 are not supported.', file=sys.stderr) + parser = argparse.ArgumentParser(description='SPIFFS Image Generator', formatter_class=argparse.ArgumentDefaultsHelpFormatter) - parser.add_argument("image_size", - help="Size of the created image") + parser.add_argument('image_size', + help='Size of the created image') - parser.add_argument("base_dir", - help="Path to directory from which the image will be created") + parser.add_argument('base_dir', + help='Path to directory from which the image will be created') - parser.add_argument("output_file", - help="Created image output file path") + parser.add_argument('output_file', + help='Created image output file path') - parser.add_argument("--page-size", - help="Logical page size. Set to value same as CONFIG_SPIFFS_PAGE_SIZE.", + parser.add_argument('--page-size', + help='Logical page size. Set to value same as CONFIG_SPIFFS_PAGE_SIZE.', type=int, default=256) - parser.add_argument("--block-size", + parser.add_argument('--block-size', help="Logical block size. Set to the same value as the flash chip's sector size (g_rom_flashchip.sector_size).", type=int, default=4096) - parser.add_argument("--obj-name-len", - help="File full path maximum length. Set to value same as CONFIG_SPIFFS_OBJ_NAME_LEN.", + parser.add_argument('--obj-name-len', + help='File full path maximum length. Set to value same as CONFIG_SPIFFS_OBJ_NAME_LEN.', type=int, default=32) - parser.add_argument("--meta-len", - help="File metadata length. Set to value same as CONFIG_SPIFFS_META_LENGTH.", + parser.add_argument('--meta-len', + help='File metadata length. Set to value same as CONFIG_SPIFFS_META_LENGTH.', type=int, default=4) - parser.add_argument("--use-magic", - help="Use magic number to create an identifiable SPIFFS image. Specify if CONFIG_SPIFFS_USE_MAGIC.", - action="store_true", + parser.add_argument('--use-magic', + help='Use magic number to create an identifiable SPIFFS image. Specify if CONFIG_SPIFFS_USE_MAGIC.', + action='store_true', default=True) - parser.add_argument("--follow-symlinks", - help="Take into account symbolic links during partition image creation.", - action="store_true", + parser.add_argument('--follow-symlinks', + help='Take into account symbolic links during partition image creation.', + action='store_true', default=False) - parser.add_argument("--use-magic-len", - help="Use position in memory to create different magic numbers for each block. Specify if CONFIG_SPIFFS_USE_MAGIC_LENGTH.", - action="store_true", + parser.add_argument('--use-magic-len', + help='Use position in memory to create different magic numbers for each block. Specify if CONFIG_SPIFFS_USE_MAGIC_LENGTH.', + action='store_true', default=True) - parser.add_argument("--big-endian", - help="Specify if the target architecture is big-endian. If not specified, little-endian is assumed.", - action="store_true", + parser.add_argument('--big-endian', + help='Specify if the target architecture is big-endian. 
If not specified, little-endian is assumed.', + action='store_true', default=False) args = parser.parse_args() if not os.path.exists(args.base_dir): - raise RuntimeError("given base directory %s does not exist" % args.base_dir) + raise RuntimeError('given base directory %s does not exist' % args.base_dir) - with open(args.output_file, "wb") as image_file: + with open(args.output_file, 'wb') as image_file: image_size = int(args.image_size, 0) spiffs_build_default = SpiffsBuildConfig(args.page_size, SPIFFS_PAGE_IX_LEN, args.block_size, SPIFFS_BLOCK_IX_LEN, args.meta_len, args.obj_name_len, SPIFFS_OBJ_ID_LEN, SPIFFS_SPAN_IX_LEN, - True, True, "big" if args.big_endian else "little", + True, True, 'big' if args.big_endian else 'little', args.use_magic, args.use_magic_len) spiffs = SpiffsFS(image_size, spiffs_build_default) @@ -526,12 +527,12 @@ def main(): for root, dirs, files in os.walk(args.base_dir, followlinks=args.follow_symlinks): for f in files: full_path = os.path.join(root, f) - spiffs.create_file("/" + os.path.relpath(full_path, args.base_dir).replace("\\", "/"), full_path) + spiffs.create_file('/' + os.path.relpath(full_path, args.base_dir).replace('\\', '/'), full_path) image = spiffs.to_binary() image_file.write(image) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/components/ulp/esp32ulp_mapgen.py b/components/ulp/esp32ulp_mapgen.py index 9a8ad9e75a..4191190373 100755 --- a/components/ulp/esp32ulp_mapgen.py +++ b/components/ulp/esp32ulp_mapgen.py @@ -6,58 +6,59 @@ # Distributed under the terms of Apache License v2.0 found in the top-level LICENSE file. from __future__ import print_function -from optparse import OptionParser + import sys +from optparse import OptionParser BASE_ADDR = 0x50000000 def gen_ld_h_from_sym(f_sym, f_ld, f_h): - f_ld.write("/* Variable definitions for ESP32ULP linker\n") - f_ld.write(" * This file is generated automatically by esp32ulp_mapgen.py utility.\n") - f_ld.write(" */\n\n") - f_h.write("// Variable definitions for ESP32ULP\n") - f_h.write("// This file is generated automatically by esp32ulp_mapgen.py utility\n\n") - f_h.write("#pragma once\n\n") + f_ld.write('/* Variable definitions for ESP32ULP linker\n') + f_ld.write(' * This file is generated automatically by esp32ulp_mapgen.py utility.\n') + f_ld.write(' */\n\n') + f_h.write('// Variable definitions for ESP32ULP\n') + f_h.write('// This file is generated automatically by esp32ulp_mapgen.py utility\n\n') + f_h.write('#pragma once\n\n') for line in f_sym: - name, _, addr_str = line.split(" ", 2) + name, _, addr_str = line.split(' ', 2) addr = int(addr_str, 16) + BASE_ADDR - f_h.write("extern uint32_t ulp_{0};\n".format(name)) - f_ld.write("PROVIDE ( ulp_{0} = 0x{1:08x} );\n".format(name, addr)) + f_h.write('extern uint32_t ulp_{0};\n'.format(name)) + f_ld.write('PROVIDE ( ulp_{0} = 0x{1:08x} );\n'.format(name, addr)) def gen_ld_h_from_sym_riscv(f_sym, f_ld, f_h): - f_ld.write("/* Variable definitions for ESP32ULP linker\n") - f_ld.write(" * This file is generated automatically by esp32ulp_mapgen.py utility.\n") - f_ld.write(" */\n\n") - f_h.write("// Variable definitions for ESP32ULP\n") - f_h.write("// This file is generated automatically by esp32ulp_mapgen.py utility\n\n") - f_h.write("#pragma once\n\n") + f_ld.write('/* Variable definitions for ESP32ULP linker\n') + f_ld.write(' * This file is generated automatically by esp32ulp_mapgen.py utility.\n') + f_ld.write(' */\n\n') + f_h.write('// Variable definitions for ESP32ULP\n') + f_h.write('// This file is 
generated automatically by esp32ulp_mapgen.py utility\n\n') + f_h.write('#pragma once\n\n') for line in f_sym: addr_str, _, name = line.split() addr = int(addr_str, 16) + BASE_ADDR - f_h.write("extern uint32_t ulp_{0};\n".format(name)) - f_ld.write("PROVIDE ( ulp_{0} = 0x{1:08x} );\n".format(name, addr)) + f_h.write('extern uint32_t ulp_{0};\n'.format(name)) + f_ld.write('PROVIDE ( ulp_{0} = 0x{1:08x} );\n'.format(name, addr)) def main(): if sys.version_info[0] < 3: - print("WARNING: Support for Python 2 is deprecated and will be removed in future versions.", file=sys.stderr) + print('WARNING: Support for Python 2 is deprecated and will be removed in future versions.', file=sys.stderr) elif sys.version_info[0] == 3 and sys.version_info[1] < 6: - print("WARNING: Python 3 versions older than 3.6 are not supported.", file=sys.stderr) - description = ("This application generates .h and .ld files for symbols defined in input file. " - "The input symbols file can be generated using nm utility like this: " - "esp32-ulp-nm -g -f posix > ") + print('WARNING: Python 3 versions older than 3.6 are not supported.', file=sys.stderr) + description = ('This application generates .h and .ld files for symbols defined in input file. ' + 'The input symbols file can be generated using nm utility like this: ' + 'esp32-ulp-nm -g -f posix > ') parser = OptionParser(description=description) - parser.add_option("-s", "--symfile", dest="symfile", - help="symbols file name", metavar="SYMFILE") - parser.add_option("-o", "--outputfile", dest="outputfile", - help="destination .h and .ld files name prefix", metavar="OUTFILE") + parser.add_option('-s', '--symfile', dest='symfile', + help='symbols file name', metavar='SYMFILE') + parser.add_option('-o', '--outputfile', dest='outputfile', + help='destination .h and .ld files name prefix', metavar='OUTFILE') - parser.add_option("--riscv", action="store_true", help="use format for ulp riscv .sym file") + parser.add_option('--riscv', action='store_true', help='use format for ulp riscv .sym file') (options, args) = parser.parse_args() if options.symfile is None: @@ -69,14 +70,14 @@ def main(): return 1 if options.riscv: - with open(options.outputfile + ".h", 'w') as f_h, open(options.outputfile + ".ld", 'w') as f_ld, open(options.symfile) as f_sym: + with open(options.outputfile + '.h', 'w') as f_h, open(options.outputfile + '.ld', 'w') as f_ld, open(options.symfile) as f_sym: gen_ld_h_from_sym_riscv(f_sym, f_ld, f_h) return 0 - with open(options.outputfile + ".h", 'w') as f_h, open(options.outputfile + ".ld", 'w') as f_ld, open(options.symfile) as f_sym: + with open(options.outputfile + '.h', 'w') as f_h, open(options.outputfile + '.ld', 'w') as f_ld, open(options.symfile) as f_sym: gen_ld_h_from_sym(f_sym, f_ld, f_h) return 0 -if __name__ == "__main__": +if __name__ == '__main__': exit(main()) diff --git a/components/wifi_provisioning/python/wifi_config_pb2.py b/components/wifi_provisioning/python/wifi_config_pb2.py index 0dd6d43d40..1e53d49cdc 100644 --- a/components/wifi_provisioning/python/wifi_config_pb2.py +++ b/components/wifi_provisioning/python/wifi_config_pb2.py @@ -2,13 +2,15 @@ # source: wifi_config.proto import sys + _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pb2 from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf 
import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 +from google.protobuf.internal import enum_type_wrapper + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -17,7 +19,6 @@ _sym_db = _symbol_database.Default() import constants_pb2 as constants__pb2 import wifi_constants_pb2 as wifi__constants__pb2 - DESCRIPTOR = _descriptor.FileDescriptor( name='wifi_config.proto', package='', @@ -163,21 +164,21 @@ _CMDSETCONFIG = _descriptor.Descriptor( _descriptor.FieldDescriptor( name='ssid', full_name='CmdSetConfig.ssid', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value=_b(''), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='passphrase', full_name='CmdSetConfig.passphrase', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value=_b(''), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bssid', full_name='CmdSetConfig.bssid', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value=_b(''), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None, file=DESCRIPTOR), diff --git a/components/wifi_provisioning/python/wifi_constants_pb2.py b/components/wifi_provisioning/python/wifi_constants_pb2.py index 8090568d2d..e969529811 100644 --- a/components/wifi_provisioning/python/wifi_constants_pb2.py +++ b/components/wifi_provisioning/python/wifi_constants_pb2.py @@ -2,13 +2,15 @@ # source: wifi_constants.proto import sys + _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pb2 from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database -from google.protobuf import descriptor_pb2 +from google.protobuf.internal import enum_type_wrapper + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -141,7 +143,7 @@ _WIFICONNECTEDSTATE = _descriptor.Descriptor( _descriptor.FieldDescriptor( name='ip4_addr', full_name='WifiConnectedState.ip4_addr', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=_b('').decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None, file=DESCRIPTOR), @@ -155,14 +157,14 @@ _WIFICONNECTEDSTATE = _descriptor.Descriptor( _descriptor.FieldDescriptor( name='ssid', full_name='WifiConnectedState.ssid', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value=_b(''), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='bssid', full_name='WifiConnectedState.bssid', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, 
default_value=_b(""), + has_default_value=False, default_value=_b(''), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None, file=DESCRIPTOR), diff --git a/components/wifi_provisioning/python/wifi_scan_pb2.py b/components/wifi_provisioning/python/wifi_scan_pb2.py index 2e95d8f505..70838085de 100644 --- a/components/wifi_provisioning/python/wifi_scan_pb2.py +++ b/components/wifi_provisioning/python/wifi_scan_pb2.py @@ -3,12 +3,14 @@ # source: wifi_scan.proto import sys + _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import enum_type_wrapper + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -17,7 +19,6 @@ _sym_db = _symbol_database.Default() import constants_pb2 as constants__pb2 import wifi_constants_pb2 as wifi__constants__pb2 - DESCRIPTOR = _descriptor.FileDescriptor( name='wifi_scan.proto', package='', @@ -261,7 +262,7 @@ _WIFISCANRESULT = _descriptor.Descriptor( _descriptor.FieldDescriptor( name='ssid', full_name='WiFiScanResult.ssid', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value=_b(''), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -282,7 +283,7 @@ _WIFISCANRESULT = _descriptor.Descriptor( _descriptor.FieldDescriptor( name='bssid', full_name='WiFiScanResult.bssid', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value=_b(''), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), diff --git a/components/xtensa/trax/traceparse.py b/components/xtensa/trax/traceparse.py index 119e4c9e88..c335f93a89 100644 --- a/components/xtensa/trax/traceparse.py +++ b/components/xtensa/trax/traceparse.py @@ -50,11 +50,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
from __future__ import print_function + import sys # Check if loaded into GDB try: - assert gdb.__name__ == "gdb" + assert gdb.__name__ == 'gdb' WITH_GDB = True except NameError: WITH_GDB = False @@ -114,7 +115,7 @@ class TraxPacket(object): return result def __str__(self): - return "%d byte packet%s" % (self.size_bytes, " (truncated)" if self.truncated else "") + return '%d byte packet%s' % (self.size_bytes, ' (truncated)' if self.truncated else '') class TraxMessage(object): @@ -175,7 +176,7 @@ class TraxMessage(object): self.icnt = self.packets[0].get_bits(12, -1) self.is_correlation = True else: - raise NotImplementedError("Unknown message type (%d)" % self.msg_type) + raise NotImplementedError('Unknown message type (%d)' % self.msg_type) def process_forward(self, cur_pc): """ @@ -229,23 +230,23 @@ class TraxMessage(object): return prev_pc def __str__(self): - desc = "Unknown (%d)" % self.msg_type - extra = "" + desc = 'Unknown (%d)' % self.msg_type + extra = '' if self.truncated: - desc = "Truncated" + desc = 'Truncated' if self.msg_type == TVAL_INDBR: - desc = "Indirect branch" - extra = ", icnt=%d, uaddr=0x%x, exc=%d" % (self.icnt, self.uaddr, self.is_exception) + desc = 'Indirect branch' + extra = ', icnt=%d, uaddr=0x%x, exc=%d' % (self.icnt, self.uaddr, self.is_exception) if self.msg_type == TVAL_INDBRSYNC: - desc = "Indirect branch w/sync" - extra = ", icnt=%d, dcont=%d, exc=%d" % (self.icnt, self.dcont, self.is_exception) + desc = 'Indirect branch w/sync' + extra = ', icnt=%d, dcont=%d, exc=%d' % (self.icnt, self.dcont, self.is_exception) if self.msg_type == TVAL_SYNC: - desc = "Synchronization" - extra = ", icnt=%d, dcont=%d" % (self.icnt, self.dcont) + desc = 'Synchronization' + extra = ', icnt=%d, dcont=%d' % (self.icnt, self.dcont) if self.msg_type == TVAL_CORR: - desc = "Correlation" - extra = ", icnt=%d" % self.icnt - return "%s message, %d packets, PC range 0x%08x - 0x%08x, target PC 0x%08x" % ( + desc = 'Correlation' + extra = ', icnt=%d' % self.icnt + return '%s message, %d packets, PC range 0x%08x - 0x%08x, target PC 0x%08x' % ( desc, len(self.packets), self.pc_start, self.pc_end, self.pc_target) + extra @@ -264,7 +265,7 @@ def load_messages(data): # Iterate over the input data, splitting bytes into packets and messages for i, b in enumerate(data): if (b & MSEO_MSGEND) and not (b & MSEO_PKTEND): - raise AssertionError("Invalid MSEO bits in b=0x%x. Not a TRAX dump?" % b) + raise AssertionError('Invalid MSEO bits in b=0x%x. Not a TRAX dump?' % b) if b & MSEO_PKTEND: pkt_cnt += 1 @@ -276,7 +277,7 @@ def load_messages(data): try: messages.append(TraxMessage(packets, len(messages) == 0)) except NotImplementedError as e: - sys.stderr.write("Failed to parse message #%03d (at %d bytes): %s\n" % (msg_cnt, i, str(e))) + sys.stderr.write('Failed to parse message #%03d (at %d bytes): %s\n' % (msg_cnt, i, str(e))) packets = [] # Resolve PC ranges of messages. 
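The load_messages() hunks above lean on the MSEO framing bits defined earlier in traceparse.py (outside this patch): every trace byte carries flag bits, MSEO_PKTEND closes the current packet, MSEO_MSGEND closes the current message, and a message may only end on a packet boundary, which is exactly what the AssertionError above enforces. A minimal, self-contained sketch of that framing, with assumed bit positions (the real constant values are not shown in this diff):

    # Illustrative sketch only; the bit values are assumptions, not taken
    # from traceparse.py.
    MSEO_PKTEND = 0x01  # assumed flag: byte ends the current packet
    MSEO_MSGEND = 0x02  # assumed flag: byte also ends the current message

    def split_messages(data):
        """Group raw TRAX bytes into messages made of packets."""
        messages, packets, current = [], [], []
        for b in data:
            # Mirror of the check in load_messages(): a message can only
            # end together with a packet.
            if (b & MSEO_MSGEND) and not (b & MSEO_PKTEND):
                raise AssertionError('Invalid MSEO bits in b=0x%x. Not a TRAX dump?' % b)
            current.append(b)
            if b & MSEO_PKTEND:
                packets.append(bytes(current))
                current = []
                if b & MSEO_MSGEND:
                    messages.append(packets)
                    packets = []
        return messages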
@@ -312,32 +313,32 @@ def parse_and_dump(filename, disassemble=WITH_GDB): data = f.read() messages = load_messages(data) - sys.stderr.write("Loaded %d messages in %d bytes\n" % (len(messages), len(data))) + sys.stderr.write('Loaded %d messages in %d bytes\n' % (len(messages), len(data))) for i, m in enumerate(messages): if m.truncated: continue - print("%04d: %s" % (i, str(m))) + print('%04d: %s' % (i, str(m))) if m.is_exception: - print("*** Exception occurred ***") + print('*** Exception occurred ***') if disassemble and WITH_GDB: try: - gdb.execute("disassemble 0x%08x, 0x%08x" % (m.pc_start, m.pc_end)) # noqa: F821 + gdb.execute('disassemble 0x%08x, 0x%08x' % (m.pc_start, m.pc_end)) # noqa: F821 except gdb.MemoryError: # noqa: F821 - print("Failed to disassemble from 0x%08x to 0x%08x" % (m.pc_start, m.pc_end)) + print('Failed to disassemble from 0x%08x to 0x%08x' % (m.pc_start, m.pc_end)) def main(): if sys.version_info[0] < 3: - print("WARNING: Support for Python 2 is deprecated and will be removed in future versions.", file=sys.stderr) + print('WARNING: Support for Python 2 is deprecated and will be removed in future versions.', file=sys.stderr) elif sys.version_info[0] == 3 and sys.version_info[1] < 6: - print("WARNING: Python 3 versions older than 3.6 are not supported.", file=sys.stderr) + print('WARNING: Python 3 versions older than 3.6 are not supported.', file=sys.stderr) if len(sys.argv) < 2: - sys.stderr.write("Usage: %s \n") + sys.stderr.write('Usage: %s \n') raise SystemExit(1) parse_and_dump(sys.argv[1]) -if __name__ == "__main__" and not WITH_GDB: +if __name__ == '__main__' and not WITH_GDB: main() diff --git a/docs/build_docs.py b/docs/build_docs.py index 3e1fedec21..85c9aa159f 100755 --- a/docs/build_docs.py +++ b/docs/build_docs.py @@ -24,31 +24,33 @@ # limitations under the License. 
# from __future__ import print_function + import argparse import locale import math import multiprocessing import os import os.path +import re import subprocess import sys -import re -from packaging import version from collections import namedtuple -LANGUAGES = ["en", "zh_CN"] -TARGETS = ["esp32", "esp32s2"] +from packaging import version -SPHINX_WARN_LOG = "sphinx-warning-log.txt" -SPHINX_SANITIZED_LOG = "sphinx-warning-log-sanitized.txt" -SPHINX_KNOWN_WARNINGS = os.path.join(os.environ["IDF_PATH"], "docs", "sphinx-known-warnings.txt") +LANGUAGES = ['en', 'zh_CN'] +TARGETS = ['esp32', 'esp32s2'] -DXG_WARN_LOG = "doxygen-warning-log.txt" -DXG_SANITIZED_LOG = "doxygen-warning-log-sanitized.txt" -DXG_KNOWN_WARNINGS = os.path.join(os.environ["IDF_PATH"], "docs", "doxygen-known-warnings.txt") +SPHINX_WARN_LOG = 'sphinx-warning-log.txt' +SPHINX_SANITIZED_LOG = 'sphinx-warning-log-sanitized.txt' +SPHINX_KNOWN_WARNINGS = os.path.join(os.environ['IDF_PATH'], 'docs', 'sphinx-known-warnings.txt') + +DXG_WARN_LOG = 'doxygen-warning-log.txt' +DXG_SANITIZED_LOG = 'doxygen-warning-log-sanitized.txt' +DXG_KNOWN_WARNINGS = os.path.join(os.environ['IDF_PATH'], 'docs', 'doxygen-known-warnings.txt') DXG_CI_VERSION = version.parse('1.8.11') -LogMessage = namedtuple("LogMessage", "original_text sanitized_text") +LogMessage = namedtuple('LogMessage', 'original_text sanitized_text') languages = LANGUAGES targets = TARGETS @@ -58,11 +60,11 @@ def main(): # check Python dependencies for docs try: subprocess.check_call([sys.executable, - os.path.join(os.environ["IDF_PATH"], - "tools", - "check_python_dependencies.py"), - "-r", - "{}/docs/requirements.txt".format(os.environ["IDF_PATH"]) + os.path.join(os.environ['IDF_PATH'], + 'tools', + 'check_python_dependencies.py'), + '-r', + '{}/docs/requirements.txt'.format(os.environ['IDF_PATH']) ]) except subprocess.CalledProcessError: raise SystemExit(2) # stdout will already have these errors @@ -73,31 +75,31 @@ def main(): # type not the str type. if ('UTF-8' not in locale.getlocale()) and ('utf8' not in locale.getlocale()): raise RuntimeError("build_docs.py requires the default locale's encoding to be UTF-8.\n" + - " - Linux. Setting environment variable LC_ALL=C.UTF-8 when running build_docs.py may be " + - "enough to fix this.\n" - " - Windows. Possible solution for the Windows 10 starting version 1803. Go to " + - "Control Panel->Clock and Region->Region->Administrative->Change system locale...; " + - "Check `Beta: Use Unicode UTF-8 for worldwide language support` and reboot") + ' - Linux. Setting environment variable LC_ALL=C.UTF-8 when running build_docs.py may be ' + + 'enough to fix this.\n' + ' - Windows. Possible solution for the Windows 10 starting version 1803. Go to ' + + 'Control Panel->Clock and Region->Region->Administrative->Change system locale...; ' + + 'Check `Beta: Use Unicode UTF-8 for worldwide language support` and reboot') parser = argparse.ArgumentParser(description='build_docs.py: Build IDF docs', prog='build_docs.py') - parser.add_argument("--language", "-l", choices=LANGUAGES, required=False) - parser.add_argument("--target", "-t", choices=TARGETS, required=False) - parser.add_argument("--build-dir", "-b", type=str, default="_build") - parser.add_argument("--source-dir", "-s", type=str, default="") - parser.add_argument("--builders", "-bs", nargs='+', type=str, default=["html"], - help="List of builders for Sphinx, e.g. 
html or latex, for latex a PDF is also generated") - parser.add_argument("--sphinx-parallel-builds", "-p", choices=["auto"] + [str(x) for x in range(8)], - help="Parallel Sphinx builds - number of independent Sphinx builds to run", default="auto") - parser.add_argument("--sphinx-parallel-jobs", "-j", choices=["auto"] + [str(x) for x in range(8)], - help="Sphinx parallel jobs argument - number of threads for each Sphinx build to use", default="1") - parser.add_argument("--input-docs", "-i", nargs='+', default=[""], - help="List of documents to build relative to the doc base folder, i.e. the language folder. Defaults to all documents") + parser.add_argument('--language', '-l', choices=LANGUAGES, required=False) + parser.add_argument('--target', '-t', choices=TARGETS, required=False) + parser.add_argument('--build-dir', '-b', type=str, default='_build') + parser.add_argument('--source-dir', '-s', type=str, default='') + parser.add_argument('--builders', '-bs', nargs='+', type=str, default=['html'], + help='List of builders for Sphinx, e.g. html or latex, for latex a PDF is also generated') + parser.add_argument('--sphinx-parallel-builds', '-p', choices=['auto'] + [str(x) for x in range(8)], + help='Parallel Sphinx builds - number of independent Sphinx builds to run', default='auto') + parser.add_argument('--sphinx-parallel-jobs', '-j', choices=['auto'] + [str(x) for x in range(8)], + help='Sphinx parallel jobs argument - number of threads for each Sphinx build to use', default='1') + parser.add_argument('--input-docs', '-i', nargs='+', default=[''], + help='List of documents to build relative to the doc base folder, i.e. the language folder. Defaults to all documents') action_parsers = parser.add_subparsers(dest='action') build_parser = action_parsers.add_parser('build', help='Build documentation') - build_parser.add_argument("--check-warnings-only", "-w", action='store_true') + build_parser.add_argument('--check-warnings-only', '-w', action='store_true') action_parsers.add_parser('linkcheck', help='Check links (a current IDF revision should be uploaded to GitHub)') @@ -107,27 +109,27 @@ def main(): global languages if args.language is None: - print("Building all languages") + print('Building all languages') languages = LANGUAGES else: languages = [args.language] global targets if args.target is None: - print("Building all targets") + print('Building all targets') targets = TARGETS else: targets = [args.target] - if args.action == "build" or args.action is None: + if args.action == 'build' or args.action is None: if args.action is None: args.check_warnings_only = False sys.exit(action_build(args)) - if args.action == "linkcheck": + if args.action == 'linkcheck': sys.exit(action_linkcheck(args)) - if args.action == "gh-linkcheck": + if args.action == 'gh-linkcheck': sys.exit(action_gh_linkcheck(args)) @@ -135,7 +137,7 @@ def parallel_call(args, callback): num_sphinx_builds = len(languages) * len(targets) num_cpus = multiprocessing.cpu_count() - if args.sphinx_parallel_builds == "auto": + if args.sphinx_parallel_builds == 'auto': # at most one sphinx build per CPU, up to the number of CPUs args.sphinx_parallel_builds = min(num_sphinx_builds, num_cpus) else: @@ -143,17 +145,17 @@ def parallel_call(args, callback): # Force -j1 because sphinx works incorrectly args.sphinx_parallel_jobs = 1 - if args.sphinx_parallel_jobs == "auto": + if args.sphinx_parallel_jobs == 'auto': # N CPUs per build job, rounded up - (maybe smarter to round down to avoid contention, idk) args.sphinx_parallel_jobs = 
int(math.ceil(num_cpus / args.sphinx_parallel_builds)) else: args.sphinx_parallel_jobs = int(args.sphinx_parallel_jobs) - print("Will use %d parallel builds and %d jobs per build" % (args.sphinx_parallel_builds, args.sphinx_parallel_jobs)) + print('Will use %d parallel builds and %d jobs per build' % (args.sphinx_parallel_builds, args.sphinx_parallel_jobs)) pool = multiprocessing.Pool(args.sphinx_parallel_builds) if args.sphinx_parallel_jobs > 1: - print("WARNING: Sphinx parallel jobs currently produce incorrect docs output with Sphinx 1.8.5") + print('WARNING: Sphinx parallel jobs currently produce incorrect docs output with Sphinx 1.8.5') # make a list of all combinations of build_docs() args as tuples # @@ -173,13 +175,13 @@ def parallel_call(args, callback): is_error = False for ret in errcodes: if ret != 0: - print("\nThe following language/target combinations failed to build:") + print('\nThe following language/target combinations failed to build:') is_error = True break if is_error: for ret, entry in zip(errcodes, entries): if ret != 0: - print("language: %s, target: %s, errcode: %d" % (entry[0], entry[1], ret)) + print('language: %s, target: %s, errcode: %d' % (entry[0], entry[1], ret)) # Don't re-throw real error code from each parallel process return 1 else: @@ -193,9 +195,9 @@ def sphinx_call(language, target, build_dir, src_dir, sphinx_parallel_jobs, buil # wrap stdout & stderr in a way that lets us see which build_docs instance they come from # # this doesn't apply to subprocesses, they write to OS stdout & stderr so no prefix appears - prefix = "%s/%s: " % (language, target) + prefix = '%s/%s: ' % (language, target) - print("Building in build_dir: %s" % (build_dir)) + print('Building in build_dir: %s' % (build_dir)) try: os.makedirs(build_dir) except OSError: @@ -205,21 +207,21 @@ def sphinx_call(language, target, build_dir, src_dir, sphinx_parallel_jobs, buil environ.update(os.environ) environ['BUILDDIR'] = build_dir - args = [sys.executable, "-u", "-m", "sphinx.cmd.build", - "-j", str(sphinx_parallel_jobs), - "-b", buildername, - "-d", os.path.join(build_dir, "doctrees"), - "-w", SPHINX_WARN_LOG, - "-t", target, - "-D", "idf_target={}".format(target), - "-D", "docs_to_build={}".format(",". join(input_docs)), + args = [sys.executable, '-u', '-m', 'sphinx.cmd.build', + '-j', str(sphinx_parallel_jobs), + '-b', buildername, + '-d', os.path.join(build_dir, 'doctrees'), + '-w', SPHINX_WARN_LOG, + '-t', target, + '-D', 'idf_target={}'.format(target), + '-D', 'docs_to_build={}'.format(','. 
join(input_docs)), src_dir, os.path.join(build_dir, buildername) # build directory ] saved_cwd = os.getcwd() os.chdir(build_dir) # also run sphinx in the build directory - print("Running '%s'" % (" ".join(args))) + print("Running '%s'" % (' '.join(args))) ret = 1 try: @@ -282,7 +284,7 @@ def call_build_docs(entry): # Build PDF from tex if 'latex' in builders: - latex_dir = os.path.join(build_dir, "latex") + latex_dir = os.path.join(build_dir, 'latex') ret = build_pdf(language, target, latex_dir) return ret @@ -294,9 +296,9 @@ def build_pdf(language, target, latex_dir): # wrap stdout & stderr in a way that lets us see which build_docs instance they come from # # this doesn't apply to subprocesses, they write to OS stdout & stderr so no prefix appears - prefix = "%s/%s: " % (language, target) + prefix = '%s/%s: ' % (language, target) - print("Building PDF in latex_dir: %s" % (latex_dir)) + print('Building PDF in latex_dir: %s' % (latex_dir)) saved_cwd = os.getcwd() os.chdir(latex_dir) @@ -337,8 +339,8 @@ def build_pdf(language, target, latex_dir): return ret -SANITIZE_FILENAME_REGEX = re.compile("[^:]*/([^/:]*)(:.*)") -SANITIZE_LINENUM_REGEX = re.compile("([^:]*)(:[0-9]+:)(.*)") +SANITIZE_FILENAME_REGEX = re.compile('[^:]*/([^/:]*)(:.*)') +SANITIZE_LINENUM_REGEX = re.compile('([^:]*)(:[0-9]+:)(.*)') def sanitize_line(line): @@ -376,12 +378,12 @@ def check_docs(language, target, log_file, known_warnings_file, out_sanitized_lo for known_line in k: known_messages.append(known_line) - if "doxygen" in known_warnings_file: + if 'doxygen' in known_warnings_file: # Clean a known Doxygen limitation: it's expected to always document anonymous # structs/unions but we don't do this in our docs, so filter these all out with a regex # (this won't match any named field, only anonymous members - # ie the last part of the field is is just ::@NUM not ::name) - RE_ANONYMOUS_FIELD = re.compile(r".+:line: warning: parameters of member [^:\s]+(::[^:\s]+)*(::@\d+)+ are not \(all\) documented") + RE_ANONYMOUS_FIELD = re.compile(r'.+:line: warning: parameters of member [^:\s]+(::[^:\s]+)*(::@\d+)+ are not \(all\) documented') all_messages = [msg for msg in all_messages if not re.match(RE_ANONYMOUS_FIELD, msg.sanitized_text)] # Collect all new messages that are not match with the known messages. 
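SANITIZE_FILENAME_REGEX and SANITIZE_LINENUM_REGEX, converted to single quotes above, are what keep the warning comparison stable: check_docs() strips directory prefixes and line numbers from each logged warning before diffing it against the known-warnings file. The body of sanitize_line() is outside this patch; a plausible sketch of how it applies the two patterns (the ':line:' placeholder is an assumption here, though it is consistent with the RE_ANONYMOUS_FIELD pattern above, which expects known warnings of the form '.+:line: warning: ...'):

    import re

    SANITIZE_FILENAME_REGEX = re.compile('[^:]*/([^/:]*)(:.*)')
    SANITIZE_LINENUM_REGEX = re.compile('([^:]*)(:[0-9]+:)(.*)')

    def sanitize_line(line):
        # '/abs/path/docs/en/api.rst:123: WARNING: ...' becomes
        # 'api.rst:line: WARNING: ...', so logs compare cleanly across
        # machines and rebuilds.
        m = SANITIZE_FILENAME_REGEX.match(line)
        if m:
            line = m.group(1) + m.group(2)              # drop the directory prefix
        m = SANITIZE_LINENUM_REGEX.match(line)
        if m:
            line = m.group(1) + ':line:' + m.group(3)   # neutralize the line number
        return line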
@@ -395,17 +397,17 @@ def check_docs(language, target, log_file, known_warnings_file, out_sanitized_lo new_messages.append(msg) if new_messages: - print("\n%s/%s: Build failed due to new/different warnings (%s):\n" % (language, target, log_file)) + print('\n%s/%s: Build failed due to new/different warnings (%s):\n' % (language, target, log_file)) for msg in new_messages: - print("%s/%s: %s" % (language, target, msg.original_text), end='') - print("\n%s/%s: (Check files %s and %s for full details.)" % (language, target, known_warnings_file, log_file)) + print('%s/%s: %s' % (language, target, msg.original_text), end='') + print('\n%s/%s: (Check files %s and %s for full details.)' % (language, target, known_warnings_file, log_file)) return 1 return 0 def action_linkcheck(args): - args.builders = "linkcheck" + args.builders = 'linkcheck' return parallel_call(args, call_linkcheck) @@ -416,49 +418,49 @@ def call_linkcheck(entry): # https://github.com/espressif/esp-idf/tree/ # https://github.com/espressif/esp-idf/blob/ # https://github.com/espressif/esp-idf/raw/ -GH_LINK_RE = r"https://github.com/espressif/esp-idf/(?:tree|blob|raw)/[^\s]+" +GH_LINK_RE = r'https://github.com/espressif/esp-idf/(?:tree|blob|raw)/[^\s]+' # we allow this one doc, because we always want users to see the latest support policy -GH_LINK_ALLOWED = ["https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY.md", - "https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY_CN.md"] +GH_LINK_ALLOWED = ['https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY.md', + 'https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY_CN.md'] def action_gh_linkcheck(args): - print("Checking for hardcoded GitHub links\n") + print('Checking for hardcoded GitHub links\n') github_links = [] docs_dir = os.path.relpath(os.path.dirname(__file__)) for root, _, files in os.walk(docs_dir): - if "_build" in root: + if '_build' in root: continue - files = [os.path.join(root, f) for f in files if f.endswith(".rst")] + files = [os.path.join(root, f) for f in files if f.endswith('.rst')] for path in files: - with open(path, "r") as f: + with open(path, 'r') as f: for link in re.findall(GH_LINK_RE, f.read()): if link not in GH_LINK_ALLOWED: github_links.append((path, link)) if github_links: for path, link in github_links: - print("%s: %s" % (path, link)) - print("WARNING: Some .rst files contain hardcoded Github links.") - print("Please check above output and replace links with one of the following:") - print("- :idf:`dir` - points to directory inside ESP-IDF") - print("- :idf_file:`file` - points to file inside ESP-IDF") - print("- :idf_raw:`file` - points to raw view of the file inside ESP-IDF") - print("- :component:`dir` - points to directory inside ESP-IDF components dir") - print("- :component_file:`file` - points to file inside ESP-IDF components dir") - print("- :component_raw:`file` - points to raw view of the file inside ESP-IDF components dir") - print("- :example:`dir` - points to directory inside ESP-IDF examples dir") - print("- :example_file:`file` - points to file inside ESP-IDF examples dir") - print("- :example_raw:`file` - points to raw view of the file inside ESP-IDF examples dir") - print("These link types will point to the correct GitHub version automatically") + print('%s: %s' % (path, link)) + print('WARNING: Some .rst files contain hardcoded Github links.') + print('Please check above output and replace links with one of the following:') + print('- :idf:`dir` - points to directory inside ESP-IDF') + 
print('- :idf_file:`file` - points to file inside ESP-IDF') + print('- :idf_raw:`file` - points to raw view of the file inside ESP-IDF') + print('- :component:`dir` - points to directory inside ESP-IDF components dir') + print('- :component_file:`file` - points to file inside ESP-IDF components dir') + print('- :component_raw:`file` - points to raw view of the file inside ESP-IDF components dir') + print('- :example:`dir` - points to directory inside ESP-IDF examples dir') + print('- :example_file:`file` - points to file inside ESP-IDF examples dir') + print('- :example_raw:`file` - points to raw view of the file inside ESP-IDF examples dir') + print('These link types will point to the correct GitHub version automatically') return 1 else: - print("No hardcoded links found") + print('No hardcoded links found') return 0 -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/docs/conf_common.py b/docs/conf_common.py index 88f9e179f4..e41d24a326 100644 --- a/docs/conf_common.py +++ b/docs/conf_common.py @@ -14,17 +14,17 @@ # All configuration values have a default; values that are commented out # serve to show the default. -from __future__ import print_function -from __future__ import unicode_literals -import sys +from __future__ import print_function, unicode_literals + import os import os.path import re import subprocess -from sanitize_version import sanitize_version -from idf_extensions.util import download_file_if_missing -from get_github_rev import get_github_rev +import sys +from get_github_rev import get_github_rev +from idf_extensions.util import download_file_if_missing +from sanitize_version import sanitize_version # build_docs on the CI server sometimes fails under Python3. This is a workaround: sys.setrecursionlimit(3500) @@ -242,7 +242,7 @@ versions_url = 'https://dl.espressif.com/dl/esp-idf/idf_versions.js' idf_targets = ['esp32', 'esp32s2'] languages = ['en', 'zh_CN'] -project_homepage = "https://github.com/espressif/esp-idf" +project_homepage = 'https://github.com/espressif/esp-idf' # -- Options for HTML output ---------------------------------------------- @@ -250,11 +250,11 @@ project_homepage = "https://github.com/espressif/esp-idf" # # Redirects should be listed in page_redirects.xt # -with open("../page_redirects.txt") as f: - lines = [re.sub(" +", " ", line.strip()) for line in f.readlines() if line.strip() != "" and not line.startswith("#")] +with open('../page_redirects.txt') as f: + lines = [re.sub(' +', ' ', line.strip()) for line in f.readlines() if line.strip() != '' and not line.startswith('#')] for line in lines: # check for well-formed entries if len(line.split(' ')) != 2: - raise RuntimeError("Invalid line in page_redirects.txt: %s" % line) + raise RuntimeError('Invalid line in page_redirects.txt: %s' % line) html_redirect_pages = [tuple(line.split(' ')) for line in lines] # The theme to use for HTML and HTML Help pages. 
See the documentation for @@ -264,10 +264,10 @@ html_theme = 'sphinx_idf_theme' # context used by sphinx_idf_theme html_context = { - "display_github": True, # Add 'Edit on Github' link instead of 'View page source' - "github_user": "espressif", - "github_repo": "esp-idf", - "github_version": get_github_rev(), + 'display_github': True, # Add 'Edit on Github' link instead of 'View page source' + 'github_user': 'espressif', + 'github_repo': 'esp-idf', + 'github_version': get_github_rev(), } # Theme options are theme-specific and customize the look and feel of a theme @@ -287,7 +287,7 @@ html_context = { # The name of an image file (relative to this directory) to place at the top # of the sidebar. -html_logo = "../_static/espressif-logo.svg" +html_logo = '../_static/espressif-logo.svg' # The name of an image file (within the static path) to use as favicon of the @@ -380,7 +380,7 @@ latex_elements = { # The name of an image file (relative to this directory) to place at the bottom of # the title page. -latex_logo = "../_static/espressif2.pdf" +latex_logo = '../_static/espressif2.pdf' latex_engine = 'xelatex' latex_use_xindy = False @@ -427,7 +427,7 @@ def setup(app): app.add_stylesheet('theme_overrides.css') # these two must be pushed in by build_docs.py - if "idf_target" not in app.config: + if 'idf_target' not in app.config: app.add_config_value('idf_target', None, 'env') app.add_config_value('idf_targets', None, 'env') @@ -436,8 +436,8 @@ def setup(app): # Breathe extension variables (depend on build_dir) # note: we generate into xml_in and then copy_if_modified to xml dir - app.config.breathe_projects = {"esp32-idf": os.path.join(app.config.build_dir, "xml_in/")} - app.config.breathe_default_project = "esp32-idf" + app.config.breathe_projects = {'esp32-idf': os.path.join(app.config.build_dir, 'xml_in/')} + app.config.breathe_default_project = 'esp32-idf' setup_diag_font(app) @@ -455,13 +455,13 @@ def setup_config_values(app, config): app.add_config_value('idf_target_title_dict', idf_target_title_dict, 'env') - pdf_name = "esp-idf-{}-{}-{}".format(app.config.language, app.config.version, app.config.idf_target) + pdf_name = 'esp-idf-{}-{}-{}'.format(app.config.language, app.config.version, app.config.idf_target) app.add_config_value('pdf_file', pdf_name, 'env') def setup_html_context(app, config): # Setup path for 'edit on github'-link - config.html_context['conf_py_path'] = "/docs/{}/".format(app.config.language) + config.html_context['conf_py_path'] = '/docs/{}/'.format(app.config.language) def setup_diag_font(app): @@ -476,7 +476,7 @@ def setup_diag_font(app): font_dir = os.path.join(config_dir, '_static') assert os.path.exists(font_dir) - print("Downloading font file %s for %s" % (font_name, app.config.language)) + print('Downloading font file %s for %s' % (font_name, app.config.language)) download_file_if_missing('https://dl.espressif.com/dl/esp-idf/docs/_static/{}'.format(font_name), font_dir) font_path = os.path.abspath(os.path.join(font_dir, font_name)) diff --git a/docs/en/conf.py b/docs/en/conf.py index cb77736dd3..ecaf1ab66e 100644 --- a/docs/en/conf.py +++ b/docs/en/conf.py @@ -9,8 +9,8 @@ try: from conf_common import * # noqa: F403,F401 except ImportError: - import sys import os + import sys sys.path.insert(0, os.path.abspath('..')) from conf_common import * # noqa: F403,F401 diff --git a/docs/extensions/html_redirects.py b/docs/extensions/html_redirects.py index 5019e84a61..1614c4372b 100644 --- a/docs/extensions/html_redirects.py +++ b/docs/extensions/html_redirects.py @@ 
-53,22 +53,22 @@ def create_redirect_pages(app): return # only relevant for standalone HTML output for (old_url, new_url) in app.config.html_redirect_pages: - print("Creating redirect %s to %s..." % (old_url, new_url)) + print('Creating redirect %s to %s...' % (old_url, new_url)) if old_url.startswith('/'): - print("Stripping leading / from URL in config file...") + print('Stripping leading / from URL in config file...') old_url = old_url[1:] new_url = app.builder.get_relative_uri(old_url, new_url) out_file = app.builder.get_outfilename(old_url) - print("HTML file %s redirects to relative URL %s" % (out_file, new_url)) + print('HTML file %s redirects to relative URL %s' % (out_file, new_url)) out_dir = os.path.dirname(out_file) if not os.path.exists(out_dir): os.makedirs(out_dir) - content = REDIRECT_TEMPLATE.replace("$NEWURL", new_url) + content = REDIRECT_TEMPLATE.replace('$NEWURL', new_url) - with open(out_file, "w") as rp: + with open(out_file, 'w') as rp: rp.write(content) return [] diff --git a/docs/extensions/list_filter.py b/docs/extensions/list_filter.py index 896ccfc3d2..e1a81b328c 100644 --- a/docs/extensions/list_filter.py +++ b/docs/extensions/list_filter.py @@ -1,4 +1,5 @@ import re + from docutils import nodes from docutils.parsers.rst import Directive diff --git a/docs/extensions/toctree_filter.py b/docs/extensions/toctree_filter.py index 8cdb13cdbf..8f0a455d89 100644 --- a/docs/extensions/toctree_filter.py +++ b/docs/extensions/toctree_filter.py @@ -1,5 +1,6 @@ # Based on https://stackoverflow.com/a/46600038 with some modifications import re + from sphinx.directives.other import TocTree diff --git a/docs/generate_chart.py b/docs/generate_chart.py index 6d855b53cd..146f3049f1 100644 --- a/docs/generate_chart.py +++ b/docs/generate_chart.py @@ -18,14 +18,14 @@ import argparse import datetime as dt import json -import numpy as np -import requests import matplotlib.dates import matplotlib.patches as mpatches import matplotlib.pyplot as plt -from matplotlib.dates import MONTHLY, DateFormatter, RRuleLocator, rrulewrapper +import numpy as np +import requests from dateutil import parser from dateutil.relativedelta import relativedelta +from matplotlib.dates import MONTHLY, DateFormatter, RRuleLocator, rrulewrapper class Version(object): @@ -68,18 +68,18 @@ class ChartVersions(object): def get_releases_as_json(self): return { x.version_name: { - "start_date": x.get_start_date().strftime("%Y-%m-%d"), - "end_service": x.get_end_service_date().strftime("%Y-%m-%d"), - "end_date": x.get_end_of_life_date().strftime("%Y-%m-%d") + 'start_date': x.get_start_date().strftime('%Y-%m-%d'), + 'end_service': x.get_end_service_date().strftime('%Y-%m-%d'), + 'end_date': x.get_end_of_life_date().strftime('%Y-%m-%d') } for x in self.sorted_releases_supported } @staticmethod def parse_chart_releases_from_js(js_as_string): - return json.loads(js_as_string[js_as_string.find("RELEASES: ") + len("RELEASES: "):js_as_string.rfind("};")]) + return json.loads(js_as_string[js_as_string.find('RELEASES: ') + len('RELEASES: '):js_as_string.rfind('};')]) def _get_all_version_from_url(self, url=None, filename=None): - releases_file = requests.get(url).text if url is not None else "".join(open(filename).readlines()) + releases_file = requests.get(url).text if url is not None else ''.join(open(filename).readlines()) return self.parse_chart_releases_from_js(releases_file) def _get_releases_from_url(self, url=None, filename=None): @@ -178,7 +178,7 @@ class ChartVersions(object): rule = rrulewrapper(MONTHLY, 
interval=x_ax_interval) loc = RRuleLocator(rule) - formatter = DateFormatter("%b %Y") + formatter = DateFormatter('%b %Y') ax.xaxis.set_major_locator(loc) ax.xaxis.set_major_formatter(formatter) @@ -198,19 +198,19 @@ class ChartVersions(object): bbox_to_anchor=(1.01, 1.165), loc='upper right') fig.set_size_inches(11, 5, forward=True) plt.savefig(output_chart_name + output_chart_extension, bbox_inches='tight') - print("Saved into " + output_chart_name + output_chart_extension) + print('Saved into ' + output_chart_name + output_chart_extension) if __name__ == '__main__': arg_parser = argparse.ArgumentParser( - description="Create chart of version support. Set the url or filename with versions." - "If you set both filename and url the script will prefer filename.") - arg_parser.add_argument("--url", metavar="URL", default="https://dl.espressif.com/dl/esp-idf/idf_versions.js") - arg_parser.add_argument("--filename", - help="Set the name of the source file, if is set, the script ignores the url.") - arg_parser.add_argument("--output-format", help="Set the output format of the image.", default="svg") - arg_parser.add_argument("--output-file", help="Set the name of the output file.", default="docs/chart") + description='Create chart of version support. Set the url or filename with versions.' + 'If you set both filename and url the script will prefer filename.') + arg_parser.add_argument('--url', metavar='URL', default='https://dl.espressif.com/dl/esp-idf/idf_versions.js') + arg_parser.add_argument('--filename', + help='Set the name of the source file, if is set, the script ignores the url.') + arg_parser.add_argument('--output-format', help='Set the output format of the image.', default='svg') + arg_parser.add_argument('--output-file', help='Set the name of the output file.', default='docs/chart') args = arg_parser.parse_args() ChartVersions(url=args.url if args.filename is None else None, filename=args.filename).create_chart( - output_chart_extension="." + args.output_format.lower()[-3:], output_chart_name=args.output_file) + output_chart_extension='.' + args.output_format.lower()[-3:], output_chart_name=args.output_file) diff --git a/docs/idf_extensions/build_system/__init__.py b/docs/idf_extensions/build_system/__init__.py index 9332a0f10e..dbad02bc8d 100644 --- a/docs/idf_extensions/build_system/__init__.py +++ b/docs/idf_extensions/build_system/__init__.py @@ -6,11 +6,11 @@ # # Then emits the new 'idf-info' event which has information read from IDF # build system, that other extensions can use to generate relevant data. 
+import json import os.path import shutil -import sys import subprocess -import json +import sys # this directory also contains the dummy IDF project project_path = os.path.abspath(os.path.dirname(__file__)) @@ -23,7 +23,7 @@ def setup(app): # Setup some common paths try: - build_dir = os.environ["BUILDDIR"] # TODO see if we can remove this + build_dir = os.environ['BUILDDIR'] # TODO see if we can remove this except KeyError: build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep)) @@ -43,7 +43,7 @@ def setup(app): except KeyError: idf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..')) - app.add_config_value('docs_root', os.path.join(idf_path, "docs"), 'env') + app.add_config_value('docs_root', os.path.join(idf_path, 'docs'), 'env') app.add_config_value('idf_path', idf_path, 'env') app.add_config_value('build_dir', build_dir, 'env') # not actually an IDF thing app.add_event('idf-info') @@ -55,43 +55,43 @@ def setup(app): def generate_idf_info(app, config): - print("Running CMake on dummy project to get build info...") + print('Running CMake on dummy project to get build info...') build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep)) - cmake_build_dir = os.path.join(build_dir, "build_dummy_project") - idf_py_path = os.path.join(app.config.idf_path, "tools", "idf.py") - print("Running idf.py...") + cmake_build_dir = os.path.join(build_dir, 'build_dummy_project') + idf_py_path = os.path.join(app.config.idf_path, 'tools', 'idf.py') + print('Running idf.py...') idf_py = [sys.executable, idf_py_path, - "-B", + '-B', cmake_build_dir, - "-C", + '-C', project_path, - "-D", - "SDKCONFIG={}".format(os.path.join(build_dir, "dummy_project_sdkconfig")) + '-D', + 'SDKCONFIG={}'.format(os.path.join(build_dir, 'dummy_project_sdkconfig')) ] # force a clean idf.py build w/ new sdkconfig each time # (not much slower than 'reconfigure', avoids any potential config & build versioning problems shutil.rmtree(cmake_build_dir, ignore_errors=True) - print("Starting new dummy IDF project... ") + print('Starting new dummy IDF project... ') if (app.config.idf_target in PREVIEW_TARGETS): - subprocess.check_call(idf_py + ["--preview", "set-target", app.config.idf_target]) + subprocess.check_call(idf_py + ['--preview', 'set-target', app.config.idf_target]) else: - subprocess.check_call(idf_py + ["set-target", app.config.idf_target]) + subprocess.check_call(idf_py + ['set-target', app.config.idf_target]) - print("Running CMake on dummy project...") - subprocess.check_call(idf_py + ["reconfigure"]) + print('Running CMake on dummy project...') + subprocess.check_call(idf_py + ['reconfigure']) - with open(os.path.join(cmake_build_dir, "project_description.json")) as f: + with open(os.path.join(cmake_build_dir, 'project_description.json')) as f: project_description = json.load(f) - if project_description["target"] != app.config.idf_target: + if project_description['target'] != app.config.idf_target: # this shouldn't really happen unless someone has been moving around directories inside _build, as # the cmake_build_dir path should be target-specific - raise RuntimeError(("Error configuring the dummy IDF project for {}. " + - "Target in project description is {}. " + - "Is build directory contents corrupt?") - .format(app.config.idf_target, project_description["target"])) + raise RuntimeError(('Error configuring the dummy IDF project for {}. ' + + 'Target in project description is {}. 
' + + 'Is build directory contents corrupt?') + .format(app.config.idf_target, project_description['target'])) app.emit('idf-info', project_description) return [] diff --git a/docs/idf_extensions/esp_err_definitions.py b/docs/idf_extensions/esp_err_definitions.py index f8a80b2c51..ecf9719300 100644 --- a/docs/idf_extensions/esp_err_definitions.py +++ b/docs/idf_extensions/esp_err_definitions.py @@ -1,5 +1,5 @@ # Extension to generate esp_err definition as .rst -from .util import copy_if_modified, call_with_python +from .util import call_with_python, copy_if_modified def setup(app): diff --git a/docs/idf_extensions/exclude_docs.py b/docs/idf_extensions/exclude_docs.py index 769e9e5400..d06721377f 100644 --- a/docs/idf_extensions/exclude_docs.py +++ b/docs/idf_extensions/exclude_docs.py @@ -35,8 +35,8 @@ def build_subset(app, config): # Get all docs that will be built docs = [filename for filename in get_matching_files(app.srcdir, compile_matchers(exclude_docs))] if not docs: - raise ValueError("No documents to build") - print("Building a subset of the documents: {}".format(docs)) + raise ValueError('No documents to build') + print('Building a subset of the documents: {}'.format(docs)) # Sphinx requires a master document, if there is a document name 'index' then we pick that index_docs = [doc for doc in docs if 'index' in doc] diff --git a/docs/idf_extensions/format_idf_target.py b/docs/idf_extensions/format_idf_target.py index d96f7804ec..b6422ecba1 100644 --- a/docs/idf_extensions/format_idf_target.py +++ b/docs/idf_extensions/format_idf_target.py @@ -1,9 +1,10 @@ -import re import os import os.path +import re + from docutils import io, nodes, statemachine, utils -from docutils.utils.error_reporting import SafeString, ErrorString from docutils.parsers.rst import directives +from docutils.utils.error_reporting import ErrorString, SafeString from sphinx.directives.other import Include as BaseInclude from sphinx.util import logging @@ -73,26 +74,26 @@ class StringSubstituter: def init_sub_strings(self, config): self.target_name = config.idf_target - self.add_pair("{IDF_TARGET_NAME}", self.TARGET_NAMES[config.idf_target]) - self.add_pair("{IDF_TARGET_PATH_NAME}", config.idf_target) - self.add_pair("{IDF_TARGET_TOOLCHAIN_NAME}", self.TOOLCHAIN_NAMES[config.idf_target]) - self.add_pair("{IDF_TARGET_CFG_PREFIX}", self.CONFIG_PREFIX[config.idf_target]) - self.add_pair("{IDF_TARGET_TRM_EN_URL}", self.TRM_EN_URL[config.idf_target]) - self.add_pair("{IDF_TARGET_TRM_CN_URL}", self.TRM_CN_URL[config.idf_target]) + self.add_pair('{IDF_TARGET_NAME}', self.TARGET_NAMES[config.idf_target]) + self.add_pair('{IDF_TARGET_PATH_NAME}', config.idf_target) + self.add_pair('{IDF_TARGET_TOOLCHAIN_NAME}', self.TOOLCHAIN_NAMES[config.idf_target]) + self.add_pair('{IDF_TARGET_CFG_PREFIX}', self.CONFIG_PREFIX[config.idf_target]) + self.add_pair('{IDF_TARGET_TRM_EN_URL}', self.TRM_EN_URL[config.idf_target]) + self.add_pair('{IDF_TARGET_TRM_CN_URL}', self.TRM_CN_URL[config.idf_target]) def add_local_subs(self, matches): for sub_def in matches: if len(sub_def) != 2: - raise ValueError("IDF_TARGET_X substitution define invalid, val={}".format(sub_def)) + raise ValueError('IDF_TARGET_X substitution define invalid, val={}'.format(sub_def)) - tag = "{" + "IDF_TARGET_{}".format(sub_def[0]) + "}" + tag = '{' + 'IDF_TARGET_{}'.format(sub_def[0]) + '}' match_default = re.match(r'^\s*default(\s*)=(\s*)\"(.*?)\"', sub_def[1]) if match_default is None: # There should always be a default value - raise ValueError("No default value in 
IDF_TARGET_X substitution define, val={}".format(sub_def)) + raise ValueError('No default value in IDF_TARGET_X substitution define, val={}'.format(sub_def)) match_target = re.match(r'^.*{}(\s*)=(\s*)\"(.*?)\"'.format(self.target_name), sub_def[1]) diff --git a/docs/idf_extensions/gen_defines.py b/docs/idf_extensions/gen_defines.py index c067ce5a7d..5962da2c4a 100644 --- a/docs/idf_extensions/gen_defines.py +++ b/docs/idf_extensions/gen_defines.py @@ -8,35 +8,35 @@ import glob import os import pprint -import subprocess import re +import subprocess def generate_defines(app, project_description): - sdk_config_path = os.path.join(project_description["build_dir"], "config") + sdk_config_path = os.path.join(project_description['build_dir'], 'config') # Parse kconfig macros to pass into doxygen # # TODO: this should use the set of "config which can't be changed" eventually, # not the header - defines = get_defines(os.path.join(project_description["build_dir"], - "config", "sdkconfig.h"), sdk_config_path) + defines = get_defines(os.path.join(project_description['build_dir'], + 'config', 'sdkconfig.h'), sdk_config_path) # Add all SOC _caps.h headers and kconfig macros to the defines # # kind of a hack, be nicer to add a component info dict in project_description.json - soc_path = [p for p in project_description["build_component_paths"] if p.endswith("/soc")][0] - soc_headers = glob.glob(os.path.join(soc_path, project_description["target"], - "include", "soc", "*_caps.h")) + soc_path = [p for p in project_description['build_component_paths'] if p.endswith('/soc')][0] + soc_headers = glob.glob(os.path.join(soc_path, project_description['target'], + 'include', 'soc', '*_caps.h')) assert len(soc_headers) > 0 for soc_header in soc_headers: defines.update(get_defines(soc_header, sdk_config_path)) # write a list of definitions to make debugging easier - with open(os.path.join(app.config.build_dir, "macro-definitions.txt"), "w") as f: + with open(os.path.join(app.config.build_dir, 'macro-definitions.txt'), 'w') as f: pprint.pprint(defines, f) - print("Saved macro list to %s" % f.name) + print('Saved macro list to %s' % f.name) add_tags(app, defines) @@ -48,19 +48,19 @@ def get_defines(header_path, sdk_config_path): # Note: we run C preprocessor here without any -I arguments (except "sdkconfig.h"), so assumption is # that these headers are all self-contained and don't include any other headers # not in the same directory - print("Reading macros from %s..." % (header_path)) - processed_output = subprocess.check_output(["xtensa-esp32-elf-gcc", "-I", sdk_config_path, - "-dM", "-E", header_path]).decode() - for line in processed_output.split("\n"): + print('Reading macros from %s...' 
% (header_path)) + processed_output = subprocess.check_output(['xtensa-esp32-elf-gcc', '-I', sdk_config_path, + '-dM', '-E', header_path]).decode() + for line in processed_output.split('\n'): line = line.strip() - m = re.search("#define ([^ ]+) ?(.*)", line) + m = re.search('#define ([^ ]+) ?(.*)', line) if m: name = m.group(1) value = m.group(2) - if name.startswith("_"): + if name.startswith('_'): continue # toolchain macro - if (" " in value) or ("=" in value): - value = "" # macros that expand to multiple tokens (ie function macros) cause doxygen errors, so just mark as 'defined' + if (' ' in value) or ('=' in value): + value = '' # macros that expand to multiple tokens (ie function macros) cause doxygen errors, so just mark as 'defined' defines[name] = value return defines @@ -70,7 +70,7 @@ def add_tags(app, defines): # try to parse define values as ints and add to tags for name, value in defines.items(): try: - define_value = int(value.strip("()")) + define_value = int(value.strip('()')) if define_value > 0: app.tags.add(name) except ValueError: diff --git a/docs/idf_extensions/gen_idf_tools_links.py b/docs/idf_extensions/gen_idf_tools_links.py index 126efe63c5..c999d84e66 100644 --- a/docs/idf_extensions/gen_idf_tools_links.py +++ b/docs/idf_extensions/gen_idf_tools_links.py @@ -1,7 +1,9 @@ # Generate toolchain download links from toolchain info makefile from __future__ import print_function + import os.path -from .util import copy_if_modified, call_with_python + +from .util import call_with_python, copy_if_modified def setup(app): @@ -12,9 +14,9 @@ def setup(app): def generate_idf_tools_links(app, project_description): - print("Generating IDF Tools list") - os.environ["IDF_MAINTAINER"] = "1" + print('Generating IDF Tools list') + os.environ['IDF_MAINTAINER'] = '1' tools_rst = os.path.join(app.config.build_dir, 'inc', 'idf-tools-inc.rst') tools_rst_tmp = os.path.join(app.config.build_dir, 'idf-tools-inc.rst') - call_with_python("{}/tools/idf_tools.py gen-doc --output {}".format(app.config.idf_path, tools_rst_tmp)) + call_with_python('{}/tools/idf_tools.py gen-doc --output {}'.format(app.config.idf_path, tools_rst_tmp)) copy_if_modified(tools_rst_tmp, tools_rst) diff --git a/docs/idf_extensions/gen_toolchain_links.py b/docs/idf_extensions/gen_toolchain_links.py index 64699f9f15..cde0266b75 100644 --- a/docs/idf_extensions/gen_toolchain_links.py +++ b/docs/idf_extensions/gen_toolchain_links.py @@ -1,17 +1,19 @@ # Generate toolchain download links from toolchain info makefile from __future__ import print_function + import os.path from collections import namedtuple + from .util import copy_if_modified BASE_URL = 'https://dl.espressif.com/dl/' -PlatformInfo = namedtuple("PlatformInfo", [ - "platform_name", - "platform_archive_suffix", - "extension", - "unpack_cmd", - "unpack_code" +PlatformInfo = namedtuple('PlatformInfo', [ + 'platform_name', + 'platform_archive_suffix', + 'extension', + 'unpack_cmd', + 'unpack_code' ]) @@ -23,9 +25,9 @@ def setup(app): def generate_toolchain_download_links(app, project_description): - print("Generating toolchain download links") + print('Generating toolchain download links') toolchain_tmpdir = '{}/toolchain_inc'.format(app.config.build_dir) - toolchain_versions = os.path.join(app.config.idf_path, "tools/toolchain_versions.mk") + toolchain_versions = os.path.join(app.config.idf_path, 'tools/toolchain_versions.mk') gen_toolchain_links(toolchain_versions, toolchain_tmpdir) copy_if_modified(toolchain_tmpdir, '{}/inc'.format(app.config.build_dir)) @@ -34,11 
+36,11 @@ def gen_toolchain_links(versions_file, out_dir): version_vars = {} with open(versions_file) as f: for line in f: - name, var = line.partition("=")[::2] + name, var = line.partition('=')[::2] version_vars[name.strip()] = var.strip() - gcc_version = version_vars["CURRENT_TOOLCHAIN_GCC_VERSION"] - toolchain_desc = version_vars["CURRENT_TOOLCHAIN_COMMIT_DESC_SHORT"] + gcc_version = version_vars['CURRENT_TOOLCHAIN_GCC_VERSION'] + toolchain_desc = version_vars['CURRENT_TOOLCHAIN_COMMIT_DESC_SHORT'] unpack_code_linux_macos = """ :: @@ -59,10 +61,10 @@ def gen_toolchain_links(versions_file, out_dir): """ platform_info = [ - PlatformInfo("linux64", "linux-amd64", "tar.gz", "z", unpack_code_linux_macos), - PlatformInfo("linux32", "linux-i686","tar.gz", "z", unpack_code_linux_macos), - PlatformInfo("osx", "macos", "tar.gz", "z", unpack_code_linux_macos), - PlatformInfo("win32", "win32", "zip", None, None) + PlatformInfo('linux64', 'linux-amd64', 'tar.gz', 'z', unpack_code_linux_macos), + PlatformInfo('linux32', 'linux-i686','tar.gz', 'z', unpack_code_linux_macos), + PlatformInfo('osx', 'macos', 'tar.gz', 'z', unpack_code_linux_macos), + PlatformInfo('win32', 'win32', 'zip', None, None) ] try: @@ -70,7 +72,7 @@ def gen_toolchain_links(versions_file, out_dir): except OSError: pass - with open(os.path.join(out_dir, 'download-links.inc'), "w") as links_file: + with open(os.path.join(out_dir, 'download-links.inc'), 'w') as links_file: for p in platform_info: archive_name = 'xtensa-esp32-elf-gcc{}-{}-{}.{}'.format( gcc_version.replace('.', '_'), toolchain_desc, p.platform_archive_suffix, p.extension) @@ -79,8 +81,8 @@ def gen_toolchain_links(versions_file, out_dir): p.platform_name, BASE_URL, archive_name), file=links_file) if p.unpack_code is not None: - with open(os.path.join(out_dir, 'unpack-code-%s.inc' % p.platform_name), "w") as f: + with open(os.path.join(out_dir, 'unpack-code-%s.inc' % p.platform_name), 'w') as f: print(p.unpack_code.format(p.unpack_cmd, archive_name), file=f) - with open(os.path.join(out_dir, 'scratch-build-code.inc'), "w") as code_file: + with open(os.path.join(out_dir, 'scratch-build-code.inc'), 'w') as code_file: print(scratch_build_code_linux_macos.format(toolchain_desc), file=code_file) diff --git a/docs/idf_extensions/gen_version_specific_includes.py b/docs/idf_extensions/gen_version_specific_includes.py index 0707d8344e..214d4ba552 100755 --- a/docs/idf_extensions/gen_version_specific_includes.py +++ b/docs/idf_extensions/gen_version_specific_includes.py @@ -4,17 +4,18 @@ # Sphinx extension to generate ReSTructured Text .inc snippets # with version-based content for this IDF version -from __future__ import print_function -from __future__ import unicode_literals -from io import open -from .util import copy_if_modified -import subprocess +from __future__ import print_function, unicode_literals + import os import re +import subprocess +from io import open + +from .util import copy_if_modified TEMPLATES = { - "en": { - "git-clone-bash": """ + 'en': { + 'git-clone-bash': """ .. code-block:: bash mkdir -p ~/esp @@ -22,7 +23,7 @@ TEMPLATES = { git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git """, - "git-clone-windows": """ + 'git-clone-windows': """ .. code-block:: batch mkdir %%userprofile%%\\esp @@ -30,8 +31,8 @@ TEMPLATES = { git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git """, - "git-clone-notes": { - "template": """ + 'git-clone-notes': { + 'template': """ .. 
note:: %(extra_note)s @@ -40,35 +41,35 @@ TEMPLATES = { %(zipfile_note)s """, - "master": 'This command will clone the master branch, which has the latest development ("bleeding edge") ' + 'master': 'This command will clone the master branch, which has the latest development ("bleeding edge") ' 'version of ESP-IDF. It is fully functional and updated on a weekly basis with the most recent features and bugfixes.', - "branch": 'The ``git clone`` option ``-b %(clone_arg)s`` tells git to clone the %(ver_type)s in the ESP-IDF repository ``git clone`` ' + 'branch': 'The ``git clone`` option ``-b %(clone_arg)s`` tells git to clone the %(ver_type)s in the ESP-IDF repository ' 'corresponding to this version of the documentation.', - "zipfile": { - "stable": 'As a fallback, it is also possible to download a zip file of this stable release from the `Releases page`_. ' + 'zipfile': { + 'stable': 'As a fallback, it is also possible to download a zip file of this stable release from the `Releases page`_. ' 'Do not download the "Source code" zip file(s) generated automatically by GitHub, they do not work with ESP-IDF.', - "unstable": 'GitHub\'s "Download zip file" feature does not work with ESP-IDF, a ``git clone`` is required. As a fallback, ' + 'unstable': 'GitHub\'s "Download zip file" feature does not work with ESP-IDF, a ``git clone`` is required. As a fallback, ' '`Stable version`_ can be installed without Git.' }, # zipfile }, # git-clone-notes - "version-note": { - "master": """ + 'version-note': { + 'master': """ .. note:: This is documentation for the master branch (latest version) of ESP-IDF. This version is under continual development. `Stable version`_ documentation is available, as well as other :doc:`/versions`. """, - "stable": """ + 'stable': """ .. note:: This is documentation for stable version %s of ESP-IDF. Other :doc:`/versions` are also available. """, - "branch": """ + 'branch': """ .. note:: This is documentation for %s ``%s`` of ESP-IDF. Other :doc:`/versions` are also available. """ }, # version-note }, # en - "zh_CN": { - "git-clone-bash": """ + 'zh_CN': { + 'git-clone-bash': """ .. code-block:: bash mkdir -p ~/esp @@ -76,7 +77,7 @@ TEMPLATES = { git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git """, - "git-clone-windows": """ + 'git-clone-windows': """ .. code-block:: batch mkdir %%userprofile%%\\esp @@ -84,8 +85,8 @@ TEMPLATES = { git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git """, - "git-clone-notes": { - "template": """ + 'git-clone-notes': { + 'template': """ ..
note:: %(extra_note)s @@ -94,24 +95,24 @@ TEMPLATES = { %(zipfile_note)s """, - "master": '此命令将克隆 master 分支,该分支保存着 ESP-IDF 的最新版本,它功能齐全,每周都会更新一些新功能并修正一些错误。', - "branch": '``git clone`` 命令的 ``-b %(clone_arg)s`` 选项告诉 git 从 ESP-IDF 仓库中克隆与此版本的文档对应的分支。', - "zipfile": { - "stable": '作为备份,还可以从 `Releases page`_ 下载此稳定版本的 zip 文件。不要下载由 GitHub 自动生成的"源代码"的 zip 文件,它们不适用于 ESP-IDF。', - "unstable": 'GitHub 中"下载 zip 文档"的功能不适用于 ESP-IDF,所以需要使用 ``git clone`` 命令。作为备份,可以在没有安装 Git 的环境中下载 ' + 'master': '此命令将克隆 master 分支,该分支保存着 ESP-IDF 的最新版本,它功能齐全,每周都会更新一些新功能并修正一些错误。', + 'branch': '``git clone`` 命令的 ``-b %(clone_arg)s`` 选项告诉 git 从 ESP-IDF 仓库中克隆与此版本的文档对应的分支。', + 'zipfile': { + 'stable': '作为备份,还可以从 `Releases page`_ 下载此稳定版本的 zip 文件。不要下载由 GitHub 自动生成的"源代码"的 zip 文件,它们不适用于 ESP-IDF。', + 'unstable': 'GitHub 中"下载 zip 文档"的功能不适用于 ESP-IDF,所以需要使用 ``git clone`` 命令。作为备份,可以在没有安装 Git 的环境中下载 ' '`Stable version`_ 的 zip 归档文件。' }, # zipfile }, # git-clone - "version-note": { - "master": """ + 'version-note': { + 'master': """ .. note:: 这是ESP-IDF master 分支(最新版本)的文档,该版本在持续开发中。还有 `Stable version`_ 的文档,以及其他版本的文档 :doc:`/versions` 供参考。 """, - "stable": """ + 'stable': """ .. note:: 这是ESP-IDF 稳定版本 %s 的文档,还有其他版本的文档 :doc:`/versions` 供参考。 """, - "branch": """ + 'branch': """ .. note:: 这是ESP-IDF %s ``%s`` 版本的文档,还有其他版本的文档 :doc:`/versions` 供参考。 """ @@ -128,9 +129,9 @@ def setup(app): def generate_version_specific_includes(app, project_description): language = app.config.language - tmp_out_dir = os.path.join(app.config.build_dir, "version_inc") + tmp_out_dir = os.path.join(app.config.build_dir, 'version_inc') if not os.path.exists(tmp_out_dir): - print("Creating directory %s" % tmp_out_dir) + print('Creating directory %s' % tmp_out_dir) os.mkdir(tmp_out_dir) template = TEMPLATES[language] @@ -138,56 +139,56 @@ def generate_version_specific_includes(app, project_description): version, ver_type, is_stable = get_version() write_git_clone_inc_files(template, tmp_out_dir, version, ver_type, is_stable) - write_version_note(template["version-note"], tmp_out_dir, version, ver_type, is_stable) - copy_if_modified(tmp_out_dir, os.path.join(app.config.build_dir, "inc")) - print("Done") + write_version_note(template['version-note'], tmp_out_dir, version, ver_type, is_stable) + copy_if_modified(tmp_out_dir, os.path.join(app.config.build_dir, 'inc')) + print('Done') def write_git_clone_inc_files(templates, out_dir, version, ver_type, is_stable): def out_file(basename): - p = os.path.join(out_dir, "%s.inc" % basename) - print("Writing %s..." % p) + p = os.path.join(out_dir, '%s.inc' % basename) + print('Writing %s...' 
% p) return p - if version == "master": - clone_args = "" + if version == 'master': + clone_args = '' else: - clone_args = "-b %s " % version + clone_args = '-b %s ' % version - with open(out_file("git-clone-bash"), "w", encoding="utf-8") as f: - f.write(templates["git-clone-bash"] % locals()) + with open(out_file('git-clone-bash'), 'w', encoding='utf-8') as f: + f.write(templates['git-clone-bash'] % locals()) - with open(out_file("git-clone-windows"), "w", encoding="utf-8") as f: - f.write(templates["git-clone-windows"] % locals()) + with open(out_file('git-clone-windows'), 'w', encoding='utf-8') as f: + f.write(templates['git-clone-windows'] % locals()) - with open(out_file("git-clone-notes"), "w", encoding="utf-8") as f: - template = templates["git-clone-notes"] + with open(out_file('git-clone-notes'), 'w', encoding='utf-8') as f: + template = templates['git-clone-notes'] - zipfile = template["zipfile"] + zipfile = template['zipfile'] - if version == "master": - extra_note = template["master"] - zipfile_note = zipfile["unstable"] + if version == 'master': + extra_note = template['master'] + zipfile_note = zipfile['unstable'] else: - extra_note = template["branch"] % {"clone_arg": version, "ver_type": ver_type} - zipfile_note = zipfile["stable"] if is_stable else zipfile["unstable"] + extra_note = template['branch'] % {'clone_arg': version, 'ver_type': ver_type} + zipfile_note = zipfile['stable'] if is_stable else zipfile['unstable'] - f.write(template["template"] % locals()) + f.write(template['template'] % locals()) - print("Wrote git-clone-xxx.inc files") + print('Wrote git-clone-xxx.inc files') def write_version_note(template, out_dir, version, ver_type, is_stable): - if version == "master": - content = template["master"] - elif ver_type == "tag" and is_stable: - content = template["stable"] % version + if version == 'master': + content = template['master'] + elif ver_type == 'tag' and is_stable: + content = template['stable'] % version else: - content = template["branch"] % (ver_type, version) - out_file = os.path.join(out_dir, "version-note.inc") - with open(out_file, "w", encoding='utf-8') as f: + content = template['branch'] % (ver_type, version) + out_file = os.path.join(out_dir, 'version-note.inc') + with open(out_file, 'w', encoding='utf-8') as f: f.write(content) - print("%s written" % out_file) + print('%s written' % out_file) def get_version(): @@ -196,22 +197,22 @@ def get_version(): """ # Use git to look for a tag try: - tag = subprocess.check_output(["git", "describe", "--exact-match"]).strip().decode('utf-8') - is_stable = re.match(r"v[0-9\.]+$", tag) is not None - return (tag, "tag", is_stable) + tag = subprocess.check_output(['git', 'describe', '--exact-match']).strip().decode('utf-8') + is_stable = re.match(r'v[0-9\.]+$', tag) is not None + return (tag, 'tag', is_stable) except subprocess.CalledProcessError: pass # No tag, look at branch name from CI, this will give the correct branch name even if the ref for the branch we # merge into has moved forward before the pipeline runs - branch = os.environ.get("CI_COMMIT_REF_NAME", None) + branch = os.environ.get('CI_COMMIT_REF_NAME', None) if branch is not None: - return (branch, "branch", False) + return (branch, 'branch', False) # Try to find the branch name even if docs are built locally - branch = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"]).strip().decode('utf-8') - if branch != "HEAD": - return (branch, "branch", False) + branch = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 
'HEAD']).strip().decode('utf-8') + if branch != 'HEAD': + return (branch, 'branch', False) # As a last resort we return commit SHA-1, should never happen in CI/docs that should be published - return (subprocess.check_output(["git", "rev-parse", "--short", "HEAD"]).strip().decode('utf-8'), "commit", False) + return (subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode('utf-8'), 'commit', False) diff --git a/docs/idf_extensions/include_build_file.py b/docs/idf_extensions/include_build_file.py index 7472129574..c6143bdc79 100644 --- a/docs/idf_extensions/include_build_file.py +++ b/docs/idf_extensions/include_build_file.py @@ -1,4 +1,5 @@ import os.path + from docutils.parsers.rst import directives from docutils.parsers.rst.directives.misc import Include as BaseInclude from sphinx.util.docutils import SphinxDirective diff --git a/docs/idf_extensions/kconfig_reference.py b/docs/idf_extensions/kconfig_reference.py index 3ca9c79130..37ec03732f 100644 --- a/docs/idf_extensions/kconfig_reference.py +++ b/docs/idf_extensions/kconfig_reference.py @@ -1,7 +1,7 @@ # Extension to generate the KConfig reference list import os.path -import sys import subprocess +import sys from .util import copy_if_modified @@ -18,18 +18,18 @@ def generate_reference(app, project_description): build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep)) # Generate 'kconfig.inc' file from components' Kconfig files - print("Generating kconfig.inc from kconfig contents") + print('Generating kconfig.inc from kconfig contents') kconfig_inc_path = '{}/inc/kconfig.inc'.format(build_dir) temp_sdkconfig_path = '{}/sdkconfig.tmp'.format(build_dir) - kconfigs = project_description["config_environment"]["COMPONENT_KCONFIGS"].split(";") - kconfig_projbuilds = project_description["config_environment"]["COMPONENT_KCONFIGS_PROJBUILD"].split(";") + kconfigs = project_description['config_environment']['COMPONENT_KCONFIGS'].split(';') + kconfig_projbuilds = project_description['config_environment']['COMPONENT_KCONFIGS_PROJBUILD'].split(';') sdkconfig_renames = set() # TODO: this should be generated in project description as well, if possible for k in kconfigs + kconfig_projbuilds: component_dir = os.path.dirname(k) - sdkconfig_rename = os.path.join(component_dir, "sdkconfig.rename") + sdkconfig_rename = os.path.join(component_dir, 'sdkconfig.rename') if os.path.exists(sdkconfig_rename): sdkconfig_renames.add(sdkconfig_rename) @@ -37,27 +37,27 @@ def generate_reference(app, project_description): kconfig_projbuilds_source_path = '{}/inc/kconfig_projbuilds_source.in'.format(build_dir) prepare_kconfig_files_args = [sys.executable, - "{}/tools/kconfig_new/prepare_kconfig_files.py".format(app.config.idf_path), - "--env", "COMPONENT_KCONFIGS={}".format(" ".join(kconfigs)), - "--env", "COMPONENT_KCONFIGS_PROJBUILD={}".format(" ".join(kconfig_projbuilds)), - "--env", "COMPONENT_KCONFIGS_SOURCE_FILE={}".format(kconfigs_source_path), - "--env", "COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}".format(kconfig_projbuilds_source_path), + '{}/tools/kconfig_new/prepare_kconfig_files.py'.format(app.config.idf_path), + '--env', 'COMPONENT_KCONFIGS={}'.format(' '.join(kconfigs)), + '--env', 'COMPONENT_KCONFIGS_PROJBUILD={}'.format(' '.join(kconfig_projbuilds)), + '--env', 'COMPONENT_KCONFIGS_SOURCE_FILE={}'.format(kconfigs_source_path), + '--env', 'COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}'.format(kconfig_projbuilds_source_path), ] subprocess.check_call(prepare_kconfig_files_args) confgen_args = [sys.executable, - 
"{}/tools/kconfig_new/confgen.py".format(app.config.idf_path), - "--kconfig", "./Kconfig", - "--sdkconfig-rename", "./sdkconfig.rename", - "--config", temp_sdkconfig_path, - "--env", "COMPONENT_KCONFIGS={}".format(" ".join(kconfigs)), - "--env", "COMPONENT_KCONFIGS_PROJBUILD={}".format(" ".join(kconfig_projbuilds)), - "--env", "COMPONENT_SDKCONFIG_RENAMES={}".format(" ".join(sdkconfig_renames)), - "--env", "COMPONENT_KCONFIGS_SOURCE_FILE={}".format(kconfigs_source_path), - "--env", "COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}".format(kconfig_projbuilds_source_path), - "--env", "IDF_PATH={}".format(app.config.idf_path), - "--env", "IDF_TARGET={}".format(app.config.idf_target), - "--output", "docs", kconfig_inc_path + '.in' + '{}/tools/kconfig_new/confgen.py'.format(app.config.idf_path), + '--kconfig', './Kconfig', + '--sdkconfig-rename', './sdkconfig.rename', + '--config', temp_sdkconfig_path, + '--env', 'COMPONENT_KCONFIGS={}'.format(' '.join(kconfigs)), + '--env', 'COMPONENT_KCONFIGS_PROJBUILD={}'.format(' '.join(kconfig_projbuilds)), + '--env', 'COMPONENT_SDKCONFIG_RENAMES={}'.format(' '.join(sdkconfig_renames)), + '--env', 'COMPONENT_KCONFIGS_SOURCE_FILE={}'.format(kconfigs_source_path), + '--env', 'COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}'.format(kconfig_projbuilds_source_path), + '--env', 'IDF_PATH={}'.format(app.config.idf_path), + '--env', 'IDF_TARGET={}'.format(app.config.idf_target), + '--output', 'docs', kconfig_inc_path + '.in' ] subprocess.check_call(confgen_args, cwd=app.config.idf_path) copy_if_modified(kconfig_inc_path + '.in', kconfig_inc_path) diff --git a/docs/idf_extensions/latex_builder.py b/docs/idf_extensions/latex_builder.py index 10a0c2c1df..8a1f2f6454 100644 --- a/docs/idf_extensions/latex_builder.py +++ b/docs/idf_extensions/latex_builder.py @@ -1,6 +1,7 @@ -from sphinx.builders.latex import LaTeXBuilder import os +from sphinx.builders.latex import LaTeXBuilder + # Overrides the default Sphinx latex build class IdfLatexBuilder(LaTeXBuilder): @@ -26,7 +27,7 @@ class IdfLatexBuilder(LaTeXBuilder): def prepare_latex_macros(self, package_path, config): - PACKAGE_NAME = "espidf.sty" + PACKAGE_NAME = 'espidf.sty' latex_package = '' with open(package_path, 'r') as template: @@ -36,7 +37,7 @@ class IdfLatexBuilder(LaTeXBuilder): latex_package = latex_package.replace('', idf_target_title) # Release name for the PDF front page, remove '_' as this is used for subscript in Latex - idf_release_name = "Release {}".format(config.version.replace('_', '-')) + idf_release_name = 'Release {}'.format(config.version.replace('_', '-')) latex_package = latex_package.replace('', idf_release_name) with open(os.path.join(self.outdir, PACKAGE_NAME), 'w') as package_file: @@ -45,7 +46,7 @@ class IdfLatexBuilder(LaTeXBuilder): def finish(self): super().finish() - TEMPLATE_PATH = "../latex_templates/espidf.sty" + TEMPLATE_PATH = '../latex_templates/espidf.sty' self.prepare_latex_macros(os.path.join(self.confdir,TEMPLATE_PATH), self.config) diff --git a/docs/idf_extensions/link_roles.py b/docs/idf_extensions/link_roles.py index 744f6c34b2..0a8c936079 100644 --- a/docs/idf_extensions/link_roles.py +++ b/docs/idf_extensions/link_roles.py @@ -1,14 +1,15 @@ # based on http://protips.readthedocs.io/link-roles.html -from __future__ import print_function -from __future__ import unicode_literals -import re +from __future__ import print_function, unicode_literals + import os +import re import subprocess -from docutils import nodes from collections import namedtuple -from 
sphinx.transforms.post_transforms import SphinxPostTransform + +from docutils import nodes from get_github_rev import get_github_rev +from sphinx.transforms.post_transforms import SphinxPostTransform # Creates a dict of all submodules with the format {submodule_path : (url relative to git root), commit)} @@ -27,7 +28,7 @@ def get_submodules(): rev = sub_info[0].lstrip('-')[0:7] path = sub_info[1].lstrip('./') - config_key_arg = "submodule.{}.url".format(path) + config_key_arg = 'submodule.{}.url'.format(path) rel_url = subprocess.check_output(['git', 'config', '--file', gitmodules_file, '--get', config_key_arg]).decode('utf-8').lstrip('./').rstrip('\n') submodule_dict[path] = Submodule(rel_url, rev) @@ -38,8 +39,8 @@ def get_submodules(): def url_join(*url_parts): """ Make a URL out of multiple components, assume first part is the https:// part and anything else is a path component """ - result = "/".join(url_parts) - result = re.sub(r"([^:])//+", r"\1/", result) # remove any // that isn't in the https:// part + result = '/'.join(url_parts) + result = re.sub(r'([^:])//+', r'\1/', result) # remove any // that isn't in the https:// part return result @@ -47,7 +48,7 @@ def github_link(link_type, idf_rev, submods, root_path, app_config): def role(name, rawtext, text, lineno, inliner, options={}, content=[]): msgs = [] BASE_URL = 'https://github.com/' - IDF_REPO = "espressif/esp-idf" + IDF_REPO = 'espressif/esp-idf' def warning(msg): system_msg = inliner.reporter.warning(msg) @@ -90,31 +91,31 @@ def github_link(link_type, idf_rev, submods, root_path, app_config): line_no = tuple(int(ln_group) for ln_group in line_no.groups() if ln_group) # tuple of (nnn,) or (nnn, NNN) for ranges elif '#' in abs_path: # drop any other anchor from the line abs_path = abs_path.split('#')[0] - warning("URL %s seems to contain an unusable anchor after the #, only line numbers are supported" % link) + warning('URL %s seems to contain an unusable anchor after the #, only line numbers are supported' % link) is_dir = (link_type == 'tree') if not os.path.exists(abs_path): - warning("IDF path %s does not appear to exist (absolute path %s)" % (rel_path, abs_path)) + warning('IDF path %s does not appear to exist (absolute path %s)' % (rel_path, abs_path)) elif is_dir and not os.path.isdir(abs_path): # note these "wrong type" warnings are not strictly needed as GitHub will apply a redirect, # but the may become important in the future (plus make for cleaner links) - warning("IDF path %s is not a directory but role :%s: is for linking to a directory, try :%s_file:" % (rel_path, name, name)) + warning('IDF path %s is not a directory but role :%s: is for linking to a directory, try :%s_file:' % (rel_path, name, name)) elif not is_dir and os.path.isdir(abs_path): - warning("IDF path %s is a directory but role :%s: is for linking to a file" % (rel_path, name)) + warning('IDF path %s is a directory but role :%s: is for linking to a file' % (rel_path, name)) # check the line number is valid if line_no: if is_dir: - warning("URL %s contains a line number anchor but role :%s: is for linking to a directory" % (rel_path, name, name)) + warning('URL %s contains a line number anchor but role :%s: is for linking to a directory' % (rel_path, name, name)) elif os.path.exists(abs_path) and not os.path.isdir(abs_path): - with open(abs_path, "r") as f: + with open(abs_path, 'r') as f: lines = len(f.readlines()) if any(True for ln in line_no if ln > lines): - warning("URL %s specifies a range larger than file (file has %d lines)" % (rel_path, 
lines)) + warning('URL %s specifies a range larger than file (file has %d lines)' % (rel_path, lines)) if tuple(sorted(line_no)) != line_no: # second line number comes before first one! - warning("URL %s specifies a backwards line number range" % rel_path) + warning('URL %s specifies a backwards line number range' % rel_path) node = nodes.reference(rawtext, link_text, refuri=url, **options) return [node], msgs @@ -148,7 +149,7 @@ class TranslationLinkNodeTransform(SphinxPostTransform): doc_path = env.doc2path(docname, None, None) return_path = '../' * doc_path.count('/') # path back to the root from 'docname' # then take off 3 more paths for language/release/targetname and build the new URL - url = "{}.html".format(os.path.join(return_path, '../../..', language, env.config.release, + url = '{}.html'.format(os.path.join(return_path, '../../..', language, env.config.release, env.config.idf_target, docname)) node.replace_self(nodes.reference(rawtext, link_text, refuri=url, **options)) else: diff --git a/docs/idf_extensions/run_doxygen.py b/docs/idf_extensions/run_doxygen.py index 4fea153fd2..5e96b009b9 100644 --- a/docs/idf_extensions/run_doxygen.py +++ b/docs/idf_extensions/run_doxygen.py @@ -1,20 +1,21 @@ # Extension to generate Doxygen XML include files, with IDF config & soc macros included -from __future__ import print_function -from __future__ import unicode_literals -from io import open +from __future__ import print_function, unicode_literals + import os import os.path import re import subprocess +from io import open + from .util import copy_if_modified ALL_KINDS = [ - ("function", "Functions"), - ("union", "Unions"), - ("struct", "Structures"), - ("define", "Macros"), - ("typedef", "Type Definitions"), - ("enum", "Enumerations") + ('function', 'Functions'), + ('union', 'Unions'), + ('struct', 'Structures'), + ('define', 'Macros'), + ('typedef', 'Type Definitions'), + ('enum', 'Enumerations') ] """list of items that will be generated for a single API file """ @@ -30,27 +31,27 @@ def generate_doxygen(app, defines): build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep)) # Call Doxygen to get XML files from the header files - print("Calling Doxygen to generate latest XML files") + print('Calling Doxygen to generate latest XML files') doxy_env = os.environ doxy_env.update({ - "ENV_DOXYGEN_DEFINES": " ".join('{}={}'.format(key, value) for key, value in defines.items()), - "IDF_PATH": app.config.idf_path, - "IDF_TARGET": app.config.idf_target, + 'ENV_DOXYGEN_DEFINES': ' '.join('{}={}'.format(key, value) for key, value in defines.items()), + 'IDF_PATH': app.config.idf_path, + 'IDF_TARGET': app.config.idf_target, }) - doxyfile_dir = os.path.join(app.config.docs_root, "doxygen") - doxyfile_main = os.path.join(doxyfile_dir, "Doxyfile_common") - doxyfile_target = os.path.join(doxyfile_dir, "Doxyfile_" + app.config.idf_target) - print("Running doxygen with doxyfiles {} and {}".format(doxyfile_main, doxyfile_target)) + doxyfile_dir = os.path.join(app.config.docs_root, 'doxygen') + doxyfile_main = os.path.join(doxyfile_dir, 'Doxyfile_common') + doxyfile_target = os.path.join(doxyfile_dir, 'Doxyfile_' + app.config.idf_target) + print('Running doxygen with doxyfiles {} and {}'.format(doxyfile_main, doxyfile_target)) # It's possible to have doxygen log warnings to a file using WARN_LOGFILE directive, # but in some cases it will still log an error to stderr and return success! 
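# --- Illustrative sketch, not part of the patch above ---
# The run_doxygen hunk below redirects Doxygen's stderr into a logfile precisely
# because Doxygen can exit with status 0 while still printing errors. A caller
# could go one step further and fail the build by scanning that log afterwards.
# `run_doxygen_checked` and the 'error:' marker are assumptions for illustration,
# not part of the ESP-IDF extension.
import os
import subprocess

def run_doxygen_checked(doxyfile, env, build_dir):
    logfile = os.path.join(build_dir, 'doxygen-warning-log.txt')
    with open(logfile, 'w') as f:
        # capture warnings *and* errors in one place, regardless of exit status
        subprocess.check_call(['doxygen', doxyfile], env=env, cwd=build_dir, stderr=f)
    with open(logfile) as f:
        log = f.read()
    if 'error:' in log.lower():
        # the exit status said "ok", but the redirected log says otherwise
        raise RuntimeError('Doxygen reported errors, see {}'.format(logfile))
# --- end sketch ---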
# # So take all of stderr and redirect it to a logfile (will contain warnings and errors) - logfile = os.path.join(build_dir, "doxygen-warning-log.txt") + logfile = os.path.join(build_dir, 'doxygen-warning-log.txt') - with open(logfile, "w") as f: + with open(logfile, 'w') as f: # note: run Doxygen in the build directory, so the xml & xml_in files end up in there - subprocess.check_call(["doxygen", doxyfile_main], env=doxy_env, cwd=build_dir, stderr=f) + subprocess.check_call(['doxygen', doxyfile_main], env=doxy_env, cwd=build_dir, stderr=f) # Doxygen has generated XML files in 'xml' directory. # Copy them to 'xml_in', only touching the files which have changed. @@ -69,11 +70,11 @@ def convert_api_xml_to_inc(app, doxyfiles): """ build_dir = app.config.build_dir - xml_directory_path = "{}/xml".format(build_dir) - inc_directory_path = "{}/inc".format(build_dir) + xml_directory_path = '{}/xml'.format(build_dir) + inc_directory_path = '{}/inc'.format(build_dir) if not os.path.isdir(xml_directory_path): - raise RuntimeError("Directory {} does not exist!".format(xml_directory_path)) + raise RuntimeError('Directory {} does not exist!'.format(xml_directory_path)) if not os.path.exists(inc_directory_path): os.makedirs(inc_directory_path) @@ -83,16 +84,16 @@ def convert_api_xml_to_inc(app, doxyfiles): print("Generating 'api_name.inc' files with Doxygen directives") for header_file_path in header_paths: api_name = get_api_name(header_file_path) - inc_file_path = inc_directory_path + "/" + api_name + ".inc" + inc_file_path = inc_directory_path + '/' + api_name + '.inc' rst_output = generate_directives(header_file_path, xml_directory_path) previous_rst_output = '' if os.path.isfile(inc_file_path): - with open(inc_file_path, "r", encoding='utf-8') as inc_file_old: + with open(inc_file_path, 'r', encoding='utf-8') as inc_file_old: previous_rst_output = inc_file_old.read() if previous_rst_output != rst_output: - with open(inc_file_path, "w", encoding='utf-8') as inc_file: + with open(inc_file_path, 'w', encoding='utf-8') as inc_file: inc_file.write(rst_output) @@ -108,11 +109,11 @@ def get_doxyfile_input_paths(app, doxyfile_path): print("Getting Doxyfile's INPUT") - with open(doxyfile_path, "r", encoding='utf-8') as input_file: + with open(doxyfile_path, 'r', encoding='utf-8') as input_file: line = input_file.readline() # read contents of Doxyfile until 'INPUT' statement while line: - if line.find("INPUT") == 0: + if line.find('INPUT') == 0: break line = input_file.readline() @@ -124,13 +125,13 @@ def get_doxyfile_input_paths(app, doxyfile_path): # we have reached the end of 'INPUT' statement break # process only lines that are not comments - if line.find("#") == -1: + if line.find('#') == -1: # extract header file path inside components folder - m = re.search("components/(.*\.h)", line) # noqa: W605 - regular expression + m = re.search('components/(.*\.h)', line) # noqa: W605 - regular expression header_file_path = m.group(1) # Replace env variable used for multi target header - header_file_path = header_file_path.replace("$(IDF_TARGET)", app.config.idf_target) + header_file_path = header_file_path.replace('$(IDF_TARGET)', app.config.idf_target) doxyfile_INPUT.append(header_file_path) @@ -150,8 +151,8 @@ def get_api_name(header_file_path): The name of API. 
""" - api_name = "" - regex = r".*/(.*)\.h" + api_name = '' + regex = r'.*/(.*)\.h' m = re.search(regex, header_file_path) if m: api_name = m.group(1) @@ -173,15 +174,15 @@ def generate_directives(header_file_path, xml_directory_path): api_name = get_api_name(header_file_path) # in XLT file name each "_" in the api name is expanded by Doxygen to "__" - xlt_api_name = api_name.replace("_", "__") - xml_file_path = "%s/%s_8h.xml" % (xml_directory_path, xlt_api_name) + xlt_api_name = api_name.replace('_', '__') + xml_file_path = '%s/%s_8h.xml' % (xml_directory_path, xlt_api_name) - rst_output = "" + rst_output = '' rst_output = ".. File automatically generated by 'gen-dxd.py'\n" - rst_output += "\n" - rst_output += get_rst_header("Header File") - rst_output += "* :component_file:`" + header_file_path + "`\n" - rst_output += "\n" + rst_output += '\n' + rst_output += get_rst_header('Header File') + rst_output += '* :component_file:`' + header_file_path + '`\n' + rst_output += '\n' try: import xml.etree.cElementTree as ET @@ -206,10 +207,10 @@ def get_rst_header(header_name): """ - rst_output = "" - rst_output += header_name + "\n" - rst_output += "^" * len(header_name) + "\n" - rst_output += "\n" + rst_output = '' + rst_output += header_name + '\n' + rst_output += '^' * len(header_name) + '\n' + rst_output += '\n' return rst_output @@ -226,14 +227,14 @@ def select_unions(innerclass_list): """ - rst_output = "" + rst_output = '' for line in innerclass_list.splitlines(): # union is denoted by "union" at the beginning of line - if line.find("union") == 0: - union_id, union_name = re.split(r"\t+", line) - rst_output += ".. doxygenunion:: " + if line.find('union') == 0: + union_id, union_name = re.split(r'\t+', line) + rst_output += '.. doxygenunion:: ' rst_output += union_name - rst_output += "\n" + rst_output += '\n' return rst_output @@ -251,20 +252,20 @@ def select_structs(innerclass_list): """ - rst_output = "" + rst_output = '' for line in innerclass_list.splitlines(): # structure is denoted by "struct" at the beginning of line - if line.find("struct") == 0: + if line.find('struct') == 0: # skip structures that are part of union # they are documented by 'doxygenunion' directive - if line.find("::") > 0: + if line.find('::') > 0: continue - struct_id, struct_name = re.split(r"\t+", line) - rst_output += ".. doxygenstruct:: " + struct_id, struct_name = re.split(r'\t+', line) + rst_output += '.. doxygenstruct:: ' rst_output += struct_name - rst_output += "\n" - rst_output += " :members:\n" - rst_output += "\n" + rst_output += '\n' + rst_output += ' :members:\n' + rst_output += '\n' return rst_output @@ -282,12 +283,12 @@ def get_directives(tree, kind): """ - rst_output = "" - if kind in ["union", "struct"]: - innerclass_list = "" + rst_output = '' + if kind in ['union', 'struct']: + innerclass_list = '' for elem in tree.iterfind('compounddef/innerclass'): - innerclass_list += elem.attrib["refid"] + "\t" + elem.text + "\n" - if kind == "union": + innerclass_list += elem.attrib['refid'] + '\t' + elem.text + '\n' + if kind == 'union': rst_output += select_unions(innerclass_list) else: rst_output += select_structs(innerclass_list) @@ -295,10 +296,10 @@ def get_directives(tree, kind): for elem in tree.iterfind( 'compounddef/sectiondef/memberdef[@kind="%s"]' % kind): name = elem.find('name') - rst_output += ".. doxygen%s:: " % kind - rst_output += name.text + "\n" + rst_output += '.. 
doxygen%s:: ' % kind + rst_output += name.text + '\n' if rst_output: all_kinds_dict = dict(ALL_KINDS) - rst_output = get_rst_header(all_kinds_dict[kind]) + rst_output + "\n" + rst_output = get_rst_header(all_kinds_dict[kind]) + rst_output + '\n' return rst_output diff --git a/docs/idf_extensions/util.py b/docs/idf_extensions/util.py index f5f271d23c..24163c19ad 100644 --- a/docs/idf_extensions/util.py +++ b/docs/idf_extensions/util.py @@ -15,10 +15,11 @@ # limitations under the License. from __future__ import unicode_literals -from io import open + import os import shutil import sys +from io import open try: import urllib.request @@ -33,10 +34,10 @@ def files_equal(path_1, path_2): if not os.path.exists(path_1) or not os.path.exists(path_2): return False file_1_contents = '' - with open(path_1, "r", encoding='utf-8') as f_1: + with open(path_1, 'r', encoding='utf-8') as f_1: file_1_contents = f_1.read() file_2_contents = '' - with open(path_2, "r", encoding='utf-8') as f_2: + with open(path_2, 'r', encoding='utf-8') as f_2: file_2_contents = f_2.read() return file_1_contents == file_2_contents @@ -63,7 +64,7 @@ def copy_if_modified(src_path, dst_path): def download_file_if_missing(from_url, to_path): - filename_with_path = to_path + "/" + os.path.basename(from_url) + filename_with_path = to_path + '/' + os.path.basename(from_url) exists = os.path.isfile(filename_with_path) if exists: print("The file '%s' already exists" % (filename_with_path)) diff --git a/docs/sanitize_version.py b/docs/sanitize_version.py index 90e9101678..1047ec5a78 100644 --- a/docs/sanitize_version.py +++ b/docs/sanitize_version.py @@ -35,8 +35,8 @@ def sanitize_version(original_version): except KeyError: version = original_version - if version == "master": - return "latest" + if version == 'master': + return 'latest' version = version.replace('/', '-') diff --git a/docs/test/en/conf.py b/docs/test/en/conf.py index 7de45ad731..c2e02b5f20 100644 --- a/docs/test/en/conf.py +++ b/docs/test/en/conf.py @@ -8,8 +8,8 @@ try: from conf_common import * # noqa: F403,F401 except ImportError: - import sys import os + import sys sys.path.insert(0, os.path.abspath('../..')) from conf_common import * # noqa: F403,F401 @@ -27,7 +27,7 @@ html_logo = None latex_logo = None html_static_path = [] -conditional_include_dict = {'esp32':["esp32_page.rst"], - 'esp32s2':["esp32s2_page.rst"], - 'SOC_BT_SUPPORTED':["bt_page.rst"], +conditional_include_dict = {'esp32':['esp32_page.rst'], + 'esp32s2':['esp32s2_page.rst'], + 'SOC_BT_SUPPORTED':['bt_page.rst'], } diff --git a/docs/test/test_docs.py b/docs/test/test_docs.py index 366171fdd7..bea1e1af90 100755 --- a/docs/test/test_docs.py +++ b/docs/test/test_docs.py @@ -1,16 +1,16 @@ #!/usr/bin/env python3 -import unittest +import os import subprocess import sys -import os +import unittest CURRENT_DIR = os.path.dirname(os.path.realpath(__file__)) -ESP32_DOC = "esp32_page" -ESP32_S2_DOC = "esp32s2_page" -BT_DOC = "bt_page" -LINK_ROLES_DOC = "link_roles" -IDF_FORMAT_DOC = "idf_target_format" +ESP32_DOC = 'esp32_page' +ESP32_S2_DOC = 'esp32s2_page' +BT_DOC = 'bt_page' +LINK_ROLES_DOC = 'link_roles' +IDF_FORMAT_DOC = 'idf_target_format' class DocBuilder(): @@ -24,7 +24,7 @@ class DocBuilder(): self.html_out_dir = os.path.join(CURRENT_DIR, build_dir, language, target, 'html') def build(self, opt_args=[]): - args = [sys.executable, self.build_docs_py_path, "-b", self.build_dir, "-s", self.src_dir, "-t", self.target, "-l", self.language] + args = [sys.executable, self.build_docs_py_path, '-b', 
self.build_dir, '-s', self.src_dir, '-t', self.target, '-l', self.language] args.extend(opt_args) return subprocess.call(args) @@ -33,65 +33,65 @@ class TestDocs(unittest.TestCase): @classmethod def setUpClass(cls): - cls.builder = DocBuilder("test", "_build/test_docs", "esp32s2", "en") + cls.builder = DocBuilder('test', '_build/test_docs', 'esp32s2', 'en') cls.build_ret_flag = cls.builder.build() def setUp(self): if self.build_ret_flag: - self.fail("Build docs failed with return: {}".format(self.build_ret_flag)) + self.fail('Build docs failed with return: {}'.format(self.build_ret_flag)) def assert_str_not_in_doc(self, doc_name, str_to_find): with open(os.path.join(self.builder.html_out_dir, doc_name)) as f: content = f.read() - self.assertFalse(str_to_find in content, "Found {} in {}".format(str_to_find, doc_name)) + self.assertFalse(str_to_find in content, 'Found {} in {}'.format(str_to_find, doc_name)) def assert_str_in_doc(self, doc_name, str_to_find): with open(os.path.join(self.builder.html_out_dir, doc_name)) as f: content = f.read() - self.assertTrue(str_to_find in content, "Did not find {} in {}".format(str_to_find, doc_name)) + self.assertTrue(str_to_find in content, 'Did not find {} in {}'.format(str_to_find, doc_name)) def test_only_dir(self): # Test that ESP32 content was excluded - self.assert_str_not_in_doc(ESP32_S2_DOC + ".html", "!ESP32_CONTENT!") + self.assert_str_not_in_doc(ESP32_S2_DOC + '.html', '!ESP32_CONTENT!') # Test that ESP32 S2 content was included - self.assert_str_in_doc(ESP32_S2_DOC + ".html", "!ESP32_S2_CONTENT!") + self.assert_str_in_doc(ESP32_S2_DOC + '.html', '!ESP32_S2_CONTENT!') # Test that BT content was excluded - self.assert_str_not_in_doc(ESP32_S2_DOC + ".html", "!BT_CONTENT!") + self.assert_str_not_in_doc(ESP32_S2_DOC + '.html', '!BT_CONTENT!') def test_toctree_filter(self): # ESP32 page should NOT be built - esp32_doc = os.path.join(self.builder.html_out_dir, ESP32_DOC + ".html") - self.assertFalse(os.path.isfile(esp32_doc), "Found {}".format(esp32_doc)) - self.assert_str_not_in_doc('index.html', "!ESP32_CONTENT!") + esp32_doc = os.path.join(self.builder.html_out_dir, ESP32_DOC + '.html') + self.assertFalse(os.path.isfile(esp32_doc), 'Found {}'.format(esp32_doc)) + self.assert_str_not_in_doc('index.html', '!ESP32_CONTENT!') - esp32s2_doc = os.path.join(self.builder.html_out_dir, ESP32_S2_DOC + ".html") - self.assertTrue(os.path.isfile(esp32s2_doc), "{} not found".format(esp32s2_doc)) + esp32s2_doc = os.path.join(self.builder.html_out_dir, ESP32_S2_DOC + '.html') + self.assertTrue(os.path.isfile(esp32s2_doc), '{} not found'.format(esp32s2_doc)) # Spot check a few other tags # No Bluetooth on ESP32 S2 - bt_doc = os.path.join(self.builder.html_out_dir, BT_DOC + ".html") - self.assertFalse(os.path.isfile(bt_doc), "Found {}".format(bt_doc)) - self.assert_str_not_in_doc('index.html', "!BT_CONTENT!") + bt_doc = os.path.join(self.builder.html_out_dir, BT_DOC + '.html') + self.assertFalse(os.path.isfile(bt_doc), 'Found {}'.format(bt_doc)) + self.assert_str_not_in_doc('index.html', '!BT_CONTENT!') def test_link_roles(self): - print("test") + print('test') class TestBuildSubset(unittest.TestCase): def test_build_subset(self): - builder = DocBuilder("test", "_build/test_build_subset", "esp32", "en") + builder = DocBuilder('test', '_build/test_build_subset', 'esp32', 'en') - docs_to_build = "esp32_page.rst" + docs_to_build = 'esp32_page.rst' - self.assertFalse(builder.build(["-i", docs_to_build])) + self.assertFalse(builder.build(['-i', docs_to_build])) # 
Check that we only built the input docs - bt_doc = os.path.join(builder.html_out_dir, BT_DOC + ".html") - esp32_doc = os.path.join(builder.html_out_dir, ESP32_DOC + ".html") - self.assertFalse(os.path.isfile(bt_doc), "Found {}".format(bt_doc)) - self.assertTrue(os.path.isfile(esp32_doc), "Found {}".format(esp32_doc)) + bt_doc = os.path.join(builder.html_out_dir, BT_DOC + '.html') + esp32_doc = os.path.join(builder.html_out_dir, ESP32_DOC + '.html') + self.assertFalse(os.path.isfile(bt_doc), 'Found {}'.format(bt_doc)) + self.assertTrue(os.path.isfile(esp32_doc), '{} not found'.format(esp32_doc)) if __name__ == '__main__': diff --git a/docs/test/test_sphinx_idf_extensions.py b/docs/test/test_sphinx_idf_extensions.py index 8ea9f1c8d5..9082658be7 100755 --- a/docs/test/test_sphinx_idf_extensions.py +++ b/docs/test/test_sphinx_idf_extensions.py @@ -3,8 +3,8 @@ import os import sys import unittest -from unittest.mock import MagicMock from tempfile import TemporaryDirectory +from unittest.mock import MagicMock from sphinx.util import tags @@ -14,9 +14,7 @@ except ImportError: sys.path.append('..') from idf_extensions import exclude_docs -from idf_extensions import format_idf_target -from idf_extensions import gen_idf_tools_links -from idf_extensions import link_roles +from idf_extensions import format_idf_target, gen_idf_tools_links, link_roles class TestFormatIdfTarget(unittest.TestCase): @@ -30,14 +28,14 @@ class TestFormatIdfTarget(unittest.TestCase): def test_add_subs(self): - self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_NAME}'], "ESP32") - self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_PATH_NAME}'], "esp32") - self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TOOLCHAIN_NAME}'], "esp32") - self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_CFG_PREFIX}'], "ESP32") + self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_NAME}'], 'ESP32') + self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_PATH_NAME}'], 'esp32') + self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TOOLCHAIN_NAME}'], 'esp32') + self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_CFG_PREFIX}'], 'ESP32') self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TRM_EN_URL}'], - "https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_en.pdf") + 'https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_en.pdf') self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TRM_CN_URL}'], - "https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_cn.pdf") + 'https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_cn.pdf') def test_sub(self): content = ('This is a {IDF_TARGET_NAME}, with {IDF_TARGET_PATH_NAME}/soc.c, compiled with ' @@ -54,14 +52,14 @@ class TestFormatIdfTarget(unittest.TestCase): content = ('{IDF_TARGET_TX_PIN:default="IO3", esp32="IO4", esp32s2="IO5"}' 'The {IDF_TARGET_NAME} UART {IDF_TARGET_TX_PIN} uses for TX') - expected = "The ESP32 UART IO4 uses for TX" + expected = 'The ESP32 UART IO4 uses for TX' self.assertEqual(self.str_sub.substitute(content), expected) def test_local_sub_default(self): content = ('{IDF_TARGET_TX_PIN:default="IO3", esp32s2="IO5"}' 'The {IDF_TARGET_NAME} UART {IDF_TARGET_TX_PIN} uses for TX') - expected = "The ESP32 UART IO3 uses for TX" + expected = 'The ESP32 UART IO3 uses for TX' self.assertEqual(self.str_sub.substitute(content), expected) def
test_local_sub_no_default(self): @@ -76,12 +74,12 @@ class TestExclude(unittest.TestCase): def setUp(self): self.app = MagicMock() self.app.tags = tags.Tags() - self.app.config.conditional_include_dict = {"esp32":["esp32.rst", "bt.rst"], "esp32s2":["esp32s2.rst"]} + self.app.config.conditional_include_dict = {'esp32':['esp32.rst', 'bt.rst'], 'esp32s2':['esp32s2.rst']} self.app.config.docs_to_build = None self.app.config.exclude_patterns = [] def test_update_exclude_pattern(self): - self.app.tags.add("esp32") + self.app.tags.add('esp32') exclude_docs.update_exclude_patterns(self.app, self.app.config) docs_to_build = set(self.app.config.conditional_include_dict['esp32']) @@ -92,7 +90,7 @@ class TestExclude(unittest.TestCase): class TestGenIDFToolLinks(unittest.TestCase): def setUp(self): self.app = MagicMock() - self.app.config.build_dir = "_build" + self.app.config.build_dir = '_build' self.app.config.idf_path = os.environ['IDF_PATH'] def test_gen_idf_tool_links(self): diff --git a/docs/zh_CN/conf.py b/docs/zh_CN/conf.py index 567585ffbd..cd82717ebc 100644 --- a/docs/zh_CN/conf.py +++ b/docs/zh_CN/conf.py @@ -9,8 +9,8 @@ try: from conf_common import * # noqa: F403,F401 except ImportError: - import sys import os + import sys sys.path.insert(0, os.path.abspath('..')) from conf_common import * # noqa: F403,F401 diff --git a/examples/bluetooth/nimble/blecent/blecent_test.py b/examples/bluetooth/nimble/blecent/blecent_test.py index b409bb981a..c1d04cea4b 100644 --- a/examples/bluetooth/nimble/blecent/blecent_test.py +++ b/examples/bluetooth/nimble/blecent/blecent_test.py @@ -15,21 +15,22 @@ # limitations under the License. from __future__ import print_function + import os import re -import uuid import subprocess +import uuid -from tiny_test_fw import Utility import ttfw_idf from ble import lib_ble_client +from tiny_test_fw import Utility # When running on local machine execute the following before running this script # > make app bootloader # > make print_flash_cmd | tail -n 1 > build/download.config -@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI_BT') def test_example_app_ble_central(env, extra_data): """ Steps: @@ -37,7 +38,7 @@ def test_example_app_ble_central(env, extra_data): """ interface = 'hci0' - adv_host_name = "BleCentTestApp" + adv_host_name = 'BleCentTestApp' adv_iface_index = 0 adv_type = 'peripheral' adv_uuid = '1811' @@ -45,15 +46,15 @@ def test_example_app_ble_central(env, extra_data): subprocess.check_output(['rm','-rf','/var/lib/bluetooth/*']) subprocess.check_output(['hciconfig','hci0','reset']) # Acquire DUT - dut = env.get_dut("blecent", "examples/bluetooth/nimble/blecent", dut_class=ttfw_idf.ESP32DUT) + dut = env.get_dut('blecent', 'examples/bluetooth/nimble/blecent', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut.app.binary_path, "blecent.bin") + binary_file = os.path.join(dut.app.binary_path, 'blecent.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("blecent_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('blecent_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing - Utility.console_log("Starting blecent example test app") + Utility.console_log('Starting blecent example test app') dut.start_app() dut.reset() @@ -62,16 +63,16 @@ def test_example_app_ble_central(env, extra_data): # Get BLE client module ble_client_obj = lib_ble_client.BLE_Bluez_Client(interface) if not ble_client_obj: - raise RuntimeError("Get 
DBus-Bluez object failed !!") + raise RuntimeError('Get DBus-Bluez object failed !!') # Discover Bluetooth Adapter and power on is_adapter_set = ble_client_obj.set_adapter() if not is_adapter_set: - raise RuntimeError("Adapter Power On failed !!") + raise RuntimeError('Adapter Power On failed !!') # Write device address to dut - dut.expect("BLE Host Task Started", timeout=60) - dut.write(device_addr + "\n") + dut.expect('BLE Host Task Started', timeout=60) + dut.write(device_addr + '\n') ''' Blecent application run: @@ -87,22 +88,22 @@ def test_example_app_ble_central(env, extra_data): ble_client_obj.disconnect() # Check dut responses - dut.expect("Connection established", timeout=60) + dut.expect('Connection established', timeout=60) - dut.expect("Service discovery complete; status=0", timeout=60) - print("Service discovery passed\n\tService Discovery Status: 0") + dut.expect('Service discovery complete; status=0', timeout=60) + print('Service discovery passed\n\tService Discovery Status: 0') - dut.expect("GATT procedure initiated: read;", timeout=60) - dut.expect("Read complete; status=0", timeout=60) - print("Read passed\n\tSupportedNewAlertCategoryCharacteristic\n\tRead Status: 0") + dut.expect('GATT procedure initiated: read;', timeout=60) + dut.expect('Read complete; status=0', timeout=60) + print('Read passed\n\tSupportedNewAlertCategoryCharacteristic\n\tRead Status: 0') - dut.expect("GATT procedure initiated: write;", timeout=60) - dut.expect("Write complete; status=0", timeout=60) - print("Write passed\n\tAlertNotificationControlPointCharacteristic\n\tWrite Status: 0") + dut.expect('GATT procedure initiated: write;', timeout=60) + dut.expect('Write complete; status=0', timeout=60) + print('Write passed\n\tAlertNotificationControlPointCharacteristic\n\tWrite Status: 0') - dut.expect("GATT procedure initiated: write;", timeout=60) - dut.expect("Subscribe complete; status=0", timeout=60) - print("Subscribe passed\n\tClientCharacteristicConfigurationDescriptor\n\tSubscribe Status: 0") + dut.expect('GATT procedure initiated: write;', timeout=60) + dut.expect('Subscribe complete; status=0', timeout=60) + print('Subscribe passed\n\tClientCharacteristicConfigurationDescriptor\n\tSubscribe Status: 0') if __name__ == '__main__': diff --git a/examples/bluetooth/nimble/blehr/blehr_test.py b/examples/bluetooth/nimble/blehr/blehr_test.py index 7dd951618f..bd09a7ddcf 100644 --- a/examples/bluetooth/nimble/blehr/blehr_test.py +++ b/examples/bluetooth/nimble/blehr/blehr_test.py @@ -15,20 +15,21 @@ # limitations under the License. 
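# --- Illustrative sketch, not part of the patch: the exceptions-queue pattern ---
# The blehr and bleprph tests in this diff run the BLE client in a worker thread.
# A worker thread cannot raise into the main thread, so failures are handed back
# through a queue and re-raised there. This is a condensed, self-contained version
# of that pattern; the names and the stand-in failure are assumptions for
# illustration, not the tests' actual API.
import threading
import traceback

try:
    import Queue  # Python 2
except ImportError:
    import queue as Queue  # Python 3, same shim the tests use

def client_task(exceptions_queue):
    try:
        raise RuntimeError('BLE client failed')  # stand-in for the real client work
    except Exception:
        # hand the formatted traceback back to the main thread
        exceptions_queue.put(traceback.format_exc(), block=False)

exceptions_queue = Queue.Queue()
worker = threading.Thread(target=client_task, args=(exceptions_queue,))
worker.start()
worker.join()
try:
    msg = exceptions_queue.get(block=False)
except Queue.Empty:
    pass  # queue empty: the client thread succeeded
else:
    # main thread decides to fail, just as the tests do
    raise Exception('Thread did not run successfully:\n' + msg)
# --- end sketch ---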
from __future__ import print_function + import os import re +import subprocess import threading import traceback -import subprocess try: import Queue except ImportError: import queue as Queue -from tiny_test_fw import Utility import ttfw_idf from ble import lib_ble_client +from tiny_test_fw import Utility # When running on local machine execute the following before running this script # > make app bootloader @@ -44,28 +45,28 @@ def blehr_client_task(hr_obj, dut_addr): # Get BLE client module ble_client_obj = lib_ble_client.BLE_Bluez_Client(interface, devname=ble_devname, devaddr=dut_addr) if not ble_client_obj: - raise RuntimeError("Failed to get DBus-Bluez object") + raise RuntimeError('Failed to get DBus-Bluez object') # Discover Bluetooth Adapter and power on is_adapter_set = ble_client_obj.set_adapter() if not is_adapter_set: - raise RuntimeError("Adapter Power On failed !!") + raise RuntimeError('Adapter Power On failed !!') # Connect BLE Device is_connected = ble_client_obj.connect() if not is_connected: # Call disconnect to perform cleanup operations before exiting application ble_client_obj.disconnect() - raise RuntimeError("Connection to device " + str(ble_devname) + " failed !!") + raise RuntimeError('Connection to device ' + str(ble_devname) + ' failed !!') # Read Services services_ret = ble_client_obj.get_services() if services_ret: - Utility.console_log("\nServices\n") + Utility.console_log('\nServices\n') Utility.console_log(str(services_ret)) else: ble_client_obj.disconnect() - raise RuntimeError("Failure: Read Services failed") + raise RuntimeError('Failure: Read Services failed') ''' Blehr application run: @@ -75,9 +76,9 @@ def blehr_client_task(hr_obj, dut_addr): ''' blehr_ret = ble_client_obj.hr_update_simulation(hr_srv_uuid, hr_char_uuid) if blehr_ret: - Utility.console_log("Success: blehr example test passed") + Utility.console_log('Success: blehr example test passed') else: - raise RuntimeError("Failure: blehr example test failed") + raise RuntimeError('Failure: blehr example test failed') # Call disconnect to perform cleanup operations before exiting application ble_client_obj.disconnect() @@ -96,7 +97,7 @@ class BleHRThread(threading.Thread): self.exceptions_queue.put(traceback.format_exc(), block=False) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI_BT') def test_example_app_ble_hr(env, extra_data): """ Steps: @@ -110,20 +111,20 @@ def test_example_app_ble_hr(env, extra_data): subprocess.check_output(['hciconfig','hci0','reset']) # Acquire DUT - dut = env.get_dut("blehr", "examples/bluetooth/nimble/blehr", dut_class=ttfw_idf.ESP32DUT) + dut = env.get_dut('blehr', 'examples/bluetooth/nimble/blehr', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut.app.binary_path, "blehr.bin") + binary_file = os.path.join(dut.app.binary_path, 'blehr.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("blehr_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('blehr_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing - Utility.console_log("Starting blehr simple example test app") + Utility.console_log('Starting blehr simple example test app') dut.start_app() dut.reset() # Get device address from dut - dut_addr = dut.expect(re.compile(r"Device Address: ([a-fA-F0-9:]+)"), timeout=30)[0] + dut_addr = dut.expect(re.compile(r'Device Address: ([a-fA-F0-9:]+)'), timeout=30)[0] exceptions_queue = Queue.Queue() # Starting a py-client in a 
separate thread blehr_thread_obj = BleHRThread(dut_addr, exceptions_queue) @@ -137,15 +138,15 @@ def test_example_app_ble_hr(env, extra_data): except Queue.Empty: break else: - Utility.console_log("\n" + exception_msg) + Utility.console_log('\n' + exception_msg) if exception_msg: - raise Exception("Thread did not run successfully") + raise Exception('Thread did not run successfully') # Check dut responses - dut.expect("subscribe event; cur_notify=1", timeout=30) - dut.expect("subscribe event; cur_notify=0", timeout=30) - dut.expect("disconnect;", timeout=30) + dut.expect('subscribe event; cur_notify=1', timeout=30) + dut.expect('subscribe event; cur_notify=0', timeout=30) + dut.expect('disconnect;', timeout=30) if __name__ == '__main__': diff --git a/examples/bluetooth/nimble/bleprph/bleprph_test.py b/examples/bluetooth/nimble/bleprph/bleprph_test.py index 150dc59025..f38207bea9 100644 --- a/examples/bluetooth/nimble/bleprph/bleprph_test.py +++ b/examples/bluetooth/nimble/bleprph/bleprph_test.py @@ -15,20 +15,21 @@ # limitations under the License. from __future__ import print_function + import os import re -import traceback -import threading import subprocess +import threading +import traceback try: import Queue except ImportError: import queue as Queue -from tiny_test_fw import Utility import ttfw_idf from ble import lib_ble_client +from tiny_test_fw import Utility # When running on local machine execute the following before running this script # > make app bootloader @@ -44,45 +45,45 @@ def bleprph_client_task(prph_obj, dut, dut_addr): # Get BLE client module ble_client_obj = lib_ble_client.BLE_Bluez_Client(interface, devname=ble_devname, devaddr=dut_addr) if not ble_client_obj: - raise RuntimeError("Failed to get DBus-Bluez object") + raise RuntimeError('Failed to get DBus-Bluez object') # Discover Bluetooth Adapter and power on is_adapter_set = ble_client_obj.set_adapter() if not is_adapter_set: - raise RuntimeError("Adapter Power On failed !!") + raise RuntimeError('Adapter Power On failed !!') # Connect BLE Device is_connected = ble_client_obj.connect() if not is_connected: # Call disconnect to perform cleanup operations before exiting application ble_client_obj.disconnect() - raise RuntimeError("Connection to device " + ble_devname + " failed !!") + raise RuntimeError('Connection to device ' + ble_devname + ' failed !!') # Check dut responses - dut.expect("GAP procedure initiated: advertise;", timeout=30) + dut.expect('GAP procedure initiated: advertise;', timeout=30) # Read Services services_ret = ble_client_obj.get_services(srv_uuid) if services_ret: - Utility.console_log("\nServices\n") + Utility.console_log('\nServices\n') Utility.console_log(str(services_ret)) else: ble_client_obj.disconnect() - raise RuntimeError("Failure: Read Services failed") + raise RuntimeError('Failure: Read Services failed') # Read Characteristics chars_ret = {} chars_ret = ble_client_obj.read_chars() if chars_ret: - Utility.console_log("\nCharacteristics retrieved") + Utility.console_log('\nCharacteristics retrieved') for path, props in chars_ret.items(): - Utility.console_log("\n\tCharacteristic: " + str(path)) - Utility.console_log("\tCharacteristic UUID: " + str(props[2])) - Utility.console_log("\tValue: " + str(props[0])) - Utility.console_log("\tProperties: : " + str(props[1])) + Utility.console_log('\n\tCharacteristic: ' + str(path)) + Utility.console_log('\tCharacteristic UUID: ' + str(props[2])) + Utility.console_log('\tValue: ' + str(props[0])) + Utility.console_log('\tProperties: : ' + 
str(props[1])) else: ble_client_obj.disconnect() - raise RuntimeError("Failure: Read Characteristics failed") + raise RuntimeError('Failure: Read Characteristics failed') ''' Write Characteristics @@ -91,15 +92,15 @@ def bleprph_client_task(prph_obj, dut, dut_addr): chars_ret_on_write = {} chars_ret_on_write = ble_client_obj.write_chars(b'A') if chars_ret_on_write: - Utility.console_log("\nCharacteristics after write operation") + Utility.console_log('\nCharacteristics after write operation') for path, props in chars_ret_on_write.items(): - Utility.console_log("\n\tCharacteristic:" + str(path)) - Utility.console_log("\tCharacteristic UUID: " + str(props[2])) - Utility.console_log("\tValue:" + str(props[0])) - Utility.console_log("\tProperties: : " + str(props[1])) + Utility.console_log('\n\tCharacteristic:' + str(path)) + Utility.console_log('\tCharacteristic UUID: ' + str(props[2])) + Utility.console_log('\tValue:' + str(props[0])) + Utility.console_log('\tProperties: : ' + str(props[1])) else: ble_client_obj.disconnect() - raise RuntimeError("Failure: Write Characteristics failed") + raise RuntimeError('Failure: Write Characteristics failed') # Call disconnect to perform cleanup operations before exiting application ble_client_obj.disconnect() @@ -119,7 +120,7 @@ class BlePrphThread(threading.Thread): self.exceptions_queue.put(traceback.format_exc(), block=False) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI_BT') def test_example_app_ble_peripheral(env, extra_data): """ Steps: @@ -133,20 +134,20 @@ def test_example_app_ble_peripheral(env, extra_data): subprocess.check_output(['hciconfig','hci0','reset']) # Acquire DUT - dut = env.get_dut("bleprph", "examples/bluetooth/nimble/bleprph", dut_class=ttfw_idf.ESP32DUT) + dut = env.get_dut('bleprph', 'examples/bluetooth/nimble/bleprph', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut.app.binary_path, "bleprph.bin") + binary_file = os.path.join(dut.app.binary_path, 'bleprph.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("bleprph_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('bleprph_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing - Utility.console_log("Starting bleprph simple example test app") + Utility.console_log('Starting bleprph simple example test app') dut.start_app() dut.reset() # Get device address from dut - dut_addr = dut.expect(re.compile(r"Device Address: ([a-fA-F0-9:]+)"), timeout=30)[0] + dut_addr = dut.expect(re.compile(r'Device Address: ([a-fA-F0-9:]+)'), timeout=30)[0] exceptions_queue = Queue.Queue() # Starting a py-client in a separate thread @@ -161,14 +162,14 @@ def test_example_app_ble_peripheral(env, extra_data): except Queue.Empty: break else: - Utility.console_log("\n" + exception_msg) + Utility.console_log('\n' + exception_msg) if exception_msg: - raise Exception("Thread did not run successfully") + raise Exception('Thread did not run successfully') # Check dut responses - dut.expect("connection established; status=0", timeout=30) - dut.expect("disconnect;", timeout=30) + dut.expect('connection established; status=0', timeout=30) + dut.expect('disconnect;', timeout=30) if __name__ == '__main__': diff --git a/examples/cxx/pthread/example_test.py b/examples/cxx/pthread/example_test.py index 4156b12819..82999fdc7a 100644 --- a/examples/cxx/pthread/example_test.py +++ b/examples/cxx/pthread/example_test.py @@ -1,5 +1,7 @@ from __future__ import 
unicode_literals + import re + import ttfw_idf diff --git a/examples/get-started/blink/example_test.py b/examples/get-started/blink/example_test.py index 3309573c50..505ad58fe9 100644 --- a/examples/get-started/blink/example_test.py +++ b/examples/get-started/blink/example_test.py @@ -1,20 +1,19 @@ #!/usr/bin/env python -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -import re -import os -import hashlib +from __future__ import division, print_function, unicode_literals + +import hashlib +import os +import re -from tiny_test_fw import Utility import ttfw_idf +from tiny_test_fw import Utility def verify_elf_sha256_embedding(dut): - elf_file = os.path.join(dut.app.binary_path, "blink.elf") + elf_file = os.path.join(dut.app.binary_path, 'blink.elf') sha256 = hashlib.sha256() - with open(elf_file, "rb") as f: + with open(elf_file, 'rb') as f: sha256.update(f.read()) sha256_expected = sha256.hexdigest() @@ -28,12 +27,12 @@ def verify_elf_sha256_embedding(dut): raise ValueError('ELF file SHA256 mismatch') -@ttfw_idf.idf_example_test(env_tag="Example_GENERIC") +@ttfw_idf.idf_example_test(env_tag='Example_GENERIC') def test_examples_blink(env, extra_data): - dut = env.get_dut("blink", "examples/get-started/blink", dut_class=ttfw_idf.ESP32DUT) - binary_file = os.path.join(dut.app.binary_path, "blink.bin") + dut = env.get_dut('blink', 'examples/get-started/blink', dut_class=ttfw_idf.ESP32DUT) + binary_file = os.path.join(dut.app.binary_path, 'blink.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("blink_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('blink_bin_size', '{}KB'.format(bin_size // 1024)) dut.start_app() diff --git a/examples/get-started/hello_world/example_test.py b/examples/get-started/hello_world/example_test.py index 116fa402f6..00c3358075 100644 --- a/examples/get-started/hello_world/example_test.py +++ b/examples/get-started/hello_world/example_test.py @@ -1,16 +1,14 @@ #!/usr/bin/env python -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import division, print_function, unicode_literals import ttfw_idf -@ttfw_idf.idf_example_test(env_tag="Example_GENERIC", target=['esp32', 'esp32s2'], ci_target=['esp32']) +@ttfw_idf.idf_example_test(env_tag='Example_GENERIC', target=['esp32', 'esp32s2'], ci_target=['esp32']) def test_examples_hello_world(env, extra_data): app_name = 'hello_world' - dut = env.get_dut(app_name, "examples/get-started/hello_world") + dut = env.get_dut(app_name, 'examples/get-started/hello_world') dut.start_app() res = dut.expect(ttfw_idf.MINIMUM_FREE_HEAP_SIZE_RE) if not res: diff --git a/examples/peripherals/gpio/generic_gpio/example_test.py b/examples/peripherals/gpio/generic_gpio/example_test.py index a0564846b4..aec3c22510 100644 --- a/examples/peripherals/gpio/generic_gpio/example_test.py +++ b/examples/peripherals/gpio/generic_gpio/example_test.py @@ -1,16 +1,14 @@ #!/usr/bin/env python -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import division, print_function, unicode_literals import ttfw_idf -@ttfw_idf.idf_example_test(env_tag="Example_TWAI1", target=['esp32', 'esp32s2'], ci_target=['esp32']) +@ttfw_idf.idf_example_test(env_tag='Example_TWAI1', target=['esp32', 'esp32s2'], ci_target=['esp32']) def test_examples_gpio(env, extra_data): - app_name = "gpio" - dut = env.get_dut(app_name, 
"examples/peripherals/gpio/generic_gpio") + app_name = 'gpio' + dut = env.get_dut(app_name, 'examples/peripherals/gpio/generic_gpio') dut.start_app() res = dut.expect(ttfw_idf.MINIMUM_FREE_HEAP_SIZE_RE) if not res: diff --git a/examples/peripherals/i2c/i2c_tools/example_test.py b/examples/peripherals/i2c/i2c_tools/example_test.py index b3d376aa94..67d48b4810 100644 --- a/examples/peripherals/i2c/i2c_tools/example_test.py +++ b/examples/peripherals/i2c/i2c_tools/example_test.py @@ -10,27 +10,27 @@ def test_i2ctools_example(env, extra_data): # Get device under test, flash and start example. "i2ctool" must be defined in EnvConfig dut = env.get_dut('i2ctools', 'examples/peripherals/i2c/i2c_tools', dut_class=ttfw_idf.ESP32DUT) dut.start_app() - dut.expect("i2c-tools>", timeout=EXPECT_TIMEOUT) + dut.expect('i2c-tools>', timeout=EXPECT_TIMEOUT) # Get i2c address - dut.write("i2cdetect") - dut.expect("5b", timeout=EXPECT_TIMEOUT) + dut.write('i2cdetect') + dut.expect('5b', timeout=EXPECT_TIMEOUT) # Get chip ID - dut.write("i2cget -c 0x5b -r 0x20 -l 1") - dut.expect("0x81", timeout=EXPECT_TIMEOUT) + dut.write('i2cget -c 0x5b -r 0x20 -l 1') + dut.expect('0x81', timeout=EXPECT_TIMEOUT) # Reset sensor - dut.write("i2cset -c 0x5b -r 0xFF 0x11 0xE5 0x72 0x8A") - dut.expect("OK", timeout=EXPECT_TIMEOUT) + dut.write('i2cset -c 0x5b -r 0xFF 0x11 0xE5 0x72 0x8A') + dut.expect('OK', timeout=EXPECT_TIMEOUT) # Get status - dut.write("i2cget -c 0x5b -r 0x00 -l 1") - dut.expect_any("0x10", timeout=EXPECT_TIMEOUT) + dut.write('i2cget -c 0x5b -r 0x00 -l 1') + dut.expect_any('0x10', timeout=EXPECT_TIMEOUT) # Change work mode - dut.write("i2cset -c 0x5b -r 0xF4") - dut.expect("OK", timeout=EXPECT_TIMEOUT) - dut.write("i2cset -c 0x5b -r 0x01 0x10") - dut.expect("OK", timeout=EXPECT_TIMEOUT) + dut.write('i2cset -c 0x5b -r 0xF4') + dut.expect('OK', timeout=EXPECT_TIMEOUT) + dut.write('i2cset -c 0x5b -r 0x01 0x10') + dut.expect('OK', timeout=EXPECT_TIMEOUT) # Get new status - dut.write("i2cget -c 0x5b -r 0x00 -l 1") - dut.expect_any("0x98", "0x90", timeout=EXPECT_TIMEOUT) + dut.write('i2cget -c 0x5b -r 0x00 -l 1') + dut.expect_any('0x98', '0x90', timeout=EXPECT_TIMEOUT) if __name__ == '__main__': diff --git a/examples/peripherals/i2s_adc_dac/tools/generate_audio_file.py b/examples/peripherals/i2s_adc_dac/tools/generate_audio_file.py index ac58fbc756..4b4b9ed1c9 100644 --- a/examples/peripherals/i2s_adc_dac/tools/generate_audio_file.py +++ b/examples/peripherals/i2s_adc_dac/tools/generate_audio_file.py @@ -1,43 +1,44 @@ from __future__ import print_function -from builtins import range + import os -import wave import struct +import wave +from builtins import range def get_wave_array_str(filename, target_bits): - wave_read = wave.open(filename, "r") - array_str = "" + wave_read = wave.open(filename, 'r') + array_str = '' nchannels, sampwidth, framerate, nframes, comptype, compname = wave_read.getparams() sampwidth *= 8 for i in range(wave_read.getnframes()): - val, = struct.unpack("', file=audio_table) print('const unsigned char audio_table[] = {', file=audio_table) for wav in wav_file_list: - print("processing: {}".format(wav)) + print('processing: {}'.format(wav)) print(get_wave_array_str(filename=wav, target_bits=scale_bits), file=audio_table) print('};\n', file=audio_table) - print("Done...") + print('Done...') if __name__ == '__main__': - print("Generating audio array...") + print('Generating audio array...') wav_list = [] - for filename in os.listdir("./"): - if filename.endswith(".wav"): + for filename in 
os.listdir('./'): + if filename.endswith('.wav'): wav_list.append(filename) - gen_wave_table(wav_file_list=wav_list, target_file_name="audio_example_file.h") + gen_wave_table(wav_file_list=wav_list, target_file_name='audio_example_file.h') diff --git a/examples/peripherals/rmt/ir_protocols/example_test.py b/examples/peripherals/rmt/ir_protocols/example_test.py index e61010e4f2..6501a42c48 100644 --- a/examples/peripherals/rmt/ir_protocols/example_test.py +++ b/examples/peripherals/rmt/ir_protocols/example_test.py @@ -8,19 +8,19 @@ EXPECT_TIMEOUT = 20 @ttfw_idf.idf_example_test(env_tag='Example_RMT_IR_PROTOCOLS') def test_examples_rmt_ir_protocols(env, extra_data): dut = env.get_dut('ir_protocols_example', 'examples/peripherals/rmt/ir_protocols', app_config_name='nec') - print("Using binary path: {}".format(dut.app.binary_path)) + print('Using binary path: {}'.format(dut.app.binary_path)) dut.start_app() - dut.expect("example: Send command 0x20 to address 0x10", timeout=EXPECT_TIMEOUT) - dut.expect("Scan Code --- addr: 0x0010 cmd: 0x0020", timeout=EXPECT_TIMEOUT) - dut.expect("Scan Code (repeat) --- addr: 0x0010 cmd: 0x0020", timeout=EXPECT_TIMEOUT) + dut.expect('example: Send command 0x20 to address 0x10', timeout=EXPECT_TIMEOUT) + dut.expect('Scan Code --- addr: 0x0010 cmd: 0x0020', timeout=EXPECT_TIMEOUT) + dut.expect('Scan Code (repeat) --- addr: 0x0010 cmd: 0x0020', timeout=EXPECT_TIMEOUT) env.close_dut(dut.name) dut = env.get_dut('ir_protocols_example', 'examples/peripherals/rmt/ir_protocols', app_config_name='rc5') - print("Using binary path: {}".format(dut.app.binary_path)) + print('Using binary path: {}'.format(dut.app.binary_path)) dut.start_app() - dut.expect("example: Send command 0x20 to address 0x10", timeout=EXPECT_TIMEOUT) - dut.expect("Scan Code --- addr: 0x0010 cmd: 0x0020", timeout=EXPECT_TIMEOUT) - dut.expect("Scan Code (repeat) --- addr: 0x0010 cmd: 0x0020", timeout=EXPECT_TIMEOUT) + dut.expect('example: Send command 0x20 to address 0x10', timeout=EXPECT_TIMEOUT) + dut.expect('Scan Code --- addr: 0x0010 cmd: 0x0020', timeout=EXPECT_TIMEOUT) + dut.expect('Scan Code (repeat) --- addr: 0x0010 cmd: 0x0020', timeout=EXPECT_TIMEOUT) env.close_dut(dut.name) diff --git a/examples/peripherals/sdio/sdio_test.py b/examples/peripherals/sdio/sdio_test.py index 4025d53946..07ec784a74 100644 --- a/examples/peripherals/sdio/sdio_test.py +++ b/examples/peripherals/sdio/sdio_test.py @@ -12,11 +12,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
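Aside from quote normalization, the recurring import reshuffle in these hunks groups `__future__` imports first, then the standard library, then third-party packages, with each block alphabetized and `import x` sorted together with `from x import y`. That matches isort's default ordering; the tool itself is an inference from the pattern, not named anywhere in the patch. A minimal sketch of the convention:

```python
# isort-style grouping (assumed convention behind these hunks)
from __future__ import print_function  # __future__ block comes first

import os  # standard library, alphabetized
import re

import ttfw_idf                   # third-party/project packages last;
from tiny_test_fw import Utility  # 'import x' and 'from x import y' sort together,
                                  # so 'import ttfw_idf' lands above 'from tiny_test_fw ...'
```

This ordering is why `import ttfw_idf` now precedes `from tiny_test_fw import TinyFW` in the sdio_test.py hunk below.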
-from tiny_test_fw import TinyFW import ttfw_idf +from tiny_test_fw import TinyFW -@ttfw_idf.idf_example_test(env_tag="Example_SDIO", ignore=True) +@ttfw_idf.idf_example_test(env_tag='Example_SDIO', ignore=True) def test_example_sdio_communication(env, extra_data): """ Configurations @@ -36,88 +36,88 @@ def test_example_sdio_communication(env, extra_data): or use sdio test board, which has two wrover modules connect to a same FT3232 Assume that first dut is host and second is slave """ - dut1 = env.get_dut("sdio_host", "examples/peripherals/sdio/host", dut_class=ttfw_idf.ESP32DUT) - dut2 = env.get_dut("sdio_slave", "examples/peripherals/sdio/slave", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('sdio_host', 'examples/peripherals/sdio/host', dut_class=ttfw_idf.ESP32DUT) + dut2 = env.get_dut('sdio_slave', 'examples/peripherals/sdio/slave', dut_class=ttfw_idf.ESP32DUT) dut1.start_app() # wait until the master is ready to setup the slave - dut1.expect("host ready, start initializing slave...") + dut1.expect('host ready, start initializing slave...') dut2.start_app() - dut1.expect("0a 0d 10 13 16 19 1c 1f 22 25 28 2b 2e 31 34 37") - dut1.expect("3a 3d 40 43 46 49 4c 4f 52 55 58 5b 00 00 00 00") - dut1.expect("6a 6d 70 73 76 79 7c 7f 82 85 88 8b 8e 91 94 97") - dut1.expect("9a 9d a0 a3 a6 a9 ac af b2 b5 b8 bb be c1 c4 c7") + dut1.expect('0a 0d 10 13 16 19 1c 1f 22 25 28 2b 2e 31 34 37') + dut1.expect('3a 3d 40 43 46 49 4c 4f 52 55 58 5b 00 00 00 00') + dut1.expect('6a 6d 70 73 76 79 7c 7f 82 85 88 8b 8e 91 94 97') + dut1.expect('9a 9d a0 a3 a6 a9 ac af b2 b5 b8 bb be c1 c4 c7') - dut2.expect("================ JOB_WRITE_REG ================") - dut2.expect("0a 0d 10 13 16 19 1c 1f 22 25 28 2b 2e 31 34 37") - dut2.expect("3a 3d 40 43 46 49 4c 4f 52 55 58 5b 00 00 00 00") - dut2.expect("6a 6d 70 73 76 79 7c 7f 82 85 88 8b 8e 91 94 97") - dut2.expect("9a 9d a0 a3 a6 a9 ac af b2 b5 b8 bb be c1 c4 c7") + dut2.expect('================ JOB_WRITE_REG ================') + dut2.expect('0a 0d 10 13 16 19 1c 1f 22 25 28 2b 2e 31 34 37') + dut2.expect('3a 3d 40 43 46 49 4c 4f 52 55 58 5b 00 00 00 00') + dut2.expect('6a 6d 70 73 76 79 7c 7f 82 85 88 8b 8e 91 94 97') + dut2.expect('9a 9d a0 a3 a6 a9 ac af b2 b5 b8 bb be c1 c4 c7') - dut1.expect("host int: 0") - dut1.expect("host int: 1") - dut1.expect("host int: 2") - dut1.expect("host int: 3") - dut1.expect("host int: 4") - dut1.expect("host int: 5") - dut1.expect("host int: 6") - dut1.expect("host int: 7") - dut1.expect("host int: 0") - dut1.expect("host int: 1") - dut1.expect("host int: 2") - dut1.expect("host int: 3") - dut1.expect("host int: 4") - dut1.expect("host int: 5") - dut1.expect("host int: 6") - dut1.expect("host int: 7") + dut1.expect('host int: 0') + dut1.expect('host int: 1') + dut1.expect('host int: 2') + dut1.expect('host int: 3') + dut1.expect('host int: 4') + dut1.expect('host int: 5') + dut1.expect('host int: 6') + dut1.expect('host int: 7') + dut1.expect('host int: 0') + dut1.expect('host int: 1') + dut1.expect('host int: 2') + dut1.expect('host int: 3') + dut1.expect('host int: 4') + dut1.expect('host int: 5') + dut1.expect('host int: 6') + dut1.expect('host int: 7') - dut2.expect("================ JOB_SEND_INT ================") - dut2.expect("================ JOB_SEND_INT ================") + dut2.expect('================ JOB_SEND_INT ================') + dut2.expect('================ JOB_SEND_INT ================') - dut1.expect("send packet length: 3") - dut1.expect("send packet length: 6") - dut1.expect("send packet length: 12") 
- dut1.expect("send packet length: 128") - dut1.expect("send packet length: 511") - dut1.expect("send packet length: 512") + dut1.expect('send packet length: 3') + dut1.expect('send packet length: 6') + dut1.expect('send packet length: 12') + dut1.expect('send packet length: 128') + dut1.expect('send packet length: 511') + dut1.expect('send packet length: 512') - dut2.expect("recv len: 3") - dut2.expect("recv len: 6") - dut2.expect("recv len: 12") - dut2.expect("recv len: 128") + dut2.expect('recv len: 3') + dut2.expect('recv len: 6') + dut2.expect('recv len: 12') + dut2.expect('recv len: 128') # 511 - dut2.expect("recv len: 128") - dut2.expect("recv len: 128") - dut2.expect("recv len: 128") - dut2.expect("recv len: 127") + dut2.expect('recv len: 128') + dut2.expect('recv len: 128') + dut2.expect('recv len: 128') + dut2.expect('recv len: 127') # 512 - dut2.expect("recv len: 128") - dut2.expect("recv len: 128") - dut2.expect("recv len: 128") - dut2.expect("recv len: 128") + dut2.expect('recv len: 128') + dut2.expect('recv len: 128') + dut2.expect('recv len: 128') + dut2.expect('recv len: 128') - dut1.expect("receive data, size: 3") - dut1.expect("receive data, size: 6") - dut1.expect("receive data, size: 12") - dut1.expect("receive data, size: 128") + dut1.expect('receive data, size: 3') + dut1.expect('receive data, size: 6') + dut1.expect('receive data, size: 12') + dut1.expect('receive data, size: 128') - dut1.expect("receive data, size: 128") - dut1.expect("receive data, size: 128") - dut1.expect("receive data, size: 128") - dut1.expect("receive data, size: 127") + dut1.expect('receive data, size: 128') + dut1.expect('receive data, size: 128') + dut1.expect('receive data, size: 128') + dut1.expect('receive data, size: 127') - dut1.expect("receive data, size: 128") - dut1.expect("receive data, size: 128") - dut1.expect("receive data, size: 128") - dut1.expect("receive data, size: 128") + dut1.expect('receive data, size: 128') + dut1.expect('receive data, size: 128') + dut1.expect('receive data, size: 128') + dut1.expect('receive data, size: 128') # the last valid line of one round - dut1.expect("ce d3 d8 dd e2 e7 ec f1 f6 fb 00 05 0a 0f 14 19") + dut1.expect('ce d3 d8 dd e2 e7 ec f1 f6 fb 00 05 0a 0f 14 19') # the first 2 lines of the second round - dut1.expect("46 4b 50") - dut1.expect("5a 5f 64 69 6e 73") + dut1.expect('46 4b 50') + dut1.expect('5a 5f 64 69 6e 73') if __name__ == '__main__': - TinyFW.set_default_config(env_config_file="EnvConfigTemplate.yml", dut=ttfw_idf.IDFDUT) + TinyFW.set_default_config(env_config_file='EnvConfigTemplate.yml', dut=ttfw_idf.IDFDUT) test_example_sdio_communication() diff --git a/examples/peripherals/twai/twai_alert_and_recovery/example_test.py b/examples/peripherals/twai/twai_alert_and_recovery/example_test.py index 5c534d8373..1554d725fc 100644 --- a/examples/peripherals/twai/twai_alert_and_recovery/example_test.py +++ b/examples/peripherals/twai/twai_alert_and_recovery/example_test.py @@ -4,7 +4,7 @@ from __future__ import print_function import ttfw_idf # TWAI Self Test Example constants -STR_EXPECT = ("TWAI Alert and Recovery: Driver installed", "TWAI Alert and Recovery: Driver uninstalled") +STR_EXPECT = ('TWAI Alert and Recovery: Driver installed', 'TWAI Alert and Recovery: Driver uninstalled') EXPECT_TIMEOUT = 20 diff --git a/examples/peripherals/twai/twai_network/example_test.py b/examples/peripherals/twai/twai_network/example_test.py index f79f409ef2..69a2515b31 100644 --- a/examples/peripherals/twai/twai_network/example_test.py +++ 
b/examples/peripherals/twai/twai_network/example_test.py @@ -6,9 +6,9 @@ from threading import Thread import ttfw_idf # Define tuple of strings to expect for each DUT. -master_expect = ("TWAI Master: Driver installed", "TWAI Master: Driver uninstalled") -slave_expect = ("TWAI Slave: Driver installed", "TWAI Slave: Driver uninstalled") -listen_only_expect = ("TWAI Listen Only: Driver installed", "TWAI Listen Only: Driver uninstalled") +master_expect = ('TWAI Master: Driver installed', 'TWAI Master: Driver uninstalled') +slave_expect = ('TWAI Slave: Driver installed', 'TWAI Slave: Driver uninstalled') +listen_only_expect = ('TWAI Listen Only: Driver installed', 'TWAI Listen Only: Driver uninstalled') def dut_thread_callback(**kwargs): @@ -31,11 +31,11 @@ def dut_thread_callback(**kwargs): def test_twai_network_example(env, extra_data): # Get device under test. "dut1", "dut2", and "dut3" must be properly defined in EnvConfig - dut_master = env.get_dut("dut1", "examples/peripherals/twai/twai_network/twai_network_master", + dut_master = env.get_dut('dut1', 'examples/peripherals/twai/twai_network/twai_network_master', dut_class=ttfw_idf.ESP32DUT) - dut_slave = env.get_dut("dut2", "examples/peripherals/twai/twai_network/twai_network_slave", + dut_slave = env.get_dut('dut2', 'examples/peripherals/twai/twai_network/twai_network_slave', dut_class=ttfw_idf.ESP32DUT) - dut_listen_only = env.get_dut("dut3", "examples/peripherals/twai/twai_network/twai_network_listen_only", + dut_listen_only = env.get_dut('dut3', 'examples/peripherals/twai/twai_network/twai_network_listen_only', dut_class=ttfw_idf.ESP32DUT) # Flash app onto each DUT, each DUT is reset again at the start of each thread @@ -45,14 +45,14 @@ def test_twai_network_example(env, extra_data): # Create dict of keyword arguments for each dut results = [[False], [False], [False]] - master_kwargs = {"dut": dut_master, "result": results[0], "expected": master_expect} - slave_kwargs = {"dut": dut_slave, "result": results[1], "expected": slave_expect} - listen_only_kwargs = {"dut": dut_listen_only, "result": results[2], "expected": listen_only_expect} + master_kwargs = {'dut': dut_master, 'result': results[0], 'expected': master_expect} + slave_kwargs = {'dut': dut_slave, 'result': results[1], 'expected': slave_expect} + listen_only_kwargs = {'dut': dut_listen_only, 'result': results[2], 'expected': listen_only_expect} # Create thread for each dut - dut_master_thread = Thread(target=dut_thread_callback, name="Master Thread", kwargs=master_kwargs) - dut_slave_thread = Thread(target=dut_thread_callback, name="Slave Thread", kwargs=slave_kwargs) - dut_listen_only_thread = Thread(target=dut_thread_callback, name="Listen Only Thread", kwargs=listen_only_kwargs) + dut_master_thread = Thread(target=dut_thread_callback, name='Master Thread', kwargs=master_kwargs) + dut_slave_thread = Thread(target=dut_thread_callback, name='Slave Thread', kwargs=slave_kwargs) + dut_listen_only_thread = Thread(target=dut_thread_callback, name='Listen Only Thread', kwargs=listen_only_kwargs) # Start each thread dut_listen_only_thread.start() @@ -67,7 +67,7 @@ def test_twai_network_example(env, extra_data): # check each thread ran to completion for result in results: if result[0] is not True: - raise Exception("One or more threads did not run successfully") + raise Exception('One or more threads did not run successfully') if __name__ == '__main__': diff --git a/examples/peripherals/twai/twai_self_test/example_test.py b/examples/peripherals/twai/twai_self_test/example_test.py 
index ce1cc95508..6b06f1a049 100644 --- a/examples/peripherals/twai/twai_self_test/example_test.py +++ b/examples/peripherals/twai/twai_self_test/example_test.py @@ -3,9 +3,8 @@ from __future__ import print_function import ttfw_idf - # TWAI Self Test Example constants -STR_EXPECT = ("TWAI Self Test: Driver installed", "TWAI Self Test: Driver uninstalled") +STR_EXPECT = ('TWAI Self Test: Driver installed', 'TWAI Self Test: Driver uninstalled') EXPECT_TIMEOUT = 20 diff --git a/examples/protocols/asio/chat_client/asio_chat_client_test.py b/examples/protocols/asio/chat_client/asio_chat_client_test.py index c906beae81..a161e9436c 100644 --- a/examples/protocols/asio/chat_client/asio_chat_client_test.py +++ b/examples/protocols/asio/chat_client/asio_chat_client_test.py @@ -1,21 +1,21 @@ -import re import os +import re import socket -from threading import Thread import time +from threading import Thread import ttfw_idf global g_client_response global g_msg_to_client -g_client_response = b"" -g_msg_to_client = b" 3XYZ" +g_client_response = b'' +g_msg_to_client = b' 3XYZ' def get_my_ip(): s1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s1.connect(("8.8.8.8", 80)) + s1.connect(('8.8.8.8', 80)) my_ip = s1.getsockname()[0] s1.close() return my_ip @@ -23,14 +23,14 @@ def get_my_ip(): def chat_server_sketch(my_ip): global g_client_response - print("Starting the server on {}".format(my_ip)) + print('Starting the server on {}'.format(my_ip)) port = 2222 s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) s.settimeout(600) s.bind((my_ip, port)) s.listen(1) q,addr = s.accept() - print("connection accepted") + print('connection accepted') q.settimeout(30) q.send(g_msg_to_client) data = q.recv(1024) @@ -39,12 +39,12 @@ def chat_server_sketch(my_ip): g_client_response = data else: g_client_response = q.recv(1024) - print("received from client {}".format(g_client_response)) + print('received from client {}'.format(g_client_response)) s.close() - print("server closed") + print('server closed') -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_asio_chat_client(env, extra_data): """ steps: | @@ -57,19 +57,19 @@ def test_examples_protocol_asio_chat_client(env, extra_data): """ global g_client_response global g_msg_to_client - test_msg = "ABC" - dut1 = env.get_dut("chat_client", "examples/protocols/asio/chat_client", dut_class=ttfw_idf.ESP32DUT) + test_msg = 'ABC' + dut1 = env.get_dut('chat_client', 'examples/protocols/asio/chat_client', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "asio_chat_client.bin") + binary_file = os.path.join(dut1.app.binary_path, 'asio_chat_client.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("asio_chat_client_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('asio_chat_client_size', '{}KB'.format(bin_size // 1024)) # 1. start a tcp server on the host host_ip = get_my_ip() thread1 = Thread(target=chat_server_sketch, args=(host_ip,)) thread1.start() # 2. start the dut test and wait till client gets IP address dut1.start_app() - dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30) + dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30) # 3. send host's IP to the client i.e. the `dut1` dut1.write(host_ip) # 4. 
client `dut1` should receive a message @@ -82,10 +82,10 @@ def test_examples_protocol_asio_chat_client(env, extra_data): print(g_client_response) # 6. evaluate host_server received this message if (g_client_response[4:7] == test_msg): - print("PASS: Received correct message") + print('PASS: Received correct message') pass else: - print("Failure!") + print('Failure!') raise ValueError('Wrong data received from asi tcp server: {} (expected:{})'.format(g_client_response[4:7], test_msg)) thread1.join() diff --git a/examples/protocols/asio/chat_server/asio_chat_server_test.py b/examples/protocols/asio/chat_server/asio_chat_server_test.py index eae8f0cb4a..6577aba064 100644 --- a/examples/protocols/asio/chat_server/asio_chat_server_test.py +++ b/examples/protocols/asio/chat_server/asio_chat_server_test.py @@ -1,11 +1,11 @@ -import re import os +import re import socket import ttfw_idf -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_asio_chat_server(env, extra_data): """ steps: | @@ -14,16 +14,16 @@ def test_examples_protocol_asio_chat_server(env, extra_data): 3. Test connects to server and sends a test message 4. Test evaluates received test message from server """ - test_msg = b" 4ABC\n" - dut1 = env.get_dut("chat_server", "examples/protocols/asio/chat_server", dut_class=ttfw_idf.ESP32DUT) + test_msg = b' 4ABC\n' + dut1 = env.get_dut('chat_server', 'examples/protocols/asio/chat_server', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "asio_chat_server.bin") + binary_file = os.path.join(dut1.app.binary_path, 'asio_chat_server.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("asio_chat_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('asio_chat_server_bin_size', '{}KB'.format(bin_size // 1024)) # 1. start test dut1.start_app() # 2. get the server IP address - data = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30) + data = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30) # 3. create tcp client and connect to server cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM) cli.settimeout(30) @@ -32,10 +32,10 @@ def test_examples_protocol_asio_chat_server(env, extra_data): data = cli.recv(1024) # 4. 
check the message received back from the server if (data == test_msg): - print("PASS: Received correct message {}".format(data)) + print('PASS: Received correct message {}'.format(data)) pass else: - print("Failure!") + print('Failure!') raise ValueError('Wrong data received from asi tcp server: {} (expoected:{})'.format(data, test_msg)) diff --git a/examples/protocols/asio/ssl_client_server/example_test.py b/examples/protocols/asio/ssl_client_server/example_test.py index 876eb01b65..975adb8503 100644 --- a/examples/protocols/asio/ssl_client_server/example_test.py +++ b/examples/protocols/asio/ssl_client_server/example_test.py @@ -1,4 +1,5 @@ from __future__ import unicode_literals + import ttfw_idf diff --git a/examples/protocols/asio/tcp_echo_server/asio_tcp_server_test.py b/examples/protocols/asio/tcp_echo_server/asio_tcp_server_test.py index a2fa883efb..135efcd36a 100644 --- a/examples/protocols/asio/tcp_echo_server/asio_tcp_server_test.py +++ b/examples/protocols/asio/tcp_echo_server/asio_tcp_server_test.py @@ -1,11 +1,11 @@ -import re import os +import re import socket import ttfw_idf -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_asio_tcp_server(env, extra_data): """ steps: | @@ -15,16 +15,16 @@ def test_examples_protocol_asio_tcp_server(env, extra_data): 4. Test evaluates received test message from server 5. Test evaluates received test message on server stdout """ - test_msg = b"echo message from client to server" - dut1 = env.get_dut("tcp_echo_server", "examples/protocols/asio/tcp_echo_server", dut_class=ttfw_idf.ESP32DUT) + test_msg = b'echo message from client to server' + dut1 = env.get_dut('tcp_echo_server', 'examples/protocols/asio/tcp_echo_server', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "asio_tcp_echo_server.bin") + binary_file = os.path.join(dut1.app.binary_path, 'asio_tcp_echo_server.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("asio_tcp_echo_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('asio_tcp_echo_server_bin_size', '{}KB'.format(bin_size // 1024)) # 1. start test dut1.start_app() # 2. get the server IP address - data = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30) + data = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30) # 3. create tcp client and connect to server cli = socket.socket(socket.AF_INET, socket.SOCK_STREAM) cli.settimeout(30) @@ -33,10 +33,10 @@ def test_examples_protocol_asio_tcp_server(env, extra_data): data = cli.recv(1024) # 4. check the message received back from the server if (data == test_msg): - print("PASS: Received correct message") + print('PASS: Received correct message') pass else: - print("Failure!") + print('Failure!') raise ValueError('Wrong data received from asi tcp server: {} (expected:{})'.format(data, test_msg)) # 5. 
check the client message appears also on server terminal dut1.expect(test_msg.decode()) diff --git a/examples/protocols/asio/udp_echo_server/asio_udp_server_test.py b/examples/protocols/asio/udp_echo_server/asio_udp_server_test.py index b83d3cce28..73b15a720a 100644 --- a/examples/protocols/asio/udp_echo_server/asio_udp_server_test.py +++ b/examples/protocols/asio/udp_echo_server/asio_udp_server_test.py @@ -1,11 +1,11 @@ -import re import os +import re import socket import ttfw_idf -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_asio_udp_server(env, extra_data): """ steps: | @@ -15,16 +15,16 @@ def test_examples_protocol_asio_udp_server(env, extra_data): 4. Test evaluates received test message from server 5. Test evaluates received test message on server stdout """ - test_msg = b"echo message from client to server" - dut1 = env.get_dut("udp_echo_server", "examples/protocols/asio/udp_echo_server", dut_class=ttfw_idf.ESP32DUT) + test_msg = b'echo message from client to server' + dut1 = env.get_dut('udp_echo_server', 'examples/protocols/asio/udp_echo_server', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "asio_udp_echo_server.bin") + binary_file = os.path.join(dut1.app.binary_path, 'asio_udp_echo_server.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("asio_udp_echo_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('asio_udp_echo_server_bin_size', '{}KB'.format(bin_size // 1024)) # 1. start test dut1.start_app() # 2. get the server IP address - data = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30) + data = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30) # 3. create tcp client and connect to server cli = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) cli.settimeout(30) @@ -33,10 +33,10 @@ def test_examples_protocol_asio_udp_server(env, extra_data): data = cli.recv(1024) # 4. check the message received back from the server if (data == test_msg): - print("PASS: Received correct message") + print('PASS: Received correct message') pass else: - print("Failure!") + print('Failure!') raise ValueError('Wrong data received from asio udp server: {} (expected:{})'.format(data, test_msg)) # 5. check the client message appears also on server terminal dut1.expect(test_msg.decode()) diff --git a/examples/protocols/cbor/example_test.py b/examples/protocols/cbor/example_test.py index 0d7887a8c0..8ffac522ab 100644 --- a/examples/protocols/cbor/example_test.py +++ b/examples/protocols/cbor/example_test.py @@ -1,6 +1,8 @@ from __future__ import unicode_literals + import re import textwrap + import ttfw_idf diff --git a/examples/protocols/esp_http_client/esp_http_client_test.py b/examples/protocols/esp_http_client/esp_http_client_test.py index 7ce194c266..103f4a9b71 100644 --- a/examples/protocols/esp_http_client/esp_http_client_test.py +++ b/examples/protocols/esp_http_client/esp_http_client_test.py @@ -1,64 +1,64 @@ -import re import os +import re import ttfw_idf -@ttfw_idf.idf_example_test(env_tag="Example_EthKitV1") +@ttfw_idf.idf_example_test(env_tag='Example_EthKitV1') def test_examples_protocol_esp_http_client(env, extra_data): """ steps: | 1. join AP 2. 
Send HTTP request to httpbin.org """ - dut1 = env.get_dut("esp_http_client", "examples/protocols/esp_http_client", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('esp_http_client', 'examples/protocols/esp_http_client', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "esp-http-client-example.bin") + binary_file = os.path.join(dut1.app.binary_path, 'esp-http-client-example.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("esp_http_client_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('esp_http_client_bin_size', '{}KB'.format(bin_size // 1024)) # start test dut1.start_app() - dut1.expect("Connected to AP, begin http example", timeout=30) - dut1.expect(re.compile(r"HTTP GET Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP POST Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP PUT Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP PATCH Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP DELETE Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP HEAD Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP Basic Auth Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP Basic Auth redirect Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP Digest Auth Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTPS Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP chunk encoding Status = 200, content_length = (-?\d)")) + dut1.expect('Connected to AP, begin http example', timeout=30) + dut1.expect(re.compile(r'HTTP GET Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP POST Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP PUT Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP PATCH Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP DELETE Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP HEAD Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP Basic Auth Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP Basic Auth redirect Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP Digest Auth Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTPS Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP chunk encoding Status = 200, content_length = (-?\d)')) # content-len for chunked encoding is typically -1, could be a positive length in some cases - dut1.expect(re.compile(r"HTTP Stream reader Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"Last esp error code: 0x8001")) - dut1.expect("Finish http example") + dut1.expect(re.compile(r'HTTP Stream reader Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'Last esp error code: 0x8001')) + dut1.expect('Finish http example') # test mbedtls dynamic resource - dut1 = env.get_dut("esp_http_client", "examples/protocols/esp_http_client", dut_class=ttfw_idf.ESP32DUT, app_config_name='ssldyn') + dut1 = env.get_dut('esp_http_client', 'examples/protocols/esp_http_client', dut_class=ttfw_idf.ESP32DUT, app_config_name='ssldyn') # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "esp-http-client-example.bin") + binary_file = os.path.join(dut1.app.binary_path, 'esp-http-client-example.bin') bin_size = os.path.getsize(binary_file) - 
ttfw_idf.log_performance("esp_http_client_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('esp_http_client_bin_size', '{}KB'.format(bin_size // 1024)) # start test dut1.start_app() - dut1.expect("Connected to AP, begin http example", timeout=30) - dut1.expect(re.compile(r"HTTP GET Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP POST Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP PUT Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP PATCH Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP DELETE Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP HEAD Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP Basic Auth Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP Basic Auth redirect Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP Digest Auth Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTPS Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"HTTP chunk encoding Status = 200, content_length = (-?\d)")) + dut1.expect('Connected to AP, begin http example', timeout=30) + dut1.expect(re.compile(r'HTTP GET Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP POST Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP PUT Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP PATCH Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP DELETE Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP HEAD Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP Basic Auth Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP Basic Auth redirect Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP Digest Auth Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTPS Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'HTTP chunk encoding Status = 200, content_length = (-?\d)')) # content-len for chunked encoding is typically -1, could be a positive length in some cases - dut1.expect(re.compile(r"HTTP Stream reader Status = 200, content_length = (\d)")) - dut1.expect(re.compile(r"Last esp error code: 0x8001")) - dut1.expect("Finish http example") + dut1.expect(re.compile(r'HTTP Stream reader Status = 200, content_length = (\d)')) + dut1.expect(re.compile(r'Last esp error code: 0x8001')) + dut1.expect('Finish http example') if __name__ == '__main__': diff --git a/examples/protocols/esp_local_ctrl/example_test.py b/examples/protocols/esp_local_ctrl/example_test.py index 49143a3899..f76fcca456 100644 --- a/examples/protocols/esp_local_ctrl/example_test.py +++ b/examples/protocols/esp_local_ctrl/example_test.py @@ -1,7 +1,9 @@ from __future__ import unicode_literals + import os import re import sys + import ttfw_idf diff --git a/examples/protocols/esp_local_ctrl/scripts/esp_local_ctrl.py b/examples/protocols/esp_local_ctrl/scripts/esp_local_ctrl.py index 749d74ce88..95979b401f 100644 --- a/examples/protocols/esp_local_ctrl/scripts/esp_local_ctrl.py +++ b/examples/protocols/esp_local_ctrl/scripts/esp_local_ctrl.py @@ -16,22 +16,22 @@ # from __future__ import print_function -from future.utils import tobytes -from builtins import input -import os -import sys -import struct + import argparse +import os import ssl +import struct +import sys +from builtins import input import proto +from future.utils import tobytes # The tools 
directory is already in the PATH in environment prepared by install.sh which would allow to import # esp_prov as file but not as complete module. sys.path.insert(0, os.path.join(os.environ['IDF_PATH'], 'tools/esp_prov')) import esp_prov # noqa: E402 - # Set this to true to allow exceptions to be thrown config_throw_except = False @@ -48,26 +48,26 @@ PROP_FLAG_READONLY = (1 << 0) def prop_typestr(prop): - if prop["type"] == PROP_TYPE_TIMESTAMP: - return "TIME(us)" - elif prop["type"] == PROP_TYPE_INT32: - return "INT32" - elif prop["type"] == PROP_TYPE_BOOLEAN: - return "BOOLEAN" - elif prop["type"] == PROP_TYPE_STRING: - return "STRING" - return "UNKNOWN" + if prop['type'] == PROP_TYPE_TIMESTAMP: + return 'TIME(us)' + elif prop['type'] == PROP_TYPE_INT32: + return 'INT32' + elif prop['type'] == PROP_TYPE_BOOLEAN: + return 'BOOLEAN' + elif prop['type'] == PROP_TYPE_STRING: + return 'STRING' + return 'UNKNOWN' def encode_prop_value(prop, value): try: - if prop["type"] == PROP_TYPE_TIMESTAMP: + if prop['type'] == PROP_TYPE_TIMESTAMP: return struct.pack('q', value) - elif prop["type"] == PROP_TYPE_INT32: + elif prop['type'] == PROP_TYPE_INT32: return struct.pack('i', value) - elif prop["type"] == PROP_TYPE_BOOLEAN: + elif prop['type'] == PROP_TYPE_BOOLEAN: return struct.pack('?', value) - elif prop["type"] == PROP_TYPE_STRING: + elif prop['type'] == PROP_TYPE_STRING: return tobytes(value) return value except struct.error as e: @@ -77,13 +77,13 @@ def encode_prop_value(prop, value): def decode_prop_value(prop, value): try: - if prop["type"] == PROP_TYPE_TIMESTAMP: + if prop['type'] == PROP_TYPE_TIMESTAMP: return struct.unpack('q', value)[0] - elif prop["type"] == PROP_TYPE_INT32: + elif prop['type'] == PROP_TYPE_INT32: return struct.unpack('i', value)[0] - elif prop["type"] == PROP_TYPE_BOOLEAN: + elif prop['type'] == PROP_TYPE_BOOLEAN: return struct.unpack('?', value)[0] - elif prop["type"] == PROP_TYPE_STRING: + elif prop['type'] == PROP_TYPE_STRING: return value.decode('latin-1') return value except struct.error as e: @@ -93,13 +93,13 @@ def decode_prop_value(prop, value): def str_to_prop_value(prop, strval): try: - if prop["type"] == PROP_TYPE_TIMESTAMP: + if prop['type'] == PROP_TYPE_TIMESTAMP: return int(strval) - elif prop["type"] == PROP_TYPE_INT32: + elif prop['type'] == PROP_TYPE_INT32: return int(strval) - elif prop["type"] == PROP_TYPE_BOOLEAN: + elif prop['type'] == PROP_TYPE_BOOLEAN: return bool(strval) - elif prop["type"] == PROP_TYPE_STRING: + elif prop['type'] == PROP_TYPE_STRING: return strval return strval except ValueError as e: @@ -108,7 +108,7 @@ def str_to_prop_value(prop, strval): def prop_is_readonly(prop): - return (prop["flags"] & PROP_FLAG_READONLY) != 0 + return (prop['flags'] & PROP_FLAG_READONLY) != 0 def on_except(err): @@ -122,8 +122,8 @@ def get_transport(sel_transport, service_name, check_hostname): try: tp = None if (sel_transport == 'http'): - example_path = os.environ['IDF_PATH'] + "/examples/protocols/esp_local_ctrl" - cert_path = example_path + "/main/certs/rootCA.pem" + example_path = os.environ['IDF_PATH'] + '/examples/protocols/esp_local_ctrl' + cert_path = example_path + '/main/certs/rootCA.pem' ssl_ctx = ssl.create_default_context(cafile=cert_path) ssl_ctx.check_hostname = check_hostname tp = esp_prov.transport.Transport_HTTP(service_name, ssl_ctx) @@ -156,15 +156,15 @@ def get_all_property_values(tp): response = tp.send_data('esp_local_ctrl/control', message) count = proto.get_prop_count_response(response) if count == 0: - raise RuntimeError("No 
properties found!") + raise RuntimeError('No properties found!') indices = [i for i in range(count)] message = proto.get_prop_vals_request(indices) response = tp.send_data('esp_local_ctrl/control', message) props = proto.get_prop_vals_response(response) if len(props) != count: - raise RuntimeError("Incorrect count of properties!") + raise RuntimeError('Incorrect count of properties!') for p in props: - p["value"] = decode_prop_value(p, p["value"]) + p['value'] = decode_prop_value(p, p['value']) return props except RuntimeError as e: on_except(e) @@ -176,7 +176,7 @@ def set_property_values(tp, props, indices, values, check_readonly=False): if check_readonly: for index in indices: if prop_is_readonly(props[index]): - raise RuntimeError("Cannot set value of Read-Only property") + raise RuntimeError('Cannot set value of Read-Only property') message = proto.set_prop_vals_request(indices, values) response = tp.send_data('esp_local_ctrl/control', message) return proto.set_prop_vals_response(response) @@ -188,27 +188,27 @@ def set_property_values(tp, props, indices, values, check_readonly=False): if __name__ == '__main__': parser = argparse.ArgumentParser(add_help=False) - parser = argparse.ArgumentParser(description="Control an ESP32 running esp_local_ctrl service") + parser = argparse.ArgumentParser(description='Control an ESP32 running esp_local_ctrl service') - parser.add_argument("--version", dest='version', type=str, - help="Protocol version", default='') + parser.add_argument('--version', dest='version', type=str, + help='Protocol version', default='') - parser.add_argument("--transport", dest='transport', type=str, - help="transport i.e http or ble", default='http') + parser.add_argument('--transport', dest='transport', type=str, + help='transport i.e http or ble', default='http') - parser.add_argument("--name", dest='service_name', type=str, - help="BLE Device Name / HTTP Server hostname or IP", default='') + parser.add_argument('--name', dest='service_name', type=str, + help='BLE Device Name / HTTP Server hostname or IP', default='') - parser.add_argument("--dont-check-hostname", action="store_true", + parser.add_argument('--dont-check-hostname', action='store_true', # If enabled, the certificate won't be rejected for hostname mismatch. # This option is hidden because it should be used only for testing purposes. 
help=argparse.SUPPRESS) - parser.add_argument("-v", "--verbose", dest='verbose', help="increase output verbosity", action="store_true") + parser.add_argument('-v', '--verbose', dest='verbose', help='increase output verbosity', action='store_true') args = parser.parse_args() if args.version != '': - print("==== Esp_Ctrl Version: " + args.version + " ====") + print('==== Esp_Ctrl Version: ' + args.version + ' ====') if args.service_name == '': args.service_name = 'my_esp_ctrl_device' @@ -217,45 +217,45 @@ if __name__ == '__main__': obj_transport = get_transport(args.transport, args.service_name, not args.dont_check_hostname) if obj_transport is None: - print("---- Invalid transport ----") + print('---- Invalid transport ----') exit(1) if args.version != '': - print("\n==== Verifying protocol version ====") + print('\n==== Verifying protocol version ====') if not version_match(obj_transport, args.version, args.verbose): - print("---- Error in protocol version matching ----") + print('---- Error in protocol version matching ----') exit(2) - print("==== Verified protocol version successfully ====") + print('==== Verified protocol version successfully ====') while True: properties = get_all_property_values(obj_transport) if len(properties) == 0: - print("---- Error in reading property values ----") + print('---- Error in reading property values ----') exit(4) - print("\n==== Available Properties ====") - print("{0: >4} {1: <16} {2: <10} {3: <16} {4: <16}".format( - "S.N.", "Name", "Type", "Flags", "Value")) + print('\n==== Available Properties ====') + print('{0: >4} {1: <16} {2: <10} {3: <16} {4: <16}'.format( + 'S.N.', 'Name', 'Type', 'Flags', 'Value')) for i in range(len(properties)): - print("[{0: >2}] {1: <16} {2: <10} {3: <16} {4: <16}".format( - i + 1, properties[i]["name"], prop_typestr(properties[i]), - ["","Read-Only"][prop_is_readonly(properties[i])], - str(properties[i]["value"]))) + print('[{0: >2}] {1: <16} {2: <10} {3: <16} {4: <16}'.format( + i + 1, properties[i]['name'], prop_typestr(properties[i]), + ['','Read-Only'][prop_is_readonly(properties[i])], + str(properties[i]['value']))) select = 0 while True: try: inval = input("\nSelect properties to set (0 to re-read, 'q' to quit) : ") if inval.lower() == 'q': - print("Quitting...") + print('Quitting...') exit(5) invals = inval.split(',') selections = [int(val) for val in invals] if min(selections) < 0 or max(selections) > len(properties): - raise ValueError("Invalid input") + raise ValueError('Invalid input') break except ValueError as e: - print(str(e) + "! Retry...") + print(str(e) + '! Retry...') if len(selections) == 1 and selections[0] == 0: continue @@ -264,15 +264,15 @@ if __name__ == '__main__': set_indices = [] for select in selections: while True: - inval = input("Enter value to set for property (" + properties[select - 1]["name"] + ") : ") + inval = input('Enter value to set for property (' + properties[select - 1]['name'] + ') : ') value = encode_prop_value(properties[select - 1], str_to_prop_value(properties[select - 1], inval)) if value is None: - print("Invalid input! Retry...") + print('Invalid input! 
Retry...') continue break set_values += [value] set_indices += [select - 1] if not set_property_values(obj_transport, properties, set_indices, set_values): - print("Failed to set values!") + print('Failed to set values!') diff --git a/examples/protocols/esp_local_ctrl/scripts/proto.py b/examples/protocols/esp_local_ctrl/scripts/proto.py index 15dda7ab4b..dcad41d1b9 100644 --- a/examples/protocols/esp_local_ctrl/scripts/proto.py +++ b/examples/protocols/esp_local_ctrl/scripts/proto.py @@ -15,9 +15,11 @@ from __future__ import print_function -from future.utils import tobytes + import os +from future.utils import tobytes + def _load_source(name, path): try: @@ -30,8 +32,8 @@ def _load_source(name, path): idf_path = os.environ['IDF_PATH'] -constants_pb2 = _load_source("constants_pb2", idf_path + "/components/protocomm/python/constants_pb2.py") -local_ctrl_pb2 = _load_source("esp_local_ctrl_pb2", idf_path + "/components/esp_local_ctrl/python/esp_local_ctrl_pb2.py") +constants_pb2 = _load_source('constants_pb2', idf_path + '/components/protocomm/python/constants_pb2.py') +local_ctrl_pb2 = _load_source('esp_local_ctrl_pb2', idf_path + '/components/esp_local_ctrl/python/esp_local_ctrl_pb2.py') def get_prop_count_request(): @@ -67,10 +69,10 @@ def get_prop_vals_response(response_data): if (resp.resp_get_prop_vals.status == 0): for prop in resp.resp_get_prop_vals.props: results += [{ - "name": prop.name, - "type": prop.type, - "flags": prop.flags, - "value": tobytes(prop.value) + 'name': prop.name, + 'type': prop.type, + 'flags': prop.flags, + 'value': tobytes(prop.value) }] return results diff --git a/examples/protocols/http_server/advanced_tests/http_server_advanced_test.py b/examples/protocols/http_server/advanced_tests/http_server_advanced_test.py index 466eac6262..26ce155b0e 100644 --- a/examples/protocols/http_server/advanced_tests/http_server_advanced_test.py +++ b/examples/protocols/http_server/advanced_tests/http_server_advanced_test.py @@ -14,15 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -import re -import os +from __future__ import division, print_function, unicode_literals + +import os +import re -from tiny_test_fw import Utility import ttfw_idf from idf_http_server_test import test as client +from tiny_test_fw import Utility # When running on local machine execute the following before running this script # > make app bootloader @@ -36,23 +35,23 @@ from idf_http_server_test import test as client # features to this component. 
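The http_server hunks that follow retouch only string quoting, so it may help to see the TTFW test shape they leave untouched. Below is a minimal sketch built from the same calls the patch exercises (`env.get_dut`, `start_app`, `expect` with a compiled regex); the env_tag, app name, path, and log line are illustrative placeholders, not taken from any one example:

```python
import re

import ttfw_idf
from tiny_test_fw import Utility


@ttfw_idf.idf_example_test(env_tag='Example_WIFI')
def test_example_sketch(env, extra_data):
    # Acquire the DUT by app name and app path relative to IDF (placeholders)
    dut = env.get_dut('my_app', 'examples/path/to/my_app', dut_class=ttfw_idf.ESP32DUT)
    dut.start_app()
    # expect() accepts a plain string or a compiled pattern; with a pattern,
    # the capture groups are returned, so [0] below is the first group
    got_ip = dut.expect(re.compile(r'IPv4 address: (\d+\.\d+\.\d+\.\d+)'), timeout=30)[0]
    Utility.console_log('Got IP : ' + got_ip)
```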
-@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_http_server_advanced(env, extra_data): # Acquire DUT - dut1 = env.get_dut("http_server", "examples/protocols/http_server/advanced_tests", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('http_server', 'examples/protocols/http_server/advanced_tests', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut1.app.binary_path, "tests.bin") + binary_file = os.path.join(dut1.app.binary_path, 'tests.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('http_server_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing - Utility.console_log("Starting http_server advanced test app") + Utility.console_log('Starting http_server advanced test app') dut1.start_app() # Parse IP address of STA - Utility.console_log("Waiting to connect with AP") - got_ip = dut1.expect(re.compile(r"(?:[\s\S]*)IPv4 address: (\d+.\d+.\d+.\d+)"), timeout=30)[0] + Utility.console_log('Waiting to connect with AP') + got_ip = dut1.expect(re.compile(r'(?:[\s\S]*)IPv4 address: (\d+.\d+.\d+.\d+)'), timeout=30)[0] got_port = dut1.expect(re.compile(r"(?:[\s\S]*)Started HTTP server on port: '(\d+)'"), timeout=15)[0] result = dut1.expect(re.compile(r"(?:[\s\S]*)Max URI handlers: '(\d+)'(?:[\s\S]*)Max Open Sessions: " # noqa: W605 @@ -64,18 +63,18 @@ def test_examples_protocol_http_server_advanced(env, extra_data): max_uri_len = int(result[3]) max_stack_size = int(result[4]) - Utility.console_log("Got IP : " + got_ip) - Utility.console_log("Got Port : " + got_port) + Utility.console_log('Got IP : ' + got_ip) + Utility.console_log('Got Port : ' + got_port) # Run test script # If failed raise appropriate exception failed = False - Utility.console_log("Sessions and Context Tests...") + Utility.console_log('Sessions and Context Tests...') if not client.spillover_session(got_ip, got_port, max_sessions): - Utility.console_log("Ignoring failure") + Utility.console_log('Ignoring failure') if not client.parallel_sessions_adder(got_ip, got_port, max_sessions): - Utility.console_log("Ignoring failure") + Utility.console_log('Ignoring failure') if not client.leftover_data_test(got_ip, got_port): failed = True if not client.async_response_test(got_ip, got_port): @@ -90,17 +89,17 @@ def test_examples_protocol_http_server_advanced(env, extra_data): # if not client.packet_size_limit_test(got_ip, got_port, test_size): # Utility.console_log("Ignoring failure") - Utility.console_log("Getting initial stack usage...") + Utility.console_log('Getting initial stack usage...') if not client.get_hello(got_ip, got_port): failed = True inital_stack = int(dut1.expect(re.compile(r"(?:[\s\S]*)Free Stack for server task: '(\d+)'"), timeout=15)[0]) if inital_stack < 0.1 * max_stack_size: - Utility.console_log("More than 90% of stack being used on server start") + Utility.console_log('More than 90% of stack being used on server start') failed = True - Utility.console_log("Basic HTTP Client Tests...") + Utility.console_log('Basic HTTP Client Tests...') if not client.get_hello(got_ip, got_port): failed = True if not client.post_hello(got_ip, got_port): @@ -122,7 +121,7 @@ def test_examples_protocol_http_server_advanced(env, extra_data): if not client.get_test_headers(got_ip, got_port): failed = True - Utility.console_log("Error code tests...") + Utility.console_log('Error code tests...') if not 
client.code_500_server_error_test(got_ip, got_port): failed = True if not client.code_501_method_not_impl(got_ip, got_port): @@ -138,20 +137,20 @@ def test_examples_protocol_http_server_advanced(env, extra_data): if not client.code_408_req_timeout(got_ip, got_port): failed = True if not client.code_414_uri_too_long(got_ip, got_port, max_uri_len): - Utility.console_log("Ignoring failure") + Utility.console_log('Ignoring failure') if not client.code_431_hdr_too_long(got_ip, got_port, max_hdr_len): - Utility.console_log("Ignoring failure") + Utility.console_log('Ignoring failure') if not client.test_upgrade_not_supported(got_ip, got_port): failed = True - Utility.console_log("Getting final stack usage...") + Utility.console_log('Getting final stack usage...') if not client.get_hello(got_ip, got_port): failed = True final_stack = int(dut1.expect(re.compile(r"(?:[\s\S]*)Free Stack for server task: '(\d+)'"), timeout=15)[0]) if final_stack < 0.05 * max_stack_size: - Utility.console_log("More than 95% of stack got used during tests") + Utility.console_log('More than 95% of stack got used during tests') failed = True if failed: diff --git a/examples/protocols/http_server/advanced_tests/scripts/test.py b/examples/protocols/http_server/advanced_tests/scripts/test.py index 4196ff532a..dc43fdd197 100644 --- a/examples/protocols/http_server/advanced_tests/scripts/test.py +++ b/examples/protocols/http_server/advanced_tests/scripts/test.py @@ -129,20 +129,17 @@ # - Simple GET on /hello/restart_results (returns the leak results) -from __future__ import division -from __future__ import print_function -from builtins import str -from builtins import range -from builtins import object -import threading -import socket -import time +from __future__ import division, print_function + import argparse import http.client -import sys -import string import random - +import socket +import string +import sys +import threading +import time +from builtins import object, range, str try: import Utility @@ -151,7 +148,7 @@ except ImportError: # This environment variable is expected on the host machine # > export TEST_FW_PATH=~/esp/esp-idf/tools/tiny-test-fw - test_fw_path = os.getenv("TEST_FW_PATH") + test_fw_path = os.getenv('TEST_FW_PATH') if test_fw_path and test_fw_path not in sys.path: sys.path.insert(0, test_fw_path) @@ -177,32 +174,32 @@ class Session(object): self.client.sendall(data.encode()) except socket.error as err: self.client.close() - Utility.console_log("Socket Error in send :", err) + Utility.console_log('Socket Error in send :', err) rval = False return rval def send_get(self, path, headers=None): - request = "GET " + path + " HTTP/1.1\r\nHost: " + self.target + request = 'GET ' + path + ' HTTP/1.1\r\nHost: ' + self.target if headers: for field, value in headers.items(): - request += "\r\n" + field + ": " + value - request += "\r\n\r\n" + request += '\r\n' + field + ': ' + value + request += '\r\n\r\n' return self.send_err_check(request) def send_put(self, path, data, headers=None): - request = "PUT " + path + " HTTP/1.1\r\nHost: " + self.target + request = 'PUT ' + path + ' HTTP/1.1\r\nHost: ' + self.target if headers: for field, value in headers.items(): - request += "\r\n" + field + ": " + value - request += "\r\nContent-Length: " + str(len(data)) + "\r\n\r\n" + request += '\r\n' + field + ': ' + value + request += '\r\nContent-Length: ' + str(len(data)) + '\r\n\r\n' return self.send_err_check(request, data) def send_post(self, path, data, headers=None): - request = "POST " + path + " HTTP/1.1\r\nHost: 
" + self.target + request = 'POST ' + path + ' HTTP/1.1\r\nHost: ' + self.target if headers: for field, value in headers.items(): - request += "\r\n" + field + ": " + value - request += "\r\nContent-Length: " + str(len(data)) + "\r\n\r\n" + request += '\r\n' + field + ': ' + value + request += '\r\nContent-Length: ' + str(len(data)) + '\r\n\r\n' return self.send_err_check(request, data) def read_resp_hdrs(self): @@ -246,7 +243,7 @@ class Session(object): return headers except socket.error as err: self.client.close() - Utility.console_log("Socket Error in recv :", err) + Utility.console_log('Socket Error in recv :', err) return None def read_resp_data(self): @@ -275,9 +272,9 @@ class Session(object): rem_len -= len(new_data) chunk_data_buf = '' # Fetch remaining CRLF - if self.client.recv(2) != "\r\n": + if self.client.recv(2) != '\r\n': # Error in packet - Utility.console_log("Error in chunked data") + Utility.console_log('Error in chunked data') return None if not chunk_len: # If last chunk @@ -290,7 +287,7 @@ class Session(object): return read_data except socket.error as err: self.client.close() - Utility.console_log("Socket Error in recv :", err) + Utility.console_log('Socket Error in recv :', err) return None def close(self): @@ -299,10 +296,10 @@ class Session(object): def test_val(text, expected, received): if expected != received: - Utility.console_log(" Fail!") - Utility.console_log(" [reason] " + text + ":") - Utility.console_log(" expected: " + str(expected)) - Utility.console_log(" received: " + str(received)) + Utility.console_log(' Fail!') + Utility.console_log(' [reason] ' + text + ':') + Utility.console_log(' expected: ' + str(expected)) + Utility.console_log(' received: ' + str(received)) return False return True @@ -320,7 +317,7 @@ class adder_thread (threading.Thread): # Pipeline 3 requests if (_verbose_): - Utility.console_log(" Thread: Using adder start " + str(self.id)) + Utility.console_log(' Thread: Using adder start ' + str(self.id)) for _ in range(self.depth): self.session.send_post('/adder', str(self.id)) @@ -332,10 +329,10 @@ class adder_thread (threading.Thread): def adder_result(self): if len(self.response) != self.depth: - Utility.console_log("Error : missing response packets") + Utility.console_log('Error : missing response packets') return False for i in range(len(self.response)): - if not test_val("Thread" + str(self.id) + " response[" + str(i) + "]", + if not test_val('Thread' + str(self.id) + ' response[' + str(i) + ']', str(self.id * (i + 1)), str(self.response[i])): return False return True @@ -348,177 +345,177 @@ def get_hello(dut, port): # GET /hello should return 'Hello World!' Utility.console_log("[test] GET /hello returns 'Hello World!' 
=>", end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("GET", "/hello") + conn.request('GET', '/hello') resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - if not test_val("data", "Hello World!", resp.read().decode()): + if not test_val('data', 'Hello World!', resp.read().decode()): conn.close() return False - if not test_val("data", "text/html", resp.getheader('Content-Type')): + if not test_val('data', 'text/html', resp.getheader('Content-Type')): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def put_hello(dut, port): # PUT /hello returns 405' - Utility.console_log("[test] PUT /hello returns 405 =>", end=' ') + Utility.console_log('[test] PUT /hello returns 405 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("PUT", "/hello", "Hello") + conn.request('PUT', '/hello', 'Hello') resp = conn.getresponse() - if not test_val("status_code", 405, resp.status): + if not test_val('status_code', 405, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def post_hello(dut, port): # POST /hello returns 405' - Utility.console_log("[test] POST /hello returns 405 =>", end=' ') + Utility.console_log('[test] POST /hello returns 405 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("POST", "/hello", "Hello") + conn.request('POST', '/hello', 'Hello') resp = conn.getresponse() - if not test_val("status_code", 405, resp.status): + if not test_val('status_code', 405, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def post_echo(dut, port): # POST /echo echoes data' - Utility.console_log("[test] POST /echo echoes data =>", end=' ') + Utility.console_log('[test] POST /echo echoes data =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("POST", "/echo", "Hello") + conn.request('POST', '/echo', 'Hello') resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - if not test_val("data", "Hello", resp.read().decode()): + if not test_val('data', 'Hello', resp.read().decode()): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def put_echo(dut, port): # PUT /echo echoes data' - Utility.console_log("[test] PUT /echo echoes data =>", end=' ') + Utility.console_log('[test] PUT /echo echoes data =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("PUT", "/echo", "Hello") + conn.request('PUT', '/echo', 'Hello') resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - if not test_val("data", "Hello", resp.read().decode()): + if not test_val('data', 'Hello', resp.read().decode()): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_echo(dut, port): # GET /echo returns 404' - Utility.console_log("[test] GET /echo returns 405 =>", end=' ') + Utility.console_log('[test] GET /echo returns 405 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - 
conn.request("GET", "/echo") + conn.request('GET', '/echo') resp = conn.getresponse() - if not test_val("status_code", 405, resp.status): + if not test_val('status_code', 405, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_test_headers(dut, port): # GET /test_header returns data of Header2' - Utility.console_log("[test] GET /test_header =>", end=' ') + Utility.console_log('[test] GET /test_header =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - custom_header = {"Header1": "Value1", "Header3": "Value3"} - header2_values = ["", " ", "Value2", " Value2", "Value2 ", " Value2 "] + custom_header = {'Header1': 'Value1', 'Header3': 'Value3'} + header2_values = ['', ' ', 'Value2', ' Value2', 'Value2 ', ' Value2 '] for val in header2_values: - custom_header["Header2"] = val - conn.request("GET", "/test_header", headers=custom_header) + custom_header['Header2'] = val + conn.request('GET', '/test_header', headers=custom_header) resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - hdr_val_start_idx = val.find("Value2") + hdr_val_start_idx = val.find('Value2') if hdr_val_start_idx == -1: - if not test_val("header: Header2", "", resp.read().decode()): + if not test_val('header: Header2', '', resp.read().decode()): conn.close() return False else: - if not test_val("header: Header2", val[hdr_val_start_idx:], resp.read().decode()): + if not test_val('header: Header2', val[hdr_val_start_idx:], resp.read().decode()): conn.close() return False resp.read() - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_hello_type(dut, port): # GET /hello/type_html returns text/html as Content-Type' - Utility.console_log("[test] GET /hello/type_html has Content-Type of text/html =>", end=' ') + Utility.console_log('[test] GET /hello/type_html has Content-Type of text/html =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("GET", "/hello/type_html") + conn.request('GET', '/hello/type_html') resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - if not test_val("data", "Hello World!", resp.read().decode()): + if not test_val('data', 'Hello World!', resp.read().decode()): conn.close() return False - if not test_val("data", "text/html", resp.getheader('Content-Type')): + if not test_val('data', 'text/html', resp.getheader('Content-Type')): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_hello_status(dut, port): # GET /hello/status_500 returns status 500' - Utility.console_log("[test] GET /hello/status_500 returns status 500 =>", end=' ') + Utility.console_log('[test] GET /hello/status_500 returns status 500 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("GET", "/hello/status_500") + conn.request('GET', '/hello/status_500') resp = conn.getresponse() - if not test_val("status_code", 500, resp.status): + if not test_val('status_code', 500, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_false_uri(dut, port): # GET /false_uri returns status 404' - Utility.console_log("[test] GET /false_uri returns status 404 =>", 
end=' ') + Utility.console_log('[test] GET /false_uri returns status 404 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("GET", "/false_uri") + conn.request('GET', '/false_uri') resp = conn.getresponse() - if not test_val("status_code", 404, resp.status): + if not test_val('status_code', 404, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def parallel_sessions_adder(dut, port, max_sessions): # POSTs on /adder in parallel sessions - Utility.console_log("[test] POST {pipelined} on /adder in " + str(max_sessions) + " sessions =>", end=' ') + Utility.console_log('[test] POST {pipelined} on /adder in ' + str(max_sessions) + ' sessions =>', end=' ') t = [] # Create all sessions for i in range(max_sessions): @@ -532,90 +529,90 @@ def parallel_sessions_adder(dut, port, max_sessions): res = True for i in range(len(t)): - if not test_val("Thread" + str(i) + " Failed", t[i].adder_result(), True): + if not test_val('Thread' + str(i) + ' Failed', t[i].adder_result(), True): res = False t[i].close() if (res): - Utility.console_log("Success") + Utility.console_log('Success') return res def async_response_test(dut, port): # Test that an asynchronous work is executed in the HTTPD's context # This is tested by reading two responses over the same session - Utility.console_log("[test] Test HTTPD Work Queue (Async response) =>", end=' ') + Utility.console_log('[test] Test HTTPD Work Queue (Async response) =>', end=' ') s = Session(dut, port) s.send_get('/async_data') s.read_resp_hdrs() - if not test_val("First Response", "Hello World!", s.read_resp_data()): + if not test_val('First Response', 'Hello World!', s.read_resp_data()): s.close() return False s.read_resp_hdrs() - if not test_val("Second Response", "Hello Double World!", s.read_resp_data()): + if not test_val('Second Response', 'Hello Double World!', s.read_resp_data()): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def leftover_data_test(dut, port): # Leftover data in POST is purged (valid and invalid URIs) - Utility.console_log("[test] Leftover data in POST is purged (valid and invalid URIs) =>", end=' ') - s = http.client.HTTPConnection(dut + ":" + port, timeout=15) + Utility.console_log('[test] Leftover data in POST is purged (valid and invalid URIs) =>', end=' ') + s = http.client.HTTPConnection(dut + ':' + port, timeout=15) - s.request("POST", url='/leftover_data', body="abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz") + s.request('POST', url='/leftover_data', body='abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz') resp = s.getresponse() - if not test_val("Partial data", "abcdefghij", resp.read().decode()): + if not test_val('Partial data', 'abcdefghij', resp.read().decode()): s.close() return False - s.request("GET", url='/hello') + s.request('GET', url='/hello') resp = s.getresponse() - if not test_val("Hello World Data", "Hello World!", resp.read().decode()): + if not test_val('Hello World Data', 'Hello World!', resp.read().decode()): s.close() return False - s.request("POST", url='/false_uri', body="abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz") + s.request('POST', url='/false_uri', body='abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz') resp = s.getresponse() - if not test_val("False URI Status", str(404), str(resp.status)): + if not test_val('False URI Status', str(404), str(resp.status)): s.close() return False 
# socket would have been closed by server due to error s.close() - s = http.client.HTTPConnection(dut + ":" + port, timeout=15) - s.request("GET", url='/hello') + s = http.client.HTTPConnection(dut + ':' + port, timeout=15) + s.request('GET', url='/hello') resp = s.getresponse() - if not test_val("Hello World Data", "Hello World!", resp.read().decode()): + if not test_val('Hello World Data', 'Hello World!', resp.read().decode()): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def spillover_session(dut, port, max_sess): # Session max_sess_sessions + 1 is rejected - Utility.console_log("[test] Session max_sess_sessions (" + str(max_sess) + ") + 1 is rejected =>", end=' ') + Utility.console_log('[test] Session max_sess_sessions (' + str(max_sess) + ') + 1 is rejected =>', end=' ') s = [] _verbose_ = True for i in range(max_sess + 1): if (_verbose_): - Utility.console_log("Executing " + str(i)) + Utility.console_log('Executing ' + str(i)) try: - a = http.client.HTTPConnection(dut + ":" + port, timeout=15) - a.request("GET", url='/hello') + a = http.client.HTTPConnection(dut + ':' + port, timeout=15) + a.request('GET', url='/hello') resp = a.getresponse() - if not test_val("Connection " + str(i), "Hello World!", resp.read().decode()): + if not test_val('Connection ' + str(i), 'Hello World!', resp.read().decode()): a.close() break s.append(a) except Exception: if (_verbose_): - Utility.console_log("Connection " + str(i) + " rejected") + Utility.console_log('Connection ' + str(i) + ' rejected') a.close() break @@ -624,134 +621,134 @@ def spillover_session(dut, port, max_sess): a.close() # Check if number of connections is equal to max_sess - Utility.console_log(["Fail","Success"][len(s) == max_sess]) + Utility.console_log(['Fail','Success'][len(s) == max_sess]) return (len(s) == max_sess) def recv_timeout_test(dut, port): - Utility.console_log("[test] Timeout occurs if partial packet sent =>", end=' ') + Utility.console_log('[test] Timeout occurs if partial packet sent =>', end=' ') s = Session(dut, port) - s.client.sendall(b"GE") + s.client.sendall(b'GE') s.read_resp_hdrs() resp = s.read_resp_data() - if not test_val("Request Timeout", "Server closed this connection", resp): + if not test_val('Request Timeout', 'Server closed this connection', resp): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def packet_size_limit_test(dut, port, test_size): - Utility.console_log("[test] send size limit test =>", end=' ') + Utility.console_log('[test] send size limit test =>', end=' ') retry = 5 while (retry): retry -= 1 - Utility.console_log("data size = ", test_size) - s = http.client.HTTPConnection(dut + ":" + port, timeout=15) + Utility.console_log('data size = ', test_size) + s = http.client.HTTPConnection(dut + ':' + port, timeout=15) random_data = ''.join(string.printable[random.randint(0,len(string.printable)) - 1] for _ in list(range(test_size))) - path = "/echo" - s.request("POST", url=path, body=random_data) + path = '/echo' + s.request('POST', url=path, body=random_data) resp = s.getresponse() - if not test_val("Error", "200", str(resp.status)): - if test_val("Error", "500", str(resp.status)): - Utility.console_log("Data too large to be allocated") + if not test_val('Error', '200', str(resp.status)): + if test_val('Error', '500', str(resp.status)): + Utility.console_log('Data too large to be allocated') test_size = test_size // 10 else: - Utility.console_log("Unexpected 
error") + Utility.console_log('Unexpected error') s.close() - Utility.console_log("Retry...") + Utility.console_log('Retry...') continue resp = resp.read().decode() result = (resp == random_data) if not result: - test_val("Data size", str(len(random_data)), str(len(resp))) + test_val('Data size', str(len(random_data)), str(len(resp))) s.close() - Utility.console_log("Retry...") + Utility.console_log('Retry...') continue s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True - Utility.console_log("Failed") + Utility.console_log('Failed') return False def arbitrary_termination_test(dut, port): - Utility.console_log("[test] Arbitrary termination test =>", end=' ') + Utility.console_log('[test] Arbitrary termination test =>', end=' ') cases = [ { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: SomeValue\r\n\r\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom: SomeValue\r\n\r\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\nHost: " + dut + "\r\nCustom: SomeValue\r\n\r\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\nHost: ' + dut + '\r\nCustom: SomeValue\r\n\r\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\nCustom: SomeValue\r\n\r\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\nCustom: SomeValue\r\n\r\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: SomeValue\n\r\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom: SomeValue\n\r\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: SomeValue\r\n\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom: SomeValue\r\n\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\nHost: " + dut + "\nCustom: SomeValue\n\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\nHost: ' + dut + '\nCustom: SomeValue\n\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\n\r\nABCDE", - "code": "200", - "body": "ABCDE" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 5\n\r\nABCDE', + 'code': '200', + 'body': 'ABCDE' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\r\n\nABCDE", - "code": "200", - "body": "ABCDE" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 5\r\n\nABCDE', + 'code': '200', + 'body': 'ABCDE' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\n\nABCDE", - "code": "200", - "body": "ABCDE" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 5\n\nABCDE', + 'code': '200', + 'body': 'ABCDE' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\n\n\rABCD", - "code": "200", - "body": "\rABCD" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 5\n\n\rABCD', + 'code': '200', + 'body': '\rABCD' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\r\nCustom: SomeValue\r\r\n\r\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\r\nCustom: SomeValue\r\r\n\r\r\n', + 'code': '400' }, { - "request": 
"POST /echo HTTP/1.1\r\r\nHost: " + dut + "\r\n\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\r\nHost: ' + dut + '\r\n\r\n', + 'code': '400' }, { - "request": "POST /echo HTTP/1.1\r\n\rHost: " + dut + "\r\n\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\n\rHost: ' + dut + '\r\n\r\n', + 'code': '400' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\rCustom: SomeValue\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\rCustom: SomeValue\r\n', + 'code': '400' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: Some\rValue\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom: Some\rValue\r\n', + 'code': '400' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom- SomeValue\r\n\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom- SomeValue\r\n\r\n', + 'code': '400' } ] for case in cases: @@ -760,159 +757,159 @@ def arbitrary_termination_test(dut, port): resp_hdrs = s.read_resp_hdrs() resp_body = s.read_resp_data() s.close() - if not test_val("Response Code", case["code"], s.status): + if not test_val('Response Code', case['code'], s.status): return False - if "header" in case.keys(): + if 'header' in case.keys(): resp_hdr_val = None - if "Custom" in resp_hdrs.keys(): - resp_hdr_val = resp_hdrs["Custom"] - if not test_val("Response Header", case["header"], resp_hdr_val): + if 'Custom' in resp_hdrs.keys(): + resp_hdr_val = resp_hdrs['Custom'] + if not test_val('Response Header', case['header'], resp_hdr_val): return False - if "body" in case.keys(): - if not test_val("Response Body", case["body"], resp_body): + if 'body' in case.keys(): + if not test_val('Response Body', case['body'], resp_body): return False - Utility.console_log("Success") + Utility.console_log('Success') return True def code_500_server_error_test(dut, port): - Utility.console_log("[test] 500 Server Error test =>", end=' ') + Utility.console_log('[test] 500 Server Error test =>', end=' ') s = Session(dut, port) # Sending a very large content length will cause malloc to fail content_len = 2**30 - s.client.sendall(("POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: " + str(content_len) + "\r\n\r\nABCD").encode()) + s.client.sendall(('POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: ' + str(content_len) + '\r\n\r\nABCD').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Server Error", "500", s.status): + if not test_val('Server Error', '500', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_501_method_not_impl(dut, port): - Utility.console_log("[test] 501 Method Not Implemented =>", end=' ') + Utility.console_log('[test] 501 Method Not Implemented =>', end=' ') s = Session(dut, port) - path = "/hello" - s.client.sendall(("ABC " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/hello' + s.client.sendall(('ABC ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() # Presently server sends back 400 Bad Request # if not test_val("Server Error", "501", s.status): # s.close() # return False - if not test_val("Server Error", "400", s.status): + if not test_val('Server Error', '400', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_505_version_not_supported(dut, port): - Utility.console_log("[test] 505 Version 
Not Supported =>", end=' ') + Utility.console_log('[test] 505 Version Not Supported =>', end=' ') s = Session(dut, port) - path = "/hello" - s.client.sendall(("GET " + path + " HTTP/2.0\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/hello' + s.client.sendall(('GET ' + path + ' HTTP/2.0\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Server Error", "505", s.status): + if not test_val('Server Error', '505', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_400_bad_request(dut, port): - Utility.console_log("[test] 400 Bad Request =>", end=' ') + Utility.console_log('[test] 400 Bad Request =>', end=' ') s = Session(dut, port) - path = "/hello" - s.client.sendall(("XYZ " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/hello' + s.client.sendall(('XYZ ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "400", s.status): + if not test_val('Client Error', '400', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_404_not_found(dut, port): - Utility.console_log("[test] 404 Not Found =>", end=' ') + Utility.console_log('[test] 404 Not Found =>', end=' ') s = Session(dut, port) - path = "/dummy" - s.client.sendall(("GET " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/dummy' + s.client.sendall(('GET ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "404", s.status): + if not test_val('Client Error', '404', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_405_method_not_allowed(dut, port): - Utility.console_log("[test] 405 Method Not Allowed =>", end=' ') + Utility.console_log('[test] 405 Method Not Allowed =>', end=' ') s = Session(dut, port) - path = "/hello" - s.client.sendall(("POST " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/hello' + s.client.sendall(('POST ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "405", s.status): + if not test_val('Client Error', '405', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_408_req_timeout(dut, port): - Utility.console_log("[test] 408 Request Timeout =>", end=' ') + Utility.console_log('[test] 408 Request Timeout =>', end=' ') s = Session(dut, port) - s.client.sendall(("POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 10\r\n\r\nABCD").encode()) + s.client.sendall(('POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 10\r\n\r\nABCD').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "408", s.status): + if not test_val('Client Error', '408', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_411_length_required(dut, port): - Utility.console_log("[test] 411 Length Required =>", end=' ') + Utility.console_log('[test] 411 Length Required =>', end=' ') s = Session(dut, port) - path = "/echo" - s.client.sendall(("POST " + path + " HTTP/1.1\r\nHost: " + dut + "\r\nContent-Type: text/plain\r\nTransfer-Encoding: chunked\r\n\r\n").encode()) + 
path = '/echo' + s.client.sendall(('POST ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Type: text/plain\r\nTransfer-Encoding: chunked\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() # Presently server sends back 400 Bad Request # if not test_val("Client Error", "411", s.status): # s.close() # return False - if not test_val("Client Error", "400", s.status): + if not test_val('Client Error', '400', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def send_getx_uri_len(dut, port, length): s = Session(dut, port) - method = "GET " - version = " HTTP/1.1\r\n" - path = "/" + "x" * (length - len(method) - len(version) - len("/")) + method = 'GET ' + version = ' HTTP/1.1\r\n' + path = '/' + 'x' * (length - len(method) - len(version) - len('/')) s.client.sendall(method.encode()) time.sleep(1) s.client.sendall(path.encode()) time.sleep(1) - s.client.sendall((version + "Host: " + dut + "\r\n\r\n").encode()) + s.client.sendall((version + 'Host: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() s.close() @@ -920,59 +917,59 @@ def send_getx_uri_len(dut, port, length): def code_414_uri_too_long(dut, port, max_uri_len): - Utility.console_log("[test] 414 URI Too Long =>", end=' ') + Utility.console_log('[test] 414 URI Too Long =>', end=' ') status = send_getx_uri_len(dut, port, max_uri_len) - if not test_val("Client Error", "404", status): + if not test_val('Client Error', '404', status): return False status = send_getx_uri_len(dut, port, max_uri_len + 1) - if not test_val("Client Error", "414", status): + if not test_val('Client Error', '414', status): return False - Utility.console_log("Success") + Utility.console_log('Success') return True def send_postx_hdr_len(dut, port, length): s = Session(dut, port) - path = "/echo" - host = "Host: " + dut - custom_hdr_field = "\r\nCustom: " - custom_hdr_val = "x" * (length - len(host) - len(custom_hdr_field) - len("\r\n\r\n") + len("0")) - request = ("POST " + path + " HTTP/1.1\r\n" + host + custom_hdr_field + custom_hdr_val + "\r\n\r\n").encode() + path = '/echo' + host = 'Host: ' + dut + custom_hdr_field = '\r\nCustom: ' + custom_hdr_val = 'x' * (length - len(host) - len(custom_hdr_field) - len('\r\n\r\n') + len('0')) + request = ('POST ' + path + ' HTTP/1.1\r\n' + host + custom_hdr_field + custom_hdr_val + '\r\n\r\n').encode() s.client.sendall(request[:length // 2]) time.sleep(1) s.client.sendall(request[length // 2:]) hdr = s.read_resp_hdrs() resp = s.read_resp_data() s.close() - if hdr and ("Custom" in hdr): - return (hdr["Custom"] == custom_hdr_val), resp + if hdr and ('Custom' in hdr): + return (hdr['Custom'] == custom_hdr_val), resp return False, s.status def code_431_hdr_too_long(dut, port, max_hdr_len): - Utility.console_log("[test] 431 Header Too Long =>", end=' ') + Utility.console_log('[test] 431 Header Too Long =>', end=' ') res, status = send_postx_hdr_len(dut, port, max_hdr_len) if not res: return False res, status = send_postx_hdr_len(dut, port, max_hdr_len + 1) - if not test_val("Client Error", "431", status): + if not test_val('Client Error', '431', status): return False - Utility.console_log("Success") + Utility.console_log('Success') return True def test_upgrade_not_supported(dut, port): - Utility.console_log("[test] Upgrade Not Supported =>", end=' ') + Utility.console_log('[test] Upgrade Not Supported =>', end=' ') s = Session(dut, port) # path = "/hello" - s.client.sendall(("OPTIONS * HTTP/1.1\r\nHost:" + dut + "\r\nUpgrade: 
TLS/1.0\r\nConnection: Upgrade\r\n\r\n").encode()) + s.client.sendall(('OPTIONS * HTTP/1.1\r\nHost:' + dut + '\r\nUpgrade: TLS/1.0\r\nConnection: Upgrade\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "400", s.status): + if not test_val('Client Error', '400', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True @@ -997,7 +994,7 @@ if __name__ == '__main__': _verbose_ = True - Utility.console_log("### Basic HTTP Client Tests") + Utility.console_log('### Basic HTTP Client Tests') get_hello(dut, port) post_hello(dut, port) put_hello(dut, port) @@ -1009,7 +1006,7 @@ if __name__ == '__main__': get_false_uri(dut, port) get_test_headers(dut, port) - Utility.console_log("### Error code tests") + Utility.console_log('### Error code tests') code_500_server_error_test(dut, port) code_501_method_not_impl(dut, port) code_505_version_not_supported(dut, port) @@ -1024,7 +1021,7 @@ if __name__ == '__main__': # Not supported yet (Error on chunked request) # code_411_length_required(dut, port) - Utility.console_log("### Sessions and Context Tests") + Utility.console_log('### Sessions and Context Tests') parallel_sessions_adder(dut, port, max_sessions) leftover_data_test(dut, port) async_response_test(dut, port) diff --git a/examples/protocols/http_server/persistent_sockets/http_server_persistence_test.py b/examples/protocols/http_server/persistent_sockets/http_server_persistence_test.py index 71ccd6d70c..c5f0642f82 100644 --- a/examples/protocols/http_server/persistent_sockets/http_server_persistence_test.py +++ b/examples/protocols/http_server/persistent_sockets/http_server_persistence_test.py @@ -14,49 +14,47 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from builtins import str -from builtins import range -import re +from __future__ import division, print_function, unicode_literals + import os import random +import re +from builtins import range, str -from tiny_test_fw import Utility import ttfw_idf from idf_http_server_test import adder as client +from tiny_test_fw import Utility # When running on local machine execute the following before running this script # > make app bootloader # > make print_flash_cmd | tail -n 1 > build/download.config -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_http_server_persistence(env, extra_data): # Acquire DUT - dut1 = env.get_dut("http_server", "examples/protocols/http_server/persistent_sockets", + dut1 = env.get_dut('http_server', 'examples/protocols/http_server/persistent_sockets', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut1.app.binary_path, "persistent_sockets.bin") + binary_file = os.path.join(dut1.app.binary_path, 'persistent_sockets.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('http_server_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing - Utility.console_log("Starting http_server persistance test app") + Utility.console_log('Starting http_server persistence test app') dut1.start_app() # Parse IP address of STA - Utility.console_log("Waiting to connect with AP") - got_ip = dut1.expect(re.compile(r"(?:[\s\S]*)IPv4 address: (\d+.\d+.\d+.\d+)"), timeout=30)[0] + Utility.console_log('Waiting to connect with AP') + got_ip = dut1.expect(re.compile(r'(?:[\s\S]*)IPv4 address: (\d+.\d+.\d+.\d+)'), timeout=30)[0] got_port = dut1.expect(re.compile(r"(?:[\s\S]*)Starting server on port: '(\d+)'"), timeout=30)[0] - Utility.console_log("Got IP : " + got_ip) - Utility.console_log("Got Port : " + got_port) + Utility.console_log('Got IP : ' + got_ip) + Utility.console_log('Got Port : ' + got_port) # Expected Logs - dut1.expect("Registering URI handlers", timeout=30) + dut1.expect('Registering URI handlers', timeout=30) # Run test script conn = client.start_session(got_ip, got_port) @@ -65,23 +63,23 @@ def test_examples_protocol_http_server_persistence(env, extra_data): # Test PUT request and initialize session context num = random.randint(0,100) - client.putreq(conn, "/adder", str(num)) + client.putreq(conn, '/adder', str(num)) visitor += 1 - dut1.expect("/adder visitor count = " + str(visitor), timeout=30) - dut1.expect("/adder PUT handler read " + str(num), timeout=30) - dut1.expect("PUT allocating new session", timeout=30) + dut1.expect('/adder visitor count = ' + str(visitor), timeout=30) + dut1.expect('/adder PUT handler read ' + str(num), timeout=30) + dut1.expect('PUT allocating new session', timeout=30) # Retest PUT request and change session context value num = random.randint(0,100) - Utility.console_log("Adding: " + str(num)) - client.putreq(conn, "/adder", str(num)) + Utility.console_log('Adding: ' + str(num)) + client.putreq(conn, '/adder', str(num)) visitor += 1 adder += num - dut1.expect("/adder visitor count = " + str(visitor), timeout=30) - dut1.expect("/adder PUT handler read " + str(num), timeout=30) + dut1.expect('/adder visitor count = ' + str(visitor), timeout=30) + dut1.expect('/adder PUT handler read ' + str(num), 
timeout=30) try: # Re allocation shouldn't happen - dut1.expect("PUT allocating new session", timeout=30) + dut1.expect('PUT allocating new session', timeout=30) # Not expected raise RuntimeError except Exception: @@ -91,37 +89,37 @@ def test_examples_protocol_http_server_persistence(env, extra_data): # Test POST request and session persistence random_nums = [random.randint(0,100) for _ in range(100)] for num in random_nums: - Utility.console_log("Adding: " + str(num)) - client.postreq(conn, "/adder", str(num)) + Utility.console_log('Adding: ' + str(num)) + client.postreq(conn, '/adder', str(num)) visitor += 1 adder += num - dut1.expect("/adder visitor count = " + str(visitor), timeout=30) - dut1.expect("/adder handler read " + str(num), timeout=30) + dut1.expect('/adder visitor count = ' + str(visitor), timeout=30) + dut1.expect('/adder handler read ' + str(num), timeout=30) # Test GET request and session persistence - Utility.console_log("Matching final sum: " + str(adder)) - if client.getreq(conn, "/adder").decode() != str(adder): + Utility.console_log('Matching final sum: ' + str(adder)) + if client.getreq(conn, '/adder').decode() != str(adder): raise RuntimeError visitor += 1 - dut1.expect("/adder visitor count = " + str(visitor), timeout=30) - dut1.expect("/adder GET handler send " + str(adder), timeout=30) + dut1.expect('/adder visitor count = ' + str(visitor), timeout=30) + dut1.expect('/adder GET handler send ' + str(adder), timeout=30) - Utility.console_log("Ending session") + Utility.console_log('Ending session') # Close connection and check for invocation of context "Free" function client.end_session(conn) - dut1.expect("/adder Free Context function called", timeout=30) + dut1.expect('/adder Free Context function called', timeout=30) - Utility.console_log("Validating user context data") + Utility.console_log('Validating user context data') # Start another session to check user context data client.start_session(got_ip, got_port) num = random.randint(0,100) - client.putreq(conn, "/adder", str(num)) + client.putreq(conn, '/adder', str(num)) visitor += 1 - dut1.expect("/adder visitor count = " + str(visitor), timeout=30) - dut1.expect("/adder PUT handler read " + str(num), timeout=30) - dut1.expect("PUT allocating new session", timeout=30) + dut1.expect('/adder visitor count = ' + str(visitor), timeout=30) + dut1.expect('/adder PUT handler read ' + str(num), timeout=30) + dut1.expect('PUT allocating new session', timeout=30) client.end_session(conn) - dut1.expect("/adder Free Context function called", timeout=30) + dut1.expect('/adder Free Context function called', timeout=30) if __name__ == '__main__': diff --git a/examples/protocols/http_server/simple/http_server_simple_test.py b/examples/protocols/http_server/simple/http_server_simple_test.py index fd11dc0b10..dfd80734c1 100644 --- a/examples/protocols/http_server/simple/http_server_simple_test.py +++ b/examples/protocols/http_server/simple/http_server_simple_test.py @@ -14,22 +14,20 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -from builtins import range -import re -import os -import string -import random +from __future__ import division, print_function, unicode_literals +import os +import random +import re +import socket +import string import threading import time -import socket +from builtins import range -from tiny_test_fw import Utility import ttfw_idf from idf_http_server_test import client +from tiny_test_fw import Utility class http_client_thread(threading.Thread): @@ -64,97 +62,97 @@ class http_client_thread(threading.Thread): # > make print_flash_cmd | tail -n 1 > build/download.config -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_http_server_simple(env, extra_data): # Acquire DUT - dut1 = env.get_dut("http_server", "examples/protocols/http_server/simple", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('http_server', 'examples/protocols/http_server/simple', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut1.app.binary_path, "simple.bin") + binary_file = os.path.join(dut1.app.binary_path, 'simple.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('http_server_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing - Utility.console_log("Starting http_server simple test app") + Utility.console_log('Starting http_server simple test app') dut1.start_app() # Parse IP address of STA - Utility.console_log("Waiting to connect with AP") - got_ip = dut1.expect(re.compile(r"(?:[\s\S]*)IPv4 address: (\d+.\d+.\d+.\d+)"), timeout=30)[0] + Utility.console_log('Waiting to connect with AP') + got_ip = dut1.expect(re.compile(r'(?:[\s\S]*)IPv4 address: (\d+.\d+.\d+.\d+)'), timeout=30)[0] got_port = dut1.expect(re.compile(r"(?:[\s\S]*)Starting server on port: '(\d+)'"), timeout=30)[0] - Utility.console_log("Got IP : " + got_ip) - Utility.console_log("Got Port : " + got_port) + Utility.console_log('Got IP : ' + got_ip) + Utility.console_log('Got Port : ' + got_port) # Expected Logs - dut1.expect("Registering URI handlers", timeout=30) + dut1.expect('Registering URI handlers', timeout=30) # Run test script # If failed raise appropriate exception - Utility.console_log("Test /hello GET handler") + Utility.console_log('Test /hello GET handler') if not client.test_get_handler(got_ip, got_port): raise RuntimeError # Acquire host IP. 
Need a way to check it - dut1.expect(re.compile(r"(?:[\s\S]*)Found header => Host: (\d+.\d+.\d+.\d+)"), timeout=30)[0] + dut1.expect(re.compile(r'(?:[\s\S]*)Found header => Host: (\d+.\d+.\d+.\d+)'), timeout=30)[0] # Match additional headers sent in the request - dut1.expect("Found header => Test-Header-2: Test-Value-2", timeout=30) - dut1.expect("Found header => Test-Header-1: Test-Value-1", timeout=30) - dut1.expect("Found URL query parameter => query1=value1", timeout=30) - dut1.expect("Found URL query parameter => query3=value3", timeout=30) - dut1.expect("Found URL query parameter => query2=value2", timeout=30) - dut1.expect("Request headers lost", timeout=30) + dut1.expect('Found header => Test-Header-2: Test-Value-2', timeout=30) + dut1.expect('Found header => Test-Header-1: Test-Value-1', timeout=30) + dut1.expect('Found URL query parameter => query1=value1', timeout=30) + dut1.expect('Found URL query parameter => query3=value3', timeout=30) + dut1.expect('Found URL query parameter => query2=value2', timeout=30) + dut1.expect('Request headers lost', timeout=30) - Utility.console_log("Test /ctrl PUT handler and realtime handler de/registration") + Utility.console_log('Test /ctrl PUT handler and realtime handler de/registration') if not client.test_put_handler(got_ip, got_port): raise RuntimeError - dut1.expect("Unregistering /hello and /echo URIs", timeout=30) - dut1.expect("Registering /hello and /echo URIs", timeout=30) + dut1.expect('Unregistering /hello and /echo URIs', timeout=30) + dut1.expect('Registering /hello and /echo URIs', timeout=30) # Generate random data of 10KB random_data = ''.join(string.printable[random.randint(0,len(string.printable)) - 1] for _ in range(10 * 1024)) - Utility.console_log("Test /echo POST handler with random data") + Utility.console_log('Test /echo POST handler with random data') if not client.test_post_handler(got_ip, got_port, random_data): raise RuntimeError - query = "http://foobar" - Utility.console_log("Test /hello with custom query : " + query) + query = 'http://foobar' + Utility.console_log('Test /hello with custom query : ' + query) if not client.test_custom_uri_query(got_ip, got_port, query): raise RuntimeError - dut1.expect("Found URL query => " + query, timeout=30) + dut1.expect('Found URL query => ' + query, timeout=30) - query = "abcd+1234%20xyz" - Utility.console_log("Test /hello with custom query : " + query) + query = 'abcd+1234%20xyz' + Utility.console_log('Test /hello with custom query : ' + query) if not client.test_custom_uri_query(got_ip, got_port, query): raise RuntimeError - dut1.expect("Found URL query => " + query, timeout=30) + dut1.expect('Found URL query => ' + query, timeout=30) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_http_server_lru_purge_enable(env, extra_data): # Acquire DUT - dut1 = env.get_dut("http_server", "examples/protocols/http_server/simple", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('http_server', 'examples/protocols/http_server/simple', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut1.app.binary_path, "simple.bin") + binary_file = os.path.join(dut1.app.binary_path, 'simple.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("http_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('http_server_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing - Utility.console_log("Starting http_server simple test 
app") + Utility.console_log('Starting http_server simple test app') dut1.start_app() # Parse IP address of STA - Utility.console_log("Waiting to connect with AP") - got_ip = dut1.expect(re.compile(r"(?:[\s\S]*)IPv4 address: (\d+.\d+.\d+.\d+)"), timeout=30)[0] + Utility.console_log('Waiting to connect with AP') + got_ip = dut1.expect(re.compile(r'(?:[\s\S]*)IPv4 address: (\d+.\d+.\d+.\d+)'), timeout=30)[0] got_port = dut1.expect(re.compile(r"(?:[\s\S]*)Starting server on port: '(\d+)'"), timeout=30)[0] - Utility.console_log("Got IP : " + got_ip) - Utility.console_log("Got Port : " + got_port) + Utility.console_log('Got IP : ' + got_ip) + Utility.console_log('Got Port : ' + got_port) # Expected Logs - dut1.expect("Registering URI handlers", timeout=30) + dut1.expect('Registering URI handlers', timeout=30) threads = [] # Open 20 sockets, one from each thread for _ in range(20): @@ -163,7 +161,7 @@ def test_examples_protocol_http_server_lru_purge_enable(env, extra_data): thread.start() threads.append(thread) except OSError as err: - Utility.console_log("Error: unable to start thread, " + err) + Utility.console_log('Error: unable to start thread, ' + err) for t in threads: t.join() diff --git a/examples/protocols/http_server/ws_echo_server/ws_server_example_test.py b/examples/protocols/http_server/ws_echo_server/ws_server_example_test.py index b8ca7aed8a..e44896bd31 100644 --- a/examples/protocols/http_server/ws_echo_server/ws_server_example_test.py +++ b/examples/protocols/http_server/ws_echo_server/ws_server_example_test.py @@ -14,15 +14,14 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import division -from __future__ import print_function -from __future__ import unicode_literals -import re -from tiny_test_fw import Utility -import ttfw_idf -import os -import websocket +from __future__ import division, print_function, unicode_literals +import os +import re + +import ttfw_idf +import websocket +from tiny_test_fw import Utility OPCODE_TEXT = 0x1 OPCODE_BIN = 0x2 @@ -37,7 +36,7 @@ class WsClient: self.ws = websocket.WebSocket() def __enter__(self): - self.ws.connect("ws://{}:{}/ws".format(self.ip, self.port)) + self.ws.connect('ws://{}:{}/ws'.format(self.ip, self.port)) return self def __exit__(self, exc_type, exc_value, traceback): @@ -46,7 +45,7 @@ class WsClient: def read(self): return self.ws.recv_data(control_frame=True) - def write(self, data="", opcode=OPCODE_TEXT): + def write(self, data='', opcode=OPCODE_TEXT): if opcode == OPCODE_BIN: return self.ws.send_binary(data.encode()) if opcode == OPCODE_PING: @@ -54,27 +53,27 @@ class WsClient: return self.ws.send(data) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_http_ws_echo_server(env, extra_data): # Acquire DUT - dut1 = env.get_dut("http_server", "examples/protocols/http_server/ws_echo_server", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('http_server', 'examples/protocols/http_server/ws_echo_server', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut1.app.binary_path, "ws_echo_server.bin") + binary_file = os.path.join(dut1.app.binary_path, 'ws_echo_server.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("http_ws_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('http_ws_server_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing - Utility.console_log("Starting 
ws-echo-server test app based on http_server") + Utility.console_log('Starting ws-echo-server test app based on http_server') dut1.start_app() # Parse IP address of STA - Utility.console_log("Waiting to connect with AP") - got_ip = dut1.expect(re.compile(r"IPv4 address: (\d+.\d+.\d+.\d+)"), timeout=60)[0] + Utility.console_log('Waiting to connect with AP') + got_ip = dut1.expect(re.compile(r'IPv4 address: (\d+.\d+.\d+.\d+)'), timeout=60)[0] got_port = dut1.expect(re.compile(r"Starting server on port: '(\d+)'"), timeout=60)[0] - Utility.console_log("Got IP : " + got_ip) - Utility.console_log("Got Port : " + got_port) + Utility.console_log('Got IP : ' + got_ip) + Utility.console_log('Got Port : ' + got_port) # Start ws server test with WsClient(got_ip, int(got_port)) as ws: @@ -82,23 +81,23 @@ def test_examples_protocol_http_ws_echo_server(env, extra_data): for expected_opcode in [OPCODE_TEXT, OPCODE_BIN, OPCODE_PING]: ws.write(data=DATA, opcode=expected_opcode) opcode, data = ws.read() - Utility.console_log("Testing opcode {}: Received opcode:{}, data:{}".format(expected_opcode, opcode, data)) + Utility.console_log('Testing opcode {}: Received opcode:{}, data:{}'.format(expected_opcode, opcode, data)) data = data.decode() if expected_opcode == OPCODE_PING: - dut1.expect("Got a WS PING frame, Replying PONG") + dut1.expect('Got a WS PING frame, Replying PONG') if opcode != OPCODE_PONG or data != DATA: - raise RuntimeError("Failed to receive correct opcode:{} or data:{}".format(opcode, data)) + raise RuntimeError('Failed to receive correct opcode:{} or data:{}'.format(opcode, data)) continue - dut_data = dut1.expect(re.compile(r"Got packet with message: ([A-Za-z0-9_]*)"))[0] - dut_opcode = int(dut1.expect(re.compile(r"Packet type: ([0-9]*)"))[0]) + dut_data = dut1.expect(re.compile(r'Got packet with message: ([A-Za-z0-9_]*)'))[0] + dut_opcode = int(dut1.expect(re.compile(r'Packet type: ([0-9]*)'))[0]) if opcode != expected_opcode or data != DATA or opcode != dut_opcode or data != dut_data: - raise RuntimeError("Failed to receive correct opcode:{} or data:{}".format(opcode, data)) - ws.write(data="Trigger async", opcode=OPCODE_TEXT) + raise RuntimeError('Failed to receive correct opcode:{} or data:{}'.format(opcode, data)) + ws.write(data='Trigger async', opcode=OPCODE_TEXT) opcode, data = ws.read() - Utility.console_log("Testing async send: Received opcode:{}, data:{}".format(opcode, data)) + Utility.console_log('Testing async send: Received opcode:{}, data:{}'.format(opcode, data)) data = data.decode() - if opcode != OPCODE_TEXT or data != "Async data": - raise RuntimeError("Failed to receive correct opcode:{} or data:{}".format(opcode, data)) + if opcode != OPCODE_TEXT or data != 'Async data': + raise RuntimeError('Failed to receive correct opcode:{} or data:{}'.format(opcode, data)) if __name__ == '__main__': diff --git a/examples/protocols/https_request/example_test.py b/examples/protocols/https_request/example_test.py index 5c6f937bd0..970cf5533c 100644 --- a/examples/protocols/https_request/example_test.py +++ b/examples/protocols/https_request/example_test.py @@ -1,10 +1,11 @@ import os import re + import ttfw_idf from tiny_test_fw import Utility -@ttfw_idf.idf_example_test(env_tag="Example_EthKitV1") +@ttfw_idf.idf_example_test(env_tag='Example_EthKitV1') def test_examples_protocol_https_request(env, extra_data): """ steps: | @@ -13,24 +14,24 @@ def test_examples_protocol_https_request(env, extra_data): certificate verification options 3. 
send http request """ - dut1 = env.get_dut("https_request", "examples/protocols/https_request", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('https_request', 'examples/protocols/https_request', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "https_request.bin") + binary_file = os.path.join(dut1.app.binary_path, 'https_request.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("https_request_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('https_request_bin_size', '{}KB'.format(bin_size // 1024)) # start test - Utility.console_log("Starting https_request simple test app") + Utility.console_log('Starting https_request simple test app') dut1.start_app() # Check for connection using crt bundle Utility.console_log("Testing for \"https_request using crt bundle\"") try: - dut1.expect(re.compile("https_request using crt bundle"), timeout=30) - dut1.expect_all("Certificate validated", - "Connection established...", - "Reading HTTP response...", - "HTTP/1.1 200 OK", - re.compile("connection closed")) + dut1.expect(re.compile('https_request using crt bundle'), timeout=30) + dut1.expect_all('Certificate validated', + 'Connection established...', + 'Reading HTTP response...', + 'HTTP/1.1 200 OK', + re.compile('connection closed')) except Exception: Utility.console_log("Failed the test for \"https_request using crt bundle\"") raise @@ -39,11 +40,11 @@ def test_examples_protocol_https_request(env, extra_data): # Check for connection using cacert_buf Utility.console_log("Testing for \"https_request using cacert_buf\"") try: - dut1.expect(re.compile("https_request using cacert_buf"), timeout=20) - dut1.expect_all("Connection established...", - "Reading HTTP response...", - "HTTP/1.1 200 OK", - re.compile("connection closed")) + dut1.expect(re.compile('https_request using cacert_buf'), timeout=20) + dut1.expect_all('Connection established...', + 'Reading HTTP response...', + 'HTTP/1.1 200 OK', + re.compile('connection closed')) except Exception: Utility.console_log("Failed the test for \"https_request using cacert_buf\"") raise @@ -52,32 +53,32 @@ def test_examples_protocol_https_request(env, extra_data): # Check for connection using global ca_store Utility.console_log("Testing for \"https_request using global ca_store\"") try: - dut1.expect(re.compile("https_request using global ca_store"), timeout=20) - dut1.expect_all("Connection established...", - "Reading HTTP response...", - "HTTP/1.1 200 OK", - re.compile("connection closed")) + dut1.expect(re.compile('https_request using global ca_store'), timeout=20) + dut1.expect_all('Connection established...', + 'Reading HTTP response...', + 'HTTP/1.1 200 OK', + re.compile('connection closed')) except Exception: Utility.console_log("Failed the test for \"https_request using global ca_store\"") raise Utility.console_log("Passed the test for \"https_request using global ca_store\"") # Check for connection using crt bundle with mbedtls dynamic resource enabled - dut1 = env.get_dut("https_request", "examples/protocols/https_request", dut_class=ttfw_idf.ESP32DUT, app_config_name='ssldyn') + dut1 = env.get_dut('https_request', 'examples/protocols/https_request', dut_class=ttfw_idf.ESP32DUT, app_config_name='ssldyn') # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "https_request.bin") + binary_file = os.path.join(dut1.app.binary_path, 'https_request.bin') bin_size = os.path.getsize(binary_file) - 
ttfw_idf.log_performance("https_request_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('https_request_bin_size', '{}KB'.format(bin_size // 1024)) # start test dut1.start_app() # only check if one connection is established Utility.console_log("Testing for \"https_request using crt bundle\" with mbedtls dynamic resource enabled") try: - dut1.expect(re.compile("https_request using crt bundle"), timeout=30) - dut1.expect_all("Connection established...", - "Reading HTTP response...", - "HTTP/1.1 200 OK", - re.compile("connection closed")) + dut1.expect(re.compile('https_request using crt bundle'), timeout=30) + dut1.expect_all('Connection established...', + 'Reading HTTP response...', + 'HTTP/1.1 200 OK', + re.compile('connection closed')) except Exception: Utility.console_log("Failed the test for \"https_request using crt bundle\" when mbedtls dynamic resource was enabled") raise diff --git a/examples/protocols/https_x509_bundle/example_test.py b/examples/protocols/https_x509_bundle/example_test.py index 8de1acd731..28e352a7f6 100644 --- a/examples/protocols/https_x509_bundle/example_test.py +++ b/examples/protocols/https_x509_bundle/example_test.py @@ -1,9 +1,10 @@ import os import re + import ttfw_idf -@ttfw_idf.idf_example_test(env_tag="Example_WIFI", ignore=True) +@ttfw_idf.idf_example_test(env_tag='Example_WIFI', ignore=True) def test_examples_protocol_https_x509_bundle(env, extra_data): """ steps: | @@ -11,28 +12,28 @@ def test_examples_protocol_https_x509_bundle(env, extra_data): 2. connect to multiple URLs 3. send http request """ - dut1 = env.get_dut("https_x509_bundle", "examples/protocols/https_x509_bundle") + dut1 = env.get_dut('https_x509_bundle', 'examples/protocols/https_x509_bundle') # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "https_x509_bundle.bin") + binary_file = os.path.join(dut1.app.binary_path, 'https_x509_bundle.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("https_x509_bundle_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('https_x509_bundle_bin_size', '{}KB'.format(bin_size // 1024)) # start test dut1.start_app() - num_URLS = dut1.expect(re.compile(r"Connecting to (\d+) URLs"), timeout=30) - dut1.expect(re.compile(r"Connection established to ([\s\S]*)"), timeout=30) - dut1.expect("Completed {} connections".format(num_URLS[0]), timeout=60) + num_URLS = dut1.expect(re.compile(r'Connecting to (\d+) URLs'), timeout=30) + dut1.expect(re.compile(r'Connection established to ([\s\S]*)'), timeout=30) + dut1.expect('Completed {} connections'.format(num_URLS[0]), timeout=60) # test mbedtls dynamic resource - dut1 = env.get_dut("https_x509_bundle", "examples/protocols/https_x509_bundle", app_config_name='ssldyn') + dut1 = env.get_dut('https_x509_bundle', 'examples/protocols/https_x509_bundle', app_config_name='ssldyn') # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "https_x509_bundle.bin") + binary_file = os.path.join(dut1.app.binary_path, 'https_x509_bundle.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("https_x509_bundle_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('https_x509_bundle_bin_size', '{}KB'.format(bin_size // 1024)) # start test dut1.start_app() - num_URLS = dut1.expect(re.compile(r"Connecting to (\d+) URLs"), timeout=30) - dut1.expect(re.compile(r"Connection established to ([\s\S]*)"), timeout=30) - dut1.expect("Completed {} connections".format(num_URLS[0]), timeout=60) + num_URLS = 
dut1.expect(re.compile(r'Connecting to (\d+) URLs'), timeout=30)
+    dut1.expect(re.compile(r'Connection established to ([\s\S]*)'), timeout=30)
+    dut1.expect('Completed {} connections'.format(num_URLS[0]), timeout=60)
 
 
 if __name__ == '__main__':
diff --git a/examples/protocols/icmp_echo/example_test.py b/examples/protocols/icmp_echo/example_test.py
index 717153c256..f52ed02cd6 100644
--- a/examples/protocols/icmp_echo/example_test.py
+++ b/examples/protocols/icmp_echo/example_test.py
@@ -1,7 +1,9 @@
 from __future__ import unicode_literals
-import re
-import ttfw_idf
+
 import os
+import re
+
+import ttfw_idf
 
 
 @ttfw_idf.idf_example_test(env_tag='Example_WIFI')
diff --git a/examples/protocols/mdns/mdns_example_test.py b/examples/protocols/mdns/mdns_example_test.py
index e081f2af02..52651035bb 100644
--- a/examples/protocols/mdns/mdns_example_test.py
+++ b/examples/protocols/mdns/mdns_example_test.py
@@ -1,15 +1,15 @@
-import re
 import os
+import re
 import socket
-import time
 import struct
+import subprocess
+import time
+from threading import Event, Thread
+
 import dpkt
 import dpkt.dns
-from threading import Thread, Event
-import subprocess
-
-from tiny_test_fw import DUT
 import ttfw_idf
+from tiny_test_fw import DUT
 
 stop_mdns_server = Event()
 esp_answered = Event()
@@ -18,7 +18,7 @@ esp_answered = Event()
 
 
 def get_dns_query_for_esp(esp_host):
     dns = dpkt.dns.DNS(b'\x00\x00\x01\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01')
     dns.qd[0].name = esp_host + u'.local'
-    print("Created query for esp host: {} ".format(dns.__repr__()))
+    print('Created query for esp host: {} '.format(dns.__repr__()))
     return dns.pack()
@@ -32,26 +32,26 @@ def get_dns_answer_to_mdns(tester_host):
     arr.name = tester_host
     arr.ip = socket.inet_aton('127.0.0.1')
     dns.an.append(arr)
-    print("Created answer to mdns query: {} ".format(dns.__repr__()))
+    print('Created answer to mdns query: {} '.format(dns.__repr__()))
     return dns.pack()
 
 
 def get_dns_answer_to_mdns_lwip(tester_host, id):
-    dns = dpkt.dns.DNS(b"\x5e\x39\x84\x00\x00\x01\x00\x01\x00\x00\x00\x00\x0a\x64\x61\x76\x69\x64"
-                       b"\x2d\x63\x6f\x6d\x70\x05\x6c\x6f\x63\x61\x6c\x00\x00\x01\x00\x01\xc0\x0c"
-                       b"\x00\x01\x00\x01\x00\x00\x00\x0a\x00\x04\xc0\xa8\x0a\x6c")
+    dns = dpkt.dns.DNS(b'\x5e\x39\x84\x00\x00\x01\x00\x01\x00\x00\x00\x00\x0a\x64\x61\x76\x69\x64'
+                       b'\x2d\x63\x6f\x6d\x70\x05\x6c\x6f\x63\x61\x6c\x00\x00\x01\x00\x01\xc0\x0c'
+                       b'\x00\x01\x00\x01\x00\x00\x00\x0a\x00\x04\xc0\xa8\x0a\x6c')
     dns.qd[0].name = tester_host
     dns.an[0].name = tester_host
     dns.an[0].ip = socket.inet_aton('127.0.0.1')
     dns.an[0].rdata = socket.inet_aton('127.0.0.1')
     dns.id = id
-    print("Created answer to mdns (lwip) query: {} ".format(dns.__repr__()))
+    print('Created answer to mdns (lwip) query: {} '.format(dns.__repr__()))
     return dns.pack()
 
 
 def mdns_server(esp_host):
     global esp_answered
-    UDP_IP = "0.0.0.0"
+    UDP_IP = '0.0.0.0'
     UDP_PORT = 5353
     MCAST_GRP = '224.0.0.251'
     TESTER_NAME = u'tinytester.local'
@@ -60,7 +60,7 @@ def mdns_server(esp_host):
     sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
     sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
     sock.bind((UDP_IP,UDP_PORT))
-    mreq = struct.pack("4sl", socket.inet_aton(MCAST_GRP), socket.INADDR_ANY)
+    mreq = struct.pack('4sl', socket.inet_aton(MCAST_GRP), socket.INADDR_ANY)
     sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
     sock.settimeout(30)
     while not stop_mdns_server.is_set():
@@ -72,14 +72,14 @@ def mdns_server(esp_host):
             dns = dpkt.dns.DNS(data)
             if len(dns.qd) > 0 and dns.qd[0].type == dpkt.dns.DNS_A:
if dns.qd[0].name == TESTER_NAME: - print("Received query: {} ".format(dns.__repr__())) + print('Received query: {} '.format(dns.__repr__())) sock.sendto(get_dns_answer_to_mdns(TESTER_NAME), (MCAST_GRP,UDP_PORT)) elif dns.qd[0].name == TESTER_NAME_LWIP: - print("Received query: {} ".format(dns.__repr__())) + print('Received query: {} '.format(dns.__repr__())) sock.sendto(get_dns_answer_to_mdns_lwip(TESTER_NAME_LWIP, dns.id), addr) if len(dns.an) > 0 and dns.an[0].type == dpkt.dns.DNS_A: if dns.an[0].name == esp_host + u'.local': - print("Received answer to esp32-mdns query: {}".format(dns.__repr__())) + print('Received answer to esp32-mdns query: {}'.format(dns.__repr__())) esp_answered.set() except socket.timeout: break @@ -87,7 +87,7 @@ def mdns_server(esp_host): continue -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_mdns(env, extra_data): global stop_mdns_server """ @@ -97,21 +97,21 @@ def test_examples_protocol_mdns(env, extra_data): 3. check the mdns name is accessible 4. check DUT output if mdns advertized host is resolved """ - dut1 = env.get_dut("mdns-test", "examples/protocols/mdns", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('mdns-test', 'examples/protocols/mdns', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "mdns-test.bin") + binary_file = os.path.join(dut1.app.binary_path, 'mdns-test.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("mdns-test_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('mdns-test_bin_size', '{}KB'.format(bin_size // 1024)) # 1. start mdns application dut1.start_app() # 2. get the dut host name (and IP address) - specific_host = dut1.expect(re.compile(r"mdns hostname set to: \[([^\]]+)\]"), timeout=30) + specific_host = dut1.expect(re.compile(r'mdns hostname set to: \[([^\]]+)\]'), timeout=30) specific_host = str(specific_host[0]) thread1 = Thread(target=mdns_server, args=(specific_host,)) thread1.start() try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30)[0] - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30)[0] + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: stop_mdns_server.set() thread1.join() @@ -121,15 +121,15 @@ def test_examples_protocol_mdns(env, extra_data): if not esp_answered.wait(timeout=30): raise ValueError('Test has failed: did not receive mdns answer within timeout') # 4. check DUT output if mdns advertized host is resolved - dut1.expect(re.compile(r"mdns-test: Query A: tinytester.local resolved to: 127.0.0.1"), timeout=30) - dut1.expect(re.compile(r"mdns-test: gethostbyname: tinytester-lwip.local resolved to: 127.0.0.1"), timeout=30) - dut1.expect(re.compile(r"mdns-test: getaddrinfo: tinytester-lwip.local resolved to: 127.0.0.1"), timeout=30) + dut1.expect(re.compile(r'mdns-test: Query A: tinytester.local resolved to: 127.0.0.1'), timeout=30) + dut1.expect(re.compile(r'mdns-test: gethostbyname: tinytester-lwip.local resolved to: 127.0.0.1'), timeout=30) + dut1.expect(re.compile(r'mdns-test: getaddrinfo: tinytester-lwip.local resolved to: 127.0.0.1'), timeout=30) # 5. 
check the DUT answers to `dig` command dig_output = subprocess.check_output(['dig', '+short', '-p', '5353', '@224.0.0.251', '{}.local'.format(specific_host)]) print('Resolving {} using "dig" succeeded with:\n{}'.format(specific_host, dig_output)) if not ip_address.encode('utf-8') in dig_output: - raise ValueError("Test has failed: Incorrectly resolved DUT hostname using dig" + raise ValueError('Test has failed: Incorrectly resolved DUT hostname using dig' "Output should've contained DUT's IP address:{}".format(ip_address)) finally: stop_mdns_server.set() diff --git a/examples/protocols/modbus/serial/example_test.py b/examples/protocols/modbus/serial/example_test.py index bfc70f44bb..c15018262f 100644 --- a/examples/protocols/modbus/serial/example_test.py +++ b/examples/protocols/modbus/serial/example_test.py @@ -1,15 +1,15 @@ # Need Python 3 string formatting functions from __future__ import print_function +import logging import os import re -import logging from threading import Thread import ttfw_idf LOG_LEVEL = logging.DEBUG -LOGGER_NAME = "modbus_test" +LOGGER_NAME = 'modbus_test' # Allowed parameter reads TEST_READ_MIN_COUNT = 10 # Minimum number of correct readings @@ -27,38 +27,38 @@ TEST_SLAVE_ASCII = 'slave_ascii' # Define tuple of strings to expect for each DUT. # -master_expect = ("MASTER_TEST: Modbus master stack initialized...", "MASTER_TEST: Start modbus test...", "MASTER_TEST: Destroy master...") -slave_expect = ("SLAVE_TEST: Modbus slave stack initialized.", "SLAVE_TEST: Start modbus test...", "SLAVE_TEST: Modbus controller destroyed.") +master_expect = ('MASTER_TEST: Modbus master stack initialized...', 'MASTER_TEST: Start modbus test...', 'MASTER_TEST: Destroy master...') +slave_expect = ('SLAVE_TEST: Modbus slave stack initialized.', 'SLAVE_TEST: Start modbus test...', 'SLAVE_TEST: Modbus controller destroyed.') # The dictionary for expected values in listing -expect_dict_master_ok = {"START": (), - "READ_PAR_OK": (), - "ALARM_MSG": (u'7',)} +expect_dict_master_ok = {'START': (), + 'READ_PAR_OK': (), + 'ALARM_MSG': (u'7',)} -expect_dict_master_err = {"READ_PAR_ERR": (u'263', u'ESP_ERR_TIMEOUT'), - "READ_STK_ERR": (u'107', u'ESP_ERR_TIMEOUT')} +expect_dict_master_err = {'READ_PAR_ERR': (u'263', u'ESP_ERR_TIMEOUT'), + 'READ_STK_ERR': (u'107', u'ESP_ERR_TIMEOUT')} # The dictionary for regular expression patterns to check in listing -pattern_dict_master_ok = {"START": (r'.*I \([0-9]+\) MASTER_TEST: Start modbus test...'), - "READ_PAR_OK": (r'.*I\s\([0-9]+\) MASTER_TEST: Characteristic #[0-9]+ [a-zA-Z0-9_]+' +pattern_dict_master_ok = {'START': (r'.*I \([0-9]+\) MASTER_TEST: Start modbus test...'), + 'READ_PAR_OK': (r'.*I\s\([0-9]+\) MASTER_TEST: Characteristic #[0-9]+ [a-zA-Z0-9_]+' r'\s\([a-zA-Z\%\/]+\) value = [a-zA-Z0-9\.\s]*\(0x[a-zA-Z0-9]+\) read successful.'), - "ALARM_MSG": (r'.*I \([0-9]*\) MASTER_TEST: Alarm triggered by cid #([0-9]+).')} + 'ALARM_MSG': (r'.*I \([0-9]*\) MASTER_TEST: Alarm triggered by cid #([0-9]+).')} -pattern_dict_master_err = {"READ_PAR_ERR_TOUT": (r'.*E \([0-9]+\) MASTER_TEST: Characteristic #[0-9]+' +pattern_dict_master_err = {'READ_PAR_ERR_TOUT': (r'.*E \([0-9]+\) MASTER_TEST: Characteristic #[0-9]+' r'\s\([a-zA-Z0-9_]+\) read fail, err = [0-9]+ \([_A-Z]+\).'), - "READ_STK_ERR_TOUT": (r'.*E \([0-9]+\) MB_CONTROLLER_MASTER: [a-zA-Z0-9_]+\([0-9]+\):\s' + 'READ_STK_ERR_TOUT': (r'.*E \([0-9]+\) MB_CONTROLLER_MASTER: [a-zA-Z0-9_]+\([0-9]+\):\s' r'SERIAL master get parameter failure error=\(0x([a-zA-Z0-9]+)\) \(([_A-Z]+)\).')} # The dictionary for 
expected values in listing -expect_dict_slave_ok = {"START": (), - "READ_PAR_OK": (), - "DESTROY": ()} +expect_dict_slave_ok = {'START': (), + 'READ_PAR_OK': (), + 'DESTROY': ()} # The dictionary for regular expression patterns to check in listing -pattern_dict_slave_ok = {"START": (r'.*I \([0-9]+\) SLAVE_TEST: Start modbus test...'), - "READ_PAR_OK": (r'.*I\s\([0-9]+\) SLAVE_TEST: [A-Z]+ READ \([a-zA-Z0-9_]+ us\),\s' +pattern_dict_slave_ok = {'START': (r'.*I \([0-9]+\) SLAVE_TEST: Start modbus test...'), + 'READ_PAR_OK': (r'.*I\s\([0-9]+\) SLAVE_TEST: [A-Z]+ READ \([a-zA-Z0-9_]+ us\),\s' r'ADDR:[0-9]+, TYPE:[0-9]+, INST_ADDR:0x[a-zA-Z0-9]+, SIZE:[0-9]+'), - "DESTROY": (r'.*I\s\([0-9]+\) SLAVE_TEST: Modbus controller destroyed.')} + 'DESTROY': (r'.*I\s\([0-9]+\) SLAVE_TEST: Modbus controller destroyed.')} logger = logging.getLogger(LOGGER_NAME) @@ -89,8 +89,8 @@ class DutTestThread(Thread): # Check DUT exceptions dut_exceptions = self.dut.get_exceptions() - if "Guru Meditation Error:" in dut_exceptions: - raise Exception("%s generated an exception: %s\n" % (str(self.dut), dut_exceptions)) + if 'Guru Meditation Error:' in dut_exceptions: + raise Exception('%s generated an exception: %s\n' % (str(self.dut), dut_exceptions)) # Mark thread has run to completion without any exceptions self.data = self.dut.stop_capture_raw_data() @@ -102,7 +102,7 @@ def test_filter_output(data=None, start_pattern=None, end_pattern=None): """ start_index = str(data).find(start_pattern) end_index = str(data).find(end_pattern) - logger.debug("Listing start index= %d, end=%d" % (start_index, end_index)) + logger.debug('Listing start index= %d, end=%d' % (start_index, end_index)) if start_index == -1 or end_index == -1: return data return data[start_index:end_index + len(end_pattern)] @@ -145,9 +145,9 @@ def test_check_output(data=None, check_dict=None, expect_dict=None): for line in data_lines: group, index = test_expect_re(line, pattern) if index is not None: - logger.debug("Found key{%s}=%s, line: \n%s" % (key, group, line)) + logger.debug('Found key{%s}=%s, line: \n%s' % (key, group, line)) if expect_dict[key] == group: - logger.debug("The result is correct for the key:%s, expected:%s == returned:%s" % (key, str(expect_dict[key]), str(group))) + logger.debug('The result is correct for the key:%s, expected:%s == returned:%s' % (key, str(expect_dict[key]), str(group))) match_count += 1 return match_count @@ -158,7 +158,7 @@ def test_check_mode(dut=None, mode_str=None, value=None): global logger try: opt = dut.app.get_sdkconfig()[mode_str] - logger.info("%s {%s} = %s.\n" % (str(dut), mode_str, opt)) + logger.info('%s {%s} = %s.\n' % (str(dut), mode_str, opt)) return value == opt except Exception: logger.info('ENV_TEST_FAILURE: %s: Cannot find option %s in sdkconfig.' % (str(dut), mode_str)) @@ -170,30 +170,30 @@ def test_modbus_communication(env, comm_mode): global logger # Get device under test. 
"dut1 - master", "dut2 - slave" must be properly connected through RS485 interface driver - dut_master = env.get_dut("modbus_master", "examples/protocols/modbus/serial/mb_master", dut_class=ttfw_idf.ESP32DUT) - dut_slave = env.get_dut("modbus_slave", "examples/protocols/modbus/serial/mb_slave", dut_class=ttfw_idf.ESP32DUT) + dut_master = env.get_dut('modbus_master', 'examples/protocols/modbus/serial/mb_master', dut_class=ttfw_idf.ESP32DUT) + dut_slave = env.get_dut('modbus_slave', 'examples/protocols/modbus/serial/mb_slave', dut_class=ttfw_idf.ESP32DUT) try: - logger.debug("Environment vars: %s\r\n" % os.environ) - logger.debug("DUT slave sdkconfig: %s\r\n" % dut_slave.app.get_sdkconfig()) - logger.debug("DUT master sdkconfig: %s\r\n" % dut_master.app.get_sdkconfig()) + logger.debug('Environment vars: %s\r\n' % os.environ) + logger.debug('DUT slave sdkconfig: %s\r\n' % dut_slave.app.get_sdkconfig()) + logger.debug('DUT master sdkconfig: %s\r\n' % dut_master.app.get_sdkconfig()) # Check Kconfig configuration options for each built example - if test_check_mode(dut_master, "CONFIG_MB_COMM_MODE_ASCII", "y") and test_check_mode(dut_slave, "CONFIG_MB_COMM_MODE_ASCII", "y"): - logger.info("ENV_TEST_INFO: Modbus ASCII test mode selected in the configuration. \n") + if test_check_mode(dut_master, 'CONFIG_MB_COMM_MODE_ASCII', 'y') and test_check_mode(dut_slave, 'CONFIG_MB_COMM_MODE_ASCII', 'y'): + logger.info('ENV_TEST_INFO: Modbus ASCII test mode selected in the configuration. \n') slave_name = TEST_SLAVE_ASCII master_name = TEST_MASTER_ASCII - elif test_check_mode(dut_master, "CONFIG_MB_COMM_MODE_RTU", "y") and test_check_mode(dut_slave, "CONFIG_MB_COMM_MODE_RTU", "y"): - logger.info("ENV_TEST_INFO: Modbus RTU test mode selected in the configuration. \n") + elif test_check_mode(dut_master, 'CONFIG_MB_COMM_MODE_RTU', 'y') and test_check_mode(dut_slave, 'CONFIG_MB_COMM_MODE_RTU', 'y'): + logger.info('ENV_TEST_INFO: Modbus RTU test mode selected in the configuration. 
\n') slave_name = TEST_SLAVE_RTU master_name = TEST_MASTER_RTU else: logger.error("ENV_TEST_FAILURE: Communication mode in master and slave configuration don't match.\n") raise Exception("ENV_TEST_FAILURE: Communication mode in master and slave configuration don't match.\n") # Check if slave address for example application is default one to be able to communicate - if not test_check_mode(dut_slave, "CONFIG_MB_SLAVE_ADDR", "1"): - logger.error("ENV_TEST_FAILURE: Slave address option is incorrect.\n") - raise Exception("ENV_TEST_FAILURE: Slave address option is incorrect.\n") + if not test_check_mode(dut_slave, 'CONFIG_MB_SLAVE_ADDR', '1'): + logger.error('ENV_TEST_FAILURE: Slave address option is incorrect.\n') + raise Exception('ENV_TEST_FAILURE: Slave address option is incorrect.\n') # Flash app onto each DUT dut_master.start_app() @@ -212,15 +212,15 @@ def test_modbus_communication(env, comm_mode): dut_master_thread.join(timeout=TEST_THREAD_JOIN_TIMEOUT) if dut_slave_thread.isAlive(): - logger.error("ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n" % + logger.error('ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n' % (dut_slave_thread.tname, TEST_THREAD_JOIN_TIMEOUT)) - raise Exception("ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n" % + raise Exception('ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n' % (dut_slave_thread.tname, TEST_THREAD_JOIN_TIMEOUT)) if dut_master_thread.isAlive(): - logger.error("ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n" % + logger.error('ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n' % (dut_master_thread.tname, TEST_THREAD_JOIN_TIMEOUT)) - raise Exception("ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n" % + raise Exception('ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n' % (dut_master_thread.tname, TEST_THREAD_JOIN_TIMEOUT)) finally: dut_master.close() @@ -228,43 +228,43 @@ def test_modbus_communication(env, comm_mode): # Check if test threads completed successfully and captured data if not dut_slave_thread.result or dut_slave_thread.data is None: - logger.error("The thread %s was not run successfully." % dut_slave_thread.tname) - raise Exception("The thread %s was not run successfully." % dut_slave_thread.tname) + logger.error('The thread %s was not run successfully.' % dut_slave_thread.tname) + raise Exception('The thread %s was not run successfully.' % dut_slave_thread.tname) if not dut_master_thread.result or dut_master_thread.data is None: - logger.error("The thread %s was not run successfully." % dut_slave_thread.tname) - raise Exception("The thread %s was not run successfully." % dut_master_thread.tname) + logger.error('The thread %s was not run successfully.' % dut_slave_thread.tname) + raise Exception('The thread %s was not run successfully.' 
% dut_master_thread.tname) # Filter output to get test messages master_output = test_filter_output(dut_master_thread.data, master_expect[0], master_expect[len(master_expect) - 1]) if master_output is not None: - logger.info("The data for master thread is captured.") + logger.info('The data for master thread is captured.') logger.debug(master_output) slave_output = test_filter_output(dut_slave_thread.data, slave_expect[0], slave_expect[len(slave_expect) - 1]) if slave_output is not None: - logger.info("The data for slave thread is captured.") + logger.info('The data for slave thread is captured.') logger.debug(slave_output) # Check if parameters are read correctly by master match_count = test_check_output(master_output, pattern_dict_master_ok, expect_dict_master_ok) if match_count < TEST_READ_MIN_COUNT: - logger.error("There are errors reading parameters from %s, %d" % (dut_master_thread.tname, match_count)) - raise Exception("There are errors reading parameters from %s, %d" % (dut_master_thread.tname, match_count)) - logger.info("OK pattern test for %s, match_count=%d." % (dut_master_thread.tname, match_count)) + logger.error('There are errors reading parameters from %s, %d' % (dut_master_thread.tname, match_count)) + raise Exception('There are errors reading parameters from %s, %d' % (dut_master_thread.tname, match_count)) + logger.info('OK pattern test for %s, match_count=%d.' % (dut_master_thread.tname, match_count)) # If the test completed successfully (alarm triggered) but there are some errors during reading of parameters match_count = test_check_output(master_output, pattern_dict_master_err, expect_dict_master_err) if match_count > TEST_READ_MAX_ERR_COUNT: - logger.error("There are errors reading parameters from %s, %d" % (dut_master_thread.tname, match_count)) - raise Exception("There are errors reading parameters from %s, %d" % (dut_master_thread.tname, match_count)) - logger.info("ERROR pattern test for %s, match_count=%d." % (dut_master_thread.tname, match_count)) + logger.error('There are errors reading parameters from %s, %d' % (dut_master_thread.tname, match_count)) + raise Exception('There are errors reading parameters from %s, %d' % (dut_master_thread.tname, match_count)) + logger.info('ERROR pattern test for %s, match_count=%d.' % (dut_master_thread.tname, match_count)) match_count = test_check_output(slave_output, pattern_dict_slave_ok, expect_dict_slave_ok) if match_count < TEST_READ_MIN_COUNT: - logger.error("There are errors reading parameters from %s, %d" % (dut_slave_thread.tname, match_count)) - raise Exception("There are errors reading parameters from %s, %d" % (dut_slave_thread.tname, match_count)) - logger.info("OK pattern test for %s, match_count=%d." % (dut_slave_thread.tname, match_count)) + logger.error('There are errors reading parameters from %s, %d' % (dut_slave_thread.tname, match_count)) + raise Exception('There are errors reading parameters from %s, %d' % (dut_slave_thread.tname, match_count)) + logger.info('OK pattern test for %s, match_count=%d.' % (dut_slave_thread.tname, match_count)) if __name__ == '__main__': @@ -282,7 +282,7 @@ if __name__ == '__main__': fh.setFormatter(formatter) logger.addHandler(fh) logger.addHandler(ch) - logger.info("Start script %s." % os.path.basename(__file__)) - print("Logging file name: %s" % logger.handlers[0].baseFilename) + logger.info('Start script %s.' 
% os.path.basename(__file__)) + print('Logging file name: %s' % logger.handlers[0].baseFilename) test_modbus_communication() logging.shutdown() diff --git a/examples/protocols/modbus/tcp/example_test.py b/examples/protocols/modbus/tcp/example_test.py index 1db10be087..17c8226662 100644 --- a/examples/protocols/modbus/tcp/example_test.py +++ b/examples/protocols/modbus/tcp/example_test.py @@ -1,13 +1,13 @@ +import logging import os import re -import logging from threading import Thread import ttfw_idf from tiny_test_fw import DUT LOG_LEVEL = logging.DEBUG -LOGGER_NAME = "modbus_test" +LOGGER_NAME = 'modbus_test' # Allowed options for the test TEST_READ_MAX_ERR_COUNT = 3 # Maximum allowed read errors during initialization @@ -69,7 +69,7 @@ class DutTestThread(Thread): super(DutTestThread, self).__init__() def __enter__(self): - logger.debug("Restart %s." % self.tname) + logger.debug('Restart %s.' % self.tname) # Reset DUT first self.dut.reset() # Capture output from the DUT @@ -80,7 +80,7 @@ class DutTestThread(Thread): """ The exit method of context manager """ if exc_type is not None or exc_value is not None: - logger.info("Thread %s rised an exception type: %s, value: %s" % (self.tname, str(exc_type), str(exc_value))) + logger.info('Thread %s rised an exception type: %s, value: %s' % (self.tname, str(exc_type), str(exc_value))) def run(self): """ The function implements thread functionality @@ -94,8 +94,8 @@ class DutTestThread(Thread): # Check DUT exceptions dut_exceptions = self.dut.get_exceptions() - if "Guru Meditation Error:" in dut_exceptions: - raise Exception("%s generated an exception: %s\n" % (str(self.dut), dut_exceptions)) + if 'Guru Meditation Error:' in dut_exceptions: + raise Exception('%s generated an exception: %s\n' % (str(self.dut), dut_exceptions)) # Mark thread has run to completion without any exceptions self.data = self.dut.stop_capture_raw_data(capture_id=self.dut.name) @@ -108,13 +108,13 @@ class DutTestThread(Thread): self.dut.read() result = self.dut.expect(re.compile(message), TEST_EXPECT_STR_TIMEOUT) if int(result[0]) != index: - raise Exception("Incorrect index of IP=%d for %s\n" % (int(result[0]), str(self.dut))) - message = "IP%s=%s" % (result[0], self.ip_addr) - self.dut.write(message, "\r\n", False) - logger.debug("Sent message for %s: %s" % (self.tname, message)) + raise Exception('Incorrect index of IP=%d for %s\n' % (int(result[0]), str(self.dut))) + message = 'IP%s=%s' % (result[0], self.ip_addr) + self.dut.write(message, '\r\n', False) + logger.debug('Sent message for %s: %s' % (self.tname, message)) message = r'.*IP\([0-9]+\) = \[([0-9a-zA-Z\.\:]+)\] set from stdin.*' result = self.dut.expect(re.compile(message), TEST_EXPECT_STR_TIMEOUT) - logger.debug("Thread %s initialized with slave IP (%s)." % (self.tname, result[0])) + logger.debug('Thread %s initialized with slave IP (%s).' 
% (self.tname, result[0])) def test_start(self, timeout_value): """ The method to initialize and handle test stages @@ -122,37 +122,37 @@ class DutTestThread(Thread): def handle_get_ip4(data): """ Handle get_ip v4 """ - logger.debug("%s[STACK_IPV4]: %s" % (self.tname, str(data))) + logger.debug('%s[STACK_IPV4]: %s' % (self.tname, str(data))) self.test_stage = STACK_IPV4 def handle_get_ip6(data): """ Handle get_ip v6 """ - logger.debug("%s[STACK_IPV6]: %s" % (self.tname, str(data))) + logger.debug('%s[STACK_IPV6]: %s' % (self.tname, str(data))) self.test_stage = STACK_IPV6 def handle_init(data): """ Handle init """ - logger.debug("%s[STACK_INIT]: %s" % (self.tname, str(data))) + logger.debug('%s[STACK_INIT]: %s' % (self.tname, str(data))) self.test_stage = STACK_INIT def handle_connect(data): """ Handle connect """ - logger.debug("%s[STACK_CONNECT]: %s" % (self.tname, str(data))) + logger.debug('%s[STACK_CONNECT]: %s' % (self.tname, str(data))) self.test_stage = STACK_CONNECT def handle_test_start(data): """ Handle connect """ - logger.debug("%s[STACK_START]: %s" % (self.tname, str(data))) + logger.debug('%s[STACK_START]: %s' % (self.tname, str(data))) self.test_stage = STACK_START def handle_par_ok(data): """ Handle parameter ok """ - logger.debug("%s[READ_PAR_OK]: %s" % (self.tname, str(data))) + logger.debug('%s[READ_PAR_OK]: %s' % (self.tname, str(data))) if self.test_stage >= STACK_START: self.param_ok_count += 1 self.test_stage = STACK_PAR_OK @@ -160,14 +160,14 @@ class DutTestThread(Thread): def handle_par_fail(data): """ Handle parameter fail """ - logger.debug("%s[READ_PAR_FAIL]: %s" % (self.tname, str(data))) + logger.debug('%s[READ_PAR_FAIL]: %s' % (self.tname, str(data))) self.param_fail_count += 1 self.test_stage = STACK_PAR_FAIL def handle_destroy(data): """ Handle destroy """ - logger.debug("%s[DESTROY]: %s" % (self.tname, str(data))) + logger.debug('%s[DESTROY]: %s' % (self.tname, str(data))) self.test_stage = STACK_DESTROY self.test_finish = True @@ -183,7 +183,7 @@ class DutTestThread(Thread): (re.compile(self.expected[STACK_DESTROY]), handle_destroy), timeout=timeout_value) except DUT.ExpectTimeout: - logger.debug("%s, expect timeout on stage #%d (%s seconds)" % (self.tname, self.test_stage, timeout_value)) + logger.debug('%s, expect timeout on stage #%d (%s seconds)' % (self.tname, self.test_stage, timeout_value)) self.test_finish = True @@ -193,7 +193,7 @@ def test_check_mode(dut=None, mode_str=None, value=None): global logger try: opt = dut.app.get_sdkconfig()[mode_str] - logger.debug("%s {%s} = %s.\n" % (str(dut), mode_str, opt)) + logger.debug('%s {%s} = %s.\n' % (str(dut), mode_str, opt)) return value == opt except Exception: logger.error('ENV_TEST_FAILURE: %s: Cannot find option %s in sdkconfig.' % (str(dut), mode_str)) @@ -208,8 +208,8 @@ def test_modbus_communication(env, comm_mode): # Get device under test. 
Both duts must be able to be connected to WiFi router dut_master = env.get_dut('modbus_tcp_master', os.path.join(rel_project_path, TEST_MASTER_TCP)) dut_slave = env.get_dut('modbus_tcp_slave', os.path.join(rel_project_path, TEST_SLAVE_TCP)) - log_file = os.path.join(env.log_path, "modbus_tcp_test.log") - print("Logging file name: %s" % log_file) + log_file = os.path.join(env.log_path, 'modbus_tcp_test.log') + print('Logging file name: %s' % log_file) try: # create file handler which logs even debug messages @@ -229,29 +229,29 @@ def test_modbus_communication(env, comm_mode): logger.addHandler(ch) # Check Kconfig configuration options for each built example - if (test_check_mode(dut_master, "CONFIG_FMB_COMM_MODE_TCP_EN", "y") and - test_check_mode(dut_slave, "CONFIG_FMB_COMM_MODE_TCP_EN", "y")): + if (test_check_mode(dut_master, 'CONFIG_FMB_COMM_MODE_TCP_EN', 'y') and + test_check_mode(dut_slave, 'CONFIG_FMB_COMM_MODE_TCP_EN', 'y')): slave_name = TEST_SLAVE_TCP master_name = TEST_MASTER_TCP else: - logger.error("ENV_TEST_FAILURE: IP resolver mode do not match in the master and slave implementation.\n") - raise Exception("ENV_TEST_FAILURE: IP resolver mode do not match in the master and slave implementation.\n") + logger.error('ENV_TEST_FAILURE: IP resolver mode do not match in the master and slave implementation.\n') + raise Exception('ENV_TEST_FAILURE: IP resolver mode do not match in the master and slave implementation.\n') address = None - if test_check_mode(dut_master, "CONFIG_MB_SLAVE_IP_FROM_STDIN", "y"): - logger.info("ENV_TEST_INFO: Set slave IP address through STDIN.\n") + if test_check_mode(dut_master, 'CONFIG_MB_SLAVE_IP_FROM_STDIN', 'y'): + logger.info('ENV_TEST_INFO: Set slave IP address through STDIN.\n') # Flash app onto DUT (Todo: Debug case when the slave flashed before master then expect does not work correctly for no reason dut_slave.start_app() dut_master.start_app() - if test_check_mode(dut_master, "CONFIG_EXAMPLE_CONNECT_IPV6", "y"): + if test_check_mode(dut_master, 'CONFIG_EXAMPLE_CONNECT_IPV6', 'y'): address = dut_slave.expect(re.compile(pattern_dict_slave[STACK_IPV6]), TEST_EXPECT_STR_TIMEOUT) else: address = dut_slave.expect(re.compile(pattern_dict_slave[STACK_IPV4]), TEST_EXPECT_STR_TIMEOUT) if address is not None: - print("Found IP slave address: %s" % address[0]) + print('Found IP slave address: %s' % address[0]) else: - raise Exception("ENV_TEST_FAILURE: Slave IP address is not found in the output. Check network settings.\n") + raise Exception('ENV_TEST_FAILURE: Slave IP address is not found in the output. 
Check network settings.\n') else: - raise Exception("ENV_TEST_FAILURE: Slave IP resolver is not configured correctly.\n") + raise Exception('ENV_TEST_FAILURE: Slave IP resolver is not configured correctly.\n') # Create thread for each dut with DutTestThread(dut=dut_master, name=master_name, ip_addr=address[0], expect=pattern_dict_master) as dut_master_thread: @@ -266,21 +266,21 @@ def test_modbus_communication(env, comm_mode): dut_master_thread.join(timeout=TEST_THREAD_JOIN_TIMEOUT) if dut_slave_thread.isAlive(): - logger.error("ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n" % + logger.error('ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n' % (dut_slave_thread.tname, TEST_THREAD_JOIN_TIMEOUT)) - raise Exception("ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n" % + raise Exception('ENV_TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n' % (dut_slave_thread.tname, TEST_THREAD_JOIN_TIMEOUT)) if dut_master_thread.isAlive(): - logger.error("TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n" % + logger.error('TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n' % (dut_master_thread.tname, TEST_THREAD_JOIN_TIMEOUT)) - raise Exception("TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n" % + raise Exception('TEST_FAILURE: The thread %s is not completed successfully after %d seconds.\n' % (dut_master_thread.tname, TEST_THREAD_JOIN_TIMEOUT)) - logger.info("TEST_INFO: %s error count = %d, %s error count = %d.\n" % + logger.info('TEST_INFO: %s error count = %d, %s error count = %d.\n' % (dut_master_thread.tname, dut_master_thread.param_fail_count, dut_slave_thread.tname, dut_slave_thread.param_fail_count)) - logger.info("TEST_INFO: %s ok count = %d, %s ok count = %d.\n" % + logger.info('TEST_INFO: %s ok count = %d, %s ok count = %d.\n' % (dut_master_thread.tname, dut_master_thread.param_ok_count, dut_slave_thread.tname, dut_slave_thread.param_ok_count)) @@ -288,10 +288,10 @@ def test_modbus_communication(env, comm_mode): (dut_slave_thread.param_fail_count > TEST_READ_MAX_ERR_COUNT) or (dut_slave_thread.param_ok_count == 0) or (dut_master_thread.param_ok_count == 0)): - raise Exception("TEST_FAILURE: %s parameter read error(ok) count = %d(%d), %s parameter read error(ok) count = %d(%d).\n" % + raise Exception('TEST_FAILURE: %s parameter read error(ok) count = %d(%d), %s parameter read error(ok) count = %d(%d).\n' % (dut_master_thread.tname, dut_master_thread.param_fail_count, dut_master_thread.param_ok_count, dut_slave_thread.tname, dut_slave_thread.param_fail_count, dut_slave_thread.param_ok_count)) - logger.info("TEST_SUCCESS: The Modbus parameter test is completed successfully.\n") + logger.info('TEST_SUCCESS: The Modbus parameter test is completed successfully.\n') finally: dut_master.close() diff --git a/examples/protocols/mqtt/ssl/mqtt_ssl_example_test.py b/examples/protocols/mqtt/ssl/mqtt_ssl_example_test.py index 2fddd57aa8..5c77c7b141 100644 --- a/examples/protocols/mqtt/ssl/mqtt_ssl_example_test.py +++ b/examples/protocols/mqtt/ssl/mqtt_ssl_example_test.py @@ -1,29 +1,28 @@ -from __future__ import print_function -from __future__ import unicode_literals -from builtins import str -import re +from __future__ import print_function, unicode_literals + import os -import sys +import re import ssl +import sys +from builtins import str +from threading import Event, Thread + import 
paho.mqtt.client as mqtt -from threading import Thread, Event - -from tiny_test_fw import DUT import ttfw_idf - +from tiny_test_fw import DUT event_client_connected = Event() event_stop_client = Event() event_client_received_correct = Event() event_client_received_binary = Event() -message_log = "" +message_log = '' # The callback for when the client receives a CONNACK response from the server. def on_connect(client, userdata, flags, rc): - print("Connected with result code " + str(rc)) + print('Connected with result code ' + str(rc)) event_client_connected.set() - client.subscribe("/topic/qos0") + client.subscribe('/topic/qos0') def mqtt_client_task(client): @@ -36,33 +35,33 @@ def on_message(client, userdata, msg): global message_log global event_client_received_correct global event_client_received_binary - if msg.topic == "/topic/binary": + if msg.topic == '/topic/binary': binary = userdata size = os.path.getsize(binary) - print("Receiving binary from esp and comparing with {}, size {}...".format(binary, size)) - with open(binary, "rb") as f: + print('Receiving binary from esp and comparing with {}, size {}...'.format(binary, size)) + with open(binary, 'rb') as f: bin = f.read() if bin == msg.payload[:size]: - print("...matches!") + print('...matches!') event_client_received_binary.set() return else: - recv_binary = binary + ".received" - with open(recv_binary, "w") as fw: + recv_binary = binary + '.received' + with open(recv_binary, 'w') as fw: fw.write(msg.payload) raise ValueError('Received binary (saved as: {}) does not match the original file: {}'.format(recv_binary, binary)) payload = msg.payload.decode() - if not event_client_received_correct.is_set() and payload == "data": - client.subscribe("/topic/binary") - client.publish("/topic/qos0", "send binary please") - if msg.topic == "/topic/qos0" and payload == "data": + if not event_client_received_correct.is_set() and payload == 'data': + client.subscribe('/topic/binary') + client.publish('/topic/qos0', 'send binary please') + if msg.topic == '/topic/qos0' and payload == 'data': event_client_received_correct.set() - message_log += "Received data:" + msg.topic + " " + payload + "\n" + message_log += 'Received data:' + msg.topic + ' ' + payload + '\n' -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_mqtt_ssl(env, extra_data): - broker_url = "" + broker_url = '' broker_port = 0 """ steps: | @@ -72,15 +71,15 @@ def test_examples_protocol_mqtt_ssl(env, extra_data): 4. Test ESP32 client received correct qos0 message 5. 
Test python client receives binary data from running partition and compares it with the binary """ - dut1 = env.get_dut("mqtt_ssl", "examples/protocols/mqtt/ssl", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('mqtt_ssl', 'examples/protocols/mqtt/ssl', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "mqtt_ssl.bin") + binary_file = os.path.join(dut1.app.binary_path, 'mqtt_ssl.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("mqtt_ssl_bin_size", "{}KB" + ttfw_idf.log_performance('mqtt_ssl_bin_size', '{}KB' .format(bin_size // 1024)) # Look for host:port in sdkconfig try: - value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()["CONFIG_BROKER_URI"]) + value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()['CONFIG_BROKER_URI']) broker_url = value.group(1) broker_port = int(value.group(2)) except Exception: @@ -97,31 +96,31 @@ def test_examples_protocol_mqtt_ssl(env, extra_data): None, None, cert_reqs=ssl.CERT_NONE, tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None) client.tls_insecure_set(True) - print("Connecting...") + print('Connecting...') client.connect(broker_url, broker_port, 60) except Exception: - print("ENV_TEST_FAILURE: Unexpected error while connecting to broker {}: {}:".format(broker_url, sys.exc_info()[0])) + print('ENV_TEST_FAILURE: Unexpected error while connecting to broker {}: {}:'.format(broker_url, sys.exc_info()[0])) raise # Starting a py-client in a separate thread thread1 = Thread(target=mqtt_client_task, args=(client,)) thread1.start() try: - print("Connecting py-client to broker {}:{}...".format(broker_url, broker_port)) + print('Connecting py-client to broker {}:{}...'.format(broker_url, broker_port)) if not event_client_connected.wait(timeout=30): - raise ValueError("ENV_TEST_FAILURE: Test script cannot connect to broker: {}".format(broker_url)) + raise ValueError('ENV_TEST_FAILURE: Test script cannot connect to broker: {}'.format(broker_url)) dut1.start_app() try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: print('ENV_TEST_FAILURE: Cannot connect to AP') raise - print("Checking py-client received msg published from esp...") + print('Checking py-client received msg published from esp...') if not event_client_received_correct.wait(timeout=30): raise ValueError('Wrong data received, msg log: {}'.format(message_log)) - print("Checking esp-client received msg published from py-client...") - dut1.expect(re.compile(r"DATA=send binary please"), timeout=30) - print("Receiving binary data from running partition...") + print('Checking esp-client received msg published from py-client...') + dut1.expect(re.compile(r'DATA=send binary please'), timeout=30) + print('Receiving binary data from running partition...') if not event_client_received_binary.wait(timeout=30): raise ValueError('Binary not received within timeout') finally: diff --git a/examples/protocols/mqtt/tcp/mqtt_tcp_example_test.py b/examples/protocols/mqtt/tcp/mqtt_tcp_example_test.py index 6159481b9d..7acb1dc754 100644 --- a/examples/protocols/mqtt/tcp/mqtt_tcp_example_test.py +++ b/examples/protocols/mqtt/tcp/mqtt_tcp_example_test.py @@ -1,20 +1,20 @@ -import re import os -import sys +import re import socket -from threading import Thread import struct +import sys 
import time +from threading import Thread -from tiny_test_fw import DUT import ttfw_idf +from tiny_test_fw import DUT msgid = -1 def get_my_ip(): s1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s1.connect(("8.8.8.8", 80)) + s1.connect(('8.8.8.8', 80)) my_ip = s1.getsockname()[0] s1.close() return my_ip @@ -22,7 +22,7 @@ def get_my_ip(): def mqqt_server_sketch(my_ip, port): global msgid - print("Starting the server on {}".format(my_ip)) + print('Starting the server on {}'.format(my_ip)) s = None try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) @@ -31,29 +31,29 @@ def mqqt_server_sketch(my_ip, port): s.listen(1) q,addr = s.accept() q.settimeout(30) - print("connection accepted") + print('connection accepted') except Exception: - print("Local server on {}:{} listening/accepting failure: {}" - "Possibly check permissions or firewall settings" - "to accept connections on this address".format(my_ip, port, sys.exc_info()[0])) + print('Local server on {}:{} listening/accepting failure: {}' + 'Possibly check permissions or firewall settings' + 'to accept connections on this address'.format(my_ip, port, sys.exc_info()[0])) raise data = q.recv(1024) # check if received initial empty message - print("received from client {}".format(data)) + print('received from client {}'.format(data)) data = bytearray([0x20, 0x02, 0x00, 0x00]) q.send(data) # try to receive qos1 data = q.recv(1024) - msgid = struct.unpack(">H", data[15:17])[0] - print("received from client {}, msgid: {}".format(data, msgid)) + msgid = struct.unpack('>H', data[15:17])[0] + print('received from client {}, msgid: {}'.format(data, msgid)) data = bytearray([0x40, 0x02, data[15], data[16]]) q.send(data) time.sleep(5) s.close() - print("server closed") + print('server closed') -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_mqtt_qos1(env, extra_data): global msgid """ @@ -63,11 +63,11 @@ def test_examples_protocol_mqtt_qos1(env, extra_data): 3. Test evaluates that qos1 message is queued and removed from queued after ACK received 4. Test the broker received the same message id evaluated in step 3 """ - dut1 = env.get_dut("mqtt_tcp", "examples/protocols/mqtt/tcp", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('mqtt_tcp', 'examples/protocols/mqtt/tcp', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "mqtt_tcp.bin") + binary_file = os.path.join(dut1.app.binary_path, 'mqtt_tcp.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("mqtt_tcp_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('mqtt_tcp_bin_size', '{}KB'.format(bin_size // 1024)) # 1. 
start mqtt broker sketch host_ip = get_my_ip() thread1 = Thread(target=mqqt_server_sketch, args=(host_ip,1883)) @@ -76,23 +76,23 @@ def test_examples_protocol_mqtt_qos1(env, extra_data): dut1.start_app() # waiting for getting the IP address try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - print("writing to device: {}".format("mqtt://" + host_ip + "\n")) - dut1.write("mqtt://" + host_ip + "\n") + print('writing to device: {}'.format('mqtt://' + host_ip + '\n')) + dut1.write('mqtt://' + host_ip + '\n') thread1.join() - print("Message id received from server: {}".format(msgid)) + print('Message id received from server: {}'.format(msgid)) # 3. check the message id was enqueued and then deleted - msgid_enqueued = dut1.expect(re.compile(r"OUTBOX: ENQUEUE msgid=([0-9]+)"), timeout=30) - msgid_deleted = dut1.expect(re.compile(r"OUTBOX: DELETED msgid=([0-9]+)"), timeout=30) + msgid_enqueued = dut1.expect(re.compile(r'OUTBOX: ENQUEUE msgid=([0-9]+)'), timeout=30) + msgid_deleted = dut1.expect(re.compile(r'OUTBOX: DELETED msgid=([0-9]+)'), timeout=30) # 4. check the msgid of received data are the same as that of enqueued and deleted from outbox if (msgid_enqueued[0] == str(msgid) and msgid_deleted[0] == str(msgid)): - print("PASS: Received correct msg id") + print('PASS: Received correct msg id') else: - print("Failure!") + print('Failure!') raise ValueError('Mismatch of msgid: received: {}, enqueued {}, deleted {}'.format(msgid, msgid_enqueued, msgid_deleted)) diff --git a/examples/protocols/mqtt/ws/mqtt_ws_example_test.py b/examples/protocols/mqtt/ws/mqtt_ws_example_test.py index fdf1e2cdcb..69c5dc99de 100644 --- a/examples/protocols/mqtt/ws/mqtt_ws_example_test.py +++ b/examples/protocols/mqtt/ws/mqtt_ws_example_test.py @@ -1,26 +1,26 @@ -from __future__ import print_function -from __future__ import unicode_literals -from builtins import str -import re -import os -import sys -import paho.mqtt.client as mqtt -from threading import Thread, Event +from __future__ import print_function, unicode_literals -from tiny_test_fw import DUT +import os +import re +import sys +from builtins import str +from threading import Event, Thread + +import paho.mqtt.client as mqtt import ttfw_idf +from tiny_test_fw import DUT event_client_connected = Event() event_stop_client = Event() event_client_received_correct = Event() -message_log = "" +message_log = '' # The callback for when the client receives a CONNACK response from the server. 
def on_connect(client, userdata, flags, rc): - print("Connected with result code " + str(rc)) + print('Connected with result code ' + str(rc)) event_client_connected.set() - client.subscribe("/topic/qos0") + client.subscribe('/topic/qos0') def mqtt_client_task(client): @@ -32,16 +32,16 @@ def mqtt_client_task(client): def on_message(client, userdata, msg): global message_log payload = msg.payload.decode() - if not event_client_received_correct.is_set() and payload == "data": - client.publish("/topic/qos0", "data_to_esp32") - if msg.topic == "/topic/qos0" and payload == "data": + if not event_client_received_correct.is_set() and payload == 'data': + client.publish('/topic/qos0', 'data_to_esp32') + if msg.topic == '/topic/qos0' and payload == 'data': event_client_received_correct.set() - message_log += "Received data:" + msg.topic + " " + payload + "\n" + message_log += 'Received data:' + msg.topic + ' ' + payload + '\n' -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_mqtt_ws(env, extra_data): - broker_url = "" + broker_url = '' broker_port = 0 """ steps: | @@ -50,14 +50,14 @@ def test_examples_protocol_mqtt_ws(env, extra_data): 3. Test evaluates it received correct qos0 message 4. Test ESP32 client received correct qos0 message """ - dut1 = env.get_dut("mqtt_websocket", "examples/protocols/mqtt/ws", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('mqtt_websocket', 'examples/protocols/mqtt/ws', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "mqtt_websocket.bin") + binary_file = os.path.join(dut1.app.binary_path, 'mqtt_websocket.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("mqtt_websocket_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('mqtt_websocket_bin_size', '{}KB'.format(bin_size // 1024)) # Look for host:port in sdkconfig try: - value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()["CONFIG_BROKER_URI"]) + value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()['CONFIG_BROKER_URI']) broker_url = value.group(1) broker_port = int(value.group(2)) except Exception: @@ -66,33 +66,33 @@ def test_examples_protocol_mqtt_ws(env, extra_data): client = None # 1. 
Test connects to a broker try: - client = mqtt.Client(transport="websockets") + client = mqtt.Client(transport='websockets') client.on_connect = on_connect client.on_message = on_message - print("Connecting...") + print('Connecting...') client.connect(broker_url, broker_port, 60) except Exception: - print("ENV_TEST_FAILURE: Unexpected error while connecting to broker {}: {}:".format(broker_url, sys.exc_info()[0])) + print('ENV_TEST_FAILURE: Unexpected error while connecting to broker {}: {}:'.format(broker_url, sys.exc_info()[0])) raise # Starting a py-client in a separate thread thread1 = Thread(target=mqtt_client_task, args=(client,)) thread1.start() try: - print("Connecting py-client to broker {}:{}...".format(broker_url, broker_port)) + print('Connecting py-client to broker {}:{}...'.format(broker_url, broker_port)) if not event_client_connected.wait(timeout=30): - raise ValueError("ENV_TEST_FAILURE: Test script cannot connect to broker: {}".format(broker_url)) + raise ValueError('ENV_TEST_FAILURE: Test script cannot connect to broker: {}'.format(broker_url)) dut1.start_app() try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: print('ENV_TEST_FAILURE: Cannot connect to AP') raise - print("Checking py-client received msg published from esp...") + print('Checking py-client received msg published from esp...') if not event_client_received_correct.wait(timeout=30): raise ValueError('Wrong data received, msg log: {}'.format(message_log)) - print("Checking esp-client received msg published from py-client...") - dut1.expect(re.compile(r"DATA=data_to_esp32"), timeout=30) + print('Checking esp-client received msg published from py-client...') + dut1.expect(re.compile(r'DATA=data_to_esp32'), timeout=30) finally: event_stop_client.set() thread1.join() diff --git a/examples/protocols/mqtt/wss/mqtt_wss_example_test.py b/examples/protocols/mqtt/wss/mqtt_wss_example_test.py index 0bc1999351..5e2fa8a794 100644 --- a/examples/protocols/mqtt/wss/mqtt_wss_example_test.py +++ b/examples/protocols/mqtt/wss/mqtt_wss_example_test.py @@ -1,28 +1,27 @@ from __future__ import unicode_literals -from __future__ import unicode_literals -from builtins import str -import re + import os -import sys +import re import ssl +import sys +from builtins import str +from threading import Event, Thread + import paho.mqtt.client as mqtt -from threading import Thread, Event - -from tiny_test_fw import DUT import ttfw_idf - +from tiny_test_fw import DUT event_client_connected = Event() event_stop_client = Event() event_client_received_correct = Event() -message_log = "" +message_log = '' # The callback for when the client receives a CONNACK response from the server. 
def on_connect(client, userdata, flags, rc): - print("Connected with result code " + str(rc)) + print('Connected with result code ' + str(rc)) event_client_connected.set() - client.subscribe("/topic/qos0") + client.subscribe('/topic/qos0') def mqtt_client_task(client): @@ -34,16 +33,16 @@ def mqtt_client_task(client): def on_message(client, userdata, msg): global message_log payload = msg.payload.decode() - if not event_client_received_correct.is_set() and payload == "data": - client.publish("/topic/qos0", "data_to_esp32") - if msg.topic == "/topic/qos0" and payload == "data": + if not event_client_received_correct.is_set() and payload == 'data': + client.publish('/topic/qos0', 'data_to_esp32') + if msg.topic == '/topic/qos0' and payload == 'data': event_client_received_correct.set() - message_log += "Received data:" + msg.topic + " " + payload + "\n" + message_log += 'Received data:' + msg.topic + ' ' + payload + '\n' -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_mqtt_wss(env, extra_data): - broker_url = "" + broker_url = '' broker_port = 0 """ steps: | @@ -52,14 +51,14 @@ def test_examples_protocol_mqtt_wss(env, extra_data): 3. Test evaluates it received correct qos0 message 4. Test ESP32 client received correct qos0 message """ - dut1 = env.get_dut("mqtt_websocket_secure", "examples/protocols/mqtt/wss", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('mqtt_websocket_secure', 'examples/protocols/mqtt/wss', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "mqtt_websocket_secure.bin") + binary_file = os.path.join(dut1.app.binary_path, 'mqtt_websocket_secure.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("mqtt_websocket_secure_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('mqtt_websocket_secure_bin_size', '{}KB'.format(bin_size // 1024)) # Look for host:port in sdkconfig try: - value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()["CONFIG_BROKER_URI"]) + value = re.search(r'\:\/\/([^:]+)\:([0-9]+)', dut1.app.get_sdkconfig()['CONFIG_BROKER_URI']) broker_url = value.group(1) broker_port = int(value.group(2)) except Exception: @@ -68,36 +67,36 @@ def test_examples_protocol_mqtt_wss(env, extra_data): client = None # 1. 
Test connects to a broker try: - client = mqtt.Client(transport="websockets") + client = mqtt.Client(transport='websockets') client.on_connect = on_connect client.on_message = on_message client.tls_set(None, None, None, cert_reqs=ssl.CERT_NONE, tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None) - print("Connecting...") + print('Connecting...') client.connect(broker_url, broker_port, 60) except Exception: - print("ENV_TEST_FAILURE: Unexpected error while connecting to broker {}: {}:".format(broker_url, sys.exc_info()[0])) + print('ENV_TEST_FAILURE: Unexpected error while connecting to broker {}: {}:'.format(broker_url, sys.exc_info()[0])) raise # Starting a py-client in a separate thread thread1 = Thread(target=mqtt_client_task, args=(client,)) thread1.start() try: - print("Connecting py-client to broker {}:{}...".format(broker_url, broker_port)) + print('Connecting py-client to broker {}:{}...'.format(broker_url, broker_port)) if not event_client_connected.wait(timeout=30): - raise ValueError("ENV_TEST_FAILURE: Test script cannot connect to broker: {}".format(broker_url)) + raise ValueError('ENV_TEST_FAILURE: Test script cannot connect to broker: {}'.format(broker_url)) dut1.start_app() try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: print('ENV_TEST_FAILURE: Cannot connect to AP') raise - print("Checking py-client received msg published from esp...") + print('Checking py-client received msg published from esp...') if not event_client_received_correct.wait(timeout=30): raise ValueError('Wrong data received, msg log: {}'.format(message_log)) - print("Checking esp-client received msg published from py-client...") - dut1.expect(re.compile(r"DATA=data_to_esp32"), timeout=30) + print('Checking esp-client received msg published from py-client...') + dut1.expect(re.compile(r'DATA=data_to_esp32'), timeout=30) finally: event_stop_client.set() thread1.join() diff --git a/examples/protocols/pppos_client/example_test.py b/examples/protocols/pppos_client/example_test.py index c16589356f..d332830fb6 100644 --- a/examples/protocols/pppos_client/example_test.py +++ b/examples/protocols/pppos_client/example_test.py @@ -1,10 +1,12 @@ from __future__ import unicode_literals -from tiny_test_fw import Utility + import os -import serial import threading import time + +import serial import ttfw_idf +from tiny_test_fw import Utility class SerialThread(object): diff --git a/examples/protocols/sntp/example_test.py b/examples/protocols/sntp/example_test.py index 0f04189c93..c9fee3f783 100644 --- a/examples/protocols/sntp/example_test.py +++ b/examples/protocols/sntp/example_test.py @@ -1,8 +1,10 @@ from __future__ import unicode_literals -from tiny_test_fw import Utility + import datetime import re + import ttfw_idf +from tiny_test_fw import Utility @ttfw_idf.idf_example_test(env_tag='Example_WIFI') diff --git a/examples/protocols/sockets/tcp_client/example_test.py b/examples/protocols/sockets/tcp_client/example_test.py index 36af228380..ae0a33c2ca 100644 --- a/examples/protocols/sockets/tcp_client/example_test.py +++ b/examples/protocols/sockets/tcp_client/example_test.py @@ -6,17 +6,17 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from builtins import input +from __future__ import print_function, unicode_literals + 
import os import re -import sys -import netifaces import socket -from threading import Thread, Event -import ttfw_idf +import sys +from builtins import input +from threading import Event, Thread +import netifaces +import ttfw_idf # ----------- Config ---------- PORT = 3333 @@ -26,7 +26,7 @@ INTERFACE = 'eth0' def get_my_ip(type): for i in netifaces.ifaddresses(INTERFACE)[type]: - return i['addr'].replace("%{}".format(INTERFACE), "") + return i['addr'].replace('%{}'.format(INTERFACE), '') class TcpServer: @@ -44,11 +44,11 @@ class TcpServer: try: self.socket.bind(('', self.port)) except socket.error as e: - print("Bind failed:{}".format(e)) + print('Bind failed:{}'.format(e)) raise self.socket.listen(1) - print("Starting server on port={} family_addr={}".format(self.port, self.family_addr)) + print('Starting server on port={} family_addr={}'.format(self.port, self.family_addr)) self.server_thread = Thread(target=self.run_server) self.server_thread.start() return self @@ -68,7 +68,7 @@ class TcpServer: while not self.shutdown.is_set(): try: conn, address = self.socket.accept() # accept new connection - print("Connection from: {}".format(address)) + print('Connection from: {}'.format(address)) conn.setblocking(1) data = conn.recv(1024) if not data: @@ -79,13 +79,13 @@ class TcpServer: conn.send(reply.encode()) conn.close() except socket.error as e: - print("Running server failed:{}".format(e)) + print('Running server failed:{}'.format(e)) raise if not self.persist: break -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_socket_tcpclient(env, extra_data): """ steps: @@ -93,39 +93,39 @@ def test_examples_protocol_socket_tcpclient(env, extra_data): 2. have the board connect to the server 3. 
send and receive data """ - dut1 = env.get_dut("tcp_client", "examples/protocols/sockets/tcp_client", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('tcp_client', 'examples/protocols/sockets/tcp_client', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "tcp_client.bin") + binary_file = os.path.join(dut1.app.binary_path, 'tcp_client.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("tcp_client_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('tcp_client_bin_size', '{}KB'.format(bin_size // 1024)) # start test dut1.start_app() - ipv4 = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30)[0] + ipv4 = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30)[0] ipv6_r = r':'.join((r'[0-9a-fA-F]{4}',) * 8) # expect all 8 octets from IPv6 (assumes it's printed in the long form) ipv6 = dut1.expect(re.compile(r' IPv6 address: ({})'.format(ipv6_r)), timeout=30)[0] - print("Connected with IPv4={} and IPv6={}".format(ipv4, ipv6)) + print('Connected with IPv4={} and IPv6={}'.format(ipv4, ipv6)) # test IPv4 with TcpServer(PORT, socket.AF_INET): server_ip = get_my_ip(netifaces.AF_INET) - print("Connect tcp client to server IP={}".format(server_ip)) + print('Connect tcp client to server IP={}'.format(server_ip)) dut1.write(server_ip) - dut1.expect(re.compile(r"OK: Message from ESP32")) + dut1.expect(re.compile(r'OK: Message from ESP32')) # test IPv6 with TcpServer(PORT, socket.AF_INET6): server_ip = get_my_ip(netifaces.AF_INET6) - print("Connect tcp client to server IP={}".format(server_ip)) + print('Connect tcp client to server IP={}'.format(server_ip)) dut1.write(server_ip) - dut1.expect(re.compile(r"OK: Message from ESP32")) + dut1.expect(re.compile(r'OK: Message from ESP32')) if __name__ == '__main__': - if sys.argv[1:] and sys.argv[1].startswith("IPv"): # if additional arguments provided: + if sys.argv[1:] and sys.argv[1].startswith('IPv'): # if additional arguments provided: # Usage: example_test.py - family_addr = socket.AF_INET6 if sys.argv[1] == "IPv6" else socket.AF_INET + family_addr = socket.AF_INET6 if sys.argv[1] == 'IPv6' else socket.AF_INET with TcpServer(PORT, family_addr, persist=True) as s: - print(input("Press Enter stop the server...")) + print(input('Press Enter to stop the server...')) else: test_examples_protocol_socket_tcpclient() diff --git a/examples/protocols/sockets/tcp_server/example_test.py b/examples/protocols/sockets/tcp_server/example_test.py index a29fd50882..6cfce086c1 100644 --- a/examples/protocols/sockets/tcp_server/example_test.py +++ b/examples/protocols/sockets/tcp_server/example_test.py @@ -6,14 +6,14 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import print_function, unicode_literals + import os -import sys import re import socket -import ttfw_idf +import sys +import ttfw_idf # ----------- Config ---------- PORT = 3333 @@ -46,28 +46,28 @@ def tcp_client(address, payload): return data.decode() -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_socket_tcpserver(env, extra_data): - MESSAGE = "Data to ESP" + MESSAGE = 'Data to ESP' """ steps: 1. join AP 2. have the board connect to the server 3.
send and receive data """ - dut1 = env.get_dut("tcp_client", "examples/protocols/sockets/tcp_server", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('tcp_client', 'examples/protocols/sockets/tcp_server', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "tcp_server.bin") + binary_file = os.path.join(dut1.app.binary_path, 'tcp_server.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("tcp_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('tcp_server_bin_size', '{}KB'.format(bin_size // 1024)) # start test dut1.start_app() - ipv4 = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30)[0] + ipv4 = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30)[0] ipv6_r = r':'.join((r'[0-9a-fA-F]{4}',) * 8) # expect all 8 octets from IPv6 (assumes it's printed in the long form) ipv6 = dut1.expect(re.compile(r' IPv6 address: ({})'.format(ipv6_r)), timeout=30)[0] - print("Connected with IPv4={} and IPv6={}".format(ipv4, ipv6)) + print('Connected with IPv4={} and IPv6={}'.format(ipv4, ipv6)) # test IPv4 received = tcp_client(ipv4, MESSAGE) @@ -75,7 +75,7 @@ def test_examples_protocol_socket_tcpserver(env, extra_data): raise dut1.expect(MESSAGE) # test IPv6 - received = tcp_client("{}%{}".format(ipv6, INTERFACE), MESSAGE) + received = tcp_client('{}%{}'.format(ipv6, INTERFACE), MESSAGE) if not received == MESSAGE: raise dut1.expect(MESSAGE) diff --git a/examples/protocols/sockets/udp_client/example_test.py b/examples/protocols/sockets/udp_client/example_test.py index c0be30e781..d038df99e3 100644 --- a/examples/protocols/sockets/udp_client/example_test.py +++ b/examples/protocols/sockets/udp_client/example_test.py @@ -6,17 +6,17 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals -from builtins import input +from __future__ import print_function, unicode_literals + import os import re -import netifaces import socket -from threading import Thread, Event -import ttfw_idf import sys +from builtins import input +from threading import Event, Thread +import netifaces +import ttfw_idf # ----------- Config ---------- PORT = 3333 @@ -26,7 +26,7 @@ INTERFACE = 'eth0' def get_my_ip(type): for i in netifaces.ifaddresses(INTERFACE)[type]: - return i['addr'].replace("%{}".format(INTERFACE), "") + return i['addr'].replace('%{}'.format(INTERFACE), '') class UdpServer: @@ -44,10 +44,10 @@ class UdpServer: try: self.socket.bind(('', self.port)) except socket.error as e: - print("Bind failed:{}".format(e)) + print('Bind failed:{}'.format(e)) raise - print("Starting server on port={} family_addr={}".format(self.port, self.family_addr)) + print('Starting server on port={} family_addr={}'.format(self.port, self.family_addr)) self.server_thread = Thread(target=self.run_server) self.server_thread.start() return self @@ -72,13 +72,13 @@ class UdpServer: reply = 'OK: ' + data self.socket.sendto(reply.encode(), addr) except socket.error as e: - print("Running server failed:{}".format(e)) + print('Running server failed:{}'.format(e)) raise if not self.persist: break -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_socket_udpclient(env, extra_data): """ steps: @@ -86,39 +86,39 @@ def test_examples_protocol_socket_udpclient(env, extra_data): 2. have the board connect to the server 3. 
send and receive data """ - dut1 = env.get_dut("udp_client", "examples/protocols/sockets/udp_client", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('udp_client', 'examples/protocols/sockets/udp_client', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "udp_client.bin") + binary_file = os.path.join(dut1.app.binary_path, 'udp_client.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("udp_client_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('udp_client_bin_size', '{}KB'.format(bin_size // 1024)) # start test dut1.start_app() - ipv4 = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30)[0] + ipv4 = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30)[0] ipv6_r = r':'.join((r'[0-9a-fA-F]{4}',) * 8) # expect all 8 octets from IPv6 (assumes it's printed in the long form) ipv6 = dut1.expect(re.compile(r' IPv6 address: ({})'.format(ipv6_r)), timeout=30)[0] - print("Connected with IPv4={} and IPv6={}".format(ipv4, ipv6)) + print('Connected with IPv4={} and IPv6={}'.format(ipv4, ipv6)) # test IPv4 with UdpServer(PORT, socket.AF_INET): server_ip = get_my_ip(netifaces.AF_INET) - print("Connect udp client to server IP={}".format(server_ip)) + print('Connect udp client to server IP={}'.format(server_ip)) dut1.write(server_ip) - dut1.expect(re.compile(r"OK: Message from ESP32")) + dut1.expect(re.compile(r'OK: Message from ESP32')) # test IPv6 with UdpServer(PORT, socket.AF_INET6): server_ip = get_my_ip(netifaces.AF_INET6) - print("Connect udp client to server IP={}".format(server_ip)) + print('Connect udp client to server IP={}'.format(server_ip)) dut1.write(server_ip) - dut1.expect(re.compile(r"OK: Message from ESP32")) + dut1.expect(re.compile(r'OK: Message from ESP32')) if __name__ == '__main__': - if sys.argv[1:] and sys.argv[1].startswith("IPv"): # if additional arguments provided: + if sys.argv[1:] and sys.argv[1].startswith('IPv'): # if additional arguments provided: # Usage: example_test.py - family_addr = socket.AF_INET6 if sys.argv[1] == "IPv6" else socket.AF_INET + family_addr = socket.AF_INET6 if sys.argv[1] == 'IPv6' else socket.AF_INET with UdpServer(PORT, family_addr, persist=True) as s: - print(input("Press Enter stop the server...")) + print(input('Press Enter to stop the server...')) else: test_examples_protocol_socket_udpclient() diff --git a/examples/protocols/sockets/udp_server/example_test.py b/examples/protocols/sockets/udp_server/example_test.py index e11b75eae3..9c3b323bee 100644 --- a/examples/protocols/sockets/udp_server/example_test.py +++ b/examples/protocols/sockets/udp_server/example_test.py @@ -6,14 +6,14 @@ # -*- coding: utf-8 -*- -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import print_function, unicode_literals + import os -import sys import re import socket -import ttfw_idf +import sys +import ttfw_idf # ----------- Config ---------- PORT = 3333 @@ -44,28 +44,28 @@ def udp_client(address, payload): return reply.decode() -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_socket_udpserver(env, extra_data): - MESSAGE = "Data to ESP" + MESSAGE = 'Data to ESP' """ steps: 1. join AP 2. have the board connect to the server 3.
send and receive data """ - dut1 = env.get_dut("udp_server", "examples/protocols/sockets/udp_server", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('udp_server', 'examples/protocols/sockets/udp_server', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "udp_server.bin") + binary_file = os.path.join(dut1.app.binary_path, 'udp_server.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("udp_server_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('udp_server_bin_size', '{}KB'.format(bin_size // 1024)) # start test dut1.start_app() - ipv4 = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30)[0] + ipv4 = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30)[0] ipv6_r = r':'.join((r'[0-9a-fA-F]{4}',) * 8) # expect all 8 octets from IPv6 (assumes it's printed in the long form) ipv6 = dut1.expect(re.compile(r' IPv6 address: ({})'.format(ipv6_r)), timeout=30)[0] - print("Connected with IPv4={} and IPv6={}".format(ipv4, ipv6)) + print('Connected with IPv4={} and IPv6={}'.format(ipv4, ipv6)) # test IPv4 received = udp_client(ipv4, MESSAGE) @@ -73,7 +73,7 @@ def test_examples_protocol_socket_udpserver(env, extra_data): raise dut1.expect(MESSAGE) # test IPv6 - received = udp_client("{}%{}".format(ipv6, INTERFACE), MESSAGE) + received = udp_client('{}%{}'.format(ipv6, INTERFACE), MESSAGE) if not received == MESSAGE: raise dut1.expect(MESSAGE) diff --git a/examples/protocols/websocket/example_test.py b/examples/protocols/websocket/example_test.py index c436fb1796..90ce656281 100644 --- a/examples/protocols/websocket/example_test.py +++ b/examples/protocols/websocket/example_test.py @@ -1,14 +1,15 @@ -from __future__ import print_function -from __future__ import unicode_literals -import re +from __future__ import print_function, unicode_literals + import os -import socket import random +import re +import socket import string +from threading import Event, Thread + +import ttfw_idf from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket from tiny_test_fw import Utility -from threading import Thread, Event -import ttfw_idf def get_my_ip(): @@ -66,15 +67,15 @@ class Websocket(object): def test_echo(dut): - dut.expect("WEBSOCKET_EVENT_CONNECTED") + dut.expect('WEBSOCKET_EVENT_CONNECTED') for i in range(0, 10): - dut.expect(re.compile(r"Received=hello (\d)"), timeout=30) - print("All echos received") + dut.expect(re.compile(r'Received=hello (\d)'), timeout=30) + print('All echos received') def test_close(dut): - code = dut.expect(re.compile(r"WEBSOCKET: Received closed message with code=(\d*)"), timeout=60)[0] - print("Received close frame with code {}".format(code)) + code = dut.expect(re.compile(r'WEBSOCKET: Received closed message with code=(\d*)'), timeout=60)[0] + print('Received close frame with code {}'.format(code)) def test_recv_long_msg(dut, websocket, msg_len, repeats): @@ -86,17 +87,17 @@ def test_recv_long_msg(dut, websocket, msg_len, repeats): recv_msg = '' while len(recv_msg) < msg_len: # Filter out color encoding - match = dut.expect(re.compile(r"Received=([a-zA-Z0-9]*).*\n"), timeout=30)[0] + match = dut.expect(re.compile(r'Received=([a-zA-Z0-9]*).*\n'), timeout=30)[0] recv_msg += match if recv_msg == send_msg: - print("Sent message and received message are equal") + print('Sent message and received message are equal') else: - raise ValueError("DUT received string do not match sent string, \nexpected: {}\nwith 
length {}\ \nreceived: {}\nwith length {}".format(send_msg, len(send_msg), recv_msg, len(recv_msg))) + raise ValueError('DUT received string does not match sent string, \nexpected: {}\nwith length {}\ \nreceived: {}\nwith length {}'.format(send_msg, len(send_msg), recv_msg, len(recv_msg))) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_websocket(env, extra_data): """ steps: @@ -104,17 +105,17 @@ 2. connect to uri specified in the config 3. send and receive data """ - dut1 = env.get_dut("websocket", "examples/protocols/websocket", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('websocket', 'examples/protocols/websocket', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "websocket-example.bin") + binary_file = os.path.join(dut1.app.binary_path, 'websocket-example.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("websocket_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('websocket_bin_size', '{}KB'.format(bin_size // 1024)) try: - if "CONFIG_WEBSOCKET_URI_FROM_STDIN" in dut1.app.get_sdkconfig(): + if 'CONFIG_WEBSOCKET_URI_FROM_STDIN' in dut1.app.get_sdkconfig(): uri_from_stdin = True else: - uri = dut1.app.get_sdkconfig()["CONFIG_WEBSOCKET_URI"].strip('"') + uri = dut1.app.get_sdkconfig()['CONFIG_WEBSOCKET_URI'].strip('"') uri_from_stdin = False except Exception: @@ -127,9 +128,9 @@ if uri_from_stdin: server_port = 4455 with Websocket(server_port) as ws: - uri = "ws://{}:{}".format(get_my_ip(), server_port) - print("DUT connecting to {}".format(uri)) - dut1.expect("Please enter uri of websocket endpoint", timeout=30) + uri = 'ws://{}:{}'.format(get_my_ip(), server_port) + print('DUT connecting to {}'.format(uri)) + dut1.expect('Please enter uri of websocket endpoint', timeout=30) dut1.write(uri) test_echo(dut1) # Message length should exceed DUT's buffer size to test fragmentation, default is 1024 byte @@ -137,7 +138,7 @@ test_close(dut1) else: - print("DUT connecting to {}".format(uri)) + print('DUT connecting to {}'.format(uri)) test_echo(dut1) diff --git a/examples/provisioning/legacy/ble_prov/ble_prov_test.py b/examples/provisioning/legacy/ble_prov/ble_prov_test.py index 90192b532b..ba09b9ee2f 100644 --- a/examples/provisioning/legacy/ble_prov/ble_prov_test.py +++ b/examples/provisioning/legacy/ble_prov/ble_prov_test.py @@ -15,73 +15,74 @@ # limitations under the License.
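# Sketch of the length-bounded receive loop behind test_recv_long_msg above:
# a message longer than the DUT's 1024-byte receive buffer arrives as several
# fragments, so the test accumulates 'Received=' log matches until the
# reassembled string reaches the expected length, then compares it with what
# was sent. 'read_log_line' is a hypothetical stand-in for dut.expect().
import re

RECEIVED_RE = re.compile(r'Received=([a-zA-Z0-9]*)')


def reassemble(read_log_line, send_msg):
    recv_msg = ''
    while len(recv_msg) < len(send_msg):
        match = RECEIVED_RE.search(read_log_line())
        if match is None:
            raise ValueError('unexpected log line while reassembling')
        recv_msg += match.group(1)
    if recv_msg != send_msg:
        raise ValueError('DUT received string does not match sent string')
    return recv_msg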
from __future__ import print_function -import re -import os -import ttfw_idf +import os +import re + import esp_prov +import ttfw_idf # Have esp_prov throw exception esp_prov.config_throw_except = True -@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI_BT') def test_examples_provisioning_ble(env, extra_data): # Acquire DUT - dut1 = env.get_dut("ble_prov", "examples/provisioning/legacy/ble_prov", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('ble_prov', 'examples/provisioning/legacy/ble_prov', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut1.app.binary_path, "ble_prov.bin") + binary_file = os.path.join(dut1.app.binary_path, 'ble_prov.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("ble_prov_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('ble_prov_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing dut1.start_app() # Parse BLE devname devname = dut1.expect(re.compile(r"Provisioning started with BLE devname : '(PROV_\S\S\S\S\S\S)'"), timeout=60)[0] - print("BLE Device Alias for DUT :", devname) + print('BLE Device Alias for DUT :', devname) # Match additional headers sent in the request - dut1.expect("BLE Provisioning started", timeout=30) + dut1.expect('BLE Provisioning started', timeout=30) - print("Starting Provisioning") + print('Starting Provisioning') verbose = False - protover = "V0.1" + protover = 'V0.1' secver = 1 - pop = "abcd1234" - provmode = "ble" - ap_ssid = "myssid" - ap_password = "mypassword" + pop = 'abcd1234' + provmode = 'ble' + ap_ssid = 'myssid' + ap_password = 'mypassword' - print("Getting security") + print('Getting security') security = esp_prov.get_security(secver, pop, verbose) if security is None: - raise RuntimeError("Failed to get security") + raise RuntimeError('Failed to get security') - print("Getting transport") + print('Getting transport') transport = esp_prov.get_transport(provmode, devname) if transport is None: - raise RuntimeError("Failed to get transport") + raise RuntimeError('Failed to get transport') - print("Verifying protocol version") + print('Verifying protocol version') if not esp_prov.version_match(transport, protover): - raise RuntimeError("Mismatch in protocol version") + raise RuntimeError('Mismatch in protocol version') - print("Starting Session") + print('Starting Session') if not esp_prov.establish_session(transport, security): - raise RuntimeError("Failed to start session") + raise RuntimeError('Failed to start session') - print("Sending Wifi credential to DUT") + print('Sending Wifi credential to DUT') if not esp_prov.send_wifi_config(transport, security, ap_ssid, ap_password): - raise RuntimeError("Failed to send Wi-Fi config") + raise RuntimeError('Failed to send Wi-Fi config') - print("Applying config") + print('Applying config') if not esp_prov.apply_wifi_config(transport, security): - raise RuntimeError("Failed to send apply config") + raise RuntimeError('Failed to send apply config') if not esp_prov.wait_wifi_connected(transport, security): - raise RuntimeError("Provisioning failed") + raise RuntimeError('Provisioning failed') if __name__ == '__main__': diff --git a/examples/provisioning/legacy/custom_config/components/custom_provisioning/python/custom_config_pb2.py b/examples/provisioning/legacy/custom_config/components/custom_provisioning/python/custom_config_pb2.py index 821bb18f88..be6029305c 100644 --- 
a/examples/provisioning/legacy/custom_config/components/custom_provisioning/python/custom_config_pb2.py +++ b/examples/provisioning/legacy/custom_config/components/custom_provisioning/python/custom_config_pb2.py @@ -2,12 +2,14 @@ # source: custom_config.proto import sys + _b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) -from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import enum_type_wrapper + # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() @@ -61,7 +63,7 @@ _CUSTOMCONFIGREQUEST = _descriptor.Descriptor( _descriptor.FieldDescriptor( name='info', full_name='CustomConfigRequest.info', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=_b("").decode('utf-8'), + has_default_value=False, default_value=_b('').decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), diff --git a/examples/provisioning/legacy/softap_prov/softap_prov_test.py b/examples/provisioning/legacy/softap_prov/softap_prov_test.py index abd78111ec..468f5a0c6a 100644 --- a/examples/provisioning/legacy/softap_prov/softap_prov_test.py +++ b/examples/provisioning/legacy/softap_prov/softap_prov_test.py @@ -15,88 +15,89 @@ # limitations under the License. from __future__ import print_function -import re -import os -import ttfw_idf +import os +import re + import esp_prov +import ttfw_idf import wifi_tools # Have esp_prov throw exception esp_prov.config_throw_except = True -@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI_BT') def test_examples_provisioning_softap(env, extra_data): # Acquire DUT - dut1 = env.get_dut("softap_prov", "examples/provisioning/legacy/softap_prov", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('softap_prov', 'examples/provisioning/legacy/softap_prov', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut1.app.binary_path, "softap_prov.bin") + binary_file = os.path.join(dut1.app.binary_path, 'softap_prov.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("softap_prov_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('softap_prov_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing dut1.start_app() # Parse IP address of STA - dut1.expect("Starting WiFi SoftAP provisioning", timeout=60) + dut1.expect('Starting WiFi SoftAP provisioning', timeout=60) [ssid, password] = dut1.expect(re.compile(r"SoftAP Provisioning started with SSID '(\S+)', Password '(\S+)'"), timeout=30) iface = wifi_tools.get_wiface_name() if iface is None: - raise RuntimeError("Failed to get Wi-Fi interface on host") - print("Interface name : " + iface) - print("SoftAP SSID : " + ssid) - print("SoftAP Password : " + password) + raise RuntimeError('Failed to get Wi-Fi interface on host') + print('Interface name : ' + iface) + print('SoftAP SSID : ' + ssid) + print('SoftAP Password : ' + password) try: ctrl = wifi_tools.wpa_cli(iface, reset_on_exit=True) - print("Connecting to DUT SoftAP...") + print('Connecting to DUT SoftAP...') ip = ctrl.connect(ssid, password) - got_ip = dut1.expect(re.compile(r"DHCP server assigned IP to a station, IP is: 
(\d+.\d+.\d+.\d+)"), timeout=60)[0] + got_ip = dut1.expect(re.compile(r'DHCP server assigned IP to a station, IP is: (\d+.\d+.\d+.\d+)'), timeout=60)[0] if ip != got_ip: - raise RuntimeError("SoftAP connected to another host! " + ip + "!=" + got_ip) - print("Connected to DUT SoftAP") + raise RuntimeError('SoftAP connected to another host! ' + ip + '!=' + got_ip) + print('Connected to DUT SoftAP') - print("Starting Provisioning") + print('Starting Provisioning') verbose = False - protover = "V0.1" + protover = 'V0.1' secver = 1 - pop = "abcd1234" - provmode = "softap" - ap_ssid = "myssid" - ap_password = "mypassword" - softap_endpoint = ip.split('.')[0] + "." + ip.split('.')[1] + "." + ip.split('.')[2] + ".1:80" + pop = 'abcd1234' + provmode = 'softap' + ap_ssid = 'myssid' + ap_password = 'mypassword' + softap_endpoint = ip.split('.')[0] + '.' + ip.split('.')[1] + '.' + ip.split('.')[2] + '.1:80' - print("Getting security") + print('Getting security') security = esp_prov.get_security(secver, pop, verbose) if security is None: - raise RuntimeError("Failed to get security") + raise RuntimeError('Failed to get security') - print("Getting transport") + print('Getting transport') transport = esp_prov.get_transport(provmode, softap_endpoint) if transport is None: - raise RuntimeError("Failed to get transport") + raise RuntimeError('Failed to get transport') - print("Verifying protocol version") + print('Verifying protocol version') if not esp_prov.version_match(transport, protover): - raise RuntimeError("Mismatch in protocol version") + raise RuntimeError('Mismatch in protocol version') - print("Starting Session") + print('Starting Session') if not esp_prov.establish_session(transport, security): - raise RuntimeError("Failed to start session") + raise RuntimeError('Failed to start session') - print("Sending Wifi credential to DUT") + print('Sending Wifi credential to DUT') if not esp_prov.send_wifi_config(transport, security, ap_ssid, ap_password): - raise RuntimeError("Failed to send Wi-Fi config") + raise RuntimeError('Failed to send Wi-Fi config') - print("Applying config") + print('Applying config') if not esp_prov.apply_wifi_config(transport, security): - raise RuntimeError("Failed to send apply config") + raise RuntimeError('Failed to send apply config') if not esp_prov.wait_wifi_connected(transport, security): - raise RuntimeError("Provisioning failed") + raise RuntimeError('Provisioning failed') finally: ctrl.reset() diff --git a/examples/provisioning/wifi_prov_mgr/wifi_prov_mgr_test.py b/examples/provisioning/wifi_prov_mgr/wifi_prov_mgr_test.py index 8858d4d428..a516fbed3c 100644 --- a/examples/provisioning/wifi_prov_mgr/wifi_prov_mgr_test.py +++ b/examples/provisioning/wifi_prov_mgr/wifi_prov_mgr_test.py @@ -15,84 +15,85 @@ # limitations under the License. 
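# The softap_prov test above derives its HTTP provisioning endpoint from the
# IP address the DUT's DHCP server assigned to the host, assuming the SoftAP
# gateway is host .1 of the same /24 network. A compact sketch of that
# derivation; the sample address is illustrative only:
def softap_endpoint(client_ip, port=80):
    subnet = client_ip.rsplit('.', 1)[0]    # '192.168.4' from '192.168.4.2'
    return '{}.1:{}'.format(subnet, port)   # -> '192.168.4.1:80'


assert softap_endpoint('192.168.4.2') == '192.168.4.1:80'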
from __future__ import print_function -import re -import os -import ttfw_idf +import os +import re + import esp_prov +import ttfw_idf # Have esp_prov throw exception esp_prov.config_throw_except = True -@ttfw_idf.idf_example_test(env_tag="Example_WIFI_BT") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI_BT') def test_examples_wifi_prov_mgr(env, extra_data): # Acquire DUT - dut1 = env.get_dut("wifi_prov_mgr", "examples/provisioning/wifi_prov_mgr", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('wifi_prov_mgr', 'examples/provisioning/wifi_prov_mgr', dut_class=ttfw_idf.ESP32DUT) # Get binary file - binary_file = os.path.join(dut1.app.binary_path, "wifi_prov_mgr.bin") + binary_file = os.path.join(dut1.app.binary_path, 'wifi_prov_mgr.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("wifi_prov_mgr_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('wifi_prov_mgr_bin_size', '{}KB'.format(bin_size // 1024)) # Upload binary and start testing dut1.start_app() # Check if BT memory is released before provisioning starts - dut1.expect("wifi_prov_scheme_ble: BT memory released", timeout=60) + dut1.expect('wifi_prov_scheme_ble: BT memory released', timeout=60) # Parse BLE devname - devname = dut1.expect(re.compile(r"Provisioning started with service name : (PROV_\S\S\S\S\S\S)"), timeout=30)[0] - print("BLE Device Alias for DUT :", devname) + devname = dut1.expect(re.compile(r'Provisioning started with service name : (PROV_\S\S\S\S\S\S)'), timeout=30)[0] + print('BLE Device Alias for DUT :', devname) - print("Starting Provisioning") + print('Starting Provisioning') verbose = False - protover = "v1.1" + protover = 'v1.1' secver = 1 - pop = "abcd1234" - provmode = "ble" - ap_ssid = "myssid" - ap_password = "mypassword" + pop = 'abcd1234' + provmode = 'ble' + ap_ssid = 'myssid' + ap_password = 'mypassword' - print("Getting security") + print('Getting security') security = esp_prov.get_security(secver, pop, verbose) if security is None: - raise RuntimeError("Failed to get security") + raise RuntimeError('Failed to get security') - print("Getting transport") + print('Getting transport') transport = esp_prov.get_transport(provmode, devname) if transport is None: - raise RuntimeError("Failed to get transport") + raise RuntimeError('Failed to get transport') - print("Verifying protocol version") + print('Verifying protocol version') if not esp_prov.version_match(transport, protover): - raise RuntimeError("Mismatch in protocol version") + raise RuntimeError('Mismatch in protocol version') - print("Verifying scan list capability") + print('Verifying scan list capability') if not esp_prov.has_capability(transport, 'wifi_scan'): - raise RuntimeError("Capability not present") + raise RuntimeError('Capability not present') - print("Starting Session") + print('Starting Session') if not esp_prov.establish_session(transport, security): - raise RuntimeError("Failed to start session") + raise RuntimeError('Failed to start session') - print("Sending Custom Data") - if not esp_prov.custom_data(transport, security, "My Custom Data"): - raise RuntimeError("Failed to send custom data") + print('Sending Custom Data') + if not esp_prov.custom_data(transport, security, 'My Custom Data'): + raise RuntimeError('Failed to send custom data') - print("Sending Wifi credential to DUT") + print('Sending Wifi credential to DUT') if not esp_prov.send_wifi_config(transport, security, ap_ssid, ap_password): - raise RuntimeError("Failed to send Wi-Fi config") + raise RuntimeError('Failed to send 
Wi-Fi config') - print("Applying config") + print('Applying config') if not esp_prov.apply_wifi_config(transport, security): - raise RuntimeError("Failed to send apply config") + raise RuntimeError('Failed to send apply config') if not esp_prov.wait_wifi_connected(transport, security): - raise RuntimeError("Provisioning failed") + raise RuntimeError('Provisioning failed') # Check if BTDM memory is released after provisioning finishes - dut1.expect("wifi_prov_scheme_ble: BTDM memory released", timeout=30) + dut1.expect('wifi_prov_scheme_ble: BTDM memory released', timeout=30) if __name__ == '__main__': diff --git a/examples/security/flash_encryption/example_test.py b/examples/security/flash_encryption/example_test.py index 2b6d55a124..f9346d016d 100644 --- a/examples/security/flash_encryption/example_test.py +++ b/examples/security/flash_encryption/example_test.py @@ -1,18 +1,20 @@ from __future__ import print_function + import binascii -from io import BytesIO -from collections import namedtuple import os import sys +from collections import namedtuple +from io import BytesIO import ttfw_idf + try: import espsecure except ImportError: - idf_path = os.getenv("IDF_PATH") + idf_path = os.getenv('IDF_PATH') if not idf_path or not os.path.exists(idf_path): raise - sys.path.insert(0, os.path.join(idf_path, "components", "esptool_py", "esptool")) + sys.path.insert(0, os.path.join(idf_path, 'components', 'esptool_py', 'esptool')) import espsecure @@ -30,7 +32,7 @@ def test_examples_security_flash_encryption(env, extra_data): dut.start_app() # calculate the expected ciphertext - flash_addr = dut.app.partition_table["storage"]["offset"] + flash_addr = dut.app.partition_table['storage']['offset'] plain_hex_str = '00 01 02 03 04 05 06 07 08 09 0a 0b 0c 0d 0e 0f' plain_data = binascii.unhexlify(plain_hex_str.replace(' ', '')) diff --git a/examples/storage/nvs_rw_blob/nvs_rw_blob_example_test.py b/examples/storage/nvs_rw_blob/nvs_rw_blob_example_test.py index dba35cc542..74e6f541be 100644 --- a/examples/storage/nvs_rw_blob/nvs_rw_blob_example_test.py +++ b/examples/storage/nvs_rw_blob/nvs_rw_blob_example_test.py @@ -1,8 +1,9 @@ -from tiny_test_fw import Utility import random import re import time + import ttfw_idf +from tiny_test_fw import Utility @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') diff --git a/examples/storage/nvs_rw_value/nvs_rw_value_example_test.py b/examples/storage/nvs_rw_value/nvs_rw_value_example_test.py index 8eddae0ece..25af89ebfe 100644 --- a/examples/storage/nvs_rw_value/nvs_rw_value_example_test.py +++ b/examples/storage/nvs_rw_value/nvs_rw_value_example_test.py @@ -1,5 +1,5 @@ -from tiny_test_fw import Utility import ttfw_idf +from tiny_test_fw import Utility try: from itertools import izip_longest as zip_longest diff --git a/examples/storage/nvs_rw_value_cxx/nvs_rw_value_cxx_example_test.py b/examples/storage/nvs_rw_value_cxx/nvs_rw_value_cxx_example_test.py index f95c867119..7cdb10d513 100644 --- a/examples/storage/nvs_rw_value_cxx/nvs_rw_value_cxx_example_test.py +++ b/examples/storage/nvs_rw_value_cxx/nvs_rw_value_cxx_example_test.py @@ -1,5 +1,5 @@ -from tiny_test_fw import Utility import ttfw_idf +from tiny_test_fw import Utility try: from itertools import izip_longest as zip_longest diff --git a/examples/storage/partition_api/partition_mmap/partition_mmap_example_test.py b/examples/storage/partition_api/partition_mmap/partition_mmap_example_test.py index bb7c3fa753..c9a5a73375 100644 --- a/examples/storage/partition_api/partition_mmap/partition_mmap_example_test.py 
+++ b/examples/storage/partition_api/partition_mmap/partition_mmap_example_test.py @@ -1,4 +1,5 @@ import re + import ttfw_idf diff --git a/examples/storage/parttool/example_test.py b/examples/storage/parttool/example_test.py index 8493ecab2f..2caa34a1f4 100644 --- a/examples/storage/parttool/example_test.py +++ b/examples/storage/parttool/example_test.py @@ -1,7 +1,8 @@ from __future__ import print_function + import os -import sys import subprocess +import sys import ttfw_idf @@ -12,23 +13,23 @@ def test_examples_parttool(env, extra_data): dut.start_app(False) # Verify factory firmware - dut.expect("Partitions Tool Example") - dut.expect("Example end") + dut.expect('Partitions Tool Example') + dut.expect('Example end') # Close connection to DUT dut.receive_thread.exit() dut.port_inst.close() # Run the example python script - script_path = os.path.join(os.getenv("IDF_PATH"), "examples", "storage", "parttool", "parttool_example.py") + script_path = os.path.join(os.getenv('IDF_PATH'), 'examples', 'storage', 'parttool', 'parttool_example.py') - binary_path = "" + binary_path = '' for flash_file in dut.app.flash_files: - if "parttool.bin" in flash_file[1]: + if 'parttool.bin' in flash_file[1]: binary_path = flash_file[1] break - subprocess.check_call([sys.executable, script_path, "--binary", binary_path, "--port", dut.port]) + subprocess.check_call([sys.executable, script_path, '--binary', binary_path, '--port', dut.port]) if __name__ == '__main__': diff --git a/examples/storage/parttool/parttool_example.py b/examples/storage/parttool/parttool_example.py index bf2c6f7cdf..454db4002b 100755 --- a/examples/storage/parttool/parttool_example.py +++ b/examples/storage/parttool/parttool_example.py @@ -16,16 +16,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
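# Sketch of the serial-port hand-off performed by the parttool example_test
# above: the framework's receive thread and pyserial handle have to be
# released before the spawned script can open the same port. The attribute
# names mirror the hunk above ('dut' is the framework DUT object); this is an
# illustrative fragment, not framework API documentation.
import subprocess
import sys


def run_script_on_dut_port(dut, script_path, binary_path):
    dut.receive_thread.exit()   # stop the reader thread that owns the port
    dut.port_inst.close()       # free the serial device for the child process
    subprocess.check_call([sys.executable, script_path,
                           '--binary', binary_path, '--port', dut.port])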
+import argparse import os import sys -import argparse -PARTITION_TABLE_DIR = os.path.join("components", "partition_table", "") +PARTITION_TABLE_DIR = os.path.join('components', 'partition_table', '') def assert_file_same(file1, file2, err): - with open(file1, "rb") as f1: - with open(file2, "rb") as f2: + with open(file1, 'rb') as f1: + with open(file2, 'rb') as f2: f1 = f1.read() f2 = f2.read() @@ -39,17 +39,17 @@ def assert_file_same(file1, file2, err): def main(): - COMPONENTS_PATH = os.path.expandvars(os.path.join("$IDF_PATH", "components")) - PARTTOOL_DIR = os.path.join(COMPONENTS_PATH, "partition_table") + COMPONENTS_PATH = os.path.expandvars(os.path.join('$IDF_PATH', 'components')) + PARTTOOL_DIR = os.path.join(COMPONENTS_PATH, 'partition_table') sys.path.append(PARTTOOL_DIR) - from parttool import PartitionName, PartitionType, ParttoolTarget from gen_empty_partition import generate_blanked_file + from parttool import PartitionName, PartitionType, ParttoolTarget - parser = argparse.ArgumentParser("ESP-IDF Partitions Tool Example") + parser = argparse.ArgumentParser('ESP-IDF Partitions Tool Example') - parser.add_argument("--port", "-p", help="port where the device to perform operations on is connected") - parser.add_argument("--binary", "-b", help="path to built example binary", default=os.path.join("build", "parttool.bin")) + parser.add_argument('--port', '-p', help='port where the device to perform operations on is connected') + parser.add_argument('--binary', '-b', help='path to built example binary', default=os.path.join('build', 'parttool.bin')) args = parser.parse_args() @@ -57,47 +57,47 @@ def main(): # Read app partition and save the contents to a file. The app partition is identified # using type-subtype combination - print("Checking if device app binary matches built binary") - factory = PartitionType("app", "factory") - target.read_partition(factory, "app.bin") - assert_file_same(args.binary, "app.bin", "Device app binary does not match built binary") + print('Checking if device app binary matches built binary') + factory = PartitionType('app', 'factory') + target.read_partition(factory, 'app.bin') + assert_file_same(args.binary, 'app.bin', 'Device app binary does not match built binary') # Retrieve info on data storage partition, this time identifying it by name. 
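# The hunk above shows only the head of assert_file_same; a compact,
# self-contained variant of the byte-for-byte comparison it performs (the
# raise-on-mismatch behaviour is inferred from how the helper is called
# elsewhere in this file):
def files_identical(path1, path2):
    with open(path1, 'rb') as f1, open(path2, 'rb') as f2:
        return f1.read() == f2.read()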
- storage = PartitionName("storage") + storage = PartitionName('storage') storage_info = target.get_partition_info(storage) - print("Found data partition at offset 0x{:x} with size 0x{:x}".format(storage_info.offset, storage_info.size)) + print('Found data partition at offset 0x{:x} with size 0x{:x}'.format(storage_info.offset, storage_info.size)) # Create a file whose contents will be written to the storage partition - with open("write.bin", "wb") as f: + with open('write.bin', 'wb') as f: # Create a file to write to the data partition with randomly generated content f.write(os.urandom(storage_info.size)) # Write the contents of the created file to storage partition - print("Writing to data partition") - target.write_partition(storage, "write.bin") + print('Writing to data partition') + target.write_partition(storage, 'write.bin') # Read back the contents of the storage partition - print("Reading data partition") - target.read_partition(storage, "read.bin") + print('Reading data partition') + target.read_partition(storage, 'read.bin') - assert_file_same("write.bin", "read.bin", "Read contents of storage partition does not match source file contents") + assert_file_same('write.bin', 'read.bin', 'Read contents of storage partition does not match source file contents') # Erase contents of the storage partition - print("Erasing data partition") + print('Erasing data partition') target.erase_partition(storage) # Read back the erased data partition - print("Reading data partition") - target.read_partition(storage, "read.bin") + print('Reading data partition') + target.read_partition(storage, 'read.bin') # Generate a file of all 0xFF - generate_blanked_file(storage_info.size, "blank.bin") + generate_blanked_file(storage_info.size, 'blank.bin') - assert_file_same("blank.bin", "read.bin", "Contents of storage partition not fully erased") + assert_file_same('blank.bin', 'read.bin', 'Contents of storage partition not fully erased') # Example end and cleanup - print("\nPartition tool operations performed successfully!") - clean_files = ["app.bin", "read.bin", "blank.bin", "write.bin"] + print('\nPartition tool operations performed successfully!') + clean_files = ['app.bin', 'read.bin', 'blank.bin', 'write.bin'] for clean_file in clean_files: os.unlink(clean_file) diff --git a/examples/storage/sd_card/sd_card_example_test.py b/examples/storage/sd_card/sd_card_example_test.py index 05b04eb6c5..dc380a3d69 100644 --- a/examples/storage/sd_card/sd_card_example_test.py +++ b/examples/storage/sd_card/sd_card_example_test.py @@ -1,7 +1,8 @@ -from tiny_test_fw import Utility -import ttfw_idf import re +import ttfw_idf +from tiny_test_fw import Utility + @ttfw_idf.idf_example_test(env_tag='UT_T1_SDMODE') def test_examples_sd_card(env, extra_data): diff --git a/examples/storage/semihost_vfs/semihost_vfs_example_test.py b/examples/storage/semihost_vfs/semihost_vfs_example_test.py index 5513cb8ec5..e0cb41a4d6 100644 --- a/examples/storage/semihost_vfs/semihost_vfs_example_test.py +++ b/examples/storage/semihost_vfs/semihost_vfs_example_test.py @@ -1,7 +1,8 @@ -from io import open import os import shutil import tempfile +from io import open + import ttfw_idf try: @@ -11,7 +12,7 @@ except ImportError: from itertools import zip_longest -@ttfw_idf.idf_example_test(env_tag="test_jtag_arm") +@ttfw_idf.idf_example_test(env_tag='test_jtag_arm') def test_examples_semihost_vfs(env, extra_data): rel_project_path = os.path.join('examples', 'storage', 'semihost_vfs') diff --git a/examples/storage/spiffs/spiffs_example_test.py 
b/examples/storage/spiffs/spiffs_example_test.py index 30f6616107..fa70e1fddb 100644 --- a/examples/storage/spiffs/spiffs_example_test.py +++ b/examples/storage/spiffs/spiffs_example_test.py @@ -1,4 +1,5 @@ import re + import ttfw_idf diff --git a/examples/storage/spiffsgen/example_test.py b/examples/storage/spiffsgen/example_test.py index 32b6cc7e92..c5dd9db917 100644 --- a/examples/storage/spiffsgen/example_test.py +++ b/examples/storage/spiffsgen/example_test.py @@ -1,6 +1,7 @@ from __future__ import print_function -import os + import hashlib +import os import ttfw_idf diff --git a/examples/storage/wear_levelling/wear_levelling_example_test.py b/examples/storage/wear_levelling/wear_levelling_example_test.py index a5614fbd08..395a78cc8c 100644 --- a/examples/storage/wear_levelling/wear_levelling_example_test.py +++ b/examples/storage/wear_levelling/wear_levelling_example_test.py @@ -1,4 +1,5 @@ import re + import ttfw_idf diff --git a/examples/system/app_trace_to_host/example_test.py b/examples/system/app_trace_to_host/example_test.py index ed2e1bda4d..082b564340 100644 --- a/examples/system/app_trace_to_host/example_test.py +++ b/examples/system/app_trace_to_host/example_test.py @@ -1,10 +1,12 @@ from __future__ import unicode_literals + import os import re + import ttfw_idf -@ttfw_idf.idf_example_test(env_tag="test_jtag_arm") +@ttfw_idf.idf_example_test(env_tag='test_jtag_arm') def test_examples_app_trace_to_host(env, extra_data): rel_project_path = os.path.join('examples', 'system', 'app_trace_to_host') dut = env.get_dut('app_trace_to_host', rel_project_path) @@ -22,7 +24,7 @@ def test_examples_app_trace_to_host(env, extra_data): re.compile(r'example: Collected \d+ samples in 20 ms.'), timeout=20) - ocd.apptrace_start("file://adc.log 0 9000 5 0 0") + ocd.apptrace_start('file://adc.log 0 9000 5 0 0') ocd.apptrace_wait_stop(tmo=30) with ttfw_idf.CustomProcess(' '.join([os.path.join(idf_path, 'tools/esp_app_trace/logtrace_proc.py'), diff --git a/examples/system/base_mac_address/example_test.py b/examples/system/base_mac_address/example_test.py index 827a4d81b3..cf0a340dc2 100644 --- a/examples/system/base_mac_address/example_test.py +++ b/examples/system/base_mac_address/example_test.py @@ -1,7 +1,9 @@ from __future__ import unicode_literals -from tiny_test_fw import Utility + import re + import ttfw_idf +from tiny_test_fw import Utility @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') diff --git a/examples/system/console/example_test.py b/examples/system/console/example_test.py index b723392809..2a56824b2c 100644 --- a/examples/system/console/example_test.py +++ b/examples/system/console/example_test.py @@ -6,15 +6,15 @@ import ttfw_idf @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') def test_examples_system_console(env, extra_data): dut = env.get_dut('console_example', 'examples/system/console', app_config_name='history') - print("Using binary path: {}".format(dut.app.binary_path)) + print('Using binary path: {}'.format(dut.app.binary_path)) dut.start_app() - dut.expect("Command history enabled") + dut.expect('Command history enabled') env.close_dut(dut.name) dut = env.get_dut('console_example', 'examples/system/console', app_config_name='nohistory') - print("Using binary path: {}".format(dut.app.binary_path)) + print('Using binary path: {}'.format(dut.app.binary_path)) dut.start_app() - dut.expect("Command history disabled") + dut.expect('Command history disabled') if __name__ == '__main__': diff --git a/examples/system/deep_sleep/example_test.py 
b/examples/system/deep_sleep/example_test.py index dc6abfe480..035290e65e 100644 --- a/examples/system/deep_sleep/example_test.py +++ b/examples/system/deep_sleep/example_test.py @@ -1,5 +1,7 @@ from __future__ import unicode_literals + import re + import ttfw_idf diff --git a/examples/system/efuse/example_test.py b/examples/system/efuse/example_test.py index 255c677b25..5379dfd060 100644 --- a/examples/system/efuse/example_test.py +++ b/examples/system/efuse/example_test.py @@ -1,5 +1,7 @@ from __future__ import unicode_literals + import re + import ttfw_idf diff --git a/examples/system/esp_event/default_event_loop/example_test.py b/examples/system/esp_event/default_event_loop/example_test.py index 9019cd93cd..78f9377533 100644 --- a/examples/system/esp_event/default_event_loop/example_test.py +++ b/examples/system/esp_event/default_event_loop/example_test.py @@ -5,80 +5,80 @@ import ttfw_idf # Timer events TIMER_EVENT_LIMIT = 3 -TIMER_EXPIRY_HANDLING = "TIMER_EVENTS:TIMER_EVENT_EXPIRY: timer_expiry_handler, executed {} out of " + str(TIMER_EVENT_LIMIT) + " times" +TIMER_EXPIRY_HANDLING = 'TIMER_EVENTS:TIMER_EVENT_EXPIRY: timer_expiry_handler, executed {} out of ' + str(TIMER_EVENT_LIMIT) + ' times' # Task events TASK_ITERATION_LIMIT = 5 TASK_UNREGISTRATION_LIMIT = 3 -TASK_ITERATION_POST = "TASK_EVENTS:TASK_ITERATION_EVENT: posting to default loop, {} out of " + str(TASK_ITERATION_LIMIT) -TASK_ITERATION_HANDLING = "TASK_EVENTS:TASK_ITERATION_EVENT: task_iteration_handler, executed {} times" +TASK_ITERATION_POST = 'TASK_EVENTS:TASK_ITERATION_EVENT: posting to default loop, {} out of ' + str(TASK_ITERATION_LIMIT) +TASK_ITERATION_HANDLING = 'TASK_EVENTS:TASK_ITERATION_EVENT: task_iteration_handler, executed {} times' def _test_timer_events(dut): dut.start_app() - print("Checking timer events posting and handling") + print('Checking timer events posting and handling') - dut.expect("setting up") - dut.expect("starting event sources") + dut.expect('setting up') + dut.expect('starting event sources') - print("Finished setup") + print('Finished setup') - dut.expect("TIMER_EVENTS:TIMER_EVENT_STARTED: posting to default loop") - print("Posted timer started event") - dut.expect("TIMER_EVENTS:TIMER_EVENT_STARTED: timer_started_handler, instance 0") - dut.expect("TIMER_EVENTS:TIMER_EVENT_STARTED: timer_started_handler, instance 1") - dut.expect("TIMER_EVENTS:TIMER_EVENT_STARTED: timer_started_handler_2") - dut.expect("TIMER_EVENTS:TIMER_EVENT_STARTED: timer_any_handler") - dut.expect("TIMER_EVENTS:TIMER_EVENT_STARTED: all_event_handler") - print("Handled timer started event") + dut.expect('TIMER_EVENTS:TIMER_EVENT_STARTED: posting to default loop') + print('Posted timer started event') + dut.expect('TIMER_EVENTS:TIMER_EVENT_STARTED: timer_started_handler, instance 0') + dut.expect('TIMER_EVENTS:TIMER_EVENT_STARTED: timer_started_handler, instance 1') + dut.expect('TIMER_EVENTS:TIMER_EVENT_STARTED: timer_started_handler_2') + dut.expect('TIMER_EVENTS:TIMER_EVENT_STARTED: timer_any_handler') + dut.expect('TIMER_EVENTS:TIMER_EVENT_STARTED: all_event_handler') + print('Handled timer started event') for expiries in range(1, TIMER_EVENT_LIMIT + 1): - dut.expect("TIMER_EVENTS:TIMER_EVENT_EXPIRY: posting to default loop") - print("Posted timer expiry event {} out of {}".format(expiries, TIMER_EVENT_LIMIT)) + dut.expect('TIMER_EVENTS:TIMER_EVENT_EXPIRY: posting to default loop') + print('Posted timer expiry event {} out of {}'.format(expiries, TIMER_EVENT_LIMIT)) if expiries >= TIMER_EVENT_LIMIT: - 
dut.expect("TIMER_EVENTS:TIMER_EVENT_STOPPED: posting to default loop") - print("Posted timer stopped event") + dut.expect('TIMER_EVENTS:TIMER_EVENT_STOPPED: posting to default loop') + print('Posted timer stopped event') dut.expect(TIMER_EXPIRY_HANDLING.format(expiries)) - dut.expect("TIMER_EVENTS:TIMER_EVENT_EXPIRY: timer_any_handler") - dut.expect("TIMER_EVENTS:TIMER_EVENT_EXPIRY: all_event_handler") + dut.expect('TIMER_EVENTS:TIMER_EVENT_EXPIRY: timer_any_handler') + dut.expect('TIMER_EVENTS:TIMER_EVENT_EXPIRY: all_event_handler') - print("Handled timer expiry event {} out of {}".format(expiries, TIMER_EVENT_LIMIT)) + print('Handled timer expiry event {} out of {}'.format(expiries, TIMER_EVENT_LIMIT)) - dut.expect("TIMER_EVENTS:TIMER_EVENT_STOPPED: timer_stopped_handler") - dut.expect("TIMER_EVENTS:TIMER_EVENT_STOPPED: deleted timer event source") - print("Handled timer stopped event") + dut.expect('TIMER_EVENTS:TIMER_EVENT_STOPPED: timer_stopped_handler') + dut.expect('TIMER_EVENTS:TIMER_EVENT_STOPPED: deleted timer event source') + print('Handled timer stopped event') def _test_iteration_events(dut): dut.start_app() - print("Checking iteration events posting and handling") - dut.expect("setting up") - dut.expect("starting event sources") - print("Finished setup") + print('Checking iteration events posting and handling') + dut.expect('setting up') + dut.expect('starting event sources') + print('Finished setup') for iteration in range(1, TASK_ITERATION_LIMIT + 1): dut.expect(TASK_ITERATION_POST.format(iteration)) - print("Posted iteration {} out of {}".format(iteration, TASK_ITERATION_LIMIT)) + print('Posted iteration {} out of {}'.format(iteration, TASK_ITERATION_LIMIT)) if iteration < TASK_UNREGISTRATION_LIMIT: dut.expect(TASK_ITERATION_HANDLING.format(iteration)) - dut.expect("TASK_EVENTS:TASK_ITERATION_EVENT: all_event_handler") + dut.expect('TASK_EVENTS:TASK_ITERATION_EVENT: all_event_handler') elif iteration == TASK_UNREGISTRATION_LIMIT: - dut.expect("TASK_EVENTS:TASK_ITERATION_EVENT: unregistering task_iteration_handler") - dut.expect("TASK_EVENTS:TASK_ITERATION_EVENT: all_event_handler") - print("Unregistered handler at iteration {} out of {}".format(iteration, TASK_ITERATION_LIMIT)) + dut.expect('TASK_EVENTS:TASK_ITERATION_EVENT: unregistering task_iteration_handler') + dut.expect('TASK_EVENTS:TASK_ITERATION_EVENT: all_event_handler') + print('Unregistered handler at iteration {} out of {}'.format(iteration, TASK_ITERATION_LIMIT)) else: - dut.expect("TASK_EVENTS:TASK_ITERATION_EVENT: all_event_handler") + dut.expect('TASK_EVENTS:TASK_ITERATION_EVENT: all_event_handler') - print("Handled iteration {} out of {}".format(iteration, TASK_ITERATION_LIMIT)) + print('Handled iteration {} out of {}'.format(iteration, TASK_ITERATION_LIMIT)) - dut.expect("TASK_EVENTS:TASK_ITERATION_EVENT: deleting task event source") - print("Deleted task event source") + dut.expect('TASK_EVENTS:TASK_ITERATION_EVENT: deleting task event source') + print('Deleted task event source') @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') diff --git a/examples/system/esp_event/user_event_loops/example_test.py b/examples/system/esp_event/user_event_loops/example_test.py index e4334117ed..82664f9cf8 100644 --- a/examples/system/esp_event/user_event_loops/example_test.py +++ b/examples/system/esp_event/user_event_loops/example_test.py @@ -4,8 +4,8 @@ import ttfw_idf TASK_ITERATION_LIMIT = 10 -TASK_ITERATION_POSTING = "posting TASK_EVENTS:TASK_ITERATION_EVENT to {}, iteration {} out of " + 
str(TASK_ITERATION_LIMIT) -TASK_ITERATION_HANDLING = "handling TASK_EVENTS:TASK_ITERATION_EVENT from {}, iteration {}" +TASK_ITERATION_POSTING = 'posting TASK_EVENTS:TASK_ITERATION_EVENT to {}, iteration {} out of ' + str(TASK_ITERATION_LIMIT) +TASK_ITERATION_HANDLING = 'handling TASK_EVENTS:TASK_ITERATION_EVENT from {}, iteration {}' @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') @@ -14,26 +14,26 @@ def test_user_event_loops_example(env, extra_data): dut.start_app() - dut.expect("setting up") - dut.expect("starting event source") - dut.expect("starting application task") - print("Finished setup") + dut.expect('setting up') + dut.expect('starting event source') + dut.expect('starting application task') + print('Finished setup') for iteration in range(1, TASK_ITERATION_LIMIT + 1): loop = None if (iteration % 2 == 0): - loop = "loop_with_task" + loop = 'loop_with_task' else: - loop = "loop_without_task" + loop = 'loop_without_task' dut.expect(TASK_ITERATION_POSTING.format(loop, iteration)) - print("Posted iteration {} to {}".format(iteration, loop)) + print('Posted iteration {} to {}'.format(iteration, loop)) dut.expect(TASK_ITERATION_HANDLING.format(loop, iteration)) - print("Handled iteration {} from {}".format(iteration, loop)) + print('Handled iteration {} from {}'.format(iteration, loop)) - dut.expect("deleting task event source") - print("Deleted task event source") + dut.expect('deleting task event source') + print('Deleted task event source') if __name__ == '__main__': diff --git a/examples/system/esp_timer/example_test.py b/examples/system/esp_timer/example_test.py index 4a8730e455..d8cd0f4ccf 100644 --- a/examples/system/esp_timer/example_test.py +++ b/examples/system/esp_timer/example_test.py @@ -1,4 +1,5 @@ from __future__ import print_function + import re import ttfw_idf diff --git a/examples/system/freertos/real_time_stats/example_test.py b/examples/system/freertos/real_time_stats/example_test.py index 62f41643a8..d5528ea452 100644 --- a/examples/system/freertos/real_time_stats/example_test.py +++ b/examples/system/freertos/real_time_stats/example_test.py @@ -3,7 +3,7 @@ from __future__ import print_function import ttfw_idf STATS_TASK_ITERS = 3 -STATS_TASK_EXPECT = "Real time stats obtained" +STATS_TASK_EXPECT = 'Real time stats obtained' @ttfw_idf.idf_example_test(env_tag='Example_GENERIC') diff --git a/examples/system/gcov/example_test.py b/examples/system/gcov/example_test.py index 6b41fe5bf4..4331ba2129 100644 --- a/examples/system/gcov/example_test.py +++ b/examples/system/gcov/example_test.py @@ -1,12 +1,13 @@ from __future__ import unicode_literals + import os import time -from ttfw_idf import Utility import ttfw_idf +from ttfw_idf import Utility -@ttfw_idf.idf_example_test(env_tag="test_jtag_arm") +@ttfw_idf.idf_example_test(env_tag='test_jtag_arm') def test_examples_gcov(env, extra_data): rel_project_path = os.path.join('examples', 'system', 'gcov') diff --git a/examples/system/himem/example_test.py b/examples/system/himem/example_test.py index 6803c69565..f14a6ecb90 100644 --- a/examples/system/himem/example_test.py +++ b/examples/system/himem/example_test.py @@ -1,5 +1,7 @@ from __future__ import unicode_literals + import re + import ttfw_idf diff --git a/examples/system/light_sleep/example_test.py b/examples/system/light_sleep/example_test.py index a464e1056e..a67b81d9af 100644 --- a/examples/system/light_sleep/example_test.py +++ b/examples/system/light_sleep/example_test.py @@ -1,4 +1,5 @@ from __future__ import print_function + import re import time diff 
--git a/examples/system/ota/advanced_https_ota/example_test.py b/examples/system/ota/advanced_https_ota/example_test.py index ea9111a2bf..145361605a 100644 --- a/examples/system/ota/advanced_https_ota/example_test.py +++ b/examples/system/ota/advanced_https_ota/example_test.py @@ -1,71 +1,71 @@ -import re -import os -import struct -import socket import http.server -from threading import Thread -import ssl - -from tiny_test_fw import DUT -import ttfw_idf +import os import random +import re +import socket +import ssl +import struct import subprocess +from threading import Thread -server_cert = "-----BEGIN CERTIFICATE-----\n" \ - "MIIDXTCCAkWgAwIBAgIJAP4LF7E72HakMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"\ - "BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n"\ - "aWRnaXRzIFB0eSBMdGQwHhcNMTkwNjA3MDk1OTE2WhcNMjAwNjA2MDk1OTE2WjBF\n"\ - "MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n"\ - "ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n"\ - "CgKCAQEAlzfCyv3mIv7TlLkObxunKfCdrJ/zgdANrsx0RBtpEPhV560hWJ0fEin0\n"\ - "nIOMpJSiF9E6QsPdr6Q+eogH4XnOMU9JE+iG743N1dPfGEzJvRlyct/Ck8SswKPC\n"\ - "9+VXsnOdZmUw9y/xtANbURA/TspvPzz3Avv382ffffrJGh7ooOmaZSCZFlSYHLZA\n"\ - "w/XlRr0sSRbLpFGY0gXjaAV8iHHiPDYLy4kZOepjV9U51xi+IGsL4w75zuMgsHyF\n"\ - "3nJeGYHgtGVBrkL0ZKG5udY0wcBjysjubDJC4iSlNiq2HD3fhs7j6CZddV2v845M\n"\ - "lVKNxP0kO4Uj4D8r+5USWC8JKfAwxQIDAQABo1AwTjAdBgNVHQ4EFgQU6OE7ssfY\n"\ - "IIPTDThiUoofUpsD5NwwHwYDVR0jBBgwFoAU6OE7ssfYIIPTDThiUoofUpsD5Nww\n"\ - "DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAXIlHS/FJWfmcinUAxyBd\n"\ - "/xd5Lu8ykeru6oaUCci+Vk9lyoMMES7lQ+b/00d5x7AcTawkTil9EWpBTPTOTraA\n"\ - "lzJMQhNKmSLk0iIoTtAJtSZgUSpIIozqK6lenxQQDsHbXKU6h+u9H6KZE8YcjsFl\n"\ - "6vL7sw9BVotw/VxfgjQ5OSGLgoLrdVT0z5C2qOuwOgz1c7jNiJhtMdwN+cOtnJp2\n"\ - "fuBgEYyE3eeuWogvkWoDcIA8r17Ixzkpq2oJsdvZcHZPIZShPKW2SHUsl98KDemu\n"\ - "y0pQyExmQUbwKE4vbFb9XuWCcL9XaOHQytyszt2DeD67AipvoBwVU7/LBOvqnsmy\n"\ - "hA==\n"\ - "-----END CERTIFICATE-----\n" +import ttfw_idf +from tiny_test_fw import DUT -server_key = "-----BEGIN PRIVATE KEY-----\n"\ - "MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCXN8LK/eYi/tOU\n"\ - "uQ5vG6cp8J2sn/OB0A2uzHREG2kQ+FXnrSFYnR8SKfScg4yklKIX0TpCw92vpD56\n"\ - "iAfhec4xT0kT6Ibvjc3V098YTMm9GXJy38KTxKzAo8L35Veyc51mZTD3L/G0A1tR\n"\ - "ED9Oym8/PPcC+/fzZ999+skaHuig6ZplIJkWVJgctkDD9eVGvSxJFsukUZjSBeNo\n"\ - "BXyIceI8NgvLiRk56mNX1TnXGL4gawvjDvnO4yCwfIXecl4ZgeC0ZUGuQvRkobm5\n"\ - "1jTBwGPKyO5sMkLiJKU2KrYcPd+GzuPoJl11Xa/zjkyVUo3E/SQ7hSPgPyv7lRJY\n"\ - "Lwkp8DDFAgMBAAECggEAfBhAfQE7mUByNbxgAgI5fot9eaqR1Nf+QpJ6X2H3KPwC\n"\ - "02sa0HOwieFwYfj6tB1doBoNq7i89mTc+QUlIn4pHgIowHO0OGawomeKz5BEhjCZ\n"\ - "4XeLYGSoODary2+kNkf2xY8JTfFEcyvGBpJEwc4S2VyYgRRx+IgnumTSH+N5mIKZ\n"\ - "SXWNdZIuHEmkwod+rPRXs6/r+PH0eVW6WfpINEbr4zVAGXJx2zXQwd2cuV1GTJWh\n"\ - "cPVOXLu+XJ9im9B370cYN6GqUnR3fui13urYbnWnEf3syvoH/zuZkyrVChauoFf8\n"\ - "8EGb74/HhXK7Q2s8NRakx2c7OxQifCbcy03liUMmyQKBgQDFAob5B/66N4Q2cq/N\n"\ - "MWPf98kYBYoLaeEOhEJhLQlKk0pIFCTmtpmUbpoEes2kCUbH7RwczpYko8tlKyoB\n"\ - "6Fn6RY4zQQ64KZJI6kQVsjkYpcP/ihnOY6rbds+3yyv+4uPX7Eh9sYZwZMggE19M\n"\ - "CkFHkwAjiwqhiiSlUxe20sWmowKBgQDEfx4lxuFzA1PBPeZKGVBTxYPQf+DSLCre\n"\ - "ZFg3ZmrxbCjRq1O7Lra4FXWD3dmRq7NDk79JofoW50yD8wD7I0B7opdDfXD2idO8\n"\ - "0dBnWUKDr2CAXyoLEINce9kJPbx4kFBQRN9PiGF7VkDQxeQ3kfS8CvcErpTKCOdy\n"\ - "5wOwBTwJdwKBgDiTFTeGeDv5nVoVbS67tDao7XKchJvqd9q3WGiXikeELJyuTDqE\n"\ - "zW22pTwMF+m3UEAxcxVCrhMvhkUzNAkANHaOatuFHzj7lyqhO5QPbh4J3FMR0X9X\n"\ - "V8VWRSg+jA/SECP9koOl6zlzd5Tee0tW1pA7QpryXscs6IEhb3ns5R2JAoGAIkzO\n"\ - 
"RmnhEOKTzDex611f2D+yMsMfy5BKK2f4vjLymBH5TiBKDXKqEpgsW0huoi8Gq9Uu\n"\ - "nvvXXAgkIyRYF36f0vUe0nkjLuYAQAWgC2pZYgNLJR13iVbol0xHJoXQUHtgiaJ8\n"\ - "GLYFzjHQPqFMpSalQe3oELko39uOC1CoJCHFySECgYBeycUnRBikCO2n8DNhY4Eg\n"\ - "9Y3oxcssRt6ea5BZwgW2eAYi7/XqKkmxoSoOykUt3MJx9+EkkrL17bxFSpkj1tvL\n"\ - "qvxn7egtsKjjgGNAxwXC4MwCvhveyUQQxtQb8AqGrGqo4jEEN0L15cnP38i2x1Uo\n"\ - "muhfskWf4MABV0yTUaKcGg==\n"\ - "-----END PRIVATE KEY-----\n" +server_cert = '-----BEGIN CERTIFICATE-----\n' \ + 'MIIDXTCCAkWgAwIBAgIJAP4LF7E72HakMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n'\ + 'BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n'\ + 'aWRnaXRzIFB0eSBMdGQwHhcNMTkwNjA3MDk1OTE2WhcNMjAwNjA2MDk1OTE2WjBF\n'\ + 'MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n'\ + 'ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n'\ + 'CgKCAQEAlzfCyv3mIv7TlLkObxunKfCdrJ/zgdANrsx0RBtpEPhV560hWJ0fEin0\n'\ + 'nIOMpJSiF9E6QsPdr6Q+eogH4XnOMU9JE+iG743N1dPfGEzJvRlyct/Ck8SswKPC\n'\ + '9+VXsnOdZmUw9y/xtANbURA/TspvPzz3Avv382ffffrJGh7ooOmaZSCZFlSYHLZA\n'\ + 'w/XlRr0sSRbLpFGY0gXjaAV8iHHiPDYLy4kZOepjV9U51xi+IGsL4w75zuMgsHyF\n'\ + '3nJeGYHgtGVBrkL0ZKG5udY0wcBjysjubDJC4iSlNiq2HD3fhs7j6CZddV2v845M\n'\ + 'lVKNxP0kO4Uj4D8r+5USWC8JKfAwxQIDAQABo1AwTjAdBgNVHQ4EFgQU6OE7ssfY\n'\ + 'IIPTDThiUoofUpsD5NwwHwYDVR0jBBgwFoAU6OE7ssfYIIPTDThiUoofUpsD5Nww\n'\ + 'DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAXIlHS/FJWfmcinUAxyBd\n'\ + '/xd5Lu8ykeru6oaUCci+Vk9lyoMMES7lQ+b/00d5x7AcTawkTil9EWpBTPTOTraA\n'\ + 'lzJMQhNKmSLk0iIoTtAJtSZgUSpIIozqK6lenxQQDsHbXKU6h+u9H6KZE8YcjsFl\n'\ + '6vL7sw9BVotw/VxfgjQ5OSGLgoLrdVT0z5C2qOuwOgz1c7jNiJhtMdwN+cOtnJp2\n'\ + 'fuBgEYyE3eeuWogvkWoDcIA8r17Ixzkpq2oJsdvZcHZPIZShPKW2SHUsl98KDemu\n'\ + 'y0pQyExmQUbwKE4vbFb9XuWCcL9XaOHQytyszt2DeD67AipvoBwVU7/LBOvqnsmy\n'\ + 'hA==\n'\ + '-----END CERTIFICATE-----\n' + +server_key = '-----BEGIN PRIVATE KEY-----\n'\ + 'MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCXN8LK/eYi/tOU\n'\ + 'uQ5vG6cp8J2sn/OB0A2uzHREG2kQ+FXnrSFYnR8SKfScg4yklKIX0TpCw92vpD56\n'\ + 'iAfhec4xT0kT6Ibvjc3V098YTMm9GXJy38KTxKzAo8L35Veyc51mZTD3L/G0A1tR\n'\ + 'ED9Oym8/PPcC+/fzZ999+skaHuig6ZplIJkWVJgctkDD9eVGvSxJFsukUZjSBeNo\n'\ + 'BXyIceI8NgvLiRk56mNX1TnXGL4gawvjDvnO4yCwfIXecl4ZgeC0ZUGuQvRkobm5\n'\ + '1jTBwGPKyO5sMkLiJKU2KrYcPd+GzuPoJl11Xa/zjkyVUo3E/SQ7hSPgPyv7lRJY\n'\ + 'Lwkp8DDFAgMBAAECggEAfBhAfQE7mUByNbxgAgI5fot9eaqR1Nf+QpJ6X2H3KPwC\n'\ + '02sa0HOwieFwYfj6tB1doBoNq7i89mTc+QUlIn4pHgIowHO0OGawomeKz5BEhjCZ\n'\ + '4XeLYGSoODary2+kNkf2xY8JTfFEcyvGBpJEwc4S2VyYgRRx+IgnumTSH+N5mIKZ\n'\ + 'SXWNdZIuHEmkwod+rPRXs6/r+PH0eVW6WfpINEbr4zVAGXJx2zXQwd2cuV1GTJWh\n'\ + 'cPVOXLu+XJ9im9B370cYN6GqUnR3fui13urYbnWnEf3syvoH/zuZkyrVChauoFf8\n'\ + '8EGb74/HhXK7Q2s8NRakx2c7OxQifCbcy03liUMmyQKBgQDFAob5B/66N4Q2cq/N\n'\ + 'MWPf98kYBYoLaeEOhEJhLQlKk0pIFCTmtpmUbpoEes2kCUbH7RwczpYko8tlKyoB\n'\ + '6Fn6RY4zQQ64KZJI6kQVsjkYpcP/ihnOY6rbds+3yyv+4uPX7Eh9sYZwZMggE19M\n'\ + 'CkFHkwAjiwqhiiSlUxe20sWmowKBgQDEfx4lxuFzA1PBPeZKGVBTxYPQf+DSLCre\n'\ + 'ZFg3ZmrxbCjRq1O7Lra4FXWD3dmRq7NDk79JofoW50yD8wD7I0B7opdDfXD2idO8\n'\ + '0dBnWUKDr2CAXyoLEINce9kJPbx4kFBQRN9PiGF7VkDQxeQ3kfS8CvcErpTKCOdy\n'\ + '5wOwBTwJdwKBgDiTFTeGeDv5nVoVbS67tDao7XKchJvqd9q3WGiXikeELJyuTDqE\n'\ + 'zW22pTwMF+m3UEAxcxVCrhMvhkUzNAkANHaOatuFHzj7lyqhO5QPbh4J3FMR0X9X\n'\ + 'V8VWRSg+jA/SECP9koOl6zlzd5Tee0tW1pA7QpryXscs6IEhb3ns5R2JAoGAIkzO\n'\ + 'RmnhEOKTzDex611f2D+yMsMfy5BKK2f4vjLymBH5TiBKDXKqEpgsW0huoi8Gq9Uu\n'\ + 'nvvXXAgkIyRYF36f0vUe0nkjLuYAQAWgC2pZYgNLJR13iVbol0xHJoXQUHtgiaJ8\n'\ + 'GLYFzjHQPqFMpSalQe3oELko39uOC1CoJCHFySECgYBeycUnRBikCO2n8DNhY4Eg\n'\ + 
'9Y3oxcssRt6ea5BZwgW2eAYi7/XqKkmxoSoOykUt3MJx9+EkkrL17bxFSpkj1tvL\n'\ + 'qvxn7egtsKjjgGNAxwXC4MwCvhveyUQQxtQb8AqGrGqo4jEEN0L15cnP38i2x1Uo\n'\ + 'muhfskWf4MABV0yTUaKcGg==\n'\ + '-----END PRIVATE KEY-----\n' def get_my_ip(): s1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s1.connect(("8.8.8.8", 80)) + s1.connect(('8.8.8.8', 80)) my_ip = s1.getsockname()[0] s1.close() return my_ip @@ -81,16 +81,16 @@ def get_server_status(host_ip, port): def create_file(server_file, file_data): - with open(server_file, "w+") as file: + with open(server_file, 'w+') as file: file.write(file_data) def get_ca_cert(ota_image_dir): os.chdir(ota_image_dir) - server_file = os.path.join(ota_image_dir, "server_cert.pem") + server_file = os.path.join(ota_image_dir, 'server_cert.pem') create_file(server_file, server_cert) - key_file = os.path.join(ota_image_dir, "server_key.pem") + key_file = os.path.join(ota_image_dir, 'server_key.pem') create_file(key_file, server_key) return server_file, key_file @@ -131,7 +131,7 @@ def start_https_server(ota_image_dir, server_ip, server_port): def start_chunked_server(ota_image_dir, server_port): server_file, key_file = get_ca_cert(ota_image_dir) - chunked_server = subprocess.Popen(["openssl", "s_server", "-WWW", "-key", key_file, "-cert", server_file, "-port", str(server_port)]) + chunked_server = subprocess.Popen(['openssl', 's_server', '-WWW', '-key', key_file, '-cert', server_file, '-port', str(server_port)]) return chunked_server @@ -141,7 +141,7 @@ def redirect_handler_factory(url): """ class RedirectHandler(http.server.SimpleHTTPRequestHandler): def do_GET(self): - print("Sending resp, URL: " + url) + print('Sending resp, URL: ' + url) self.send_response(301) self.send_header('Location', url) self.end_headers() @@ -158,7 +158,7 @@ def redirect_handler_factory(url): def start_redirect_server(ota_image_dir, server_ip, server_port, redirection_port): os.chdir(ota_image_dir) server_file, key_file = get_ca_cert(ota_image_dir) - redirectHandler = redirect_handler_factory("https://" + server_ip + ":" + str(redirection_port) + "/advanced_https_ota.bin") + redirectHandler = redirect_handler_factory('https://' + server_ip + ':' + str(redirection_port) + '/advanced_https_ota.bin') httpd = http.server.HTTPServer((server_ip, server_port), redirectHandler) @@ -168,7 +168,7 @@ def start_redirect_server(ota_image_dir, server_ip, server_port, redirection_por httpd.serve_forever() -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_advanced_https_ota_example(env, extra_data): """ This is a positive test case, which downloads complete binary file multiple number of times. @@ -178,16 +178,16 @@ def test_examples_protocol_advanced_https_ota_example(env, extra_data): 2. Fetch OTA image over HTTPS 3. Reboot with the new OTA image """ - dut1 = env.get_dut("advanced_https_ota_example", "examples/system/ota/advanced_https_ota", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('advanced_https_ota_example', 'examples/system/ota/advanced_https_ota', dut_class=ttfw_idf.ESP32DUT) # Number of iterations to validate OTA iterations = 3 server_port = 8001 # File to be downloaded. 
This file is generated after compilation - bin_name = "advanced_https_ota.bin" + bin_name = 'advanced_https_ota.bin' # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("advanced_https_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('advanced_https_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -196,23 +196,23 @@ def test_examples_protocol_advanced_https_ota_example(env, extra_data): thread1.start() dut1.start_app() for i in range(iterations): - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') thread1.close() - dut1.expect("Starting Advanced OTA example", timeout=30) + dut1.expect('Starting Advanced OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + bin_name) - dut1.expect("Loaded app from partition at offset", timeout=60) - dut1.expect("Starting Advanced OTA example", timeout=30) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + bin_name) + dut1.expect('Loaded app from partition at offset', timeout=60) + dut1.expect('Starting Advanced OTA example', timeout=30) dut1.reset() -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_advanced_https_ota_example_truncated_bin(env, extra_data): """ Working of OTA if binary file is truncated is validated in this test case. @@ -223,25 +223,25 @@ def test_examples_protocol_advanced_https_ota_example_truncated_bin(env, extra_d 3. Fetch OTA image over HTTPS 4. Check working of code if bin is truncated """ - dut1 = env.get_dut("advanced_https_ota_example", "examples/system/ota/advanced_https_ota", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('advanced_https_ota_example', 'examples/system/ota/advanced_https_ota', dut_class=ttfw_idf.ESP32DUT) server_port = 8001 # Original binary file generated after compilation - bin_name = "advanced_https_ota.bin" + bin_name = 'advanced_https_ota.bin' # Truncated binary file to be generated from original binary file - truncated_bin_name = "truncated.bin" + truncated_bin_name = 'truncated.bin' # Size of truncated file to be grnerated. 
This value can range from 288 bytes (Image header size) to size of original binary file # truncated_bin_size is set to 64000 to reduce the time consumed by the test case truncated_bin_size = 64000 # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) - f = open(binary_file, "rb+") - fo = open(os.path.join(dut1.app.binary_path, truncated_bin_name), "wb+") + f = open(binary_file, 'rb+') + fo = open(os.path.join(dut1.app.binary_path, truncated_bin_name), 'wb+') fo.write(f.read(truncated_bin_size)) fo.close() f.close() binary_file = os.path.join(dut1.app.binary_path, truncated_bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("advanced_https_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('advanced_https_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -249,21 +249,21 @@ def test_examples_protocol_advanced_https_ota_example_truncated_bin(env, extra_d thread1.daemon = True thread1.start() dut1.start_app() - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - dut1.expect("Starting Advanced OTA example", timeout=30) + dut1.expect('Starting Advanced OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + truncated_bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + truncated_bin_name) - dut1.expect("Image validation failed, image is corrupted", timeout=30) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + truncated_bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + truncated_bin_name) + dut1.expect('Image validation failed, image is corrupted', timeout=30) os.remove(binary_file) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_advanced_https_ota_example_truncated_header(env, extra_data): """ Working of OTA if headers of binary file are truncated is validated in this test case. @@ -274,24 +274,24 @@ def test_examples_protocol_advanced_https_ota_example_truncated_header(env, extr 3. Fetch OTA image over HTTPS 4. Check working of code if headers are not sent completely """ - dut1 = env.get_dut("advanced_https_ota_example", "examples/system/ota/advanced_https_ota", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('advanced_https_ota_example', 'examples/system/ota/advanced_https_ota', dut_class=ttfw_idf.ESP32DUT) server_port = 8001 # Original binary file generated after compilation - bin_name = "advanced_https_ota.bin" + bin_name = 'advanced_https_ota.bin' # Truncated binary file to be generated from original binary file - truncated_bin_name = "truncated_header.bin" + truncated_bin_name = 'truncated_header.bin' # Size of truncated file to be generated. 
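
A note on the truncation fixtures in the hunks above: the paired `open`/`close` calls can be written with context managers, and the 288-byte lower bound the comments call "Image header size" matches the fixed prologue of an ESP-IDF app image (24-byte image header, 8-byte segment header, 256-byte `esp_app_desc_t`). A sketch with illustrative names:

```python
def make_truncated_copy(src_path, dst_path, keep_bytes):
    # Copy only the first keep_bytes of the app image; anything shorter than
    # the 288-byte prologue cannot even be parsed as a complete image header.
    with open(src_path, 'rb') as src, open(dst_path, 'wb') as dst:
        dst.write(src.read(keep_bytes))

# e.g. make_truncated_copy(binary_file, 'truncated.bin', 64000)
```
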
This value should be less than 288 bytes (Image header size) truncated_bin_size = 180 # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) - f = open(binary_file, "rb+") - fo = open(os.path.join(dut1.app.binary_path, truncated_bin_name), "wb+") + f = open(binary_file, 'rb+') + fo = open(os.path.join(dut1.app.binary_path, truncated_bin_name), 'wb+') fo.write(f.read(truncated_bin_size)) fo.close() f.close() binary_file = os.path.join(dut1.app.binary_path, truncated_bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("advanced_https_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('advanced_https_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -299,21 +299,21 @@ def test_examples_protocol_advanced_https_ota_example_truncated_header(env, extr thread1.daemon = True thread1.start() dut1.start_app() - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - dut1.expect("Starting Advanced OTA example", timeout=30) + dut1.expect('Starting Advanced OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + truncated_bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + truncated_bin_name) - dut1.expect("advanced_https_ota_example: esp_https_ota_read_img_desc failed", timeout=30) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + truncated_bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + truncated_bin_name) + dut1.expect('advanced_https_ota_example: esp_https_ota_read_img_desc failed', timeout=30) os.remove(binary_file) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_advanced_https_ota_example_random(env, extra_data): """ Working of OTA if random data is added in binary file is validated in this test case. @@ -324,23 +324,23 @@ def test_examples_protocol_advanced_https_ota_example_random(env, extra_data): 3. Fetch OTA image over HTTPS 4. Check working of code for random binary file """ - dut1 = env.get_dut("advanced_https_ota_example", "examples/system/ota/advanced_https_ota", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('advanced_https_ota_example', 'examples/system/ota/advanced_https_ota', dut_class=ttfw_idf.ESP32DUT) server_port = 8001 # Random binary file to be generated - random_bin_name = "random.bin" + random_bin_name = 'random.bin' # Size of random binary file. 32000 is chosen to reduce the time required to run the test case random_bin_size = 32000 # check and log bin size binary_file = os.path.join(dut1.app.binary_path, random_bin_name) - fo = open(binary_file, "wb+") + fo = open(binary_file, 'wb+') # First byte of binary file is always set to zero. If first byte is generated randomly, # in some cases it may generate 0xE9 which will result in failure of the test case. 
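
On the `struct.pack` writes that follow this note: 0xE9 is the ESP application image magic byte, so the fixture pins the first byte to zero to guarantee the "invalid magic byte" failure the test expects. An equivalent sketch (using `os.urandom` in place of the per-byte `randrange` loop is an editorial substitution, not the example's code):

```python
import os


def make_random_image(path, size):
    with open(path, 'wb') as f:
        f.write(b'\x00')               # never 0xE9, so the magic check must fail
        f.write(os.urandom(size - 1))  # the remaining bytes can be anything

# e.g. make_random_image('random.bin', 32000)
```
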
- fo.write(struct.pack("B", 0)) + fo.write(struct.pack('B', 0)) for i in range(random_bin_size - 1): - fo.write(struct.pack("B", random.randrange(0,255,1))) + fo.write(struct.pack('B', random.randrange(0,255,1))) fo.close() bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("advanced_https_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('advanced_https_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -348,21 +348,21 @@ def test_examples_protocol_advanced_https_ota_example_random(env, extra_data): thread1.daemon = True thread1.start() dut1.start_app() - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - dut1.expect("Starting Advanced OTA example", timeout=30) + dut1.expect('Starting Advanced OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + random_bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + random_bin_name) - dut1.expect("esp_ota_ops: OTA image has invalid magic byte", timeout=10) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + random_bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + random_bin_name) + dut1.expect('esp_ota_ops: OTA image has invalid magic byte', timeout=10) os.remove(binary_file) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_advanced_https_ota_example_chunked(env, extra_data): """ This is a positive test case, which downloads complete binary file multiple number of times. @@ -372,35 +372,35 @@ def test_examples_protocol_advanced_https_ota_example_chunked(env, extra_data): 2. Fetch OTA image over HTTPS 3. Reboot with the new OTA image """ - dut1 = env.get_dut("advanced_https_ota_example", "examples/system/ota/advanced_https_ota", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('advanced_https_ota_example', 'examples/system/ota/advanced_https_ota', dut_class=ttfw_idf.ESP32DUT) # File to be downloaded. 
This file is generated after compilation - bin_name = "advanced_https_ota.bin" + bin_name = 'advanced_https_ota.bin' # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("advanced_https_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('advanced_https_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() chunked_server = start_chunked_server(dut1.app.binary_path, 8070) dut1.start_app() - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - dut1.expect("Starting Advanced OTA example", timeout=30) + dut1.expect('Starting Advanced OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":8070/" + bin_name)) - dut1.write("https://" + host_ip + ":8070/" + bin_name) - dut1.expect("Loaded app from partition at offset", timeout=60) - dut1.expect("Starting Advanced OTA example", timeout=30) + print('writing to device: {}'.format('https://' + host_ip + ':8070/' + bin_name)) + dut1.write('https://' + host_ip + ':8070/' + bin_name) + dut1.expect('Loaded app from partition at offset', timeout=60) + dut1.expect('Starting Advanced OTA example', timeout=30) chunked_server.kill() - os.remove(os.path.join(dut1.app.binary_path, "server_cert.pem")) - os.remove(os.path.join(dut1.app.binary_path, "server_key.pem")) + os.remove(os.path.join(dut1.app.binary_path, 'server_cert.pem')) + os.remove(os.path.join(dut1.app.binary_path, 'server_key.pem')) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_advanced_https_ota_example_redirect_url(env, extra_data): """ This is a positive test case, which starts a server and a redirection server. @@ -411,16 +411,16 @@ def test_examples_protocol_advanced_https_ota_example_redirect_url(env, extra_da 2. Fetch OTA image over HTTPS 3. Reboot with the new OTA image """ - dut1 = env.get_dut("advanced_https_ota_example", "examples/system/ota/advanced_https_ota", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('advanced_https_ota_example', 'examples/system/ota/advanced_https_ota', dut_class=ttfw_idf.ESP32DUT) server_port = 8001 # Port to which the request should be redirecetd redirection_server_port = 8081 # File to be downloaded. 
This file is generated after compilation - bin_name = "advanced_https_ota.bin" + bin_name = 'advanced_https_ota.bin' # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("advanced_https_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('advanced_https_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -431,24 +431,24 @@ def test_examples_protocol_advanced_https_ota_example_redirect_url(env, extra_da thread2.daemon = True thread2.start() dut1.start_app() - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') thread1.close() thread2.close() - dut1.expect("Starting Advanced OTA example", timeout=30) + dut1.expect('Starting Advanced OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":" + str(redirection_server_port) + "/" + bin_name)) - dut1.write("https://" + host_ip + ":" + str(redirection_server_port) + "/" + bin_name) - dut1.expect("Loaded app from partition at offset", timeout=60) - dut1.expect("Starting Advanced OTA example", timeout=30) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(redirection_server_port) + '/' + bin_name)) + dut1.write('https://' + host_ip + ':' + str(redirection_server_port) + '/' + bin_name) + dut1.expect('Loaded app from partition at offset', timeout=60) + dut1.expect('Starting Advanced OTA example', timeout=30) dut1.reset() -@ttfw_idf.idf_example_test(env_tag="Example_8Mflash_Ethernet") +@ttfw_idf.idf_example_test(env_tag='Example_8Mflash_Ethernet') def test_examples_protocol_advanced_https_ota_example_anti_rollback(env, extra_data): """ Working of OTA when anti_rollback is enabled and security version of new image is less than current one. @@ -459,17 +459,17 @@ def test_examples_protocol_advanced_https_ota_example_anti_rollback(env, extra_d 3. Fetch OTA image over HTTPS 4. Check working of anti_rollback feature """ - dut1 = env.get_dut("advanced_https_ota_example", "examples/system/ota/advanced_https_ota", dut_class=ttfw_idf.ESP32DUT, app_config_name='anti_rollback') + dut1 = env.get_dut('advanced_https_ota_example', 'examples/system/ota/advanced_https_ota', dut_class=ttfw_idf.ESP32DUT, app_config_name='anti_rollback') server_port = 8001 # Original binary file generated after compilation - bin_name = "advanced_https_ota.bin" + bin_name = 'advanced_https_ota.bin' # Modified firmware image to lower security version in its header. 
This is to enable negative test case - anti_rollback_bin_name = "advanced_https_ota_lower_sec_version.bin" + anti_rollback_bin_name = 'advanced_https_ota_lower_sec_version.bin' # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) file_size = os.path.getsize(binary_file) - f = open(binary_file, "rb+") - fo = open(os.path.join(dut1.app.binary_path, anti_rollback_bin_name), "wb+") + f = open(binary_file, 'rb+') + fo = open(os.path.join(dut1.app.binary_path, anti_rollback_bin_name), 'wb+') fo.write(f.read(file_size)) # Change security_version to 0 for negative test case fo.seek(36) @@ -478,7 +478,7 @@ def test_examples_protocol_advanced_https_ota_example_anti_rollback(env, extra_d f.close() binary_file = os.path.join(dut1.app.binary_path, anti_rollback_bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("advanced_https_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('advanced_https_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -487,27 +487,27 @@ def test_examples_protocol_advanced_https_ota_example_anti_rollback(env, extra_d thread1.start() dut1.start_app() # Positive Case - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" eth ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' eth ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - dut1.expect("Starting Advanced OTA example", timeout=30) + dut1.expect('Starting Advanced OTA example', timeout=30) # Use originally generated image with secure_version=1 - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + bin_name) - dut1.expect("Loaded app from partition at offset", timeout=60) - dut1.expect(re.compile(r" eth ip: ([^,]+),"), timeout=30) - dut1.expect("App is valid, rollback cancelled successfully", 30) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + bin_name) + dut1.expect('Loaded app from partition at offset', timeout=60) + dut1.expect(re.compile(r' eth ip: ([^,]+),'), timeout=30) + dut1.expect('App is valid, rollback cancelled successfully', 30) # Negative Case - dut1.expect("Starting Advanced OTA example", timeout=30) + dut1.expect('Starting Advanced OTA example', timeout=30) # Use modified image with secure_version=0 - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + anti_rollback_bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + anti_rollback_bin_name) - dut1.expect("New firmware security version is less than eFuse programmed, 0 < 1", timeout=30) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + anti_rollback_bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + anti_rollback_bin_name) + dut1.expect('New firmware security version is less than eFuse programmed, 0 < 1', timeout=30) os.remove(anti_rollback_bin_name) diff --git a/examples/system/ota/native_ota_example/example_test.py 
b/examples/system/ota/native_ota_example/example_test.py index be949ff7e3..7dd3d989f3 100644 --- a/examples/system/ota/native_ota_example/example_test.py +++ b/examples/system/ota/native_ota_example/example_test.py @@ -1,71 +1,71 @@ -import re -import os -import struct -import socket import http.server -from threading import Thread -import ssl - -from tiny_test_fw import DUT -import ttfw_idf +import os import random +import re +import socket +import ssl +import struct import subprocess +from threading import Thread -server_cert = "-----BEGIN CERTIFICATE-----\n" \ - "MIIDXTCCAkWgAwIBAgIJAP4LF7E72HakMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"\ - "BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n"\ - "aWRnaXRzIFB0eSBMdGQwHhcNMTkwNjA3MDk1OTE2WhcNMjAwNjA2MDk1OTE2WjBF\n"\ - "MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n"\ - "ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n"\ - "CgKCAQEAlzfCyv3mIv7TlLkObxunKfCdrJ/zgdANrsx0RBtpEPhV560hWJ0fEin0\n"\ - "nIOMpJSiF9E6QsPdr6Q+eogH4XnOMU9JE+iG743N1dPfGEzJvRlyct/Ck8SswKPC\n"\ - "9+VXsnOdZmUw9y/xtANbURA/TspvPzz3Avv382ffffrJGh7ooOmaZSCZFlSYHLZA\n"\ - "w/XlRr0sSRbLpFGY0gXjaAV8iHHiPDYLy4kZOepjV9U51xi+IGsL4w75zuMgsHyF\n"\ - "3nJeGYHgtGVBrkL0ZKG5udY0wcBjysjubDJC4iSlNiq2HD3fhs7j6CZddV2v845M\n"\ - "lVKNxP0kO4Uj4D8r+5USWC8JKfAwxQIDAQABo1AwTjAdBgNVHQ4EFgQU6OE7ssfY\n"\ - "IIPTDThiUoofUpsD5NwwHwYDVR0jBBgwFoAU6OE7ssfYIIPTDThiUoofUpsD5Nww\n"\ - "DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAXIlHS/FJWfmcinUAxyBd\n"\ - "/xd5Lu8ykeru6oaUCci+Vk9lyoMMES7lQ+b/00d5x7AcTawkTil9EWpBTPTOTraA\n"\ - "lzJMQhNKmSLk0iIoTtAJtSZgUSpIIozqK6lenxQQDsHbXKU6h+u9H6KZE8YcjsFl\n"\ - "6vL7sw9BVotw/VxfgjQ5OSGLgoLrdVT0z5C2qOuwOgz1c7jNiJhtMdwN+cOtnJp2\n"\ - "fuBgEYyE3eeuWogvkWoDcIA8r17Ixzkpq2oJsdvZcHZPIZShPKW2SHUsl98KDemu\n"\ - "y0pQyExmQUbwKE4vbFb9XuWCcL9XaOHQytyszt2DeD67AipvoBwVU7/LBOvqnsmy\n"\ - "hA==\n"\ - "-----END CERTIFICATE-----\n" +import ttfw_idf +from tiny_test_fw import DUT -server_key = "-----BEGIN PRIVATE KEY-----\n"\ - "MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCXN8LK/eYi/tOU\n"\ - "uQ5vG6cp8J2sn/OB0A2uzHREG2kQ+FXnrSFYnR8SKfScg4yklKIX0TpCw92vpD56\n"\ - "iAfhec4xT0kT6Ibvjc3V098YTMm9GXJy38KTxKzAo8L35Veyc51mZTD3L/G0A1tR\n"\ - "ED9Oym8/PPcC+/fzZ999+skaHuig6ZplIJkWVJgctkDD9eVGvSxJFsukUZjSBeNo\n"\ - "BXyIceI8NgvLiRk56mNX1TnXGL4gawvjDvnO4yCwfIXecl4ZgeC0ZUGuQvRkobm5\n"\ - "1jTBwGPKyO5sMkLiJKU2KrYcPd+GzuPoJl11Xa/zjkyVUo3E/SQ7hSPgPyv7lRJY\n"\ - "Lwkp8DDFAgMBAAECggEAfBhAfQE7mUByNbxgAgI5fot9eaqR1Nf+QpJ6X2H3KPwC\n"\ - "02sa0HOwieFwYfj6tB1doBoNq7i89mTc+QUlIn4pHgIowHO0OGawomeKz5BEhjCZ\n"\ - "4XeLYGSoODary2+kNkf2xY8JTfFEcyvGBpJEwc4S2VyYgRRx+IgnumTSH+N5mIKZ\n"\ - "SXWNdZIuHEmkwod+rPRXs6/r+PH0eVW6WfpINEbr4zVAGXJx2zXQwd2cuV1GTJWh\n"\ - "cPVOXLu+XJ9im9B370cYN6GqUnR3fui13urYbnWnEf3syvoH/zuZkyrVChauoFf8\n"\ - "8EGb74/HhXK7Q2s8NRakx2c7OxQifCbcy03liUMmyQKBgQDFAob5B/66N4Q2cq/N\n"\ - "MWPf98kYBYoLaeEOhEJhLQlKk0pIFCTmtpmUbpoEes2kCUbH7RwczpYko8tlKyoB\n"\ - "6Fn6RY4zQQ64KZJI6kQVsjkYpcP/ihnOY6rbds+3yyv+4uPX7Eh9sYZwZMggE19M\n"\ - "CkFHkwAjiwqhiiSlUxe20sWmowKBgQDEfx4lxuFzA1PBPeZKGVBTxYPQf+DSLCre\n"\ - "ZFg3ZmrxbCjRq1O7Lra4FXWD3dmRq7NDk79JofoW50yD8wD7I0B7opdDfXD2idO8\n"\ - "0dBnWUKDr2CAXyoLEINce9kJPbx4kFBQRN9PiGF7VkDQxeQ3kfS8CvcErpTKCOdy\n"\ - "5wOwBTwJdwKBgDiTFTeGeDv5nVoVbS67tDao7XKchJvqd9q3WGiXikeELJyuTDqE\n"\ - "zW22pTwMF+m3UEAxcxVCrhMvhkUzNAkANHaOatuFHzj7lyqhO5QPbh4J3FMR0X9X\n"\ - "V8VWRSg+jA/SECP9koOl6zlzd5Tee0tW1pA7QpryXscs6IEhb3ns5R2JAoGAIkzO\n"\ - "RmnhEOKTzDex611f2D+yMsMfy5BKK2f4vjLymBH5TiBKDXKqEpgsW0huoi8Gq9Uu\n"\ - 
"nvvXXAgkIyRYF36f0vUe0nkjLuYAQAWgC2pZYgNLJR13iVbol0xHJoXQUHtgiaJ8\n"\ - "GLYFzjHQPqFMpSalQe3oELko39uOC1CoJCHFySECgYBeycUnRBikCO2n8DNhY4Eg\n"\ - "9Y3oxcssRt6ea5BZwgW2eAYi7/XqKkmxoSoOykUt3MJx9+EkkrL17bxFSpkj1tvL\n"\ - "qvxn7egtsKjjgGNAxwXC4MwCvhveyUQQxtQb8AqGrGqo4jEEN0L15cnP38i2x1Uo\n"\ - "muhfskWf4MABV0yTUaKcGg==\n"\ - "-----END PRIVATE KEY-----\n" +server_cert = '-----BEGIN CERTIFICATE-----\n' \ + 'MIIDXTCCAkWgAwIBAgIJAP4LF7E72HakMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n'\ + 'BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n'\ + 'aWRnaXRzIFB0eSBMdGQwHhcNMTkwNjA3MDk1OTE2WhcNMjAwNjA2MDk1OTE2WjBF\n'\ + 'MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n'\ + 'ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n'\ + 'CgKCAQEAlzfCyv3mIv7TlLkObxunKfCdrJ/zgdANrsx0RBtpEPhV560hWJ0fEin0\n'\ + 'nIOMpJSiF9E6QsPdr6Q+eogH4XnOMU9JE+iG743N1dPfGEzJvRlyct/Ck8SswKPC\n'\ + '9+VXsnOdZmUw9y/xtANbURA/TspvPzz3Avv382ffffrJGh7ooOmaZSCZFlSYHLZA\n'\ + 'w/XlRr0sSRbLpFGY0gXjaAV8iHHiPDYLy4kZOepjV9U51xi+IGsL4w75zuMgsHyF\n'\ + '3nJeGYHgtGVBrkL0ZKG5udY0wcBjysjubDJC4iSlNiq2HD3fhs7j6CZddV2v845M\n'\ + 'lVKNxP0kO4Uj4D8r+5USWC8JKfAwxQIDAQABo1AwTjAdBgNVHQ4EFgQU6OE7ssfY\n'\ + 'IIPTDThiUoofUpsD5NwwHwYDVR0jBBgwFoAU6OE7ssfYIIPTDThiUoofUpsD5Nww\n'\ + 'DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAXIlHS/FJWfmcinUAxyBd\n'\ + '/xd5Lu8ykeru6oaUCci+Vk9lyoMMES7lQ+b/00d5x7AcTawkTil9EWpBTPTOTraA\n'\ + 'lzJMQhNKmSLk0iIoTtAJtSZgUSpIIozqK6lenxQQDsHbXKU6h+u9H6KZE8YcjsFl\n'\ + '6vL7sw9BVotw/VxfgjQ5OSGLgoLrdVT0z5C2qOuwOgz1c7jNiJhtMdwN+cOtnJp2\n'\ + 'fuBgEYyE3eeuWogvkWoDcIA8r17Ixzkpq2oJsdvZcHZPIZShPKW2SHUsl98KDemu\n'\ + 'y0pQyExmQUbwKE4vbFb9XuWCcL9XaOHQytyszt2DeD67AipvoBwVU7/LBOvqnsmy\n'\ + 'hA==\n'\ + '-----END CERTIFICATE-----\n' + +server_key = '-----BEGIN PRIVATE KEY-----\n'\ + 'MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCXN8LK/eYi/tOU\n'\ + 'uQ5vG6cp8J2sn/OB0A2uzHREG2kQ+FXnrSFYnR8SKfScg4yklKIX0TpCw92vpD56\n'\ + 'iAfhec4xT0kT6Ibvjc3V098YTMm9GXJy38KTxKzAo8L35Veyc51mZTD3L/G0A1tR\n'\ + 'ED9Oym8/PPcC+/fzZ999+skaHuig6ZplIJkWVJgctkDD9eVGvSxJFsukUZjSBeNo\n'\ + 'BXyIceI8NgvLiRk56mNX1TnXGL4gawvjDvnO4yCwfIXecl4ZgeC0ZUGuQvRkobm5\n'\ + '1jTBwGPKyO5sMkLiJKU2KrYcPd+GzuPoJl11Xa/zjkyVUo3E/SQ7hSPgPyv7lRJY\n'\ + 'Lwkp8DDFAgMBAAECggEAfBhAfQE7mUByNbxgAgI5fot9eaqR1Nf+QpJ6X2H3KPwC\n'\ + '02sa0HOwieFwYfj6tB1doBoNq7i89mTc+QUlIn4pHgIowHO0OGawomeKz5BEhjCZ\n'\ + '4XeLYGSoODary2+kNkf2xY8JTfFEcyvGBpJEwc4S2VyYgRRx+IgnumTSH+N5mIKZ\n'\ + 'SXWNdZIuHEmkwod+rPRXs6/r+PH0eVW6WfpINEbr4zVAGXJx2zXQwd2cuV1GTJWh\n'\ + 'cPVOXLu+XJ9im9B370cYN6GqUnR3fui13urYbnWnEf3syvoH/zuZkyrVChauoFf8\n'\ + '8EGb74/HhXK7Q2s8NRakx2c7OxQifCbcy03liUMmyQKBgQDFAob5B/66N4Q2cq/N\n'\ + 'MWPf98kYBYoLaeEOhEJhLQlKk0pIFCTmtpmUbpoEes2kCUbH7RwczpYko8tlKyoB\n'\ + '6Fn6RY4zQQ64KZJI6kQVsjkYpcP/ihnOY6rbds+3yyv+4uPX7Eh9sYZwZMggE19M\n'\ + 'CkFHkwAjiwqhiiSlUxe20sWmowKBgQDEfx4lxuFzA1PBPeZKGVBTxYPQf+DSLCre\n'\ + 'ZFg3ZmrxbCjRq1O7Lra4FXWD3dmRq7NDk79JofoW50yD8wD7I0B7opdDfXD2idO8\n'\ + '0dBnWUKDr2CAXyoLEINce9kJPbx4kFBQRN9PiGF7VkDQxeQ3kfS8CvcErpTKCOdy\n'\ + '5wOwBTwJdwKBgDiTFTeGeDv5nVoVbS67tDao7XKchJvqd9q3WGiXikeELJyuTDqE\n'\ + 'zW22pTwMF+m3UEAxcxVCrhMvhkUzNAkANHaOatuFHzj7lyqhO5QPbh4J3FMR0X9X\n'\ + 'V8VWRSg+jA/SECP9koOl6zlzd5Tee0tW1pA7QpryXscs6IEhb3ns5R2JAoGAIkzO\n'\ + 'RmnhEOKTzDex611f2D+yMsMfy5BKK2f4vjLymBH5TiBKDXKqEpgsW0huoi8Gq9Uu\n'\ + 'nvvXXAgkIyRYF36f0vUe0nkjLuYAQAWgC2pZYgNLJR13iVbol0xHJoXQUHtgiaJ8\n'\ + 'GLYFzjHQPqFMpSalQe3oELko39uOC1CoJCHFySECgYBeycUnRBikCO2n8DNhY4Eg\n'\ + '9Y3oxcssRt6ea5BZwgW2eAYi7/XqKkmxoSoOykUt3MJx9+EkkrL17bxFSpkj1tvL\n'\ + 
'qvxn7egtsKjjgGNAxwXC4MwCvhveyUQQxtQb8AqGrGqo4jEEN0L15cnP38i2x1Uo\n'\ + 'muhfskWf4MABV0yTUaKcGg==\n'\ + '-----END PRIVATE KEY-----\n' def get_my_ip(): s1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - s1.connect(("8.8.8.8", 80)) + s1.connect(('8.8.8.8', 80)) my_ip = s1.getsockname()[0] s1.close() return my_ip @@ -81,16 +81,16 @@ def get_server_status(host_ip, port): def create_file(server_file, file_data): - with open(server_file, "w+") as file: + with open(server_file, 'w+') as file: file.write(file_data) def get_ca_cert(ota_image_dir): os.chdir(ota_image_dir) - server_file = os.path.join(ota_image_dir, "server_cert.pem") + server_file = os.path.join(ota_image_dir, 'server_cert.pem') create_file(server_file, server_cert) - key_file = os.path.join(ota_image_dir, "server_key.pem") + key_file = os.path.join(ota_image_dir, 'server_key.pem') create_file(key_file, server_key) return server_file, key_file @@ -131,11 +131,11 @@ def start_https_server(ota_image_dir, server_ip, server_port): def start_chunked_server(ota_image_dir, server_port): server_file, key_file = get_ca_cert(ota_image_dir) - chunked_server = subprocess.Popen(["openssl", "s_server", "-WWW", "-key", key_file, "-cert", server_file, "-port", str(server_port)]) + chunked_server = subprocess.Popen(['openssl', 's_server', '-WWW', '-key', key_file, '-cert', server_file, '-port', str(server_port)]) return chunked_server -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_native_ota_example(env, extra_data): """ This is a positive test case, which downloads complete binary file multiple number of times. @@ -145,16 +145,16 @@ def test_examples_protocol_native_ota_example(env, extra_data): 2. Fetch OTA image over HTTPS 3. Reboot with the new OTA image """ - dut1 = env.get_dut("native_ota_example", "examples/system/ota/native_ota_example", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('native_ota_example', 'examples/system/ota/native_ota_example', dut_class=ttfw_idf.ESP32DUT) server_port = 8002 # No. of times working of application to be validated iterations = 3 # File to be downloaded. 
This file is generated after compilation - bin_name = "native_ota.bin" + bin_name = 'native_ota.bin' # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("native_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('native_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -163,23 +163,23 @@ def test_examples_protocol_native_ota_example(env, extra_data): thread1.start() dut1.start_app() for i in range(iterations): - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') thread1.close() - dut1.expect("Starting OTA example", timeout=30) + dut1.expect('Starting OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + bin_name) - dut1.expect("Loaded app from partition at offset", timeout=60) - dut1.expect("Starting OTA example", timeout=30) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + bin_name) + dut1.expect('Loaded app from partition at offset', timeout=60) + dut1.expect('Starting OTA example', timeout=30) dut1.reset() -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_native_ota_example_truncated_bin(env, extra_data): """ Working of OTA if binary file is truncated is validated in this test case. @@ -190,25 +190,25 @@ def test_examples_protocol_native_ota_example_truncated_bin(env, extra_data): 3. Fetch OTA image over HTTPS 4. Check working of code if bin is truncated """ - dut1 = env.get_dut("native_ota_example", "examples/system/ota/native_ota_example", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('native_ota_example', 'examples/system/ota/native_ota_example', dut_class=ttfw_idf.ESP32DUT) server_port = 8002 # Original binary file generated after compilation - bin_name = "native_ota.bin" + bin_name = 'native_ota.bin' # Truncated binary file to be generated from original binary file - truncated_bin_name = "truncated.bin" + truncated_bin_name = 'truncated.bin' # Size of truncated file to be grnerated. 
This value can range from 288 bytes (Image header size) to size of original binary file # truncated_bin_size is set to 64000 to reduce the time consumed by the test case truncated_bin_size = 64000 # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) - f = open(binary_file, "rb+") - fo = open(os.path.join(dut1.app.binary_path, truncated_bin_name), "wb+") + f = open(binary_file, 'rb+') + fo = open(os.path.join(dut1.app.binary_path, truncated_bin_name), 'wb+') fo.write(f.read(truncated_bin_size)) fo.close() f.close() binary_file = os.path.join(dut1.app.binary_path, truncated_bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("native_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('native_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -216,21 +216,21 @@ def test_examples_protocol_native_ota_example_truncated_bin(env, extra_data): thread1.daemon = True thread1.start() dut1.start_app() - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=60) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=60) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - dut1.expect("Starting OTA example", timeout=30) + dut1.expect('Starting OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + truncated_bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + truncated_bin_name) - dut1.expect("native_ota_example: Image validation failed, image is corrupted", timeout=20) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + truncated_bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + truncated_bin_name) + dut1.expect('native_ota_example: Image validation failed, image is corrupted', timeout=20) os.remove(binary_file) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_native_ota_example_truncated_header(env, extra_data): """ Working of OTA if headers of binary file are truncated is validated in this test case. @@ -241,24 +241,24 @@ def test_examples_protocol_native_ota_example_truncated_header(env, extra_data): 3. Fetch OTA image over HTTPS 4. Check working of code if headers are not sent completely """ - dut1 = env.get_dut("native_ota_example", "examples/system/ota/native_ota_example", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('native_ota_example', 'examples/system/ota/native_ota_example', dut_class=ttfw_idf.ESP32DUT) server_port = 8002 # Original binary file generated after compilation - bin_name = "native_ota.bin" + bin_name = 'native_ota.bin' # Truncated binary file to be generated from original binary file - truncated_bin_name = "truncated_header.bin" + truncated_bin_name = 'truncated_header.bin' # Size of truncated file to be generated. 
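
The `dut1.expect(re.compile(r' sta ip: ([^,]+),'), ...)` calls repeated throughout these tests use a capturing group to pull the station IP out of the boot log, and the tests print the returned value as that IP. The same pattern in plain `re` terms (the log line below is illustrative, not a verbatim IDF log):

```python
import re

pattern = re.compile(r' sta ip: ([^,]+),')
line = 'I (5043) example_connect: sta ip: 192.168.1.34, mask: 255.255.255.0'
match = pattern.search(line)
assert match and match.group(1) == '192.168.1.34'  # group 1 is everything up to the comma
```
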
This value should be less than 288 bytes (Image header size) truncated_bin_size = 180 # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) - f = open(binary_file, "rb+") - fo = open(os.path.join(dut1.app.binary_path, truncated_bin_name), "wb+") + f = open(binary_file, 'rb+') + fo = open(os.path.join(dut1.app.binary_path, truncated_bin_name), 'wb+') fo.write(f.read(truncated_bin_size)) fo.close() f.close() binary_file = os.path.join(dut1.app.binary_path, truncated_bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("native_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('native_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -266,21 +266,21 @@ def test_examples_protocol_native_ota_example_truncated_header(env, extra_data): thread1.daemon = True thread1.start() dut1.start_app() - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=60) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=60) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - dut1.expect("Starting OTA example", timeout=30) + dut1.expect('Starting OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + truncated_bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + truncated_bin_name) - dut1.expect("native_ota_example: received package is not fit len", timeout=20) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + truncated_bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + truncated_bin_name) + dut1.expect('native_ota_example: received package is not fit len', timeout=20) os.remove(binary_file) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_native_ota_example_random(env, extra_data): """ Working of OTA if random data is added in binary file is validated in this test case. @@ -291,23 +291,23 @@ def test_examples_protocol_native_ota_example_random(env, extra_data): 3. Fetch OTA image over HTTPS 4. Check working of code for random binary file """ - dut1 = env.get_dut("native_ota_example", "examples/system/ota/native_ota_example", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('native_ota_example', 'examples/system/ota/native_ota_example', dut_class=ttfw_idf.ESP32DUT) server_port = 8002 # Random binary file to be generated - random_bin_name = "random.bin" + random_bin_name = 'random.bin' # Size of random binary file. 32000 is chosen to reduce the time required to run the test case random_bin_size = 32000 # check and log bin size binary_file = os.path.join(dut1.app.binary_path, random_bin_name) - fo = open(binary_file, "wb+") + fo = open(binary_file, 'wb+') # First byte of binary file is always set to zero. If first byte is generated randomly, # in some cases it may generate 0xE9 which will result in failure of the test case. 
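
Before any of these tests can point the DUT at a URL, `get_my_ip()` (defined near the top of this file) has to find the host's LAN address. The trick it relies on: calling `connect()` on a UDP socket transmits nothing, but still makes the OS pick an outbound interface and local address, which `getsockname()` then reports:

```python
import socket

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('8.8.8.8', 80))   # no packet is actually sent for a UDP connect()
print(s.getsockname()[0])    # the local address the OS chose, e.g. 192.168.1.5
s.close()
```
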
- fo.write(struct.pack("B", 0)) + fo.write(struct.pack('B', 0)) for i in range(random_bin_size - 1): - fo.write(struct.pack("B", random.randrange(0,255,1))) + fo.write(struct.pack('B', random.randrange(0,255,1))) fo.close() bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("native_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('native_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() if (get_server_status(host_ip, server_port) is False): @@ -315,21 +315,21 @@ def test_examples_protocol_native_ota_example_random(env, extra_data): thread1.daemon = True thread1.start() dut1.start_app() - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=60) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=60) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - dut1.expect("Starting OTA example", timeout=30) + dut1.expect('Starting OTA example', timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":" + str(server_port) + "/" + random_bin_name)) - dut1.write("https://" + host_ip + ":" + str(server_port) + "/" + random_bin_name) - dut1.expect("esp_ota_ops: OTA image has invalid magic byte", timeout=20) + print('writing to device: {}'.format('https://' + host_ip + ':' + str(server_port) + '/' + random_bin_name)) + dut1.write('https://' + host_ip + ':' + str(server_port) + '/' + random_bin_name) + dut1.expect('esp_ota_ops: OTA image has invalid magic byte', timeout=20) os.remove(binary_file) -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_native_ota_example_chunked(env, extra_data): """ This is a positive test case, which downloads complete binary file multiple number of times. @@ -339,32 +339,32 @@ def test_examples_protocol_native_ota_example_chunked(env, extra_data): 2. Fetch OTA image over HTTPS 3. Reboot with the new OTA image """ - dut1 = env.get_dut("native_ota_example", "examples/system/ota/native_ota_example", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('native_ota_example', 'examples/system/ota/native_ota_example', dut_class=ttfw_idf.ESP32DUT) # File to be downloaded. 
This file is generated after compilation - bin_name = "native_ota.bin" + bin_name = 'native_ota.bin' # check and log bin size binary_file = os.path.join(dut1.app.binary_path, bin_name) bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("native_ota_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('native_ota_bin_size', '{}KB'.format(bin_size // 1024)) # start test host_ip = get_my_ip() chunked_server = start_chunked_server(dut1.app.binary_path, 8070) dut1.start_app() - dut1.expect("Loaded app from partition at offset", timeout=30) + dut1.expect('Loaded app from partition at offset', timeout=30) try: - ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30) - print("Connected to AP with IP: {}".format(ip_address)) + ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30) + print('Connected to AP with IP: {}'.format(ip_address)) except DUT.ExpectTimeout: raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP') - dut1.expect("Starting OTA example", timeout=30) - print("writing to device: {}".format("https://" + host_ip + ":8070/" + bin_name)) - dut1.write("https://" + host_ip + ":8070/" + bin_name) - dut1.expect("Loaded app from partition at offset", timeout=60) - dut1.expect("Starting OTA example", timeout=30) + dut1.expect('Starting OTA example', timeout=30) + print('writing to device: {}'.format('https://' + host_ip + ':8070/' + bin_name)) + dut1.write('https://' + host_ip + ':8070/' + bin_name) + dut1.expect('Loaded app from partition at offset', timeout=60) + dut1.expect('Starting OTA example', timeout=30) chunked_server.kill() - os.remove(os.path.join(dut1.app.binary_path, "server_cert.pem")) - os.remove(os.path.join(dut1.app.binary_path, "server_key.pem")) + os.remove(os.path.join(dut1.app.binary_path, 'server_cert.pem')) + os.remove(os.path.join(dut1.app.binary_path, 'server_key.pem')) if __name__ == '__main__': diff --git a/examples/system/ota/otatool/example_test.py b/examples/system/ota/otatool/example_test.py index 404f075780..0ec0bc1234 100644 --- a/examples/system/ota/otatool/example_test.py +++ b/examples/system/ota/otatool/example_test.py @@ -1,7 +1,8 @@ from __future__ import print_function + import os -import sys import subprocess +import sys import ttfw_idf @@ -12,22 +13,22 @@ def test_otatool_example(env, extra_data): # Verify factory firmware dut.start_app() - dut.expect("OTA Tool Example") - dut.expect("Example end") + dut.expect('OTA Tool Example') + dut.expect('Example end') # Close connection to DUT dut.receive_thread.exit() dut.port_inst.close() - script_path = os.path.join(os.getenv("IDF_PATH"), "examples", "system", "ota", "otatool", "otatool_example.py") - binary_path = "" + script_path = os.path.join(os.getenv('IDF_PATH'), 'examples', 'system', 'ota', 'otatool', 'otatool_example.py') + binary_path = '' for flash_file in dut.app.flash_files: - if "otatool.bin" in flash_file[1]: + if 'otatool.bin' in flash_file[1]: binary_path = flash_file[1] break - subprocess.check_call([sys.executable, script_path, "--binary", binary_path]) + subprocess.check_call([sys.executable, script_path, '--binary', binary_path]) if __name__ == '__main__': diff --git a/examples/system/ota/otatool/get_running_partition.py b/examples/system/ota/otatool/get_running_partition.py index b91d71a6d4..38f5e7a6ea 100644 --- a/examples/system/ota/otatool/get_running_partition.py +++ b/examples/system/ota/otatool/get_running_partition.py @@ -16,34 +16,34 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. -import os -import sys -import serial -import subprocess -import re import argparse - +import os +import re +import subprocess +import sys from subprocess import CalledProcessError +import serial + def get_running_partition(port=None): # Monitor the serial output of target device. The firmware outputs the currently # running partition - IDF_PATH = os.path.expandvars("$IDF_PATH") + IDF_PATH = os.path.expandvars('$IDF_PATH') sys.path.append(os.path.join(IDF_PATH, 'components', 'esptool_py', 'esptool')) import esptool - ESPTOOL_PY = os.path.join(IDF_PATH, "components", "esptool_py", "esptool", "esptool.py") + ESPTOOL_PY = os.path.join(IDF_PATH, 'components', 'esptool_py', 'esptool', 'esptool.py') - baud = os.environ.get("ESPTOOL_BAUD", esptool.ESPLoader.ESP_ROM_BAUD) + baud = os.environ.get('ESPTOOL_BAUD', esptool.ESPLoader.ESP_ROM_BAUD) if not port: - error_message = "Unable to obtain default target device port.\nSerial log:\n\n" + error_message = 'Unable to obtain default target device port.\nSerial log:\n\n' try: # Check what esptool.py finds on what port the device is connected to - output = subprocess.check_output([sys.executable, ESPTOOL_PY, "chip_id"]) # may raise CalledProcessError - pattern = r"Serial port ([\S]+)" + output = subprocess.check_output([sys.executable, ESPTOOL_PY, 'chip_id']) # may raise CalledProcessError + pattern = r'Serial port ([\S]+)' pattern = re.compile(pattern.encode()) port = re.search(pattern, output).group(1) # may raise AttributeError @@ -52,7 +52,7 @@ def get_running_partition(port=None): except AttributeError: raise Exception(error_message + output) - serial_instance = serial.serial_for_url(port.decode("utf-8"), baud, do_not_open=True) + serial_instance = serial.serial_for_url(port.decode('utf-8'), baud, do_not_open=True) serial_instance.dtr = False serial_instance.rts = False @@ -62,16 +62,16 @@ def get_running_partition(port=None): serial_instance.rts = False # Read until example end and find the currently running partition string - content = serial_instance.read_until(b"Example end") - pattern = re.compile(b"Running partition: ([a-z0-9_]+)") + content = serial_instance.read_until(b'Example end') + pattern = re.compile(b'Running partition: ([a-z0-9_]+)') running = re.search(pattern, content).group(1) - return running.decode("utf-8") + return running.decode('utf-8') def main(): parser = argparse.ArgumentParser() - parser.add_argument("--port", default=None) + parser.add_argument('--port', default=None) args = parser.parse_args() try: @@ -83,5 +83,5 @@ def main(): print(res) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/examples/system/ota/otatool/otatool_example.py b/examples/system/ota/otatool/otatool_example.py index b2a464eaec..bfb78f3921 100755 --- a/examples/system/ota/otatool/otatool_example.py +++ b/examples/system/ota/otatool/otatool_example.py @@ -16,16 +16,16 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
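
To summarize the `get_running_partition` helper whose diff ends just above: it opens the board's port with pyserial and scrapes the `Running partition:` line out of the example's output. A condensed sketch of that read path (the port name is an assumption; the real helper autodetects it via `esptool.py chip_id`, and the DTR/RTS handling mirrors what the helper does around opening the port):

```python
import re

import serial  # pyserial, as imported by the helper above

ser = serial.serial_for_url('/dev/ttyUSB0', 115200, do_not_open=True)
ser.dtr = False  # deassert DTR/RTS before opening, as the helper does
ser.rts = False
ser.open()
content = ser.read_until(b'Example end')  # block until the example finishes
match = re.search(rb'Running partition: ([a-z0-9_]+)', content)
print(match.group(1).decode('utf-8'))     # e.g. factory, ota_0, ota_1
```
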
+import argparse
 import os
 import sys
-import argparse
+
 from get_running_partition import get_running_partition
 
 
 def assert_file_same(file1, file2, err):
-    with open(file1, "rb") as f1:
-        with open(file2, "rb") as f2:
+    with open(file1, 'rb') as f1:
+        with open(file2, 'rb') as f2:
             f1 = f1.read()
             f2 = f2.read()
@@ -41,75 +41,75 @@ def assert_file_same(file1, file2, err):
 def assert_running_partition(expected, port=None):
     running = get_running_partition(port)
     if running != expected:
-        raise Exception("Running partition %s does not match expected %s" % (running, expected))
+        raise Exception('Running partition %s does not match expected %s' % (running, expected))
 
 
 def main():
-    COMPONENTS_PATH = os.path.expandvars(os.path.join("$IDF_PATH", "components"))
-    OTATOOL_DIR = os.path.join(COMPONENTS_PATH, "app_update")
+    COMPONENTS_PATH = os.path.expandvars(os.path.join('$IDF_PATH', 'components'))
+    OTATOOL_DIR = os.path.join(COMPONENTS_PATH, 'app_update')
 
     sys.path.append(OTATOOL_DIR)
     from otatool import OtatoolTarget
 
-    parser = argparse.ArgumentParser("ESP-IDF OTA Tool Example")
+    parser = argparse.ArgumentParser('ESP-IDF OTA Tool Example')
 
-    parser.add_argument("--port", "-p", help="port where the device to perform operations on is connected")
-    parser.add_argument("--binary", "-b", help="path to built example binary", default=os.path.join("build", "otatool.bin"))
+    parser.add_argument('--port', '-p', help='port where the device to perform operations on is connected')
+    parser.add_argument('--binary', '-b', help='path to built example binary', default=os.path.join('build', 'otatool.bin'))
     args = parser.parse_args()
 
     target = OtatoolTarget(args.port)
 
-    print("Writing factory firmware to ota_0")
+    print('Writing factory firmware to ota_0')
     target.write_ota_partition(0, args.binary)
 
-    print("Writing factory firmware to ota_1")
-    target.write_ota_partition("ota_1", args.binary)
+    print('Writing factory firmware to ota_1')
+    target.write_ota_partition('ota_1', args.binary)
 
     # Verify that the contents of the two ota slots are the same as that of the factory partition
-    print("Checking written firmware to ota_0 and ota_1 match factory firmware")
-    target.read_ota_partition("ota_0", "app0.bin")
-    target.read_ota_partition(1, "app1.bin")
+    print('Checking written firmware to ota_0 and ota_1 match factory firmware')
+    target.read_ota_partition('ota_0', 'app0.bin')
+    target.read_ota_partition(1, 'app1.bin')
 
-    assert_file_same("app0.bin", args.binary, "Slot 0 app does not match factory app")
-    assert_file_same("app1.bin", args.binary, "Slot 1 app does not match factory app")
+    assert_file_same('app0.bin', args.binary, 'Slot 0 app does not match factory app')
+    assert_file_same('app1.bin', args.binary, 'Slot 1 app does not match factory app')
 
     # Switch to factory app
-    print("Switching to factory app")
+    print('Switching to factory app')
     target.erase_otadata()
-    assert_running_partition("factory")
+    assert_running_partition('factory')
 
     # Switch to slot 0
-    print("Switching to OTA slot 0")
+    print('Switching to OTA slot 0')
     target.switch_ota_partition(0)
-    assert_running_partition("ota_0")
+    assert_running_partition('ota_0')
 
     # Switch to slot 1 twice in a row
-    print("Switching to OTA slot 1 (twice in a row)")
+    print('Switching to OTA slot 1 (twice in a row)')
     target.switch_ota_partition(1)
-    assert_running_partition("ota_1")
-    target.switch_ota_partition("ota_1")
-    assert_running_partition("ota_1")
+    assert_running_partition('ota_1')
+    target.switch_ota_partition('ota_1')
+    assert_running_partition('ota_1')
 
     # Switch to slot 0 twice in a row
-    print("Switching to OTA slot 0 (twice in a row)")
+    print('Switching to OTA slot 0 (twice in a row)')
     target.switch_ota_partition(0)
-    assert_running_partition("ota_0")
-    target.switch_ota_partition("ota_0")
-    assert_running_partition("ota_0")
+    assert_running_partition('ota_0')
+    target.switch_ota_partition('ota_0')
+    assert_running_partition('ota_0')
 
     # Switch to factory app
-    print("Switching to factory app")
+    print('Switching to factory app')
     target.erase_otadata()
-    assert_running_partition("factory")
+    assert_running_partition('factory')
 
     # Switch to slot 1
-    print("Switching to OTA slot 1")
+    print('Switching to OTA slot 1')
     target.switch_ota_partition(1)
-    assert_running_partition("ota_1")
+    assert_running_partition('ota_1')
 
     # Example end and cleanup
-    print("\nOTA tool operations executed successfully!")
-    clean_files = ["app0.bin", "app1.bin"]
+    print('\nOTA tool operations executed successfully!')
+    clean_files = ['app0.bin', 'app1.bin']
     for clean_file in clean_files:
         os.unlink(clean_file)
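The example above drives otatool.py through its Python API rather than the command line. Condensed to its essentials, the flow it walks through looks like this (a sketch using only the methods the example itself calls; the serial port and binary name are assumptions):

    # Hypothetical minimal use of OtatoolTarget; port/file names are placeholders.
    from otatool import OtatoolTarget

    target = OtatoolTarget('/dev/ttyUSB0')           # assumed port name
    target.write_ota_partition(0, 'firmware.bin')    # flash slot ota_0 (int or name accepted)
    target.switch_ota_partition('ota_1')             # boot from ota_1 on next reset
    target.erase_otadata()                           # wipe otadata -> fall back to factory app
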
diff --git a/examples/system/ota/simple_ota_example/example_test.py b/examples/system/ota/simple_ota_example/example_test.py
index f960c9219c..aba06a0045 100644
--- a/examples/system/ota/simple_ota_example/example_test.py
+++ b/examples/system/ota/simple_ota_example/example_test.py
@@ -1,68 +1,68 @@
-import re
-import os
-import socket
 import http.server
-from threading import Thread
+import os
+import re
+import socket
 import ssl
+from threading import Thread
 
-from tiny_test_fw import DUT
 import ttfw_idf
+from tiny_test_fw import DUT
 
-server_cert = "-----BEGIN CERTIFICATE-----\n" \
-              "MIIDXTCCAkWgAwIBAgIJAP4LF7E72HakMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n"\
-              "BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n"\
-              "aWRnaXRzIFB0eSBMdGQwHhcNMTkwNjA3MDk1OTE2WhcNMjAwNjA2MDk1OTE2WjBF\n"\
-              "MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n"\
-              "ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n"\
-              "CgKCAQEAlzfCyv3mIv7TlLkObxunKfCdrJ/zgdANrsx0RBtpEPhV560hWJ0fEin0\n"\
-              "nIOMpJSiF9E6QsPdr6Q+eogH4XnOMU9JE+iG743N1dPfGEzJvRlyct/Ck8SswKPC\n"\
-              "9+VXsnOdZmUw9y/xtANbURA/TspvPzz3Avv382ffffrJGh7ooOmaZSCZFlSYHLZA\n"\
-              "w/XlRr0sSRbLpFGY0gXjaAV8iHHiPDYLy4kZOepjV9U51xi+IGsL4w75zuMgsHyF\n"\
-              "3nJeGYHgtGVBrkL0ZKG5udY0wcBjysjubDJC4iSlNiq2HD3fhs7j6CZddV2v845M\n"\
-              "lVKNxP0kO4Uj4D8r+5USWC8JKfAwxQIDAQABo1AwTjAdBgNVHQ4EFgQU6OE7ssfY\n"\
-              "IIPTDThiUoofUpsD5NwwHwYDVR0jBBgwFoAU6OE7ssfYIIPTDThiUoofUpsD5Nww\n"\
-              "DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAXIlHS/FJWfmcinUAxyBd\n"\
-              "/xd5Lu8ykeru6oaUCci+Vk9lyoMMES7lQ+b/00d5x7AcTawkTil9EWpBTPTOTraA\n"\
-              "lzJMQhNKmSLk0iIoTtAJtSZgUSpIIozqK6lenxQQDsHbXKU6h+u9H6KZE8YcjsFl\n"\
-              "6vL7sw9BVotw/VxfgjQ5OSGLgoLrdVT0z5C2qOuwOgz1c7jNiJhtMdwN+cOtnJp2\n"\
-              "fuBgEYyE3eeuWogvkWoDcIA8r17Ixzkpq2oJsdvZcHZPIZShPKW2SHUsl98KDemu\n"\
-              "y0pQyExmQUbwKE4vbFb9XuWCcL9XaOHQytyszt2DeD67AipvoBwVU7/LBOvqnsmy\n"\
-              "hA==\n"\
-              "-----END CERTIFICATE-----\n"
+server_cert = '-----BEGIN CERTIFICATE-----\n' \
+              'MIIDXTCCAkWgAwIBAgIJAP4LF7E72HakMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV\n'\
+              'BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX\n'\
+              'aWRnaXRzIFB0eSBMdGQwHhcNMTkwNjA3MDk1OTE2WhcNMjAwNjA2MDk1OTE2WjBF\n'\
+              'MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50\n'\
+              'ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB\n'\
+              'CgKCAQEAlzfCyv3mIv7TlLkObxunKfCdrJ/zgdANrsx0RBtpEPhV560hWJ0fEin0\n'\
+              'nIOMpJSiF9E6QsPdr6Q+eogH4XnOMU9JE+iG743N1dPfGEzJvRlyct/Ck8SswKPC\n'\
+              '9+VXsnOdZmUw9y/xtANbURA/TspvPzz3Avv382ffffrJGh7ooOmaZSCZFlSYHLZA\n'\
+              'w/XlRr0sSRbLpFGY0gXjaAV8iHHiPDYLy4kZOepjV9U51xi+IGsL4w75zuMgsHyF\n'\
+              '3nJeGYHgtGVBrkL0ZKG5udY0wcBjysjubDJC4iSlNiq2HD3fhs7j6CZddV2v845M\n'\
+              'lVKNxP0kO4Uj4D8r+5USWC8JKfAwxQIDAQABo1AwTjAdBgNVHQ4EFgQU6OE7ssfY\n'\
+              'IIPTDThiUoofUpsD5NwwHwYDVR0jBBgwFoAU6OE7ssfYIIPTDThiUoofUpsD5Nww\n'\
+              'DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAXIlHS/FJWfmcinUAxyBd\n'\
+              '/xd5Lu8ykeru6oaUCci+Vk9lyoMMES7lQ+b/00d5x7AcTawkTil9EWpBTPTOTraA\n'\
+              'lzJMQhNKmSLk0iIoTtAJtSZgUSpIIozqK6lenxQQDsHbXKU6h+u9H6KZE8YcjsFl\n'\
+              '6vL7sw9BVotw/VxfgjQ5OSGLgoLrdVT0z5C2qOuwOgz1c7jNiJhtMdwN+cOtnJp2\n'\
+              'fuBgEYyE3eeuWogvkWoDcIA8r17Ixzkpq2oJsdvZcHZPIZShPKW2SHUsl98KDemu\n'\
+              'y0pQyExmQUbwKE4vbFb9XuWCcL9XaOHQytyszt2DeD67AipvoBwVU7/LBOvqnsmy\n'\
+              'hA==\n'\
+              '-----END CERTIFICATE-----\n'
 
-server_key = "-----BEGIN PRIVATE KEY-----\n"\
-             "MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCXN8LK/eYi/tOU\n"\
-             "uQ5vG6cp8J2sn/OB0A2uzHREG2kQ+FXnrSFYnR8SKfScg4yklKIX0TpCw92vpD56\n"\
-             "iAfhec4xT0kT6Ibvjc3V098YTMm9GXJy38KTxKzAo8L35Veyc51mZTD3L/G0A1tR\n"\
-             "ED9Oym8/PPcC+/fzZ999+skaHuig6ZplIJkWVJgctkDD9eVGvSxJFsukUZjSBeNo\n"\
-             "BXyIceI8NgvLiRk56mNX1TnXGL4gawvjDvnO4yCwfIXecl4ZgeC0ZUGuQvRkobm5\n"\
-             "1jTBwGPKyO5sMkLiJKU2KrYcPd+GzuPoJl11Xa/zjkyVUo3E/SQ7hSPgPyv7lRJY\n"\
-             "Lwkp8DDFAgMBAAECggEAfBhAfQE7mUByNbxgAgI5fot9eaqR1Nf+QpJ6X2H3KPwC\n"\
-             "02sa0HOwieFwYfj6tB1doBoNq7i89mTc+QUlIn4pHgIowHO0OGawomeKz5BEhjCZ\n"\
-             "4XeLYGSoODary2+kNkf2xY8JTfFEcyvGBpJEwc4S2VyYgRRx+IgnumTSH+N5mIKZ\n"\
-             "SXWNdZIuHEmkwod+rPRXs6/r+PH0eVW6WfpINEbr4zVAGXJx2zXQwd2cuV1GTJWh\n"\
-             "cPVOXLu+XJ9im9B370cYN6GqUnR3fui13urYbnWnEf3syvoH/zuZkyrVChauoFf8\n"\
-             "8EGb74/HhXK7Q2s8NRakx2c7OxQifCbcy03liUMmyQKBgQDFAob5B/66N4Q2cq/N\n"\
-             "MWPf98kYBYoLaeEOhEJhLQlKk0pIFCTmtpmUbpoEes2kCUbH7RwczpYko8tlKyoB\n"\
-             "6Fn6RY4zQQ64KZJI6kQVsjkYpcP/ihnOY6rbds+3yyv+4uPX7Eh9sYZwZMggE19M\n"\
-             "CkFHkwAjiwqhiiSlUxe20sWmowKBgQDEfx4lxuFzA1PBPeZKGVBTxYPQf+DSLCre\n"\
-             "ZFg3ZmrxbCjRq1O7Lra4FXWD3dmRq7NDk79JofoW50yD8wD7I0B7opdDfXD2idO8\n"\
-             "0dBnWUKDr2CAXyoLEINce9kJPbx4kFBQRN9PiGF7VkDQxeQ3kfS8CvcErpTKCOdy\n"\
-             "5wOwBTwJdwKBgDiTFTeGeDv5nVoVbS67tDao7XKchJvqd9q3WGiXikeELJyuTDqE\n"\
-             "zW22pTwMF+m3UEAxcxVCrhMvhkUzNAkANHaOatuFHzj7lyqhO5QPbh4J3FMR0X9X\n"\
-             "V8VWRSg+jA/SECP9koOl6zlzd5Tee0tW1pA7QpryXscs6IEhb3ns5R2JAoGAIkzO\n"\
-             "RmnhEOKTzDex611f2D+yMsMfy5BKK2f4vjLymBH5TiBKDXKqEpgsW0huoi8Gq9Uu\n"\
-             "nvvXXAgkIyRYF36f0vUe0nkjLuYAQAWgC2pZYgNLJR13iVbol0xHJoXQUHtgiaJ8\n"\
-             "GLYFzjHQPqFMpSalQe3oELko39uOC1CoJCHFySECgYBeycUnRBikCO2n8DNhY4Eg\n"\
-             "9Y3oxcssRt6ea5BZwgW2eAYi7/XqKkmxoSoOykUt3MJx9+EkkrL17bxFSpkj1tvL\n"\
-             "qvxn7egtsKjjgGNAxwXC4MwCvhveyUQQxtQb8AqGrGqo4jEEN0L15cnP38i2x1Uo\n"\
-             "muhfskWf4MABV0yTUaKcGg==\n"\
-             "-----END PRIVATE KEY-----\n"
+server_key = '-----BEGIN PRIVATE KEY-----\n'\
+             'MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQCXN8LK/eYi/tOU\n'\
+             'uQ5vG6cp8J2sn/OB0A2uzHREG2kQ+FXnrSFYnR8SKfScg4yklKIX0TpCw92vpD56\n'\
+             'iAfhec4xT0kT6Ibvjc3V098YTMm9GXJy38KTxKzAo8L35Veyc51mZTD3L/G0A1tR\n'\
+             'ED9Oym8/PPcC+/fzZ999+skaHuig6ZplIJkWVJgctkDD9eVGvSxJFsukUZjSBeNo\n'\
+             'BXyIceI8NgvLiRk56mNX1TnXGL4gawvjDvnO4yCwfIXecl4ZgeC0ZUGuQvRkobm5\n'\
+             '1jTBwGPKyO5sMkLiJKU2KrYcPd+GzuPoJl11Xa/zjkyVUo3E/SQ7hSPgPyv7lRJY\n'\
+             'Lwkp8DDFAgMBAAECggEAfBhAfQE7mUByNbxgAgI5fot9eaqR1Nf+QpJ6X2H3KPwC\n'\
+             '02sa0HOwieFwYfj6tB1doBoNq7i89mTc+QUlIn4pHgIowHO0OGawomeKz5BEhjCZ\n'\
+             '4XeLYGSoODary2+kNkf2xY8JTfFEcyvGBpJEwc4S2VyYgRRx+IgnumTSH+N5mIKZ\n'\
+             'SXWNdZIuHEmkwod+rPRXs6/r+PH0eVW6WfpINEbr4zVAGXJx2zXQwd2cuV1GTJWh\n'\
+             'cPVOXLu+XJ9im9B370cYN6GqUnR3fui13urYbnWnEf3syvoH/zuZkyrVChauoFf8\n'\
+             '8EGb74/HhXK7Q2s8NRakx2c7OxQifCbcy03liUMmyQKBgQDFAob5B/66N4Q2cq/N\n'\
+             'MWPf98kYBYoLaeEOhEJhLQlKk0pIFCTmtpmUbpoEes2kCUbH7RwczpYko8tlKyoB\n'\
+             '6Fn6RY4zQQ64KZJI6kQVsjkYpcP/ihnOY6rbds+3yyv+4uPX7Eh9sYZwZMggE19M\n'\
+             'CkFHkwAjiwqhiiSlUxe20sWmowKBgQDEfx4lxuFzA1PBPeZKGVBTxYPQf+DSLCre\n'\
+             'ZFg3ZmrxbCjRq1O7Lra4FXWD3dmRq7NDk79JofoW50yD8wD7I0B7opdDfXD2idO8\n'\
+             '0dBnWUKDr2CAXyoLEINce9kJPbx4kFBQRN9PiGF7VkDQxeQ3kfS8CvcErpTKCOdy\n'\
+             '5wOwBTwJdwKBgDiTFTeGeDv5nVoVbS67tDao7XKchJvqd9q3WGiXikeELJyuTDqE\n'\
+             'zW22pTwMF+m3UEAxcxVCrhMvhkUzNAkANHaOatuFHzj7lyqhO5QPbh4J3FMR0X9X\n'\
+             'V8VWRSg+jA/SECP9koOl6zlzd5Tee0tW1pA7QpryXscs6IEhb3ns5R2JAoGAIkzO\n'\
+             'RmnhEOKTzDex611f2D+yMsMfy5BKK2f4vjLymBH5TiBKDXKqEpgsW0huoi8Gq9Uu\n'\
+             'nvvXXAgkIyRYF36f0vUe0nkjLuYAQAWgC2pZYgNLJR13iVbol0xHJoXQUHtgiaJ8\n'\
+             'GLYFzjHQPqFMpSalQe3oELko39uOC1CoJCHFySECgYBeycUnRBikCO2n8DNhY4Eg\n'\
+             '9Y3oxcssRt6ea5BZwgW2eAYi7/XqKkmxoSoOykUt3MJx9+EkkrL17bxFSpkj1tvL\n'\
+             'qvxn7egtsKjjgGNAxwXC4MwCvhveyUQQxtQb8AqGrGqo4jEEN0L15cnP38i2x1Uo\n'\
+             'muhfskWf4MABV0yTUaKcGg==\n'\
+             '-----END PRIVATE KEY-----\n'
 
 
 def get_my_ip():
     s1 = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
-    s1.connect(("8.8.8.8", 80))
+    s1.connect(('8.8.8.8', 80))
     my_ip = s1.getsockname()[0]
     s1.close()
     return my_ip
@@ -75,13 +75,13 @@ def start_https_server(ota_image_dir, server_ip, server_port):
     #     args = parser.parse_args()
     os.chdir(ota_image_dir)
 
-    server_file = os.path.join(ota_image_dir, "server_cert.pem")
-    cert_file_handle = open(server_file, "w+")
+    server_file = os.path.join(ota_image_dir, 'server_cert.pem')
+    cert_file_handle = open(server_file, 'w+')
     cert_file_handle.write(server_cert)
     cert_file_handle.close()
 
-    key_file = os.path.join(ota_image_dir, "server_key.pem")
-    key_file_handle = open("server_key.pem", "w+")
+    key_file = os.path.join(ota_image_dir, 'server_key.pem')
+    key_file_handle = open('server_key.pem', 'w+')
     key_file_handle.write(server_key)
     key_file_handle.close()
@@ -93,7 +93,7 @@ def start_https_server(ota_image_dir, server_ip, server_port):
     httpd.serve_forever()
 
 
-@ttfw_idf.idf_example_test(env_tag="Example_WIFI")
+@ttfw_idf.idf_example_test(env_tag='Example_WIFI')
 def test_examples_protocol_simple_ota_example(env, extra_data):
     """
     steps: |
@@ -101,33 +101,33 @@ def test_examples_protocol_simple_ota_example(env, extra_data):
       2. Fetch OTA image over HTTPS
       3. Reboot with the new OTA image
     """
-    dut1 = env.get_dut("simple_ota_example", "examples/system/ota/simple_ota_example", dut_class=ttfw_idf.ESP32DUT)
+    dut1 = env.get_dut('simple_ota_example', 'examples/system/ota/simple_ota_example', dut_class=ttfw_idf.ESP32DUT)
     # check and log bin size
-    binary_file = os.path.join(dut1.app.binary_path, "simple_ota.bin")
+    binary_file = os.path.join(dut1.app.binary_path, 'simple_ota.bin')
     bin_size = os.path.getsize(binary_file)
-    ttfw_idf.log_performance("simple_ota_bin_size", "{}KB".format(bin_size // 1024))
+    ttfw_idf.log_performance('simple_ota_bin_size', '{}KB'.format(bin_size // 1024))
     # start test
     host_ip = get_my_ip()
     thread1 = Thread(target=start_https_server, args=(dut1.app.binary_path, host_ip, 8000))
     thread1.daemon = True
     thread1.start()
     dut1.start_app()
-    dut1.expect("Loaded app from partition at offset 0x10000", timeout=30)
+    dut1.expect('Loaded app from partition at offset 0x10000', timeout=30)
     try:
-        ip_address = dut1.expect(re.compile(r" sta ip: ([^,]+),"), timeout=30)
-        print("Connected to AP with IP: {}".format(ip_address))
+        ip_address = dut1.expect(re.compile(r' sta ip: ([^,]+),'), timeout=30)
+        print('Connected to AP with IP: {}'.format(ip_address))
     except DUT.ExpectTimeout:
         raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP')
     thread1.close()
-    dut1.expect("Starting OTA example", timeout=30)
+    dut1.expect('Starting OTA example', timeout=30)
 
-    print("writing to device: {}".format("https://" + host_ip + ":8000/simple_ota.bin"))
-    dut1.write("https://" + host_ip + ":8000/simple_ota.bin")
-    dut1.expect("Loaded app from partition at offset 0x110000", timeout=60)
-    dut1.expect("Starting OTA example", timeout=30)
+    print('writing to device: {}'.format('https://' + host_ip + ':8000/simple_ota.bin'))
+    dut1.write('https://' + host_ip + ':8000/simple_ota.bin')
+    dut1.expect('Loaded app from partition at offset 0x110000', timeout=60)
+    dut1.expect('Starting OTA example', timeout=30)
 
 
-@ttfw_idf.idf_example_test(env_tag="Example_EthKitV1")
+@ttfw_idf.idf_example_test(env_tag='Example_EthKitV1')
 def test_examples_protocol_simple_ota_example_ethernet_with_spiram_config(env, extra_data):
     """
     steps: |
@@ -135,33 +135,33 @@ def test_examples_protocol_simple_ota_example_ethernet_with_spiram_config(env, e
       2. Fetch OTA image over HTTPS
      3. Reboot with the new OTA image
     """
-    dut1 = env.get_dut("simple_ota_example", "examples/system/ota/simple_ota_example", dut_class=ttfw_idf.ESP32DUT, app_config_name='spiram')
+    dut1 = env.get_dut('simple_ota_example', 'examples/system/ota/simple_ota_example', dut_class=ttfw_idf.ESP32DUT, app_config_name='spiram')
     # check and log bin size
-    binary_file = os.path.join(dut1.app.binary_path, "simple_ota.bin")
+    binary_file = os.path.join(dut1.app.binary_path, 'simple_ota.bin')
     bin_size = os.path.getsize(binary_file)
-    ttfw_idf.log_performance("simple_ota_bin_size", "{}KB".format(bin_size // 1024))
+    ttfw_idf.log_performance('simple_ota_bin_size', '{}KB'.format(bin_size // 1024))
     # start test
     host_ip = get_my_ip()
     thread1 = Thread(target=start_https_server, args=(dut1.app.binary_path, host_ip, 8000))
     thread1.daemon = True
     thread1.start()
     dut1.start_app()
-    dut1.expect("Loaded app from partition at offset 0x10000", timeout=30)
+    dut1.expect('Loaded app from partition at offset 0x10000', timeout=30)
     try:
-        ip_address = dut1.expect(re.compile(r" eth ip: ([^,]+),"), timeout=30)
-        print("Connected to AP with IP: {}".format(ip_address))
+        ip_address = dut1.expect(re.compile(r' eth ip: ([^,]+),'), timeout=30)
+        print('Connected to AP with IP: {}'.format(ip_address))
     except DUT.ExpectTimeout:
         raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP')
     thread1.close()
-    dut1.expect("Starting OTA example", timeout=30)
+    dut1.expect('Starting OTA example', timeout=30)
 
-    print("writing to device: {}".format("https://" + host_ip + ":8000/simple_ota.bin"))
-    dut1.write("https://" + host_ip + ":8000/simple_ota.bin")
-    dut1.expect("Loaded app from partition at offset 0x110000", timeout=60)
-    dut1.expect("Starting OTA example", timeout=30)
+    print('writing to device: {}'.format('https://' + host_ip + ':8000/simple_ota.bin'))
+    dut1.write('https://' + host_ip + ':8000/simple_ota.bin')
+    dut1.expect('Loaded app from partition at offset 0x110000', timeout=60)
+    dut1.expect('Starting OTA example', timeout=30)
 
 
-@ttfw_idf.idf_example_test(env_tag="Example_Flash_Encryption_OTA")
+@ttfw_idf.idf_example_test(env_tag='Example_Flash_Encryption_OTA')
 def test_examples_protocol_simple_ota_example_with_flash_encryption(env, extra_data):
     """
     steps: |
@@ -169,13 +169,13 @@ def test_examples_protocol_simple_ota_example_with_flash_encryption(env, extra_d
       2. Fetch OTA image over HTTPS
       3. Reboot with the new OTA image
     """
-    dut1 = env.get_dut("simple_ota_example", "examples/system/ota/simple_ota_example", dut_class=ttfw_idf.ESP32DUT, app_config_name='flash_enc')
+    dut1 = env.get_dut('simple_ota_example', 'examples/system/ota/simple_ota_example', dut_class=ttfw_idf.ESP32DUT, app_config_name='flash_enc')
     # check and log bin size
-    binary_file = os.path.join(dut1.app.binary_path, "simple_ota.bin")
+    binary_file = os.path.join(dut1.app.binary_path, 'simple_ota.bin')
     bin_size = os.path.getsize(binary_file)
-    ttfw_idf.log_performance("simple_ota_bin_size", "{}KB".format(bin_size // 1024))
+    ttfw_idf.log_performance('simple_ota_bin_size', '{}KB'.format(bin_size // 1024))
     # erase flash on the device
-    print("Erasing the flash in order to have an empty NVS key partiton")
+    print('Erasing the flash in order to have an empty NVS key partition')
     dut1.erase_flash()
     # start test
     host_ip = get_my_ip()
@@ -183,21 +183,21 @@
     thread1.daemon = True
     thread1.start()
     dut1.start_app()
-    dut1.expect("Loaded app from partition at offset 0x20000", timeout=30)
-    dut1.expect("Flash encryption mode is DEVELOPMENT (not secure)", timeout=10)
+    dut1.expect('Loaded app from partition at offset 0x20000', timeout=30)
+    dut1.expect('Flash encryption mode is DEVELOPMENT (not secure)', timeout=10)
     try:
-        ip_address = dut1.expect(re.compile(r" eth ip: ([^,]+),"), timeout=30)
-        print("Connected to AP with IP: {}".format(ip_address))
+        ip_address = dut1.expect(re.compile(r' eth ip: ([^,]+),'), timeout=30)
+        print('Connected to AP with IP: {}'.format(ip_address))
     except DUT.ExpectTimeout:
         raise ValueError('ENV_TEST_FAILURE: Cannot connect to AP')
     thread1.close()
-    dut1.expect("Starting OTA example", timeout=30)
+    dut1.expect('Starting OTA example', timeout=30)
 
-    print("writing to device: {}".format("https://" + host_ip + ":8000/simple_ota.bin"))
-    dut1.write("https://" + host_ip + ":8000/simple_ota.bin")
-    dut1.expect("Loaded app from partition at offset 0x120000", timeout=60)
-    dut1.expect("Flash encryption mode is DEVELOPMENT (not secure)", timeout=10)
-    dut1.expect("Starting OTA example", timeout=30)
+    print('writing to device: {}'.format('https://' + host_ip + ':8000/simple_ota.bin'))
+    dut1.write('https://' + host_ip + ':8000/simple_ota.bin')
+    dut1.expect('Loaded app from partition at offset 0x120000', timeout=60)
+    dut1.expect('Flash encryption mode is DEVELOPMENT (not secure)', timeout=10)
+    dut1.expect('Starting OTA example', timeout=30)
 
 
 if __name__ == '__main__':
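start_https_server above writes the PEM pair to disk before calling httpd.serve_forever(); the TLS wrapping itself happens in unchanged lines the hunks do not show. For reference, one way to serve a directory over HTTPS with only the standard library looks like this (a sketch under the assumption that the cert/key file names match the ones the test writes; it is not the test's elided code):

    # Hypothetical standalone HTTPS file server; bind address and port are placeholders.
    import http.server
    import ssl

    httpd = http.server.HTTPServer(('0.0.0.0', 8000), http.server.SimpleHTTPRequestHandler)
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.load_cert_chain(certfile='server_cert.pem', keyfile='server_key.pem')
    httpd.socket = context.wrap_socket(httpd.socket, server_side=True)  # upgrade the listener to TLS
    httpd.serve_forever()
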
diff --git a/examples/system/perfmon/example_test.py b/examples/system/perfmon/example_test.py
index 6ce4f98ffc..0ad3e827d4 100644
--- a/examples/system/perfmon/example_test.py
+++ b/examples/system/perfmon/example_test.py
@@ -1,4 +1,5 @@
 from __future__ import unicode_literals
+
 import ttfw_idf
 
 
diff --git a/examples/system/select/example_test.py b/examples/system/select/example_test.py
index 1a7380307f..d95d3300a8 100644
--- a/examples/system/select/example_test.py
+++ b/examples/system/select/example_test.py
@@ -1,7 +1,9 @@
 from __future__ import unicode_literals
-from tiny_test_fw import Utility
+
 import os
+
 import ttfw_idf
+from tiny_test_fw import Utility
 
 
 def get_socket_msgs(i):
diff --git a/examples/system/sysview_tracing/example_test.py b/examples/system/sysview_tracing/example_test.py
index 666fd4a46e..0e1cea85a7 100644
--- a/examples/system/sysview_tracing/example_test.py
+++ b/examples/system/sysview_tracing/example_test.py
@@ -1,14 +1,16 @@
 from __future__ import unicode_literals
-from io import open
-import debug_backend
+
 import os
 import re
 import tempfile
 import time
+from io import open
+
+import debug_backend
 import ttfw_idf
 
 
-@ttfw_idf.idf_example_test(env_tag="test_jtag_arm")
+@ttfw_idf.idf_example_test(env_tag='test_jtag_arm')
 def test_examples_sysview_tracing(env, extra_data):
 
     rel_project_path = os.path.join('examples', 'system', 'sysview_tracing')
diff --git a/examples/system/sysview_tracing_heap_log/example_test.py b/examples/system/sysview_tracing_heap_log/example_test.py
index 1740025a43..cea4e334ec 100644
--- a/examples/system/sysview_tracing_heap_log/example_test.py
+++ b/examples/system/sysview_tracing_heap_log/example_test.py
@@ -1,13 +1,15 @@
 from __future__ import unicode_literals
-from io import open
-import debug_backend
+
 import os
 import re
 import tempfile
+from io import open
+
+import debug_backend
 import ttfw_idf
 
 
-@ttfw_idf.idf_example_test(env_tag="test_jtag_arm")
+@ttfw_idf.idf_example_test(env_tag='test_jtag_arm')
 def test_examples_sysview_tracing_heap_log(env, extra_data):
 
     rel_project_path = os.path.join('examples', 'system', 'sysview_tracing_heap_log')
diff --git a/examples/system/task_watchdog/example_test.py b/examples/system/task_watchdog/example_test.py
index 95c0555cc5..ba908e6e6f 100644
--- a/examples/system/task_watchdog/example_test.py
+++ b/examples/system/task_watchdog/example_test.py
@@ -1,4 +1,5 @@
 from __future__ import unicode_literals
+
 import ttfw_idf
 
 
diff --git a/examples/system/ulp/example_test.py b/examples/system/ulp/example_test.py
index 11052e0931..7d4c84ebec 100644
--- a/examples/system/ulp/example_test.py
+++ b/examples/system/ulp/example_test.py
@@ -1,8 +1,10 @@
 from __future__ import unicode_literals
-from tiny_test_fw import Utility
+
 import re
 import time
+
 import ttfw_idf
+from tiny_test_fw import Utility
 
 
 @ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
diff --git a/examples/system/ulp_adc/example_test.py b/examples/system/ulp_adc/example_test.py
index 87184ede56..0331823638 100644
--- a/examples/system/ulp_adc/example_test.py
+++ b/examples/system/ulp_adc/example_test.py
@@ -1,7 +1,9 @@
 from __future__ import unicode_literals
-from tiny_test_fw import Utility
+
 import re
+
 import ttfw_idf
+from tiny_test_fw import Utility
 
 
 @ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
diff --git a/examples/system/unit_test/example_test.py b/examples/system/unit_test/example_test.py
index 6313c9e0d6..9fe6cb8d7f 100644
--- a/examples/system/unit_test/example_test.py
+++ b/examples/system/unit_test/example_test.py
@@ -1,5 +1,7 @@
 from __future__ import unicode_literals
+
 import re
+
 import ttfw_idf
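Every hunk above follows the same recipe: __future__ imports first, then standard-library imports, then a blank line, then third-party imports, each group alphabetized. The reordering can be reproduced programmatically; a sketch assuming isort 5, whose Python API exposes isort.code():

    import isort  # assumption: isort >= 5 installed

    messy = ('from __future__ import unicode_literals\n'
             'from tiny_test_fw import Utility\n'
             'import re\n'
             'import ttfw_idf\n')
    print(isort.code(messy))
    # Expected result: __future__ first, stdlib (re) next, then third-party
    # (ttfw_idf, tiny_test_fw) after a blank line, mirroring the hunks above.
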
diff --git a/examples/wifi/iperf/iperf_test.py b/examples/wifi/iperf/iperf_test.py
index b669057c1d..9ac638dc73 100644
--- a/examples/wifi/iperf/iperf_test.py
+++ b/examples/wifi/iperf/iperf_test.py
@@ -19,20 +19,17 @@ The test env Example_ShieldBox do need the following config::
     apc_ip: "192.168.1.88"
     pc_nic: "eth0"
 """
-from __future__ import division
-from __future__ import unicode_literals
-from builtins import str
-from builtins import range
-from builtins import object
-import re
+from __future__ import division, unicode_literals
+
 import os
-import time
+import re
 import subprocess
+import time
+from builtins import object, range, str
 
-from tiny_test_fw import TinyFW, DUT, Utility
 import ttfw_idf
-from idf_iperf_test_util import (Attenuator, PowerControl, LineChart, TestReport)
-
+from idf_iperf_test_util import Attenuator, LineChart, PowerControl, TestReport
+from tiny_test_fw import DUT, TinyFW, Utility
 
 # configurations
 TEST_TIME = TEST_TIMEOUT = 60
@@ -46,20 +43,20 @@ ATTEN_VALUE_LIST = range(0, 60, 2)
 FAILED_TO_SCAN_RSSI = -97
 INVALID_HEAP_SIZE = 0xFFFFFFFF
 
-PC_IPERF_TEMP_LOG_FILE = ".tmp_iperf.log"
-CONFIG_NAME_PATTERN = re.compile(r"sdkconfig\.ci\.(.+)")
+PC_IPERF_TEMP_LOG_FILE = '.tmp_iperf.log'
+CONFIG_NAME_PATTERN = re.compile(r'sdkconfig\.ci\.(.+)')
 
 # We need to auto compare the difference between adjacent configs (01 -> 00, 02 -> 01, ...) and put them to reports.
 # Using numbers for config will make this easy.
 # Use default value `99` for config with best performance.
-BEST_PERFORMANCE_CONFIG = "99"
+BEST_PERFORMANCE_CONFIG = '99'
 
 
 class TestResult(object):
     """ record, analysis test result and convert data to output format """
-    PC_BANDWIDTH_LOG_PATTERN = re.compile(r"(\d+).0\s*-\s*(\d+).0\s+sec\s+[\d.]+\s+MBytes\s+([\d.]+)\s+Mbits/sec")
-    DUT_BANDWIDTH_LOG_PATTERN = re.compile(r"(\d+)-\s+(\d+)\s+sec\s+([\d.]+)\s+Mbits/sec")
+    PC_BANDWIDTH_LOG_PATTERN = re.compile(r'(\d+).0\s*-\s*(\d+).0\s+sec\s+[\d.]+\s+MBytes\s+([\d.]+)\s+Mbits/sec')
+    DUT_BANDWIDTH_LOG_PATTERN = re.compile(r'(\d+)-\s+(\d+)\s+sec\s+([\d.]+)\s+Mbits/sec')
 
     ZERO_POINT_THRESHOLD = -88  # RSSI, dbm
     ZERO_THROUGHPUT_THRESHOLD = -92  # RSSI, dbm
@@ -138,7 +135,7 @@
             if float(result[2]) == 0 and rssi > self.ZERO_POINT_THRESHOLD \
                     and fall_to_0_recorded < 1:
                 # throughput fall to 0 error. we only record 1 records for one test
-                self.error_list.append("[Error][fall to 0][{}][att: {}][rssi: {}]: 0 throughput interval: {}-{}"
+                self.error_list.append('[Error][fall to 0][{}][att: {}][rssi: {}]: 0 throughput interval: {}-{}'
                                        .format(ap_ssid, att, rssi, result[0], result[1]))
                 fall_to_0_recorded += 1
 
@@ -148,7 +145,7 @@
             throughput = 0.0
 
         if throughput == 0 and rssi > self.ZERO_THROUGHPUT_THRESHOLD:
-            self.error_list.append("[Error][Fatal][{}][att: {}][rssi: {}]: No throughput data found"
+            self.error_list.append('[Error][Fatal][{}][att: {}][rssi: {}]: No throughput data found'
                                    .format(ap_ssid, att, rssi))
 
         self._save_result(throughput, ap_ssid, att, rssi, heap_size)
@@ -167,7 +164,7 @@
             result_dict = data[ap_ssid]
             index_list = list(result_dict.keys())
             index_list.sort()
-            if index_type == "att":
+            if index_type == 'att':
                 index_list.reverse()
 
             for i, index_value in enumerate(index_list[1:]):
@@ -176,12 +173,12 @@
                     continue
                 _percentage = result_dict[index_value] / result_dict[index_list[i]]
                 if _percentage < 1 - self.BAD_POINT_PERCENTAGE_THRESHOLD:
-                    self.error_list.append("[Error][Bad point][{}][{}: {}]: drop {:.02f}%"
+                    self.error_list.append('[Error][Bad point][{}][{}: {}]: drop {:.02f}%'
                                            .format(ap_ssid, index_type, index_value,
                                                    (1 - _percentage) * 100))
 
-        analysis_bad_point(self.throughput_by_rssi, "rssi")
-        analysis_bad_point(self.throughput_by_att, "att")
+        analysis_bad_point(self.throughput_by_rssi, 'rssi')
+        analysis_bad_point(self.throughput_by_att, 'att')
 
     def draw_throughput_figure(self, path, ap_ssid, draw_type):
         """
@@ -190,26 +187,26 @@
         :param draw_type: "att" or "rssi"
         :return: file_name
         """
-        if draw_type == "rssi":
-            type_name = "RSSI"
+        if draw_type == 'rssi':
+            type_name = 'RSSI'
             data = self.throughput_by_rssi
             range_list = self.RSSI_RANGE
-        elif draw_type == "att":
-            type_name = "Att"
+        elif draw_type == 'att':
+            type_name = 'Att'
             data = self.throughput_by_att
             range_list = self.ATT_RANGE
         else:
-            raise AssertionError("draw type not supported")
+            raise AssertionError('draw type not supported')
 
         if isinstance(ap_ssid, list):
-            file_name = "ThroughputVs{}_{}_{}_{}.html".format(type_name, self.proto, self.direction,
-                                                              hash(ap_ssid)[:6])
+            file_name = 'ThroughputVs{}_{}_{}_{}.html'.format(type_name, self.proto, self.direction,
+                                                              hash(ap_ssid)[:6])
         else:
-            file_name = "ThroughputVs{}_{}_{}_{}.html".format(type_name, self.proto, self.direction, ap_ssid)
+            file_name = 'ThroughputVs{}_{}_{}_{}.html'.format(type_name, self.proto, self.direction, ap_ssid)
 
         LineChart.draw_line_chart(os.path.join(path, file_name),
-                                  "Throughput Vs {} ({} {})".format(type_name, self.proto, self.direction),
-                                  "{} (dbm)".format(type_name),
-                                  "Throughput (Mbps)",
+                                  'Throughput Vs {} ({} {})'.format(type_name, self.proto, self.direction),
+                                  '{} (dbm)'.format(type_name),
+                                  'Throughput (Mbps)',
                                   data, range_list)
         return file_name
 
@@ -220,13 +217,13 @@
         :return: file_name
         """
         if isinstance(ap_ssid, list):
-            file_name = "AttVsRSSI_{}.html".format(hash(ap_ssid)[:6])
+            file_name = 'AttVsRSSI_{}.html'.format(hash(ap_ssid)[:6])
         else:
-            file_name = "AttVsRSSI_{}.html".format(ap_ssid)
+            file_name = 'AttVsRSSI_{}.html'.format(ap_ssid)
 
         LineChart.draw_line_chart(os.path.join(path, file_name),
-                                  "Att Vs RSSI",
-                                  "Att (dbm)",
-                                  "RSSI (dbm)",
+                                  'Att Vs RSSI',
+                                  'Att (dbm)',
+                                  'RSSI (dbm)',
                                   self.att_rssi_map, self.ATT_RANGE)
         return file_name
 
@@ -246,15 +243,15 @@
         3. min free heap size during test
         """
         if self.throughput_by_att:
-            ret = "[{}_{}][{}]: {}\r\n\r\n".format(self.proto, self.direction, self.config_name,
-                                                   "Fail" if self.error_list else "Success")
-            ret += "Performance for each AP:\r\n"
+            ret = '[{}_{}][{}]: {}\r\n\r\n'.format(self.proto, self.direction, self.config_name,
+                                                   'Fail' if self.error_list else 'Success')
+            ret += 'Performance for each AP:\r\n'
             for ap_ssid in self.throughput_by_att:
-                ret += "[{}]: {:.02f} Mbps\r\n".format(ap_ssid, max(self.throughput_by_att[ap_ssid].values()))
+                ret += '[{}]: {:.02f} Mbps\r\n'.format(ap_ssid, max(self.throughput_by_att[ap_ssid].values()))
             if self.heap_size != INVALID_HEAP_SIZE:
-                ret += "Minimum heap size: {}".format(self.heap_size)
+                ret += 'Minimum heap size: {}'.format(self.heap_size)
         else:
-            ret = ""
+            ret = ''
         return ret
 
@@ -275,10 +272,10 @@
             self.test_result = test_result
         else:
             self.test_result = {
-                "tcp_tx": TestResult("tcp", "tx", config_name),
-                "tcp_rx": TestResult("tcp", "rx", config_name),
-                "udp_tx": TestResult("udp", "tx", config_name),
-                "udp_rx": TestResult("udp", "rx", config_name),
+                'tcp_tx': TestResult('tcp', 'tx', config_name),
+                'tcp_rx': TestResult('tcp', 'rx', config_name),
+                'udp_tx': TestResult('udp', 'tx', config_name),
+                'udp_rx': TestResult('udp', 'rx', config_name),
             }
 
     def setup(self):
@@ -291,23 +288,23 @@
         4. connect to AP
         """
         try:
-            subprocess.check_output("sudo killall iperf 2>&1 > /dev/null", shell=True)
+            subprocess.check_output('sudo killall iperf 2>&1 > /dev/null', shell=True)
         except subprocess.CalledProcessError:
             pass
-        self.dut.write("restart")
-        self.dut.expect_any("iperf>", "esp32>")
-        self.dut.write("scan {}".format(self.ap_ssid))
+        self.dut.write('restart')
+        self.dut.expect_any('iperf>', 'esp32>')
+        self.dut.write('scan {}'.format(self.ap_ssid))
         for _ in range(SCAN_RETRY_COUNT):
             try:
-                rssi = int(self.dut.expect(re.compile(r"\[{}]\[rssi=(-\d+)]".format(self.ap_ssid)),
+                rssi = int(self.dut.expect(re.compile(r'\[{}]\[rssi=(-\d+)]'.format(self.ap_ssid)),
                                            timeout=SCAN_TIMEOUT)[0])
                 break
             except DUT.ExpectTimeout:
                 continue
         else:
-            raise AssertionError("Failed to scan AP")
-        self.dut.write("sta {} {}".format(self.ap_ssid, self.ap_password))
-        dut_ip = self.dut.expect(re.compile(r"sta ip: ([\d.]+), mask: ([\d.]+), gw: ([\d.]+)"))[0]
+            raise AssertionError('Failed to scan AP')
+        self.dut.write('sta {} {}'.format(self.ap_ssid, self.ap_password))
+        dut_ip = self.dut.expect(re.compile(r'sta ip: ([\d.]+), mask: ([\d.]+), gw: ([\d.]+)'))[0]
         return dut_ip, rssi
 
     def _save_test_result(self, test_case, raw_data, att, rssi, heap_size):
@@ -318,22 +315,22 @@
         # connect and scan to get RSSI
         dut_ip, rssi = self.setup()
 
-        assert direction in ["rx", "tx"]
-        assert proto in ["tcp", "udp"]
+        assert direction in ['rx', 'tx']
+        assert proto in ['tcp', 'udp']
 
         # run iperf test
-        if direction == "tx":
-            with open(PC_IPERF_TEMP_LOG_FILE, "w") as f:
-                if proto == "tcp":
-                    process = subprocess.Popen(["iperf", "-s", "-B", self.pc_nic_ip,
-                                                "-t", str(TEST_TIME), "-i", "1", "-f", "m"],
+        if direction == 'tx':
+            with open(PC_IPERF_TEMP_LOG_FILE, 'w') as f:
+                if proto == 'tcp':
+                    process = subprocess.Popen(['iperf', '-s', '-B', self.pc_nic_ip,
+                                                '-t', str(TEST_TIME), '-i', '1', '-f', 'm'],
                                                stdout=f, stderr=f)
-                    self.dut.write("iperf -c {} -i 1 -t {}".format(self.pc_nic_ip, TEST_TIME))
+                    self.dut.write('iperf -c {} -i 1 -t {}'.format(self.pc_nic_ip, TEST_TIME))
                 else:
-                    process = subprocess.Popen(["iperf", "-s", "-u", "-B", self.pc_nic_ip,
-                                                "-t", str(TEST_TIME), "-i", "1", "-f", "m"],
+                    process = subprocess.Popen(['iperf', '-s', '-u', '-B', self.pc_nic_ip,
+                                                '-t', str(TEST_TIME), '-i', '1', '-f', 'm'],
                                                stdout=f, stderr=f)
-                    self.dut.write("iperf -c {} -u -i 1 -t {}".format(self.pc_nic_ip, TEST_TIME))
+                    self.dut.write('iperf -c {} -u -i 1 -t {}'.format(self.pc_nic_ip, TEST_TIME))
 
                 for _ in range(TEST_TIMEOUT):
                     if process.poll() is not None:
@@ -342,25 +339,25 @@
                 else:
                     process.terminate()
 
-            with open(PC_IPERF_TEMP_LOG_FILE, "r") as f:
+            with open(PC_IPERF_TEMP_LOG_FILE, 'r') as f:
                 pc_raw_data = server_raw_data = f.read()
         else:
-            with open(PC_IPERF_TEMP_LOG_FILE, "w") as f:
-                if proto == "tcp":
-                    self.dut.write("iperf -s -i 1 -t {}".format(TEST_TIME))
+            with open(PC_IPERF_TEMP_LOG_FILE, 'w') as f:
+                if proto == 'tcp':
+                    self.dut.write('iperf -s -i 1 -t {}'.format(TEST_TIME))
                     # wait until DUT TCP server created
                     try:
-                        self.dut.expect("iperf tcp server create successfully", timeout=1)
+                        self.dut.expect('iperf tcp server create successfully', timeout=1)
                     except DUT.ExpectTimeout:
                         # compatible with old iperf example binary
                         pass
-                    process = subprocess.Popen(["iperf", "-c", dut_ip,
-                                                "-t", str(TEST_TIME), "-f", "m"],
+                    process = subprocess.Popen(['iperf', '-c', dut_ip,
+                                                '-t', str(TEST_TIME), '-f', 'm'],
                                                stdout=f, stderr=f)
                 else:
-                    self.dut.write("iperf -s -u -i 1 -t {}".format(TEST_TIME))
-                    process = subprocess.Popen(["iperf", "-c", dut_ip, "-u", "-b", "100M",
-                                                "-t", str(TEST_TIME), "-f", "m"],
+                    self.dut.write('iperf -s -u -i 1 -t {}'.format(TEST_TIME))
+                    process = subprocess.Popen(['iperf', '-c', dut_ip, '-u', '-b', '100M',
+                                                '-t', str(TEST_TIME), '-f', 'm'],
                                                stdout=f, stderr=f)
 
                 for _ in range(TEST_TIMEOUT):
@@ -371,18 +368,18 @@
                     process.terminate()
 
             server_raw_data = self.dut.read()
-            with open(PC_IPERF_TEMP_LOG_FILE, "r") as f:
+            with open(PC_IPERF_TEMP_LOG_FILE, 'r') as f:
                 pc_raw_data = f.read()
 
         # save PC iperf logs to console
-        with open(self.pc_iperf_log_file, "a+") as f:
-            f.write("## [{}] `{}`\r\n##### {}"
+        with open(self.pc_iperf_log_file, 'a+') as f:
+            f.write('## [{}] `{}`\r\n##### {}'
                     .format(self.config_name,
-                            "{}_{}".format(proto, direction),
-                            time.strftime("%m-%d %H:%M:%S", time.localtime(time.time()))))
+                            '{}_{}'.format(proto, direction),
+                            time.strftime('%m-%d %H:%M:%S', time.localtime(time.time()))))
             f.write('\r\n```\r\n\r\n' + pc_raw_data + '\r\n```\r\n')
-        self.dut.write("heap")
-        heap_size = self.dut.expect(re.compile(r"min heap size: (\d+)\D"))[0]
+        self.dut.write('heap')
+        heap_size = self.dut.expect(re.compile(r'min heap size: (\d+)\D'))[0]
 
         # return server raw data (for parsing test results) and RSSI
         return server_raw_data, rssi, heap_size
@@ -399,14 +396,14 @@
         heap_size = INVALID_HEAP_SIZE
         try:
             server_raw_data, rssi, heap_size = self._test_once(proto, direction)
-            throughput = self._save_test_result("{}_{}".format(proto, direction),
+            throughput = self._save_test_result('{}_{}'.format(proto, direction),
                                                 server_raw_data, atten_val, rssi, heap_size)
-            Utility.console_log("[{}][{}_{}][{}][{}]: {:.02f}"
+            Utility.console_log('[{}][{}_{}][{}][{}]: {:.02f}'
                                 .format(self.config_name, proto, direction, rssi, self.ap_ssid, throughput))
         except Exception as e:
-            self._save_test_result("{}_{}".format(proto, direction), "", atten_val, rssi, heap_size)
-            Utility.console_log("Failed during test: {}".format(e))
+            self._save_test_result('{}_{}'.format(proto, direction), '', atten_val, rssi, heap_size)
+            Utility.console_log('Failed during test: {}'.format(e))
 
     def run_all_cases(self, atten_val):
         """
@@ -414,10 +411,10 @@
         :param atten_val: attenuate value
         """
-        self.run_test("tcp", "tx", atten_val)
-        self.run_test("tcp", "rx", atten_val)
-        self.run_test("udp", "tx", atten_val)
-        self.run_test("udp", "rx", atten_val)
+        self.run_test('tcp', 'tx', atten_val)
+        self.run_test('tcp', 'rx', atten_val)
+        self.run_test('udp', 'tx', atten_val)
+        self.run_test('udp', 'rx', atten_val)
 
     def wait_ap_power_on(self):
         """
@@ -426,12 +423,12 @@
         :return: True or False
         """
-        self.dut.write("restart")
-        self.dut.expect_any("iperf>", "esp32>")
+        self.dut.write('restart')
+        self.dut.expect_any('iperf>', 'esp32>')
         for _ in range(WAIT_AP_POWER_ON_TIMEOUT // SCAN_TIMEOUT):
             try:
-                self.dut.write("scan {}".format(self.ap_ssid))
-                self.dut.expect(re.compile(r"\[{}]\[rssi=(-\d+)]".format(self.ap_ssid)),
+                self.dut.write('scan {}'.format(self.ap_ssid))
+                self.dut.expect(re.compile(r'\[{}]\[rssi=(-\d+)]'.format(self.ap_ssid)),
                                 timeout=SCAN_TIMEOUT)
                 ret = True
                 break
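Both setup() and wait_ap_power_on() above lean on Python's for/else: the else branch runs only when the loop completes without a break, i.e. when every scan retry timed out. A standalone sketch of the pattern (the retry count and fake scan are assumptions for illustration):

    SCAN_RETRY_COUNT = 3

    def fake_scan(attempt):  # stand-in for dut.expect(...); succeeds on the last try
        if attempt < SCAN_RETRY_COUNT - 1:
            raise TimeoutError
        return -45  # pretend RSSI in dbm

    for attempt in range(SCAN_RETRY_COUNT):
        try:
            rssi = fake_scan(attempt)
            break  # a successful scan skips the else branch
        except TimeoutError:
            continue
    else:
        raise AssertionError('Failed to scan AP')  # reached only if all retries timed out
    print(rssi)  # -45
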
@@ -458,23 +455,23 @@ class IperfTestUtilitySoftap(IperfTestUtility):
         3. scan to get AP RSSI
         4. connect to AP
         """
-        self.softap_dut.write("restart")
-        self.softap_dut.expect_any("iperf>", "esp32>", timeout=30)
-        self.softap_dut.write("ap {} {}".format(self.ap_ssid, self.ap_password))
-        self.dut.write("restart")
-        self.dut.expect_any("iperf>", "esp32>", timeout=30)
-        self.dut.write("scan {}".format(self.ap_ssid))
+        self.softap_dut.write('restart')
+        self.softap_dut.expect_any('iperf>', 'esp32>', timeout=30)
+        self.softap_dut.write('ap {} {}'.format(self.ap_ssid, self.ap_password))
+        self.dut.write('restart')
+        self.dut.expect_any('iperf>', 'esp32>', timeout=30)
+        self.dut.write('scan {}'.format(self.ap_ssid))
         for _ in range(SCAN_RETRY_COUNT):
             try:
-                rssi = int(self.dut.expect(re.compile(r"\[{}]\[rssi=(-\d+)]".format(self.ap_ssid)),
+                rssi = int(self.dut.expect(re.compile(r'\[{}]\[rssi=(-\d+)]'.format(self.ap_ssid)),
                                            timeout=SCAN_TIMEOUT)[0])
                 break
             except DUT.ExpectTimeout:
                 continue
         else:
-            raise AssertionError("Failed to scan AP")
-        self.dut.write("sta {} {}".format(self.ap_ssid, self.ap_password))
-        dut_ip = self.dut.expect(re.compile(r"sta ip: ([\d.]+), mask: ([\d.]+), gw: ([\d.]+)"))[0]
+            raise AssertionError('Failed to scan AP')
+        self.dut.write('sta {} {}'.format(self.ap_ssid, self.ap_password))
+        dut_ip = self.dut.expect(re.compile(r'sta ip: ([\d.]+), mask: ([\d.]+), gw: ([\d.]+)'))[0]
         return dut_ip, rssi
 
     def _test_once(self, proto, direction):
@@ -482,69 +479,69 @@ class IperfTestUtilitySoftap(IperfTestUtility):
         # connect and scan to get RSSI
         dut_ip, rssi = self.setup()
 
-        assert direction in ["rx", "tx"]
-        assert proto in ["tcp", "udp"]
+        assert direction in ['rx', 'tx']
+        assert proto in ['tcp', 'udp']
 
         # run iperf test
-        if direction == "tx":
-            if proto == "tcp":
-                self.softap_dut.write("iperf -s -i 1 -t {}".format(TEST_TIME))
+        if direction == 'tx':
+            if proto == 'tcp':
+                self.softap_dut.write('iperf -s -i 1 -t {}'.format(TEST_TIME))
                 # wait until DUT TCP server created
                 try:
-                    self.softap_dut.expect("iperf tcp server create successfully", timeout=1)
+                    self.softap_dut.expect('iperf tcp server create successfully', timeout=1)
                 except DUT.ExpectTimeout:
                     # compatible with old iperf example binary
                     pass
-                self.dut.write("iperf -c {} -i 1 -t {}".format(self.softap_ip, TEST_TIME))
+                self.dut.write('iperf -c {} -i 1 -t {}'.format(self.softap_ip, TEST_TIME))
             else:
-                self.softap_dut.write("iperf -s -u -i 1 -t {}".format(TEST_TIME))
-                self.dut.write("iperf -c {} -u -i 1 -t {}".format(self.softap_ip, TEST_TIME))
+                self.softap_dut.write('iperf -s -u -i 1 -t {}'.format(TEST_TIME))
+                self.dut.write('iperf -c {} -u -i 1 -t {}'.format(self.softap_ip, TEST_TIME))
         else:
-            if proto == "tcp":
-                self.dut.write("iperf -s -i 1 -t {}".format(TEST_TIME))
+            if proto == 'tcp':
+                self.dut.write('iperf -s -i 1 -t {}'.format(TEST_TIME))
                 # wait until DUT TCP server created
                 try:
-                    self.dut.expect("iperf tcp server create successfully", timeout=1)
+                    self.dut.expect('iperf tcp server create successfully', timeout=1)
                 except DUT.ExpectTimeout:
                     # compatible with old iperf example binary
                     pass
-                self.softap_dut.write("iperf -c {} -i 1 -t {}".format(dut_ip, TEST_TIME))
+                self.softap_dut.write('iperf -c {} -i 1 -t {}'.format(dut_ip, TEST_TIME))
             else:
-                self.dut.write("iperf -s -u -i 1 -t {}".format(TEST_TIME))
-                self.softap_dut.write("iperf -c {} -u -i 1 -t {}".format(dut_ip, TEST_TIME))
+                self.dut.write('iperf -s -u -i 1 -t {}'.format(TEST_TIME))
+                self.softap_dut.write('iperf -c {} -u -i 1 -t {}'.format(dut_ip, TEST_TIME))
         time.sleep(60)
 
-        if direction == "tx":
+        if direction == 'tx':
             server_raw_data = self.dut.read()
         else:
             server_raw_data = self.softap_dut.read()
-        self.dut.write("iperf -a")
-        self.softap_dut.write("iperf -a")
-        self.dut.write("heap")
-        heap_size = self.dut.expect(re.compile(r"min heap size: (\d+)\D"))[0]
+        self.dut.write('iperf -a')
+        self.softap_dut.write('iperf -a')
+        self.dut.write('heap')
+        heap_size = self.dut.expect(re.compile(r'min heap size: (\d+)\D'))[0]
 
         # return server raw data (for parsing test results) and RSSI
         return server_raw_data, rssi, heap_size
 
 
-@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox_Basic", category="stress")
+@ttfw_idf.idf_example_test(env_tag='Example_ShieldBox_Basic', category='stress')
 def test_wifi_throughput_with_different_configs(env, extra_data):
     """
     steps: |
         1. build iperf with specified configs
         2. test throughput for all routers
     """
-    pc_nic_ip = env.get_pc_nic_info("pc_nic", "ipv4")["addr"]
-    pc_iperf_log_file = os.path.join(env.log_path, "pc_iperf_log.md")
+    pc_nic_ip = env.get_pc_nic_info('pc_nic', 'ipv4')['addr']
+    pc_iperf_log_file = os.path.join(env.log_path, 'pc_iperf_log.md')
     ap_info = {
-        "ssid": env.get_variable("ap_ssid"),
-        "password": env.get_variable("ap_password"),
+        'ssid': env.get_variable('ap_ssid'),
+        'password': env.get_variable('ap_password'),
     }
 
-    config_names_raw = subprocess.check_output(["ls", os.path.dirname(os.path.abspath(__file__))])
+    config_names_raw = subprocess.check_output(['ls', os.path.dirname(os.path.abspath(__file__))])
 
     config_names = CONFIG_NAME_PATTERN.findall(config_names_raw)
 
     if not config_names:
-        raise ValueError("no configs found in {}".format(os.path.dirname(__file__)))
+        raise ValueError('no configs found in {}'.format(os.path.dirname(__file__)))
 
     test_result = dict()
     sdkconfig_files = dict()
@@ -552,24 +549,24 @@
     for config_name in config_names:
         # 1. get the config
         sdkconfig_files[config_name] = os.path.join(os.path.dirname(__file__),
-                                                    "sdkconfig.ci.{}".format(config_name))
+                                                    'sdkconfig.ci.{}'.format(config_name))
 
         # 2. get DUT and download
-        dut = env.get_dut("iperf", "examples/wifi/iperf", dut_class=ttfw_idf.ESP32DUT,
+        dut = env.get_dut('iperf', 'examples/wifi/iperf', dut_class=ttfw_idf.ESP32DUT,
                           app_config_name=config_name)
         dut.start_app()
-        dut.expect_any("iperf>", "esp32>")
+        dut.expect_any('iperf>', 'esp32>')
 
         # 3. run test for each required att value
         test_result[config_name] = {
-            "tcp_tx": TestResult("tcp", "tx", config_name),
-            "tcp_rx": TestResult("tcp", "rx", config_name),
-            "udp_tx": TestResult("udp", "tx", config_name),
-            "udp_rx": TestResult("udp", "rx", config_name),
+            'tcp_tx': TestResult('tcp', 'tx', config_name),
+            'tcp_rx': TestResult('tcp', 'rx', config_name),
+            'udp_tx': TestResult('udp', 'tx', config_name),
+            'udp_rx': TestResult('udp', 'rx', config_name),
         }
 
-        test_utility = IperfTestUtility(dut, config_name, ap_info["ssid"],
-                                        ap_info["password"], pc_nic_ip, pc_iperf_log_file, test_result[config_name])
+        test_utility = IperfTestUtility(dut, config_name, ap_info['ssid'],
+                                        ap_info['password'], pc_nic_ip, pc_iperf_log_file, test_result[config_name])
 
         for _ in range(RETRY_COUNT_FOR_BEST_PERFORMANCE):
             test_utility.run_all_cases(0)
@@ -577,18 +574,18 @@
         for result_type in test_result[config_name]:
             summary = str(test_result[config_name][result_type])
             if summary:
-                Utility.console_log(summary, color="orange")
+                Utility.console_log(summary, color='orange')
 
     # 4. check test results
-    env.close_dut("iperf")
+    env.close_dut('iperf')
 
     # 5. generate report
-    report = TestReport.ThroughputForConfigsReport(os.path.join(env.log_path, "ThroughputForConfigsReport"),
-                                                   ap_info["ssid"], test_result, sdkconfig_files)
+    report = TestReport.ThroughputForConfigsReport(os.path.join(env.log_path, 'ThroughputForConfigsReport'),
+                                                   ap_info['ssid'], test_result, sdkconfig_files)
     report.generate_report()
 
 
-@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox", category="stress")
+@ttfw_idf.idf_example_test(env_tag='Example_ShieldBox', category='stress')
 def test_wifi_throughput_vs_rssi(env, extra_data):
     """
     steps: |
@@ -597,37 +594,37 @@
         3. set attenuator value from 0-60 for each router
        4. test TCP tx rx and UDP tx rx throughput
     """
-    att_port = env.get_variable("attenuator_port")
-    ap_list = env.get_variable("ap_list")
-    pc_nic_ip = env.get_pc_nic_info("pc_nic", "ipv4")["addr"]
-    apc_ip = env.get_variable("apc_ip")
-    pc_iperf_log_file = os.path.join(env.log_path, "pc_iperf_log.md")
+    att_port = env.get_variable('attenuator_port')
+    ap_list = env.get_variable('ap_list')
+    pc_nic_ip = env.get_pc_nic_info('pc_nic', 'ipv4')['addr']
+    apc_ip = env.get_variable('apc_ip')
+    pc_iperf_log_file = os.path.join(env.log_path, 'pc_iperf_log.md')
 
     test_result = {
-        "tcp_tx": TestResult("tcp", "tx", BEST_PERFORMANCE_CONFIG),
-        "tcp_rx": TestResult("tcp", "rx", BEST_PERFORMANCE_CONFIG),
-        "udp_tx": TestResult("udp", "tx", BEST_PERFORMANCE_CONFIG),
-        "udp_rx": TestResult("udp", "rx", BEST_PERFORMANCE_CONFIG),
+        'tcp_tx': TestResult('tcp', 'tx', BEST_PERFORMANCE_CONFIG),
+        'tcp_rx': TestResult('tcp', 'rx', BEST_PERFORMANCE_CONFIG),
+        'udp_tx': TestResult('udp', 'tx', BEST_PERFORMANCE_CONFIG),
+        'udp_rx': TestResult('udp', 'rx', BEST_PERFORMANCE_CONFIG),
    }
 
     # 1. get DUT and download
-    dut = env.get_dut("iperf", "examples/wifi/iperf", dut_class=ttfw_idf.ESP32DUT,
+    dut = env.get_dut('iperf', 'examples/wifi/iperf', dut_class=ttfw_idf.ESP32DUT,
                       app_config_name=BEST_PERFORMANCE_CONFIG)
     dut.start_app()
-    dut.expect_any("iperf>", "esp32>")
+    dut.expect_any('iperf>', 'esp32>')
 
     # 2. run test for each required att value
     for ap_info in ap_list:
-        test_utility = IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info["ssid"], ap_info["password"],
+        test_utility = IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info['ssid'], ap_info['password'],
                                         pc_nic_ip, pc_iperf_log_file, test_result)
 
-        PowerControl.Control.control_rest(apc_ip, ap_info["outlet"], "OFF")
-        PowerControl.Control.control(apc_ip, {ap_info["outlet"]: "ON"})
+        PowerControl.Control.control_rest(apc_ip, ap_info['outlet'], 'OFF')
+        PowerControl.Control.control(apc_ip, {ap_info['outlet']: 'ON'})
         Attenuator.set_att(att_port, 0)
 
         if not test_utility.wait_ap_power_on():
-            Utility.console_log("[{}] failed to power on, skip testing this AP"
-                                .format(ap_info["ssid"]), color="red")
+            Utility.console_log('[{}] failed to power on, skip testing this AP'
+                                .format(ap_info['ssid']), color='red')
             continue
 
         for atten_val in ATTEN_VALUE_LIST:
@@ -635,44 +632,44 @@
             test_utility.run_all_cases(atten_val)
 
     # 3. check test results
-    env.close_dut("iperf")
+    env.close_dut('iperf')
 
    # 4. generate report
-    report = TestReport.ThroughputVsRssiReport(os.path.join(env.log_path, "STAThroughputVsRssiReport"),
+    report = TestReport.ThroughputVsRssiReport(os.path.join(env.log_path, 'STAThroughputVsRssiReport'),
                                                test_result)
     report.generate_report()
 
 
-@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox_Basic")
+@ttfw_idf.idf_example_test(env_tag='Example_ShieldBox_Basic')
 def test_wifi_throughput_basic(env, extra_data):
     """
     steps: |
         1. test TCP tx rx and UDP tx rx throughput
         2. compare with the pre-defined pass standard
     """
-    pc_nic_ip = env.get_pc_nic_info("pc_nic", "ipv4")["addr"]
-    pc_iperf_log_file = os.path.join(env.log_path, "pc_iperf_log.md")
+    pc_nic_ip = env.get_pc_nic_info('pc_nic', 'ipv4')['addr']
+    pc_iperf_log_file = os.path.join(env.log_path, 'pc_iperf_log.md')
     ap_info = {
-        "ssid": env.get_variable("ap_ssid"),
-        "password": env.get_variable("ap_password"),
+        'ssid': env.get_variable('ap_ssid'),
+        'password': env.get_variable('ap_password'),
     }
 
     # 1. get DUT
-    dut = env.get_dut("iperf", "examples/wifi/iperf", dut_class=ttfw_idf.ESP32DUT,
+    dut = env.get_dut('iperf', 'examples/wifi/iperf', dut_class=ttfw_idf.ESP32DUT,
                       app_config_name=BEST_PERFORMANCE_CONFIG)
     dut.start_app()
-    dut.expect_any("iperf>", "esp32>")
+    dut.expect_any('iperf>', 'esp32>')
 
     # 2. preparing
     test_result = {
-        "tcp_tx": TestResult("tcp", "tx", BEST_PERFORMANCE_CONFIG),
-        "tcp_rx": TestResult("tcp", "rx", BEST_PERFORMANCE_CONFIG),
-        "udp_tx": TestResult("udp", "tx", BEST_PERFORMANCE_CONFIG),
-        "udp_rx": TestResult("udp", "rx", BEST_PERFORMANCE_CONFIG),
+        'tcp_tx': TestResult('tcp', 'tx', BEST_PERFORMANCE_CONFIG),
+        'tcp_rx': TestResult('tcp', 'rx', BEST_PERFORMANCE_CONFIG),
+        'udp_tx': TestResult('udp', 'tx', BEST_PERFORMANCE_CONFIG),
+        'udp_rx': TestResult('udp', 'rx', BEST_PERFORMANCE_CONFIG),
     }
 
-    test_utility = IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info["ssid"],
-                                    ap_info["password"], pc_nic_ip, pc_iperf_log_file, test_result)
+    test_utility = IperfTestUtility(dut, BEST_PERFORMANCE_CONFIG, ap_info['ssid'],
+                                    ap_info['password'], pc_nic_ip, pc_iperf_log_file, test_result)
 
     # 3. run test for TCP Tx, Rx and UDP Tx, Rx
     for _ in range(RETRY_COUNT_FOR_BEST_PERFORMANCE):
@@ -681,22 +678,22 @@
     # 4. log performance and compare with pass standard
     performance_items = []
     for throughput_type in test_result:
-        ttfw_idf.log_performance("{}_throughput".format(throughput_type),
-                                 "{:.02f} Mbps".format(test_result[throughput_type].get_best_throughput()))
-        performance_items.append(["{}_throughput".format(throughput_type),
-                                  "{:.02f} Mbps".format(test_result[throughput_type].get_best_throughput())])
+        ttfw_idf.log_performance('{}_throughput'.format(throughput_type),
+                                 '{:.02f} Mbps'.format(test_result[throughput_type].get_best_throughput()))
+        performance_items.append(['{}_throughput'.format(throughput_type),
+                                  '{:.02f} Mbps'.format(test_result[throughput_type].get_best_throughput())])
 
     # 5. save to report
     TinyFW.JunitReport.update_performance(performance_items)
     # do check after logging, otherwise test will exit immediately if check fail, some performance can't be logged.
     for throughput_type in test_result:
-        ttfw_idf.check_performance("{}_throughput".format(throughput_type),
+        ttfw_idf.check_performance('{}_throughput'.format(throughput_type),
                                    test_result[throughput_type].get_best_throughput(), dut.TARGET)
 
-    env.close_dut("iperf")
+    env.close_dut('iperf')
 
 
-@ttfw_idf.idf_example_test(env_tag="Example_ShieldBox2", category="stress")
+@ttfw_idf.idf_example_test(env_tag='Example_ShieldBox2', category='stress')
 def test_softap_throughput_vs_rssi(env, extra_data):
     """
     steps: |
@@ -705,25 +702,25 @@
         3. set attenuator value from 0-60 for each router
         4. test TCP tx rx and UDP tx rx throughput
     """
-    att_port = env.get_variable("attenuator_port")
+    att_port = env.get_variable('attenuator_port')
 
     test_result = {
-        "tcp_tx": TestResult("tcp", "tx", BEST_PERFORMANCE_CONFIG),
-        "tcp_rx": TestResult("tcp", "rx", BEST_PERFORMANCE_CONFIG),
-        "udp_tx": TestResult("udp", "tx", BEST_PERFORMANCE_CONFIG),
-        "udp_rx": TestResult("udp", "rx", BEST_PERFORMANCE_CONFIG),
+        'tcp_tx': TestResult('tcp', 'tx', BEST_PERFORMANCE_CONFIG),
+        'tcp_rx': TestResult('tcp', 'rx', BEST_PERFORMANCE_CONFIG),
+        'udp_tx': TestResult('udp', 'tx', BEST_PERFORMANCE_CONFIG),
+        'udp_rx': TestResult('udp', 'rx', BEST_PERFORMANCE_CONFIG),
     }
 
     # 1. get DUT and download
-    softap_dut = env.get_dut("softap_iperf", "examples/wifi/iperf", dut_class=ttfw_idf.ESP32DUT,
+    softap_dut = env.get_dut('softap_iperf', 'examples/wifi/iperf', dut_class=ttfw_idf.ESP32DUT,
                              app_config_name=BEST_PERFORMANCE_CONFIG)
     softap_dut.start_app()
-    softap_dut.expect_any("iperf>", "esp32>")
+    softap_dut.expect_any('iperf>', 'esp32>')
 
-    sta_dut = env.get_dut("sta_iperf", "examples/wifi/iperf", dut_class=ttfw_idf.ESP32DUT,
+    sta_dut = env.get_dut('sta_iperf', 'examples/wifi/iperf', dut_class=ttfw_idf.ESP32DUT,
                           app_config_name=BEST_PERFORMANCE_CONFIG)
     sta_dut.start_app()
-    sta_dut.expect_any("iperf>", "esp32>")
+    sta_dut.expect_any('iperf>', 'esp32>')
 
     # 2. run test for each required att value
     test_utility = IperfTestUtilitySoftap(sta_dut, softap_dut, BEST_PERFORMANCE_CONFIG, test_result)
 
@@ -734,17 +731,17 @@
         assert Attenuator.set_att(att_port, atten_val) is True
         test_utility.run_all_cases(atten_val)
 
-    env.close_dut("softap_iperf")
-    env.close_dut("sta_iperf")
+    env.close_dut('softap_iperf')
+    env.close_dut('sta_iperf')
 
     # 3. generate report
-    report = TestReport.ThroughputVsRssiReport(os.path.join(env.log_path, "SoftAPThroughputVsRssiReport"),
+    report = TestReport.ThroughputVsRssiReport(os.path.join(env.log_path, 'SoftAPThroughputVsRssiReport'),
                                                test_result)
     report.generate_report()
 
 
 if __name__ == '__main__':
-    test_wifi_throughput_basic(env_config_file="EnvConfig.yml")
-    test_wifi_throughput_with_different_configs(env_config_file="EnvConfig.yml")
-    test_wifi_throughput_vs_rssi(env_config_file="EnvConfig.yml")
-    test_softap_throughput_vs_rssi(env_config_file="EnvConfig.yml")
+    test_wifi_throughput_basic(env_config_file='EnvConfig.yml')
+    test_wifi_throughput_with_different_configs(env_config_file='EnvConfig.yml')
+    test_wifi_throughput_vs_rssi(env_config_file='EnvConfig.yml')
+    test_softap_throughput_vs_rssi(env_config_file='EnvConfig.yml')
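TestResult extracts throughput from raw iperf server logs with the two patterns defined near the top of iperf_test.py. A quick standalone check of the PC-side pattern against one plausible iperf interval line (the sample line itself is an assumption, not captured test output):

    import re

    PC_BANDWIDTH_LOG_PATTERN = re.compile(r'(\d+).0\s*-\s*(\d+).0\s+sec\s+[\d.]+\s+MBytes\s+([\d.]+)\s+Mbits/sec')

    sample = '  0.0- 1.0 sec  11.8 MBytes  98.7 Mbits/sec'
    match = PC_BANDWIDTH_LOG_PATTERN.search(sample)
    print(match.groups())  # ('0', '1', '98.7') -> interval start, interval end, Mbps
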
diff --git a/tools/ble/lib_ble_client.py b/tools/ble/lib_ble_client.py
index 73be5187fb..f7b3093344 100644
--- a/tools/ble/lib_ble_client.py
+++ b/tools/ble/lib_ble_client.py
@@ -18,25 +18,25 @@
 # DBus-Bluez BLE library
 
 from __future__ import print_function
+
 import sys
 import time
 import traceback
 
 try:
-    from future.moves.itertools import zip_longest
     import dbus
     import dbus.mainloop.glib
+    from future.moves.itertools import zip_longest
     from gi.repository import GLib
 except ImportError as e:
     if 'linux' not in sys.platform:
         raise e
     print(e)
-    print("Install packages `libgirepository1.0-dev gir1.2-gtk-3.0 libcairo2-dev libdbus-1-dev libdbus-glib-1-dev` for resolving the issue")
-    print("Run `pip install -r $IDF_PATH/tools/ble/requirements.txt` for resolving the issue")
+    print('Install packages `libgirepository1.0-dev gir1.2-gtk-3.0 libcairo2-dev libdbus-1-dev libdbus-glib-1-dev` for resolving the issue')
+    print('Run `pip install -r $IDF_PATH/tools/ble/requirements.txt` for resolving the issue')
     raise
 
-from . import lib_gatt
-from . import lib_gap
+from . import lib_gap, lib_gatt
 
 srv_added_old_cnt = 0
 srv_added_new_cnt = 0
@@ -198,7 +198,7 @@ class BLE_Bluez_Client:
         try:
             self.bus = dbus.SystemBus()
-            om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, "/"), DBUS_OM_IFACE)
+            om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, '/'), DBUS_OM_IFACE)
             self.ble_objs = om_iface_obj.GetManagedObjects()
 
         except Exception as e:
@@ -206,7 +206,7 @@
 
     def __del__(self):
         try:
-            print("Test Exit")
+            print('Test Exit')
         except Exception as e:
             print(e)
             sys.exit(1)
@@ -220,7 +220,7 @@
         verify_signal_check = 0
         adapter_on = False
         try:
-            print("discovering adapter...")
+            print('discovering adapter...')
             for path, interfaces in self.ble_objs.items():
                 adapter = interfaces.get(ADAPTER_IFACE)
                 if adapter is not None:
@@ -234,32 +234,32 @@
                     break
 
             if self.adapter is None:
-                raise Exception("Bluetooth adapter not found")
+                raise Exception('Bluetooth adapter not found')
 
             if self.props_iface_obj is None:
-                raise Exception("Properties interface not found")
+                raise Exception('Properties interface not found')
 
-            print("bluetooth adapter discovered")
+            print('bluetooth adapter discovered')
 
             # Check if adapter is already powered on
             if adapter_on:
-                print("Adapter already powered on")
+                print('Adapter already powered on')
                 return True
 
             # Power On Adapter
-            print("powering on adapter...")
+            print('powering on adapter...')
             self.props_iface_obj.connect_to_signal('PropertiesChanged', props_change_handler)
-            self.props_iface_obj.Set(ADAPTER_IFACE, "Powered", dbus.Boolean(1))
+            self.props_iface_obj.Set(ADAPTER_IFACE, 'Powered', dbus.Boolean(1))
 
             signal_caught = False
             GLib.timeout_add_seconds(5, verify_signal_is_caught)
             event_loop.run()
 
             if adapter_on:
-                print("bluetooth adapter powered on")
+                print('bluetooth adapter powered on')
                 return True
             else:
-                raise Exception("Failure: bluetooth adapter not powered on")
+                raise Exception('Failure: bluetooth adapter not powered on')
 
         except Exception:
             print(traceback.format_exc())
@@ -275,7 +275,7 @@
         device_connected = False
         try:
             self.adapter.StartDiscovery()
-            print("\nStarted Discovery")
+            print('\nStarted Discovery')
 
             discovery_start = True
 
@@ -283,7 +283,7 @@
             verify_signal_check = 0
             try:
                 if self.device is None:
-                    print("\nConnecting to device...")
+                    print('\nConnecting to device...')
                     # Wait for device to be discovered
                     time.sleep(5)
                     device_found = self.get_device()
@@ -294,13 +294,13 @@
                 GLib.timeout_add_seconds(5, verify_signal_is_caught)
                 event_loop.run()
                 if device_connected:
-                    print("\nConnected to device")
+                    print('\nConnected to device')
                     return True
                 else:
                     raise Exception
             except Exception as e:
                 print(e)
-                print("\nRetries left", retry_cnt - 1)
+                print('\nRetries left', retry_cnt - 1)
                 continue
 
         # Device not found
@@ -318,7 +318,7 @@
         '''
         dev_path = None
 
-        om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, "/"), DBUS_OM_IFACE)
+        om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, '/'), DBUS_OM_IFACE)
         self.ble_objs = om_iface_obj.GetManagedObjects()
         for path, interfaces in self.ble_objs.items():
             if DEVICE_IFACE not in interfaces.keys():
@@ -326,12 +326,12 @@
             device_addr_iface = (path.replace('_', ':')).lower()
             dev_addr = self.devaddr.lower()
             if dev_addr in device_addr_iface and \
-                    interfaces[DEVICE_IFACE].get("Name") == self.devname:
+                    interfaces[DEVICE_IFACE].get('Name') == self.devname:
                 dev_path = path
                 break
 
         if dev_path is None:
-            raise Exception("\nBLE device not found")
+            raise Exception('\nBLE device not found')
 
         device_props_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, dev_path), DBUS_PROP_IFACE)
         device_props_iface_obj.connect_to_signal('PropertiesChanged', props_change_handler)
@@ -373,7 +373,7 @@
         signal_caught = False
         try:
-            om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, "/"), DBUS_OM_IFACE)
+            om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, '/'), DBUS_OM_IFACE)
             self.ble_objs = om_iface_obj.GetManagedObjects()
             for path, interfaces in self.ble_objs.items():
                 self.srvc_iface_added_handler(path, interfaces)
@@ -383,12 +383,12 @@
             om_iface_obj.connect_to_signal('InterfacesAdded', self.srvc_iface_added_handler)
             event_loop.run()
             if not services_resolved:
-                raise Exception("Services not found...")
+                raise Exception('Services not found...')
 
             if service_uuid:
                 self.verify_service_uuid_found(service_uuid)
                 if not service_uuid_found:
-                    raise Exception("Service with uuid: %s not found..." % service_uuid)
+                    raise Exception('Service with uuid: %s not found...' % service_uuid)
 
             # Services found
             return self.srv_uuid
@@ -426,7 +426,7 @@
         signal_caught = False
         try:
-            om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, "/"), DBUS_OM_IFACE)
+            om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, '/'), DBUS_OM_IFACE)
             self.ble_objs = om_iface_obj.GetManagedObjects()
             for path, interfaces in self.ble_objs.items():
                 self.chrc_iface_added_handler(path, interfaces)
@@ -460,13 +460,13 @@
                 if 'read' in props[1]:
                     chrc_val = chrc.ReadValue({}, dbus_interface=GATT_CHRC_IFACE)
                 else:
-                    print("Warning: Cannot read value. Characteristic does not have read permission.")
+                    print('Warning: Cannot read value. Characteristic does not have read permission.')
                 if not (ord(write_val) == int(chrc_val[0])):
-                    print("\nWrite Failed")
+                    print('\nWrite Failed')
                     return False
                 self.chars[path] = chrc_val, props[1], props[2]  # update value
             if not char_write_props:
-                raise Exception("Failure: Cannot perform write operation. Characteristic does not have write permission.")
+                raise Exception('Failure: Cannot perform write operation. Characteristic does not have write permission.')
 
             return self.chars
         except Exception:
@@ -498,7 +498,7 @@
                 break
 
         if srv_path is None:
-            raise Exception("Failure: HR UUID:", hr_srv_uuid, "not found")
+            raise Exception('Failure: HR UUID:', hr_srv_uuid, 'not found')
 
         chars_ret = self.read_chars()
 
@@ -509,10 +509,10 @@
             if hr_char_uuid in props[2]:  # uuid
                 break
         if chrc is None:
-            raise Exception("Failure: Characteristics for service: ", srv_path, "not found")
+            raise Exception('Failure: Characteristics for service: ', srv_path, 'not found')
 
         # Subscribe to notifications
-        print("\nSubscribe to notifications: On")
+        print('\nSubscribe to notifications: On')
         chrc.StartNotify(dbus_interface=GATT_CHRC_IFACE)
 
         chrc_props_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, chrc_path), DBUS_PROP_IFACE)
@@ -524,7 +524,7 @@
         event_loop.run()
         chrc.StopNotify(dbus_interface=GATT_CHRC_IFACE)
         time.sleep(2)
-        print("\nSubscribe to notifications: Off")
+        print('\nSubscribe to notifications: Off')
 
         ble_hr_chrc = False
         return True
@@ -587,7 +587,7 @@
         lib_gap.ADV_OBJ = False
 
         try:
-            print("Advertising started")
+            print('Advertising started')
 
             gatt_app_ret = self.create_and_reg_gatt_app()
 
             # Check if gatt app create and register command
@@ -609,7 +609,7 @@
 
             # Get device when connected
             if not self.device:
-                om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, "/"), DBUS_OM_IFACE)
+                om_iface_obj = dbus.Interface(self.bus.get_object(BLUEZ_SERVICE_NAME, '/'), DBUS_OM_IFACE)
                 self.ble_objs = om_iface_obj.GetManagedObjects()
 
                 for path, interfaces in self.ble_objs.items():
@@ -679,13 +679,13 @@
 
             # Check for success
             if lib_gatt.GATT_APP_OBJ:
-                print("GATT Data created")
+                print('GATT Data created')
             if gatt_app_registered:
-                print("GATT Application registered")
+                print('GATT Application registered')
                 gatt_checks_done = True
             if gatt_app_retry_check_cnt == 20:
                 if not gatt_app_registered:
-                    print("Failure: GATT Application could not be registered")
+                    print('Failure: GATT Application could not be registered')
                 gatt_checks_done = True
 
         # End polling if app is registered or cnt has reached 10
@@ -707,13 +707,13 @@
         adv_checks_done = False
 
         if lib_gap.ADV_OBJ:
-            print("Advertising data created")
+            print('Advertising data created')
         if adv_registered or adv_active_instance:
-            print("Advertisement registered")
+            print('Advertisement registered')
             adv_checks_done = True
         if adv_retry_check_cnt == 10:
             if not adv_registered and not adv_active_instance:
-                print("Failure: Advertisement could not be registered")
+                print('Failure: Advertisement could not be registered')
             adv_checks_done = True
 
         # End polling if success or cnt has reached 10
@@ -737,11 +737,11 @@
         if blecent_retry_check_cnt == 10:
             # check for failures
             if not read_req_check:
-                print("Failure: Read Request not received")
+                print('Failure: Read Request not received')
             if not write_req_check:
-                print("Failure: Write Request not received")
+                print('Failure: Write Request not received')
             if not subscribe_req_check:
-                print("Failure: Subscribe Request not received")
+                print('Failure: Subscribe Request not received')
 
             # Blecent Test failed
             test_checks_pass = False
@@ -790,27 +790,27 @@
         if blecent_retry_check_cnt == 10:
             # check for failures
             if not gatt_app_obj_check:
-                print("Warning: GATT Data could not be removed")
+                print('Warning: GATT Data could not be removed')
             if not gatt_app_reg_check:
-                print("Warning: GATT Application could not be unregistered")
+                print('Warning: GATT Application could not be unregistered')
             if not adv_data_check:
-                print("Warning: Advertising data could not be removed")
+                print('Warning: Advertising data could not be removed')
            if not adv_reg_check:
-                print("Warning: Advertisement could not be unregistered")
+                print('Warning: Advertisement could not be unregistered')
 
             # Blecent Test failed
             adv_stop = False
         else:
             # Check for success
             if not gatt_app_obj_check and not lib_gatt.GATT_APP_OBJ:
-                print("GATT Data removed")
+                print('GATT Data removed')
                 gatt_app_obj_check = True
             if not gatt_app_reg_check and not gatt_app_registered:
-                print("GATT Application unregistered")
+                print('GATT Application unregistered')
                 gatt_app_reg_check = True
             if not adv_data_check and not adv_reg_check and not (adv_registered or adv_active_instance or lib_gap.ADV_OBJ):
-                print("Advertising data removed")
-                print("Advertisement unregistered")
+                print('Advertising data removed')
+                print('Advertisement unregistered')
                 adv_data_check = True
                 adv_reg_check = True
                 # all checks passed
@@ -842,7 +842,7 @@
         blecent_retry_check_cnt = 0
         verify_signal_check = 0
 
-        print("\nexiting from test...")
+        print('\nexiting from test...')
 
         self.props_iface_obj.connect_to_signal('PropertiesChanged', props_change_handler)
 
@@ -864,13 +864,13 @@
             event_loop.run()
 
             if adv_stop:
-                print("Stop Advertising status: ", adv_stop)
+                print('Stop Advertising status: ', adv_stop)
             else:
-                print("Warning: Stop Advertising status: ", adv_stop)
+                print('Warning: Stop Advertising status: ', adv_stop)
 
             # Disconnect device
             if self.device:
-                print("disconnecting device...")
+                print('disconnecting device...')
                 self.device.Disconnect(dbus_interface=DEVICE_IFACE)
                 if self.adapter:
                     self.adapter.RemoveDevice(self.device)
@@ -885,9 +885,9 @@
                 event_loop.run()
 
             if not device_connected:
-                print("device disconnected")
+                print('device disconnected')
             else:
-                print("Warning: device could not be disconnected")
+                print('Warning: device could not be disconnected')
 
         except Exception:
             print(traceback.format_exc())
removed') if not gatt_app_reg_check: - print("Warning: GATT Application could not be unregistered") + print('Warning: GATT Application could not be unregistered') if not adv_data_check: - print("Warning: Advertising data could not be removed") + print('Warning: Advertising data could not be removed') if not adv_reg_check: - print("Warning: Advertisement could not be unregistered") + print('Warning: Advertisement could not be unregistered') # Blecent Test failed adv_stop = False else: # Check for success if not gatt_app_obj_check and not lib_gatt.GATT_APP_OBJ: - print("GATT Data removed") + print('GATT Data removed') gatt_app_obj_check = True if not gatt_app_reg_check and not gatt_app_registered: - print("GATT Application unregistered") + print('GATT Application unregistered') gatt_app_reg_check = True if not adv_data_check and not adv_reg_check and not (adv_registered or adv_active_instance or lib_gap.ADV_OBJ): - print("Advertising data removed") - print("Advertisement unregistered") + print('Advertising data removed') + print('Advertisement unregistered') adv_data_check = True adv_reg_check = True # all checks passed @@ -842,7 +842,7 @@ class BLE_Bluez_Client: blecent_retry_check_cnt = 0 verify_signal_check = 0 - print("\nexiting from test...") + print('\nexiting from test...') self.props_iface_obj.connect_to_signal('PropertiesChanged', props_change_handler) @@ -864,13 +864,13 @@ class BLE_Bluez_Client: event_loop.run() if adv_stop: - print("Stop Advertising status: ", adv_stop) + print('Stop Advertising status: ', adv_stop) else: - print("Warning: Stop Advertising status: ", adv_stop) + print('Warning: Stop Advertising status: ', adv_stop) # Disconnect device if self.device: - print("disconnecting device...") + print('disconnecting device...') self.device.Disconnect(dbus_interface=DEVICE_IFACE) if self.adapter: self.adapter.RemoveDevice(self.device) @@ -885,9 +885,9 @@ class BLE_Bluez_Client: event_loop.run() if not device_connected: - print("device disconnected") + print('device disconnected') else: - print("Warning: device could not be disconnected") + print('Warning: device could not be disconnected') except Exception: print(traceback.format_exc()) diff --git a/tools/ble/lib_gap.py b/tools/ble/lib_gap.py index 02466c7aa1..abe67e6cdd 100644 --- a/tools/ble/lib_gap.py +++ b/tools/ble/lib_gap.py @@ -18,6 +18,7 @@ # Register Advertisement from __future__ import print_function + import sys try: @@ -27,8 +28,8 @@ except ImportError as e: if 'linux' not in sys.platform: raise e print(e) - print("Install packages `libgirepository1.0-dev gir1.2-gtk-3.0 libcairo2-dev libdbus-1-dev libdbus-glib-1-dev` for resolving the issue") - print("Run `pip install -r $IDF_PATH/tools/ble/requirements.txt` for resolving the issue") + print('Install packages `libgirepository1.0-dev gir1.2-gtk-3.0 libcairo2-dev libdbus-1-dev libdbus-glib-1-dev` for resolving the issue') + print('Run `pip install -r $IDF_PATH/tools/ble/requirements.txt` for resolving the issue') raise ADV_OBJ = False diff --git a/tools/ble/lib_gatt.py b/tools/ble/lib_gatt.py index 4710e1549c..da2dd39d1c 100644 --- a/tools/ble/lib_gatt.py +++ b/tools/ble/lib_gatt.py @@ -18,6 +18,7 @@ # Creating GATT Application which then becomes available to remote devices. 
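Throughout the BLE client hunks above, the test arms a GLib timeout with GLib.timeout_add_seconds(5, verify_signal_is_caught) and then blocks in event_loop.run() until a signal handler flips a flag. The callback itself is defined outside this excerpt, so the following is only a minimal sketch of the pattern, with an assumed callback body and an assumed retry limit of 10:

from gi.repository import GLib

event_loop = GLib.MainLoop()
signal_caught = False   # flipped elsewhere, e.g. by a PropertiesChanged handler
verify_signal_check = 0

def verify_signal_is_caught():
    # Assumed body: give up after 10 polls, otherwise keep waiting for the signal.
    global verify_signal_check
    verify_signal_check += 1
    if signal_caught or verify_signal_check >= 10:
        if event_loop.is_running():
            event_loop.quit()
        return False  # returning False removes this GLib timeout source
    return True       # returning True re-arms the 5-second timeout

GLib.timeout_add_seconds(5, verify_signal_is_caught)
event_loop.run()  # returns once the callback calls event_loop.quit()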
from __future__ import print_function + import sys try: @@ -27,8 +28,8 @@ except ImportError as e: if 'linux' not in sys.platform: raise e print(e) - print("Install packages `libgirepository1.0-dev gir1.2-gtk-3.0 libcairo2-dev libdbus-1-dev libdbus-glib-1-dev` for resolving the issue") - print("Run `pip install -r $IDF_PATH/tools/ble/requirements.txt` for resolving the issue") + print('Install packages `libgirepository1.0-dev gir1.2-gtk-3.0 libcairo2-dev libdbus-1-dev libdbus-glib-1-dev` for resolving the issue') + print('Run `pip install -r $IDF_PATH/tools/ble/requirements.txt` for resolving the issue') raise alert_status_char_obj = None @@ -216,7 +217,7 @@ class Characteristic(dbus.service.Object): @dbus.service.signal(DBUS_PROP_IFACE, signature='sa{sv}as') def PropertiesChanged(self, interface, changed, invalidated): - print("\nProperties Changed") + print('\nProperties Changed') class Descriptor(dbus.service.Object): @@ -293,8 +294,8 @@ class SupportedNewAlertCategoryCharacteristic(Characteristic): val_list = [] for val in self.value: val_list.append(dbus.Byte(val)) - print("Read Request received\n", "\tSupportedNewAlertCategoryCharacteristic") - print("\tValue:", "\t", val_list) + print('Read Request received\n', '\tSupportedNewAlertCategoryCharacteristic') + print('\tValue:', '\t', val_list) return val_list @@ -314,23 +315,23 @@ class AlertNotificationControlPointCharacteristic(Characteristic): val_list = [] for val in self.value: val_list.append(dbus.Byte(val)) - print("Read Request received\n", "\tAlertNotificationControlPointCharacteristic") - print("\tValue:", "\t", val_list) + print('Read Request received\n', '\tAlertNotificationControlPointCharacteristic') + print('\tValue:', '\t', val_list) return val_list def WriteValue(self, value, options): global CHAR_WRITE CHAR_WRITE = True - print("Write Request received\n", "\tAlertNotificationControlPointCharacteristic") - print("\tCurrent value:", "\t", self.value) + print('Write Request received\n', '\tAlertNotificationControlPointCharacteristic') + print('\tCurrent value:', '\t', self.value) val_list = [] for val in value: val_list.append(val) self.value = val_list # Check if new value is written - print("\tNew value:", "\t", self.value) + print('\tNew value:', '\t', self.value) if not self.value == value: - print("Failed: Write Request\n\tNew value not written\tCurrent value:", self.value) + print('Failed: Write Request\n\tNew value not written\tCurrent value:', self.value) class UnreadAlertStatusCharacteristic(Characteristic): @@ -355,7 +356,7 @@ class UnreadAlertStatusCharacteristic(Characteristic): print('\nAlready notifying, nothing to do') return self.notifying = True - print("\nNotify Started") + print('\nNotify Started') self.cccd_obj.WriteValue([dbus.Byte(1), dbus.Byte(0)]) self.cccd_obj.ReadValue() @@ -364,26 +365,26 @@ class UnreadAlertStatusCharacteristic(Characteristic): print('\nNot notifying, nothing to do') return self.notifying = False - print("\nNotify Stopped") + print('\nNotify Stopped') def ReadValue(self, options): - print("Read Request received\n", "\tUnreadAlertStatusCharacteristic") + print('Read Request received\n', '\tUnreadAlertStatusCharacteristic') val_list = [] for val in self.value: val_list.append(dbus.Byte(val)) - print("\tValue:", "\t", val_list) + print('\tValue:', '\t', val_list) return val_list def WriteValue(self, value, options): - print("Write Request received\n", "\tUnreadAlertStatusCharacteristic") + print('Write Request received\n', '\tUnreadAlertStatusCharacteristic') val_list = [] for 
val in value: val_list.append(val) self.value = val_list # Check if new value is written - print("\tNew value:", "\t", self.value) + print('\tNew value:', '\t', self.value) if not self.value == value: - print("Failed: Write Request\n\tNew value not written\tCurrent value:", self.value) + print('Failed: Write Request\n\tNew value not written\tCurrent value:', self.value) class ClientCharacteristicConfigurationDescriptor(Descriptor): @@ -398,7 +399,7 @@ class ClientCharacteristicConfigurationDescriptor(Descriptor): characteristic) def ReadValue(self): - print("\tValue on read:", "\t", self.value) + print('\tValue on read:', '\t', self.value) return self.value def WriteValue(self, value): @@ -407,6 +408,6 @@ class ClientCharacteristicConfigurationDescriptor(Descriptor): val_list.append(val) self.value = val_list # Check if new value is written - print("New value on write:", "\t", self.value) + print('New value on write:', '\t', self.value) if not self.value == value: - print("Failed: Write Request\n\tNew value not written\tCurrent value:", self.value) + print('Failed: Write Request\n\tNew value not written\tCurrent value:', self.value) diff --git a/tools/build_apps.py b/tools/build_apps.py index d95717fd79..561d1390a5 100755 --- a/tools/build_apps.py +++ b/tools/build_apps.py @@ -8,74 +8,74 @@ import argparse import logging import sys -from find_build_apps import BuildItem, BuildError, setup_logging, BUILD_SYSTEMS -from find_build_apps.common import rmdir, SIZE_JSON_FN +from find_build_apps import BUILD_SYSTEMS, BuildError, BuildItem, setup_logging +from find_build_apps.common import SIZE_JSON_FN, rmdir def main(): - parser = argparse.ArgumentParser(description="ESP-IDF app builder") + parser = argparse.ArgumentParser(description='ESP-IDF app builder') parser.add_argument( - "-v", - "--verbose", - action="count", - help="Increase the logging level of the script. Can be specified multiple times.", + '-v', + '--verbose', + action='count', + help='Increase the logging level of the script. Can be specified multiple times.', ) parser.add_argument( - "--build-verbose", - action="store_true", - help="Enable verbose output from build system.", + '--build-verbose', + action='store_true', + help='Enable verbose output from build system.', ) parser.add_argument( - "--log-file", - type=argparse.FileType("w"), - help="Write the script log to the specified file, instead of stderr", + '--log-file', + type=argparse.FileType('w'), + help='Write the script log to the specified file, instead of stderr', ) parser.add_argument( - "--parallel-count", + '--parallel-count', default=1, type=int, help="Number of parallel build jobs. 
Note that this script doesn't start the jobs, " + - "it needs to be executed multiple times with same value of --parallel-count and " + - "different values of --parallel-index.", + 'it needs to be executed multiple times with same value of --parallel-count and ' + + 'different values of --parallel-index.', ) parser.add_argument( - "--parallel-index", + '--parallel-index', default=1, type=int, - help="Index (1-based) of the job, out of the number specified by --parallel-count.", + help='Index (1-based) of the job, out of the number specified by --parallel-count.', ) parser.add_argument( - "--format", - default="json", - choices=["json"], - help="Format to read the list of builds", + '--format', + default='json', + choices=['json'], + help='Format to read the list of builds', ) parser.add_argument( - "--dry-run", - action="store_true", + '--dry-run', + action='store_true', help="Don't actually build, only print the build commands", ) parser.add_argument( - "--keep-going", - action="store_true", + '--keep-going', + action='store_true', help="Don't exit immediately when a build fails.", ) parser.add_argument( - "--output-build-list", - type=argparse.FileType("w"), - help="If specified, the list of builds (with all the placeholders expanded) will be written to this file.", + '--output-build-list', + type=argparse.FileType('w'), + help='If specified, the list of builds (with all the placeholders expanded) will be written to this file.', ) parser.add_argument( - "--size-info", - type=argparse.FileType("a"), - help="If specified, the test case name and size info json will be written to this file" + '--size-info', + type=argparse.FileType('a'), + help='If specified, the test case name and size info json will be written to this file' ) parser.add_argument( - "build_list", - type=argparse.FileType("r"), - nargs="?", + 'build_list', + type=argparse.FileType('r'), + nargs='?', default=sys.stdin, - help="Name of the file to read the list of builds from. If not specified, read from stdin.", + help='Name of the file to read the list of builds from. If not specified, read from stdin.', ) args = parser.parse_args() @@ -83,7 +83,7 @@ def main(): build_items = [BuildItem.from_json(line) for line in args.build_list] if not build_items: - logging.warning("Empty build list") + logging.warning('Empty build list') SystemExit(0) num_builds = len(build_items) @@ -92,12 +92,12 @@ def main(): num_builds_per_job = (num_builds + num_jobs - 1) // num_jobs min_job_index = num_builds_per_job * job_index if min_job_index >= num_builds: - logging.warn("Nothing to do for job {} (build total: {}, per job: {})".format( + logging.warn('Nothing to do for job {} (build total: {}, per job: {})'.format( job_index + 1, num_builds, num_builds_per_job)) raise SystemExit(0) max_job_index = min(num_builds_per_job * (job_index + 1) - 1, num_builds - 1) - logging.info("Total {} builds, max. {} builds per job, running builds {}-{}".format( + logging.info('Total {} builds, max. 
{} builds per job, running builds {}-{}'.format( num_builds, num_builds_per_job, min_job_index + 1, max_job_index + 1)) builds_for_current_job = build_items[min_job_index:max_job_index + 1] @@ -107,13 +107,13 @@ def main(): build_info.dry_run = args.dry_run build_info.verbose = args.build_verbose build_info.keep_going = args.keep_going - logging.debug(" Build {}: {}".format(index, repr(build_info))) + logging.debug(' Build {}: {}'.format(index, repr(build_info))) if args.output_build_list: - args.output_build_list.write(build_info.to_json_expanded() + "\n") + args.output_build_list.write(build_info.to_json_expanded() + '\n') failed_builds = [] for build_info in builds_for_current_job: - logging.info("Running build {}: {}".format(build_info.index, repr(build_info))) + logging.info('Running build {}: {}'.format(build_info.index, repr(build_info))) build_system_class = BUILD_SYSTEMS[build_info.build_system] try: build_system_class.build(build_info) @@ -127,16 +127,16 @@ def main(): if args.size_info: build_info.write_size_info(args.size_info) if not build_info.preserve: - logging.info("Removing build directory {}".format(build_info.build_path)) + logging.info('Removing build directory {}'.format(build_info.build_path)) # we only remove binaries here, log files are still needed by check_build_warnings.py rmdir(build_info.build_path, exclude_file_pattern=SIZE_JSON_FN) if failed_builds: - logging.error("The following build have failed:") + logging.error('The following build have failed:') for build in failed_builds: - logging.error(" {}".format(build)) + logging.error(' {}'.format(build)) raise SystemExit(1) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/check_python_dependencies.py b/tools/check_python_dependencies.py index a8d7ad7cef..ea7f2088b8 100755 --- a/tools/check_python_dependencies.py +++ b/tools/check_python_dependencies.py @@ -29,15 +29,15 @@ except Exception: def escape_backslash(path): - if sys.platform == "win32": + if sys.platform == 'win32': # escaped backslashes are necessary in order to be able to copy-paste the printed path - return path.replace("\\", "\\\\") + return path.replace('\\', '\\\\') else: return path -if __name__ == "__main__": - idf_path = os.getenv("IDF_PATH") +if __name__ == '__main__': + idf_path = os.getenv('IDF_PATH') default_requirements_path = os.path.join(idf_path, 'requirements.txt') @@ -72,30 +72,30 @@ if __name__ == "__main__": elif os.environ.get('IDF_PYTHON_ENV_PATH'): # We are running inside a private virtual environment under IDF_TOOLS_PATH, # ask the user to run install.bat again. - if sys.platform == "win32" and not os.environ.get("MSYSTEM"): + if sys.platform == 'win32' and not os.environ.get('MSYSTEM'): install_script = 'install.bat' else: install_script = 'install.sh' print('To install the missing packages, please run "%s"' % os.path.join(idf_path, install_script)) - elif sys.platform == "win32" and os.environ.get("MSYSTEM", None) == "MINGW32" and "/mingw32/bin/python" in sys.executable: + elif sys.platform == 'win32' and os.environ.get('MSYSTEM', None) == 'MINGW32' and '/mingw32/bin/python' in sys.executable: print("The recommended way to install a packages is via \"pacman\". 
Please run \"pacman -Ss \" for" - " searching the package database and if found then " + ' searching the package database and if found then ' "\"pacman -S mingw-w64-i686-python-\" for installing it.") print("NOTE: You may need to run \"pacman -Syu\" if your package database is older and run twice if the " "previous run updated \"pacman\" itself.") - print("Please read https://github.com/msys2/msys2/wiki/Using-packages for further information about using " + print('Please read https://github.com/msys2/msys2/wiki/Using-packages for further information about using ' "\"pacman\"") # Special case for MINGW32 Python, needs some packages # via MSYS2 not via pip or system breaks... for requirement in not_satisfied: if requirement.startswith('cryptography'): - print("WARNING: The cryptography package have dependencies on system packages so please make sure " + print('WARNING: The cryptography package have dependencies on system packages so please make sure ' "you run \"pacman -Syu\" followed by \"pacman -S mingw-w64-i686-python{}-cryptography\"." - "".format(sys.version_info[0],)) + ''.format(sys.version_info[0],)) continue elif requirement.startswith('setuptools'): print("Please run the following command to install MSYS2's MINGW Python setuptools package:") - print("pacman -S mingw-w64-i686-python-setuptools") + print('pacman -S mingw-w64-i686-python-setuptools') continue else: print('Please follow the instructions found in the "Set up the tools" section of ' diff --git a/tools/check_term.py b/tools/check_term.py index 7a3fdcfbe6..b957c893f7 100644 --- a/tools/check_term.py +++ b/tools/check_term.py @@ -15,6 +15,7 @@ # limitations under the License. from __future__ import print_function + import os import sys diff --git a/tools/ci/apply_bot_filter.py b/tools/ci/apply_bot_filter.py index 0f8ef24b98..1376da28aa 100755 --- a/tools/ci/apply_bot_filter.py +++ b/tools/ci/apply_bot_filter.py @@ -3,15 +3,14 @@ # internal use only # called by CI jobs to determine if it need to be executed +import json import os import re import sys -import json - RE_FILTER_PATTERN = re.compile(r'^r"(.+)?"$') -RE_TYPE = type(re.compile("", 0)) +RE_TYPE = type(re.compile('', 0)) def parse_filter(filter_name): @@ -50,13 +49,13 @@ def process_filter(execute_by_default, filter_name, ci_name): return execute -if __name__ == "__main__": +if __name__ == '__main__': execute_by_default = True - if os.getenv("BOT_NEEDS_TRIGGER_BY_NAME", "0") == "1": + if os.getenv('BOT_NEEDS_TRIGGER_BY_NAME', '0') == '1': execute_by_default = False - need_to_execute = process_filter(True, "BOT_STAGE_FILTER", os.getenv("CI_JOB_STAGE")) and process_filter(execute_by_default, - "BOT_JOB_FILTER", os.getenv("CI_JOB_NAME")) + need_to_execute = process_filter(True, 'BOT_STAGE_FILTER', os.getenv('CI_JOB_STAGE')) and process_filter(execute_by_default, + 'BOT_JOB_FILTER', os.getenv('CI_JOB_NAME')) if need_to_execute: sys.exit(0) else: diff --git a/tools/ci/check_artifacts_expire_time.py b/tools/ci/check_artifacts_expire_time.py index ae7942dae5..486d1c4abd 100644 --- a/tools/ci/check_artifacts_expire_time.py +++ b/tools/ci/check_artifacts_expire_time.py @@ -12,42 +12,42 @@ try: except ImportError: from yaml import Loader as Loader -IDF_PATH = os.getenv("IDF_PATH") +IDF_PATH = os.getenv('IDF_PATH') if not IDF_PATH: - print("Please set IDF_PATH before running this script") + print('Please set IDF_PATH before running this script') raise SystemExit(-1) -GITLAB_CONFIG_FILE = os.path.join(os.getenv("IDF_PATH"), ".gitlab-ci.yml") +GITLAB_CONFIG_FILE = 
os.path.join(os.getenv('IDF_PATH'), '.gitlab-ci.yml') def check_artifacts_expire_time(): - with open(GITLAB_CONFIG_FILE, "r") as f: + with open(GITLAB_CONFIG_FILE, 'r') as f: config = yaml.load(f, Loader=Loader) errors = [] - print("expire time for jobs:") + print('expire time for jobs:') job_names = list(config.keys()) job_names.sort() for job_name in job_names: - if job_name.startswith("."): + if job_name.startswith('.'): # skip ignored jobs continue try: - if "expire_in" not in config[job_name]["artifacts"]: + if 'expire_in' not in config[job_name]['artifacts']: errors.append(job_name) else: - print("{}: {}".format(job_name, config[job_name]["artifacts"]["expire_in"])) + print('{}: {}'.format(job_name, config[job_name]['artifacts']['expire_in'])) except (KeyError, TypeError): # this is not job, or the job does not have artifacts pass if errors: - print("\n\nThe following jobs did not set expire time for its artifacts") + print('\n\nThe following jobs did not set expire time for its artifacts') for error in errors: print(error) raise SystemExit(-2) diff --git a/tools/ci/check_build_warnings.py b/tools/ci/check_build_warnings.py index bed1985996..0ccd5ed3b0 100755 --- a/tools/ci/check_build_warnings.py +++ b/tools/ci/check_build_warnings.py @@ -15,23 +15,23 @@ import sys try: from find_build_apps import BuildItem, setup_logging except ImportError: - sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..")) + sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..')) from find_build_apps import BuildItem, setup_logging -WARNING_REGEX = re.compile(r"(?:error|warning)[^\w]", re.MULTILINE | re.IGNORECASE) +WARNING_REGEX = re.compile(r'(?:error|warning)[^\w]', re.MULTILINE | re.IGNORECASE) IGNORE_WARNS = [ re.compile(r_str) for r_str in [ - r"library/error\.o", - r".*error.*\.c\.obj", - r"-Werror", - r"error\.d", - r"reassigning to symbol", - r"changes choice state", - r"crosstool_version_check\.cmake", - r"CryptographyDeprecationWarning", - r"Python 3 versions older than 3.6 are not supported.", - r"Support for Python 2 is deprecated and will be removed in future versions.", + r'library/error\.o', + r'.*error.*\.c\.obj', + r'-Werror', + r'error\.d', + r'reassigning to symbol', + r'changes choice state', + r'crosstool_version_check\.cmake', + r'CryptographyDeprecationWarning', + r'Python 3 versions older than 3.6 are not supported.', + r'Support for Python 2 is deprecated and will be removed in future versions.', ] ] @@ -50,54 +50,54 @@ def line_has_warnings(line): # type: (str) -> bool def main(): - parser = argparse.ArgumentParser(description="ESP-IDF app builder") + parser = argparse.ArgumentParser(description='ESP-IDF app builder') parser.add_argument( - "-v", - "--verbose", - action="count", - help="Increase the logging level of the script. Can be specified multiple times.", + '-v', + '--verbose', + action='count', + help='Increase the logging level of the script. Can be specified multiple times.', ) parser.add_argument( - "--log-file", - type=argparse.FileType("w"), - help="Write the script log to the specified file, instead of stderr", + '--log-file', + type=argparse.FileType('w'), + help='Write the script log to the specified file, instead of stderr', ) parser.add_argument( - "build_list", - type=argparse.FileType("r"), - nargs="?", + 'build_list', + type=argparse.FileType('r'), + nargs='?', default=sys.stdin, - help="Name of the file to read the list of builds from. If not specified, read from stdin.", + help='Name of the file to read the list of builds from. 
If not specified, read from stdin.', ) args = parser.parse_args() setup_logging(args) build_items = [BuildItem.from_json(line) for line in args.build_list] if not build_items: - logging.warning("Empty build list") + logging.warning('Empty build list') SystemExit(0) found_warnings = 0 for build_item in build_items: if not build_item.build_log_path: - logging.debug("No log file for {}".format(build_item.work_dir)) + logging.debug('No log file for {}'.format(build_item.work_dir)) continue - with open(build_item.build_log_path, "r") as log_file: + with open(build_item.build_log_path, 'r') as log_file: for line_no, line in enumerate(log_file): if line_has_warnings(line): - logging.error("Issue in app {}, config {}:".format(build_item.app_dir, build_item.config_name)) - logging.error(line.rstrip("\n")) - logging.error("See {}:{} for details".format(os.path.basename(build_item.build_log_path), + logging.error('Issue in app {}, config {}:'.format(build_item.app_dir, build_item.config_name)) + logging.error(line.rstrip('\n')) + logging.error('See {}:{} for details'.format(os.path.basename(build_item.build_log_path), line_no + 1)) found_warnings += 1 break if found_warnings: - logging.error("Checked {} builds, found {} warnings".format(len(build_items), found_warnings)) + logging.error('Checked {} builds, found {} warnings'.format(len(build_items), found_warnings)) raise SystemExit(1) - logging.info("No warnings found") + logging.info('No warnings found') -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/ci/check_callgraph.py b/tools/ci/check_callgraph.py index a5eab370cd..700d5e6bad 100755 --- a/tools/ci/check_callgraph.py +++ b/tools/ci/check_callgraph.py @@ -16,20 +16,20 @@ # limitations under the License. import argparse -from functools import partial import os import re +from functools import partial import elftools from elftools.elf import elffile try: - from typing import List, Optional, BinaryIO, Tuple, Generator, Dict, Callable + from typing import BinaryIO, Callable, Dict, Generator, List, Optional, Tuple except ImportError: pass FUNCTION_REGEX = re.compile( - r"^;; Function (?P<mangled_name>.*)\s+\((?P<function>\S+)(,.*)?\).*$" + r'^;; Function (?P<mangled_name>.*)\s+\((?P<function>\S+)(,.*)?\).*$' ) CALL_REGEX = re.compile(r'^.*\(call.*"(?P<target>.*)".*$') SYMBOL_REF_REGEX = re.compile(r'^.*\(symbol_ref[^()]*\("(?P<target>.*)"\).*$') @@ -52,24 +52,24 @@ class SectionAddressRange(object): self.high = addr + size def __str__(self): - return "{}: 0x{:08x} - 0x{:08x}".format(self.name, self.low, self.high) + return '{}: 0x{:08x} - 0x{:08x}'.format(self.name, self.low, self.high) def contains_address(self, addr): return self.low <= addr < self.high TARGET_SECTIONS = { - "esp32": [ - SectionAddressRange(".rom.text", 0x40000000, 0x70000), - SectionAddressRange(".rom.rodata", 0x3ff96000, 0x9018) + 'esp32': [ + SectionAddressRange('.rom.text', 0x40000000, 0x70000), + SectionAddressRange('.rom.rodata', 0x3ff96000, 0x9018) ], - "esp32s2": [ - SectionAddressRange(".rom.text", 0x40000000, 0x1bed0), - SectionAddressRange(".rom.rodata", 0x3ffac600, 0x392c) + 'esp32s2': [ + SectionAddressRange('.rom.text', 0x40000000, 0x1bed0), + SectionAddressRange('.rom.rodata', 0x3ffac600, 0x392c) ], - "esp32s3": [ - SectionAddressRange(".rom.text", 0x40000000, 0x568d0), - SectionAddressRange(".rom.rodata", 0x3ff071c0, 0x8e30) + 'esp32s3': [ + SectionAddressRange('.rom.text', 0x40000000, 0x568d0), + SectionAddressRange('.rom.rodata', 0x3ff071c0, 0x8e30) ] } # type: Dict[str, List[SectionAddressRange]] @@ -85,11 +85,11 @@ class Symbol(object):
self.referred_from = list() # type: List[Symbol] def __str__(self): - return "{} @0x{:08x} [{}]{} {}".format( + return '{} @0x{:08x} [{}]{} {}'.format( self.name, self.addr, - self.section or "unknown", - " (local)" if self.local else "", + self.section or 'unknown', + ' (local)' if self.local else '', self.filename ) @@ -100,7 +100,7 @@ class Reference(object): self.to_sym = to_sym def __str__(self): - return "{} @0x{:08x} ({}) -> {} @0x{:08x} ({})".format( + return '{} @0x{:08x} ({}) -> {} @0x{:08x} ({})'.format( self.from_sym.name, self.from_sym.addr, self.from_sym.section, @@ -124,12 +124,12 @@ class ElfInfo(object): continue filename = None for sym in s.iter_symbols(): - sym_type = sym.entry["st_info"]["type"] - if sym_type == "STT_FILE": + sym_type = sym.entry['st_info']['type'] + if sym_type == 'STT_FILE': filename = sym.name - if sym_type in ["STT_NOTYPE", "STT_FUNC", "STT_OBJECT"]: - local = sym.entry["st_info"]["bind"] == "STB_LOCAL" - addr = sym.entry["st_value"] + if sym_type in ['STT_NOTYPE', 'STT_FUNC', 'STT_OBJECT']: + local = sym.entry['st_info']['bind'] == 'STB_LOCAL' + addr = sym.entry['st_value'] symbols.append( Symbol( sym.name, @@ -144,17 +144,17 @@ class ElfInfo(object): def _load_sections(self): # type: () -> List[SectionAddressRange] result = [] for segment in self.elf_obj.iter_segments(): - if segment["p_type"] == "PT_LOAD": + if segment['p_type'] == 'PT_LOAD': for section in self.elf_obj.iter_sections(): if not segment.section_in_segment(section): continue result.append( SectionAddressRange( - section.name, section["sh_addr"], section["sh_size"] + section.name, section['sh_addr'], section['sh_size'] ) ) - target = os.environ.get("IDF_TARGET") + target = os.environ.get('IDF_TARGET') if target in TARGET_SECTIONS: result += TARGET_SECTIONS[target] @@ -180,7 +180,7 @@ def load_rtl_file(rtl_filename, tu_filename, functions): # type: (str, str, Lis # Find function definition match = re.match(FUNCTION_REGEX, line) if match: - function_name = match.group("function") + function_name = match.group('function') last_function = RtlFunction(function_name, rtl_filename, tu_filename) functions.append(last_function) continue @@ -189,7 +189,7 @@ def load_rtl_file(rtl_filename, tu_filename, functions): # type: (str, str, Lis # Find direct function calls match = re.match(CALL_REGEX, line) if match: - target = match.group("target") + target = match.group('target') if target not in last_function.calls: last_function.calls.append(target) continue @@ -197,7 +197,7 @@ def load_rtl_file(rtl_filename, tu_filename, functions): # type: (str, str, Lis # Find symbol references match = re.match(SYMBOL_REF_REGEX, line) if match: - target = match.group("target") + target = match.group('target') if target not in last_function.refs: last_function.refs.append(target) continue @@ -298,7 +298,7 @@ def match_rtl_funcs_to_symbols(rtl_functions, elfinfo): # type: (List[RtlFuncti symbols.append(sym_from) for target_rtl_func_name in source_rtl_func.calls + source_rtl_func.refs: - if "*.LC" in target_rtl_func_name: # skip local labels + if '*.LC' in target_rtl_func_name: # skip local labels continue maybe_sym_to = find_symbol_by_name(target_rtl_func_name, elfinfo, partial(match_local_target_func, source_rtl_func.rtl_filename, sym_from)) @@ -351,68 +351,68 @@ def main(): parser = argparse.ArgumentParser() parser.add_argument( - "--rtl-list", - help="File with the list of RTL files", - type=argparse.FileType("r"), + '--rtl-list', + help='File with the list of RTL files', + type=argparse.FileType('r'), ) 
parser.add_argument( - "--rtl-dir", help="Directory where to look for RTL files, recursively" + '--rtl-dir', help='Directory where to look for RTL files, recursively' ) parser.add_argument( - "--elf-file", + '--elf-file', required=True, - help="Program ELF file", - type=argparse.FileType("rb"), + help='Program ELF file', + type=argparse.FileType('rb'), ) - action_sub = parser.add_subparsers(dest="action") + action_sub = parser.add_subparsers(dest='action') find_refs_parser = action_sub.add_parser( - "find-refs", - help="List the references coming from a given list of source sections" - "to a given list of target sections.", + 'find-refs', + help='List the references coming from a given list of source sections' + 'to a given list of target sections.', ) find_refs_parser.add_argument( - "--from-sections", help="comma-separated list of source sections" + '--from-sections', help='comma-separated list of source sections' ) find_refs_parser.add_argument( - "--to-sections", help="comma-separated list of target sections" + '--to-sections', help='comma-separated list of target sections' ) find_refs_parser.add_argument( - "--exit-code", - action="store_true", - help="If set, exits with non-zero code when any references found", + '--exit-code', + action='store_true', + help='If set, exits with non-zero code when any references found', ) action_sub.add_parser( - "all-refs", - help="Print the list of all references", + 'all-refs', + help='Print the list of all references', ) parser.parse_args() args = parser.parse_args() if args.rtl_list: - with open(args.rtl_list, "r") as rtl_list_file: + with open(args.rtl_list, 'r') as rtl_list_file: rtl_list = [line.strip for line in rtl_list_file] else: if not args.rtl_dir: - raise RuntimeError("Either --rtl-list or --rtl-dir must be specified") - rtl_list = list(find_files_recursive(args.rtl_dir, ".expand")) + raise RuntimeError('Either --rtl-list or --rtl-dir must be specified') + rtl_list = list(find_files_recursive(args.rtl_dir, '.expand')) if not rtl_list: - raise RuntimeError("No RTL files specified") + raise RuntimeError('No RTL files specified') _, refs = get_symbols_and_refs(rtl_list, args.elf_file) - if args.action == "find-refs": - from_sections = args.from_sections.split(",") if args.from_sections else [] - to_sections = args.to_sections.split(",") if args.to_sections else [] + if args.action == 'find-refs': + from_sections = args.from_sections.split(',') if args.from_sections else [] + to_sections = args.to_sections.split(',') if args.to_sections else [] found = list_refs_from_to_sections( refs, from_sections, to_sections ) if args.exit_code and found: raise SystemExit(1) - elif args.action == "all-refs": + elif args.action == 'all-refs': for r in refs: print(str(r)) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/ci/check_codeowners.py b/tools/ci/check_codeowners.py index 665f97e47c..aa25b3cda3 100755 --- a/tools/ci/check_codeowners.py +++ b/tools/ci/check_codeowners.py @@ -24,8 +24,8 @@ import sys from idf_ci_utils import IDF_PATH -CODEOWNERS_PATH = os.path.join(IDF_PATH, ".gitlab", "CODEOWNERS") -CODEOWNER_GROUP_PREFIX = "@esp-idf-codeowners/" +CODEOWNERS_PATH = os.path.join(IDF_PATH, '.gitlab', 'CODEOWNERS') +CODEOWNER_GROUP_PREFIX = '@esp-idf-codeowners/' def get_all_files(): @@ -33,7 +33,7 @@ def get_all_files(): Get list of all file paths in the repository. 
""" # only split on newlines, since file names may contain spaces - return subprocess.check_output(["git", "ls-files"], cwd=IDF_PATH).decode("utf-8").strip().split('\n') + return subprocess.check_output(['git', 'ls-files'], cwd=IDF_PATH).decode('utf-8').strip().split('\n') def pattern_to_regex(pattern): @@ -93,7 +93,7 @@ def action_identify(args): with open(CODEOWNERS_PATH) as f: for line in f: line = line.strip() - if not line or line.startswith("#"): + if not line or line.startswith('#'): continue tokens = line.split() path_pattern = tokens[0] @@ -121,18 +121,18 @@ def action_ci_check(args): errors = [] def add_error(msg): - errors.append("{}:{}: {}".format(CODEOWNERS_PATH, line_no, msg)) + errors.append('{}:{}: {}'.format(CODEOWNERS_PATH, line_no, msg)) all_files = get_all_files() - prev_path_pattern = "" + prev_path_pattern = '' with open(CODEOWNERS_PATH) as f: for line_no, line in enumerate(f, start=1): # Skip empty lines and comments line = line.strip() - if line.startswith("# sort-order-reset"): - prev_path_pattern = "" + if line.startswith('# sort-order-reset'): + prev_path_pattern = '' - if not line or line.startswith("#"): + if not line or line.startswith('#'): continue # Each line has a form of " +" @@ -140,18 +140,18 @@ def action_ci_check(args): path_pattern = tokens[0] owners = tokens[1:] if not owners: - add_error("no owners specified for {}".format(path_pattern)) + add_error('no owners specified for {}'.format(path_pattern)) # Check that the file is sorted by path patterns - path_pattern_for_cmp = path_pattern.replace("-", "_") # ignore difference between _ and - for ordering + path_pattern_for_cmp = path_pattern.replace('-', '_') # ignore difference between _ and - for ordering if prev_path_pattern and path_pattern_for_cmp < prev_path_pattern: - add_error("file is not sorted: {} < {}".format(path_pattern_for_cmp, prev_path_pattern)) + add_error('file is not sorted: {} < {}'.format(path_pattern_for_cmp, prev_path_pattern)) prev_path_pattern = path_pattern_for_cmp # Check that the pattern matches at least one file files = files_by_pattern(all_files, path_pattern) if not files: - add_error("no files matched by pattern {}".format(path_pattern)) + add_error('no files matched by pattern {}'.format(path_pattern)) for o in owners: # Sanity-check the owner group name @@ -159,9 +159,9 @@ def action_ci_check(args): add_error("owner {} doesn't start with {}".format(o, CODEOWNER_GROUP_PREFIX)) if not errors: - print("No errors found.") + print('No errors found.') else: - print("Errors found!") + print('Errors found!') for e in errors: print(e) raise SystemExit(1) @@ -169,29 +169,29 @@ def action_ci_check(args): def main(): parser = argparse.ArgumentParser( - sys.argv[0], description="Internal helper script for working with the CODEOWNERS file." + sys.argv[0], description='Internal helper script for working with the CODEOWNERS file.' ) - subparsers = parser.add_subparsers(dest="action") + subparsers = parser.add_subparsers(dest='action') identify = subparsers.add_parser( - "identify", - help="List the owners of the specified path within IDF." + 'identify', + help='List the owners of the specified path within IDF.' 
"This command doesn't support files inside submodules, or files not added to git repository.", ) - identify.add_argument("path", help="Path of the file relative to the root of the repository") + identify.add_argument('path', help='Path of the file relative to the root of the repository') subparsers.add_parser( - "ci-check", - help="Check CODEOWNERS file: every line should match at least one file, sanity-check group names, " - "check that the file is sorted by paths", + 'ci-check', + help='Check CODEOWNERS file: every line should match at least one file, sanity-check group names, ' + 'check that the file is sorted by paths', ) test_pattern = subparsers.add_parser( - "test-pattern", - help="Print files in the repository for a given CODEOWNERS pattern. Useful when adding new rules." + 'test-pattern', + help='Print files in the repository for a given CODEOWNERS pattern. Useful when adding new rules.' ) - test_pattern.add_argument("--regex", action="store_true", help="Print the equivalent regular expression instead of the file list.") - test_pattern.add_argument("pattern", help="Path pattern to get the list of files for") + test_pattern.add_argument('--regex', action='store_true', help='Print the equivalent regular expression instead of the file list.') + test_pattern.add_argument('pattern', help='Path pattern to get the list of files for') args = parser.parse_args() @@ -199,10 +199,10 @@ def main(): parser.print_help() parser.exit(1) - action_func_name = "action_" + args.action.replace("-", "_") + action_func_name = 'action_' + args.action.replace('-', '_') action_func = globals()[action_func_name] action_func(args) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/ci/check_deprecated_kconfigs.py b/tools/ci/check_deprecated_kconfigs.py index 3182f3933a..dca9ddd0a0 100755 --- a/tools/ci/check_deprecated_kconfigs.py +++ b/tools/ci/check_deprecated_kconfigs.py @@ -14,8 +14,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import print_function -from __future__ import unicode_literals +from __future__ import print_function, unicode_literals import argparse import os @@ -47,7 +46,7 @@ def _parse_path(path, sep=None): def _valid_directory(path): if not os.path.isdir(path): - raise argparse.ArgumentTypeError("{} is not a valid directory!".format(path)) + raise argparse.ArgumentTypeError('{} is not a valid directory!'.format(path)) return path @@ -114,5 +113,5 @@ def main(): return 0 -if __name__ == "__main__": +if __name__ == '__main__': sys.exit(main()) diff --git a/tools/ci/check_examples_cmake_make.py b/tools/ci/check_examples_cmake_make.py index 12f3bd5a92..d142b161af 100644 --- a/tools/ci/check_examples_cmake_make.py +++ b/tools/ci/check_examples_cmake_make.py @@ -1,10 +1,10 @@ #!/usr/bin/env python -import os -import sys -import pprint import json +import os +import pprint import subprocess +import sys # ============================================================================= # Service funcs @@ -12,7 +12,7 @@ import subprocess def _build_path(path, *paths): - return str(os.path.normpath(os.path.join(path, *paths)).replace("\\", "/")) + return str(os.path.normpath(os.path.join(path, *paths)).replace('\\', '/')) def _unify_paths(path_list): @@ -20,7 +20,7 @@ def _unify_paths(path_list): def _exclude_by_pat_list(path_list, ignore_list): - print("- Applying ignore list") + print('- Applying ignore list') path_list_res = list(path_list) for ign in ignore_list: if len(ign.strip()): @@ -50,7 +50,7 @@ def get_idf_path(path, *paths): def _get_apps(target, build_system): - print("- Getting paths of apps") + print('- Getting paths of apps') args = [sys.executable, get_idf_path('tools/find_apps.py'), '-p', @@ -79,21 +79,21 @@ def get_apps(target, build_system, ignorelist): def get_cmake_ignore_list(): - print("- Getting CMake ignore list") + print('- Getting CMake ignore list') return _file2linelist( - get_idf_path("tools", "ci", - "check_examples_cmake_make-cmake_ignore.txt")) + get_idf_path('tools', 'ci', + 'check_examples_cmake_make-cmake_ignore.txt')) def get_make_ignore_list(): - print("- Getting Make ignore list") + print('- Getting Make ignore list') return _file2linelist( - get_idf_path("tools", "ci", - "check_examples_cmake_make-make_ignore.txt")) + get_idf_path('tools', 'ci', + 'check_examples_cmake_make-make_ignore.txt')) def diff(first, second): - print("- Comparing...") + print('- Comparing...') first = set(first) second = set(second) res = list(first - second) + list(second - first) @@ -103,21 +103,21 @@ def diff(first, second): def main(): cmake_ignore = get_cmake_ignore_list() make_ignore = get_make_ignore_list() - cmakes = get_apps("esp32", "cmake", cmake_ignore) - makes = get_apps("esp32", "make", make_ignore) + cmakes = get_apps('esp32', 'cmake', cmake_ignore) + makes = get_apps('esp32', 'make', make_ignore) res = diff(cmakes, makes) if len(res): pp = pprint.PrettyPrinter(indent=4) print( - "[ ERROR ] Some projects are not containing Make and Cmake project files:" + '[ ERROR ] Some projects are not containing Make and Cmake project files:' ) pp.pprint(res) - raise ValueError("Test is not passed") + raise ValueError('Test is not passed') else: - print("[ DONE ]") + print('[ DONE ]') -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/ci/check_kconfigs.py b/tools/ci/check_kconfigs.py index a12a11f186..5ac512f736 100755 --- a/tools/ci/check_kconfigs.py +++ b/tools/ci/check_kconfigs.py @@ -14,8 +14,7 @@ # See the License for the specific language 
governing permissions and # limitations under the License. -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import print_function, unicode_literals import argparse import os @@ -23,7 +22,7 @@ import re import sys from io import open -from idf_ci_utils import get_submodule_dirs, IDF_PATH +from idf_ci_utils import IDF_PATH, get_submodule_dirs # regular expression for matching Kconfig files RE_KCONFIG = r'^Kconfig(\.projbuild)?(\.in)?$' @@ -101,7 +100,7 @@ class SourceChecker(BaseChecker): if path in ['$COMPONENT_KCONFIGS_SOURCE_FILE', '$COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE']: pass elif not filename.startswith('Kconfig.'): - raise InputError(self.path_in_idf, line_number, "only filenames starting with Kconfig.* can be sourced", + raise InputError(self.path_in_idf, line_number, 'only filenames starting with Kconfig.* can be sourced', line.replace(path, os.path.join(os.path.dirname(path), 'Kconfig.' + filename))) @@ -124,7 +123,7 @@ class LineRuleChecker(BaseChecker): if rule[2]: line = rule[0].sub(rule[2], line) if len(errors) > 0: - raise InputError(self.path_in_idf, line_number, "; ".join(errors), line) + raise InputError(self.path_in_idf, line_number, '; '.join(errors), line) class IndentAndNameChecker(BaseChecker): @@ -369,7 +368,7 @@ class IndentAndNameChecker(BaseChecker): def valid_directory(path): if not os.path.isdir(path): - raise argparse.ArgumentTypeError("{} is not a valid directory!".format(path)) + raise argparse.ArgumentTypeError('{} is not a valid directory!'.format(path)) return path @@ -394,7 +393,7 @@ def validate_kconfig_file(kconfig_full_path, verbose=False): # type: (str, bool fail = True f_o.write(e.suggested_line) except UnicodeDecodeError: - raise ValueError("The encoding of {} is not Unicode.".format(kconfig_full_path)) + raise ValueError('The encoding of {} is not Unicode.'.format(kconfig_full_path)) if fail: print('\t{} has been saved with suggestions for resolving the issues.\n' @@ -475,5 +474,5 @@ def main(): return 0 -if __name__ == "__main__": +if __name__ == '__main__': sys.exit(main()) diff --git a/tools/ci/check_public_headers.py b/tools/ci/check_public_headers.py index 0f1de1e98f..49c410388e 100644 --- a/tools/ci/check_public_headers.py +++ b/tools/ci/check_public_headers.py @@ -17,18 +17,18 @@ # limitations under the License. 
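check_deprecated_kconfigs.py and check_kconfigs.py above each define the same directory-validating argparse type (_valid_directory / valid_directory). A minimal sketch of how such a type function plugs into a parser; the 'directory' argument name is illustrative:

import argparse
import os

def valid_directory(path):
    if not os.path.isdir(path):
        raise argparse.ArgumentTypeError('{} is not a valid directory!'.format(path))
    return path

parser = argparse.ArgumentParser()
parser.add_argument('directory', type=valid_directory)
args = parser.parse_args([os.getcwd()])  # a non-directory argument would exit with the error above
print(args.directory)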
# -from __future__ import print_function -from __future__ import unicode_literals -import re -import os -import subprocess -import json -import fnmatch +from __future__ import print_function, unicode_literals + import argparse +import fnmatch +import json +import os import queue -from threading import Thread, Event +import re +import subprocess import tempfile from io import open +from threading import Event, Thread class HeaderFailed(Exception): @@ -38,30 +38,30 @@ class HeaderFailed(Exception): class HeaderFailedSdkconfig(HeaderFailed): def __str__(self): - return "Sdkconfig Error" + return 'Sdkconfig Error' class HeaderFailedBuildError(HeaderFailed): def __str__(self): - return "Header Build Error" + return 'Header Build Error' class HeaderFailedCppGuardMissing(HeaderFailed): def __str__(self): - return "Header Missing C++ Guard" + return 'Header Missing C++ Guard' class HeaderFailedContainsCode(HeaderFailed): def __str__(self): - return "Header Produced non-zero object" + return 'Header Produced non-zero object' # Creates a temp file and returns both output as a string and a file name # -def exec_cmd_to_temp_file(what, suffix=""): +def exec_cmd_to_temp_file(what, suffix=''): out_file = tempfile.NamedTemporaryFile(suffix=suffix, delete=False) rc, out, err = exec_cmd(what, out_file) - with open(out_file.name, "r", encoding='utf-8') as f: + with open(out_file.name, 'r', encoding='utf-8') as f: out = f.read() return rc, out, err, out_file.name @@ -90,14 +90,14 @@ class PublicHeaderChecker: print(message) def __init__(self, verbose=False, jobs=1, prefix=None): - self.gcc = "{}gcc".format(prefix) - self.gpp = "{}g++".format(prefix) + self.gcc = '{}gcc'.format(prefix) + self.gpp = '{}g++'.format(prefix) self.verbose = verbose self.jobs = jobs self.prefix = prefix self.extern_c = re.compile(r'extern "C"') self.error_macro = re.compile(r'#error') - self.error_orphan_kconfig = re.compile(r"#error CONFIG_VARS_USED_WHILE_SDKCONFIG_NOT_INCLUDED") + self.error_orphan_kconfig = re.compile(r'#error CONFIG_VARS_USED_WHILE_SDKCONFIG_NOT_INCLUDED') self.kconfig_macro = re.compile(r'\bCONFIG_[A-Z0-9_]+') self.assembly_nocode = r'^\s*(\.file|\.text|\.ident).*$' self.check_threads = [] @@ -129,10 +129,10 @@ class PublicHeaderChecker: try: self.check_one_header(task, num) except HeaderFailed as e: - self.failed_queue.put("{}: Failed! {}".format(task, e)) + self.failed_queue.put('{}: Failed! {}'.format(task, e)) except Exception as e: # Makes sure any unexpected exceptions causes the program to terminate - self.failed_queue.put("{}: Failed! {}".format(task, e)) + self.failed_queue.put('{}: Failed! 
{}'.format(task, e)) self.terminate.set() raise @@ -174,7 +174,7 @@ class PublicHeaderChecker: self.compile_one_header(header) temp_header = None try: - _, _, _, temp_header = exec_cmd_to_temp_file(["sed", "/#include/d; /#error/d", header], suffix=".h") + _, _, _, temp_header = exec_cmd_to_temp_file(['sed', '/#include/d; /#error/d', header], suffix='.h') res = self.preprocess_one_header(temp_header, num, ignore_sdkconfig_issue=True) if res == self.PREPROC_OUT_SAME_HRD_FAILED: raise HeaderFailedCppGuardMissing() @@ -185,53 +185,53 @@ class PublicHeaderChecker: os.unlink(temp_header) def compile_one_header(self, header): - rc, out, err = exec_cmd([self.gcc, "-S", "-o-", "-include", header, self.main_c] + self.include_dir_flags) + rc, out, err = exec_cmd([self.gcc, '-S', '-o-', '-include', header, self.main_c] + self.include_dir_flags) if rc == 0: if not re.sub(self.assembly_nocode, '', out, flags=re.M).isspace(): raise HeaderFailedContainsCode() return # Header OK: produced zero code - self.log("{}: FAILED: compilation issue".format(header), True) + self.log('{}: FAILED: compilation issue'.format(header), True) self.log(err, True) raise HeaderFailedBuildError() def preprocess_one_header(self, header, num, ignore_sdkconfig_issue=False): - all_compilation_flags = ["-w", "-P", "-E", "-DESP_PLATFORM", "-include", header, self.main_c] + self.include_dir_flags + all_compilation_flags = ['-w', '-P', '-E', '-DESP_PLATFORM', '-include', header, self.main_c] + self.include_dir_flags if not ignore_sdkconfig_issue: # just strip commnets to check for CONFIG_... macros - rc, out, err = exec_cmd([self.gcc, "-fpreprocessed", "-dD", "-P", "-E", header] + self.include_dir_flags) + rc, out, err = exec_cmd([self.gcc, '-fpreprocessed', '-dD', '-P', '-E', header] + self.include_dir_flags) if re.search(self.kconfig_macro, out): # enable defined #error if sdkconfig.h not included - all_compilation_flags.append("-DIDF_CHECK_SDKCONFIG_INCLUDED") + all_compilation_flags.append('-DIDF_CHECK_SDKCONFIG_INCLUDED') try: # compile with C++, check for errors, outputs for a temp file - rc, cpp_out, err, cpp_out_file = exec_cmd_to_temp_file([self.gpp, "--std=c++17"] + all_compilation_flags) + rc, cpp_out, err, cpp_out_file = exec_cmd_to_temp_file([self.gpp, '--std=c++17'] + all_compilation_flags) if rc != 0: if re.search(self.error_macro, err): if re.search(self.error_orphan_kconfig, err): - self.log("{}: CONFIG_VARS_USED_WHILE_SDKCONFIG_NOT_INCLUDED".format(header), True) + self.log('{}: CONFIG_VARS_USED_WHILE_SDKCONFIG_NOT_INCLUDED'.format(header), True) return self.COMPILE_ERR_REF_CONFIG_HDR_FAILED - self.log("{}: Error directive failure: OK".format(header)) + self.log('{}: Error directive failure: OK'.format(header)) return self.COMPILE_ERR_ERROR_MACRO_HDR_OK - self.log("{}: FAILED: compilation issue".format(header), True) + self.log('{}: FAILED: compilation issue'.format(header), True) self.log(err) return self.COMPILE_ERR_HDR_FAILED # compile with C compiler, outputs to another temp file - rc, c99_out, err, c99_out_file = exec_cmd_to_temp_file([self.gcc, "--std=c99"] + all_compilation_flags) + rc, c99_out, err, c99_out_file = exec_cmd_to_temp_file([self.gcc, '--std=c99'] + all_compilation_flags) if rc != 0: - self.log("{} FAILED should never happen".format(header)) + self.log('{} FAILED should never happen'.format(header)) return self.COMPILE_ERR_HDR_FAILED # diff the two outputs - rc, diff, err = exec_cmd(["diff", c99_out_file, cpp_out_file]) + rc, diff, err = exec_cmd(['diff', c99_out_file, cpp_out_file]) if not diff 
or diff.isspace(): if not cpp_out or cpp_out.isspace(): - self.log("{} The same, but empty out - OK".format(header)) + self.log('{} The same, but empty out - OK'.format(header)) return self.PREPROC_OUT_ZERO_HDR_OK - self.log("{} FAILED C and C++ preprocessor output is the same!".format(header), True) + self.log('{} FAILED C and C++ preprocessor output is the same!'.format(header), True) return self.PREPROC_OUT_SAME_HRD_FAILED if re.search(self.extern_c, diff): - self.log("{} extern C present - OK".format(header)) + self.log('{} extern C present - OK'.format(header)) return self.PREPROC_OUT_DIFFERENT_WITH_EXT_C_HDR_OK - self.log("{} Different but no extern C - FAILED".format(header), True) + self.log('{} Different but no extern C - FAILED'.format(header), True) return self.PREPROC_OUT_DIFFERENT_NO_EXT_C_HDR_FAILED finally: os.unlink(cpp_out_file) @@ -243,32 +243,32 @@ class PublicHeaderChecker: # Get compilation data from an example to list all public header files def list_public_headers(self, ignore_dirs, ignore_files, only_dir=None): idf_path = os.getenv('IDF_PATH') - project_dir = os.path.join(idf_path, "examples", "get-started", "blink") - subprocess.check_call(["idf.py", "reconfigure"], cwd=project_dir) - build_commands_json = os.path.join(project_dir, "build", "compile_commands.json") - with open(build_commands_json, "r", encoding='utf-8') as f: - build_command = json.load(f)[0]["command"].split() + project_dir = os.path.join(idf_path, 'examples', 'get-started', 'blink') + subprocess.check_call(['idf.py', 'reconfigure'], cwd=project_dir) + build_commands_json = os.path.join(project_dir, 'build', 'compile_commands.json') + with open(build_commands_json, 'r', encoding='utf-8') as f: + build_command = json.load(f)[0]['command'].split() include_dir_flags = [] include_dirs = [] # process compilation flags (includes and defines) for item in build_command: - if item.startswith("-I"): + if item.startswith('-I'): include_dir_flags.append(item) - if "components" in item: + if 'components' in item: include_dirs.append(item[2:]) # Removing the leading "-I" - if item.startswith("-D"): + if item.startswith('-D'): include_dir_flags.append(item.replace('\\','')) # removes escaped quotes, eg: -DMBEDTLS_CONFIG_FILE=\\\"mbedtls/esp_config.h\\\" - include_dir_flags.append("-I" + os.path.join(project_dir, "build", "config")) - include_dir_flags.append("-DCI_HEADER_CHECK") - sdkconfig_h = os.path.join(project_dir, "build", "config", "sdkconfig.h") + include_dir_flags.append('-I' + os.path.join(project_dir, 'build', 'config')) + include_dir_flags.append('-DCI_HEADER_CHECK') + sdkconfig_h = os.path.join(project_dir, 'build', 'config', 'sdkconfig.h') # prepares a main_c file for easier sdkconfig checks and avoid compilers warning when compiling headers directly - with open(sdkconfig_h, "a") as f: - f.write("#define IDF_SDKCONFIG_INCLUDED") - main_c = os.path.join(project_dir, "build", "compile.c") - with open(main_c, "w") as f: - f.write("#if defined(IDF_CHECK_SDKCONFIG_INCLUDED) && ! defined(IDF_SDKCONFIG_INCLUDED)\n" - "#error CONFIG_VARS_USED_WHILE_SDKCONFIG_NOT_INCLUDED\n" - "#endif") + with open(sdkconfig_h, 'a') as f: + f.write('#define IDF_SDKCONFIG_INCLUDED') + main_c = os.path.join(project_dir, 'build', 'compile.c') + with open(main_c, 'w') as f: + f.write('#if defined(IDF_CHECK_SDKCONFIG_INCLUDED) && ! 
defined(IDF_SDKCONFIG_INCLUDED)\n' + '#error CONFIG_VARS_USED_WHILE_SDKCONFIG_NOT_INCLUDED\n' + '#endif') # processes public include dirs, removing ignored files all_include_files = [] files_to_check = [] @@ -277,7 +277,7 @@ class PublicHeaderChecker: self.log('{} - directory ignored (not in "{}")'.format(d, only_dir)) continue if os.path.relpath(d, idf_path).startswith(tuple(ignore_dirs)): - self.log("{} - directory ignored".format(d)) + self.log('{} - directory ignored'.format(d)) continue for root, dirnames, filenames in os.walk(d): for filename in fnmatch.filter(filenames, '*.h'): @@ -289,10 +289,10 @@ class PublicHeaderChecker: for f in all_include_files: rel_path_file = os.path.relpath(f, idf_path) if any([os.path.commonprefix([d, rel_path_file]) == d for d in ignore_dirs]): - self.log("{} - file ignored (inside ignore dir)".format(f)) + self.log('{} - file ignored (inside ignore dir)'.format(f)) continue if rel_path_file in ignore_files: - self.log("{} - file ignored".format(f)) + self.log('{} - file ignored'.format(f)) continue files_to_check.append(f) # removes duplicates and places headers into a work queue @@ -302,22 +302,22 @@ def check_all_headers(): - parser = argparse.ArgumentParser("Public header checker file") - parser.add_argument("--verbose", "-v", help="enables verbose mode", action="store_true") - parser.add_argument("--jobs", "-j", help="number of jobs to run checker", default=1, type=int) - parser.add_argument("--prefix", "-p", help="compiler prefix", default="xtensa-esp32-elf-", type=str) - parser.add_argument("--exclude-file", "-e", help="exception file", default="check_public_headers_exceptions.txt", type=str) - parser.add_argument("--only-dir", "-d", help="reduce the analysis to this directory only", default=None, type=str) + parser = argparse.ArgumentParser('Public header checker file') + parser.add_argument('--verbose', '-v', help='enables verbose mode', action='store_true') + parser.add_argument('--jobs', '-j', help='number of jobs to run checker', default=1, type=int) + parser.add_argument('--prefix', '-p', help='compiler prefix', default='xtensa-esp32-elf-', type=str) + parser.add_argument('--exclude-file', '-e', help='exception file', default='check_public_headers_exceptions.txt', type=str) + parser.add_argument('--only-dir', '-d', help='reduce the analysis to this directory only', default=None, type=str) args = parser.parse_args() # process excluded files and dirs exclude_file = os.path.join(os.path.dirname(__file__), args.exclude_file) - with open(exclude_file, "r", encoding='utf-8') as f: + with open(exclude_file, 'r', encoding='utf-8') as f: lines = [line.rstrip() for line in f] ignore_files = [] ignore_dirs = [] for line in lines: - if not line or line.isspace() or line.startswith("#"): + if not line or line.isspace() or line.startswith('#'): continue if os.path.isdir(line): ignore_dirs.append(line) @@ -334,9 +334,9 @@ def check_all_headers(): for failed in failures: print(failed) exit(1) - print("No errors found") + print('No errors found') except KeyboardInterrupt: - print("Keyboard interrupt") + print('Keyboard interrupt') if __name__ == '__main__': diff --git a/tools/ci/check_readme_links.py b/tools/ci/check_readme_links.py index f500eaac4f..8955db4be0 100755 --- a/tools/ci/check_readme_links.py +++ b/tools/ci/check_readme_links.py @@ -17,15 +17,15 @@ # limitations under the License.
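The check_public_headers.py hunks above are pure quote-style churn, but they walk through the script's core trick: preprocess each public header once with the C compiler and once with the C++ compiler, then diff the two outputs; a header passes the guard check only when the outputs differ and the difference contains extern "C". A rough standalone sketch of that idea, assuming a GCC-style cross toolchain on PATH (the xtensa-esp32-elf- prefix mirrors the script's default; the helper names here are illustrative, not the script's actual API):

import os
import subprocess
import tempfile

def preprocess(compiler, std, header, include_flags, main_c):
    # -P drops linemarkers so the C and C++ outputs are directly comparable
    cmd = [compiler, '--std=' + std, '-w', '-P', '-E', '-include', header, main_c] + include_flags
    return subprocess.run(cmd, capture_output=True, text=True).stdout

def has_cpp_guard(header, include_flags=(), prefix='xtensa-esp32-elf-'):
    # empty translation unit: all preprocessor output comes from the header itself
    with tempfile.NamedTemporaryFile('w', suffix='.c', delete=False) as f:
        main_c = f.name
    try:
        c_out = preprocess(prefix + 'gcc', 'c99', header, list(include_flags), main_c)
        cpp_out = preprocess(prefix + 'g++', 'c++17', header, list(include_flags), main_c)
    finally:
        os.unlink(main_c)
    # identical outputs mean the header contributed nothing C++-specific,
    # i.e. the extern "C" guard is missing
    return c_out != cpp_out and 'extern "C"' in cpp_out

Diffing whole preprocessor outputs rather than grepping the header text is what lets the real script cope with guards that are hidden behind macros or pulled in indirectly.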
# -import os -import re -import os.path -import urllib.request -import urllib.error -import concurrent.futures import argparse +import concurrent.futures +import os +import os.path +import re +import urllib.error +import urllib.request +from collections import defaultdict, namedtuple from pathlib import Path -from collections import namedtuple, defaultdict EXCLUDE_DOCS_LIST = ['examples/peripherals/secure_element/atecc608_ecdsa/components/esp-cryptoauthlib/cryptoauthlib/**'] @@ -43,7 +43,7 @@ class ReadmeLinkError(Exception): class RelativeLinkError(ReadmeLinkError): def __str__(self): - return "Relative link error, file - {} not found, linked from {}".format(self.url, self.file) + return 'Relative link error, file - {} not found, linked from {}'.format(self.url, self.file) class UrlLinkError(ReadmeLinkError): @@ -53,7 +53,7 @@ class UrlLinkError(ReadmeLinkError): def __str__(self): files = [str(f) for f in self.file] - return "URL error, url - {} in files - {} is not accessible, request returned {}".format(self.url, ", ".join(files), self.error_code) + return 'URL error, url - {} in files - {} is not accessible, request returned {}'.format(self.url, ', '.join(files), self.error_code) # we do not want the test to fail just due to bad network conditions; for non-404 errors we simply print a warning @@ -65,9 +65,9 @@ def check_url(url, files, timeout): if e.code == 404: raise UrlLinkError(files, url, str(e)) else: - print("Unable to access {}, err = {}".format(url, str(e))) + print('Unable to access {}, err = {}'.format(url, str(e))) except Exception as e: - print("Unable to access {}, err = {}".format(url, str(e))) + print('Unable to access {}, err = {}'.format(url, str(e))) def check_web_links(web_links): @@ -93,19 +93,19 @@ def check_file_links(file_links): if not Path.exists(link_path): errors.append(RelativeLinkError(link.file, link.url)) - print("Found {} errors with relative links".format(len(errors))) + print('Found {} errors with relative links'.format(len(errors))) return errors def get_md_links(folder): - MD_LINK_RE = r"\[.+?\]\((.+?)(#.+)?\)" + MD_LINK_RE = r'\[.+?\]\((.+?)(#.+)?\)' idf_path = Path(os.getenv('IDF_PATH')) links = [] for path in (idf_path / folder).rglob('*.md'): if any([path.relative_to(idf_path).match(exclude_doc) for exclude_doc in EXCLUDE_DOCS_LIST]): - print("{} - excluded".format(path)) + print('{} - excluded'.format(path)) continue with path.open(encoding='utf8') as f: @@ -123,7 +123,7 @@ def get_md_links(folder): def check_readme_links(args): links = get_md_links('examples') - print("Found {} links".format(len(links))) + print('Found {} links'.format(len(links))) errors = [] @@ -145,7 +145,7 @@ def check_readme_links(args): if not args.skip_weburl: errors.extend(check_web_links(web_links)) - print("Found {} errors:".format(len(errors))) + print('Found {} errors:'.format(len(errors))) for e in errors: print(e) if errors: @@ -155,7 +155,7 @@ if __name__ == '__main__': parser = argparse.ArgumentParser(description='check_readme_links.py: Checks for dead links in example READMEs', prog='check_readme_links.py') - parser.add_argument("--skip-weburl", "-w", action='store_true', help="Skip checking of web URLs, only check links to local files") + parser.add_argument('--skip-weburl', '-w', action='store_true', help='Skip checking of web URLs, only check links to local files') args = parser.parse_args() check_readme_links(args) diff --git a/tools/ci/checkout_project_ref.py b/tools/ci/checkout_project_ref.py index 175c6fe295..975b7e9f29
100755 --- a/tools/ci/checkout_project_ref.py +++ b/tools/ci/checkout_project_ref.py @@ -3,14 +3,13 @@ # internal use only # called by CI jobs when it uses a project related to IDF -import os -import json import argparse -import subprocess +import json +import os import re +import subprocess - -IDF_GIT_DESCRIBE_PATTERN = re.compile(r"^v(\d)\.(\d)") +IDF_GIT_DESCRIBE_PATTERN = re.compile(r'^v(\d)\.(\d)') RETRY_COUNT = 3 @@ -18,8 +17,8 @@ def get_customized_project_revision(proj_name): """ get customized project revision defined in bot message """ - revision = "" - customized_project_revisions = os.getenv("BOT_CUSTOMIZED_REVISION") + revision = '' + customized_project_revisions = os.getenv('BOT_CUSTOMIZED_REVISION') if customized_project_revisions: customized_project_revisions = json.loads(customized_project_revisions) try: @@ -35,9 +34,9 @@ def target_branch_candidates(proj_name): """ candidates = [ # branch name (or tag name) of current IDF - os.getenv("CI_COMMIT_REF_NAME"), + os.getenv('CI_COMMIT_REF_NAME'), # CI_MERGE_REQUEST_TARGET_BRANCH_NAME - os.getenv("CI_MERGE_REQUEST_TARGET_BRANCH_NAME"), + os.getenv('CI_MERGE_REQUEST_TARGET_BRANCH_NAME'), ] customized_candidate = get_customized_project_revision(proj_name) if customized_candidate: @@ -46,16 +45,16 @@ def target_branch_candidates(proj_name): # branch name read from IDF try: - git_describe = subprocess.check_output(["git", "describe", "HEAD"]) + git_describe = subprocess.check_output(['git', 'describe', 'HEAD']) match = IDF_GIT_DESCRIBE_PATTERN.search(git_describe.decode()) if match: major_revision = match.group(1) minor_revision = match.group(2) # release branch - candidates.append("release/v{}.{}".format(major_revision, minor_revision)) + candidates.append('release/v{}.{}'.format(major_revision, minor_revision)) # branch to match all major branches, like v3.x or v3 - candidates.append("release/v{}.x".format(major_revision)) - candidates.append("release/v{}".format(major_revision)) + candidates.append('release/v{}.x'.format(major_revision)) + candidates.append('release/v{}'.format(major_revision)) except subprocess.CalledProcessError: # this should not happen as IDF should have describe message pass @@ -63,14 +62,14 @@ def target_branch_candidates(proj_name): return [c for c in candidates if c] # filter out null value -if __name__ == "__main__": +if __name__ == '__main__': parser = argparse.ArgumentParser() - parser.add_argument("project", - help="the name of project") - parser.add_argument("project_relative_path", - help="relative path of project to IDF repository directory") + parser.add_argument('project', + help='the name of project') + parser.add_argument('project_relative_path', + help='relative path of project to IDF repository directory') parser.add_argument('--customized_only', action='store_true', - help="Only to find customized revision") + help='Only to find customized revision') args = parser.parse_args() @@ -83,10 +82,10 @@ if __name__ == "__main__": # change to project dir for checkout os.chdir(args.project_relative_path) - ref_to_use = "" + ref_to_use = '' for candidate in candidate_branches: # check if candidate branch exists - branch_match = subprocess.check_output(["git", "branch", "-a", "--list", "origin/" + candidate]) + branch_match = subprocess.check_output(['git', 'branch', '-a', '--list', 'origin/' + candidate]) if branch_match: ref_to_use = candidate break @@ -95,13 +94,13 @@ if __name__ == "__main__": for _ in range(RETRY_COUNT): # Add retry for projects with git-lfs try: - subprocess.check_call(["git", 
"checkout", "-f", ref_to_use], stdout=subprocess.PIPE) # not print the stdout - print("CI using ref {} for project {}".format(ref_to_use, args.project)) + subprocess.check_call(['git', 'checkout', '-f', ref_to_use], stdout=subprocess.PIPE) # not print the stdout + print('CI using ref {} for project {}'.format(ref_to_use, args.project)) break except subprocess.CalledProcessError: pass else: - print("Failed to use ref {} for project {}".format(ref_to_use, args.project)) + print('Failed to use ref {} for project {}'.format(ref_to_use, args.project)) exit(1) else: - print("using default branch") + print('using default branch') diff --git a/tools/ci/ci_fetch_submodule.py b/tools/ci/ci_fetch_submodule.py index 9d30cb6958..23ad870158 100644 --- a/tools/ci/ci_fetch_submodule.py +++ b/tools/ci/ci_fetch_submodule.py @@ -3,26 +3,26 @@ # internal use only for CI # download archive of one commit instead of cloning entire submodule repo -import re -import os -import subprocess import argparse +import os +import re import shutil +import subprocess import time import gitlab_api SUBMODULE_PATTERN = re.compile(r"\[submodule \"([^\"]+)\"]") -PATH_PATTERN = re.compile(r"path\s+=\s+(\S+)") -URL_PATTERN = re.compile(r"url\s+=\s+(\S+)") +PATH_PATTERN = re.compile(r'path\s+=\s+(\S+)') +URL_PATTERN = re.compile(r'url\s+=\s+(\S+)') -SUBMODULE_ARCHIVE_TEMP_FOLDER = "submodule_archive" +SUBMODULE_ARCHIVE_TEMP_FOLDER = 'submodule_archive' class SubModule(object): # We don't need to support recursive submodule clone now - GIT_LS_TREE_OUTPUT_PATTERN = re.compile(r"\d+\s+commit\s+([0-9a-f]+)\s+") + GIT_LS_TREE_OUTPUT_PATTERN = re.compile(r'\d+\s+commit\s+([0-9a-f]+)\s+') def __init__(self, gitlab_inst, path, url): self.path = path @@ -31,7 +31,7 @@ class SubModule(object): self.commit_id = self._get_commit_id(path) def _get_commit_id(self, path): - output = subprocess.check_output(["git", "ls-tree", "HEAD", path]) + output = subprocess.check_output(['git', 'ls-tree', 'HEAD', path]) output = output.decode() # example output: 160000 commit d88a262fbdf35e5abb372280eb08008749c3faa0 components/esp_wifi/lib match = self.GIT_LS_TREE_OUTPUT_PATTERN.search(output) @@ -40,11 +40,11 @@ class SubModule(object): def _get_project_id(self, url): base_name = os.path.basename(url) project_id = self.gitlab_inst.get_project_id(os.path.splitext(base_name)[0], # remove .git - namespace="espressif") + namespace='espressif') return project_id def download_archive(self): - print("Update submodule: {}: {}".format(self.path, self.commit_id)) + print('Update submodule: {}: {}'.format(self.path, self.commit_id)) path_name = self.gitlab_inst.download_archive(self.commit_id, SUBMODULE_ARCHIVE_TEMP_FOLDER, self.project_id) renamed_path = os.path.join(os.path.dirname(path_name), os.path.basename(self.path)) @@ -56,7 +56,7 @@ class SubModule(object): def update_submodule(git_module_file, submodules_to_update): gitlab_inst = gitlab_api.Gitlab() submodules = [] - with open(git_module_file, "r") as f: + with open(git_module_file, 'r') as f: data = f.read() match = SUBMODULE_PATTERN.search(data) while True: @@ -90,18 +90,18 @@ def update_submodule(git_module_file, submodules_to_update): if __name__ == '__main__': start_time = time.time() parser = argparse.ArgumentParser() - parser.add_argument("--repo_path", "-p", default=".", help="repo path") - parser.add_argument("--submodule", "-s", default="all", - help="Submodules to update. By default update all submodules. " - "For multiple submodules, separate them with `;`. 
" - "`all` and `none` are special values that indicates we fetch all / none submodules") + parser.add_argument('--repo_path', '-p', default='.', help='repo path') + parser.add_argument('--submodule', '-s', default='all', + help='Submodules to update. By default update all submodules. ' + 'For multiple submodules, separate them with `;`. ' + '`all` and `none` are special values that indicates we fetch all / none submodules') args = parser.parse_args() - if args.submodule == "none": + if args.submodule == 'none': print("don't need to update submodules") exit(0) - if args.submodule == "all": + if args.submodule == 'all': _submodules = [] else: - _submodules = args.submodule.split(";") - update_submodule(os.path.join(args.repo_path, ".gitmodules"), _submodules) - print("total time spent on update submodule: {:.02f}s".format(time.time() - start_time)) + _submodules = args.submodule.split(';') + update_submodule(os.path.join(args.repo_path, '.gitmodules'), _submodules) + print('total time spent on update submodule: {:.02f}s'.format(time.time() - start_time)) diff --git a/tools/ci/deploy_docs.py b/tools/ci/deploy_docs.py index 2a17dda2b1..8e5613f3e5 100755 --- a/tools/ci/deploy_docs.py +++ b/tools/ci/deploy_docs.py @@ -22,9 +22,10 @@ import os import os.path import re import stat -import sys import subprocess +import sys import tarfile + import packaging.version @@ -34,86 +35,86 @@ def env(variable, default=None): # import sanitize_version from the docs directory, shared with here -sys.path.append(os.path.join(env("IDF_PATH"), "docs")) +sys.path.append(os.path.join(env('IDF_PATH'), 'docs')) from sanitize_version import sanitize_version # noqa def main(): # if you get KeyErrors on the following lines, it's probably because you're not running in Gitlab CI - git_ver = env("GIT_VER") # output of git describe --always - ci_ver = env("CI_COMMIT_REF_NAME", git_ver) # branch or tag we're building for (used for 'release' & URL) + git_ver = env('GIT_VER') # output of git describe --always + ci_ver = env('CI_COMMIT_REF_NAME', git_ver) # branch or tag we're building for (used for 'release' & URL) version = sanitize_version(ci_ver) - print("Git version: {}".format(git_ver)) - print("CI Version: {}".format(ci_ver)) - print("Deployment version: {}".format(version)) + print('Git version: {}'.format(git_ver)) + print('CI Version: {}'.format(ci_ver)) + print('Deployment version: {}'.format(version)) if not version: - raise RuntimeError("A version is needed to deploy") + raise RuntimeError('A version is needed to deploy') - build_dir = env("DOCS_BUILD_DIR") # top-level local build dir, where docs have already been built + build_dir = env('DOCS_BUILD_DIR') # top-level local build dir, where docs have already been built if not build_dir: - raise RuntimeError("Valid DOCS_BUILD_DIR is needed to deploy") + raise RuntimeError('Valid DOCS_BUILD_DIR is needed to deploy') - url_base = env("DOCS_DEPLOY_URL_BASE") # base for HTTP URLs, used to print the URL to the log after deploying + url_base = env('DOCS_DEPLOY_URL_BASE') # base for HTTP URLs, used to print the URL to the log after deploying - docs_server = env("DOCS_DEPLOY_SERVER") # ssh server to deploy to - docs_user = env("DOCS_DEPLOY_SERVER_USER") - docs_path = env("DOCS_DEPLOY_PATH") # filesystem path on DOCS_SERVER + docs_server = env('DOCS_DEPLOY_SERVER') # ssh server to deploy to + docs_user = env('DOCS_DEPLOY_SERVER_USER') + docs_path = env('DOCS_DEPLOY_PATH') # filesystem path on DOCS_SERVER if not docs_server: - raise RuntimeError("Valid DOCS_DEPLOY_SERVER is 
needed to deploy") + raise RuntimeError('Valid DOCS_DEPLOY_SERVER is needed to deploy') if not docs_user: - raise RuntimeError("Valid DOCS_DEPLOY_SERVER_USER is needed to deploy") + raise RuntimeError('Valid DOCS_DEPLOY_SERVER_USER is needed to deploy') - docs_server = "{}@{}".format(docs_user, docs_server) + docs_server = '{}@{}'.format(docs_user, docs_server) if not docs_path: - raise RuntimeError("Valid DOCS_DEPLOY_PATH is needed to deploy") + raise RuntimeError('Valid DOCS_DEPLOY_PATH is needed to deploy') - print("DOCS_DEPLOY_SERVER {} DOCS_DEPLOY_PATH {}".format(docs_server, docs_path)) + print('DOCS_DEPLOY_SERVER {} DOCS_DEPLOY_PATH {}'.format(docs_server, docs_path)) tarball_path, version_urls = build_doc_tarball(version, git_ver, build_dir) deploy(version, tarball_path, docs_path, docs_server) - print("Docs URLs:") + print('Docs URLs:') doc_deploy_type = os.getenv('TYPE') for vurl in version_urls: language, _, target = vurl.split('/') tag = '{}_{}'.format(language, target) - url = "{}/{}/index.html".format(url_base, vurl) # (index.html needed for the preview server) - url = re.sub(r"([^:])//", r"\1/", url) # get rid of any // that isn't in the https:// part + url = '{}/{}/index.html'.format(url_base, vurl) # (index.html needed for the preview server) + url = re.sub(r'([^:])//', r'\1/', url) # get rid of any // that isn't in the https:// part print('[document {}][{}] {}'.format(doc_deploy_type, tag, url)) # note: it would be neater to use symlinks for stable, but because of the directory order # (language first) it's kind of a pain to do on a remote server, so we just repeat the # process but call the version 'stable' this time if is_stable_version(version): - print("Deploying again as stable version...") - tarball_path, version_urls = build_doc_tarball("stable", git_ver, build_dir) - deploy("stable", tarball_path, docs_path, docs_server) + print('Deploying again as stable version...') + tarball_path, version_urls = build_doc_tarball('stable', git_ver, build_dir) + deploy('stable', tarball_path, docs_path, docs_server) def deploy(version, tarball_path, docs_path, docs_server): def run_ssh(commands): """ Log into docs_server and run a sequence of commands using ssh """ - print("Running ssh: {}".format(commands)) - subprocess.run(["ssh", "-o", "BatchMode=yes", docs_server, "-x", " && ".join(commands)], check=True) + print('Running ssh: {}'.format(commands)) + subprocess.run(['ssh', '-o', 'BatchMode=yes', docs_server, '-x', ' && '.join(commands)], check=True) # copy the version tarball to the server - run_ssh(["mkdir -p {}".format(docs_path)]) - print("Running scp {} to {}".format(tarball_path, "{}:{}".format(docs_server, docs_path))) - subprocess.run(["scp", "-B", tarball_path, "{}:{}".format(docs_server, docs_path)], check=True) + run_ssh(['mkdir -p {}'.format(docs_path)]) + print('Running scp {} to {}'.format(tarball_path, '{}:{}'.format(docs_server, docs_path))) + subprocess.run(['scp', '-B', tarball_path, '{}:{}'.format(docs_server, docs_path)], check=True) tarball_name = os.path.basename(tarball_path) - run_ssh(["cd {}".format(docs_path), - "rm -rf ./*/{}".format(version), # remove any pre-existing docs matching this version - "tar -zxvf {}".format(tarball_name), # untar the archive with the new docs - "rm {}".format(tarball_name)]) + run_ssh(['cd {}'.format(docs_path), + 'rm -rf ./*/{}'.format(version), # remove any pre-existing docs matching this version + 'tar -zxvf {}'.format(tarball_name), # untar the archive with the new docs + 'rm {}'.format(tarball_name)]) # Note: 
deleting and then extracting the archive is a bit awkward for updating stable/latest/etc # as the version will be invalid for a window of time. Better to do it atomically, but this is @@ -124,21 +125,21 @@ def build_doc_tarball(version, git_ver, build_dir): """ Make a tar.gz archive of the docs, in the directory structure used to deploy as the given version """ version_paths = [] - tarball_path = "{}/{}.tar.gz".format(build_dir, version) + tarball_path = '{}/{}.tar.gz'.format(build_dir, version) # find all the 'html/' directories under build_dir - html_dirs = glob.glob("{}/**/html/".format(build_dir), recursive=True) - print("Found %d html directories" % len(html_dirs)) + html_dirs = glob.glob('{}/**/html/'.format(build_dir), recursive=True) + print('Found %d html directories' % len(html_dirs)) - pdfs = glob.glob("{}/**/latex/build/*.pdf".format(build_dir), recursive=True) - print("Found %d PDFs in latex directories" % len(pdfs)) + pdfs = glob.glob('{}/**/latex/build/*.pdf'.format(build_dir), recursive=True) + print('Found %d PDFs in latex directories' % len(pdfs)) # add symlinks for stable and latest and add them to the PDF list symlinks = create_and_add_symlinks(version, git_ver, pdfs) def not_sources_dir(ti): """ Filter the _sources directories out of the tarballs """ - if ti.name.endswith("/_sources"): + if ti.name.endswith('/_sources'): return None ti.mode |= stat.S_IWGRP # make everything group-writeable @@ -149,7 +150,7 @@ def build_doc_tarball(version, git_ver, build_dir): except OSError: pass - with tarfile.open(tarball_path, "w:gz") as tarball: + with tarfile.open(tarball_path, 'w:gz') as tarball: for html_dir in html_dirs: # html_dir has the form '<ignored>/<language>/<target>/html/' target_dirname = os.path.dirname(os.path.dirname(html_dir)) @@ -157,7 +158,7 @@ def build_doc_tarball(version, git_ver, build_dir): language = os.path.basename(os.path.dirname(target_dirname)) # when deploying, we want the top-level directory layout 'language/version/target' - archive_path = "{}/{}/{}".format(language, version, target) + archive_path = '{}/{}/{}'.format(language, version, target) print("Archiving '{}' as '{}'...".format(html_dir, archive_path)) tarball.add(html_dir, archive_path, filter=not_sources_dir) version_paths.append(archive_path) @@ -171,7 +172,7 @@ def build_doc_tarball(version, git_ver, build_dir): language = os.path.basename(os.path.dirname(target_dirname)) # when deploying, we want the layout 'language/version/target/pdf' - archive_path = "{}/{}/{}/{}".format(language, version, target, pdf_filename) + archive_path = '{}/{}/{}/{}'.format(language, version, target, pdf_filename) print("Archiving '{}' as '{}'...".format(pdf_path, archive_path)) tarball.add(pdf_path, archive_path) @@ -192,34 +193,34 @@ def create_and_add_symlinks(version, git_ver, pdfs): symlinks.append(symlink_path) pdfs.extend(symlinks) - print("Found %d PDFs in latex directories after adding symlink" % len(pdfs)) + print('Found %d PDFs in latex directories after adding symlink' % len(pdfs)) return symlinks def is_stable_version(version): """ Heuristic for whether this is the latest stable release """ - if not version.startswith("v"): + if not version.startswith('v'): return False # branch name - if "-" in version: + if '-' in version: return False # prerelease tag - git_out = subprocess.check_output(["git", "tag", "-l"]).decode("utf-8") + git_out = subprocess.check_output(['git', 'tag', '-l']).decode('utf-8') - versions = [v.strip() for v in git_out.split("\n")] - versions = [v for v in versions if re.match(r"^v[\d\.]+$", v)] # include
vX.Y.Z only + versions = [v.strip() for v in git_out.split('\n')] + versions = [v for v in versions if re.match(r'^v[\d\.]+$', v)] # include vX.Y.Z only versions = [packaging.version.parse(v) for v in versions] max_version = max(versions) if max_version.public != version[1:]: - print("Stable version is v{}. This version is {}.".format(max_version.public, version)) + print('Stable version is v{}. This version is {}.'.format(max_version.public, version)) return False else: - print("This version {} is the stable version".format(version)) + print('This version {} is the stable version'.format(version)) return True -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/ci/normalize_clangtidy_path.py b/tools/ci/normalize_clangtidy_path.py index 153ca620ce..9f7a26df5a 100755 --- a/tools/ci/normalize_clangtidy_path.py +++ b/tools/ci/normalize_clangtidy_path.py @@ -1,7 +1,7 @@ #!/usr/bin/env python import argparse import re -from os.path import join, normpath, dirname, relpath +from os.path import dirname, join, normpath, relpath CLANG_TIDY_REGEX = re.compile(r'(.+|[a-zA-Z]:\\\\.+):([0-9]+):([0-9]+): ([^:]+): (.+)') diff --git a/tools/ci/python_packages/gitlab_api.py b/tools/ci/python_packages/gitlab_api.py index 4ec2757cfb..ed0d947354 100644 --- a/tools/ci/python_packages/gitlab_api.py +++ b/tools/ci/python_packages/gitlab_api.py @@ -1,8 +1,8 @@ +import argparse import os import re -import argparse -import tempfile import tarfile +import tempfile import zipfile from functools import wraps @@ -10,21 +10,21 @@ import gitlab class Gitlab(object): - JOB_NAME_PATTERN = re.compile(r"(\w+)(\s+(\d+)/(\d+))?") + JOB_NAME_PATTERN = re.compile(r'(\w+)(\s+(\d+)/(\d+))?') DOWNLOAD_ERROR_MAX_RETRIES = 3 def __init__(self, project_id=None): - config_data_from_env = os.getenv("PYTHON_GITLAB_CONFIG") + config_data_from_env = os.getenv('PYTHON_GITLAB_CONFIG') if config_data_from_env: # prefer to load config from env variable - with tempfile.NamedTemporaryFile("w", delete=False) as temp_file: + with tempfile.NamedTemporaryFile('w', delete=False) as temp_file: temp_file.write(config_data_from_env) config_files = [temp_file.name] else: # otherwise try to use config file at local filesystem config_files = None - gitlab_id = os.getenv("LOCAL_GITLAB_HTTPS_HOST") # if None, will use the default gitlab server + gitlab_id = os.getenv('LOCAL_GITLAB_HTTPS_HOST') # if None, will use the default gitlab server self.gitlab_inst = gitlab.Gitlab.from_config(gitlab_id=gitlab_id, config_files=config_files) self.gitlab_inst.auth() if project_id: @@ -46,7 +46,7 @@ class Gitlab(object): if len(projects) == 1: project_id = project.id break - if project.namespace["path"] == namespace: + if project.namespace['path'] == namespace: project_id = project.id break else: @@ -65,7 +65,7 @@ class Gitlab(object): with tempfile.NamedTemporaryFile(delete=False) as temp_file: job.artifacts(streamed=True, action=temp_file.write) - with zipfile.ZipFile(temp_file.name, "r") as archive_file: + with zipfile.ZipFile(temp_file.name, 'r') as archive_file: archive_file.extractall(destination) def retry_download(func): @@ -120,12 +120,12 @@ class Gitlab(object): except OSError: # already exists pass - with open(file_path, "wb") as f: + with open(file_path, 'wb') as f: f.write(data) return raw_data_list - def find_job_id(self, job_name, pipeline_id=None, job_status="success"): + def find_job_id(self, job_name, pipeline_id=None, job_status='success'): """ Get Job ID from job name of specific pipeline @@ -137,14 +137,14 @@ class 
Gitlab(object): """ job_id_list = [] if pipeline_id is None: - pipeline_id = os.getenv("CI_PIPELINE_ID") + pipeline_id = os.getenv('CI_PIPELINE_ID') pipeline = self.project.pipelines.get(pipeline_id) jobs = pipeline.jobs.list(all=True) for job in jobs: match = self.JOB_NAME_PATTERN.match(job.name) if match: if match.group(1) == job_name and job.status == job_status: - job_id_list.append({"id": job.id, "parallel_num": match.group(3)}) + job_id_list.append({'id': job.id, 'parallel_num': match.group(3)}) return job_id_list @retry_download @@ -166,12 +166,12 @@ class Gitlab(object): try: project.repository_archive(sha=ref, streamed=True, action=temp_file.write) except gitlab.GitlabGetError as e: - print("Failed to archive from project {}".format(project_id)) + print('Failed to archive from project {}'.format(project_id)) raise e - print("archive size: {:.03f}MB".format(float(os.path.getsize(temp_file.name)) / (1024 * 1024))) + print('archive size: {:.03f}MB'.format(float(os.path.getsize(temp_file.name)) / (1024 * 1024))) - with tarfile.open(temp_file.name, "r") as archive_file: + with tarfile.open(temp_file.name, 'r') as archive_file: root_name = archive_file.getnames()[0] archive_file.extractall(destination) @@ -180,27 +180,27 @@ class Gitlab(object): if __name__ == '__main__': parser = argparse.ArgumentParser() - parser.add_argument("action") - parser.add_argument("project_id", type=int) - parser.add_argument("--pipeline_id", "-i", type=int, default=None) - parser.add_argument("--ref", "-r", default="master") - parser.add_argument("--job_id", "-j", type=int, default=None) - parser.add_argument("--job_name", "-n", default=None) - parser.add_argument("--project_name", "-m", default=None) - parser.add_argument("--destination", "-d", default=None) - parser.add_argument("--artifact_path", "-a", nargs="*", default=None) + parser.add_argument('action') + parser.add_argument('project_id', type=int) + parser.add_argument('--pipeline_id', '-i', type=int, default=None) + parser.add_argument('--ref', '-r', default='master') + parser.add_argument('--job_id', '-j', type=int, default=None) + parser.add_argument('--job_name', '-n', default=None) + parser.add_argument('--project_name', '-m', default=None) + parser.add_argument('--destination', '-d', default=None) + parser.add_argument('--artifact_path', '-a', nargs='*', default=None) args = parser.parse_args() gitlab_inst = Gitlab(args.project_id) - if args.action == "download_artifacts": + if args.action == 'download_artifacts': gitlab_inst.download_artifacts(args.job_id, args.destination) - if args.action == "download_artifact": + if args.action == 'download_artifact': gitlab_inst.download_artifact(args.job_id, args.artifact_path, args.destination) - elif args.action == "find_job_id": + elif args.action == 'find_job_id': job_ids = gitlab_inst.find_job_id(args.job_name, args.pipeline_id) - print(";".join([",".join([str(j["id"]), j["parallel_num"]]) for j in job_ids])) - elif args.action == "download_archive": + print(';'.join([','.join([str(j['id']), j['parallel_num']]) for j in job_ids])) + elif args.action == 'download_archive': gitlab_inst.download_archive(args.ref, args.destination) - elif args.action == "get_project_id": + elif args.action == 'get_project_id': ret = gitlab_inst.get_project_id(args.project_name) - print("project id: {}".format(ret)) + print('project id: {}'.format(ret)) diff --git a/tools/ci/python_packages/idf_http_server_test/adder.py b/tools/ci/python_packages/idf_http_server_test/adder.py index 46eb75a557..2597d42477 100644 --- 
a/tools/ci/python_packages/idf_http_server_test/adder.py +++ b/tools/ci/python_packages/idf_http_server_test/adder.py @@ -14,12 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import print_function -from __future__ import unicode_literals -from builtins import str -from builtins import range -import http.client +from __future__ import print_function, unicode_literals + import argparse +import http.client +from builtins import range, str from tiny_test_fw import Utility @@ -33,41 +32,41 @@ def end_session(conn): def getreq(conn, path, verbose=False): - conn.request("GET", path) + conn.request('GET', path) resp = conn.getresponse() data = resp.read() if verbose: - Utility.console_log("GET : " + path) - Utility.console_log("Status : " + resp.status) - Utility.console_log("Reason : " + resp.reason) - Utility.console_log("Data length : " + str(len(data))) - Utility.console_log("Data content : " + data) + Utility.console_log('GET : ' + path) + Utility.console_log('Status : ' + str(resp.status)) + Utility.console_log('Reason : ' + resp.reason) + Utility.console_log('Data length : ' + str(len(data))) + Utility.console_log('Data content : ' + str(data)) return data def postreq(conn, path, data, verbose=False): - conn.request("POST", path, data) + conn.request('POST', path, data) resp = conn.getresponse() data = resp.read() if verbose: - Utility.console_log("POST : " + data) - Utility.console_log("Status : " + resp.status) - Utility.console_log("Reason : " + resp.reason) - Utility.console_log("Data length : " + str(len(data))) - Utility.console_log("Data content : " + data) + Utility.console_log('POST : ' + str(data)) + Utility.console_log('Status : ' + str(resp.status)) + Utility.console_log('Reason : ' + resp.reason) + Utility.console_log('Data length : ' + str(len(data))) + Utility.console_log('Data content : ' + str(data)) return data def putreq(conn, path, body, verbose=False): - conn.request("PUT", path, body) + conn.request('PUT', path, body) resp = conn.getresponse() data = resp.read() if verbose: - Utility.console_log("PUT : " + path, body) - Utility.console_log("Status : " + resp.status) - Utility.console_log("Reason : " + resp.reason) - Utility.console_log("Data length : " + str(len(data))) - Utility.console_log("Data content : " + data) + Utility.console_log('PUT : ' + path, body) + Utility.console_log('Status : ' + str(resp.status)) + Utility.console_log('Reason : ' + resp.reason) + Utility.console_log('Data length : ' + str(len(data))) + Utility.console_log('Data content : ' + str(data)) return data @@ -85,22 +84,22 @@ if __name__ == '__main__': N = args['N'] # Establish HTTP connection - Utility.console_log("Connecting to => " + ip + ":" + port) + Utility.console_log('Connecting to => ' + ip + ':' + port) conn = start_session(ip, port) # Reset adder context to specified value(0) # -- Not needed as new connection will always # -- have zero value of the accumulator - Utility.console_log("Reset the accumulator to 0") - putreq(conn, "/adder", str(0)) + Utility.console_log('Reset the accumulator to 0') + putreq(conn, '/adder', str(0)) # Sum numbers from 1 to specified value(N) - Utility.console_log("Summing numbers from 1 to " + str(N)) + Utility.console_log('Summing numbers from 1 to ' + str(N)) for i in range(1, N + 1): - postreq(conn, "/adder", str(i)) + postreq(conn, '/adder', str(i)) # Fetch the result - Utility.console_log("Result :" + getreq(conn, "/adder")) + Utility.console_log('Result :' + getreq(conn, '/adder').decode()) # Close HTTP
connection end_session(conn) diff --git a/tools/ci/python_packages/idf_http_server_test/client.py b/tools/ci/python_packages/idf_http_server_test/client.py index 95d195d824..a1b6ec5758 100644 --- a/tools/ci/python_packages/idf_http_server_test/client.py +++ b/tools/ci/python_packages/idf_http_server_test/client.py @@ -14,12 +14,11 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import print_function -from __future__ import unicode_literals -from builtins import str -import http.client -import argparse +from __future__ import print_function, unicode_literals +import argparse +import http.client +from builtins import str from tiny_test_fw import Utility @@ -31,28 +30,28 @@ def verbose_print(verbosity, *args): def test_val(text, expected, received): if expected != received: - Utility.console_log(" Fail!") - Utility.console_log(" [reason] " + text + ":") - Utility.console_log(" expected: " + str(expected)) - Utility.console_log(" received: " + str(received)) + Utility.console_log(' Fail!') + Utility.console_log(' [reason] ' + text + ':') + Utility.console_log(' expected: ' + str(expected)) + Utility.console_log(' received: ' + str(received)) return False return True def test_get_handler(ip, port, verbosity=False): - verbose_print(verbosity, "======== GET HANDLER TEST =============") + verbose_print(verbosity, '======== GET HANDLER TEST =============') # Establish HTTP connection - verbose_print(verbosity, "Connecting to => " + ip + ":" + port) - sess = http.client.HTTPConnection(ip + ":" + port, timeout=15) + verbose_print(verbosity, 'Connecting to => ' + ip + ':' + port) + sess = http.client.HTTPConnection(ip + ':' + port, timeout=15) - uri = "/hello?query1=value1&query2=value2&query3=value3" + uri = '/hello?query1=value1&query2=value2&query3=value3' # GET hello response - test_headers = {"Test-Header-1":"Test-Value-1", "Test-Header-2":"Test-Value-2"} - verbose_print(verbosity, "Sending GET to URI : ", uri) - verbose_print(verbosity, "Sending additional headers : ") + test_headers = {'Test-Header-1':'Test-Value-1', 'Test-Header-2':'Test-Value-2'} + verbose_print(verbosity, 'Sending GET to URI : ', uri) + verbose_print(verbosity, 'Sending additional headers : ') for k, v in test_headers.items(): - verbose_print(verbosity, "\t", k, ": ", v) - sess.request("GET", url=uri, headers=test_headers) + verbose_print(verbosity, '\t', k, ': ', v) + sess.request('GET', url=uri, headers=test_headers) resp = sess.getresponse() resp_hdrs = resp.getheaders() resp_data = resp.read().decode() @@ -60,100 +59,100 @@ def test_get_handler(ip, port, verbosity=False): sess.close() if not ( - test_val("Status code mismatch", 200, resp.status) and - test_val("Response mismatch", "Custom-Value-1", resp.getheader("Custom-Header-1")) and - test_val("Response mismatch", "Custom-Value-2", resp.getheader("Custom-Header-2")) and - test_val("Response mismatch", "Hello World!", resp_data) + test_val('Status code mismatch', 200, resp.status) and + test_val('Response mismatch', 'Custom-Value-1', resp.getheader('Custom-Header-1')) and + test_val('Response mismatch', 'Custom-Value-2', resp.getheader('Custom-Header-2')) and + test_val('Response mismatch', 'Hello World!', resp_data) ): return False - verbose_print(verbosity, "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv") - verbose_print(verbosity, "Server response to GET /hello") - verbose_print(verbosity, "Response Headers : ") + verbose_print(verbosity, 'vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv') + 
verbose_print(verbosity, 'Server response to GET /hello') + verbose_print(verbosity, 'Response Headers : ') for k, v in resp_hdrs: - verbose_print(verbosity, "\t", k, ": ", v) - verbose_print(verbosity, "Response Data : " + resp_data) - verbose_print(verbosity, "========================================\n") + verbose_print(verbosity, '\t', k, ': ', v) + verbose_print(verbosity, 'Response Data : ' + resp_data) + verbose_print(verbosity, '========================================\n') return True def test_post_handler(ip, port, msg, verbosity=False): - verbose_print(verbosity, "======== POST HANDLER TEST ============") + verbose_print(verbosity, '======== POST HANDLER TEST ============') # Establish HTTP connection - verbose_print(verbosity, "Connecting to => " + ip + ":" + port) - sess = http.client.HTTPConnection(ip + ":" + port, timeout=15) + verbose_print(verbosity, 'Connecting to => ' + ip + ':' + port) + sess = http.client.HTTPConnection(ip + ':' + port, timeout=15) # POST message to /echo and get back response - sess.request("POST", url="/echo", body=msg) + sess.request('POST', url='/echo', body=msg) resp = sess.getresponse() resp_data = resp.read().decode() - verbose_print(verbosity, "Server response to POST /echo (" + msg + ")") - verbose_print(verbosity, "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv") + verbose_print(verbosity, 'Server response to POST /echo (' + msg + ')') + verbose_print(verbosity, 'vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv') verbose_print(verbosity, resp_data) - verbose_print(verbosity, "========================================\n") + verbose_print(verbosity, '========================================\n') # Close HTTP connection sess.close() - return test_val("Response mismatch", msg, resp_data) + return test_val('Response mismatch', msg, resp_data) def test_put_handler(ip, port, verbosity=False): - verbose_print(verbosity, "======== PUT HANDLER TEST =============") + verbose_print(verbosity, '======== PUT HANDLER TEST =============') # Establish HTTP connection - verbose_print(verbosity, "Connecting to => " + ip + ":" + port) - sess = http.client.HTTPConnection(ip + ":" + port, timeout=15) + verbose_print(verbosity, 'Connecting to => ' + ip + ':' + port) + sess = http.client.HTTPConnection(ip + ':' + port, timeout=15) # PUT message to /ctrl to disable /hello and /echo URI handlers # and set 404 error handler to custom http_404_error_handler() - verbose_print(verbosity, "Disabling /hello and /echo handlers") - sess.request("PUT", url="/ctrl", body="0") + verbose_print(verbosity, 'Disabling /hello and /echo handlers') + sess.request('PUT', url='/ctrl', body='0') resp = sess.getresponse() resp.read() try: # Send HTTP request to /hello URI - sess.request("GET", url="/hello") + sess.request('GET', url='/hello') resp = sess.getresponse() resp_data = resp.read().decode() # 404 Error must be returned from server as URI /hello is no longer available. 
# But the custom error handler http_404_error_handler() will not close the # session if the requested URI is /hello - if not test_val("Status code mismatch", 404, resp.status): + if not test_val('Status code mismatch', 404, resp.status): raise AssertionError # Compare error response string with expectation - verbose_print(verbosity, "Response on GET /hello : " + resp_data) - if not test_val("Response mismatch", "/hello URI is not available", resp_data): + verbose_print(verbosity, 'Response on GET /hello : ' + resp_data) + if not test_val('Response mismatch', '/hello URI is not available', resp_data): raise AssertionError # Using same session for sending an HTTP request to /echo, as it is expected # that the custom error handler http_404_error_handler() would not have closed # the session - sess.request("POST", url="/echo", body="Some content") + sess.request('POST', url='/echo', body='Some content') resp = sess.getresponse() resp_data = resp.read().decode() # 404 Error must be returned from server as URI /hello is no longer available. # The custom error handler http_404_error_handler() will close the session # this time as the requested URI is /echo - if not test_val("Status code mismatch", 404, resp.status): + if not test_val('Status code mismatch', 404, resp.status): raise AssertionError # Compare error response string with expectation - verbose_print(verbosity, "Response on POST /echo : " + resp_data) - if not test_val("Response mismatch", "/echo URI is not available", resp_data): + verbose_print(verbosity, 'Response on POST /echo : ' + resp_data) + if not test_val('Response mismatch', '/echo URI is not available', resp_data): raise AssertionError try: # Using same session should fail as by now the session would have closed - sess.request("POST", url="/hello", body="Some content") + sess.request('POST', url='/hello', body='Some content') resp = sess.getresponse() resp.read().decode() # If control reaches this point then the socket was not closed. 
# This is not expected - verbose_print(verbosity, "Socket not closed by server") + verbose_print(verbosity, 'Socket not closed by server') raise AssertionError except http.client.HTTPException: @@ -161,7 +160,7 @@ def test_put_handler(ip, port, verbosity=False): pass except http.client.HTTPException: - verbose_print(verbosity, "Socket closed by server") + verbose_print(verbosity, 'Socket closed by server') return False except AssertionError: @@ -171,47 +170,47 @@ def test_put_handler(ip, port, verbosity=False): # Close HTTP connection sess.close() - verbose_print(verbosity, "Enabling /hello handler") + verbose_print(verbosity, 'Enabling /hello handler') # Create new connection - sess = http.client.HTTPConnection(ip + ":" + port, timeout=15) + sess = http.client.HTTPConnection(ip + ':' + port, timeout=15) # PUT message to /ctrl to enable /hello URI handler # and restore 404 error handler to default - sess.request("PUT", url="/ctrl", body="1") + sess.request('PUT', url='/ctrl', body='1') resp = sess.getresponse() resp.read() # Close HTTP connection sess.close() # Create new connection - sess = http.client.HTTPConnection(ip + ":" + port, timeout=15) + sess = http.client.HTTPConnection(ip + ':' + port, timeout=15) try: # Sending HTTP request to /hello should work now - sess.request("GET", url="/hello") + sess.request('GET', url='/hello') resp = sess.getresponse() resp_data = resp.read().decode() - if not test_val("Status code mismatch", 200, resp.status): + if not test_val('Status code mismatch', 200, resp.status): raise AssertionError - verbose_print(verbosity, "Response on GET /hello : " + resp_data) - if not test_val("Response mismatch", "Hello World!", resp_data): + verbose_print(verbosity, 'Response on GET /hello : ' + resp_data) + if not test_val('Response mismatch', 'Hello World!', resp_data): raise AssertionError # 404 Error handler should have been restored to default - sess.request("GET", url="/invalid") + sess.request('GET', url='/invalid') resp = sess.getresponse() resp_data = resp.read().decode() - if not test_val("Status code mismatch", 404, resp.status): + if not test_val('Status code mismatch', 404, resp.status): raise AssertionError - verbose_print(verbosity, "Response on GET /invalid : " + resp_data) - if not test_val("Response mismatch", "This URI does not exist", resp_data): + verbose_print(verbosity, 'Response on GET /invalid : ' + resp_data) + if not test_val('Response mismatch', 'This URI does not exist', resp_data): raise AssertionError except http.client.HTTPException: - verbose_print(verbosity, "Socket closed by server") + verbose_print(verbosity, 'Socket closed by server') return False except AssertionError: @@ -225,26 +224,26 @@ def test_put_handler(ip, port, verbosity=False): def test_custom_uri_query(ip, port, query, verbosity=False): - verbose_print(verbosity, "======== GET HANDLER TEST =============") + verbose_print(verbosity, '======== GET HANDLER TEST =============') # Establish HTTP connection - verbose_print(verbosity, "Connecting to => " + ip + ":" + port) - sess = http.client.HTTPConnection(ip + ":" + port, timeout=15) + verbose_print(verbosity, 'Connecting to => ' + ip + ':' + port) + sess = http.client.HTTPConnection(ip + ':' + port, timeout=15) - uri = "/hello?" + query + uri = '/hello?' 
+ query # GET hello response - verbose_print(verbosity, "Sending GET to URI : ", uri) - sess.request("GET", url=uri, headers={}) + verbose_print(verbosity, 'Sending GET to URI : ', uri) + sess.request('GET', url=uri, headers={}) resp = sess.getresponse() resp_data = resp.read().decode() - verbose_print(verbosity, "vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv") - verbose_print(verbosity, "Server response to GET /hello") - verbose_print(verbosity, "Response Data : " + resp_data) - verbose_print(verbosity, "========================================\n") + verbose_print(verbosity, 'vvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvvv') + verbose_print(verbosity, 'Server response to GET /hello') + verbose_print(verbosity, 'Response Data : ' + resp_data) + verbose_print(verbosity, '========================================\n') # Close HTTP connection sess.close() - return "Hello World!" == resp_data + return 'Hello World!' == resp_data if __name__ == '__main__': @@ -265,4 +264,4 @@ if __name__ == '__main__': test_put_handler(ip, port, True) and test_post_handler(ip, port, msg, True) ): - Utility.console_log("Failed!") + Utility.console_log('Failed!') diff --git a/tools/ci/python_packages/idf_http_server_test/test.py b/tools/ci/python_packages/idf_http_server_test/test.py index 3ad5bcc9c7..e1866c8c2a 100644 --- a/tools/ci/python_packages/idf_http_server_test/test.py +++ b/tools/ci/python_packages/idf_http_server_test/test.py @@ -129,19 +129,17 @@ # - Simple GET on /hello/restart_results (returns the leak results) -from __future__ import division -from __future__ import print_function -from builtins import str -from builtins import range -from builtins import object -import threading -import socket -import time +from __future__ import division, print_function + import argparse import http.client -import sys -import string import random +import socket +import string +import sys +import threading +import time +from builtins import object, range, str from tiny_test_fw import Utility @@ -165,32 +163,32 @@ class Session(object): self.client.sendall(data.encode()) except socket.error as err: self.client.close() - Utility.console_log("Socket Error in send :", err) + Utility.console_log('Socket Error in send :', err) rval = False return rval def send_get(self, path, headers=None): - request = "GET " + path + " HTTP/1.1\r\nHost: " + self.target + request = 'GET ' + path + ' HTTP/1.1\r\nHost: ' + self.target if headers: for field, value in headers.items(): - request += "\r\n" + field + ": " + value - request += "\r\n\r\n" + request += '\r\n' + field + ': ' + value + request += '\r\n\r\n' return self.send_err_check(request) def send_put(self, path, data, headers=None): - request = "PUT " + path + " HTTP/1.1\r\nHost: " + self.target + request = 'PUT ' + path + ' HTTP/1.1\r\nHost: ' + self.target if headers: for field, value in headers.items(): - request += "\r\n" + field + ": " + value - request += "\r\nContent-Length: " + str(len(data)) + "\r\n\r\n" + request += '\r\n' + field + ': ' + value + request += '\r\nContent-Length: ' + str(len(data)) + '\r\n\r\n' return self.send_err_check(request, data) def send_post(self, path, data, headers=None): - request = "POST " + path + " HTTP/1.1\r\nHost: " + self.target + request = 'POST ' + path + ' HTTP/1.1\r\nHost: ' + self.target if headers: for field, value in headers.items(): - request += "\r\n" + field + ": " + value - request += "\r\nContent-Length: " + str(len(data)) + "\r\n\r\n" + request += '\r\n' + field + ': ' + value + request += '\r\nContent-Length: ' + str(len(data)) + 
'\r\n\r\n' return self.send_err_check(request, data) def read_resp_hdrs(self): @@ -234,7 +232,7 @@ class Session(object): return headers except socket.error as err: self.client.close() - Utility.console_log("Socket Error in recv :", err) + Utility.console_log('Socket Error in recv :', err) return None def read_resp_data(self): @@ -263,9 +261,9 @@ class Session(object): rem_len -= len(new_data) chunk_data_buf = '' # Fetch remaining CRLF - if self.client.recv(2) != "\r\n": # Error in packet - Utility.console_log("Error in chunked data") + if self.client.recv(2) != b'\r\n': # Error in packet; recv() returns bytes + Utility.console_log('Error in chunked data') return None if not chunk_len: # If last chunk @@ -278,7 +276,7 @@ class Session(object): return read_data except socket.error as err: self.client.close() - Utility.console_log("Socket Error in recv :", err) + Utility.console_log('Socket Error in recv :', err) return None def close(self): @@ -287,10 +285,10 @@ def test_val(text, expected, received): if expected != received: - Utility.console_log(" Fail!") - Utility.console_log(" [reason] " + text + ":") - Utility.console_log(" expected: " + str(expected)) - Utility.console_log(" received: " + str(received)) + Utility.console_log(' Fail!') + Utility.console_log(' [reason] ' + text + ':') + Utility.console_log(' expected: ' + str(expected)) + Utility.console_log(' received: ' + str(received)) return False return True @@ -308,7 +306,7 @@ class adder_thread (threading.Thread): # Pipeline 3 requests if (_verbose_): - Utility.console_log(" Thread: Using adder start " + str(self.id)) + Utility.console_log(' Thread: Using adder start ' + str(self.id)) for _ in range(self.depth): self.session.send_post('/adder', str(self.id)) @@ -320,10 +318,10 @@ class adder_thread (threading.Thread): def adder_result(self): if len(self.response) != self.depth: - Utility.console_log("Error : missing response packets") + Utility.console_log('Error : missing response packets') return False for i in range(len(self.response)): - if not test_val("Thread" + str(self.id) + " response[" + str(i) + "]", + if not test_val('Thread' + str(self.id) + ' response[' + str(i) + ']', str(self.id * (i + 1)), str(self.response[i])): return False return True @@ -336,177 +334,177 @@ def get_hello(dut, port): # GET /hello should return 'Hello World!' Utility.console_log("[test] GET /hello returns 'Hello World!'
=>", end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("GET", "/hello") + conn.request('GET', '/hello') resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - if not test_val("data", "Hello World!", resp.read().decode()): + if not test_val('data', 'Hello World!', resp.read().decode()): conn.close() return False - if not test_val("data", "text/html", resp.getheader('Content-Type')): + if not test_val('data', 'text/html', resp.getheader('Content-Type')): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def put_hello(dut, port): # PUT /hello returns 405' - Utility.console_log("[test] PUT /hello returns 405 =>", end=' ') + Utility.console_log('[test] PUT /hello returns 405 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("PUT", "/hello", "Hello") + conn.request('PUT', '/hello', 'Hello') resp = conn.getresponse() - if not test_val("status_code", 405, resp.status): + if not test_val('status_code', 405, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def post_hello(dut, port): # POST /hello returns 405' - Utility.console_log("[test] POST /hello returns 405 =>", end=' ') + Utility.console_log('[test] POST /hello returns 405 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("POST", "/hello", "Hello") + conn.request('POST', '/hello', 'Hello') resp = conn.getresponse() - if not test_val("status_code", 405, resp.status): + if not test_val('status_code', 405, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def post_echo(dut, port): # POST /echo echoes data' - Utility.console_log("[test] POST /echo echoes data =>", end=' ') + Utility.console_log('[test] POST /echo echoes data =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("POST", "/echo", "Hello") + conn.request('POST', '/echo', 'Hello') resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - if not test_val("data", "Hello", resp.read().decode()): + if not test_val('data', 'Hello', resp.read().decode()): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def put_echo(dut, port): # PUT /echo echoes data' - Utility.console_log("[test] PUT /echo echoes data =>", end=' ') + Utility.console_log('[test] PUT /echo echoes data =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("PUT", "/echo", "Hello") + conn.request('PUT', '/echo', 'Hello') resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - if not test_val("data", "Hello", resp.read().decode()): + if not test_val('data', 'Hello', resp.read().decode()): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_echo(dut, port): # GET /echo returns 404' - Utility.console_log("[test] GET /echo returns 405 =>", end=' ') + Utility.console_log('[test] GET /echo returns 405 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - 
conn.request("GET", "/echo") + conn.request('GET', '/echo') resp = conn.getresponse() - if not test_val("status_code", 405, resp.status): + if not test_val('status_code', 405, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_test_headers(dut, port): # GET /test_header returns data of Header2' - Utility.console_log("[test] GET /test_header =>", end=' ') + Utility.console_log('[test] GET /test_header =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - custom_header = {"Header1": "Value1", "Header3": "Value3"} - header2_values = ["", " ", "Value2", " Value2", "Value2 ", " Value2 "] + custom_header = {'Header1': 'Value1', 'Header3': 'Value3'} + header2_values = ['', ' ', 'Value2', ' Value2', 'Value2 ', ' Value2 '] for val in header2_values: - custom_header["Header2"] = val - conn.request("GET", "/test_header", headers=custom_header) + custom_header['Header2'] = val + conn.request('GET', '/test_header', headers=custom_header) resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - hdr_val_start_idx = val.find("Value2") + hdr_val_start_idx = val.find('Value2') if hdr_val_start_idx == -1: - if not test_val("header: Header2", "", resp.read().decode()): + if not test_val('header: Header2', '', resp.read().decode()): conn.close() return False else: - if not test_val("header: Header2", val[hdr_val_start_idx:], resp.read().decode()): + if not test_val('header: Header2', val[hdr_val_start_idx:], resp.read().decode()): conn.close() return False resp.read() - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_hello_type(dut, port): # GET /hello/type_html returns text/html as Content-Type' - Utility.console_log("[test] GET /hello/type_html has Content-Type of text/html =>", end=' ') + Utility.console_log('[test] GET /hello/type_html has Content-Type of text/html =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("GET", "/hello/type_html") + conn.request('GET', '/hello/type_html') resp = conn.getresponse() - if not test_val("status_code", 200, resp.status): + if not test_val('status_code', 200, resp.status): conn.close() return False - if not test_val("data", "Hello World!", resp.read().decode()): + if not test_val('data', 'Hello World!', resp.read().decode()): conn.close() return False - if not test_val("data", "text/html", resp.getheader('Content-Type')): + if not test_val('data', 'text/html', resp.getheader('Content-Type')): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_hello_status(dut, port): # GET /hello/status_500 returns status 500' - Utility.console_log("[test] GET /hello/status_500 returns status 500 =>", end=' ') + Utility.console_log('[test] GET /hello/status_500 returns status 500 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("GET", "/hello/status_500") + conn.request('GET', '/hello/status_500') resp = conn.getresponse() - if not test_val("status_code", 500, resp.status): + if not test_val('status_code', 500, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def get_false_uri(dut, port): # GET /false_uri returns status 404' - Utility.console_log("[test] GET /false_uri returns status 404 =>", 
end=' ') + Utility.console_log('[test] GET /false_uri returns status 404 =>', end=' ') conn = http.client.HTTPConnection(dut, int(port), timeout=15) - conn.request("GET", "/false_uri") + conn.request('GET', '/false_uri') resp = conn.getresponse() - if not test_val("status_code", 404, resp.status): + if not test_val('status_code', 404, resp.status): conn.close() return False - Utility.console_log("Success") + Utility.console_log('Success') conn.close() return True def parallel_sessions_adder(dut, port, max_sessions): # POSTs on /adder in parallel sessions - Utility.console_log("[test] POST {pipelined} on /adder in " + str(max_sessions) + " sessions =>", end=' ') + Utility.console_log('[test] POST {pipelined} on /adder in ' + str(max_sessions) + ' sessions =>', end=' ') t = [] # Create all sessions for i in range(max_sessions): @@ -520,90 +518,90 @@ def parallel_sessions_adder(dut, port, max_sessions): res = True for i in range(len(t)): - if not test_val("Thread" + str(i) + " Failed", t[i].adder_result(), True): + if not test_val('Thread' + str(i) + ' Failed', t[i].adder_result(), True): res = False t[i].close() if (res): - Utility.console_log("Success") + Utility.console_log('Success') return res def async_response_test(dut, port): # Test that an asynchronous work is executed in the HTTPD's context # This is tested by reading two responses over the same session - Utility.console_log("[test] Test HTTPD Work Queue (Async response) =>", end=' ') + Utility.console_log('[test] Test HTTPD Work Queue (Async response) =>', end=' ') s = Session(dut, port) s.send_get('/async_data') s.read_resp_hdrs() - if not test_val("First Response", "Hello World!", s.read_resp_data()): + if not test_val('First Response', 'Hello World!', s.read_resp_data()): s.close() return False s.read_resp_hdrs() - if not test_val("Second Response", "Hello Double World!", s.read_resp_data()): + if not test_val('Second Response', 'Hello Double World!', s.read_resp_data()): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def leftover_data_test(dut, port): # Leftover data in POST is purged (valid and invalid URIs) - Utility.console_log("[test] Leftover data in POST is purged (valid and invalid URIs) =>", end=' ') - s = http.client.HTTPConnection(dut + ":" + port, timeout=15) + Utility.console_log('[test] Leftover data in POST is purged (valid and invalid URIs) =>', end=' ') + s = http.client.HTTPConnection(dut + ':' + port, timeout=15) - s.request("POST", url='/leftover_data', body="abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz") + s.request('POST', url='/leftover_data', body='abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz') resp = s.getresponse() - if not test_val("Partial data", "abcdefghij", resp.read().decode()): + if not test_val('Partial data', 'abcdefghij', resp.read().decode()): s.close() return False - s.request("GET", url='/hello') + s.request('GET', url='/hello') resp = s.getresponse() - if not test_val("Hello World Data", "Hello World!", resp.read().decode()): + if not test_val('Hello World Data', 'Hello World!', resp.read().decode()): s.close() return False - s.request("POST", url='/false_uri', body="abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz") + s.request('POST', url='/false_uri', body='abcdefghijklmnopqrstuvwxyz\r\nabcdefghijklmnopqrstuvwxyz') resp = s.getresponse() - if not test_val("False URI Status", str(404), str(resp.status)): + if not test_val('False URI Status', str(404), str(resp.status)): s.close() return False 
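# Recap of the checks above: the /leftover_data handler reads only the first
# 10 bytes of the longer POST body (hence the 'abcdefghij' assertion), the
# server silently purges the remaining bytes, the same connection stays usable
# for the follow-up GET /hello, and a POST to an unregistered URI returns 404.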
# socket would have been closed by server due to error s.close() - s = http.client.HTTPConnection(dut + ":" + port, timeout=15) - s.request("GET", url='/hello') + s = http.client.HTTPConnection(dut + ':' + port, timeout=15) + s.request('GET', url='/hello') resp = s.getresponse() - if not test_val("Hello World Data", "Hello World!", resp.read().decode()): + if not test_val('Hello World Data', 'Hello World!', resp.read().decode()): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def spillover_session(dut, port, max_sess): # Session max_sess_sessions + 1 is rejected - Utility.console_log("[test] Session max_sess_sessions (" + str(max_sess) + ") + 1 is rejected =>", end=' ') + Utility.console_log('[test] Session max_sess_sessions (' + str(max_sess) + ') + 1 is rejected =>', end=' ') s = [] _verbose_ = True for i in range(max_sess + 1): if (_verbose_): - Utility.console_log("Executing " + str(i)) + Utility.console_log('Executing ' + str(i)) try: - a = http.client.HTTPConnection(dut + ":" + port, timeout=15) - a.request("GET", url='/hello') + a = http.client.HTTPConnection(dut + ':' + port, timeout=15) + a.request('GET', url='/hello') resp = a.getresponse() - if not test_val("Connection " + str(i), "Hello World!", resp.read().decode()): + if not test_val('Connection ' + str(i), 'Hello World!', resp.read().decode()): a.close() break s.append(a) except Exception: if (_verbose_): - Utility.console_log("Connection " + str(i) + " rejected") + Utility.console_log('Connection ' + str(i) + ' rejected') a.close() break @@ -612,134 +610,134 @@ def spillover_session(dut, port, max_sess): a.close() # Check if number of connections is equal to max_sess - Utility.console_log(["Fail","Success"][len(s) == max_sess]) + Utility.console_log(['Fail','Success'][len(s) == max_sess]) return (len(s) == max_sess) def recv_timeout_test(dut, port): - Utility.console_log("[test] Timeout occurs if partial packet sent =>", end=' ') + Utility.console_log('[test] Timeout occurs if partial packet sent =>', end=' ') s = Session(dut, port) - s.client.sendall(b"GE") + s.client.sendall(b'GE') s.read_resp_hdrs() resp = s.read_resp_data() - if not test_val("Request Timeout", "Server closed this connection", resp): + if not test_val('Request Timeout', 'Server closed this connection', resp): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def packet_size_limit_test(dut, port, test_size): - Utility.console_log("[test] send size limit test =>", end=' ') + Utility.console_log('[test] send size limit test =>', end=' ') retry = 5 while (retry): retry -= 1 - Utility.console_log("data size = ", test_size) - s = http.client.HTTPConnection(dut + ":" + port, timeout=15) + Utility.console_log('data size = ', test_size) + s = http.client.HTTPConnection(dut + ':' + port, timeout=15) random_data = ''.join(string.printable[random.randint(0,len(string.printable)) - 1] for _ in list(range(test_size))) - path = "/echo" - s.request("POST", url=path, body=random_data) + path = '/echo' + s.request('POST', url=path, body=random_data) resp = s.getresponse() - if not test_val("Error", "200", str(resp.status)): - if test_val("Error", "500", str(resp.status)): - Utility.console_log("Data too large to be allocated") + if not test_val('Error', '200', str(resp.status)): + if test_val('Error', '500', str(resp.status)): + Utility.console_log('Data too large to be allocated') test_size = test_size // 10 else: - Utility.console_log("Unexpected 
error") + Utility.console_log('Unexpected error') s.close() - Utility.console_log("Retry...") + Utility.console_log('Retry...') continue resp = resp.read().decode() result = (resp == random_data) if not result: - test_val("Data size", str(len(random_data)), str(len(resp))) + test_val('Data size', str(len(random_data)), str(len(resp))) s.close() - Utility.console_log("Retry...") + Utility.console_log('Retry...') continue s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True - Utility.console_log("Failed") + Utility.console_log('Failed') return False def arbitrary_termination_test(dut, port): - Utility.console_log("[test] Arbitrary termination test =>", end=' ') + Utility.console_log('[test] Arbitrary termination test =>', end=' ') cases = [ { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: SomeValue\r\n\r\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom: SomeValue\r\n\r\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\nHost: " + dut + "\r\nCustom: SomeValue\r\n\r\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\nHost: ' + dut + '\r\nCustom: SomeValue\r\n\r\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\nCustom: SomeValue\r\n\r\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\nCustom: SomeValue\r\n\r\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: SomeValue\n\r\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom: SomeValue\n\r\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: SomeValue\r\n\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom: SomeValue\r\n\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\nHost: " + dut + "\nCustom: SomeValue\n\n", - "code": "200", - "header": "SomeValue" + 'request': 'POST /echo HTTP/1.1\nHost: ' + dut + '\nCustom: SomeValue\n\n', + 'code': '200', + 'header': 'SomeValue' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\n\r\nABCDE", - "code": "200", - "body": "ABCDE" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 5\n\r\nABCDE', + 'code': '200', + 'body': 'ABCDE' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\r\n\nABCDE", - "code": "200", - "body": "ABCDE" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 5\r\n\nABCDE', + 'code': '200', + 'body': 'ABCDE' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\n\nABCDE", - "code": "200", - "body": "ABCDE" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 5\n\nABCDE', + 'code': '200', + 'body': 'ABCDE' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 5\n\n\rABCD", - "code": "200", - "body": "\rABCD" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 5\n\n\rABCD', + 'code': '200', + 'body': '\rABCD' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\r\nCustom: SomeValue\r\r\n\r\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\r\nCustom: SomeValue\r\r\n\r\r\n', + 'code': '400' }, { - "request": 
"POST /echo HTTP/1.1\r\r\nHost: " + dut + "\r\n\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\r\nHost: ' + dut + '\r\n\r\n', + 'code': '400' }, { - "request": "POST /echo HTTP/1.1\r\n\rHost: " + dut + "\r\n\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\n\rHost: ' + dut + '\r\n\r\n', + 'code': '400' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\rCustom: SomeValue\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\rCustom: SomeValue\r\n', + 'code': '400' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom: Some\rValue\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom: Some\rValue\r\n', + 'code': '400' }, { - "request": "POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nCustom- SomeValue\r\n\r\n", - "code": "400" + 'request': 'POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nCustom- SomeValue\r\n\r\n', + 'code': '400' } ] for case in cases: @@ -748,159 +746,159 @@ def arbitrary_termination_test(dut, port): resp_hdrs = s.read_resp_hdrs() resp_body = s.read_resp_data() s.close() - if not test_val("Response Code", case["code"], s.status): + if not test_val('Response Code', case['code'], s.status): return False - if "header" in case.keys(): + if 'header' in case.keys(): resp_hdr_val = None - if "Custom" in resp_hdrs.keys(): - resp_hdr_val = resp_hdrs["Custom"] - if not test_val("Response Header", case["header"], resp_hdr_val): + if 'Custom' in resp_hdrs.keys(): + resp_hdr_val = resp_hdrs['Custom'] + if not test_val('Response Header', case['header'], resp_hdr_val): return False - if "body" in case.keys(): - if not test_val("Response Body", case["body"], resp_body): + if 'body' in case.keys(): + if not test_val('Response Body', case['body'], resp_body): return False - Utility.console_log("Success") + Utility.console_log('Success') return True def code_500_server_error_test(dut, port): - Utility.console_log("[test] 500 Server Error test =>", end=' ') + Utility.console_log('[test] 500 Server Error test =>', end=' ') s = Session(dut, port) # Sending a very large content length will cause malloc to fail content_len = 2**30 - s.client.sendall(("POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: " + str(content_len) + "\r\n\r\nABCD").encode()) + s.client.sendall(('POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: ' + str(content_len) + '\r\n\r\nABCD').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Server Error", "500", s.status): + if not test_val('Server Error', '500', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_501_method_not_impl(dut, port): - Utility.console_log("[test] 501 Method Not Implemented =>", end=' ') + Utility.console_log('[test] 501 Method Not Implemented =>', end=' ') s = Session(dut, port) - path = "/hello" - s.client.sendall(("ABC " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/hello' + s.client.sendall(('ABC ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() # Presently server sends back 400 Bad Request # if not test_val("Server Error", "501", s.status): # s.close() # return False - if not test_val("Server Error", "400", s.status): + if not test_val('Server Error', '400', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_505_version_not_supported(dut, port): - Utility.console_log("[test] 505 Version 
Not Supported =>", end=' ') + Utility.console_log('[test] 505 Version Not Supported =>', end=' ') s = Session(dut, port) - path = "/hello" - s.client.sendall(("GET " + path + " HTTP/2.0\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/hello' + s.client.sendall(('GET ' + path + ' HTTP/2.0\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Server Error", "505", s.status): + if not test_val('Server Error', '505', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_400_bad_request(dut, port): - Utility.console_log("[test] 400 Bad Request =>", end=' ') + Utility.console_log('[test] 400 Bad Request =>', end=' ') s = Session(dut, port) - path = "/hello" - s.client.sendall(("XYZ " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/hello' + s.client.sendall(('XYZ ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "400", s.status): + if not test_val('Client Error', '400', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_404_not_found(dut, port): - Utility.console_log("[test] 404 Not Found =>", end=' ') + Utility.console_log('[test] 404 Not Found =>', end=' ') s = Session(dut, port) - path = "/dummy" - s.client.sendall(("GET " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/dummy' + s.client.sendall(('GET ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "404", s.status): + if not test_val('Client Error', '404', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_405_method_not_allowed(dut, port): - Utility.console_log("[test] 405 Method Not Allowed =>", end=' ') + Utility.console_log('[test] 405 Method Not Allowed =>', end=' ') s = Session(dut, port) - path = "/hello" - s.client.sendall(("POST " + path + " HTTP/1.1\r\nHost: " + dut + "\r\n\r\n").encode()) + path = '/hello' + s.client.sendall(('POST ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "405", s.status): + if not test_val('Client Error', '405', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_408_req_timeout(dut, port): - Utility.console_log("[test] 408 Request Timeout =>", end=' ') + Utility.console_log('[test] 408 Request Timeout =>', end=' ') s = Session(dut, port) - s.client.sendall(("POST /echo HTTP/1.1\r\nHost: " + dut + "\r\nContent-Length: 10\r\n\r\nABCD").encode()) + s.client.sendall(('POST /echo HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Length: 10\r\n\r\nABCD').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "408", s.status): + if not test_val('Client Error', '408', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def code_411_length_required(dut, port): - Utility.console_log("[test] 411 Length Required =>", end=' ') + Utility.console_log('[test] 411 Length Required =>', end=' ') s = Session(dut, port) - path = "/echo" - s.client.sendall(("POST " + path + " HTTP/1.1\r\nHost: " + dut + "\r\nContent-Type: text/plain\r\nTransfer-Encoding: chunked\r\n\r\n").encode()) + 
path = '/echo' + s.client.sendall(('POST ' + path + ' HTTP/1.1\r\nHost: ' + dut + '\r\nContent-Type: text/plain\r\nTransfer-Encoding: chunked\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() # Presently server sends back 400 Bad Request # if not test_val("Client Error", "411", s.status): # s.close() # return False - if not test_val("Client Error", "400", s.status): + if not test_val('Client Error', '400', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True def send_getx_uri_len(dut, port, length): s = Session(dut, port) - method = "GET " - version = " HTTP/1.1\r\n" - path = "/" + "x" * (length - len(method) - len(version) - len("/")) + method = 'GET ' + version = ' HTTP/1.1\r\n' + path = '/' + 'x' * (length - len(method) - len(version) - len('/')) s.client.sendall(method.encode()) time.sleep(1) s.client.sendall(path.encode()) time.sleep(1) - s.client.sendall((version + "Host: " + dut + "\r\n\r\n").encode()) + s.client.sendall((version + 'Host: ' + dut + '\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() s.close() @@ -908,59 +906,59 @@ def send_getx_uri_len(dut, port, length): def code_414_uri_too_long(dut, port, max_uri_len): - Utility.console_log("[test] 414 URI Too Long =>", end=' ') + Utility.console_log('[test] 414 URI Too Long =>', end=' ') status = send_getx_uri_len(dut, port, max_uri_len) - if not test_val("Client Error", "404", status): + if not test_val('Client Error', '404', status): return False status = send_getx_uri_len(dut, port, max_uri_len + 1) - if not test_val("Client Error", "414", status): + if not test_val('Client Error', '414', status): return False - Utility.console_log("Success") + Utility.console_log('Success') return True def send_postx_hdr_len(dut, port, length): s = Session(dut, port) - path = "/echo" - host = "Host: " + dut - custom_hdr_field = "\r\nCustom: " - custom_hdr_val = "x" * (length - len(host) - len(custom_hdr_field) - len("\r\n\r\n") + len("0")) - request = ("POST " + path + " HTTP/1.1\r\n" + host + custom_hdr_field + custom_hdr_val + "\r\n\r\n").encode() + path = '/echo' + host = 'Host: ' + dut + custom_hdr_field = '\r\nCustom: ' + custom_hdr_val = 'x' * (length - len(host) - len(custom_hdr_field) - len('\r\n\r\n') + len('0')) + request = ('POST ' + path + ' HTTP/1.1\r\n' + host + custom_hdr_field + custom_hdr_val + '\r\n\r\n').encode() s.client.sendall(request[:length // 2]) time.sleep(1) s.client.sendall(request[length // 2:]) hdr = s.read_resp_hdrs() resp = s.read_resp_data() s.close() - if hdr and ("Custom" in hdr): - return (hdr["Custom"] == custom_hdr_val), resp + if hdr and ('Custom' in hdr): + return (hdr['Custom'] == custom_hdr_val), resp return False, s.status def code_431_hdr_too_long(dut, port, max_hdr_len): - Utility.console_log("[test] 431 Header Too Long =>", end=' ') + Utility.console_log('[test] 431 Header Too Long =>', end=' ') res, status = send_postx_hdr_len(dut, port, max_hdr_len) if not res: return False res, status = send_postx_hdr_len(dut, port, max_hdr_len + 1) - if not test_val("Client Error", "431", status): + if not test_val('Client Error', '431', status): return False - Utility.console_log("Success") + Utility.console_log('Success') return True def test_upgrade_not_supported(dut, port): - Utility.console_log("[test] Upgrade Not Supported =>", end=' ') + Utility.console_log('[test] Upgrade Not Supported =>', end=' ') s = Session(dut, port) # path = "/hello" - s.client.sendall(("OPTIONS * HTTP/1.1\r\nHost:" + dut + "\r\nUpgrade: 
TLS/1.0\r\nConnection: Upgrade\r\n\r\n").encode()) + s.client.sendall(('OPTIONS * HTTP/1.1\r\nHost:' + dut + '\r\nUpgrade: TLS/1.0\r\nConnection: Upgrade\r\n\r\n').encode()) s.read_resp_hdrs() s.read_resp_data() - if not test_val("Client Error", "400", s.status): + if not test_val('Client Error', '400', s.status): s.close() return False s.close() - Utility.console_log("Success") + Utility.console_log('Success') return True @@ -985,7 +983,7 @@ if __name__ == '__main__': _verbose_ = True - Utility.console_log("### Basic HTTP Client Tests") + Utility.console_log('### Basic HTTP Client Tests') get_hello(dut, port) post_hello(dut, port) put_hello(dut, port) @@ -997,7 +995,7 @@ if __name__ == '__main__': get_false_uri(dut, port) get_test_headers(dut, port) - Utility.console_log("### Error code tests") + Utility.console_log('### Error code tests') code_500_server_error_test(dut, port) code_501_method_not_impl(dut, port) code_505_version_not_supported(dut, port) @@ -1012,7 +1010,7 @@ if __name__ == '__main__': # Not supported yet (Error on chunked request) # code_411_length_required(dut, port) - Utility.console_log("### Sessions and Context Tests") + Utility.console_log('### Sessions and Context Tests') parallel_sessions_adder(dut, port, max_sessions) leftover_data_test(dut, port) async_response_test(dut, port) diff --git a/tools/ci/python_packages/idf_iperf_test_util/Attenuator.py b/tools/ci/python_packages/idf_iperf_test_util/Attenuator.py index 7a188ff9b6..5f20548678 100644 --- a/tools/ci/python_packages/idf_iperf_test_util/Attenuator.py +++ b/tools/ci/python_packages/idf_iperf_test_util/Attenuator.py @@ -18,9 +18,10 @@ Internal use only. This file provide method to control programmable attenuator. """ -import time -import serial import codecs +import time + +import serial def set_att(port, att, att_fix=False): @@ -47,16 +48,16 @@ def set_att(port, att, att_fix=False): serial_port = serial.Serial(port, baudrate=9600, rtscts=False, timeout=0.1) if serial_port.isOpen() is False: - raise IOError("attenuator control, failed to open att port") + raise IOError('attenuator control, failed to open att port') - cmd_hex = "7e7e10{:02x}{:x}".format(att_t, 0x10 + att_t) - exp_res_hex = "7e7e20{:02x}00{:x}".format(att_t, 0x20 + att_t) + cmd_hex = '7e7e10{:02x}{:x}'.format(att_t, 0x10 + att_t) + exp_res_hex = '7e7e20{:02x}00{:x}'.format(att_t, 0x20 + att_t) - cmd = codecs.decode(cmd_hex, "hex") - exp_res = codecs.decode(exp_res_hex, "hex") + cmd = codecs.decode(cmd_hex, 'hex') + exp_res = codecs.decode(exp_res_hex, 'hex') serial_port.write(cmd) - res = b"" + res = b'' for i in range(5): res += serial_port.read(20) diff --git a/tools/ci/python_packages/idf_iperf_test_util/LineChart.py b/tools/ci/python_packages/idf_iperf_test_util/LineChart.py index aa3aa88d8c..48bb25ca8f 100644 --- a/tools/ci/python_packages/idf_iperf_test_util/LineChart.py +++ b/tools/ci/python_packages/idf_iperf_test_util/LineChart.py @@ -40,18 +40,18 @@ def draw_line_chart(file_name, title, x_label, y_label, data_series, range_list) _data[str(key)] = data_series[item][key] _data = list(_data.values()) try: - legend = item + " (max: {:.02f})".format(max([x for x in _data if x])) + legend = item + ' (max: {:.02f})'.format(max([x for x in _data if x])) except TypeError: legend = item line.add_yaxis(legend, _data, is_smooth=True, is_connect_nones=True, label_opts=opts.LabelOpts(is_show=False)) line.set_global_opts( datazoom_opts=opts.DataZoomOpts(range_start=0, range_end=100), - title_opts=opts.TitleOpts(title=title, pos_left="center"), - 
legend_opts=opts.LegendOpts(pos_top="10%", pos_left="right", orient="vertical"), - tooltip_opts=opts.TooltipOpts(trigger="axis"), - xaxis_opts=opts.AxisOpts(type_="category", name=x_label, splitline_opts=opts.SplitLineOpts(is_show=True)), - yaxis_opts=opts.AxisOpts(type_="value", name=y_label, + title_opts=opts.TitleOpts(title=title, pos_left='center'), + legend_opts=opts.LegendOpts(pos_top='10%', pos_left='right', orient='vertical'), + tooltip_opts=opts.TooltipOpts(trigger='axis'), + xaxis_opts=opts.AxisOpts(type_='category', name=x_label, splitline_opts=opts.SplitLineOpts(is_show=True)), + yaxis_opts=opts.AxisOpts(type_='value', name=y_label, axistick_opts=opts.AxisTickOpts(is_show=True), splitline_opts=opts.SplitLineOpts(is_show=True)), ) diff --git a/tools/ci/python_packages/idf_iperf_test_util/PowerControl.py b/tools/ci/python_packages/idf_iperf_test_util/PowerControl.py index 17eb4322a6..bf0882e7e3 100644 --- a/tools/ci/python_packages/idf_iperf_test_util/PowerControl.py +++ b/tools/ci/python_packages/idf_iperf_test_util/PowerControl.py @@ -27,15 +27,15 @@ class Control(object): @classmethod def apc_telnet_make_choice(cls, telnet, choice): """ select a choice """ - telnet.read_until(b"Event Log") - telnet.read_until(b">") - telnet.write(choice.encode() + b"\r\n") + telnet.read_until(b'Event Log') + telnet.read_until(b'>') + telnet.write(choice.encode() + b'\r\n') @classmethod def apc_telnet_common_action(cls, telnet, check_str, action): """ wait until a pattern and then write a line """ telnet.read_until(check_str.encode()) - telnet.write(action.encode() + b"\r\n") + telnet.write(action.encode() + b'\r\n') @classmethod def control(cls, apc_ip, control_dict): @@ -48,45 +48,45 @@ class Control(object): for _outlet in control_dict: assert 0 < _outlet < 9 - assert control_dict[_outlet] in ["ON", "OFF"] + assert control_dict[_outlet] in ['ON', 'OFF'] # telnet # set timeout as 2s so that it won't waste time even can't access APC tn = telnetlib.Telnet(host=apc_ip, timeout=5) # log on - cls.apc_telnet_common_action(tn, "User Name :", "apc") - cls.apc_telnet_common_action(tn, "Password :", "apc") + cls.apc_telnet_common_action(tn, 'User Name :', 'apc') + cls.apc_telnet_common_action(tn, 'Password :', 'apc') # go to Device Manager - cls.apc_telnet_make_choice(tn, "1") + cls.apc_telnet_make_choice(tn, '1') # go to Outlet Management - cls.apc_telnet_make_choice(tn, "2") + cls.apc_telnet_make_choice(tn, '2') # go to Outlet Control/Configuration - cls.apc_telnet_make_choice(tn, "1") + cls.apc_telnet_make_choice(tn, '1') # do select Outlet and control for _outlet in control_dict: # choose Outlet cls.apc_telnet_make_choice(tn, str(_outlet)) # choose Control Outlet - cls.apc_telnet_make_choice(tn, "1") + cls.apc_telnet_make_choice(tn, '1') # choose action _action = control_dict[_outlet] - if "ON" in _action: - cls.apc_telnet_make_choice(tn, "1") + if 'ON' in _action: + cls.apc_telnet_make_choice(tn, '1') else: - cls.apc_telnet_make_choice(tn, "2") + cls.apc_telnet_make_choice(tn, '2') # do confirm - cls.apc_telnet_common_action(tn, "cancel :", "YES") - cls.apc_telnet_common_action(tn, "continue...", "") + cls.apc_telnet_common_action(tn, 'cancel :', 'YES') + cls.apc_telnet_common_action(tn, 'continue...', '') # return to Outlet Control/Configuration - cls.apc_telnet_make_choice(tn, "\033") - cls.apc_telnet_make_choice(tn, "\033") + cls.apc_telnet_make_choice(tn, '\033') + cls.apc_telnet_make_choice(tn, '\033') # exit to main menu and logout - tn.write(b"\033\r\n") - tn.write(b"\033\r\n") - 
tn.write(b"\033\r\n") - tn.write(b"4\r\n") + tn.write(b'\033\r\n') + tn.write(b'\033\r\n') + tn.write(b'\033\r\n') + tn.write(b'4\r\n') @classmethod def control_rest(cls, apc_ip, outlet, action): diff --git a/tools/ci/python_packages/idf_iperf_test_util/TestReport.py b/tools/ci/python_packages/idf_iperf_test_util/TestReport.py index d2847fee9d..d93b65b606 100644 --- a/tools/ci/python_packages/idf_iperf_test_util/TestReport.py +++ b/tools/ci/python_packages/idf_iperf_test_util/TestReport.py @@ -10,9 +10,9 @@ import os class ThroughputForConfigsReport(object): - THROUGHPUT_TYPES = ["tcp_tx", "tcp_rx", "udp_tx", "udp_rx"] + THROUGHPUT_TYPES = ['tcp_tx', 'tcp_rx', 'udp_tx', 'udp_rx'] - REPORT_FILE_NAME = "ThroughputForConfigs.md" + REPORT_FILE_NAME = 'ThroughputForConfigs.md' def __init__(self, output_path, ap_ssid, throughput_results, sdkconfig_files): """ @@ -42,14 +42,14 @@ class ThroughputForConfigsReport(object): @staticmethod def _parse_config_file(config_file_path): sdkconfig = {} - with open(config_file_path, "r") as f: + with open(config_file_path, 'r') as f: for line in f: if not line.isspace(): - if line[0] == "#": + if line[0] == '#': continue - name, value = line.split("=") - value = value.strip("\r\n") - sdkconfig[name] = value if value else "n" + name, value = line.split('=') + value = value.strip('\r\n') + sdkconfig[name] = value if value else 'n' return sdkconfig def _generate_the_difference_between_configs(self): @@ -65,7 +65,7 @@ class ThroughputForConfigsReport(object): """ - data = "## Config Definition:\r\n\r\n" + data = '## Config Definition:\r\n\r\n' def find_difference(base, new): _difference = {} @@ -75,13 +75,13 @@ class ThroughputForConfigsReport(object): try: _base_value = base[_config] except KeyError: - _base_value = "null" + _base_value = 'null' try: _new_value = new[_config] except KeyError: - _new_value = "null" + _new_value = 'null' if _base_value != _new_value: - _difference[_config] = "{} -> {}".format(_base_value, _new_value) + _difference[_config] = '{} -> {}'.format(_base_value, _new_value) return _difference for i, _config_name in enumerate(self.sort_order): @@ -96,9 +96,9 @@ class ThroughputForConfigsReport(object): if previous_config: # log the difference difference = find_difference(previous_config, current_config) - data += "* {} (compared to {}):\r\n".format(_config_name, previous_config_name) + data += '* {} (compared to {}):\r\n'.format(_config_name, previous_config_name) for diff_name in difference: - data += " * `{}`: {}\r\n".format(diff_name, difference[diff_name]) + data += ' * `{}`: {}\r\n'.format(diff_name, difference[diff_name]) return data def _generate_report_for_one_type(self, throughput_type): @@ -115,39 +115,39 @@ class ThroughputForConfigsReport(object): """ empty = True - ret = "\r\n### {} {}\r\n\r\n".format(*throughput_type.split("_")) - ret += "| config name | throughput (Mbps) | free heap size (bytes) |\r\n" - ret += "|-------------|-------------------|------------------------|\r\n" + ret = '\r\n### {} {}\r\n\r\n'.format(*throughput_type.split('_')) + ret += '| config name | throughput (Mbps) | free heap size (bytes) |\r\n' + ret += '|-------------|-------------------|------------------------|\r\n' for config in self.sort_order: try: result = self.results[config][throughput_type] - throughput = "{:.02f}".format(max(result.throughput_by_att[self.ap_ssid].values())) + throughput = '{:.02f}'.format(max(result.throughput_by_att[self.ap_ssid].values())) heap_size = str(result.heap_size) # although markdown table will do alignment # do 
align here for better text editor presentation - ret += "| {:<12}| {:<18}| {:<23}|\r\n".format(config, throughput, heap_size) + ret += '| {:<12}| {:<18}| {:<23}|\r\n'.format(config, throughput, heap_size) empty = False except KeyError: pass - return ret if not empty else "" + return ret if not empty else '' def generate_report(self): - data = "# Throughput for different configs\r\n" - data += "\r\nAP: {}\r\n".format(self.ap_ssid) + data = '# Throughput for different configs\r\n' + data += '\r\nAP: {}\r\n'.format(self.ap_ssid) for throughput_type in self.THROUGHPUT_TYPES: data += self._generate_report_for_one_type(throughput_type) - data += "\r\n------\r\n" + data += '\r\n------\r\n' data += self._generate_the_difference_between_configs() - with open(os.path.join(self.output_path, self.REPORT_FILE_NAME), "w") as f: + with open(os.path.join(self.output_path, self.REPORT_FILE_NAME), 'w') as f: f.write(data) class ThroughputVsRssiReport(object): - REPORT_FILE_NAME = "ThroughputVsRssi.md" + REPORT_FILE_NAME = 'ThroughputVsRssi.md' def __init__(self, output_path, throughput_results): """ @@ -160,7 +160,7 @@ class ThroughputVsRssiReport(object): } """ self.output_path = output_path - self.raw_data_path = os.path.join(output_path, "raw_data") + self.raw_data_path = os.path.join(output_path, 'raw_data') self.results = throughput_results self.throughput_types = list(self.results.keys()) self.throughput_types.sort() @@ -179,20 +179,20 @@ class ThroughputVsRssiReport(object): | udp rx | Failed | 55.44 | """ - ret = "\r\n### Summary\r\n\r\n" - ret += "| item | curve analysis | max throughput (Mbps) |\r\n" - ret += "|---------|----------------|-----------------------|\r\n" + ret = '\r\n### Summary\r\n\r\n' + ret += '| item | curve analysis | max throughput (Mbps) |\r\n' + ret += '|---------|----------------|-----------------------|\r\n' for _type in self.throughput_types: result = self.results[_type] max_throughput = 0.0 - curve_analysis = "Failed" if result.error_list else "Success" + curve_analysis = 'Failed' if result.error_list else 'Success' for ap_ssid in result.throughput_by_att: _max_for_ap = max(result.throughput_by_rssi[ap_ssid].values()) if _max_for_ap > max_throughput: max_throughput = _max_for_ap - max_throughput = "{:.02f}".format(max_throughput) - ret += "| {:<8}| {:<15}| {:<22}|\r\n".format("{}_{}".format(result.proto, result.direction), + max_throughput = '{:.02f}'.format(max_throughput) + ret += '| {:<8}| {:<15}| {:<22}|\r\n'.format('{}_{}'.format(result.proto, result.direction), curve_analysis, max_throughput) return ret @@ -217,29 +217,29 @@ class ThroughputVsRssiReport(object): """ result.post_analysis() - ret = "\r\n### {} {}\r\n".format(result.proto, result.direction) + ret = '\r\n### {} {}\r\n'.format(result.proto, result.direction) if result.error_list: - ret += "\r\nErrors:\r\n\r\n" + ret += '\r\nErrors:\r\n\r\n' for error in result.error_list: - ret += "* " + error + "\r\n" + ret += '* ' + error + '\r\n' for ap_ssid in result.throughput_by_rssi: - ret += "\r\nAP: {}\r\n".format(ap_ssid) + ret += '\r\nAP: {}\r\n'.format(ap_ssid) # draw figure - file_name = result.draw_throughput_figure(self.raw_data_path, ap_ssid, "rssi") - result.draw_throughput_figure(self.raw_data_path, ap_ssid, "att") + file_name = result.draw_throughput_figure(self.raw_data_path, ap_ssid, 'rssi') + result.draw_throughput_figure(self.raw_data_path, ap_ssid, 'att') result.draw_rssi_vs_att_figure(self.raw_data_path, ap_ssid) - ret += "\r\n[throughput Vs RSSI]({})\r\n".format(os.path.join("raw_data", 
file_name)) + ret += '\r\n[throughput Vs RSSI]({})\r\n'.format(os.path.join('raw_data', file_name)) return ret def generate_report(self): - data = "# Throughput Vs RSSI\r\n" + data = '# Throughput Vs RSSI\r\n' data += self._generate_summary() for _type in self.throughput_types: data += self._generate_report_for_one_type(self.results[_type]) - with open(os.path.join(self.output_path, self.REPORT_FILE_NAME), "w") as f: + with open(os.path.join(self.output_path, self.REPORT_FILE_NAME), 'w') as f: f.write(data) diff --git a/tools/ci/python_packages/tiny_test_fw/App.py b/tools/ci/python_packages/tiny_test_fw/App.py index 77053bf8fd..10ac232a98 100644 --- a/tools/ci/python_packages/tiny_test_fw/App.py +++ b/tools/ci/python_packages/tiny_test_fw/App.py @@ -80,9 +80,9 @@ class BaseApp(object): if not test_suite_name: test_suite_name = os.path.splitext(os.path.basename(sys.modules['__main__'].__file__))[0] sdk_path = cls.get_sdk_path() - log_folder = os.path.join(sdk_path, "TEST_LOGS", + log_folder = os.path.join(sdk_path, 'TEST_LOGS', test_suite_name + - time.strftime("_%m%d_%H_%M_%S", time.localtime(LOG_FOLDER_TIMESTAMP))) + time.strftime('_%m%d_%H_%M_%S', time.localtime(LOG_FOLDER_TIMESTAMP))) if not os.path.exists(log_folder): os.makedirs(log_folder) return log_folder diff --git a/tools/ci/python_packages/tiny_test_fw/DUT.py b/tools/ci/python_packages/tiny_test_fw/DUT.py index f2aa809edc..ec798a1501 100644 --- a/tools/ci/python_packages/tiny_test_fw/DUT.py +++ b/tools/ci/python_packages/tiny_test_fw/DUT.py @@ -38,12 +38,13 @@ If they using different port then need to implement their DUTPort class as well. """ from __future__ import print_function -import time + +import copy +import functools import re import sys import threading -import copy -import functools +import time # python2 and python3 queue package name is different try: @@ -82,15 +83,15 @@ def _decode_data(data): # convert bytes to string. This is a bit of a hack, we know that we want to log this # later so encode to the stdout encoding with backslash escapes for anything non-encodable try: - return data.decode(sys.stdout.encoding, "backslashreplace") + return data.decode(sys.stdout.encoding, 'backslashreplace') except UnicodeDecodeError: # Python <3.5 doesn't support backslashreplace - return data.decode(sys.stdout.encoding, "replace") + return data.decode(sys.stdout.encoding, 'replace') return data def _pattern_to_string(pattern): try: - ret = "RegEx: " + pattern.pattern + ret = 'RegEx: ' + pattern.pattern except AttributeError: ret = pattern return ret @@ -167,7 +168,7 @@ class _LogThread(threading.Thread, _queue.Queue): Then data will be passed to ``expect`` as soon as received. """ def __init__(self): - threading.Thread.__init__(self, name="LogThread") + threading.Thread.__init__(self, name='LogThread') _queue.Queue.__init__(self, maxsize=0) self.setDaemon(True) self.flush_lock = threading.Lock() @@ -177,7 +178,7 @@ class _LogThread(threading.Thread, _queue.Queue): :param filename: log file name :param data: log data. Must be ``bytes``. 
""" - self.put({"filename": filename, "data": data}) + self.put({'filename': filename, 'data': data}) def flush_data(self): with self.flush_lock: @@ -187,14 +188,14 @@ class _LogThread(threading.Thread, _queue.Queue): try: log = self.get_nowait() try: - data_cache[log["filename"]] += log["data"] + data_cache[log['filename']] += log['data'] except KeyError: - data_cache[log["filename"]] = log["data"] + data_cache[log['filename']] = log['data'] except _queue.Empty: break # flush data for filename in data_cache: - with open(filename, "ab+") as f: + with open(filename, 'ab+') as f: f.write(data_cache[filename]) def run(self): @@ -231,7 +232,7 @@ class RecvThread(threading.Thread): lines = decoded_data.splitlines(True) last_line = lines[-1] - if last_line[-1] != "\n": + if last_line[-1] != '\n': if len(lines) == 1: # only one line and the line is not finished, then append this to cache self._line_cache += lines[-1] @@ -239,7 +240,7 @@ class RecvThread(threading.Thread): else: # more than one line and not finished, replace line cache self._line_cache = lines[-1] - ret += "".join(lines[:-1]) + ret += ''.join(lines[:-1]) else: # line finishes, flush cache self._line_cache = str() @@ -302,7 +303,7 @@ class BaseDUT(object): self.start_receive() def __str__(self): - return "DUT({}: {})".format(self.name, str(self.port)) + return 'DUT({}: {})'.format(self.name, str(self.port)) def _save_expect_failure(self, pattern, data, start_time): """ @@ -311,8 +312,8 @@ class BaseDUT(object): The expect failures could be false alarm, and test case might generate a lot of such failures. Therefore, we don't print the failure immediately and limit the max size of failure list. """ - self.expect_failures.insert(0, {"pattern": pattern, "data": data, - "start": start_time, "end": time.time()}) + self.expect_failures.insert(0, {'pattern': pattern, 'data': data, + 'start': start_time, 'end': time.time()}) self.expect_failures = self.expect_failures[:self.MAX_EXPECT_FAILURES_TO_SAVED] def _save_dut_log(self, data): @@ -444,7 +445,7 @@ class BaseDUT(object): raise e return data - def write(self, data, eol="\r\n", flush=True): + def write(self, data, eol='\r\n', flush=True): """ :param data: data :param eol: end of line pattern. @@ -474,7 +475,7 @@ class BaseDUT(object): self.data_cache.flush(size) return data - def start_capture_raw_data(self, capture_id="default"): + def start_capture_raw_data(self, capture_id='default'): """ Sometime application want to get DUT raw data and use ``expect`` method at the same time. Capture methods provides a way to get raw data without affecting ``expect`` or ``read`` method. @@ -491,7 +492,7 @@ class BaseDUT(object): # otherwise, create new data cache self.recorded_data[capture_id] = _DataCache() - def stop_capture_raw_data(self, capture_id="default"): + def stop_capture_raw_data(self, capture_id='default'): """ Stop capture and get raw data. This method should be used after ``start_capture_raw_data`` on the same capture ID. @@ -504,9 +505,9 @@ class BaseDUT(object): ret = self.recorded_data[capture_id].get_data() self.recorded_data.pop(capture_id) except KeyError as e: - e.message = "capture_id does not exist. " \ - "You should call start_capture_raw_data with same ID " \ - "before calling stop_capture_raw_data" + e.message = 'capture_id does not exist. 
' \ + 'You should call start_capture_raw_data with same ID ' \ + 'before calling stop_capture_raw_data' raise e return ret @@ -552,9 +553,9 @@ class BaseDUT(object): return ret, index EXPECT_METHOD = [ - [type(re.compile("")), "_expect_re"], - [type(b''), "_expect_str"], # Python 2 & 3 hook to work without 'from builtins import str' from future - [type(u''), "_expect_str"], + [type(re.compile('')), '_expect_re'], + [type(b''), '_expect_str'], # Python 2 & 3 hook to work without 'from builtins import str' from future + [type(u''), '_expect_str'], ] def _get_expect_method(self, pattern): @@ -607,7 +608,7 @@ class BaseDUT(object): if ret is None: pattern = _pattern_to_string(pattern) self._save_expect_failure(pattern, data, start_time) - raise ExpectTimeout(self.name + ": " + pattern) + raise ExpectTimeout(self.name + ': ' + pattern) return stdout if full_stdout else ret def _expect_multi(self, expect_all, expect_item_list, timeout): @@ -622,12 +623,12 @@ class BaseDUT(object): def process_expected_item(item_raw): # convert item raw data to standard dict item = { - "pattern": item_raw[0] if isinstance(item_raw, tuple) else item_raw, - "method": self._get_expect_method(item_raw[0] if isinstance(item_raw, tuple) + 'pattern': item_raw[0] if isinstance(item_raw, tuple) else item_raw, + 'method': self._get_expect_method(item_raw[0] if isinstance(item_raw, tuple) else item_raw), - "callback": item_raw[1] if isinstance(item_raw, tuple) else None, - "index": -1, - "ret": None, + 'callback': item_raw[1] if isinstance(item_raw, tuple) else None, + 'index': -1, + 'ret': None, } return item @@ -642,9 +643,9 @@ class BaseDUT(object): for expect_item in expect_items: if expect_item not in matched_expect_items: # exclude those already matched - expect_item["ret"], expect_item["index"] = \ - expect_item["method"](data, expect_item["pattern"]) - if expect_item["ret"] is not None: + expect_item['ret'], expect_item['index'] = \ + expect_item['method'](data, expect_item['pattern']) + if expect_item['ret'] is not None: # match succeed for one item matched_expect_items.append(expect_item) @@ -664,20 +665,20 @@ class BaseDUT(object): if match_succeed: # sort matched items according to order of appearance in the input data, # so that the callbacks are invoked in correct order - matched_expect_items = sorted(matched_expect_items, key=lambda it: it["index"]) + matched_expect_items = sorted(matched_expect_items, key=lambda it: it['index']) # invoke callbacks and flush matched data cache slice_index = -1 for expect_item in matched_expect_items: # trigger callback - if expect_item["callback"]: - expect_item["callback"](expect_item["ret"]) - slice_index = max(slice_index, expect_item["index"]) + if expect_item['callback']: + expect_item['callback'](expect_item['ret']) + slice_index = max(slice_index, expect_item['index']) # flush already matched data self.data_cache.flush(slice_index) else: - pattern = str([_pattern_to_string(x["pattern"]) for x in expect_items]) + pattern = str([_pattern_to_string(x['pattern']) for x in expect_items]) self._save_expect_failure(pattern, data, start_time) - raise ExpectTimeout(self.name + ": " + pattern) + raise ExpectTimeout(self.name + ': ' + pattern) @_expect_lock def expect_any(self, *expect_items, **timeout): @@ -697,8 +698,8 @@ class BaseDUT(object): """ # to be compatible with python2 # in python3 we can write f(self, *expect_items, timeout=DEFAULT_TIMEOUT) - if "timeout" not in timeout: - timeout["timeout"] = self.DEFAULT_EXPECT_TIMEOUT + if 'timeout' not in timeout: + 
timeout['timeout'] = self.DEFAULT_EXPECT_TIMEOUT return self._expect_multi(False, expect_items, **timeout) @_expect_lock @@ -719,38 +720,38 @@ class BaseDUT(object): """ # to be compatible with python2 # in python3 we can write f(self, *expect_items, timeout=DEFAULT_TIMEOUT) - if "timeout" not in timeout: - timeout["timeout"] = self.DEFAULT_EXPECT_TIMEOUT + if 'timeout' not in timeout: + timeout['timeout'] = self.DEFAULT_EXPECT_TIMEOUT return self._expect_multi(True, expect_items, **timeout) @staticmethod def _format_ts(ts): - return "{}:{}".format(time.strftime("%m-%d %H:%M:%S", time.localtime(ts)), str(ts % 1)[2:5]) + return '{}:{}'.format(time.strftime('%m-%d %H:%M:%S', time.localtime(ts)), str(ts % 1)[2:5]) def print_debug_info(self): """ Print debug info of current DUT. Currently we will print debug info for expect failures. """ - Utility.console_log("DUT debug info for DUT: {}:".format(self.name), color="orange") + Utility.console_log('DUT debug info for DUT: {}:'.format(self.name), color='orange') for failure in self.expect_failures: - Utility.console_log(u"\t[pattern]: {}\r\n\t[data]: {}\r\n\t[time]: {} - {}\r\n" - .format(failure["pattern"], failure["data"], - self._format_ts(failure["start"]), self._format_ts(failure["end"])), - color="orange") + Utility.console_log(u'\t[pattern]: {}\r\n\t[data]: {}\r\n\t[time]: {} - {}\r\n' + .format(failure['pattern'], failure['data'], + self._format_ts(failure['start']), self._format_ts(failure['end'])), + color='orange') class SerialDUT(BaseDUT): """ serial with logging received data feature """ DEFAULT_UART_CONFIG = { - "baudrate": 115200, - "bytesize": serial.EIGHTBITS, - "parity": serial.PARITY_NONE, - "stopbits": serial.STOPBITS_ONE, - "timeout": 0.05, - "xonxoff": False, - "rtscts": False, + 'baudrate': 115200, + 'bytesize': serial.EIGHTBITS, + 'parity': serial.PARITY_NONE, + 'stopbits': serial.STOPBITS_ONE, + 'timeout': 0.05, + 'xonxoff': False, + 'rtscts': False, } def __init__(self, name, port, log_file, app, **kwargs): @@ -768,8 +769,8 @@ class SerialDUT(BaseDUT): :param data: raw data from read :return: formatted data (str) """ - timestamp = "[{}]".format(self._format_ts(time.time())) - formatted_data = timestamp.encode() + b"\r\n" + data + b"\r\n" + timestamp = '[{}]'.format(self._format_ts(time.time())) + formatted_data = timestamp.encode() + b'\r\n' + data + b'\r\n' return formatted_data def _port_open(self): diff --git a/tools/ci/python_packages/tiny_test_fw/Env.py b/tools/ci/python_packages/tiny_test_fw/Env.py index d3fefd8c77..e6c55b297d 100644 --- a/tools/ci/python_packages/tiny_test_fw/Env.py +++ b/tools/ci/python_packages/tiny_test_fw/Env.py @@ -13,12 +13,12 @@ # limitations under the License. """ Test Env, manages DUT, App and EnvConfig, interface for test cases to access these components """ +import functools import os import threading -import functools +import traceback import netifaces -import traceback from . 
import EnvConfig @@ -44,7 +44,7 @@ class Env(object): :keyword env_config_file: test env config file path :keyword test_name: test suite name, used when generate log folder name """ - CURRENT_LOG_FOLDER = "" + CURRENT_LOG_FOLDER = '' def __init__(self, app=None, @@ -79,7 +79,7 @@ class Env(object): :return: dut instance """ if dut_name in self.allocated_duts: - dut = self.allocated_duts[dut_name]["dut"] + dut = self.allocated_duts[dut_name]['dut'] else: if dut_class is None: dut_class = self.default_dut_cls @@ -95,7 +95,7 @@ class Env(object): result, detected_target = dut_class.confirm_dut(port) except ValueError: # try to auto detect ports - allocated_ports = [self.allocated_duts[x]["port"] for x in self.allocated_duts] + allocated_ports = [self.allocated_duts[x]['port'] for x in self.allocated_duts] available_ports = dut_class.list_available_ports() for port in available_ports: if port not in allocated_ports: @@ -113,17 +113,17 @@ class Env(object): if port: try: - dut_config = self.get_variable(dut_name + "_port_config") + dut_config = self.get_variable(dut_name + '_port_config') except ValueError: dut_config = dict() dut_config.update(dut_init_args) dut = dut_class(dut_name, port, - os.path.join(self.log_path, dut_name + ".log"), + os.path.join(self.log_path, dut_name + '.log'), app_inst, **dut_config) - self.allocated_duts[dut_name] = {"port": port, "dut": dut} + self.allocated_duts[dut_name] = {'port': port, 'dut': dut} else: - raise ValueError("Failed to get DUT") + raise ValueError('Failed to get DUT') return dut @_synced @@ -136,7 +136,7 @@ class Env(object): :return: None """ try: - dut = self.allocated_duts.pop(dut_name)["dut"] + dut = self.allocated_duts.pop(dut_name)['dut'] dut.close() except KeyError: pass @@ -153,13 +153,13 @@ class Env(object): return self.config.get_variable(variable_name) PROTO_MAP = { - "ipv4": netifaces.AF_INET, - "ipv6": netifaces.AF_INET6, - "mac": netifaces.AF_LINK, + 'ipv4': netifaces.AF_INET, + 'ipv6': netifaces.AF_INET6, + 'mac': netifaces.AF_LINK, } @_synced - def get_pc_nic_info(self, nic_name="pc_nic", proto="ipv4"): + def get_pc_nic_info(self, nic_name='pc_nic', proto='ipv4'): """ get_pc_nic_info(nic_name="pc_nic") try to get info of a specified NIC and protocol. @@ -192,7 +192,7 @@ class Env(object): """ dut_close_errors = [] for dut_name in self.allocated_duts: - dut = self.allocated_duts[dut_name]["dut"] + dut = self.allocated_duts[dut_name]['dut'] try: if dut_debug: dut.print_debug_info() diff --git a/tools/ci/python_packages/tiny_test_fw/EnvConfig.py b/tools/ci/python_packages/tiny_test_fw/EnvConfig.py index f51380b9f5..88cba8f6aa 100644 --- a/tools/ci/python_packages/tiny_test_fw/EnvConfig.py +++ b/tools/ci/python_packages/tiny_test_fw/EnvConfig.py @@ -79,5 +79,5 @@ class Config(object): # TODO: to support auto get variable here value = None if value is None: - raise ValueError("Failed to get variable") + raise ValueError('Failed to get variable') return value diff --git a/tools/ci/python_packages/tiny_test_fw/TinyFW.py b/tools/ci/python_packages/tiny_test_fw/TinyFW.py index d9e6abb10b..4f7737b26f 100644 --- a/tools/ci/python_packages/tiny_test_fw/TinyFW.py +++ b/tools/ci/python_packages/tiny_test_fw/TinyFW.py @@ -13,18 +13,15 @@ # limitations under the License. """ Interface for test cases. """ -import os -import time import functools +import os import socket +import time from datetime import datetime import junit_xml -from . import Env -from . import DUT -from . import App -from . import Utility +from . 
import DUT, App, Env, Utility class TestCaseFailed(AssertionError): @@ -37,7 +34,7 @@ class TestCaseFailed(AssertionError): 'cases' argument is the names of one or more test cases """ - message = "Test case{} failed: {}".format("s" if len(cases) > 1 else "", ", ".join(str(c) for c in cases)) + message = 'Test case{} failed: {}'.format('s' if len(cases) > 1 else '', ', '.join(str(c) for c in cases)) super(TestCaseFailed, self).__init__(self, message) @@ -50,11 +47,11 @@ class DefaultEnvConfig(object): 3. default env config get from this class """ DEFAULT_CONFIG = { - "app": App.BaseApp, - "dut": DUT.BaseDUT, - "env_tag": "default", - "env_config_file": None, - "test_suite_name": None, + 'app': App.BaseApp, + 'dut': DUT.BaseDUT, + 'env_tag': 'default', + 'env_config_file': None, + 'test_suite_name': None, } @classmethod @@ -78,10 +75,10 @@ get_default_config = DefaultEnvConfig.get_default_config MANDATORY_INFO = { - "execution_time": 1, - "env_tag": "default", - "category": "function", - "ignore": False, + 'execution_time': 1, + 'env_tag': 'default', + 'category': 'function', + 'ignore': False, } @@ -89,8 +86,8 @@ class JunitReport(object): # wrapper for junit test report # TODO: JunitReport methods are not thread safe (although not likely to be used this way). - JUNIT_FILE_NAME = "XUNIT_RESULT.xml" - JUNIT_DEFAULT_TEST_SUITE = "test-suite" + JUNIT_FILE_NAME = 'XUNIT_RESULT.xml' + JUNIT_DEFAULT_TEST_SUITE = 'test-suite' JUNIT_TEST_SUITE = junit_xml.TestSuite(JUNIT_DEFAULT_TEST_SUITE, hostname=socket.gethostname(), timestamp=datetime.utcnow().isoformat()) @@ -100,7 +97,7 @@ class JunitReport(object): @classmethod def output_report(cls, junit_file_path): """ Output current test result to file. """ - with open(os.path.join(junit_file_path, cls.JUNIT_FILE_NAME), "w") as f: + with open(os.path.join(junit_file_path, cls.JUNIT_FILE_NAME), 'w') as f: cls.JUNIT_TEST_SUITE.to_file(f, [cls.JUNIT_TEST_SUITE], prettyprint=False) @classmethod @@ -136,7 +133,7 @@ class JunitReport(object): """ # set stdout to empty string, so we can always append string to stdout. # It won't affect output logic. If stdout is empty, it won't be put to report. 
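# The timestamp stored in _TEST_CASE_CREATED_TS below is presumably what
# test_case_finish() later uses to derive the case's elapsed time.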
- test_case = junit_xml.TestCase(name, stdout="") + test_case = junit_xml.TestCase(name, stdout='') cls.JUNIT_CURRENT_TEST_CASE = test_case cls._TEST_CASE_CREATED_TS = time.time() return test_case @@ -151,7 +148,7 @@ class JunitReport(object): assert cls.JUNIT_CURRENT_TEST_CASE for item in performance_items: - cls.JUNIT_CURRENT_TEST_CASE.stdout += "[{}]: {}\n".format(item[0], item[1]) + cls.JUNIT_CURRENT_TEST_CASE.stdout += '[{}]: {}\n'.format(item[0], item[1]) def test_method(**kwargs): @@ -174,8 +171,8 @@ def test_method(**kwargs): def test(test_func): case_info = MANDATORY_INFO.copy() - case_info["name"] = case_info["ID"] = test_func.__name__ - case_info["junit_report_by_case"] = False + case_info['name'] = case_info['ID'] = test_func.__name__ + case_info['junit_report_by_case'] = False case_info.update(kwargs) @functools.wraps(test_func) @@ -197,12 +194,12 @@ def test_method(**kwargs): env_inst = Env.Env(**env_config) # prepare for xunit test results - junit_file_path = env_inst.app_cls.get_log_folder(env_config["test_suite_name"]) - junit_test_case = JunitReport.create_test_case(case_info["ID"]) + junit_file_path = env_inst.app_cls.get_log_folder(env_config['test_suite_name']) + junit_test_case = JunitReport.create_test_case(case_info['ID']) result = False try: - Utility.console_log("starting running test: " + test_func.__name__, color="green") + Utility.console_log('starting running test: ' + test_func.__name__, color='green') # execute test function test_func(env_inst, extra_data) # if finish without exception, test result is True @@ -224,16 +221,16 @@ def test_method(**kwargs): for error in close_errors: junit_test_case.add_failure_info(str(error)) result = False - if not case_info["junit_report_by_case"]: + if not case_info['junit_report_by_case']: JunitReport.test_case_finish(junit_test_case) # end case and output result JunitReport.output_report(junit_file_path) if result: - Utility.console_log("Test Succeed: " + test_func.__name__, color="green") + Utility.console_log('Test Succeed: ' + test_func.__name__, color='green') else: - Utility.console_log(("Test Fail: " + test_func.__name__), color="red") + Utility.console_log(('Test Fail: ' + test_func.__name__), color='red') return result handle_test.case_info = case_info diff --git a/tools/ci/python_packages/tiny_test_fw/Utility/CIAssignTest.py b/tools/ci/python_packages/tiny_test_fw/Utility/CIAssignTest.py index d20ddb146a..ae3b23425c 100644 --- a/tools/ci/python_packages/tiny_test_fw/Utility/CIAssignTest.py +++ b/tools/ci/python_packages/tiny_test_fw/Utility/CIAssignTest.py @@ -39,9 +39,9 @@ The Basic logic to assign test cases is as follow: """ +import json import os import re -import json import yaml @@ -50,13 +50,13 @@ try: except ImportError: from yaml import Loader as Loader -from . import (CaseConfig, SearchCases, GitlabCIJob, console_log) +from . import CaseConfig, GitlabCIJob, SearchCases, console_log class Group(object): MAX_EXECUTION_TIME = 30 MAX_CASE = 15 - SORT_KEYS = ["env_tag"] + SORT_KEYS = ['env_tag'] # Matching CI job rules could be different from the way we want to group test cases. # For example, when assign unit test cases, different test cases need to use different test functions. # We need to put them into different groups. 
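The grouping rule these limits enforce is easier to see in a minimal standalone sketch. This assumes cases are plain dicts carrying 'env_tag' and 'execution_time' keys, and group_cases is a hypothetical helper, not part of CIAssignTest:

MAX_EXECUTION_TIME = 30  # mirrors Group.MAX_EXECUTION_TIME
MAX_CASE = 15            # mirrors Group.MAX_CASE

def group_cases(cases):
    """Greedily pack cases sharing an env_tag into size- and time-limited groups."""
    groups = []
    for case in sorted(cases, key=lambda c: c['env_tag']):
        for group in groups:
            # a group accepts a case only while the tag matches and both limits hold
            if (group[0]['env_tag'] == case['env_tag']
                    and len(group) < MAX_CASE
                    and sum(c['execution_time'] for c in group) < MAX_EXECUTION_TIME):
                group.append(case)
                break
        else:
            groups.append([case])
    return groups

Because the limits are checked before a case is appended (matching the max_time/max_case test in the hunk below), a group can overshoot MAX_EXECUTION_TIME by at most one case's run time.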
@@ -92,7 +92,7 @@ class Group(object): :return: True or False """ - max_time = (sum([self._get_case_attr(x, "execution_time") for x in self.case_list]) + max_time = (sum([self._get_case_attr(x, 'execution_time') for x in self.case_list]) < self.MAX_EXECUTION_TIME) max_case = (len(self.case_list) < self.MAX_CASE) return max_time and max_case @@ -135,8 +135,8 @@ class Group(object): :return: {"Filter": case filter, "CaseConfig": list of case configs for cases in this group} """ output_data = { - "Filter": self.filters, - "CaseConfig": [{"name": self._get_case_attr(x, "name")} for x in self.case_list], + 'Filter': self.filters, + 'CaseConfig': [{'name': self._get_case_attr(x, 'name')} for x in self.case_list], } return output_data @@ -149,12 +149,12 @@ class AssignTest(object): :param ci_config_file: path of ``.gitlab-ci.yml`` """ # subclass need to rewrite CI test job pattern, to filter all test jobs - CI_TEST_JOB_PATTERN = re.compile(r"^test_.+") + CI_TEST_JOB_PATTERN = re.compile(r'^test_.+') # by default we only run function in CI, as other tests could take long time DEFAULT_FILTER = { - "category": "function", - "ignore": False, - "supported_in_ci": True, + 'category': 'function', + 'ignore': False, + 'supported_in_ci': True, } def __init__(self, test_case_paths, ci_config_file, case_group=Group): @@ -168,25 +168,25 @@ class AssignTest(object): def _handle_parallel_attribute(job_name, job): jobs_out = [] try: - for i in range(job["parallel"]): - jobs_out.append(GitlabCIJob.Job(job, job_name + "_{}".format(i + 1))) + for i in range(job['parallel']): + jobs_out.append(GitlabCIJob.Job(job, job_name + '_{}'.format(i + 1))) except KeyError: # Gitlab don't allow to set parallel to 1. # to make test job name same ($CI_JOB_NAME_$CI_NODE_INDEX), # we append "_" to jobs don't have parallel attribute - jobs_out.append(GitlabCIJob.Job(job, job_name + "_")) + jobs_out.append(GitlabCIJob.Job(job, job_name + '_')) return jobs_out def _parse_gitlab_ci_config(self, ci_config_file): - with open(ci_config_file, "r") as f: + with open(ci_config_file, 'r') as f: ci_config = yaml.load(f, Loader=Loader) job_list = list() for job_name in ci_config: if self.CI_TEST_JOB_PATTERN.search(job_name) is not None: job_list.extend(self._handle_parallel_attribute(job_name, ci_config[job_name])) - job_list.sort(key=lambda x: x["name"]) + job_list.sort(key=lambda x: x['name']) return job_list def search_cases(self, case_filter=None): @@ -256,7 +256,7 @@ class AssignTest(object): Bot could also pass test count. If filtered cases need to be tested for several times, then we do duplicate them here. 
""" - test_count = os.getenv("BOT_TEST_COUNT") + test_count = os.getenv('BOT_TEST_COUNT') if test_count: test_count = int(test_count) self.test_cases *= test_count @@ -269,7 +269,7 @@ class AssignTest(object): """ group_count = dict() for group in test_groups: - key = ",".join(group.ci_job_match_keys) + key = ','.join(group.ci_job_match_keys) try: group_count[key] += 1 except KeyError: @@ -305,26 +305,26 @@ class AssignTest(object): # print debug info # total requirement of current pipeline required_group_count = self._count_groups_by_keys(test_groups) - console_log("Required job count by tags:") + console_log('Required job count by tags:') for tags in required_group_count: - console_log("\t{}: {}".format(tags, required_group_count[tags])) + console_log('\t{}: {}'.format(tags, required_group_count[tags])) # number of unused jobs - not_used_jobs = [job for job in self.jobs if "case group" not in job] + not_used_jobs = [job for job in self.jobs if 'case group' not in job] if not_used_jobs: - console_log("{} jobs not used. Please check if you define too much jobs".format(len(not_used_jobs)), "O") + console_log('{} jobs not used. Please check if you define too much jobs'.format(len(not_used_jobs)), 'O') for job in not_used_jobs: - console_log("\t{}".format(job["name"]), "O") + console_log('\t{}'.format(job['name']), 'O') # failures if failed_to_assign: - console_log("Too many test cases vs jobs to run. " - "Please increase parallel count in tools/ci/config/target-test.yml " - "for jobs with specific tags:", "R") + console_log('Too many test cases vs jobs to run. ' + 'Please increase parallel count in tools/ci/config/target-test.yml ' + 'for jobs with specific tags:', 'R') failed_group_count = self._count_groups_by_keys(failed_to_assign) for tags in failed_group_count: - console_log("\t{}: {}".format(tags, failed_group_count[tags]), "R") - raise RuntimeError("Failed to assign test case to CI jobs") + console_log('\t{}: {}'.format(tags, failed_group_count[tags]), 'R') + raise RuntimeError('Failed to assign test case to CI jobs') def output_configs(self, output_path): """ diff --git a/tools/ci/python_packages/tiny_test_fw/Utility/CaseConfig.py b/tools/ci/python_packages/tiny_test_fw/Utility/CaseConfig.py index 1334ab3ea1..fa7fc31ba0 100644 --- a/tools/ci/python_packages/tiny_test_fw/Utility/CaseConfig.py +++ b/tools/ci/python_packages/tiny_test_fw/Utility/CaseConfig.py @@ -141,9 +141,9 @@ def filter_test_cases(test_methods, case_filter): class Parser(object): DEFAULT_CONFIG = { - "TestConfig": dict(), - "Filter": dict(), - "CaseConfig": [{"extra_data": None}], + 'TestConfig': dict(), + 'Filter': dict(), + 'CaseConfig': [{'extra_data': None}], } @classmethod @@ -156,7 +156,7 @@ class Parser(object): """ configs = cls.DEFAULT_CONFIG.copy() if config_file: - with open(config_file, "r") as f: + with open(config_file, 'r') as f: configs.update(yaml.load(f, Loader=Loader)) return configs @@ -170,8 +170,8 @@ class Parser(object): """ output = dict() for key in overwrite: - module = importlib.import_module(overwrite[key]["package"]) - output[key] = module.__getattribute__(overwrite[key]["class"]) + module = importlib.import_module(overwrite[key]['package']) + output[key] = module.__getattribute__(overwrite[key]['class']) return output @classmethod @@ -185,10 +185,10 @@ class Parser(object): """ configs = cls.parse_config_file(config_file) test_case_list = [] - for _config in configs["CaseConfig"]: - _filter = configs["Filter"].copy() - _overwrite = cls.handle_overwrite_args(_config.pop("overwrite", 
dict())) - _extra_data = _config.pop("extra_data", None) + for _config in configs['CaseConfig']: + _filter = configs['Filter'].copy() + _overwrite = cls.handle_overwrite_args(_config.pop('overwrite', dict())) + _extra_data = _config.pop('extra_data', None) _filter.update(_config) # Try get target from yml @@ -222,8 +222,8 @@ class Generator(object): def __init__(self): self.default_config = { - "TestConfig": dict(), - "Filter": dict(), + 'TestConfig': dict(), + 'Filter': dict(), } def set_default_configs(self, test_config, case_filter): @@ -232,7 +232,7 @@ class Generator(object): :param case_filter: "Filter" value :return: None """ - self.default_config = {"TestConfig": test_config, "Filter": case_filter} + self.default_config = {'TestConfig': test_config, 'Filter': case_filter} def generate_config(self, case_configs, output_file): """ @@ -241,6 +241,6 @@ class Generator(object): :return: None """ config = self.default_config.copy() - config.update({"CaseConfig": case_configs}) - with open(output_file, "w") as f: + config.update({'CaseConfig': case_configs}) + with open(output_file, 'w') as f: yaml.dump(config, f) diff --git a/tools/ci/python_packages/tiny_test_fw/Utility/GitlabCIJob.py b/tools/ci/python_packages/tiny_test_fw/Utility/GitlabCIJob.py index c5c9a66c66..1fa6477e77 100644 --- a/tools/ci/python_packages/tiny_test_fw/Utility/GitlabCIJob.py +++ b/tools/ci/python_packages/tiny_test_fw/Utility/GitlabCIJob.py @@ -26,8 +26,8 @@ class Job(dict): """ def __init__(self, job, job_name): super(Job, self).__init__(job) - self["name"] = job_name - self.tags = set(self["tags"]) + self['name'] = job_name + self.tags = set(self['tags']) def match_group(self, group): """ @@ -38,7 +38,7 @@ class Job(dict): :return: True or False """ match_result = False - if "case group" not in self and group.ci_job_match_keys == self.tags: + if 'case group' not in self and group.ci_job_match_keys == self.tags: # group not assigned and all tags match match_result = True return match_result @@ -49,7 +49,7 @@ class Job(dict): :param group: the case group to assign """ - self["case group"] = group + self['case group'] = group def output_config(self, file_path): """ @@ -59,7 +59,7 @@ class Job(dict): :param file_path: output file path :return: None """ - file_name = os.path.join(file_path, self["name"] + ".yml") - if "case group" in self: - with open(file_name, "w") as f: - yaml.safe_dump(self["case group"].output(), f, encoding='utf-8', default_flow_style=False) + file_name = os.path.join(file_path, self['name'] + '.yml') + if 'case group' in self: + with open(file_name, 'w') as f: + yaml.safe_dump(self['case group'].output(), f, encoding='utf-8', default_flow_style=False) diff --git a/tools/ci/python_packages/tiny_test_fw/Utility/SearchCases.py b/tools/ci/python_packages/tiny_test_fw/Utility/SearchCases.py index 9404c0698e..f61de3a7ec 100644 --- a/tools/ci/python_packages/tiny_test_fw/Utility/SearchCases.py +++ b/tools/ci/python_packages/tiny_test_fw/Utility/SearchCases.py @@ -13,23 +13,23 @@ # limitations under the License. """ search test cases from a given file or path """ -import os -import fnmatch -import types import copy +import fnmatch +import os +import types from . 
import load_source class Search(object): - TEST_CASE_FILE_PATTERN = "*_test.py" + TEST_CASE_FILE_PATTERN = '*_test.py' SUPPORT_REPLICATE_CASES_KEY = ['target'] @classmethod def _search_cases_from_file(cls, file_name): """ get test cases from test case .py file """ - print("Try to get cases from: " + file_name) + print('Try to get cases from: ' + file_name) test_functions = [] try: mod = load_source(file_name) @@ -42,14 +42,14 @@ class Search(object): except AttributeError: continue except ImportError as e: - print("ImportError: \r\n\tFile:" + file_name + "\r\n\tError:" + str(e)) + print('ImportError: \r\n\tFile:' + file_name + '\r\n\tError:' + str(e)) test_functions_out = [] for case in test_functions: test_functions_out += cls.replicate_case(case) for i, test_function in enumerate(test_functions_out): - print("\t{}. {} <{}>".format(i + 1, test_function.case_info["name"], test_function.case_info["target"])) + print('\t{}. {} <{}>'.format(i + 1, test_function.case_info['name'], test_function.case_info['target'])) test_function.case_info['app_dir'] = os.path.dirname(file_name) return test_functions_out @@ -58,7 +58,7 @@ class Search(object): """ search all test case files recursively of a path """ if not os.path.exists(test_case): - raise OSError("test case path not exist") + raise OSError('test case path not exist') if os.path.isdir(test_case): test_case_files = [] for root, _, file_names in os.walk(test_case): diff --git a/tools/ci/python_packages/tiny_test_fw/Utility/__init__.py b/tools/ci/python_packages/tiny_test_fw/Utility/__init__.py index 6548869de0..1ba3b51eed 100644 --- a/tools/ci/python_packages/tiny_test_fw/Utility/__init__.py +++ b/tools/ci/python_packages/tiny_test_fw/Utility/__init__.py @@ -1,4 +1,5 @@ from __future__ import print_function + import os.path import sys import time @@ -7,35 +8,35 @@ import traceback from .. import Env _COLOR_CODES = { - "white": u'\033[0m', - "red": u'\033[31m', - "green": u'\033[32m', - "orange": u'\033[33m', - "blue": u'\033[34m', - "purple": u'\033[35m', - "W": u'\033[0m', - "R": u'\033[31m', - "G": u'\033[32m', - "O": u'\033[33m', - "B": u'\033[34m', - "P": u'\033[35m' + 'white': u'\033[0m', + 'red': u'\033[31m', + 'green': u'\033[32m', + 'orange': u'\033[33m', + 'blue': u'\033[34m', + 'purple': u'\033[35m', + 'W': u'\033[0m', + 'R': u'\033[31m', + 'G': u'\033[32m', + 'O': u'\033[33m', + 'B': u'\033[34m', + 'P': u'\033[35m' } def _get_log_file_name(): if Env.Env.CURRENT_LOG_FOLDER: - file_name = os.path.join(Env.Env.CURRENT_LOG_FOLDER, "console.log") + file_name = os.path.join(Env.Env.CURRENT_LOG_FOLDER, 'console.log') else: - raise OSError("env log folder does not exist, will not save to log file") + raise OSError('env log folder does not exist, will not save to log file') return file_name def format_timestamp(): ts = time.time() - return "{}:{}".format(time.strftime("%m-%d %H:%M:%S", time.localtime(ts)), str(ts % 1)[2:5]) + return '{}:{}'.format(time.strftime('%m-%d %H:%M:%S', time.localtime(ts)), str(ts % 1)[2:5]) -def console_log(data, color="white", end="\n"): +def console_log(data, color='white', end='\n'): """ log data to console. 
(if not flush console log, Gitlab-CI won't update logs during job execution) @@ -44,19 +45,19 @@ def console_log(data, color="white", end="\n"): :param color: color """ if color not in _COLOR_CODES: - color = "white" + color = 'white' color_codes = _COLOR_CODES[color] if isinstance(data, type(b'')): data = data.decode('utf-8', 'replace') print(color_codes + data, end=end) - if color not in ["white", "W"]: + if color not in ['white', 'W']: # reset color to white for later logs - print(_COLOR_CODES["white"] + u"\r") + print(_COLOR_CODES['white'] + u'\r') sys.stdout.flush() - log_data = "[{}] ".format(format_timestamp()) + data + log_data = '[{}] '.format(format_timestamp()) + data try: log_file = _get_log_file_name() - with open(log_file, "a+") as f: + with open(log_file, 'a+') as f: f.write(log_data + end) except OSError: pass @@ -108,4 +109,4 @@ def handle_unexpected_exception(junit_test_case, exception): traceback.print_exc() # AssertionError caused by an 'assert' statement has an empty string as its 'str' form e_str = str(exception) if str(exception) else repr(exception) - junit_test_case.add_failure_info("Unexpected exception: {}\n{}".format(e_str, traceback.format_exc())) + junit_test_case.add_failure_info('Unexpected exception: {}\n{}'.format(e_str, traceback.format_exc())) diff --git a/tools/ci/python_packages/tiny_test_fw/bin/Runner.py b/tools/ci/python_packages/tiny_test_fw/bin/Runner.py index 913054f5d4..fc3c931c64 100644 --- a/tools/ci/python_packages/tiny_test_fw/bin/Runner.py +++ b/tools/ci/python_packages/tiny_test_fw/bin/Runner.py @@ -21,13 +21,13 @@ Command line interface to run test cases from a given path. Use ``python Runner.py test_case_path -c config_file -e env_config_file`` to run test cases. """ +import argparse import os import sys -import argparse import threading from tiny_test_fw import TinyFW -from tiny_test_fw.Utility import SearchCases, CaseConfig +from tiny_test_fw.Utility import CaseConfig, SearchCases class Runner(threading.Thread): @@ -43,7 +43,7 @@ class Runner(threading.Thread): if case_config: test_suite_name = os.path.splitext(os.path.basename(case_config))[0] else: - test_suite_name = "TestRunner" + test_suite_name = 'TestRunner' TinyFW.set_default_config(env_config_file=env_config_file, test_suite_name=test_suite_name) test_methods = SearchCases.Search.search_test_cases(test_case_paths) self.test_cases = CaseConfig.Parser.apply_config(test_methods, case_config) @@ -60,12 +60,12 @@ class Runner(threading.Thread): if __name__ == '__main__': parser = argparse.ArgumentParser() - parser.add_argument("test_cases", nargs='+', - help="test case folders or files") - parser.add_argument("--case_config", "-c", default=None, - help="case filter/config file") - parser.add_argument("--env_config_file", "-e", default=None, - help="test env config file") + parser.add_argument('test_cases', nargs='+', + help='test case folders or files') + parser.add_argument('--case_config', '-c', default=None, + help='case filter/config file') + parser.add_argument('--env_config_file', '-e', default=None, + help='test env config file') args = parser.parse_args() test_cases = [os.path.join(os.getenv('IDF_PATH'), path) if not os.path.isabs(path) else path for path in args.test_cases] @@ -78,7 +78,7 @@ if __name__ == '__main__': if not runner.is_alive(): break except KeyboardInterrupt: - print("exit by Ctrl-C") + print('exit by Ctrl-C') break if not runner.get_test_result(): sys.exit(1) diff --git a/tools/ci/python_packages/tiny_test_fw/bin/example.py 
b/tools/ci/python_packages/tiny_test_fw/bin/example.py index 238d3561cc..011ec6b7a0 100644 --- a/tools/ci/python_packages/tiny_test_fw/bin/example.py +++ b/tools/ci/python_packages/tiny_test_fw/bin/example.py @@ -19,7 +19,7 @@ import ttfw_idf from tiny_test_fw import TinyFW -@ttfw_idf.idf_example_test(env_tag="Example_WIFI") +@ttfw_idf.idf_example_test(env_tag='Example_WIFI') def test_examples_protocol_https_request(env, extra_data): """ steps: | @@ -27,17 +27,17 @@ def test_examples_protocol_https_request(env, extra_data): 2. connect to www.howsmyssl.com:443 3. send http request """ - dut1 = env.get_dut("https_request", "examples/protocols/https_request", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('https_request', 'examples/protocols/https_request', dut_class=ttfw_idf.ESP32DUT) dut1.start_app() - dut1.expect(re.compile(r"Connecting to www.howsmyssl.com:443"), timeout=30) - dut1.expect("Performing the SSL/TLS handshake") - dut1.expect("Certificate verified.", timeout=15) - dut1.expect_all(re.compile(r"Cipher suite is TLS-ECDHE-RSA-WITH-AES-128-GCM-SHA256"), - "Reading HTTP response", + dut1.expect(re.compile(r'Connecting to www.howsmyssl.com:443'), timeout=30) + dut1.expect('Performing the SSL/TLS handshake') + dut1.expect('Certificate verified.', timeout=15) + dut1.expect_all(re.compile(r'Cipher suite is TLS-ECDHE-RSA-WITH-AES-128-GCM-SHA256'), + 'Reading HTTP response', timeout=20) - dut1.expect(re.compile(r"Completed (\d) requests")) + dut1.expect(re.compile(r'Completed (\d) requests')) if __name__ == '__main__': - TinyFW.set_default_config(env_config_file="EnvConfigTemplate.yml", dut=ttfw_idf.IDFDUT) + TinyFW.set_default_config(env_config_file='EnvConfigTemplate.yml', dut=ttfw_idf.IDFDUT) test_examples_protocol_https_request() diff --git a/tools/ci/python_packages/tiny_test_fw/docs/conf.py b/tools/ci/python_packages/tiny_test_fw/docs/conf.py index 58f8449865..22d5329e26 100644 --- a/tools/ci/python_packages/tiny_test_fw/docs/conf.py +++ b/tools/ci/python_packages/tiny_test_fw/docs/conf.py @@ -18,6 +18,7 @@ # import os import sys + sys.path.insert(0, os.path.abspath('..')) # import sphinx_rtd_theme diff --git a/tools/ci/python_packages/ttfw_idf/CIScanTests.py b/tools/ci/python_packages/ttfw_idf/CIScanTests.py index 959c28c1ac..91f11b3ee7 100644 --- a/tools/ci/python_packages/ttfw_idf/CIScanTests.py +++ b/tools/ci/python_packages/ttfw_idf/CIScanTests.py @@ -7,9 +7,9 @@ from collections import defaultdict from copy import deepcopy from find_apps import find_apps -from find_build_apps import BUILD_SYSTEMS, BUILD_SYSTEM_CMAKE +from find_build_apps import BUILD_SYSTEM_CMAKE, BUILD_SYSTEMS +from idf_py_actions.constants import PREVIEW_TARGETS, SUPPORTED_TARGETS from ttfw_idf.IDFAssignTest import ExampleAssignTest, TestAppsAssignTest -from idf_py_actions.constants import SUPPORTED_TARGETS, PREVIEW_TARGETS TEST_LABELS = { 'example_test': 'BOT_LABEL_EXAMPLE_TEST', @@ -73,15 +73,15 @@ def main(): default=BUILD_SYSTEM_CMAKE) parser.add_argument('-c', '--ci-config-file', required=True, - help="gitlab ci config target-test file") + help='gitlab ci config target-test file') parser.add_argument('-o', '--output-path', required=True, - help="output path of the scan result") - parser.add_argument("--exclude", nargs="*", + help='output path of the scan result') + parser.add_argument('--exclude', nargs='*', help='Ignore specified directory. 
Can be used multiple times.') - parser.add_argument('--preserve', action="store_true", + parser.add_argument('--preserve', action='store_true', help='add this flag to preserve artifacts for all apps') - parser.add_argument('--build-all', action="store_true", + parser.add_argument('--build-all', action='store_true', help='add this flag to build all apps') args = parser.parse_args() diff --git a/tools/ci/python_packages/ttfw_idf/DebugUtils.py b/tools/ci/python_packages/ttfw_idf/DebugUtils.py index 16fdf481bf..80f2efb54b 100644 --- a/tools/ci/python_packages/ttfw_idf/DebugUtils.py +++ b/tools/ci/python_packages/ttfw_idf/DebugUtils.py @@ -13,12 +13,12 @@ # limitations under the License. from __future__ import unicode_literals -from io import open + import logging +from io import open import pexpect import pygdbmi.gdbcontroller - from tiny_test_fw import Utility try: diff --git a/tools/ci/python_packages/ttfw_idf/IDFApp.py b/tools/ci/python_packages/ttfw_idf/IDFApp.py index 74dea0ac80..6773d4f63e 100644 --- a/tools/ci/python_packages/ttfw_idf/IDFApp.py +++ b/tools/ci/python_packages/ttfw_idf/IDFApp.py @@ -22,7 +22,8 @@ import sys from abc import abstractmethod from tiny_test_fw import App -from .IDFAssignTest import ExampleGroup, TestAppsGroup, UnitTestGroup, IDFCaseGroup, ComponentUTGroup + +from .IDFAssignTest import ComponentUTGroup, ExampleGroup, IDFCaseGroup, TestAppsGroup, UnitTestGroup try: import gitlab_api @@ -36,8 +37,8 @@ def parse_encrypted_flag(args, offs, binary): # If the current entry is a partition, we have to check whether it is # the one we are looking for or not try: - if (entry["offset"], entry["file"]) == (offs, binary): - return entry["encrypted"] == "true" + if (entry['offset'], entry['file']) == (offs, binary): + return entry['encrypted'] == 'true' except (TypeError, KeyError): # TypeError occurs if the entry is a list, which is possible in JSON # data structure. @@ -58,12 +59,12 @@ def parse_flash_settings(path, default_encryption=False): # The following list only contains the files that need encryption encrypt_files = [] - if file_name == "flasher_args.json": + if file_name == 'flasher_args.json': # CMake version using build metadata file - with open(path, "r") as f: + with open(path, 'r') as f: args = json.load(f) - for (offs, binary) in args["flash_files"].items(): + for (offs, binary) in args['flash_files'].items(): if offs: flash_files.append((offs, binary)) encrypted = parse_encrypted_flag(args, offs, binary) @@ -73,15 +74,15 @@ def parse_flash_settings(path, default_encryption=False): if (encrypted is None and default_encryption) or encrypted: encrypt_files.append((offs, binary)) - flash_settings = args["flash_settings"] - app_name = os.path.splitext(args["app"]["file"])[0] + flash_settings = args['flash_settings'] + app_name = os.path.splitext(args['app']['file'])[0] else: # GNU Make version uses download.config arguments file - with open(path, "r") as f: - args = f.readlines()[-1].split(" ") + with open(path, 'r') as f: + args = f.readlines()[-1].split(' ') flash_settings = {} for idx in range(0, len(args), 2): # process arguments in pairs - if args[idx].startswith("--"): + if args[idx].startswith('--'): # strip the -- from the command line argument flash_settings[args[idx][2:]] = args[idx + 1] else: @@ -92,7 +93,7 @@ def parse_flash_settings(path, default_encryption=False): encrypt_files = flash_files # we can only guess app name in download.config. 
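For background on parse_flash_settings: in the CMake case the metadata file it reads is shaped roughly like the following (an abridged, illustrative flasher_args.json; offsets and file names are examples), with parse_encrypted_flag matching each (offset, file) pair against the per-image entries to read their 'encrypted' flag:

    {
        "flash_files": {
            "0x1000": "bootloader/bootloader.bin",
            "0x8000": "partition_table/partition-table.bin",
            "0x10000": "hello_world.bin"
        },
        "bootloader": {"offset": "0x1000", "file": "bootloader/bootloader.bin", "encrypted": "false"},
        "app": {"offset": "0x10000", "file": "hello_world.bin", "encrypted": "true"},
        "flash_settings": {"flash_mode": "dio", "flash_size": "2MB", "flash_freq": "40m"}
    }

The legacy GNU Make branch has no such metadata, which is why the loop below has to guess the app binary from the download.config file list.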
for p in flash_files: - if not os.path.dirname(p[1]) and "partition" not in p[1]: + if not os.path.dirname(p[1]) and 'partition' not in p[1]: # app bin usually in the same dir with download.config and it's not partition table app_name = os.path.splitext(p[1])[0] break @@ -107,9 +108,9 @@ class Artifacts(object): # at least one of app_path or config_name is not None. otherwise we can't match artifact assert app_path or config_name assert os.path.exists(artifact_index_file) - self.gitlab_inst = gitlab_api.Gitlab(os.getenv("CI_PROJECT_ID")) + self.gitlab_inst = gitlab_api.Gitlab(os.getenv('CI_PROJECT_ID')) self.dest_root_path = dest_root_path - with open(artifact_index_file, "r") as f: + with open(artifact_index_file, 'r') as f: artifact_index = json.load(f) self.artifact_info = self._find_artifact(artifact_index, app_path, config_name, target) @@ -120,11 +121,11 @@ class Artifacts(object): if app_path: # We use endswith here to avoid issue like: # examples_protocols_mqtt_ws but return a examples_protocols_mqtt_wss failure - match_result = artifact_info["app_dir"].endswith(app_path) + match_result = artifact_info['app_dir'].endswith(app_path) if config_name: - match_result = match_result and config_name == artifact_info["config"] + match_result = match_result and config_name == artifact_info['config'] if target: - match_result = match_result and target == artifact_info["target"] + match_result = match_result and target == artifact_info['target'] if match_result: ret = artifact_info break @@ -134,15 +135,15 @@ class Artifacts(object): def _get_app_base_path(self): if self.artifact_info: - return os.path.join(self.artifact_info["work_dir"], self.artifact_info["build_dir"]) + return os.path.join(self.artifact_info['work_dir'], self.artifact_info['build_dir']) else: return None def _get_flash_arg_file(self, base_path, job_id): - if self.artifact_info["build_system"] == "cmake": - flash_arg_file = os.path.join(base_path, "flasher_args.json") + if self.artifact_info['build_system'] == 'cmake': + flash_arg_file = os.path.join(base_path, 'flasher_args.json') else: - flash_arg_file = os.path.join(base_path, "download.config") + flash_arg_file = os.path.join(base_path, 'download.config') self.gitlab_inst.download_artifact(job_id, [flash_arg_file], self.dest_root_path) return flash_arg_file @@ -152,19 +153,19 @@ class Artifacts(object): # files also appear in the first list flash_files, _, _, app_name = parse_flash_settings(os.path.join(self.dest_root_path, flash_arg_file)) artifact_files = [os.path.join(base_path, p[1]) for p in flash_files] - artifact_files.append(os.path.join(base_path, app_name + ".elf")) + artifact_files.append(os.path.join(base_path, app_name + '.elf')) self.gitlab_inst.download_artifact(job_id, artifact_files, self.dest_root_path) def _download_sdkconfig_file(self, base_path, job_id): - self.gitlab_inst.download_artifact(job_id, [os.path.join(os.path.dirname(base_path), "sdkconfig")], + self.gitlab_inst.download_artifact(job_id, [os.path.join(os.path.dirname(base_path), 'sdkconfig')], self.dest_root_path) def download_artifacts(self): if not self.artifact_info: return None base_path = self._get_app_base_path() - job_id = self.artifact_info["ci_job_id"] + job_id = self.artifact_info['ci_job_id'] # 1. 
download flash args file flash_arg_file = self._get_flash_arg_file(base_path, job_id) @@ -177,15 +178,15 @@ class Artifacts(object): def download_artifact_files(self, file_names): if self.artifact_info: - base_path = os.path.join(self.artifact_info["work_dir"], self.artifact_info["build_dir"]) - job_id = self.artifact_info["ci_job_id"] + base_path = os.path.join(self.artifact_info['work_dir'], self.artifact_info['build_dir']) + job_id = self.artifact_info['ci_job_id'] # download all binary files artifact_files = [os.path.join(base_path, fn) for fn in file_names] self.gitlab_inst.download_artifact(job_id, artifact_files, self.dest_root_path) # download sdkconfig file - self.gitlab_inst.download_artifact(job_id, [os.path.join(os.path.dirname(base_path), "sdkconfig")], + self.gitlab_inst.download_artifact(job_id, [os.path.join(os.path.dirname(base_path), 'sdkconfig')], self.dest_root_path) else: base_path = None @@ -197,13 +198,13 @@ class UnitTestArtifacts(Artifacts): def _get_app_base_path(self): if self.artifact_info: - output_dir = self.BUILDS_DIR_RE.sub('output/', self.artifact_info["build_dir"]) - return os.path.join(self.artifact_info["app_dir"], output_dir) + output_dir = self.BUILDS_DIR_RE.sub('output/', self.artifact_info['build_dir']) + return os.path.join(self.artifact_info['app_dir'], output_dir) else: return None def _download_sdkconfig_file(self, base_path, job_id): - self.gitlab_inst.download_artifact(job_id, [os.path.join(base_path, "sdkconfig")], self.dest_root_path) + self.gitlab_inst.download_artifact(job_id, [os.path.join(base_path, 'sdkconfig')], self.dest_root_path) class IDFApp(App.BaseApp): @@ -212,8 +213,8 @@ class IDFApp(App.BaseApp): idf applications should inherent from this class and overwrite method get_binary_path. """ - IDF_DOWNLOAD_CONFIG_FILE = "download.config" - IDF_FLASH_ARGS_FILE = "flasher_args.json" + IDF_DOWNLOAD_CONFIG_FILE = 'download.config' + IDF_FLASH_ARGS_FILE = 'flasher_args.json' def __init__(self, app_path, config_name=None, target=None, case_group=IDFCaseGroup, artifact_cls=Artifacts): super(IDFApp, self).__init__(app_path) @@ -229,11 +230,11 @@ class IDFApp(App.BaseApp): assert os.path.exists(self.binary_path) if self.IDF_DOWNLOAD_CONFIG_FILE not in os.listdir(self.binary_path): if self.IDF_FLASH_ARGS_FILE not in os.listdir(self.binary_path): - msg = ("Neither {} nor {} exists. " + msg = ('Neither {} nor {} exists. ' "Try to run 'make print_flash_cmd | tail -n 1 > {}/{}' " "or 'idf.py build' " - "for resolving the issue." - "").format(self.IDF_DOWNLOAD_CONFIG_FILE, self.IDF_FLASH_ARGS_FILE, + 'for resolving the issue.' 
+ '').format(self.IDF_DOWNLOAD_CONFIG_FILE, self.IDF_FLASH_ARGS_FILE, self.binary_path, self.IDF_DOWNLOAD_CONFIG_FILE) raise AssertionError(msg) @@ -252,7 +253,7 @@ class IDFApp(App.BaseApp): @classmethod def get_sdk_path(cls): # type: () -> str - idf_path = os.getenv("IDF_PATH") + idf_path = os.getenv('IDF_PATH') assert idf_path assert os.path.exists(idf_path) return idf_path @@ -263,7 +264,7 @@ class IDFApp(App.BaseApp): Note: could be overwritten by a derived class to provide other locations or order """ - return [os.path.join(self.binary_path, "sdkconfig"), os.path.join(self.binary_path, "..", "sdkconfig")] + return [os.path.join(self.binary_path, 'sdkconfig'), os.path.join(self.binary_path, '..', 'sdkconfig')] def get_sdkconfig(self): """ @@ -306,13 +307,13 @@ class IDFApp(App.BaseApp): if path: return os.path.join(self.idf_path, path) else: - raise OSError("Failed to get binary for {}".format(self)) + raise OSError('Failed to get binary for {}'.format(self)) def _get_elf_file_path(self): - ret = "" + ret = '' file_names = os.listdir(self.binary_path) for fn in file_names: - if os.path.splitext(fn)[1] == ".elf": + if os.path.splitext(fn)[1] == '.elf': ret = os.path.join(self.binary_path, fn) return ret @@ -343,14 +344,14 @@ class IDFApp(App.BaseApp): # a default encrpytion flag: the macro # CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT sdkconfig_dict = self.get_sdkconfig() - default_encryption = "CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT" in sdkconfig_dict + default_encryption = 'CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT' in sdkconfig_dict flash_files, encrypt_files, flash_settings, _ = parse_flash_settings(path, default_encryption) # Flash setting "encrypt" only and only if all the files to flash # must be encrypted. Else, this parameter should be False. 
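(A toy illustration of this rule, with made-up entries: if flash_files is [('0x1000', 'boot.bin'), ('0x10000', 'app.bin')] but encrypt_files holds only ('0x10000', 'app.bin'), the sorted-list comparison below is False, so write_flash later receives the full list as plain addr_filename entries plus the app alone via encrypt_files; when the two lists match, 'encrypt' is True and everything is written in a single encrypted pass.)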
# All files must be encrypted if both file lists are the same - flash_settings["encrypt"] = sorted(flash_files) == sorted(encrypt_files) + flash_settings['encrypt'] = sorted(flash_files) == sorted(encrypt_files) return self._int_offs_abs_paths(flash_files), self._int_offs_abs_paths(encrypt_files), flash_settings @@ -363,9 +364,9 @@ class IDFApp(App.BaseApp): (Called from constructor) """ partition_tool = os.path.join(self.idf_path, - "components", - "partition_table", - "gen_esp32part.py") + 'components', + 'partition_table', + 'gen_esp32part.py') assert os.path.exists(partition_tool) errors = [] @@ -393,18 +394,18 @@ class IDFApp(App.BaseApp): p, os.linesep, msg) for p, msg in errors]) - raise ValueError("No partition table found for IDF binary path: {}{}{}".format(self.binary_path, + raise ValueError('No partition table found for IDF binary path: {}{}{}'.format(self.binary_path, os.linesep, traceback_msg)) partition_table = dict() for line in raw_data.splitlines(): - if line[0] != "#": + if line[0] != '#': try: - _name, _type, _subtype, _offset, _size, _flags = line.split(",") - if _size[-1] == "K": + _name, _type, _subtype, _offset, _size, _flags = line.split(',') + if _size[-1] == 'K': _size = int(_size[:-1]) * 1024 - elif _size[-1] == "M": + elif _size[-1] == 'M': _size = int(_size[:-1]) * 1024 * 1024 else: _size = int(_size) @@ -412,11 +413,11 @@ class IDFApp(App.BaseApp): except ValueError: continue partition_table[_name] = { - "type": _type, - "subtype": _subtype, - "offset": _offset, - "size": _size, - "flags": _flags + 'type': _type, + 'subtype': _subtype, + 'offset': _offset, + 'size': _size, + 'flags': _flags } return partition_table @@ -444,11 +445,11 @@ class Example(IDFApp): """ overrides the parent method to provide exact path of sdkconfig for example tests """ - return [os.path.join(self.binary_path, "..", "sdkconfig")] + return [os.path.join(self.binary_path, '..', 'sdkconfig')] def _try_get_binary_from_local_fs(self): # build folder of example path - path = os.path.join(self.idf_path, self.app_path, "build") + path = os.path.join(self.idf_path, self.app_path, 'build') if os.path.exists(path): return path @@ -456,11 +457,11 @@ class Example(IDFApp): # Path format: $IDF_PATH/build_examples/app_path_with_underscores/config/target # (see tools/ci/build_examples.sh) # For example: $IDF_PATH/build_examples/examples_get-started_blink/default/esp32 - app_path_underscored = self.app_path.replace(os.path.sep, "_") + app_path_underscored = self.app_path.replace(os.path.sep, '_') example_path = os.path.join(self.idf_path, self.case_group.LOCAL_BUILD_DIR) for dirpath in os.listdir(example_path): if os.path.basename(dirpath) == app_path_underscored: - path = os.path.join(example_path, dirpath, self.config_name, self.target, "build") + path = os.path.join(example_path, dirpath, self.config_name, self.target, 'build') if os.path.exists(path): return path else: @@ -476,18 +477,18 @@ class UT(IDFApp): super(UT, self).__init__(app_path, config_name, target, case_group, artifacts_cls) def _try_get_binary_from_local_fs(self): - path = os.path.join(self.idf_path, self.app_path, "build") + path = os.path.join(self.idf_path, self.app_path, 'build') if os.path.exists(path): return path # first try to get from build folder of unit-test-app - path = os.path.join(self.idf_path, "tools", "unit-test-app", "build") + path = os.path.join(self.idf_path, 'tools', 'unit-test-app', 'build') if os.path.exists(path): # found, use bin in build path return path # ``build_unit_test.sh`` will copy binary to
output folder - path = os.path.join(self.idf_path, "tools", "unit-test-app", "output", self.target, self.config_name) + path = os.path.join(self.idf_path, 'tools', 'unit-test-app', 'output', self.target, self.config_name) if os.path.exists(path): return path diff --git a/tools/ci/python_packages/ttfw_idf/IDFAssignTest.py b/tools/ci/python_packages/ttfw_idf/IDFAssignTest.py index 18714defde..e963536a93 100644 --- a/tools/ci/python_packages/ttfw_idf/IDFAssignTest.py +++ b/tools/ci/python_packages/ttfw_idf/IDFAssignTest.py @@ -18,7 +18,7 @@ import gitlab_api from tiny_test_fw.Utility import CIAssignTest try: - from idf_py_actions.constants import SUPPORTED_TARGETS, PREVIEW_TARGETS + from idf_py_actions.constants import PREVIEW_TARGETS, SUPPORTED_TARGETS except ImportError: SUPPORTED_TARGETS = [] PREVIEW_TARGETS = [] diff --git a/tools/ci/python_packages/ttfw_idf/IDFDUT.py b/tools/ci/python_packages/ttfw_idf/IDFDUT.py index d94f479dcd..6682760ae4 100644 --- a/tools/ci/python_packages/ttfw_idf/IDFDUT.py +++ b/tools/ci/python_packages/ttfw_idf/IDFDUT.py @@ -13,14 +13,15 @@ # limitations under the License. """ DUT for IDF applications """ +import functools import os import os.path -import sys import re -import functools -import tempfile import subprocess +import sys +import tempfile import time + import pexpect # python2 and python3 queue package name is different @@ -29,18 +30,16 @@ try: except ImportError: import queue as _queue - from serial.tools import list_ports - from tiny_test_fw import DUT, Utility try: import esptool except ImportError: # cheat and use IDF's copy of esptool if available - idf_path = os.getenv("IDF_PATH") + idf_path = os.getenv('IDF_PATH') if not idf_path or not os.path.exists(idf_path): raise - sys.path.insert(0, os.path.join(idf_path, "components", "esptool_py", "esptool")) + sys.path.insert(0, os.path.join(idf_path, 'components', 'esptool_py', 'esptool')) import esptool @@ -54,14 +53,14 @@ class IDFDUTException(RuntimeError): class IDFRecvThread(DUT.RecvThread): - PERFORMANCE_PATTERN = re.compile(r"\[Performance]\[(\w+)]: ([^\r\n]+)\r?\n") + PERFORMANCE_PATTERN = re.compile(r'\[Performance]\[(\w+)]: ([^\r\n]+)\r?\n') EXCEPTION_PATTERNS = [ re.compile(r"(Guru Meditation Error: Core\s+\d panic'ed \([\w].*?\))"), - re.compile(r"(abort\(\) was called at PC 0x[a-fA-F\d]{8} on core \d)"), - re.compile(r"(rst 0x\d+ \(TG\dWDT_SYS_RESET|TGWDT_CPU_RESET\))") + re.compile(r'(abort\(\) was called at PC 0x[a-fA-F\d]{8} on core \d)'), + re.compile(r'(rst 0x\d+ \(TG\dWDT_SYS_RESET|TGWDT_CPU_RESET\))') ] - BACKTRACE_PATTERN = re.compile(r"Backtrace:((\s(0x[0-9a-f]{8}):0x[0-9a-f]{8})+)") - BACKTRACE_ADDRESS_PATTERN = re.compile(r"(0x[0-9a-f]{8}):0x[0-9a-f]{8}") + BACKTRACE_PATTERN = re.compile(r'Backtrace:((\s(0x[0-9a-f]{8}):0x[0-9a-f]{8})+)') + BACKTRACE_ADDRESS_PATTERN = re.compile(r'(0x[0-9a-f]{8}):0x[0-9a-f]{8}') def __init__(self, read, dut): super(IDFRecvThread, self).__init__(read, dut) @@ -71,8 +70,8 @@ class IDFRecvThread(DUT.RecvThread): def collect_performance(self, comp_data): matches = self.PERFORMANCE_PATTERN.findall(comp_data) for match in matches: - Utility.console_log("[Performance][{}]: {}".format(match[0], match[1]), - color="orange") + Utility.console_log('[Performance][{}]: {}'.format(match[0], match[1]), + color='orange') self.performance_items.put((match[0], match[1])) def detect_exception(self, comp_data): @@ -83,7 +82,7 @@ class IDFRecvThread(DUT.RecvThread): if match: start = match.end() self.exceptions.put(match.group(0)) - Utility.console_log("[Exception]: 
{}".format(match.group(0)), color="red") + Utility.console_log('[Exception]: {}'.format(match.group(0)), color='red') else: break @@ -93,18 +92,18 @@ class IDFRecvThread(DUT.RecvThread): match = self.BACKTRACE_PATTERN.search(comp_data, pos=start) if match: start = match.end() - Utility.console_log("[Backtrace]:{}".format(match.group(1)), color="red") + Utility.console_log('[Backtrace]:{}'.format(match.group(1)), color='red') # translate backtrace addresses = self.BACKTRACE_ADDRESS_PATTERN.findall(match.group(1)) - translated_backtrace = "" + translated_backtrace = '' for addr in addresses: ret = self.dut.lookup_pc_address(addr) if ret: - translated_backtrace += ret + "\n" + translated_backtrace += ret + '\n' if translated_backtrace: - Utility.console_log("Translated backtrace\n:" + translated_backtrace, color="yellow") + Utility.console_log('Translated backtrace\n:' + translated_backtrace, color='yellow') else: - Utility.console_log("Failed to translate backtrace", color="yellow") + Utility.console_log('Failed to translate backtrace', color='yellow') else: break @@ -149,7 +148,7 @@ class IDFDUT(DUT.SerialDUT): # /dev/ttyAMA0 port is listed in Raspberry Pi # /dev/tty.Bluetooth-Incoming-Port port is listed in Mac - INVALID_PORT_PATTERN = re.compile(r"AMA|Bluetooth") + INVALID_PORT_PATTERN = re.compile(r'AMA|Bluetooth') # if need to erase NVS partition in start app ERASE_NVS = True RECV_THREAD_CLS = IDFRecvThread @@ -163,7 +162,7 @@ class IDFDUT(DUT.SerialDUT): @classmethod def _get_rom(cls): - raise NotImplementedError("This is an abstraction class, method not defined.") + raise NotImplementedError('This is an abstraction class, method not defined.') @classmethod def get_mac(cls, app, port): @@ -200,7 +199,7 @@ class IDFDUT(DUT.SerialDUT): # Otherwise overwrite it in ESP8266DUT inst = esptool.ESPLoader.detect_chip(port) if expected_rom_class and type(inst) != expected_rom_class: - raise RuntimeError("Target not expected") + raise RuntimeError('Target not expected') return inst.read_mac() is not None, get_target_by_rom_class(type(inst)) except(esptool.FatalError, RuntimeError): return False, None @@ -227,7 +226,7 @@ class IDFDUT(DUT.SerialDUT): # and encrypt_files contains the ones to flash encrypted. flash_files = self.app.flash_files encrypt_files = self.app.encrypt_files - encrypt = self.app.flash_settings.get("encrypt", False) + encrypt = self.app.flash_settings.get('encrypt', False) if encrypt: flash_files = encrypt_files encrypt_files = [] @@ -236,12 +235,12 @@ class IDFDUT(DUT.SerialDUT): for entry in flash_files if entry not in encrypt_files] - flash_files = [(offs, open(path, "rb")) for (offs, path) in flash_files] - encrypt_files = [(offs, open(path, "rb")) for (offs, path) in encrypt_files] + flash_files = [(offs, open(path, 'rb')) for (offs, path) in flash_files] + encrypt_files = [(offs, open(path, 'rb')) for (offs, path) in encrypt_files] if erase_nvs: - address = self.app.partition_table["nvs"]["offset"] - size = self.app.partition_table["nvs"]["size"] + address = self.app.partition_table['nvs']['offset'] + size = self.app.partition_table['nvs']['size'] nvs_file = tempfile.TemporaryFile() nvs_file.write(b'\xff' * size) nvs_file.seek(0) @@ -252,7 +251,7 @@ class IDFDUT(DUT.SerialDUT): # Get the CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT macro # value. If it is set to True, then NVS is always encrypted. 
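A note on the NVS handling just above: erased NOR flash reads back as all 0xFF, so flashing an all-ones image over the nvs partition is equivalent to erasing it, which is why the code fills a temporary file with b'\xff'. A minimal sketch of the same trick (partition size illustrative):

    import tempfile

    def make_blank_nvs_image(size):
        # An image of 0xFF bytes is indistinguishable from freshly erased flash,
        # so writing it over the partition effectively erases NVS.
        f = tempfile.TemporaryFile()
        f.write(b'\xff' * size)
        f.seek(0)
        return f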
sdkconfig_dict = self.app.get_sdkconfig() - macro_encryption = "CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT" in sdkconfig_dict + macro_encryption = 'CONFIG_SECURE_FLASH_ENCRYPTION_MODE_DEVELOPMENT' in sdkconfig_dict # If the macro is not enabled (plain text flash) or all files # must be encrypted, add NVS to flash_files. if not macro_encryption or encrypt: @@ -270,9 +269,9 @@ class IDFDUT(DUT.SerialDUT): # write_flash expects the parameter encrypt_files to be None and not # an empty list, so perform the check here flash_args = FlashArgs({ - 'flash_size': self.app.flash_settings["flash_size"], - 'flash_mode': self.app.flash_settings["flash_mode"], - 'flash_freq': self.app.flash_settings["flash_freq"], + 'flash_size': self.app.flash_settings['flash_size'], + 'flash_mode': self.app.flash_settings['flash_mode'], + 'flash_freq': self.app.flash_settings['flash_freq'], 'addr_filename': flash_files, 'encrypt_files': encrypt_files or None, 'no_stub': False, @@ -328,9 +327,9 @@ class IDFDUT(DUT.SerialDUT): """ raise NotImplementedError() # TODO: implement this # address = self.app.partition_table[partition]["offset"] - size = self.app.partition_table[partition]["size"] + size = self.app.partition_table[partition]['size'] # TODO can use esp.erase_region() instead of this, I think - with open(".erase_partition.tmp", "wb") as f: + with open('.erase_partition.tmp', 'wb') as f: f.write(chr(0xFF) * size) @_uses_esptool @@ -356,18 +355,18 @@ class IDFDUT(DUT.SerialDUT): """ if os.path.isabs(output_file) is False: output_file = os.path.relpath(output_file, self.app.get_log_folder()) - if "partition" in kwargs: - partition = self.app.partition_table[kwargs["partition"]] - _address = partition["offset"] - _size = partition["size"] - elif "address" in kwargs and "size" in kwargs: - _address = kwargs["address"] - _size = kwargs["size"] + if 'partition' in kwargs: + partition = self.app.partition_table[kwargs['partition']] + _address = partition['offset'] + _size = partition['size'] + elif 'address' in kwargs and 'size' in kwargs: + _address = kwargs['address'] + _size = kwargs['size'] else: raise IDFToolError("You must specify 'partition' or ('address' and 'size') to dump flash") content = esp.read_flash(_address, _size) - with open(output_file, "wb") as f: + with open(output_file, 'wb') as f: f.write(content) @classmethod @@ -405,9 +404,9 @@ class IDFDUT(DUT.SerialDUT): return ports def lookup_pc_address(self, pc_addr): - cmd = ["%saddr2line" % self.TOOLCHAIN_PREFIX, - "-pfiaC", "-e", self.app.elf_file, pc_addr] - ret = "" + cmd = ['%saddr2line' % self.TOOLCHAIN_PREFIX, + '-pfiaC', '-e', self.app.elf_file, pc_addr] + ret = '' try: translation = subprocess.check_output(cmd) ret = translation.decode() @@ -439,7 +438,7 @@ class IDFDUT(DUT.SerialDUT): def stop_receive(self): if self.receive_thread: - for name in ["performance_items", "exceptions"]: + for name in ['performance_items', 'exceptions']: source_queue = getattr(self.receive_thread, name) dest_queue = getattr(self, name) self._queue_copy(source_queue, dest_queue) @@ -447,7 +446,7 @@ class IDFDUT(DUT.SerialDUT): def get_exceptions(self): """ Get exceptions detected by DUT receive thread. """ - return self._get_from_queue("exceptions") + return self._get_from_queue('exceptions') def get_performance_items(self): """ @@ -456,18 +455,18 @@ class IDFDUT(DUT.SerialDUT): :return: a list of performance items. 
""" - return self._get_from_queue("performance_items") + return self._get_from_queue('performance_items') def close(self): super(IDFDUT, self).close() if not self.allow_dut_exception and self.get_exceptions(): - Utility.console_log("DUT exception detected on {}".format(self), color="red") + Utility.console_log('DUT exception detected on {}'.format(self), color='red') raise IDFDUTException() class ESP32DUT(IDFDUT): - TARGET = "esp32" - TOOLCHAIN_PREFIX = "xtensa-esp32-elf-" + TARGET = 'esp32' + TOOLCHAIN_PREFIX = 'xtensa-esp32-elf-' @classmethod def _get_rom(cls): @@ -478,8 +477,8 @@ class ESP32DUT(IDFDUT): class ESP32S2DUT(IDFDUT): - TARGET = "esp32s2" - TOOLCHAIN_PREFIX = "xtensa-esp32s2-elf-" + TARGET = 'esp32s2' + TOOLCHAIN_PREFIX = 'xtensa-esp32s2-elf-' @classmethod def _get_rom(cls): @@ -490,8 +489,8 @@ class ESP32S2DUT(IDFDUT): class ESP32C3DUT(IDFDUT): - TARGET = "esp32c3" - TOOLCHAIN_PREFIX = "riscv32-esp-elf-" + TARGET = 'esp32c3' + TOOLCHAIN_PREFIX = 'riscv32-esp-elf-' @classmethod def _get_rom(cls): @@ -502,8 +501,8 @@ class ESP32C3DUT(IDFDUT): class ESP8266DUT(IDFDUT): - TARGET = "esp8266" - TOOLCHAIN_PREFIX = "xtensa-lx106-elf-" + TARGET = 'esp8266' + TOOLCHAIN_PREFIX = 'xtensa-lx106-elf-' @classmethod def _get_rom(cls): @@ -528,34 +527,34 @@ class IDFQEMUDUT(IDFDUT): QEMU_SERIAL_PORT = 3334 def __init__(self, name, port, log_file, app, allow_dut_exception=False, **kwargs): - self.flash_image = tempfile.NamedTemporaryFile('rb+', suffix=".bin", prefix="qemu_flash_img") + self.flash_image = tempfile.NamedTemporaryFile('rb+', suffix='.bin', prefix='qemu_flash_img') self.app = app self.flash_size = 4 * 1024 * 1024 self._write_flash_img() args = [ - "qemu-system-xtensa", - "-nographic", - "-machine", self.TARGET, - "-drive", "file={},if=mtd,format=raw".format(self.flash_image.name), - "-nic", "user,model=open_eth", - "-serial", "tcp::{},server,nowait".format(self.QEMU_SERIAL_PORT), - "-S", - "-global driver=timer.esp32.timg,property=wdt_disable,value=true"] + 'qemu-system-xtensa', + '-nographic', + '-machine', self.TARGET, + '-drive', 'file={},if=mtd,format=raw'.format(self.flash_image.name), + '-nic', 'user,model=open_eth', + '-serial', 'tcp::{},server,nowait'.format(self.QEMU_SERIAL_PORT), + '-S', + '-global driver=timer.esp32.timg,property=wdt_disable,value=true'] # TODO(IDF-1242): generate a temporary efuse binary, pass it to QEMU - if "QEMU_BIOS_PATH" in os.environ: - args += ["-L", os.environ["QEMU_BIOS_PATH"]] + if 'QEMU_BIOS_PATH' in os.environ: + args += ['-L', os.environ['QEMU_BIOS_PATH']] - self.qemu = pexpect.spawn(" ".join(args), timeout=self.DEFAULT_EXPECT_TIMEOUT) - self.qemu.expect_exact(b"(qemu)") + self.qemu = pexpect.spawn(' '.join(args), timeout=self.DEFAULT_EXPECT_TIMEOUT) + self.qemu.expect_exact(b'(qemu)') super(IDFQEMUDUT, self).__init__(name, port, log_file, app, allow_dut_exception=allow_dut_exception, **kwargs) def _write_flash_img(self): self.flash_image.seek(0) self.flash_image.write(b'\x00' * self.flash_size) for offs, path in self.app.flash_files: - with open(path, "rb") as flash_file: + with open(path, 'rb') as flash_file: contents = flash_file.read() self.flash_image.seek(offs) self.flash_image.write(contents) @@ -568,7 +567,7 @@ class IDFQEMUDUT(IDFDUT): @classmethod def get_mac(cls, app, port): # TODO(IDF-1242): get this from QEMU/efuse binary - return "11:22:33:44:55:66" + return '11:22:33:44:55:66' @classmethod def confirm_dut(cls, port, **kwargs): @@ -577,30 +576,30 @@ class IDFQEMUDUT(IDFDUT): def start_app(self, erase_nvs=ERASE_NVS): # TODO: 
implement erase_nvs # since the flash image is generated every time in the constructor, maybe this isn't needed... - self.qemu.sendline(b"cont\n") - self.qemu.expect_exact(b"(qemu)") + self.qemu.sendline(b'cont\n') + self.qemu.expect_exact(b'(qemu)') def reset(self): - self.qemu.sendline(b"system_reset\n") - self.qemu.expect_exact(b"(qemu)") + self.qemu.sendline(b'system_reset\n') + self.qemu.expect_exact(b'(qemu)') def erase_partition(self, partition): - raise NotImplementedError("method erase_partition not implemented") + raise NotImplementedError('method erase_partition not implemented') def erase_flash(self): - raise NotImplementedError("method erase_flash not implemented") + raise NotImplementedError('method erase_flash not implemented') def dump_flash(self, output_file, **kwargs): - raise NotImplementedError("method dump_flash not implemented") + raise NotImplementedError('method dump_flash not implemented') @classmethod def list_available_ports(cls): - return ["socket://localhost:{}".format(cls.QEMU_SERIAL_PORT)] + return ['socket://localhost:{}'.format(cls.QEMU_SERIAL_PORT)] def close(self): super(IDFQEMUDUT, self).close() - self.qemu.sendline(b"q\n") - self.qemu.expect_exact(b"(qemu)") + self.qemu.sendline(b'q\n') + self.qemu.expect_exact(b'(qemu)') for _ in range(self.DEFAULT_EXPECT_TIMEOUT): if not self.qemu.isalive(): break @@ -610,5 +609,5 @@ class IDFQEMUDUT(IDFDUT): class ESP32QEMUDUT(IDFQEMUDUT): - TARGET = "esp32" - TOOLCHAIN_PREFIX = "xtensa-esp32-elf-" + TARGET = 'esp32' + TOOLCHAIN_PREFIX = 'xtensa-esp32-elf-' diff --git a/tools/ci/python_packages/ttfw_idf/__init__.py b/tools/ci/python_packages/ttfw_idf/__init__.py index 9366dcdb07..0f7641149f 100644 --- a/tools/ci/python_packages/ttfw_idf/__init__.py +++ b/tools/ci/python_packages/ttfw_idf/__init__.py @@ -19,12 +19,12 @@ import re from copy import deepcopy import junit_xml - from tiny_test_fw import TinyFW, Utility -from .DebugUtils import OCDBackend, GDBBackend, CustomProcess # noqa: export DebugUtils for users -from .IDFApp import IDFApp, Example, LoadableElfTestApp, UT, TestApp, ComponentUTApp # noqa: export all Apps for users -from .IDFDUT import IDFDUT, ESP32DUT, ESP32S2DUT, ESP32C3DUT, ESP8266DUT, ESP32QEMUDUT # noqa: export DUTs for users -from .unity_test_parser import TestResults, TestFormat + +from .DebugUtils import CustomProcess, GDBBackend, OCDBackend # noqa: export DebugUtils for users +from .IDFApp import UT, ComponentUTApp, Example, IDFApp, LoadableElfTestApp, TestApp # noqa: export all Apps for users +from .IDFDUT import ESP32C3DUT, ESP32DUT, ESP32QEMUDUT, ESP32S2DUT, ESP8266DUT, IDFDUT # noqa: export DUTs for users +from .unity_test_parser import TestFormat, TestResults # pass TARGET_DUT_CLS_DICT to Env.py to avoid circular dependency issue. 
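For orientation, the QEMU-backed DUT above drives the emulator through its monitor prompt with pexpect; condensed to its essentials the handshake is as follows (flash image path illustrative, full argument list as in the constructor above):

    import pexpect

    qemu = pexpect.spawn('qemu-system-xtensa -nographic -machine esp32 '
                         '-drive file=flash.bin,if=mtd,format=raw '
                         '-serial tcp::3334,server,nowait -S')
    qemu.expect_exact(b'(qemu)')    # monitor prompt; CPU is halted because of -S
    qemu.sendline(b'cont')          # start_app(): let the firmware run
    qemu.expect_exact(b'(qemu)')
    qemu.sendline(b'system_reset')  # reset(): warm-reset the emulated chip

The serial console itself is then reached as a normal DUT port via socket://localhost:3334.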
TARGET_DUT_CLS_DICT = { @@ -35,7 +35,7 @@ TARGET_DUT_CLS_DICT = { def format_case_id(target, case_name): - return "{}.{}".format(target, case_name) + return '{}.{}'.format(target, case_name) try: @@ -128,13 +128,13 @@ def test_func_generator(func, app, target, ci_target, module, execution_time, le dut_dict=dut_classes, **kwargs ) test_func = original_method(func) - test_func.case_info["ID"] = format_case_id(target, test_func.case_info["name"]) + test_func.case_info['ID'] = format_case_id(target, test_func.case_info['name']) return test_func @ci_target_check -def idf_example_test(app=Example, target="ESP32", ci_target=None, module="examples", execution_time=1, - level="example", erase_nvs=True, config_name=None, **kwargs): +def idf_example_test(app=Example, target='ESP32', ci_target=None, module='examples', execution_time=1, + level='example', erase_nvs=True, config_name=None, **kwargs): """ decorator for testing idf examples (with default values for some keyword args). @@ -155,8 +155,8 @@ def idf_example_test(app=Example, target="ESP32", ci_target=None, module="exampl @ci_target_check -def idf_unit_test(app=UT, target="ESP32", ci_target=None, module="unit-test", execution_time=1, - level="unit", erase_nvs=True, **kwargs): +def idf_unit_test(app=UT, target='ESP32', ci_target=None, module='unit-test', execution_time=1, + level='unit', erase_nvs=True, **kwargs): """ decorator for testing idf unit tests (with default values for some keyword args). @@ -176,8 +176,8 @@ def idf_unit_test(app=UT, target="ESP32", ci_target=None, module="unit-test", ex @ci_target_check -def idf_custom_test(app=TestApp, target="ESP32", ci_target=None, module="misc", execution_time=1, - level="integration", erase_nvs=True, config_name=None, **kwargs): +def idf_custom_test(app=TestApp, target='ESP32', ci_target=None, module='misc', execution_time=1, + level='integration', erase_nvs=True, config_name=None, **kwargs): """ decorator for idf custom tests (with default values for some keyword args). @@ -198,8 +198,8 @@ def idf_custom_test(app=TestApp, target="ESP32", ci_target=None, module="misc", @ci_target_check -def idf_component_unit_test(app=ComponentUTApp, target="ESP32", ci_target=None, module="misc", execution_time=1, - level="integration", erase_nvs=True, config_name=None, **kwargs): +def idf_component_unit_test(app=ComponentUTApp, target='ESP32', ci_target=None, module='misc', execution_time=1, + level='integration', erase_nvs=True, config_name=None, **kwargs): """ decorator for idf custom tests (with default values for some keyword args). 
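One consequence of format_case_id and test_func_generator above, together with the target replication in SearchCases earlier in this diff: a case decorated for several targets is expanded into one test function per target, and each copy gets a JUnit case ID of the form '<target>.<name>', for example:

    # illustrative values
    assert format_case_id('esp32', 'test_startup') == 'esp32.test_startup'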
@@ -253,11 +253,11 @@ def log_performance(item, value): :param item: performance item name :param value: performance value """ - performance_msg = "[Performance][{}]: {}".format(item, value) - Utility.console_log(performance_msg, "orange") + performance_msg = '[Performance][{}]: {}'.format(item, value) + Utility.console_log(performance_msg, 'orange') # update to junit test report current_junit_case = TinyFW.JunitReport.get_current_test_case() - current_junit_case.stdout += performance_msg + "\r\n" + current_junit_case.stdout += performance_msg + '\r\n' def check_performance(item, value, target): @@ -300,7 +300,7 @@ def check_performance(item, value, target): # if no exception was thrown then the performance is met and no need to continue break else: - raise AssertionError("Failed to get performance standard for {}".format(item)) + raise AssertionError('Failed to get performance standard for {}'.format(item)) MINIMUM_FREE_HEAP_SIZE_RE = re.compile(r'Minimum free heap size: (\d+) bytes') diff --git a/tools/ci/python_packages/ttfw_idf/unity_test_parser.py b/tools/ci/python_packages/ttfw_idf/unity_test_parser.py index 63c4bb140e..e3b8c3c81b 100644 --- a/tools/ci/python_packages/ttfw_idf/unity_test_parser.py +++ b/tools/ci/python_packages/ttfw_idf/unity_test_parser.py @@ -13,13 +13,13 @@ import re import junit_xml -_NORMAL_TEST_REGEX = re.compile(r"(?P<file>.+):(?P<line>\d+):(?P<test_name>[^\s:]+):(?P<result>PASS|FAIL|IGNORE)(?:: (?P<message>.+))?") -_UNITY_FIXTURE_VERBOSE_PREFIX_REGEX = re.compile(r"(?P<prefix>TEST\((?P<test_group>[^\s,]+), (?P<test_name>[^\s\)]+)\))(?P<remainder>.+)?$") -_UNITY_FIXTURE_REMAINDER_REGEX = re.compile(r"^(?P<file>.+):(?P<line>\d+)::(?P<result>PASS|FAIL|IGNORE)(?:: (?P<message>.+))?") +_NORMAL_TEST_REGEX = re.compile(r'(?P<file>.+):(?P<line>\d+):(?P<test_name>[^\s:]+):(?P<result>PASS|FAIL|IGNORE)(?:: (?P<message>.+))?') +_UNITY_FIXTURE_VERBOSE_PREFIX_REGEX = re.compile(r'(?P<prefix>TEST\((?P<test_group>[^\s,]+), (?P<test_name>[^\s\)]+)\))(?P<remainder>.+)?$') +_UNITY_FIXTURE_REMAINDER_REGEX = re.compile(r'^(?P<file>.+):(?P<line>\d+)::(?P<result>PASS|FAIL|IGNORE)(?:: (?P<message>.+))?') _TEST_SUMMARY_BLOCK_REGEX = re.compile( - r"^(?P<num_tests>\d+) Tests (?P<num_failures>\d+) Failures (?P<num_ignored>\d+) Ignored\s*\r?\n(?P<overall_result>OK|FAIL)(?:ED)?", re.MULTILINE + r'^(?P<num_tests>\d+) Tests (?P<num_failures>\d+) Failures (?P<num_ignored>\d+) Ignored\s*\r?\n(?P<overall_result>OK|FAIL)(?:ED)?', re.MULTILINE ) -_TEST_RESULT_ENUM = ["PASS", "FAIL", "IGNORE"] +_TEST_RESULT_ENUM = ['PASS', 'FAIL', 'IGNORE'] class TestFormat(enum.Enum): @@ -63,14 +63,14 @@ class TestResult: self, test_name, result, - group="default", - file="", + group='default', + file='', line=0, - message="", - full_line="", + message='', + full_line='', ): if result not in _TEST_RESULT_ENUM: - raise ValueError("result must be one of {}.".format(_TEST_RESULT_ENUM)) + raise ValueError('result must be one of {}.'.format(_TEST_RESULT_ENUM)) self._test_name = test_name self._result = result @@ -78,11 +78,11 @@ class TestResult: self._message = message self._full_line = full_line - if result != "PASS": + if result != 'PASS': self._file = file self._line = line else: - self._file = "" + self._file = '' self._line = 0 def file(self): @@ -150,7 +150,7 @@ class TestResults: self._parse_unity_fixture_verbose(test_output) else: raise ValueError( - "test_format must be one of UNITY_BASIC or UNITY_FIXTURE_VERBOSE." + 'test_format must be one of UNITY_BASIC or UNITY_FIXTURE_VERBOSE.' ) def num_tests(self): @@ -185,7 +185,7 @@ class TestResults: return self._tests def to_junit( - self, suite_name="all_tests", + self, suite_name='all_tests', ): """ Convert the tests to JUnit XML.
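To make the regexes above concrete, UNITY_BASIC output is one line per test plus a summary block, along these lines (file names and counts illustrative):

    main/test_foo.c:42:test_addition:PASS
    main/test_foo.c:57:test_overflow:FAIL: Expected 1 Was 0

    2 Tests 1 Failures 0 Ignored
    FAIL

_NORMAL_TEST_REGEX consumes the per-test lines and _TEST_SUMMARY_BLOCK_REGEX the two-line summary; the parser later cross-checks that the counted results match the summary block.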
@@ -207,7 +207,7 @@ class TestResults: test_case_list = [] for test in self._tests: - if test.result() == "PASS": + if test.result() == 'PASS': test_case_list.append( junit_xml.TestCase(name=test.name(), classname=test.group()) ) @@ -218,11 +218,11 @@ class TestResults: file=test.file(), line=test.line(), ) - if test.result() == "FAIL": + if test.result() == 'FAIL': junit_tc.add_failure_info( message=test.message(), output=test.full_line() ) - elif test.result() == "IGNORE": + elif test.result() == 'IGNORE': junit_tc.add_skipped_info( message=test.message(), output=test.full_line() ) @@ -245,17 +245,17 @@ class TestResults: """ match = _TEST_SUMMARY_BLOCK_REGEX.search(unity_output) if not match: - raise ValueError("A Unity test summary block was not found.") + raise ValueError('A Unity test summary block was not found.') try: stats = TestStats() - stats.total = int(match.group("num_tests")) - stats.failed = int(match.group("num_failures")) - stats.ignored = int(match.group("num_ignored")) + stats.total = int(match.group('num_tests')) + stats.failed = int(match.group('num_failures')) + stats.ignored = int(match.group('num_ignored')) stats.passed = stats.total - stats.failed - stats.ignored return stats except ValueError: - raise ValueError("The Unity test summary block was not valid.") + raise ValueError('The Unity test summary block was not valid.') def _parse_unity_basic(self, unity_output): """ @@ -268,13 +268,13 @@ class TestResults: for test in _NORMAL_TEST_REGEX.finditer(unity_output): try: new_test = TestResult( - test.group("test_name"), - test.group("result"), - file=test.group("file"), - line=int(test.group("line")), - message=test.group("message") - if test.group("message") is not None - else "", + test.group('test_name'), + test.group('result'), + file=test.group('file'), + line=int(test.group('line')), + message=test.group('message') + if test.group('message') is not None + else '', full_line=test.group(0), ) except ValueError: @@ -283,10 +283,10 @@ class TestResults: self._add_new_test(new_test, found_test_stats) if len(self._tests) == 0: - raise ValueError("No tests were found.") + raise ValueError('No tests were found.') if found_test_stats != self._test_stats: - raise ValueError("Test output does not match summary block.") + raise ValueError('Test output does not match summary block.') def _parse_unity_fixture_verbose(self, unity_output): """ @@ -309,7 +309,7 @@ class TestResults: if prefix_match: # Handle the remaining portion of a test case line after the unity_fixture # prefix. 
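For orientation, unity_fixture verbose lines look roughly like the following, with made-up group and test names; the prefix regex captures the 'TEST(group, name)' part, and everything after it is the remainder handled below:

    TEST(timers, one_shot) PASS
    TEST(timers, periodic)main/test_timers.c:88::FAIL: Expected 3 Was 2

A remainder of exactly ' PASS' is recorded directly; otherwise _UNITY_FIXTURE_REMAINDER_REGEX extracts file, line, result and message.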
- remainder = prefix_match.group("remainder") + remainder = prefix_match.group('remainder') if remainder: self._parse_unity_fixture_remainder( prefix_match, remainder, found_test_stats @@ -324,10 +324,10 @@ class TestResults: pass if len(self._tests) == 0: - raise ValueError("No tests were found.") + raise ValueError('No tests were found.') if found_test_stats != self._test_stats: - raise ValueError("Test output does not match summary block.") + raise ValueError('Test output does not match summary block.') def _parse_unity_fixture_remainder(self, prefix_match, remainder, test_stats): """ @@ -337,26 +337,26 @@ class TestResults: """ new_test = None - if remainder == " PASS": + if remainder == ' PASS': new_test = TestResult( - prefix_match.group("test_name"), - "PASS", - group=prefix_match.group("test_group"), + prefix_match.group('test_name'), + 'PASS', + group=prefix_match.group('test_group'), full_line=prefix_match.group(0), ) else: remainder_match = _UNITY_FIXTURE_REMAINDER_REGEX.match(remainder) if remainder_match: new_test = TestResult( - prefix_match.group("test_name"), - remainder_match.group("result"), - group=prefix_match.group("test_group"), - file=remainder_match.group("file"), - line=int(remainder_match.group("line")), - message=remainder_match.group("message") - if remainder_match.group("message") is not None - else "", - full_line=prefix_match.group("prefix") + remainder_match.group(0), + prefix_match.group('test_name'), + remainder_match.group('result'), + group=prefix_match.group('test_group'), + file=remainder_match.group('file'), + line=int(remainder_match.group('line')), + message=remainder_match.group('message') + if remainder_match.group('message') is not None + else '', + full_line=prefix_match.group('prefix') + remainder_match.group(0), ) if new_test is not None: @@ -365,9 +365,9 @@ class TestResults: def _add_new_test(self, new_test, test_stats): """Add a new test and increment the proper members of test_stats.""" test_stats.total += 1 - if new_test.result() == "PASS": + if new_test.result() == 'PASS': test_stats.passed += 1 - elif new_test.result() == "FAIL": + elif new_test.result() == 'FAIL': test_stats.failed += 1 else: test_stats.ignored += 1 diff --git a/tools/ci/python_packages/wifi_tools.py b/tools/ci/python_packages/wifi_tools.py index 81b8eea28c..4d517c982f 100644 --- a/tools/ci/python_packages/wifi_tools.py +++ b/tools/ci/python_packages/wifi_tools.py @@ -13,10 +13,11 @@ # limitations under the License. 
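The wifi_tools.py changes that follow touch the wpa_cli helper, which drives wpa_supplicant over D-Bus. A minimal sketch of the property-read pattern it uses, assuming a Linux host with wpa_supplicant running and a placeholder interface name 'wlan0':

    import dbus

    bus = dbus.SystemBus()
    service = dbus.Interface(bus.get_object('fi.w1.wpa_supplicant1', '/fi/w1/wpa_supplicant1'),
                             'fi.w1.wpa_supplicant1')
    # 'wlan0' is a placeholder; GetInterface raises if that interface is not managed.
    iface_path = service.GetInterface('wlan0')
    iface_obj = bus.get_object('fi.w1.wpa_supplicant1', iface_path)
    props = dbus.Interface(iface_obj, 'org.freedesktop.DBus.Properties')
    # Values come back dbus-wrapped, so convert before comparing.
    state = str(props.Get('fi.w1.wpa_supplicant1.Interface', 'State'))
    print(state)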
# +import time + import dbus import dbus.mainloop.glib import netifaces -import time def get_wiface_name(): @@ -46,17 +47,17 @@ class wpa_cli: dbus.mainloop.glib.DBusGMainLoop(set_as_default=True) bus = dbus.SystemBus() - service = dbus.Interface(bus.get_object("fi.w1.wpa_supplicant1", "/fi/w1/wpa_supplicant1"), - "fi.w1.wpa_supplicant1") + service = dbus.Interface(bus.get_object('fi.w1.wpa_supplicant1', '/fi/w1/wpa_supplicant1'), + 'fi.w1.wpa_supplicant1') iface_path = service.GetInterface(self.iface_name) - self.iface_obj = bus.get_object("fi.w1.wpa_supplicant1", iface_path) - self.iface_ifc = dbus.Interface(self.iface_obj, "fi.w1.wpa_supplicant1.Interface") + self.iface_obj = bus.get_object('fi.w1.wpa_supplicant1', iface_path) + self.iface_ifc = dbus.Interface(self.iface_obj, 'fi.w1.wpa_supplicant1.Interface') self.iface_props = dbus.Interface(self.iface_obj, 'org.freedesktop.DBus.Properties') if self.iface_ifc is None: raise RuntimeError('supplicant : Failed to fetch interface') - self.old_network = self._get_iface_property("CurrentNetwork") - print("Old network is %s" % self.old_network) + self.old_network = self._get_iface_property('CurrentNetwork') + print('Old network is %s' % self.old_network) if self.old_network == '/': self.old_network = None @@ -69,7 +70,7 @@ class wpa_cli: Note: The result is a dbus wrapped type, so should usually convert it to the corresponding native Python type """ - return self.iface_props.Get("fi.w1.wpa_supplicant1.Interface", name) + return self.iface_props.Get('fi.w1.wpa_supplicant1.Interface', name) def connect(self, ssid, password): if self.connected is True: @@ -79,9 +80,9 @@ class wpa_cli: if self.new_network is not None: self.iface_ifc.RemoveNetwork(self.new_network) - print("Pre-connect state is %s, IP is %s" % (self._get_iface_property("State"), get_wiface_IPv4(self.iface_name))) + print('Pre-connect state is %s, IP is %s' % (self._get_iface_property('State'), get_wiface_IPv4(self.iface_name))) - self.new_network = self.iface_ifc.AddNetwork({"ssid": ssid, "psk": password}) + self.new_network = self.iface_ifc.AddNetwork({'ssid': ssid, 'psk': password}) self.iface_ifc.SelectNetwork(self.new_network) time.sleep(10) @@ -89,12 +90,12 @@ class wpa_cli: retry = 10 while retry > 0: time.sleep(5) - state = str(self._get_iface_property("State")) - print("wpa iface state %s (scanning %s)" % (state, bool(self._get_iface_property("Scanning")))) - if state in ["disconnected", "inactive"]: + state = str(self._get_iface_property('State')) + print('wpa iface state %s (scanning %s)' % (state, bool(self._get_iface_property('Scanning')))) + if state in ['disconnected', 'inactive']: self.iface_ifc.Reconnect() ip = get_wiface_IPv4(self.iface_name) - print("wpa iface %s IP %s" % (self.iface_name, ip)) + print('wpa iface %s IP %s' % (self.iface_name, ip)) if ip is not None: self.connected = True return ip diff --git a/tools/ci/test_autocomplete.py b/tools/ci/test_autocomplete.py index d85a915ee3..9ee4406dc9 100755 --- a/tools/ci/test_autocomplete.py +++ b/tools/ci/test_autocomplete.py @@ -1,55 +1,56 @@ #!/usr/bin/env python import os import sys -import pexpect import unittest +import pexpect + class Test(unittest.TestCase): def test_fish(self): - os.environ["TERM"] = "vt100" - child = pexpect.spawn("fish -i") - with open(os.environ["IDF_PATH"] + "/fish" + str(sys.version_info.major) + ".out", "wb") as output: + os.environ['TERM'] = 'vt100' + child = pexpect.spawn('fish -i') + with open(os.environ['IDF_PATH'] + '/fish' + str(sys.version_info.major) + '.out', 'wb') as 
output: child.logfile = output child.sendline('. ./export.fish') result = child.expect( - ["Go to the project directory and run.*idf\\.py build", pexpect.EOF, + ['Go to the project directory and run.*idf\\.py build', pexpect.EOF, pexpect.TIMEOUT], timeout=40) - self.assertEqual(result, 0, "Export was not successful!") - child.send("idf.py \t\t") - result = child.expect(["all.*app.*app-flash.*bootloader.*", pexpect.EOF, pexpect.TIMEOUT], timeout=40) - self.assertEqual(result, 0, "Autocompletion for idf.py failed in fish!") + self.assertEqual(result, 0, 'Export was not successful!') + child.send('idf.py \t\t') + result = child.expect(['all.*app.*app-flash.*bootloader.*', pexpect.EOF, pexpect.TIMEOUT], timeout=40) + self.assertEqual(result, 0, 'Autocompletion for idf.py failed in fish!') def test_bash(self): - os.environ["TERM"] = "xterm-256color" - child = pexpect.spawn("bash -i") - with open(os.environ["IDF_PATH"] + "/bash" + str(sys.version_info.major) + ".out", "wb") as output: + os.environ['TERM'] = 'xterm-256color' + child = pexpect.spawn('bash -i') + with open(os.environ['IDF_PATH'] + '/bash' + str(sys.version_info.major) + '.out', 'wb') as output: child.logfile = output child.sendline('. ./export.sh') - child.send("idf.py \t\t") + child.send('idf.py \t\t') result = child.expect( - ["Go to the project directory and run.*idf\\.py build", pexpect.EOF, + ['Go to the project directory and run.*idf\\.py build', pexpect.EOF, pexpect.TIMEOUT], timeout=40) - self.assertEqual(result, 0, "Export was not successful!") + self.assertEqual(result, 0, 'Export was not successful!') result = child.expect( - ["all.*app.*app-flash.*bootloader.*bootloader-flash.*build-system-targets.*clean.*", pexpect.EOF, + ['all.*app.*app-flash.*bootloader.*bootloader-flash.*build-system-targets.*clean.*', pexpect.EOF, pexpect.TIMEOUT], timeout=40) - self.assertEqual(result, 0, "Autocompletion for idf.py failed in bash!") + self.assertEqual(result, 0, 'Autocompletion for idf.py failed in bash!') def test_zsh(self): - child = pexpect.spawn("zsh -i") - with open(os.environ["IDF_PATH"] + "/zsh" + str(sys.version_info.major) + ".out", "wb") as output: + child = pexpect.spawn('zsh -i') + with open(os.environ['IDF_PATH'] + '/zsh' + str(sys.version_info.major) + '.out', 'wb') as output: child.logfile = output child.sendline('. ./export.sh') result = child.expect( - ["Go to the project directory and run.*idf\\.py build", pexpect.EOF, + ['Go to the project directory and run.*idf\\.py build', pexpect.EOF, pexpect.TIMEOUT], timeout=40) - self.assertEqual(result, 0, "Export was not successful!") - child.send("idf.py \t\t") + self.assertEqual(result, 0, 'Export was not successful!') + child.send('idf.py \t\t') result = child.expect( - ["all.*app.*app-flash.*bootloader.*bootloader-flash.*build-system-targets.*clean.*", pexpect.EOF, + ['all.*app.*app-flash.*bootloader.*bootloader-flash.*build-system-targets.*clean.*', pexpect.EOF, pexpect.TIMEOUT], timeout=40) - self.assertEqual(result, 0, "Autocompletion for idf.py failed in zsh!") + self.assertEqual(result, 0, 'Autocompletion for idf.py failed in zsh!') if __name__ == '__main__': diff --git a/tools/ci/test_check_kconfigs.py b/tools/ci/test_check_kconfigs.py index b8541e4a6d..5568f7ca73 100755 --- a/tools/ci/test_check_kconfigs.py +++ b/tools/ci/test_check_kconfigs.py @@ -15,11 +15,8 @@ # limitations under the License. 
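The test_autocomplete.py tests above all follow one pexpect pattern: spawn an interactive shell, send input, and branch on whichever of several outcomes appears first. A self-contained sketch of that pattern with a placeholder command:

    import pexpect

    # Spawn an interactive shell and wait for one of three outcomes;
    # expect() returns the index of whichever pattern matched first.
    child = pexpect.spawn('bash -i')
    child.sendline('echo ready')
    result = child.expect(['ready', pexpect.EOF, pexpect.TIMEOUT], timeout=10)
    assert result == 0, 'Shell did not echo back!'
    child.close()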
import unittest -from check_kconfigs import LineRuleChecker -from check_kconfigs import SourceChecker -from check_kconfigs import InputError -from check_kconfigs import IndentAndNameChecker -from check_kconfigs import CONFIG_NAME_MAX_LENGTH + +from check_kconfigs import CONFIG_NAME_MAX_LENGTH, IndentAndNameChecker, InputError, LineRuleChecker, SourceChecker class ApplyLine(object): diff --git a/tools/cmake/convert_to_cmake.py b/tools/cmake/convert_to_cmake.py index 4e3b0a4b64..6e34711b3d 100755 --- a/tools/cmake/convert_to_cmake.py +++ b/tools/cmake/convert_to_cmake.py @@ -4,15 +4,15 @@ # CMakeLists.txt files # import argparse -import subprocess -import re -import os.path import glob +import os.path +import re +import subprocess debug = False -def get_make_variables(path, makefile="Makefile", expected_failure=False, variables={}): +def get_make_variables(path, makefile='Makefile', expected_failure=False, variables={}): """ Given the path to a Makefile of some kind, return a dictionary of all variables defined in this Makefile @@ -20,82 +20,82 @@ def get_make_variables(path, makefile="Makefile", expected_failure=False, variab Overrides IDF_PATH= to avoid recursively evaluating the entire project Makefile structure. """ - variable_setters = [("%s=%s" % (k,v)) for (k,v) in variables.items()] + variable_setters = [('%s=%s' % (k,v)) for (k,v) in variables.items()] - cmdline = ["make", "-rpn", "-C", path, "-f", makefile] + variable_setters + cmdline = ['make', '-rpn', '-C', path, '-f', makefile] + variable_setters if debug: - print("Running %s..." % (" ".join(cmdline))) + print('Running %s...' % (' '.join(cmdline))) p = subprocess.Popen(cmdline, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - (output, stderr) = p.communicate("\n") + (output, stderr) = p.communicate('\n') if (not expected_failure) and p.returncode != 0: - raise RuntimeError("Unexpected make failure, result %d" % p.returncode) + raise RuntimeError('Unexpected make failure, result %d' % p.returncode) if debug: - print("Make stdout:") + print('Make stdout:') print(output) - print("Make stderr:") + print('Make stderr:') print(stderr) next_is_makefile = False # is the next line a makefile variable? 
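The get_make_variables() parsing loop that continues below scrapes variable definitions out of 'make -rpn' output (-r: no built-in rules, -p: print the make database, -n: don't execute). A standalone check of its variable-assignment regex, whose var/val group names are taken from the m.group() calls that follow; the sample line is invented:

    import re

    # 'make -rpn' prints '# makefile' before variables defined in the
    # makefile itself, each as 'NAME := value' or 'NAME = value'.
    m = re.match(r'(?P<var>[^ ]+) :?= (?P<val>.+)', 'COMPONENT_SRCDIRS := . src')
    assert m.group('var') == 'COMPONENT_SRCDIRS'
    assert m.group('val') == '. src'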
result = {} - BUILT_IN_VARS = set(["MAKEFILE_LIST", "SHELL", "CURDIR", "MAKEFLAGS"]) + BUILT_IN_VARS = set(['MAKEFILE_LIST', 'SHELL', 'CURDIR', 'MAKEFLAGS']) - for line in output.decode('utf-8').split("\n"): - if line.startswith("# makefile"): # this line appears before any variable defined in the makefile itself + for line in output.decode('utf-8').split('\n'): + if line.startswith('# makefile'): # this line appears before any variable defined in the makefile itself next_is_makefile = True elif next_is_makefile: next_is_makefile = False - m = re.match(r"(?P<var>[^ ]+) :?= (?P<val>.+)", line) + m = re.match(r'(?P<var>[^ ]+) :?= (?P<val>.+)', line) if m is not None: - if not m.group("var") in BUILT_IN_VARS: - result[m.group("var")] = m.group("val").strip() + if not m.group('var') in BUILT_IN_VARS: + result[m.group('var')] = m.group('val').strip() return result def get_component_variables(project_path, component_path): make_vars = get_make_variables(component_path, - os.path.join(os.environ["IDF_PATH"], - "make", - "component_wrapper.mk"), + os.path.join(os.environ['IDF_PATH'], + 'make', + 'component_wrapper.mk'), expected_failure=True, variables={ - "COMPONENT_MAKEFILE": os.path.join(component_path, "component.mk"), - "COMPONENT_NAME": os.path.basename(component_path), - "PROJECT_PATH": project_path, + 'COMPONENT_MAKEFILE': os.path.join(component_path, 'component.mk'), + 'COMPONENT_NAME': os.path.basename(component_path), + 'PROJECT_PATH': project_path, }) - if "COMPONENT_OBJS" in make_vars: # component.mk specifies list of object files + if 'COMPONENT_OBJS' in make_vars: # component.mk specifies list of object files # Convert to sources def find_src(obj): obj = os.path.splitext(obj)[0] - for ext in ["c", "cpp", "S"]: - if os.path.exists(os.path.join(component_path, obj) + "." + ext): - return obj + "." + ext + for ext in ['c', 'cpp', 'S']: + if os.path.exists(os.path.join(component_path, obj) + '.' + ext): + return obj + '.'
+ ext print("WARNING: Can't find source file for component %s COMPONENT_OBJS %s" % (component_path, obj)) return None srcs = [] - for obj in make_vars["COMPONENT_OBJS"].split(): + for obj in make_vars['COMPONENT_OBJS'].split(): src = find_src(obj) if src is not None: srcs.append(src) - make_vars["COMPONENT_SRCS"] = " ".join(srcs) + make_vars['COMPONENT_SRCS'] = ' '.join(srcs) else: component_srcs = list() - for component_srcdir in make_vars.get("COMPONENT_SRCDIRS", ".").split(): + for component_srcdir in make_vars.get('COMPONENT_SRCDIRS', '.').split(): component_srcdir_path = os.path.abspath(os.path.join(component_path, component_srcdir)) srcs = list() - srcs += glob.glob(os.path.join(component_srcdir_path, "*.[cS]")) - srcs += glob.glob(os.path.join(component_srcdir_path, "*.cpp")) + srcs += glob.glob(os.path.join(component_srcdir_path, '*.[cS]')) + srcs += glob.glob(os.path.join(component_srcdir_path, '*.cpp')) srcs = [('"%s"' % str(os.path.relpath(s, component_path))) for s in srcs] - make_vars["COMPONENT_ADD_INCLUDEDIRS"] = make_vars.get("COMPONENT_ADD_INCLUDEDIRS", "include") + make_vars['COMPONENT_ADD_INCLUDEDIRS'] = make_vars.get('COMPONENT_ADD_INCLUDEDIRS', 'include') component_srcs += srcs - make_vars["COMPONENT_SRCS"] = " ".join(component_srcs) + make_vars['COMPONENT_SRCS'] = ' '.join(component_srcs) return make_vars @@ -103,33 +103,33 @@ def get_component_variables(project_path, component_path): def convert_project(project_path): if not os.path.exists(project_path): raise RuntimeError("Project directory '%s' not found" % project_path) - if not os.path.exists(os.path.join(project_path, "Makefile")): + if not os.path.exists(os.path.join(project_path, 'Makefile')): raise RuntimeError("Directory '%s' doesn't contain a project Makefile" % project_path) - project_cmakelists = os.path.join(project_path, "CMakeLists.txt") + project_cmakelists = os.path.join(project_path, 'CMakeLists.txt') if os.path.exists(project_cmakelists): - raise RuntimeError("This project already has a CMakeLists.txt file") + raise RuntimeError('This project already has a CMakeLists.txt file') project_vars = get_make_variables(project_path, expected_failure=True) - if "PROJECT_NAME" not in project_vars: - raise RuntimeError("PROJECT_NAME does not appear to be defined in IDF project Makefile at %s" % project_path) + if 'PROJECT_NAME' not in project_vars: + raise RuntimeError('PROJECT_NAME does not appear to be defined in IDF project Makefile at %s' % project_path) - component_paths = project_vars["COMPONENT_PATHS"].split() + component_paths = project_vars['COMPONENT_PATHS'].split() converted_components = 0 # Convert components as needed for p in component_paths: - if "MSYSTEM" in os.environ: - cmd = ["cygpath", "-w", p] + if 'MSYSTEM' in os.environ: + cmd = ['cygpath', '-w', p] p = subprocess.check_output(cmd).decode('utf-8').strip() converted_components += convert_component(project_path, p) - project_name = project_vars["PROJECT_NAME"] + project_name = project_vars['PROJECT_NAME'] # Generate the project CMakeLists.txt file - with open(project_cmakelists, "w") as f: + with open(project_cmakelists, 'w') as f: f.write(""" # (Automatically converted from project Makefile by convert_to_cmake.py.) 
@@ -141,47 +141,47 @@ cmake_minimum_required(VERSION 3.5) f.write(""" include($ENV{IDF_PATH}/tools/cmake/project.cmake) """) - f.write("project(%s)\n" % project_name) + f.write('project(%s)\n' % project_name) - print("Converted project %s" % project_cmakelists) + print('Converted project %s' % project_cmakelists) if converted_components > 0: - print("Note: Newly created component CMakeLists.txt do not have any REQUIRES or PRIV_REQUIRES " - "lists to declare their component requirements. Builds may fail to include other " + print('Note: Newly created component CMakeLists.txt do not have any REQUIRES or PRIV_REQUIRES ' + 'lists to declare their component requirements. Builds may fail to include other ' "components' header files. If so requirements need to be added to the components' " "CMakeLists.txt files. See the 'Component Requirements' section of the " - "Build System docs for more details.") + 'Build System docs for more details.') def convert_component(project_path, component_path): if debug: - print("Converting %s..." % (component_path)) - cmakelists_path = os.path.join(component_path, "CMakeLists.txt") + print('Converting %s...' % (component_path)) + cmakelists_path = os.path.join(component_path, 'CMakeLists.txt') if os.path.exists(cmakelists_path): - print("Skipping already-converted component %s..." % cmakelists_path) + print('Skipping already-converted component %s...' % cmakelists_path) return 0 v = get_component_variables(project_path, component_path) # Look up all the variables before we start writing the file, so it's not # created if there's an erro - component_srcs = v.get("COMPONENT_SRCS", None) + component_srcs = v.get('COMPONENT_SRCS', None) - component_add_includedirs = v["COMPONENT_ADD_INCLUDEDIRS"] - cflags = v.get("CFLAGS", None) + component_add_includedirs = v['COMPONENT_ADD_INCLUDEDIRS'] + cflags = v.get('CFLAGS', None) - with open(cmakelists_path, "w") as f: + with open(cmakelists_path, 'w') as f: if component_srcs is not None: - f.write("idf_component_register(SRCS %s)\n" % component_srcs) - f.write(" INCLUDE_DIRS %s" % component_add_includedirs) - f.write(" # Edit following two lines to set component requirements (see docs)\n") - f.write(" REQUIRES "")\n") - f.write(" PRIV_REQUIRES "")\n\n") + f.write('idf_component_register(SRCS %s)\n' % component_srcs) + f.write(' INCLUDE_DIRS %s' % component_add_includedirs) + f.write(' # Edit following two lines to set component requirements (see docs)\n') + f.write(' REQUIRES '')\n') + f.write(' PRIV_REQUIRES '')\n\n') else: - f.write("idf_component_register()\n") + f.write('idf_component_register()\n') if cflags is not None: - f.write("target_compile_options(${COMPONENT_LIB} PRIVATE %s)\n" % cflags) + f.write('target_compile_options(${COMPONENT_LIB} PRIVATE %s)\n' % cflags) - print("Converted %s" % cmakelists_path) + print('Converted %s' % cmakelists_path) return 1 @@ -197,9 +197,9 @@ def main(): args = parser.parse_args() debug = args.debug - print("Converting %s..." % args.project) + print('Converting %s...' 
% args.project) convert_project(args.project) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/esp_app_trace/espytrace/apptrace.py b/tools/esp_app_trace/espytrace/apptrace.py index ef90e28479..7a48afefce 100644 --- a/tools/esp_app_trace/espytrace/apptrace.py +++ b/tools/esp_app_trace/espytrace/apptrace.py @@ -1,6 +1,8 @@ from __future__ import print_function -import sys + import os +import sys + try: from urlparse import urlparse except ImportError: @@ -9,13 +11,15 @@ try: import SocketServer except ImportError: import socketserver as SocketServer -import threading -import tempfile -import time -import subprocess + import os.path -import elftools.elf.elffile as elffile +import subprocess +import tempfile +import threading +import time + import elftools.elf.constants as elfconst +import elftools.elf.elffile as elffile def clock(): @@ -44,7 +48,7 @@ def addr2line(toolchain, elf_path, addr): source line location string """ try: - return subprocess.check_output(['%saddr2line' % toolchain, '-e', elf_path, '0x%x' % addr]).decode("utf-8") + return subprocess.check_output(['%saddr2line' % toolchain, '-e', elf_path, '0x%x' % addr]).decode('utf-8') except subprocess.CalledProcessError: return '' @@ -208,7 +212,7 @@ class FileReader(Reader): line = '' start_tm = clock() while not self.need_stop: - line += self.trace_file.readline().decode("utf-8") + line += self.trace_file.readline().decode('utf-8') if line.endswith(linesep): break if self.timeout != -1 and clock() >= start_tm + self.timeout: @@ -421,13 +425,13 @@ class TraceDataProcessor: event : object Event object """ - print("EVENT[{:d}]: {}".format(self.total_events, event)) + print('EVENT[{:d}]: {}'.format(self.total_events, event)) def print_report(self): """ Base method to print report. 
""" - print("Processed {:d} events".format(self.total_events)) + print('Processed {:d} events'.format(self.total_events)) def cleanup(self): """ @@ -579,8 +583,8 @@ class BaseLogTraceDataProcessorImpl: """ Prints log report """ - print("=============== LOG TRACE REPORT ===============") - print("Processed {:d} log messages.".format(len(self.messages))) + print('=============== LOG TRACE REPORT ===============') + print('Processed {:d} log messages.'.format(len(self.messages))) def on_new_event(self, event): """ @@ -678,13 +682,13 @@ class HeapTraceEvent: callers += ':' callers += '0x{:x}'.format(addr) if self.alloc: - return "[{:.9f}] HEAP: Allocated {:d} bytes @ 0x{:x} from {} on core {:d} by: {}".format(self.trace_event.ts, + return '[{:.9f}] HEAP: Allocated {:d} bytes @ 0x{:x} from {} on core {:d} by: {}'.format(self.trace_event.ts, self.size, self.addr, self.trace_event.ctx_desc, self.trace_event.core_id, callers) else: - return "[{:.9f}] HEAP: Freed bytes @ 0x{:x} from {} on core {:d} by: {}".format(self.trace_event.ts, + return '[{:.9f}] HEAP: Freed bytes @ 0x{:x} from {} on core {:d} by: {}'.format(self.trace_event.ts, self.addr, self.trace_event.ctx_desc, self.trace_event.core_id, callers) @@ -738,10 +742,10 @@ class BaseHeapTraceDataProcessorImpl: """ Prints heap report """ - print("=============== HEAP TRACE REPORT ===============") - print("Processed {:d} heap events.".format(self.heap_events_count)) + print('=============== HEAP TRACE REPORT ===============') + print('Processed {:d} heap events.'.format(self.heap_events_count)) if len(self.allocs) == 0: - print("OK - Heap errors was not found.") + print('OK - Heap errors was not found.') return leaked_bytes = 0 for alloc in self.allocs: @@ -749,6 +753,6 @@ class BaseHeapTraceDataProcessorImpl: print(alloc) for free in self.frees: if free.addr > alloc.addr and free.addr <= alloc.addr + alloc.size: - print("Possible wrong free operation found") + print('Possible wrong free operation found') print(free) - print("Found {:d} leaked bytes in {:d} blocks.".format(leaked_bytes, len(self.allocs))) + print('Found {:d} leaked bytes in {:d} blocks.'.format(leaked_bytes, len(self.allocs))) diff --git a/tools/esp_app_trace/espytrace/sysview.py b/tools/esp_app_trace/espytrace/sysview.py index 4378dc818c..d92d0125ea 100644 --- a/tools/esp_app_trace/espytrace/sysview.py +++ b/tools/esp_app_trace/espytrace/sysview.py @@ -1,9 +1,9 @@ -import re -import struct import copy import json -import espytrace.apptrace as apptrace +import re +import struct +import espytrace.apptrace as apptrace SYSVIEW_EVTID_NOP = 0 # Dummy packet. 
SYSVIEW_EVTID_OVERFLOW = 1 @@ -42,33 +42,33 @@ SYSVIEW_MODULE_EVENT_OFFSET = 512 SYSVIEW_SYNC_LEN = 10 _sysview_events_map = { - "SYS_NOP": SYSVIEW_EVTID_NOP, - "SYS_OVERFLOW": SYSVIEW_EVTID_OVERFLOW, - "SYS_ISR_ENTER": SYSVIEW_EVTID_ISR_ENTER, - "SYS_ISR_EXIT": SYSVIEW_EVTID_ISR_EXIT, - "SYS_TASK_START_EXEC": SYSVIEW_EVTID_TASK_START_EXEC, - "SYS_TASK_STOP_EXEC": SYSVIEW_EVTID_TASK_STOP_EXEC, - "SYS_TASK_START_READY": SYSVIEW_EVTID_TASK_START_READY, - "SYS_TASK_STOP_READY": SYSVIEW_EVTID_TASK_STOP_READY, - "SYS_TASK_CREATE": SYSVIEW_EVTID_TASK_CREATE, - "SYS_TASK_INFO": SYSVIEW_EVTID_TASK_INFO, - "SYS_TRACE_START": SYSVIEW_EVTID_TRACE_START, - "SYS_TRACE_STOP": SYSVIEW_EVTID_TRACE_STOP, - "SYS_SYSTIME_CYCLES": SYSVIEW_EVTID_SYSTIME_CYCLES, - "SYS_SYSTIME_US": SYSVIEW_EVTID_SYSTIME_US, - "SYS_SYSDESC": SYSVIEW_EVTID_SYSDESC, - "SYS_USER_START": SYSVIEW_EVTID_USER_START, - "SYS_USER_STOP": SYSVIEW_EVTID_USER_STOP, - "SYS_IDLE": SYSVIEW_EVTID_IDLE, - "SYS_ISR_TO_SCHEDULER": SYSVIEW_EVTID_ISR_TO_SCHEDULER, - "SYS_TIMER_ENTER": SYSVIEW_EVTID_TIMER_ENTER, - "SYS_TIMER_EXIT": SYSVIEW_EVTID_TIMER_EXIT, - "SYS_STACK_INFO": SYSVIEW_EVTID_STACK_INFO, - "SYS_MODULEDESC": SYSVIEW_EVTID_INIT, - "SYS_INIT": SYSVIEW_EVTID_INIT, - "SYS_NAME_RESOURCE": SYSVIEW_EVTID_NAME_RESOURCE, - "SYS_PRINT_FORMATTED": SYSVIEW_EVTID_PRINT_FORMATTED, - "SYS_NUMMODULES": SYSVIEW_EVTID_NUMMODULES + 'SYS_NOP': SYSVIEW_EVTID_NOP, + 'SYS_OVERFLOW': SYSVIEW_EVTID_OVERFLOW, + 'SYS_ISR_ENTER': SYSVIEW_EVTID_ISR_ENTER, + 'SYS_ISR_EXIT': SYSVIEW_EVTID_ISR_EXIT, + 'SYS_TASK_START_EXEC': SYSVIEW_EVTID_TASK_START_EXEC, + 'SYS_TASK_STOP_EXEC': SYSVIEW_EVTID_TASK_STOP_EXEC, + 'SYS_TASK_START_READY': SYSVIEW_EVTID_TASK_START_READY, + 'SYS_TASK_STOP_READY': SYSVIEW_EVTID_TASK_STOP_READY, + 'SYS_TASK_CREATE': SYSVIEW_EVTID_TASK_CREATE, + 'SYS_TASK_INFO': SYSVIEW_EVTID_TASK_INFO, + 'SYS_TRACE_START': SYSVIEW_EVTID_TRACE_START, + 'SYS_TRACE_STOP': SYSVIEW_EVTID_TRACE_STOP, + 'SYS_SYSTIME_CYCLES': SYSVIEW_EVTID_SYSTIME_CYCLES, + 'SYS_SYSTIME_US': SYSVIEW_EVTID_SYSTIME_US, + 'SYS_SYSDESC': SYSVIEW_EVTID_SYSDESC, + 'SYS_USER_START': SYSVIEW_EVTID_USER_START, + 'SYS_USER_STOP': SYSVIEW_EVTID_USER_STOP, + 'SYS_IDLE': SYSVIEW_EVTID_IDLE, + 'SYS_ISR_TO_SCHEDULER': SYSVIEW_EVTID_ISR_TO_SCHEDULER, + 'SYS_TIMER_ENTER': SYSVIEW_EVTID_TIMER_ENTER, + 'SYS_TIMER_EXIT': SYSVIEW_EVTID_TIMER_EXIT, + 'SYS_STACK_INFO': SYSVIEW_EVTID_STACK_INFO, + 'SYS_MODULEDESC': SYSVIEW_EVTID_INIT, + 'SYS_INIT': SYSVIEW_EVTID_INIT, + 'SYS_NAME_RESOURCE': SYSVIEW_EVTID_NAME_RESOURCE, + 'SYS_PRINT_FORMATTED': SYSVIEW_EVTID_PRINT_FORMATTED, + 'SYS_NUMMODULES': SYSVIEW_EVTID_NUMMODULES } _os_events_map = {} @@ -175,7 +175,7 @@ def _read_init_seq(reader): sync_bytes = struct.unpack(SYNC_SEQ_FMT, reader.read(struct.calcsize(SYNC_SEQ_FMT))) for b in sync_bytes: if b != 0: - raise SysViewTraceParseError("Invalid sync sequense!") + raise SysViewTraceParseError('Invalid sync sequense!') def _decode_u32(reader): @@ -263,7 +263,7 @@ def _decode_str(reader): buf = struct.unpack('<2B', reader.read(2)) sz = (buf[1] << 8) | buf[0] val, = struct.unpack('<%ds' % sz, reader.read(sz)) - val = val.decode("utf-8") + val = val.decode('utf-8') if sz < 0xFF: return (sz + 1,val) # one extra byte for length return (sz + 3,val) # 3 extra bytes for length @@ -347,7 +347,7 @@ class SysViewEvent(apptrace.TraceEvent): if event has unknown or invalid format. """ if self.id not in events_fmt_map: - raise SysViewTraceParseError("Unknown event ID %d!" 
% self.id) + raise SysViewTraceParseError('Unknown event ID %d!' % self.id) self.name = events_fmt_map[self.id][0] evt_params_templates = events_fmt_map[self.id][1] params_len = 0 @@ -357,13 +357,13 @@ class SysViewEvent(apptrace.TraceEvent): cur_pos = reader.get_pos() sz,param_val = event_param.decode(reader, self.plen - params_len) except Exception as e: - raise SysViewTraceParseError("Failed to decode event {}({:d}) {:d} param @ 0x{:x}! {}".format(self.name, self.id, self.plen, cur_pos, e)) + raise SysViewTraceParseError('Failed to decode event {}({:d}) {:d} param @ 0x{:x}! {}'.format(self.name, self.id, self.plen, cur_pos, e)) event_param.idx = i event_param.value = param_val self.params[event_param.name] = event_param params_len += sz if self.id >= SYSVIEW_EVENT_ID_PREDEF_LEN_MAX and self.plen != params_len: - raise SysViewTraceParseError("Invalid event {}({:d}) payload len {:d}! Must be {:d}.".format(self.name, self.id, self.plen, params_len)) + raise SysViewTraceParseError('Invalid event {}({:d}) payload len {:d}! Must be {:d}.'.format(self.name, self.id, self.plen, params_len)) def __str__(self): params = '' @@ -886,7 +886,7 @@ class SysViewTraceDataProcessor(apptrace.TraceDataProcessor): """ apptrace.TraceDataProcessor.__init__(self, print_events, keep_all_events) self.event_ids = {} - self.name = "" + self.name = '' self.root_proc = root_proc if root_proc else self self.traces = {} self.ctx_stack = {} @@ -1003,12 +1003,12 @@ class SysViewTraceDataProcessor(apptrace.TraceDataProcessor): if event.core_id not in self.prev_ctx: self.prev_ctx[event.core_id] = None else: - raise SysViewTraceParseError("Event for unknown core %d" % event.core_id) + raise SysViewTraceParseError('Event for unknown core %d' % event.core_id) else: trace = self.traces[event.core_id] if event.id == SYSVIEW_EVTID_ISR_ENTER: if event.params['irq_num'].value not in trace.irqs_info: - raise SysViewTraceParseError("Enter unknown ISR %d" % event.params['irq_num'].value) + raise SysViewTraceParseError('Enter unknown ISR %d' % event.params['irq_num'].value) if len(self.ctx_stack[event.core_id]): self.prev_ctx[event.core_id] = self.ctx_stack[event.core_id][-1] else: @@ -1026,7 +1026,7 @@ class SysViewTraceDataProcessor(apptrace.TraceDataProcessor): self.prev_ctx[event.core_id] = SysViewEventContext(None, True, 'IRQ_oncore%d' % event.core_id) elif event.id == SYSVIEW_EVTID_TASK_START_EXEC: if event.params['tid'].value not in trace.tasks_info: - raise SysViewTraceParseError("Start exec unknown task 0x%x" % event.params['tid'].value) + raise SysViewTraceParseError('Start exec unknown task 0x%x' % event.params['tid'].value) if len(self.ctx_stack[event.core_id]): # return to the previous context (the last in the list) self.prev_ctx[event.core_id] = self.ctx_stack[event.core_id][-1] @@ -1046,7 +1046,7 @@ class SysViewTraceDataProcessor(apptrace.TraceDataProcessor): break elif event.id == SYSVIEW_EVTID_TASK_STOP_READY: if event.params['tid'].value not in trace.tasks_info: - raise SysViewTraceParseError("Stop ready unknown task 0x%x" % event.params['tid'].value) + raise SysViewTraceParseError('Stop ready unknown task 0x%x' % event.params['tid'].value) if len(self.ctx_stack[event.core_id]): if (not self.ctx_stack[event.core_id][-1].irq and event.params['tid'].value == self.ctx_stack[event.core_id][-1].handle): # return to the previous context (the last in the list) @@ -1187,13 +1187,13 @@ class SysViewMultiStreamTraceDataProcessor(SysViewTraceDataProcessor): class SysViewTraceDataJsonEncoder(json.JSONEncoder): - JSON_TRACE_VER 
= "1.0" + JSON_TRACE_VER = '1.0' def default(self, obj): global _sysview_events_map global _os_events_map if isinstance(obj, SysViewMultiStreamTraceDataProcessor): - json_event_ids = {"system": _sysview_events_map, "os": {}} + json_event_ids = {'system': _sysview_events_map, 'os': {}} for eid in _os_events_map: ename = _os_events_map[eid][0] json_event_ids['os'][ename] = eid @@ -1208,20 +1208,20 @@ class SysViewTraceDataJsonEncoder(json.JSONEncoder): # include also OS and pre-defined events if isinstance(e, SysViewPredefinedEvent) or isinstance(e, SysViewOSEvent): json_events.append(e) - return {"version": self.JSON_TRACE_VER, "streams": json_event_ids, "events": json_events} + return {'version': self.JSON_TRACE_VER, 'streams': json_event_ids, 'events': json_events} if isinstance(obj, SysViewHeapEvent): blk_size = 0 - if "size" in obj.params: - blk_size = obj.params["size"].value - blk_addr = "0x{:x}".format(obj.params["addr"].value) + if 'size' in obj.params: + blk_size = obj.params['size'].value + blk_addr = '0x{:x}'.format(obj.params['addr'].value) callers = [] for addr in obj.params['callers'].value: callers.append('0x{:x}'.format(addr)) - return {"ctx_name": obj.ctx_name, "in_irq": obj.in_irq, "id": obj.id, "core_id": obj.core_id, - "ts": obj.ts, "addr": blk_addr, "size": blk_size, "callers": callers} + return {'ctx_name': obj.ctx_name, 'in_irq': obj.in_irq, 'id': obj.id, 'core_id': obj.core_id, + 'ts': obj.ts, 'addr': blk_addr, 'size': blk_size, 'callers': callers} if isinstance(obj, SysViewPredefinedEvent) and obj.id == SYSVIEW_EVTID_PRINT_FORMATTED: - return {"ctx_name": obj.ctx_name, "in_irq": obj.in_irq, "id": obj.id, "core_id": obj.core_id, - "ts": obj.ts, "msg": obj.params["msg"].value, "lvl": obj.params["lvl"].value} + return {'ctx_name': obj.ctx_name, 'in_irq': obj.in_irq, 'id': obj.id, 'core_id': obj.core_id, + 'ts': obj.ts, 'msg': obj.params['msg'].value, 'lvl': obj.params['lvl'].value} if isinstance(obj, SysViewEvent): jobj = obj.to_jsonable() # remove unused fields @@ -1280,9 +1280,9 @@ class SysViewHeapTraceDataProcessor(SysViewTraceDataProcessor, apptrace.BaseHeap self.toolchain = toolchain_pref self.elf_path = elf_path # self.no_ctx_events = [] - self.name = "heap" + self.name = 'heap' stream = self.root_proc.get_trace_stream(0, SysViewTraceDataParser.STREAMID_HEAP) - self.event_ids = {"alloc": stream.events_off, "free": stream.events_off + 1} + self.event_ids = {'alloc': stream.events_off, 'free': stream.events_off + 1} def event_supported(self, event): heap_stream = self.root_proc.get_trace_stream(event.core_id, SysViewTraceDataParser.STREAMID_HEAP) @@ -1362,8 +1362,8 @@ class SysViewLogTraceDataProcessor(SysViewTraceDataProcessor, apptrace.BaseLogTr """ SysViewTraceDataProcessor.__init__(self, traces, root_proc=root_proc, print_events=print_events) apptrace.BaseLogTraceDataProcessorImpl.__init__(self, print_log_events) - self.name = "log" - self.event_ids = {"print": SYSVIEW_EVTID_PRINT_FORMATTED} + self.name = 'log' + self.event_ids = {'print': SYSVIEW_EVTID_PRINT_FORMATTED} def event_supported(self, event): return event.id == SYSVIEW_EVTID_PRINT_FORMATTED diff --git a/tools/esp_app_trace/logtrace_proc.py b/tools/esp_app_trace/logtrace_proc.py index 702dd1fab8..ee7120e252 100755 --- a/tools/esp_app_trace/logtrace_proc.py +++ b/tools/esp_app_trace/logtrace_proc.py @@ -2,9 +2,11 @@ # from __future__ import print_function + import argparse import struct import sys + import elftools.elf.elffile as elffile import espytrace.apptrace as apptrace @@ -21,7 +23,7 @@ class 
ESPLogTraceRecord(object): self.args = log_args def __repr__(self): - return "fmt_addr = 0x%x, args = %d/%s" % (self.fmt_addr, len(self.args), self.args) + return 'fmt_addr = 0x%x, args = %d/%s' % (self.fmt_addr, len(self.args), self.args) def logtrace_parse(fname): @@ -32,40 +34,40 @@ def logtrace_parse(fname): try: ftrc = open(fname, 'rb') except OSError as e: - raise ESPLogTraceParserError("Failed to open trace file (%s)!" % e) + raise ESPLogTraceParserError('Failed to open trace file (%s)!' % e) # data_ok = True while True: # read args num and format str addr try: trc_buf = ftrc.read(ESP32_LOGTRACE_HDR_SZ) except IOError as e: - raise ESPLogTraceParserError("Failed to read log record header (%s)!" % e) + raise ESPLogTraceParserError('Failed to read log record header (%s)!' % e) if len(trc_buf) < ESP32_LOGTRACE_HDR_SZ: # print "EOF" if len(trc_buf) > 0: - print("Unprocessed %d bytes of log record header!" % len(trc_buf)) + print('Unprocessed %d bytes of log record header!' % len(trc_buf)) # data_ok = False break try: nargs,fmt_addr = struct.unpack(ESP32_LOGTRACE_HDR_FMT, trc_buf) except struct.error as e: - raise ESPLogTraceParserError("Failed to unpack log record header (%s)!" % e) + raise ESPLogTraceParserError('Failed to unpack log record header (%s)!' % e) # read args args_sz = struct.calcsize('<%sL' % nargs) try: trc_buf = ftrc.read(args_sz) except IOError as e: - raise ESPLogTraceParserError("Failed to read log record args (%s)!" % e) + raise ESPLogTraceParserError('Failed to read log record args (%s)!' % e) if len(trc_buf) < args_sz: # print("EOF") if len(trc_buf) > 0: - print("Unprocessed %d bytes of log record args!" % len(trc_buf)) + print('Unprocessed %d bytes of log record args!' % len(trc_buf)) # data_ok = False break try: log_args = struct.unpack('<%sL' % nargs, trc_buf) except struct.error as e: - raise ESPLogTraceParserError("Failed to unpack log record args (%s)!" % e) + raise ESPLogTraceParserError('Failed to unpack log record args (%s)!' % e) # print(log_args) recs.append(ESPLogTraceRecord(fmt_addr, list(log_args))) @@ -78,7 +80,7 @@ def logtrace_formated_print(recs, elfname, no_err): try: felf = elffile.ELFFile(open(elfname, 'rb')) except OSError as e: - raise ESPLogTraceParserError("Failed to open ELF file (%s)!" % e) + raise ESPLogTraceParserError('Failed to open ELF file (%s)!' % e) for lrec in recs: fmt_str = apptrace.get_str_from_elf(felf, lrec.fmt_addr) @@ -104,8 +106,8 @@ def logtrace_formated_print(recs, elfname, no_err): pass except Exception as e: if not no_err: - print("Print error (%s)" % e) - print("\nFmt = {%s}, args = %d/%s" % (fmt_str, len(lrec.args), lrec.args)) + print('Print error (%s)' % e) + print('\nFmt = {%s}, args = %d/%s' % (fmt_str, len(lrec.args), lrec.args)) felf.stream.close() @@ -123,21 +125,21 @@ def main(): try: print("Parse trace file '%s'..." % args.trace_file) lrecs = logtrace_parse(args.trace_file) - print("Parsing completed.") + print('Parsing completed.') except ESPLogTraceParserError as e: - print("Failed to parse log trace (%s)!" % e) + print('Failed to parse log trace (%s)!' % e) sys.exit(2) # print recs # get format strings and print info - print("====================================================================") + print('====================================================================') try: logtrace_formated_print(lrecs, args.elf_file, args.no_errors) except ESPLogTraceParserError as e: - print("Failed to print log trace (%s)!" % e) + print('Failed to print log trace (%s)!' 
% e) sys.exit(2) - print("\n====================================================================\n") + print('\n====================================================================\n') - print("Log records count: %d" % len(lrecs)) + print('Log records count: %d' % len(lrecs)) if __name__ == '__main__': diff --git a/tools/esp_app_trace/sysviewtrace_proc.py b/tools/esp_app_trace/sysviewtrace_proc.py index 95fd643138..4de5143715 100755 --- a/tools/esp_app_trace/sysviewtrace_proc.py +++ b/tools/esp_app_trace/sysviewtrace_proc.py @@ -20,12 +20,13 @@ # import argparse -import sys +import json +import logging import os.path import signal +import sys import traceback -import logging -import json + import espytrace.apptrace as apptrace import espytrace.sysview as sysview @@ -83,22 +84,22 @@ def main(): sysview.SysViewLogTraceDataParser(print_events=False, core_id=i)) parsers.append(parser) except Exception as e: - logging.error("Failed to create data parser (%s)!", e) + logging.error('Failed to create data parser (%s)!', e) traceback.print_exc() sys.exit(2) reader = apptrace.reader_create(trace_source, args.tmo) if not reader: - logging.error("Failed to create trace reader!") + logging.error('Failed to create trace reader!') sys.exit(2) try: # logging.info("Parse trace from '{}'...".format(trace_source)) logging.info("Parse trace from '%s'...", trace_source) sysview.parse_trace(reader, parser, args.events_map) - logging.info("Parsing completed.") + logging.info('Parsing completed.') except (apptrace.ReaderTimeoutError, apptrace.ReaderShutdownRequest) as e: - logging.info("Stop parsing trace. (%s)", e) + logging.info('Stop parsing trace. (%s)', e) except Exception as e: - logging.error("Failed to parse trace (%s)!", e) + logging.error('Failed to parse trace (%s)!', e) parser.cleanup() traceback.print_exc() sys.exit(2) @@ -115,16 +116,16 @@ def main(): proc.add_stream_processor(sysview.SysViewTraceDataParser.STREAMID_LOG, sysview.SysViewLogTraceDataProcessor(root_proc=proc, print_log_events=args.print_events)) except Exception as e: - logging.error("Failed to create data processor (%s)!", e) + logging.error('Failed to create data processor (%s)!', e) traceback.print_exc() sys.exit(2) try: logging.info("Process events from '%s'...", args.trace_sources) proc.merge_and_process() - logging.info("Processing completed.") + logging.info('Processing completed.') except Exception as e: - logging.error("Failed to process trace (%s)!", e) + logging.error('Failed to process trace (%s)!', e) traceback.print_exc() sys.exit(2) finally: diff --git a/tools/esp_prov/esp_prov.py b/tools/esp_prov/esp_prov.py index 8215a10172..b45010d201 100644 --- a/tools/esp_prov/esp_prov.py +++ b/tools/esp_prov/esp_prov.py @@ -16,28 +16,29 @@ # from __future__ import print_function -from builtins import input as binput + import argparse -import textwrap -import time +import json import os import sys -import json +import textwrap +import time +from builtins import input as binput from getpass import getpass try: + import prov import security import transport - import prov except ImportError: idf_path = os.environ['IDF_PATH'] - sys.path.insert(0, idf_path + "/components/protocomm/python") - sys.path.insert(1, idf_path + "/tools/esp_prov") + sys.path.insert(0, idf_path + '/components/protocomm/python') + sys.path.insert(1, idf_path + '/tools/esp_prov') + import prov import security import transport - import prov # Set this to true to allow exceptions to be thrown config_throw_except = False @@ -92,7 +93,7 @@ def version_match(tp, 
protover, verbose=False): response = tp.send_data('proto-ver', protover) if verbose: - print("proto-ver response : ", response) + print('proto-ver response : ', response) # First assume this to be a simple version string if response.lower() == protover.lower(): @@ -123,7 +124,7 @@ def has_capability(tp, capability='none', verbose=False): response = tp.send_data('proto-ver', capability) if verbose: - print("proto-ver response : ", response) + print('proto-ver response : ', response) try: # Interpret this as JSON structure containing @@ -221,16 +222,16 @@ def scan_wifi_APs(sel_transport, tp, sec): start_time = time.time() response = tp.send_data('prov-scan', message) stop_time = time.time() - print("++++ Scan process executed in " + str(stop_time - start_time) + " sec") + print('++++ Scan process executed in ' + str(stop_time - start_time) + ' sec') prov.scan_start_response(sec, response) message = prov.scan_status_request(sec) response = tp.send_data('prov-scan', message) result = prov.scan_status_response(sec, response) - print("++++ Scan results : " + str(result["count"])) - if result["count"] != 0: + print('++++ Scan results : ' + str(result['count'])) + if result['count'] != 0: index = 0 - remaining = result["count"] + remaining = result['count'] while remaining: count = [remaining, readlen][remaining > readlen] message = prov.scan_result_request(sec, index, count) @@ -287,25 +288,25 @@ def wait_wifi_connected(tp, sec): while True: time.sleep(TIME_PER_POLL) - print("\n==== Wi-Fi connection state ====") + print('\n==== Wi-Fi connection state ====') ret = get_wifi_config(tp, sec) - if ret == "connecting": + if ret == 'connecting': continue - elif ret == "connected": - print("==== Provisioning was successful ====") + elif ret == 'connected': + print('==== Provisioning was successful ====') return True elif retry > 0: retry -= 1 - print("Waiting to poll status again (status %s, %d tries left)..." % (ret, retry)) + print('Waiting to poll status again (status %s, %d tries left)...' 
% (ret, retry)) else: - print("---- Provisioning failed ----") + print('---- Provisioning failed ----') return False def desc_format(*args): desc = '' for arg in args: - desc += textwrap.fill(replace_whitespace=False, text=arg) + "\n" + desc += textwrap.fill(replace_whitespace=False, text=arg) + '\n' return desc @@ -316,12 +317,12 @@ if __name__ == '__main__': 'See esp-idf/examples/provisioning for sample applications'), formatter_class=argparse.RawTextHelpFormatter) - parser.add_argument("--transport", required=True, dest='mode', type=str, + parser.add_argument('--transport', required=True, dest='mode', type=str, help=desc_format( 'Mode of transport over which provisioning is to be performed.', 'This should be one of "softap", "ble" or "console"')) - parser.add_argument("--service_name", dest='name', type=str, + parser.add_argument('--service_name', dest='name', type=str, help=desc_format( 'This specifies the name of the provisioning service to connect to, ' 'depending upon the mode of transport :', @@ -329,12 +330,12 @@ if __name__ == '__main__': '\t- transport "softap" : HTTP Server hostname or IP', '\t (default "192.168.4.1:80")')) - parser.add_argument("--proto_ver", dest='version', type=str, default='', + parser.add_argument('--proto_ver', dest='version', type=str, default='', help=desc_format( 'This checks the protocol version of the provisioning service running ' 'on the device before initiating Wi-Fi configuration')) - parser.add_argument("--sec_ver", dest='secver', type=int, default=None, + parser.add_argument('--sec_ver', dest='secver', type=int, default=None, help=desc_format( 'Protocomm security scheme used by the provisioning service for secure ' 'session establishment. Accepted values are :', @@ -345,48 +346,48 @@ if __name__ == '__main__': 'the compatible security version is automatically determined from ' 'capabilities retrieved via the version endpoint')) - parser.add_argument("--pop", dest='pop', type=str, default='', + parser.add_argument('--pop', dest='pop', type=str, default='', help=desc_format( 'This specifies the Proof of possession (PoP) when security scheme 1 ' 'is used')) - parser.add_argument("--ssid", dest='ssid', type=str, default='', + parser.add_argument('--ssid', dest='ssid', type=str, default='', help=desc_format( 'This configures the device to use SSID of the Wi-Fi network to which ' 'we would like it to connect to permanently, once provisioning is complete. ' 'If Wi-Fi scanning is supported by the provisioning service, this need not ' 'be specified')) - parser.add_argument("--passphrase", dest='passphrase', type=str, default='', + parser.add_argument('--passphrase', dest='passphrase', type=str, default='', help=desc_format( 'This configures the device to use Passphrase for the Wi-Fi network to which ' 'we would like it to connect to permanently, once provisioning is complete. 
' 'If Wi-Fi scanning is supported by the provisioning service, this need not ' 'be specified')) - parser.add_argument("--custom_data", dest='custom_data', type=str, default='', + parser.add_argument('--custom_data', dest='custom_data', type=str, default='', help=desc_format( 'This is an optional parameter, only intended for use with ' '"examples/provisioning/wifi_prov_mgr_custom_data"')) - parser.add_argument("--custom_config", action="store_true", + parser.add_argument('--custom_config', action='store_true', help=desc_format( 'This is an optional parameter, only intended for use with ' '"examples/provisioning/custom_config"')) - parser.add_argument("--custom_info", dest='custom_info', type=str, default='', + parser.add_argument('--custom_info', dest='custom_info', type=str, default='', help=desc_format( 'Custom Config Info String. "--custom_config" must be specified for using this')) - parser.add_argument("--custom_ver", dest='custom_ver', type=int, default=2, + parser.add_argument('--custom_ver', dest='custom_ver', type=int, default=2, help=desc_format( 'Custom Config Version Number. "--custom_config" must be specified for using this')) - parser.add_argument("-v","--verbose", help="Increase output verbosity", action="store_true") + parser.add_argument('-v','--verbose', help='Increase output verbosity', action='store_true') args = parser.parse_args() obj_transport = get_transport(args.mode.lower(), args.name) if obj_transport is None: - print("---- Failed to establish connection ----") + print('---- Failed to establish connection ----') exit(1) # If security version not specified check in capabilities @@ -394,107 +395,107 @@ if __name__ == '__main__': # First check if capabilities are supported or not if not has_capability(obj_transport): print('Security capabilities could not be determined. Please specify "--sec_ver" explicitly') - print("---- Invalid Security Version ----") + print('---- Invalid Security Version ----') exit(2) # When no_sec is present, use security 0, else security 1 args.secver = int(not has_capability(obj_transport, 'no_sec')) - print("Security scheme determined to be :", args.secver) + print('Security scheme determined to be :', args.secver) if (args.secver != 0) and not has_capability(obj_transport, 'no_pop'): if len(args.pop) == 0: - print("---- Proof of Possession argument not provided ----") + print('---- Proof of Possession argument not provided ----') exit(2) elif len(args.pop) != 0: - print("---- Proof of Possession will be ignored ----") + print('---- Proof of Possession will be ignored ----') args.pop = '' obj_security = get_security(args.secver, args.pop, args.verbose) if obj_security is None: - print("---- Invalid Security Version ----") + print('---- Invalid Security Version ----') exit(2) if args.version != '': - print("\n==== Verifying protocol version ====") + print('\n==== Verifying protocol version ====') if not version_match(obj_transport, args.version, args.verbose): - print("---- Error in protocol version matching ----") + print('---- Error in protocol version matching ----') exit(3) - print("==== Verified protocol version successfully ====") + print('==== Verified protocol version successfully ====') - print("\n==== Starting Session ====") + print('\n==== Starting Session ====') if not establish_session(obj_transport, obj_security): - print("Failed to establish session. Ensure that security scheme and proof of possession are correct") - print("---- Error in establishing session ----") + print('Failed to establish session. 
Ensure that security scheme and proof of possession are correct') + print('---- Error in establishing session ----') exit(4) - print("==== Session Established ====") + print('==== Session Established ====') if args.custom_config: - print("\n==== Sending Custom config to esp32 ====") + print('\n==== Sending Custom config to esp32 ====') if not custom_config(obj_transport, obj_security, args.custom_info, args.custom_ver): - print("---- Error in custom config ----") + print('---- Error in custom config ----') exit(5) - print("==== Custom config sent successfully ====") + print('==== Custom config sent successfully ====') if args.custom_data != '': - print("\n==== Sending Custom data to esp32 ====") + print('\n==== Sending Custom data to esp32 ====') if not custom_data(obj_transport, obj_security, args.custom_data): - print("---- Error in custom data ----") + print('---- Error in custom data ----') exit(5) - print("==== Custom data sent successfully ====") + print('==== Custom data sent successfully ====') if args.ssid == '': if not has_capability(obj_transport, 'wifi_scan'): - print("---- Wi-Fi Scan List is not supported by provisioning service ----") - print("---- Rerun esp_prov with SSID and Passphrase as argument ----") + print('---- Wi-Fi Scan List is not supported by provisioning service ----') + print('---- Rerun esp_prov with SSID and Passphrase as argument ----') exit(3) while True: - print("\n==== Scanning Wi-Fi APs ====") + print('\n==== Scanning Wi-Fi APs ====') start_time = time.time() APs = scan_wifi_APs(args.mode.lower(), obj_transport, obj_security) end_time = time.time() - print("\n++++ Scan finished in " + str(end_time - start_time) + " sec") + print('\n++++ Scan finished in ' + str(end_time - start_time) + ' sec') if APs is None: - print("---- Error in scanning Wi-Fi APs ----") + print('---- Error in scanning Wi-Fi APs ----') exit(8) if len(APs) == 0: - print("No APs found!") + print('No APs found!') exit(9) - print("==== Wi-Fi Scan results ====") - print("{0: >4} {1: <33} {2: <12} {3: >4} {4: <4} {5: <16}".format( - "S.N.", "SSID", "BSSID", "CHN", "RSSI", "AUTH")) + print('==== Wi-Fi Scan results ====') + print('{0: >4} {1: <33} {2: <12} {3: >4} {4: <4} {5: <16}'.format( + 'S.N.', 'SSID', 'BSSID', 'CHN', 'RSSI', 'AUTH')) for i in range(len(APs)): - print("[{0: >2}] {1: <33} {2: <12} {3: >4} {4: <4} {5: <16}".format( - i + 1, APs[i]["ssid"], APs[i]["bssid"], APs[i]["channel"], APs[i]["rssi"], APs[i]["auth"])) + print('[{0: >2}] {1: <33} {2: <12} {3: >4} {4: <4} {5: <16}'.format( + i + 1, APs[i]['ssid'], APs[i]['bssid'], APs[i]['channel'], APs[i]['rssi'], APs[i]['auth'])) while True: try: - select = int(binput("Select AP by number (0 to rescan) : ")) + select = int(binput('Select AP by number (0 to rescan) : ')) if select < 0 or select > len(APs): raise ValueError break except ValueError: - print("Invalid input! Retry") + print('Invalid input! 
Retry') if select != 0: break - args.ssid = APs[select - 1]["ssid"] - prompt_str = "Enter passphrase for {0} : ".format(args.ssid) + args.ssid = APs[select - 1]['ssid'] + prompt_str = 'Enter passphrase for {0} : '.format(args.ssid) args.passphrase = getpass(prompt_str) - print("\n==== Sending Wi-Fi credential to esp32 ====") + print('\n==== Sending Wi-Fi credential to esp32 ====') if not send_wifi_config(obj_transport, obj_security, args.ssid, args.passphrase): - print("---- Error in send Wi-Fi config ----") + print('---- Error in send Wi-Fi config ----') exit(6) - print("==== Wi-Fi Credentials sent successfully ====") + print('==== Wi-Fi Credentials sent successfully ====') - print("\n==== Applying config to esp32 ====") + print('\n==== Applying config to esp32 ====') if not apply_wifi_config(obj_transport, obj_security): - print("---- Error in apply Wi-Fi config ----") + print('---- Error in apply Wi-Fi config ----') exit(7) - print("==== Apply config sent successfully ====") + print('==== Apply config sent successfully ====') wait_wifi_connected(obj_transport, obj_security) diff --git a/tools/esp_prov/proto/__init__.py b/tools/esp_prov/proto/__init__.py index 54186629d1..4c1bd501bf 100644 --- a/tools/esp_prov/proto/__init__.py +++ b/tools/esp_prov/proto/__init__.py @@ -29,16 +29,16 @@ def _load_source(name, path): idf_path = os.environ['IDF_PATH'] # protocomm component related python files generated from .proto files -constants_pb2 = _load_source("constants_pb2", idf_path + "/components/protocomm/python/constants_pb2.py") -sec0_pb2 = _load_source("sec0_pb2", idf_path + "/components/protocomm/python/sec0_pb2.py") -sec1_pb2 = _load_source("sec1_pb2", idf_path + "/components/protocomm/python/sec1_pb2.py") -session_pb2 = _load_source("session_pb2", idf_path + "/components/protocomm/python/session_pb2.py") +constants_pb2 = _load_source('constants_pb2', idf_path + '/components/protocomm/python/constants_pb2.py') +sec0_pb2 = _load_source('sec0_pb2', idf_path + '/components/protocomm/python/sec0_pb2.py') +sec1_pb2 = _load_source('sec1_pb2', idf_path + '/components/protocomm/python/sec1_pb2.py') +session_pb2 = _load_source('session_pb2', idf_path + '/components/protocomm/python/session_pb2.py') # wifi_provisioning component related python files generated from .proto files -wifi_constants_pb2 = _load_source("wifi_constants_pb2", idf_path + "/components/wifi_provisioning/python/wifi_constants_pb2.py") -wifi_config_pb2 = _load_source("wifi_config_pb2", idf_path + "/components/wifi_provisioning/python/wifi_config_pb2.py") -wifi_scan_pb2 = _load_source("wifi_scan_pb2", idf_path + "/components/wifi_provisioning/python/wifi_scan_pb2.py") +wifi_constants_pb2 = _load_source('wifi_constants_pb2', idf_path + '/components/wifi_provisioning/python/wifi_constants_pb2.py') +wifi_config_pb2 = _load_source('wifi_config_pb2', idf_path + '/components/wifi_provisioning/python/wifi_config_pb2.py') +wifi_scan_pb2 = _load_source('wifi_scan_pb2', idf_path + '/components/wifi_provisioning/python/wifi_scan_pb2.py') # custom_provisioning component related python files generated from .proto files -custom_config_pb2 = _load_source("custom_config_pb2", idf_path + - "/examples/provisioning/legacy/custom_config/components/custom_provisioning/python/custom_config_pb2.py") +custom_config_pb2 = _load_source('custom_config_pb2', idf_path + + '/examples/provisioning/legacy/custom_config/components/custom_provisioning/python/custom_config_pb2.py') diff --git a/tools/esp_prov/prov/__init__.py b/tools/esp_prov/prov/__init__.py index 
9c55a1fefd..e920727b6b 100644 --- a/tools/esp_prov/prov/__init__.py +++ b/tools/esp_prov/prov/__init__.py @@ -13,6 +13,6 @@ # limitations under the License. # -from .wifi_prov import * # noqa F403 from .custom_prov import * # noqa F403 -from .wifi_scan import * # noqa F403 +from .wifi_prov import * # noqa F403 +from .wifi_scan import * # noqa F403 diff --git a/tools/esp_prov/prov/custom_prov.py b/tools/esp_prov/prov/custom_prov.py index 87b499e6e1..2a5eaf3aa8 100644 --- a/tools/esp_prov/prov/custom_prov.py +++ b/tools/esp_prov/prov/custom_prov.py @@ -16,15 +16,15 @@ # APIs for interpreting and creating protobuf packets for `custom-config` protocomm endpoint from __future__ import print_function -from future.utils import tobytes -import utils import proto +import utils +from future.utils import tobytes def print_verbose(security_ctx, data): if (security_ctx.verbose): - print("++++ " + data + " ++++") + print('++++ ' + data + ' ++++') def custom_config_request(security_ctx, info, version): @@ -33,7 +33,7 @@ def custom_config_request(security_ctx, info, version): cmd.info = tobytes(info) cmd.version = version enc_cmd = security_ctx.encrypt_data(cmd.SerializeToString()).decode('latin-1') - print_verbose(security_ctx, "Client -> Device (CustomConfig cmd) " + utils.str_to_hexstr(enc_cmd)) + print_verbose(security_ctx, 'Client -> Device (CustomConfig cmd) ' + utils.str_to_hexstr(enc_cmd)) return enc_cmd @@ -42,19 +42,19 @@ def custom_config_response(security_ctx, response_data): decrypt = security_ctx.decrypt_data(tobytes(response_data)) cmd_resp = proto.custom_config_pb2.CustomConfigResponse() cmd_resp.ParseFromString(decrypt) - print_verbose(security_ctx, "CustomConfig status " + str(cmd_resp.status)) + print_verbose(security_ctx, 'CustomConfig status ' + str(cmd_resp.status)) return cmd_resp.status def custom_data_request(security_ctx, data): # Encrypt the custom data enc_cmd = security_ctx.encrypt_data(tobytes(data)) - print_verbose(security_ctx, "Client -> Device (CustomData cmd) " + utils.str_to_hexstr(enc_cmd)) + print_verbose(security_ctx, 'Client -> Device (CustomData cmd) ' + utils.str_to_hexstr(enc_cmd)) return enc_cmd def custom_data_response(security_ctx, response_data): # Decrypt response packet decrypt = security_ctx.decrypt_data(tobytes(response_data)) - print("CustomData response: " + str(decrypt)) + print('CustomData response: ' + str(decrypt)) return 0 diff --git a/tools/esp_prov/prov/wifi_prov.py b/tools/esp_prov/prov/wifi_prov.py index 8bd6cd147f..7351408c73 100644 --- a/tools/esp_prov/prov/wifi_prov.py +++ b/tools/esp_prov/prov/wifi_prov.py @@ -16,15 +16,15 @@ # APIs for interpreting and creating protobuf packets for Wi-Fi provisioning from __future__ import print_function -from future.utils import tobytes -import utils import proto +import utils +from future.utils import tobytes def print_verbose(security_ctx, data): if (security_ctx.verbose): - print("++++ " + data + " ++++") + print('++++ ' + data + ' ++++') def config_get_status_request(security_ctx): @@ -34,7 +34,7 @@ def config_get_status_request(security_ctx): cmd_get_status = proto.wifi_config_pb2.CmdGetStatus() cfg1.cmd_get_status.MergeFrom(cmd_get_status) encrypted_cfg = security_ctx.encrypt_data(cfg1.SerializeToString()).decode('latin-1') - print_verbose(security_ctx, "Client -> Device (Encrypted CmdGetStatus) " + utils.str_to_hexstr(encrypted_cfg)) + print_verbose(security_ctx, 'Client -> Device (Encrypted CmdGetStatus) ' + utils.str_to_hexstr(encrypted_cfg)) return encrypted_cfg @@ -43,26 +43,26 @@ def 
config_get_status_response(security_ctx, response_data): decrypted_message = security_ctx.decrypt_data(tobytes(response_data)) cmd_resp1 = proto.wifi_config_pb2.WiFiConfigPayload() cmd_resp1.ParseFromString(decrypted_message) - print_verbose(security_ctx, "Response type " + str(cmd_resp1.msg)) - print_verbose(security_ctx, "Response status " + str(cmd_resp1.resp_get_status.status)) + print_verbose(security_ctx, 'Response type ' + str(cmd_resp1.msg)) + print_verbose(security_ctx, 'Response status ' + str(cmd_resp1.resp_get_status.status)) if cmd_resp1.resp_get_status.sta_state == 0: - print("++++ WiFi state: " + "connected ++++") - return "connected" + print('++++ WiFi state: ' + 'connected ++++') + return 'connected' elif cmd_resp1.resp_get_status.sta_state == 1: - print("++++ WiFi state: " + "connecting... ++++") - return "connecting" + print('++++ WiFi state: ' + 'connecting... ++++') + return 'connecting' elif cmd_resp1.resp_get_status.sta_state == 2: - print("++++ WiFi state: " + "disconnected ++++") - return "disconnected" + print('++++ WiFi state: ' + 'disconnected ++++') + return 'disconnected' elif cmd_resp1.resp_get_status.sta_state == 3: - print("++++ WiFi state: " + "connection failed ++++") + print('++++ WiFi state: ' + 'connection failed ++++') if cmd_resp1.resp_get_status.fail_reason == 0: - print("++++ Failure reason: " + "Incorrect Password ++++") + print('++++ Failure reason: ' + 'Incorrect Password ++++') elif cmd_resp1.resp_get_status.fail_reason == 1: - print("++++ Failure reason: " + "Incorrect SSID ++++") - return "failed" - return "unknown" + print('++++ Failure reason: ' + 'Incorrect SSID ++++') + return 'failed' + return 'unknown' def config_set_config_request(security_ctx, ssid, passphrase): @@ -72,7 +72,7 @@ def config_set_config_request(security_ctx, ssid, passphrase): cmd.cmd_set_config.ssid = tobytes(ssid) cmd.cmd_set_config.passphrase = tobytes(passphrase) enc_cmd = security_ctx.encrypt_data(cmd.SerializeToString()).decode('latin-1') - print_verbose(security_ctx, "Client -> Device (SetConfig cmd) " + utils.str_to_hexstr(enc_cmd)) + print_verbose(security_ctx, 'Client -> Device (SetConfig cmd) ' + utils.str_to_hexstr(enc_cmd)) return enc_cmd @@ -81,7 +81,7 @@ def config_set_config_response(security_ctx, response_data): decrypt = security_ctx.decrypt_data(tobytes(response_data)) cmd_resp4 = proto.wifi_config_pb2.WiFiConfigPayload() cmd_resp4.ParseFromString(decrypt) - print_verbose(security_ctx, "SetConfig status " + str(cmd_resp4.resp_set_config.status)) + print_verbose(security_ctx, 'SetConfig status ' + str(cmd_resp4.resp_set_config.status)) return cmd_resp4.resp_set_config.status @@ -90,7 +90,7 @@ def config_apply_config_request(security_ctx): cmd = proto.wifi_config_pb2.WiFiConfigPayload() cmd.msg = proto.wifi_config_pb2.TypeCmdApplyConfig enc_cmd = security_ctx.encrypt_data(cmd.SerializeToString()).decode('latin-1') - print_verbose(security_ctx, "Client -> Device (ApplyConfig cmd) " + utils.str_to_hexstr(enc_cmd)) + print_verbose(security_ctx, 'Client -> Device (ApplyConfig cmd) ' + utils.str_to_hexstr(enc_cmd)) return enc_cmd @@ -99,5 +99,5 @@ def config_apply_config_response(security_ctx, response_data): decrypt = security_ctx.decrypt_data(tobytes(response_data)) cmd_resp5 = proto.wifi_config_pb2.WiFiConfigPayload() cmd_resp5.ParseFromString(decrypt) - print_verbose(security_ctx, "ApplyConfig status " + str(cmd_resp5.resp_apply_config.status)) + print_verbose(security_ctx, 'ApplyConfig status ' + str(cmd_resp5.resp_apply_config.status)) return 
cmd_resp5.resp_apply_config.status diff --git a/tools/esp_prov/prov/wifi_scan.py b/tools/esp_prov/prov/wifi_scan.py index 1b3f913565..6b9ef8b2d0 100644 --- a/tools/esp_prov/prov/wifi_scan.py +++ b/tools/esp_prov/prov/wifi_scan.py @@ -16,15 +16,15 @@ # APIs for interpreting and creating protobuf packets for Wi-Fi Scanning from __future__ import print_function -from future.utils import tobytes -import utils import proto +import utils +from future.utils import tobytes def print_verbose(security_ctx, data): if (security_ctx.verbose): - print("++++ " + data + " ++++") + print('++++ ' + data + ' ++++') def scan_start_request(security_ctx, blocking=True, passive=False, group_channels=5, period_ms=120): @@ -36,7 +36,7 @@ def scan_start_request(security_ctx, blocking=True, passive=False, group_channel cmd.cmd_scan_start.group_channels = group_channels cmd.cmd_scan_start.period_ms = period_ms enc_cmd = security_ctx.encrypt_data(cmd.SerializeToString()).decode('latin-1') - print_verbose(security_ctx, "Client -> Device (Encrypted CmdScanStart) " + utils.str_to_hexstr(enc_cmd)) + print_verbose(security_ctx, 'Client -> Device (Encrypted CmdScanStart) ' + utils.str_to_hexstr(enc_cmd)) return enc_cmd @@ -45,7 +45,7 @@ def scan_start_response(security_ctx, response_data): dec_resp = security_ctx.decrypt_data(tobytes(response_data)) resp = proto.wifi_scan_pb2.WiFiScanPayload() resp.ParseFromString(dec_resp) - print_verbose(security_ctx, "ScanStart status " + str(resp.status)) + print_verbose(security_ctx, 'ScanStart status ' + str(resp.status)) if resp.status != 0: raise RuntimeError @@ -55,7 +55,7 @@ def scan_status_request(security_ctx): cmd = proto.wifi_scan_pb2.WiFiScanPayload() cmd.msg = proto.wifi_scan_pb2.TypeCmdScanStatus enc_cmd = security_ctx.encrypt_data(cmd.SerializeToString()).decode('latin-1') - print_verbose(security_ctx, "Client -> Device (Encrypted CmdScanStatus) " + utils.str_to_hexstr(enc_cmd)) + print_verbose(security_ctx, 'Client -> Device (Encrypted CmdScanStatus) ' + utils.str_to_hexstr(enc_cmd)) return enc_cmd @@ -64,10 +64,10 @@ def scan_status_response(security_ctx, response_data): dec_resp = security_ctx.decrypt_data(tobytes(response_data)) resp = proto.wifi_scan_pb2.WiFiScanPayload() resp.ParseFromString(dec_resp) - print_verbose(security_ctx, "ScanStatus status " + str(resp.status)) + print_verbose(security_ctx, 'ScanStatus status ' + str(resp.status)) if resp.status != 0: raise RuntimeError - return {"finished": resp.resp_scan_status.scan_finished, "count": resp.resp_scan_status.result_count} + return {'finished': resp.resp_scan_status.scan_finished, 'count': resp.resp_scan_status.result_count} def scan_result_request(security_ctx, index, count): @@ -77,7 +77,7 @@ def scan_result_request(security_ctx, index, count): cmd.cmd_scan_result.start_index = index cmd.cmd_scan_result.count = count enc_cmd = security_ctx.encrypt_data(cmd.SerializeToString()).decode('latin-1') - print_verbose(security_ctx, "Client -> Device (Encrypted CmdScanResult) " + utils.str_to_hexstr(enc_cmd)) + print_verbose(security_ctx, 'Client -> Device (Encrypted CmdScanResult) ' + utils.str_to_hexstr(enc_cmd)) return enc_cmd @@ -86,20 +86,20 @@ def scan_result_response(security_ctx, response_data): dec_resp = security_ctx.decrypt_data(tobytes(response_data)) resp = proto.wifi_scan_pb2.WiFiScanPayload() resp.ParseFromString(dec_resp) - print_verbose(security_ctx, "ScanResult status " + str(resp.status)) + print_verbose(security_ctx, 'ScanResult status ' + str(resp.status)) if resp.status != 0: raise 
RuntimeError - authmode_str = ["Open", "WEP", "WPA_PSK", "WPA2_PSK", "WPA_WPA2_PSK", "WPA2_ENTERPRISE"] + authmode_str = ['Open', 'WEP', 'WPA_PSK', 'WPA2_PSK', 'WPA_WPA2_PSK', 'WPA2_ENTERPRISE'] results = [] for entry in resp.resp_scan_result.entries: - results += [{"ssid": entry.ssid.decode('latin-1').rstrip('\x00'), - "bssid": utils.str_to_hexstr(entry.bssid.decode('latin-1')), - "channel": entry.channel, - "rssi": entry.rssi, - "auth": authmode_str[entry.auth]}] - print_verbose(security_ctx, "ScanResult SSID : " + str(results[-1]["ssid"])) - print_verbose(security_ctx, "ScanResult BSSID : " + str(results[-1]["bssid"])) - print_verbose(security_ctx, "ScanResult Channel : " + str(results[-1]["channel"])) - print_verbose(security_ctx, "ScanResult RSSI : " + str(results[-1]["rssi"])) - print_verbose(security_ctx, "ScanResult AUTH : " + str(results[-1]["auth"])) + results += [{'ssid': entry.ssid.decode('latin-1').rstrip('\x00'), + 'bssid': utils.str_to_hexstr(entry.bssid.decode('latin-1')), + 'channel': entry.channel, + 'rssi': entry.rssi, + 'auth': authmode_str[entry.auth]}] + print_verbose(security_ctx, 'ScanResult SSID : ' + str(results[-1]['ssid'])) + print_verbose(security_ctx, 'ScanResult BSSID : ' + str(results[-1]['bssid'])) + print_verbose(security_ctx, 'ScanResult Channel : ' + str(results[-1]['channel'])) + print_verbose(security_ctx, 'ScanResult RSSI : ' + str(results[-1]['rssi'])) + print_verbose(security_ctx, 'ScanResult AUTH : ' + str(results[-1]['auth'])) return results diff --git a/tools/esp_prov/security/security0.py b/tools/esp_prov/security/security0.py index 3e8d35366e..68bbb129e8 100644 --- a/tools/esp_prov/security/security0.py +++ b/tools/esp_prov/security/security0.py @@ -17,9 +17,10 @@ # protocomm endpoint with security type protocomm_security0 from __future__ import print_function -from future.utils import tobytes import proto +from future.utils import tobytes + from .security import Security @@ -54,7 +55,7 @@ class Security0(Security): setup_resp.ParseFromString(tobytes(response_data)) # Check if security scheme matches if setup_resp.sec_ver != proto.session_pb2.SecScheme0: - print("Incorrect sec scheme") + print('Incorrect sec scheme') def encrypt_data(self, data): # Passive. 
No encryption when security0 used diff --git a/tools/esp_prov/security/security1.py b/tools/esp_prov/security/security1.py index be657186c3..0c82093129 100644 --- a/tools/esp_prov/security/security1.py +++ b/tools/esp_prov/security/security1.py @@ -17,18 +17,17 @@ # protocomm endpoint with security type protocomm_security1 from __future__ import print_function -from future.utils import tobytes -import utils import proto -from .security import Security - +import session_pb2 +import utils from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey, X25519PublicKey from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes +from future.utils import tobytes -import session_pb2 +from .security import Security # Enum for state of protocomm_security1 FSM @@ -76,7 +75,7 @@ class Security1(Security): self.setup1_response(response_data) return None else: - print("Unexpected state") + print('Unexpected state') return None def __generate_key(self): @@ -92,7 +91,7 @@ class Security1(Security): def _print_verbose(self, data): if (self.verbose): - print("++++ " + data + " ++++") + print('++++ ' + data + ' ++++') def setup0_request(self): # Form SessionCmd0 request packet using client public key @@ -100,26 +99,26 @@ class Security1(Security): setup_req.sec_ver = session_pb2.SecScheme1 self.__generate_key() setup_req.sec1.sc0.client_pubkey = self.client_public_key - self._print_verbose("Client Public Key:\t" + utils.str_to_hexstr(self.client_public_key.decode('latin-1'))) + self._print_verbose('Client Public Key:\t' + utils.str_to_hexstr(self.client_public_key.decode('latin-1'))) return setup_req.SerializeToString().decode('latin-1') def setup0_response(self, response_data): # Interpret SessionResp0 response packet setup_resp = proto.session_pb2.SessionData() setup_resp.ParseFromString(tobytes(response_data)) - self._print_verbose("Security version:\t" + str(setup_resp.sec_ver)) + self._print_verbose('Security version:\t' + str(setup_resp.sec_ver)) if setup_resp.sec_ver != session_pb2.SecScheme1: - print("Incorrect sec scheme") + print('Incorrect sec scheme') exit(1) self.device_public_key = setup_resp.sec1.sr0.device_pubkey # Device random is the initialization vector device_random = setup_resp.sec1.sr0.device_random - self._print_verbose("Device Public Key:\t" + utils.str_to_hexstr(self.device_public_key.decode('latin-1'))) - self._print_verbose("Device Random:\t" + utils.str_to_hexstr(device_random.decode('latin-1'))) + self._print_verbose('Device Public Key:\t' + utils.str_to_hexstr(self.device_public_key.decode('latin-1'))) + self._print_verbose('Device Random:\t' + utils.str_to_hexstr(device_random.decode('latin-1'))) # Calculate Curve25519 shared key using Client private key and Device public key sharedK = self.client_private_key.exchange(X25519PublicKey.from_public_bytes(self.device_public_key)) - self._print_verbose("Shared Key:\t" + utils.str_to_hexstr(sharedK.decode('latin-1'))) + self._print_verbose('Shared Key:\t' + utils.str_to_hexstr(sharedK.decode('latin-1'))) # If PoP is provided, XOR SHA256 of PoP with the previously # calculated Shared Key to form the actual Shared Key @@ -130,7 +129,7 @@ class Security1(Security): digest = h.finalize() # XOR with and update Shared Key sharedK = xor(sharedK, digest) - self._print_verbose("New Shared Key XORed with PoP:\t" + utils.str_to_hexstr(sharedK.decode('latin-1'))) + self._print_verbose('New 
Shared Key XORed with PoP:\t' + utils.str_to_hexstr(sharedK.decode('latin-1'))) # Initialize the encryption engine with Shared Key and initialization vector cipher = Cipher(algorithms.AES(sharedK), modes.CTR(device_random), backend=default_backend()) self.cipher = cipher.encryptor() @@ -142,7 +141,7 @@ class Security1(Security): setup_req.sec1.msg = proto.sec1_pb2.Session_Command1 # Encrypt device public key and attach to the request packet client_verify = self.cipher.update(self.device_public_key) - self._print_verbose("Client Verify:\t" + utils.str_to_hexstr(client_verify.decode('latin-1'))) + self._print_verbose('Client Verify:\t' + utils.str_to_hexstr(client_verify.decode('latin-1'))) setup_req.sec1.sc1.client_verify_data = client_verify return setup_req.SerializeToString().decode('latin-1') @@ -154,16 +153,16 @@ class Security1(Security): if setup_resp.sec_ver == session_pb2.SecScheme1: # Read encrypted device verify string device_verify = setup_resp.sec1.sr1.device_verify_data - self._print_verbose("Device verify:\t" + utils.str_to_hexstr(device_verify.decode('latin-1'))) + self._print_verbose('Device verify:\t' + utils.str_to_hexstr(device_verify.decode('latin-1'))) # Decrypt the device verify string enc_client_pubkey = self.cipher.update(setup_resp.sec1.sr1.device_verify_data) - self._print_verbose("Enc client pubkey:\t " + utils.str_to_hexstr(enc_client_pubkey.decode('latin-1'))) + self._print_verbose('Enc client pubkey:\t ' + utils.str_to_hexstr(enc_client_pubkey.decode('latin-1'))) # Match decrypted string with client public key if enc_client_pubkey != self.client_public_key: - print("Mismatch in device verify") + print('Mismatch in device verify') return -2 else: - print("Unsupported security protocol") + print('Unsupported security protocol') return -1 def encrypt_data(self, data): diff --git a/tools/esp_prov/transport/__init__.py b/tools/esp_prov/transport/__init__.py index 907df1f3ca..4fa4b3df06 100644 --- a/tools/esp_prov/transport/__init__.py +++ b/tools/esp_prov/transport/__init__.py @@ -13,6 +13,6 @@ # limitations under the License.
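Editor's aside on the security1.py hunks above: the handshake those hunks touch is easier to follow outside the diff. The sketch below reproduces the key derivation using the same `cryptography` primitives the file imports; it is not part of the patch, and the device key pair, PoP value, and IV are hypothetical placeholders standing in for the values a real device supplies.

```python
# Standalone sketch of the Security1 key derivation (not part of the patch).
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric.x25519 import X25519PrivateKey, X25519PublicKey
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

# Both sides hold X25519 key pairs; the device side is simulated here.
client_private_key = X25519PrivateKey.generate()
device_private_key = X25519PrivateKey.generate()
device_public_bytes = device_private_key.public_key().public_bytes(
    serialization.Encoding.Raw, serialization.PublicFormat.Raw)

# Curve25519 shared secret, as in setup0_response()
shared_key = client_private_key.exchange(
    X25519PublicKey.from_public_bytes(device_public_bytes))

# Optional proof-of-possession: XOR the shared secret with SHA256(PoP)
pop = b'abcd1234'  # hypothetical PoP string
h = hashes.Hash(hashes.SHA256(), backend=default_backend())
h.update(pop)
shared_key = bytes(a ^ b for a, b in zip(shared_key, h.finalize()))

# AES-CTR engine keyed with the result; the device random serves as the IV
device_random = b'\x00' * 16  # placeholder for the device-provided IV
cipher = Cipher(algorithms.AES(shared_key), modes.CTR(device_random),
                backend=default_backend()).encryptor()
print(cipher.update(b'client verify data'))
```

Because AES-CTR is symmetric, the same keystream both encrypts and decrypts, which is why `setup1_response()` above can verify the device by running `cipher.update()` over the device's verify data and comparing against the client public key.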
# +from .transport_ble import * # noqa: F403, F401 from .transport_console import * # noqa: F403, F401 -from .transport_http import * # noqa: F403, F401 -from .transport_ble import * # noqa: F403, F401 +from .transport_http import * # noqa: F403, F401 diff --git a/tools/esp_prov/transport/ble_cli.py b/tools/esp_prov/transport/ble_cli.py index 5d88ece806..ff91376477 100644 --- a/tools/esp_prov/transport/ble_cli.py +++ b/tools/esp_prov/transport/ble_cli.py @@ -14,12 +14,12 @@ # from __future__ import print_function -from builtins import input -from future.utils import iteritems import platform +from builtins import input import utils +from future.utils import iteritems fallback = True @@ -28,9 +28,10 @@ fallback = True # else fallback to console mode if platform.system() == 'Linux': try: + import time + import dbus import dbus.mainloop.glib - import time fallback = False except ImportError: pass @@ -55,33 +56,33 @@ class BLE_Bluez_Client: dbus.mainloop.glib.DBusGMainLoop(set_as_default=True) bus = dbus.SystemBus() - manager = dbus.Interface(bus.get_object("org.bluez", "/"), "org.freedesktop.DBus.ObjectManager") + manager = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.freedesktop.DBus.ObjectManager') objects = manager.GetManagedObjects() for path, interfaces in iteritems(objects): - adapter = interfaces.get("org.bluez.Adapter1") + adapter = interfaces.get('org.bluez.Adapter1') if adapter is not None: if path.endswith(iface): - self.adapter = dbus.Interface(bus.get_object("org.bluez", path), "org.bluez.Adapter1") - self.adapter_props = dbus.Interface(bus.get_object("org.bluez", path), "org.freedesktop.DBus.Properties") + self.adapter = dbus.Interface(bus.get_object('org.bluez', path), 'org.bluez.Adapter1') + self.adapter_props = dbus.Interface(bus.get_object('org.bluez', path), 'org.freedesktop.DBus.Properties') break if self.adapter is None: - raise RuntimeError("Bluetooth adapter not found") + raise RuntimeError('Bluetooth adapter not found') - self.adapter_props.Set("org.bluez.Adapter1", "Powered", dbus.Boolean(1)) + self.adapter_props.Set('org.bluez.Adapter1', 'Powered', dbus.Boolean(1)) self.adapter.StartDiscovery() retry = 10 while (retry > 0): try: if self.device is None: - print("Connecting...") + print('Connecting...') # Wait for device to be discovered time.sleep(5) self._connect_() - print("Connected") - print("Getting Services...") + print('Connected') + print('Getting Services...') # Wait for services to be discovered time.sleep(5) self._get_services_() @@ -89,28 +90,28 @@ class BLE_Bluez_Client: except Exception as e: print(e) retry -= 1 - print("Retries left", retry) + print('Retries left', retry) continue self.adapter.StopDiscovery() return False def _connect_(self): bus = dbus.SystemBus() - manager = dbus.Interface(bus.get_object("org.bluez", "/"), "org.freedesktop.DBus.ObjectManager") + manager = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.freedesktop.DBus.ObjectManager') objects = manager.GetManagedObjects() dev_path = None for path, interfaces in iteritems(objects): - if "org.bluez.Device1" not in interfaces: + if 'org.bluez.Device1' not in interfaces: continue - if interfaces["org.bluez.Device1"].get("Name") == self.devname: + if interfaces['org.bluez.Device1'].get('Name') == self.devname: dev_path = path break if dev_path is None: - raise RuntimeError("BLE device not found") + raise RuntimeError('BLE device not found') try: - self.device = bus.get_object("org.bluez", dev_path) + self.device = bus.get_object('org.bluez', dev_path) try: uuids = 
self.device.Get('org.bluez.Device1', 'UUIDs', dbus_interface='org.freedesktop.DBus.Properties') @@ -128,19 +129,19 @@ class BLE_Bluez_Client: except Exception as e: print(e) self.device = None - raise RuntimeError("BLE device could not connect") + raise RuntimeError('BLE device could not connect') def _get_services_(self): bus = dbus.SystemBus() - manager = dbus.Interface(bus.get_object("org.bluez", "/"), "org.freedesktop.DBus.ObjectManager") + manager = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.freedesktop.DBus.ObjectManager') objects = manager.GetManagedObjects() service_found = False for srv_path, srv_interfaces in iteritems(objects): - if "org.bluez.GattService1" not in srv_interfaces: + if 'org.bluez.GattService1' not in srv_interfaces: continue if not srv_path.startswith(self.device.object_path): continue - service = bus.get_object("org.bluez", srv_path) + service = bus.get_object('org.bluez', srv_path) srv_uuid = service.Get('org.bluez.GattService1', 'UUID', dbus_interface='org.freedesktop.DBus.Properties') @@ -152,20 +153,20 @@ class BLE_Bluez_Client: nu_lookup = dict() characteristics = dict() for chrc_path, chrc_interfaces in iteritems(objects): - if "org.bluez.GattCharacteristic1" not in chrc_interfaces: + if 'org.bluez.GattCharacteristic1' not in chrc_interfaces: continue if not chrc_path.startswith(service.object_path): continue - chrc = bus.get_object("org.bluez", chrc_path) + chrc = bus.get_object('org.bluez', chrc_path) uuid = chrc.Get('org.bluez.GattCharacteristic1', 'UUID', dbus_interface='org.freedesktop.DBus.Properties') characteristics[uuid] = chrc for desc_path, desc_interfaces in iteritems(objects): - if "org.bluez.GattDescriptor1" not in desc_interfaces: + if 'org.bluez.GattDescriptor1' not in desc_interfaces: continue if not desc_path.startswith(chrc.object_path): continue - desc = bus.get_object("org.bluez", desc_path) + desc = bus.get_object('org.bluez', desc_path) desc_uuid = desc.Get('org.bluez.GattDescriptor1', 'UUID', dbus_interface='org.freedesktop.DBus.Properties') if desc_uuid[4:8] != '2901': @@ -205,7 +206,7 @@ class BLE_Bluez_Client: self.device = None self.nu_lookup = None self.characteristics = dict() - raise RuntimeError("Provisioning service not found") + raise RuntimeError('Provisioning service not found') def get_nu_lookup(self): return self.nu_lookup @@ -224,25 +225,25 @@ class BLE_Bluez_Client: self.nu_lookup = None self.characteristics = dict() if self.adapter_props: - self.adapter_props.Set("org.bluez.Adapter1", "Powered", dbus.Boolean(0)) + self.adapter_props.Set('org.bluez.Adapter1', 'Powered', dbus.Boolean(0)) def send_data(self, characteristic_uuid, data): try: path = self.characteristics[characteristic_uuid] except KeyError: - raise RuntimeError("Invalid characteristic : " + characteristic_uuid) + raise RuntimeError('Invalid characteristic : ' + characteristic_uuid) try: path.WriteValue([ord(c) for c in data], {}, dbus_interface='org.bluez.GattCharacteristic1') except TypeError: # python3 compatible path.WriteValue([c for c in data], {}, dbus_interface='org.bluez.GattCharacteristic1') except dbus.exceptions.DBusException as e: - raise RuntimeError("Failed to write value to characteristic " + characteristic_uuid + ": " + str(e)) + raise RuntimeError('Failed to write value to characteristic ' + characteristic_uuid + ': ' + str(e)) try: readval = path.ReadValue({}, dbus_interface='org.bluez.GattCharacteristic1') except dbus.exceptions.DBusException as e: - raise RuntimeError("Failed to read value from characteristic " + 
characteristic_uuid + ": " + str(e)) + raise RuntimeError('Failed to read value from characteristic ' + characteristic_uuid + ': ' + str(e)) return ''.join(chr(b) for b in readval) @@ -252,14 +253,14 @@ class BLE_Bluez_Client: # Console based BLE client for Cross Platform support class BLE_Console_Client: def connect(self, devname, iface, chrc_names, fallback_srv_uuid): - print("BLE client is running in console mode") - print("\tThis could be due to your platform not being supported or dependencies not being met") - print("\tPlease ensure all pre-requisites are met to run the full fledged client") - print("BLECLI >> Please connect to BLE device `" + devname + "` manually using your tool of choice") - resp = input("BLECLI >> Was the device connected successfully? [y/n] ") + print('BLE client is running in console mode') + print('\tThis could be due to your platform not being supported or dependencies not being met') + print('\tPlease ensure all pre-requisites are met to run the full fledged client') + print('BLECLI >> Please connect to BLE device `' + devname + '` manually using your tool of choice') + resp = input('BLECLI >> Was the device connected successfully? [y/n] ') if resp != 'Y' and resp != 'y': return False - print("BLECLI >> List available attributes of the connected device") + print('BLECLI >> List available attributes of the connected device') resp = input("BLECLI >> Is the service UUID '" + fallback_srv_uuid + "' listed among available attributes? [y/n] ") if resp != 'Y' and resp != 'y': return False @@ -279,9 +280,9 @@ class BLE_Console_Client: def send_data(self, characteristic_uuid, data): print("BLECLI >> Write following data to characteristic with UUID '" + characteristic_uuid + "' :") - print("\t>> " + utils.str_to_hexstr(data)) - print("BLECLI >> Enter data read from characteristic (in hex) :") - resp = input("\t<< ") + print('\t>> ' + utils.str_to_hexstr(data)) + print('BLECLI >> Enter data read from characteristic (in hex) :') + resp = input('\t<< ') return utils.hexstr_to_str(resp) diff --git a/tools/esp_prov/transport/transport_ble.py b/tools/esp_prov/transport/transport_ble.py index 333d95105c..5d88929a7c 100644 --- a/tools/esp_prov/transport/transport_ble.py +++ b/tools/esp_prov/transport/transport_ble.py @@ -15,9 +15,8 @@ from __future__ import print_function -from .transport import Transport - from . 
import ble_cli +from .transport import Transport class Transport_BLE(Transport): @@ -35,7 +34,7 @@ class Transport_BLE(Transport): if not self.cli.connect(devname=devname, iface='hci0', chrc_names=nu_lookup.keys(), fallback_srv_uuid=service_uuid): - raise RuntimeError("Failed to initialize transport") + raise RuntimeError('Failed to initialize transport') # Irrespective of provided parameters, let the client # generate a lookup table by reading advertisement data @@ -63,5 +62,5 @@ class Transport_BLE(Transport): def send_data(self, ep_name, data): # Write (and read) data to characteristic corresponding to the endpoint if ep_name not in self.name_uuid_lookup.keys(): - raise RuntimeError("Invalid endpoint : " + ep_name) + raise RuntimeError('Invalid endpoint : ' + ep_name) return self.cli.send_data(self.name_uuid_lookup[ep_name], data) diff --git a/tools/esp_prov/transport/transport_console.py b/tools/esp_prov/transport/transport_console.py index 8e95141cc7..9761d3cfe3 100644 --- a/tools/esp_prov/transport/transport_console.py +++ b/tools/esp_prov/transport/transport_console.py @@ -14,6 +14,7 @@ # from __future__ import print_function + from builtins import input import utils @@ -24,10 +25,10 @@ from .transport import Transport class Transport_Console(Transport): def send_data(self, path, data, session_id=0): - print("Client->Device msg :", path, session_id, utils.str_to_hexstr(data)) + print('Client->Device msg :', path, session_id, utils.str_to_hexstr(data)) try: - resp = input("Enter device->client msg : ") + resp = input('Enter device->client msg : ') except Exception as err: - print("error:", err) + print('error:', err) return None return utils.hexstr_to_str(resp) diff --git a/tools/esp_prov/transport/transport_http.py b/tools/esp_prov/transport/transport_http.py index be81ec4232..cb17befb43 100644 --- a/tools/esp_prov/transport/transport_http.py +++ b/tools/esp_prov/transport/transport_http.py @@ -14,9 +14,11 @@ # from __future__ import print_function -from future.utils import tobytes import socket + +from future.utils import tobytes + try: from http.client import HTTPConnection, HTTPSConnection except ImportError: @@ -31,28 +33,28 @@ class Transport_HTTP(Transport): try: socket.gethostbyname(hostname.split(':')[0]) except socket.gaierror: - raise RuntimeError("Unable to resolve hostname :" + hostname) + raise RuntimeError('Unable to resolve hostname :' + hostname) if ssl_context is None: self.conn = HTTPConnection(hostname, timeout=45) else: self.conn = HTTPSConnection(hostname, context=ssl_context, timeout=45) try: - print("Connecting to " + hostname) + print('Connecting to ' + hostname) self.conn.connect() except Exception as err: - raise RuntimeError("Connection Failure : " + str(err)) - self.headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"} + raise RuntimeError('Connection Failure : ' + str(err)) + self.headers = {'Content-type': 'application/x-www-form-urlencoded','Accept': 'text/plain'} def _send_post_request(self, path, data): try: - self.conn.request("POST", path, tobytes(data), self.headers) + self.conn.request('POST', path, tobytes(data), self.headers) response = self.conn.getresponse() if response.status == 200: return response.read().decode('latin-1') except Exception as err: - raise RuntimeError("Connection Failure : " + str(err)) - raise RuntimeError("Server responded with error code " + str(response.status)) + raise RuntimeError('Connection Failure : ' + str(err)) + raise RuntimeError('Server responded with error code ' + 
str(response.status)) def send_data(self, ep_name, data): return self._send_post_request('/' + ep_name, data) diff --git a/tools/esp_prov/utils/convenience.py b/tools/esp_prov/utils/convenience.py index 42d0febe26..be3ab26852 100644 --- a/tools/esp_prov/utils/convenience.py +++ b/tools/esp_prov/utils/convenience.py @@ -15,6 +15,7 @@ # Convenience functions for commonly used data type conversions import binascii + from future.utils import tobytes diff --git a/tools/find_apps.py b/tools/find_apps.py index f4bdf04d0d..d97c93c5d6 100755 --- a/tools/find_apps.py +++ b/tools/find_apps.py @@ -11,28 +11,18 @@ import logging import os import re import sys - import typing -from find_build_apps import ( - BUILD_SYSTEMS, - BUILD_SYSTEM_CMAKE, - BuildSystem, - BuildItem, - setup_logging, - ConfigRule, - config_rules_from_str, - DEFAULT_TARGET, -) +from find_build_apps import (BUILD_SYSTEM_CMAKE, BUILD_SYSTEMS, DEFAULT_TARGET, BuildItem, BuildSystem, ConfigRule, + config_rules_from_str, setup_logging) # Helper functions - def dict_from_sdkconfig(path): """ Parse the sdkconfig file at 'path', return name:value pairs as a dict """ - regex = re.compile(r"^([^#=]+)=(.+)$") + regex = re.compile(r'^([^#=]+)=(.+)$') result = {} with open(path) as f: for line in f: @@ -67,7 +57,7 @@ def find_builds_for_app(app_path, work_dir, build_dir, build_log, target_arg, :return: list of BuildItems representing build configuration of the app """ build_items = [] # type: typing.List[BuildItem] - default_config_name = "" + default_config_name = '' for rule in config_rules: if not rule.file_name: @@ -80,17 +70,17 @@ def find_builds_for_app(app_path, work_dir, build_dir, build_log, target_arg, # Check if the sdkconfig file specifies IDF_TARGET, and if it matches the --target argument.
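As an aside (the hunk itself continues right after this note), the sdkconfig filtering performed here is easy to show in isolation. The sketch below is not part of the patch; the sample file contents and target are invented, and the quote stripping is a simplification, since the diff does not show how `dict_from_sdkconfig` normalizes values.

```python
# Standalone sketch of the sdkconfig/IDF_TARGET filtering (not part of the patch).
import re

regex = re.compile(r'^([^#=]+)=(.+)$')  # same pattern as dict_from_sdkconfig

sample = [
    '# comments and blank lines do not match the pattern',
    'CONFIG_IDF_TARGET="esp32s2"',
    'CONFIG_PARTITION_TABLE_OFFSET=0x8000',
]
sdkconfig_dict = {}
for line in sample:
    m = regex.match(line)
    if m:
        sdkconfig_dict[m.group(1)] = m.group(2).strip('"')  # simplified

target_arg = 'esp32'
target_from_config = sdkconfig_dict.get('CONFIG_IDF_TARGET')
if target_from_config is not None and target_from_config != target_arg:
    print('Skipping sdkconfig which requires target ' + target_from_config)
```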
sdkconfig_dict = dict_from_sdkconfig(sdkconfig_path) - target_from_config = sdkconfig_dict.get("CONFIG_IDF_TARGET") + target_from_config = sdkconfig_dict.get('CONFIG_IDF_TARGET') if target_from_config is not None and target_from_config != target_arg: - logging.debug("Skipping sdkconfig {} which requires target {}".format( + logging.debug('Skipping sdkconfig {} which requires target {}'.format( sdkconfig_path, target_from_config)) continue # Figure out the config name - config_name = rule.config_name or "" - if "*" in rule.file_name: + config_name = rule.config_name or '' + if '*' in rule.file_name: # convert glob pattern into a regex - regex_str = r".*" + rule.file_name.replace(".", r"\.").replace("*", r"(.*)") + regex_str = r'.*' + rule.file_name.replace('.', r'\.').replace('*', r'(.*)') groups = re.match(regex_str, sdkconfig_path) assert groups config_name = groups.group(1) @@ -143,12 +133,12 @@ def find_apps(build_system_class, path, recursive, exclude_list, target): :return: list of paths of the apps found """ build_system_name = build_system_class.NAME - logging.debug("Looking for {} apps in {}{}".format(build_system_name, path, " recursively" if recursive else "")) + logging.debug('Looking for {} apps in {}{}'.format(build_system_name, path, ' recursively' if recursive else '')) if not recursive: if exclude_list: - logging.warning("--exclude option is ignored when used without --recursive") + logging.warning('--exclude option is ignored when used without --recursive') if not build_system_class.is_app(path): - logging.warning("Path {} specified without --recursive flag, but no {} app found there".format( + logging.warning('Path {} specified without --recursive flag, but no {} app found there'.format( path, build_system_name)) return [] return [path] @@ -156,14 +146,14 @@ def find_apps(build_system_class, path, recursive, exclude_list, target): # The remaining part is for recursive == True apps_found = [] # type: typing.List[str] for root, dirs, _ in os.walk(path, topdown=True): - logging.debug("Entering {}".format(root)) + logging.debug('Entering {}'.format(root)) if root in exclude_list: - logging.debug("Skipping {} (excluded)".format(root)) + logging.debug('Skipping {} (excluded)'.format(root)) del dirs[:] continue if build_system_class.is_app(root): - logging.debug("Found {} app in {}".format(build_system_name, root)) + logging.debug('Found {} app in {}'.format(build_system_name, root)) # Don't recurse into app subdirectories del dirs[:] @@ -172,88 +162,88 @@ def find_apps(build_system_class, path, recursive, exclude_list, target): apps_found.append(root) else: if supported_targets: - logging.debug("Skipping, app only supports targets: " + ", ".join(supported_targets)) + logging.debug('Skipping, app only supports targets: ' + ', '.join(supported_targets)) else: - logging.debug("Skipping, app has no supported targets") + logging.debug('Skipping, app has no supported targets') continue return apps_found def main(): - parser = argparse.ArgumentParser(description="Tool to generate build steps for IDF apps") + parser = argparse.ArgumentParser(description='Tool to generate build steps for IDF apps') parser.add_argument( - "-v", - "--verbose", - action="count", - help="Increase the logging level of the script. Can be specified multiple times.", + '-v', + '--verbose', + action='count', + help='Increase the logging level of the script. 
Can be specified multiple times.', ) parser.add_argument( - "--log-file", - type=argparse.FileType("w"), - help="Write the script log to the specified file, instead of stderr", + '--log-file', + type=argparse.FileType('w'), + help='Write the script log to the specified file, instead of stderr', ) parser.add_argument( - "--recursive", - action="store_true", - help="Look for apps in the specified directories recursively.", + '--recursive', + action='store_true', + help='Look for apps in the specified directories recursively.', ) parser.add_argument( - "--build-system", + '--build-system', choices=BUILD_SYSTEMS.keys() ) parser.add_argument( - "--work-dir", - help="If set, the app is first copied into the specified directory, and then built. " + - "If not set, the work directory is the directory of the app.", + '--work-dir', + help='If set, the app is first copied into the specified directory, and then built. ' + + 'If not set, the work directory is the directory of the app.', ) parser.add_argument( - "--config", - action="append", - help="Adds configurations (sdkconfig file names) to build. This can either be " + - "FILENAME[=NAME] or FILEPATTERN. FILENAME is the name of the sdkconfig file, " + - "relative to the project directory, to be used. Optional NAME can be specified, " + - "which can be used as a name of this configuration. FILEPATTERN is the name of " + - "the sdkconfig file, relative to the project directory, with at most one wildcard. " + - "The part captured by the wildcard is used as the name of the configuration.", + '--config', + action='append', + help='Adds configurations (sdkconfig file names) to build. This can either be ' + + 'FILENAME[=NAME] or FILEPATTERN. FILENAME is the name of the sdkconfig file, ' + + 'relative to the project directory, to be used. Optional NAME can be specified, ' + + 'which can be used as a name of this configuration. FILEPATTERN is the name of ' + + 'the sdkconfig file, relative to the project directory, with at most one wildcard. ' + + 'The part captured by the wildcard is used as the name of the configuration.', ) parser.add_argument( - "--build-dir", - help="If set, specifies the build directory name. Can expand placeholders. Can be either a " + - "name relative to the work directory, or an absolute path.", + '--build-dir', + help='If set, specifies the build directory name. Can expand placeholders. Can be either a ' + + 'name relative to the work directory, or an absolute path.', ) parser.add_argument( - "--build-log", - help="If specified, the build log will be written to this file. Can expand placeholders.", + '--build-log', + help='If specified, the build log will be written to this file. Can expand placeholders.', ) - parser.add_argument("--target", help="Build apps for given target.") + parser.add_argument('--target', help='Build apps for given target.') parser.add_argument( - "--format", - default="json", - choices=["json"], - help="Format to write the list of builds as", + '--format', + default='json', + choices=['json'], + help='Format to write the list of builds as', ) parser.add_argument( - "--exclude", - action="append", - help="Ignore specified directory (if --recursive is given).
Can be used multiple times.', ) parser.add_argument( - "-o", - "--output", - type=argparse.FileType("w"), - help="Output the list of builds to the specified file", + '-o', + '--output', + type=argparse.FileType('w'), + help='Output the list of builds to the specified file', ) parser.add_argument( - "--app-list", + '--app-list', default=None, - help="Scan test results. Restrict the build/artifacts preservation behavior to apps that need to be built. " - "If the file does not exist, will build all apps and upload all artifacts." + help='Scan test results. Restrict the build/artifacts preservation behavior to apps that need to be built. ' + 'If the file does not exist, will build all apps and upload all artifacts.' ) parser.add_argument( - "-p", "--paths", - nargs="+", - help="One or more app paths." + '-p', '--paths', + nargs='+', + help='One or more app paths.' ) args = parser.parse_args() setup_logging(args) @@ -265,19 +255,19 @@ def main(): raise ValueError('Conflicting settings. "recursive", "build_system", "target", "exclude", "paths" should not ' 'be specified with "app_list"') if not os.path.exists(args.app_list): - raise OSError("File not found {}".format(args.app_list)) + raise OSError('File not found {}'.format(args.app_list)) else: # If the build target is not set explicitly, get it from the environment or use the default one (esp32) if not args.target: - env_target = os.environ.get("IDF_TARGET") + env_target = os.environ.get('IDF_TARGET') if env_target: - logging.info("--target argument not set, using IDF_TARGET={} from the environment".format(env_target)) + logging.info('--target argument not set, using IDF_TARGET={} from the environment'.format(env_target)) args.target = env_target else: - logging.info("--target argument not set, using IDF_TARGET={} as the default".format(DEFAULT_TARGET)) + logging.info('--target argument not set, using IDF_TARGET={} as the default'.format(DEFAULT_TARGET)) args.target = DEFAULT_TARGET if not args.build_system: - logging.info("--build-system argument not set, using {} as the default".format(BUILD_SYSTEM_CMAKE)) + logging.info('--build-system argument not set, using {} as the default'.format(BUILD_SYSTEM_CMAKE)) args.build_system = BUILD_SYSTEM_CMAKE required_args = [args.build_system, args.target, args.paths] if not all(required_args): @@ -293,38 +283,38 @@ def main(): build_system_class = BUILD_SYSTEMS[args.build_system] for path in args.paths: app_dirs += find_apps(build_system_class, path, args.recursive, args.exclude or [], args.target) - apps = [{"app_dir": app_dir, "build": True, "preserve": True} for app_dir in app_dirs] + apps = [{'app_dir': app_dir, 'build': True, 'preserve': True} for app_dir in app_dirs] if not apps: - logging.warning("No apps found") + logging.warning('No apps found') SystemExit(0) - logging.info("Found {} apps".format(len(apps))) - apps.sort(key=lambda x: x["app_dir"]) + logging.info('Found {} apps'.format(len(apps))) + apps.sort(key=lambda x: x['app_dir']) # Find compatible configurations of each app, collect them as BuildItems build_items = [] # type: typing.List[BuildItem] config_rules = config_rules_from_str(args.config or []) for app in apps: build_items += find_builds_for_app( - app["app_dir"], + app['app_dir'], args.work_dir, args.build_dir, args.build_log, - args.target or app["target"], - args.build_system or app["build_system"], + args.target or app['target'], + args.build_system or app['build_system'], config_rules, - app["preserve"], + app['preserve'], ) - logging.info("Found {} builds".format(len(build_items))) +
logging.info('Found {} builds'.format(len(build_items))) # Write out the BuildItems. Only JSON supported now (will add YAML later). - if args.format != "json": + if args.format != 'json': raise NotImplementedError() out = args.output or sys.stdout - out.writelines([item.to_json() + "\n" for item in build_items]) + out.writelines([item.to_json() + '\n' for item in build_items]) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/find_build_apps/__init__.py b/tools/find_build_apps/__init__.py index c87ceebda0..fe0ce42c08 100644 --- a/tools/find_build_apps/__init__.py +++ b/tools/find_build_apps/__init__.py @@ -1,14 +1,7 @@ -from .common import ( - BuildItem, - BuildSystem, - BuildError, - ConfigRule, - config_rules_from_str, - setup_logging, - DEFAULT_TARGET, -) -from .cmake import CMakeBuildSystem, BUILD_SYSTEM_CMAKE -from .make import MakeBuildSystem, BUILD_SYSTEM_MAKE +from .cmake import BUILD_SYSTEM_CMAKE, CMakeBuildSystem +from .common import (DEFAULT_TARGET, BuildError, BuildItem, BuildSystem, ConfigRule, config_rules_from_str, + setup_logging) +from .make import BUILD_SYSTEM_MAKE, MakeBuildSystem BUILD_SYSTEMS = { BUILD_SYSTEM_MAKE: MakeBuildSystem, @@ -16,16 +9,16 @@ BUILD_SYSTEMS = { } __all__ = [ - "BuildItem", - "BuildSystem", - "BuildError", - "ConfigRule", - "config_rules_from_str", - "setup_logging", - "DEFAULT_TARGET", - "CMakeBuildSystem", - "BUILD_SYSTEM_CMAKE", - "MakeBuildSystem", - "BUILD_SYSTEM_MAKE", - "BUILD_SYSTEMS", + 'BuildItem', + 'BuildSystem', + 'BuildError', + 'ConfigRule', + 'config_rules_from_str', + 'setup_logging', + 'DEFAULT_TARGET', + 'CMakeBuildSystem', + 'BUILD_SYSTEM_CMAKE', + 'MakeBuildSystem', + 'BUILD_SYSTEM_MAKE', + 'BUILD_SYSTEMS', ] diff --git a/tools/find_build_apps/cmake.py b/tools/find_build_apps/cmake.py index 76ad7216a5..d124cb51f1 100644 --- a/tools/find_build_apps/cmake.py +++ b/tools/find_build_apps/cmake.py @@ -4,14 +4,14 @@ import shutil import subprocess import sys -from .common import BuildSystem, BuildItem, BuildError +from .common import BuildError, BuildItem, BuildSystem -BUILD_SYSTEM_CMAKE = "cmake" -IDF_PY = os.path.join(os.environ["IDF_PATH"], "tools", "idf.py") +BUILD_SYSTEM_CMAKE = 'cmake' +IDF_PY = os.path.join(os.environ['IDF_PATH'], 'tools', 'idf.py') # While ESP-IDF component CMakeLists files can be identified by the presence of 'idf_component_register' string, # there is no equivalent for the project CMakeLists files. This seems to be the best option... 
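An aside on the comment above: the project-detection heuristic it describes amounts to a substring test against the `project.cmake` include line defined just below. A minimal sketch follows; it is not part of the patch, and the app path is hypothetical.

```python
# Standalone sketch of the CMakeLists detection heuristic (not part of the patch).
import os

CMAKE_PROJECT_LINE = r'include($ENV{IDF_PATH}/tools/cmake/project.cmake)'

def is_cmake_app(app_path):
    cmakelists_path = os.path.join(app_path, 'CMakeLists.txt')
    if not os.path.exists(cmakelists_path):
        return False
    with open(cmakelists_path, 'r') as f:
        return CMAKE_PROJECT_LINE in f.read()

print(is_cmake_app('examples/get-started/hello_world'))  # hypothetical path
```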
-CMAKE_PROJECT_LINE = r"include($ENV{IDF_PATH}/tools/cmake/project.cmake)" +CMAKE_PROJECT_LINE = r'include($ENV{IDF_PATH}/tools/cmake/project.cmake)' class CMakeBuildSystem(BuildSystem): @@ -24,23 +24,23 @@ class CMakeBuildSystem(BuildSystem): args = [ sys.executable, IDF_PY, - "-B", + '-B', build_path, - "-C", + '-C', work_path, - "-DIDF_TARGET=" + build_item.target, + '-DIDF_TARGET=' + build_item.target, ] if extra_cmakecache_items: for key, val in extra_cmakecache_items.items(): - args.append("-D{}={}".format(key, val)) - if "TEST_EXCLUDE_COMPONENTS" in extra_cmakecache_items \ - and "TEST_COMPONENTS" not in extra_cmakecache_items: - args.append("-DTESTS_ALL=1") + args.append('-D{}={}'.format(key, val)) + if 'TEST_EXCLUDE_COMPONENTS' in extra_cmakecache_items \ + and 'TEST_COMPONENTS' not in extra_cmakecache_items: + args.append('-DTESTS_ALL=1') if build_item.verbose: - args.append("-v") - args.append("build") - cmdline = format(" ".join(args)) - logging.info("Running {}".format(cmdline)) + args.append('-v') + args.append('build') + cmdline = format(' '.join(args)) + logging.info('Running {}'.format(cmdline)) if build_item.dry_run: return @@ -49,20 +49,20 @@ class CMakeBuildSystem(BuildSystem): build_stdout = sys.stdout build_stderr = sys.stderr if build_item.build_log_path: - logging.info("Writing build log to {}".format(build_item.build_log_path)) - log_file = open(build_item.build_log_path, "w") + logging.info('Writing build log to {}'.format(build_item.build_log_path)) + log_file = open(build_item.build_log_path, 'w') build_stdout = log_file build_stderr = log_file try: subprocess.check_call(args, stdout=build_stdout, stderr=build_stderr) except subprocess.CalledProcessError as e: - raise BuildError("Build failed with exit code {}".format(e.returncode)) + raise BuildError('Build failed with exit code {}'.format(e.returncode)) else: # Also save the sdkconfig file in the build directory shutil.copyfile( - os.path.join(work_path, "sdkconfig"), - os.path.join(build_path, "sdkconfig"), + os.path.join(work_path, 'sdkconfig'), + os.path.join(build_path, 'sdkconfig'), ) build_item.size_json_fp = build_item.get_size_json_fp() finally: @@ -71,10 +71,10 @@ class CMakeBuildSystem(BuildSystem): @staticmethod def _read_cmakelists(app_path): - cmakelists_path = os.path.join(app_path, "CMakeLists.txt") + cmakelists_path = os.path.join(app_path, 'CMakeLists.txt') if not os.path.exists(cmakelists_path): return None - with open(cmakelists_path, "r") as cmakelists_file: + with open(cmakelists_path, 'r') as cmakelists_file: return cmakelists_file.read() @staticmethod diff --git a/tools/find_build_apps/common.py b/tools/find_build_apps/common.py index 1b3039ea56..e034689053 100644 --- a/tools/find_build_apps/common.py +++ b/tools/find_build_apps/common.py @@ -7,35 +7,34 @@ import re import shutil import subprocess import sys +import typing from abc import abstractmethod from collections import namedtuple from io import open -import typing +DEFAULT_TARGET = 'esp32' -DEFAULT_TARGET = "esp32" +TARGET_PLACEHOLDER = '@t' +WILDCARD_PLACEHOLDER = '@w' +NAME_PLACEHOLDER = '@n' +FULL_NAME_PLACEHOLDER = '@f' +INDEX_PLACEHOLDER = '@i' -TARGET_PLACEHOLDER = "@t" -WILDCARD_PLACEHOLDER = "@w" -NAME_PLACEHOLDER = "@n" -FULL_NAME_PLACEHOLDER = "@f" -INDEX_PLACEHOLDER = "@i" - -IDF_SIZE_PY = os.path.join(os.environ["IDF_PATH"], "tools", "idf_size.py") +IDF_SIZE_PY = os.path.join(os.environ['IDF_PATH'], 'tools', 'idf_size.py') SIZE_JSON_FN = 'size.json' SDKCONFIG_LINE_REGEX = re.compile(r"^([^=]+)=\"?([^\"\n]*)\"?\n*$") # 
If these keys are present in sdkconfig.defaults, they will be extracted and passed to CMake SDKCONFIG_TEST_OPTS = [ - "EXCLUDE_COMPONENTS", - "TEST_EXCLUDE_COMPONENTS", - "TEST_COMPONENTS", + 'EXCLUDE_COMPONENTS', + 'TEST_EXCLUDE_COMPONENTS', + 'TEST_COMPONENTS', ] # These keys in sdkconfig.defaults are not propagated to the final sdkconfig file: SDKCONFIG_IGNORE_OPTS = [ - "TEST_GROUPS" + 'TEST_GROUPS' ] # ConfigRule represents one --config argument of find_apps.py. @@ -45,7 +44,7 @@ SDKCONFIG_IGNORE_OPTS = [ # For example: # filename='', config_name='default' — represents the default app configuration, and gives it a name 'default' # filename='sdkconfig.*', config_name=None - represents the set of configurations, names match the wildcard value -ConfigRule = namedtuple("ConfigRule", ["file_name", "config_name"]) +ConfigRule = namedtuple('ConfigRule', ['file_name', 'config_name']) def config_rules_from_str(rule_strings): # type: (typing.List[str]) -> typing.List[ConfigRule] @@ -56,7 +55,7 @@ def config_rules_from_str(rule_strings): # type: (typing.List[str]) -> typing.L """ rules = [] # type: typing.List[ConfigRule] for rule_str in rule_strings: - items = rule_str.split("=", 2) + items = rule_str.split('=', 2) rules.append(ConfigRule(items[0], items[1] if len(items) == 2 else None)) return rules @@ -128,7 +127,7 @@ class BuildItem(object): self.work_path = self.work_dir or self.app_dir if not self.build_dir: - self.build_path = os.path.join(self.work_path, "build") + self.build_path = os.path.join(self.work_path, 'build') elif os.path.isabs(self.build_dir): self.build_path = self.build_dir else: @@ -164,11 +163,11 @@ class BuildItem(object): return self._expand(self._build_log_path) def __repr__(self): - return "({}) Build app {} for target {}, sdkconfig {} in {}".format( + return '({}) Build app {} for target {}, sdkconfig {} in {}'.format( self.build_system, self.app_dir, self.target, - self.sdkconfig_path or "(default)", + self.sdkconfig_path or '(default)', self.build_dir, ) @@ -189,16 +188,16 @@ class BuildItem(object): Internal function, called by to_json and to_json_expanded """ return json.dumps({ - "build_system": self.build_system, - "app_dir": app_dir, - "work_dir": work_dir, - "build_dir": build_dir, - "build_log_path": build_log_path, - "sdkconfig": self.sdkconfig_path, - "config": self.config_name, - "target": self.target, - "verbose": self.verbose, - "preserve": self.preserve, + 'build_system': self.build_system, + 'app_dir': app_dir, + 'work_dir': work_dir, + 'build_dir': build_dir, + 'build_log_path': build_log_path, + 'sdkconfig': self.sdkconfig_path, + 'config': self.config_name, + 'target': self.target, + 'verbose': self.verbose, + 'preserve': self.preserve, }) @staticmethod @@ -208,17 +207,17 @@ class BuildItem(object): """ d = json.loads(str(json_str)) result = BuildItem( - app_path=d["app_dir"], - work_dir=d["work_dir"], - build_path=d["build_dir"], - build_log_path=d["build_log_path"], - sdkconfig_path=d["sdkconfig"], - config_name=d["config"], - target=d["target"], - build_system=d["build_system"], - preserve_artifacts=d["preserve"] + app_path=d['app_dir'], + work_dir=d['work_dir'], + build_path=d['build_dir'], + build_log_path=d['build_log_path'], + sdkconfig_path=d['sdkconfig'], + config_name=d['config'], + target=d['target'], + build_system=d['build_system'], + preserve_artifacts=d['preserve'] ) - result.verbose = d["verbose"] + result.verbose = d['verbose'] return result def _expand(self, path): # type: (str) -> str @@ -233,7 +232,7 @@ class BuildItem(object): 
path = path.replace(TARGET_PLACEHOLDER, self.target) path = path.replace(NAME_PLACEHOLDER, self._app_name) if (FULL_NAME_PLACEHOLDER in path): # to avoid recursion to the call to app_dir in the next line: - path = path.replace(FULL_NAME_PLACEHOLDER, self.app_dir.replace(os.path.sep, "_")) + path = path.replace(FULL_NAME_PLACEHOLDER, self.app_dir.replace(os.path.sep, '_')) wildcard_pos = path.find(WILDCARD_PLACEHOLDER) if wildcard_pos != -1: if self.config_name: @@ -288,7 +287,7 @@ class BuildSystem: Derived classes implement the methods below. Objects of these classes aren't instantiated, instead the class (type object) is used. """ - NAME = "undefined" + NAME = 'undefined' SUPPORTED_TARGETS_REGEX = re.compile(r'Supported [Tt]argets((?:[ |]+(?:[0-9a-zA-Z\-]+))+)') FORMAL_TO_USUAL = { @@ -307,15 +306,15 @@ class BuildSystem: if work_path != app_path: if os.path.exists(work_path): - logging.debug("Work directory {} exists, removing".format(work_path)) + logging.debug('Work directory {} exists, removing'.format(work_path)) if not build_item.dry_run: shutil.rmtree(work_path) - logging.debug("Copying app from {} to {}".format(app_path, work_path)) + logging.debug('Copying app from {} to {}'.format(app_path, work_path)) if not build_item.dry_run: shutil.copytree(app_path, work_path) if os.path.exists(build_path): - logging.debug("Build directory {} exists, removing".format(build_path)) + logging.debug('Build directory {} exists, removing'.format(build_path)) if not build_item.dry_run: shutil.rmtree(build_path) @@ -328,29 +327,29 @@ class BuildSystem: # Note: the build system supports taking multiple sdkconfig.defaults files via SDKCONFIG_DEFAULTS # CMake variable. However here we do this manually to perform environment variable expansion in the # sdkconfig files. - sdkconfig_defaults_list = ["sdkconfig.defaults", "sdkconfig.defaults." + build_item.target] + sdkconfig_defaults_list = ['sdkconfig.defaults', 'sdkconfig.defaults.' 
+ build_item.target] if build_item.sdkconfig_path: sdkconfig_defaults_list.append(build_item.sdkconfig_path) - sdkconfig_file = os.path.join(work_path, "sdkconfig") + sdkconfig_file = os.path.join(work_path, 'sdkconfig') if os.path.exists(sdkconfig_file): - logging.debug("Removing sdkconfig file: {}".format(sdkconfig_file)) + logging.debug('Removing sdkconfig file: {}'.format(sdkconfig_file)) if not build_item.dry_run: os.unlink(sdkconfig_file) - logging.debug("Creating sdkconfig file: {}".format(sdkconfig_file)) + logging.debug('Creating sdkconfig file: {}'.format(sdkconfig_file)) extra_cmakecache_items = {} if not build_item.dry_run: - with open(sdkconfig_file, "w") as f_out: + with open(sdkconfig_file, 'w') as f_out: for sdkconfig_name in sdkconfig_defaults_list: sdkconfig_path = os.path.join(work_path, sdkconfig_name) if not sdkconfig_path or not os.path.exists(sdkconfig_path): continue - logging.debug("Appending {} to sdkconfig".format(sdkconfig_name)) - with open(sdkconfig_path, "r") as f_in: + logging.debug('Appending {} to sdkconfig'.format(sdkconfig_name)) + with open(sdkconfig_path, 'r') as f_in: for line in f_in: - if not line.endswith("\n"): - line += "\n" + if not line.endswith('\n'): + line += '\n' if cls.NAME == 'cmake': m = SDKCONFIG_LINE_REGEX.match(line) key = m.group(1) if m else None @@ -365,10 +364,10 @@ class BuildSystem: sdkconfig_path = os.path.join(app_path, sdkconfig_name) if not sdkconfig_path: continue - logging.debug("Considering sdkconfig {}".format(sdkconfig_path)) + logging.debug('Considering sdkconfig {}'.format(sdkconfig_path)) if not os.path.exists(sdkconfig_path): continue - logging.debug("Appending {} to sdkconfig".format(sdkconfig_name)) + logging.debug('Appending {} to sdkconfig'.format(sdkconfig_name)) # The preparation of build is finished. Implement the build part in sub classes. 
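Before the build-system-specific part below, it may help to see the sdkconfig assembly above condensed: `sdkconfig.defaults`, the per-target `sdkconfig.defaults.<target>` file, and the configuration rule's file are concatenated, in that order, into the work directory's `sdkconfig`, so later files win for duplicate keys when the config system parses the result. A minimal sketch under those assumptions (the extraction of test options into CMake cache values is omitted, and the work path, target, and rule file name are hypothetical):

```python
# Standalone sketch of the sdkconfig merge order (not part of the patch).
import os

def assemble_sdkconfig(work_path, target, rule_sdkconfig=None):
    names = ['sdkconfig.defaults', 'sdkconfig.defaults.' + target]
    if rule_sdkconfig:
        names.append(rule_sdkconfig)  # e.g. 'sdkconfig.ci.release' (hypothetical)
    sdkconfig_file = os.path.join(work_path, 'sdkconfig')
    with open(sdkconfig_file, 'w') as f_out:
        for name in names:
            path = os.path.join(work_path, name)
            if not os.path.exists(path):
                continue  # skip defaults files the app does not provide
            with open(path, 'r') as f_in:
                for line in f_in:
                    if not line.endswith('\n'):
                        line += '\n'  # guard against a missing trailing newline
                    f_out.write(line)
    return sdkconfig_file
```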
if cls.NAME == 'cmake': @@ -409,7 +408,7 @@ class BuildSystem: readme_path = get_md_or_rst(os.path.dirname(app_path)) if not readme_path: return None - with open(readme_path, "r", encoding='utf8') as readme_file: + with open(readme_path, 'r', encoding='utf8') as readme_file: return readme_file.read() @classmethod @@ -460,7 +459,7 @@ def setup_logging(args): log_level = logging.DEBUG logging.basicConfig( - format="%(levelname)s: %(message)s", + format='%(levelname)s: %(message)s', stream=args.log_file or sys.stderr, level=log_level, ) diff --git a/tools/find_build_apps/make.py b/tools/find_build_apps/make.py index 3930079ba3..419964657a 100644 --- a/tools/find_build_apps/make.py +++ b/tools/find_build_apps/make.py @@ -4,12 +4,12 @@ import shlex import subprocess import sys -from .common import BuildSystem, BuildError +from .common import BuildError, BuildSystem # Same for the Makefile projects: -MAKE_PROJECT_LINE = r"include $(IDF_PATH)/make/project.mk" +MAKE_PROJECT_LINE = r'include $(IDF_PATH)/make/project.mk' -BUILD_SYSTEM_MAKE = "make" +BUILD_SYSTEM_MAKE = 'make' try: string_type = basestring @@ -34,8 +34,8 @@ class MakeBuildSystem(BuildSystem): build_stdout = sys.stdout build_stderr = sys.stderr if build_item.build_log_path: - logging.info("Writing build log to {}".format(build_item.build_log_path)) - log_file = open(build_item.build_log_path, "w") + logging.info('Writing build log to {}'.format(build_item.build_log_path)) + log_file = open(build_item.build_log_path, 'w') build_stdout = log_file build_stderr = log_file @@ -46,16 +46,16 @@ class MakeBuildSystem(BuildSystem): except subprocess.CalledProcessError as e: if log_file: log_file.close() - raise BuildError("Build failed with exit code {}".format(e.returncode)) + raise BuildError('Build failed with exit code {}'.format(e.returncode)) build_item.size_json_fp = build_item.get_size_json_fp() @staticmethod def is_app(path): - makefile_path = os.path.join(path, "Makefile") + makefile_path = os.path.join(path, 'Makefile') if not os.path.exists(makefile_path): return False - with open(makefile_path, "r") as makefile: + with open(makefile_path, 'r') as makefile: makefile_content = makefile.read() if MAKE_PROJECT_LINE not in makefile_content: return False diff --git a/tools/gdb_panic_server.py b/tools/gdb_panic_server.py index ccf8a0c6d6..2a45a00f8a 100644 --- a/tools/gdb_panic_server.py +++ b/tools/gdb_panic_server.py @@ -31,18 +31,17 @@ # -from builtins import bytes import argparse +import binascii +import logging import struct import sys -import logging -import binascii +from builtins import bytes from collections import namedtuple -from pyparsing import Literal, Word, nums, OneOrMore, srange, Group, Combine - # Used for type annotations only. Silence linter warnings. -from pyparsing import ParseResults, ParserElement # noqa: F401 # pylint: disable=unused-import +from pyparsing import (Combine, Group, Literal, OneOrMore, ParserElement, # noqa: F401 # pylint: disable=unused-import + ParseResults, Word, nums, srange) try: import typing # noqa: F401 # pylint: disable=unused-import @@ -50,30 +49,30 @@ except ImportError: pass # pyparsing helper -hexnumber = srange("[0-9a-f]") +hexnumber = srange('[0-9a-f]') # List of registers to be passed to GDB, in the order GDB expects. # The names should match those used in IDF panic handler. # Registers not present in IDF panic handler output (like X0) will be assumed to be 0. 
GDB_REGS_INFO_RISCV_ILP32 = [ - "X0", "RA", "SP", "GP", - "TP", "T0", "T1", "T2", - "S0/FP", "S1", "A0", "A1", - "A2", "A3", "A4", "A5", - "A6", "A7", "S2", "S3", - "S4", "S5", "S6", "S7", - "S8", "S9", "S10", "S11", - "T3", "T4", "T5", "T6", - "MEPC" + 'X0', 'RA', 'SP', 'GP', + 'TP', 'T0', 'T1', 'T2', + 'S0/FP', 'S1', 'A0', 'A1', + 'A2', 'A3', 'A4', 'A5', + 'A6', 'A7', 'S2', 'S3', + 'S4', 'S5', 'S6', 'S7', + 'S8', 'S9', 'S10', 'S11', + 'T3', 'T4', 'T5', 'T6', + 'MEPC' ] GDB_REGS_INFO = { - "esp32c3": GDB_REGS_INFO_RISCV_ILP32 + 'esp32c3': GDB_REGS_INFO_RISCV_ILP32 } -PanicInfo = namedtuple("PanicInfo", "core_id regs stack_base_addr stack_data") +PanicInfo = namedtuple('PanicInfo', 'core_id regs stack_base_addr stack_data') def build_riscv_panic_output_parser(): # type: () -> typing.Type[ParserElement] @@ -83,25 +82,25 @@ def build_riscv_panic_output_parser(): # type: () -> typing.Type[ParserElement] # Guru Meditation Error: Core 0 panic'ed (Store access fault). Exception was unhandled. # Core 0 register dump: - reg_dump_header = Group(Literal("Core") + - Word(nums)("core_id") + - Literal("register dump:"))("reg_dump_header") + reg_dump_header = Group(Literal('Core') + + Word(nums)('core_id') + + Literal('register dump:'))('reg_dump_header') # MEPC : 0x4200232c RA : 0x42009694 SP : 0x3fc93a80 GP : 0x3fc8b320 - reg_name = Word(srange("[A-Z_0-9/-]"))("name") - hexnumber_with_0x = Combine(Literal("0x") + Word(hexnumber)) - reg_value = hexnumber_with_0x("value") - reg_dump_one_reg = Group(reg_name + Literal(":") + reg_value) # not named because there will be OneOrMore - reg_dump_all_regs = Group(OneOrMore(reg_dump_one_reg))("regs") + reg_name = Word(srange('[A-Z_0-9/-]'))('name') + hexnumber_with_0x = Combine(Literal('0x') + Word(hexnumber)) + reg_value = hexnumber_with_0x('value') + reg_dump_one_reg = Group(reg_name + Literal(':') + reg_value) # not named because there will be OneOrMore + reg_dump_all_regs = Group(OneOrMore(reg_dump_one_reg))('regs') reg_dump = Group(reg_dump_header + reg_dump_all_regs) # not named because there will be OneOrMore - reg_dumps = Group(OneOrMore(reg_dump))("reg_dumps") + reg_dumps = Group(OneOrMore(reg_dump))('reg_dumps') # Stack memory: # 3fc93a80: 0x00000030 0x00000021 0x3fc8aedc 0x4200232a 0xa5a5a5a5 0xa5a5a5a5 0x3fc8aedc 0x420099b0 - stack_line = Group(Word(hexnumber)("base") + Literal(":") + - Group(OneOrMore(hexnumber_with_0x))("data")) - stack_dump = Group(Literal("Stack memory:") + - Group(OneOrMore(stack_line))("lines"))("stack_dump") + stack_line = Group(Word(hexnumber)('base') + Literal(':') + + Group(OneOrMore(hexnumber_with_0x))('data')) + stack_dump = Group(Literal('Stack memory:') + + Group(OneOrMore(stack_line))('lines'))('stack_dump') # Parser for the complete panic output: panic_output = reg_dumps + stack_dump @@ -113,7 +112,7 @@ def get_stack_addr_and_data(res): # type: (ParseResults) -> typing.Tuple[int, b stack_base_addr = 0 # First reported address in the dump base_addr = 0 # keeps track of the address for the given line of the dump bytes_in_line = 0 # bytes of stack parsed on the previous line; used to validate the next base address - stack_data = b"" # accumulates all the dumped stack data + stack_data = b'' # accumulates all the dumped stack data for line in res.stack_dump.lines: # update and validate the base address prev_base_addr = base_addr @@ -125,7 +124,7 @@ def get_stack_addr_and_data(res): # type: (ParseResults) -> typing.Tuple[int, b # convert little-endian hex words to byte representation words = [int(w, 16) for w in line.data] 
- line_data = b"".join([struct.pack(" PanicInfo res = results[0] if len(res.reg_dumps) > 1: - raise NotImplementedError("Handling of multi-core register dumps not implemented") + raise NotImplementedError('Handling of multi-core register dumps not implemented') # Build a dict of register names/values rd = res.reg_dumps[0] @@ -162,7 +161,7 @@ def parse_idf_riscv_panic_output(panic_text): # type: (str) -> PanicInfo PANIC_OUTPUT_PARSERS = { - "esp32c3": parse_idf_riscv_panic_output + 'esp32c3': parse_idf_riscv_panic_output } @@ -173,82 +172,82 @@ class GdbServer(object): self.out_stream = sys.stdout self.reg_list = GDB_REGS_INFO[target] - self.logger = logging.getLogger("GdbServer") + self.logger = logging.getLogger('GdbServer') if log_file: - handler = logging.FileHandler(log_file, "w+") + handler = logging.FileHandler(log_file, 'w+') self.logger.setLevel(logging.DEBUG) - formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s") + formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s') handler.setFormatter(formatter) self.logger.addHandler(handler) def run(self): # type: () -> None """ Process GDB commands from stdin until GDB tells us to quit """ - buffer = "" + buffer = '' while True: buffer += self.in_stream.read(1) if len(buffer) > 3 and buffer[-3] == '#': self._handle_command(buffer) - buffer = "" + buffer = '' def _handle_command(self, buffer): # type: (str) -> None command = buffer[1:-3] # ignore checksums # Acknowledge the command - self.out_stream.write("+") + self.out_stream.write('+') self.out_stream.flush() - self.logger.debug("Got command: %s", command) - if command == "?": + self.logger.debug('Got command: %s', command) + if command == '?': # report sigtrap as the stop reason; the exact reason doesn't matter for backtracing - self._respond("T05") - elif command.startswith("Hg") or command.startswith("Hc"): + self._respond('T05') + elif command.startswith('Hg') or command.startswith('Hc'): # Select thread command - self._respond("OK") - elif command == "qfThreadInfo": + self._respond('OK') + elif command == 'qfThreadInfo': # Get list of threads. # Only one thread for now, can be extended to show one thread for each core, # if we dump both cores (e.g. on an interrupt watchdog) - self._respond("m1") - elif command == "qC": + self._respond('m1') + elif command == 'qC': # That single thread is selected. 
- self._respond("QC1") - elif command == "g": + self._respond('QC1') + elif command == 'g': # Registers read self._respond_regs() - elif command.startswith("m"): + elif command.startswith('m'): # Memory read - addr, size = [int(v, 16) for v in command[1:].split(",")] + addr, size = [int(v, 16) for v in command[1:].split(',')] self._respond_mem(addr, size) - elif command.startswith("vKill") or command == "k": + elif command.startswith('vKill') or command == 'k': # Quit - self._respond("OK") + self._respond('OK') raise SystemExit(0) else: # Empty response required for any unknown command - self._respond("") + self._respond('') def _respond(self, data): # type: (str) -> None # calculate checksum - data_bytes = bytes(data.encode("ascii")) # bytes() for Py2 compatibility + data_bytes = bytes(data.encode('ascii')) # bytes() for Py2 compatibility checksum = sum(data_bytes) & 0xff # format and write the response - res = "${}#{:02x}".format(data, checksum) - self.logger.debug("Wrote: %s", res) + res = '${}#{:02x}'.format(data, checksum) + self.logger.debug('Wrote: %s', res) self.out_stream.write(res) self.out_stream.flush() # get the result ('+' or '-') ret = self.in_stream.read(1) - self.logger.debug("Response: %s", ret) + self.logger.debug('Response: %s', ret) if ret != '+': sys.stderr.write("GDB responded with '-' to {}".format(res)) raise SystemExit(1) def _respond_regs(self): # type: () -> None - response = "" + response = '' for reg_name in self.reg_list: # register values are reported as hexadecimal strings # in target byte order (i.e. LSB first for RISC-V) reg_val = self.panic_info.regs.get(reg_name, 0) - reg_bytes = struct.pack(" None @@ -262,24 +261,24 @@ class GdbServer(object): def in_stack(addr): return stack_addr_min <= addr < stack_addr_max - result = "" + result = '' for addr in range(start_addr, start_addr + size): if not in_stack(addr): - result += "00" + result += '00' else: - result += "{:02x}".format(stack_data[addr - stack_addr_min]) + result += '{:02x}'.format(stack_data[addr - stack_addr_min]) self._respond(result) def main(): parser = argparse.ArgumentParser() - parser.add_argument("input_file", type=argparse.FileType("r"), - help="File containing the panic handler output") - parser.add_argument("--target", choices=GDB_REGS_INFO.keys(), - help="Chip to use (determines the architecture)") - parser.add_argument("--gdb-log", default=None, - help="If specified, the file for logging GDB server debug information") + parser.add_argument('input_file', type=argparse.FileType('r'), + help='File containing the panic handler output') + parser.add_argument('--target', choices=GDB_REGS_INFO.keys(), + help='Chip to use (determines the architecture)') + parser.add_argument('--gdb-log', default=None, + help='If specified, the file for logging GDB server debug information') args = parser.parse_args() panic_info = PANIC_OUTPUT_PARSERS[args.target](args.input_file.read()) @@ -291,5 +290,5 @@ def main(): sys.exit(0) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/gen_esp_err_to_name.py b/tools/gen_esp_err_to_name.py index e88b14cd2c..cd9ba8a041 100755 --- a/tools/gen_esp_err_to_name.py +++ b/tools/gen_esp_err_to_name.py @@ -14,13 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import print_function -from __future__ import unicode_literals +from __future__ import print_function, unicode_literals + import sys + try: - from builtins import str - from builtins import range - from builtins import object + from builtins import object, range, str except ImportError: # This should not happen because the Python packages are checked before invoking this script. However, here is # some output which should help if we missed something. @@ -30,14 +29,14 @@ except ImportError: # requirements.txt from the IDF_PATH should be used) or from the documentation project (then the requirements.txt # for the documentation directory should be used). sys.exit(1) -from io import open -import os import argparse -import re -import fnmatch import collections -import textwrap +import fnmatch import functools +import os +import re +import textwrap +from io import open # list files here which should not be parsed ignore_files = [os.path.join('components', 'mdns', 'test_afl_fuzz_host', 'esp32_compat.h'), @@ -72,7 +71,7 @@ class ErrItem(object): - rel_str - (optional) error string which is a base for the error - rel_off - (optional) offset in relation to the base error """ - def __init__(self, name, file, include_as=None, comment="", rel_str="", rel_off=0): + def __init__(self, name, file, include_as=None, comment='', rel_str='', rel_off=0): self.name = name self.file = file self.include_as = include_as @@ -81,11 +80,11 @@ class ErrItem(object): self.rel_off = rel_off def __str__(self): - ret = self.name + " from " + self.file - if (self.rel_str != ""): - ret += " is (" + self.rel_str + " + " + str(self.rel_off) + ")" - if self.comment != "": - ret += " // " + self.comment + ret = self.name + ' from ' + self.file + if (self.rel_str != ''): + ret += ' is (' + self.rel_str + ' + ' + str(self.rel_off) + ')' + if self.comment != '': + ret += ' // ' + self.comment return ret def __cmp__(self, other): @@ -94,7 +93,7 @@ class ErrItem(object): elif self.file not in priority_headers and other.file in priority_headers: return 1 - base = "_BASE" + base = '_BASE' if self.file == other.file: if self.name.endswith(base) and not(other.name.endswith(base)): @@ -117,7 +116,7 @@ class InputError(RuntimeError): Represents and error on the input """ def __init__(self, p, e): - super(InputError, self).__init__(p + ": " + e) + super(InputError, self).__init__(p + ': ' + e) def process(line, idf_path, include_as): @@ -125,20 +124,20 @@ def process(line, idf_path, include_as): Process a line of text from file idf_path (relative to IDF project). 
Fills the global list unproc_list and dictionaries err_dict, rev_err_dict """ - if idf_path.endswith(".c"): + if idf_path.endswith('.c'): # We would not try to include a C file - raise InputError(idf_path, "This line should be in a header file: %s" % line) + raise InputError(idf_path, 'This line should be in a header file: %s' % line) words = re.split(r' +', line, 2) # words[1] is the error name # words[2] is the rest of the line (value, base + value, comment) if len(words) < 3: - raise InputError(idf_path, "Error at line %s" % line) + raise InputError(idf_path, 'Error at line %s' % line) - line = "" + line = '' todo_str = words[2] - comment = "" + comment = '' # identify possible comment m = re.search(r'/\*!<(.+?(?=\*/))', todo_str) if m: @@ -170,7 +169,7 @@ def process(line, idf_path, include_as): related = todo_str # BASE error num = 0 # (BASE + 0) else: - raise InputError(idf_path, "Cannot parse line %s" % line) + raise InputError(idf_path, 'Cannot parse line %s' % line) try: related @@ -199,7 +198,7 @@ def process_remaining_errors(): err_dict[num].append(ErrItem(item.name, item.file, item.include_as, item.comment)) rev_err_dict[item.name] = num else: - print(item.rel_str + " referenced by " + item.name + " in " + item.file + " is unknown") + print(item.rel_str + ' referenced by ' + item.name + ' in ' + item.file + ' is unknown') del unproc_list[:] @@ -229,9 +228,9 @@ def print_warning(error_list, error_code): """ Print warning about errors with the same error code """ - print("[WARNING] The following errors have the same code (%d):" % error_code) + print('[WARNING] The following errors have the same code (%d):' % error_code) for e in error_list: - print(" " + str(e)) + print(' ' + str(e)) def max_string_width(): @@ -270,14 +269,14 @@ def generate_c_output(fin, fout): for line in fin: if re.match(r'@COMMENT@', line): - fout.write("//Do not edit this file because it is autogenerated by " + os.path.basename(__file__) + "\n") + fout.write('//Do not edit this file because it is autogenerated by ' + os.path.basename(__file__) + '\n') elif re.match(r'@HEADERS@', line): for i in include_list: if i not in dont_include: fout.write("#if __has_include(\"" + i + "\")\n#include \"" + i + "\"\n#endif\n") elif re.match(r'@ERROR_ITEMS@', line): - last_file = "" + last_file = '' for k in sorted(err_dict.keys()): if len(err_dict[k]) > 1: err_dict[k].sort(key=functools.cmp_to_key(ErrItem.__cmp__)) @@ -285,26 +284,26 @@ def generate_c_output(fin, fout): for e in err_dict[k]: if e.file != last_file: last_file = e.file - fout.write(" // %s\n" % last_file) - table_line = (" ERR_TBL_IT(" + e.name + "), ").ljust(max_width) + "/* " + str(k).rjust(max_decdig) - fout.write("# ifdef %s\n" % e.name) + fout.write(' // %s\n' % last_file) + table_line = (' ERR_TBL_IT(' + e.name + '), ').ljust(max_width) + '/* ' + str(k).rjust(max_decdig) + fout.write('# ifdef %s\n' % e.name) fout.write(table_line) hexnum_length = 0 if k > 0: # negative number and zero should be only ESP_FAIL and ESP_OK - hexnum = " 0x%x" % k + hexnum = ' 0x%x' % k hexnum_length = len(hexnum) fout.write(hexnum) - if e.comment != "": + if e.comment != '': if len(e.comment) < 50: - fout.write(" %s" % e.comment) + fout.write(' %s' % e.comment) else: - indent = " " * (len(table_line) + hexnum_length + 1) + indent = ' ' * (len(table_line) + hexnum_length + 1) w = textwrap.wrap(e.comment, width=120, initial_indent=indent, subsequent_indent=indent) # this couldn't be done with initial_indent because there is no initial_width option - fout.write(" %s" % 
w[0].strip()) + fout.write(' %s' % w[0].strip()) for i in range(1, len(w)): - fout.write("\n%s" % w[i]) - fout.write(" */\n# endif\n") + fout.write('\n%s' % w[i]) + fout.write(' */\n# endif\n') else: fout.write(line) @@ -359,7 +358,7 @@ def main(): except InputError as e: print(e) except UnicodeDecodeError: - raise ValueError("The encoding of {} is not Unicode.".format(path_in_idf)) + raise ValueError('The encoding of {} is not Unicode.'.format(path_in_idf)) process_remaining_errors() @@ -371,5 +370,5 @@ def main(): generate_c_output(fin, fout) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/idf.py b/tools/idf.py index c44520227b..0ef66e86ee 100755 --- a/tools/idf.py +++ b/tools/idf.py @@ -27,6 +27,7 @@ # any external libraries here - put in external script, or import in # their specific function instead. from __future__ import print_function + import codecs import json import locale @@ -43,18 +44,18 @@ from pkgutil import iter_modules sys.dont_write_bytecode = True from idf_py_actions.errors import FatalError # noqa: E402 -from idf_py_actions.tools import (executable_exists, idf_version, merge_action_lists, realpath) # noqa: E402 +from idf_py_actions.tools import executable_exists, idf_version, merge_action_lists, realpath # noqa: E402 # Use this Python interpreter for any subprocesses we launch PYTHON = sys.executable # note: os.environ changes don't automatically propagate to child processes, # you have to pass env=os.environ explicitly anywhere that we create a process -os.environ["PYTHON"] = sys.executable +os.environ['PYTHON'] = sys.executable # Name of the program, normally 'idf.py'. # Can be overridden from idf.bat using IDF_PY_PROGRAM_NAME -PROG = os.getenv("IDF_PY_PROGRAM_NAME", "idf.py") +PROG = os.getenv('IDF_PY_PROGRAM_NAME', 'idf.py') # function prints warning when autocompletion is not being performed @@ -73,37 +74,37 @@ def check_environment(): """ checks_output = [] - if not executable_exists(["cmake", "--version"]): + if not executable_exists(['cmake', '--version']): debug_print_idf_version() raise FatalError("'cmake' must be available on the PATH to use %s" % PROG) # verify that IDF_PATH env variable is set # find the directory idf.py is in, then the parent directory of this, and assume this is IDF_PATH - detected_idf_path = realpath(os.path.join(os.path.dirname(__file__), "..")) - if "IDF_PATH" in os.environ: - set_idf_path = realpath(os.environ["IDF_PATH"]) + detected_idf_path = realpath(os.path.join(os.path.dirname(__file__), '..')) + if 'IDF_PATH' in os.environ: + set_idf_path = realpath(os.environ['IDF_PATH']) if set_idf_path != detected_idf_path: print_warning( - "WARNING: IDF_PATH environment variable is set to %s but %s path indicates IDF directory %s. " - "Using the environment variable directory, but results may be unexpected..." % + 'WARNING: IDF_PATH environment variable is set to %s but %s path indicates IDF directory %s. ' + 'Using the environment variable directory, but results may be unexpected...' 
% (set_idf_path, PROG, detected_idf_path)) else: - print_warning("Setting IDF_PATH environment variable: %s" % detected_idf_path) - os.environ["IDF_PATH"] = detected_idf_path + print_warning('Setting IDF_PATH environment variable: %s' % detected_idf_path) + os.environ['IDF_PATH'] = detected_idf_path # check Python version if sys.version_info[0] < 3: - print_warning("WARNING: Support for Python 2 is deprecated and will be removed in future versions.") + print_warning('WARNING: Support for Python 2 is deprecated and will be removed in future versions.') elif sys.version_info[0] == 3 and sys.version_info[1] < 6: - print_warning("WARNING: Python 3 versions older than 3.6 are not supported.") + print_warning('WARNING: Python 3 versions older than 3.6 are not supported.') # check Python dependencies - checks_output.append("Checking Python dependencies...") + checks_output.append('Checking Python dependencies...') try: out = subprocess.check_output( [ - os.environ["PYTHON"], - os.path.join(os.environ["IDF_PATH"], "tools", "check_python_dependencies.py"), + os.environ['PYTHON'], + os.path.join(os.environ['IDF_PATH'], 'tools', 'check_python_dependencies.py'), ], env=os.environ, ) @@ -131,9 +132,9 @@ def _safe_relpath(path, start=None): def debug_print_idf_version(): version = idf_version() if version: - print_warning("ESP-IDF %s" % version) + print_warning('ESP-IDF %s' % version) else: - print_warning("ESP-IDF version unknown") + print_warning('ESP-IDF version unknown') class PropertyDict(dict): @@ -164,39 +165,39 @@ def init_cli(verbose_output=None): self.since = None self.removed = None self.exit_with_error = None - self.custom_message = "" + self.custom_message = '' if isinstance(deprecated, dict): - self.custom_message = deprecated.get("message", "") - self.since = deprecated.get("since", None) - self.removed = deprecated.get("removed", None) - self.exit_with_error = deprecated.get("exit_with_error", None) + self.custom_message = deprecated.get('message', '') + self.since = deprecated.get('since', None) + self.removed = deprecated.get('removed', None) + self.exit_with_error = deprecated.get('exit_with_error', None) elif isinstance(deprecated, str): self.custom_message = deprecated - def full_message(self, type="Option"): + def full_message(self, type='Option'): if self.exit_with_error: - return "%s is deprecated %sand was removed%s.%s" % ( + return '%s is deprecated %sand was removed%s.%s' % ( type, - "since %s " % self.since if self.since else "", - " in %s" % self.removed if self.removed else "", - " %s" % self.custom_message if self.custom_message else "", + 'since %s ' % self.since if self.since else '', + ' in %s' % self.removed if self.removed else '', + ' %s' % self.custom_message if self.custom_message else '', ) else: - return "%s is deprecated %sand will be removed in%s.%s" % ( + return '%s is deprecated %sand will be removed in%s.%s' % ( type, - "since %s " % self.since if self.since else "", - " %s" % self.removed if self.removed else " future versions", - " %s" % self.custom_message if self.custom_message else "", + 'since %s ' % self.since if self.since else '', + ' %s' % self.removed if self.removed else ' future versions', + ' %s' % self.custom_message if self.custom_message else '', ) - def help(self, text, type="Option", separator=" "): - text = text or "" + def help(self, text, type='Option', separator=' '): + text = text or '' return self.full_message(type) + separator + text if self.deprecated else text def short_help(self, text): - text = text or "" - return ("Deprecated! 
" + text) if self.deprecated else text + text = text or '' + return ('Deprecated! ' + text) if self.deprecated else text def check_deprecation(ctx): """Prints deprecation warnings for arguments in given context""" @@ -205,9 +206,9 @@ def init_cli(verbose_output=None): if isinstance(option, Option) and option.deprecated and ctx.params[option.name] != default: deprecation = Deprecation(option.deprecated) if deprecation.exit_with_error: - raise FatalError("Error: %s" % deprecation.full_message('Option "%s"' % option.name)) + raise FatalError('Error: %s' % deprecation.full_message('Option "%s"' % option.name)) else: - print_warning("Warning: %s" % deprecation.full_message('Option "%s"' % option.name)) + print_warning('Warning: %s' % deprecation.full_message('Option "%s"' % option.name)) class Task(object): def __init__(self, callback, name, aliases, dependencies, order_dependencies, action_args): @@ -246,7 +247,7 @@ def init_cli(verbose_output=None): self.help = self.help or self.callback.__doc__ if self.help is None: - self.help = "" + self.help = '' if dependencies is None: dependencies = [] @@ -255,19 +256,19 @@ def init_cli(verbose_output=None): order_dependencies = [] # Show first line of help if short help is missing - self.short_help = self.short_help or self.help.split("\n")[0] + self.short_help = self.short_help or self.help.split('\n')[0] if deprecated: deprecation = Deprecation(deprecated) self.short_help = deprecation.short_help(self.short_help) - self.help = deprecation.help(self.help, type="Command", separator="\n") + self.help = deprecation.help(self.help, type='Command', separator='\n') # Add aliases to help string if aliases: - aliases_help = "Aliases: %s." % ", ".join(aliases) + aliases_help = 'Aliases: %s.' % ', '.join(aliases) - self.help = "\n".join([self.help, aliases_help]) - self.short_help = " ".join([aliases_help, self.short_help]) + self.help = '\n'.join([self.help, aliases_help]) + self.short_help = ' '.join([aliases_help, self.short_help]) self.unwrapped_callback = self.callback if self.callback is not None: @@ -290,9 +291,9 @@ def init_cli(verbose_output=None): message = deprecation.full_message('Command "%s"' % self.name) if deprecation.exit_with_error: - raise FatalError("Error: %s" % message) + raise FatalError('Error: %s' % message) else: - print_warning("Warning: %s" % message) + print_warning('Warning: %s' % message) self.deprecated = False # disable Click's built-in deprecation handling @@ -307,7 +308,7 @@ def init_cli(verbose_output=None): names - alias of 'param_decls' """ def __init__(self, **kwargs): - names = kwargs.pop("names") + names = kwargs.pop('names') super(Argument, self).__init__(names, **kwargs) class Scope(object): @@ -319,25 +320,25 @@ def init_cli(verbose_output=None): - shared - Opposite to 'global': when defined in global scope, also available for all actions """ - SCOPES = ("default", "global", "shared") + SCOPES = ('default', 'global', 'shared') def __init__(self, scope=None): if scope is None: - self._scope = "default" + self._scope = 'default' elif isinstance(scope, str) and scope in self.SCOPES: self._scope = scope elif isinstance(scope, Scope): self._scope = str(scope) else: - raise FatalError("Unknown scope for option: %s" % scope) + raise FatalError('Unknown scope for option: %s' % scope) @property def is_global(self): - return self._scope == "global" + return self._scope == 'global' @property def is_shared(self): - return self._scope == "shared" + return self._scope == 'shared' def __str__(self): return self._scope @@ -356,7 
+357,7 @@ def init_cli(verbose_output=None): custom_message: Additional text to deprecation warning """ - kwargs["param_decls"] = kwargs.pop("names") + kwargs['param_decls'] = kwargs.pop('names') super(Option, self).__init__(**kwargs) self.deprecated = deprecated @@ -368,10 +369,10 @@ def init_cli(verbose_output=None): self.help = deprecation.help(self.help) if self.envvar: - self.help += " The default value can be set with the %s environment variable." % self.envvar + self.help += ' The default value can be set with the %s environment variable.' % self.envvar if self.scope.is_global: - self.help += " This option can be used at most once either globally, or for one subcommand." + self.help += ' This option can be used at most once either globally, or for one subcommand.' def get_help_record(self, ctx): # Backport "hidden" parameter to click 5.0 @@ -387,7 +388,7 @@ def init_cli(verbose_output=None): chain=True, invoke_without_command=True, result_callback=self.execute_tasks, - context_settings={"max_content_width": 140}, + context_settings={'max_content_width': 140}, help=help, ) self._actions = {} @@ -405,7 +406,7 @@ def init_cli(verbose_output=None): shared_options = [] # Global options - for option_args in all_actions.get("global_options", []): + for option_args in all_actions.get('global_options', []): option = Option(**option_args) self.params.append(option) @@ -413,12 +414,12 @@ def init_cli(verbose_output=None): shared_options.append(option) # Global options validators - self.global_action_callbacks = all_actions.get("global_action_callbacks", []) + self.global_action_callbacks = all_actions.get('global_action_callbacks', []) # Actions - for name, action in all_actions.get("actions", {}).items(): - arguments = action.pop("arguments", []) - options = action.pop("options", []) + for name, action in all_actions.get('actions', {}).items(): + arguments = action.pop('arguments', []) + options = action.pop('options', []) if arguments is None: arguments = [] @@ -427,7 +428,7 @@ def init_cli(verbose_output=None): options = [] self._actions[name] = Action(name=name, **action) - for alias in [name] + action.get("aliases", []): + for alias in [name] + action.get('aliases', []): self.commands_with_aliases[alias] = name for argument_args in arguments: @@ -465,72 +466,72 @@ def init_cli(verbose_output=None): def _print_closing_message(self, args, actions): # print a closing message of some kind # - if any(t in str(actions) for t in ("flash", "dfu", "uf2", "uf2-app")): - print("Done") + if any(t in str(actions) for t in ('flash', 'dfu', 'uf2', 'uf2-app')): + print('Done') return - if not os.path.exists(os.path.join(args.build_dir, "flasher_args.json")): - print("Done") + if not os.path.exists(os.path.join(args.build_dir, 'flasher_args.json')): + print('Done') return # Otherwise, if we built any binaries print a message about # how to flash them def print_flashing_message(title, key): - with open(os.path.join(args.build_dir, "flasher_args.json")) as f: + with open(os.path.join(args.build_dir, 'flasher_args.json')) as f: flasher_args = json.load(f) def flasher_path(f): return _safe_relpath(os.path.join(args.build_dir, f)) - if key != "project": # flashing a single item + if key != 'project': # flashing a single item if key not in flasher_args: # This is the case for 'idf.py bootloader' if Secure Boot is on, need to follow manual flashing steps - print("\n%s build complete." % title) + print('\n%s build complete.' 
% title) return - cmd = "" - if (key == "bootloader"): # bootloader needs --flash-mode, etc to be passed in - cmd = " ".join(flasher_args["write_flash_args"]) + " " + cmd = '' + if (key == 'bootloader'): # bootloader needs --flash-mode, etc to be passed in + cmd = ' '.join(flasher_args['write_flash_args']) + ' ' - cmd += flasher_args[key]["offset"] + " " - cmd += flasher_path(flasher_args[key]["file"]) + cmd += flasher_args[key]['offset'] + ' ' + cmd += flasher_path(flasher_args[key]['file']) else: # flashing the whole project - cmd = " ".join(flasher_args["write_flash_args"]) + " " + cmd = ' '.join(flasher_args['write_flash_args']) + ' ' flash_items = sorted( - ((o, f) for (o, f) in flasher_args["flash_files"].items() if len(o) > 0), + ((o, f) for (o, f) in flasher_args['flash_files'].items() if len(o) > 0), key=lambda x: int(x[0], 0), ) for o, f in flash_items: - cmd += o + " " + flasher_path(f) + " " + cmd += o + ' ' + flasher_path(f) + ' ' - print("\n%s build complete. To flash, run this command:" % title) + print('\n%s build complete. To flash, run this command:' % title) print( - "%s %s -p %s -b %s --before %s --after %s --chip %s %s write_flash %s" % ( + '%s %s -p %s -b %s --before %s --after %s --chip %s %s write_flash %s' % ( PYTHON, - _safe_relpath("%s/components/esptool_py/esptool/esptool.py" % os.environ["IDF_PATH"]), - args.port or "(PORT)", + _safe_relpath('%s/components/esptool_py/esptool/esptool.py' % os.environ['IDF_PATH']), + args.port or '(PORT)', args.baud, - flasher_args["extra_esptool_args"]["before"], - flasher_args["extra_esptool_args"]["after"], - flasher_args["extra_esptool_args"]["chip"], - "--no-stub" if not flasher_args["extra_esptool_args"]["stub"] else "", + flasher_args['extra_esptool_args']['before'], + flasher_args['extra_esptool_args']['after'], + flasher_args['extra_esptool_args']['chip'], + '--no-stub' if not flasher_args['extra_esptool_args']['stub'] else '', cmd.strip(), )) print( "or run 'idf.py -p %s %s'" % ( - args.port or "(PORT)", - key + "-flash" if key != "project" else "flash", + args.port or '(PORT)', + key + '-flash' if key != 'project' else 'flash', )) - if "all" in actions or "build" in actions: - print_flashing_message("Project", "project") + if 'all' in actions or 'build' in actions: + print_flashing_message('Project', 'project') else: - if "app" in actions: - print_flashing_message("App", "app") - if "partition_table" in actions: - print_flashing_message("Partition Table", "partition_table") - if "bootloader" in actions: - print_flashing_message("Bootloader", "bootloader") + if 'app' in actions: + print_flashing_message('App', 'app') + if 'partition_table' in actions: + print_flashing_message('Partition Table', 'partition_table') + if 'bootloader' in actions: + print_flashing_message('Bootloader', 'bootloader') def execute_tasks(self, tasks, **kwargs): ctx = click.get_current_context() @@ -544,12 +545,12 @@ def init_cli(verbose_output=None): dupplicated_tasks = sorted( [item for item, count in Counter(task.name for task in tasks).items() if count > 1]) if dupplicated_tasks: - dupes = ", ".join('"%s"' % t for t in dupplicated_tasks) + dupes = ', '.join('"%s"' % t for t in dupplicated_tasks) print_warning( - "WARNING: Command%s found in the list of commands more than once. " % - ("s %s are" % dupes if len(dupplicated_tasks) > 1 else " %s is" % dupes) + - "Only first occurrence will be executed.") + 'WARNING: Command%s found in the list of commands more than once. 
' % + ('s %s are' % dupes if len(dupplicated_tasks) > 1 else ' %s is' % dupes) + + 'Only first occurrence will be executed.') for task in tasks: # Show help and exit if help is in the list of commands @@ -569,7 +570,7 @@ def init_cli(verbose_output=None): if global_value != default and local_value != default and global_value != local_value: raise FatalError( 'Option "%s" provided for "%s" is already defined to a different value. ' - "This option can appear at most once in the command line." % (key, task.name)) + 'This option can appear at most once in the command line.' % (key, task.name)) if local_value != default: global_args[key] = local_value @@ -638,9 +639,9 @@ def init_cli(verbose_output=None): for task in tasks_to_run.values(): name_with_aliases = task.name if task.aliases: - name_with_aliases += " (aliases: %s)" % ", ".join(task.aliases) + name_with_aliases += ' (aliases: %s)' % ', '.join(task.aliases) - print("Executing action: %s" % name_with_aliases) + print('Executing action: %s' % name_with_aliases) task(ctx, global_args, task.action_args) self._print_closing_message(global_args, tasks_to_run.keys()) @@ -652,21 +653,21 @@ def init_cli(verbose_output=None): @click.command( add_help_option=False, context_settings={ - "allow_extra_args": True, - "ignore_unknown_options": True + 'allow_extra_args': True, + 'ignore_unknown_options': True }, ) - @click.option("-C", "--project-dir", default=os.getcwd(), type=click.Path()) + @click.option('-C', '--project-dir', default=os.getcwd(), type=click.Path()) def parse_project_dir(project_dir): return realpath(project_dir) # Set `complete_var` to not existing environment variable name to prevent early cmd completion - project_dir = parse_project_dir(standalone_mode=False, complete_var="_IDF.PY_COMPLETE_NOT_EXISTING") + project_dir = parse_project_dir(standalone_mode=False, complete_var='_IDF.PY_COMPLETE_NOT_EXISTING') all_actions = {} # Load extensions from components dir - idf_py_extensions_path = os.path.join(os.environ["IDF_PATH"], "tools", "idf_py_actions") + idf_py_extensions_path = os.path.join(os.environ['IDF_PATH'], 'tools', 'idf_py_actions') extension_dirs = [realpath(idf_py_extensions_path)] - extra_paths = os.environ.get("IDF_EXTRA_ACTIONS_PATH") + extra_paths = os.environ.get('IDF_EXTRA_ACTIONS_PATH') if extra_paths is not None: for path in extra_paths.split(';'): path = realpath(path) @@ -702,12 +703,12 @@ def init_cli(verbose_output=None): print_warning('WARNING: Cannot load idf.py extension "%s"' % name) # Load extensions from project dir - if os.path.exists(os.path.join(project_dir, "idf_ext.py")): + if os.path.exists(os.path.join(project_dir, 'idf_ext.py')): sys.path.append(project_dir) try: from idf_ext import action_extensions except ImportError: - print_warning("Error importing extension file idf_ext.py. Skipping.") + print_warning('Error importing extension file idf_ext.py. Skipping.') print_warning("Please make sure that it contains implementation (even if it's empty) of add_action_extensions") try: @@ -716,8 +717,8 @@ def init_cli(verbose_output=None): pass cli_help = ( - "ESP-IDF CLI build management tool. " - "For commands that are not known to idf.py an attempt to execute it as a build system target will be made.") + 'ESP-IDF CLI build management tool. 
' + 'For commands that are not known to idf.py an attempt to execute it as a build system target will be made.') return CLI(help=cli_help, verbose_output=verbose_output, all_actions=all_actions) @@ -726,7 +727,7 @@ def main(): checks_output = check_environment() cli = init_cli(verbose_output=checks_output) # the argument `prog_name` must contain name of the file - not the absolute path to it! - cli(sys.argv[1:], prog_name=PROG, complete_var="_IDF.PY_COMPLETE") + cli(sys.argv[1:], prog_name=PROG, complete_var='_IDF.PY_COMPLETE') def _valid_unicode_config(): @@ -736,73 +737,73 @@ def _valid_unicode_config(): # With python 3 unicode environment is required try: - return codecs.lookup(locale.getpreferredencoding()).name != "ascii" + return codecs.lookup(locale.getpreferredencoding()).name != 'ascii' except Exception: return False def _find_usable_locale(): try: - locales = subprocess.Popen(["locale", "-a"], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] + locales = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] except OSError: - locales = "" + locales = '' if isinstance(locales, bytes): - locales = locales.decode("ascii", "replace") + locales = locales.decode('ascii', 'replace') usable_locales = [] for line in locales.splitlines(): locale = line.strip() - locale_name = locale.lower().replace("-", "") + locale_name = locale.lower().replace('-', '') # C.UTF-8 is the best option, if supported - if locale_name == "c.utf8": + if locale_name == 'c.utf8': return locale - if locale_name.endswith(".utf8"): + if locale_name.endswith('.utf8'): # Make a preference of english locales - if locale.startswith("en_"): + if locale.startswith('en_'): usable_locales.insert(0, locale) else: usable_locales.append(locale) if not usable_locales: raise FatalError( - "Support for Unicode filenames is required, but no suitable UTF-8 locale was found on your system." - " Please refer to the manual for your operating system for details on locale reconfiguration.") + 'Support for Unicode filenames is required, but no suitable UTF-8 locale was found on your system.' + ' Please refer to the manual for your operating system for details on locale reconfiguration.') return usable_locales[0] -if __name__ == "__main__": +if __name__ == '__main__': try: # On MSYS2 we need to run idf.py with "winpty" in order to be able to cancel the subprocesses properly on # keyboard interrupt (CTRL+C). # Using an own global variable for indicating that we are running with "winpty" seems to be the most suitable # option as os.environment['_'] contains "winpty" only when it is run manually from console. 
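The comment block above motivates the MSYS2-specific branch that follows. Condensed to its core logic, the guard amounts to something like this sketch (the menuconfig special case and the `os.environ['_']` check are omitted for brevity):

```python
# Condensed sketch of the winpty guard implemented below: re-exec idf.py under
# winpty exactly once, marking the wrapped child via an environment variable.
import os
import subprocess
import sys

if 'MSYSTEM' in os.environ and 'WINPTY' not in os.environ:
    os.environ['WINPTY'] = '1'  # the value is of no interest, only its presence
    raise SystemExit(subprocess.call(['winpty', sys.executable] + sys.argv, env=os.environ))
```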
- WINPTY_VAR = "WINPTY" - WINPTY_EXE = "winpty" - if ("MSYSTEM" in os.environ) and (not os.environ.get("_", "").endswith(WINPTY_EXE) + WINPTY_VAR = 'WINPTY' + WINPTY_EXE = 'winpty' + if ('MSYSTEM' in os.environ) and (not os.environ.get('_', '').endswith(WINPTY_EXE) and WINPTY_VAR not in os.environ): if 'menuconfig' in sys.argv: # don't use winpty for menuconfig because it will print weird characters main() else: - os.environ[WINPTY_VAR] = "1" # the value is of no interest to us + os.environ[WINPTY_VAR] = '1' # the value is of no interest to us # idf.py calls itself with "winpty" and WINPTY global variable set ret = subprocess.call([WINPTY_EXE, sys.executable] + sys.argv, env=os.environ) if ret: raise SystemExit(ret) - elif os.name == "posix" and not _valid_unicode_config(): + elif os.name == 'posix' and not _valid_unicode_config(): # Trying to find best utf-8 locale available on the system and restart python with it best_locale = _find_usable_locale() print_warning( - "Your environment is not configured to handle unicode filenames outside of ASCII range." - " Environment variable LC_ALL is temporary set to %s for unicode support." % best_locale) + 'Your environment is not configured to handle unicode filenames outside of ASCII range.' + ' Environment variable LC_ALL is temporary set to %s for unicode support.' % best_locale) - os.environ["LC_ALL"] = best_locale + os.environ['LC_ALL'] = best_locale ret = subprocess.call([sys.executable] + sys.argv, env=os.environ) if ret: raise SystemExit(ret) diff --git a/tools/idf_monitor.py b/tools/idf_monitor.py index b4f31ce336..18b635e3d0 100755 --- a/tools/idf_monitor.py +++ b/tools/idf_monitor.py @@ -29,35 +29,36 @@ # # Originally released under BSD-3-Clause license. # -from __future__ import print_function, division -from __future__ import unicode_literals -from builtins import chr -from builtins import object -from builtins import bytes -import subprocess +from __future__ import division, print_function, unicode_literals + import argparse import codecs import datetime -import re import os +import re +import subprocess +from builtins import bytes, chr, object + try: import queue except ImportError: import Queue as queue -import shlex -import time -import sys -import serial -import serial.tools.list_ports -import serial.tools.miniterm as miniterm -import threading + import ctypes +import json +import shlex +import sys +import tempfile +import textwrap +import threading +import time import types from distutils.version import StrictVersion from io import open -import textwrap -import tempfile -import json + +import serial +import serial.tools.list_ports +import serial.tools.miniterm as miniterm try: import websocket @@ -97,7 +98,7 @@ ANSI_NORMAL = '\033[0m' def color_print(message, color, newline='\n'): """ Print a message to stderr with colored highlighting """ - sys.stderr.write("%s%s%s%s" % (color, message, ANSI_NORMAL, newline)) + sys.stderr.write('%s%s%s%s' % (color, message, ANSI_NORMAL, newline)) def yellow_print(message, newline='\n'): @@ -108,7 +109,7 @@ def red_print(message, newline='\n'): color_print(message, ANSI_RED, newline) -__version__ = "1.1" +__version__ = '1.1' # Tags for tuples in queues TAG_KEY = 0 @@ -119,14 +120,14 @@ TAG_CMD = 3 # regex matches an potential PC value (0x4xxxxxxx) MATCH_PCADDR = re.compile(r'0x4[0-9a-f]{7}', re.IGNORECASE) -DEFAULT_TOOLCHAIN_PREFIX = "xtensa-esp32-elf-" +DEFAULT_TOOLCHAIN_PREFIX = 'xtensa-esp32-elf-' -DEFAULT_PRINT_FILTER = "" +DEFAULT_PRINT_FILTER = '' # coredump related messages 
-COREDUMP_UART_START = b"================= CORE DUMP START =================" -COREDUMP_UART_END = b"================= CORE DUMP END =================" -COREDUMP_UART_PROMPT = b"Press Enter to print core dump to UART..." +COREDUMP_UART_START = b'================= CORE DUMP START =================' +COREDUMP_UART_END = b'================= CORE DUMP END =================' +COREDUMP_UART_PROMPT = b'Press Enter to print core dump to UART...' # coredump states COREDUMP_IDLE = 0 @@ -134,21 +135,21 @@ COREDUMP_READING = 1 COREDUMP_DONE = 2 # coredump decoding options -COREDUMP_DECODE_DISABLE = "disable" -COREDUMP_DECODE_INFO = "info" +COREDUMP_DECODE_DISABLE = 'disable' +COREDUMP_DECODE_INFO = 'info' # panic handler related messages -PANIC_START = r"Core \s*\d+ register dump:" -PANIC_END = b"ELF file SHA256:" -PANIC_STACK_DUMP = b"Stack memory:" +PANIC_START = r'Core \s*\d+ register dump:' +PANIC_END = b'ELF file SHA256:' +PANIC_STACK_DUMP = b'Stack memory:' # panic handler decoding states PANIC_IDLE = 0 PANIC_READING = 1 # panic handler decoding options -PANIC_DECODE_DISABLE = "disable" -PANIC_DECODE_BACKTRACE = "backtrace" +PANIC_DECODE_DISABLE = 'disable' +PANIC_DECODE_BACKTRACE = 'backtrace' class StoppableThread(object): @@ -266,11 +267,11 @@ class ConsoleReader(StoppableThread): class ConsoleParser(object): - def __init__(self, eol="CRLF"): + def __init__(self, eol='CRLF'): self.translate_eol = { - "CRLF": lambda c: c.replace("\n", "\r\n"), - "CR": lambda c: c.replace("\n", "\r"), - "LF": lambda c: c.replace("\r", "\n"), + 'CRLF': lambda c: c.replace('\n', '\r\n'), + 'CR': lambda c: c.replace('\n', '\r'), + 'LF': lambda c: c.replace('\r', '\n'), }[eol] self.menu_key = CTRL_T self.exit_key = CTRL_RBRACKET @@ -308,7 +309,7 @@ class ConsoleParser(object): elif c == CTRL_L: # Toggle saving output into file ret = (TAG_CMD, CMD_TOGGLE_LOGGING) elif c == CTRL_P: - yellow_print("Pause app (enter bootloader mode), press Ctrl-T Ctrl-R to restart") + yellow_print('Pause app (enter bootloader mode), press Ctrl-T Ctrl-R to restart') # to fast trigger pause without press menu key ret = (TAG_CMD, CMD_ENTER_BOOT) elif c in [CTRL_X, 'x', 'X']: # Exiting from within the menu @@ -448,7 +449,7 @@ class LineMatcher(object): self._re = re.compile(r'^(?:\033\[[01];?[0-9]+m?)?([EWIDV]) \([0-9]+\) ([^:]+): ') items = print_filter.split() if len(items) == 0: - self._dict["*"] = self.LEVEL_V # default is to print everything + self._dict['*'] = self.LEVEL_V # default is to print everything for f in items: s = f.split(r':') if len(s) == 1: @@ -472,13 +473,13 @@ class LineMatcher(object): lev = self.level[m.group(1)] if m.group(2) in self._dict: return self._dict[m.group(2)] >= lev - return self._dict.get("*", self.LEVEL_N) >= lev + return self._dict.get('*', self.LEVEL_N) >= lev except (KeyError, IndexError): # Regular line written with something else than ESP_LOG* # or an empty line. pass # We need something more than "*.N" for printing. - return self._dict.get("*", self.LEVEL_N) > self.LEVEL_N + return self._dict.get('*', self.LEVEL_N) > self.LEVEL_N class SerialStopException(Exception): @@ -497,8 +498,8 @@ class Monitor(object): Main difference is that all event processing happens in the main thread, not the worker threads. 
""" - def __init__(self, serial_instance, elf_file, print_filter, make="make", encrypted=False, - toolchain_prefix=DEFAULT_TOOLCHAIN_PREFIX, eol="CRLF", + def __init__(self, serial_instance, elf_file, print_filter, make='make', encrypted=False, + toolchain_prefix=DEFAULT_TOOLCHAIN_PREFIX, eol='CRLF', decode_coredumps=COREDUMP_DECODE_INFO, decode_panic=PANIC_DECODE_DISABLE, target=None, @@ -524,7 +525,7 @@ class Monitor(object): self.console.getkey = types.MethodType(getkey_patched, self.console) - socket_mode = serial_instance.port.startswith("socket://") # testing hook - data from serial can make exit the monitor + socket_mode = serial_instance.port.startswith('socket://') # testing hook - data from serial can make exit the monitor self.serial = serial_instance self.console_parser = ConsoleParser(eol) self.console_reader = ConsoleReader(self.console, self.event_queue, self.cmd_queue, self.console_parser, socket_mode) @@ -540,9 +541,9 @@ class Monitor(object): self.target = target # internal state - self._last_line_part = b"" - self._gdb_buffer = b"" - self._pc_address_buffer = b"" + self._last_line_part = b'' + self._gdb_buffer = b'' + self._pc_address_buffer = b'' self._line_matcher = LineMatcher(print_filter) self._invoke_processing_last_line_timer = None self._force_line_print = False @@ -551,10 +552,10 @@ class Monitor(object): self._log_file = None self._decode_coredumps = decode_coredumps self._reading_coredump = COREDUMP_IDLE - self._coredump_buffer = b"" + self._coredump_buffer = b'' self._decode_panic = decode_panic self._reading_panic = PANIC_IDLE - self._panic_buffer = b"" + self._panic_buffer = b'' def invoke_processing_last_line(self): self.event_queue.put((TAG_SERIAL_FLUSH, b''), False) @@ -596,9 +597,9 @@ class Monitor(object): elif event_tag == TAG_SERIAL_FLUSH: self.handle_serial_input(data, finalize_line=True) else: - raise RuntimeError("Bad event data %r" % ((event_tag,data),)) + raise RuntimeError('Bad event data %r' % ((event_tag,data),)) except SerialStopException: - sys.stderr.write(ANSI_NORMAL + "Stopping condition has been received\n") + sys.stderr.write(ANSI_NORMAL + 'Stopping condition has been received\n') finally: try: self.console_reader.stop() @@ -609,24 +610,24 @@ class Monitor(object): self._invoke_processing_last_line_timer = None except Exception: pass - sys.stderr.write(ANSI_NORMAL + "\n") + sys.stderr.write(ANSI_NORMAL + '\n') def handle_serial_input(self, data, finalize_line=False): sp = data.split(b'\n') - if self._last_line_part != b"": + if self._last_line_part != b'': # add unprocessed part from previous "data" to the first line sp[0] = self._last_line_part + sp[0] - self._last_line_part = b"" - if sp[-1] != b"": + self._last_line_part = b'' + if sp[-1] != b'': # last part is not a full line self._last_line_part = sp.pop() for line in sp: - if line != b"": + if line != b'': if self._serial_check_exit and line == self.console_parser.exit_key.encode('latin-1'): raise SerialStopException() self.check_panic_decode_trigger(line) self.check_coredump_trigger_before_print(line) - if self._force_line_print or self._line_matcher.match(line.decode(errors="ignore")): + if self._force_line_print or self._line_matcher.match(line.decode(errors='ignore')): self._print(line + b'\n') self.handle_possible_pc_address_in_line(line) self.check_coredump_trigger_after_print(line) @@ -636,8 +637,8 @@ class Monitor(object): # default we don't touch it and just wait for the arrival of the rest # of the line. 
But after some time when we didn't received it we need # to make a decision. - if self._last_line_part != b"": - if self._force_line_print or (finalize_line and self._line_matcher.match(self._last_line_part.decode(errors="ignore"))): + if self._last_line_part != b'': + if self._force_line_print or (finalize_line and self._line_matcher.match(self._last_line_part.decode(errors='ignore'))): self._force_line_print = True self._print(self._last_line_part) self.handle_possible_pc_address_in_line(self._last_line_part) @@ -651,15 +652,15 @@ class Monitor(object): # GDB sequence can be cut in half also. GDB sequence is 7 # characters long, therefore, we save the last 6 characters. self._gdb_buffer = self._last_line_part[-6:] - self._last_line_part = b"" + self._last_line_part = b'' # else: keeping _last_line_part and it will be processed the next time # handle_serial_input is invoked def handle_possible_pc_address_in_line(self, line): line = self._pc_address_buffer + line - self._pc_address_buffer = b"" + self._pc_address_buffer = b'' if self.enable_address_decoding: - for m in re.finditer(MATCH_PCADDR, line.decode(errors="ignore")): + for m in re.finditer(MATCH_PCADDR, line.decode(errors='ignore')): self.lookup_pc_address(m.group()) def __enter__(self): @@ -675,7 +676,7 @@ class Monitor(object): def prompt_next_action(self, reason): self.console.setup() # set up console to trap input characters try: - red_print("--- {}".format(reason)) + red_print('--- {}'.format(reason)) red_print(self.console_parser.get_next_action_text()) k = CTRL_T # ignore CTRL-T here, so people can muscle-memory Ctrl-T Ctrl-F, etc. @@ -698,31 +699,31 @@ class Monitor(object): popen_args = self.make + [target] else: popen_args = [self.make, target] - yellow_print("Running %s..." % " ".join(popen_args)) + yellow_print('Running %s...' % ' '.join(popen_args)) p = subprocess.Popen(popen_args, env=os.environ) try: p.wait() except KeyboardInterrupt: p.wait() if p.returncode != 0: - self.prompt_next_action("Build failed") + self.prompt_next_action('Build failed') else: self.output_enable(True) def lookup_pc_address(self, pc_addr): - cmd = ["%saddr2line" % self.toolchain_prefix, - "-pfiaC", "-e", self.elf_file, pc_addr] + cmd = ['%saddr2line' % self.toolchain_prefix, + '-pfiaC', '-e', self.elf_file, pc_addr] try: - translation = subprocess.check_output(cmd, cwd=".") - if b"?? ??:0" not in translation: + translation = subprocess.check_output(cmd, cwd='.') + if b'?? ??:0' not in translation: self._print(translation.decode(), console_printer=yellow_print) except OSError as e: - red_print("%s: %s" % (" ".join(cmd), e)) + red_print('%s: %s' % (' '.join(cmd), e)) def check_gdbstub_trigger(self, line): line = self._gdb_buffer + line - self._gdb_buffer = b"" - m = re.search(b"\\$(T..)#(..)", line) # look for a gdb "reason" for a break + self._gdb_buffer = b'' + m = re.search(b'\\$(T..)#(..)', line) # look for a gdb "reason" for a break if m is not None: try: chsum = sum(ord(bytes([p])) for p in m.group(1)) & 0xFF @@ -741,27 +742,27 @@ class Monitor(object): else: self.run_gdb() else: - red_print("Malformed gdb message... calculated checksum %02x received %02x" % (chsum, calc_chsum)) + red_print('Malformed gdb message... 
calculated checksum %02x received %02x' % (chsum, calc_chsum)) def check_coredump_trigger_before_print(self, line): if self._decode_coredumps == COREDUMP_DECODE_DISABLE: return if COREDUMP_UART_PROMPT in line: - yellow_print("Initiating core dump!") + yellow_print('Initiating core dump!') self.event_queue.put((TAG_KEY, '\n')) return if COREDUMP_UART_START in line: - yellow_print("Core dump started (further output muted)") + yellow_print('Core dump started (further output muted)') self._reading_coredump = COREDUMP_READING - self._coredump_buffer = b"" + self._coredump_buffer = b'' self._output_enabled = False return if COREDUMP_UART_END in line: self._reading_coredump = COREDUMP_DONE - yellow_print("\nCore dump finished!") + yellow_print('\nCore dump finished!') self.process_coredump() return @@ -771,7 +772,7 @@ class Monitor(object): self._coredump_buffer += line.replace(b'\r', b'') + b'\n' new_buffer_len_kb = len(self._coredump_buffer) // kb if new_buffer_len_kb > buffer_len_kb: - yellow_print("Received %3d kB..." % (new_buffer_len_kb), newline='\r') + yellow_print('Received %3d kB...' % (new_buffer_len_kb), newline='\r') def check_coredump_trigger_after_print(self, line): if self._decode_coredumps == COREDUMP_DECODE_DISABLE: @@ -781,18 +782,18 @@ class Monitor(object): if not self._output_enabled and self._reading_coredump == COREDUMP_DONE: self._reading_coredump = COREDUMP_IDLE self._output_enabled = True - self._coredump_buffer = b"" + self._coredump_buffer = b'' def process_coredump(self): if self._decode_coredumps != COREDUMP_DECODE_INFO: - raise NotImplementedError("process_coredump: %s not implemented" % self._decode_coredumps) + raise NotImplementedError('process_coredump: %s not implemented' % self._decode_coredumps) - coredump_script = os.path.join(os.path.dirname(__file__), "..", "components", "espcoredump", "espcoredump.py") + coredump_script = os.path.join(os.path.dirname(__file__), '..', 'components', 'espcoredump', 'espcoredump.py') coredump_file = None try: # On Windows, the temporary file can't be read unless it is closed. # Set delete=False and delete the file manually later. 
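The two comments above describe the Windows-safe temporary-file pattern used by the `with` statement that follows; isolated from the monitor, the pattern looks roughly like this (the espcoredump script name and arguments are taken from the hunk below, while the ELF path and buffer contents are illustrative):

```python
# Create the temp file with delete=False, close it so another process can read
# it on Windows, then unlink it manually once the helper tool has consumed it.
import os
import subprocess
import sys
import tempfile

buf = b'...'  # e.g. the accumulated base64-encoded core dump
tmp = tempfile.NamedTemporaryFile(mode='wb', delete=False)
try:
    tmp.write(buf)
    tmp.close()
    subprocess.check_output([sys.executable, 'espcoredump.py', 'info_corefile',
                             '--core', tmp.name, '--core-format', 'b64', 'app.elf'],
                            stderr=subprocess.STDOUT)
finally:
    os.unlink(tmp.name)
```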
- with tempfile.NamedTemporaryFile(mode="wb", delete=False) as coredump_file: + with tempfile.NamedTemporaryFile(mode='wb', delete=False) as coredump_file: coredump_file.write(self._coredump_buffer) coredump_file.flush() @@ -808,9 +809,9 @@ class Monitor(object): else: cmd = [sys.executable, coredump_script, - "info_corefile", - "--core", coredump_file.name, - "--core-format", "b64", + 'info_corefile', + '--core', coredump_file.name, + '--core-format', 'b64', self.elf_file ] output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) @@ -818,7 +819,7 @@ class Monitor(object): self._print(output) self._output_enabled = False # Will be reenabled in check_coredump_trigger_after_print except subprocess.CalledProcessError as e: - yellow_print("Failed to run espcoredump script: {}\n{}\n\n".format(e, e.output)) + yellow_print('Failed to run espcoredump script: {}\n{}\n\n'.format(e, e.output)) self._output_enabled = True self._print(COREDUMP_UART_START + b'\n') self._print(self._coredump_buffer) @@ -834,9 +835,9 @@ class Monitor(object): if self._decode_panic == PANIC_DECODE_DISABLE: return - if self._reading_panic == PANIC_IDLE and re.search(PANIC_START, line.decode("ascii", errors='ignore')): + if self._reading_panic == PANIC_IDLE and re.search(PANIC_START, line.decode('ascii', errors='ignore')): self._reading_panic = PANIC_READING - yellow_print("Stack dump detected") + yellow_print('Stack dump detected') if self._reading_panic == PANIC_READING and PANIC_STACK_DUMP in line: self._output_enabled = False @@ -848,33 +849,33 @@ class Monitor(object): self._reading_panic = PANIC_IDLE self._output_enabled = True self.process_panic_output(self._panic_buffer) - self._panic_buffer = b"" + self._panic_buffer = b'' def process_panic_output(self, panic_output): - panic_output_decode_script = os.path.join(os.path.dirname(__file__), "..", "tools", "gdb_panic_server.py") + panic_output_decode_script = os.path.join(os.path.dirname(__file__), '..', 'tools', 'gdb_panic_server.py') panic_output_file = None try: # On Windows, the temporary file can't be read unless it is closed. # Set delete=False and delete the file manually later. 
- with tempfile.NamedTemporaryFile(mode="wb", delete=False) as panic_output_file: + with tempfile.NamedTemporaryFile(mode='wb', delete=False) as panic_output_file: panic_output_file.write(panic_output) panic_output_file.flush() - cmd = [self.toolchain_prefix + "gdb", - "--batch", "-n", + cmd = [self.toolchain_prefix + 'gdb', + '--batch', '-n', self.elf_file, - "-ex", "target remote | \"{python}\" \"{script}\" --target {target} \"{output_file}\"" + '-ex', "target remote | \"{python}\" \"{script}\" --target {target} \"{output_file}\"" .format(python=sys.executable, script=panic_output_decode_script, target=self.target, output_file=panic_output_file.name), - "-ex", "bt"] + '-ex', 'bt'] output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) - yellow_print("\nBacktrace:\n\n") + yellow_print('\nBacktrace:\n\n') self._print(output) except subprocess.CalledProcessError as e: - yellow_print("Failed to run gdb_panic_server.py script: {}\n{}\n\n".format(e, e.output)) + yellow_print('Failed to run gdb_panic_server.py script: {}\n{}\n\n'.format(e, e.output)) self._print(panic_output) finally: if panic_output_file is not None: @@ -887,15 +888,15 @@ class Monitor(object): with self: # disable console control sys.stderr.write(ANSI_NORMAL) try: - cmd = ["%sgdb" % self.toolchain_prefix, - "-ex", "set serial baud %d" % self.serial.baudrate, - "-ex", "target remote %s" % self.serial.port, - "-ex", "interrupt", # monitor has already parsed the first 'reason' command, need a second + cmd = ['%sgdb' % self.toolchain_prefix, + '-ex', 'set serial baud %d' % self.serial.baudrate, + '-ex', 'target remote %s' % self.serial.port, + '-ex', 'interrupt', # monitor has already parsed the first 'reason' command, need a second self.elf_file] - process = subprocess.Popen(cmd, cwd=".") + process = subprocess.Popen(cmd, cwd='.') process.wait() except OSError as e: - red_print("%s: %s" % (" ".join(cmd), e)) + red_print('%s: %s' % (' '.join(cmd), e)) except KeyboardInterrupt: pass # happens on Windows, maybe other OSes finally: @@ -906,17 +907,17 @@ class Monitor(object): pass try: # also on Linux, maybe other OSes, gdb sometimes exits uncleanly and breaks the tty mode - subprocess.call(["stty", "sane"]) + subprocess.call(['stty', 'sane']) except Exception: pass # don't care if there's no stty, we tried... 
- self.prompt_next_action("gdb exited") + self.prompt_next_action('gdb exited') def output_enable(self, enable): self._output_enabled = enable def output_toggle(self): self._output_enabled = not self._output_enabled - yellow_print("\nToggle output display: {}, Type Ctrl-T Ctrl-Y to show/disable output again.".format(self._output_enabled)) + yellow_print('\nToggle output display: {}, Type Ctrl-T Ctrl-Y to show/disable output again.'.format(self._output_enabled)) def toggle_logging(self): if self._log_file: @@ -927,21 +928,21 @@ class Monitor(object): def start_logging(self): if not self._log_file: try: - name = "log.{}.{}.txt".format(os.path.splitext(os.path.basename(self.elf_file))[0], + name = 'log.{}.{}.txt'.format(os.path.splitext(os.path.basename(self.elf_file))[0], datetime.datetime.now().strftime('%Y%m%d%H%M%S')) - self._log_file = open(name, "wb+") - yellow_print("\nLogging is enabled into file {}".format(name)) + self._log_file = open(name, 'wb+') + yellow_print('\nLogging is enabled into file {}'.format(name)) except Exception as e: - red_print("\nLog file {} cannot be created: {}".format(name, e)) + red_print('\nLog file {} cannot be created: {}'.format(name, e)) def stop_logging(self): if self._log_file: try: name = self._log_file.name self._log_file.close() - yellow_print("\nLogging is disabled and file {} has been closed".format(name)) + yellow_print('\nLogging is disabled and file {} has been closed'.format(name)) except Exception as e: - red_print("\nLog file cannot be closed: {}".format(e)) + red_print('\nLog file cannot be closed: {}'.format(e)) finally: self._log_file = None @@ -956,7 +957,7 @@ class Monitor(object): string = string.encode() self._log_file.write(string) except Exception as e: - red_print("\nCannot write to file: {}".format(e)) + red_print('\nCannot write to file: {}'.format(e)) # don't fill-up the screen with the previous errors (probably consequent prints would fail also) self.stop_logging() @@ -972,9 +973,9 @@ class Monitor(object): self.serial.setDTR(self.serial.dtr) # usbser.sys workaround self.output_enable(True) elif cmd == CMD_MAKE: - self.run_make("encrypted-flash" if self.encrypted else "flash") + self.run_make('encrypted-flash' if self.encrypted else 'flash') elif cmd == CMD_APP_FLASH: - self.run_make("encrypted-app-flash" if self.encrypted else "app-flash") + self.run_make('encrypted-app-flash' if self.encrypted else 'app-flash') elif cmd == CMD_OUTPUT_TOGGLE: self.output_toggle() elif cmd == CMD_TOGGLE_LOGGING: @@ -990,11 +991,11 @@ class Monitor(object): time.sleep(0.45) # timeouts taken from esptool.py, includes esp32r0 workaround. 
defaults: 0.05 self.serial.setDTR(False) # IO0=HIGH, done else: - raise RuntimeError("Bad command data %d" % (cmd)) + raise RuntimeError('Bad command data %d' % (cmd)) def main(): - parser = argparse.ArgumentParser("idf_monitor - a serial output monitor for esp-idf") + parser = argparse.ArgumentParser('idf_monitor - a serial output monitor for esp-idf') parser.add_argument( '--port', '-p', @@ -1005,8 +1006,8 @@ def main(): parser.add_argument( '--disable-address-decoding', '-d', help="Don't print lines about decoded addresses from the application ELF file.", - action="store_true", - default=True if os.environ.get("ESP_MONITOR_DECODE") == 0 else False + action='store_true', + default=True if os.environ.get('ESP_MONITOR_DECODE') == 0 else False ) parser.add_argument( @@ -1027,14 +1028,14 @@ def main(): parser.add_argument( '--toolchain-prefix', - help="Triplet prefix to add before cross-toolchain names", + help='Triplet prefix to add before cross-toolchain names', default=DEFAULT_TOOLCHAIN_PREFIX) parser.add_argument( - "--eol", + '--eol', choices=['CR', 'LF', 'CRLF'], type=lambda c: c.upper(), - help="End of line to use when sending to the serial port", + help='End of line to use when sending to the serial port', default='CR') parser.add_argument( @@ -1043,47 +1044,47 @@ def main(): parser.add_argument( '--print_filter', - help="Filtering string", + help='Filtering string', default=DEFAULT_PRINT_FILTER) parser.add_argument( '--decode-coredumps', choices=[COREDUMP_DECODE_INFO, COREDUMP_DECODE_DISABLE], default=COREDUMP_DECODE_INFO, - help="Handling of core dumps found in serial output" + help='Handling of core dumps found in serial output' ) parser.add_argument( '--decode-panic', choices=[PANIC_DECODE_BACKTRACE, PANIC_DECODE_DISABLE], default=PANIC_DECODE_DISABLE, - help="Handling of panic handler info found in serial output" + help='Handling of panic handler info found in serial output' ) parser.add_argument( '--target', required=False, - help="Target name (used when stack dump decoding is enabled)" + help='Target name (used when stack dump decoding is enabled)' ) parser.add_argument( '--ws', default=os.environ.get('ESP_IDF_MONITOR_WS', None), - help="WebSocket URL for communicating with IDE tools for debugging purposes" + help='WebSocket URL for communicating with IDE tools for debugging purposes' ) args = parser.parse_args() # GDB uses CreateFile to open COM port, which requires the COM name to be r'\\.\COMx' if the COM # number is larger than 10 - if os.name == 'nt' and args.port.startswith("COM"): + if os.name == 'nt' and args.port.startswith('COM'): args.port = args.port.replace('COM', r'\\.\COM') - yellow_print("--- WARNING: GDB cannot open serial ports accessed as COMx") - yellow_print("--- Using %s instead..." % args.port) - elif args.port.startswith("/dev/tty.") and sys.platform == 'darwin': - args.port = args.port.replace("/dev/tty.", "/dev/cu.") - yellow_print("--- WARNING: Serial ports accessed as /dev/tty.* will hang gdb if launched.") - yellow_print("--- Using %s instead..." % args.port) + yellow_print('--- WARNING: GDB cannot open serial ports accessed as COMx') + yellow_print('--- Using %s instead...' % args.port) + elif args.port.startswith('/dev/tty.') and sys.platform == 'darwin': + args.port = args.port.replace('/dev/tty.', '/dev/cu.') + yellow_print('--- WARNING: Serial ports accessed as /dev/tty.* will hang gdb if launched.') + yellow_print('--- Using %s instead...' 
% args.port) serial_instance = serial.serial_for_url(args.port, args.baud, do_not_open=True) @@ -1097,16 +1098,16 @@ def main(): # all of the child makes we need (the -j argument remains part of # MAKEFLAGS) try: - makeflags = os.environ["MAKEFLAGS"] - makeflags = re.sub(r"--jobserver[^ =]*=[0-9,]+ ?", "", makeflags) - os.environ["MAKEFLAGS"] = makeflags + makeflags = os.environ['MAKEFLAGS'] + makeflags = re.sub(r'--jobserver[^ =]*=[0-9,]+ ?', '', makeflags) + os.environ['MAKEFLAGS'] = makeflags except KeyError: pass # not running a make jobserver # Pass the actual used port to callee of idf_monitor (e.g. make) through `ESPPORT` environment # variable # To make sure the key as well as the value are str type, by the requirements of subprocess - espport_key = str("ESPPORT") + espport_key = str('ESPPORT') espport_val = str(args.port) os.environ.update({espport_key: espport_val}) @@ -1310,5 +1311,5 @@ if os.name == 'nt': pass -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/idf_py_actions/constants.py b/tools/idf_py_actions/constants.py index d2c11999fe..19a26475e8 100644 --- a/tools/idf_py_actions/constants.py +++ b/tools/idf_py_actions/constants.py @@ -4,38 +4,38 @@ import os import platform # Make flavors, across the various kinds of Windows environments & POSIX... -if "MSYSTEM" in os.environ: # MSYS - MAKE_CMD = "make" - MAKE_GENERATOR = "MSYS Makefiles" -elif os.name == "nt": # other Windows - MAKE_CMD = "mingw32-make" - MAKE_GENERATOR = "MinGW Makefiles" -elif platform.system() == "FreeBSD": - MAKE_CMD = "gmake" - MAKE_GENERATOR = "Unix Makefiles" +if 'MSYSTEM' in os.environ: # MSYS + MAKE_CMD = 'make' + MAKE_GENERATOR = 'MSYS Makefiles' +elif os.name == 'nt': # other Windows + MAKE_CMD = 'mingw32-make' + MAKE_GENERATOR = 'MinGW Makefiles' +elif platform.system() == 'FreeBSD': + MAKE_CMD = 'gmake' + MAKE_GENERATOR = 'Unix Makefiles' else: - MAKE_CMD = "make" - MAKE_GENERATOR = "Unix Makefiles" + MAKE_CMD = 'make' + MAKE_GENERATOR = 'Unix Makefiles' GENERATORS = collections.OrderedDict([ # - command: build command line # - version: version command line # - dry_run: command to run in dry run mode # - verbose_flag: verbose flag - ("Ninja", { - "command": ["ninja"], - "version": ["ninja", "--version"], - "dry_run": ["ninja", "-n"], - "verbose_flag": "-v" + ('Ninja', { + 'command': ['ninja'], + 'version': ['ninja', '--version'], + 'dry_run': ['ninja', '-n'], + 'verbose_flag': '-v' }), (MAKE_GENERATOR, { - "command": [MAKE_CMD, "-j", str(multiprocessing.cpu_count() + 2)], - "version": [MAKE_CMD, "--version"], - "dry_run": [MAKE_CMD, "-n"], - "verbose_flag": "VERBOSE=1", + 'command': [MAKE_CMD, '-j', str(multiprocessing.cpu_count() + 2)], + 'version': [MAKE_CMD, '--version'], + 'dry_run': [MAKE_CMD, '-n'], + 'verbose_flag': 'VERBOSE=1', }) ]) -SUPPORTED_TARGETS = ["esp32", "esp32s2"] +SUPPORTED_TARGETS = ['esp32', 'esp32s2'] -PREVIEW_TARGETS = ["esp32s3", "esp32c3", "linux"] +PREVIEW_TARGETS = ['esp32s3', 'esp32c3', 'linux'] diff --git a/tools/idf_py_actions/core_ext.py b/tools/idf_py_actions/core_ext.py index a8bb28b9a8..4d294ca35d 100644 --- a/tools/idf_py_actions/core_ext.py +++ b/tools/idf_py_actions/core_ext.py @@ -5,11 +5,11 @@ import subprocess import sys import click - -from idf_py_actions.constants import GENERATORS, SUPPORTED_TARGETS, PREVIEW_TARGETS +from idf_py_actions.constants import GENERATORS, PREVIEW_TARGETS, SUPPORTED_TARGETS from idf_py_actions.errors import FatalError from idf_py_actions.global_options import global_options -from 
idf_py_actions.tools import ensure_build_directory, idf_version, merge_action_lists, realpath, run_target, TargetChoice +from idf_py_actions.tools import (TargetChoice, ensure_build_directory, idf_version, merge_action_lists, realpath, + run_target) def action_extensions(base_actions, project_path): @@ -46,7 +46,7 @@ def action_extensions(base_actions, project_path): ensure_build_directory(args, ctx.info_name) try: - subprocess.check_output(GENERATORS[args.generator]["dry_run"] + [target_name], cwd=args.build_dir) + subprocess.check_output(GENERATORS[args.generator]['dry_run'] + [target_name], cwd=args.build_dir) except Exception: raise FatalError( @@ -67,21 +67,21 @@ def action_extensions(base_actions, project_path): if not os.path.isdir(args.build_dir): print("Build directory '%s' not found. Nothing to clean." % args.build_dir) return - build_target("clean", ctx, args) + build_target('clean', ctx, args) def _delete_windows_symlinks(directory): """ It deletes symlinks recursively on Windows. It is useful for Python 2 which doesn't detect symlinks on Windows. """ deleted_paths = [] - if os.name == "nt": + if os.name == 'nt': import ctypes for root, dirnames, _filenames in os.walk(directory): for d in dirnames: full_path = os.path.join(root, d) try: - full_path = full_path.decode("utf-8") + full_path = full_path.decode('utf-8') except Exception: pass if ctypes.windll.kernel32.GetFileAttributesW(full_path) & 0x0400: @@ -98,11 +98,11 @@ def action_extensions(base_actions, project_path): print("Build directory '%s' is empty. Nothing to clean." % build_dir) return - if not os.path.exists(os.path.join(build_dir, "CMakeCache.txt")): + if not os.path.exists(os.path.join(build_dir, 'CMakeCache.txt')): raise FatalError( "Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically " "delete files in this directory. Delete the directory manually to 'clean' it." % build_dir) - red_flags = ["CMakeLists.txt", ".git", ".svn"] + red_flags = ['CMakeLists.txt', '.git', '.svn'] for red in red_flags: red = os.path.join(build_dir, red) if os.path.exists(red): @@ -115,43 +115,43 @@ def action_extensions(base_actions, project_path): # outside of this directory. 
deleted_symlinks = _delete_windows_symlinks(build_dir) if args.verbose and len(deleted_symlinks) > 1: - print("The following symlinks were identified and removed:\n%s" % "\n".join(deleted_symlinks)) + print('The following symlinks were identified and removed:\n%s' % '\n'.join(deleted_symlinks)) for f in os.listdir(build_dir): # TODO: once we are Python 3 only, this can be os.scandir() f = os.path.join(build_dir, f) if args.verbose: - print("Removing: %s" % f) + print('Removing: %s' % f) if os.path.isdir(f): shutil.rmtree(f) else: os.remove(f) def python_clean(action, ctx, args): - for root, dirnames, filenames in os.walk(os.environ["IDF_PATH"]): + for root, dirnames, filenames in os.walk(os.environ['IDF_PATH']): for d in dirnames: - if d == "__pycache__": + if d == '__pycache__': dir_to_delete = os.path.join(root, d) if args.verbose: - print("Removing: %s" % dir_to_delete) + print('Removing: %s' % dir_to_delete) shutil.rmtree(dir_to_delete) for filename in fnmatch.filter(filenames, '*.py[co]'): file_to_delete = os.path.join(root, filename) if args.verbose: - print("Removing: %s" % file_to_delete) + print('Removing: %s' % file_to_delete) os.remove(file_to_delete) def set_target(action, ctx, args, idf_target): - if (not args["preview"] and idf_target in PREVIEW_TARGETS): + if (not args['preview'] and idf_target in PREVIEW_TARGETS): raise FatalError( "%s is still in preview. You have to append '--preview' option after idf.py to use any preview feature." % idf_target) - args.define_cache_entry.append("IDF_TARGET=" + idf_target) + args.define_cache_entry.append('IDF_TARGET=' + idf_target) sdkconfig_path = os.path.join(args.project_dir, 'sdkconfig') - sdkconfig_old = sdkconfig_path + ".old" + sdkconfig_old = sdkconfig_path + '.old' if os.path.exists(sdkconfig_old): os.remove(sdkconfig_old) if os.path.exists(sdkconfig_path): os.rename(sdkconfig_path, sdkconfig_old) - print("Set Target to: %s, new sdkconfig created. Existing sdkconfig renamed to sdkconfig.old." % idf_target) + print('Set Target to: %s, new sdkconfig created. Existing sdkconfig renamed to sdkconfig.old.' % idf_target) ensure_build_directory(args, ctx.info_name, True) def reconfigure(action, ctx, args): @@ -161,10 +161,10 @@ def action_extensions(base_actions, project_path): args.project_dir = realpath(args.project_dir) if args.build_dir is not None and args.project_dir == realpath(args.build_dir): raise FatalError( - "Setting the build directory to the project directory is not supported. Suggest dropping " + 'Setting the build directory to the project directory is not supported. 
Suggest dropping ' "--build-dir option, the default is a 'build' subdirectory inside the project directory.") if args.build_dir is None: - args.build_dir = os.path.join(args.project_dir, "build") + args.build_dir = os.path.join(args.project_dir, 'build') args.build_dir = realpath(args.build_dir) def idf_version_callback(ctx, param, value): @@ -174,9 +174,9 @@ def action_extensions(base_actions, project_path): version = idf_version() if not version: - raise FatalError("ESP-IDF version cannot be determined") + raise FatalError('ESP-IDF version cannot be determined') - print("ESP-IDF %s" % version) + print('ESP-IDF %s' % version) sys.exit(0) def list_targets_callback(ctx, param, value): @@ -186,272 +186,272 @@ def action_extensions(base_actions, project_path): for target in SUPPORTED_TARGETS: print(target) - if "preview" in ctx.params: + if 'preview' in ctx.params: for target in PREVIEW_TARGETS: print(target) sys.exit(0) root_options = { - "global_options": [ + 'global_options': [ { - "names": ["--version"], - "help": "Show IDF version and exit.", - "is_flag": True, - "expose_value": False, - "callback": idf_version_callback, + 'names': ['--version'], + 'help': 'Show IDF version and exit.', + 'is_flag': True, + 'expose_value': False, + 'callback': idf_version_callback, }, { - "names": ["--list-targets"], - "help": "Print list of supported targets and exit.", - "is_flag": True, - "expose_value": False, - "callback": list_targets_callback, + 'names': ['--list-targets'], + 'help': 'Print list of supported targets and exit.', + 'is_flag': True, + 'expose_value': False, + 'callback': list_targets_callback, }, { - "names": ["-C", "--project-dir"], - "scope": "shared", - "help": "Project directory.", - "type": click.Path(), - "default": os.getcwd(), + 'names': ['-C', '--project-dir'], + 'scope': 'shared', + 'help': 'Project directory.', + 'type': click.Path(), + 'default': os.getcwd(), }, { - "names": ["-B", "--build-dir"], - "help": "Build directory.", - "type": click.Path(), - "default": None, + 'names': ['-B', '--build-dir'], + 'help': 'Build directory.', + 'type': click.Path(), + 'default': None, }, { - "names": ["-w/-n", "--cmake-warn-uninitialized/--no-warnings"], - "help": ("Enable CMake uninitialized variable warnings for CMake files inside the project directory. " + 'names': ['-w/-n', '--cmake-warn-uninitialized/--no-warnings'], + 'help': ('Enable CMake uninitialized variable warnings for CMake files inside the project directory. ' "(--no-warnings is now the default, and doesn't need to be specified.)"), - "envvar": "IDF_CMAKE_WARN_UNINITIALIZED", - "is_flag": True, - "default": False, + 'envvar': 'IDF_CMAKE_WARN_UNINITIALIZED', + 'is_flag': True, + 'default': False, }, { - "names": ["-v", "--verbose"], - "help": "Verbose build output.", - "is_flag": True, - "is_eager": True, - "default": False, - "callback": verbose_callback, + 'names': ['-v', '--verbose'], + 'help': 'Verbose build output.', + 'is_flag': True, + 'is_eager': True, + 'default': False, + 'callback': verbose_callback, }, { - "names": ["--preview"], - "help": "Enable IDF features that are still in preview.", - "is_flag": True, - "default": False, + 'names': ['--preview'], + 'help': 'Enable IDF features that are still in preview.', + 'is_flag': True, + 'default': False, }, { - "names": ["--ccache/--no-ccache"], - "help": "Use ccache in build. Disabled by default.", - "is_flag": True, - "envvar": "IDF_CCACHE_ENABLE", - "default": False, + 'names': ['--ccache/--no-ccache'], + 'help': 'Use ccache in build. 
Disabled by default.', + 'is_flag': True, + 'envvar': 'IDF_CCACHE_ENABLE', + 'default': False, }, { - "names": ["-G", "--generator"], - "help": "CMake generator.", - "type": click.Choice(GENERATORS.keys()), + 'names': ['-G', '--generator'], + 'help': 'CMake generator.', + 'type': click.Choice(GENERATORS.keys()), }, { - "names": ["--dry-run"], - "help": "Only process arguments, but don't execute actions.", - "is_flag": True, - "hidden": True, - "default": False, + 'names': ['--dry-run'], + 'help': "Only process arguments, but don't execute actions.", + 'is_flag': True, + 'hidden': True, + 'default': False, }, ], - "global_action_callbacks": [validate_root_options], + 'global_action_callbacks': [validate_root_options], } build_actions = { - "actions": { - "all": { - "aliases": ["build"], - "callback": build_target, - "short_help": "Build the project.", - "help": ( - "Build the project. This can involve multiple steps:\n\n" - "1. Create the build directory if needed. " + 'actions': { + 'all': { + 'aliases': ['build'], + 'callback': build_target, + 'short_help': 'Build the project.', + 'help': ( + 'Build the project. This can involve multiple steps:\n\n' + '1. Create the build directory if needed. ' "The sub-directory 'build' is used to hold build output, " - "although this can be changed with the -B option.\n\n" - "2. Run CMake as necessary to configure the project " - "and generate build files for the main build tool.\n\n" - "3. Run the main build tool (Ninja or GNU Make). " - "By default, the build tool is automatically detected " - "but it can be explicitly set by passing the -G option to idf.py.\n\n"), - "options": global_options, - "order_dependencies": [ - "reconfigure", - "menuconfig", - "clean", - "fullclean", + 'although this can be changed with the -B option.\n\n' + '2. Run CMake as necessary to configure the project ' + 'and generate build files for the main build tool.\n\n' + '3. Run the main build tool (Ninja or GNU Make). 
' + 'By default, the build tool is automatically detected ' + 'but it can be explicitly set by passing the -G option to idf.py.\n\n'), + 'options': global_options, + 'order_dependencies': [ + 'reconfigure', + 'menuconfig', + 'clean', + 'fullclean', ], }, - "menuconfig": { - "callback": menuconfig, - "help": 'Run "menuconfig" project configuration tool.', - "options": global_options + [ + 'menuconfig': { + 'callback': menuconfig, + 'help': 'Run "menuconfig" project configuration tool.', + 'options': global_options + [ { - "names": ["--style", "--color-scheme", "style"], - "help": ( - "Menuconfig style.\n" - "The built-in styles include:\n\n" - "- default - a yellowish theme,\n\n" - "- monochrome - a black and white theme, or\n\n" - "- aquatic - a blue theme.\n\n" - "It is possible to customize these themes further" - " as it is described in the Color schemes section of the kconfiglib documentation.\n" + 'names': ['--style', '--color-scheme', 'style'], + 'help': ( + 'Menuconfig style.\n' + 'The built-in styles include:\n\n' + '- default - a yellowish theme,\n\n' + '- monochrome - a black and white theme, or\n\n' + '- aquatic - a blue theme.\n\n' + 'It is possible to customize these themes further' + ' as it is described in the Color schemes section of the kconfiglib documentation.\n' 'The default value is \"aquatic\".'), - "envvar": "MENUCONFIG_STYLE", - "default": "aquatic", + 'envvar': 'MENUCONFIG_STYLE', + 'default': 'aquatic', } ], }, - "confserver": { - "callback": build_target, - "help": "Run JSON configuration server.", - "options": global_options, + 'confserver': { + 'callback': build_target, + 'help': 'Run JSON configuration server.', + 'options': global_options, }, - "size": { - "callback": build_target, - "help": "Print basic size information about the app.", - "options": global_options, - "dependencies": ["app"], + 'size': { + 'callback': build_target, + 'help': 'Print basic size information about the app.', + 'options': global_options, + 'dependencies': ['app'], }, - "size-components": { - "callback": build_target, - "help": "Print per-component size information.", - "options": global_options, - "dependencies": ["app"], + 'size-components': { + 'callback': build_target, + 'help': 'Print per-component size information.', + 'options': global_options, + 'dependencies': ['app'], }, - "size-files": { - "callback": build_target, - "help": "Print per-source-file size information.", - "options": global_options, - "dependencies": ["app"], + 'size-files': { + 'callback': build_target, + 'help': 'Print per-source-file size information.', + 'options': global_options, + 'dependencies': ['app'], }, - "bootloader": { - "callback": build_target, - "help": "Build only bootloader.", - "options": global_options, + 'bootloader': { + 'callback': build_target, + 'help': 'Build only bootloader.', + 'options': global_options, }, - "app": { - "callback": build_target, - "help": "Build only the app.", - "order_dependencies": ["clean", "fullclean", "reconfigure"], - "options": global_options, + 'app': { + 'callback': build_target, + 'help': 'Build only the app.', + 'order_dependencies': ['clean', 'fullclean', 'reconfigure'], + 'options': global_options, }, - "efuse_common_table": { - "callback": build_target, - "help": "Generate C-source for IDF's eFuse fields.", - "order_dependencies": ["reconfigure"], - "options": global_options, + 'efuse_common_table': { + 'callback': build_target, + 'help': "Generate C-source for IDF's eFuse fields.", + 'order_dependencies': ['reconfigure'], + 'options': 
global_options, }, - "efuse_custom_table": { - "callback": build_target, - "help": "Generate C-source for user's eFuse fields.", - "order_dependencies": ["reconfigure"], - "options": global_options, + 'efuse_custom_table': { + 'callback': build_target, + 'help': "Generate C-source for user's eFuse fields.", + 'order_dependencies': ['reconfigure'], + 'options': global_options, }, - "show_efuse_table": { - "callback": build_target, - "help": "Print eFuse table.", - "order_dependencies": ["reconfigure"], - "options": global_options, + 'show_efuse_table': { + 'callback': build_target, + 'help': 'Print eFuse table.', + 'order_dependencies': ['reconfigure'], + 'options': global_options, }, - "partition_table": { - "callback": build_target, - "help": "Build only partition table.", - "order_dependencies": ["reconfigure"], - "options": global_options, + 'partition_table': { + 'callback': build_target, + 'help': 'Build only partition table.', + 'order_dependencies': ['reconfigure'], + 'options': global_options, }, - "erase_otadata": { - "callback": build_target, - "help": "Erase otadata partition.", - "options": global_options, + 'erase_otadata': { + 'callback': build_target, + 'help': 'Erase otadata partition.', + 'options': global_options, }, - "read_otadata": { - "callback": build_target, - "help": "Read otadata partition.", - "options": global_options, + 'read_otadata': { + 'callback': build_target, + 'help': 'Read otadata partition.', + 'options': global_options, }, - "build-system-targets": { - "callback": list_build_system_targets, - "help": "Print list of build system targets.", + 'build-system-targets': { + 'callback': list_build_system_targets, + 'help': 'Print list of build system targets.', }, - "fallback": { - "callback": fallback_target, - "help": "Handle for targets not known for idf.py.", - "hidden": True, + 'fallback': { + 'callback': fallback_target, + 'help': 'Handle for targets not known for idf.py.', + 'hidden': True, } } } clean_actions = { - "actions": { - "reconfigure": { - "callback": reconfigure, - "short_help": "Re-run CMake.", - "help": ( + 'actions': { + 'reconfigure': { + 'callback': reconfigure, + 'short_help': 'Re-run CMake.', + 'help': ( "Re-run CMake even if it doesn't seem to need re-running. " "This isn't necessary during normal usage, " - "but can be useful after adding/removing files from the source tree, " - "or when modifying CMake cache variables. " + 'but can be useful after adding/removing files from the source tree, ' + 'or when modifying CMake cache variables. ' "For example, \"idf.py -DNAME='VALUE' reconfigure\" " 'can be used to set variable "NAME" in CMake cache to value "VALUE".'), - "options": global_options, - "order_dependencies": ["menuconfig", "fullclean"], + 'options': global_options, + 'order_dependencies': ['menuconfig', 'fullclean'], }, - "set-target": { - "callback": set_target, - "short_help": "Set the chip target to build.", - "help": ( - "Set the chip target to build. This will remove the " - "existing sdkconfig file and corresponding CMakeCache and " - "create new ones according to the new target.\nFor example, " + 'set-target': { + 'callback': set_target, + 'short_help': 'Set the chip target to build.', + 'help': ( + 'Set the chip target to build. 
This will remove the ' + 'existing sdkconfig file and corresponding CMakeCache and ' + 'create new ones according to the new target.\nFor example, ' "\"idf.py set-target esp32\" will select esp32 as the new chip " - "target."), - "arguments": [ + 'target.'), + 'arguments': [ { - "names": ["idf-target"], - "nargs": 1, - "type": TargetChoice(SUPPORTED_TARGETS + PREVIEW_TARGETS), + 'names': ['idf-target'], + 'nargs': 1, + 'type': TargetChoice(SUPPORTED_TARGETS + PREVIEW_TARGETS), }, ], - "dependencies": ["fullclean"], + 'dependencies': ['fullclean'], }, - "clean": { - "callback": clean, - "short_help": "Delete build output files from the build directory.", - "help": ( - "Delete build output files from the build directory, " + 'clean': { + 'callback': clean, + 'short_help': 'Delete build output files from the build directory.', + 'help': ( + 'Delete build output files from the build directory, ' "forcing a 'full rebuild' the next time " "the project is built. Cleaning doesn't delete " - "CMake configuration output and some other files"), - "order_dependencies": ["fullclean"], + 'CMake configuration output and some other files'), + 'order_dependencies': ['fullclean'], }, - "fullclean": { - "callback": fullclean, - "short_help": "Delete the entire build directory contents.", - "help": ( - "Delete the entire build directory contents. " - "This includes all CMake configuration output." - "The next time the project is built, " - "CMake will configure it from scratch. " - "Note that this option recursively deletes all files " - "in the build directory, so use with care." - "Project configuration is not deleted.") + 'fullclean': { + 'callback': fullclean, + 'short_help': 'Delete the entire build directory contents.', + 'help': ( + 'Delete the entire build directory contents. ' + 'This includes all CMake configuration output.' + 'The next time the project is built, ' + 'CMake will configure it from scratch. ' + 'Note that this option recursively deletes all files ' + 'in the build directory, so use with care.' + 'Project configuration is not deleted.') }, - "python-clean": { - "callback": python_clean, - "short_help": "Delete generated Python byte code from the IDF directory", - "help": ( - "Delete generated Python byte code from the IDF directory " - "which may cause issues when switching between IDF and Python versions. " - "It is advised to run this target after switching versions.") + 'python-clean': { + 'callback': python_clean, + 'short_help': 'Delete generated Python byte code from the IDF directory', + 'help': ( + 'Delete generated Python byte code from the IDF directory ' + 'which may cause issues when switching between IDF and Python versions. ' + 'It is advised to run this target after switching versions.') }, } } diff --git a/tools/idf_py_actions/create_ext.py b/tools/idf_py_actions/create_ext.py index bc06ba9014..ff448cfbd6 100644 --- a/tools/idf_py_actions/create_ext.py +++ b/tools/idf_py_actions/create_ext.py @@ -1,13 +1,13 @@ from __future__ import print_function -from distutils.dir_util import copy_tree import os import re import sys +from distutils.dir_util import copy_tree def get_type(action): - return action.split("-")[1] + return action.split('-')[1] def replace_in_file(filename, pattern, replacement): @@ -24,37 +24,37 @@ def is_empty_and_create(path, action): if not os.path.exists(abspath): os.makedirs(abspath) elif not os.path.isdir(abspath): - print("Your target path is not a directory. 
Please remove the", os.path.abspath(abspath), - "or use different target path.") + print('Your target path is not a directory. Please remove the', os.path.abspath(abspath), + 'or use different target path.') sys.exit(4) elif len(os.listdir(path)) > 0: - print("The directory", abspath, "is not empty. To create a", get_type(action), - "you must empty the directory or choose a different path.") + print('The directory', abspath, 'is not empty. To create a', get_type(action), + 'you must empty the directory or choose a different path.') sys.exit(3) def create_project(target_path, name): - copy_tree(os.path.join(os.environ['IDF_PATH'], "examples", "get-started", "sample_project"), target_path) - main_folder = os.path.join(target_path, "main") - os.rename(os.path.join(main_folder, "main.c"), os.path.join(main_folder, ".".join((name, "c")))) - replace_in_file(os.path.join(main_folder, "CMakeLists.txt"), "main", name) - replace_in_file(os.path.join(target_path, "CMakeLists.txt"), "main", name) - os.remove(os.path.join(target_path, "README.md")) + copy_tree(os.path.join(os.environ['IDF_PATH'], 'examples', 'get-started', 'sample_project'), target_path) + main_folder = os.path.join(target_path, 'main') + os.rename(os.path.join(main_folder, 'main.c'), os.path.join(main_folder, '.'.join((name, 'c')))) + replace_in_file(os.path.join(main_folder, 'CMakeLists.txt'), 'main', name) + replace_in_file(os.path.join(target_path, 'CMakeLists.txt'), 'main', name) + os.remove(os.path.join(target_path, 'README.md')) # after manual removing "Makefile" and "component.mk" from `examples/get-started/sample_project` # remove following two lines as well - os.remove(os.path.join(target_path, "Makefile")) - os.remove(os.path.join(target_path, "main", "component.mk")) + os.remove(os.path.join(target_path, 'Makefile')) + os.remove(os.path.join(target_path, 'main', 'component.mk')) def create_component(target_path, name): - copy_tree(os.path.join(os.environ['IDF_PATH'], "tools", "templates", "sample_component"), target_path) - os.rename(os.path.join(target_path, "main.c"), os.path.join(target_path, ".".join((name, "c")))) - os.rename(os.path.join(target_path, "include", "main.h"), - os.path.join(target_path, "include", ".".join((name, "h")))) + copy_tree(os.path.join(os.environ['IDF_PATH'], 'tools', 'templates', 'sample_component'), target_path) + os.rename(os.path.join(target_path, 'main.c'), os.path.join(target_path, '.'.join((name, 'c')))) + os.rename(os.path.join(target_path, 'include', 'main.h'), + os.path.join(target_path, 'include', '.'.join((name, 'h')))) - replace_in_file(os.path.join(target_path, ".".join((name, "c"))), "main", name) - replace_in_file(os.path.join(target_path, "CMakeLists.txt"), "main", name) + replace_in_file(os.path.join(target_path, '.'.join((name, 'c'))), 'main', name) + replace_in_file(os.path.join(target_path, 'CMakeLists.txt'), 'main', name) def action_extensions(base_actions, project_path): @@ -63,58 +63,58 @@ def action_extensions(base_actions, project_path): is_empty_and_create(target_path, action) - func_action_map = {"create-project": create_project, "create-component": create_component} + func_action_map = {'create-project': create_project, 'create-component': create_component} func_action_map[action](target_path, action_args['name']) - print("The", get_type(action), "was created in", os.path.abspath(target_path)) + print('The', get_type(action), 'was created in', os.path.abspath(target_path)) # after the command execution, no other commands are accepted and idf.py terminates sys.exit(0) 
return { - "actions": { - "create-project": { - "callback": create_new, - "short_help": "Create a new project.", - "help": ("Create a new project with the name NAME specified as argument. " - "For example: " - "`idf.py create-project new_proj` " - "will create a new project in subdirectory called `new_proj` " - "of the current working directory. " + 'actions': { + 'create-project': { + 'callback': create_new, + 'short_help': 'Create a new project.', + 'help': ('Create a new project with the name NAME specified as argument. ' + 'For example: ' + '`idf.py create-project new_proj` ' + 'will create a new project in subdirectory called `new_proj` ' + 'of the current working directory. ' "For specifying the new project's path, use either the option --path for specifying the " - "destination directory, or the global option -C if the project should be created as a " - "subdirectory of the specified directory. " - "If the target path does not exist it will be created. If the target folder is not empty " - "then the operation will fail with return code 3. " - "If the target path is not a folder, the script will fail with return code 4. " - "After the execution idf.py terminates " - "so this operation should be used alone."), - "arguments": [{"names": ["name"]}], - "options": [ + 'destination directory, or the global option -C if the project should be created as a ' + 'subdirectory of the specified directory. ' + 'If the target path does not exist it will be created. If the target folder is not empty ' + 'then the operation will fail with return code 3. ' + 'If the target path is not a folder, the script will fail with return code 4. ' + 'After the execution idf.py terminates ' + 'so this operation should be used alone.'), + 'arguments': [{'names': ['name']}], + 'options': [ { - "names": ["-p", "--path"], - "help": ("Set the path for the new project. The project " - "will be created directly in the given folder if it does not contain anything"), + 'names': ['-p', '--path'], + 'help': ('Set the path for the new project. The project ' + 'will be created directly in the given folder if it does not contain anything'), }, ], }, - "create-component": { - "callback": create_new, - "short_help": "Create a new component.", - "help": ("Create a new component with the name NAME specified as argument. " - "For example: " - "`idf.py create-component new_comp` " - "will create a new component in subdirectory called `new_comp` " - "of the current working directory. " + 'create-component': { + 'callback': create_new, + 'short_help': 'Create a new component.', + 'help': ('Create a new component with the name NAME specified as argument. ' + 'For example: ' + '`idf.py create-component new_comp` ' + 'will create a new component in subdirectory called `new_comp` ' + 'of the current working directory. ' "For specifying the new component's path use the option -C. " - "If the target path does not exist then it will be created. " - "If the target folder is not empty " - "then the operation will fail with return code 3. " - "If the target path is not a folder, the script will fail with return code 4. " - "After the execution idf.py terminates " - "so this operation should be used alone."), - "arguments": [{"names": ["name"]}], + 'If the target path does not exist then it will be created. ' + 'If the target folder is not empty ' + 'then the operation will fail with return code 3. ' + 'If the target path is not a folder, the script will fail with return code 4. 
' + 'After the execution idf.py terminates ' + 'so this operation should be used alone.'), + 'arguments': [{'names': ['name']}], } } } diff --git a/tools/idf_py_actions/debug_ext.py b/tools/idf_py_actions/debug_ext.py index 57d1829608..5b89650a22 100644 --- a/tools/idf_py_actions/debug_ext.py +++ b/tools/idf_py_actions/debug_ext.py @@ -1,12 +1,12 @@ import json import os -import sys -import subprocess -import shlex -import time import re -from threading import Thread +import shlex +import subprocess +import sys import threading +import time +from threading import Thread from idf_py_actions.errors import FatalError from idf_py_actions.tools import ensure_build_directory @@ -15,42 +15,42 @@ PYTHON = sys.executable def action_extensions(base_actions, project_path): - OPENOCD_OUT_FILE = "openocd_out.txt" - GDBGUI_OUT_FILE = "gdbgui_out.txt" + OPENOCD_OUT_FILE = 'openocd_out.txt' + GDBGUI_OUT_FILE = 'gdbgui_out.txt' # Internal dictionary of currently active processes, threads and their output files - processes = {"threads_to_join": [], "openocd_issues": None} + processes = {'threads_to_join': [], 'openocd_issues': None} def _check_for_common_openocd_issues(file_name, print_all=True): - if processes["openocd_issues"] is not None: - return processes["openocd_issues"] + if processes['openocd_issues'] is not None: + return processes['openocd_issues'] try: - message = "Please check JTAG connection!" - with open(file_name, "r") as f: + message = 'Please check JTAG connection!' + with open(file_name, 'r') as f: content = f.read() if print_all: print(content) - if re.search(r"Address already in use", content): - message = ("Please check if another process uses the mentioned ports. OpenOCD already running, perhaps in the background?\n" - "Please list all processes to check if OpenOCD is already running; if so, terminate it before starting OpenOCD from idf.py") + if re.search(r'Address already in use', content): + message = ('Please check if another process uses the mentioned ports. 
OpenOCD already running, perhaps in the background?\n' + 'Please list all processes to check if OpenOCD is already running; if so, terminate it before starting OpenOCD from idf.py') finally: - processes["openocd_issues"] = message + processes['openocd_issues'] = message return message def _check_openocd_errors(fail_if_openocd_failed, target, ctx): if fail_if_openocd_failed: - if "openocd" in processes and processes["openocd"] is not None: - p = processes["openocd"] - name = processes["openocd_outfile_name"] + if 'openocd' in processes and processes['openocd'] is not None: + p = processes['openocd'] + name = processes['openocd_outfile_name'] # watch OpenOCD (for 5x500ms) to check if it hasn't terminated or outputs an error for _ in range(5): if p.poll() is not None: - print("OpenOCD exited with {}".format(p.poll())) + print('OpenOCD exited with {}'.format(p.poll())) break - with open(name, "r") as f: + with open(name, 'r') as f: content = f.read() - if re.search(r"no device found", content): + if re.search(r'no device found', content): break - if re.search(r"Listening on port \d+ for gdb connections", content): + if re.search(r'Listening on port \d+ for gdb connections', content): # expect OpenOCD has started successfully - stop watching return time.sleep(0.5) @@ -62,8 +62,8 @@ def action_extensions(base_actions, project_path): def _terminate_async_target(target): if target in processes and processes[target] is not None: try: - if target + "_outfile" in processes: - processes[target + "_outfile"].close() + if target + '_outfile' in processes: + processes[target + '_outfile'].close() p = processes[target] if p.poll() is None: p.terminate() @@ -74,13 +74,13 @@ def action_extensions(base_actions, project_path): time.sleep(0.1) else: p.kill() - if target + "_outfile_name" in processes: - if target == "openocd": - print(_check_for_common_openocd_issues(processes[target + "_outfile_name"], print_all=False)) - os.unlink(processes[target + "_outfile_name"]) + if target + '_outfile_name' in processes: + if target == 'openocd': + print(_check_for_common_openocd_issues(processes[target + '_outfile_name'], print_all=False)) + os.unlink(processes[target + '_outfile_name']) except Exception as e: print(e) - print("Failed to close/kill {}".format(target)) + print('Failed to close/kill {}'.format(target)) processes[target] = None # to indicate this has ended def _get_commandline_options(ctx): @@ -97,39 +97,39 @@ def action_extensions(base_actions, project_path): return result def create_local_gdbinit(gdbinit, elf_file): - with open(gdbinit, "w") as f: - f.write("target remote :3333\n") - if os.name == "nt": + with open(gdbinit, 'w') as f: + f.write('target remote :3333\n') + if os.name == 'nt': elf_file = elf_file.replace('\\','\\\\') - f.write("symbol-file {}\n".format(elf_file)) - f.write("mon reset halt\n") - f.write("flushregs\n") - f.write("thb app_main\n") - f.write("c\n") + f.write('symbol-file {}\n'.format(elf_file)) + f.write('mon reset halt\n') + f.write('flushregs\n') + f.write('thb app_main\n') + f.write('c\n') def debug_cleanup(): - print("cleaning up debug targets") - for t in processes["threads_to_join"]: + print('cleaning up debug targets') + for t in processes['threads_to_join']: if threading.currentThread() != t: t.join() - _terminate_async_target("openocd") - _terminate_async_target("gdbgui") - _terminate_async_target("gdb") + _terminate_async_target('openocd') + _terminate_async_target('gdbgui') + _terminate_async_target('gdb') def post_debug(action, ctx, args, **kwargs): """ Deal with 
asynchronous targets, such as openocd running in background """ - if kwargs["block"] == 1: - for target in ["openocd", "gdbgui"]: + if kwargs['block'] == 1: + for target in ['openocd', 'gdbgui']: if target in processes and processes[target] is not None: break else: return try: p = processes[target] - name = processes[target + "_outfile_name"] + name = processes[target + '_outfile_name'] pos = 0 while True: - with open(name, "r") as f: + with open(name, 'r') as f: f.seek(pos) for line in f: print(line.rstrip()) @@ -139,15 +139,15 @@ def action_extensions(base_actions, project_path): break time.sleep(0.5) except KeyboardInterrupt: - print("Terminated -> exiting debug utility targets") - _terminate_async_target("openocd") - _terminate_async_target("gdbgui") + print('Terminated -> exiting debug utility targets') + _terminate_async_target('openocd') + _terminate_async_target('gdbgui') def get_project_desc(args, ctx): - desc_path = os.path.join(args.build_dir, "project_description.json") + desc_path = os.path.join(args.build_dir, 'project_description.json') if not os.path.exists(desc_path): ensure_build_directory(args, ctx.info_name) - with open(desc_path, "r") as f: + with open(desc_path, 'r') as f: project_desc = json.load(f) return project_desc @@ -156,63 +156,63 @@ def action_extensions(base_actions, project_path): Execute openocd as external tool """ OPENOCD_TAGET_CONFIG = { - "esp32": "-f board/esp32-wrover-kit-3.3v.cfg", - "esp32s2": "-f board/esp32s2-kaluga-1.cfg", + 'esp32': '-f board/esp32-wrover-kit-3.3v.cfg', + 'esp32s2': '-f board/esp32s2-kaluga-1.cfg', } - if os.getenv("OPENOCD_SCRIPTS") is None: - raise FatalError("OPENOCD_SCRIPTS not found in the environment: Please run export.sh/export.bat", ctx) - openocd_arguments = os.getenv("OPENOCD_COMMANDS") if openocd_commands is None else openocd_commands + if os.getenv('OPENOCD_SCRIPTS') is None: + raise FatalError('OPENOCD_SCRIPTS not found in the environment: Please run export.sh/export.bat', ctx) + openocd_arguments = os.getenv('OPENOCD_COMMANDS') if openocd_commands is None else openocd_commands project_desc = get_project_desc(args, ctx) if openocd_arguments is None: # use default value if commands not defined in the environment nor command line - target = project_desc["target"] - default_args = "-f interface/ftdi/esp32_devkitj_v1.cfg -f target/{}.cfg".format(target) + target = project_desc['target'] + default_args = '-f interface/ftdi/esp32_devkitj_v1.cfg -f target/{}.cfg'.format(target) openocd_arguments = OPENOCD_TAGET_CONFIG.get(target, default_args) print('Note: OpenOCD cfg not found (via env variable OPENOCD_COMMANDS nor as a --openocd-commands argument)\n' 'OpenOCD arguments default to: "{}"'.format(openocd_arguments)) # script directory is taken from the environment by OpenOCD, update only if command line arguments to override if openocd_scripts is not None: - openocd_arguments += " -s {}".format(openocd_scripts) - local_dir = project_desc["build_dir"] - args = ["openocd"] + shlex.split(openocd_arguments) + openocd_arguments += ' -s {}'.format(openocd_scripts) + local_dir = project_desc['build_dir'] + args = ['openocd'] + shlex.split(openocd_arguments) openocd_out_name = os.path.join(local_dir, OPENOCD_OUT_FILE) - openocd_out = open(openocd_out_name, "a+") + openocd_out = open(openocd_out_name, 'a+') try: process = subprocess.Popen(args, stdout=openocd_out, stderr=subprocess.STDOUT, bufsize=1) except Exception as e: print(e) - raise FatalError("Error starting openocd. 
Please make sure it is installed and is present in executable paths", ctx) + raise FatalError('Error starting openocd. Please make sure it is installed and is present in executable paths', ctx) - processes["openocd"] = process - processes["openocd_outfile"] = openocd_out - processes["openocd_outfile_name"] = openocd_out_name - print("OpenOCD started as a background task {}".format(process.pid)) + processes['openocd'] = process + processes['openocd_outfile'] = openocd_out + processes['openocd_outfile_name'] = openocd_out_name + print('OpenOCD started as a background task {}'.format(process.pid)) def gdbui(action, ctx, args, gdbgui_port, gdbinit, require_openocd): """ Asynchronous GDB-UI target """ project_desc = get_project_desc(args, ctx) - local_dir = project_desc["build_dir"] - gdb = project_desc["monitor_toolprefix"] + "gdb" + local_dir = project_desc['build_dir'] + gdb = project_desc['monitor_toolprefix'] + 'gdb' if gdbinit is None: gdbinit = os.path.join(local_dir, 'gdbinit') - create_local_gdbinit(gdbinit, os.path.join(args.build_dir, project_desc["app_elf"])) - args = ["gdbgui", "-g", gdb, '--gdb-args="-x={}"'.format(gdbinit)] + create_local_gdbinit(gdbinit, os.path.join(args.build_dir, project_desc['app_elf'])) + args = ['gdbgui', '-g', gdb, '--gdb-args="-x={}"'.format(gdbinit)] if gdbgui_port is not None: - args += ["--port", gdbgui_port] + args += ['--port', gdbgui_port] gdbgui_out_name = os.path.join(local_dir, GDBGUI_OUT_FILE) - gdbgui_out = open(gdbgui_out_name, "a+") + gdbgui_out = open(gdbgui_out_name, 'a+') try: process = subprocess.Popen(args, stdout=gdbgui_out, stderr=subprocess.STDOUT, bufsize=1) except Exception as e: print(e) - raise FatalError("Error starting gdbgui. Please make sure gdbgui can be started", ctx) + raise FatalError('Error starting gdbgui. 
Please make sure gdbgui can be started', ctx) - processes["gdbgui"] = process - processes["gdbgui_outfile"] = gdbgui_out - processes["gdbgui_outfile_name"] = gdbgui_out_name - print("gdbgui started as a background task {}".format(process.pid)) + processes['gdbgui'] = process + processes['gdbgui_outfile'] = gdbgui_out + processes['gdbgui_outfile_name'] = gdbgui_out_name + print('gdbgui started as a background task {}'.format(process.pid)) _check_openocd_errors(fail_if_openocd_failed, action, ctx) def global_callback(ctx, global_args, tasks): @@ -222,28 +222,28 @@ def action_extensions(base_actions, project_path): tasks.insert(0, tasks.pop(index)) break - debug_targets = any([task.name in ("openocd", "gdbgui") for task in tasks]) + debug_targets = any([task.name in ('openocd', 'gdbgui') for task in tasks]) if debug_targets: # Register the meta cleanup callback -> called on FatalError - ctx.meta["cleanup"] = debug_cleanup - move_to_front("gdbgui") # possibly 2nd - move_to_front("openocd") # always 1st + ctx.meta['cleanup'] = debug_cleanup + move_to_front('gdbgui') # possibly 2nd + move_to_front('openocd') # always 1st # followed by "monitor", "gdb" or "gdbtui" in any order - post_action = ctx.invoke(ctx.command.get_command(ctx, "post_debug")) - if any([task.name in ("monitor", "gdb", "gdbtui") for task in tasks]): - post_action.action_args["block"] = 0 + post_action = ctx.invoke(ctx.command.get_command(ctx, 'post_debug')) + if any([task.name in ('monitor', 'gdb', 'gdbtui') for task in tasks]): + post_action.action_args['block'] = 0 else: - post_action.action_args["block"] = 1 + post_action.action_args['block'] = 1 tasks.append(post_action) # always last - if any([task.name == "openocd" for task in tasks]): + if any([task.name == 'openocd' for task in tasks]): for task in tasks: - if task.name in ("gdb", "gdbgui", "gdbtui"): - task.action_args["require_openocd"] = True + if task.name in ('gdb', 'gdbgui', 'gdbtui'): + task.action_args['require_openocd'] = True def run_gdb(gdb_args): p = subprocess.Popen(gdb_args) - processes["gdb"] = p + processes['gdb'] = p return p.wait() def gdbtui(action, ctx, args, gdbinit, require_openocd): @@ -258,18 +258,18 @@ def action_extensions(base_actions, project_path): """ watch_openocd = Thread(target=_check_openocd_errors, args=(fail_if_openocd_failed, action, ctx, )) watch_openocd.start() - processes["threads_to_join"].append(watch_openocd) - desc_path = os.path.join(args.build_dir, "project_description.json") + processes['threads_to_join'].append(watch_openocd) + desc_path = os.path.join(args.build_dir, 'project_description.json') if not os.path.exists(desc_path): ensure_build_directory(args, ctx.info_name) - with open(desc_path, "r") as f: + with open(desc_path, 'r') as f: project_desc = json.load(f) - elf_file = os.path.join(args.build_dir, project_desc["app_elf"]) + elf_file = os.path.join(args.build_dir, project_desc['app_elf']) if not os.path.exists(elf_file): - raise FatalError("ELF file not found. You need to build & flash the project before running debug targets", ctx) - gdb = project_desc["monitor_toolprefix"] + "gdb" - local_dir = project_desc["build_dir"] + raise FatalError('ELF file not found. 
You need to build & flash the project before running debug targets', ctx) + gdb = project_desc['monitor_toolprefix'] + 'gdb' + local_dir = project_desc['build_dir'] if gdbinit is None: gdbinit = os.path.join(local_dir, 'gdbinit') create_local_gdbinit(gdbinit, elf_file) @@ -288,92 +288,92 @@ def action_extensions(base_actions, project_path): finally: watch_openocd.join() try: - processes["threads_to_join"].remove(watch_openocd) + processes['threads_to_join'].remove(watch_openocd) except ValueError: # Valid scenario: watch_openocd task won't be in the list if openocd not started from idf.py pass fail_if_openocd_failed = { - "names": ["--require-openocd", "--require_openocd"], - "help": - ("Fail this target if openocd (this targets dependency) failed.\n"), - "is_flag": True, - "default": False, + 'names': ['--require-openocd', '--require_openocd'], + 'help': + ('Fail this target if openocd (this targets dependency) failed.\n'), + 'is_flag': True, + 'default': False, } gdbinit = { - "names": ["--gdbinit"], - "help": ("Specify the name of gdbinit file to use\n"), - "default": None, + 'names': ['--gdbinit'], + 'help': ('Specify the name of gdbinit file to use\n'), + 'default': None, } debug_actions = { - "global_action_callbacks": [global_callback], - "actions": { - "openocd": { - "callback": openocd, - "help": "Run openocd from current path", - "options": [ + 'global_action_callbacks': [global_callback], + 'actions': { + 'openocd': { + 'callback': openocd, + 'help': 'Run openocd from current path', + 'options': [ { - "names": ["--openocd-scripts", "--openocd_scripts"], - "help": - ("Script directory for openocd cfg files.\n"), - "default": + 'names': ['--openocd-scripts', '--openocd_scripts'], + 'help': + ('Script directory for openocd cfg files.\n'), + 'default': None, }, { - "names": ["--openocd-commands", "--openocd_commands"], - "help": - ("Command line arguments for openocd.\n"), - "default": None, + 'names': ['--openocd-commands', '--openocd_commands'], + 'help': + ('Command line arguments for openocd.\n'), + 'default': None, } ], - "order_dependencies": ["all", "flash"], + 'order_dependencies': ['all', 'flash'], }, - "gdb": { - "callback": gdb, - "help": "Run the GDB.", - "options": [ + 'gdb': { + 'callback': gdb, + 'help': 'Run the GDB.', + 'options': [ { - "names": ["--gdb-tui", "--gdb_tui"], - "help": - ("run gdb in TUI mode\n"), - "default": + 'names': ['--gdb-tui', '--gdb_tui'], + 'help': + ('run gdb in TUI mode\n'), + 'default': None, }, gdbinit, fail_if_openocd_failed ], - "order_dependencies": ["all", "flash"], + 'order_dependencies': ['all', 'flash'], }, - "gdbgui": { - "callback": gdbui, - "help": "GDB UI in default browser.", - "options": [ + 'gdbgui': { + 'callback': gdbui, + 'help': 'GDB UI in default browser.', + 'options': [ { - "names": ["--gdbgui-port", "--gdbgui_port"], - "help": - ("The port on which gdbgui will be hosted. Default: 5000\n"), - "default": + 'names': ['--gdbgui-port', '--gdbgui_port'], + 'help': + ('The port on which gdbgui will be hosted. 
Default: 5000\n'), + 'default': None, }, gdbinit, fail_if_openocd_failed ], - "order_dependencies": ["all", "flash"], + 'order_dependencies': ['all', 'flash'], }, - "gdbtui": { - "callback": gdbtui, - "help": "GDB TUI mode.", - "options": [gdbinit, fail_if_openocd_failed], - "order_dependencies": ["all", "flash"], + 'gdbtui': { + 'callback': gdbtui, + 'help': 'GDB TUI mode.', + 'options': [gdbinit, fail_if_openocd_failed], + 'order_dependencies': ['all', 'flash'], }, - "post_debug": { - "callback": post_debug, - "help": "Utility target to read the output of async debug action and stop them.", - "options": [ + 'post_debug': { + 'callback': post_debug, + 'help': 'Utility target to read the output of async debug action and stop them.', + 'options': [ { - "names": ["--block", "--block"], - "help": - ("Set to 1 for blocking the console on the outputs of async debug actions\n"), - "default": 0, + 'names': ['--block', '--block'], + 'help': + ('Set to 1 for blocking the console on the outputs of async debug actions\n'), + 'default': 0, }, ], - "order_dependencies": [], + 'order_dependencies': [], }, }, } diff --git a/tools/idf_py_actions/dfu_ext.py b/tools/idf_py_actions/dfu_ext.py index 6d70057bea..0062978b9a 100644 --- a/tools/idf_py_actions/dfu_ext.py +++ b/tools/idf_py_actions/dfu_ext.py @@ -1,5 +1,5 @@ -from idf_py_actions.tools import is_target_supported, ensure_build_directory, run_target from idf_py_actions.errors import FatalError +from idf_py_actions.tools import ensure_build_directory, is_target_supported, run_target def action_extensions(base_actions, project_path): @@ -14,7 +14,7 @@ def action_extensions(base_actions, project_path): ensure_build_directory(args, ctx.info_name) try: - run_target(target_name, args, {"ESP_DFU_PATH": path}) + run_target(target_name, args, {'ESP_DFU_PATH': path}) except FatalError: # Cannot capture the error from dfu-util here so the best advise is: print('Please have a look at the "Device Firmware Upgrade through USB" chapter in API Guides of the ' @@ -22,28 +22,28 @@ def action_extensions(base_actions, project_path): raise dfu_actions = { - "actions": { - "dfu": { - "callback": dfu_target, - "short_help": "Build the DFU binary", - "dependencies": ["all"], + 'actions': { + 'dfu': { + 'callback': dfu_target, + 'short_help': 'Build the DFU binary', + 'dependencies': ['all'], }, - "dfu-list": { - "callback": dfu_target, - "short_help": "List DFU capable devices", - "dependencies": [], + 'dfu-list': { + 'callback': dfu_target, + 'short_help': 'List DFU capable devices', + 'dependencies': [], }, - "dfu-flash": { - "callback": dfu_flash_target, - "short_help": "Flash the DFU binary", - "order_dependencies": ["dfu"], - "options": [ + 'dfu-flash': { + 'callback': dfu_flash_target, + 'short_help': 'Flash the DFU binary', + 'order_dependencies': ['dfu'], + 'options': [ { - "names": ["--path"], - "default": "", - "help": "Specify path to DFU device. The default empty path works if there is just one " - "ESP device with the same product identificator. See the device list for paths " - "of available devices." + 'names': ['--path'], + 'default': '', + 'help': 'Specify path to DFU device. The default empty path works if there is just one ' + 'ESP device with the same product identificator. See the device list for paths ' + 'of available devices.' 
} ], }, diff --git a/tools/idf_py_actions/errors.py b/tools/idf_py_actions/errors.py index fdcdd56179..e5b8eabbe7 100644 --- a/tools/idf_py_actions/errors.py +++ b/tools/idf_py_actions/errors.py @@ -5,6 +5,6 @@ class FatalError(RuntimeError): def __init__(self, message, ctx=None): super(RuntimeError, self).__init__(message) # if context is defined, check for the cleanup tasks - if ctx is not None and "cleanup" in ctx.meta: + if ctx is not None and 'cleanup' in ctx.meta: # cleans up the environment before failure - ctx.meta["cleanup"]() + ctx.meta['cleanup']() diff --git a/tools/idf_py_actions/global_options.py b/tools/idf_py_actions/global_options.py index 7f9a171a9f..c34c4a47fa 100644 --- a/tools/idf_py_actions/global_options.py +++ b/tools/idf_py_actions/global_options.py @@ -1,6 +1,6 @@ global_options = [{ - "names": ["-D", "--define-cache-entry"], - "help": "Create a cmake cache entry.", - "scope": "global", - "multiple": True, + 'names': ['-D', '--define-cache-entry'], + 'help': 'Create a cmake cache entry.', + 'scope': 'global', + 'multiple': True, }] diff --git a/tools/idf_py_actions/serial_ext.py b/tools/idf_py_actions/serial_ext.py index d5fb6202f1..a7f5760851 100644 --- a/tools/idf_py_actions/serial_ext.py +++ b/tools/idf_py_actions/serial_ext.py @@ -3,10 +3,9 @@ import os import sys import click - from idf_py_actions.errors import FatalError from idf_py_actions.global_options import global_options -from idf_py_actions.tools import ensure_build_directory, run_tool, run_target, get_sdkconfig_value +from idf_py_actions.tools import ensure_build_directory, get_sdkconfig_value, run_target, run_tool PYTHON = sys.executable @@ -16,7 +15,7 @@ def action_extensions(base_actions, project_path): # Import is done here in order to move it after the check_environment() ensured that pyserial has been installed try: import serial.tools.list_ports - esptool_path = os.path.join(os.environ["IDF_PATH"], "components/esptool_py/esptool/") + esptool_path = os.path.join(os.environ['IDF_PATH'], 'components/esptool_py/esptool/') sys.path.insert(0, esptool_path) import esptool ports = list(sorted(p.device for p in serial.tools.list_ports.comports())) @@ -28,22 +27,22 @@ def action_extensions(base_actions, project_path): raise FatalError("No serial ports found. 
diff --git a/tools/idf_py_actions/serial_ext.py b/tools/idf_py_actions/serial_ext.py
index d5fb6202f1..a7f5760851 100644
--- a/tools/idf_py_actions/serial_ext.py
+++ b/tools/idf_py_actions/serial_ext.py
@@ -3,10 +3,9 @@ import os
 import sys
 
 import click
-
 from idf_py_actions.errors import FatalError
 from idf_py_actions.global_options import global_options
-from idf_py_actions.tools import ensure_build_directory, run_tool, run_target, get_sdkconfig_value
+from idf_py_actions.tools import ensure_build_directory, get_sdkconfig_value, run_target, run_tool
 
 PYTHON = sys.executable
 
@@ -16,7 +15,7 @@ def action_extensions(base_actions, project_path):
     # Import is done here in order to move it after the check_environment() ensured that pyserial has been installed
     try:
         import serial.tools.list_ports
-        esptool_path = os.path.join(os.environ["IDF_PATH"], "components/esptool_py/esptool/")
+        esptool_path = os.path.join(os.environ['IDF_PATH'], 'components/esptool_py/esptool/')
         sys.path.insert(0, esptool_path)
         import esptool
         ports = list(sorted(p.device for p in serial.tools.list_ports.comports()))
@@ -28,22 +27,22 @@ def action_extensions(base_actions, project_path):
         raise FatalError("No serial ports found. Connect a device, or use '-p PORT' option to set a specific port.")
 
     def _get_esptool_args(args):
-        esptool_path = os.path.join(os.environ["IDF_PATH"], "components/esptool_py/esptool/esptool.py")
+        esptool_path = os.path.join(os.environ['IDF_PATH'], 'components/esptool_py/esptool/esptool.py')
         if args.port is None:
             args.port = _get_default_serial_port(args)
         result = [PYTHON, esptool_path]
-        result += ["-p", args.port]
-        result += ["-b", str(args.baud)]
+        result += ['-p', args.port]
+        result += ['-b', str(args.baud)]
 
-        with open(os.path.join(args.build_dir, "flasher_args.json")) as f:
+        with open(os.path.join(args.build_dir, 'flasher_args.json')) as f:
             flasher_args = json.load(f)
 
-        extra_esptool_args = flasher_args["extra_esptool_args"]
-        result += ["--before", extra_esptool_args["before"]]
-        result += ["--after", extra_esptool_args["after"]]
-        result += ["--chip", extra_esptool_args["chip"]]
-        if not extra_esptool_args["stub"]:
-            result += ["--no-stub"]
+        extra_esptool_args = flasher_args['extra_esptool_args']
+        result += ['--before', extra_esptool_args['before']]
+        result += ['--after', extra_esptool_args['after']]
+        result += ['--chip', extra_esptool_args['chip']]
+        if not extra_esptool_args['stub']:
+            result += ['--no-stub']
         return result
 
     def _get_commandline_options(ctx):
@@ -64,49 +63,49 @@ def action_extensions(base_actions, project_path):
         Run idf_monitor.py to watch build output
         """
-        desc_path = os.path.join(args.build_dir, "project_description.json")
+        desc_path = os.path.join(args.build_dir, 'project_description.json')
         if not os.path.exists(desc_path):
             ensure_build_directory(args, ctx.info_name)
-        with open(desc_path, "r") as f:
+        with open(desc_path, 'r') as f:
             project_desc = json.load(f)
 
-        elf_file = os.path.join(args.build_dir, project_desc["app_elf"])
+        elf_file = os.path.join(args.build_dir, project_desc['app_elf'])
         if not os.path.exists(elf_file):
             raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', "
-                             "and the binary on the device must match the one in the build directory exactly. "
+                             'and the binary on the device must match the one in the build directory exactly. '
                              "Try '%s flash monitor'." % (elf_file, ctx.info_name), ctx)
-        idf_monitor = os.path.join(os.environ["IDF_PATH"], "tools/idf_monitor.py")
+        idf_monitor = os.path.join(os.environ['IDF_PATH'], 'tools/idf_monitor.py')
         monitor_args = [PYTHON, idf_monitor]
         esp_port = args.port or _get_default_serial_port(args)
-        monitor_args += ["-p", esp_port]
+        monitor_args += ['-p', esp_port]
 
         if not monitor_baud:
-            monitor_baud = os.getenv("IDF_MONITOR_BAUD") or os.getenv("MONITORBAUD") or project_desc["monitor_baud"]
+            monitor_baud = os.getenv('IDF_MONITOR_BAUD') or os.getenv('MONITORBAUD') or project_desc['monitor_baud']
 
-        monitor_args += ["-b", monitor_baud]
-        monitor_args += ["--toolchain-prefix", project_desc["monitor_toolprefix"]]
+        monitor_args += ['-b', monitor_baud]
+        monitor_args += ['--toolchain-prefix', project_desc['monitor_toolprefix']]
 
-        coredump_decode = get_sdkconfig_value(project_desc["config_file"], "CONFIG_ESP_COREDUMP_DECODE")
+        coredump_decode = get_sdkconfig_value(project_desc['config_file'], 'CONFIG_ESP_COREDUMP_DECODE')
         if coredump_decode is not None:
-            monitor_args += ["--decode-coredumps", coredump_decode]
+            monitor_args += ['--decode-coredumps', coredump_decode]
 
-        target_arch_riscv = get_sdkconfig_value(project_desc["config_file"], "CONFIG_IDF_TARGET_ARCH_RISCV")
+        target_arch_riscv = get_sdkconfig_value(project_desc['config_file'], 'CONFIG_IDF_TARGET_ARCH_RISCV')
         if target_arch_riscv:
-            monitor_args += ["--decode-panic", "backtrace", "--target", project_desc["target"]]
+            monitor_args += ['--decode-panic', 'backtrace', '--target', project_desc['target']]
 
         if print_filter is not None:
-            monitor_args += ["--print_filter", print_filter]
+            monitor_args += ['--print_filter', print_filter]
         monitor_args += [elf_file]
 
         if encrypted:
             monitor_args += ['--encrypted']
 
         idf_py = [PYTHON] + _get_commandline_options(ctx)  # commands to re-run idf.py
-        monitor_args += ["-m", " ".join("'%s'" % a for a in idf_py)]
+        monitor_args += ['-m', ' '.join("'%s'" % a for a in idf_py)]
 
-        if "MSYSTEM" in os.environ:
-            monitor_args = ["winpty"] + monitor_args
-        run_tool("idf_monitor", monitor_args, args.project_dir)
+        if 'MSYSTEM' in os.environ:
+            monitor_args = ['winpty'] + monitor_args
+        run_tool('idf_monitor', monitor_args, args.project_dir)
 
     def flash(action, ctx, args):
         """
@@ -114,126 +113,126 @@ def action_extensions(base_actions, project_path):
         """
         ensure_build_directory(args, ctx.info_name)
         esp_port = args.port or _get_default_serial_port(args)
-        run_target(action, args, {"ESPBAUD": str(args.baud),"ESPPORT": esp_port})
+        run_target(action, args, {'ESPBAUD': str(args.baud),'ESPPORT': esp_port})
 
     def erase_flash(action, ctx, args):
         ensure_build_directory(args, ctx.info_name)
         esptool_args = _get_esptool_args(args)
-        esptool_args += ["erase_flash"]
-        run_tool("esptool.py", esptool_args, args.build_dir)
+        esptool_args += ['erase_flash']
+        run_tool('esptool.py', esptool_args, args.build_dir)
 
     def global_callback(ctx, global_args, tasks):
-        encryption = any([task.name in ("encrypted-flash", "encrypted-app-flash") for task in tasks])
+        encryption = any([task.name in ('encrypted-flash', 'encrypted-app-flash') for task in tasks])
         if encryption:
             for task in tasks:
-                if task.name == "monitor":
-                    task.action_args["encrypted"] = True
+                if task.name == 'monitor':
+                    task.action_args['encrypted'] = True
                     break
 
     baud_rate = {
-        "names": ["-b", "--baud"],
-        "help": "Baud rate for flashing.",
-        "scope": "global",
-        "envvar": "ESPBAUD",
-        "default": 460800,
+        'names': ['-b', '--baud'],
+        'help': 'Baud rate for flashing.',
+        'scope': 'global',
+        'envvar': 'ESPBAUD',
+        'default': 460800,
     }
 
     port = {
-        "names": ["-p", "--port"],
-        "help": "Serial port.",
-        "scope": "global",
-        "envvar": "ESPPORT",
-        "default": None,
+        'names': ['-p', '--port'],
+        'help': 'Serial port.',
+        'scope': 'global',
+        'envvar': 'ESPPORT',
+        'default': None,
     }
 
     serial_actions = {
-        "global_action_callbacks": [global_callback],
-        "actions": {
-            "flash": {
-                "callback": flash,
-                "help": "Flash the project.",
-                "options": global_options + [baud_rate, port],
-                "order_dependencies": ["all", "erase_flash"],
+        'global_action_callbacks': [global_callback],
+        'actions': {
+            'flash': {
+                'callback': flash,
+                'help': 'Flash the project.',
+                'options': global_options + [baud_rate, port],
+                'order_dependencies': ['all', 'erase_flash'],
             },
-            "erase_flash": {
-                "callback": erase_flash,
-                "help": "Erase entire flash chip.",
-                "options": [baud_rate, port],
+            'erase_flash': {
+                'callback': erase_flash,
+                'help': 'Erase entire flash chip.',
+                'options': [baud_rate, port],
             },
-            "monitor": {
-                "callback":
+            'monitor': {
+                'callback':
                     monitor,
-                "help":
-                "Display serial output.",
-                "options": [
+                'help':
+                'Display serial output.',
+                'options': [
                     port,
                     {
-                        "names": ["--print-filter", "--print_filter"],
-                        "help":
-                        ("Filter monitor output. "
-                         "Restrictions on what to print can be specified as a series of <tag>:<log_level> items "
-                         "where <tag> is the tag string and <log_level> is a character from the set "
-                         "{N, E, W, I, D, V, *} referring to a level. "
+                        'names': ['--print-filter', '--print_filter'],
+                        'help':
+                        ('Filter monitor output. '
+                         'Restrictions on what to print can be specified as a series of <tag>:<log_level> items '
+                         'where <tag> is the tag string and <log_level> is a character from the set '
+                         '{N, E, W, I, D, V, *} referring to a level. '
                          'For example, "tag1:W" matches and prints only the outputs written with '
                          'ESP_LOGW("tag1", ...) or at lower verbosity level, i.e. ESP_LOGE("tag1", ...). '
                          'Not specifying a <log_level> or using "*" defaults to Verbose level. '
                          'Please see the IDF Monitor section of the ESP-IDF documentation '
                          'for a more detailed description and further examples.'),
-                        "default":
+                        'default':
                             None,
                     },
                     {
-                        "names": ["--monitor-baud", "-B"],
-                        "type":
+                        'names': ['--monitor-baud', '-B'],
+                        'type':
                             click.INT,
-                        "help": ("Baud rate for monitor. "
-                                 "If this option is not provided IDF_MONITOR_BAUD and MONITORBAUD "
-                                 "environment variables and project_description.json in build directory "
+                        'help': ('Baud rate for monitor. '
+                                 'If this option is not provided IDF_MONITOR_BAUD and MONITORBAUD '
+                                 'environment variables and project_description.json in build directory '
                                  "(generated by CMake from project's sdkconfig) "
-                                 "will be checked for default value."),
+                                 'will be checked for default value.'),
                     },
                     {
-                        "names": ["--encrypted", "-E"],
-                        "is_flag": True,
-                        "help": ("Enable encrypted flash targets. "
-                                 "IDF Monitor will invoke encrypted-flash and encrypted-app-flash targets "
-                                 "if this option is set. This option is set by default if IDF Monitor was invoked "
-                                 "together with encrypted-flash or encrypted-app-flash target."),
+                        'names': ['--encrypted', '-E'],
+                        'is_flag': True,
+                        'help': ('Enable encrypted flash targets. '
+                                 'IDF Monitor will invoke encrypted-flash and encrypted-app-flash targets '
+                                 'if this option is set. This option is set by default if IDF Monitor was invoked '
+                                 'together with encrypted-flash or encrypted-app-flash target.'),
                     }
                 ],
-                "order_dependencies": [
-                    "flash",
-                    "encrypted-flash",
-                    "partition_table-flash",
-                    "bootloader-flash",
-                    "app-flash",
-                    "encrypted-app-flash",
+                'order_dependencies': [
+                    'flash',
+                    'encrypted-flash',
+                    'partition_table-flash',
+                    'bootloader-flash',
+                    'app-flash',
+                    'encrypted-app-flash',
                 ],
             },
-            "partition_table-flash": {
-                "callback": flash,
-                "help": "Flash partition table only.",
-                "options": [baud_rate, port],
-                "order_dependencies": ["partition_table", "erase_flash"],
+            'partition_table-flash': {
+                'callback': flash,
+                'help': 'Flash partition table only.',
+                'options': [baud_rate, port],
+                'order_dependencies': ['partition_table', 'erase_flash'],
            },
-            "bootloader-flash": {
-                "callback": flash,
-                "help": "Flash bootloader only.",
-                "options": [baud_rate, port],
-                "order_dependencies": ["bootloader", "erase_flash"],
+            'bootloader-flash': {
+                'callback': flash,
+                'help': 'Flash bootloader only.',
+                'options': [baud_rate, port],
+                'order_dependencies': ['bootloader', 'erase_flash'],
             },
-            "app-flash": {
-                "callback": flash,
-                "help": "Flash the app only.",
-                "options": [baud_rate, port],
-                "order_dependencies": ["app", "erase_flash"],
+            'app-flash': {
+                'callback': flash,
+                'help': 'Flash the app only.',
+                'options': [baud_rate, port],
+                'order_dependencies': ['app', 'erase_flash'],
             },
-            "encrypted-app-flash": {
-                "callback": flash,
-                "help": "Flash the encrypted app only.",
-                "order_dependencies": ["app", "erase_flash"],
+            'encrypted-app-flash': {
+                'callback': flash,
+                'help': 'Flash the encrypted app only.',
+                'order_dependencies': ['app', 'erase_flash'],
             },
-            "encrypted-flash": {
-                "callback": flash,
-                "help": "Flash the encrypted project.",
-                "order_dependencies": ["all", "erase_flash"],
+            'encrypted-flash': {
+                'callback': flash,
+                'help': 'Flash the encrypted project.',
+                'order_dependencies': ['all', 'erase_flash'],
             },
         },
     }
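Note how _get_esptool_args above avoids hard-coding any esptool options: everything chip-specific comes from the flasher_args.json that the build writes. A condensed sketch of the same lookup (key names are taken from the hunk above; the esptool_argv helper itself is hypothetical):

    import json
    import os

    def esptool_argv(build_dir, port, baud):
        # flasher_args.json is produced by the build; 'extra_esptool_args' carries
        # the chip type and the before/after reset behaviour chosen by CMake
        with open(os.path.join(build_dir, 'flasher_args.json')) as f:
            flasher_args = json.load(f)
        extra = flasher_args['extra_esptool_args']
        argv = ['-p', port, '-b', str(baud),
                '--before', extra['before'], '--after', extra['after'],
                '--chip', extra['chip']]
        if not extra['stub']:
            argv.append('--no-stub')
        return argv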
diff --git a/tools/idf_py_actions/tools.py b/tools/idf_py_actions/tools.py
index 9dae020beb..6ba0702989 100644
--- a/tools/idf_py_actions/tools.py
+++ b/tools/idf_py_actions/tools.py
@@ -1,10 +1,11 @@
-import click
 import os
 import re
 import subprocess
 import sys
 from io import open
 
+import click
+
 from .constants import GENERATORS
 from .errors import FatalError
 
@@ -29,8 +30,8 @@ def realpath(path):
 
 def _idf_version_from_cmake():
-    version_path = os.path.join(os.environ["IDF_PATH"], "tools/cmake/version.cmake")
-    regex = re.compile(r"^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)")
+    version_path = os.path.join(os.environ['IDF_PATH'], 'tools/cmake/version.cmake')
+    regex = re.compile(r'^\s*set\s*\(\s*IDF_VERSION_([A-Z]{5})\s+(\d+)')
     ver = {}
     try:
         with open(version_path) as f:
@@ -40,9 +41,9 @@ def _idf_version_from_cmake():
             if m:
                 ver[m.group(1)] = m.group(2)
 
-        return "v%s.%s.%s" % (ver["MAJOR"], ver["MINOR"], ver["PATCH"])
+        return 'v%s.%s.%s' % (ver['MAJOR'], ver['MINOR'], ver['PATCH'])
     except (KeyError, OSError):
-        sys.stderr.write("WARNING: Cannot find ESP-IDF version in version.cmake\n")
+        sys.stderr.write('WARNING: Cannot find ESP-IDF version in version.cmake\n')
         return None
 
@@ -52,13 +53,13 @@ def idf_version():
     # Try to get version from git:
     try:
         version = subprocess.check_output([
-            "git",
-            "--git-dir=%s" % os.path.join(os.environ["IDF_PATH"], '.git'),
-            "--work-tree=%s" % os.environ["IDF_PATH"], "describe", "--tags", "--dirty"
+            'git',
+            '--git-dir=%s' % os.path.join(os.environ['IDF_PATH'], '.git'),
+            '--work-tree=%s' % os.environ['IDF_PATH'], 'describe', '--tags', '--dirty'
         ]).decode('utf-8', 'ignore').strip()
     except (subprocess.CalledProcessError, UnicodeError):
         # if failed, then try to parse cmake.version file
-        sys.stderr.write("WARNING: Git version unavailable, reading from source\n")
+        sys.stderr.write('WARNING: Git version unavailable, reading from source\n')
         version = _idf_version_from_cmake()
 
     return version
 
@@ -67,13 +68,13 @@ def idf_version():
 def run_tool(tool_name, args, cwd, env=dict()):
     def quote_arg(arg):
         " Quote 'arg' if necessary "
-        if " " in arg and not (arg.startswith('"') or arg.startswith("'")):
+        if ' ' in arg and not (arg.startswith('"') or arg.startswith("'")):
             return "'" + arg + "'"
         return arg
 
     args = [str(arg) for arg in args]
-    display_args = " ".join(quote_arg(arg) for arg in args)
-    print("Running %s in directory %s" % (tool_name, quote_arg(cwd)))
+    display_args = ' '.join(quote_arg(arg) for arg in args)
+    print('Running %s in directory %s' % (tool_name, quote_arg(cwd)))
     print('Executing "%s"...' % str(display_args))
 
     env_copy = dict(os.environ)
@@ -90,14 +91,14 @@ def run_tool(tool_name, args, cwd, env=dict()):
         # Note: we explicitly pass in os.environ here, as we may have set IDF_PATH there during startup
         subprocess.check_call(args, env=env_copy, cwd=cwd)
     except subprocess.CalledProcessError as e:
-        raise FatalError("%s failed with exit code %d" % (tool_name, e.returncode))
+        raise FatalError('%s failed with exit code %d' % (tool_name, e.returncode))
 
 
 def run_target(target_name, args, env=dict()):
-    generator_cmd = GENERATORS[args.generator]["command"]
+    generator_cmd = GENERATORS[args.generator]['command']
 
     if args.verbose:
-        generator_cmd += [GENERATORS[args.generator]["verbose_flag"]]
+        generator_cmd += [GENERATORS[args.generator]['verbose_flag']]
 
     run_tool(generator_cmd[0], generator_cmd + [target_name], args.build_dir, env)
 
@@ -123,7 +124,7 @@ def _parse_cmakecache(path):
         for line in f:
             # cmake cache lines look like: CMAKE_CXX_FLAGS_DEBUG:STRING=-g
             # groups are name, type, value
-            m = re.match(r"^([^#/:=]+):([^:=]+)=(.*)\n$", line)
+            m = re.match(r'^([^#/:=]+):([^:=]+)=(.*)\n$', line)
             if m:
                 result[m.group(1)] = m.group(3)
     return result
 
@@ -137,7 +138,7 @@ def _new_cmakecache_entries(cache_path, new_cache_entries):
     current_cache = _parse_cmakecache(cache_path)
 
     for entry in new_cache_entries:
-        key, value = entry.split("=", 1)
+        key, value = entry.split('=', 1)
         current_value = current_cache.get(key, None)
         if current_value is None or _strip_quotes(value) != current_value:
             return True
@@ -150,7 +151,7 @@ def _detect_cmake_generator(prog_name):
     Find the default cmake generator, if none was specified. Raises an exception if no valid generator is found.
     """
     for (generator_name, generator) in GENERATORS.items():
-        if executable_exists(generator["version"]):
+        if executable_exists(generator['version']):
             return generator_name
     raise FatalError("To use %s, either the 'ninja' or 'GNU make' build tool must be available in the PATH" % prog_name)
 
@@ -169,11 +170,11 @@ def ensure_build_directory(args, prog_name, always_run_cmake=False):
     # Verify the project directory
     if not os.path.isdir(project_dir):
         if not os.path.exists(project_dir):
-            raise FatalError("Project directory %s does not exist" % project_dir)
+            raise FatalError('Project directory %s does not exist' % project_dir)
         else:
-            raise FatalError("%s must be a project directory" % project_dir)
-    if not os.path.exists(os.path.join(project_dir, "CMakeLists.txt")):
-        raise FatalError("CMakeLists.txt not found in project directory %s" % project_dir)
+            raise FatalError('%s must be a project directory' % project_dir)
+    if not os.path.exists(os.path.join(project_dir, 'CMakeLists.txt')):
+        raise FatalError('CMakeLists.txt not found in project directory %s' % project_dir)
 
     # Verify/create the build directory
     build_dir = args.build_dir
@@ -181,33 +182,33 @@ def ensure_build_directory(args, prog_name, always_run_cmake=False):
         os.makedirs(build_dir)
 
     # Parse CMakeCache, if it exists
-    cache_path = os.path.join(build_dir, "CMakeCache.txt")
+    cache_path = os.path.join(build_dir, 'CMakeCache.txt')
     cache = _parse_cmakecache(cache_path) if os.path.exists(cache_path) else {}
 
     # Validate or set IDF_TARGET
     _guess_or_check_idf_target(args, prog_name, cache)
 
-    args.define_cache_entry.append("CCACHE_ENABLE=%d" % args.ccache)
+    args.define_cache_entry.append('CCACHE_ENABLE=%d' % args.ccache)
 
     if always_run_cmake or _new_cmakecache_entries(cache_path, args.define_cache_entry):
         if args.generator is None:
             args.generator = _detect_cmake_generator(prog_name)
         try:
             cmake_args = [
-                "cmake",
-                "-G",
+                'cmake',
+                '-G',
                 args.generator,
-                "-DPYTHON_DEPS_CHECKED=1",
-                "-DESP_PLATFORM=1",
+                '-DPYTHON_DEPS_CHECKED=1',
+                '-DESP_PLATFORM=1',
             ]
             if args.cmake_warn_uninitialized:
-                cmake_args += ["--warn-uninitialized"]
+                cmake_args += ['--warn-uninitialized']
 
             if args.define_cache_entry:
-                cmake_args += ["-D" + d for d in args.define_cache_entry]
+                cmake_args += ['-D' + d for d in args.define_cache_entry]
             cmake_args += [project_dir]
 
-            run_tool("cmake", cmake_args, cwd=args.build_dir)
+            run_tool('cmake', cmake_args, cwd=args.build_dir)
         except Exception:
             # don't allow partially valid CMakeCache.txt files,
             # to keep the "should I run cmake?" logic simple
@@ -219,7 +220,7 @@ def ensure_build_directory(args, prog_name, always_run_cmake=False):
     cache = _parse_cmakecache(cache_path) if os.path.exists(cache_path) else {}
 
     try:
-        generator = cache["CMAKE_GENERATOR"]
+        generator = cache['CMAKE_GENERATOR']
     except KeyError:
         generator = _detect_cmake_generator(prog_name)
     if args.generator is None:
@@ -229,7 +230,7 @@ def ensure_build_directory(args, prog_name, always_run_cmake=False):
                          (generator, args.generator, prog_name))
 
     try:
-        home_dir = cache["CMAKE_HOME_DIRECTORY"]
+        home_dir = cache['CMAKE_HOME_DIRECTORY']
         if realpath(home_dir) != realpath(project_dir):
             raise FatalError(
                 "Build directory '%s' configured for project '%s' not '%s'. Run '%s fullclean' to start again." %
@@ -240,14 +241,14 @@ def ensure_build_directory(args, prog_name, always_run_cmake=False):
 
 def merge_action_lists(*action_lists):
     merged_actions = {
-        "global_options": [],
-        "actions": {},
-        "global_action_callbacks": [],
+        'global_options': [],
+        'actions': {},
+        'global_action_callbacks': [],
     }
     for action_list in action_lists:
-        merged_actions["global_options"].extend(action_list.get("global_options", []))
-        merged_actions["actions"].update(action_list.get("actions", {}))
-        merged_actions["global_action_callbacks"].extend(action_list.get("global_action_callbacks", []))
+        merged_actions['global_options'].extend(action_list.get('global_options', []))
+        merged_actions['actions'].update(action_list.get('actions', {}))
+        merged_actions['global_action_callbacks'].extend(action_list.get('global_action_callbacks', []))
     return merged_actions
 
@@ -256,14 +257,14 @@ def get_sdkconfig_value(sdkconfig_file, key):
     Return the value of given key from sdkconfig_file.
     If sdkconfig_file does not exist or the option is not present, returns None.
     """
-    assert key.startswith("CONFIG_")
+    assert key.startswith('CONFIG_')
    if not os.path.exists(sdkconfig_file):
        return None
     # keep track of the last seen value for the given key
     value = None
     # if the value is quoted, this excludes the quotes from the value
     pattern = re.compile(r"^{}=\"?([^\"]*)\"?$".format(key))
-    with open(sdkconfig_file, "r") as f:
+    with open(sdkconfig_file, 'r') as f:
         for line in f:
             match = re.match(pattern, line)
             if match:
@@ -275,7 +276,7 @@ def is_target_supported(project_path, supported_targets):
     """
     Returns True if the active target is supported, or False otherwise.
     """
-    return get_sdkconfig_value(os.path.join(project_path, "sdkconfig"), 'CONFIG_IDF_TARGET') in supported_targets
+    return get_sdkconfig_value(os.path.join(project_path, 'sdkconfig'), 'CONFIG_IDF_TARGET') in supported_targets
 
 
 def _guess_or_check_idf_target(args, prog_name, cache):
@@ -288,14 +289,14 @@ def _guess_or_check_idf_target(args, prog_name, cache):
     """
     # Default locations of sdkconfig files.
     # FIXME: they may be overridden in the project or by a CMake variable (IDF-1369).
-    sdkconfig_path = os.path.join(args.project_dir, "sdkconfig")
-    sdkconfig_defaults_path = os.path.join(args.project_dir, "sdkconfig.defaults")
+    sdkconfig_path = os.path.join(args.project_dir, 'sdkconfig')
+    sdkconfig_defaults_path = os.path.join(args.project_dir, 'sdkconfig.defaults')
 
     # These are used to guess the target from sdkconfig, or set the default target by sdkconfig.defaults.
-    idf_target_from_sdkconfig = get_sdkconfig_value(sdkconfig_path, "CONFIG_IDF_TARGET")
-    idf_target_from_sdkconfig_defaults = get_sdkconfig_value(sdkconfig_defaults_path, "CONFIG_IDF_TARGET")
-    idf_target_from_env = os.environ.get("IDF_TARGET")
-    idf_target_from_cache = cache.get("IDF_TARGET")
+    idf_target_from_sdkconfig = get_sdkconfig_value(sdkconfig_path, 'CONFIG_IDF_TARGET')
+    idf_target_from_sdkconfig_defaults = get_sdkconfig_value(sdkconfig_defaults_path, 'CONFIG_IDF_TARGET')
+    idf_target_from_env = os.environ.get('IDF_TARGET')
+    idf_target_from_cache = cache.get('IDF_TARGET')
 
     if not cache and not idf_target_from_env:
         # CMakeCache.txt does not exist yet, and IDF_TARGET is not set in the environment.
@@ -303,7 +304,7 @@ def _guess_or_check_idf_target(args, prog_name, cache):
         if guessed_target:
             if args.verbose:
                 print("IDF_TARGET is not set, guessed '%s' from sdkconfig" % (guessed_target))
-            args.define_cache_entry.append("IDF_TARGET=" + guessed_target)
+            args.define_cache_entry.append('IDF_TARGET=' + guessed_target)
 
     elif idf_target_from_env:
         # Let's check that IDF_TARGET values are consistent
@@ -336,7 +337,7 @@ class TargetChoice(click.Choice):
     def convert(self, value, param, ctx):
         def normalize(str):
-            return str.lower().replace("-", "")
+            return str.lower().replace('-', '')
 
         saved_token_normalize_func = ctx.token_normalize_func
         ctx.token_normalize_func = normalize
diff --git a/tools/idf_py_actions/uf2_ext.py b/tools/idf_py_actions/uf2_ext.py
index 6635c860a0..5e0531ba1c 100644
--- a/tools/idf_py_actions/uf2_ext.py
+++ b/tools/idf_py_actions/uf2_ext.py
@@ -7,16 +7,16 @@ def action_extensions(base_actions, project_path):
         run_target(target_name, args)
 
     uf2_actions = {
-        "actions": {
-            "uf2": {
-                "callback": uf2_target,
-                "short_help": "Generate the UF2 binary with all the binaries included",
-                "dependencies": ["all"],
+        'actions': {
+            'uf2': {
+                'callback': uf2_target,
+                'short_help': 'Generate the UF2 binary with all the binaries included',
+                'dependencies': ['all'],
             },
-            "uf2-app": {
-                "callback": uf2_target,
-                "short_help": "Generate an UF2 binary for the application only",
-                "dependencies": ["all"],
+            'uf2-app': {
+                'callback': uf2_target,
+                'short_help': 'Generate an UF2 binary for the application only',
+                'dependencies': ['all'],
             },
         }
     }
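TargetChoice (in the tools.py hunk above) makes target names case- and dash-insensitive by swapping click's token_normalize_func during conversion. The normalization rule itself is one line and easy to check standalone:

    def normalize(s):
        # 'ESP32-S2', 'esp32s2' and 'Esp32-S2' all collapse to the same token
        return s.lower().replace('-', '')

    assert normalize('ESP32-S2') == 'esp32s2'
    assert normalize('esp32s2') == 'esp32s2'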
""" # output section header, ie '.iram0.text 0x0000000040080400 0x129a5' - RE_SECTION_HEADER = re.compile(r"(?P[^ ]+) +0x(?P
[\da-f]+) +0x(?P[\da-f]+)$") + RE_SECTION_HEADER = re.compile(r'(?P[^ ]+) +0x(?P
[\da-f]+) +0x(?P[\da-f]+)$') # source file line, ie # 0x0000000040080400 0xa4 /home/gus/esp/32/idf/examples/get-started/hello_world/build/esp32/libesp32.a(cpu_start.o) # cmake build system links some object files directly, not part of any archive, so make that part optional # .xtensa.info 0x0000000000000000 0x38 CMakeFiles/hello-world.elf.dir/project_elf_src.c.obj - RE_SOURCE_LINE = re.compile(r"\s*(?P\S*) +0x(?P
[\da-f]+) +0x(?P[\da-f]+) (?P.+\.a)?\(?(?P.+\.(o|obj))\)?") + RE_SOURCE_LINE = re.compile(r'\s*(?P\S*) +0x(?P
[\da-f]+) +0x(?P[\da-f]+) (?P.+\.a)?\(?(?P.+\.(o|obj))\)?') # Fast check to see if line is a potential source line before running the slower full regex against it - RE_PRE_FILTER = re.compile(r".*\.(o|obj)\)?") + RE_PRE_FILTER = re.compile(r'.*\.(o|obj)\)?') # Check for lines which only contain the sym name (and rest is on following lines) - RE_SYMBOL_ONLY_LINE = re.compile(r"^ (?P\S*)$") + RE_SYMBOL_ONLY_LINE = re.compile(r'^ (?P\S*)$') sections = {} section = None sym_backup = None for line in map_file: - if line.strip() == "Cross Reference Table": + if line.strip() == 'Cross Reference Table': # stop processing lines because we are at the next section in the map file break m = RE_SECTION_HEADER.match(line) if m is not None: # start of a new section section = { - "name": m.group("name"), - "address": int(m.group("address"), 16), - "size": int(m.group("size"), 16), - "sources": [], + 'name': m.group('name'), + 'address': int(m.group('address'), 16), + 'size': int(m.group('size'), 16), + 'sources': [], } - sections[section["name"]] = section + sections[section['name']] = section continue if section is not None: m = RE_SYMBOL_ONLY_LINE.match(line) if m is not None: # In some cases the section name appears on the previous line, back it up in here - sym_backup = m.group("sym_name") + sym_backup = m.group('sym_name') continue if not RE_PRE_FILTER.match(line): @@ -305,21 +305,21 @@ def load_sections(map_file): m = RE_SOURCE_LINE.match(line) if m is not None: # input source file details=ma,e - sym_name = m.group("sym_name") if len(m.group("sym_name")) > 0 else sym_backup - archive = m.group("archive") + sym_name = m.group('sym_name') if len(m.group('sym_name')) > 0 else sym_backup + archive = m.group('archive') if archive is None: # optional named group "archive" was not matched, so assign a value to it - archive = "(exe)" + archive = '(exe)' source = { - "size": int(m.group("size"), 16), - "address": int(m.group("address"), 16), - "archive": os.path.basename(archive), - "object_file": os.path.basename(m.group("object_file")), - "sym_name": sym_name, + 'size': int(m.group('size'), 16), + 'address': int(m.group('address'), 16), + 'archive': os.path.basename(archive), + 'object_file': os.path.basename(m.group('object_file')), + 'sym_name': sym_name, } - source["file"] = "%s:%s" % (source["archive"], source["object_file"]) - section["sources"] += [source] + source['file'] = '%s:%s' % (source['archive'], source['object_file']) + section['sources'] += [source] return sections @@ -339,12 +339,12 @@ class MemRegNames(object): def main(): - parser = argparse.ArgumentParser(description="idf_size - a tool to print size information from an IDF MAP file") + parser = argparse.ArgumentParser(description='idf_size - a tool to print size information from an IDF MAP file') parser.add_argument( '--json', - help="Output results as JSON", - action="store_true") + help='Output results as JSON', + action='store_true') parser.add_argument( 'map_file', help='MAP file produced by linker', @@ -373,7 +373,7 @@ def main(): '--output-file', type=argparse.FileType('w'), default=sys.stdout, - help="Print output to the specified file instead of stdout") + help='Print output to the specified file instead of stdout') args = parser.parse_args() @@ -415,9 +415,9 @@ def main(): args.another_map_file, mem_reg_diff, memory_config_diff, sections_diff) if args.archives: - output += get_detailed_sizes(mem_reg, sections, "archive", "Archive File", args.json, sections_diff) + output += get_detailed_sizes(mem_reg, sections, 'archive', 
'Archive File', args.json, sections_diff) if args.files: - output += get_detailed_sizes(mem_reg, sections, "file", "Object File", args.json, sections_diff) + output += get_detailed_sizes(mem_reg, sections, 'file', 'Object File', args.json, sections_diff) if args.archive_details: output += get_archive_symbols(mem_reg, sections, args.archive_details, args.json, sections_diff) @@ -657,13 +657,13 @@ class StructureForDetailedSizes(object): """ result = {} for _, section in iteritems(sections): - for s in section["sources"]: + for s in section['sources']: if not s[key] in result: result[s[key]] = {} archive = result[s[key]] - if not section["name"] in archive: - archive[section["name"]] = 0 - archive[section["name"]] += s["size"] + if not section['name'] in archive: + archive[section['name']] = 0 + archive[section['name']] += s['size'] return result @staticmethod diff --git a/tools/idf_tools.py b/tools/idf_tools.py index 7e59dc9737..5dc5d2c2b1 100755 --- a/tools/idf_tools.py +++ b/tools/idf_tools.py @@ -40,23 +40,23 @@ # See the License for the specific language governing permissions and # limitations under the License. +import argparse +import contextlib +import copy +import errno +import functools +import hashlib import json import os +import platform +import re +import shutil +import ssl import subprocess import sys -import argparse -import re -import platform -import hashlib import tarfile import zipfile -import errno -import shutil -import functools -import copy from collections import OrderedDict, namedtuple -import ssl -import contextlib try: import typing # noqa: F401 @@ -64,10 +64,10 @@ except ImportError: pass try: - from urllib.request import urlopen from urllib.error import ContentTooShortError + from urllib.request import urlopen except ImportError: - from urllib import urlopen, ContentTooShortError + from urllib import ContentTooShortError, urlopen try: from exceptions import WindowsError @@ -277,7 +277,7 @@ def get_file_size_sha256(filename, block_size=65536): def report_progress(count, block_size, total_size): percent = int(count * block_size * 100 / total_size) percent = min(100, percent) - sys.stdout.write("\r%d%%" % percent) + sys.stdout.write('\r%d%%' % percent) sys.stdout.flush() @@ -319,13 +319,13 @@ def urlretrieve_ctx(url, filename, reporthook=None, data=None, context=None): # urlopen doesn't have context argument in Python <=2.7.9 extra_urlopen_args = {} if context: - extra_urlopen_args["context"] = context + extra_urlopen_args['context'] = context with contextlib.closing(urlopen(url, data, **extra_urlopen_args)) as fp: headers = fp.info() # Just return the local path and the "headers" for file:// # URLs. No sense in performing a copy unless requested. - if url_type == "file" and not filename: + if url_type == 'file' and not filename: return os.path.normpath(path), headers # Handle temporary file setup. 
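The regex changes in idf_size.py are quote-style only; the named groups (name, address, size, sym_name, archive, object_file) are unchanged and are what load_sections reads back through m.group(...). A quick standalone check of the section-header pattern against the sample line quoted in the code comment:

    import re

    RE_SECTION_HEADER = re.compile(r'(?P<name>[^ ]+) +0x(?P<address>[\da-f]+) +0x(?P<size>[\da-f]+)$')

    m = RE_SECTION_HEADER.match('.iram0.text 0x0000000040080400 0x129a5')
    assert m.groupdict() == {'name': '.iram0.text',
                             'address': '0000000040080400',
                             'size': '129a5'}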
diff --git a/tools/idf_tools.py b/tools/idf_tools.py
index 7e59dc9737..5dc5d2c2b1 100755
--- a/tools/idf_tools.py
+++ b/tools/idf_tools.py
@@ -40,23 +40,23 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import argparse
+import contextlib
+import copy
+import errno
+import functools
+import hashlib
 import json
 import os
+import platform
+import re
+import shutil
+import ssl
 import subprocess
 import sys
-import argparse
-import re
-import platform
-import hashlib
 import tarfile
 import zipfile
-import errno
-import shutil
-import functools
-import copy
 from collections import OrderedDict, namedtuple
-import ssl
-import contextlib
 
 try:
     import typing  # noqa: F401
@@ -64,10 +64,10 @@ except ImportError:
     pass
 
 try:
-    from urllib.request import urlopen
     from urllib.error import ContentTooShortError
+    from urllib.request import urlopen
 except ImportError:
-    from urllib import urlopen, ContentTooShortError
+    from urllib import ContentTooShortError, urlopen
 
 try:
     from exceptions import WindowsError
@@ -277,7 +277,7 @@ def get_file_size_sha256(filename, block_size=65536):
 def report_progress(count, block_size, total_size):
     percent = int(count * block_size * 100 / total_size)
     percent = min(100, percent)
-    sys.stdout.write("\r%d%%" % percent)
+    sys.stdout.write('\r%d%%' % percent)
     sys.stdout.flush()
 
@@ -319,13 +319,13 @@ def urlretrieve_ctx(url, filename, reporthook=None, data=None, context=None):
     # urlopen doesn't have context argument in Python <=2.7.9
     extra_urlopen_args = {}
     if context:
-        extra_urlopen_args["context"] = context
+        extra_urlopen_args['context'] = context
     with contextlib.closing(urlopen(url, data, **extra_urlopen_args)) as fp:
         headers = fp.info()
 
         # Just return the local path and the "headers" for file://
         # URLs. No sense in performing a copy unless requested.
-        if url_type == "file" and not filename:
+        if url_type == 'file' and not filename:
             return os.path.normpath(path), headers
 
         # Handle temporary file setup.
@@ -334,7 +334,7 @@ def urlretrieve_ctx(url, filename, reporthook=None, data=None, context=None):
     with tfp:
         result = filename, headers
         bs = 1024 * 8
-        size = int(headers.get("content-length", -1))
+        size = int(headers.get('content-length', -1))
         read = 0
         blocknum = 0
 
@@ -353,7 +353,7 @@ def urlretrieve_ctx(url, filename, reporthook=None, data=None, context=None):
 
     if size >= 0 and read < size:
         raise ContentTooShortError(
-            "retrieval incomplete: got only %i out of %i bytes"
+            'retrieval incomplete: got only %i out of %i bytes'
             % (read, size), result)
 
     return result
@@ -571,7 +571,7 @@ class IDFTool(object):
             raise ToolExecError('Command {} has returned non-zero exit code ({})\n'.format(
                 ' '.join(self._current_options.version_cmd), e.returncode))
 
-        in_str = version_cmd_result.decode("utf-8")
+        in_str = version_cmd_result.decode('utf-8')
         match = re.search(self._current_options.version_regex, in_str)
         if not match:
             return UNKNOWN_VERSION
@@ -677,7 +677,7 @@ class IDFTool(object):
             ctx = None
             # For dl.espressif.com, add the ISRG x1 root certificate.
             # This works around the issue with outdated certificate stores in some installations.
-            if "dl.espressif.com" in url:
+            if 'dl.espressif.com' in url:
                 try:
                     ctx = ssl.create_default_context()
                     ctx.load_verify_locations(cadata=ISRG_X1_ROOT_CERT)
@@ -687,7 +687,7 @@ class IDFTool(object):
                     pass
 
             urlretrieve_ctx(url, local_temp_path, report_progress if not global_non_interactive else None, context=ctx)
-            sys.stdout.write("\rDone\n")
+            sys.stdout.write('\rDone\n')
         except Exception as e:
             # urlretrieve could throw different exceptions, e.g. IOError when the server is down
             # Errors are ignored because the downloaded file is checked a couple of lines later.
@@ -959,7 +959,7 @@ def get_python_env_path():
     version_file_path = os.path.join(global_idf_path, 'version.txt')
     if os.path.exists(version_file_path):
-        with open(version_file_path, "r") as version_file:
+        with open(version_file_path, 'r') as version_file:
             idf_version_str = version_file.read()
     else:
         try:
@@ -1209,7 +1209,7 @@ def apply_github_assets_option(tool_download_obj):
     IDF_GITHUB_ASSETS is set. The github.com part of the URL will be replaced.
     """
     try:
-        github_assets = os.environ["IDF_GITHUB_ASSETS"].strip()
+        github_assets = os.environ['IDF_GITHUB_ASSETS'].strip()
     except KeyError:
         return  # no IDF_GITHUB_ASSETS
     if not github_assets:  # variable exists but is empty
@@ -1350,7 +1350,7 @@ def action_install_python_env(args):
         info('Creating a new Python environment in {}'.format(idf_python_env_path))
 
         try:
-            import virtualenv # noqa: F401
+            import virtualenv  # noqa: F401
         except ImportError:
             info('Installing virtualenv')
             subprocess.check_call([sys.executable, '-m', 'pip', 'install', '--user', 'virtualenv'],
@@ -1454,17 +1454,17 @@ def action_gen_doc(args):
     def print_out(text):
         f.write(text + '\n')
 
-    print_out(".. |zwsp| unicode:: U+200B")
-    print_out("   :trim:")
-    print_out("")
+    print_out('.. |zwsp| unicode:: U+200B')
+    print_out('   :trim:')
+    print_out('')
 
-    idf_gh_url = "https://github.com/espressif/esp-idf"
+    idf_gh_url = 'https://github.com/espressif/esp-idf'
     for tool_name, tool_obj in tools_info.items():
         info_url = tool_obj.options.info_url
-        if idf_gh_url + "/tree" in info_url:
-            info_url = re.sub(idf_gh_url + r"/tree/\w+/(.*)", r":idf:`\1`", info_url)
+        if idf_gh_url + '/tree' in info_url:
+            info_url = re.sub(idf_gh_url + r'/tree/\w+/(.*)', r':idf:`\1`', info_url)
 
-        license_url = "https://spdx.org/licenses/" + tool_obj.options.license
+        license_url = 'https://spdx.org/licenses/' + tool_obj.options.license
 
         print_out("""
 .. _tool-{name}:
@@ -1502,9 +1502,9 @@ More info: {info_url}
 
         if install_type == IDFTool.INSTALL_NEVER:
             continue
         elif install_type == IDFTool.INSTALL_ALWAYS:
-            install_type_str = "required"
+            install_type_str = 'required'
         elif install_type == IDFTool.INSTALL_ON_REQUEST:
-            install_type_str = "optional"
+            install_type_str = 'optional'
         else:
             raise NotImplementedError()
 
@@ -1615,7 +1615,7 @@ def main(argv):
     if args.idf_path:
         global_idf_path = args.idf_path
     if not global_idf_path:
-        global_idf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), ".."))
+        global_idf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
     os.environ['IDF_PATH'] = global_idf_path
 
     global global_idf_tools_path
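report_progress above follows the classic urlretrieve reporthook signature: it is called once per block with (count, block_size, total_size) and clamps the computed percentage at 100. The contract can be exercised without any network access by feeding it synthetic block counts (a sketch, not the tool's own test code):

    def report_progress(count, block_size, total_size):
        # same arithmetic as in idf_tools.py: blocks read so far -> percentage
        percent = min(100, int(count * block_size * 100 / total_size))
        print('\r%d%%' % percent, end='')

    # simulate a 100 KiB download arriving in 8 KiB blocks
    for blocknum in range(1, 14):
        report_progress(blocknum, 8 * 1024, 100 * 1024)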
diff --git a/tools/kconfig_new/confgen.py b/tools/kconfig_new/confgen.py
index d03d76fb38..6e6733f8b4 100755
--- a/tools/kconfig_new/confgen.py
+++ b/tools/kconfig_new/confgen.py
@@ -21,10 +21,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from __future__ import print_function
-from future.utils import iteritems
+
 import argparse
 import json
-import kconfiglib
 import os
 import os.path
 import re
@@ -32,11 +31,13 @@ import sys
 import tempfile
 
 import gen_kconfig_doc
+import kconfiglib
+from future.utils import iteritems
 
-__version__ = "0.1"
+__version__ = '0.1'
 
-if "IDF_CMAKE" not in os.environ:
-    os.environ["IDF_CMAKE"] = ""
+if 'IDF_CMAKE' not in os.environ:
+    os.environ['IDF_CMAKE'] = ''
 
 
 class DeprecatedOptions(object):
@@ -168,7 +169,7 @@ class DeprecatedOptions(object):
         def _opt_defined(opt):
             if not opt.visibility:
                 return False
-            return not (opt.orig_type in (kconfiglib.BOOL, kconfiglib.TRISTATE) and opt.str_value == "n")
+            return not (opt.orig_type in (kconfiglib.BOOL, kconfiglib.TRISTATE) and opt.str_value == 'n')
 
         if len(self.r_dic) > 0:
             with open(path_output, 'a') as f_o:
@@ -241,7 +242,7 @@ def main():
         sys.exit(1)
 
     try:
-        args.env = [(name,value) for (name,value) in (e.split("=",1) for e in args.env)]
+        args.env = [(name,value) for (name,value) in (e.split('=',1) for e in args.env)]
     except ValueError:
         print("--env arguments must each contain =. To unset an environment variable, use 'ENV='")
         sys.exit(1)
@@ -258,7 +259,7 @@ def main():
     config.warn_assign_override = False
 
     sdkconfig_renames = [args.sdkconfig_rename] if args.sdkconfig_rename else []
-    sdkconfig_renames += os.environ.get("COMPONENT_SDKCONFIG_RENAMES", "").split()
+    sdkconfig_renames += os.environ.get('COMPONENT_SDKCONFIG_RENAMES', '').split()
     deprecated_options = DeprecatedOptions(config.config_prefix, path_rename_files=sdkconfig_renames)
 
     if len(args.defaults) > 0:
@@ -275,13 +276,13 @@ def main():
         # always load defaults first, so any items which are not defined in that config
         # will have the default defined in the defaults file
         for name in args.defaults:
-            print("Loading defaults file %s..." % name)
+            print('Loading defaults file %s...' % name)
             if not os.path.exists(name):
-                raise RuntimeError("Defaults file not found: %s" % name)
+                raise RuntimeError('Defaults file not found: %s' % name)
             try:
-                with tempfile.NamedTemporaryFile(prefix="confgen_tmp", delete=False) as f:
+                with tempfile.NamedTemporaryFile(prefix='confgen_tmp', delete=False) as f:
                     temp_file1 = f.name
-                with tempfile.NamedTemporaryFile(prefix="confgen_tmp", delete=False) as f:
+                with tempfile.NamedTemporaryFile(prefix='confgen_tmp', delete=False) as f:
                     temp_file2 = f.name
                 deprecated_options.replace(sdkconfig_in=name, sdkconfig_out=temp_file1)
                 _replace_empty_assignments(temp_file1, temp_file2)
@@ -296,7 +297,7 @@ def main():
     # If config file previously exists, load it
     if args.config and os.path.exists(args.config):
         # ... but replace deprecated options before that
-        with tempfile.NamedTemporaryFile(prefix="confgen_tmp", delete=False) as f:
+        with tempfile.NamedTemporaryFile(prefix='confgen_tmp', delete=False) as f:
             temp_file = f.name
         try:
             deprecated_options.replace(sdkconfig_in=args.config, sdkconfig_out=temp_file)
@@ -315,7 +316,7 @@ def main():
 
     # Output the files specified in the arguments
     for output_type, filename in args.output:
-        with tempfile.NamedTemporaryFile(prefix="confgen_tmp", delete=False) as f:
+        with tempfile.NamedTemporaryFile(prefix='confgen_tmp', delete=False) as f:
             temp_file = f.name
         try:
             output_function = OUTPUT_FORMATS[output_type]
@@ -344,7 +345,7 @@ def write_makefile(deprecated_options, config, filename):
 # Espressif IoT Development Framework (ESP-IDF) Project Makefile Configuration
 #
 """
-    with open(filename, "w") as f:
+    with open(filename, 'w') as f:
         tmp_dep_lines = []
         f.write(CONFIG_HEADING)
 
@@ -355,12 +356,12 @@ def write_makefile(deprecated_options, config, filename):
                 try:
                     value = int(value)
                 except ValueError:
-                    value = ""
+                    value = ''
             elif orig_type == kconfiglib.HEX:
                 try:
                     value = hex(int(value, 16))  # ensure 0x prefix
                 except ValueError:
-                    value = ""
+                    value = ''
             elif orig_type == kconfiglib.STRING:
                 value = '"{}"'.format(kconfiglib.escape(value))
             else:
@@ -400,7 +401,7 @@ def write_header(deprecated_options, config, filename):
 
 def write_cmake(deprecated_options, config, filename):
-    with open(filename, "w") as f:
+    with open(filename, 'w') as f:
         tmp_dep_list = []
         write = f.write
         prefix = config.config_prefix
@@ -420,8 +421,8 @@ def write_cmake(deprecated_options, config, filename):
             if sym.config_string:
                 val = sym.str_value
-                if sym.orig_type in (kconfiglib.BOOL, kconfiglib.TRISTATE) and val == "n":
-                    val = ""  # write unset values as empty variables
+                if sym.orig_type in (kconfiglib.BOOL, kconfiglib.TRISTATE) and val == 'n':
+                    val = ''  # write unset values as empty variables
                 elif sym.orig_type == kconfiglib.STRING:
                     val = kconfiglib.escape(val)
                 elif sym.orig_type == kconfiglib.HEX:
@@ -436,7 +437,7 @@ def write_cmake(deprecated_options, config, filename):
         for n in config.node_iter():
             write_node(n)
-        write("set(CONFIGS_LIST {})".format(";".join(configs_list)))
+        write('set(CONFIGS_LIST {})'.format(';'.join(configs_list)))
 
         if len(tmp_dep_list) > 0:
             write('\n# List of deprecated options for backward compatibility\n')
@@ -454,7 +455,7 @@ def get_json_values(config):
         if sym.config_string:
             val = sym.str_value
             if sym.type in [kconfiglib.BOOL, kconfiglib.TRISTATE]:
-                val = (val != "n")
+                val = (val != 'n')
             elif sym.type == kconfiglib.HEX:
                 val = int(val, 16)
             elif sym.type == kconfiglib.INT:
@@ -467,7 +468,7 @@ def get_json_values(config):
 
 def write_json(deprecated_options, config, filename):
     config_dict = get_json_values(config)
-    with open(filename, "w") as f:
+    with open(filename, 'w') as f:
         json.dump(config_dict, f, indent=4, sort_keys=True)
 
@@ -491,7 +492,7 @@ def get_menu_node_id(node):
         result.append(slug)
         node = node.parent
 
-    result = "-".join(reversed(result))
+    result = '-'.join(reversed(result))
     return result
 
@@ -502,7 +503,7 @@ def write_json_menus(deprecated_options, config, filename):
     def write_node(node):
         try:
-            json_parent = node_lookup[node.parent]["children"]
+            json_parent = node_lookup[node.parent]['children']
         except KeyError:
             assert node.parent not in node_lookup  # if fails, we have a parent node with no "children" entity (ie a bug)
             json_parent = result  # root level node
@@ -521,16 +522,16 @@ def write_json_menus(deprecated_options, config, filename):
         new_json = None
 
         if node.item == kconfiglib.MENU or is_menuconfig:
-            new_json = {"type": "menu",
-                        "title": node.prompt[0],
-                        "depends_on": depends,
-                        "children": [],
+            new_json = {'type': 'menu',
+                        'title': node.prompt[0],
+                        'depends_on': depends,
+                        'children': [],
                         }
             if is_menuconfig:
                 sym = node.item
-                new_json["name"] = sym.name
-                new_json["help"] = node.help
-                new_json["is_menuconfig"] = is_menuconfig
+                new_json['name'] = sym.name
+                new_json['help'] = node.help
+                new_json['is_menuconfig'] = is_menuconfig
                 greatest_range = None
                 if len(sym.ranges) > 0:
                     # Note: Evaluating the condition using kconfiglib's expr_value
@@ -538,7 +539,7 @@ def write_json_menus(deprecated_options, config, filename):
                     for min_range, max_range, cond_expr in sym.ranges:
                         if kconfiglib.expr_value(cond_expr):
                             greatest_range = [min_range, max_range]
-                new_json["range"] = greatest_range
+                new_json['range'] = greatest_range
 
         elif isinstance(node.item, kconfiglib.Symbol):
             sym = node.item
@@ -553,38 +554,38 @@ def write_json_menus(deprecated_options, config, filename):
                     break
 
             new_json = {
-                "type": kconfiglib.TYPE_TO_STR[sym.type],
-                "name": sym.name,
-                "title": node.prompt[0] if node.prompt else None,
-                "depends_on": depends,
-                "help": node.help,
-                "range": greatest_range,
-                "children": [],
+                'type': kconfiglib.TYPE_TO_STR[sym.type],
+                'name': sym.name,
+                'title': node.prompt[0] if node.prompt else None,
+                'depends_on': depends,
+                'help': node.help,
+                'range': greatest_range,
+                'children': [],
             }
         elif isinstance(node.item, kconfiglib.Choice):
             choice = node.item
             new_json = {
-                "type": "choice",
-                "title": node.prompt[0],
-                "name": choice.name,
-                "depends_on": depends,
-                "help": node.help,
-                "children": []
+                'type': 'choice',
+                'title': node.prompt[0],
+                'name': choice.name,
+                'depends_on': depends,
+                'help': node.help,
+                'children': []
             }
 
         if new_json:
             node_id = get_menu_node_id(node)
             if node_id in existing_ids:
-                raise RuntimeError("Config file contains two items with the same id: %s (%s). " +
-                                   "Please rename one of these items to avoid ambiguity." % (node_id, node.prompt[0]))
+                raise RuntimeError('Config file contains two items with the same id: %s (%s). ' +
+                                   'Please rename one of these items to avoid ambiguity.' % (node_id, node.prompt[0]))
-            new_json["id"] = node_id
+            new_json['id'] = node_id
 
             json_parent.append(new_json)
             node_lookup[node] = new_json
 
     for n in config.node_iter():
         write_node(n)
 
-    with open(filename, "w") as f:
+    with open(filename, 'w') as f:
         f.write(json.dumps(result, sort_keys=True, indent=4))
 
@@ -601,26 +602,26 @@ def write_docs(deprecated_options, config, filename):
 
 
 def update_if_changed(source, destination):
-    with open(source, "r") as f:
+    with open(source, 'r') as f:
         source_contents = f.read()
 
     if os.path.exists(destination):
-        with open(destination, "r") as f:
+        with open(destination, 'r') as f:
             dest_contents = f.read()
         if source_contents == dest_contents:
             return  # nothing to update
 
-    with open(destination, "w") as f:
+    with open(destination, 'w') as f:
         f.write(source_contents)
 
 
-OUTPUT_FORMATS = {"config": write_config,
-                  "makefile": write_makefile,  # only used with make in order to generate auto.conf
-                  "header": write_header,
-                  "cmake": write_cmake,
-                  "docs": write_docs,
-                  "json": write_json,
-                  "json_menus": write_json_menus,
+OUTPUT_FORMATS = {'config': write_config,
+                  'makefile': write_makefile,  # only used with make in order to generate auto.conf
+                  'header': write_header,
+                  'cmake': write_cmake,
+                  'docs': write_docs,
+                  'json': write_json,
+                  'json_menus': write_json_menus,
                   }
 
@@ -635,5 +636,5 @@ if __name__ == '__main__':
     try:
         main()
     except FatalError as e:
-        print("A fatal error occurred: %s" % e)
+        print('A fatal error occurred: %s' % e)
         sys.exit(2)
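confgen.py above and confserver.py below both parse --env arguments with e.split('=', 1); the maxsplit of 1 is what allows values themselves to contain further '=' characters, and a missing '=' surfaces as the ValueError the tools catch. The idiom in isolation:

    env_args = ['IDF_TARGET=esp32', 'EXTRA=a=b=c']
    env = dict(e.split('=', 1) for e in env_args)
    assert env == {'IDF_TARGET': 'esp32', 'EXTRA': 'a=b=c'}

    # a malformed argument (no '=') raises ValueError, which the tools
    # report as the "--env arguments must each contain =." message
    try:
        dict(e.split('=', 1) for e in ['BROKEN'])
    except ValueError:
        pass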
diff --git a/tools/kconfig_new/confserver.py b/tools/kconfig_new/confserver.py
index b84a3d0d41..80b0e113ee 100755
--- a/tools/kconfig_new/confserver.py
+++ b/tools/kconfig_new/confserver.py
@@ -4,14 +4,15 @@
 # with a caller
 #
 from __future__ import print_function
+
 import argparse
-import confgen
 import json
-import kconfiglib
 import os
 import sys
 import tempfile
 
+import confgen
+import kconfiglib
 from confgen import FatalError, __version__
 
 # Min/Max supported protocol versions
@@ -47,15 +48,15 @@ def main():
     args = parser.parse_args()
 
     if args.version < MIN_PROTOCOL_VERSION:
-        print("Version %d is older than minimum supported protocol version %d. Client is much older than ESP-IDF version?" %
+        print('Version %d is older than minimum supported protocol version %d. Client is much older than ESP-IDF version?' %
               (args.version, MIN_PROTOCOL_VERSION))
 
     if args.version > MAX_PROTOCOL_VERSION:
-        print("Version %d is newer than maximum supported protocol version %d. Client is newer than ESP-IDF version?" %
+        print('Version %d is newer than maximum supported protocol version %d. Client is newer than ESP-IDF version?' %
               (args.version, MAX_PROTOCOL_VERSION))
 
     try:
-        args.env = [(name,value) for (name,value) in (e.split("=",1) for e in args.env)]
+        args.env = [(name,value) for (name,value) in (e.split('=',1) for e in args.env)]
     except ValueError:
         print("--env arguments must each contain =. To unset an environment variable, use 'ENV='")
         sys.exit(1)
 
@@ -73,7 +74,7 @@ def main():
 def run_server(kconfig, sdkconfig, sdkconfig_rename, default_version=MAX_PROTOCOL_VERSION):
     config = kconfiglib.Kconfig(kconfig)
     sdkconfig_renames = [sdkconfig_rename] if sdkconfig_rename else []
-    sdkconfig_renames += os.environ.get("COMPONENT_SDKCONFIG_RENAMES", "").split()
+    sdkconfig_renames += os.environ.get('COMPONENT_SDKCONFIG_RENAMES', '').split()
     deprecated_options = confgen.DeprecatedOptions(config.config_prefix, path_rename_files=sdkconfig_renames)
     f_o = tempfile.NamedTemporaryFile(mode='w+b', delete=False)
     try:
@@ -85,7 +86,7 @@ def run_server(kconfig, sdkconfig, sdkconfig_rename, default_version=MAX_PROTOCOL_VERSION):
         os.unlink(f_o.name)
     config.load_config(sdkconfig)
 
-    print("Server running, waiting for requests on stdin...", file=sys.stderr)
+    print('Server running, waiting for requests on stdin...', file=sys.stderr)
 
     config_dict = confgen.get_json_values(config)
     ranges_dict = get_ranges(config)
@@ -94,11 +95,11 @@ def run_server(kconfig, sdkconfig, sdkconfig_rename, default_version=MAX_PROTOCOL_VERSION):
     if default_version == 1:
         # V1: no 'visibility' key, send value None for any invisible item
         values_dict = dict((k, v if visible_dict[k] else False) for (k,v) in config_dict.items())
-        json.dump({"version": 1, "values": values_dict, "ranges": ranges_dict}, sys.stdout)
+        json.dump({'version': 1, 'values': values_dict, 'ranges': ranges_dict}, sys.stdout)
     else:
         # V2 onwards: separate visibility from version
-        json.dump({"version": default_version, "values": config_dict, "ranges": ranges_dict, "visible": visible_dict}, sys.stdout)
-    print("\n")
+        json.dump({'version': default_version, 'values': config_dict, 'ranges': ranges_dict, 'visible': visible_dict}, sys.stdout)
+    print('\n')
     sys.stdout.flush()
 
     while True:
@@ -108,18 +109,18 @@ def run_server(kconfig, sdkconfig, sdkconfig_rename, default_version=MAX_PROTOCOL_VERSION):
         try:
             req = json.loads(line)
         except ValueError as e:  # json module throws JSONDecodeError (sublcass of ValueError) on Py3 but ValueError on Py2
-            response = {"version": default_version, "error": ["JSON formatting error: %s" % e]}
+            response = {'version': default_version, 'error': ['JSON formatting error: %s' % e]}
             json.dump(response, sys.stdout)
-            print("\n")
+            print('\n')
             sys.stdout.flush()
             continue
         before = confgen.get_json_values(config)
         before_ranges = get_ranges(config)
         before_visible = get_visible(config)
 
-        if "load" in req:  # load a new sdkconfig
+        if 'load' in req:  # load a new sdkconfig
 
-            if req.get("version", default_version) == 1:
+            if req.get('version', default_version) == 1:
                 # for V1 protocol, send all items when loading new sdkconfig.
                 # (V2+ will only send changes, same as when setting an item)
                 before = {}
                 before_ranges = {}
                 before_visible = {}
 
             # if no new filename is supplied, use existing sdkconfig path, otherwise update the path
-            if req["load"] is None:
-                req["load"] = sdkconfig
+            if req['load'] is None:
+                req['load'] = sdkconfig
             else:
-                sdkconfig = req["load"]
+                sdkconfig = req['load']
 
-        if "save" in req:
-            if req["save"] is None:
-                req["save"] = sdkconfig
+        if 'save' in req:
+            if req['save'] is None:
+                req['save'] = sdkconfig
             else:
-                sdkconfig = req["save"]
+                sdkconfig = req['save']
 
         error = handle_request(deprecated_options, config, req)
 
@@ -147,51 +148,51 @@ def run_server(kconfig, sdkconfig, sdkconfig_rename, default_version=MAX_PROTOCOL_VERSION):
         values_diff = diff(before, after)
         ranges_diff = diff(before_ranges, after_ranges)
         visible_diff = diff(before_visible, after_visible)
-        if req["version"] == 1:
+        if req['version'] == 1:
             # V1 response, invisible items have value None
             for k in (k for (k,v) in visible_diff.items() if not v):
                 values_diff[k] = None
-            response = {"version": 1, "values": values_diff, "ranges": ranges_diff}
+            response = {'version': 1, 'values': values_diff, 'ranges': ranges_diff}
         else:
             # V2+ response, separate visibility values
-            response = {"version": req["version"], "values": values_diff, "ranges": ranges_diff, "visible": visible_diff}
+            response = {'version': req['version'], 'values': values_diff, 'ranges': ranges_diff, 'visible': visible_diff}
 
         if error:
             for e in error:
-                print("Error: %s" % e, file=sys.stderr)
-            response["error"] = error
+                print('Error: %s' % e, file=sys.stderr)
+            response['error'] = error
 
         json.dump(response, sys.stdout)
-        print("\n")
+        print('\n')
         sys.stdout.flush()
 
 
 def handle_request(deprecated_options, config, req):
-    if "version" not in req:
+    if 'version' not in req:
         return ["All requests must have a 'version'"]
 
-    if req["version"] < MIN_PROTOCOL_VERSION or req["version"] > MAX_PROTOCOL_VERSION:
-        return ["Unsupported request version %d. Server supports versions %d-%d" % (
-            req["version"],
+    if req['version'] < MIN_PROTOCOL_VERSION or req['version'] > MAX_PROTOCOL_VERSION:
+        return ['Unsupported request version %d. Server supports versions %d-%d' % (
+            req['version'],
             MIN_PROTOCOL_VERSION,
             MAX_PROTOCOL_VERSION)]
 
     error = []
 
-    if "load" in req:
-        print("Loading config from %s..." % req["load"], file=sys.stderr)
+    if 'load' in req:
+        print('Loading config from %s...' % req['load'], file=sys.stderr)
         try:
-            config.load_config(req["load"])
+            config.load_config(req['load'])
         except Exception as e:
-            error += ["Failed to load from %s: %s" % (req["load"], e)]
+            error += ['Failed to load from %s: %s' % (req['load'], e)]
 
-    if "set" in req:
-        handle_set(config, error, req["set"])
+    if 'set' in req:
+        handle_set(config, error, req['set'])
 
-    if "save" in req:
+    if 'save' in req:
         try:
-            print("Saving config to %s..." % req["save"], file=sys.stderr)
-            confgen.write_config(deprecated_options, config, req["save"])
+            print('Saving config to %s...' % req['save'], file=sys.stderr)
+            confgen.write_config(deprecated_options, config, req['save'])
         except Exception as e:
-            error += ["Failed to save to %s: %s" % (req["save"], e)]
+            error += ['Failed to save to %s: %s' % (req['save'], e)]
 
     return error
 
@@ -199,7 +200,7 @@ def handle_request(deprecated_options, config, req):
 def handle_set(config, error, to_set):
     missing = [k for k in to_set if k not in config.syms]
     if missing:
-        error.append("The following config symbol(s) were not found: %s" % (", ".join(missing)))
+        error.append('The following config symbol(s) were not found: %s' % (', '.join(missing)))
     # replace name keys with the full config symbol for each key:
     to_set = dict((config.syms[k],v) for (k,v) in to_set.items() if k not in missing)
 
@@ -219,21 +220,21 @@ def handle_set(config, error, to_set):
             elif val is False:
                 sym.set_value(0)
             else:
-                error.append("Boolean symbol %s only accepts true/false values" % sym.name)
+                error.append('Boolean symbol %s only accepts true/false values' % sym.name)
         elif sym.type == kconfiglib.HEX:
             try:
                 if not isinstance(val, int):
                     val = int(val, 16)  # input can be a decimal JSON value or a string of hex digits
                 sym.set_value(hex(val))
             except ValueError:
-                error.append("Hex symbol %s can accept a decimal integer or a string of hex digits, only")
+                error.append('Hex symbol %s can accept a decimal integer or a string of hex digits, only')
         else:
             sym.set_value(str(val))
-        print("Set %s" % sym.name)
+        print('Set %s' % sym.name)
         del to_set[sym]
 
     if len(to_set):
-        error.append("The following config symbol(s) were not visible so were not updated: %s" % (", ".join(s.name for s in to_set)))
+        error.append('The following config symbol(s) were not visible so were not updated: %s' % (', '.join(s.name for s in to_set)))
 
 
 def diff(before, after):
@@ -320,5 +321,5 @@ if __name__ == '__main__':
     try:
         main()
     except FatalError as e:
-        print("A fatal error occurred: %s" % e, file=sys.stderr)
+        print('A fatal error occurred: %s' % e, file=sys.stderr)
         sys.exit(2)
diff --git a/tools/kconfig_new/esp-windows-curses/setup.py b/tools/kconfig_new/esp-windows-curses/setup.py
index eb4bfaace0..7f23aaab03 100644
--- a/tools/kconfig_new/esp-windows-curses/setup.py
+++ b/tools/kconfig_new/esp-windows-curses/setup.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import os
+
 from setuptools import setup
 
 setup(name='esp-windows-curses',
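The confserver protocol touched above is line-oriented JSON on stdin/stdout. A plausible V2 exchange, reconstructed from the request keys handled in run_server and handle_request (the symbol name is hypothetical and the values illustrative only):

    import json

    # V2 request: change one symbol, then save to the currently loaded sdkconfig
    request = {'version': 2, 'set': {'EXAMPLE_SYMBOL': True}, 'save': None}
    line = json.dumps(request)  # one request per line on the server's stdin

    # a V2 response carries only the values/ranges/visibility that changed
    response = json.loads('{"version": 2, "values": {"EXAMPLE_SYMBOL": true}, '
                          '"ranges": {}, "visible": {"EXAMPLE_SYMBOL": true}}')
    assert response['values']['EXAMPLE_SYMBOL'] is True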
""" - with open(filename, "w") as f: + with open(filename, 'w') as f: for node in config.node_iter(): write_menu_item(f, node, visibility) @@ -170,14 +172,14 @@ def get_breadcrumbs(node): node = node.parent while node.parent: if node.prompt: - result = [":ref:`%s`" % get_link_anchor(node)] + result + result = [':ref:`%s`' % get_link_anchor(node)] + result node = node.parent - return " > ".join(result) + return ' > '.join(result) def get_link_anchor(node): try: - return "CONFIG_%s" % node.item.name + return 'CONFIG_%s' % node.item.name except AttributeError: assert(node_is_menu(node)) # only menus should have no item.name @@ -185,9 +187,9 @@ def get_link_anchor(node): result = [] while node.parent: if node.prompt: - result = [re.sub(r"[^a-zA-z0-9]+", "-", node.prompt[0])] + result + result = [re.sub(r'[^a-zA-z0-9]+', '-', node.prompt[0])] + result node = node.parent - result = "-".join(result).lower() + result = '-'.join(result).lower() return result @@ -206,8 +208,8 @@ def format_rest_text(text, indent): # Format an indented text block for use with ReST text = indent + text.replace('\n', '\n' + indent) # Escape some characters which are inline formatting in ReST - text = text.replace("*", "\\*") - text = text.replace("_", "\\_") + text = text.replace('*', '\\*') + text = text.replace('_', '\\_') # replace absolute links to documentation by relative ones text = re.sub(r'https://docs.espressif.com/projects/esp-idf/\w+/\w+/(.+)\.html', r':doc:`../\1`', text) text += '\n' @@ -297,7 +299,7 @@ def write_menu_item(f, node, visibility): # if no symbol name, use the prompt as the heading title = node.prompt[0] - f.write(".. _%s:\n\n" % get_link_anchor(node)) + f.write('.. _%s:\n\n' % get_link_anchor(node)) f.write('%s\n' % title) f.write(HEADING_SYMBOLS[get_heading_level(node)] * len(title)) f.write('\n\n') @@ -389,7 +391,7 @@ def write_menu_item(f, node, visibility): child_list.append((child.prompt[0], get_link_anchor(child))) child = child.next if len(child_list) > 0: - f.write("Contains:\n\n") + f.write('Contains:\n\n') sorted_child_list = sorted(child_list, key=lambda pair: pair[0].lower()) ref_list = ['- :ref:`{}`'.format(anchor) for _, anchor in sorted_child_list] f.write('\n'.join(ref_list)) diff --git a/tools/kconfig_new/prepare_kconfig_files.py b/tools/kconfig_new/prepare_kconfig_files.py index 360354dea0..1198467c3b 100644 --- a/tools/kconfig_new/prepare_kconfig_files.py +++ b/tools/kconfig_new/prepare_kconfig_files.py @@ -14,12 +14,12 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from __future__ import print_function -from __future__ import unicode_literals -from io import open +from __future__ import print_function, unicode_literals + import argparse import json import sys +from io import open def _prepare_source_files(env_dict): @@ -85,9 +85,9 @@ if __name__ == '__main__': args = parser.parse_args() try: - env = dict([(name, value) for (name, value) in (e.split("=", 1) for e in args.env)]) + env = dict([(name, value) for (name, value) in (e.split('=', 1) for e in args.env)]) except ValueError: - print("--env arguments must each contain =.") + print('--env arguments must each contain =.') sys.exit(1) if args.env_file is not None: diff --git a/tools/kconfig_new/test/confgen/test_confgen.py b/tools/kconfig_new/test/confgen/test_confgen.py index 8bc24c28fd..8a45a1c49c 100755 --- a/tools/kconfig_new/test/confgen/test_confgen.py +++ b/tools/kconfig_new/test/confgen/test_confgen.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -from future.utils import iteritems import os import re import subprocess @@ -9,6 +8,8 @@ import tempfile import textwrap import unittest +from future.utils import iteritems + class ConfgenBaseTestCase(unittest.TestCase): @classmethod @@ -270,5 +271,5 @@ hex "Hex Item default prefix" default 0x77 """ -if __name__ == "__main__": +if __name__ == '__main__': unittest.main() diff --git a/tools/kconfig_new/test/confserver/test_confserver.py b/tools/kconfig_new/test/confserver/test_confserver.py index 2153cda5fa..32bfa4107e 100755 --- a/tools/kconfig_new/test/confserver/test_confserver.py +++ b/tools/kconfig_new/test/confserver/test_confserver.py @@ -1,5 +1,6 @@ #!/usr/bin/env python from __future__ import print_function + import argparse import json import os @@ -13,24 +14,24 @@ PROTOCOL_VERSIONS = [1, 2] def parse_testcases(version): - with open("testcases_v%d.txt" % version, "r") as f: + with open('testcases_v%d.txt' % version, 'r') as f: cases = [line for line in f.readlines() if len(line.strip()) > 0] # Each 3 lines in the file should be formatted as: # * Description of the test change # * JSON "changes" to send to the server # * Result JSON to expect back from the server if len(cases) % 3 != 0: - print("Warning: testcases.txt has wrong number of non-empty lines (%d). Should be 3 lines per test case, always." % len(cases)) + print('Warning: testcases.txt has wrong number of non-empty lines (%d). Should be 3 lines per test case, always.' 
% len(cases)) for i in range(0, len(cases), 3): desc = cases[i] send = cases[i + 1] expect = cases[i + 2] - if not desc.startswith("* "): + if not desc.startswith('* '): raise RuntimeError("Unexpected description at line %d: '%s'" % (i + 1, desc)) - if not send.startswith("> "): + if not send.startswith('> '): raise RuntimeError("Unexpected send at line %d: '%s'" % (i + 2, send)) - if not expect.startswith("< "): + if not expect.startswith('< '): raise RuntimeError("Unexpected expect at line %d: '%s'" % (i + 3, expect)) desc = desc[2:] send = json.loads(send[2:]) @@ -45,9 +46,9 @@ def main(): try: # set up temporary file to use as sdkconfig copy - with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_sdkconfig: + with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_sdkconfig: temp_sdkconfig_path = os.path.join(tempfile.gettempdir(), temp_sdkconfig.name) - with open("sdkconfig") as orig: + with open('sdkconfig') as orig: temp_sdkconfig.write(orig.read()) with tempfile.NamedTemporaryFile(delete=False) as f: @@ -69,12 +70,12 @@ def main(): # prepare_kconfig_files.py doesn't have to be called because COMPONENT_KCONFIGS and # COMPONENT_KCONFIGS_PROJBUILD are empty - print("Running: %s" % cmdline) + print('Running: %s' % cmdline) p = pexpect.spawn(cmdline, timeout=30, logfile=args.logfile, echo=False, use_poll=True, maxread=1) - p.expect("Server running.+\r\n") + p.expect('Server running.+\r\n') initial = expect_json(p) - print("Initial: %s" % initial) + print('Initial: %s' % initial) for version in PROTOCOL_VERSIONS: test_protocol_version(p, version) @@ -83,7 +84,7 @@ def main(): test_invalid_json(p) - print("Done. All passed.") + print('Done. All passed.') finally: try: @@ -96,89 +97,89 @@ def main(): def expect_json(p): # run p.expect() to expect a json object back, and return it as parsed JSON - p.expect("{.+}\r\n") + p.expect('{.+}\r\n') result = p.match.group(0).strip().decode() - print("Read raw data from server: %s" % result) + print('Read raw data from server: %s' % result) return json.loads(result) def send_request(p, req): req = json.dumps(req) - print("Sending: %s" % (req)) - p.send("%s\n" % req) + print('Sending: %s' % (req)) + p.send('%s\n' % req) readback = expect_json(p) - print("Read back: %s" % (json.dumps(readback))) + print('Read back: %s' % (json.dumps(readback))) return readback def test_protocol_version(p, version): - print("*****") - print("Testing version %d..." % version) + print('*****') + print('Testing version %d...' % version) # reload the config from the sdkconfig file - req = {"version": version, "load": None} + req = {'version': version, 'load': None} readback = send_request(p, req) - print("Reset response: %s" % (json.dumps(readback))) + print('Reset response: %s' % (json.dumps(readback))) # run through each test case cases = parse_testcases(version) for (desc, send, expected) in cases: print(desc) - req = {"version": version, "set": send} + req = {'version': version, 'set': send} readback = send_request(p, req) - if readback.get("version", None) != version: + if readback.get('version', None) != version: raise RuntimeError('Expected {"version" : %d} in response' % version) for expect_key in expected.keys(): read_vals = readback[expect_key] exp_vals = expected[expect_key] if read_vals != exp_vals: expect_diff = dict((k,v) for (k,v) in exp_vals.items() if k not in read_vals or v != read_vals[k]) - raise RuntimeError("Test failed! 
Was expecting %s: %s" % (expect_key, json.dumps(expect_diff))) - print("OK") - print("Version %d OK" % version) + raise RuntimeError('Test failed! Was expecting %s: %s' % (expect_key, json.dumps(expect_diff))) + print('OK') + print('Version %d OK' % version) def test_load_save(p, temp_sdkconfig_path): - print("Testing load/save...") + print('Testing load/save...') before = os.stat(temp_sdkconfig_path).st_mtime - save_result = send_request(p, {"version": 2, "save": temp_sdkconfig_path}) - print("Save result: %s" % (json.dumps(save_result))) - assert "error" not in save_result - assert len(save_result["values"]) == 0 # nothing changes when we save - assert len(save_result["ranges"]) == 0 + save_result = send_request(p, {'version': 2, 'save': temp_sdkconfig_path}) + print('Save result: %s' % (json.dumps(save_result))) + assert 'error' not in save_result + assert len(save_result['values']) == 0 # nothing changes when we save + assert len(save_result['ranges']) == 0 after = os.stat(temp_sdkconfig_path).st_mtime assert after > before # something got written to disk # Do a V2 load - load_result = send_request(p, {"version": 2, "load": temp_sdkconfig_path}) - print("V2 Load result: %s" % (json.dumps(load_result))) - assert "error" not in load_result - assert len(load_result["values"]) == 0 # in V2, loading same file should return no config items - assert len(load_result["ranges"]) == 0 + load_result = send_request(p, {'version': 2, 'load': temp_sdkconfig_path}) + print('V2 Load result: %s' % (json.dumps(load_result))) + assert 'error' not in load_result + assert len(load_result['values']) == 0 # in V2, loading same file should return no config items + assert len(load_result['ranges']) == 0 # Do a V1 load - load_result = send_request(p, {"version": 1, "load": temp_sdkconfig_path}) - print("V1 Load result: %s" % (json.dumps(load_result))) - assert "error" not in load_result - assert len(load_result["values"]) > 0 # in V1, loading same file should return all config items - assert len(load_result["ranges"]) > 0 + load_result = send_request(p, {'version': 1, 'load': temp_sdkconfig_path}) + print('V1 Load result: %s' % (json.dumps(load_result))) + assert 'error' not in load_result + assert len(load_result['values']) > 0 # in V1, loading same file should return all config items + assert len(load_result['ranges']) > 0 def test_invalid_json(p): - print("Testing invalid JSON formatting...") + print('Testing invalid JSON formatting...') bad_escaping = r'{ "version" : 2, "load" : "c:\some\path\not\escaped\as\json" }' - p.send("%s\n" % bad_escaping) + p.send('%s\n' % bad_escaping) readback = expect_json(p) print(readback) - assert "json" in readback["error"][0].lower() + assert 'json' in readback['error'][0].lower() not_really_json = 'Hello world!!' 
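
For orientation while reading these tests: the confserver protocol is one JSON object per line in each direction, and a V2+ response carries only deltas while V1 echoes the full value and range sets. A minimal sketch of the exchange the load/save assertions above depend on; the path is a placeholder, not taken from this patch:

import json

# One request per line on the server's stdin (placeholder path):
request = json.dumps({'version': 2, 'load': '/tmp/sdkconfig'}) + '\n'

# One response per line on stdout; reloading an unchanged file over V2
# should report empty deltas:
response = json.loads('{"version": 2, "values": {}, "ranges": {}, "visible": {}}')
assert response['values'] == {} and response['ranges'] == {}
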
- p.send("%s\n" % not_really_json) + p.send('%s\n' % not_really_json) readback = expect_json(p) print(readback) - assert "json" in readback["error"][0].lower() + assert 'json' in readback['error'][0].lower() -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/kconfig_new/test/gen_kconfig_doc/test_kconfig_out.py b/tools/kconfig_new/test/gen_kconfig_doc/test_kconfig_out.py index 3c3dbc6b6e..f86ad710a9 100755 --- a/tools/kconfig_new/test/gen_kconfig_doc/test_kconfig_out.py +++ b/tools/kconfig_new/test/gen_kconfig_doc/test_kconfig_out.py @@ -1,5 +1,6 @@ #!/usr/bin/env python from __future__ import unicode_literals + import io import os import sys @@ -76,5 +77,5 @@ class TestDocOutput(unittest.TestCase): self.assertNotIn('- from 100', s) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main() diff --git a/tools/kconfig_new/test/gen_kconfig_doc/test_target_visibility.py b/tools/kconfig_new/test/gen_kconfig_doc/test_target_visibility.py index 6425c17ced..b587253966 100755 --- a/tools/kconfig_new/test/gen_kconfig_doc/test_target_visibility.py +++ b/tools/kconfig_new/test/gen_kconfig_doc/test_target_visibility.py @@ -1,9 +1,10 @@ #!/usr/bin/env python -import kconfiglib import os import sys import unittest +import kconfiglib + try: import gen_kconfig_doc except ImportError: @@ -107,5 +108,5 @@ class ConfigTargetVisibilityChipB(ConfigTargetVisibilityTestCase): self.invisible('CHIPA_FEATURE_FROM_V3') -if __name__ == "__main__": +if __name__ == '__main__': unittest.main() diff --git a/tools/ldgen/fragments.py b/tools/ldgen/fragments.py index 556bb8700c..7183d5ed64 100644 --- a/tools/ldgen/fragments.py +++ b/tools/ldgen/fragments.py @@ -13,30 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# +import abc import os import re - -from sdkconfig import SDKConfig -from pyparsing import OneOrMore -from pyparsing import restOfLine -from pyparsing import alphanums -from pyparsing import Word -from pyparsing import alphas -from pyparsing import ParseFatalException -from pyparsing import Suppress -from pyparsing import Group -from pyparsing import Literal -from pyparsing import ZeroOrMore -from pyparsing import Optional -from pyparsing import originalTextFor -from pyparsing import Forward -from pyparsing import indentedBlock -from pyparsing import Combine from collections import namedtuple -import abc +from pyparsing import (Combine, Forward, Group, Literal, OneOrMore, Optional, ParseFatalException, Suppress, Word, + ZeroOrMore, alphanums, alphas, indentedBlock, originalTextFor, restOfLine) +from sdkconfig import SDKConfig -KeyGrammar = namedtuple("KeyGrammar", "grammar min max required") +KeyGrammar = namedtuple('KeyGrammar', 'grammar min max required') class FragmentFile(): @@ -47,7 +33,7 @@ class FragmentFile(): def __init__(self, fragment_file, sdkconfig): try: - fragment_file = open(fragment_file, "r") + fragment_file = open(fragment_file, 'r') except TypeError: pass @@ -57,14 +43,14 @@ class FragmentFile(): class parse_ctx: fragment = None # current fragment - key = "" # current key + key = '' # current key keys = list() # list of keys parsed key_grammar = None # current key grammar @staticmethod def reset(): parse_ctx.fragment_instance = None - parse_ctx.key = "" + parse_ctx.key = '' parse_ctx.keys = list() parse_ctx.key_grammar = None @@ -75,11 +61,11 @@ class FragmentFile(): def expand_conditionals(toks, stmts): try: - stmt = toks["value"] + stmt = toks['value'] stmts.append(stmt) except KeyError: try: - conditions = toks["conditional"] + conditions = toks['conditional'] for condition in conditions: try: _toks = condition[1] @@ -111,7 +97,7 @@ class FragmentFile(): raise ParseFatalException(pstr, loc, "unable to add key '%s'; %s" % (parse_ctx.key, str(e))) return None - key = Word(alphanums + "_") + Suppress(":") + key = Word(alphanums + '_') + Suppress(':') key_stmt = Forward() condition_block = indentedBlock(key_stmt, indent_stack) @@ -119,11 +105,11 @@ class FragmentFile(): key_body = Suppress(key) + key_stmts key_body.setParseAction(key_body_parsed) - condition = originalTextFor(SDKConfig.get_expression_grammar()).setResultsName("condition") - if_condition = Group(Suppress("if") + condition + Suppress(":") + condition_block) - elif_condition = Group(Suppress("elif") + condition + Suppress(":") + condition_block) - else_condition = Group(Suppress("else") + Suppress(":") + condition_block) - conditional = (if_condition + Optional(OneOrMore(elif_condition)) + Optional(else_condition)).setResultsName("conditional") + condition = originalTextFor(SDKConfig.get_expression_grammar()).setResultsName('condition') + if_condition = Group(Suppress('if') + condition + Suppress(':') + condition_block) + elif_condition = Group(Suppress('elif') + condition + Suppress(':') + condition_block) + else_condition = Group(Suppress('else') + Suppress(':') + condition_block) + conditional = (if_condition + Optional(OneOrMore(elif_condition)) + Optional(else_condition)).setResultsName('conditional') def key_parse_action(pstr, loc, toks): key = toks[0] @@ -142,7 +128,7 @@ class FragmentFile(): except Exception as e: raise ParseFatalException(pstr, loc, "unable to parse key '%s'; %s" % (key, str(e))) - key_stmt << (conditional | Group(key_grammar).setResultsName("value")) + key_stmt << (conditional | 
Group(key_grammar).setResultsName('value')) return None @@ -152,33 +138,33 @@ class FragmentFile(): key.setParseAction(key_parse_action) ftype = Word(alphas).setParseAction(fragment_type_parse_action) - fid = Suppress(":") + Word(alphanums + "_.").setResultsName("name") + fid = Suppress(':') + Word(alphanums + '_.').setResultsName('name') fid.setParseAction(name_parse_action) - header = Suppress("[") + ftype + fid + Suppress("]") + header = Suppress('[') + ftype + fid + Suppress(']') def fragment_parse_action(pstr, loc, toks): key_grammars = parse_ctx.fragment.get_key_grammars() required_keys = set([k for (k,v) in key_grammars.items() if v.required]) present_keys = required_keys.intersection(set(parse_ctx.keys)) if present_keys != required_keys: - raise ParseFatalException(pstr, loc, "required keys %s for fragment not found" % + raise ParseFatalException(pstr, loc, 'required keys %s for fragment not found' % list(required_keys - present_keys)) return parse_ctx.fragment fragment_stmt = Forward() fragment_block = indentedBlock(fragment_stmt, indent_stack) - fragment_if_condition = Group(Suppress("if") + condition + Suppress(":") + fragment_block) - fragment_elif_condition = Group(Suppress("elif") + condition + Suppress(":") + fragment_block) - fragment_else_condition = Group(Suppress("else") + Suppress(":") + fragment_block) + fragment_if_condition = Group(Suppress('if') + condition + Suppress(':') + fragment_block) + fragment_elif_condition = Group(Suppress('elif') + condition + Suppress(':') + fragment_block) + fragment_else_condition = Group(Suppress('else') + Suppress(':') + fragment_block) fragment_conditional = (fragment_if_condition + Optional(OneOrMore(fragment_elif_condition)) + - Optional(fragment_else_condition)).setResultsName("conditional") + Optional(fragment_else_condition)).setResultsName('conditional') - fragment = (header + OneOrMore(indentedBlock(key_body, indent_stack, False))).setResultsName("value") + fragment = (header + OneOrMore(indentedBlock(key_body, indent_stack, False))).setResultsName('value') fragment.setParseAction(fragment_parse_action) - fragment.ignore("#" + restOfLine) + fragment.ignore('#' + restOfLine) - deprecated_mapping = DeprecatedMapping.get_fragment_grammar(sdkconfig, fragment_file.name).setResultsName("value") + deprecated_mapping = DeprecatedMapping.get_fragment_grammar(sdkconfig, fragment_file.name).setResultsName('value') fragment_stmt << (Group(deprecated_mapping) | Group(fragment) | Group(fragment_conditional)) @@ -203,8 +189,8 @@ class Fragment(): such as checking the validity of the fragment name and getting the entry values. """ - IDENTIFIER = Word(alphas + "_", alphanums + "_") - ENTITY = Word(alphanums + ".-_$") + IDENTIFIER = Word(alphas + '_', alphanums + '_') + ENTITY = Word(alphanums + '.-_$') @abc.abstractmethod def set_key_value(self, key, parse_results): @@ -219,12 +205,12 @@ class Sections(Fragment): # Unless quoted, symbol names start with a letter, underscore, or point # and may include any letters, underscores, digits, points, and hyphens. 
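
A note on the entries grammar defined in this hunk: an entry may end in '+', which Sections.get_section_data_from_entry just below expands into the exact section name plus a wildcard variant. A small self-contained restatement of that rule, for experimentation; the section and symbol names are illustrative only:

def expand_sections_entry(entry, symbol=None):
    # Mirrors Sections.get_section_data_from_entry from this file.
    if not symbol:
        return [entry.replace('+', ''), entry.replace('+', '.*')]
    if entry.endswith('+'):
        section = entry.replace('+', '.*')
        return (section, section.replace('.*', '.' + symbol))
    return (entry, None)

assert expand_sections_entry('.text+') == ['.text', '.text.*']
assert expand_sections_entry('.text+', 'my_func') == ('.text.*', '.text.my_func')
assert expand_sections_entry('.data', 'my_func') == ('.data', None)
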
- GNU_LD_SYMBOLS = Word(alphas + "_.", alphanums + "._-") + GNU_LD_SYMBOLS = Word(alphas + '_.', alphanums + '._-') - entries_grammar = Combine(GNU_LD_SYMBOLS + Optional("+")) + entries_grammar = Combine(GNU_LD_SYMBOLS + Optional('+')) grammars = { - "entries": KeyGrammar(entries_grammar.setResultsName("section"), 1, None, True) + 'entries': KeyGrammar(entries_grammar.setResultsName('section'), 1, None, True) } """ @@ -235,22 +221,22 @@ class Sections(Fragment): def get_section_data_from_entry(sections_entry, symbol=None): if not symbol: sections = list() - sections.append(sections_entry.replace("+", "")) - sections.append(sections_entry.replace("+", ".*")) + sections.append(sections_entry.replace('+', '')) + sections.append(sections_entry.replace('+', '.*')) return sections else: - if sections_entry.endswith("+"): - section = sections_entry.replace("+", ".*") - expansion = section.replace(".*", "." + symbol) + if sections_entry.endswith('+'): + section = sections_entry.replace('+', '.*') + expansion = section.replace('.*', '.' + symbol) return (section, expansion) else: return (sections_entry, None) def set_key_value(self, key, parse_results): - if key == "entries": + if key == 'entries': self.entries = set() for result in parse_results: - self.entries.add(result["section"]) + self.entries.add(result['section']) def get_key_grammars(self): return self.__class__.grammars @@ -262,15 +248,15 @@ class Scheme(Fragment): """ grammars = { - "entries": KeyGrammar(Fragment.IDENTIFIER.setResultsName("sections") + Suppress("->") + - Fragment.IDENTIFIER.setResultsName("target"), 1, None, True) + 'entries': KeyGrammar(Fragment.IDENTIFIER.setResultsName('sections') + Suppress('->') + + Fragment.IDENTIFIER.setResultsName('target'), 1, None, True) } def set_key_value(self, key, parse_results): - if key == "entries": + if key == 'entries': self.entries = set() for result in parse_results: - self.entries.add((result["sections"], result["target"])) + self.entries.add((result['sections'], result['target'])) def get_key_grammars(self): return self.__class__.grammars @@ -281,7 +267,7 @@ class Mapping(Fragment): Encapsulates a mapping fragment, which defines what targets the input sections of mappable entties are placed under. 
""" - MAPPING_ALL_OBJECTS = "*" + MAPPING_ALL_OBJECTS = '*' def __init__(self): Fragment.__init__(self) @@ -289,26 +275,26 @@ class Mapping(Fragment): self.deprecated = False def set_key_value(self, key, parse_results): - if key == "archive": - self.archive = parse_results[0]["archive"] - elif key == "entries": + if key == 'archive': + self.archive = parse_results[0]['archive'] + elif key == 'entries': for result in parse_results: obj = None symbol = None scheme = None try: - obj = result["object"] + obj = result['object'] except KeyError: pass try: - symbol = result["symbol"] + symbol = result['symbol'] except KeyError: pass try: - scheme = result["scheme"] + scheme = result['scheme'] except KeyError: pass @@ -319,19 +305,19 @@ class Mapping(Fragment): # obj:symbol (scheme) # obj (scheme) # * (scheme) - obj = Fragment.ENTITY.setResultsName("object") - symbol = Suppress(":") + Fragment.IDENTIFIER.setResultsName("symbol") - scheme = Suppress("(") + Fragment.IDENTIFIER.setResultsName("scheme") + Suppress(")") + obj = Fragment.ENTITY.setResultsName('object') + symbol = Suppress(':') + Fragment.IDENTIFIER.setResultsName('symbol') + scheme = Suppress('(') + Fragment.IDENTIFIER.setResultsName('scheme') + Suppress(')') pattern1 = obj + symbol + scheme pattern2 = obj + scheme - pattern3 = Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme + pattern3 = Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName('object') + scheme entry = pattern1 | pattern2 | pattern3 grammars = { - "archive": KeyGrammar(Fragment.ENTITY.setResultsName("archive"), 1, 1, True), - "entries": KeyGrammar(entry, 0, None, True) + 'archive': KeyGrammar(Fragment.ENTITY.setResultsName('archive'), 1, 1, True), + 'entries': KeyGrammar(entry, 0, None, True) } return grammars @@ -343,53 +329,53 @@ class DeprecatedMapping(): """ # Name of the default condition entry - DEFAULT_CONDITION = "default" - MAPPING_ALL_OBJECTS = "*" + DEFAULT_CONDITION = 'default' + MAPPING_ALL_OBJECTS = '*' @staticmethod def get_fragment_grammar(sdkconfig, fragment_file): # Match header [mapping] - header = Suppress("[") + Suppress("mapping") + Suppress("]") + header = Suppress('[') + Suppress('mapping') + Suppress(']') # There are three possible patterns for mapping entries: # obj:symbol (scheme) # obj (scheme) # * (scheme) - obj = Fragment.ENTITY.setResultsName("object") - symbol = Suppress(":") + Fragment.IDENTIFIER.setResultsName("symbol") - scheme = Suppress("(") + Fragment.IDENTIFIER.setResultsName("scheme") + Suppress(")") + obj = Fragment.ENTITY.setResultsName('object') + symbol = Suppress(':') + Fragment.IDENTIFIER.setResultsName('symbol') + scheme = Suppress('(') + Fragment.IDENTIFIER.setResultsName('scheme') + Suppress(')') pattern1 = Group(obj + symbol + scheme) pattern2 = Group(obj + scheme) - pattern3 = Group(Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName("object") + scheme) + pattern3 = Group(Literal(Mapping.MAPPING_ALL_OBJECTS).setResultsName('object') + scheme) mapping_entry = pattern1 | pattern2 | pattern3 # To simplify parsing, classify groups of condition-mapping entry into two types: normal and default # A normal grouping is one with a non-default condition. 
The default grouping is one which contains the # default condition - mapping_entries = Group(ZeroOrMore(mapping_entry)).setResultsName("mappings") + mapping_entries = Group(ZeroOrMore(mapping_entry)).setResultsName('mappings') - normal_condition = Suppress(":") + originalTextFor(SDKConfig.get_expression_grammar()) - default_condition = Optional(Suppress(":") + Literal(DeprecatedMapping.DEFAULT_CONDITION)) + normal_condition = Suppress(':') + originalTextFor(SDKConfig.get_expression_grammar()) + default_condition = Optional(Suppress(':') + Literal(DeprecatedMapping.DEFAULT_CONDITION)) - normal_group = Group(normal_condition.setResultsName("condition") + mapping_entries) - default_group = Group(default_condition + mapping_entries).setResultsName("default_group") + normal_group = Group(normal_condition.setResultsName('condition') + mapping_entries) + default_group = Group(default_condition + mapping_entries).setResultsName('default_group') - normal_groups = Group(ZeroOrMore(normal_group)).setResultsName("normal_groups") + normal_groups = Group(ZeroOrMore(normal_group)).setResultsName('normal_groups') # Any mapping fragment definition can have zero or more normal group and only one default group as a last entry. - archive = Suppress("archive") + Suppress(":") + Fragment.ENTITY.setResultsName("archive") - entries = Suppress("entries") + Suppress(":") + (normal_groups + default_group).setResultsName("entries") + archive = Suppress('archive') + Suppress(':') + Fragment.ENTITY.setResultsName('archive') + entries = Suppress('entries') + Suppress(':') + (normal_groups + default_group).setResultsName('entries') mapping = Group(header + archive + entries) - mapping.ignore("#" + restOfLine) + mapping.ignore('#' + restOfLine) def parsed_deprecated_mapping(pstr, loc, toks): fragment = Mapping() fragment.archive = toks[0].archive - fragment.name = re.sub(r"[^0-9a-zA-Z]+", "_", fragment.archive) + fragment.name = re.sub(r'[^0-9a-zA-Z]+', '_', fragment.archive) fragment.deprecated = True fragment.entries = set() @@ -413,10 +399,10 @@ class DeprecatedMapping(): fragment.entries.add((entry.object, None if entry.symbol == '' else entry.symbol, entry.scheme)) if not fragment.entries: - fragment.entries.add(("*", None, "default")) + fragment.entries.add(('*', None, 'default')) dep_warning = str(ParseFatalException(pstr, loc, - "Warning: Deprecated old-style mapping fragment parsed in file %s." % fragment_file)) + 'Warning: Deprecated old-style mapping fragment parsed in file %s.' 
% fragment_file)) print(dep_warning) return fragment @@ -426,7 +412,7 @@ class DeprecatedMapping(): FRAGMENT_TYPES = { - "sections": Sections, - "scheme": Scheme, - "mapping": Mapping + 'sections': Sections, + 'scheme': Scheme, + 'mapping': Mapping } diff --git a/tools/ldgen/generation.py b/tools/ldgen/generation.py index 2f221efdde..a2d0267fb6 100644 --- a/tools/ldgen/generation.py +++ b/tools/ldgen/generation.py @@ -15,14 +15,14 @@ # import collections +import fnmatch import itertools import os -import fnmatch -from fragments import Sections, Scheme, Mapping, Fragment -from pyparsing import Suppress, White, ParseException, Literal, Group, ZeroOrMore -from pyparsing import Word, OneOrMore, nums, alphas, restOfLine, SkipTo +from fragments import Fragment, Mapping, Scheme, Sections from ldgen_common import LdGenFailure +from pyparsing import (Group, Literal, OneOrMore, ParseException, SkipTo, Suppress, White, Word, ZeroOrMore, alphas, + nums, restOfLine) class PlacementRule(): @@ -39,13 +39,13 @@ class PlacementRule(): def __init__(self, content): self.content = content - __metadata = collections.namedtuple("__metadata", "excludes expansions expanded") + __metadata = collections.namedtuple('__metadata', 'excludes expansions expanded') def __init__(self, archive, obj, symbol, sections, target): - if archive == "*": + if archive == '*': archive = None - if obj == "*": + if obj == '*': obj = None self.archive = archive @@ -168,10 +168,10 @@ class PlacementRule(): exclusion_string = None if exclusions: - exclusion_string = " ".join(map(lambda e: "*" + e.archive + (":" + e.obj + ".*" if e.obj else ""), exclusions)) - exclusion_string = "EXCLUDE_FILE(" + exclusion_string + ")" + exclusion_string = ' '.join(map(lambda e: '*' + e.archive + (':' + e.obj + '.*' if e.obj else ''), exclusions)) + exclusion_string = 'EXCLUDE_FILE(' + exclusion_string + ')' else: - exclusion_string = "" + exclusion_string = '' section_string = None exclusion_section_string = None @@ -180,26 +180,26 @@ class PlacementRule(): section_expanded = self.sections[section].expanded.content if section_expansions and section_expanded: - section_string = " ".join(section_expansions) + section_string = ' '.join(section_expansions) exclusion_section_string = section_string else: section_string = section - exclusion_section_string = exclusion_string + " " + section_string + exclusion_section_string = exclusion_string + ' ' + section_string sections_string.append(exclusion_section_string) - sections_string = " ".join(sections_string) + sections_string = ' '.join(sections_string) - archive = str(self.archive) if self.archive else "" - obj = (str(self.obj) + (".*" if self.obj else "")) if self.obj else "" + archive = str(self.archive) if self.archive else '' + obj = (str(self.obj) + ('.*' if self.obj else '')) if self.obj else '' # Handle output string generation based on information available if self.specificity == PlacementRule.DEFAULT_SPECIFICITY: - rule_string = "*(%s)" % (sections_string) + rule_string = '*(%s)' % (sections_string) elif self.specificity == PlacementRule.ARCHIVE_SPECIFICITY: - rule_string = "*%s:(%s)" % (archive, sections_string) + rule_string = '*%s:(%s)' % (archive, sections_string) else: - rule_string = "*%s:%s(%s)" % (archive, obj, sections_string) + rule_string = '*%s:%s(%s)' % (archive, obj, sections_string) return rule_string @@ -251,7 +251,7 @@ class GenerationModel: Implements generation of placement rules based on collected sections, scheme and mapping fragment. 
""" - DEFAULT_SCHEME = "default" + DEFAULT_SCHEME = 'default' def __init__(self, check_mappings=False, check_mapping_exceptions=None): self.schemes = {} @@ -322,7 +322,7 @@ class GenerationModel: # targets. Raise exception. if intersection: scheme = self.schemes[scheme_name] - message = "Sections " + str(intersection) + " mapped to multiple targets." + message = 'Sections ' + str(intersection) + ' mapped to multiple targets.' raise GenerationException(message, scheme) return scheme_dictionary @@ -352,7 +352,7 @@ class GenerationModel: raise GenerationException(message, mapping) if symbol: - obj_sym = fnmatch.filter(obj_sections, "*%s" % symbol) + obj_sym = fnmatch.filter(obj_sections, '*%s' % symbol) if not obj_sym: message = "'%s:%s %s' not found" % (archive, obj, symbol) raise GenerationException(message, mapping) @@ -401,7 +401,7 @@ class GenerationModel: if intersections and rule_a.maps_same_entities_as(rule_b): rules_string = str([str(rule_a), str(rule_b)]) - message = "Rules " + rules_string + " map sections " + str(list(intersections)) + " into multiple targets." + message = 'Rules ' + rules_string + ' map sections ' + str(list(intersections)) + ' into multiple targets.' raise GenerationException(message) def _create_extra_rules(self, rules): @@ -499,7 +499,7 @@ class TemplateModel: final output. """ - Marker = collections.namedtuple("Marker", "target indent rules") + Marker = collections.namedtuple('Marker', 'target indent rules') def __init__(self, template_file): self.members = [] @@ -511,8 +511,8 @@ class TemplateModel: lines = template_file.readlines() target = Fragment.IDENTIFIER - reference = Suppress("mapping") + Suppress("[") + target.setResultsName("target") + Suppress("]") - pattern = White(" \t").setResultsName("indent") + reference + reference = Suppress('mapping') + Suppress('[') + target.setResultsName('target') + Suppress(']') + pattern = White(' \t').setResultsName('indent') + reference # Find the markers in the template file line by line. If line does not match marker grammar, # set it as a literal to be copied as is to the output file. @@ -548,10 +548,10 @@ class TemplateModel: def write(self, output_file): # Add information that this is a generated file. - output_file.write("/* Automatically generated file; DO NOT EDIT */\n") - output_file.write("/* Espressif IoT Development Framework Linker Script */\n") - output_file.write("/* Generated from: %s */\n" % self.file) - output_file.write("\n") + output_file.write('/* Automatically generated file; DO NOT EDIT */\n') + output_file.write('/* Espressif IoT Development Framework Linker Script */\n') + output_file.write('/* Generated from: %s */\n' % self.file) + output_file.write('\n') # Do the text replacement for member in self.members: @@ -560,7 +560,7 @@ class TemplateModel: rules = member.rules for rule in rules: - generated_line = "".join([indent, str(rule), '\n']) + generated_line = ''.join([indent, str(rule), '\n']) output_file.write(generated_line) except AttributeError: output_file.write(member) @@ -572,7 +572,7 @@ class GenerationException(LdGenFailure): evaluate conditions, duplicate mappings, etc. 
""" - UNDEFINED_REFERENCE = "Undefined reference" + UNDEFINED_REFERENCE = 'Undefined reference' def __init__(self, message, fragment=None): self.fragment = fragment @@ -591,7 +591,7 @@ class SectionsInfo(dict): and names """ - __info = collections.namedtuple("__info", "filename content") + __info = collections.namedtuple('__info', 'filename content') def __init__(self): self.sections = dict() @@ -599,10 +599,10 @@ class SectionsInfo(dict): def add_sections_info(self, sections_info_dump): first_line = sections_info_dump.readline() - archive_path = (Literal("In archive").suppress() + + archive_path = (Literal('In archive').suppress() + White().suppress() + # trim the colon and line ending characters from archive_path - restOfLine.setResultsName("archive_path").setParseAction(lambda s, loc, toks: s.rstrip(":\n\r "))) + restOfLine.setResultsName('archive_path').setParseAction(lambda s, loc, toks: s.rstrip(':\n\r '))) parser = archive_path results = None @@ -610,34 +610,34 @@ class SectionsInfo(dict): try: results = parser.parseString(first_line, parseAll=True) except ParseException as p: - raise ParseException("Parsing sections info for library " + sections_info_dump.name + " failed. " + p.msg) + raise ParseException('Parsing sections info for library ' + sections_info_dump.name + ' failed. ' + p.msg) archive = os.path.basename(results.archive_path) self.sections[archive] = SectionsInfo.__info(sections_info_dump.name, sections_info_dump.read()) def _get_infos_from_file(self, info): # {object}: file format elf32-xtensa-le - object_line = SkipTo(":").setResultsName("object") + Suppress(restOfLine) + object_line = SkipTo(':').setResultsName('object') + Suppress(restOfLine) # Sections: # Idx Name ... - section_start = Suppress(Literal("Sections:")) + section_start = Suppress(Literal('Sections:')) section_header = Suppress(OneOrMore(Word(alphas))) # 00 {section} 0000000 ... # CONTENTS, ALLOC, .... section_entry = Suppress(Word(nums)) + SkipTo(' ') + Suppress(restOfLine) + \ - Suppress(ZeroOrMore(Word(alphas) + Literal(",")) + Word(alphas)) + Suppress(ZeroOrMore(Word(alphas) + Literal(',')) + Word(alphas)) - content = Group(object_line + section_start + section_header + Group(OneOrMore(section_entry)).setResultsName("sections")) - parser = Group(ZeroOrMore(content)).setResultsName("contents") + content = Group(object_line + section_start + section_header + Group(OneOrMore(section_entry)).setResultsName('sections')) + parser = Group(ZeroOrMore(content)).setResultsName('contents') results = None try: results = parser.parseString(info.content, parseAll=True) except ParseException as p: - raise ParseException("Unable to parse section info file " + info.filename + ". " + p.msg) + raise ParseException('Unable to parse section info file ' + info.filename + '. 
' + p.msg) return results @@ -657,15 +657,15 @@ class SectionsInfo(dict): self.sections[archive] = stored try: - res = stored[obj + ".o"] + res = stored[obj + '.o'] except KeyError: try: - res = stored[obj + ".c.obj"] + res = stored[obj + '.c.obj'] except KeyError: try: - res = stored[obj + ".cpp.obj"] + res = stored[obj + '.cpp.obj'] except KeyError: - res = stored[obj + ".S.obj"] + res = stored[obj + '.S.obj'] except KeyError: pass diff --git a/tools/ldgen/ldgen.py b/tools/ldgen/ldgen.py index 139a97d3ce..df1a74af92 100755 --- a/tools/ldgen/ldgen.py +++ b/tools/ldgen/ldgen.py @@ -16,33 +16,33 @@ # import argparse +import errno import json +import os +import subprocess import sys import tempfile -import subprocess -import os -import errno +from io import StringIO from fragments import FragmentFile -from sdkconfig import SDKConfig -from generation import GenerationModel, TemplateModel, SectionsInfo +from generation import GenerationModel, SectionsInfo, TemplateModel from ldgen_common import LdGenFailure from pyparsing import ParseException, ParseFatalException -from io import StringIO +from sdkconfig import SDKConfig try: import confgen except Exception: parent_dir_name = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) - kconfig_new_dir = os.path.abspath(parent_dir_name + "/kconfig_new") + kconfig_new_dir = os.path.abspath(parent_dir_name + '/kconfig_new') sys.path.insert(0, kconfig_new_dir) import confgen def _update_environment(args): - env = [(name, value) for (name,value) in (e.split("=",1) for e in args.env)] + env = [(name, value) for (name,value) in (e.split('=',1) for e in args.env)] for name, value in env: - value = " ".join(value.split()) + value = ' '.join(value.split()) os.environ[name] = value if args.env_file is not None: @@ -52,51 +52,51 @@ def _update_environment(args): def main(): - argparser = argparse.ArgumentParser(description="ESP-IDF linker script generator") + argparser = argparse.ArgumentParser(description='ESP-IDF linker script generator') argparser.add_argument( - "--input", "-i", - help="Linker template file", - type=argparse.FileType("r")) + '--input', '-i', + help='Linker template file', + type=argparse.FileType('r')) argparser.add_argument( - "--fragments", "-f", - type=argparse.FileType("r"), - help="Input fragment files", - nargs="+") + '--fragments', '-f', + type=argparse.FileType('r'), + help='Input fragment files', + nargs='+') argparser.add_argument( - "--libraries-file", - type=argparse.FileType("r"), - help="File that contains the list of libraries in the build") + '--libraries-file', + type=argparse.FileType('r'), + help='File that contains the list of libraries in the build') argparser.add_argument( - "--output", "-o", - help="Output linker script", + '--output', '-o', + help='Output linker script', type=str) argparser.add_argument( - "--config", "-c", - help="Project configuration") + '--config', '-c', + help='Project configuration') argparser.add_argument( - "--kconfig", "-k", - help="IDF Kconfig file") + '--kconfig', '-k', + help='IDF Kconfig file') argparser.add_argument( - "--check-mapping", - help="Perform a check if a mapping (archive, obj, symbol) exists", + '--check-mapping', + help='Perform a check if a mapping (archive, obj, symbol) exists', action='store_true' ) argparser.add_argument( - "--check-mapping-exceptions", - help="Mappings exempted from check", - type=argparse.FileType("r") + '--check-mapping-exceptions', + help='Mappings exempted from check', + type=argparse.FileType('r') ) argparser.add_argument( - "--env", "-e", 
+        '--env', '-e',
         action='append', default=[],
         help='Environment to set when evaluating the config file', metavar='NAME=VAL')
@@ -105,8 +105,8 @@
         'should be a JSON object where each key/value pair is a variable.')

     argparser.add_argument(
-        "--objdump",
-        help="Path to toolchain objdump")
+        '--objdump',
+        help='Path to toolchain objdump')

     args = argparser.parse_args()

@@ -129,7 +129,7 @@
         for library in libraries_file:
             library = library.strip()
             if library:
-                dump = StringIO(subprocess.check_output([objdump, "-h", library]).decode())
+                dump = StringIO(subprocess.check_output([objdump, '-h', library]).decode())
                 dump.name = library
                 sections_infos.add_sections_info(dump)

@@ -146,7 +146,7 @@
                 # ParseException is raised on incorrect grammar
                 # ParseFatalException is raised on correct grammar, but inconsistent contents (ex. duplicate
                 # keys, key unsupported by fragment, unexpected number of values, etc.)
-                raise LdGenFailure("failed to parse %s\n%s" % (fragment_file.name, str(e)))
+                raise LdGenFailure('failed to parse %s\n%s' % (fragment_file.name, str(e)))
             generation_model.add_fragments_from_file(fragment_file)

         mapping_rules = generation_model.generate_rules(sections_infos)
@@ -154,7 +154,7 @@
         script_model = TemplateModel(input_file)
         script_model.fill(mapping_rules)

-        with tempfile.TemporaryFile("w+") as output:
+        with tempfile.TemporaryFile('w+') as output:
             script_model.write(output)
             output.seek(0)
@@ -165,12 +165,12 @@
                 if exc.errno != errno.EEXIST:
                     raise

-            with open(output_path, "w") as f:  # only create output file after generation has suceeded
+            with open(output_path, 'w') as f:  # only create output file after generation has succeeded
                 f.write(output.read())
     except LdGenFailure as e:
-        print("linker script generation failed for %s\nERROR: %s" % (input_file.name, e))
+        print('linker script generation failed for %s\nERROR: %s' % (input_file.name, e))
         sys.exit(1)


-if __name__ == "__main__":
+if __name__ == '__main__':
     main()
diff --git a/tools/ldgen/sdkconfig.py b/tools/ldgen/sdkconfig.py
index 5c20f9a9eb..d41a94f089 100644
--- a/tools/ldgen/sdkconfig.py
+++ b/tools/ldgen/sdkconfig.py
@@ -14,8 +14,9 @@
 # limitations under the License.
 #

-from pyparsing import Word, alphanums, printables, Combine, Literal, hexnums, quotedString, Optional, nums, removeQuotes, oneOf, Group, infixNotation, opAssoc
 import kconfiglib
+from pyparsing import (Combine, Group, Literal, Optional, Word, alphanums, hexnums, infixNotation, nums, oneOf,
+                       opAssoc, printables, quotedString, removeQuotes)


 class SDKConfig:
@@ -25,17 +26,17 @@ class SDKConfig:
     """

     # A configuration entry is in the form CONFIG=VALUE.
Definitions of components of that grammar - IDENTIFIER = Word(alphanums.upper() + "_") + IDENTIFIER = Word(alphanums.upper() + '_') - HEX = Combine("0x" + Word(hexnums)).setParseAction(lambda t:int(t[0], 16)) - DECIMAL = Combine(Optional(Literal("+") | Literal("-")) + Word(nums)).setParseAction(lambda t:int(t[0])) - LITERAL = Word(printables.replace(":", "")) + HEX = Combine('0x' + Word(hexnums)).setParseAction(lambda t:int(t[0], 16)) + DECIMAL = Combine(Optional(Literal('+') | Literal('-')) + Word(nums)).setParseAction(lambda t:int(t[0])) + LITERAL = Word(printables.replace(':', '')) QUOTED_LITERAL = quotedString.setParseAction(removeQuotes) VALUE = HEX | DECIMAL | LITERAL | QUOTED_LITERAL # Operators supported by the expression evaluation - OPERATOR = oneOf(["=", "!=", ">", "<", "<=", ">="]) + OPERATOR = oneOf(['=', '!=', '>', '<', '<=', '>=']) def __init__(self, kconfig_file, sdkconfig_file): self.config = kconfiglib.Kconfig(kconfig_file) @@ -49,24 +50,24 @@ class SDKConfig: elif result == 2: # y return True else: # m - raise Exception("unsupported config expression result") + raise Exception('unsupported config expression result') @staticmethod def get_expression_grammar(): - identifier = SDKConfig.IDENTIFIER.setResultsName("identifier") - operator = SDKConfig.OPERATOR.setResultsName("operator") - value = SDKConfig.VALUE.setResultsName("value") + identifier = SDKConfig.IDENTIFIER.setResultsName('identifier') + operator = SDKConfig.OPERATOR.setResultsName('operator') + value = SDKConfig.VALUE.setResultsName('value') test_binary = identifier + operator + value test_single = identifier test = test_binary | test_single - condition = Group(Optional("(").suppress() + test + Optional(")").suppress()) + condition = Group(Optional('(').suppress() + test + Optional(')').suppress()) grammar = infixNotation(condition, [ - ("!", 1, opAssoc.RIGHT), - ("&&", 2, opAssoc.LEFT), - ("||", 2, opAssoc.LEFT)]) + ('!', 1, opAssoc.RIGHT), + ('&&', 2, opAssoc.LEFT), + ('||', 2, opAssoc.LEFT)]) return grammar diff --git a/tools/ldgen/test/test_fragments.py b/tools/ldgen/test/test_fragments.py index dc6ff5b301..169af566a2 100755 --- a/tools/ldgen/test/test_fragments.py +++ b/tools/ldgen/test/test_fragments.py @@ -16,44 +16,44 @@ # import os import sys -import unittest import tempfile - +import unittest from io import StringIO -from pyparsing import Word, ParseException, ParseFatalException, alphanums + +from pyparsing import ParseException, ParseFatalException, Word, alphanums try: - from fragments import FragmentFile, FRAGMENT_TYPES, Fragment, KeyGrammar + from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, KeyGrammar from sdkconfig import SDKConfig except ImportError: sys.path.append('../') - from fragments import FragmentFile, FRAGMENT_TYPES, Fragment, KeyGrammar + from fragments import FRAGMENT_TYPES, Fragment, FragmentFile, KeyGrammar from sdkconfig import SDKConfig class SampleFragment(Fragment): grammars = { - "key_1": KeyGrammar(Word(alphanums + "_").setResultsName("value"), 0, None, True), - "key_2": KeyGrammar(Word(alphanums + "_").setResultsName("value"), 0, None, False), - "key_3": KeyGrammar(Word(alphanums + "_").setResultsName("value"), 3, 5, False) + 'key_1': KeyGrammar(Word(alphanums + '_').setResultsName('value'), 0, None, True), + 'key_2': KeyGrammar(Word(alphanums + '_').setResultsName('value'), 0, None, False), + 'key_3': KeyGrammar(Word(alphanums + '_').setResultsName('value'), 3, 5, False) } def set_key_value(self, key, parse_results): - if key == "key_1": + if key == 'key_1': 
self.key_1 = list() for result in parse_results: - self.key_1.append(result["value"]) - elif key == "key_2": + self.key_1.append(result['value']) + elif key == 'key_2': self.key_2 = list() for result in parse_results: - self.key_2.append(result["value"]) + self.key_2.append(result['value']) def get_key_grammars(self): return self.__class__.grammars -FRAGMENT_TYPES["test"] = SampleFragment +FRAGMENT_TYPES['test'] = SampleFragment class FragmentTest(unittest.TestCase): @@ -72,7 +72,7 @@ class FragmentTest(unittest.TestCase): # prepare_kconfig_files.py doesn't have to be called because COMPONENT_KCONFIGS and # COMPONENT_KCONFIGS_PROJBUILD are empty - self.sdkconfig = SDKConfig("data/Kconfig", "data/sdkconfig") + self.sdkconfig = SDKConfig('data/Kconfig', 'data/sdkconfig') def tearDown(self): try: @@ -82,7 +82,7 @@ class FragmentTest(unittest.TestCase): pass @staticmethod - def create_fragment_file(contents, name="test_fragment.lf"): + def create_fragment_file(contents, name='test_fragment.lf'): f = StringIO(contents) f.name = name return f @@ -102,11 +102,11 @@ key_2: value_a fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(len(fragment_file.fragments[0].key_1), 3) - self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1") - self.assertEqual(fragment_file.fragments[0].key_1[1], "value_2") - self.assertEqual(fragment_file.fragments[0].key_1[2], "value_3") + self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1') + self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_2') + self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_3') self.assertEqual(len(fragment_file.fragments[0].key_2), 1) - self.assertEqual(fragment_file.fragments[0].key_2[0], "value_a") + self.assertEqual(fragment_file.fragments[0].key_2[0], 'value_a') def test_duplicate_keys(self): test_fragment = self.create_fragment_file(u""" @@ -139,10 +139,10 @@ key_1: value_5 """) fragment_file = FragmentFile(test_fragment, self.sdkconfig) - self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1") - self.assertEqual(fragment_file.fragments[0].key_1[1], "value_2") - self.assertEqual(fragment_file.fragments[0].key_1[2], "value_3") - self.assertEqual(fragment_file.fragments[0].key_1[3], "value_5") + self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1') + self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_2') + self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_3') + self.assertEqual(fragment_file.fragments[0].key_1[3], 'value_5') test_fragment = self.create_fragment_file(u""" [test:test] @@ -160,9 +160,9 @@ key_1: """) fragment_file = FragmentFile(test_fragment, self.sdkconfig) - self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1") - self.assertEqual(fragment_file.fragments[0].key_1[1], "value_3") - self.assertEqual(fragment_file.fragments[0].key_1[2], "value_6") + self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1') + self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_3') + self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_6') test_fragment = self.create_fragment_file(u""" [test:test] @@ -185,14 +185,14 @@ key_2: """) fragment_file = FragmentFile(test_fragment, self.sdkconfig) - self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1") - self.assertEqual(fragment_file.fragments[0].key_1[1], "value_2") - self.assertEqual(fragment_file.fragments[0].key_1[2], "value_4") - self.assertEqual(fragment_file.fragments[0].key_1[3], "value_5") - self.assertEqual(fragment_file.fragments[0].key_1[4], 
"value_6") - self.assertEqual(fragment_file.fragments[0].key_1[5], "value_7") - self.assertEqual(fragment_file.fragments[0].key_2[0], "value_a") - self.assertEqual(fragment_file.fragments[0].key_2[1], "value_b") + self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1') + self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_2') + self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_4') + self.assertEqual(fragment_file.fragments[0].key_1[3], 'value_5') + self.assertEqual(fragment_file.fragments[0].key_1[4], 'value_6') + self.assertEqual(fragment_file.fragments[0].key_1[5], 'value_7') + self.assertEqual(fragment_file.fragments[0].key_2[0], 'value_a') + self.assertEqual(fragment_file.fragments[0].key_2[1], 'value_b') test_fragment = self.create_fragment_file(u""" [test:test] @@ -226,9 +226,9 @@ key_1: fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(len(fragment_file.fragments[0].key_1), 3) - self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1") - self.assertEqual(fragment_file.fragments[0].key_1[1], "value_2") - self.assertEqual(fragment_file.fragments[0].key_1[2], "value_3") + self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1') + self.assertEqual(fragment_file.fragments[0].key_1[1], 'value_2') + self.assertEqual(fragment_file.fragments[0].key_1[2], 'value_3') test_fragment = self.create_fragment_file(u""" [test:test] @@ -410,8 +410,8 @@ key_1: fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(len(fragment_file.fragments), 2) - self.assertEqual(fragment_file.fragments[0].key_1[0], "value_1") - self.assertEqual(fragment_file.fragments[1].key_1[0], "value_2") + self.assertEqual(fragment_file.fragments[0].key_1[0], 'value_1') + self.assertEqual(fragment_file.fragments[1].key_1[0], 'value_2') def test_whole_conditional_fragment(self): test_fragment = self.create_fragment_file(u""" @@ -442,11 +442,11 @@ key_1: fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(len(fragment_file.fragments), 4) - self.assertEqual(fragment_file.fragments[0].name, "test2") - self.assertEqual(fragment_file.fragments[1].name, "test3") - self.assertEqual(fragment_file.fragments[1].key_1[1], "value_6") - self.assertEqual(fragment_file.fragments[2].name, "test4") - self.assertEqual(fragment_file.fragments[3].name, "test5") + self.assertEqual(fragment_file.fragments[0].name, 'test2') + self.assertEqual(fragment_file.fragments[1].name, 'test3') + self.assertEqual(fragment_file.fragments[1].key_1[1], 'value_6') + self.assertEqual(fragment_file.fragments[2].name, 'test4') + self.assertEqual(fragment_file.fragments[3].name, 'test5') def test_equivalent_conditional_fragment(self): test_fragment1 = self.create_fragment_file(u""" @@ -462,7 +462,7 @@ else: fragment_file1 = FragmentFile(test_fragment1, self.sdkconfig) self.assertEqual(len(fragment_file1.fragments), 1) - self.assertEqual(fragment_file1.fragments[0].key_1[0], "value_1") + self.assertEqual(fragment_file1.fragments[0].key_1[0], 'value_1') test_fragment2 = self.create_fragment_file(u""" [test:test1] @@ -475,7 +475,7 @@ key_1: fragment_file2 = FragmentFile(test_fragment2, self.sdkconfig) self.assertEqual(len(fragment_file2.fragments), 1) - self.assertEqual(fragment_file2.fragments[0].key_1[0], "value_1") + self.assertEqual(fragment_file2.fragments[0].key_1[0], 'value_1') class SectionsTest(FragmentTest): @@ -489,7 +489,7 @@ entries: """) fragment_file = FragmentFile(test_fragment, self.sdkconfig) - 
self.assertEqual(fragment_file.fragments[0].entries, {".section1", ".section2"}) + self.assertEqual(fragment_file.fragments[0].entries, {'.section1', '.section2'}) def test_duplicate_entries(self): test_fragment = self.create_fragment_file(u""" @@ -502,7 +502,7 @@ entries: """) fragment_file = FragmentFile(test_fragment, self.sdkconfig) - self.assertEqual(fragment_file.fragments[0].entries, {".section1", ".section2", ".section3"}) + self.assertEqual(fragment_file.fragments[0].entries, {'.section1', '.section2', '.section3'}) def test_empty_entries(self): test_fragment = self.create_fragment_file(u""" @@ -535,7 +535,7 @@ entries: fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(fragment_file.fragments[0].entries, - {"_valid1", "valid2.", ".valid3_-"}) + {'_valid1', 'valid2.', '.valid3_-'}) # invalid starting char test_fragment = self.create_fragment_file(u""" @@ -565,7 +565,7 @@ entries: fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(fragment_file.fragments[0].entries, - {"valid+"}) + {'valid+'}) test_fragment = self.create_fragment_file(u""" [sections:test] @@ -589,8 +589,8 @@ entries: fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(fragment_file.fragments[0].entries, - {("sections1", "target1"), - ("sections2", "target2")}) + {('sections1', 'target1'), + ('sections2', 'target2')}) def test_duplicate_entries(self): test_fragment = self.create_fragment_file(u""" @@ -603,8 +603,8 @@ entries: fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(fragment_file.fragments[0].entries, - {("sections1", "target1"), - ("sections2", "target2")}) + {('sections1', 'target1'), + ('sections2', 'target2')}) def test_empty_entries(self): test_fragment = self.create_fragment_file(u""" @@ -650,11 +650,11 @@ entries: * (noflash) """) - expected = {("obj", "symbol", "noflash"), - ("obj", None, "noflash"), - ("obj", "symbol_2", "noflash"), - ("obj_2", None, "noflash"), - ("*", None, "noflash")} + expected = {('obj', 'symbol', 'noflash'), + ('obj', None, 'noflash'), + ('obj', 'symbol_2', 'noflash'), + ('obj_2', None, 'noflash'), + ('*', None, 'noflash')} fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(expected, fragment_file.fragments[0].entries) @@ -717,7 +717,7 @@ entries: obj:symbol (noflash) """) - expected = {("obj", "symbol", "noflash")} + expected = {('obj', 'symbol', 'noflash')} fragment_file = FragmentFile(test_fragment, self.sdkconfig) self.assertEqual(expected, fragment_file.fragments[0].entries) @@ -828,14 +828,14 @@ entries: * (noflash) """) fragment_file = FragmentFile(test_fragment, self.sdkconfig) - self.assertEqual("lib.a", fragment_file.fragments[0].archive) - self.assertEqual("lib_a", fragment_file.fragments[0].name) + self.assertEqual('lib.a', fragment_file.fragments[0].archive) + self.assertEqual('lib_a', fragment_file.fragments[0].name) - expected = {("obj", "symbol", "noflash"), - ("obj", None, "noflash"), - ("obj", "symbol_2", "noflash"), - ("obj_2", None, "noflash"), - ("*", None, "noflash") + expected = {('obj', 'symbol', 'noflash'), + ('obj', None, 'noflash'), + ('obj', 'symbol_2', 'noflash'), + ('obj_2', None, 'noflash'), + ('*', None, 'noflash') } self.assertEqual(expected, fragment_file.fragments[0].entries) @@ -850,7 +850,7 @@ entries: : default """) fragment_file = FragmentFile(test_fragment, self.sdkconfig) - expected = {("*", None, "default")} + expected = {('*', None, 'default')} self.assertEqual(expected, fragment_file.fragments[0].entries) 
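
The ': default' cases above exercise DeprecatedMapping from fragments.py. For reference, a hand-written old-style fragment of the shape these tests parse; the archive, object, and scheme names are made up, and the expected result follows the assertions above rather than a recorded run:

from io import StringIO

old_style = StringIO(u"""
[mapping]
archive: libfoo.a
entries:
    : default
    obj1 (noflash)
""")
old_style.name = 'old_style.lf'

# Parsed as FragmentFile(old_style, sdkconfig) with the same setUp as these
# tests, the default group should apply, giving
# entries == {('obj1', None, 'noflash')} and name == 'libfoo_a'.
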
@@ -864,7 +864,7 @@ entries:
 """)
         fragment_file = FragmentFile(test_fragment, self.sdkconfig)
-        expected = {("*", None, "default")}
+        expected = {('*', None, 'default')}

         self.assertEqual(expected, fragment_file.fragments[0].entries)
@@ -876,7 +876,7 @@ entries:
     : default
 """)
         fragment_file = FragmentFile(test_fragment, self.sdkconfig)
-        expected = {("*", None, "default")}
+        expected = {('*', None, 'default')}

         self.assertEqual(expected, fragment_file.fragments[0].entries)
@@ -888,7 +888,7 @@ entries:
     : default
 """)
         fragment_file = FragmentFile(test_fragment, self.sdkconfig)
-        expected = {("*", None, "default")}
+        expected = {('*', None, 'default')}

         self.assertEqual(expected, fragment_file.fragments[0].entries)
@@ -909,9 +909,9 @@ entries:
 """)

         fragment_file = FragmentFile(test_fragment, self.sdkconfig)
-        expected = {("obj_b1", None, "noflash"),
-                    ("obj_b2", None, "noflash"),
-                    ("obj_b3", None, "noflash")}
+        expected = {('obj_b1', None, 'noflash'),
+                    ('obj_b2', None, 'noflash'),
+                    ('obj_b3', None, 'noflash')}

         self.assertEqual(expected, fragment_file.fragments[0].entries)

     def test_blank_entries(self):
@@ -928,7 +928,7 @@ entries:
     obj (noflash)
 """)
         fragment_file = FragmentFile(test_fragment, self.sdkconfig)
-        expected = {("*", None, "default")}
+        expected = {('*', None, 'default')}

         self.assertEqual(expected, fragment_file.fragments[0].entries)

     def test_blank_first_condition(self):
@@ -1053,5 +1053,5 @@ entries:
                          fragment_file.fragments[1].entries)


-if __name__ == "__main__":
+if __name__ == '__main__':
     unittest.main()
diff --git a/tools/ldgen/test/test_generation.py b/tools/ldgen/test/test_generation.py
index 72d345ecef..d544fa43c3 100755
--- a/tools/ldgen/test/test_generation.py
+++ b/tools/ldgen/test/test_generation.py
@@ -26,15 +26,11 @@ except ImportError:
     sys.path.append('../')

 from generation import PlacementRule
-from generation import GenerationException
-from generation import SectionsInfo
-from generation import TemplateModel
-from generation import GenerationModel
+from io import StringIO

 from fragments import FragmentFile
-
+from generation import GenerationException, GenerationModel, SectionsInfo, TemplateModel
 from sdkconfig import SDKConfig
-from io import StringIO


 class GenerationModelTest(unittest.TestCase):
@@ -59,22 +55,22 @@ class GenerationModelTest(unittest.TestCase):

         # prepare_kconfig_files.py doesn't have to be called because COMPONENT_KCONFIGS and
         # COMPONENT_KCONFIGS_PROJBUILD are empty
-        self.sdkconfig = SDKConfig("data/Kconfig", "data/sdkconfig")
+        self.sdkconfig = SDKConfig('data/Kconfig', 'data/sdkconfig')

-        with open("data/sample.lf") as fragment_file_obj:
+        with open('data/sample.lf') as fragment_file_obj:
             fragment_file = FragmentFile(fragment_file_obj, self.sdkconfig)
             self.model.add_fragments_from_file(fragment_file)

         self.sections_info = SectionsInfo()

-        with open("data/sections.info") as sections_info_file_obj:
+        with open('data/sections.info') as sections_info_file_obj:
             self.sections_info.add_sections_info(sections_info_file_obj)

-        with open("data/template.ld") as template_file_obj:
+        with open('data/template.ld') as template_file_obj:
             self.script_model = TemplateModel(template_file_obj)

     @staticmethod
-    def create_fragment_file(contents, name="test_fragment.lf"):
+    def create_fragment_file(contents, name='test_fragment.lf'):
         f = StringIO(contents)
         f.name = name
         return f
@@ -86,61 +82,61 @@ class GenerationModelTest(unittest.TestCase):

     def write(self, expected, actual):
         self.script_model.fill(expected)
-        self.script_model.write(open("expected.ld", "w"))
+        self.script_model.write(open('expected.ld', 'w'))

         self.script_model.fill(actual)
-        self.script_model.write(open("actual.ld", "w"))
+        self.script_model.write(open('actual.ld', 'w'))

     def generate_default_rules(self):
         rules = dict()

         # flash_text
         placement_rules = list()
-        rule = PlacementRule(None, None, None, self.model.sections["text"].entries, "flash_text")
+        rule = PlacementRule(None, None, None, self.model.sections['text'].entries, 'flash_text')
         placement_rules.append(rule)
-        rules["flash_text"] = placement_rules
+        rules['flash_text'] = placement_rules

         # flash_rodata
         placement_rules = list()
-        rule = PlacementRule(None, None, None, self.model.sections["rodata"].entries, "flash_rodata")
+        rule = PlacementRule(None, None, None, self.model.sections['rodata'].entries, 'flash_rodata')
         placement_rules.append(rule)
-        rules["flash_rodata"] = placement_rules
+        rules['flash_rodata'] = placement_rules

         # dram0_data
         placement_rules = list()
-        rule = PlacementRule(None, None, None, self.model.sections["data"].entries | self.model.sections["dram"].entries, "dram0_data")
+        rule = PlacementRule(None, None, None, self.model.sections['data'].entries | self.model.sections['dram'].entries, 'dram0_data')
         placement_rules.append(rule)
-        rules["dram0_data"] = placement_rules
+        rules['dram0_data'] = placement_rules

         # dram0_bss
         placement_rules = list()
-        rule = PlacementRule(None, None, None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "dram0_bss")
+        rule = PlacementRule(None, None, None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'dram0_bss')
         placement_rules.append(rule)
-        rules["dram0_bss"] = placement_rules
+        rules['dram0_bss'] = placement_rules

         # iram0_text
         placement_rules = list()
-        rule = PlacementRule(None, None, None, self.model.sections["iram"].entries, "iram0_text")
+        rule = PlacementRule(None, None, None, self.model.sections['iram'].entries, 'iram0_text')
         placement_rules.append(rule)
-        rules["iram0_text"] = placement_rules
+        rules['iram0_text'] = placement_rules

         # rtc_text
         placement_rules = list()
-        rule = PlacementRule(None, None, None, self.model.sections["rtc_text"].entries, "rtc_text")
+        rule = PlacementRule(None, None, None, self.model.sections['rtc_text'].entries, 'rtc_text')
         placement_rules.append(rule)
-        rules["rtc_text"] = placement_rules
+        rules['rtc_text'] = placement_rules

         # rtc_data
         placement_rules = list()
-        rule = PlacementRule(None, None, None, self.model.sections["rtc_data"].entries | self.model.sections["rtc_rodata"].entries, "rtc_data")
+        rule = PlacementRule(None, None, None, self.model.sections['rtc_data'].entries | self.model.sections['rtc_rodata'].entries, 'rtc_data')
         placement_rules.append(rule)
-        rules["rtc_data"] = placement_rules
+        rules['rtc_data'] = placement_rules

         # rtc_bss
         placement_rules = list()
-        rule = PlacementRule(None, None, None, self.model.sections["rtc_bss"].entries, "rtc_bss")
+        rule = PlacementRule(None, None, None, self.model.sections['rtc_bss'].entries, 'rtc_bss')
         placement_rules.append(rule)
-        rules["rtc_bss"] = placement_rules
+        rules['rtc_bss'] = placement_rules

         return rules
@@ -149,7 +145,7 @@ class GenerationModelTest(unittest.TestCase):

         for (target, rules) in actual.items():
-            message = "target: " + target
+            message = 'target: ' + target

             actual_target_rules = rules
             expected_target_rules = expected[target]
@@ -192,19 +188,19 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)

-        iram0_text_E1 = PlacementRule("libfreertos.a", "*", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E1 = PlacementRule("libfreertos.a", "*", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E1 = PlacementRule('libfreertos.a', '*', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E1 = PlacementRule('libfreertos.a', '*', None, self.model.sections['rodata'].entries, 'dram0_data')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E1)
         flash_rodata_default.add_exclusion(dram0_data_E1)

         # Add to the placement rules list
-        expected["iram0_text"].append(iram0_text_E1)
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['iram0_text'].append(iram0_text_E1)
+        expected['dram0_data'].append(dram0_data_E1)

         self.compare_rules(expected, actual)
@@ -222,14 +218,14 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        rtc_text_E1 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E1 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E1 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E1 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

         # Add the exclusions
         flash_text_default.add_exclusion(rtc_text_E1)
@@ -238,9 +234,9 @@ entries:
         dram0_bss_default.add_exclusion(rtc_bss_E1)

         # Add the rules
-        expected["rtc_text"].append(rtc_text_E1)
-        expected["rtc_data"].append(rtc_data_E1)
-        expected["rtc_bss"].append(rtc_bss_E1)
+        expected['rtc_text'].append(rtc_text_E1)
+        expected['rtc_data'].append(rtc_data_E1)
+        expected['rtc_bss'].append(rtc_bss_E1)

         self.compare_rules(expected, actual)
@@ -259,17 +255,17 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        rtc_text_E1 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E1 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E1 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E1 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E2 = PlacementRule("libfreertos.a", "*", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E2 = PlacementRule("libfreertos.a", "*", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E2 = PlacementRule('libfreertos.a', '*', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E2 = PlacementRule('libfreertos.a', '*', None, self.model.sections['rodata'].entries, 'dram0_data')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E2)
@@ -282,12 +278,12 @@ entries:
         dram0_data_E2.add_exclusion(rtc_data_E1)

         # Add the rules
-        expected["iram0_text"].append(iram0_text_E2)
-        expected["dram0_data"].append(dram0_data_E2)
+        expected['iram0_text'].append(iram0_text_E2)
+        expected['dram0_data'].append(dram0_data_E2)

-        expected["rtc_text"].append(rtc_text_E1)
-        expected["rtc_data"].append(rtc_data_E1)
-        expected["rtc_bss"].append(rtc_bss_E1)
+        expected['rtc_text'].append(rtc_text_E1)
+        expected['rtc_data'].append(rtc_data_E1)
+        expected['rtc_bss'].append(rtc_bss_E1)

         self.compare_rules(expected, actual)
@@ -307,21 +303,21 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E2 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E2 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E2 = PlacementRule('libfreertos.a', 'event_groups', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E2 = PlacementRule('libfreertos.a', 'event_groups', None, self.model.sections['rodata'].entries, 'dram0_data')

-        rtc_text_E3 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E3 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E3 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E3 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E3 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E3 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

         # Add the exclusions
         flash_text_default.add_exclusion(rtc_text_E3)
@@ -338,16 +334,16 @@ entries:
         dram0_bss_default.add_exclusion(rtc_bss_E1)

         # Add the rules
-        expected["rtc_text"].append(rtc_text_E3)
-        expected["rtc_data"].append(rtc_data_E3)
-        expected["rtc_bss"].append(rtc_bss_E3)
+        expected['rtc_text'].append(rtc_text_E3)
+        expected['rtc_data'].append(rtc_data_E3)
+        expected['rtc_bss'].append(rtc_bss_E3)

-        expected["iram0_text"].append(iram0_text_E2)
-        expected["dram0_data"].append(dram0_data_E2)
+        expected['iram0_text'].append(iram0_text_E2)
+        expected['dram0_data'].append(dram0_data_E2)

-        expected["rtc_text"].append(rtc_text_E1)
-        expected["rtc_data"].append(rtc_data_E1)
-        expected["rtc_bss"].append(rtc_bss_E1)
+        expected['rtc_text'].append(rtc_text_E1)
+        expected['rtc_data'].append(rtc_data_E1)
+        expected['rtc_bss'].append(rtc_bss_E1)

         self.compare_rules(expected, actual)
@@ -368,24 +364,24 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E2 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E2 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E2 = PlacementRule('libfreertos.a', 'event_groups', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E2 = PlacementRule('libfreertos.a', 'event_groups', None, self.model.sections['rodata'].entries, 'dram0_data')

-        rtc_text_E3 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E3 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E3 = PlacementRule("libfreertos.a", "timers", None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E3 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E3 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E3 = PlacementRule('libfreertos.a', 'timers', None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E4 = PlacementRule("libfreertos.a", "*", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E4 = PlacementRule("libfreertos.a", "*", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E4 = PlacementRule('libfreertos.a', '*', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E4 = PlacementRule('libfreertos.a', '*', None, self.model.sections['rodata'].entries, 'dram0_data')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E4)
@@ -405,19 +401,19 @@ entries:
         dram0_bss_default.add_exclusion(rtc_bss_E1)

         # Add the rules
-        expected["iram0_text"].append(iram0_text_E4)
-        expected["dram0_data"].append(dram0_data_E4)
+        expected['iram0_text'].append(iram0_text_E4)
+        expected['dram0_data'].append(dram0_data_E4)

-        expected["rtc_text"].append(rtc_text_E3)
-        expected["rtc_data"].append(rtc_data_E3)
-        expected["rtc_bss"].append(rtc_bss_E3)
+        expected['rtc_text'].append(rtc_text_E3)
+        expected['rtc_data'].append(rtc_data_E3)
+        expected['rtc_bss'].append(rtc_bss_E3)

-        expected["iram0_text"].append(iram0_text_E2)
-        expected["dram0_data"].append(dram0_data_E2)
+        expected['iram0_text'].append(iram0_text_E2)
+        expected['dram0_data'].append(dram0_data_E2)

-        expected["rtc_text"].append(rtc_text_E1)
-        expected["rtc_data"].append(rtc_data_E1)
-        expected["rtc_bss"].append(rtc_bss_E1)
+        expected['rtc_text'].append(rtc_text_E1)
+        expected['rtc_data'].append(rtc_data_E1)
+        expected['rtc_bss'].append(rtc_bss_E1)

         self.compare_rules(expected, actual)
@@ -435,14 +431,14 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)

-        iram0_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckPendingReadyList', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckPendingReadyList', self.model.sections['rodata'].entries, 'dram0_data')

-        iram0_text_E1_extra = PlacementRule("libfreertos.a", "croutine", None, [".text.*", ".literal.*"], "flash_text")
-        dram0_data_E1_extra = PlacementRule("libfreertos.a", "croutine", None, [".rodata.*"], "flash_rodata")
+        iram0_text_E1_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.text.*', '.literal.*'], 'flash_text')
+        dram0_data_E1_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.rodata.*'], 'flash_rodata')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E1_extra, self.sections_info)
@@ -452,11 +448,11 @@ entries:
         dram0_data_E1_extra.add_exclusion(dram0_data_E1, self.sections_info)

         # Add the rules
-        expected["flash_text"].append(iram0_text_E1_extra)
-        expected["flash_rodata"].append(dram0_data_E1_extra)
+        expected['flash_text'].append(iram0_text_E1_extra)
+        expected['flash_rodata'].append(dram0_data_E1_extra)

-        expected["iram0_text"].append(iram0_text_E1)
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['iram0_text'].append(iram0_text_E1)
+        expected['dram0_data'].append(dram0_data_E1)

         self.compare_rules(expected, actual)
@@ -476,20 +472,20 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)

-        iram0_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckPendingReadyList', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckPendingReadyList', self.model.sections['rodata'].entries, 'dram0_data')

-        iram0_text_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E2 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E2 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['rodata'].entries, 'dram0_data')

-        iram0_text_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E3 = PlacementRule('libfreertos.a', 'croutine', 'xCoRoutineCreate', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E3 = PlacementRule('libfreertos.a', 'croutine', 'xCoRoutineCreate', self.model.sections['rodata'].entries, 'dram0_data')

-        flash_text_extra = PlacementRule("libfreertos.a", "croutine", None, [".text.*", ".literal.*"], "flash_text")
-        flash_rodata_extra = PlacementRule("libfreertos.a", "croutine", None, [".rodata.*"], "flash_rodata")
+        flash_text_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.text.*', '.literal.*'], 'flash_text')
+        flash_rodata_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.rodata.*'], 'flash_rodata')

         # Add the exclusions
         flash_text_default.add_exclusion(flash_text_extra, self.sections_info)
@@ -505,17 +501,17 @@ entries:
         flash_rodata_extra.add_exclusion(dram0_data_E3, self.sections_info)

         # Add the rules
-        expected["flash_text"].append(flash_text_extra)
-        expected["flash_rodata"].append(flash_rodata_extra)
+        expected['flash_text'].append(flash_text_extra)
+        expected['flash_rodata'].append(flash_rodata_extra)

-        expected["iram0_text"].append(iram0_text_E1)
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['iram0_text'].append(iram0_text_E1)
+        expected['dram0_data'].append(dram0_data_E1)

-        expected["iram0_text"].append(iram0_text_E2)
-        expected["dram0_data"].append(dram0_data_E2)
+        expected['iram0_text'].append(iram0_text_E2)
+        expected['dram0_data'].append(dram0_data_E2)

-        expected["iram0_text"].append(iram0_text_E3)
-        expected["dram0_data"].append(dram0_data_E3)
+        expected['iram0_text'].append(iram0_text_E3)
+        expected['dram0_data'].append(dram0_data_E3)

         self.compare_rules(expected, actual)
@@ -535,27 +531,27 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        iram0_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckPendingReadyList', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckPendingReadyList', self.model.sections['rodata'].entries, 'dram0_data')

-        rtc_text_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
-                                    self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E2 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
-                                   self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E2 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E2 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList',
+                                    self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E2 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList',
+                                   self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E3 = PlacementRule("libfreertos.a", "croutine", "xCoRoutineCreate", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E3 = PlacementRule('libfreertos.a', 'croutine', 'xCoRoutineCreate', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E3 = PlacementRule('libfreertos.a', 'croutine', 'xCoRoutineCreate', self.model.sections['rodata'].entries, 'dram0_data')

-        flash_text_extra = PlacementRule("libfreertos.a", "croutine", None, [".text.*", ".literal.*"], "flash_text")
-        flash_rodata_extra = PlacementRule("libfreertos.a", "croutine", None, [".rodata.*"], "flash_rodata")
-        dram0_data_extra = PlacementRule("libfreertos.a", "croutine", None, [".data.*"], "dram0_data")
-        dram0_bss_extra = PlacementRule("libfreertos.a", "croutine", None, [".bss.*"], "dram0_bss")
+        flash_text_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.text.*', '.literal.*'], 'flash_text')
+        flash_rodata_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.rodata.*'], 'flash_rodata')
+        dram0_data_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.data.*'], 'dram0_data')
+        dram0_bss_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.bss.*'], 'dram0_bss')

         # Add the exclusions
         flash_text_default.add_exclusion(flash_text_extra, self.sections_info)
@@ -575,20 +571,20 @@ entries:
         flash_rodata_extra.add_exclusion(dram0_data_E3, self.sections_info)

         # Add the rules
-        expected["flash_text"].append(flash_text_extra)
-        expected["flash_rodata"].append(flash_rodata_extra)
-        expected["dram0_data"].append(dram0_data_extra)
-        expected["dram0_bss"].append(dram0_bss_extra)
+        expected['flash_text'].append(flash_text_extra)
+        expected['flash_rodata'].append(flash_rodata_extra)
+        expected['dram0_data'].append(dram0_data_extra)
+        expected['dram0_bss'].append(dram0_bss_extra)

-        expected["iram0_text"].append(iram0_text_E1)
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['iram0_text'].append(iram0_text_E1)
+        expected['dram0_data'].append(dram0_data_E1)

-        expected["rtc_text"].append(rtc_text_E2)
-        expected["rtc_data"].append(rtc_data_E2)
-        expected["rtc_bss"].append(rtc_bss_E2)
+        expected['rtc_text'].append(rtc_text_E2)
+        expected['rtc_data'].append(rtc_data_E2)
+        expected['rtc_bss'].append(rtc_bss_E2)

-        expected["iram0_text"].append(iram0_text_E3)
-        expected["dram0_data"].append(dram0_data_E3)
+        expected['iram0_text'].append(iram0_text_E3)
+        expected['dram0_data'].append(dram0_data_E3)

         self.compare_rules(expected, actual)
@@ -607,22 +603,22 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
-                                    self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
-                                   self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList',
+                                    self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList',
+                                   self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E2 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E2 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['rodata'].entries, 'dram0_data')

-        dram0_data_extra = PlacementRule("libfreertos.a", "croutine", None, [".data.*"], "dram0_data")
-        dram0_bss_extra = PlacementRule("libfreertos.a", "croutine", None, [".bss.*"], "dram0_bss")
+        dram0_data_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.data.*'], 'dram0_data')
+        dram0_bss_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.bss.*'], 'dram0_bss')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E2, self.sections_info)
@@ -638,15 +634,15 @@ entries:
         dram0_data_E2.add_exclusion(rtc_data_E1, self.sections_info)

         # Add the rules
-        expected["dram0_data"].append(dram0_data_extra)
-        expected["dram0_bss"].append(dram0_bss_extra)
+        expected['dram0_data'].append(dram0_data_extra)
+        expected['dram0_bss'].append(dram0_bss_extra)

-        expected["iram0_text"].append(iram0_text_E2)
-        expected["dram0_data"].append(dram0_data_E2)
+        expected['iram0_text'].append(iram0_text_E2)
+        expected['dram0_data'].append(dram0_data_E2)

-        expected["rtc_text"].append(rtc_text_E1)
-        expected["rtc_data"].append(rtc_data_E1)
-        expected["rtc_bss"].append(rtc_bss_E1)
+        expected['rtc_text'].append(rtc_text_E1)
+        expected['rtc_data'].append(rtc_data_E1)
+        expected['rtc_bss'].append(rtc_bss_E1)

         self.compare_rules(expected, actual)
@@ -665,24 +661,24 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        iram0_text_default = self.get_default("iram0_text", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        iram0_text_default = self.get_default('iram0_text', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
-                                    self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
-                                   self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList',
+                                    self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList',
+                                   self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E2 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E2 = PlacementRule("libfreertos.a", None, None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E2 = PlacementRule('libfreertos.a', None, None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E2 = PlacementRule('libfreertos.a', None, None, self.model.sections['rodata'].entries, 'dram0_data')

-        iram0_text_extra = PlacementRule("libfreertos.a", "croutine", None, [".text.*", ".literal.*"], "iram0_text")
-        dram0_data_extra = PlacementRule("libfreertos.a", "croutine", None, [".data.*", ".rodata.*"], "dram0_data")
-        dram0_bss_extra = PlacementRule("libfreertos.a", "croutine", None, [".bss.*"], "dram0_bss")
+        iram0_text_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.text.*', '.literal.*'], 'iram0_text')
+        dram0_data_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.data.*', '.rodata.*'], 'dram0_data')
+        dram0_bss_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.bss.*'], 'dram0_bss')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E2, self.sections_info)
@@ -700,16 +696,16 @@ entries:
         dram0_bss_extra.add_exclusion(rtc_bss_E1, self.sections_info)

         # Add the rules
-        expected["iram0_text"].append(iram0_text_extra)
-        expected["dram0_data"].append(dram0_data_extra)
-        expected["dram0_bss"].append(dram0_bss_extra)
+        expected['iram0_text'].append(iram0_text_extra)
+        expected['dram0_data'].append(dram0_data_extra)
+        expected['dram0_bss'].append(dram0_bss_extra)

-        expected["iram0_text"].append(iram0_text_E2)
-        expected["dram0_data"].append(dram0_data_E2)
+        expected['iram0_text'].append(iram0_text_E2)
+        expected['dram0_data'].append(dram0_data_E2)

-        expected["rtc_text"].append(rtc_text_E1)
-        expected["rtc_data"].append(rtc_data_E1)
-        expected["rtc_bss"].append(rtc_bss_E1)
+        expected['rtc_text'].append(rtc_text_E1)
+        expected['rtc_data'].append(rtc_data_E1)
+        expected['rtc_bss'].append(rtc_bss_E1)

         self.compare_rules(expected, actual)
@@ -729,21 +725,21 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        iram0_text_default = self.get_default("iram0_text", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        iram0_text_default = self.get_default('iram0_text', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        iram0_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['rodata'].entries, 'dram0_data')

-        rtc_text_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E2 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E2 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E2 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E3 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E3 = PlacementRule("libfreertos.a", None, None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E3 = PlacementRule('libfreertos.a', None, None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E3 = PlacementRule('libfreertos.a', None, None, self.model.sections['rodata'].entries, 'dram0_data')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E3, self.sections_info)
@@ -759,15 +755,15 @@ entries:
         rtc_data_E2.add_exclusion(dram0_data_E1, self.sections_info)

         # Add the rules
-        expected["iram0_text"].append(iram0_text_E1)
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['iram0_text'].append(iram0_text_E1)
+        expected['dram0_data'].append(dram0_data_E1)

-        expected["rtc_text"].append(rtc_text_E2)
-        expected["rtc_data"].append(rtc_data_E2)
-        expected["rtc_bss"].append(rtc_bss_E2)
+        expected['rtc_text'].append(rtc_text_E2)
+        expected['rtc_data'].append(rtc_data_E2)
+        expected['rtc_bss'].append(rtc_bss_E2)

-        expected["iram0_text"].append(iram0_text_E3)
-        expected["dram0_data"].append(dram0_data_E3)
+        expected['iram0_text'].append(iram0_text_E3)
+        expected['dram0_data'].append(dram0_data_E3)

         self.compare_rules(expected, actual)
@@ -787,26 +783,26 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        rtc_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
-                                    self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList",
-                                   self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList',
+                                    self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList',
+                                   self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E2 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E2 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E2 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['rodata'].entries, 'dram0_data')

-        rtc_text_E3 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E3 = PlacementRule("libfreertos.a", None, None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E3 = PlacementRule("libfreertos.a", None, None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E3 = PlacementRule('libfreertos.a', None, None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E3 = PlacementRule('libfreertos.a', None, None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E3 = PlacementRule('libfreertos.a', None, None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        rtc_data_extra = PlacementRule("libfreertos.a", "croutine", None, [".data.*"], "rtc_data")
-        rtc_bss_extra = PlacementRule("libfreertos.a", "croutine", None, [".bss.*"], "rtc_bss")
+        rtc_data_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.data.*'], 'rtc_data')
+        rtc_bss_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.bss.*'], 'rtc_bss')

         # Add the exclusions
         flash_text_default.add_exclusion(rtc_text_E3, self.sections_info)
@@ -825,19 +821,19 @@ entries:
         dram0_data_E2.add_exclusion(rtc_data_E1, self.sections_info)

         # Add the rules
-        expected["rtc_data"].append(rtc_data_extra)
-        expected["rtc_bss"].append(rtc_bss_extra)
+        expected['rtc_data'].append(rtc_data_extra)
+        expected['rtc_bss'].append(rtc_bss_extra)

-        expected["rtc_text"].append(rtc_text_E1)
-        expected["rtc_data"].append(rtc_data_E1)
-        expected["rtc_bss"].append(rtc_bss_E1)
+        expected['rtc_text'].append(rtc_text_E1)
+        expected['rtc_data'].append(rtc_data_E1)
+        expected['rtc_bss'].append(rtc_bss_E1)

-        expected["iram0_text"].append(iram0_text_E2)
-        expected["dram0_data"].append(dram0_data_E2)
+        expected['iram0_text'].append(iram0_text_E2)
+        expected['dram0_data'].append(dram0_data_E2)

-        expected["rtc_text"].append(rtc_text_E3)
-        expected["rtc_data"].append(rtc_data_E3)
-        expected["rtc_bss"].append(rtc_bss_E3)
+        expected['rtc_text'].append(rtc_text_E3)
+        expected['rtc_data'].append(rtc_data_E3)
+        expected['rtc_bss'].append(rtc_bss_E3)

         self.compare_rules(expected, actual)
@@ -859,29 +855,29 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        iram0_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['rodata'].entries, 'dram0_data')

-        iram0_text_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E2 = PlacementRule('libfreertos.a', 'event_groups', 'xEventGroupCreate', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E2 = PlacementRule('libfreertos.a', 'event_groups', 'xEventGroupCreate', self.model.sections['rodata'].entries, 'dram0_data')

-        rtc_text_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E3 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E3 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E3 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        rtc_text_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E4 = PlacementRule("libfreertos.a", "event_groups", None,
-                                    self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E4 = PlacementRule("libfreertos.a", "event_groups", None,
-                                   self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E4 = PlacementRule('libfreertos.a', 'event_groups', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E4 = PlacementRule('libfreertos.a', 'event_groups', None,
+                                    self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E4 = PlacementRule('libfreertos.a', 'event_groups', None,
+                                   self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E5 = PlacementRule("libfreertos.a", None, None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E5 = PlacementRule("libfreertos.a", None, None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E5 = PlacementRule('libfreertos.a', None, None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E5 = PlacementRule('libfreertos.a', None, None, self.model.sections['rodata'].entries, 'dram0_data')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E5, self.sections_info)
@@ -903,22 +899,22 @@ entries:
         rtc_data_E3.add_exclusion(dram0_data_E1, self.sections_info)

         # Add the rules
-        expected["iram0_text"].append(iram0_text_E1)
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['iram0_text'].append(iram0_text_E1)
+        expected['dram0_data'].append(dram0_data_E1)

-        expected["iram0_text"].append(iram0_text_E2)
-        expected["dram0_data"].append(dram0_data_E2)
+        expected['iram0_text'].append(iram0_text_E2)
+        expected['dram0_data'].append(dram0_data_E2)

-        expected["rtc_text"].append(rtc_text_E3)
-        expected["rtc_data"].append(rtc_data_E3)
-        expected["rtc_bss"].append(rtc_bss_E3)
+        expected['rtc_text'].append(rtc_text_E3)
+        expected['rtc_data'].append(rtc_data_E3)
+        expected['rtc_bss'].append(rtc_bss_E3)

-        expected["rtc_text"].append(rtc_text_E4)
-        expected["rtc_data"].append(rtc_data_E4)
-        expected["rtc_bss"].append(rtc_bss_E4)
+        expected['rtc_text'].append(rtc_text_E4)
+        expected['rtc_data'].append(rtc_data_E4)
+        expected['rtc_bss'].append(rtc_bss_E4)

-        expected["iram0_text"].append(iram0_text_E5)
-        expected["dram0_data"].append(dram0_data_E5)
+        expected['iram0_text'].append(iram0_text_E5)
+        expected['dram0_data'].append(dram0_data_E5)

         self.compare_rules(expected, actual)
@@ -939,29 +935,29 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
-        dram0_data_default = self.get_default("dram0_data", expected)
-        dram0_bss_default = self.get_default("dram0_bss", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)
+        dram0_data_default = self.get_default('dram0_data', expected)
+        dram0_bss_default = self.get_default('dram0_bss', expected)

-        iram0_text_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E1 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['rodata'].entries, 'dram0_data')

-        rtc_text_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate", self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate",
-                                    self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E2 = PlacementRule("libfreertos.a", "event_groups", "xEventGroupCreate",
-                                   self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E2 = PlacementRule('libfreertos.a', 'event_groups', 'xEventGroupCreate', self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E2 = PlacementRule('libfreertos.a', 'event_groups', 'xEventGroupCreate',
+                                    self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E2 = PlacementRule('libfreertos.a', 'event_groups', 'xEventGroupCreate',
+                                   self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        rtc_text_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "rtc_text")
-        rtc_data_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["data"].entries | self.model.sections["rodata"].entries, "rtc_data")
-        rtc_bss_E3 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["bss"].entries | self.model.sections["common"].entries, "rtc_bss")
+        rtc_text_E3 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['text'].entries, 'rtc_text')
+        rtc_data_E3 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['data'].entries | self.model.sections['rodata'].entries, 'rtc_data')
+        rtc_bss_E3 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['bss'].entries | self.model.sections['common'].entries, 'rtc_bss')

-        iram0_text_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E4 = PlacementRule("libfreertos.a", "event_groups", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E4 = PlacementRule('libfreertos.a', 'event_groups', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E4 = PlacementRule('libfreertos.a', 'event_groups', None, self.model.sections['rodata'].entries, 'dram0_data')

-        dram0_data_extra = PlacementRule("libfreertos.a", "event_groups", None, [".data.*"], "dram0_data")
-        dram0_bss_extra = PlacementRule("libfreertos.a", "event_groups", None, [".bss.*"], "dram0_bss")
+        dram0_data_extra = PlacementRule('libfreertos.a', 'event_groups', None, ['.data.*'], 'dram0_data')
+        dram0_bss_extra = PlacementRule('libfreertos.a', 'event_groups', None, ['.bss.*'], 'dram0_bss')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E4, self.sections_info)
@@ -983,22 +979,22 @@ entries:
         rtc_data_E3.add_exclusion(dram0_data_E1, self.sections_info)

         # Add the rules
-        expected["iram0_text"].append(iram0_text_E1)
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['iram0_text'].append(iram0_text_E1)
+        expected['dram0_data'].append(dram0_data_E1)

-        expected["rtc_text"].append(rtc_text_E2)
-        expected["rtc_data"].append(rtc_data_E2)
-        expected["rtc_bss"].append(rtc_bss_E2)
+        expected['rtc_text'].append(rtc_text_E2)
+        expected['rtc_data'].append(rtc_data_E2)
+        expected['rtc_bss'].append(rtc_bss_E2)

-        expected["rtc_text"].append(rtc_text_E3)
-        expected["rtc_data"].append(rtc_data_E3)
-        expected["rtc_bss"].append(rtc_bss_E3)
+        expected['rtc_text'].append(rtc_text_E3)
+        expected['rtc_data'].append(rtc_data_E3)
+        expected['rtc_bss'].append(rtc_bss_E3)

-        expected["iram0_text"].append(iram0_text_E4)
-        expected["dram0_data"].append(dram0_data_E4)
+        expected['iram0_text'].append(iram0_text_E4)
+        expected['dram0_data'].append(dram0_data_E4)

-        expected["dram0_data"].append(dram0_data_extra)
-        expected["dram0_bss"].append(dram0_bss_extra)
+        expected['dram0_data'].append(dram0_data_extra)
+        expected['dram0_bss'].append(dram0_bss_extra)

         self.compare_rules(expected, actual)
@@ -1017,19 +1013,19 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)

-        iram0_text_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['rodata'].entries, 'dram0_data')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E1)
         flash_rodata_default.add_exclusion(dram0_data_E1)

         # Add the rules
-        expected["iram0_text"].append(iram0_text_E1)
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['iram0_text'].append(iram0_text_E1)
+        expected['dram0_data'].append(dram0_data_E1)

         self.compare_rules(expected, actual)
@@ -1048,19 +1044,19 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
-        flash_rodata_default = self.get_default("flash_rodata", expected)
+        flash_text_default = self.get_default('flash_text', expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)

-        iram0_text_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
-        dram0_data_E1 = PlacementRule("libfreertos.a", "croutine", None, self.model.sections["rodata"].entries, "dram0_data")
+        iram0_text_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['text'].entries, 'iram0_text')
+        dram0_data_E1 = PlacementRule('libfreertos.a', 'croutine', None, self.model.sections['rodata'].entries, 'dram0_data')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E1)
         flash_rodata_default.add_exclusion(dram0_data_E1)

         # Add the rules
-        expected["iram0_text"].append(iram0_text_E1)
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['iram0_text'].append(iram0_text_E1)
+        expected['dram0_data'].append(dram0_data_E1)

         self.compare_rules(expected, actual)
@@ -1101,7 +1097,7 @@ entries:
 """

         for perf_level in range(0, 4):
-            self.sdkconfig.config.syms["PERFORMANCE_LEVEL"].set_value(str(perf_level))
+            self.sdkconfig.config.syms['PERFORMANCE_LEVEL'].set_value(str(perf_level))

             self.model.mappings = {}
             self.add_fragments(generation_with_condition)
@@ -1111,17 +1107,17 @@ entries:

             if perf_level < 4:
                 for append_no in range(1, perf_level + 1):
-                    flash_text_default = self.get_default("flash_text", expected)
-                    flash_rodata_default = self.get_default("flash_rodata", expected)
+                    flash_text_default = self.get_default('flash_text', expected)
+                    flash_rodata_default = self.get_default('flash_rodata', expected)

-                    iram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["text"].entries, "iram0_text")
-                    dram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["rodata"].entries, "dram0_data")
+                    iram_rule = PlacementRule('lib.a', 'obj' + str(append_no), None, self.model.sections['text'].entries, 'iram0_text')
+                    dram_rule = PlacementRule('lib.a', 'obj' + str(append_no), None, self.model.sections['rodata'].entries, 'dram0_data')

                     flash_text_default.add_exclusion(iram_rule)
                     flash_rodata_default.add_exclusion(dram_rule)

-                    expected["iram0_text"].append(iram_rule)
-                    expected["dram0_data"].append(dram_rule)
+                    expected['iram0_text'].append(iram_rule)
+                    expected['dram0_data'].append(dram_rule)

             self.compare_rules(expected, actual)
@@ -1162,21 +1158,21 @@ entries:
     * (cond_noflash)
 """

-        self.sdkconfig.config.syms["PERFORMANCE_LEVEL"].set_value("1")
+        self.sdkconfig.config.syms['PERFORMANCE_LEVEL'].set_value('1')
         self.add_fragments(generation_with_condition)

         actual = self.model.generate_rules(self.sections_info)

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
+        flash_text_default = self.get_default('flash_text', expected)

-        iram0_text_E1 = PlacementRule("lib.a", "*", None, self.model.sections["text"].entries, "iram0_text")
+        iram0_text_E1 = PlacementRule('lib.a', '*', None, self.model.sections['text'].entries, 'iram0_text')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E1)

         # Add to the placement rules list
-        expected["iram0_text"].append(iram0_text_E1)
+        expected['iram0_text'].append(iram0_text_E1)

         self.compare_rules(expected, actual)
@@ -1203,21 +1199,21 @@ entries:
    * (cond_noflash)
 """

-        self.sdkconfig.config.syms["PERFORMANCE_LEVEL"].set_value("0")
+        self.sdkconfig.config.syms['PERFORMANCE_LEVEL'].set_value('0')
         self.add_fragments(generation_with_condition)

         actual = self.model.generate_rules(self.sections_info)

         expected = self.generate_default_rules()

-        flash_rodata_default = self.get_default("flash_rodata", expected)
+        flash_rodata_default = self.get_default('flash_rodata', expected)

-        dram0_data_E1 = PlacementRule("lib.a", "*", None, self.model.sections["rodata"].entries, "dram0_data")
+        dram0_data_E1 = PlacementRule('lib.a', '*', None, self.model.sections['rodata'].entries, 'dram0_data')

         # Add the exclusions
         flash_rodata_default.add_exclusion(dram0_data_E1)

         # Add to the placement rules list
-        expected["dram0_data"].append(dram0_data_E1)
+        expected['dram0_data'].append(dram0_data_E1)

         self.compare_rules(expected, actual)
@@ -1239,7 +1235,7 @@ entries:
 """

         for perf_level in range(0, 4):
-            self.sdkconfig.config.syms["PERFORMANCE_LEVEL"].set_value(str(perf_level))
+            self.sdkconfig.config.syms['PERFORMANCE_LEVEL'].set_value(str(perf_level))

             self.model.mappings = {}
             self.add_fragments(generation_with_condition)
@@ -1249,17 +1245,17 @@ entries:

             if perf_level < 4:
                 for append_no in range(1, perf_level + 1):
-                    flash_text_default = self.get_default("flash_text", expected)
-                    flash_rodata_default = self.get_default("flash_rodata", expected)
+                    flash_text_default = self.get_default('flash_text', expected)
+                    flash_rodata_default = self.get_default('flash_rodata', expected)

-                    iram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["text"].entries, "iram0_text")
-                    dram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["rodata"].entries, "dram0_data")
+                    iram_rule = PlacementRule('lib.a', 'obj' + str(append_no), None, self.model.sections['text'].entries, 'iram0_text')
+                    dram_rule = PlacementRule('lib.a', 'obj' + str(append_no), None, self.model.sections['rodata'].entries, 'dram0_data')

                     flash_text_default.add_exclusion(iram_rule)
                     flash_rodata_default.add_exclusion(dram_rule)

-                    expected["iram0_text"].append(iram_rule)
-                    expected["dram0_data"].append(dram_rule)
+                    expected['iram0_text'].append(iram_rule)
+                    expected['dram0_data'].append(dram_rule)

             self.compare_rules(expected, actual)
@@ -1289,7 +1285,7 @@ entries:
 """

         for perf_level in range(0, 4):
-            self.sdkconfig.config.syms["PERFORMANCE_LEVEL"].set_value(str(perf_level))
+            self.sdkconfig.config.syms['PERFORMANCE_LEVEL'].set_value(str(perf_level))

             self.model.mappings = {}
             self.add_fragments(multiple_deprecated_definitions)
@@ -1299,17 +1295,17 @@ entries:

             if perf_level < 4:
                 for append_no in range(1, perf_level + 1):
-                    flash_text_default = self.get_default("flash_text", expected)
-                    flash_rodata_default = self.get_default("flash_rodata", expected)
+                    flash_text_default = self.get_default('flash_text', expected)
+                    flash_rodata_default = self.get_default('flash_rodata', expected)

-                    iram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["text"].entries, "iram0_text")
-                    dram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["rodata"].entries, "dram0_data")
+                    iram_rule = PlacementRule('lib.a', 'obj' + str(append_no), None, self.model.sections['text'].entries, 'iram0_text')
+                    dram_rule = PlacementRule('lib.a', 'obj' + str(append_no), None, self.model.sections['rodata'].entries, 'dram0_data')

                     flash_text_default.add_exclusion(iram_rule)
                     flash_rodata_default.add_exclusion(dram_rule)

-                    expected["iram0_text"].append(iram_rule)
-                    expected["dram0_data"].append(dram_rule)
+                    expected['iram0_text'].append(iram_rule)
+                    expected['dram0_data'].append(dram_rule)

             self.compare_rules(expected, actual)
@@ -1342,7 +1338,7 @@ entries:
 """

         for perf_level in range(0, 4):
-            self.sdkconfig.config.syms["PERFORMANCE_LEVEL"].set_value(str(perf_level))
+            self.sdkconfig.config.syms['PERFORMANCE_LEVEL'].set_value(str(perf_level))

             self.model.mappings = {}
             self.add_fragments(multiple_deprecated_definitions)
@@ -1352,17 +1348,17 @@ entries:

             if perf_level < 4:
                 for append_no in range(1, perf_level + 1):
-                    flash_text_default = self.get_default("flash_text", expected)
-                    flash_rodata_default = self.get_default("flash_rodata", expected)
+                    flash_text_default = self.get_default('flash_text', expected)
+                    flash_rodata_default = self.get_default('flash_rodata', expected)

-                    iram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["text"].entries, "iram0_text")
-                    dram_rule = PlacementRule("lib.a", "obj" + str(append_no), None, self.model.sections["rodata"].entries, "dram0_data")
+                    iram_rule = PlacementRule('lib.a', 'obj' + str(append_no), None, self.model.sections['text'].entries, 'iram0_text')
+                    dram_rule = PlacementRule('lib.a', 'obj' + str(append_no), None, self.model.sections['rodata'].entries, 'dram0_data')

                     flash_text_default.add_exclusion(iram_rule)
                     flash_rodata_default.add_exclusion(dram_rule)

-                    expected["iram0_text"].append(iram_rule)
-                    expected["dram0_data"].append(dram_rule)
+                    expected['iram0_text'].append(iram_rule)
+                    expected['dram0_data'].append(dram_rule)

             self.compare_rules(expected, actual)
@@ -1391,14 +1387,14 @@ entries:

         expected = self.generate_default_rules()

-        flash_text_default = self.get_default("flash_text", expected)
+        flash_text_default = self.get_default('flash_text', expected)

-        iram0_text_E1 = PlacementRule("libfreertos2.a", "croutine2", None, self.model.sections["text"].entries, "iram0_text")
-        iram0_text_E2 = PlacementRule("libfreertos2.a", "croutine", None, self.model.sections["text"].entries, "iram0_text")
-        iram0_text_E3 = PlacementRule("libfreertos.a", "croutine", "prvCheckPendingReadyList", self.model.sections["text"].entries, "iram0_text")
-        iram0_text_E4 = PlacementRule("libfreertos.a", "croutine", "prvCheckDelayedList", self.model.sections["text"].entries, "iram0_text")
+        iram0_text_E1 = PlacementRule('libfreertos2.a', 'croutine2', None, self.model.sections['text'].entries, 'iram0_text')
+        iram0_text_E2 = PlacementRule('libfreertos2.a', 'croutine', None, self.model.sections['text'].entries, 'iram0_text')
+        iram0_text_E3 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckPendingReadyList', self.model.sections['text'].entries, 'iram0_text')
+        iram0_text_E4 = PlacementRule('libfreertos.a', 'croutine', 'prvCheckDelayedList', self.model.sections['text'].entries, 'iram0_text')

-        flash_text_extra = PlacementRule("libfreertos.a", "croutine", None, [".text.*", ".literal.*"], "flash_text")
+        flash_text_extra = PlacementRule('libfreertos.a', 'croutine', None, ['.text.*', '.literal.*'], 'flash_text')

         # Add the exclusions
         flash_text_default.add_exclusion(iram0_text_E1, self.sections_info)
@@ -1409,32 +1405,32 @@ entries:
         flash_text_extra.add_exclusion(iram0_text_E4, self.sections_info)

         # Add the rules, arranged by expected order
-        expected["flash_text"].append(flash_text_extra)
-        expected["iram0_text"].append(iram0_text_E4)
-        expected["iram0_text"].append(iram0_text_E3)
-        expected["iram0_text"].append(iram0_text_E2)
-        expected["iram0_text"].append(iram0_text_E1)
+        expected['flash_text'].append(flash_text_extra)
+
expected['iram0_text'].append(iram0_text_E4) + expected['iram0_text'].append(iram0_text_E3) + expected['iram0_text'].append(iram0_text_E2) + expected['iram0_text'].append(iram0_text_E1) # Perform general comparison for all sections self.compare_rules(expected, actual) # Perform ordered comparison - self.assertListEqual(actual["flash_text"], expected["flash_text"]) - self.assertListEqual(actual["iram0_text"], expected["iram0_text"]) + self.assertListEqual(actual['flash_text'], expected['flash_text']) + self.assertListEqual(actual['iram0_text'], expected['iram0_text']) def test_sections_info_parsing(self): self.sections_info = SectionsInfo() - with open("data/sections_parse.info") as sections_info_file_obj: + with open('data/sections_parse.info') as sections_info_file_obj: self.sections_info.add_sections_info(sections_info_file_obj) - sections = self.sections_info.get_obj_sections("libsections_parse.a", "croutine") - self.assertEqual(set(sections), set([".text", ".data", ".bss"])) + sections = self.sections_info.get_obj_sections('libsections_parse.a', 'croutine') + self.assertEqual(set(sections), set(['.text', '.data', '.bss'])) - sections = self.sections_info.get_obj_sections("libsections_parse.a", "FreeRTOS-openocd") - self.assertEqual(set(sections), set([".literal.prvCheckPendingReadyList"])) + sections = self.sections_info.get_obj_sections('libsections_parse.a', 'FreeRTOS-openocd') + self.assertEqual(set(sections), set(['.literal.prvCheckPendingReadyList'])) -if __name__ == "__main__": +if __name__ == '__main__': unittest.main() diff --git a/tools/mass_mfg/mfg_gen.py b/tools/mass_mfg/mfg_gen.py index 6db50ee9f0..a3ad7699d8 100755 --- a/tools/mass_mfg/mfg_gen.py +++ b/tools/mass_mfg/mfg_gen.py @@ -16,26 +16,28 @@ # from __future__ import print_function -from future.moves.itertools import zip_longest -from io import open -import sys -import os + import argparse import distutils.dir_util +import os +import sys +from io import open + +from future.moves.itertools import zip_longest try: - sys.path.insert(0, os.getenv('IDF_PATH') + "/components/nvs_flash/nvs_partition_generator/") + sys.path.insert(0, os.getenv('IDF_PATH') + '/components/nvs_flash/nvs_partition_generator/') import nvs_partition_gen except Exception as e: print(e) - sys.exit("Please check IDF_PATH") + sys.exit('Please check IDF_PATH') def verify_values_exist(input_values_file, values_file_data, key_count_in_values_file, line_no=1): """ Verify all keys have corresponding values in values file """ if len(values_file_data) != key_count_in_values_file: - raise SystemExit("\nError: Number of values is not equal to number of keys in file: %s at line No:%s\n" + raise SystemExit('\nError: Number of values is not equal to number of keys in file: %s at line No:%s\n' % (str(input_values_file), str(line_no))) @@ -59,16 +61,16 @@ def verify_keys_exist(values_file_keys, config_file_data): if keys_missing: for key, line_no in keys_missing: - print("Key:`", str(key), "` at line no:", str(line_no), - " in config file is not found in values file.") + print('Key:`', str(key), '` at line no:', str(line_no), + ' in config file is not found in values file.') raise SystemExit(1) def verify_datatype_encoding(input_config_file, config_file_data): """ Verify datatype and encodings from config file is valid """ - valid_encodings = ["string", "binary", "hex2bin","u8", "i8", "u16", "u32", "i32","base64"] - valid_datatypes = ["file","data","namespace"] + valid_encodings = ['string', 'binary', 'hex2bin','u8', 'i8', 'u16', 'u32', 'i32','base64'] + 
valid_datatypes = ['file','data','namespace'] line_no = 0 for data in config_file_data: @@ -77,11 +79,11 @@ def verify_datatype_encoding(input_config_file, config_file_data): data = data.encode('utf-8') line = data.strip().split(',') if line[1] not in valid_datatypes: - raise SystemExit("Error: config file: %s has invalid datatype at line no:%s\n" + raise SystemExit('Error: config file: %s has invalid datatype at line no:%s\n' % (str(input_config_file), str(line_no))) if 'namespace' not in line: if line[2] not in valid_encodings: - raise SystemExit("Error: config file: %s has invalid encoding at line no:%s\n" + raise SystemExit('Error: config file: %s has invalid encoding at line no:%s\n' % (str(input_config_file), str(line_no))) @@ -97,7 +99,7 @@ def verify_file_data_count(cfg_file_data, keys_repeat): data = data.encode('utf-8') line = data.strip().split(',') if len(line) != 3 and line[0] not in keys_repeat: - raise SystemExit("Error: data missing in config file at line no:%s \n" + raise SystemExit('Error: data missing in config file at line no:%s \n' % str(line_no)) @@ -211,11 +213,11 @@ def add_data_to_file(config_data_to_write, key_value_pair, output_csv_file): """ header = ['key', 'type', 'encoding', 'value'] data_to_write = [] - newline = u"\n" + newline = u'\n' target_csv_file = open(output_csv_file, 'w', newline=None) - line_to_write = u",".join(header) + line_to_write = u','.join(header) target_csv_file.write(line_to_write) target_csv_file.write(newline) for namespace_config_data in config_data_to_write: @@ -223,7 +225,7 @@ def add_data_to_file(config_data_to_write, key_value_pair, output_csv_file): data_to_write = data[:] if 'namespace' in data: data_to_write.append('') - line_to_write = u",".join(data_to_write) + line_to_write = u','.join(data_to_write) target_csv_file.write(line_to_write) target_csv_file.write(newline) else: @@ -234,7 +236,7 @@ def add_data_to_file(config_data_to_write, key_value_pair, output_csv_file): value = key_value_pair[0][1] data_to_write.append(value) del key_value_pair[0] - line_to_write = u",".join(data_to_write) + line_to_write = u','.join(data_to_write) target_csv_file.write(line_to_write) target_csv_file.write(newline) @@ -257,7 +259,7 @@ def set_repeat_value(total_keys_repeat, keys, csv_file, target_filename): key_val_pair = [] key_repeated = [] line = None - newline = u"\n" + newline = u'\n' with open(csv_file, 'r', newline=None) as read_from, open(target_filename,'w', newline=None) as write_to: headers = read_from.readline() values = read_from.readline() @@ -288,7 +290,7 @@ def set_repeat_value(total_keys_repeat, keys, csv_file, target_filename): del key_val_new[0] del key_val_pair[0] - line_to_write = u",".join(row) + line_to_write = u','.join(row) write_to.write(line_to_write) write_to.write(newline) @@ -322,7 +324,7 @@ def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys keys = line.strip().split(',') filename, file_ext = os.path.splitext(args.values) - target_filename = filename + "_created" + file_ext + target_filename = filename + '_created' + file_ext if keys_repeat: target_values_file = set_repeat_value(keys_repeat, keys, args.values, target_filename) else: @@ -356,25 +358,25 @@ def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys key_value_pair = key_value_data[:] # Verify if output csv file does not exist - csv_filename = args.prefix + "-" + file_identifier_value + "." + csv_str + csv_filename = args.prefix + '-' + file_identifier_value + '.' 
+ csv_str output_csv_file = output_csv_target_dir + csv_filename if os.path.isfile(output_csv_file): - raise SystemExit("Target csv file: %s already exists.`" % output_csv_file) + raise SystemExit('Target csv file: %s already exists.`' % output_csv_file) # Add values corresponding to each key to csv intermediate file add_data_to_file(config_data_to_write, key_value_pair, output_csv_file) - print("\nCreated CSV file: ===>", output_csv_file) + print('\nCreated CSV file: ===>', output_csv_file) # Verify if output bin file does not exist - bin_filename = args.prefix + "-" + file_identifier_value + "." + bin_str + bin_filename = args.prefix + '-' + file_identifier_value + '.' + bin_str output_bin_file = output_bin_target_dir + bin_filename if os.path.isfile(output_bin_file): - raise SystemExit("Target binary file: %s already exists.`" % output_bin_file) + raise SystemExit('Target binary file: %s already exists.`' % output_bin_file) args.input = output_csv_file args.output = os.path.join(bin_str, bin_filename) if set_output_keyfile: - args.keyfile = "keys-" + args.prefix + "-" + file_identifier_value + args.keyfile = 'keys-' + args.prefix + '-' + file_identifier_value if is_encr: nvs_partition_gen.encrypt(args) @@ -389,7 +391,7 @@ def create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, keys if len(values_data_line) == 1 and '' in values_data_line: break - print("\nFiles generated in %s ..." % args.outdir) + print('\nFiles generated in %s ...' % args.outdir) except Exception as e: print(e) @@ -405,7 +407,7 @@ def verify_empty_lines_exist(file_name, input_file_data): cfg_data = data.strip().split(',') if len(cfg_data) == 1 and '' in cfg_data: - raise SystemExit("Error: file: %s cannot have empty lines. " % file_name) + raise SystemExit('Error: file: %s cannot have empty lines. ' % file_name) def verify_file_format(args): @@ -416,11 +418,11 @@ def verify_file_format(args): # Verify config file is not empty if os.stat(args.conf).st_size == 0: - raise SystemExit("Error: config file: %s is empty." % args.conf) + raise SystemExit('Error: config file: %s is empty.' % args.conf) # Verify values file is not empty if os.stat(args.values).st_size == 0: - raise SystemExit("Error: values file: %s is empty." % args.values) + raise SystemExit('Error: values file: %s is empty.' 
% args.values) # Verify config file does not have empty lines with open(args.conf, 'r', newline='\n') as csv_config_file: @@ -488,7 +490,7 @@ def generate(args): if (args.keygen or args.inputkey): encryption_enabled = True - print("\nGenerating encrypted NVS binary images...") + print('\nGenerating encrypted NVS binary images...') # Create intermediate csv file create_intermediate_csv(args, keys_in_config_file, keys_in_values_file, @@ -501,7 +503,7 @@ def generate_key(args): def main(): try: - parser = argparse.ArgumentParser(description="\nESP Manufacturing Utility", formatter_class=argparse.RawTextHelpFormatter) + parser = argparse.ArgumentParser(description='\nESP Manufacturing Utility', formatter_class=argparse.RawTextHelpFormatter) subparser = parser.add_subparsers(title='Commands', dest='command', help='\nRun mfg_gen.py {command} -h for additional help\n\n') @@ -536,7 +538,7 @@ def main(): \nVersion 2 - Multipage blob support enabled.\ \nDefault: Version 2 ''') parser_gen.add_argument('--keygen', - action="store_true", + action='store_true', default=False, help='Generates key for encrypting NVS partition') parser_gen.add_argument('--keyfile', @@ -576,5 +578,5 @@ def main(): print(e) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/mkdfu.py b/tools/mkdfu.py index 54109979fd..e521354fe1 100755 --- a/tools/mkdfu.py +++ b/tools/mkdfu.py @@ -21,14 +21,15 @@ # This file must be the first one in the archive. It contains binary structures describing each # subsequent file (for example, where the file needs to be flashed/loaded). -from collections import namedtuple -from future.utils import iteritems import argparse import hashlib import json import os import struct import zlib +from collections import namedtuple + +from future.utils import iteritems try: import typing @@ -43,28 +44,28 @@ except ImportError: pass # CPIO ("new ASCII") format related things -CPIO_MAGIC = b"070701" -CPIO_STRUCT = b"=6s" + b"8s" * 13 +CPIO_MAGIC = b'070701' +CPIO_STRUCT = b'=6s' + b'8s' * 13 CPIOHeader = namedtuple( - "CPIOHeader", + 'CPIOHeader', [ - "magic", - "ino", - "mode", - "uid", - "gid", - "nlink", - "mtime", - "filesize", - "devmajor", - "devminor", - "rdevmajor", - "rdevminor", - "namesize", - "check", + 'magic', + 'ino', + 'mode', + 'uid', + 'gid', + 'nlink', + 'mtime', + 'filesize', + 'devmajor', + 'devminor', + 'rdevmajor', + 'rdevminor', + 'namesize', + 'check', ], ) -CPIO_TRAILER = "TRAILER!!!" +CPIO_TRAILER = 'TRAILER!!!' 
def make_cpio_header( @@ -73,7 +74,7 @@ """ Returns CPIOHeader for the given file name and file size """ def as_hex(val): # type: (int) -> bytes - return "{:08x}".format(val).encode("ascii") + return '{:08x}'.format(val).encode('ascii') hex_0 = as_hex(0) mode = hex_0 if is_trailer else as_hex(0o0100644)
@@ -98,17 +99,17 @@ def make_cpio_header( # DFU format related things # Structure of one entry in dfuinfo0.dat -DFUINFO_STRUCT = b"<I I 255s 16s" -DFUInfo = namedtuple("DFUInfo", ["address", "flags", "name", "md5"]) -DFUINFO_FILE = "dfuinfo0.dat" +DFUINFO_STRUCT = b'<I I 255s 16s' +DFUInfo = namedtuple('DFUInfo', ['address', 'flags', 'name', 'md5']) +DFUINFO_FILE = 'dfuinfo0.dat' # Structure which gets added at the end of the entire DFU file -DFUSUFFIX_STRUCT = b"<H H H H 3s B" -DFUSuffix = namedtuple( - "DFUSuffix", ["bcd_device", "pid", "vid", "bcd_dfu", "sig", "len"] -) +DFUSUFFIX_STRUCT = b'<H H H H 3s B' +DFUSuffix = namedtuple( + 'DFUSuffix', ['bcd_device', 'pid', 'vid', 'bcd_dfu', 'sig', 'len'] +) ESPRESSIF_VID = 12346 # This CRC gets added after DFUSUFFIX_STRUCT -DFUCRC_STRUCT = b"<I" +DFUCRC_STRUCT = b'<I' def dfu_crc(data, crc=0): # type: (bytes, int) -> int
@@ -117,7 +118,7 @@ def dfu_crc(data, crc=0): # type: (bytes, int) -> int -def pad_bytes(b, multiple, padding=b"\x00"): # type: (bytes, int, bytes) -> bytes +def pad_bytes(b, multiple, padding=b'\x00'): # type: (bytes, int, bytes) -> bytes """ Pad 'b' to a length divisible by 'multiple' """ padded_len = (len(b) + multiple - 1) // multiple * multiple return b + padding * (padded_len - len(b))
@@ -132,25 +133,25 @@ class EspDfuWriter(object): def add_file(self, flash_addr, path): # type: (int, str) -> None """ Add file to be written into flash at given address """ - with open(path, "rb") as f: + with open(path, 'rb') as f: self._add_cpio_flash_entry(os.path.basename(path), flash_addr, f.read()) def finish(self): # type: () -> None """ Write DFU file """ # Prepare and add dfuinfo0.dat file - dfuinfo = b"".join([struct.pack(DFUINFO_STRUCT, *item) for item in self.index]) + dfuinfo = b''.join([struct.pack(DFUINFO_STRUCT, *item) for item in self.index]) self._add_cpio_entry(DFUINFO_FILE, dfuinfo, first=True) # Add CPIO archive trailer - self._add_cpio_entry(CPIO_TRAILER, b"", trailer=True) + self._add_cpio_entry(CPIO_TRAILER, b'', trailer=True) # Combine all the entries and pad the file - out_data = b"".join(self.entries) + out_data = b''.join(self.entries) cpio_block_size = 10240 out_data = pad_bytes(out_data, cpio_block_size) # Add DFU suffix and CRC - dfu_suffix = DFUSuffix(0xFFFF, self.pid, ESPRESSIF_VID, 0x0100, b"UFD", 16) + dfu_suffix = DFUSuffix(0xFFFF, self.pid, ESPRESSIF_VID, 0x0100, b'UFD', 16) out_data += struct.pack(DFUSUFFIX_STRUCT, *dfu_suffix) out_data += struct.pack(DFUCRC_STRUCT, dfu_crc(out_data))
@@ -166,7 +167,7 @@ class EspDfuWriter(object): DFUInfo( address=flash_addr, flags=0, - name=filename.encode("utf-8"), + name=filename.encode('utf-8'), md5=md5.digest(), ) ) @@ -175,7 +176,7 @@ class EspDfuWriter(object): def _add_cpio_entry( self, filename, data, first=False, trailer=False ): # type: (str, bytes, bool, bool) -> None - filename_b = filename.encode("utf-8") + b"\x00" + filename_b = filename.encode('utf-8') + b'\x00' cpio_header = make_cpio_header(len(filename_b), len(data), is_trailer=trailer) entry = pad_bytes( struct.pack(CPIO_STRUCT, *cpio_header) + filename_b, 4
@@ -199,21 +200,21 @@ def main(): parser = argparse.ArgumentParser() # Provision to add "info" command - subparsers = parser.add_subparsers(dest="command") - write_parser = subparsers.add_parser("write") - write_parser.add_argument("-o", "--output-file", + subparsers = parser.add_subparsers(dest='command') + write_parser = subparsers.add_parser('write') + write_parser.add_argument('-o', '--output-file', help='Filename for storing the output DFU image', required=True, - type=argparse.FileType("wb")) - write_parser.add_argument("--pid", + type=argparse.FileType('wb')) + write_parser.add_argument('--pid', required=True, type=lambda h: int(h, 16), help='Hexa-decimal product indentificator') - write_parser.add_argument("--json", + write_parser.add_argument('--json', help='Optional file for loading "flash_files" dictionary with <address> <file> items') - write_parser.add_argument("files", - metavar="<address> <file>", help='Add at <address> <file>', - nargs="*") + write_parser.add_argument('files', + metavar='<address> <file>', help='Add at <address> <file>', + nargs='*') args = parser.parse_args()
@@ -252,5 +253,5 @@ def main(): }[args.command](cmd_args) -if __name__ == "__main__": +if __name__ == '__main__': main()
diff --git a/tools/mkuf2.py b/tools/mkuf2.py index c672af15e1..b03d3afe76 100755 --- a/tools/mkuf2.py +++ b/tools/mkuf2.py @@ -15,13 +15,14 @@ # limitations under the License. from __future__ import division + import argparse import hashlib import json import os import struct - from functools import partial + from future.utils import iteritems try:
@@ -126,30 +127,30 @@ def main(): return num # Provision to add "info" command - subparsers = parser.add_subparsers(dest="command") - write_parser = subparsers.add_parser("write") - write_parser.add_argument("-o", "--output-file", + subparsers = parser.add_subparsers(dest='command') + write_parser = subparsers.add_parser('write') + write_parser.add_argument('-o', '--output-file', help='Filename for storing the output UF2 image', required=True) - write_parser.add_argument("--chip-id", + write_parser.add_argument('--chip-id', required=True, type=parse_chip_id, help='Hexa-decimal chip identificator') - write_parser.add_argument("--chunk-size", + write_parser.add_argument('--chunk-size', required=False, type=parse_chunk_size, default=None, help='Specify the used data part of the 512 byte UF2 block. A common value is 256. By ' 'default the largest possible value will be used.') - write_parser.add_argument("--json", + write_parser.add_argument('--json', help='Optional file for loading "flash_files" dictionary with
<address> <file> items') - write_parser.add_argument("--bin", + write_parser.add_argument('--bin', help='Use only a subset of binaries from the JSON file, e.g. "partition_table ' 'bootloader app"', nargs='*') - write_parser.add_argument("files", - metavar="<address> <file>", help='Add at <address> <file>', - nargs="*") + write_parser.add_argument('files', + metavar='<address> <file>', help='Add at <address> <file>
', + nargs='*') args = parser.parse_args() @@ -208,5 +209,5 @@ def main(): }[args.command](cmd_args) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/test_apps/protocols/mqtt/publish_connect_test/app_test.py b/tools/test_apps/protocols/mqtt/publish_connect_test/app_test.py index 78cc96219b..2cb19f58f0 100644 --- a/tools/test_apps/protocols/mqtt/publish_connect_test/app_test.py +++ b/tools/test_apps/protocols/mqtt/publish_connect_test/app_test.py @@ -1,16 +1,17 @@ -from __future__ import print_function -from __future__ import unicode_literals -import re +from __future__ import print_function, unicode_literals + import os -import socket -import select -import subprocess -from threading import Thread, Event -import ttfw_idf -import ssl -import paho.mqtt.client as mqtt -import string import random +import re +import select +import socket +import ssl +import string +import subprocess +from threading import Event, Thread + +import paho.mqtt.client as mqtt +import ttfw_idf DEFAULT_MSG_SIZE = 16 @@ -21,12 +22,12 @@ def _path(f): def set_server_cert_cn(ip): arg_list = [ - ['openssl', 'req', '-out', _path('srv.csr'), '-key', _path('server.key'),'-subj', "/CN={}".format(ip), '-new'], + ['openssl', 'req', '-out', _path('srv.csr'), '-key', _path('server.key'),'-subj', '/CN={}'.format(ip), '-new'], ['openssl', 'x509', '-req', '-in', _path('srv.csr'), '-CA', _path('ca.crt'), '-CAkey', _path('ca.key'), '-CAcreateserial', '-out', _path('srv.crt'), '-days', '360']] for args in arg_list: if subprocess.check_call(args) != 0: - raise("openssl command {} failed".format(args)) + raise('openssl command {} failed'.format(args)) def get_my_ip(): @@ -54,9 +55,9 @@ class MqttPublisher: self.log_details = log_details self.repeat = repeat self.publish_cfg = publish_cfg - self.publish_cfg["qos"] = qos - self.publish_cfg["queue"] = queue - self.publish_cfg["transport"] = transport + self.publish_cfg['qos'] = qos + self.publish_cfg['queue'] = queue + self.publish_cfg['transport'] = transport # static variables used to pass options to and from static callbacks of paho-mqtt client MqttPublisher.event_client_connected = Event() MqttPublisher.event_client_got_all = Event() @@ -90,52 +91,52 @@ class MqttPublisher: def __enter__(self): - qos = self.publish_cfg["qos"] - queue = self.publish_cfg["queue"] - transport = self.publish_cfg["transport"] - broker_host = self.publish_cfg["broker_host_" + transport] - broker_port = self.publish_cfg["broker_port_" + transport] + qos = self.publish_cfg['qos'] + queue = self.publish_cfg['queue'] + transport = self.publish_cfg['transport'] + broker_host = self.publish_cfg['broker_host_' + transport] + broker_port = self.publish_cfg['broker_port_' + transport] # Start the test self.print_details("PUBLISH TEST: transport:{}, qos:{}, sequence:{}, enqueue:{}, sample msg:'{}'" .format(transport, qos, MqttPublisher.published, queue, MqttPublisher.expected_data)) try: - if transport in ["ws", "wss"]: - self.client = mqtt.Client(transport="websockets") + if transport in ['ws', 'wss']: + self.client = mqtt.Client(transport='websockets') else: self.client = mqtt.Client() self.client.on_connect = MqttPublisher.on_connect self.client.on_message = MqttPublisher.on_message self.client.user_data_set(0) - if transport in ["ssl", "wss"]: + if transport in ['ssl', 'wss']: self.client.tls_set(None, None, None, cert_reqs=ssl.CERT_NONE, tls_version=ssl.PROTOCOL_TLSv1_2, ciphers=None) self.client.tls_insecure_set(True) - self.print_details("Connecting...") + 
self.print_details('Connecting...') self.client.connect(broker_host, broker_port, 60) except Exception: - self.print_details("ENV_TEST_FAILURE: Unexpected error while connecting to broker {}".format(broker_host)) + self.print_details('ENV_TEST_FAILURE: Unexpected error while connecting to broker {}'.format(broker_host)) raise # Starting a py-client in a separate thread thread1 = Thread(target=self.mqtt_client_task, args=(self.client,)) thread1.start() - self.print_details("Connecting py-client to broker {}:{}...".format(broker_host, broker_port)) + self.print_details('Connecting py-client to broker {}:{}...'.format(broker_host, broker_port)) if not MqttPublisher.event_client_connected.wait(timeout=30): - raise ValueError("ENV_TEST_FAILURE: Test script cannot connect to broker: {}".format(broker_host)) - self.client.subscribe(self.publish_cfg["subscribe_topic"], qos) - self.dut.write(' '.join(str(x) for x in (transport, self.sample_string, self.repeat, MqttPublisher.published, qos, queue)), eol="\n") + raise ValueError('ENV_TEST_FAILURE: Test script cannot connect to broker: {}'.format(broker_host)) + self.client.subscribe(self.publish_cfg['subscribe_topic'], qos) + self.dut.write(' '.join(str(x) for x in (transport, self.sample_string, self.repeat, MqttPublisher.published, qos, queue)), eol='\n') try: # waiting till subscribed to defined topic - self.dut.expect(re.compile(r"MQTT_EVENT_SUBSCRIBED"), timeout=30) + self.dut.expect(re.compile(r'MQTT_EVENT_SUBSCRIBED'), timeout=30) for _ in range(MqttPublisher.published): - self.client.publish(self.publish_cfg["publish_topic"], self.sample_string * self.repeat, qos) - self.print_details("Publishing...") - self.print_details("Checking esp-client received msg published from py-client...") - self.dut.expect(re.compile(r"Correct pattern received exactly x times"), timeout=60) + self.client.publish(self.publish_cfg['publish_topic'], self.sample_string * self.repeat, qos) + self.print_details('Publishing...') + self.print_details('Checking esp-client received msg published from py-client...') + self.dut.expect(re.compile(r'Correct pattern received exactly x times'), timeout=60) if not MqttPublisher.event_client_got_all.wait(timeout=60): - raise ValueError("Not all data received from ESP32") - print(" - all data received from ESP32") + raise ValueError('Not all data received from ESP32') + print(' - all data received from ESP32') finally: self.event_stop_client.set() thread1.join() @@ -164,7 +165,7 @@ class TlsServer: try: self.socket.bind(('', self.port)) except socket.error as e: - print("Bind failed:{}".format(e)) + print('Bind failed:{}'.format(e)) raise self.socket.listen(1) @@ -190,23 +191,23 @@ class TlsServer: context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) if self.client_cert: context.verify_mode = ssl.CERT_REQUIRED - context.load_verify_locations(cafile=_path("ca.crt")) - context.load_cert_chain(certfile=_path("srv.crt"), keyfile=_path("server.key")) + context.load_verify_locations(cafile=_path('ca.crt')) + context.load_cert_chain(certfile=_path('srv.crt'), keyfile=_path('server.key')) if self.use_alpn: - context.set_alpn_protocols(["mymqtt", "http/1.1"]) + context.set_alpn_protocols(['mymqtt', 'http/1.1']) self.socket = context.wrap_socket(self.socket, server_side=True) try: self.conn, address = self.socket.accept() # accept new connection self.socket.settimeout(10.0) - print(" - connection from: {}".format(address)) + print(' - connection from: {}'.format(address)) if self.use_alpn: self.negotiated_protocol = 
self.conn.selected_alpn_protocol() - print(" - negotiated_protocol: {}".format(self.negotiated_protocol)) + print(' - negotiated_protocol: {}'.format(self.negotiated_protocol)) self.handle_conn() except ssl.SSLError as e: self.conn = None self.ssl_error = str(e) - print(" - SSLError: {}".format(str(e))) + print(' - SSLError: {}'.format(str(e))) def handle_conn(self): while not self.shutdown.is_set(): @@ -216,7 +217,7 @@ class TlsServer: self.process_mqtt_connect() except socket.error as err: - print(" - error: {}".format(err)) + print(' - error: {}'.format(err)) raise def process_mqtt_connect(self): @@ -225,20 +226,20 @@ class TlsServer: message = ''.join(format(x, '02x') for x in data) if message[0:16] == '101800044d515454': if self.refuse_connection is False: - print(" - received mqtt connect, sending ACK") - self.conn.send(bytearray.fromhex("20020000")) + print(' - received mqtt connect, sending ACK') + self.conn.send(bytearray.fromhex('20020000')) else: # injecting connection not authorized error - print(" - received mqtt connect, sending NAK") - self.conn.send(bytearray.fromhex("20020005")) + print(' - received mqtt connect, sending NAK') + self.conn.send(bytearray.fromhex('20020005')) else: - raise Exception(" - error process_mqtt_connect unexpected connect received: {}".format(message)) + raise Exception(' - error process_mqtt_connect unexpected connect received: {}'.format(message)) finally: # stop the server after the connect message in happy flow, or if any exception occur self.shutdown.set() -@ttfw_idf.idf_custom_test(env_tag="Example_WIFI", group="test-apps") +@ttfw_idf.idf_custom_test(env_tag='Example_WIFI', group='test-apps') def test_app_protocol_mqtt_publish_connect(env, extra_data): """ steps: @@ -246,11 +247,11 @@ def test_app_protocol_mqtt_publish_connect(env, extra_data): 2. connect to uri specified in the config 3. 
send and receive data """ - dut1 = env.get_dut("mqtt_publish_connect_test", "tools/test_apps/protocols/mqtt/publish_connect_test", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('mqtt_publish_connect_test', 'tools/test_apps/protocols/mqtt/publish_connect_test', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "mqtt_publish_connect_test.bin") + binary_file = os.path.join(dut1.app.binary_path, 'mqtt_publish_connect_test.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("mqtt_publish_connect_test_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('mqtt_publish_connect_test_bin_size', '{}KB'.format(bin_size // 1024)) # Look for test case symbolic names and publish configs cases = {} @@ -263,30 +264,30 @@ def test_app_protocol_mqtt_publish_connect(env, extra_data): return value.group(1), int(value.group(2)) # Get connection test cases configuration: symbolic names for test cases - for i in ["CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT", - "CONFIG_EXAMPLE_CONNECT_CASE_SERVER_CERT", - "CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH", - "CONFIG_EXAMPLE_CONNECT_CASE_INVALID_SERVER_CERT", - "CONFIG_EXAMPLE_CONNECT_CASE_SERVER_DER_CERT", - "CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_KEY_PWD", - "CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_BAD_CRT", - "CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT_ALPN"]: + for i in ['CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT', + 'CONFIG_EXAMPLE_CONNECT_CASE_SERVER_CERT', + 'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH', + 'CONFIG_EXAMPLE_CONNECT_CASE_INVALID_SERVER_CERT', + 'CONFIG_EXAMPLE_CONNECT_CASE_SERVER_DER_CERT', + 'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_KEY_PWD', + 'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_BAD_CRT', + 'CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT_ALPN']: cases[i] = dut1.app.get_sdkconfig()[i] # Get publish test configuration - publish_cfg["publish_topic"] = dut1.app.get_sdkconfig()["CONFIG_EXAMPLE_SUBSCIBE_TOPIC"].replace('"','') - publish_cfg["subscribe_topic"] = dut1.app.get_sdkconfig()["CONFIG_EXAMPLE_PUBLISH_TOPIC"].replace('"','') - publish_cfg["broker_host_ssl"], publish_cfg["broker_port_ssl"] = get_host_port_from_dut(dut1, "CONFIG_EXAMPLE_BROKER_SSL_URI") - publish_cfg["broker_host_tcp"], publish_cfg["broker_port_tcp"] = get_host_port_from_dut(dut1, "CONFIG_EXAMPLE_BROKER_TCP_URI") - publish_cfg["broker_host_ws"], publish_cfg["broker_port_ws"] = get_host_port_from_dut(dut1, "CONFIG_EXAMPLE_BROKER_WS_URI") - publish_cfg["broker_host_wss"], publish_cfg["broker_port_wss"] = get_host_port_from_dut(dut1, "CONFIG_EXAMPLE_BROKER_WSS_URI") + publish_cfg['publish_topic'] = dut1.app.get_sdkconfig()['CONFIG_EXAMPLE_SUBSCIBE_TOPIC'].replace('"','') + publish_cfg['subscribe_topic'] = dut1.app.get_sdkconfig()['CONFIG_EXAMPLE_PUBLISH_TOPIC'].replace('"','') + publish_cfg['broker_host_ssl'], publish_cfg['broker_port_ssl'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_SSL_URI') + publish_cfg['broker_host_tcp'], publish_cfg['broker_port_tcp'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_TCP_URI') + publish_cfg['broker_host_ws'], publish_cfg['broker_port_ws'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_WS_URI') + publish_cfg['broker_host_wss'], publish_cfg['broker_port_wss'] = get_host_port_from_dut(dut1, 'CONFIG_EXAMPLE_BROKER_WSS_URI') except Exception: print('ENV_TEST_FAILURE: Some mandatory test case not found in sdkconfig') raise dut1.start_app() - esp_ip = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30) - print("Got 
IP={}".format(esp_ip[0])) + esp_ip = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30) + print('Got IP={}'.format(esp_ip[0])) # # start connection test @@ -295,73 +296,73 @@ def test_app_protocol_mqtt_publish_connect(env, extra_data): server_port = 2222 def start_connection_case(case, desc): - print("Starting {}: {}".format(case, desc)) + print('Starting {}: {}'.format(case, desc)) case_id = cases[case] - dut1.write("conn {} {} {}".format(ip, server_port, case_id)) - dut1.expect("Test case:{} started".format(case_id)) + dut1.write('conn {} {} {}'.format(ip, server_port, case_id)) + dut1.expect('Test case:{} started'.format(case_id)) return case_id - for case in ["CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT", "CONFIG_EXAMPLE_CONNECT_CASE_SERVER_CERT", "CONFIG_EXAMPLE_CONNECT_CASE_SERVER_DER_CERT"]: + for case in ['CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT', 'CONFIG_EXAMPLE_CONNECT_CASE_SERVER_CERT', 'CONFIG_EXAMPLE_CONNECT_CASE_SERVER_DER_CERT']: # All these cases connect to the server with no server verification or with server only verification with TlsServer(server_port): - test_nr = start_connection_case(case, "default server - expect to connect normally") - dut1.expect("MQTT_EVENT_CONNECTED: Test={}".format(test_nr), timeout=30) + test_nr = start_connection_case(case, 'default server - expect to connect normally') + dut1.expect('MQTT_EVENT_CONNECTED: Test={}'.format(test_nr), timeout=30) with TlsServer(server_port, refuse_connection=True): - test_nr = start_connection_case(case, "ssl shall connect, but mqtt sends connect refusal") - dut1.expect("MQTT_EVENT_ERROR: Test={}".format(test_nr), timeout=30) - dut1.expect("MQTT ERROR: 0x5") # expecting 0x5 ... connection not authorized error + test_nr = start_connection_case(case, 'ssl shall connect, but mqtt sends connect refusal') + dut1.expect('MQTT_EVENT_ERROR: Test={}'.format(test_nr), timeout=30) + dut1.expect('MQTT ERROR: 0x5') # expecting 0x5 ... connection not authorized error with TlsServer(server_port, client_cert=True) as s: - test_nr = start_connection_case(case, "server with client verification - handshake error since client presents no client certificate") - dut1.expect("MQTT_EVENT_ERROR: Test={}".format(test_nr), timeout=30) - dut1.expect("ESP-TLS ERROR: 0x8010") # expect ... handshake error (PEER_DID_NOT_RETURN_A_CERTIFICATE) - if "PEER_DID_NOT_RETURN_A_CERTIFICATE" not in s.get_last_ssl_error(): - raise("Unexpected ssl error from the server {}".format(s.get_last_ssl_error())) + test_nr = start_connection_case(case, 'server with client verification - handshake error since client presents no client certificate') + dut1.expect('MQTT_EVENT_ERROR: Test={}'.format(test_nr), timeout=30) + dut1.expect('ESP-TLS ERROR: 0x8010') # expect ... 
handshake error (PEER_DID_NOT_RETURN_A_CERTIFICATE) + if 'PEER_DID_NOT_RETURN_A_CERTIFICATE' not in s.get_last_ssl_error(): + raise('Unexpected ssl error from the server {}'.format(s.get_last_ssl_error())) - for case in ["CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH", "CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_KEY_PWD"]: + for case in ['CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH', 'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_KEY_PWD']: # These cases connect to server with both server and client verification (client key might be password protected) with TlsServer(server_port, client_cert=True): - test_nr = start_connection_case(case, "server with client verification - expect to connect normally") - dut1.expect("MQTT_EVENT_CONNECTED: Test={}".format(test_nr), timeout=30) + test_nr = start_connection_case(case, 'server with client verification - expect to connect normally') + dut1.expect('MQTT_EVENT_CONNECTED: Test={}'.format(test_nr), timeout=30) - case = "CONFIG_EXAMPLE_CONNECT_CASE_INVALID_SERVER_CERT" + case = 'CONFIG_EXAMPLE_CONNECT_CASE_INVALID_SERVER_CERT' with TlsServer(server_port) as s: - test_nr = start_connection_case(case, "invalid server certificate on default server - expect ssl handshake error") - dut1.expect("MQTT_EVENT_ERROR: Test={}".format(test_nr), timeout=30) - dut1.expect("ESP-TLS ERROR: 0x8010") # expect ... handshake error (TLSV1_ALERT_UNKNOWN_CA) - if "alert unknown ca" not in s.get_last_ssl_error(): - raise Exception("Unexpected ssl error from the server {}".format(s.get_last_ssl_error())) + test_nr = start_connection_case(case, 'invalid server certificate on default server - expect ssl handshake error') + dut1.expect('MQTT_EVENT_ERROR: Test={}'.format(test_nr), timeout=30) + dut1.expect('ESP-TLS ERROR: 0x8010') # expect ... handshake error (TLSV1_ALERT_UNKNOWN_CA) + if 'alert unknown ca' not in s.get_last_ssl_error(): + raise Exception('Unexpected ssl error from the server {}'.format(s.get_last_ssl_error())) - case = "CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_BAD_CRT" + case = 'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_BAD_CRT' with TlsServer(server_port, client_cert=True) as s: - test_nr = start_connection_case(case, "Invalid client certificate on server with client verification - expect ssl handshake error") - dut1.expect("MQTT_EVENT_ERROR: Test={}".format(test_nr), timeout=30) - dut1.expect("ESP-TLS ERROR: 0x8010") # expect ... handshake error (CERTIFICATE_VERIFY_FAILED) - if "CERTIFICATE_VERIFY_FAILED" not in s.get_last_ssl_error(): - raise Exception("Unexpected ssl error from the server {}".format(s.get_last_ssl_error())) + test_nr = start_connection_case(case, 'Invalid client certificate on server with client verification - expect ssl handshake error') + dut1.expect('MQTT_EVENT_ERROR: Test={}'.format(test_nr), timeout=30) + dut1.expect('ESP-TLS ERROR: 0x8010') # expect ... 
handshake error (CERTIFICATE_VERIFY_FAILED) + if 'CERTIFICATE_VERIFY_FAILED' not in s.get_last_ssl_error(): + raise Exception('Unexpected ssl error from the server {}'.format(s.get_last_ssl_error())) - for case in ["CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT", "CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT_ALPN"]: + for case in ['CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT', 'CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT_ALPN']: with TlsServer(server_port, use_alpn=True) as s: - test_nr = start_connection_case(case, "server with alpn - expect connect, check resolved protocol") - dut1.expect("MQTT_EVENT_CONNECTED: Test={}".format(test_nr), timeout=30) - if case == "CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT" and s.get_negotiated_protocol() is None: - print(" - client with alpn off, no negotiated protocol: OK") - elif case == "CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT_ALPN" and s.get_negotiated_protocol() == "mymqtt": - print(" - client with alpn on, negotiated protocol resolved: OK") + test_nr = start_connection_case(case, 'server with alpn - expect connect, check resolved protocol') + dut1.expect('MQTT_EVENT_CONNECTED: Test={}'.format(test_nr), timeout=30) + if case == 'CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT' and s.get_negotiated_protocol() is None: + print(' - client with alpn off, no negotiated protocol: OK') + elif case == 'CONFIG_EXAMPLE_CONNECT_CASE_NO_CERT_ALPN' and s.get_negotiated_protocol() == 'mymqtt': + print(' - client with alpn on, negotiated protocol resolved: OK') else: - raise Exception("Unexpected negotiated protocol {}".format(s.get_negotiated_protocol())) + raise Exception('Unexpected negotiated protocol {}'.format(s.get_negotiated_protocol())) # # start publish tests def start_publish_case(transport, qos, repeat, published, queue): - print("Starting Publish test: transport:{}, qos:{}, nr_of_msgs:{}, msg_size:{}, enqueue:{}" + print('Starting Publish test: transport:{}, qos:{}, nr_of_msgs:{}, msg_size:{}, enqueue:{}' .format(transport, qos, published, repeat * DEFAULT_MSG_SIZE, queue)) with MqttPublisher(dut1, transport, qos, repeat, published, queue, publish_cfg): pass for qos in [0, 1, 2]: - for transport in ["tcp", "ssl", "ws", "wss"]: + for transport in ['tcp', 'ssl', 'ws', 'wss']: for q in [0, 1]: - if publish_cfg["broker_host_" + transport] is None: + if publish_cfg['broker_host_' + transport] is None: print('Skipping transport: {}...'.format(transport)) continue start_publish_case(transport, qos, 0, 5, q) diff --git a/tools/test_apps/protocols/openssl/app_test.py b/tools/test_apps/protocols/openssl/app_test.py index 7ed325b55a..1e71e52d31 100644 --- a/tools/test_apps/protocols/openssl/app_test.py +++ b/tools/test_apps/protocols/openssl/app_test.py @@ -1,13 +1,14 @@ -from __future__ import print_function -from __future__ import unicode_literals -import re -import os -import socket -from threading import Thread, Event -import ttfw_idf -import ssl +from __future__ import print_function, unicode_literals -SERVER_CERTS_DIR = "server_certs/" +import os +import re +import socket +import ssl +from threading import Event, Thread + +import ttfw_idf + +SERVER_CERTS_DIR = 'server_certs/' def _path(f): @@ -45,7 +46,7 @@ class TlsServer: try: self.socket.bind(('', self.port)) except socket.error as e: - print("Bind failed:{}".format(e)) + print('Bind failed:{}'.format(e)) raise self.socket.listen(1) @@ -63,62 +64,62 @@ class TlsServer: def run_server(self): context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH) - context.load_verify_locations(cafile=_path(SERVER_CERTS_DIR + "ca.crt")) - 
context.load_cert_chain(certfile=_path(SERVER_CERTS_DIR + "server.crt"), keyfile=_path(SERVER_CERTS_DIR + "server.key")) + context.load_verify_locations(cafile=_path(SERVER_CERTS_DIR + 'ca.crt')) + context.load_cert_chain(certfile=_path(SERVER_CERTS_DIR + 'server.crt'), keyfile=_path(SERVER_CERTS_DIR + 'server.key')) context.verify_flags = self.negotiated_protocol self.socket = context.wrap_socket(self.socket, server_side=True) try: - print("Listening socket") + print('Listening socket') self.conn, address = self.socket.accept() # accept new connection self.socket.settimeout(20.0) - print(" - connection from: {}".format(address)) + print(' - connection from: {}'.format(address)) except ssl.SSLError as e: self.conn = None self.ssl_error = str(e) - print(" - SSLError: {}".format(str(e))) + print(' - SSLError: {}'.format(str(e))) -@ttfw_idf.idf_custom_test(env_tag="Example_WIFI", group="test-apps") +@ttfw_idf.idf_custom_test(env_tag='Example_WIFI', group='test-apps') def test_app_esp_openssl(env, extra_data): - dut1 = env.get_dut("openssl_connect_test", "tools/test_apps/protocols/openssl", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('openssl_connect_test', 'tools/test_apps/protocols/openssl', dut_class=ttfw_idf.ESP32DUT) # check and log bin size - binary_file = os.path.join(dut1.app.binary_path, "openssl_connect_test.bin") + binary_file = os.path.join(dut1.app.binary_path, 'openssl_connect_test.bin') bin_size = os.path.getsize(binary_file) - ttfw_idf.log_performance("openssl_connect_test_bin_size", "{}KB".format(bin_size // 1024)) + ttfw_idf.log_performance('openssl_connect_test_bin_size', '{}KB'.format(bin_size // 1024)) dut1.start_app() - esp_ip = dut1.expect(re.compile(r" IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)"), timeout=30) - print("Got IP={}".format(esp_ip[0])) + esp_ip = dut1.expect(re.compile(r' IPv4 address: ([0-9]+\.[0-9]+\.[0-9]+\.[0-9]+)'), timeout=30) + print('Got IP={}'.format(esp_ip[0])) ip = get_my_ip() server_port = 2222 def start_case(case, desc, negotiated_protocol, result): with TlsServer(server_port, negotiated_protocol=negotiated_protocol): - print("Starting {}: {}".format(case, desc)) - dut1.write("conn {} {} {}".format(ip, server_port, case)) + print('Starting {}: {}'.format(case, desc)) + dut1.write('conn {} {} {}'.format(ip, server_port, case)) dut1.expect(re.compile(result), timeout=10) return case # start test cases start_case( - case="CONFIG_TLSV1_1_CONNECT_WRONG_CERT_VERIFY_NONE", - desc="Connect with verify_none mode using wrong certs", + case='CONFIG_TLSV1_1_CONNECT_WRONG_CERT_VERIFY_NONE', + desc='Connect with verify_none mode using wrong certs', negotiated_protocol=ssl.PROTOCOL_TLSv1_1, - result="SSL Connection Succeed") + result='SSL Connection Succeed') start_case( - case="CONFIG_TLSV1_1_CONNECT_WRONG_CERT_VERIFY_PEER", - desc="Connect with verify_peer mode using wrong certs", + case='CONFIG_TLSV1_1_CONNECT_WRONG_CERT_VERIFY_PEER', + desc='Connect with verify_peer mode using wrong certs', negotiated_protocol=ssl.PROTOCOL_TLSv1_1, - result="SSL Connection Failed") + result='SSL Connection Failed') start_case( - case="CONFIG_TLSV1_2_CONNECT_WRONG_CERT_VERIFY_NONE", - desc="Connect with verify_none mode using wrong certs", + case='CONFIG_TLSV1_2_CONNECT_WRONG_CERT_VERIFY_NONE', + desc='Connect with verify_none mode using wrong certs', negotiated_protocol=ssl.PROTOCOL_TLSv1_2, - result="SSL Connection Succeed") + result='SSL Connection Succeed') start_case( - case="CONFIG_TLSV1_1_CONNECT_WRONG_CERT_VERIFY_PEER", - desc="Connect with verify_peer 
mode using wrong certs", + case='CONFIG_TLSV1_1_CONNECT_WRONG_CERT_VERIFY_PEER', + desc='Connect with verify_peer mode using wrong certs', negotiated_protocol=ssl.PROTOCOL_TLSv1_2, - result="SSL Connection Failed") + result='SSL Connection Failed') if __name__ == '__main__': diff --git a/tools/test_apps/protocols/pppos/app_test.py b/tools/test_apps/protocols/pppos/app_test.py index acf23683c1..cc9d2e0971 100644 --- a/tools/test_apps/protocols/pppos/app_test.py +++ b/tools/test_apps/protocols/pppos/app_test.py @@ -1,16 +1,17 @@ -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import print_function, unicode_literals + import re import socket import subprocess -import ttfw_idf import time +from threading import Event, Thread + import netifaces -from threading import Thread, Event +import ttfw_idf def run_server(server_stop, port, server_ip, client_ip): - print("Starting PPP server on port: {}".format(port)) + print('Starting PPP server on port: {}'.format(port)) try: arg_list = ['pppd', port, '115200', '{}:{}'.format(server_ip, client_ip), 'modem', 'local', 'noauth', 'debug', 'nocrtscts', 'nodetach', '+ipv6'] p = subprocess.Popen(arg_list, stdout=subprocess.PIPE, bufsize=1) @@ -19,17 +20,17 @@ def run_server(server_stop, port, server_ip, client_ip): raise ValueError('ENV_TEST_FAILURE: PPP terminated unexpectedly with {}'.format(p.poll())) line = p.stdout.readline() if line: - print("[PPPD:]{}".format(line.rstrip())) + print('[PPPD:]{}'.format(line.rstrip())) time.sleep(0.1) except Exception as e: print(e) raise ValueError('ENV_TEST_FAILURE: Error running PPP server') finally: p.terminate() - print("PPP server stopped") + print('PPP server stopped') -@ttfw_idf.idf_custom_test(env_tag="Example_PPP", group="test-apps") +@ttfw_idf.idf_custom_test(env_tag='Example_PPP', group='test-apps') def test_examples_protocol_pppos_connect(env, extra_data): """ steps: @@ -38,17 +39,17 @@ def test_examples_protocol_pppos_connect(env, extra_data): 3. 
check TCP client-server connection between client-server """ - dut1 = env.get_dut("pppos_connect_test", "tools/test_apps/protocols/pppos", dut_class=ttfw_idf.ESP32DUT) + dut1 = env.get_dut('pppos_connect_test', 'tools/test_apps/protocols/pppos', dut_class=ttfw_idf.ESP32DUT) # Look for test case symbolic names try: - server_ip = dut1.app.get_sdkconfig()["CONFIG_TEST_APP_PPP_SERVER_IP"].replace('"','') - client_ip = dut1.app.get_sdkconfig()["CONFIG_TEST_APP_PPP_CLIENT_IP"].replace('"','') - port_nr = dut1.app.get_sdkconfig()["CONFIG_TEST_APP_TCP_PORT"] + server_ip = dut1.app.get_sdkconfig()['CONFIG_TEST_APP_PPP_SERVER_IP'].replace('"','') + client_ip = dut1.app.get_sdkconfig()['CONFIG_TEST_APP_PPP_CLIENT_IP'].replace('"','') + port_nr = dut1.app.get_sdkconfig()['CONFIG_TEST_APP_TCP_PORT'] except Exception: print('ENV_TEST_FAILURE: Some mandatory configuration not found in sdkconfig') raise - print("Starting the test on {}".format(dut1)) + print('Starting the test on {}'.format(dut1)) dut1.start_app() # the PPP test env uses two ttyUSB's: one for ESP32 board, another one for ppp server @@ -60,29 +61,29 @@ def test_examples_protocol_pppos_connect(env, extra_data): t.start() try: ppp_server_timeout = time.time() + 30 - while "ppp0" not in netifaces.interfaces(): + while 'ppp0' not in netifaces.interfaces(): print("PPP server haven't yet setup its netif, list of active netifs:{}".format(netifaces.interfaces())) time.sleep(0.5) if time.time() > ppp_server_timeout: - raise ValueError("ENV_TEST_FAILURE: PPP server failed to setup ppp0 interface within timeout") - ip6_addr = dut1.expect(re.compile(r"Got IPv6 address (\w{4}\:\w{4}\:\w{4}\:\w{4}\:\w{4}\:\w{4}\:\w{4}\:\w{4})"), timeout=30)[0] - print("IPv6 address of ESP: {}".format(ip6_addr)) + raise ValueError('ENV_TEST_FAILURE: PPP server failed to setup ppp0 interface within timeout') + ip6_addr = dut1.expect(re.compile(r'Got IPv6 address (\w{4}\:\w{4}\:\w{4}\:\w{4}\:\w{4}\:\w{4}\:\w{4}\:\w{4})'), timeout=30)[0] + print('IPv6 address of ESP: {}'.format(ip6_addr)) - dut1.expect(re.compile(r"Socket listening")) - print("Starting the IPv6 test...") + dut1.expect(re.compile(r'Socket listening')) + print('Starting the IPv6 test...') # Connect to TCP server on ESP using IPv6 address - for res in socket.getaddrinfo(ip6_addr + "%ppp0", int(port_nr), socket.AF_INET6, + for res in socket.getaddrinfo(ip6_addr + '%ppp0', int(port_nr), socket.AF_INET6, socket.SOCK_STREAM, socket.SOL_TCP): af, socktype, proto, canonname, addr = res sock = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) sock.connect(addr) - sock.sendall(b"Espressif") + sock.sendall(b'Espressif') sock.close() - dut1.expect(re.compile(r"IPv6 test passed")) - print("IPv6 test passed!") + dut1.expect(re.compile(r'IPv6 test passed')) + print('IPv6 test passed!') - print("Starting the IPv4 test...") + print('Starting the IPv4 test...') # Start the TCP server and wait for the ESP to connect with IPv4 address try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) @@ -103,8 +104,8 @@ def test_examples_protocol_pppos_connect(env, extra_data): conn.send(data.encode()) break conn.close() - dut1.expect(re.compile(r"IPv4 test passed")) - print("IPv4 test passed!") + dut1.expect(re.compile(r'IPv4 test passed')) + print('IPv4 test passed!') finally: server_stop.set() t.join() diff --git a/tools/test_apps/system/gdb_loadable_elf/app_test.py b/tools/test_apps/system/gdb_loadable_elf/app_test.py index 84d7eca538..bca4822e45 100644 --- a/tools/test_apps/system/gdb_loadable_elf/app_test.py +++ 
b/tools/test_apps/system/gdb_loadable_elf/app_test.py @@ -1,12 +1,14 @@ from __future__ import unicode_literals -from tiny_test_fw import Utility -import debug_backend + import os -import pexpect -import serial import threading import time + +import debug_backend +import pexpect +import serial import ttfw_idf +from tiny_test_fw import Utility class SerialThread(object): @@ -33,12 +35,12 @@ class SerialThread(object): Utility.console_log('The pyserial thread is still alive', 'O') -@ttfw_idf.idf_custom_test(env_tag="test_jtag_arm", group="test-apps") +@ttfw_idf.idf_custom_test(env_tag='test_jtag_arm', group='test-apps') def test_app_loadable_elf(env, extra_data): rel_project_path = os.path.join('tools', 'test_apps', 'system', 'gdb_loadable_elf') app_files = ['gdb_loadable_elf.elf'] - app = ttfw_idf.LoadableElfTestApp(rel_project_path, app_files, target="esp32") + app = ttfw_idf.LoadableElfTestApp(rel_project_path, app_files, target='esp32') idf_path = app.get_sdk_path() proj_path = os.path.join(idf_path, rel_project_path) elf_path = os.path.join(app.binary_path, 'gdb_loadable_elf.elf') diff --git a/tools/test_apps/system/memprot/app_test.py b/tools/test_apps/system/memprot/app_test.py index fbc3ec2269..42e731db9f 100644 --- a/tools/test_apps/system/memprot/app_test.py +++ b/tools/test_apps/system/memprot/app_test.py @@ -3,7 +3,6 @@ import ttfw_idf from tiny_test_fw import Utility - mem_test = [ ['IRAM0_SRAM', 'WRX'], ['IRAM0_RTCFAST', 'WRX'], @@ -15,24 +14,24 @@ mem_test = [ ] -@ttfw_idf.idf_custom_test(env_tag="Example_GENERIC", target="esp32s2", group="test-apps") +@ttfw_idf.idf_custom_test(env_tag='Example_GENERIC', target='esp32s2', group='test-apps') def test_memprot(env, extra_data): - dut = env.get_dut("memprot", "tools/test_apps/system/memprot") + dut = env.get_dut('memprot', 'tools/test_apps/system/memprot') dut.start_app() for i in mem_test: if 'R' in i[1]: - dut.expect(i[0] + " read low: OK") - dut.expect(i[0] + " read high: OK") + dut.expect(i[0] + ' read low: OK') + dut.expect(i[0] + ' read high: OK') if 'W' in i[1]: - dut.expect(i[0] + " write low: OK") - dut.expect(i[0] + " write high: OK") + dut.expect(i[0] + ' write low: OK') + dut.expect(i[0] + ' write high: OK') if 'X' in i[1]: - dut.expect(i[0] + " exec low: OK") - dut.expect(i[0] + " exec high: OK") + dut.expect(i[0] + ' exec low: OK') + dut.expect(i[0] + ' exec high: OK') - Utility.console_log("Memprot test done") + Utility.console_log('Memprot test done') if __name__ == '__main__': diff --git a/tools/test_apps/system/monitor_ide_integration/app_test.py b/tools/test_apps/system/monitor_ide_integration/app_test.py index e978e3fca0..4aceecd041 100644 --- a/tools/test_apps/system/monitor_ide_integration/app_test.py +++ b/tools/test_apps/system/monitor_ide_integration/app_test.py @@ -1,12 +1,14 @@ from __future__ import unicode_literals -from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket -from tiny_test_fw import Utility + import glob import json import os import re import threading + import ttfw_idf +from SimpleWebSocketServer import SimpleWebSocketServer, WebSocket +from tiny_test_fw import Utility class IDEWSProtocol(WebSocket): diff --git a/tools/test_apps/system/panic/app_test.py b/tools/test_apps/system/panic/app_test.py index bb0869f738..971ed2a735 100644 --- a/tools/test_apps/system/panic/app_test.py +++ b/tools/test_apps/system/panic/app_test.py @@ -1,295 +1,295 @@ #!/usr/bin/env python import sys + import panic_tests as test from test_panic_util.test_panic_util import panic_test, run_all # 
test_task_wdt - @panic_test(target=['ESP32', 'ESP32S2']) def test_panic_task_wdt(env, _extra_data): - test.task_wdt_inner(env, "panic") + test.task_wdt_inner(env, 'panic') @panic_test() def test_coredump_task_wdt_uart_elf_crc(env, _extra_data): - test.task_wdt_inner(env, "coredump_uart_elf_crc") + test.task_wdt_inner(env, 'coredump_uart_elf_crc') @panic_test() def test_coredump_task_wdt_uart_bin_crc(env, _extra_data): - test.task_wdt_inner(env, "coredump_uart_bin_crc") + test.task_wdt_inner(env, 'coredump_uart_bin_crc') @panic_test() def test_coredump_task_wdt_flash_elf_sha(env, _extra_data): - test.task_wdt_inner(env, "coredump_flash_elf_sha") + test.task_wdt_inner(env, 'coredump_flash_elf_sha') @panic_test() def test_coredump_task_wdt_flash_bin_crc(env, _extra_data): - test.task_wdt_inner(env, "coredump_flash_bin_crc") + test.task_wdt_inner(env, 'coredump_flash_bin_crc') @panic_test() def test_gdbstub_task_wdt(env, _extra_data): - test.task_wdt_inner(env, "gdbstub") + test.task_wdt_inner(env, 'gdbstub') # test_int_wdt @panic_test() def test_panic_int_wdt(env, _extra_data): - test.int_wdt_inner(env, "panic") + test.int_wdt_inner(env, 'panic') @panic_test() def test_coredump_int_wdt_uart_elf_crc(env, _extra_data): - test.int_wdt_inner(env, "coredump_uart_elf_crc") + test.int_wdt_inner(env, 'coredump_uart_elf_crc') @panic_test() def test_coredump_int_wdt_uart_bin_crc(env, _extra_data): - test.int_wdt_inner(env, "coredump_uart_bin_crc") + test.int_wdt_inner(env, 'coredump_uart_bin_crc') @panic_test() def test_coredump_int_wdt_flash_elf_sha(env, _extra_data): - test.int_wdt_inner(env, "coredump_flash_elf_sha") + test.int_wdt_inner(env, 'coredump_flash_elf_sha') @panic_test() def test_coredump_int_wdt_flash_bin_crc(env, _extra_data): - test.int_wdt_inner(env, "coredump_flash_bin_crc") + test.int_wdt_inner(env, 'coredump_flash_bin_crc') @panic_test() def test_gdbstub_int_wdt(env, _extra_data): - test.int_wdt_inner(env, "gdbstub") + test.int_wdt_inner(env, 'gdbstub') # test_int_wdt_cache_disabled @panic_test() def test_panic_int_wdt_cache_disabled(env, _extra_data): - test.int_wdt_cache_disabled_inner(env, "panic") + test.int_wdt_cache_disabled_inner(env, 'panic') @panic_test() def test_coredump_int_wdt_cache_disabled_uart_elf_crc(env, _extra_data): - test.int_wdt_cache_disabled_inner(env, "coredump_uart_elf_crc") + test.int_wdt_cache_disabled_inner(env, 'coredump_uart_elf_crc') @panic_test() def test_coredump_int_wdt_cache_disabled_uart_bin_crc(env, _extra_data): - test.int_wdt_cache_disabled_inner(env, "coredump_uart_bin_crc") + test.int_wdt_cache_disabled_inner(env, 'coredump_uart_bin_crc') @panic_test() def test_coredump_int_wdt_cache_disabled_flash_elf_sha(env, _extra_data): - test.int_wdt_cache_disabled_inner(env, "coredump_flash_elf_sha") + test.int_wdt_cache_disabled_inner(env, 'coredump_flash_elf_sha') @panic_test() def test_coredump_int_wdt_cache_disabled_flash_bin_crc(env, _extra_data): - test.int_wdt_cache_disabled_inner(env, "coredump_flash_bin_crc") + test.int_wdt_cache_disabled_inner(env, 'coredump_flash_bin_crc') @panic_test() def test_gdbstub_int_wdt_cache_disabled(env, _extra_data): - test.int_wdt_cache_disabled_inner(env, "gdbstub") + test.int_wdt_cache_disabled_inner(env, 'gdbstub') # test_cache_error @panic_test() def test_panic_cache_error(env, _extra_data): - test.cache_error_inner(env, "panic") + test.cache_error_inner(env, 'panic') @panic_test() def test_coredump_cache_error_uart_elf_crc(env, _extra_data): - test.cache_error_inner(env, "coredump_uart_elf_crc") + 
test.cache_error_inner(env, 'coredump_uart_elf_crc') @panic_test() def test_coredump_cache_error_uart_bin_crc(env, _extra_data): - test.cache_error_inner(env, "coredump_uart_bin_crc") + test.cache_error_inner(env, 'coredump_uart_bin_crc') @panic_test() def test_coredump_cache_error_flash_elf_sha(env, _extra_data): - test.cache_error_inner(env, "coredump_flash_elf_sha") + test.cache_error_inner(env, 'coredump_flash_elf_sha') @panic_test() def test_coredump_cache_error_flash_bin_crc(env, _extra_data): - test.cache_error_inner(env, "coredump_flash_bin_crc") + test.cache_error_inner(env, 'coredump_flash_bin_crc') @panic_test() def test_gdbstub_cache_error(env, _extra_data): - test.cache_error_inner(env, "gdbstub") + test.cache_error_inner(env, 'gdbstub') # test_stack_overflow @panic_test(target=['ESP32', 'ESP32S2']) def test_panic_stack_overflow(env, _extra_data): - test.stack_overflow_inner(env, "panic") + test.stack_overflow_inner(env, 'panic') @panic_test() def test_coredump_stack_overflow_uart_elf_crc(env, _extra_data): - test.stack_overflow_inner(env, "coredump_uart_elf_crc") + test.stack_overflow_inner(env, 'coredump_uart_elf_crc') @panic_test() def test_coredump_stack_overflow_uart_bin_crc(env, _extra_data): - test.stack_overflow_inner(env, "coredump_uart_bin_crc") + test.stack_overflow_inner(env, 'coredump_uart_bin_crc') @panic_test() def test_coredump_stack_overflow_flash_elf_sha(env, _extra_data): - test.stack_overflow_inner(env, "coredump_flash_elf_sha") + test.stack_overflow_inner(env, 'coredump_flash_elf_sha') @panic_test() def test_coredump_stack_overflow_flash_bin_crc(env, _extra_data): - test.stack_overflow_inner(env, "coredump_flash_bin_crc") + test.stack_overflow_inner(env, 'coredump_flash_bin_crc') @panic_test() def test_gdbstub_stack_overflow(env, _extra_data): - test.stack_overflow_inner(env, "gdbstub") + test.stack_overflow_inner(env, 'gdbstub') # test_instr_fetch_prohibited @panic_test(target=['ESP32', 'ESP32S2']) def test_panic_instr_fetch_prohibited(env, _extra_data): - test.instr_fetch_prohibited_inner(env, "panic") + test.instr_fetch_prohibited_inner(env, 'panic') @panic_test() def test_coredump_instr_fetch_prohibited_uart_elf_crc(env, _extra_data): - test.instr_fetch_prohibited_inner(env, "coredump_uart_elf_crc") + test.instr_fetch_prohibited_inner(env, 'coredump_uart_elf_crc') @panic_test() def test_coredump_instr_fetch_prohibited_uart_bin_crc(env, _extra_data): - test.instr_fetch_prohibited_inner(env, "coredump_uart_bin_crc") + test.instr_fetch_prohibited_inner(env, 'coredump_uart_bin_crc') @panic_test() def test_coredump_instr_fetch_prohibited_flash_elf_sha(env, _extra_data): - test.instr_fetch_prohibited_inner(env, "coredump_flash_elf_sha") + test.instr_fetch_prohibited_inner(env, 'coredump_flash_elf_sha') @panic_test() def test_coredump_instr_fetch_prohibited_flash_bin_crc(env, _extra_data): - test.instr_fetch_prohibited_inner(env, "coredump_flash_bin_crc") + test.instr_fetch_prohibited_inner(env, 'coredump_flash_bin_crc') @panic_test() def test_gdbstub_instr_fetch_prohibited(env, _extra_data): - test.instr_fetch_prohibited_inner(env, "gdbstub") + test.instr_fetch_prohibited_inner(env, 'gdbstub') # test_illegal_instruction @panic_test(target=['ESP32', 'ESP32S2']) def test_panic_illegal_instruction(env, _extra_data): - test.illegal_instruction_inner(env, "panic") + test.illegal_instruction_inner(env, 'panic') @panic_test() def test_coredump_illegal_instruction_uart_elf_crc(env, _extra_data): - test.illegal_instruction_inner(env, "coredump_uart_elf_crc") + 
test.illegal_instruction_inner(env, 'coredump_uart_elf_crc') @panic_test() def test_coredump_illegal_instruction_uart_bin_crc(env, _extra_data): - test.illegal_instruction_inner(env, "coredump_uart_bin_crc") + test.illegal_instruction_inner(env, 'coredump_uart_bin_crc') @panic_test() def test_coredump_illegal_instruction_flash_elf_sha(env, _extra_data): - test.illegal_instruction_inner(env, "coredump_flash_elf_sha") + test.illegal_instruction_inner(env, 'coredump_flash_elf_sha') @panic_test() def test_coredump_illegal_instruction_flash_bin_crc(env, _extra_data): - test.illegal_instruction_inner(env, "coredump_flash_bin_crc") + test.illegal_instruction_inner(env, 'coredump_flash_bin_crc') @panic_test() def test_gdbstub_illegal_instruction(env, _extra_data): - test.illegal_instruction_inner(env, "gdbstub") + test.illegal_instruction_inner(env, 'gdbstub') # test_storeprohibited @panic_test(target=['ESP32', 'ESP32S2']) def test_panic_storeprohibited(env, _extra_data): - test.storeprohibited_inner(env, "panic") + test.storeprohibited_inner(env, 'panic') @panic_test() def test_coredump_storeprohibited_uart_elf_crc(env, _extra_data): - test.storeprohibited_inner(env, "coredump_uart_elf_crc") + test.storeprohibited_inner(env, 'coredump_uart_elf_crc') @panic_test() def test_coredump_storeprohibited_uart_bin_crc(env, _extra_data): - test.storeprohibited_inner(env, "coredump_uart_bin_crc") + test.storeprohibited_inner(env, 'coredump_uart_bin_crc') @panic_test() def test_coredump_storeprohibited_flash_elf_sha(env, _extra_data): - test.storeprohibited_inner(env, "coredump_flash_elf_sha") + test.storeprohibited_inner(env, 'coredump_flash_elf_sha') @panic_test() def test_coredump_storeprohibited_flash_bin_crc(env, _extra_data): - test.storeprohibited_inner(env, "coredump_flash_bin_crc") + test.storeprohibited_inner(env, 'coredump_flash_bin_crc') @panic_test() def test_gdbstub_storeprohibited(env, _extra_data): - test.storeprohibited_inner(env, "gdbstub") + test.storeprohibited_inner(env, 'gdbstub') # test_abort @panic_test(target=['ESP32', 'ESP32S2']) def test_panic_abort(env, _extra_data): - test.abort_inner(env, "panic") + test.abort_inner(env, 'panic') @panic_test() def test_coredump_abort_uart_elf_crc(env, _extra_data): - test.abort_inner(env, "coredump_uart_elf_crc") + test.abort_inner(env, 'coredump_uart_elf_crc') @panic_test() def test_coredump_abort_uart_bin_crc(env, _extra_data): - test.abort_inner(env, "coredump_uart_bin_crc") + test.abort_inner(env, 'coredump_uart_bin_crc') @panic_test() def test_coredump_abort_flash_elf_sha(env, _extra_data): - test.abort_inner(env, "coredump_flash_elf_sha") + test.abort_inner(env, 'coredump_flash_elf_sha') @panic_test() def test_coredump_abort_flash_bin_crc(env, _extra_data): - test.abort_inner(env, "coredump_flash_bin_crc") + test.abort_inner(env, 'coredump_flash_bin_crc') @panic_test() def test_gdbstub_abort(env, _extra_data): - test.abort_inner(env, "gdbstub") + test.abort_inner(env, 'gdbstub') if __name__ == '__main__': diff --git a/tools/test_apps/system/panic/panic_tests.py b/tools/test_apps/system/panic/panic_tests.py index efb629391f..01478fd553 100644 --- a/tools/test_apps/system/panic/panic_tests.py +++ b/tools/test_apps/system/panic/panic_tests.py @@ -1,15 +1,16 @@ #!/usr/bin/env python -from pprint import pformat import re +from pprint import pformat + from test_panic_util.test_panic_util import get_dut def get_default_backtrace(test_name): return [ test_name, - "app_main", - "main_task", - "vPortTaskWrapper" + 'app_main', + 'main_task', + 
'vPortTaskWrapper' ] @@ -17,133 +18,133 @@ def test_common(dut, test_name, expected_backtrace=None): if expected_backtrace is None: expected_backtrace = get_default_backtrace(dut.test_name) - if "gdbstub" in test_name: + if 'gdbstub' in test_name: dut.start_gdb() frames = dut.gdb_backtrace() if not dut.match_backtrace(frames, expected_backtrace): - raise AssertionError("Unexpected backtrace in test {}:\n{}".format(test_name, pformat(frames))) + raise AssertionError('Unexpected backtrace in test {}:\n{}'.format(test_name, pformat(frames))) return - if "uart" in test_name: + if 'uart' in test_name: dut.expect(dut.COREDUMP_UART_END) - dut.expect("Rebooting...") + dut.expect('Rebooting...') - if "uart" in test_name: + if 'uart' in test_name: dut.process_coredump_uart() # TODO: check backtrace - elif "flash" in test_name: + elif 'flash' in test_name: dut.process_coredump_flash() # TODO: check backtrace - elif "panic" in test_name: + elif 'panic' in test_name: # TODO: check backtrace pass def task_wdt_inner(env, test_name): - with get_dut(env, test_name, "test_task_wdt", qemu_wdt_enable=True) as dut: - dut.expect("Task watchdog got triggered. The following tasks did not reset the watchdog in time:") - dut.expect("CPU 0: main") - dut.expect(re.compile(r"abort\(\) was called at PC [0-9xa-f]+ on core 0")) - dut.expect_none("register dump:") + with get_dut(env, test_name, 'test_task_wdt', qemu_wdt_enable=True) as dut: + dut.expect('Task watchdog got triggered. The following tasks did not reset the watchdog in time:') + dut.expect('CPU 0: main') + dut.expect(re.compile(r'abort\(\) was called at PC [0-9xa-f]+ on core 0')) + dut.expect_none('register dump:') dut.expect_backtrace() dut.expect_elf_sha256() - dut.expect_none("Guru Meditation") + dut.expect_none('Guru Meditation') test_common(dut, test_name, expected_backtrace=[ # Backtrace interrupted when abort is called, IDF-842. # Task WDT calls abort internally. 
- "panic_abort", "esp_system_abort" + 'panic_abort', 'esp_system_abort' ]) def int_wdt_inner(env, test_name): - with get_dut(env, test_name, "test_int_wdt", qemu_wdt_enable=True) as dut: - dut.expect_gme("Interrupt wdt timeout on CPU0") + with get_dut(env, test_name, 'test_int_wdt', qemu_wdt_enable=True) as dut: + dut.expect_gme('Interrupt wdt timeout on CPU0') dut.expect_reg_dump(0) dut.expect_backtrace() - dut.expect_none("Guru Meditation") + dut.expect_none('Guru Meditation') dut.expect_reg_dump(1) dut.expect_backtrace() dut.expect_elf_sha256() - dut.expect_none("Guru Meditation") + dut.expect_none('Guru Meditation') test_common(dut, test_name) def int_wdt_cache_disabled_inner(env, test_name): - with get_dut(env, test_name, "test_int_wdt_cache_disabled", qemu_wdt_enable=True) as dut: - dut.expect("Re-enable cpu cache.") - dut.expect_gme("Interrupt wdt timeout on CPU0") + with get_dut(env, test_name, 'test_int_wdt_cache_disabled', qemu_wdt_enable=True) as dut: + dut.expect('Re-enable cpu cache.') + dut.expect_gme('Interrupt wdt timeout on CPU0') dut.expect_reg_dump(0) - dut.expect("Backtrace:") - dut.expect_none("Guru Meditation") + dut.expect('Backtrace:') + dut.expect_none('Guru Meditation') dut.expect_reg_dump(1) dut.expect_backtrace() dut.expect_elf_sha256() - dut.expect_none("Guru Meditation") + dut.expect_none('Guru Meditation') test_common(dut, test_name) def cache_error_inner(env, test_name): - with get_dut(env, test_name, "test_cache_error") as dut: - dut.expect("Re-enable cpu cache.") - dut.expect_gme("Cache disabled but cached memory region accessed") + with get_dut(env, test_name, 'test_cache_error') as dut: + dut.expect('Re-enable cpu cache.') + dut.expect_gme('Cache disabled but cached memory region accessed') dut.expect_reg_dump(0) dut.expect_backtrace() dut.expect_elf_sha256() - dut.expect_none("Guru Meditation") + dut.expect_none('Guru Meditation') test_common(dut, test_name, - expected_backtrace=["die"] + get_default_backtrace(dut.test_name)) + expected_backtrace=['die'] + get_default_backtrace(dut.test_name)) def abort_inner(env, test_name): - with get_dut(env, test_name, "test_abort") as dut: - dut.expect(re.compile(r"abort\(\) was called at PC [0-9xa-f]+ on core 0")) + with get_dut(env, test_name, 'test_abort') as dut: + dut.expect(re.compile(r'abort\(\) was called at PC [0-9xa-f]+ on core 0')) dut.expect_backtrace() dut.expect_elf_sha256() - dut.expect_none("Guru Meditation", "Re-entered core dump") + dut.expect_none('Guru Meditation', 'Re-entered core dump') test_common(dut, test_name, expected_backtrace=[ # Backtrace interrupted when abort is called, IDF-842 - "panic_abort", "esp_system_abort" + 'panic_abort', 'esp_system_abort' ]) def storeprohibited_inner(env, test_name): - with get_dut(env, test_name, "test_storeprohibited") as dut: - dut.expect_gme("StoreProhibited") + with get_dut(env, test_name, 'test_storeprohibited') as dut: + dut.expect_gme('StoreProhibited') dut.expect_reg_dump(0) dut.expect_backtrace() dut.expect_elf_sha256() - dut.expect_none("Guru Meditation") + dut.expect_none('Guru Meditation') test_common(dut, test_name) def stack_overflow_inner(env, test_name): - with get_dut(env, test_name, "test_stack_overflow") as dut: - dut.expect_gme("Unhandled debug exception") - dut.expect("Stack canary watchpoint triggered (main)") + with get_dut(env, test_name, 'test_stack_overflow') as dut: + dut.expect_gme('Unhandled debug exception') + dut.expect('Stack canary watchpoint triggered (main)') dut.expect_reg_dump(0) dut.expect_backtrace() 
         dut.expect_elf_sha256()
-        dut.expect_none("Guru Meditation")
+        dut.expect_none('Guru Meditation')
         test_common(dut, test_name)


 def illegal_instruction_inner(env, test_name):
-    with get_dut(env, test_name, "test_illegal_instruction") as dut:
-        dut.expect_gme("IllegalInstruction")
+    with get_dut(env, test_name, 'test_illegal_instruction') as dut:
+        dut.expect_gme('IllegalInstruction')
         dut.expect_reg_dump(0)
         dut.expect_backtrace()
         dut.expect_elf_sha256()
-        dut.expect_none("Guru Meditation")
+        dut.expect_none('Guru Meditation')
         test_common(dut, test_name)


 def instr_fetch_prohibited_inner(env, test_name):
-    with get_dut(env, test_name, "test_instr_fetch_prohibited") as dut:
-        dut.expect_gme("InstrFetchProhibited")
+    with get_dut(env, test_name, 'test_instr_fetch_prohibited') as dut:
+        dut.expect_gme('InstrFetchProhibited')
         dut.expect_reg_dump(0)
         dut.expect_backtrace()
         dut.expect_elf_sha256()
-        dut.expect_none("Guru Meditation")
+        dut.expect_none('Guru Meditation')
         test_common(dut, test_name,
-                    expected_backtrace=["_init"] + get_default_backtrace(dut.test_name))
+                    expected_backtrace=['_init'] + get_default_backtrace(dut.test_name))
diff --git a/tools/test_apps/system/panic/test_panic_util/test_panic_util.py b/tools/test_apps/system/panic/test_panic_util/test_panic_util.py
index 3cc98a02f7..99112c3ebb 100644
--- a/tools/test_apps/system/panic/test_panic_util/test_panic_util.py
+++ b/tools/test_apps/system/panic/test_panic_util/test_panic_util.py
@@ -1,17 +1,17 @@
 import logging
 import os
-from pygdbmi.gdbcontroller import GdbController
 import re
 import subprocess
 import sys
-import ttfw_idf
-from tiny_test_fw import Utility, TinyFW, DUT
-from tiny_test_fw.Utility import SearchCases, CaseConfig
+import ttfw_idf
+from pygdbmi.gdbcontroller import GdbController
+from tiny_test_fw import DUT, TinyFW, Utility
+from tiny_test_fw.Utility import CaseConfig, SearchCases

 # hard-coded to the path one level above - only intended to be used from the panic test app
-TEST_PATH = os.path.relpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), ".."), os.getenv("IDF_PATH"))
-TEST_SUITE = "Panic"
+TEST_PATH = os.path.relpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..'), os.getenv('IDF_PATH'))
+TEST_SUITE = 'Panic'


 def ok(data):
@@ -21,7 +21,7 @@ def ok(data):

 def unexpected(data):
     """ Helper function used with dut.expect_any """
-    raise AssertionError("Unexpected: {}".format(data))
+    raise AssertionError('Unexpected: {}'.format(data))


 class PanicTestApp(ttfw_idf.TestApp):
@@ -33,8 +33,8 @@ class PanicTestMixin(object):
     BOOT_CMD_ADDR = 0x9000
     BOOT_CMD_SIZE = 0x1000
     DEFAULT_EXPECT_TIMEOUT = 10
-    COREDUMP_UART_START = "================= CORE DUMP START ================="
-    COREDUMP_UART_END = "================= CORE DUMP END ================="
+    COREDUMP_UART_START = '================= CORE DUMP START ================='
+    COREDUMP_UART_END = '================= CORE DUMP END ================='

     def start_test(self, test_name):
         """ Starts the app and sends it the test name """
@@ -42,24 +42,24 @@
         # Start the app and verify that it has started up correctly
         self.start_capture_raw_data()
         self.start_app()
-        self.expect("Enter test name: ")
-        Utility.console_log("Setting boot command: " + test_name)
+        self.expect('Enter test name: ')
+        Utility.console_log('Setting boot command: ' + test_name)
         self.write(test_name)
-        self.expect("Got test name: " + test_name)
+        self.expect('Got test name: ' + test_name)

     def expect_none(self, *patterns, **timeout_args):
         """ like dut.expect_all, but with an inverse logic """
         found_data = []
-        if "timeout" not in timeout_args:
-            timeout_args["timeout"] = 1
+        if 'timeout' not in timeout_args:
+            timeout_args['timeout'] = 1

         def found(data):
-            raise AssertionError("Unexpected: {}".format(data))
+            raise AssertionError('Unexpected: {}'.format(data))
             found_data.append(data)
         try:
             expect_items = [(pattern, found) for pattern in patterns]
             self.expect_any(*expect_items, **timeout_args)
-            raise AssertionError("Unexpected: {}".format(found_data))
+            raise AssertionError('Unexpected: {}'.format(found_data))
         except DUT.ExpectTimeout:
             return True
@@ -69,18 +69,18 @@ class PanicTestMixin(object):
     def expect_reg_dump(self, core=0):
         """ Expect method for the register dump """
-        self.expect(re.compile(r"Core\s+%d register dump:" % core))
+        self.expect(re.compile(r'Core\s+%d register dump:' % core))

     def expect_elf_sha256(self):
         """ Expect method for ELF SHA256 line """
         elf_sha256 = self.app.get_elf_sha256()
         sdkconfig = self.app.get_sdkconfig()
-        elf_sha256_len = int(sdkconfig.get("CONFIG_APP_RETRIEVE_LEN_ELF_SHA", "16"))
-        self.expect("ELF file SHA256: " + elf_sha256[0:elf_sha256_len])
+        elf_sha256_len = int(sdkconfig.get('CONFIG_APP_RETRIEVE_LEN_ELF_SHA', '16'))
+        self.expect('ELF file SHA256: ' + elf_sha256[0:elf_sha256_len])

     def expect_backtrace(self):
-        self.expect("Backtrace:")
-        self.expect_none("CORRUPTED")
+        self.expect('Backtrace:')
+        self.expect_none('CORRUPTED')

     def __enter__(self):
         self._raw_data = None
@@ -89,8 +89,8 @@ class PanicTestMixin(object):
     def __exit__(self, type, value, traceback):
         log_folder = self.app.get_log_folder(TEST_SUITE)
-        with open(os.path.join(log_folder, "log_" + self.test_name + ".txt"), "w") as log_file:
-            Utility.console_log("Writing output of {} to {}".format(self.test_name, log_file.name))
+        with open(os.path.join(log_folder, 'log_' + self.test_name + '.txt'), 'w') as log_file:
+            Utility.console_log('Writing output of {} to {}'.format(self.test_name, log_file.name))
             log_file.write(self.get_raw_data())
         if self.gdb:
             self.gdb.exit()
@@ -103,18 +103,18 @@ class PanicTestMixin(object):
     def _call_espcoredump(self, extra_args, coredump_file_name, output_file_name):
         # no "with" here, since we need the file to be open for later inspection by the test case
-        self.coredump_output = open(output_file_name, "w")
-        espcoredump_script = os.path.join(os.environ["IDF_PATH"], "components", "espcoredump", "espcoredump.py")
+        self.coredump_output = open(output_file_name, 'w')
+        espcoredump_script = os.path.join(os.environ['IDF_PATH'], 'components', 'espcoredump', 'espcoredump.py')
         espcoredump_args = [
             sys.executable,
             espcoredump_script,
-            "info_corefile",
-            "--core", coredump_file_name,
+            'info_corefile',
+            '--core', coredump_file_name,
         ]
         espcoredump_args += extra_args
         espcoredump_args.append(self.app.elf_file)
-        Utility.console_log("Running " + " ".join(espcoredump_args))
-        Utility.console_log("espcoredump output is written to " + self.coredump_output.name)
+        Utility.console_log('Running ' + ' '.join(espcoredump_args))
+        Utility.console_log('espcoredump output is written to ' + self.coredump_output.name)
         subprocess.check_call(espcoredump_args, stdout=self.coredump_output)
         self.coredump_output.flush()
@@ -127,22 +127,22 @@ class PanicTestMixin(object):
         coredump_start = data.find(self.COREDUMP_UART_START)
         coredump_end = data.find(self.COREDUMP_UART_END)
         coredump_base64 = data[coredump_start + len(self.COREDUMP_UART_START):coredump_end]
-        with open(os.path.join(log_folder, "coredump_data_" + self.test_name + ".b64"), "w") as coredump_file:
"w") as coredump_file: - Utility.console_log("Writing UART base64 core dump to " + coredump_file.name) + with open(os.path.join(log_folder, 'coredump_data_' + self.test_name + '.b64'), 'w') as coredump_file: + Utility.console_log('Writing UART base64 core dump to ' + coredump_file.name) coredump_file.write(coredump_base64) - output_file_name = os.path.join(log_folder, "coredump_uart_result_" + self.test_name + ".txt") - self._call_espcoredump(["--core-format", "b64"], coredump_file.name, output_file_name) + output_file_name = os.path.join(log_folder, 'coredump_uart_result_' + self.test_name + '.txt') + self._call_espcoredump(['--core-format', 'b64'], coredump_file.name, output_file_name) def process_coredump_flash(self): """ Extract the core dump from flash, run espcoredump on it """ log_folder = self.app.get_log_folder(TEST_SUITE) - coredump_file_name = os.path.join(log_folder, "coredump_data_" + self.test_name + ".bin") - Utility.console_log("Writing flash binary core dump to " + coredump_file_name) - self.dump_flash(coredump_file_name, partition="coredump") + coredump_file_name = os.path.join(log_folder, 'coredump_data_' + self.test_name + '.bin') + Utility.console_log('Writing flash binary core dump to ' + coredump_file_name) + self.dump_flash(coredump_file_name, partition='coredump') - output_file_name = os.path.join(log_folder, "coredump_flash_result_" + self.test_name + ".txt") - self._call_espcoredump(["--core-format", "raw"], coredump_file_name, output_file_name) + output_file_name = os.path.join(log_folder, 'coredump_flash_result_' + self.test_name + '.txt') + self._call_espcoredump(['--core-format', 'raw'], coredump_file_name, output_file_name) def start_gdb(self): """ @@ -152,44 +152,44 @@ class PanicTestMixin(object): self.stop_receive() self._port_close() - Utility.console_log("Starting GDB...", "orange") - self.gdb = GdbController(gdb_path=self.TOOLCHAIN_PREFIX + "gdb") + Utility.console_log('Starting GDB...', 'orange') + self.gdb = GdbController(gdb_path=self.TOOLCHAIN_PREFIX + 'gdb') # pygdbmi logs to console by default, make it log to a file instead log_folder = self.app.get_log_folder(TEST_SUITE) - pygdbmi_log_file_name = os.path.join(log_folder, "pygdbmi_log_" + self.test_name + ".txt") + pygdbmi_log_file_name = os.path.join(log_folder, 'pygdbmi_log_' + self.test_name + '.txt') pygdbmi_logger = self.gdb.logger pygdbmi_logger.setLevel(logging.DEBUG) while pygdbmi_logger.hasHandlers(): pygdbmi_logger.removeHandler(pygdbmi_logger.handlers[0]) log_handler = logging.FileHandler(pygdbmi_log_file_name) - log_handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s: %(message)s")) + log_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s')) pygdbmi_logger.addHandler(log_handler) # Set up logging for GDB remote protocol - gdb_remotelog_file_name = os.path.join(log_folder, "gdb_remote_log_" + self.test_name + ".txt") - self.gdb.write("-gdb-set remotelogfile " + gdb_remotelog_file_name) + gdb_remotelog_file_name = os.path.join(log_folder, 'gdb_remote_log_' + self.test_name + '.txt') + self.gdb.write('-gdb-set remotelogfile ' + gdb_remotelog_file_name) # Load the ELF file - self.gdb.write("-file-exec-and-symbols {}".format(self.app.elf_file)) + self.gdb.write('-file-exec-and-symbols {}'.format(self.app.elf_file)) # Connect GDB to UART - Utility.console_log("Connecting to GDB Stub...", "orange") - self.gdb.write("-gdb-set serial baud 115200") - responses = self.gdb.write("-target-select remote " + self.get_gdb_remote(), timeout_sec=3) + 
Utility.console_log('Connecting to GDB Stub...', 'orange') + self.gdb.write('-gdb-set serial baud 115200') + responses = self.gdb.write('-target-select remote ' + self.get_gdb_remote(), timeout_sec=3) # Make sure we get the 'stopped' notification stop_response = self.find_gdb_response('stopped', 'notify', responses) if not stop_response: - responses = self.gdb.write("-exec-interrupt", timeout_sec=3) + responses = self.gdb.write('-exec-interrupt', timeout_sec=3) stop_response = self.find_gdb_response('stopped', 'notify', responses) assert stop_response - frame = stop_response["payload"]["frame"] - if "file" not in frame: - frame["file"] = "?" - if "line" not in frame: - frame["line"] = "?" - Utility.console_log("Stopped in {func} at {addr} ({file}:{line})".format(**frame), "orange") + frame = stop_response['payload']['frame'] + if 'file' not in frame: + frame['file'] = '?' + if 'line' not in frame: + frame['line'] = '?' + Utility.console_log('Stopped in {func} at {addr} ({file}:{line})'.format(**frame), 'orange') # Drain remaining responses self.gdb.get_gdb_response(raise_error_on_timeout=False) @@ -201,8 +201,8 @@ class PanicTestMixin(object): """ assert self.gdb - responses = self.gdb.write("-stack-list-frames", timeout_sec=3) - return self.find_gdb_response("done", "result", responses)["payload"]["stack"] + responses = self.gdb.write('-stack-list-frames', timeout_sec=3) + return self.find_gdb_response('done', 'result', responses)['payload']['stack'] @staticmethod def match_backtrace(gdb_backtrace, expected_functions_list): @@ -211,7 +211,7 @@ class PanicTestMixin(object): given by gdb_backtrace argument. The latter is in the same format as returned by gdb_backtrace() function. """ - return all([frame["func"] == expected_functions_list[i] for i, frame in enumerate(gdb_backtrace)]) + return all([frame['func'] == expected_functions_list[i] for i, frame in enumerate(gdb_backtrace)]) @staticmethod def find_gdb_response(message, response_type, responses): @@ -220,8 +220,8 @@ class PanicTestMixin(object): by message and type. Returned message is a dictionary, refer to pygdbmi docs for the format. 
""" def match_response(response): - return (response["message"] == message and - response["type"] == response_type) + return (response['message'] == message and + response['type'] == response_type) filtered_responses = [r for r in responses if match_response(r)] if not filtered_responses: @@ -252,11 +252,11 @@ def panic_test(**kwargs): if 'additional_duts' not in kwargs: kwargs['additional_duts'] = PANIC_TEST_DUT_DICT - return ttfw_idf.idf_custom_test(app=PanicTestApp, env_tag="Example_GENERIC", **kwargs) + return ttfw_idf.idf_custom_test(app=PanicTestApp, env_tag='Example_GENERIC', **kwargs) def get_dut(env, app_config_name, test_name, qemu_wdt_enable=False): - dut = env.get_dut("panic", TEST_PATH, app_config_name=app_config_name, allow_dut_exception=True) + dut = env.get_dut('panic', TEST_PATH, app_config_name=app_config_name, allow_dut_exception=True) dut.qemu_wdt_enable = qemu_wdt_enable """ Wrapper for getting the DUT and starting the test """ dut.start_test(test_name) @@ -270,13 +270,13 @@ def run_all(filename, case_filter=[]): """ TinyFW.set_default_config(env_config_file=None, test_suite_name=TEST_SUITE) test_methods = SearchCases.Search.search_test_cases(filename) - test_methods = filter(lambda m: not m.case_info["ignore"], test_methods) + test_methods = filter(lambda m: not m.case_info['ignore'], test_methods) test_cases = CaseConfig.Parser.apply_config(test_methods, None) tests_failed = [] for case in test_cases: test_name = case.test_method.__name__ if case_filter: - if case_filter[0].endswith("*"): + if case_filter[0].endswith('*'): if not test_name.startswith(case_filter[0][:-1]): continue else: @@ -287,9 +287,9 @@ def run_all(filename, case_filter=[]): tests_failed.append(case) if tests_failed: - print("The following tests have failed:") + print('The following tests have failed:') for case in tests_failed: - print(" - " + case.test_method.__name__) + print(' - ' + case.test_method.__name__) raise SystemExit(1) - print("Tests pass") + print('Tests pass') diff --git a/tools/test_apps/system/startup/app_test.py b/tools/test_apps/system/startup/app_test.py index 6d3950ca1f..9ce0a214f0 100644 --- a/tools/test_apps/system/startup/app_test.py +++ b/tools/test_apps/system/startup/app_test.py @@ -1,22 +1,23 @@ #!/usr/bin/env python -import os import glob +import os + import ttfw_idf from tiny_test_fw import Utility -@ttfw_idf.idf_custom_test(env_tag="Example_GENERIC", group="test-apps") +@ttfw_idf.idf_custom_test(env_tag='Example_GENERIC', group='test-apps') def test_startup(env, extra_data): - config_files = glob.glob(os.path.join(os.path.dirname(__file__), "sdkconfig.ci.*")) - config_names = [os.path.basename(s).replace("sdkconfig.ci.", "") for s in config_files] + config_files = glob.glob(os.path.join(os.path.dirname(__file__), 'sdkconfig.ci.*')) + config_names = [os.path.basename(s).replace('sdkconfig.ci.', '') for s in config_files] for name in config_names: - Utility.console_log("Checking config \"{}\"... ".format(name), end="") - dut = env.get_dut("startup", "tools/test_apps/system/startup", app_config_name=name) + Utility.console_log("Checking config \"{}\"... 
".format(name), end='') + dut = env.get_dut('startup', 'tools/test_apps/system/startup', app_config_name=name) dut.start_app() - dut.expect("app_main running") + dut.expect('app_main running') env.close_dut(dut.name) - Utility.console_log("done") + Utility.console_log('done') if __name__ == '__main__': diff --git a/tools/test_idf_monitor/idf_monitor_wrapper.py b/tools/test_idf_monitor/idf_monitor_wrapper.py index b11bb1d5f3..3381bb98a0 100644 --- a/tools/test_idf_monitor/idf_monitor_wrapper.py +++ b/tools/test_idf_monitor/idf_monitor_wrapper.py @@ -13,13 +13,15 @@ # limitations under the License. from __future__ import unicode_literals -import sys + import argparse -import serial +import sys import threading import time from io import open +import serial + try: import idf_monitor except ImportError: @@ -35,7 +37,7 @@ def monitor_serial_reader_state(serial_reader, file_to_create): """ while not serial_reader.serial.is_open or not serial_reader.alive: time.sleep(1) - with open(file_to_create, "w", encoding='utf-8'): + with open(file_to_create, 'w', encoding='utf-8'): pass @@ -45,7 +47,7 @@ def main(): parser.add_argument('--print_filter') parser.add_argument('--serial_alive_file') parser.add_argument('--toolchain-prefix') - parser.add_argument('--decode-panic', default="disable") + parser.add_argument('--decode-panic', default='disable') parser.add_argument('--target', default=None) parser.add_argument('--elf-file') args = parser.parse_args() @@ -65,5 +67,5 @@ def main(): sys.stderr.write('Monitoring thread joined.\n') -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/test_idf_monitor/run_test_idf_monitor.py b/tools/test_idf_monitor/run_test_idf_monitor.py index d928353d11..20f312f7c7 100755 --- a/tools/test_idf_monitor/run_test_idf_monitor.py +++ b/tools/test_idf_monitor/run_test_idf_monitor.py @@ -14,20 +14,20 @@ # See the License for the specific language governing permissions and # limitations under the License. -from __future__ import print_function -from __future__ import unicode_literals +from __future__ import print_function, unicode_literals + +import errno +import filecmp +import os +import pty +import socket +import subprocess +import sys +import tempfile +import threading +import time from builtins import object from io import open -import os -import sys -import time -import subprocess -import socket -import pty -import filecmp -import threading -import errno -import tempfile XTENSA_ARGS = '--toolchain-prefix xtensa-esp32-elf-' RISCV_ARGS = '--decode-panic backtrace --target esp32c3 --toolchain-prefix riscv32-esp-elf-' @@ -104,8 +104,8 @@ def test_iteration(runner, test): pass print('\nRunning test on {} with filter "{}" and expecting {}'.format(test[0], test[1], test[2])) try: - with open(OUT_DIR + test[2], "w", encoding='utf-8') as o_f, \ - tempfile.NamedTemporaryFile(dir=OUT_DIR, prefix=ERR_OUT, mode="w", delete=False) as e_f: + with open(OUT_DIR + test[2], 'w', encoding='utf-8') as o_f, \ + tempfile.NamedTemporaryFile(dir=OUT_DIR, prefix=ERR_OUT, mode='w', delete=False) as e_f: monitor_cmd = [sys.executable, IDF_MONITOR_WAPPER, '--port', 'socket://{}:{}?logging=debug'.format(HOST, runner.port), '--print_filter', test[1], @@ -171,7 +171,7 @@ def test_iteration(runner, test): if filecmp.cmp(f1, f2, shallow=False): print('\tTest has passed') else: - raise RuntimeError("The contents of the files are different. Please examine the artifacts.") + raise RuntimeError('The contents of the files are different. 
Please examine the artifacts.') def main(): @@ -202,5 +202,5 @@ def main(): print('Execution took {:.2f} seconds\n'.format(gend - gstart)) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/test_idf_py/extra_path/some_ext.py b/tools/test_idf_py/extra_path/some_ext.py index fad4384707..75e6c94e21 100644 --- a/tools/test_idf_py/extra_path/some_ext.py +++ b/tools/test_idf_py/extra_path/some_ext.py @@ -1,21 +1,21 @@ def action_extensions(base_actions, project_path): def some_callback(ut_apply_config_name, ctx, args): - print("!!! From some subcommand") + print('!!! From some subcommand') def some_global_callback(ctx, global_args, tasks): - print("!!! From some global callback: %s" % global_args.some_extension_option) + print('!!! From some global callback: %s' % global_args.some_extension_option) return { - "global_options": [{ - "names": ["--some-extension-option"], - "help": "Help for option --some-extension-option", - "default": "test", + 'global_options': [{ + 'names': ['--some-extension-option'], + 'help': 'Help for option --some-extension-option', + 'default': 'test', }], - "global_action_callbacks": [some_global_callback], - "actions": { - "extra_subcommand": { - "callback": some_callback, - "help": "Help for some subcommand.", + 'global_action_callbacks': [some_global_callback], + 'actions': { + 'extra_subcommand': { + 'callback': some_callback, + 'help': 'Help for some subcommand.', }, }, } diff --git a/tools/test_idf_py/idf_ext.py b/tools/test_idf_py/idf_ext.py index ae0a65a460..a854162a9f 100644 --- a/tools/test_idf_py/idf_ext.py +++ b/tools/test_idf_py/idf_ext.py @@ -3,88 +3,88 @@ def action_extensions(base_actions, project_path=None): print(name, args, kwargs) def verbose(name, ctx, args): - print("Output from test-verbose") + print('Output from test-verbose') if args.verbose: - print("Verbose mode on") + print('Verbose mode on') # Add global options extensions = { - "global_options": [ + 'global_options': [ { - "names": ["--test-0"], - "help": "Non-deprecated option.", - "deprecated": False + 'names': ['--test-0'], + 'help': 'Non-deprecated option.', + 'deprecated': False }, { - "names": ["--test-1"], - "help": "Deprecated option 1.", - "deprecated": True + 'names': ['--test-1'], + 'help': 'Deprecated option 1.', + 'deprecated': True }, { - "names": ["--test-2"], - "help": "Deprecated option 2.", - "deprecated": "Please update your parameters." + 'names': ['--test-2'], + 'help': 'Deprecated option 2.', + 'deprecated': 'Please update your parameters.' }, { - "names": ["--test-3"], - "help": "Deprecated option 3.", - "deprecated": { - "custom_message": "Please update your parameters." + 'names': ['--test-3'], + 'help': 'Deprecated option 3.', + 'deprecated': { + 'custom_message': 'Please update your parameters.' 
} }, { - "names": ["--test-4"], - "help": "Deprecated option 4.", - "deprecated": { - "since": "v4.0", - "removed": "v5.0" + 'names': ['--test-4'], + 'help': 'Deprecated option 4.', + 'deprecated': { + 'since': 'v4.0', + 'removed': 'v5.0' } }, { - "names": ["--test-5"], - "help": "Deprecated option 5.", - "deprecated": { - "since": "v2.0", - "removed": "v3.0", - "exit_with_error": True + 'names': ['--test-5'], + 'help': 'Deprecated option 5.', + 'deprecated': { + 'since': 'v2.0', + 'removed': 'v3.0', + 'exit_with_error': True } }, ], - "actions": { - "test-verbose": { - "callback": verbose, - "help": "Command that have some verbosity", + 'actions': { + 'test-verbose': { + 'callback': verbose, + 'help': 'Command that have some verbosity', }, - "test-0": { - "callback": echo, - "help": "Non-deprecated command 0", - "options": [ + 'test-0': { + 'callback': echo, + 'help': 'Non-deprecated command 0', + 'options': [ { - "names": ["--test-sub-0"], - "help": "Non-deprecated subcommand option 0", - "default": None, + 'names': ['--test-sub-0'], + 'help': 'Non-deprecated subcommand option 0', + 'default': None, }, { - "names": ["--test-sub-1"], - "help": "Deprecated subcommand option 1", - "default": None, - "deprecated": True + 'names': ['--test-sub-1'], + 'help': 'Deprecated subcommand option 1', + 'default': None, + 'deprecated': True }, ], - "arguments": [{ - "names": ["test-arg-0"], + 'arguments': [{ + 'names': ['test-arg-0'], }], }, - "test-1": { - "callback": echo, - "help": "Deprecated command 1", - "deprecated": "Please use alternative command." + 'test-1': { + 'callback': echo, + 'help': 'Deprecated command 1', + 'deprecated': 'Please use alternative command.' }, - "test-2": { - "callback": echo, - "help": "Deprecated command 2", - "deprecated": { - "exit_with_error": True + 'test-2': { + 'callback': echo, + 'help': 'Deprecated command 2', + 'deprecated': { + 'exit_with_error': True } }, }, diff --git a/tools/test_idf_py/test_idf_extensions/test_ext/test_extension.py b/tools/test_idf_py/test_idf_extensions/test_ext/test_extension.py index 9ec5bf1553..e9596f9664 100644 --- a/tools/test_idf_py/test_idf_extensions/test_ext/test_extension.py +++ b/tools/test_idf_py/test_idf_extensions/test_ext/test_extension.py @@ -3,26 +3,26 @@ import os def action_extensions(base_actions, project_path=os.getcwd()): def test_callback(ut_apply_config_name, ctx, args): - print("!!! From test_subcommand") + print('!!! From test_subcommand') def test_global_callback(ctx, global_args, tasks): - print("!!! From test global callback: %s" % global_args.test_extension_option) + print('!!! 
From test global callback: %s' % global_args.test_extension_option) return { - "global_options": [{ - "names": ["--test-extension-option"], - "help": "Help for option --test-extension-option", - "default": "test", + 'global_options': [{ + 'names': ['--test-extension-option'], + 'help': 'Help for option --test-extension-option', + 'default': 'test', }], - "global_action_callbacks": [test_global_callback], - "actions": { - "test_subcommand": { - "callback": test_callback, - "help": "Help for test subcommand.", + 'global_action_callbacks': [test_global_callback], + 'actions': { + 'test_subcommand': { + 'callback': test_callback, + 'help': 'Help for test subcommand.', }, - "hidden_one": { - "callback": test_callback, - "hidden": True + 'hidden_one': { + 'callback': test_callback, + 'hidden': True } } } diff --git a/tools/test_idf_py/test_idf_py.py b/tools/test_idf_py/test_idf_py.py index 3605a4c0c7..4049a141a0 100755 --- a/tools/test_idf_py/test_idf_py.py +++ b/tools/test_idf_py/test_idf_py.py @@ -15,9 +15,9 @@ # limitations under the License. import os +import subprocess import sys import unittest -import subprocess try: from StringIO import StringIO @@ -40,8 +40,8 @@ class TestExtensions(unittest.TestCase): def test_extension_loading(self): try: os.symlink(extension_path, link_path) - os.environ["IDF_EXTRA_ACTIONS_PATH"] = os.path.join(current_dir, 'extra_path') - output = subprocess.check_output([sys.executable, idf_py_path, "--help"], + os.environ['IDF_EXTRA_ACTIONS_PATH'] = os.path.join(current_dir, 'extra_path') + output = subprocess.check_output([sys.executable, idf_py_path, '--help'], env=os.environ).decode('utf-8', 'ignore') self.assertIn('--test-extension-option', output) @@ -54,9 +54,9 @@ class TestExtensions(unittest.TestCase): def test_extension_execution(self): try: os.symlink(extension_path, link_path) - os.environ["IDF_EXTRA_ACTIONS_PATH"] = ";".join([os.path.join(current_dir, 'extra_path')]) + os.environ['IDF_EXTRA_ACTIONS_PATH'] = ';'.join([os.path.join(current_dir, 'extra_path')]) output = subprocess.check_output( - [sys.executable, idf_py_path, "--some-extension-option=awesome", 'test_subcommand', "extra_subcommand"], + [sys.executable, idf_py_path, '--some-extension-option=awesome', 'test_subcommand', 'extra_subcommand'], env=os.environ).decode('utf-8', 'ignore') self.assertIn('!!! From some global callback: awesome', output) self.assertIn('!!! 
From some subcommand', output) @@ -68,8 +68,8 @@ class TestExtensions(unittest.TestCase): def test_hidden_commands(self): try: os.symlink(extension_path, link_path) - os.environ["IDF_EXTRA_ACTIONS_PATH"] = ";".join([os.path.join(current_dir, 'extra_path')]) - output = subprocess.check_output([sys.executable, idf_py_path, "--help"], + os.environ['IDF_EXTRA_ACTIONS_PATH'] = ';'.join([os.path.join(current_dir, 'extra_path')]) + output = subprocess.check_output([sys.executable, idf_py_path, '--help'], env=os.environ).decode('utf-8', 'ignore') self.assertIn('test_subcommand', output) self.assertNotIn('hidden_one', output) @@ -135,9 +135,9 @@ class TestVerboseFlag(unittest.TestCase): [ sys.executable, idf_py_path, - "-C%s" % current_dir, - "-v", - "test-verbose", + '-C%s' % current_dir, + '-v', + 'test-verbose', ], env=os.environ).decode('utf-8', 'ignore') self.assertIn('Verbose mode on', output) @@ -147,8 +147,8 @@ class TestVerboseFlag(unittest.TestCase): [ sys.executable, idf_py_path, - "-C%s" % current_dir, - "test-verbose", + '-C%s' % current_dir, + 'test-verbose', ], env=os.environ).decode('utf-8', 'ignore') self.assertIn('Output from test-verbose', output) @@ -177,14 +177,14 @@ class TestGlobalAndSubcommandParameters(unittest.TestCase): class TestDeprecations(unittest.TestCase): def test_exit_with_error_for_subcommand(self): try: - subprocess.check_output([sys.executable, idf_py_path, "-C%s" % current_dir, "test-2"], env=os.environ, + subprocess.check_output([sys.executable, idf_py_path, '-C%s' % current_dir, 'test-2'], env=os.environ, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: self.assertIn('Error: Command "test-2" is deprecated and was removed.', e.output.decode('utf-8', 'ignore')) def test_exit_with_error_for_option(self): try: - subprocess.check_output([sys.executable, idf_py_path, "-C%s" % current_dir, "--test-5=asdf"], + subprocess.check_output([sys.executable, idf_py_path, '-C%s' % current_dir, '--test-5=asdf'], env=os.environ, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: self.assertIn('Error: Option "test_5" is deprecated since v2.0 and was removed in v3.0.', @@ -195,16 +195,16 @@ class TestDeprecations(unittest.TestCase): [ sys.executable, idf_py_path, - "-C%s" % current_dir, - "--test-0=a", - "--test-1=b", - "--test-2=c", - "--test-3=d", - "test-0", - "--test-sub-0=sa", - "--test-sub-1=sb", - "ta", - "test-1", + '-C%s' % current_dir, + '--test-0=a', + '--test-1=b', + '--test-2=c', + '--test-3=d', + 'test-0', + '--test-sub-0=sa', + '--test-sub-1=sb', + 'ta', + 'test-1', ], env=os.environ, stderr=subprocess.STDOUT).decode('utf-8', 'ignore') diff --git a/tools/test_idf_size/test_idf_size.py b/tools/test_idf_size/test_idf_size.py index a3bfc7828d..68e6912ef0 100644 --- a/tools/test_idf_size/test_idf_size.py +++ b/tools/test_idf_size/test_idf_size.py @@ -15,8 +15,9 @@ # limitations under the License. 
from __future__ import print_function -import sys + import collections +import sys try: import idf_size @@ -25,7 +26,7 @@ except ImportError: import idf_size -if __name__ == "__main__": +if __name__ == '__main__': # Should deliver a RuntimeError as the 'test' header doesn't exist try: idf_size.scan_to_header([], 'test') @@ -34,10 +35,10 @@ if __name__ == "__main__": # Should deliver a RuntimeError as there's no content under the heading try: - idf_size.load_memory_config(["Memory Configuration"]) + idf_size.load_memory_config(['Memory Configuration']) pass except RuntimeError as e: - assert "End of file" in str(e) + assert 'End of file' in str(e) # This used to crash with a division by zero error but now it just prints nan% due to # zero lengths @@ -45,5 +46,5 @@ if __name__ == "__main__": 'used_dram_names', 'used_diram_names']) mem_reg = MemRegNames(set(), set(), set(), set(), set(), set()) - print(idf_size.get_summary('a.map', mem_reg, {"iram0_0_seg": {"origin":0,"length":0}, "dram0_0_seg": - {"origin":0, "length":0}}, {}), end="") + print(idf_size.get_summary('a.map', mem_reg, {'iram0_0_seg': {'origin':0,'length':0}, 'dram0_0_seg': + {'origin':0, 'length':0}}, {}), end='') diff --git a/tools/test_idf_tools/test_idf_tools.py b/tools/test_idf_tools/test_idf_tools.py index d48dc2743a..edff2b7de6 100755 --- a/tools/test_idf_tools/test_idf_tools.py +++ b/tools/test_idf_tools/test_idf_tools.py @@ -15,10 +15,10 @@ # limitations under the License. import os -import sys -import unittest -import tempfile import shutil +import sys +import tempfile +import unittest try: from contextlib import redirect_stdout diff --git a/tools/test_mkdfu/test_mkdfu.py b/tools/test_mkdfu/test_mkdfu.py index 1308441715..09d867a12f 100755 --- a/tools/test_mkdfu/test_mkdfu.py +++ b/tools/test_mkdfu/test_mkdfu.py @@ -16,15 +16,17 @@ # limitations under the License. from __future__ import unicode_literals + import filecmp import os -import pexpect import shutil import sys import tempfile import time import unittest +import pexpect + current_dir = os.path.dirname(os.path.realpath(__file__)) mkdfu_path = os.path.join(current_dir, '..', 'mkdfu.py') diff --git a/tools/test_mkuf2/test_mkuf2.py b/tools/test_mkuf2/test_mkuf2.py index b13cf5ea41..f87e167a63 100755 --- a/tools/test_mkuf2/test_mkuf2.py +++ b/tools/test_mkuf2/test_mkuf2.py @@ -20,18 +20,18 @@ from __future__ import unicode_literals import filecmp import hashlib import os -import pexpect import random import struct import sys import tempfile import time import unittest - from functools import partial from io import open from itertools import chain +import pexpect + try: from itertools import izip as zip except ImportError: diff --git a/tools/unit-test-app/idf_ext.py b/tools/unit-test-app/idf_ext.py index b4a7269e65..302f2150cd 100644 --- a/tools/unit-test-app/idf_ext.py +++ b/tools/unit-test-app/idf_ext.py @@ -9,17 +9,17 @@ import shutil def action_extensions(base_actions, project_path=os.getcwd()): """ Describes extensions for unit tests. This function expects that actions "all" and "reconfigure" """ - PROJECT_NAME = "unit-test-app" + PROJECT_NAME = 'unit-test-app' # List of unit-test-app configurations. # Each file in configs/ directory defines a configuration. The format is the # same as sdkconfig file. 
Configuration is applied on top of sdkconfig.defaults # file from the project directory - CONFIG_NAMES = os.listdir(os.path.join(project_path, "configs")) + CONFIG_NAMES = os.listdir(os.path.join(project_path, 'configs')) # Build (intermediate) and output (artifact) directories - BUILDS_DIR = os.path.join(project_path, "builds") - BINARIES_DIR = os.path.join(project_path, "output") + BUILDS_DIR = os.path.join(project_path, 'builds') + BINARIES_DIR = os.path.join(project_path, 'output') def parse_file_to_dict(path, regex): """ @@ -41,18 +41,18 @@ def action_extensions(base_actions, project_path=os.getcwd()): Expected format with default regex is "key=value" """ - return parse_file_to_dict(path, r"^([^=]+)=(.+)$") + return parse_file_to_dict(path, r'^([^=]+)=(.+)$') def ut_apply_config(ut_apply_config_name, ctx, args): - config_name = re.match(r"ut-apply-config-(.*)", ut_apply_config_name).group(1) + config_name = re.match(r'ut-apply-config-(.*)', ut_apply_config_name).group(1) # Make sure that define_cache_entry is list args.define_cache_entry = list(args.define_cache_entry) new_cache_values = {} - sdkconfig_set = list(filter(lambda s: "SDKCONFIG=" in s, args.define_cache_entry)) - sdkconfig_path = os.path.join(args.project_dir, "sdkconfig") + sdkconfig_set = list(filter(lambda s: 'SDKCONFIG=' in s, args.define_cache_entry)) + sdkconfig_path = os.path.join(args.project_dir, 'sdkconfig') if sdkconfig_set: - sdkconfig_path = sdkconfig_set[-1].split("=")[1] + sdkconfig_path = sdkconfig_set[-1].split('=')[1] sdkconfig_path = os.path.abspath(sdkconfig_path) try: @@ -62,26 +62,26 @@ def action_extensions(base_actions, project_path=os.getcwd()): if config_name in CONFIG_NAMES: # Parse the sdkconfig for components to be included/excluded and tests to be run - config_path = os.path.join(project_path, "configs", config_name) + config_path = os.path.join(project_path, 'configs', config_name) config = parse_config(config_path) - target = config.get("CONFIG_IDF_TARGET", "esp32").strip("'").strip('"') + target = config.get('CONFIG_IDF_TARGET', 'esp32').strip("'").strip('"') - print("Reconfigure: config %s, target %s" % (config_name, target)) + print('Reconfigure: config %s, target %s' % (config_name, target)) # Clean up and set idf-target - base_actions["actions"]["fullclean"]["callback"]("fullclean", ctx, args) + base_actions['actions']['fullclean']['callback']('fullclean', ctx, args) - new_cache_values["EXCLUDE_COMPONENTS"] = config.get("EXCLUDE_COMPONENTS", "''") - new_cache_values["TEST_EXCLUDE_COMPONENTS"] = config.get("TEST_EXCLUDE_COMPONENTS", "''") - new_cache_values["TEST_COMPONENTS"] = config.get("TEST_COMPONENTS", "''") - new_cache_values["TESTS_ALL"] = int(new_cache_values["TEST_COMPONENTS"] == "''") - new_cache_values["IDF_TARGET"] = target - new_cache_values["SDKCONFIG_DEFAULTS"] = ";".join([os.path.join(project_path, "sdkconfig.defaults"), config_path]) + new_cache_values['EXCLUDE_COMPONENTS'] = config.get('EXCLUDE_COMPONENTS', "''") + new_cache_values['TEST_EXCLUDE_COMPONENTS'] = config.get('TEST_EXCLUDE_COMPONENTS', "''") + new_cache_values['TEST_COMPONENTS'] = config.get('TEST_COMPONENTS', "''") + new_cache_values['TESTS_ALL'] = int(new_cache_values['TEST_COMPONENTS'] == "''") + new_cache_values['IDF_TARGET'] = target + new_cache_values['SDKCONFIG_DEFAULTS'] = ';'.join([os.path.join(project_path, 'sdkconfig.defaults'), config_path]) - args.define_cache_entry.extend(["%s=%s" % (k, v) for k, v in new_cache_values.items()]) + args.define_cache_entry.extend(['%s=%s' % (k, v) for k, v in 
new_cache_values.items()]) - reconfigure = base_actions["actions"]["reconfigure"]["callback"] + reconfigure = base_actions['actions']['reconfigure']['callback'] reconfigure(None, ctx, args) # This target builds the configuration. It does not currently track dependencies, @@ -93,7 +93,7 @@ def action_extensions(base_actions, project_path=os.getcwd()): # all configs are being built build_args = copy.copy(args) - config_name = re.match(r"ut-build-(.*)", ut_build_name).group(1) + config_name = re.match(r'ut-build-(.*)', ut_build_name).group(1) if config_name in CONFIG_NAMES: build_args.build_dir = os.path.join(BUILDS_DIR, config_name) @@ -107,56 +107,56 @@ def action_extensions(base_actions, project_path=os.getcwd()): pass # Build, tweaking paths to sdkconfig and sdkconfig.defaults - ut_apply_config("ut-apply-config-" + config_name, ctx, build_args) + ut_apply_config('ut-apply-config-' + config_name, ctx, build_args) - build_target = base_actions["actions"]["all"]["callback"] + build_target = base_actions['actions']['all']['callback'] - build_target("all", ctx, build_args) + build_target('all', ctx, build_args) # Copy artifacts to the output directory shutil.copyfile( - os.path.join(build_args.project_dir, "sdkconfig"), - os.path.join(dest, "sdkconfig"), + os.path.join(build_args.project_dir, 'sdkconfig'), + os.path.join(dest, 'sdkconfig'), ) - binaries = [PROJECT_NAME + x for x in [".elf", ".bin", ".map"]] + binaries = [PROJECT_NAME + x for x in ['.elf', '.bin', '.map']] for binary in binaries: shutil.copyfile(os.path.join(src, binary), os.path.join(dest, binary)) try: - os.mkdir(os.path.join(dest, "bootloader")) + os.mkdir(os.path.join(dest, 'bootloader')) except OSError: pass shutil.copyfile( - os.path.join(src, "bootloader", "bootloader.bin"), - os.path.join(dest, "bootloader", "bootloader.bin"), + os.path.join(src, 'bootloader', 'bootloader.bin'), + os.path.join(dest, 'bootloader', 'bootloader.bin'), ) - for partition_table in glob.glob(os.path.join(src, "partition_table", "partition-table*.bin")): + for partition_table in glob.glob(os.path.join(src, 'partition_table', 'partition-table*.bin')): try: - os.mkdir(os.path.join(dest, "partition_table")) + os.mkdir(os.path.join(dest, 'partition_table')) except OSError: pass shutil.copyfile( partition_table, - os.path.join(dest, "partition_table", os.path.basename(partition_table)), + os.path.join(dest, 'partition_table', os.path.basename(partition_table)), ) shutil.copyfile( - os.path.join(src, "flasher_args.json"), - os.path.join(dest, "flasher_args.json"), + os.path.join(src, 'flasher_args.json'), + os.path.join(dest, 'flasher_args.json'), ) - binaries = glob.glob(os.path.join(src, "*.bin")) + binaries = glob.glob(os.path.join(src, '*.bin')) binaries = [os.path.basename(s) for s in binaries] for binary in binaries: shutil.copyfile(os.path.join(src, binary), os.path.join(dest, binary)) def ut_clean(ut_clean_name, ctx, args): - config_name = re.match(r"ut-clean-(.*)", ut_clean_name).group(1) + config_name = re.match(r'ut-clean-(.*)', ut_clean_name).group(1) if config_name in CONFIG_NAMES: shutil.rmtree(os.path.join(BUILDS_DIR, config_name), ignore_errors=True) shutil.rmtree(os.path.join(BINARIES_DIR, config_name), ignore_errors=True) @@ -169,35 +169,35 @@ def action_extensions(base_actions, project_path=os.getcwd()): cache_entries = {} if test_components: - if "all" in test_components: - cache_entries["TESTS_ALL"] = 1 - cache_entries["TEST_COMPONENTS"] = "''" + if 'all' in test_components: + cache_entries['TESTS_ALL'] = 1 + 
cache_entries['TEST_COMPONENTS'] = "''" else: - cache_entries["TESTS_ALL"] = 0 - cache_entries["TEST_COMPONENTS"] = " ".join(test_components) + cache_entries['TESTS_ALL'] = 0 + cache_entries['TEST_COMPONENTS'] = ' '.join(test_components) if test_exclude_components: - cache_entries["TEST_EXCLUDE_COMPONENTS"] = " ".join(test_exclude_components) + cache_entries['TEST_EXCLUDE_COMPONENTS'] = ' '.join(test_exclude_components) if cache_entries: global_args.define_cache_entry = list(global_args.define_cache_entry) - global_args.define_cache_entry.extend(["%s=%s" % (k, v) for k, v in cache_entries.items()]) + global_args.define_cache_entry.extend(['%s=%s' % (k, v) for k, v in cache_entries.items()]) # Add global options extensions = { - "global_options": [{ - "names": ["-T", "--test-components"], - "help": "Specify the components to test.", - "scope": "shared", - "multiple": True, + 'global_options': [{ + 'names': ['-T', '--test-components'], + 'help': 'Specify the components to test.', + 'scope': 'shared', + 'multiple': True, }, { - "names": ["-E", "--test-exclude-components"], - "help": "Specify the components to exclude from testing.", - "scope": "shared", - "multiple": True, + 'names': ['-E', '--test-exclude-components'], + 'help': 'Specify the components to exclude from testing.', + 'scope': 'shared', + 'multiple': True, }], - "global_action_callbacks": [test_component_callback], - "actions": {}, + 'global_action_callbacks': [test_component_callback], + 'actions': {}, } # This generates per-config targets (clean, build, apply-config). @@ -205,45 +205,45 @@ def action_extensions(base_actions, project_path=os.getcwd()): clean_all_config_deps = [] for config in CONFIG_NAMES: - config_build_action_name = "ut-build-" + config - config_clean_action_name = "ut-clean-" + config - config_apply_config_action_name = "ut-apply-config-" + config + config_build_action_name = 'ut-build-' + config + config_clean_action_name = 'ut-clean-' + config + config_apply_config_action_name = 'ut-apply-config-' + config - extensions["actions"][config_build_action_name] = { - "callback": + extensions['actions'][config_build_action_name] = { + 'callback': ut_build, - "help": - "Build unit-test-app with configuration provided in configs/NAME. " + - "Build directory will be builds/%s/, " % config_build_action_name + - "output binaries will be under output/%s/" % config_build_action_name, + 'help': + 'Build unit-test-app with configuration provided in configs/NAME. ' + + 'Build directory will be builds/%s/, ' % config_build_action_name + + 'output binaries will be under output/%s/' % config_build_action_name, } - extensions["actions"][config_clean_action_name] = { - "callback": ut_clean, - "help": "Remove build and output directories for configuration %s." % config_clean_action_name, + extensions['actions'][config_clean_action_name] = { + 'callback': ut_clean, + 'help': 'Remove build and output directories for configuration %s.' % config_clean_action_name, } - extensions["actions"][config_apply_config_action_name] = { - "callback": + extensions['actions'][config_apply_config_action_name] = { + 'callback': ut_apply_config, - "help": - "Generates configuration based on configs/%s in sdkconfig file." % config_apply_config_action_name + - "After this, normal all/flash targets can be used. Useful for development/debugging.", + 'help': + 'Generates configuration based on configs/%s in sdkconfig file.' % config_apply_config_action_name + + 'After this, normal all/flash targets can be used. 
Useful for development/debugging.', } build_all_config_deps.append(config_build_action_name) clean_all_config_deps.append(config_clean_action_name) - extensions["actions"]["ut-build-all-configs"] = { - "callback": ut_build, - "help": "Build all configurations defined in configs/ directory.", - "dependencies": build_all_config_deps, + extensions['actions']['ut-build-all-configs'] = { + 'callback': ut_build, + 'help': 'Build all configurations defined in configs/ directory.', + 'dependencies': build_all_config_deps, } - extensions["actions"]["ut-clean-all-configs"] = { - "callback": ut_clean, - "help": "Remove build and output directories for all configurations defined in configs/ directory.", - "dependencies": clean_all_config_deps, + extensions['actions']['ut-clean-all-configs'] = { + 'callback': ut_clean, + 'help': 'Remove build and output directories for all configurations defined in configs/ directory.', + 'dependencies': clean_all_config_deps, } return extensions diff --git a/tools/unit-test-app/tools/CreateSectionTable.py b/tools/unit-test-app/tools/CreateSectionTable.py index a96dce7c20..ead1b7c69d 100644 --- a/tools/unit-test-app/tools/CreateSectionTable.py +++ b/tools/unit-test-app/tools/CreateSectionTable.py @@ -6,8 +6,8 @@ class Section(object): """ One Section of section table. contains info about section name, address and raw data """ - SECTION_START_PATTERN = re.compile(b"Contents of section (.+?):") - DATA_PATTERN = re.compile(b"([0-9a-f]{4,8})") + SECTION_START_PATTERN = re.compile(b'Contents of section (.+?):') + DATA_PATTERN = re.compile(b'([0-9a-f]{4,8})') def __init__(self, name, start_address, data): self.name = name @@ -16,8 +16,8 @@ class Section(object): def __contains__(self, item): """ check if the section name and address match this section """ - if (item["section"] == self.name or item["section"] == "any") \ - and (self.start_address <= item["address"] < (self.start_address + len(self.data))): + if (item['section'] == self.name or item['section'] == 'any') \ + and (self.start_address <= item['address'] < (self.start_address + len(self.data))): return True else: return False @@ -36,7 +36,7 @@ class Section(object): return self.data[item] def __str__(self): - return "%s [%08x - %08x]" % (self.name, self.start_address, self.start_address + len(self.data)) + return '%s [%08x - %08x]' % (self.name, self.start_address, self.start_address + len(self.data)) __repr__ = __str__ @@ -47,12 +47,12 @@ class Section(object): :param raw_data: lines of raw data generated by `objdump -s` :return: one section, un-processed lines """ - name = "" - data = "" + name = '' + data = '' start_address = 0 # first find start line for i, line in enumerate(raw_data): - if b"Contents of section " in line: # do strcmp first to speed up + if b'Contents of section ' in line: # do strcmp first to speed up match = cls.SECTION_START_PATTERN.search(line) if match is not None: name = match.group(1) @@ -60,11 +60,11 @@ class Section(object): break else: # do some error handling - raw_data = [b""] # add a dummy first data line + raw_data = [b''] # add a dummy first data line def process_data_line(line_to_process): # first remove the ascii part - hex_part = line_to_process.split(b" ")[0] + hex_part = line_to_process.split(b' ')[0] # process rest part data_list = cls.DATA_PATTERN.findall(hex_part) try: @@ -74,12 +74,12 @@ class Section(object): def hex_to_str(hex_data): if len(hex_data) % 2 == 1: - hex_data = b"0" + hex_data # append zero at the beginning + hex_data = b'0' + hex_data # append zero at the 
beginning _length = len(hex_data) - return "".join([chr(int(hex_data[_i:_i + 2], base=16)) + return ''.join([chr(int(hex_data[_i:_i + 2], base=16)) for _i in range(0, _length, 2)]) - return _address, "".join([hex_to_str(x) for x in data_list[1:]]) + return _address, ''.join([hex_to_str(x) for x in data_list[1:]]) # handle first line: address, _data = process_data_line(raw_data[0]) @@ -107,14 +107,14 @@ class SectionTable(object): """ elf section table """ def __init__(self, file_name): - with open(file_name, "rb") as f: + with open(file_name, 'rb') as f: raw_data = f.readlines() self.table = [] while raw_data: section, raw_data = Section.parse_raw_data(raw_data) self.table.append(section) - def get_unsigned_int(self, section, address, size=4, endian="LE"): + def get_unsigned_int(self, section, address, size=4, endian='LE'): """ get unsigned int from section table :param section: section name; use "any" will only match with address @@ -124,19 +124,19 @@ class SectionTable(object): :return: int or None """ if address % 4 != 0 or size % 4 != 0: - print("warning: try to access without 4 bytes aligned") - key = {"address": address, "section": section} + print('warning: try to access without 4 bytes aligned') + key = {'address': address, 'section': section} for section in self.table: if key in section: tmp = section[address:address + size] value = 0 for i in range(size): - if endian == "LE": + if endian == 'LE': value += ord(tmp[i]) << (i * 8) - elif endian == "BE": + elif endian == 'BE': value += ord(tmp[i]) << ((size - i - 1) * 8) else: - print("only support LE or BE for parameter endian") + print('only support LE or BE for parameter endian') assert False break else: @@ -151,7 +151,7 @@ class SectionTable(object): :return: string or None """ value = None - key = {"address": address, "section": section} + key = {'address': address, 'section': section} for section in self.table: if key in section: value = section[address:] diff --git a/tools/unit-test-app/tools/UnitTestParser.py b/tools/unit-test-app/tools/UnitTestParser.py index f6e740de9b..3fff495b18 100644 --- a/tools/unit-test-app/tools/UnitTestParser.py +++ b/tools/unit-test-app/tools/UnitTestParser.py @@ -1,15 +1,14 @@ from __future__ import print_function import argparse - -import yaml import os import re import shutil import subprocess - from copy import deepcopy + import CreateSectionTable +import yaml try: from yaml import CLoader as Loader @@ -17,43 +16,43 @@ except ImportError: from yaml import Loader as Loader TEST_CASE_PATTERN = { - "initial condition": "UTINIT1", - "chip_target": "esp32", - "level": "Unit", - "execution time": 0, - "auto test": "Yes", - "category": "Function", - "test point 1": "basic function", - "version": "v1 (2016-12-06)", - "test environment": "UT_T1_1", - "reset": "", - "expected result": "1. set succeed", - "cmd set": "test_unit_test_case", - "Test App": "UT", + 'initial condition': 'UTINIT1', + 'chip_target': 'esp32', + 'level': 'Unit', + 'execution time': 0, + 'auto test': 'Yes', + 'category': 'Function', + 'test point 1': 'basic function', + 'version': 'v1 (2016-12-06)', + 'test environment': 'UT_T1_1', + 'reset': '', + 'expected result': '1. 
set succeed', + 'cmd set': 'test_unit_test_case', + 'Test App': 'UT', } class Parser(object): """ parse unit test cases from build files and create files for test bench """ - TAG_PATTERN = re.compile(r"([^=]+)(=)?(.+)?") - DESCRIPTION_PATTERN = re.compile(r"\[([^]\[]+)\]") - CONFIG_PATTERN = re.compile(r"{([^}]+)}") - TEST_GROUPS_PATTERN = re.compile(r"TEST_GROUPS=(.*)$") + TAG_PATTERN = re.compile(r'([^=]+)(=)?(.+)?') + DESCRIPTION_PATTERN = re.compile(r'\[([^]\[]+)\]') + CONFIG_PATTERN = re.compile(r'{([^}]+)}') + TEST_GROUPS_PATTERN = re.compile(r'TEST_GROUPS=(.*)$') # file path (relative to idf path) - TAG_DEF_FILE = os.path.join("tools", "unit-test-app", "tools", "TagDefinition.yml") - MODULE_DEF_FILE = os.path.join("tools", "unit-test-app", "tools", "ModuleDefinition.yml") - CONFIG_DEPENDENCY_FILE = os.path.join("tools", "unit-test-app", "tools", "ConfigDependency.yml") - MODULE_ARTIFACT_FILE = os.path.join("components", "idf_test", "ModuleDefinition.yml") - TEST_CASE_FILE_DIR = os.path.join("components", "idf_test", "unit_test") - UT_CONFIG_FOLDER = os.path.join("tools", "unit-test-app", "configs") - ELF_FILE = "unit-test-app.elf" - SDKCONFIG_FILE = "sdkconfig" - STRIP_CONFIG_PATTERN = re.compile(r"(.+?)(_\d+)?$") + TAG_DEF_FILE = os.path.join('tools', 'unit-test-app', 'tools', 'TagDefinition.yml') + MODULE_DEF_FILE = os.path.join('tools', 'unit-test-app', 'tools', 'ModuleDefinition.yml') + CONFIG_DEPENDENCY_FILE = os.path.join('tools', 'unit-test-app', 'tools', 'ConfigDependency.yml') + MODULE_ARTIFACT_FILE = os.path.join('components', 'idf_test', 'ModuleDefinition.yml') + TEST_CASE_FILE_DIR = os.path.join('components', 'idf_test', 'unit_test') + UT_CONFIG_FOLDER = os.path.join('tools', 'unit-test-app', 'configs') + ELF_FILE = 'unit-test-app.elf' + SDKCONFIG_FILE = 'sdkconfig' + STRIP_CONFIG_PATTERN = re.compile(r'(.+?)(_\d+)?$') TOOLCHAIN_FOR_TARGET = { - "esp32": "xtensa-esp32-elf-", - "esp32s2": "xtensa-esp32s2-elf-", + 'esp32': 'xtensa-esp32-elf-', + 'esp32s2': 'xtensa-esp32s2-elf-', } def __init__(self, binary_folder): @@ -65,10 +64,10 @@ class Parser(object): self.idf_path = idf_path self.idf_target = idf_target self.ut_bin_folder = binary_folder - self.objdump = Parser.TOOLCHAIN_FOR_TARGET.get(idf_target, "") + "objdump" - self.tag_def = yaml.load(open(os.path.join(idf_path, self.TAG_DEF_FILE), "r"), Loader=Loader) - self.module_map = yaml.load(open(os.path.join(idf_path, self.MODULE_DEF_FILE), "r"), Loader=Loader) - self.config_dependencies = yaml.load(open(os.path.join(idf_path, self.CONFIG_DEPENDENCY_FILE), "r"), + self.objdump = Parser.TOOLCHAIN_FOR_TARGET.get(idf_target, '') + 'objdump' + self.tag_def = yaml.load(open(os.path.join(idf_path, self.TAG_DEF_FILE), 'r'), Loader=Loader) + self.module_map = yaml.load(open(os.path.join(idf_path, self.MODULE_DEF_FILE), 'r'), Loader=Loader) + self.config_dependencies = yaml.load(open(os.path.join(idf_path, self.CONFIG_DEPENDENCY_FILE), 'r'), Loader=Loader) # used to check if duplicated test case names self.test_case_names = set() @@ -82,7 +81,7 @@ class Parser(object): :param config_name: built unit test config name """ tags = self.parse_tags(os.path.join(config_output_folder, self.SDKCONFIG_FILE)) - print("Tags of config %s: %s" % (config_name, tags)) + print('Tags of config %s: %s' % (config_name, tags)) test_groups = self.get_test_groups(os.path.join(configs_folder, config_name)) @@ -91,7 +90,7 @@ class Parser(object): shell=True) subprocess.check_output('{} -s {} > section_table.tmp'.format(self.objdump, elf_file), 
shell=True)
-        table = CreateSectionTable.SectionTable("section_table.tmp")
+        table = CreateSectionTable.SectionTable('section_table.tmp')
         test_cases = []
 
         # we could split cases of same config into multiple binaries as we have limited rom space
@@ -99,7 +98,7 @@
         match = self.STRIP_CONFIG_PATTERN.match(config_name)
         stripped_config_name = match.group(1)
 
-        with open("case_address.tmp", "rb") as f:
+        with open('case_address.tmp', 'rb') as f:
             for line in f:
                 # process symbol table like: "3ffb4310 l     O .dram0.data 00000018 test_desc_33$5010"
                 line = line.split()
@@ -109,8 +108,8 @@
                 name_addr = table.get_unsigned_int(section, test_addr, 4)
                 desc_addr = table.get_unsigned_int(section, test_addr + 4, 4)
                 function_count = table.get_unsigned_int(section, test_addr + 20, 4)
-                name = table.get_string("any", name_addr)
-                desc = table.get_string("any", desc_addr)
+                name = table.get_string('any', name_addr)
+                desc = table.get_string('any', desc_addr)
 
                 tc = self.parse_one_test_case(name, desc, config_name, stripped_config_name, tags)
 
@@ -119,30 +118,30 @@
                 # if duplicated IDs, Unity could select incorrect case to run
                 # and we need to check all cases no matter if it's going to be executed by CI
                 # also add app_name here, we allow same case for different apps
-                if (tc["summary"] + stripped_config_name) in self.test_case_names:
-                    self.parsing_errors.append("duplicated test case ID: " + tc["summary"])
+                if (tc['summary'] + stripped_config_name) in self.test_case_names:
+                    self.parsing_errors.append('duplicated test case ID: ' + tc['summary'])
                 else:
-                    self.test_case_names.add(tc["summary"] + stripped_config_name)
+                    self.test_case_names.add(tc['summary'] + stripped_config_name)
 
                 test_group_included = True
-                if test_groups is not None and tc["group"] not in test_groups:
+                if test_groups is not None and tc['group'] not in test_groups:
                     test_group_included = False
 
-                if tc["CI ready"] == "Yes" and test_group_included:
+                if tc['CI ready'] == 'Yes' and test_group_included:
                     # update test env list and the cases of same env list
-                    if tc["test environment"] in self.test_env_tags:
-                        self.test_env_tags[tc["test environment"]].append(tc["ID"])
+                    if tc['test environment'] in self.test_env_tags:
+                        self.test_env_tags[tc['test environment']].append(tc['ID'])
                     else:
-                        self.test_env_tags.update({tc["test environment"]: [tc["ID"]]})
+                        self.test_env_tags.update({tc['test environment']: [tc['ID']]})
 
                     if function_count > 1:
-                        tc.update({"child case num": function_count})
+                        tc.update({'child case num': function_count})
 
                     # only add cases need to be executed
                     test_cases.append(tc)
 
-        os.remove("section_table.tmp")
-        os.remove("case_address.tmp")
+        os.remove('section_table.tmp')
+        os.remove('case_address.tmp')
 
         return test_cases
 
@@ -159,14 +158,14 @@
         """
        tags = self.DESCRIPTION_PATTERN.findall(tags_raw)
         assert len(tags) > 0
-        p = dict([(k, self.tag_def[k]["omitted"]) for k in self.tag_def])
-        p["module"] = tags[0]
+        p = dict([(k, self.tag_def[k]['omitted']) for k in self.tag_def])
+        p['module'] = tags[0]
 
         # Use the original value of the first tag as test group name
-        p["group"] = p["module"]
+        p['group'] = p['module']
 
-        if p["module"] not in self.module_map:
-            p["module"] = "misc"
+        if p['module'] not in self.module_map:
+            p['module'] = 'misc'
 
         # parsing rest tags, [type=value], =value is optional
         for tag in tags[1:]:
@@ -174,12 +173,12 @@
             assert match is not None
             tag_type = match.group(1)
             tag_value = match.group(3)
-            if match.group(2) == "=" and 
tag_value is None: + if match.group(2) == '=' and tag_value is None: # [tag_type=] means tag_value is empty string - tag_value = "" + tag_value = '' if tag_type in p: if tag_value is None: - p[tag_type] = self.tag_def[tag_type]["default"] + p[tag_type] = self.tag_def[tag_type]['default'] else: p[tag_type] = tag_value else: @@ -230,7 +229,7 @@ class Parser(object): :param sdkconfig_file: sdk config file of the unit test config :return: required tags for runners """ - with open(sdkconfig_file, "r") as f: + with open(sdkconfig_file, 'r') as f: configs_raw_data = f.read() configs = configs_raw_data.splitlines(False) @@ -243,7 +242,7 @@ class Parser(object): :param config_file file under configs/ directory for given configuration :return: list of test groups, or None if TEST_GROUPS wasn't set """ - with open(config_file, "r") as f: + with open(config_file, 'r') as f: for line in f: match = self.TEST_GROUPS_PATTERN.match(line) if match is not None: @@ -263,22 +262,22 @@ class Parser(object): prop = self.parse_case_properties(description) test_case = deepcopy(TEST_CASE_PATTERN) - test_case.update({"config": config_name, - "module": self.module_map[prop["module"]]['module'], - "group": prop["group"], - "CI ready": "No" if prop["ignore"] == "Yes" else "Yes", - "ID": "[{}] {}".format(stripped_config_name, name), - "test point 2": prop["module"], - "steps": name, - "test environment": prop["test_env"], - "reset": prop["reset"], - "sub module": self.module_map[prop["module"]]['sub module'], - "summary": name, - "multi_device": prop["multi_device"], - "multi_stage": prop["multi_stage"], - "timeout": int(prop["timeout"]), - "tags": tags, - "chip_target": self.idf_target}) + test_case.update({'config': config_name, + 'module': self.module_map[prop['module']]['module'], + 'group': prop['group'], + 'CI ready': 'No' if prop['ignore'] == 'Yes' else 'Yes', + 'ID': '[{}] {}'.format(stripped_config_name, name), + 'test point 2': prop['module'], + 'steps': name, + 'test environment': prop['test_env'], + 'reset': prop['reset'], + 'sub module': self.module_map[prop['module']]['sub module'], + 'summary': name, + 'multi_device': prop['multi_device'], + 'multi_stage': prop['multi_stage'], + 'timeout': int(prop['timeout']), + 'tags': tags, + 'chip_target': self.idf_target}) return test_case def dump_test_cases(self, test_cases): @@ -286,13 +285,13 @@ class Parser(object): dump parsed test cases to YAML file for test bench input :param test_cases: parsed test cases """ - filename = os.path.join(self.idf_path, self.TEST_CASE_FILE_DIR, self.idf_target + ".yml") + filename = os.path.join(self.idf_path, self.TEST_CASE_FILE_DIR, self.idf_target + '.yml') try: os.mkdir(os.path.dirname(filename)) except OSError: pass - with open(os.path.join(filename), "w+") as f: - yaml.dump({"test cases": test_cases}, f, allow_unicode=True, default_flow_style=False) + with open(os.path.join(filename), 'w+') as f: + yaml.dump({'test cases': test_cases}, f, allow_unicode=True, default_flow_style=False) def copy_module_def_file(self): """ copy module def file to artifact path """ @@ -312,7 +311,7 @@ class Parser(object): config_output_folder = os.path.join(output_folder, config) if os.path.exists(config_output_folder): test_cases.extend(self.parse_test_cases_for_one_config(configs_folder, config_output_folder, config)) - test_cases.sort(key=lambda x: x["config"] + x["summary"]) + test_cases.sort(key=lambda x: x['config'] + x['summary']) self.dump_test_cases(test_cases) @@ -320,30 +319,30 @@ def test_parser(binary_folder): parser = 
Parser(binary_folder) # test parsing tags # parsing module only and module in module list - prop = parser.parse_case_properties("[esp32]") - assert prop["module"] == "esp32" + prop = parser.parse_case_properties('[esp32]') + assert prop['module'] == 'esp32' # module not in module list - prop = parser.parse_case_properties("[not_in_list]") - assert prop["module"] == "misc" + prop = parser.parse_case_properties('[not_in_list]') + assert prop['module'] == 'misc' # parsing a default tag, a tag with assigned value - prop = parser.parse_case_properties("[esp32][ignore][test_env=ABCD][not_support1][not_support2=ABCD]") - assert prop["ignore"] == "Yes" and prop["test_env"] == "ABCD" \ - and "not_support1" not in prop and "not_supported2" not in prop + prop = parser.parse_case_properties('[esp32][ignore][test_env=ABCD][not_support1][not_support2=ABCD]') + assert prop['ignore'] == 'Yes' and prop['test_env'] == 'ABCD' \ + and 'not_support1' not in prop and 'not_supported2' not in prop # parsing omitted value - prop = parser.parse_case_properties("[esp32]") - assert prop["ignore"] == "No" and prop["test_env"] == "UT_T1_1" + prop = parser.parse_case_properties('[esp32]') + assert prop['ignore'] == 'No' and prop['test_env'] == 'UT_T1_1' # parsing with incorrect format try: - parser.parse_case_properties("abcd") + parser.parse_case_properties('abcd') assert False except AssertionError: pass # skip invalid data parse, [type=] assigns empty string to type - prop = parser.parse_case_properties("[esp32]abdc aaaa [ignore=]") - assert prop["module"] == "esp32" and prop["ignore"] == "" + prop = parser.parse_case_properties('[esp32]abdc aaaa [ignore=]') + assert prop['module'] == 'esp32' and prop['ignore'] == '' # skip mis-paired [] - prop = parser.parse_case_properties("[esp32][[ignore=b]][]][test_env=AAA]]") - assert prop["module"] == "esp32" and prop["ignore"] == "b" and prop["test_env"] == "AAA" + prop = parser.parse_case_properties('[esp32][[ignore=b]][]][test_env=AAA]]') + assert prop['module'] == 'esp32' and prop['ignore'] == 'b' and prop['test_env'] == 'AAA' config_dependency = { 'a': '123', @@ -353,7 +352,7 @@ def test_parser(binary_folder): 'e': '{123} and not {789}', 'f': '({123} and {456}) or ({123} and {789})' } - sdkconfig = ["123", "789"] + sdkconfig = ['123', '789'] tags = parser.parse_tags_internal(sdkconfig, config_dependency, parser.CONFIG_PATTERN) assert sorted(tags) == ['a', 'd', 'f'] # sorted is required for older Python3, e.g. 3.4.8 diff --git a/tools/unit-test-app/unit_test.py b/tools/unit-test-app/unit_test.py index 8f9c827596..470a488365 100755 --- a/tools/unit-test-app/unit_test.py +++ b/tools/unit-test-app/unit_test.py @@ -18,30 +18,30 @@ Test script for unit test case. """ -import re -import time import argparse +import re import threading +import time -from tiny_test_fw import TinyFW, Utility, Env, DUT +import ttfw_idf +from tiny_test_fw import DUT, Env, TinyFW, Utility from tiny_test_fw.TinyFW import TestCaseFailed from tiny_test_fw.Utility import handle_unexpected_exception -import ttfw_idf -UT_APP_BOOT_UP_DONE = "Press ENTER to see the list of tests." +UT_APP_BOOT_UP_DONE = 'Press ENTER to see the list of tests.' 
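
An aside on the pattern re-quoted just below: STRIP_CONFIG_PATTERN exists because, as the comments in these files note, cases of one config can be split into multiple binaries ('default', 'default_2', ...) due to limited ROM space, and reports should fold them back to one config family. A minimal sketch of that folding, assuming made-up config names that are not taken from this patch:

    import re

    # non-greedy base name, then an optional numeric suffix such as '_2'
    STRIP_CONFIG_PATTERN = re.compile(r'(.+?)(_\d+)?$')

    for config in ('default', 'default_2', 'psram_8m'):
        # group(1) is the config family: 'default' and 'default_2' fold
        # together, while 'psram_8m' is untouched because the trailing 'm'
        # keeps '_8m' from matching the numeric-suffix group
        print(STRIP_CONFIG_PATTERN.match(config).group(1))
    # prints: default, default, psram_8m
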
-STRIP_CONFIG_PATTERN = re.compile(r"(.+?)(_\d+)?$") +STRIP_CONFIG_PATTERN = re.compile(r'(.+?)(_\d+)?$') # matches e.g.: "rst:0xc (SW_CPU_RESET),boot:0x13 (SPI_FAST_FLASH_BOOT)" -RESET_PATTERN = re.compile(r"(rst:0x[0-9a-fA-F]*\s\([\w].*?\),boot:0x[0-9a-fA-F]*\s\([\w].*?\))") +RESET_PATTERN = re.compile(r'(rst:0x[0-9a-fA-F]*\s\([\w].*?\),boot:0x[0-9a-fA-F]*\s\([\w].*?\))') EXCEPTION_PATTERN = re.compile(r"(Guru Meditation Error: Core\s+\d panic'ed \([\w].*?\))") -ABORT_PATTERN = re.compile(r"(abort\(\) was called at PC 0x[a-fA-F\d]{8} on core \d)") -FINISH_PATTERN = re.compile(r"1 Tests (\d) Failures (\d) Ignored") +ABORT_PATTERN = re.compile(r'(abort\(\) was called at PC 0x[a-fA-F\d]{8} on core \d)') +FINISH_PATTERN = re.compile(r'1 Tests (\d) Failures (\d) Ignored') END_LIST_STR = r'\r?\nEnter test for running' TEST_PATTERN = re.compile(r'\((\d+)\)\s+"([^"]+)" ([^\r\n]+)\r?\n(' + END_LIST_STR + r')?') TEST_SUBMENU_PATTERN = re.compile(r'\s+\((\d+)\)\s+"[^"]+"\r?\n(?=(?=\()|(' + END_LIST_STR + r'))') -UT_APP_PATH = "tools/unit-test-app" +UT_APP_PATH = 'tools/unit-test-app' SIMPLE_TEST_ID = 0 MULTI_STAGE_ID = 1 @@ -56,9 +56,9 @@ TEST_HISTORY_CHECK_TIMEOUT = 2 def reset_reason_matches(reported_str, expected_str): known_aliases = { - "_RESET": "_RST", - "POWERON_RESET": "POWERON", - "DEEPSLEEP_RESET": "DSLEEP", + '_RESET': '_RST', + 'POWERON_RESET': 'POWERON', + 'DEEPSLEEP_RESET': 'DSLEEP', } if expected_str in reported_str: @@ -105,32 +105,32 @@ def format_test_case_config(test_case_data): # strip space and remove white space only items _output = list() for _r in reset_list: - _data = _r.strip(" ") + _data = _r.strip(' ') if _data: _output.append(_data) return _output _case = dict() if isinstance(one_case_data, str): - _temp = one_case_data.split(" [reset=") - _case["name"] = _temp[0] + _temp = one_case_data.split(' [reset=') + _case['name'] = _temp[0] try: - _case["reset"] = process_reset_list(_temp[1][0:-1].split(",")) + _case['reset'] = process_reset_list(_temp[1][0:-1].split(',')) except IndexError: - _case["reset"] = list() + _case['reset'] = list() elif isinstance(one_case_data, dict): _case = one_case_data.copy() - assert "name" in _case - if "reset" not in _case: - _case["reset"] = list() + assert 'name' in _case + if 'reset' not in _case: + _case['reset'] = list() else: - if isinstance(_case["reset"], str): - _case["reset"] = process_reset_list(_case["reset"].split(",")) + if isinstance(_case['reset'], str): + _case['reset'] = process_reset_list(_case['reset'].split(',')) else: - raise TypeError("Not supported type during parsing unit test case") + raise TypeError('Not supported type during parsing unit test case') - if "config" not in _case: - _case["config"] = "default" + if 'config' not in _case: + _case['config'] = 'default' return _case @@ -140,9 +140,9 @@ def format_test_case_config(test_case_data): for case_data in test_case_data: parsed_case = parse_case(case_data) try: - case_config[parsed_case["config"]].append(parsed_case) + case_config[parsed_case['config']].append(parsed_case) except KeyError: - case_config[parsed_case["config"]] = [parsed_case] + case_config[parsed_case['config']] = [parsed_case] return case_config @@ -154,16 +154,16 @@ def replace_app_bin(dut, name, new_app_bin): for i, config in enumerate(dut.download_config): if config.endswith(search_pattern): dut.download_config[i] = new_app_bin - Utility.console_log("The replaced application binary is {}".format(new_app_bin), "O") + Utility.console_log('The replaced application binary is 
{}'.format(new_app_bin), 'O') break def format_case_name(case): # we could split cases of same config into multiple binaries as we have limited rom space # we should regard those configs like `default` and `default_2` as the same config - match = STRIP_CONFIG_PATTERN.match(case["config"]) + match = STRIP_CONFIG_PATTERN.match(case['config']) stripped_config_name = match.group(1) - return "[{}] {}".format(stripped_config_name, case["name"]) + return '[{}] {}'.format(stripped_config_name, case['name']) def reset_dut(dut): @@ -180,19 +180,19 @@ def reset_dut(dut): # To solve this problem, we will add a delay between reset and input `-` command. And we'll also enlarge expect timeout. time.sleep(DUT_DELAY_AFTER_RESET) for _ in range(DUT_STARTUP_CHECK_RETRY_COUNT): - dut.write("-") + dut.write('-') try: - dut.expect("0 Tests 0 Failures 0 Ignored", timeout=TEST_HISTORY_CHECK_TIMEOUT) + dut.expect('0 Tests 0 Failures 0 Ignored', timeout=TEST_HISTORY_CHECK_TIMEOUT) break except DUT.ExpectTimeout: pass else: - raise AssertionError("Reset {} ({}) failed!".format(dut.name, dut.port)) + raise AssertionError('Reset {} ({}) failed!'.format(dut.name, dut.port)) def log_test_case(description, test_case, ut_config): - Utility.console_log("Running {} '{}' (config {})".format(description, test_case["name"], ut_config), color="orange") - Utility.console_log("Tags: %s" % ", ".join("%s=%s" % (k, v) for (k, v) in test_case.items() if k != "name" and v is not None), color="orange") + Utility.console_log("Running {} '{}' (config {})".format(description, test_case['name'], ut_config), color='orange') + Utility.console_log('Tags: %s' % ', '.join('%s=%s' % (k, v) for (k, v) in test_case.items() if k != 'name' and v is not None), color='orange') def run_one_normal_case(dut, one_case, junit_test_case): @@ -200,8 +200,8 @@ def run_one_normal_case(dut, one_case, junit_test_case): dut.start_capture_raw_data() # run test case - dut.write("\"{}\"".format(one_case["name"])) - dut.expect("Running " + one_case["name"] + "...") + dut.write("\"{}\"".format(one_case['name'])) + dut.expect('Running ' + one_case['name'] + '...') exception_reset_list = [] @@ -215,9 +215,9 @@ def run_one_normal_case(dut, one_case, junit_test_case): test_finish.append(True) output = dut.stop_capture_raw_data() if result: - Utility.console_log("Success: " + format_case_name(one_case), color="green") + Utility.console_log('Success: ' + format_case_name(one_case), color='green') else: - Utility.console_log("Failed: " + format_case_name(one_case), color="red") + Utility.console_log('Failed: ' + format_case_name(one_case), color='red') junit_test_case.add_failure_info(output) raise TestCaseFailed(format_case_name(one_case)) @@ -234,30 +234,30 @@ def run_one_normal_case(dut, one_case, junit_test_case): assert not exception_reset_list if int(data[1]): # case ignored - Utility.console_log("Ignored: " + format_case_name(one_case), color="orange") - junit_test_case.add_skipped_info("ignored") + Utility.console_log('Ignored: ' + format_case_name(one_case), color='orange') + junit_test_case.add_skipped_info('ignored') one_case_finish(not int(data[0])) def handle_reset_finish(data): """ reset happened and reboot finished """ assert exception_reset_list # reboot but no exception/reset logged. 
should never happen result = False - if len(one_case["reset"]) == len(exception_reset_list): + if len(one_case['reset']) == len(exception_reset_list): for i, exception in enumerate(exception_reset_list): - if not reset_reason_matches(exception, one_case["reset"][i]): + if not reset_reason_matches(exception, one_case['reset'][i]): break else: result = True if not result: - err_msg = "Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}".format(one_case["reset"], + err_msg = 'Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}'.format(one_case['reset'], exception_reset_list) - Utility.console_log(err_msg, color="orange") + Utility.console_log(err_msg, color='orange') junit_test_case.add_failure_info(err_msg) one_case_finish(result) while not test_finish: try: - timeout_value = one_case["timeout"] + timeout_value = one_case['timeout'] dut.expect_any((RESET_PATTERN, handle_exception_reset), (EXCEPTION_PATTERN, handle_exception_reset), (ABORT_PATTERN, handle_exception_reset), @@ -265,13 +265,13 @@ def run_one_normal_case(dut, one_case, junit_test_case): (UT_APP_BOOT_UP_DONE, handle_reset_finish), timeout=timeout_value) except DUT.ExpectTimeout: - Utility.console_log("Timeout in expect (%s seconds)" % timeout_value, color="orange") - junit_test_case.add_failure_info("timeout") + Utility.console_log('Timeout in expect (%s seconds)' % timeout_value, color='orange') + junit_test_case.add_failure_info('timeout') one_case_finish(False) break -@ttfw_idf.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True) +@ttfw_idf.idf_unit_test(env_tag='UT_T1_1', junit_report_by_case=True) def run_unit_test_cases(env, extra_data): """ extra_data can be three types of value @@ -297,15 +297,15 @@ def run_unit_test_cases(env, extra_data): failed_cases = [] for ut_config in case_config: - Utility.console_log("Running unit test for config: " + ut_config, "O") - dut = env.get_dut("unit-test-app", app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True) + Utility.console_log('Running unit test for config: ' + ut_config, 'O') + dut = env.get_dut('unit-test-app', app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True) if len(case_config[ut_config]) > 0: - replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin')) + replace_app_bin(dut, 'unit-test-app', case_config[ut_config][0].get('app_bin')) dut.start_app() - Utility.console_log("Download finished, start running test cases", "O") + Utility.console_log('Download finished, start running test cases', 'O') for one_case in case_config[ut_config]: - log_test_case("test case", one_case, ut_config) + log_test_case('test case', one_case, ut_config) performance_items = [] # create junit report test case junit_test_case = TinyFW.JunitReport.create_test_case(format_case_name(one_case)) @@ -325,34 +325,34 @@ def run_unit_test_cases(env, extra_data): # raise exception if any case fails if failed_cases: - Utility.console_log("Failed Cases:", color="red") + Utility.console_log('Failed Cases:', color='red') for _case_name in failed_cases: - Utility.console_log("\t" + _case_name, color="red") + Utility.console_log('\t' + _case_name, color='red') raise TestCaseFailed(*failed_cases) class Handler(threading.Thread): WAIT_SIGNAL_PATTERN = re.compile(r'Waiting for signal: \[(.+)]!') SEND_SIGNAL_PATTERN = re.compile(r'Send signal: \[([^]]+)](\[([^]]+)])?!') - FINISH_PATTERN = re.compile(r"1 Tests (\d) Failures (\d) Ignored") + FINISH_PATTERN = re.compile(r'1 Tests (\d) Failures (\d) Ignored') def __init__(self, dut, sent_signal_list, 
lock, parent_case_name, child_case_index, timeout):
         self.dut = dut
         self.sent_signal_list = sent_signal_list
         self.lock = lock
         self.parent_case_name = parent_case_name
-        self.child_case_name = ""
+        self.child_case_name = ''
         self.child_case_index = child_case_index + 1
         self.finish = False
         self.result = False
-        self.output = ""
+        self.output = ''
         self.fail_name = None
         self.timeout = timeout
         self.force_stop = threading.Event()  # it shows the running status
 
         reset_dut(self.dut)  # reset the board to make it start from the beginning
 
-        threading.Thread.__init__(self, name="{} Handler".format(dut))
+        threading.Thread.__init__(self, name='{} Handler'.format(dut))
 
     def run(self):
@@ -367,7 +367,7 @@
             """ one test finished, let expect loop break and log result """
             self.finish = True
             self.result = result
-            self.output = "[{}]\n\n{}\n".format(self.child_case_name,
+            self.output = '[{}]\n\n{}\n'.format(self.child_case_name,
                                                 self.dut.stop_capture_raw_data())
             if not result:
                 self.fail_name = self.child_case_name
@@ -377,12 +377,12 @@
             expected_signal = data[0].encode('utf-8')
             while 1:
                 if time.time() > start_time + self.timeout:
-                    Utility.console_log("Timeout in device for function: %s" % self.child_case_name, color="orange")
+                    Utility.console_log('Timeout in device for function: %s' % self.child_case_name, color='orange')
                     break
                 with self.lock:
                     for sent_signal in self.sent_signal_list:
-                        if expected_signal == sent_signal["name"]:
-                            self.dut.write(sent_signal["parameter"])
+                        if expected_signal == sent_signal['name']:
+                            self.dut.write(sent_signal['parameter'])
                             self.sent_signal_list.remove(sent_signal)
                             break
                     else:
@@ -393,8 +393,8 @@
         def device_send_action(data):
             with self.lock:
                 self.sent_signal_list.append({
-                    "name": data[0].encode('utf-8'),
-                    "parameter": "" if data[2] is None else data[2].encode('utf-8')
+                    'name': data[0].encode('utf-8'),
+                    'parameter': '' if data[2] is None else data[2].encode('utf-8')
                     # no parameter means we only write EOL to DUT
                 })
 
@@ -403,15 +403,15 @@
             # in this scenario reset should not happen
             if int(data[1]):
                 # case ignored
-                Utility.console_log("Ignored: " + self.child_case_name, color="orange")
+                Utility.console_log('Ignored: ' + self.child_case_name, color='orange')
             one_device_case_finish(not int(data[0]))
 
         try:
             time.sleep(1)
             self.dut.write("\"{}\"".format(self.parent_case_name))
-            self.dut.expect("Running " + self.parent_case_name + "...")
+            self.dut.expect('Running ' + self.parent_case_name + '...')
         except DUT.ExpectTimeout:
-            Utility.console_log("No case detected!", color="orange")
+            Utility.console_log('No case detected!', color='orange')
         while not self.finish and not self.force_stop.isSet():
             try:
                 self.dut.expect_any((re.compile('\(' + str(self.child_case_index) + '\)\s"(\w+)"'),  # noqa: W605 - regex
@@ -421,7 +421,7 @@
                                     (self.FINISH_PATTERN, handle_device_test_finish),  # test finish pattern
                                     timeout=self.timeout)
             except DUT.ExpectTimeout:
-                Utility.console_log("Timeout in expect (%s seconds)" % self.timeout, color="orange")
+                Utility.console_log('Timeout in expect (%s seconds)' % self.timeout, color='orange')
                 one_device_case_finish(False)
                 break
@@ -430,8 +430,8 @@
 
 def get_case_info(one_case):
-    parent_case = one_case["name"]
-    child_case_num = one_case["child case num"]
+    parent_case = one_case['name']
+    child_case_num = one_case['child case num']
     return parent_case, child_case_num
 
@@ -441,7 
+441,7 @@ def get_dut(duts, env, name, ut_config, app_bin=None): else: dut = env.get_dut(name, app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True) duts[name] = dut - replace_app_bin(dut, "unit-test-app", app_bin) + replace_app_bin(dut, 'unit-test-app', app_bin) dut.start_app() # download bin to board return dut @@ -454,13 +454,13 @@ def run_one_multiple_devices_case(duts, ut_config, env, one_case, app_bin, junit parent_case, case_num = get_case_info(one_case) for i in range(case_num): - dut = get_dut(duts, env, "dut%d" % i, ut_config, app_bin) + dut = get_dut(duts, env, 'dut%d' % i, ut_config, app_bin) threads.append(Handler(dut, send_signal_list, lock, - parent_case, i, one_case["timeout"])) + parent_case, i, one_case['timeout'])) for thread in threads: thread.setDaemon(True) thread.start() - output = "Multiple Device Failed\n" + output = 'Multiple Device Failed\n' for thread in threads: thread.join() result = result and thread.result @@ -480,7 +480,7 @@ def run_one_multiple_devices_case(duts, ut_config, env, one_case, app_bin, junit return result -@ttfw_idf.idf_unit_test(env_tag="UT_T2_1", junit_report_by_case=True) +@ttfw_idf.idf_unit_test(env_tag='UT_T2_1', junit_report_by_case=True) def run_multiple_devices_cases(env, extra_data): """ extra_data can be two types of value @@ -506,9 +506,9 @@ def run_multiple_devices_cases(env, extra_data): case_config = format_test_case_config(extra_data) duts = {} for ut_config in case_config: - Utility.console_log("Running unit test for config: " + ut_config, "O") + Utility.console_log('Running unit test for config: ' + ut_config, 'O') for one_case in case_config[ut_config]: - log_test_case("multi-device test", one_case, ut_config, ) + log_test_case('multi-device test', one_case, ut_config, ) result = False junit_test_case = TinyFW.JunitReport.create_test_case(format_case_name(one_case)) try: @@ -520,10 +520,10 @@ def run_multiple_devices_cases(env, extra_data): handle_unexpected_exception(junit_test_case, e) finally: if result: - Utility.console_log("Success: " + format_case_name(one_case), color="green") + Utility.console_log('Success: ' + format_case_name(one_case), color='green') else: failed_cases.append(format_case_name(one_case)) - Utility.console_log("Failed: " + format_case_name(one_case), color="red") + Utility.console_log('Failed: ' + format_case_name(one_case), color='red') TinyFW.JunitReport.test_case_finish(junit_test_case) # close all DUTs when finish running all cases for one config for dut in duts: @@ -531,9 +531,9 @@ def run_multiple_devices_cases(env, extra_data): duts = {} if failed_cases: - Utility.console_log("Failed Cases:", color="red") + Utility.console_log('Failed Cases:', color='red') for _case_name in failed_cases: - Utility.console_log("\t" + _case_name, color="red") + Utility.console_log('\t' + _case_name, color='red') raise TestCaseFailed(*failed_cases) @@ -544,10 +544,10 @@ def run_one_multiple_stage_case(dut, one_case, junit_test_case): exception_reset_list = [] - for test_stage in range(one_case["child case num"]): + for test_stage in range(one_case['child case num']): # select multi stage test case name - dut.write("\"{}\"".format(one_case["name"])) - dut.expect("Running " + one_case["name"] + "...") + dut.write("\"{}\"".format(one_case['name'])) + dut.expect('Running ' + one_case['name'] + '...') # select test function for current stage dut.write(str(test_stage + 1)) @@ -556,22 +556,22 @@ def run_one_multiple_stage_case(dut, one_case, junit_test_case): stage_finish = list() def last_stage(): 
- return test_stage == one_case["child case num"] - 1 + return test_stage == one_case['child case num'] - 1 def check_reset(): - if one_case["reset"]: + if one_case['reset']: assert exception_reset_list # reboot but no exception/reset logged. should never happen result = False - if len(one_case["reset"]) == len(exception_reset_list): + if len(one_case['reset']) == len(exception_reset_list): for i, exception in enumerate(exception_reset_list): - if not reset_reason_matches(exception, one_case["reset"][i]): + if not reset_reason_matches(exception, one_case['reset'][i]): break else: result = True if not result: - err_msg = "Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}".format(one_case["reset"], + err_msg = 'Reset Check Failed: \r\n\tExpected: {}\r\n\tGet: {}'.format(one_case['reset'], exception_reset_list) - Utility.console_log(err_msg, color="orange") + Utility.console_log(err_msg, color='orange') junit_test_case.add_failure_info(err_msg) else: # we allow omit reset in multi stage cases @@ -585,12 +585,12 @@ def run_one_multiple_stage_case(dut, one_case, junit_test_case): result = result and check_reset() output = dut.stop_capture_raw_data() if result: - Utility.console_log("Success: " + format_case_name(one_case), color="green") + Utility.console_log('Success: ' + format_case_name(one_case), color='green') else: - Utility.console_log("Failed: " + format_case_name(one_case), color="red") + Utility.console_log('Failed: ' + format_case_name(one_case), color='red') junit_test_case.add_failure_info(output) raise TestCaseFailed(format_case_name(one_case)) - stage_finish.append("break") + stage_finish.append('break') def handle_exception_reset(data): """ @@ -604,27 +604,27 @@ def run_one_multiple_stage_case(dut, one_case, junit_test_case): # in this scenario reset should not happen if int(data[1]): # case ignored - Utility.console_log("Ignored: " + format_case_name(one_case), color="orange") - junit_test_case.add_skipped_info("ignored") + Utility.console_log('Ignored: ' + format_case_name(one_case), color='orange') + junit_test_case.add_skipped_info('ignored') # only passed in last stage will be regarded as real pass if last_stage(): one_case_finish(not int(data[0])) else: - Utility.console_log("test finished before enter last stage", color="orange") + Utility.console_log('test finished before enter last stage', color='orange') one_case_finish(False) def handle_next_stage(data): """ reboot finished. 
we go to the next stage """
             if last_stage():
                 # already last stage, should never go to the next stage
-                Utility.console_log("didn't finish at last stage", color="orange")
+                Utility.console_log("didn't finish at last stage", color='orange')
                 one_case_finish(False)
             else:
-                stage_finish.append("continue")
+                stage_finish.append('continue')
 
         while not stage_finish:
             try:
-                timeout_value = one_case["timeout"]
+                timeout_value = one_case['timeout']
                 dut.expect_any((RESET_PATTERN, handle_exception_reset),
                                (EXCEPTION_PATTERN, handle_exception_reset),
                                (ABORT_PATTERN, handle_exception_reset),
@@ -632,15 +632,15 @@
                                (UT_APP_BOOT_UP_DONE, handle_next_stage),
                                timeout=timeout_value)
             except DUT.ExpectTimeout:
-                Utility.console_log("Timeout in expect (%s seconds)" % timeout_value, color="orange")
+                Utility.console_log('Timeout in expect (%s seconds)' % timeout_value, color='orange')
                 one_case_finish(False)
                 break
-        if stage_finish[0] == "break":
+        if stage_finish[0] == 'break':
             # test breaks on current stage
             break
 
 
-@ttfw_idf.idf_unit_test(env_tag="UT_T1_1", junit_report_by_case=True)
+@ttfw_idf.idf_unit_test(env_tag='UT_T1_1', junit_report_by_case=True)
 def run_multiple_stage_cases(env, extra_data):
     """
     extra_data can be two types of value
@@ -661,14 +661,14 @@
     failed_cases = []
 
     for ut_config in case_config:
-        Utility.console_log("Running unit test for config: " + ut_config, "O")
-        dut = env.get_dut("unit-test-app", app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True)
+        Utility.console_log('Running unit test for config: ' + ut_config, 'O')
+        dut = env.get_dut('unit-test-app', app_path=UT_APP_PATH, app_config_name=ut_config, allow_dut_exception=True)
         if len(case_config[ut_config]) > 0:
-            replace_app_bin(dut, "unit-test-app", case_config[ut_config][0].get('app_bin'))
+            replace_app_bin(dut, 'unit-test-app', case_config[ut_config][0].get('app_bin'))
         dut.start_app()
 
         for one_case in case_config[ut_config]:
-            log_test_case("multi-stage test", one_case, ut_config)
+            log_test_case('multi-stage test', one_case, ut_config)
             performance_items = []
             junit_test_case = TinyFW.JunitReport.create_test_case(format_case_name(one_case))
             try:
@@ -687,9 +687,9 @@
     # raise exception if any case fails
     if failed_cases:
-        Utility.console_log("Failed Cases:", color="red")
+        Utility.console_log('Failed Cases:', color='red')
         for _case_name in failed_cases:
-            Utility.console_log("\t" + _case_name, color="red")
+            Utility.console_log('\t' + _case_name, color='red')
         raise TestCaseFailed(*failed_cases)
 
 
@@ -697,14 +697,14 @@
 def detect_update_unit_test_info(env, extra_data, app_bin):
     case_config = format_test_case_config(extra_data)
 
     for ut_config in case_config:
-        dut = env.get_dut("unit-test-app", app_path=UT_APP_PATH, app_config_name=ut_config)
-        replace_app_bin(dut, "unit-test-app", app_bin)
+        dut = env.get_dut('unit-test-app', app_path=UT_APP_PATH, app_config_name=ut_config)
+        replace_app_bin(dut, 'unit-test-app', app_bin)
         dut.start_app()
 
         reset_dut(dut)
 
         # get the list of test cases
-        dut.write("")
+        dut.write('')
         dut.expect("Here's the test menu, pick your combo:", timeout=DEFAULT_TIMEOUT)
 
         def find_update_dic(name, _t, _timeout, child_case_num=None):
@@ -748,7 +748,7 @@
             if 'type' not in _dic:
                 raise ValueError("Unit test \"{}\" doesn't exist in the flashed device!".format(_dic.get('name')))
         except DUT.ExpectTimeout:
-            
Utility.console_log("Timeout during getting the test list", color="red") + Utility.console_log('Timeout during getting the test list', color='red') finally: dut.close() @@ -764,12 +764,12 @@ if __name__ == '__main__': type=int, default=1 ) - parser.add_argument("--env_config_file", "-e", - help="test env config file", + parser.add_argument('--env_config_file', '-e', + help='test env config file', default=None ) - parser.add_argument("--app_bin", "-b", - help="application binary file for flashing the chip", + parser.add_argument('--app_bin', '-b', + help='application binary file for flashing the chip', default=None ) parser.add_argument( @@ -810,7 +810,7 @@ if __name__ == '__main__': for index in range(1, args.repeat + 1): if args.repeat > 1: - Utility.console_log("Repetition {}".format(index), color="green") + Utility.console_log('Repetition {}'.format(index), color='green') for dic in list_of_dicts: t = dic.get('type', SIMPLE_TEST_ID) if t == SIMPLE_TEST_ID: diff --git a/tools/windows/eclipse_make.py b/tools/windows/eclipse_make.py index 4f877ccf82..f1b3ecdf68 100644 --- a/tools/windows/eclipse_make.py +++ b/tools/windows/eclipse_make.py @@ -2,11 +2,12 @@ # # Wrapper to run make and preprocess any paths in the output from MSYS Unix-style paths # to Windows paths, for Eclipse -from __future__ import print_function, division -import sys -import subprocess +from __future__ import division, print_function + import os.path import re +import subprocess +import sys UNIX_PATH_RE = re.compile(r'(/[^ \'"]+)+') @@ -20,24 +21,24 @@ def check_path(path): pass paths[path] = path # cache as failed, replace with success if it works try: - winpath = subprocess.check_output(["cygpath", "-w", path]).decode().strip() + winpath = subprocess.check_output(['cygpath', '-w', path]).decode().strip() except subprocess.CalledProcessError: return path # something went wrong running cygpath, assume this is not a path! 
if not os.path.exists(winpath): return path # not actually a valid path - winpath = winpath.replace("\\", "/") # make consistent with forward-slashes used elsewhere + winpath = winpath.replace('\\', '/') # make consistent with forward-slashes used elsewhere paths[path] = winpath return winpath def main(): print("Running make in '%s'" % check_path(os.getcwd())) - make = subprocess.Popen(["make"] + sys.argv[1:] + ["BATCH_BUILD=1"], stdout=subprocess.PIPE) + make = subprocess.Popen(['make'] + sys.argv[1:] + ['BATCH_BUILD=1'], stdout=subprocess.PIPE) for line in iter(make.stdout.readline, ''): line = re.sub(UNIX_PATH_RE, lambda m: check_path(m.group(0)), line) print(line.rstrip()) sys.exit(make.wait()) -if __name__ == "__main__": +if __name__ == '__main__': main() diff --git a/tools/windows/tool_setup/system_check/system_check_download.py b/tools/windows/tool_setup/system_check/system_check_download.py index 0dd76a2251..981222d8af 100644 --- a/tools/windows/tool_setup/system_check/system_check_download.py +++ b/tools/windows/tool_setup/system_check/system_check_download.py @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + download_url = sys.argv[1] output_filename = sys.argv[2] if (sys.version_info > (3, 0)): @@ -8,5 +9,5 @@ if (sys.version_info > (3, 0)): else: import urllib2 response = urllib2.urlopen(download_url) - with open(output_filename, "w") as output_file: + with open(output_filename, 'w') as output_file: output_file.write(response.read()) diff --git a/tools/windows/tool_setup/system_check/system_check_subprocess.py b/tools/windows/tool_setup/system_check/system_check_subprocess.py index d7309a0984..6936972344 100644 --- a/tools/windows/tool_setup/system_check/system_check_subprocess.py +++ b/tools/windows/tool_setup/system_check/system_check_subprocess.py @@ -1,5 +1,6 @@ #!/usr/bin/env python import sys + if (sys.version_info > (3, 0)): import subprocess - subprocess.run("cmd /c echo hello") + subprocess.run('cmd /c echo hello') diff --git a/tools/windows/tool_setup/system_check/system_check_virtualenv.py b/tools/windows/tool_setup/system_check/system_check_virtualenv.py index 669d8e77fe..5a1fea3574 100644 --- a/tools/windows/tool_setup/system_check/system_check_virtualenv.py +++ b/tools/windows/tool_setup/system_check/system_check_virtualenv.py @@ -1,10 +1,11 @@ #!/usr/bin/env python import sys + expected_executable = sys.argv[1] active_executable = sys.executable if expected_executable != active_executable: - print("Failure. Expected executable does not match current executable.") - print("Expected:", expected_executable) - print("Active: ", active_executable) + print('Failure. Expected executable does not match current executable.') + print('Expected:', expected_executable) + print('Active: ', active_executable) sys.exit(1)
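
Taken together, the hunks above are a mechanical sweep: imports regrouped and alphabetized, and plain double-quoted string literals switched to single quotes wherever the content allows, leaving alone strings that themselves contain quotes (e.g. "didn't finish at last stage"). The import ordering matches what isort produces and the quote changes match a fixer such as pre-commit's double-quote-string-fixer, though which exact tools generated this patch is an assumption here. A rough sketch of the detection side only, flagging the strings such a sweep would rewrite (not the tool used for this commit):

    import io
    import sys
    import tokenize

    def find_double_quoted(source):
        """Yield (row, col, token) for double-quoted strings with no
        embedded single quote or escapes, i.e. re-quoting candidates."""
        for tok in tokenize.generate_tokens(io.StringIO(source).readline):
            if tok.type != tokenize.STRING:
                continue
            body = tok.string.lstrip('brufBRUF')  # drop b/r/u/f prefixes
            if (body.startswith('"') and not body.startswith('"""')
                    and "'" not in body and '\\' not in body):
                yield tok.start[0], tok.start[1], tok.string

    if __name__ == '__main__':
        with open(sys.argv[1]) as f:
            for row, col, text in find_double_quoted(f.read()):
                print('%s:%d:%d: %s' % (sys.argv[1], row, col, text))

Triple-quoted docstrings and strings needing escape changes are deliberately skipped, which mirrors why lines like dut.write("\"{}\"".format(...)) appear untouched throughout the patch.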