mirror of https://github.com/espressif/esp-idf.git
synced 2024-10-05 20:47:46 -04:00

Tools: Fix Python style warnings

This commit is contained in:
parent f8729d905e
commit f935c17a95
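The hunks below are mechanical style cleanups flagged by the Python linters: call-style parentheses around statements are dropped (`assert(expr)` becomes `assert expr`, `if(cond):` becomes `if cond:`, `not(x)` becomes `not x`), `except(...)` gains a space, a Python-2-only import gets a `# type: ignore`, and bare `raise('message')` calls become `raise RuntimeError('message')`. As a minimal illustration (not part of the commit) of why the parenthesized `assert` spelling is worth avoiding: `assert` is a statement, not a function, so putting a condition and a message inside one pair of parentheses builds a tuple that is always truthy and never fails.

# Illustrative sketch only -- not from this commit.
size, page_size = 100, 64

assert(size <= page_size, 'image too large')     # 2-tuple: always passes (SyntaxWarning on 3.8+)

try:
    assert size <= page_size, 'image too large'  # statement form: fails as intended
except AssertionError as err:
    print('caught expected failure:', err)

The single-expression form touched throughout these tools works either way, but it reads like a function call and invites the tuple mistake, which is presumably why the style check asks for the plain statement form.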
@@ -118,7 +118,7 @@ class CertificateBundle:
             if start is True:
                 crt += strg
 
-        if(count == 0):
+        if count == 0:
             raise InputError('No certificate found')
 
         status('Successfully added %d certificates' % count)
@@ -178,7 +178,7 @@ class SpiffsObjLuPage(SpiffsPage):
         img += struct.pack(SpiffsPage._endianness_dict[self.build_config.endianness] +
                            SpiffsPage._len_dict[self.build_config.obj_id_len], obj_id)
 
-        assert(len(img) <= self.build_config.page_size)
+        assert len(img) <= self.build_config.page_size
 
         img += b'\xFF' * (self.build_config.page_size - len(img))
@@ -260,7 +260,7 @@ class SpiffsObjIndexPage(SpiffsObjPageWithIdx):
         img += struct.pack(SpiffsPage._endianness_dict[self.build_config.endianness] +
                            SpiffsPage._len_dict[self.build_config.page_ix_len], page)
 
-        assert(len(img) <= self.build_config.page_size)
+        assert len(img) <= self.build_config.page_size
 
         img += b'\xFF' * (self.build_config.page_size - len(img))
@@ -286,7 +286,7 @@ class SpiffsObjDataPage(SpiffsObjPageWithIdx):
 
         img += self.contents
 
-        assert(len(img) <= self.build_config.page_size)
+        assert len(img) <= self.build_config.page_size
 
         img += b'\xFF' * (self.build_config.page_size - len(img))
@@ -385,7 +385,7 @@ class SpiffsBlock(object):
         for page in self.pages:
             img += page.to_binary()
 
-        assert(len(img) <= self.build_config.block_size)
+        assert len(img) <= self.build_config.block_size
 
         img += b'\xFF' * (self.build_config.block_size - len(img))
         return img
@@ -36,22 +36,22 @@ def test_examples_system_esp_timer(env, extra_data):
     print('Start time: {} us'.format(start_time))
 
     groups = dut.expect(TIMER_DUMP_LINE_REGEX, timeout=2)
-    assert(groups[0] == 'periodic' and int(groups[1]) == INITIAL_TIMER_PERIOD)
+    assert groups[0] == 'periodic' and int(groups[1]) == INITIAL_TIMER_PERIOD
     groups = dut.expect(TIMER_DUMP_LINE_REGEX, timeout=2)
-    assert(groups[0] == 'one-shot' and int(groups[1]) == 0)
+    assert groups[0] == 'one-shot' and int(groups[1]) == 0
 
     for i in range(0, 5):
         groups = dut.expect(PERIODIC_TIMER_REGEX, timeout=2)
         cur_time = int(groups[0])
         diff = start_time + (i + 1) * INITIAL_TIMER_PERIOD - cur_time
         print('Callback #{}, time: {} us, diff: {} us'.format(i, cur_time, diff))
-        assert(abs(diff) < 100)
+        assert abs(diff) < 100
 
     groups = dut.expect(ONE_SHOT_REGEX, timeout=3)
     one_shot_timer_time = int(groups[0])
     diff = start_time + ONE_SHOT_TIMER_PERIOD - one_shot_timer_time
     print('One-shot timer, time: {} us, diff: {}'.format(one_shot_timer_time, diff))
-    assert(abs(diff) < 220)
+    assert abs(diff) < 220
 
     groups = dut.expect(RESTART_REGEX, timeout=3)
     start_time = int(groups[0])
@@ -62,7 +62,7 @@ def test_examples_system_esp_timer(env, extra_data):
         cur_time = int(groups[0])
         diff = start_time + (i + 1) * FINAL_TIMER_PERIOD - cur_time
         print('Callback #{}, time: {} us, diff: {} us'.format(i, cur_time, diff))
-        assert(abs(diff) < 100)
+        assert abs(diff) < 100
 
     groups = dut.expect(LIGHT_SLEEP_ENTER_REGEX, timeout=2)
     sleep_enter_time = int(groups[0])
@@ -73,14 +73,14 @@ def test_examples_system_esp_timer(env, extra_data):
     print('Enter sleep: {}, exit sleep: {}, slept: {}'.format(
         sleep_enter_time, sleep_exit_time, sleep_time))
 
-    assert(abs(sleep_time - LIGHT_SLEEP_TIME) < 1000)
+    assert abs(sleep_time - LIGHT_SLEEP_TIME) < 1000
 
     for i in range(5, 7):
         groups = dut.expect(PERIODIC_TIMER_REGEX, timeout=2)
         cur_time = int(groups[0])
         diff = abs(start_time + (i + 1) * FINAL_TIMER_PERIOD - cur_time)
         print('Callback #{}, time: {} us, diff: {} us'.format(i, cur_time, diff))
-        assert(diff < 100)
+        assert diff < 100
 
     dut.expect(STOP_REGEX, timeout=2)
 
@@ -32,7 +32,7 @@ def test_examples_system_light_sleep(env, extra_data):
     groups = dut.expect(EXIT_SLEEP_REGEX)
     print('Got second sleep period, wakeup from {}, slept for {}'.format(groups[0], groups[2]))
     # sleep time error should be less than 1ms
-    assert(groups[0] == 'timer' and int(groups[2]) == WAKEUP_INTERVAL_MS)
+    assert groups[0] == 'timer' and int(groups[2]) == WAKEUP_INTERVAL_MS
 
     # this time we'll test gpio wakeup
     dut.expect(ENTERING_SLEEP_STR)
@@ -41,7 +41,7 @@ def test_examples_system_light_sleep(env, extra_data):
     time.sleep(1)
     groups = dut.expect(EXIT_SLEEP_REGEX)
     print('Got third sleep period, wakeup from {}, slept for {}'.format(groups[0], groups[2]))
-    assert(groups[0] == 'pin' and int(groups[2]) < WAKEUP_INTERVAL_MS)
+    assert groups[0] == 'pin' and int(groups[2]) < WAKEUP_INTERVAL_MS
 
     dut.expect(WAITING_FOR_GPIO_STR)
     print('Is waiting for GPIO...')
@@ -50,7 +50,7 @@ def test_examples_system_light_sleep(env, extra_data):
     dut.expect(ENTERING_SLEEP_STR)
     print('Went to sleep again')
     groups = dut.expect(EXIT_SLEEP_REGEX)
-    assert(groups[0] == 'timer' and int(groups[2]) == WAKEUP_INTERVAL_MS)
+    assert groups[0] == 'timer' and int(groups[2]) == WAKEUP_INTERVAL_MS
     print('Woke up from timer again')
 
@@ -216,7 +216,7 @@ class IDFDUT(DUT.SerialDUT):
             if expected_rom_class and type(inst) != expected_rom_class:
                 raise RuntimeError('Target not expected')
             return inst.read_mac() is not None, get_target_by_rom_class(type(inst))
-        except(esptool.FatalError, RuntimeError):
+        except (esptool.FatalError, RuntimeError):
             return False, None
         finally:
             if inst is not None:
@@ -96,9 +96,9 @@ class ErrItem(object):
         base = '_BASE'
 
         if self.file == other.file:
-            if self.name.endswith(base) and not(other.name.endswith(base)):
+            if self.name.endswith(base) and not other.name.endswith(base):
                 return 1
-            elif not(self.name.endswith(base)) and other.name.endswith(base):
+            elif not self.name.endswith(base) and other.name.endswith(base):
                 return -1
 
         self_key = self.file + self.name
@@ -535,14 +535,14 @@ class IDFTool(object):
         self._current_options = self._current_options._replace(**override_dict)  # type: ignore
 
     def add_version(self, version):  # type: (IDFToolVersion) -> None
-        assert(type(version) is IDFToolVersion)
+        assert type(version) is IDFToolVersion
         self.versions[version.version] = version
 
     def get_path(self):  # type: () -> str
         return os.path.join(global_idf_tools_path, 'tools', self.name)  # type: ignore
 
     def get_path_for_version(self, version):  # type: (str) -> str
-        assert(version in self.versions)
+        assert version in self.versions
         return os.path.join(self.get_path(), version)
 
     def get_export_paths(self, version):  # type: (str) -> list[str]
@@ -668,7 +668,7 @@ class IDFTool(object):
             self.versions_installed.append(version)
 
     def download(self, version):  # type: (str) -> None
-        assert(version in self.versions)
+        assert version in self.versions
         download_obj = self.versions[version].get_download_for_platform(self._platform)
         if not download_obj:
             fatal('No packages for tool {} platform {}!'.format(self.name, self._platform))
@@ -724,12 +724,12 @@ class IDFTool(object):
     def install(self, version):  # type: (str) -> None
         # Currently this is called after calling 'download' method, so here are a few asserts
         # for the conditions which should be true once that method is done.
-        assert (version in self.versions)
+        assert version in self.versions
         download_obj = self.versions[version].get_download_for_platform(self._platform)
-        assert (download_obj is not None)
+        assert download_obj is not None
         archive_name = os.path.basename(download_obj.url)
         archive_path = os.path.join(global_idf_tools_path, 'dist', archive_name)  # type: ignore
-        assert (os.path.isfile(archive_path))
+        assert os.path.isfile(archive_path)
         dest_dir = self.get_path_for_version(version)
         if os.path.exists(dest_dir):
             warn('destination path already exists, removing')
@@ -179,7 +179,7 @@ def get_link_anchor(node):
     try:
         return 'CONFIG_%s' % node.item.name
     except AttributeError:
-        assert(node_is_menu(node))  # only menus should have no item.name
+        assert node_is_menu(node)  # only menus should have no item.name
 
     # for menus, build a link anchor out of the parents
     result = []
@@ -145,12 +145,12 @@ class EntityNode():
 
     def add_child(self, entity):
         child_specificity = self.entity.specificity.value + 1
-        assert(child_specificity <= Entity.Specificity.SYMBOL.value)
+        assert child_specificity <= Entity.Specificity.SYMBOL.value
         name = entity[Entity.Specificity(child_specificity)]
-        assert(name and name != Entity.ALL)
+        assert name and name != Entity.ALL
 
         child = [c for c in self.children if c.name == name]
-        assert(len(child) <= 1)
+        assert len(child) <= 1
 
         if not child:
             child = self.child_t(self, name)
@@ -185,7 +185,7 @@ class EntityNode():
         for sections in self.get_output_sections():
             placement = self.placements[sections]
             if placement.is_significant():
-                assert(placement.node == self)
+                assert placement.node == self
 
                 keep = False
                 sort = None
@@ -76,7 +76,7 @@ class InputSectionDesc():
     """
 
     def __init__(self, entity, sections, exclusions=None, keep=False, sort=None):
-        assert(entity.specificity != Entity.Specificity.SYMBOL)
+        assert entity.specificity != Entity.Specificity.SYMBOL
 
         self.entity = entity
         self.sections = set(sections)
@@ -84,8 +84,8 @@ class InputSectionDesc():
         self.exclusions = set()
 
         if exclusions:
-            assert(not [e for e in exclusions if e.specificity == Entity.Specificity.SYMBOL or
                         e.specificity == Entity.Specificity.NONE])
+            assert not [e for e in exclusions if e.specificity == Entity.Specificity.SYMBOL or
                         e.specificity == Entity.Specificity.NONE]
             self.exclusions = set(exclusions)
         else:
             self.exclusions = set()
@@ -26,7 +26,7 @@ from functools import partial
 from future.utils import iteritems
 
 try:
-    from itertools import izip as zip
+    from itertools import izip as zip  # type: ignore
 except ImportError:
     # Python 3
     pass
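A note on the `# type: ignore` added in the hunk above: `itertools.izip` exists only on Python 2, so a Python 3 type checker such as mypy reports the import as missing even though the `except ImportError` fallback keeps runtime behaviour unchanged; the comment is presumably there to silence that report. A small sketch of the same compatibility pattern:

# Sketch of the Python-2/3 pattern annotated above: on Python 3 the import
# fails and the built-in zip is used instead; "# type: ignore" keeps the
# type checker from flagging the Python-2-only name.
try:
    from itertools import izip as zip  # type: ignore
except ImportError:
    pass  # Python 3: built-in zip is already lazy

print(list(zip([1, 2, 3], ['a', 'b', 'c'])))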
@@ -82,7 +82,7 @@ class UF2Writer(object):
             md5_part = self._to_uint32(addr)
             md5_part += self._to_uint32(len_chunk)
             md5_part += hashlib.md5(chunk).digest()
-            assert(len(md5_part) == self.UF2_MD5_PART_SIZE)
+            assert len(md5_part) == self.UF2_MD5_PART_SIZE
 
             block += md5_part
             block += b'\x00' * (self.UF2_DATA_SIZE - self.UF2_MD5_PART_SIZE - len_chunk)
@@ -29,7 +29,7 @@ def set_server_cert_cn(ip):
                  '-CAkey', _path('ca.key'), '-CAcreateserial', '-out', _path('srv.crt'), '-days', '360']]
    for args in arg_list:
        if subprocess.check_call(args) != 0:
-            raise('openssl command {} failed'.format(args))
+            raise RuntimeError('openssl command {} failed'.format(args))
 
 
 def get_my_ip():
@@ -275,7 +275,7 @@ def connection_tests(dut, cases):
         dut.expect('MQTT_EVENT_ERROR: Test={}'.format(test_nr), timeout=30)
         dut.expect('ESP-TLS ERROR: ESP_ERR_MBEDTLS_SSL_HANDSHAKE_FAILED')  # expect ... handshake error (PEER_DID_NOT_RETURN_A_CERTIFICATE)
         if 'PEER_DID_NOT_RETURN_A_CERTIFICATE' not in s.get_last_ssl_error():
-            raise('Unexpected ssl error from the server {}'.format(s.get_last_ssl_error()))
+            raise RuntimeError('Unexpected ssl error from the server {}'.format(s.get_last_ssl_error()))
 
     for case in ['CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH', 'CONFIG_EXAMPLE_CONNECT_CASE_MUTUAL_AUTH_KEY_PWD']:
         # These cases connect to server with both server and client verification (client key might be password protected)