Tools: add Python 2 deprecation warning

martin.gano 2020-11-04 23:32:40 +01:00 committed by Martin Gaňo
parent 0416586dcc
commit 7af26fa6d6
12 changed files with 1571 additions and 8 deletions

View File

@@ -276,6 +276,10 @@ def erase_ota_partition(args):
def main():
if sys.version_info[0] < 3:
print("WARNING: Support for Python 2 is deprecated and will be removed in future versions.")
elif sys.version_info[0] == 3 and sys.version_info[1] < 6:
print("WARNING: Python 3 versions older than 3.6 are not supported.")
global quiet
parser = argparse.ArgumentParser("ESP-IDF OTA Partitions Tool")

View File

@@ -448,6 +448,10 @@ def create_output_files(name, output_table, debug):
def main():
if sys.version_info[0] < 3:
print("WARNING: Support for Python 2 is deprecated and will be removed in future versions.", file=sys.stderr)
elif sys.version_info[0] == 3 and sys.version_info[1] < 6:
print("WARNING: Python 3 versions older than 3.6 are not supported.", file=sys.stderr)
global quiet
global max_blk_len

components/spiffs/spiffsgen.py (new executable file, 537 lines)
View File

@@ -0,0 +1,537 @@
#!/usr/bin/env python
#
# spiffsgen is a tool used to generate a spiffs image from a directory
#
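# Example invocation (a sketch; the image size, directory and output path are illustrative):
#   python spiffsgen.py 0x100000 image_dir image.bin
#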
# Copyright 2019 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division, print_function
import os
import sys
import io
import math
import struct
import argparse
import ctypes
SPIFFS_PH_FLAG_USED_FINAL_INDEX = 0xF8
SPIFFS_PH_FLAG_USED_FINAL = 0xFC
SPIFFS_PH_FLAG_LEN = 1
SPIFFS_PH_IX_SIZE_LEN = 4
SPIFFS_PH_IX_OBJ_TYPE_LEN = 1
SPIFFS_TYPE_FILE = 1
# Based on typedefs under spiffs_config.h
SPIFFS_OBJ_ID_LEN = 2 # spiffs_obj_id
SPIFFS_SPAN_IX_LEN = 2 # spiffs_span_ix
SPIFFS_PAGE_IX_LEN = 2 # spiffs_page_ix
SPIFFS_BLOCK_IX_LEN = 2 # spiffs_block_ix
class SpiffsBuildConfig():
def __init__(self, page_size, page_ix_len, block_size,
block_ix_len, meta_len, obj_name_len, obj_id_len,
span_ix_len, packed, aligned, endianness, use_magic, use_magic_len):
if block_size % page_size != 0:
raise RuntimeError("block size should be a multiple of page size")
self.page_size = page_size
self.block_size = block_size
self.obj_id_len = obj_id_len
self.span_ix_len = span_ix_len
self.packed = packed
self.aligned = aligned
self.obj_name_len = obj_name_len
self.meta_len = meta_len
self.page_ix_len = page_ix_len
self.block_ix_len = block_ix_len
self.endianness = endianness
self.use_magic = use_magic
self.use_magic_len = use_magic_len
self.PAGES_PER_BLOCK = self.block_size // self.page_size
self.OBJ_LU_PAGES_PER_BLOCK = int(math.ceil(self.block_size / self.page_size * self.obj_id_len / self.page_size))
self.OBJ_USABLE_PAGES_PER_BLOCK = self.PAGES_PER_BLOCK - self.OBJ_LU_PAGES_PER_BLOCK
self.OBJ_LU_PAGES_OBJ_IDS_LIM = self.page_size // self.obj_id_len
self.OBJ_DATA_PAGE_HEADER_LEN = self.obj_id_len + self.span_ix_len + SPIFFS_PH_FLAG_LEN
pad = 4 - (4 if self.OBJ_DATA_PAGE_HEADER_LEN % 4 == 0 else self.OBJ_DATA_PAGE_HEADER_LEN % 4)
self.OBJ_DATA_PAGE_HEADER_LEN_ALIGNED = self.OBJ_DATA_PAGE_HEADER_LEN + pad
self.OBJ_DATA_PAGE_HEADER_LEN_ALIGNED_PAD = pad
self.OBJ_DATA_PAGE_CONTENT_LEN = self.page_size - self.OBJ_DATA_PAGE_HEADER_LEN
self.OBJ_INDEX_PAGES_HEADER_LEN = (self.OBJ_DATA_PAGE_HEADER_LEN_ALIGNED + SPIFFS_PH_IX_SIZE_LEN +
SPIFFS_PH_IX_OBJ_TYPE_LEN + self.obj_name_len + self.meta_len)
self.OBJ_INDEX_PAGES_OBJ_IDS_HEAD_LIM = (self.page_size - self.OBJ_INDEX_PAGES_HEADER_LEN) // self.block_ix_len
self.OBJ_INDEX_PAGES_OBJ_IDS_LIM = (self.page_size - self.OBJ_DATA_PAGE_HEADER_LEN_ALIGNED) // self.block_ix_len
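# Worked example (illustrative, assuming the tool's defaults: page_size=256,
# block_size=4096, obj_id_len=2, span_ix_len=2, block_ix_len=2,
# obj_name_len=32, meta_len=4):
#   PAGES_PER_BLOCK            = 4096 // 256 = 16
#   OBJ_LU_PAGES_PER_BLOCK     = ceil(16 * 2 / 256) = 1
#   OBJ_USABLE_PAGES_PER_BLOCK = 16 - 1 = 15
#   OBJ_DATA_PAGE_HEADER_LEN   = 2 + 2 + 1 = 5, padded to 8 for alignment
#   OBJ_DATA_PAGE_CONTENT_LEN  = 256 - 5 = 251
#   OBJ_INDEX_PAGES_HEADER_LEN = 8 + 4 + 1 + 32 + 4 = 49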
class SpiffsFullError(RuntimeError):
def __init__(self, message=None):
super(SpiffsFullError, self).__init__(message)
class SpiffsPage():
_endianness_dict = {
"little": "<",
"big": ">"
}
_len_dict = {
1: "B",
2: "H",
4: "I",
8: "Q"
}
_type_dict = {
1: ctypes.c_ubyte,
2: ctypes.c_ushort,
4: ctypes.c_uint,
8: ctypes.c_ulonglong
}
def __init__(self, bix, build_config):
self.build_config = build_config
self.bix = bix
class SpiffsObjLuPage(SpiffsPage):
def __init__(self, bix, build_config):
SpiffsPage.__init__(self, bix, build_config)
self.obj_ids_limit = self.build_config.OBJ_LU_PAGES_OBJ_IDS_LIM
self.obj_ids = list()
def _calc_magic(self, blocks_lim):
# Calculate the magic value mirroring the computation done by the macro SPIFFS_MAGIC defined in
# spiffs_nucleus.h
magic = 0x20140529 ^ self.build_config.page_size
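# e.g. with page_size=256: 0x20140529 ^ 0x100 = 0x20140429 (illustrative)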
if self.build_config.use_magic_len:
magic = magic ^ (blocks_lim - self.bix)
magic = SpiffsPage._type_dict[self.build_config.obj_id_len](magic)
return magic.value
def register_page(self, page):
if not self.obj_ids_limit > 0:
raise SpiffsFullError()
obj_id = (page.obj_id, page.__class__)
self.obj_ids.append(obj_id)
self.obj_ids_limit -= 1
def to_binary(self):
img = b""
for (obj_id, page_type) in self.obj_ids:
if page_type == SpiffsObjIndexPage:
obj_id ^= (1 << ((self.build_config.obj_id_len * 8) - 1))
img += struct.pack(SpiffsPage._endianness_dict[self.build_config.endianness] +
SpiffsPage._len_dict[self.build_config.obj_id_len], obj_id)
assert(len(img) <= self.build_config.page_size)
img += b"\xFF" * (self.build_config.page_size - len(img))
return img
def magicfy(self, blocks_lim):
# Only use magic value if no valid obj id has been written to the spot, which is the
# spot taken up by the last obj id on last lookup page. The parent is responsible
# for determining which is the last lookup page and calling this function.
remaining = self.obj_ids_limit
empty_obj_id_dict = {
1: 0xFF,
2: 0xFFFF,
4: 0xFFFFFFFF,
8: 0xFFFFFFFFFFFFFFFF
}
if (remaining >= 2):
for i in range(remaining):
if i == remaining - 2:
self.obj_ids.append((self._calc_magic(blocks_lim), SpiffsObjDataPage))
break
else:
self.obj_ids.append((empty_obj_id_dict[self.build_config.obj_id_len], SpiffsObjDataPage))
self.obj_ids_limit -= 1
class SpiffsObjIndexPage(SpiffsPage):
def __init__(self, obj_id, span_ix, size, name, build_config):
SpiffsPage.__init__(self, 0, build_config)
self.obj_id = obj_id
self.span_ix = span_ix
self.name = name
self.size = size
if self.span_ix == 0:
self.pages_lim = self.build_config.OBJ_INDEX_PAGES_OBJ_IDS_HEAD_LIM
else:
self.pages_lim = self.build_config.OBJ_INDEX_PAGES_OBJ_IDS_LIM
self.pages = list()
def register_page(self, page):
if not self.pages_lim > 0:
raise SpiffsFullError
self.pages.append(page.offset)
self.pages_lim -= 1
def to_binary(self):
obj_id = self.obj_id ^ (1 << ((self.build_config.obj_id_len * 8) - 1))
img = struct.pack(SpiffsPage._endianness_dict[self.build_config.endianness] +
SpiffsPage._len_dict[self.build_config.obj_id_len] +
SpiffsPage._len_dict[self.build_config.span_ix_len] +
SpiffsPage._len_dict[SPIFFS_PH_FLAG_LEN],
obj_id,
self.span_ix,
SPIFFS_PH_FLAG_USED_FINAL_INDEX)
# Add padding before the object index page specific information
img += b"\xFF" * self.build_config.OBJ_DATA_PAGE_HEADER_LEN_ALIGNED_PAD
# If this is the first object index page for the object, add filename, type
# and size information
if self.span_ix == 0:
img += struct.pack(SpiffsPage._endianness_dict[self.build_config.endianness] +
SpiffsPage._len_dict[SPIFFS_PH_IX_SIZE_LEN] +
SpiffsPage._len_dict[SPIFFS_PH_FLAG_LEN],
self.size,
SPIFFS_TYPE_FILE)
img += self.name.encode() + (b"\x00" * ((self.build_config.obj_name_len - len(self.name)) + self.build_config.meta_len))
# Finally, add the page indexes of data pages
for page in self.pages:
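# convert the page's byte offset into a page index (shifting right by log2(page_size) divides by the page size)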
page = page >> int(math.log(self.build_config.page_size, 2))
img += struct.pack(SpiffsPage._endianness_dict[self.build_config.endianness] +
SpiffsPage._len_dict[self.build_config.page_ix_len], page)
assert(len(img) <= self.build_config.page_size)
img += b"\xFF" * (self.build_config.page_size - len(img))
return img
class SpiffsObjDataPage(SpiffsPage):
def __init__(self, offset, obj_id, span_ix, contents, build_config):
SpiffsPage.__init__(self, 0, build_config)
self.obj_id = obj_id
self.span_ix = span_ix
self.contents = contents
self.offset = offset
def to_binary(self):
img = struct.pack(SpiffsPage._endianness_dict[self.build_config.endianness] +
SpiffsPage._len_dict[self.build_config.obj_id_len] +
SpiffsPage._len_dict[self.build_config.span_ix_len] +
SpiffsPage._len_dict[SPIFFS_PH_FLAG_LEN],
self.obj_id,
self.span_ix,
SPIFFS_PH_FLAG_USED_FINAL)
img += self.contents
assert(len(img) <= self.build_config.page_size)
img += b"\xFF" * (self.build_config.page_size - len(img))
return img
class SpiffsBlock():
def _reset(self):
self.cur_obj_index_span_ix = 0
self.cur_obj_data_span_ix = 0
self.cur_obj_id = 0
self.cur_obj_idx_page = None
def __init__(self, bix, blocks_lim, build_config):
self.build_config = build_config
self.offset = bix * self.build_config.block_size
self.remaining_pages = self.build_config.OBJ_USABLE_PAGES_PER_BLOCK
self.pages = list()
self.bix = bix
lu_pages = list()
for i in range(self.build_config.OBJ_LU_PAGES_PER_BLOCK):
page = SpiffsObjLuPage(self.bix, self.build_config)
lu_pages.append(page)
self.pages.extend(lu_pages)
self.lu_page_iter = iter(lu_pages)
self.lu_page = next(self.lu_page_iter)
self._reset()
def _register_page(self, page):
if isinstance(page, SpiffsObjDataPage):
self.cur_obj_idx_page.register_page(page) # can raise SpiffsFullError
try:
self.lu_page.register_page(page)
except SpiffsFullError:
try:
self.lu_page = next(self.lu_page_iter)
self.lu_page.register_page(page)
except StopIteration: # no next lookup page
# Since the amount of lookup pages is pre-computed at every block instance,
# this should never occur
raise RuntimeError("invalid attempt to add page to a block when there is no more space in lookup")
self.pages.append(page)
def begin_obj(self, obj_id, size, name, obj_index_span_ix=0, obj_data_span_ix=0):
if not self.remaining_pages > 0:
raise SpiffsFullError()
self._reset()
self.cur_obj_id = obj_id
self.cur_obj_index_span_ix = obj_index_span_ix
self.cur_obj_data_span_ix = obj_data_span_ix
page = SpiffsObjIndexPage(obj_id, self.cur_obj_index_span_ix, size, name, self.build_config)
self._register_page(page)
self.cur_obj_idx_page = page
self.remaining_pages -= 1
self.cur_obj_index_span_ix += 1
def update_obj(self, contents):
if not self.remaining_pages > 0:
raise SpiffsFullError()
page = SpiffsObjDataPage(self.offset + (len(self.pages) * self.build_config.page_size),
self.cur_obj_id, self.cur_obj_data_span_ix, contents, self.build_config)
self._register_page(page)
self.cur_obj_data_span_ix += 1
self.remaining_pages -= 1
def end_obj(self):
self._reset()
def is_full(self):
return self.remaining_pages <= 0
def to_binary(self, blocks_lim):
img = b""
if self.build_config.use_magic:
for (idx, page) in enumerate(self.pages):
if idx == self.build_config.OBJ_LU_PAGES_PER_BLOCK - 1:
page.magicfy(blocks_lim)
img += page.to_binary()
else:
for page in self.pages:
img += page.to_binary()
assert(len(img) <= self.build_config.block_size)
img += b"\xFF" * (self.build_config.block_size - len(img))
return img
class SpiffsFS():
def __init__(self, img_size, build_config):
if img_size % build_config.block_size != 0:
raise RuntimeError("image size should be a multiple of block size")
self.img_size = img_size
self.build_config = build_config
self.blocks = list()
self.blocks_lim = self.img_size // self.build_config.block_size
self.remaining_blocks = self.blocks_lim
self.cur_obj_id = 1 # starting object id
def _create_block(self):
if self.is_full():
raise SpiffsFullError("the image size has been exceeded")
block = SpiffsBlock(len(self.blocks), self.blocks_lim, self.build_config)
self.blocks.append(block)
self.remaining_blocks -= 1
return block
def is_full(self):
return self.remaining_blocks <= 0
def create_file(self, img_path, file_path):
contents = None
if len(img_path) > self.build_config.obj_name_len:
raise RuntimeError("object name '%s' too long" % img_path)
name = img_path
with open(file_path, "rb") as obj:
contents = obj.read()
stream = io.BytesIO(contents)
try:
block = self.blocks[-1]
block.begin_obj(self.cur_obj_id, len(contents), name)
except (IndexError, SpiffsFullError):
block = self._create_block()
block.begin_obj(self.cur_obj_id, len(contents), name)
contents_chunk = stream.read(self.build_config.OBJ_DATA_PAGE_CONTENT_LEN)
while contents_chunk:
try:
block = self.blocks[-1]
try:
# This can fail because either (1) all the pages in block have been
# used or (2) object index has been exhausted.
block.update_obj(contents_chunk)
except SpiffsFullError:
# If it's (1), use the outer exception handler
if block.is_full():
raise SpiffsFullError
# If it's (2), write another object index page
block.begin_obj(self.cur_obj_id, len(contents), name,
obj_index_span_ix=block.cur_obj_index_span_ix,
obj_data_span_ix=block.cur_obj_data_span_ix)
continue
except (IndexError, SpiffsFullError):
# All pages in the block have been exhausted. Create a new block, copying
# the previous state of the block to a new one for the continuation of the
# current object
prev_block = block
block = self._create_block()
block.cur_obj_id = prev_block.cur_obj_id
block.cur_obj_idx_page = prev_block.cur_obj_idx_page
block.cur_obj_data_span_ix = prev_block.cur_obj_data_span_ix
block.cur_obj_index_span_ix = prev_block.cur_obj_index_span_ix
continue
contents_chunk = stream.read(self.build_config.OBJ_DATA_PAGE_CONTENT_LEN)
block.end_obj()
self.cur_obj_id += 1
def to_binary(self):
img = b""
for block in self.blocks:
img += block.to_binary(self.blocks_lim)
bix = len(self.blocks)
if self.build_config.use_magic:
# Create empty blocks with magic numbers
while self.remaining_blocks > 0:
block = SpiffsBlock(bix, self.blocks_lim, self.build_config)
img += block.to_binary(self.blocks_lim)
self.remaining_blocks -= 1
bix += 1
else:
# Just fill the remaining space with 0xFF
img += b"\xFF" * (self.img_size - len(img))
return img
def main():
if sys.version_info[0] < 3:
print("WARNING: Support for Python 2 is deprecated and will be removed in future versions.")
elif sys.version_info[0] == 3 and sys.version_info[1] < 6:
print("WARNING: Python 3 versions older than 3.6 are not supported.")
parser = argparse.ArgumentParser(description="SPIFFS Image Generator",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("image_size",
help="Size of the created image")
parser.add_argument("base_dir",
help="Path to directory from which the image will be created")
parser.add_argument("output_file",
help="Created image output file path")
parser.add_argument("--page-size",
help="Logical page size. Set to value same as CONFIG_SPIFFS_PAGE_SIZE.",
type=int,
default=256)
parser.add_argument("--block-size",
help="Logical block size. Set to the same value as the flash chip's sector size (g_rom_flashchip.sector_size).",
type=int,
default=4096)
parser.add_argument("--obj-name-len",
help="File full path maximum length. Set to value same as CONFIG_SPIFFS_OBJ_NAME_LEN.",
type=int,
default=32)
parser.add_argument("--meta-len",
help="File metadata length. Set to value same as CONFIG_SPIFFS_META_LENGTH.",
type=int,
default=4)
parser.add_argument("--use-magic",
help="Use magic number to create an identifiable SPIFFS image. Specify if CONFIG_SPIFFS_USE_MAGIC.",
action="store_true",
default=True)
parser.add_argument("--follow-symlinks",
help="Take into account symbolic links during partition image creation.",
action="store_true",
default=False)
parser.add_argument("--use-magic-len",
help="Use position in memory to create different magic numbers for each block. Specify if CONFIG_SPIFFS_USE_MAGIC_LENGTH.",
action="store_true",
default=True)
parser.add_argument("--big-endian",
help="Specify if the target architecture is big-endian. If not specified, little-endian is assumed.",
action="store_true",
default=False)
args = parser.parse_args()
if not os.path.exists(args.base_dir):
raise RuntimeError("given base directory %s does not exist" % args.base_dir)
with open(args.output_file, "wb") as image_file:
image_size = int(args.image_size, 0)
spiffs_build_default = SpiffsBuildConfig(args.page_size, SPIFFS_PAGE_IX_LEN,
args.block_size, SPIFFS_BLOCK_IX_LEN, args.meta_len,
args.obj_name_len, SPIFFS_OBJ_ID_LEN, SPIFFS_SPAN_IX_LEN,
True, True, "big" if args.big_endian else "little",
args.use_magic, args.use_magic_len)
spiffs = SpiffsFS(image_size, spiffs_build_default)
for root, dirs, files in os.walk(args.base_dir, followlinks=args.follow_symlinks):
for f in files:
full_path = os.path.join(root, f)
spiffs.create_file("/" + os.path.relpath(full_path, args.base_dir).replace("\\", "/"), full_path)
image = spiffs.to_binary()
image_file.write(image)
if __name__ == "__main__":
main()

View File

@@ -5,7 +5,9 @@
# Copyright (c) 2016-2017 Espressif Systems (Shanghai) PTE LTD.
# Distributed under the terms of Apache License v2.0 found in the top-level LICENSE file.
from __future__ import print_function
from optparse import OptionParser
import sys
BASE_ADDR = 0x50000000
@@ -26,6 +28,10 @@ def gen_ld_h_from_sym(f_sym, f_ld, f_h):
def main():
if sys.version_info[0] < 3:
print("WARNING: Support for Python 2 is deprecated and will be removed in future versions.", file=sys.stderr)
elif sys.version_info[0] == 3 and sys.version_info[1] < 6:
print("WARNING: Python 3 versions older than 3.6 are not supported.", file=sys.stderr)
description = ("This application generates .h and .ld files for symbols defined in input file. "
"The input symbols file can be generated using nm utility like this: "
"esp32-ulp-nm -g -f posix <elf_file> > <symbols_file>")

View File

@@ -0,0 +1,343 @@
#!/usr/bin/env python
# coding=utf-8
#
# This script decodes Xtensa CPU trace dumps. It allows tracing the program
# execution at instruction level.
#
# Some trivia about the Xtensa CPU trace (TRAX):
# TRAX format mostly follows the IEEE-ISTO 5001-2003 (Nexus) standard.
# The following Nexus Program Trace messages are implemented by TRAX:
# - Indirect Branch Message
# - Synchronization Message
# - Indirect Branch with Synchronization Message
# - Correlation Message
# TRAX outputs compressed traces with 2 MSEO bits (LSB) and 6 MDO bits (MSB),
# packed into a byte. MSEO bits are used to split the stream into packets and messages,
# and MDO bits carry the actual data of the messages. Each message may contain multiple packets.
#
# This script can be used standalone, or loaded into GDB.
# When used standalone, it dumps the list of trace messages to stdout.
# When used from GDB, it also invokes GDB command to dump the list of assembly
# instructions corresponding to each of the messages.
#
# Standalone usage:
# traceparse.py <dump_file>
#
# Usage from GDB:
# xtensa-esp32-elf-gdb -n --batch program.elf -x gdbinit
# with the following gdbinit script:
# set pagination off
# set confirm off
# add-symbol-file rom.elf <address of ROM .text section>
# source traceparse.py
# python parse_and_dump("/path/to/dump_file")
#
# Loading the ROM code is optional; if not loaded, disassembly for ROM sections of code
# will be missing.
#
###
# Copyright 2020 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import sys
# Check if loaded into GDB
try:
assert gdb.__name__ == "gdb"
WITH_GDB = True
except NameError:
WITH_GDB = False
# MSEO bit masks:
MSEO_PKTEND = 1 << 0 # bit 0: indicates the last byte of a packet
MSEO_MSGEND = 1 << 1 # bit 1: indicates the last byte of the message
# Message types. The type is stored in the first 6 MDO bits of the first packet.
TVAL_INDBR = 4 # Indirect branch
TVAL_INDBRSYNC = 12 # Indirect branch w/ synchronization
TVAL_SYNC = 9 # Synchronization msg
TVAL_CORR = 33 # Correlation message
class TraxPacket(object):
def __init__(self, data, truncated=False):
self.data = data
self.size_bytes = len(data)
self.truncated = truncated
def get_bits(self, start, count=0):
"""
Extract data bits from the packet
:param start: offset, in bits, of the part to be extracted
:param count: number of bits to extract; if omitted or zero,
extracts until the end of the packet
:return: integer containing the extracted bits
"""
start_byte = start // 6
if count <= 0:
# all remaining bits
count = len(self.data) * 6 - start
bits_remaining = count
result = 0
shift = 0
for i, b in enumerate(self.data[start_byte:]):
# which bit in the byte is the starting bit
if i == 0:
# at start_byte: take the offset into account
start_bit = 2 + (start % 6)
else:
# every other byte: start after MSEO bits
start_bit = 2
# how many bits do we need to copy from this byte
cnt_bits = min(bits_remaining, 8 - start_bit)
mask = (2 ** cnt_bits) - 1
# take this many bits after the start_bit
bits = (b >> start_bit) & mask
# add these bits to the result
result |= bits << shift
# update the remaining bit count
shift += cnt_bits
bits_remaining -= cnt_bits
if bits_remaining == 0:
break
return result
def __str__(self):
return "%d byte packet%s" % (self.size_bytes, " (truncated)" if self.truncated else "")
class TraxMessage(object):
def __init__(self, packets, truncated=False):
"""
Create and parse a TRAX message from packets
:param packets: list of TraxPacket objects, must not be empty
:param truncated: whether the message was truncated in the stream
"""
assert len(packets) > 0
self.packets = packets
self.truncated = truncated
if truncated:
self.msg_type = None
else:
self.msg_type = self._get_type()
# Start and end of the instruction range corresponding to this message
self.pc_start = 0 # inclusive
self.pc_end = 0 # not inclusive
self.pc_target = 0 # PC of the next range
self.is_exception = False # whether the message indicates an exception
self.is_correlation = False # whether this is a correlation message
# message-specific fields
self.icnt = 0
self.uaddr = 0
self.dcont = 0
# decode the fields
if not truncated:
self._decode()
def _get_type(self):
"""
:return: Message type, one of TVAL_XXX values
"""
return self.packets[0].get_bits(0, 6)
def _decode(self):
""" Parse the packets and fill in the message-specific fields """
if self.msg_type == TVAL_INDBR:
self.icnt = self.packets[0].get_bits(7, -1)
self.btype = self.packets[0].get_bits(6, 1)
self.uaddr = self.packets[1].get_bits(0)
self.is_exception = self.btype > 0
elif self.msg_type == TVAL_INDBRSYNC:
self.icnt = self.packets[0].get_bits(8, -1)
self.btype = self.packets[0].get_bits(7, 1)
self.pc_target = self.packets[1].get_bits(0)
self.dcont = self.packets[0].get_bits(6, 1)
self.is_exception = self.btype > 0
elif self.msg_type == TVAL_SYNC:
self.icnt = self.packets[0].get_bits(7, -1)
self.dcont = self.packets[0].get_bits(6, 1)
self.pc_target = self.packets[1].get_bits(0)
elif self.msg_type == TVAL_CORR:
self.icnt = self.packets[0].get_bits(12, -1)
self.is_correlation = True
else:
raise NotImplementedError("Unknown message type (%d)" % self.msg_type)
def process_forward(self, cur_pc):
"""
Given the target PC known from the previous message, determine
the PC range corresponding to the current message.
:param cur_pc: previous known PC
:return: target PC after the current message
"""
assert not self.truncated
next_pc = cur_pc
if self.msg_type == TVAL_INDBR:
next_pc = cur_pc ^ self.uaddr
self.pc_target = next_pc
self.pc_start = cur_pc
self.pc_end = self.pc_start + self.icnt + 1
if self.msg_type == TVAL_INDBRSYNC:
next_pc = self.pc_target
self.pc_start = cur_pc
self.pc_end = cur_pc + self.icnt + 1
if self.msg_type == TVAL_SYNC:
next_pc = self.pc_target
self.pc_start = next_pc - self.icnt
self.pc_end = next_pc + 1
if self.msg_type == TVAL_CORR:
pass
return next_pc
def process_backward(self, cur_pc):
"""
Given the address of the PC known from the _next_ message, determine
the PC range corresponding to the current message.
:param cur_pc: next known PC
:return: target PC of the _previous_ message
"""
assert not self.truncated
# Backward pass is only used to resolve addresses of messages
# up to the first SYNC/INDBRSYNC message.
# SYNC/INDBRSYNC messages are only handled in the forward pass.
assert self.msg_type != TVAL_INDBRSYNC
assert self.msg_type != TVAL_SYNC
prev_pc = cur_pc
self.pc_target = cur_pc
if self.msg_type == TVAL_INDBR:
prev_pc ^= self.uaddr
self.pc_start = prev_pc
self.pc_end = prev_pc + self.icnt + 1
if self.msg_type == TVAL_CORR:
pass
return prev_pc
def __str__(self):
desc = "Unknown (%d)" % self.msg_type
extra = ""
if self.truncated:
desc = "Truncated"
if self.msg_type == TVAL_INDBR:
desc = "Indirect branch"
extra = ", icnt=%d, uaddr=0x%x, exc=%d" % (self.icnt, self.uaddr, self.is_exception)
if self.msg_type == TVAL_INDBRSYNC:
desc = "Indirect branch w/sync"
extra = ", icnt=%d, dcont=%d, exc=%d" % (self.icnt, self.dcont, self.is_exception)
if self.msg_type == TVAL_SYNC:
desc = "Synchronization"
extra = ", icnt=%d, dcont=%d" % (self.icnt, self.dcont)
if self.msg_type == TVAL_CORR:
desc = "Correlation"
extra = ", icnt=%d" % self.icnt
return "%s message, %d packets, PC range 0x%08x - 0x%08x, target PC 0x%08x" % (
desc, len(self.packets), self.pc_start, self.pc_end, self.pc_target) + extra
def load_messages(data):
"""
Decodes TRAX data and resolves PC ranges.
:param data: input data, bytes
:return: list of TraxMessage objects
"""
messages = []
packets = []
packet_start = 0
msg_cnt = 0
pkt_cnt = 0
# Iterate over the input data, splitting bytes into packets and messages
for i, b in enumerate(data):
if (b & MSEO_MSGEND) and not (b & MSEO_PKTEND):
raise AssertionError("Invalid MSEO bits in b=0x%x. Not a TRAX dump?" % b)
if b & MSEO_PKTEND:
pkt_cnt += 1
packets.append(TraxPacket(data[packet_start:i + 1], packet_start == 0))
packet_start = i + 1
if b & MSEO_MSGEND:
msg_cnt += 1
try:
messages.append(TraxMessage(packets, len(messages) == 0))
except NotImplementedError as e:
sys.stderr.write("Failed to parse message #%03d (at %d bytes): %s\n" % (msg_cnt, i, str(e)))
packets = []
# Resolve PC ranges of messages.
# Forward pass: skip messages until a message with known PC,
# i.e. a SYNC/INDBRSYNC message. Process all messages following it.
pc = 0
first_sync_index = -1
for i, m in enumerate(messages):
if pc == 0 and m.pc_target == 0:
continue
if first_sync_index < 0:
first_sync_index = i
pc = m.process_forward(pc)
# Now process the skipped messages in the reverse direction,
# starting from the first message with known PC.
pc = messages[first_sync_index].pc_start
for m in reversed(messages[0:first_sync_index]):
if m.truncated:
break
pc = m.process_backward(pc)
return messages
def parse_and_dump(filename, disassemble=WITH_GDB):
"""
Decode TRAX data from a file, print out the messages.
:param filename: file to load the dump from
:param disassemble: if True, print disassembly of PC ranges
"""
with open(filename, 'rb') as f:
data = bytearray(f.read())  # bytearray so that iteration yields ints on both Python 2 and 3
messages = load_messages(data)
sys.stderr.write("Loaded %d messages in %d bytes\n" % (len(messages), len(data)))
for i, m in enumerate(messages):
if m.truncated:
continue
print("%04d: %s" % (i, str(m)))
if m.is_exception:
print("*** Exception occurred ***")
if disassemble and WITH_GDB:
try:
gdb.execute("disassemble 0x%08x, 0x%08x" % (m.pc_start, m.pc_end)) # noqa: F821
except gdb.MemoryError: # noqa: F821
print("Failed to disassemble from 0x%08x to 0x%08x" % (m.pc_start, m.pc_end))
def main():
if sys.version_info[0] < 3:
print("WARNING: Support for Python 2 is deprecated and will be removed in future versions.", file=sys.stderr)
elif sys.version_info[0] == 3 and sys.version_info[1] < 6:
print("WARNING: Python 3 versions older than 3.6 are not supported.", file=sys.stderr)
if len(sys.argv) < 2:
sys.stderr.write("Usage: %s <dump_file>\n" % sys.argv[0])
raise SystemExit(1)
parse_and_dump(sys.argv[1])
if __name__ == "__main__" and not WITH_GDB:
main()

View File

@@ -12,13 +12,17 @@ Install Prerequisites
To compile with ESP-IDF you need to get the following packages:
- CentOS 7::
sudo yum -y update && sudo yum install git wget flex bison gperf python3
- Ubuntu and Debian::
sudo apt-get install gcc git wget make libncurses-dev flex bison gperf python python-pip python-setuptools python-serial python-cryptography python-future python-pyparsing libffi-dev libssl-dev
sudo apt-get install git wget libncurses-dev flex bison gperf python3 python3-pip python3-setuptools python3-serial python3-cryptography python3-future python3-pyparsing python3-pyelftools cmake ninja-build ccache libffi-dev libssl-dev
- Arch::
sudo pacman -S --needed gcc git make ncurses flex bison gperf python2-pyserial python2-cryptography python2-future python2-pyparsing
sudo pacman -Sy --needed gcc git make ncurses flex bison gperf python-pyserial python-cryptography python-future python-pyparsing python-pyelftools cmake ninja ccache dfu-util
.. note::
@@ -33,7 +37,7 @@ Compile the Toolchain from Source
- CentOS 7::
sudo yum install gawk gperf grep gettext ncurses-devel python python-devel automake bison flex texinfo help2man libtool
sudo yum install gawk gperf grep gettext ncurses-devel python3 python3-devel automake bison flex texinfo help2man libtool make
- Ubuntu pre-16.04::
@@ -49,7 +53,7 @@ Compile the Toolchain from Source
- Arch::
TODO
sudo pacman -Sy --needed python-pip
Create the working directory and go into it::
@@ -68,6 +72,11 @@ Build the toolchain::
Toolchain will be built in ``~/esp/crosstool-NG/builds/xtensa-esp32-elf``. Follow :ref:`instructions for standard setup <setup-linux-toolchain-add-it-to-path>` to add the toolchain to your ``PATH``.
Python 2 deprecation
====================
Python 2 reached its `end of life <https://www.python.org/doc/sunset-python-2/>`_ and support for it in ESP-IDF will be removed soon. Please install Python 3.6 or higher. Instructions for popular Linux distributions are listed above.
Next Steps
==========

View File

@@ -10,15 +10,17 @@ To compile with ESP-IDF you need to get the following packages:
- CentOS 7::
sudo yum install gcc git wget make ncurses-devel flex bison gperf python python2-cryptography
sudo yum -y update && sudo yum install git wget flex bison gperf python3
CentOS 7 is still supported but CentOS version 8 is recommended for a better user experience.
- Ubuntu and Debian::
sudo apt-get install gcc git wget make libncurses-dev flex bison gperf python python-pip python-setuptools python-serial python-cryptography python-future python-pyparsing libffi-dev libssl-dev
sudo apt-get install git wget flex bison gperf python3 python3-pip python3-setuptools libffi-dev libssl-dev
- Arch::
sudo pacman -S --needed gcc git make ncurses flex bison gperf python2-pyserial python2-cryptography python2-future python2-pyparsing
sudo pacman -S --needed gcc git make ncurses flex bison gperf
.. note::
@@ -84,7 +86,6 @@ Permission issues /dev/ttyUSB0
With some Linux distributions you may get the ``Failed to open port /dev/ttyUSB0`` error message when flashing the ESP32. :ref:`This can be solved by adding the current user to the dialout group<linux-dialout-group>`.
Arch Linux Users
----------------
@@ -99,6 +100,46 @@ Before installing these packages you might need to add the author's public key t
Alternatively, use crosstool-NG to compile a gdb that links against ncurses 6.
Setting up Python 3 as default for CentOS
-----------------------------------------
CentOS 7 and older provide Python 2.7 as the default interpreter.
Python 3 is recommended instead and can be installed on older distributions as follows; alternatively, consult the documentation of your operating system for other recommended ways to achieve this::
sudo yum -y update && sudo yum install python3 python3-pip python3-setuptools
Making Python 3 the default interpreter is possible by running::
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3 10 && alias pip=pip3
Setting up Python 3 as default for Ubuntu and Debian
----------------------------------------------------
Ubuntu (version 18.04 and older) and Debian (version 9 and older) still provide Python 2.7 as the default interpreter.
Python 3 is recommended instead and can be installed on older distributions as follows; alternatively, consult the documentation of your operating system for other recommended ways to achieve this::
sudo apt-get install python3 python3-pip python3-setuptools
Making Python 3 the default interpreter is possible by running::
sudo update-alternatives --install /usr/bin/python python /usr/bin/python3 10 && alias pip=pip3
.. note::
This is a system-wide change which may affect all applications.
Fixing broken pip on Ubuntu 16.04
=================================
The ``python3-pip`` package may be broken with no way to upgrade it.
In that case the package has to be removed and reinstalled manually using the `get-pip.py <https://bootstrap.pypa.io/get-pip.py>`_ script::
apt remove python3-pip python3-virtualenv; rm -r ~/.local
rm -r ~/.espressif/python_env && python get-pip.py
Python 2 deprecation
====================
Python 2 reached its `end of life <https://www.python.org/doc/sunset-python-2/>`_ and support for it in ESP-IDF will be removed soon. Please install Python 3.6 or higher. Instructions for popular Linux distributions are listed above.
Next Steps
==========

View File

@@ -65,6 +65,10 @@ Build the toolchain::
Toolchain will be built in ``~/esp/ctng-volume/crosstool-NG/builds/xtensa-esp32-elf``. Follow :ref:`instructions for standard setup <setup-macos-toolchain-add-it-to-path>` to add the toolchain to your ``PATH``.
Python 2 deprecation
====================
Python 2 reached its `end of life <https://www.python.org/doc/sunset-python-2/>`_ and support for it in ESP-IDF will be removed soon. Please install Python 3.6 or higher. Instructions for macOS are listed above.
Next Steps
==========

View File

@@ -42,6 +42,11 @@ Alternatively, you may create an alias for the above command. This way you can g
Then when you need the toolchain you can type ``get_esp32`` on the command line and the toolchain will be added to your ``PATH``.
Python 2 deprecation
====================
Python 2 reached its `end of life <https://www.python.org/doc/sunset-python-2/>`_ and support for it in ESP-IDF will be removed soon. Please install Python 3.6 or higher. Instructions for macOS are listed above.
Next Steps
==========

tools/ci/check_build_warnings.py (new executable file, 102 lines)
View File

@@ -0,0 +1,102 @@
#!/usr/bin/env python
# coding=utf-8
#
# CI script to check build logs for warnings.
# Reads the list of builds, in the format produced by find_apps.py or build_apps.py, and finds warnings in the
# log files for every build.
# Exits with a non-zero exit code if any warning is found.
import argparse
import logging
import os
import re
import sys
try:
from find_build_apps import BuildItem, setup_logging
except ImportError:
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
from find_build_apps import BuildItem, setup_logging
WARNING_REGEX = re.compile(r"(?:error|warning)[^\w]", re.MULTILINE | re.IGNORECASE)
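# The regex matches e.g. "warning:" or "error," (case-insensitively), but not
# "warnings", since a word character follows there.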
IGNORE_WARNS = [
re.compile(r_str) for r_str in [
r"library/error\.o",
r".*error.*\.c\.obj",
r"-Werror",
r"error\.d",
r"reassigning to symbol",
r"changes choice state",
r"crosstool_version_check\.cmake",
r"CryptographyDeprecationWarning",
r"Python 3 versions older than 3.6 are not supported."
]
]
def line_has_warnings(line): # type: (str) -> bool
if not WARNING_REGEX.search(line):
return False
has_warnings = True
for ignored in IGNORE_WARNS:
if re.search(ignored, line):
has_warnings = False
break
return has_warnings
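# Illustrative behaviour:
#   line_has_warnings("main.c:10:5: warning: unused variable 'x'")  -> True
#   line_has_warnings("checking -Werror: enabled")                  -> False (matches an ignore pattern)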
def main():
parser = argparse.ArgumentParser(description="ESP-IDF app builder")
parser.add_argument(
"-v",
"--verbose",
action="count",
help="Increase the logging level of the script. Can be specified multiple times.",
)
parser.add_argument(
"--log-file",
type=argparse.FileType("w"),
help="Write the script log to the specified file, instead of stderr",
)
parser.add_argument(
"build_list",
type=argparse.FileType("r"),
nargs="?",
default=sys.stdin,
help="Name of the file to read the list of builds from. If not specified, read from stdin.",
)
args = parser.parse_args()
setup_logging(args)
build_items = [BuildItem.from_json(line) for line in args.build_list]
if not build_items:
logging.warning("Empty build list")
raise SystemExit(0)
found_warnings = 0
for build_item in build_items:
if not build_item.build_log_path:
logging.debug("No log file for {}".format(build_item.work_dir))
continue
with open(build_item.build_log_path, "r") as log_file:
for line_no, line in enumerate(log_file):
if line_has_warnings(line):
logging.error("Issue in app {}, config {}:".format(build_item.app_dir, build_item.config_name))
logging.error(line.rstrip("\n"))
logging.error("See {}:{} for details".format(os.path.basename(build_item.build_log_path),
line_no + 1))
found_warnings += 1
break
if found_warnings:
logging.error("Checked {} builds, found {} warnings".format(len(build_items), found_warnings))
raise SystemExit(1)
logging.info("No warnings found")
if __name__ == "__main__":
main()

tools/ci/config/build.yml (new file, 502 lines)
View File

@@ -0,0 +1,502 @@
.build_template:
stage: build
image: $CI_DOCKER_REGISTRY/esp32-ci-env$BOT_DOCKER_IMAGE_TAG
tags:
- build
variables:
SIZE_INFO_LOCATION: "$CI_PROJECT_DIR/size_info.txt"
dependencies: []
.build_template_app_template:
extends:
- .build_template
- .rules:labels:build
variables:
LOG_PATH: "${CI_PROJECT_DIR}/log_template_app"
BUILD_PATH: "${CI_PROJECT_DIR}/build_template_app"
BUILD_DIR: "@t/@w"
BUILD_LOG_MAKE: "${LOG_PATH}/make_@t_@w.txt"
BUILD_LOG_CMAKE: "${LOG_PATH}/cmake_@t_@w.txt"
BUILD_COMMAND_ARGS: ""
artifacts:
when: always
paths:
- log_template_app/*
- size_info.txt
- build_template_app/**/size.json
script:
# Set the variable for 'esp-idf-template' testing
- ESP_IDF_TEMPLATE_GIT=${ESP_IDF_TEMPLATE_GIT:-"https://github.com/espressif/esp-idf-template.git"}
- retry_failed git clone ${ESP_IDF_TEMPLATE_GIT}
# Try to use the same branch name for esp-idf-template that we're
# using on esp-idf. If it doesn't exist then just stick to the default branch
- python $CHECKOUT_REF_SCRIPT esp-idf-template esp-idf-template
- export PATH="$IDF_PATH/tools:$PATH"
- export EXTRA_CFLAGS=${PEDANTIC_CFLAGS}
- export EXTRA_CXXFLAGS=${PEDANTIC_CXXFLAGS}
# Only do the default cmake build for each target; the remaining parts are done in the build_template_app job
- tools/ci/build_template_app.sh ${BUILD_COMMAND_ARGS}
# Check if there are any stray printf/ets_printf references in WiFi libs
- cd components/esp_wifi/lib
- for dir in esp32 esp32s2; do test $(xtensa-esp32-elf-nm $dir/*.a | grep -w printf | wc -l) -eq 0; done;
- for dir in esp32 esp32s2; do test $(xtensa-esp32-elf-nm $dir/*.a | grep -w ets_printf | wc -l) -eq 0; done;
# build-related-pre-check-jobs ------------------------------------------------
# Build at least one project for each target at earliest stage to reduce build cost for obvious failing commits
fast_template_app:
extends:
- .build_template_app_template
- .rules:build_tests:target_test-weekend_test
stage: pre_check
variables:
BUILD_COMMAND_ARGS: "-p"
check_docs_gh_links:
extends: .build_docs_template
stage: pre_check
variables:
SUBMODULES_TO_FETCH: "none"
script:
- cd docs
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 pip install -r requirements.txt
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ./build_docs.py gh-linkcheck
#------------------------------------------------------------------------------
.build_ssc_template:
extends:
- .build_template
- .rules:build_tests:integration_test
artifacts:
paths:
- SSC/ssc_bin
expire_in: 1 week
script:
- retry_failed git clone $SSC_REPOSITORY
- python $CHECKOUT_REF_SCRIPT SSC SSC
- cd SSC
- MAKEFLAGS= ./ci_build_ssc.sh $TARGET_NAME
build_ssc_esp32:
extends: .build_ssc_template
parallel: 3
variables:
TARGET_NAME: "ESP32"
build_ssc_esp32s2:
extends: .build_ssc_template
parallel: 2
variables:
TARGET_NAME: "ESP32S2"
.build_esp_idf_tests_cmake:
extends:
- .build_template
- .rules:build_tests:unit_test
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- job: fast_template_app
artifacts: false
- scan_tests
artifacts:
paths:
- tools/unit-test-app/output/${IDF_TARGET}
- tools/unit-test-app/builds/*.json
- tools/unit-test-app/builds/${IDF_TARGET}/*/size.json
- components/idf_test/unit_test/*.yml
- $LOG_PATH
- $SIZE_INFO_LOCATION
when: always
expire_in: 4 days
variables:
LOG_PATH: "$CI_PROJECT_DIR/log_ut_cmake"
BUILD_PATH: ${CI_PROJECT_DIR}/tools/unit-test-app/builds
OUTPUT_PATH: ${CI_PROJECT_DIR}/tools/unit-test-app/output
BUILD_SYSTEM: "cmake"
TEST_TYPE: "unit_test"
PYTHON_VER: 3
script:
# RISC-V toolchain is optional but ULP may need it, so install:
- $IDF_PATH/tools/idf_tools.py install riscv-none-embed-gcc
- . $IDF_PATH/export.sh
- ${IDF_PATH}/tools/ci/find_apps_build_apps.sh
- cd $CI_PROJECT_DIR/tools/unit-test-app
- python tools/UnitTestParser.py ${BUILD_PATH}
build_esp_idf_tests_cmake_esp32:
extends: .build_esp_idf_tests_cmake
variables:
IDF_TARGET: esp32
build_esp_idf_tests_cmake_esp32s2:
extends: .build_esp_idf_tests_cmake
variables:
IDF_TARGET: esp32s2
build_esp_idf_tests_cmake_esp32s3:
extends: .build_esp_idf_tests_cmake
variables:
IDF_TARGET: esp32s3
.build_examples_template:
extends:
- .build_template
- .rules:build_tests:example_test-weekend_test
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- job: fast_template_app
artifacts: false
- scan_tests
variables:
TEST_PREFIX: examples
TEST_RELATIVE_DIR: examples
SCAN_TEST_JSON: ${CI_PROJECT_DIR}/${TEST_RELATIVE_DIR}/test_configs/scan_${IDF_TARGET}_${BUILD_SYSTEM}.json
TEST_TYPE: example_test
LOG_PATH: ${CI_PROJECT_DIR}/log_${TEST_PREFIX}
BUILD_PATH: ${CI_PROJECT_DIR}/build_${TEST_PREFIX}
PYTHON_VER: 3
script:
# RISC-V toolchain is optional but ULP may need it, so install:
- $IDF_PATH/tools/idf_tools.py install riscv-none-embed-gcc
- . $IDF_PATH/export.sh
# it's not possible to build 100% out-of-tree and have the "artifacts"
# mechanism work, but this is the next best thing
- ${IDF_PATH}/tools/ci/find_apps_build_apps.sh
build_examples_make:
extends:
- .build_examples_template
# This is a workaround for a rarely encountered issue with building examples in CI.
# Probably related to building of Kconfig in 'make clean' stage
retry: 1
parallel: 8
artifacts:
paths:
- $LOG_PATH
- build_${TEST_PREFIX}/*/*/*/build/size.json
- $SIZE_INFO_LOCATION
when: always
expire_in: 4 days
variables:
BUILD_SYSTEM: make
IDF_TARGET: esp32 # currently we only support esp32
# same as above, but for CMake
.build_examples_cmake:
extends: .build_examples_template
artifacts:
paths:
- build_${TEST_PREFIX}/list.json
- build_${TEST_PREFIX}/list_job_*.json
- build_${TEST_PREFIX}/*/*/*/sdkconfig
- build_${TEST_PREFIX}/*/*/*/build/size.json
- build_${TEST_PREFIX}/*/*/*/build/*.bin
- build_${TEST_PREFIX}/*/*/*/build/*.elf
- build_${TEST_PREFIX}/*/*/*/build/*.map
- build_${TEST_PREFIX}/*/*/*/build/flasher_args.json
- build_${TEST_PREFIX}/*/*/*/build/bootloader/*.bin
- build_${TEST_PREFIX}/*/*/*/build/partition_table/*.bin
- $LOG_PATH
- $SIZE_INFO_LOCATION
when: always
expire_in: 4 days
variables:
BUILD_SYSTEM: cmake
build_examples_cmake_esp32:
extends: .build_examples_cmake
parallel: 10
variables:
IDF_TARGET: esp32
build_examples_cmake_esp32s2:
extends: .build_examples_cmake
parallel: 8
variables:
IDF_TARGET: esp32s2
.build_test_apps:
extends:
- .build_examples_cmake
- .rules:build_tests:custom_test-weekend_test
variables:
TEST_PREFIX: test_apps
TEST_RELATIVE_DIR: tools/test_apps
TEST_TYPE: custom_test
script:
- ${IDF_PATH}/tools/ci/find_apps_build_apps.sh
build_test_apps_esp32:
extends: .build_test_apps
parallel: 8
variables:
IDF_TARGET: esp32
build_test_apps_esp32s2:
extends: .build_test_apps
parallel: 8
variables:
IDF_TARGET: esp32s2
build_test_apps_esp32s3:
extends: .build_test_apps
parallel: 8
variables:
IDF_TARGET: esp32s3
.build_component_ut:
extends:
- .build_test_apps
- .rules:build_tests:unit_test
variables:
TEST_PREFIX: component_ut
TEST_RELATIVE_DIR: component_ut
build_component_ut_esp32:
extends: .build_component_ut
variables:
IDF_TARGET: esp32
build_component_ut_esp32s2:
extends: .build_component_ut
variables:
IDF_TARGET: esp32s2
.build_docs_template:
stage: build
image: $ESP_IDF_DOC_ENV_IMAGE
tags:
- build_docs
script:
- cd docs
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 pip install -r requirements.txt
- ${IDF_PATH}/tools/ci/multirun_with_pyenv.sh -p 3.6.10 ./build_docs.py -bs $DOC_BUILDERS -l $DOCLANG -t $DOCTGT build
parallel:
matrix:
- DOCLANG: [ "en", "zh_CN" ]
DOCTGT: [ "esp32", "esp32s2" ]
build_docs_html:
extends:
- .build_docs_template
- .rules:labels:build_docs
artifacts:
when: always
paths:
- docs/_build/*/*/*.txt
- docs/_build/*/*/html/*
expire_in: 4 days
variables:
DOC_BUILDERS: "html"
build_docs_pdf:
extends:
- .build_docs_template
- .rules:labels:build_docs-slim
artifacts:
when: always
paths:
- docs/_build/*/*/latex/*
expire_in: 4 days
variables:
DOC_BUILDERS: "latex"
.test_build_system_template:
extends:
- .build_template
- .rules:build_tests:weekend_test
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- job: fast_template_app
artifacts: false
script:
- ${IDF_PATH}/tools/ci/test_configure_ci_environment.sh
- rm -rf test_build_system
- mkdir test_build_system
- cd test_build_system
- ${IDF_PATH}/tools/ci/${SHELL_TEST_SCRIPT}
test_build_system:
extends: .test_build_system_template
variables:
SHELL_TEST_SCRIPT: test_build_system.sh
test_build_system_cmake:
extends: .test_build_system_template
variables:
SHELL_TEST_SCRIPT: test_build_system_cmake.sh
test_build_system_cmake_macos:
extends:
- .test_build_system_template
- .before_script_macos
- .rules:os:mac_os
tags:
- macos_shell
variables:
SHELL_TEST_SCRIPT: test_build_system_cmake.sh
build_docker:
extends:
- .before_script_slim
- .rules:protected-schedule
stage: build
image: espressif/docker-builder:1
tags:
- build_docker_amd64_brno
variables:
DOCKER_TMP_IMAGE_NAME: "idf_tmp_image"
script:
- export LOCAL_CI_REPOSITORY_URL=$CI_REPOSITORY_URL
- if [ -n "$LOCAL_GITLAB_HTTPS_HOST" ]; then export LOCAL_CI_REPOSITORY_URL="https://gitlab-ci-token:${CI_JOB_TOKEN}@${LOCAL_GITLAB_HTTPS_HOST}/${CI_PROJECT_PATH}"; fi
- echo "Using repository at $LOCAL_CI_REPOSITORY_URL"
- export DOCKER_BUILD_ARGS="--build-arg IDF_CLONE_URL=${LOCAL_CI_REPOSITORY_URL} --build-arg IDF_CLONE_BRANCH_OR_TAG=${CI_COMMIT_REF_NAME} --build-arg IDF_CHECKOUT_REF=${CI_COMMIT_TAG:-$CI_COMMIT_SHA}"
# Build
- docker build --tag ${DOCKER_TMP_IMAGE_NAME} ${DOCKER_BUILD_ARGS} tools/docker/
# We can't mount $PWD/examples/get-started/blink into the container, see https://gitlab.com/gitlab-org/gitlab-ce/issues/41227.
# The workaround mentioned there works, but leaves around directories which need to be cleaned up manually.
# Therefore, build a copy of the example located inside the container.
- docker run --rm --workdir /opt/esp/idf/examples/get-started/blink ${DOCKER_TMP_IMAGE_NAME} idf.py build
.test-on-windows:
extends:
- .before_script_slim
- .rules:protected-schedule
stage: build
image: $CI_DOCKER_REGISTRY/esp32-toolchain-win-cross
tags:
- build
script:
- cd $TEST_DIR
- mkdir build
- cd build
- cmake -DCMAKE_TOOLCHAIN_FILE=../toolchain-i686-w64-mingw32.cmake -DCMAKE_BUILD_TYPE=Release ..
- cmake --build .
build_idf_exe:
extends: .test-on-windows
artifacts:
paths:
- tools/windows/idf_exe/build/idf-exe-v*.zip
expire_in: 4 days
variables:
TEST_DIR: tools/windows/idf_exe
build_cmdlinerunner:
extends: .test-on-windows
artifacts:
paths:
- tools/windows/tool_setup/cmdlinerunner/build/cmdlinerunner.dll
expire_in: 4 days
variables:
TEST_DIR: tools/windows/tool_setup/cmdlinerunner
build_installer:
extends:
- .before_script_slim
- .rules:protected-schedule
# using a different stage here to be able to use artifacts from build_cmdlinerunner job
stage: host_test
image: $CI_DOCKER_REGISTRY/wine-innosetup:1
tags:
- build
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- build_cmdlinerunner
script:
- cd tools/windows/tool_setup/
- ./build_installer.sh
# This job builds template app with permutations of targets and optimization levels
build_template_app:
needs:
- job: fast_template_app
artifacts: false
extends: .build_template_app_template
# Sonarqube related jobs put here for this reason:
# Here we have two jobs. code_quality_check and code_quality_report.
#
# code_quality_check will analyze the code changes between your MR and
# code repo stored in sonarqube server. The analysis result is only shown in
# the comments under this MR and won't be transferred to the server.
#
# code_quality_report will analyze and transfer both of the newly added code
# and the analysis result to the server.
#
# Put at the front to ensure that the newly merged code is stored in the
# sonarqube server ASAP, in order to avoid reporting unrelated code issues
.sonar_scan_template:
stage: build
image:
name: $CI_DOCKER_REGISTRY/sonarqube-scanner:2
before_script:
- source tools/ci/utils.sh
- export PYTHONPATH="$CI_PROJECT_DIR/tools:$CI_PROJECT_DIR/tools/ci/python_packages:$PYTHONPATH"
- fetch_submodules
# Exclude the submodules; all paths end with /**
- export SUBMODULES=$(get_all_submodules)
# get all exclude paths specified in tools/ci/sonar_exclude_list.txt | ignore lines starting with '#' | xargs | replace every <space> with <comma>
- export CUSTOM_EXCLUDES=$(cat $CI_PROJECT_DIR/tools/ci/sonar_exclude_list.txt | grep -v '^#' | xargs | sed -e 's/ /,/g')
# Exclude the report dir
- export EXCLUSIONS="$SUBMODULES,$REPORT_DIR/**,docs/_static/**,**/*.png,**/*.jpg"
- python $NORMALIZE_CLANGTIDY_PY $CI_PROJECT_DIR/$REPORT_DIR/warnings.txt $CI_PROJECT_DIR/$REPORT_DIR/clang_tidy_report.txt $CI_PROJECT_DIR
variables:
GIT_DEPTH: 0
NORMALIZE_CLANGTIDY_PY: $CI_PROJECT_DIR/tools/ci/normalize_clangtidy_path.py
REPORT_DIR: examples/get-started/hello_world/tidybuild/report
tags:
- host_test
dependencies: # set dependencies to null to avoid missing artifacts issue
needs:
- clang_tidy_check_regular
code_quality_check:
extends:
- .sonar_scan_template
- .rules:trigger
allow_failure: true
script:
- export CI_MR_IID=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py id ${CI_COMMIT_BRANCH})
- export CI_MR_COMMITS=$(python ${CI_PROJECT_DIR}/tools/ci/ci_get_mr_info.py commits ${CI_COMMIT_BRANCH} | tr '\n' ',')
# test if this branch has a merge request; if not, exit 0
- test -n "$CI_MR_IID" || exit 0
- test -n "$CI_MR_COMMITS" || exit 0
- sonar-scanner
-Dsonar.analysis.mode=preview
-Dsonar.host.url=$SONAR_HOST_URL
-Dsonar.login=$SONAR_LOGIN
-Dsonar.sources=$CI_PROJECT_DIR
-Dsonar.sourceEncoding=UTF-8
-Dsonar.projectKey=esp-idf
-Dsonar.projectBaseDir=$CI_PROJECT_DIR
-Dsonar.exclusions=$EXCLUSIONS
-Dsonar.gitlab.project_id=$CI_PROJECT_ID
-Dsonar.gitlab.commit_sha=$CI_MR_COMMITS
-Dsonar.gitlab.ref_name=$CI_COMMIT_REF_NAME
-Dsonar.cxx.clangtidy.reportPath=$REPORT_DIR/clang_tidy_report.txt
-Dsonar.cxx.includeDirectories=components,/usr/include
-Dsonar.python.pylint_config=.pylintrc
-Dsonar.gitlab.ci_merge_request_iid=$CI_MR_IID
-Dsonar.gitlab.merge_request_discussion=true
-Dsonar.branch.name=$CI_COMMIT_REF_NAME
code_quality_report:
extends:
- .sonar_scan_template
- .rules:protected-schedule
script:
- sonar-scanner
-Dsonar.host.url=$SONAR_HOST_URL
-Dsonar.login=$SONAR_LOGIN
-Dsonar.sources=$CI_PROJECT_DIR
-Dsonar.sourceEncoding=UTF-8
-Dsonar.projectKey=esp-idf
-Dsonar.projectBaseDir=$CI_PROJECT_DIR
-Dsonar.exclusions=$EXCLUSIONS
-Dsonar.gitlab.project_id=$CI_PROJECT_ID
-Dsonar.gitlab.commit_sha=$CI_COMMIT_SHA
-Dsonar.gitlab.ref_name=$CI_COMMIT_REF_NAME
-Dsonar.cxx.clangtidy.reportPath=$REPORT_DIR/clang_tidy_report.txt
-Dsonar.cxx.includeDirectories=components,/usr/include
-Dsonar.python.pylint_config=.pylintrc
-Dsonar.branch.name=$CI_COMMIT_REF_NAME

View File

@@ -108,6 +108,12 @@ def check_environment():
print("Setting IDF_PATH environment variable: %s" % detected_idf_path)
os.environ["IDF_PATH"] = detected_idf_path
# check Python version
if sys.version_info[0] < 3:
print_warning("WARNING: Support for Python 2 is deprecated and will be removed in future versions.")
elif sys.version_info[0] == 3 and sys.version_info[1] < 6:
print_warning("WARNING: Python 3 versions older than 3.6 are not supported.")
# check Python dependencies
print("Checking Python dependencies...")
try: