Mirror of https://github.com/espressif/esp-idf.git (synced 2024-10-05 20:47:46 -04:00)

commit 7411321603 (parent 9ae67e0373)

    docs: update to use esp-docs

.flake8 (4 lines changed)
@@ -164,3 +164,7 @@ exclude =
     components/wifi_provisioning/python/wifi_constants_pb2.py,
     components/esp_local_ctrl/python/esp_local_ctrl_pb2.py,
     examples/provisioning/legacy/custom_config/components/custom_provisioning/python/custom_config_pb2.py,
+
+per-file-ignores =
+    # Sphinx conf.py files use star imports to setup config variables
+    docs/conf_common.py: F405
@@ -48,7 +48,7 @@ variables:

   # Docker images
   BOT_DOCKER_IMAGE_TAG: ":latest"
-  ESP_IDF_DOC_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-doc-env:v4.4-1-v2"
+  ESP_IDF_DOC_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-idf-doc-env:v4.4-1-v3"
   ESP_ENV_IMAGE: "$CI_DOCKER_REGISTRY/esp-env:v4.4-1"
   AFL_FUZZER_TEST_IMAGE: "$CI_DOCKER_REGISTRY/afl-fuzzer-test:v4.4-1-1"
   CLANG_STATIC_ANALYSIS_IMAGE: "${CI_DOCKER_REGISTRY}/clang-static-analysis:v4.4-1-1"

@@ -62,8 +62,8 @@ check_docs_lang_sync:
   dependencies: []
   script:
     - cd docs
-    - python -m pip install -r requirements.txt
-    - python ./build_docs.py -bs $DOC_BUILDERS -l $DOCLANG -t $DOCTGT build
+    - pip install -r requirements.txt
+    - build-docs -t $DOCTGT -bs $DOC_BUILDERS -l $DOCLANG build
   parallel:
     matrix:
       - DOCLANG: ["en", "zh_CN"]
@@ -78,8 +78,8 @@ check_docs_gh_links:
     - .doc-rules:build:docs
   script:
     - cd docs
-    - python -m pip install -r requirements.txt
-    - python ./build_docs.py gh-linkcheck
+    - pip install -r requirements.txt
+    - build-docs gh-linkcheck

# stage: build_doc
# Add this stage to let the build_docs job run in parallel with build
@@ -168,7 +168,8 @@ build_docs_pdf:
   script:
     - add_doc_server_ssh_keys $DOCS_DEPLOY_PRIVATEKEY $DOCS_DEPLOY_SERVER $DOCS_DEPLOY_SERVER_USER
     - export GIT_VER=$(git describe --always)
-    - python ${IDF_PATH}/tools/ci/deploy_docs.py
+    - pip install -r docs/requirements.txt
+    - deploy-docs

# stage: test_deploy
deploy_docs_preview:
@@ -227,6 +228,6 @@ check_doc_links:
   allow_failure: true
   script:
     - cd docs
-    - python -m pip install -r requirements.txt
+    - pip install -r requirements.txt
     # At the moment this check will always fail due to multiple known limitations, ignore result
-    - python ./build_docs.py -l $DOCLANG -t $DOCTGT linkcheck || { echo "THERE ARE ISSUES DUE TO KNOWN LIMITATIONS, PLEASE FIX THEM. Nowadays we're ignored them to pass pipeline."; true; }
+    - build-docs -t $DOCTGT -l $DOCLANG linkcheck || { echo "THERE ARE ISSUES DUE TO KNOWN LIMITATIONS, PLEASE FIX THEM. Nowadays we're ignored them to pass pipeline."; true; }
@@ -295,22 +295,6 @@ test_mkuf2:
     - cd ${IDF_PATH}/tools/test_mkuf2
     - ./test_mkuf2.py

-test_docs:
-  extends: .host_test_template
-  image: $ESP_IDF_DOC_ENV_IMAGE
-  variables:
-    PYTHON_VER: 3.6.13
-  artifacts:
-    when: on_failure
-    paths:
-      - docs/test/_build/*/*/*/html/*
-    expire_in: 1 week
-  script:
-    - cd ${IDF_PATH}/docs/test
-    - python -m pip install -r ${IDF_PATH}/docs/requirements.txt
-    - ./test_docs.py
-    - ./test_sphinx_idf_extensions.py
-
test_autocomplete:
  extends: .host_test_template
  image: $CI_DOCKER_REGISTRY/linux-shells:1
@@ -56,7 +56,6 @@ Related Documents
     style-guide
     install-pre-commit-hook
     documenting-code
-    add-ons-reference
     creating-examples
     ../api-reference/template
     contributor-agreement
@@ -15,6 +15,4 @@ The above URLs are all for the master branch latest version. Click the drop-down

# Building Documentation

-The documentation build requirements Python 3 and running the wrapper `./build_docs.py` tool. Running `./build_docs.py --help` will give a summary of available options.
-
-See [Documenting Code](https://docs.espressif.com/projects/esp-idf/en/latest/contribute/documenting-code.html) in the ESP-IDF Programming Guide for full information about setting up to build the docs, and how to use the `./build_docs.py` tool.
+The documentation is built using the python package `esp-docs`, which can be installed by running `pip install esp-docs`. Running `build-docs --help` will give a summary of available options. For more information see the `esp-docs` documentation at https://github.com/espressif/esp-docs/blob/master/README.md
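Taken together, the README and CI changes above replace the in-tree `./build_docs.py` wrapper with the `build-docs` command from the `esp-docs` package. The commands below are a minimal sketch of the new local workflow, assuming `docs/requirements.txt` now pulls in `esp-docs` (as the updated CI jobs suggest); the `esp32`/`en` values are illustrative examples, and the flags are the ones used in the CI scripts above.

```sh
# Install the documentation tooling (provides the build-docs and deploy-docs commands)
pip install -r docs/requirements.txt   # or simply: pip install esp-docs

# Build the HTML docs for one target/language combination (example values)
cd docs
build-docs -t esp32 -l en build

# Check for hardcoded GitHub links and run the (currently lenient) link check
build-docs gh-linkcheck
build-docs -t esp32 -l en linkcheck
```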
@@ -1,472 +0,0 @@
-#!/usr/bin/env python3
-# coding=utf-8
-#
-# Top-level docs builder
-#
-# This is just a front-end to sphinx-build that can call it multiple times for different language/target combinations
-#
-# Will build out to _build/LANG/TARGET by default
-#
-# Specific custom docs functionality should be added in conf_common.py or in a Sphinx extension, not here.
-#
-# Copyright 2020 Espressif Systems (Shanghai) PTE LTD
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from __future__ import print_function
-
-import argparse
-import locale
-import math
-import multiprocessing
-import os
-import os.path
-import re
-import subprocess
-import sys
-from collections import namedtuple
-
-from packaging import version
-
-LANGUAGES = ['en', 'zh_CN']
-TARGETS = ['esp32', 'esp32s2', 'esp32s3', 'esp32c3']
-
-SPHINX_WARN_LOG = 'sphinx-warning-log.txt'
-SPHINX_SANITIZED_LOG = 'sphinx-warning-log-sanitized.txt'
-SPHINX_KNOWN_WARNINGS = os.path.join(os.environ['IDF_PATH'], 'docs', 'sphinx-known-warnings.txt')
-
-DXG_WARN_LOG = 'doxygen-warning-log.txt'
-DXG_SANITIZED_LOG = 'doxygen-warning-log-sanitized.txt'
-DXG_KNOWN_WARNINGS = os.path.join(os.environ['IDF_PATH'], 'docs', 'doxygen-known-warnings.txt')
-DXG_CI_VERSION = version.parse('1.8.11')
-
-LogMessage = namedtuple('LogMessage', 'original_text sanitized_text')
-
-languages = LANGUAGES
-targets = TARGETS
-
-
-def main():
-    # check Python dependencies for docs
-    try:
-        subprocess.check_call([sys.executable,
-                               os.path.join(os.environ['IDF_PATH'],
-                                            'tools',
-                                            'check_python_dependencies.py'),
-                               '-r',
-                               '{}/docs/requirements.txt'.format(os.environ['IDF_PATH'])
-                               ])
-    except subprocess.CalledProcessError:
-        raise SystemExit(2)  # stdout will already have these errors
-
-    # This is not the only way to make sure that all files opened by Python are treated as UTF-8, but the other way is passing encoding='utf-8' to all open()
-    # functions and this way makes Python 2 compatibility really tough if there is any code that assumes text files contain strings (kconfiglib assumes this).
-    # The reason for that is that you need to import io.open() to support the encoding argument on Python 2, and this function always uses Py2's unicode
-    # type not the str type.
-    if ('UTF-8' not in locale.getlocale()) and ('utf8' not in locale.getlocale()):
-        raise RuntimeError("build_docs.py requires the default locale's encoding to be UTF-8.\n" +
-                           ' - Linux. Setting environment variable LC_ALL=C.UTF-8 when running build_docs.py may be ' +
-                           'enough to fix this.\n'
-                           ' - Windows. Possible solution for the Windows 10 starting version 1803. Go to ' +
-                           'Control Panel->Clock and Region->Region->Administrative->Change system locale...; ' +
-                           'Check `Beta: Use Unicode UTF-8 for worldwide language support` and reboot')
-
-    parser = argparse.ArgumentParser(description='build_docs.py: Build IDF docs', prog='build_docs.py')
-
-    parser.add_argument('--language', '-l', choices=LANGUAGES, required=False)
-    parser.add_argument('--target', '-t', choices=TARGETS, required=False)
-    parser.add_argument('--build-dir', '-b', type=str, default='_build')
-    parser.add_argument('--source-dir', '-s', type=str, default='')
-    parser.add_argument('--builders', '-bs', nargs='+', type=str, default=['html'],
-                        help='List of builders for Sphinx, e.g. html or latex, for latex a PDF is also generated')
-    parser.add_argument('--sphinx-parallel-builds', '-p', choices=['auto'] + [str(x) for x in range(8)],
-                        help='Parallel Sphinx builds - number of independent Sphinx builds to run', default='auto')
-    parser.add_argument('--sphinx-parallel-jobs', '-j', choices=['auto'] + [str(x) for x in range(8)],
-                        help='Sphinx parallel jobs argument - number of threads for each Sphinx build to use', default='1')
-    parser.add_argument('--input-docs', '-i', nargs='+', default=[''],
-                        help='List of documents to build relative to the doc base folder, i.e. the language folder. Defaults to all documents')
-
-    action_parsers = parser.add_subparsers(dest='action')
-
-    build_parser = action_parsers.add_parser('build', help='Build documentation')
-    build_parser.add_argument('--check-warnings-only', '-w', action='store_true')
-    build_parser.add_argument('--fast-build', '-f', action='store_true', help='Skips including doxygen generated APIs into the Sphinx build')
-
-    action_parsers.add_parser('linkcheck', help='Check links (a current IDF revision should be uploaded to GitHub)')
-
-    action_parsers.add_parser('gh-linkcheck', help='Checking for hardcoded GitHub links')
-
-    args = parser.parse_args()
-
-    global languages
-    if args.language is None:
-        print('Building all languages')
-        languages = LANGUAGES
-    else:
-        languages = [args.language]
-
-    global targets
-    if args.target is None:
-        print('Building all targets')
-        targets = TARGETS
-    else:
-        targets = [args.target]
-
-    if args.action == 'build' or args.action is None:
-        if args.action is None:
-            args.check_warnings_only = False
-            args.fast_build = False
-
-        if args.fast_build:
-            os.environ['DOCS_FAST_BUILD'] = 'y'
-
-        sys.exit(action_build(args))
-
-    if args.action == 'linkcheck':
-        sys.exit(action_linkcheck(args))
-
-    if args.action == 'gh-linkcheck':
-        sys.exit(action_gh_linkcheck(args))
-
-
-def parallel_call(args, callback):
-    num_sphinx_builds = len(languages) * len(targets)
-    num_cpus = multiprocessing.cpu_count()
-
-    if args.sphinx_parallel_builds == 'auto':
-        # at most one sphinx build per CPU, up to the number of CPUs
-        args.sphinx_parallel_builds = min(num_sphinx_builds, num_cpus)
-    else:
-        args.sphinx_parallel_builds = int(args.sphinx_parallel_builds)
-
-    # Force -j1 because sphinx works incorrectly
-    args.sphinx_parallel_jobs = 1
-    if args.sphinx_parallel_jobs == 'auto':
-        # N CPUs per build job, rounded up - (maybe smarter to round down to avoid contention, idk)
-        args.sphinx_parallel_jobs = int(math.ceil(num_cpus / args.sphinx_parallel_builds))
-    else:
-        args.sphinx_parallel_jobs = int(args.sphinx_parallel_jobs)
-
-    print('Will use %d parallel builds and %d jobs per build' % (args.sphinx_parallel_builds, args.sphinx_parallel_jobs))
-    pool = multiprocessing.Pool(args.sphinx_parallel_builds)
-
-    if args.sphinx_parallel_jobs > 1:
-        print('WARNING: Sphinx parallel jobs currently produce incorrect docs output with Sphinx 1.8.5')
-
-    # make a list of all combinations of build_docs() args as tuples
-    #
-    # there's probably a fancy way to do this with itertools but this way is actually readable
-    entries = []
-    for target in targets:
-        for language in languages:
-            build_dir = os.path.realpath(os.path.join(args.build_dir, language, target))
-            source_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)), args.source_dir, language)
-
-            entries.append((language, target, build_dir, source_dir, args.sphinx_parallel_jobs, args.builders, args.input_docs))
-
-    print(entries)
-    errcodes = pool.map(callback, entries)
-    print(errcodes)
-
-    is_error = False
-    for ret in errcodes:
-        if ret != 0:
-            print('\nThe following language/target combinations failed to build:')
-            is_error = True
-            break
-    if is_error:
-        for ret, entry in zip(errcodes, entries):
-            if ret != 0:
-                print('language: %s, target: %s, errcode: %d' % (entry[0], entry[1], ret))
-        # Don't re-throw real error code from each parallel process
-        return 1
-    else:
-        return 0
-
-
-def sphinx_call(language, target, build_dir, src_dir, sphinx_parallel_jobs, buildername, input_docs):
-    # Note: because this runs in a multiprocessing Process, everything which happens here should be isolated to a single process
-    # (ie it doesn't matter if Sphinx is using global variables, as they're it's own copy of the global variables)
-
-    # wrap stdout & stderr in a way that lets us see which build_docs instance they come from
-    #
-    # this doesn't apply to subprocesses, they write to OS stdout & stderr so no prefix appears
-    prefix = '%s/%s: ' % (language, target)
-
-    print('Building in build_dir: %s' % (build_dir))
-    try:
-        os.makedirs(build_dir)
-    except OSError:
-        pass
-
-    environ = {}
-    environ.update(os.environ)
-    environ['BUILDDIR'] = build_dir
-
-    args = [sys.executable, '-u', '-m', 'sphinx.cmd.build',
-            '-j', str(sphinx_parallel_jobs),
-            '-b', buildername,
-            '-d', os.path.join(build_dir, 'doctrees'),
-            '-w', SPHINX_WARN_LOG,
-            '-t', target,
-            '-D', 'idf_target={}'.format(target),
-            '-D', 'docs_to_build={}'.format(','. join(input_docs)),
-            src_dir,
-            os.path.join(build_dir, buildername)  # build directory
-            ]
-
-    saved_cwd = os.getcwd()
-    os.chdir(build_dir)  # also run sphinx in the build directory
-    print("Running '%s'" % (' '.join(args)))
-
-    ret = 1
-    try:
-        # Note: we can't call sphinx.cmd.build.main() here as multiprocessing doesn't est >1 layer deep
-        # and sphinx.cmd.build() also does a lot of work in the calling thread, especially for j ==1,
-        # so using a Pyhthon thread for this part is a poor option (GIL)
-        p = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        for c in iter(lambda: p.stdout.readline(), b''):
-            sys.stdout.write(prefix)
-            sys.stdout.write(c.decode('utf-8'))
-        ret = p.wait()
-        assert (ret is not None)
-        sys.stdout.flush()
-    except KeyboardInterrupt:  # this seems to be the only way to get Ctrl-C to kill everything?
-        p.kill()
-        os.chdir(saved_cwd)
-        return 130  # FIXME It doesn't return this errorcode, why? Just prints stacktrace
-    os.chdir(saved_cwd)
-    return ret
-
-
-def action_build(args):
-    if not args.check_warnings_only:
-        ret = parallel_call(args, call_build_docs)
-        if ret != 0:
-            return ret
-
-
-def check_doxygen_version():
-    # Different version of doxygen may produce different warnings
-    # This could cause a build to fail locally, but pass CI and vice versa
-    process = subprocess.run(['doxygen', '--version'], encoding='utf-8', stdout=subprocess.PIPE)
-    doxygen_ver = process.stdout.strip()
-
-    if version.parse(doxygen_ver) > DXG_CI_VERSION:
-        print('Local doxygen version {} is newer than CI doxygen version {}. Local build may contain '
-              'warnings that will not be raised when built by CI.'.format(doxygen_ver, DXG_CI_VERSION))
-
-
-def call_build_docs(entry):
-    (language, target, build_dir, src_dir, sphinx_parallel_jobs, builders, input_docs) = entry
-    for buildername in builders:
-        ret = sphinx_call(language, target, build_dir, src_dir, sphinx_parallel_jobs, buildername, input_docs)
-
-        # Warnings are checked after each builder as logs are overwritten
-        # check Doxygen warnings:
-        ret += check_docs(language, target,
-                          log_file=os.path.join(build_dir, DXG_WARN_LOG),
-                          known_warnings_file=DXG_KNOWN_WARNINGS,
-                          out_sanitized_log_file=os.path.join(build_dir, DXG_SANITIZED_LOG))
-        # check Sphinx warnings:
-        ret += check_docs(language, target,
-                          log_file=os.path.join(build_dir, SPHINX_WARN_LOG),
-                          known_warnings_file=SPHINX_KNOWN_WARNINGS,
-                          out_sanitized_log_file=os.path.join(build_dir, SPHINX_SANITIZED_LOG))
-
-        if ret != 0:
-            check_doxygen_version()
-            return ret
-
-    # Build PDF from tex
-    if 'latex' in builders:
-        latex_dir = os.path.join(build_dir, 'latex')
-        ret = build_pdf(language, target, latex_dir)
-
-    return ret
-
-
-def build_pdf(language, target, latex_dir):
-    # Note: because this runs in a multiprocessing Process, everything which happens here should be isolated to a single process
-
-    # wrap stdout & stderr in a way that lets us see which build_docs instance they come from
-    #
-    # this doesn't apply to subprocesses, they write to OS stdout & stderr so no prefix appears
-    prefix = '%s/%s: ' % (language, target)
-
-    print('Building PDF in latex_dir: %s' % (latex_dir))
-
-    saved_cwd = os.getcwd()
-    os.chdir(latex_dir)
-
-    # Based on read the docs PDFBuilder
-    rcfile = 'latexmkrc'
-
-    cmd = [
-        'latexmk',
-        '-r',
-        rcfile,
-        '-pdf',
-        # When ``-f`` is used, latexmk will continue building if it
-        # encounters errors. We still receive a failure exit code in this
-        # case, but the correct steps should run.
-        '-f',
-        '-dvi-',  # dont generate dvi
-        '-ps-',  # dont generate ps
-        '-interaction=nonstopmode',
-        '-quiet',
-        '-outdir=build',
-    ]
-
-    try:
-        p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        for c in iter(lambda: p.stdout.readline(), b''):
-            sys.stdout.write(prefix)
-            sys.stdout.write(c.decode('utf-8'))
-        ret = p.wait()
-        assert (ret is not None)
-        sys.stdout.flush()
-    except KeyboardInterrupt:  # this seems to be the only way to get Ctrl-C to kill everything?
-        p.kill()
-        os.chdir(saved_cwd)
-        return 130  # FIXME It doesn't return this errorcode, why? Just prints stacktrace
-    os.chdir(saved_cwd)
-
-    return ret
-
-
-SANITIZE_FILENAME_REGEX = re.compile('[^:]*/([^/:]*)(:.*)')
-SANITIZE_LINENUM_REGEX = re.compile('([^:]*)(:[0-9]+:)(.*)')
-
-
-def sanitize_line(line):
-    """
-    Clear a log message from insignificant parts
-
-    filter:
-        - only filename, no path at the beginning
-        - no line numbers after the filename
-    """
-
-    line = re.sub(SANITIZE_FILENAME_REGEX, r'\1\2', line)
-    line = re.sub(SANITIZE_LINENUM_REGEX, r'\1:line:\3', line)
-    return line
-
-
-def check_docs(language, target, log_file, known_warnings_file, out_sanitized_log_file):
-    """
-    Check for Documentation warnings in `log_file`: should only contain (fuzzy) matches to `known_warnings_file`
-
-    It prints all unknown messages with `target`/`language` prefix
-    It leaves `out_sanitized_log_file` file for observe and debug
-    """
-
-    # Sanitize all messages
-    all_messages = list()
-    with open(log_file) as f, open(out_sanitized_log_file, 'w') as o:
-        for line in f:
-            sanitized_line = sanitize_line(line)
-            all_messages.append(LogMessage(line, sanitized_line))
-            o.write(sanitized_line)
-
-    known_messages = list()
-    with open(known_warnings_file) as k:
-        for known_line in k:
-            known_messages.append(known_line)
-
-    if 'doxygen' in known_warnings_file:
-        # Clean a known Doxygen limitation: it's expected to always document anonymous
-        # structs/unions but we don't do this in our docs, so filter these all out with a regex
-        # (this won't match any named field, only anonymous members -
-        # ie the last part of the field is is just <something>::@NUM not <something>::name)
-        RE_ANONYMOUS_FIELD = re.compile(r'.+:line: warning: parameters of member [^:\s]+(::[^:\s]+)*(::@\d+)+ are not \(all\) documented')
-        all_messages = [msg for msg in all_messages if not re.match(RE_ANONYMOUS_FIELD, msg.sanitized_text)]

-    # Collect all new messages that are not match with the known messages.
-    # The order is an important.
-    new_messages = list()
-    known_idx = 0
-    for msg in all_messages:
-        try:
-            known_idx = known_messages.index(msg.sanitized_text, known_idx)
-        except ValueError:
-            new_messages.append(msg)
-
-    if new_messages:
-        print('\n%s/%s: Build failed due to new/different warnings (%s):\n' % (language, target, log_file))
-        for msg in new_messages:
-            print('%s/%s: %s' % (language, target, msg.original_text), end='')
-        print('\n%s/%s: (Check files %s and %s for full details.)' % (language, target, known_warnings_file, log_file))
-        return 1
-
-    return 0
-
-
-def action_linkcheck(args):
-    args.builders = 'linkcheck'
-    return parallel_call(args, call_linkcheck)
-
-
-def call_linkcheck(entry):
-    return sphinx_call(*entry)
-
-
-# https://github.com/espressif/esp-idf/tree/
-# https://github.com/espressif/esp-idf/blob/
-# https://github.com/espressif/esp-idf/raw/
-GH_LINK_RE = r'https://github.com/espressif/esp-idf/(?:tree|blob|raw)/[^\s]+'
-
-# we allow this one doc, because we always want users to see the latest support policy
-GH_LINK_ALLOWED = ['https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY.md',
-                   'https://github.com/espressif/esp-idf/blob/master/SUPPORT_POLICY_CN.md']
-
-
-def action_gh_linkcheck(args):
-    print('Checking for hardcoded GitHub links\n')
-
-    github_links = []
-
-    docs_dir = os.path.relpath(os.path.dirname(__file__))
-    for root, _, files in os.walk(docs_dir):
-        if '_build' in root:
-            continue
-        files = [os.path.join(root, f) for f in files if f.endswith('.rst')]
-        for path in files:
-            with open(path, 'r') as f:
-                for link in re.findall(GH_LINK_RE, f.read()):
-                    if link not in GH_LINK_ALLOWED:
-                        github_links.append((path, link))
-
-    if github_links:
-        for path, link in github_links:
-            print('%s: %s' % (path, link))
-        print('WARNING: Some .rst files contain hardcoded Github links.')
-        print('Please check above output and replace links with one of the following:')
-        print('- :idf:`dir` - points to directory inside ESP-IDF')
-        print('- :idf_file:`file` - points to file inside ESP-IDF')
-        print('- :idf_raw:`file` - points to raw view of the file inside ESP-IDF')
-        print('- :component:`dir` - points to directory inside ESP-IDF components dir')
-        print('- :component_file:`file` - points to file inside ESP-IDF components dir')
-        print('- :component_raw:`file` - points to raw view of the file inside ESP-IDF components dir')
-        print('- :example:`dir` - points to directory inside ESP-IDF examples dir')
-        print('- :example_file:`file` - points to file inside ESP-IDF examples dir')
-        print('- :example_raw:`file` - points to raw view of the file inside ESP-IDF examples dir')
-        print('These link types will point to the correct GitHub version automatically')
-        return 1
-    else:
-        print('No hardcoded links found')
-        return 0
-
-
-if __name__ == '__main__':
-    main()
@@ -1,138 +1,19 @@
 # -*- coding: utf-8 -*-
 #
-# Common (non-language-specific) configuration for Read The Docs & Sphinx
+# Common (non-language-specific) configuration for Sphinx
 #
-# Based on a Read the Docs Template documentation build configuration file,
-# created by sphinx-quickstart on Tue Aug 26 14:19:49 2014.
-#
 # This file is imported from a language-specific conf.py (ie en/conf.py or
 # zh_CN/conf.py)
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
+# type: ignore
+# pylint: disable=wildcard-import
+# pylint: disable=undefined-variable

 from __future__ import print_function, unicode_literals

-import os
 import os.path
-import re
-import subprocess
-import sys

-from get_github_rev import get_github_rev
-from idf_extensions.util import download_file_if_missing
-from sanitize_version import sanitize_version
+from esp_docs.conf_docs import *  # noqa: F403,F401

-# build_docs on the CI server sometimes fails under Python3. This is a workaround:
-sys.setrecursionlimit(3500)
-
-config_dir = os.path.abspath(os.path.dirname(__file__))
-
-# http://stackoverflow.com/questions/12772927/specifying-an-online-image-in-sphinx-restructuredtext-format
-#
-suppress_warnings = ['image.nonlocal_uri']
-
-# -- General configuration ------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-# needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be
-# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
-# ones.
-extensions = ['breathe',
-
-              'sphinx.ext.todo',
-              'sphinx_copybutton',
-              'sphinx_idf_theme',
-              'sphinxcontrib.blockdiag',
-              'sphinxcontrib.seqdiag',
-              'sphinxcontrib.actdiag',
-              'sphinxcontrib.nwdiag',
-              'sphinxcontrib.rackdiag',
-              'sphinxcontrib.packetdiag',
-              'sphinxcontrib.cairosvgconverter',
-
-              'extensions.html_redirects',
-              'extensions.toctree_filter',
-              'extensions.list_filter',
-              'extensions.google_analytics',
-
-              # Note: order is important here, events must
-              # be registered by one extension before they can be
-              # connected to another extension
-
-              'idf_extensions.include_build_file',
-              'idf_extensions.link_roles',
-              'idf_extensions.build_system',
-              'idf_extensions.esp_err_definitions',
-              'idf_extensions.gen_toolchain_links',
-              'idf_extensions.gen_version_specific_includes',
-              'idf_extensions.kconfig_reference',
-              'idf_extensions.gen_defines',
-              'idf_extensions.run_doxygen',
-              'idf_extensions.gen_idf_tools_links',
-              'idf_extensions.format_idf_target',
-              'idf_extensions.latex_builder',
-              'idf_extensions.exclude_docs',
-
-              # from https://github.com/pfalcon/sphinx_selective_exclude
-              'sphinx_selective_exclude.eager_only',
-              # TODO: determine if we need search_auto_exclude
-              # 'sphinx_selective_exclude.search_auto_exclude',
-              ]
-
-# sphinx.ext.todo extension parameters
-# If the below parameter is True, the extension
-# produces output, else it produces nothing.
-todo_include_todos = False
-
-# Enabling this fixes cropping of blockdiag edge labels
-seqdiag_antialias = True
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = ['.rst', '.md']
-
-source_parsers = {'.md': 'recommonmark.parser.CommonMarkParser',
-                  }
-
-# The encoding of source files.
-# source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-
-# This is the full exact version, canonical git version description
-# visible when you open index.html.
-version = subprocess.check_output(['git', 'describe']).strip().decode('utf-8')
-
-# The 'release' version is the same as version for non-CI builds, but for CI
-# builds on a branch then it's replaced with the branch name
-release = sanitize_version(version)
-
-print('Version: {0} Release: {1}'.format(version, release))
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-# today = ''
-# Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ['**/inc/**', '_static/', '_build/**']

 BT_DOCS = ['api-guides/blufi.rst',
            'api-guides/esp-ble-mesh/**',
@@ -231,30 +112,26 @@ conditional_include_dict = {'SOC_BT_SUPPORTED':BT_DOCS,
                             'esp32s3':ESP32S3_DOCS,
                             'esp32c3':ESP32C3_DOCS}

-# The reST default role (used for this markup: `text`) to use for all
-# documents.
-# default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-# add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-# show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
-
-# If true, keep warnings as "system message" paragraphs in the built documents.
-# keep_warnings = False
-
+extensions += ['sphinx_copybutton',
+               # Note: order is important here, events must
+               # be registered by one extension before they can be
+               # connected to another extension
+               'esp_docs.idf_extensions.build_system',
+               'esp_docs.idf_extensions.esp_err_definitions',
+               'esp_docs.idf_extensions.gen_toolchain_links',
+               'esp_docs.idf_extensions.gen_defines',
+               'esp_docs.idf_extensions.gen_version_specific_includes',
+               'esp_docs.idf_extensions.kconfig_reference',
+               'esp_docs.idf_extensions.gen_idf_tools_links',
+               'esp_docs.esp_extensions.run_doxygen',
+               ]
+
+# link roles config
+github_repo = 'espressif/esp-idf'
+
+# context used by sphinx_idf_theme
+html_context['github_user'] = 'espressif'
+html_context['github_repo'] = 'esp-docs'

 # Extra options required by sphinx_idf_theme
 project_slug = 'esp-idf'
@@ -263,252 +140,6 @@ versions_url = 'https://dl.espressif.com/dl/esp-idf/idf_versions.js'
 idf_targets = ['esp32', 'esp32s2', 'esp32c3']
 languages = ['en', 'zh_CN']

-project_homepage = 'https://github.com/espressif/esp-idf'
-
-# -- Options for HTML output ----------------------------------------------
-
-# Custom added feature to allow redirecting old URLs
-#
-# Redirects should be listed in page_redirects.xt
-#
-with open('../page_redirects.txt') as f:
-    lines = [re.sub(' +', ' ', line.strip()) for line in f.readlines() if line.strip() != '' and not line.startswith('#')]
-    for line in lines:  # check for well-formed entries
-        if len(line.split(' ')) != 2:
-            raise RuntimeError('Invalid line in page_redirects.txt: %s' % line)
-html_redirect_pages = [tuple(line.split(' ')) for line in lines]
-
-# The theme to use for HTML and HTML Help pages.  See the documentation for
-# a list of builtin themes.
-
-html_theme = 'sphinx_idf_theme'
-
-# context used by sphinx_idf_theme
-html_context = {
-    'display_github': True,  # Add 'Edit on Github' link instead of 'View page source'
-    'github_user': 'espressif',
-    'github_repo': 'esp-idf',
-    'github_version': get_github_rev(),
-}
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-# html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-# html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-html_logo = '../_static/espressif-logo.svg'
-
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-# html_favicon = None
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['../_static']
-
-# Add any extra paths that contain custom files (such as robots.txt or
-# .htaccess) here, relative to this directory. These files are copied
-# directly to the root of the documentation.
-# html_extra_path = []
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-# html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-# html_additional_pages = {}
-
-# If false, no module index is generated.
-# html_domain_indices = True
-
-# If false, no index is generated.
-# html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-# html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-# html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'ReadtheDocsTemplatedoc'
-
 google_analytics_id = os.environ.get('CI_GOOGLE_ANALYTICS_ID', None)

-# -- Options for LaTeX output ---------------------------------------------
+project_homepage = 'https://github.com/espressif/esp-idf'
-
-latex_template_dir = os.path.join(config_dir, 'latex_templates')
-
-preamble = ''
-with open(os.path.join(latex_template_dir, 'preamble.tex')) as f:
-    preamble = f.read()
-
-titlepage = ''
-with open(os.path.join(latex_template_dir, 'titlepage.tex')) as f:
-    titlepage = f.read()
-
-
-latex_elements = {
-    'papersize': 'a4paper',
-
-    # Latex figure (float) alignment
-    'figure_align':'htbp',
-
-    'pointsize': '10pt',
-
-    # Additional stuff for the LaTeX preamble.
-    'fncychap': '\\usepackage[Sonny]{fncychap}',
-
-    'preamble': preamble,
-
-    'maketitle': titlepage,
-}
-
-# The name of an image file (relative to this directory) to place at the bottom of
-# the title page.
-latex_logo = '../_static/espressif2.pdf'
-latex_engine = 'xelatex'
-latex_use_xindy = False
-
-# -- Options for manual page output ---------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
-    ('index', 'readthedocstemplate', u'Read the Docs Template Documentation',
-     [u'Read the Docs'], 1)
-]
-
-# If true, show URL addresses after external links.
-# man_show_urls = False
-
-
-# -- Options for Texinfo output -------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
-    ('index', 'ReadtheDocsTemplate', u'Read the Docs Template Documentation',
-     u'Read the Docs', 'ReadtheDocsTemplate', 'One line description of project.',
-     'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-# texinfo_appendices = []
-
-# If false, no module index is generated.
-# texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-# texinfo_show_urls = 'footnote'
-
-# If true, do not generate a @detailmenu in the "Top" node's menu.
-# texinfo_no_detailmenu = False
-
-
-# Override RTD CSS theme to introduce the theme corrections
-# https://github.com/rtfd/sphinx_rtd_theme/pull/432
-def setup(app):
-    app.add_stylesheet('theme_overrides.css')
-
-    # these two must be pushed in by build_docs.py
-    if 'idf_target' not in app.config:
-        app.add_config_value('idf_target', None, 'env')
-        app.add_config_value('idf_targets', None, 'env')
-
-    app.add_config_value('conditional_include_dict', None, 'env')
-    app.add_config_value('docs_to_build', None, 'env')
-
-    # Breathe extension variables (depend on build_dir)
-    # note: we generate into xml_in and then copy_if_modified to xml dir
-    app.config.breathe_projects = {'esp32-idf': os.path.join(app.config.build_dir, 'xml_in/')}
-    app.config.breathe_default_project = 'esp32-idf'
-
-    setup_diag_font(app)
-
-    # Config values pushed by -D using the cmdline is not available when setup is called
-    app.connect('config-inited',  setup_config_values)
-    app.connect('config-inited',  setup_html_context)
-
-
-def setup_config_values(app, config):
-    # Sets up global config values needed by other extensions
-    idf_target_title_dict = {
-        'esp32': 'ESP32',
-        'esp32s2': 'ESP32-S2',
-        'esp32c3': 'ESP32-C3'
-    }
-
-    app.add_config_value('idf_target_title_dict', idf_target_title_dict, 'env')
-
-    pdf_name = 'esp-idf-{}-{}-{}'.format(app.config.language, app.config.version, app.config.idf_target)
-    app.add_config_value('pdf_file', pdf_name, 'env')
-
-
-def setup_html_context(app, config):
-    # Setup path for 'edit on github'-link
-    config.html_context['conf_py_path'] = '/docs/{}/'.format(app.config.language)
-
-
-def setup_diag_font(app):
-    # blockdiag and other tools require a font which supports their character set
-    # the font file is stored on the download server to save repo size
-
-    font_name = {
-        'en': 'DejaVuSans.ttf',
-        'zh_CN': 'NotoSansSC-Regular.otf',
-    }[app.config.language]
-
-    font_dir = os.path.join(config_dir, '_static')
-    assert os.path.exists(font_dir)
-
-    print('Downloading font file %s for %s' % (font_name, app.config.language))
-    download_file_if_missing('https://dl.espressif.com/dl/esp-idf/docs/_static/{}'.format(font_name), font_dir)
-
-    font_path = os.path.abspath(os.path.join(font_dir, font_name))
-    assert os.path.exists(font_path)
-
-    app.config.blockdiag_fontpath = font_path
-    app.config.seqdiag_fontpath = font_path
-    app.config.actdiag_fontpath = font_path
-    app.config.nwdiag_fontpath = font_path
-    app.config.rackdiag_fontpath = font_path
-    app.config.packetdiag_fontpath = font_path
docs/doxygen/Doxyfile (new file, 270 lines)

@@ -0,0 +1,270 @@
+# This is Doxygen configuration file
+#
+# Doxygen provides over 260 configuration statements
+# To make this file easier to follow,
+# it contains only statements that are non-default
+#
+# NOTE:
+# It is recommended not to change defaults unless specifically required
+# Test any changes how they affect generated documentation
+# Make sure that correct warnings are generated to flag issues with documented code
+#
+# For the complete list of configuration statements see:
+# http://doxygen.nl/manual/config.html
+
+
+PROJECT_NAME = "IDF Programming Guide"
+
+## The 'INPUT' statement below is used as input by script 'gen-df-input.py'
+## to automatically generate API reference list files heder_file.inc
+## These files are placed in '_inc' directory
+## and used to include in API reference documentation
+
+INPUT = \
+    $(PROJECT_PATH)/components/tinyusb/additions/include/tinyusb.h \
+    $(PROJECT_PATH)/components/tinyusb/additions/include/tinyusb_types.h \
+    $(PROJECT_PATH)/components/tinyusb/additions/include/tusb_cdc_acm.h \
+    $(PROJECT_PATH)/components/tinyusb/additions/include/tusb_config.h \
+    $(PROJECT_PATH)/components/tinyusb/additions/include/tusb_console.h \
+    $(PROJECT_PATH)/components/tinyusb/additions/include/tusb_tasks.h \
+    $(PROJECT_PATH)/components/tinyusb/additions/include/vfs_tinyusb.h \
+    $(PROJECT_PATH)/components/esp_wifi/include/esp_wifi.h \
+    $(PROJECT_PATH)/components/esp_wifi/include/esp_wifi_types.h \
+    $(PROJECT_PATH)/components/esp_wifi/include/esp_smartconfig.h \
+    $(PROJECT_PATH)/components/esp_wifi/include/esp_now.h \
+    $(PROJECT_PATH)/components/esp_wifi/include/esp_wifi_default.h \
+    $(PROJECT_PATH)/components/esp_wifi/include/esp_mesh.h \
+    $(PROJECT_PATH)/components/wpa_supplicant/esp_supplicant/include/esp_dpp.h \
+    $(PROJECT_PATH)/components/esp_event/include/esp_event.h \
+    $(PROJECT_PATH)/components/esp_event/include/esp_event_base.h \
+    $(PROJECT_PATH)/components/esp_event/include/esp_event_legacy.h \
+    $(PROJECT_PATH)/components/bt/include/esp32/include/esp_bt.h \
+    $(PROJECT_PATH)/components/bt/common/api/include/api/esp_blufi_api.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_bt_defs.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_bt_main.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_bt_device.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_gap_ble_api.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_gatt_defs.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_gatts_api.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_gattc_api.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_gap_bt_api.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_a2dp_api.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_avrc_api.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_spp_api.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_hf_defs.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_hf_client_api.h \
+    $(PROJECT_PATH)/components/bt/host/bluedroid/api/include/api/esp_hf_ag_api.h \
+    $(PROJECT_PATH)/components/bt/host/nimble/esp-hci/include/esp_nimble_hci.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_common_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_local_data_operation_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_low_power_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_networking_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_provisioning_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_proxy_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_config_model_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_generic_model_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_health_model_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_lighting_model_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_sensor_model_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_time_scene_model_api.h \
+    $(PROJECT_PATH)/components/bt/esp_ble_mesh/api/esp_ble_mesh_defs.h \
+    $(PROJECT_PATH)/components/esp_eth/include/esp_eth.h \
+    $(PROJECT_PATH)/components/esp_eth/include/esp_eth_com.h \
+    $(PROJECT_PATH)/components/esp_eth/include/esp_eth_mac.h \
+    $(PROJECT_PATH)/components/esp_eth/include/esp_eth_phy.h \
+    $(PROJECT_PATH)/components/esp_eth/include/esp_eth_netif_glue.h \
+    $(PROJECT_PATH)/components/hal/include/hal/adc_types.h \
+    $(PROJECT_PATH)/components/soc/$(IDF_TARGET)/include/soc/adc_channel.h \
+    $(PROJECT_PATH)/components/driver/include/driver/adc_common.h \
+    $(PROJECT_PATH)/components/driver/$(IDF_TARGET)/include/driver/adc.h \
+    $(PROJECT_PATH)/components/esp_adc_cal/include/esp_adc_cal.h \
+    $(PROJECT_PATH)/components/hal/include/hal/dac_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/dac_common.h \
+    $(PROJECT_PATH)/components/hal/include/hal/gpio_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/gpio.h \
+    $(PROJECT_PATH)/components/driver/include/driver/dedic_gpio.h \
+    $(PROJECT_PATH)/components/hal/include/hal/rtc_io_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/rtc_io.h \
+    $(PROJECT_PATH)/components/hal/include/hal/i2c_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/i2c.h \
+    $(PROJECT_PATH)/components/hal/include/hal/i2s_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/i2s.h \
+    $(PROJECT_PATH)/components/hal/include/hal/ledc_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/ledc.h \
+    $(PROJECT_PATH)/components/hal/include/hal/rmt_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/rmt.h \
+    $(PROJECT_PATH)/components/hal/include/hal/sigmadelta_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/sigmadelta.h \
+    $(PROJECT_PATH)/components/hal/include/hal/spi_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/spi_common.h \
+    $(PROJECT_PATH)/components/driver/include/driver/spi_master.h \
+    $(PROJECT_PATH)/components/driver/include/driver/spi_slave.h \
+    $(PROJECT_PATH)/components/driver/include/driver/spi_slave_hd.h \
+    $(PROJECT_PATH)/components/hal/include/hal/timer_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/timer.h \
+    $(PROJECT_PATH)/components/hal/include/hal/touch_sensor_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/touch_sensor_common.h \
+    $(PROJECT_PATH)/components/hal/include/hal/twai_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/twai.h \
+    $(PROJECT_PATH)/components/hal/include/hal/uart_types.h \
+    $(PROJECT_PATH)/components/soc/$(IDF_TARGET)/include/soc/uart_channel.h \
+    $(PROJECT_PATH)/components/driver/include/driver/uart.h \
+    $(PROJECT_PATH)/components/esp_netif/include/esp_netif.h \
+    $(PROJECT_PATH)/components/esp_netif/include/esp_netif_net_stack.h \
+    $(PROJECT_PATH)/components/esp-tls/esp_tls.h \
+    $(PROJECT_PATH)/components/mqtt/esp-mqtt/include/mqtt_client.h \
+    $(PROJECT_PATH)/components/lwip/include/apps/ping/ping_sock.h \
+    $(PROJECT_PATH)/components/lwip/include/apps/esp_sntp.h \
+    $(PROJECT_PATH)/components/mdns/include/mdns.h \
+    $(PROJECT_PATH)/components/esp_http_client/include/esp_http_client.h \
+    $(PROJECT_PATH)/components/esp_websocket_client/include/esp_websocket_client.h \
+    $(PROJECT_PATH)/components/esp_http_server/include/esp_http_server.h \
+    $(PROJECT_PATH)/components/esp_https_server/include/esp_https_server.h \
+    $(PROJECT_PATH)/components/esp_local_ctrl/include/esp_local_ctrl.h \
+    $(PROJECT_PATH)/components/esp_serial_slave_link/include/esp_serial_slave_link/essl.h \
+    $(PROJECT_PATH)/components/esp_serial_slave_link/include/esp_serial_slave_link/essl_sdio.h \
+    $(PROJECT_PATH)/components/esp_serial_slave_link/include/esp_serial_slave_link/essl_spi.h \
+    $(PROJECT_PATH)/components/mbedtls/esp_crt_bundle/include/esp_crt_bundle.h \
+    $(PROJECT_PATH)/components/protocomm/include/common/protocomm.h \
+    $(PROJECT_PATH)/components/protocomm/include/security/protocomm_security.h \
+    $(PROJECT_PATH)/components/protocomm/include/security/protocomm_security0.h \
+    $(PROJECT_PATH)/components/protocomm/include/security/protocomm_security1.h \
+    $(PROJECT_PATH)/components/protocomm/include/transports/protocomm_ble.h \
+    $(PROJECT_PATH)/components/protocomm/include/transports/protocomm_console.h \
+    $(PROJECT_PATH)/components/protocomm/include/transports/protocomm_httpd.h \
+    $(PROJECT_PATH)/components/wifi_provisioning/include/wifi_provisioning/manager.h \
+    $(PROJECT_PATH)/components/wifi_provisioning/include/wifi_provisioning/scheme_ble.h \
+    $(PROJECT_PATH)/components/wifi_provisioning/include/wifi_provisioning/scheme_softap.h \
+    $(PROJECT_PATH)/components/wifi_provisioning/include/wifi_provisioning/scheme_console.h \
+    $(PROJECT_PATH)/components/wifi_provisioning/include/wifi_provisioning/wifi_config.h \
+    $(PROJECT_PATH)/components/wifi_provisioning/include/wifi_provisioning/wifi_scan.h \
+    $(PROJECT_PATH)/components/hal/include/hal/spi_flash_types.h \
+    $(PROJECT_PATH)/components/spi_flash/include/esp_flash_spi_init.h \
+    $(PROJECT_PATH)/components/spi_flash/include/esp_flash.h \
+    $(PROJECT_PATH)/components/spi_flash/include/esp_partition.h \
+    $(PROJECT_PATH)/components/bootloader_support/include/esp_flash_encrypt.h \
+    $(PROJECT_PATH)/components/bootloader_support/include/bootloader_random.h \
+    $(PROJECT_PATH)/components/spiffs/include/esp_spiffs.h \
+    $(PROJECT_PATH)/components/driver/include/driver/sdmmc_types.h \
+    $(PROJECT_PATH)/components/sdmmc/include/sdmmc_cmd.h \
+    $(PROJECT_PATH)/components/driver/include/driver/sdmmc_host.h \
+    $(PROJECT_PATH)/components/driver/include/driver/sdspi_host.h \
+    $(PROJECT_PATH)/components/hal/include/hal/sdio_slave_types.h \
+    $(PROJECT_PATH)/components/driver/include/driver/sdio_slave.h \
+    $(PROJECT_PATH)/components/nvs_flash/include/nvs.h \
+    $(PROJECT_PATH)/components/nvs_flash/include/nvs_flash.h \
+    $(PROJECT_PATH)/components/openthread/include/esp_openthread.h \
+    $(PROJECT_PATH)/components/openthread/include/esp_openthread_border_router.h \
+    $(PROJECT_PATH)/components/openthread/include/esp_openthread_lock.h \
+    $(PROJECT_PATH)/components/openthread/include/esp_openthread_netif_glue.h \
+    $(PROJECT_PATH)/components/openthread/include/esp_openthread_types.h \
+    $(PROJECT_PATH)/components/openthread/include/openthread-core-esp32x-config.h \
$(PROJECT_PATH)/components/vfs/include/esp_vfs.h \
|
||||||
|
$(PROJECT_PATH)/components/vfs/include/esp_vfs_dev.h \
|
||||||
|
$(PROJECT_PATH)/components/vfs/include/esp_vfs_eventfd.h \
|
||||||
|
$(PROJECT_PATH)/components/vfs/include/esp_vfs_semihost.h \
|
||||||
|
$(PROJECT_PATH)/components/fatfs/vfs/esp_vfs_fat.h \
|
||||||
|
$(PROJECT_PATH)/components/fatfs/diskio/diskio_impl.h \
|
||||||
|
$(PROJECT_PATH)/components/fatfs/diskio/diskio_sdmmc.h \
|
||||||
|
$(PROJECT_PATH)/components/fatfs/diskio/diskio_wl.h \
|
||||||
|
$(PROJECT_PATH)/components/fatfs/diskio/diskio_rawflash.h \
|
||||||
|
$(PROJECT_PATH)/components/wear_levelling/include/wear_levelling.h \
|
||||||
|
$(PROJECT_PATH)/components/console/esp_console.h \
|
||||||
|
$(PROJECT_PATH)/components/heap/include/esp_heap_caps.h \
|
||||||
|
$(PROJECT_PATH)/components/heap/include/esp_heap_trace.h \
|
||||||
|
$(PROJECT_PATH)/components/heap/include/esp_heap_caps_init.h \
|
||||||
|
$(PROJECT_PATH)/components/heap/include/multi_heap.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_hw_support/include/esp_intr_alloc.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_system/include/esp_int_wdt.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_system/include/esp_task_wdt.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_hw_support/include/esp_crc.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_system/include/esp_freertos_hooks.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_ipc/include/esp_ipc.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_system/include/esp_expression_with_stack.h \
|
||||||
|
$(PROJECT_PATH)/components/app_update/include/esp_ota_ops.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_https_ota/include/esp_https_ota.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_hw_support/include/esp_async_memcpy.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_hw_support/include/esp_random.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_hw_support/include/esp_sleep.h \
|
||||||
|
$(PROJECT_PATH)/components/log/include/esp_log.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_rom/include/esp_rom_sys.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_system/include/esp_system.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_common/include/esp_idf_version.h \
|
||||||
|
$(PROJECT_PATH)/components/ulp/include/ulp_common.h \
|
||||||
|
$(PROJECT_PATH)/components/app_trace/include/esp_app_trace.h \
|
||||||
|
$(PROJECT_PATH)/components/app_trace/include/esp_sysview_trace.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_pm/include/esp_pm.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_pm/include/$(IDF_TARGET)/pm.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_timer/include/esp_timer.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_event/include/esp_event.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_event/include/esp_event_base.h \
|
||||||
|
$(PROJECT_PATH)/components/efuse/include/esp_efuse.h \
|
||||||
|
$(PROJECT_PATH)/components/bootloader_support/include/esp_app_format.h \
|
||||||
|
$(PROJECT_PATH)/components/pthread/include/esp_pthread.h \
|
||||||
|
$(PROJECT_PATH)/components/freertos/include/freertos/task.h \
|
||||||
|
$(PROJECT_PATH)/components/freertos/include/freertos/queue.h \
|
||||||
|
$(PROJECT_PATH)/components/freertos/include/freertos/semphr.h \
|
||||||
|
$(PROJECT_PATH)/components/freertos/include/freertos/timers.h \
|
||||||
|
$(PROJECT_PATH)/components/freertos/include/freertos/event_groups.h \
|
||||||
|
$(PROJECT_PATH)/components/freertos/include/freertos/stream_buffer.h \
|
||||||
|
$(PROJECT_PATH)/components/freertos/include/freertos/message_buffer.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_ringbuf/include/freertos/ringbuf.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_common/include/esp_err.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_common/include/esp_check.h \
|
||||||
|
$(PROJECT_PATH)/components/esp_system/include/esp_system.h \
|
||||||
|
$(PROJECT_PATH)/components/freemodbus/common/include/esp_modbus_common.h \
|
||||||
|
$(PROJECT_PATH)/components/freemodbus/common/include/esp_modbus_slave.h \
|
||||||
|
$(PROJECT_PATH)/components/freemodbus/common/include/esp_modbus_master.h \
|
||||||
|
$(PROJECT_PATH)/components/perfmon/include/xtensa_perfmon_access.h \
|
||||||
|
$(PROJECT_PATH)/components/perfmon/include/xtensa_perfmon_apis.h \
|
||||||
|
$(PROJECT_PATH)/components/perfmon/include/xtensa_perfmon_masks.h \
|
||||||
|
$(PROJECT_PATH)/components/ieee802154/include/esp_ieee802154.h \
|
||||||
|
$(PROJECT_PATH)/components/ieee802154/include/esp_ieee802154_types.h

## Target-specific headers are in separate Doxyfile files
@INCLUDE = $(PROJECT_PATH)/docs/doxygen/Doxyfile_$(IDF_TARGET)

## Get warnings for functions that have no documentation for their parameters or return value
##
WARN_NO_PARAMDOC = YES

## Enable preprocessing and remove __attribute__(...) expressions from the INPUT files
##
ENABLE_PREPROCESSING = YES
MACRO_EXPANSION = YES
EXPAND_ONLY_PREDEF = YES
PREDEFINED = \
    $(ENV_DOXYGEN_DEFINES) \
    __attribute__(x)= \
    _Static_assert()= \
    IDF_DEPRECATED(X)= \
    IRAM_ATTR= \
    configSUPPORT_DYNAMIC_ALLOCATION=1 \
    configSUPPORT_STATIC_ALLOCATION=1 \
    configQUEUE_REGISTRY_SIZE=1 \
    configUSE_RECURSIVE_MUTEXES=1 \
    configTHREAD_LOCAL_STORAGE_DELETE_CALLBACKS=1 \
    configNUM_THREAD_LOCAL_STORAGE_POINTERS=1 \
    configUSE_APPLICATION_TASK_TAG=1 \
    configTASKLIST_INCLUDE_COREID=1 \
    "ESP_EVENT_DECLARE_BASE(x)=extern esp_event_base_t x"

## Do not complain about not having dot
##
HAVE_DOT = NO

## Generate XML that is required for Breathe
##
GENERATE_XML = YES
XML_OUTPUT = xml

GENERATE_HTML = NO
HAVE_DOT = NO
GENERATE_LATEX = NO
GENERATE_MAN = YES
GENERATE_RTF = NO

## Skip distracting progress messages
##
QUIET = YES

@ -1,270 +0,0 @@
# This is the Doxygen configuration file
#
# Doxygen provides over 260 configuration statements
# To make this file easier to follow,
# it contains only statements that are non-default
#
# NOTE:
# It is recommended not to change defaults unless specifically required
# Test how any changes affect the generated documentation
# Make sure that correct warnings are generated to flag issues with documented code
#
# For the complete list of configuration statements see:
# http://doxygen.nl/manual/config.html


PROJECT_NAME = "IDF Programming Guide"

## The 'INPUT' statement below is used as input by script 'gen-df-input.py'
## to automatically generate API reference list files header_file.inc
## These files are placed in '_inc' directory
## and are used for inclusion in the API reference documentation

INPUT = \
$(IDF_PATH)/components/tinyusb/additions/include/tinyusb.h \
|
|
||||||
$(IDF_PATH)/components/tinyusb/additions/include/tinyusb_types.h \
|
|
||||||
$(IDF_PATH)/components/tinyusb/additions/include/tusb_cdc_acm.h \
|
|
||||||
$(IDF_PATH)/components/tinyusb/additions/include/tusb_config.h \
|
|
||||||
$(IDF_PATH)/components/tinyusb/additions/include/tusb_console.h \
|
|
||||||
$(IDF_PATH)/components/tinyusb/additions/include/tusb_tasks.h \
|
|
||||||
$(IDF_PATH)/components/tinyusb/additions/include/vfs_tinyusb.h \
|
|
||||||
$(IDF_PATH)/components/esp_wifi/include/esp_wifi.h \
|
|
||||||
$(IDF_PATH)/components/esp_wifi/include/esp_wifi_types.h \
|
|
||||||
$(IDF_PATH)/components/esp_wifi/include/esp_smartconfig.h \
|
|
||||||
$(IDF_PATH)/components/esp_wifi/include/esp_now.h \
|
|
||||||
$(IDF_PATH)/components/esp_wifi/include/esp_wifi_default.h \
|
|
||||||
$(IDF_PATH)/components/esp_wifi/include/esp_mesh.h \
|
|
||||||
$(IDF_PATH)/components/wpa_supplicant/esp_supplicant/include/esp_dpp.h \
|
|
||||||
$(IDF_PATH)/components/esp_event/include/esp_event.h \
|
|
||||||
$(IDF_PATH)/components/esp_event/include/esp_event_base.h \
|
|
||||||
$(IDF_PATH)/components/esp_event/include/esp_event_legacy.h \
|
|
||||||
$(IDF_PATH)/components/bt/include/esp32/include/esp_bt.h \
|
|
||||||
$(IDF_PATH)/components/bt/common/api/include/api/esp_blufi_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_bt_defs.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_bt_main.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_bt_device.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_gap_ble_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_gatt_defs.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_gatts_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_gattc_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_gap_bt_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_a2dp_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_avrc_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_spp_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_hf_defs.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_hf_client_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/bluedroid/api/include/api/esp_hf_ag_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/host/nimble/esp-hci/include/esp_nimble_hci.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_common_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_local_data_operation_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_low_power_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_networking_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_provisioning_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/core/include/esp_ble_mesh_proxy_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_config_model_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_generic_model_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_health_model_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_lighting_model_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_sensor_model_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/models/include/esp_ble_mesh_time_scene_model_api.h \
|
|
||||||
$(IDF_PATH)/components/bt/esp_ble_mesh/api/esp_ble_mesh_defs.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/eth_types.h \
|
|
||||||
$(IDF_PATH)/components/esp_eth/include/esp_eth.h \
|
|
||||||
$(IDF_PATH)/components/esp_eth/include/esp_eth_com.h \
|
|
||||||
$(IDF_PATH)/components/esp_eth/include/esp_eth_mac.h \
|
|
||||||
$(IDF_PATH)/components/esp_eth/include/esp_eth_phy.h \
|
|
||||||
$(IDF_PATH)/components/esp_eth/include/esp_eth_netif_glue.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/adc_types.h \
|
|
||||||
$(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/adc_channel.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/adc_common.h \
|
|
||||||
$(IDF_PATH)/components/driver/$(IDF_TARGET)/include/driver/adc.h \
|
|
||||||
$(IDF_PATH)/components/esp_adc_cal/include/esp_adc_cal.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/dac_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/dac_common.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/gpio_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/gpio.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/dedic_gpio.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/rtc_io_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/rtc_io.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/i2c_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/i2c.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/i2s_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/i2s.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/ledc_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/ledc.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/rmt_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/rmt.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/sigmadelta_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/sigmadelta.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/spi_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/spi_common.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/spi_master.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/spi_slave.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/spi_slave_hd.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/timer_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/timer.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/touch_sensor_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/touch_sensor_common.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/twai_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/twai.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/uart_types.h \
|
|
||||||
$(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/uart_channel.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/uart.h \
|
|
||||||
$(IDF_PATH)/components/esp_netif/include/esp_netif.h \
|
|
||||||
$(IDF_PATH)/components/esp_netif/include/esp_netif_net_stack.h \
|
|
||||||
$(IDF_PATH)/components/esp-tls/esp_tls.h \
|
|
||||||
$(IDF_PATH)/components/mqtt/esp-mqtt/include/mqtt_client.h \
|
|
||||||
$(IDF_PATH)/components/lwip/include/apps/ping/ping_sock.h \
|
|
||||||
$(IDF_PATH)/components/lwip/include/apps/esp_sntp.h \
|
|
||||||
$(IDF_PATH)/components/mdns/include/mdns.h \
|
|
||||||
$(IDF_PATH)/components/esp_http_client/include/esp_http_client.h \
|
|
||||||
$(IDF_PATH)/components/esp_websocket_client/include/esp_websocket_client.h \
|
|
||||||
$(IDF_PATH)/components/esp_http_server/include/esp_http_server.h \
|
|
||||||
$(IDF_PATH)/components/esp_https_server/include/esp_https_server.h \
|
|
||||||
$(IDF_PATH)/components/esp_local_ctrl/include/esp_local_ctrl.h \
|
|
||||||
$(IDF_PATH)/components/esp_serial_slave_link/include/esp_serial_slave_link/essl.h \
|
|
||||||
$(IDF_PATH)/components/esp_serial_slave_link/include/esp_serial_slave_link/essl_sdio.h \
|
|
||||||
$(IDF_PATH)/components/esp_serial_slave_link/include/esp_serial_slave_link/essl_spi.h \
|
|
||||||
$(IDF_PATH)/components/mbedtls/esp_crt_bundle/include/esp_crt_bundle.h \
|
|
||||||
$(IDF_PATH)/components/protocomm/include/common/protocomm.h \
|
|
||||||
$(IDF_PATH)/components/protocomm/include/security/protocomm_security.h \
|
|
||||||
$(IDF_PATH)/components/protocomm/include/security/protocomm_security0.h \
|
|
||||||
$(IDF_PATH)/components/protocomm/include/security/protocomm_security1.h \
|
|
||||||
$(IDF_PATH)/components/protocomm/include/transports/protocomm_ble.h \
|
|
||||||
$(IDF_PATH)/components/protocomm/include/transports/protocomm_console.h \
|
|
||||||
$(IDF_PATH)/components/protocomm/include/transports/protocomm_httpd.h \
|
|
||||||
$(IDF_PATH)/components/wifi_provisioning/include/wifi_provisioning/manager.h \
|
|
||||||
$(IDF_PATH)/components/wifi_provisioning/include/wifi_provisioning/scheme_ble.h \
|
|
||||||
$(IDF_PATH)/components/wifi_provisioning/include/wifi_provisioning/scheme_softap.h \
|
|
||||||
$(IDF_PATH)/components/wifi_provisioning/include/wifi_provisioning/scheme_console.h \
|
|
||||||
$(IDF_PATH)/components/wifi_provisioning/include/wifi_provisioning/wifi_config.h \
|
|
||||||
$(IDF_PATH)/components/wifi_provisioning/include/wifi_provisioning/wifi_scan.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/spi_flash_types.h \
|
|
||||||
$(IDF_PATH)/components/spi_flash/include/esp_flash_spi_init.h \
|
|
||||||
$(IDF_PATH)/components/spi_flash/include/esp_flash.h \
|
|
||||||
$(IDF_PATH)/components/spi_flash/include/esp_partition.h \
|
|
||||||
$(IDF_PATH)/components/bootloader_support/include/esp_flash_encrypt.h \
|
|
||||||
$(IDF_PATH)/components/bootloader_support/include/bootloader_random.h \
|
|
||||||
$(IDF_PATH)/components/spiffs/include/esp_spiffs.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/sdmmc_types.h \
|
|
||||||
$(IDF_PATH)/components/sdmmc/include/sdmmc_cmd.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/sdmmc_host.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/sdspi_host.h \
|
|
||||||
$(IDF_PATH)/components/hal/include/hal/sdio_slave_types.h \
|
|
||||||
$(IDF_PATH)/components/driver/include/driver/sdio_slave.h \
|
|
||||||
$(IDF_PATH)/components/nvs_flash/include/nvs.h \
|
|
||||||
$(IDF_PATH)/components/nvs_flash/include/nvs_flash.h \
|
|
||||||
$(IDF_PATH)/components/openthread/include/esp_openthread.h \
|
|
||||||
$(IDF_PATH)/components/openthread/include/esp_openthread_border_router.h \
|
|
||||||
$(IDF_PATH)/components/openthread/include/esp_openthread_lock.h \
|
|
||||||
$(IDF_PATH)/components/openthread/include/esp_openthread_netif_glue.h \
|
|
||||||
$(IDF_PATH)/components/openthread/include/esp_openthread_types.h \
|
|
||||||
$(IDF_PATH)/components/openthread/include/openthread-core-esp32x-config.h \
|
|
||||||
$(IDF_PATH)/components/vfs/include/esp_vfs.h \
|
|
||||||
$(IDF_PATH)/components/vfs/include/esp_vfs_dev.h \
|
|
||||||
$(IDF_PATH)/components/vfs/include/esp_vfs_eventfd.h \
|
|
||||||
$(IDF_PATH)/components/vfs/include/esp_vfs_semihost.h \
|
|
||||||
$(IDF_PATH)/components/fatfs/vfs/esp_vfs_fat.h \
|
|
||||||
$(IDF_PATH)/components/fatfs/diskio/diskio_impl.h \
|
|
||||||
$(IDF_PATH)/components/fatfs/diskio/diskio_sdmmc.h \
|
|
||||||
$(IDF_PATH)/components/fatfs/diskio/diskio_wl.h \
|
|
||||||
$(IDF_PATH)/components/fatfs/diskio/diskio_rawflash.h \
|
|
||||||
$(IDF_PATH)/components/wear_levelling/include/wear_levelling.h \
|
|
||||||
$(IDF_PATH)/components/console/esp_console.h \
|
|
||||||
$(IDF_PATH)/components/heap/include/esp_heap_caps.h \
|
|
||||||
$(IDF_PATH)/components/heap/include/esp_heap_trace.h \
|
|
||||||
$(IDF_PATH)/components/heap/include/esp_heap_caps_init.h \
|
|
||||||
$(IDF_PATH)/components/heap/include/multi_heap.h \
|
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/esp_intr_alloc.h \
|
|
||||||
$(IDF_PATH)/components/esp_system/include/esp_int_wdt.h \
|
|
||||||
$(IDF_PATH)/components/esp_system/include/esp_task_wdt.h \
|
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/esp_crc.h \
|
|
||||||
$(IDF_PATH)/components/esp_system/include/esp_freertos_hooks.h \
|
|
||||||
$(IDF_PATH)/components/esp_ipc/include/esp_ipc.h \
|
|
||||||
$(IDF_PATH)/components/esp_system/include/esp_expression_with_stack.h \
|
|
||||||
$(IDF_PATH)/components/app_update/include/esp_ota_ops.h \
|
|
||||||
$(IDF_PATH)/components/esp_https_ota/include/esp_https_ota.h \
|
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/esp_async_memcpy.h \
|
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/esp_random.h \
|
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/esp_sleep.h \
|
|
||||||
$(IDF_PATH)/components/log/include/esp_log.h \
|
|
||||||
$(IDF_PATH)/components/esp_rom/include/esp_rom_sys.h \
|
|
||||||
$(IDF_PATH)/components/esp_system/include/esp_system.h \
|
|
||||||
$(IDF_PATH)/components/esp_common/include/esp_idf_version.h \
|
|
||||||
$(IDF_PATH)/components/ulp/include/ulp_common.h \
|
|
||||||
$(IDF_PATH)/components/app_trace/include/esp_app_trace.h \
|
|
||||||
$(IDF_PATH)/components/app_trace/include/esp_sysview_trace.h \
|
|
||||||
$(IDF_PATH)/components/esp_pm/include/esp_pm.h \
|
|
||||||
$(IDF_PATH)/components/esp_pm/include/$(IDF_TARGET)/pm.h \
|
|
||||||
$(IDF_PATH)/components/esp_timer/include/esp_timer.h \
|
|
||||||
$(IDF_PATH)/components/esp_event/include/esp_event.h \
|
|
||||||
$(IDF_PATH)/components/esp_event/include/esp_event_base.h \
|
|
||||||
$(IDF_PATH)/components/efuse/include/esp_efuse.h \
|
|
||||||
$(IDF_PATH)/components/bootloader_support/include/esp_app_format.h \
|
|
||||||
$(IDF_PATH)/components/pthread/include/esp_pthread.h \
|
|
||||||
$(IDF_PATH)/components/freertos/include/freertos/task.h \
|
|
||||||
$(IDF_PATH)/components/freertos/include/freertos/queue.h \
|
|
||||||
$(IDF_PATH)/components/freertos/include/freertos/semphr.h \
|
|
||||||
$(IDF_PATH)/components/freertos/include/freertos/timers.h \
|
|
||||||
$(IDF_PATH)/components/freertos/include/freertos/event_groups.h \
|
|
||||||
$(IDF_PATH)/components/freertos/include/freertos/stream_buffer.h \
|
|
||||||
$(IDF_PATH)/components/freertos/include/freertos/message_buffer.h \
|
|
||||||
$(IDF_PATH)/components/esp_ringbuf/include/freertos/ringbuf.h \
|
|
||||||
$(IDF_PATH)/components/esp_common/include/esp_err.h \
|
|
||||||
$(IDF_PATH)/components/esp_common/include/esp_check.h \
|
|
||||||
$(IDF_PATH)/components/esp_system/include/esp_system.h \
|
|
||||||
$(IDF_PATH)/components/freemodbus/common/include/esp_modbus_common.h \
|
|
||||||
$(IDF_PATH)/components/freemodbus/common/include/esp_modbus_slave.h \
|
|
||||||
$(IDF_PATH)/components/freemodbus/common/include/esp_modbus_master.h \
|
|
||||||
$(IDF_PATH)/components/perfmon/include/xtensa_perfmon_access.h \
|
|
||||||
$(IDF_PATH)/components/perfmon/include/xtensa_perfmon_apis.h \
|
|
||||||
$(IDF_PATH)/components/perfmon/include/xtensa_perfmon_masks.h \
|
|
||||||
$(IDF_PATH)/components/ieee802154/include/esp_ieee802154.h \
|
|
||||||
$(IDF_PATH)/components/ieee802154/include/esp_ieee802154_types.h
|
|
||||||
|
|
||||||
## Target-specific headers are in separate Doxyfile files
|
|
||||||
@INCLUDE = $(IDF_PATH)/docs/doxygen/Doxyfile_$(IDF_TARGET)
|
|
||||||
|
|
||||||
## Get warnings for functions that have no documentation for their parameters or return value
|
|
||||||
##
|
|
||||||
WARN_NO_PARAMDOC = YES
|
|
||||||
|
|
||||||
## Enable preprocessing and remove __attribute__(...) expressions from the INPUT files
|
|
||||||
##
|
|
||||||
ENABLE_PREPROCESSING = YES
|
|
||||||
MACRO_EXPANSION = YES
|
|
||||||
EXPAND_ONLY_PREDEF = YES
|
|
||||||
PREDEFINED = \
|
|
||||||
$(ENV_DOXYGEN_DEFINES) \
|
|
||||||
__attribute__(x)= \
|
|
||||||
_Static_assert()= \
|
|
||||||
IDF_DEPRECATED(X)= \
|
|
||||||
IRAM_ATTR= \
|
|
||||||
configSUPPORT_DYNAMIC_ALLOCATION=1 \
|
|
||||||
configSUPPORT_STATIC_ALLOCATION=1 \
|
|
||||||
configQUEUE_REGISTRY_SIZE=1 \
|
|
||||||
configUSE_RECURSIVE_MUTEXES=1 \
|
|
||||||
configTHREAD_LOCAL_STORAGE_DELETE_CALLBACKS=1 \
|
|
||||||
configNUM_THREAD_LOCAL_STORAGE_POINTERS=1 \
|
|
||||||
configUSE_APPLICATION_TASK_TAG=1 \
|
|
||||||
configTASKLIST_INCLUDE_COREID=1 \
|
|
||||||
"ESP_EVENT_DECLARE_BASE(x)=extern esp_event_base_t x"
|
|
||||||
|
|
||||||
## Do not complain about not having dot
|
|
||||||
##
|
|
||||||
HAVE_DOT = NO
|
|
||||||
|
|
||||||
## Generate XML that is required for Breathe
|
|
||||||
##
|
|
||||||
GENERATE_XML = YES
|
|
||||||
XML_OUTPUT = xml
|
|
||||||
|
|
||||||
GENERATE_HTML = NO
|
|
||||||
HAVE_DOT = NO
|
|
||||||
GENERATE_LATEX = NO
|
|
||||||
GENERATE_MAN = YES
|
|
||||||
GENERATE_RTF = NO
|
|
||||||
|
|
||||||
## Skip distracting progress messages
|
|
||||||
##
|
|
||||||
QUIET = YES
|
|
@ -1,12 +1,12 @@
|
|||||||
INPUT += \
|
INPUT += \
|
||||||
$(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/dac_channel.h \
|
$(PROJECT_PATH)/components/soc/$(IDF_TARGET)/include/soc/dac_channel.h \
|
||||||
$(IDF_PATH)/components/driver/$(IDF_TARGET)/include/driver/dac.h \
|
$(PROJECT_PATH)/components/driver/$(IDF_TARGET)/include/driver/dac.h \
|
||||||
$(IDF_PATH)/components/hal/include/hal/mcpwm_types.h \
|
$(PROJECT_PATH)/components/hal/include/hal/mcpwm_types.h \
|
||||||
$(IDF_PATH)/components/driver/include/driver/mcpwm.h \
|
$(PROJECT_PATH)/components/driver/include/driver/mcpwm.h \
|
||||||
$(IDF_PATH)/components/hal/include/hal/pcnt_types.h \
|
$(PROJECT_PATH)/components/hal/include/hal/pcnt_types.h \
|
||||||
$(IDF_PATH)/components/driver/include/driver/pcnt.h \
|
$(PROJECT_PATH)/components/driver/include/driver/pcnt.h \
|
||||||
$(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/rtc_io_channel.h \
|
$(PROJECT_PATH)/components/soc/$(IDF_TARGET)/include/soc/rtc_io_channel.h \
|
||||||
$(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/touch_sensor_channel.h \
|
$(PROJECT_PATH)/components/soc/$(IDF_TARGET)/include/soc/touch_sensor_channel.h \
|
||||||
$(IDF_PATH)/components/driver/$(IDF_TARGET)/include/driver/touch_sensor.h \
|
$(PROJECT_PATH)/components/driver/$(IDF_TARGET)/include/driver/touch_sensor.h \
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/soc/esp32/himem.h \
|
$(PROJECT_PATH)/components/esp_hw_support/include/soc/esp32/himem.h \
|
||||||
$(IDF_PATH)/components/ulp/include/$(IDF_TARGET)/ulp.h
|
$(PROJECT_PATH)/components/ulp/include/$(IDF_TARGET)/ulp.h
|
||||||
|
@ -1,4 +1,4 @@
|
|||||||
INPUT += \
|
INPUT += \
|
||||||
$(IDF_PATH)/components/driver/esp32c3/include/driver/temp_sensor.h \
|
$(PROJECT_PATH)/components/driver/esp32c3/include/driver/temp_sensor.h \
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/soc/esp32c3/esp_ds.h \
|
$(PROJECT_PATH)/components/esp_hw_support/include/soc/esp32c3/esp_ds.h \
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/soc/esp32c3/esp_hmac.h
|
$(PROJECT_PATH)/components/esp_hw_support/include/soc/esp32c3/esp_hmac.h
|
||||||
|
@ -1,17 +1,17 @@
|
|||||||
INPUT += \
|
INPUT += \
|
||||||
$(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/dac_channel.h \
|
$(PROJECT_PATH)/components/soc/$(IDF_TARGET)/include/soc/dac_channel.h \
|
||||||
$(IDF_PATH)/components/driver/$(IDF_TARGET)/include/driver/dac.h \
|
$(PROJECT_PATH)/components/driver/$(IDF_TARGET)/include/driver/dac.h \
|
||||||
$(IDF_PATH)/components/hal/include/hal/pcnt_types.h \
|
$(PROJECT_PATH)/components/hal/include/hal/pcnt_types.h \
|
||||||
$(IDF_PATH)/components/driver/include/driver/pcnt.h \
|
$(PROJECT_PATH)/components/driver/include/driver/pcnt.h \
|
||||||
$(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/rtc_io_channel.h \
|
$(PROJECT_PATH)/components/soc/$(IDF_TARGET)/include/soc/rtc_io_channel.h \
|
||||||
$(IDF_PATH)/components/driver/esp32s2/include/driver/temp_sensor.h \
|
$(PROJECT_PATH)/components/driver/esp32s2/include/driver/temp_sensor.h \
|
||||||
$(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/touch_sensor_channel.h \
|
$(PROJECT_PATH)/components/soc/$(IDF_TARGET)/include/soc/touch_sensor_channel.h \
|
||||||
$(IDF_PATH)/components/driver/$(IDF_TARGET)/include/driver/touch_sensor.h \
|
$(PROJECT_PATH)/components/driver/$(IDF_TARGET)/include/driver/touch_sensor.h \
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/soc/esp32s2/esp_ds.h \
|
$(PROJECT_PATH)/components/esp_hw_support/include/soc/esp32s2/esp_ds.h \
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/soc/esp32s2/esp_hmac.h \
|
$(PROJECT_PATH)/components/esp_hw_support/include/soc/esp32s2/esp_hmac.h \
|
||||||
$(IDF_PATH)/components/ulp/include/esp32s2/ulp_riscv.h \
|
$(PROJECT_PATH)/components/ulp/include/esp32s2/ulp_riscv.h \
|
||||||
$(IDF_PATH)/components/ulp/include/$(IDF_TARGET)/ulp.h \
|
$(PROJECT_PATH)/components/ulp/include/$(IDF_TARGET)/ulp.h \
|
||||||
$(IDF_PATH)/components/touch_element/include/touch_element/touch_element.h \
|
$(PROJECT_PATH)/components/touch_element/include/touch_element/touch_element.h \
|
||||||
$(IDF_PATH)/components/touch_element/include/touch_element/touch_button.h \
|
$(PROJECT_PATH)/components/touch_element/include/touch_element/touch_button.h \
|
||||||
$(IDF_PATH)/components/touch_element/include/touch_element/touch_slider.h \
|
$(PROJECT_PATH)/components/touch_element/include/touch_element/touch_slider.h \
|
||||||
$(IDF_PATH)/components/touch_element/include/touch_element/touch_matrix.h
|
$(PROJECT_PATH)/components/touch_element/include/touch_element/touch_matrix.h
|
||||||
|
@ -1,9 +1,9 @@
|
|||||||
INPUT += \
|
INPUT += \
|
||||||
$(IDF_PATH)/components/ulp/include/$(IDF_TARGET)/ulp.h \
|
$(PROJECT_PATH)/components/ulp/include/$(IDF_TARGET)/ulp.h \
|
||||||
$(IDF_PATH)/components/esp_hw_support/include/soc/$(IDF_TARGET)/esp_hmac.h \
|
$(PROJECT_PATH)/components/esp_hw_support/include/soc/$(IDF_TARGET)/esp_hmac.h \
|
||||||
$(IDF_PATH)/components/hal/include/hal/mcpwm_types.h \
|
$(PROJECT_PATH)/components/hal/include/hal/mcpwm_types.h \
|
||||||
$(IDF_PATH)/components/driver/include/driver/mcpwm.h \
|
$(PROJECT_PATH)/components/driver/include/driver/mcpwm.h \
|
||||||
$(IDF_PATH)/components/hal/include/hal/pcnt_types.h \
|
$(PROJECT_PATH)/components/hal/include/hal/pcnt_types.h \
|
||||||
$(IDF_PATH)/components/driver/include/driver/pcnt.h \
|
$(PROJECT_PATH)/components/driver/include/driver/pcnt.h \
|
||||||
$(IDF_PATH)/components/soc/$(IDF_TARGET)/include/soc/touch_sensor_channel.h \
|
$(PROJECT_PATH)/components/soc/$(IDF_TARGET)/include/soc/touch_sensor_channel.h \
|
||||||
$(IDF_PATH)/components/driver/$(IDF_TARGET)/include/driver/touch_sensor.h
|
$(PROJECT_PATH)/components/driver/$(IDF_TARGET)/include/driver/touch_sensor.h
|
||||||
|
@ -61,7 +61,7 @@ API Reference
|
|||||||
|
|
||||||
1. This repository provides for automatic update of API reference documentation using :doc:`code markup retrieved by Doxygen from header files <../contribute/documenting-code>`.
|
1. This repository provides for automatic update of API reference documentation using :doc:`code markup retrieved by Doxygen from header files <../contribute/documenting-code>`.
|
||||||
|
|
||||||
1. Update is done on each documentation build by invoking Sphinx extension :idf_file:`docs/idf_extensions/run_doxygen.py` for all header files listed in the ``INPUT`` statement of :idf_file:`docs/doxygen/Doxyfile_common`.
|
1. Update is done on each documentation build by invoking the Sphinx extension ``esp_extensions/run_doxygen.py`` for all header files listed in the ``INPUT`` statement of :idf_file:`docs/doxygen/Doxyfile`.
|
||||||
|
|
||||||
1. Each line of the ``INPUT`` statement (other than a comment that begins with ``##``) contains a path to header file ``*.h`` that will be used to generate corresponding ``*.inc`` files::
|
1. Each line of the ``INPUT`` statement (other than a comment that begins with ``##``) contains a path to header file ``*.h`` that will be used to generate corresponding ``*.inc`` files::
|
||||||
|
|
||||||
@ -94,10 +94,10 @@ API Reference
|
|||||||
|
|
||||||
See `Breathe documentation <https://breathe.readthedocs.io/en/latest/directives.html>`_ for additional information.
|
See `Breathe documentation <https://breathe.readthedocs.io/en/latest/directives.html>`_ for additional information.
|
||||||
|
|
||||||
To provide a link to header file, use the :ref:`link custom role <link-custom-roles>` as follows::
|
To provide a link to a header file, use the link custom role as follows::
|
||||||
|
|
||||||
* :component_file:`path_to/header_file.h`
|
* :component_file:`path_to/header_file.h`
|
||||||
|
|
||||||
1. In any case, to generate API reference, the file :idf_file:`docs/doxygen/Doxyfile_common` should be updated with paths to ``*.h`` headers that are being documented.
|
1. In any case, to generate API reference, the file :idf_file:`docs/doxygen/Doxyfile` should be updated with paths to ``*.h`` headers that are being documented.
|
||||||
|
|
||||||
1. When changes are committed and the documentation is built, check how this section has been rendered. :doc:`Correct annotations <../contribute/documenting-code>` in respective header files, if required.
|
||||||
|
@ -11,7 +11,7 @@ try:
|
|||||||
except ImportError:
|
except ImportError:
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
sys.path.insert(0, os.path.abspath('..'))
|
sys.path.insert(0, os.path.abspath('../'))
|
||||||
from conf_common import * # noqa: F403,F401
|
from conf_common import * # noqa: F403,F401
|
||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
|
@ -1,197 +0,0 @@
|
|||||||
Documentation Add-ons and Extensions Reference
|
|
||||||
==============================================
|
|
||||||
|
|
||||||
:link_to_translation:`zh_CN:[中文]`
|
|
||||||
|
|
||||||
This documentation is created using the `Sphinx <http://www.sphinx-doc.org/>`_ application that renders text source files in `reStructuredText <https://en.wikipedia.org/wiki/ReStructuredText>`_ (``.rst``) format located in the :idf:`docs` directory. For more details on that process, please refer to section :doc:`documenting-code`.
|
|
||||||
|
|
||||||
Besides Sphinx, there are several other applications that help to provide nicely formatted and easy to navigate documentation. These applications are listed in section :ref:`setup-for-building-documentation` with the installed version numbers provided in file :idf_file:`docs/requirements.txt`.
|
|
||||||
|
|
||||||
We build ESP-IDF documentation for two languages (English, Simplified Chinese) and for multiple chips. Therefore, we do not run ``sphinx`` directly; instead, a wrapper Python program ``build_docs.py`` runs Sphinx.
|
|
||||||
|
|
||||||
On top of that, we have created a couple of custom add-ons and extensions to help integrate the documentation with the underlying `ESP-IDF`_ repository and to further improve navigation as well as maintenance of the documentation.
|
|
||||||
|
|
||||||
The purpose of this section is to provide a quick reference to the add-ons and the extensions.
|
|
||||||
|
|
||||||
Documentation Folder Structure
|
|
||||||
------------------------------
|
|
||||||
|
|
||||||
* The ESP-IDF repository contains a dedicated documentation folder :idf:`docs` in the root.
|
|
||||||
* The ``docs`` folder contains localized documentation in :idf:`docs/en` (English) and :idf:`docs/zh_CN` (simplified Chinese) subfolders.
|
|
||||||
* Graphics files and fonts common to localized documentation are contained in :idf:`docs/_static` subfolder.
|
|
||||||
* Remaining files in the root of ``docs`` as well as ``docs/en`` and ``docs/zh_CN`` provide configuration and scripts used to automate documentation processing including the add-ons and extensions.
|
|
||||||
* Sphinx extensions are provided in two directories, ``extensions`` and ``idf_extensions``.
|
|
||||||
* A ``_build`` directory is created in the ``docs`` folder by ``build_docs.py``. This directory is not added to the `ESP-IDF`_ repository.
|
|
||||||
|
|
||||||
|
|
||||||
Add-ons and Extensions Reference
|
|
||||||
--------------------------------
|
|
||||||
|
|
||||||
Config Files
|
|
||||||
^^^^^^^^^^^^
|
|
||||||
|
|
||||||
:idf_file:`docs/conf_common.py`
|
|
||||||
This file contains configuration common to each localized documentation set (e.g. English, Chinese). The contents of this file are imported into the standard Sphinx configuration file ``conf.py`` located in the respective language folders (e.g. ``docs/en``, ``docs/zh_CN``) during the build for each language.
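As a minimal sketch of what that import looks like (compare the ``conf.py`` change earlier in this commit), each language-specific ``conf.py`` pulls the shared settings in with a star import::

    import os
    import sys
    sys.path.insert(0, os.path.abspath('..'))
    from conf_common import *  # noqa: F403,F401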
|
|
||||||
|
|
||||||
:idf_file:`docs/sphinx-known-warnings.txt`
|
|
||||||
There are a couple of spurious Sphinx warnings that cannot be resolved without updating the Sphinx source code itself. For such specific cases, the respective warnings are documented in the ``sphinx-known-warnings.txt`` file, which is checked during the documentation build in order to ignore the spurious warnings.
|
|
||||||
|
|
||||||
|
|
||||||
Scripts
|
|
||||||
^^^^^^^
|
|
||||||
|
|
||||||
:idf_file:`docs/build_docs.py`
|
|
||||||
|
|
||||||
Top-level executable program which runs a Sphinx build for one or more language/target combinations. Run ``build_docs.py --help`` for full command line options.
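For example, a local build of the English documentation for a single chip target might look as follows (shown for illustration only; run ``./build_docs.py --help`` for the authoritative list of options and targets)::

    cd docs
    pip install -r requirements.txt
    ./build_docs.py -l en -t esp32 build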
|
|
||||||
|
|
||||||
When ``build_docs.py`` runs Sphinx it sets the ``idf_target`` configuration variable, sets a Sphinx tag with the same name as the configuration variable, and uses some environment variables to communicate paths to :ref:`IDF-Specific Extensions`.
|
|
||||||
|
|
||||||
:idf_file:`docs/check_lang_folder_sync.sh`
|
|
||||||
To reduce potential discrepancies when maintaining concurrent language versions, the structure and filenames of the language folders ``docs/en`` and ``docs/zh_CN`` should be kept identical. The script ``check_lang_folder_sync.sh`` is run on each documentation build to verify that this condition is met.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
If new content is provided in, for example, English only and there is no translation yet, then the corresponding file in the ``zh_CN`` folder should contain an ``.. include::`` directive pointing to the source file in English. This will automatically include the English version and make it visible to Chinese readers. For example, if the file ``docs/zh_CN/contribute/documenting-code.rst`` does not have a Chinese translation, it should contain ``.. include:: ../../en/contribute/documenting-code.rst`` instead.
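In other words, the Chinese-side placeholder file then typically consists of just that single directive::

    .. include:: ../../en/contribute/documenting-code.rst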
|
|
||||||
|
|
||||||
Non-Docs Scripts
|
|
||||||
^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
These scripts are used to build the docs but are also used for other purposes:
|
|
||||||
|
|
||||||
:idf_file:`tools/gen_esp_err_to_name.py`
|
|
||||||
This script traverses the `ESP-IDF`_ directory structure looking for error codes and messages in source code header files and generates an ``.inc`` file that is included in the documentation under :doc:`../api-reference/error-codes`.
|
|
||||||
|
|
||||||
:idf_file:`tools/kconfig_new/confgen.py`
|
|
||||||
Options to configure ESP-IDF's :idf:`components` are contained in ``Kconfig`` files located inside the directories of individual components, e.g. :idf_file:`components/bt/Kconfig`. This script traverses the component directories to collect configuration options and generates an ``.inc`` file that is included in the documentation under :ref:`configuration-options-reference`.
|
|
||||||
|
|
||||||
Generic Extensions
|
|
||||||
^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
These are Sphinx extensions developed for IDF that don't rely on any IDF-docs-specific behaviour or configuration:
|
|
||||||
|
|
||||||
:idf_file:`docs/extensions/toctree_filter.py`
|
|
||||||
A Sphinx extension that overrides the ``:toctree:`` directive to allow filtering entries based on whether a tag is set, as ``:tagname: toctree_entry``. See the Python file for a more complete description.
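A rough sketch of how a filtered entry might look inside a toctree (the document names below are invented for illustration; the second entry is only included when the ``esp32`` tag is set)::

    .. toctree::
        :maxdepth: 1

        generic-feature
        :esp32: esp32-only-feature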
|
|
||||||
|
|
||||||
:idf_file:`docs/extensions/list_filter.py`
|
|
||||||
A Sphinx extension that provides a ``.. list::`` directive, which allows filtering of entries in lists based on whether a tag is set, as ``:tagname: - list content``. See the Python file for a more complete description.
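As a sketch (the bullet text is invented for illustration), a filtered list might be written as::

    .. list::

        - This entry appears in every build
        :esp32: - This entry appears only when the ``esp32`` tag is set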
|
|
||||||
|
|
||||||
:idf_file:`docs/extensions/html_redirects.py`
|
|
||||||
Over the documentation's lifetime, some source files are moved between folders or renamed. This Sphinx extension adds a mechanism to redirect documentation pages whose URL has changed, by generating static HTML redirect pages in the Sphinx output. The extension is used together with a redirection list ``html_redirect_pages``; ``conf_common.py`` builds this list from :idf_file:`docs/page_redirects.txt`.
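Assuming the usual one-mapping-per-line layout of such a redirect list (the paths below are invented for illustration), each entry maps an old document path to its new location::

    # Redirects: <old path>    <new path>
    api-guides/old-page-name    api-guides/new-page-name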
|
|
||||||
|
|
||||||
|
|
||||||
Third Party Extensions
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
- ``sphinxcontrib`` extensions for blockdiag, seqdiag, actdiag, nwdiag, rackdiag & packetdiag diagrams.
|
|
||||||
- `Sphinx selective exclude`_ ``eager_only`` extension.
|
|
||||||
|
|
||||||
.. _idf-specific extensions:
|
|
||||||
|
|
||||||
IDF-Specific Extensions
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Build System Integration
|
|
||||||
########################
|
|
||||||
|
|
||||||
:idf:`docs/idf_extensions/build_system/`
|
|
||||||
|
|
||||||
Python package implementing a Sphinx extension to pull IDF build system information into the docs build.
|
|
||||||
|
|
||||||
* Creates a dummy CMake IDF project and runs CMake to generate metadata.
|
|
||||||
* Registers some new configuration variables and emits a new Sphinx event, both for use by other extensions.
|
|
||||||
|
|
||||||
Configuration Variables
|
|
||||||
@@@@@@@@@@@@@@@@@@@@@@@
|
|
||||||
|
|
||||||
* ``docs_root`` - The absolute path of the ``$IDF_PATH/docs`` directory.
* ``idf_path`` - The value of the ``IDF_PATH`` environment variable, or the absolute path to ``IDF_PATH`` if the environment variable is unset.
* ``build_dir`` - The build directory passed in by ``build_docs.py``; the default is of the form ``_build/<lang>/<target>``.
* ``idf_target`` - The ``IDF_TARGET`` value. ``build_docs.py`` is expected to set this on the Sphinx command line.
|
|
||||||
|
|
||||||
New Event
|
|
||||||
@@@@@@@@@
|
|
||||||
|
|
||||||
The ``idf-info`` event is emitted early in the build, after the dummy project CMake run is complete.
|
|
||||||
|
|
||||||
Arguments are ``(app, project_description)``, where ``project_description`` is a dict containing the values parsed from ``project_description.json`` in the CMake build directory.
|
|
||||||
|
|
||||||
Other IDF-specific extensions subscribe to this event and use it to set up some docs parameters based on build system info.
|
|
||||||
|
|
||||||
Other Extensions
|
|
||||||
################
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/include_build_file.py`
|
|
||||||
The ``include-build-file`` directive is like the built-in ``include-file`` directive, but the file path is evaluated relative to ``build_dir``.
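For example, an API reference page can pull in a Doxygen-generated include file like this (the file name is illustrative only; the actual ``.inc`` files are produced from the headers listed in the Doxyfile)::

    .. include-build-file:: inc/esp_example_driver.inc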
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/kconfig_reference.py`
|
|
||||||
Subscribes to ``idf-info`` event and uses confgen to generate ``kconfig.inc`` from the components included in the default project build. This file is then included into :doc:`/api-reference/kconfig`.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/link_roles.py`
|
|
||||||
This is an implementation of custom `Sphinx roles <https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html>`_ that help link from the documentation to specific files and folders in `ESP-IDF`_. For a description of the implemented roles, please see :ref:`link-custom-roles` and :ref:`link-language-versions`.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/esp_err_definitions.py`
|
|
||||||
Small wrapper extension that calls ``gen_esp_err_to_name.py`` and updates the included .rst file if it has changed.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/gen_toolchain_links.py`
|
|
||||||
There are a couple of places in the documentation that provide links to download the toolchain. To keep this information in one place and reduce the effort of manually updating several files, this script generates toolchain download links and toolchain unpacking code snippets based on information found in :idf_file:`tools/toolchain_versions.mk`.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/gen_version_specific_includes.py`
|
|
||||||
Another extension that automatically generates reStructuredText ``.inc`` snippets with version-based content for this ESP-IDF version.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/util.py`
|
|
||||||
A collection of utility functions useful primarily when building documentation locally (see :ref:`setup-for-building-documentation`) to reduce the time to generate documentation on second and subsequent builds.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/format_idf_target.py`
|
|
||||||
An extension for replacing generic target-related names with the ``idf_target`` passed to the Sphinx command line.
|
|
||||||
|
|
||||||
This is a {\IDF_TARGET_NAME}, with /{\IDF_TARGET_PATH_NAME}/soc.c, compiled with `{\IDF_TARGET_TOOLCHAIN_PREFIX}-gcc`
|
|
||||||
with `CONFIG_{\IDF_TARGET_CFG_PREFIX}_MULTI_DOC`
|
|
||||||
will, if the backslashes are removed, render as
|
|
||||||
This is a {IDF_TARGET_NAME}, with /{IDF_TARGET_PATH_NAME}/soc.c, compiled with `{IDF_TARGET_TOOLCHAIN_PREFIX}-gcc` with `CONFIG_{IDF_TARGET_CFG_PREFIX}_MULTI_DOC`.
|
|
||||||
|
|
||||||
|
|
||||||
Also supports markup for defining local (single ``.rst`` file) substitutions with the following syntax:
|
|
||||||
{\IDF_TARGET_TX_PIN:default="IO3",esp32="IO4",esp32s2="IO5"}
|
|
||||||
|
|
||||||
This will define a replacement of the tag {\IDF_TARGET_TX_PIN} in the current rst-file.
|
|
||||||
|
|
||||||
The extension also overrides the default ``.. include::`` directive in order to format any included content using the same rules.
|
|
||||||
|
|
||||||
These replacements cannot be used inside markup that relies on the alignment of characters, e.g. tables.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/latex_builder.py`
|
|
||||||
An extension for adding ESP-IDF-specific functionality to the LaTeX builder. It overrides the default Sphinx LaTeX builder.
|
|
||||||
|
|
||||||
Creates and adds the ``espidf.sty`` LaTeX package to the output directory; it contains some macros for run-time variables such as the IDF target.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/gen_defines.py`
|
|
||||||
A Sphinx extension to integrate defines from IDF into the Sphinx build; it runs after the IDF dummy project has been built.
|
|
||||||
|
|
||||||
Parses defines and adds them as Sphinx tags.
|
|
||||||
|
|
||||||
Emits the new ``idf-defines-generated`` event, which has a dictionary of raw text define values that other extensions can use to generate relevant data.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/exclude_docs.py`
|
|
||||||
A Sphinx extension that updates the excluded documents according to the ``conditional_include_dict`` ({tag: documents}). If the tag is set, then the corresponding list of documents will be included.
|
|
||||||
|
|
||||||
Also responsible for excluding documents when building with the config value ``docs_to_build`` set. In these cases all documents not listed in ``docs_to_build`` will be excluded.
|
|
||||||
|
|
||||||
Subscribes to ``idf-defines-generated``, as it relies on the Sphinx tags to determine which documents to exclude.
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/run_doxygen.py`
|
|
||||||
Subscribes to ``idf-defines-generated`` event and runs Doxygen (:idf_file:`docs/doxygen/Doxyfile_common`) to generate XML files describing key headers, and then runs Breathe to convert these to ``.inc`` files which can be included directly into API reference pages.
|
|
||||||
|
|
||||||
Pushes a number of target-specific custom environment variables into Doxygen, including all macros defined in the project's default ``sdkconfig.h`` file and all macros defined in all ``soc`` component ``xxx_caps.h`` headers. This means that public API headers can depend on target-specific configuration options or ``soc`` capabilities headers options as ``#ifdef`` & ``#if`` preprocessor selections in the header.
|
|
||||||
|
|
||||||
This means we can generate different Doxygen files, depending on the target we are building docs for.
|
|
||||||
|
|
||||||
Please refer to :doc:`documenting-code` and :doc:`../api-reference/template`, section **API Reference** for additional details on this process.
|
|
||||||
|
|
||||||
Related Documents
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
* :doc:`documenting-code`
|
|
||||||
|
|
||||||
|
|
||||||
.. _ESP-IDF: https://github.com/espressif/esp-idf/
|
|
||||||
.. _Sphinx selective exclude: https://github.com/pfalcon/sphinx_selective_exclude
|
|
@ -83,8 +83,8 @@ Go one extra mile
|
|||||||
|
|
||||||
Here are a couple of tips on how you can make your documentation even better and more useful to the reader and writer.
|
Here are a couple of tips on how you can make your documentation even better and more useful to the reader and writer.
|
||||||
|
|
||||||
When writing code, please follow the guidelines below:
|
||||||
|
|
||||||
1. Add code snippets to illustrate implementation. To do so, enclose the snippet using ``@code{c}`` and ``@endcode`` commands. ::
|
||||||
|
|
||||||
*
|
*
|
||||||
@ -164,422 +164,26 @@ When it comes to text, please follow guidelines below to provide well formatted
|
|||||||
:align: center
|
:align: center
|
||||||
:scale: 50%
|
:scale: 50%
|
||||||
:alt: Keep the line number for EN and CN files consistent (click to enlarge)
|
:alt: Keep the line number for EN and CN files consistent (click to enlarge)
|
||||||
|
|
||||||
Keep the line number for EN and CN documents consistent (click to enlarge)
|
Keep the line number for EN and CN documents consistent (click to enlarge)
|
||||||
|
|
||||||
.. _link-custom-roles:
|
|
||||||
|
|
||||||
Linking Examples
|
|
||||||
----------------
|
|
||||||
|
|
||||||
When linking to examples on GitHub, do not use absolute/hardcoded URLs. Instead, use docutils custom roles that will generate links for you. These auto-generated links point to the tree or blob for the git commit ID (or tag) of the repository. This is needed to ensure that links do not get broken when files in master branch are moved around or deleted. The roles will transparently handle files that are located in submodules and will link to the submodule's repository with the correct commit ID.
|
|
||||||
|
|
||||||
The following roles are provided:
|
|
||||||
|
|
||||||
- ``:idf:`path``` - points to directory inside ESP-IDF
|
|
||||||
- ``:idf_file:`path``` - points to file inside ESP-IDF
|
|
||||||
- ``:idf_raw:`path``` - points to raw view of the file inside ESP-IDF
|
|
||||||
- ``:component:`path``` - points to directory inside ESP-IDF components dir
|
|
||||||
- ``:component_file:`path``` - points to file inside ESP-IDF components dir
|
|
||||||
- ``:component_raw:`path``` - points to raw view of the file inside ESP-IDF components dir
|
|
||||||
- ``:example:`path``` - points to directory inside ESP-IDF examples dir
|
|
||||||
- ``:example_file:`path``` - points to file inside ESP-IDF examples dir
|
|
||||||
- ``:example_raw:`path``` - points to raw view of the file inside ESP-IDF examples dir
|
|
||||||
|
|
||||||
Example implementation::
|
|
||||||
|
|
||||||
* :example:`get-started/hello_world`
|
|
||||||
* :example:`Hello World! <get-started/hello_world>`
|
|
||||||
|
|
||||||
How it renders:
|
|
||||||
|
|
||||||
* :example:`get-started/hello_world`
|
|
||||||
* :example:`Hello World! <get-started/hello_world>`
|
|
||||||
|
|
||||||
A check is added to the CI build script, which searches RST files for presence of hard-coded links (identified by tree/master, blob/master, or raw/master part of the URL). This check can be run manually: ``cd docs`` and then ``make gh-linkcheck``.
|
|
||||||
|
|
||||||
|
|
||||||
.. _link-language-versions:
|
|
||||||
|
|
||||||
Linking Language Versions
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
Switching between documentation in different languages may be done using the ``:link_to_translation:`` custom role. The role placed on a documentation page provides a link to the same page in the language specified as a parameter. The examples below show how to enter links to the Chinese and English versions of the documentation::
|
|
||||||
|
|
||||||
:link_to_translation:`zh_CN:中文版`
|
|
||||||
:link_to_translation:`en:English`
|
|
||||||
|
|
||||||
The language is specified using standard abbreviations like ``en`` or ``zh_CN``. The text after the last colon is not standardized and may be entered depending on the context where the link is placed, e.g.::
|
|
||||||
|
|
||||||
:link_to_translation:`en:see description in English`
|
|
||||||
|
|
||||||
|
|
||||||
.. _add-illustrations:
|
|
||||||
|
|
||||||
Add Illustrations
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
Consider adding diagrams and pictures to illustrate described concepts.
|
|
||||||
|
|
||||||
Sometimes it is better to add an illustration than to write a lengthy paragraph describing a complex idea, a data structure, or an algorithm. This repository uses the `blockdiag <http://blockdiag.com/en/index.html>`_ suite of tools to generate diagram images from simple text files.
|
|
||||||
|
|
||||||
The following types of diagrams are supported:
|
|
||||||
|
|
||||||
* `Block diagram <http://blockdiag.com/en/blockdiag/index.html>`_
|
|
||||||
* `Sequence diagram <http://blockdiag.com/en/seqdiag/index.html>`_
|
|
||||||
* `Activity diagram <http://blockdiag.com/en/actdiag/index.html>`_
|
|
||||||
* `Logical network diagram <http://blockdiag.com/en/nwdiag/index.html>`_
|
|
||||||
|
|
||||||
With this suite of tools, it is possible to generate beautiful diagram images from a simple text format (similar to graphviz's DOT format). The diagram elements are laid out automatically. The diagram code is then converted into ``.png`` graphics and integrated "behind the scenes" into **Sphinx** documents.
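As a minimal sketch (the node labels are invented for illustration), the source for a simple block diagram embedded in a page looks like this::

    .. blockdiag::

        blockdiag {
            "Write .rst" -> "Run build_docs.py" -> "Review the output";
        }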
|
|
||||||
|
|
||||||
For the diagram preparation, you can use an on-line `interactive shell`_ that instantly shows the rendered image.
|
|
||||||
|
|
||||||
Below are a couple of diagram examples:
|
|
||||||
|
|
||||||
* Simple **block diagram** / ``blockdiag`` - `Wi-Fi Buffer Configuration <http://interactive.blockdiag.com/?compression=deflate&src=eJylUk1rwkAQvfsrBntpIUKiRQqSgK0VSj0EtCi0EjbJxCyuuyG7QW3pf-9m06hJeyg0t33zmHkfCZmItjElGwiLJME8IEwjRFHBA3WAj04H9HcFGyZCwoAoldOwUCgNzkWMwZ7GKgUXnKE9gjOcIt2kSuN39sigMiP8jDqX6GmF_Y3GmJCCqUCmJEM9yEXBY4xDcWjOE8GVpO9oztdaGQmRSRAJlMZysjOCKsVj358Fi_H8GV4Nze2Os4zRyvEbB0XktrseQWVktn_ym-wS-UFb0ilt0pa0N6Vn3i_KUEY5zcqrbXWTx_nDaZHjwYvEHGKiSNeC2q_r3FpQZekObAtMTi4XCi2IBBO5e0Rd5L7ppLG574GvO__PUuO7sXTgweTIyY5GcD1XOtToBhYruDf_VvuUad3tD-0_Xq1TLPPSI84xKvNrF9vzLnrTj1M7rYhrXv24cCPVkZUaOK47n1-lOvbk>`_
|
|
||||||
* Slightly more complicated **block diagram** - `Wi-Fi programming model <http://interactive.blockdiag.com/?compression=deflate&src=eJyFk09P40AMxe98CqscIVILq72UIFX8kSoQWy0RHABFTuImFtOZaGYKuyC-O840bagaRI7Pfs7Pz0mmTP5cMJbwynNOa2tKi4sF6zJdmIIUvO_tgTz7UCqToQL03nK29OSCrqUpfeXCVxDD6Gg47tSKuKy8yL9b1dWov1E3E4atWtAcl8qnrsKapGDNUhdUZObfdr2UQp3mRhkrXdpoGq-BGwhQmJFaoSZns_Q2mZxdwUNQ44Eojxqcx_x5cAhzo73jN4pHv55WL7m4u0nSZHLbOeiFtBePR9dvmcxm19sWrGvFOXo2utd4CGH5eHQ8bGfcTy-n6fnfO9jMuOfoksV9bvmFbO-Lr27-JPAQ4oqbGJ62c8iN1pQ3EA4O-lOJTncXDvvupCGdu3vmqFQmSQqm3CIYBx0EWou6pADjQJbw3Bj-h3I4onxpsHrCQLnmoD0yVKgLJXuP1x3GsowPmUpfbay3yH5T7khPoi7NnpU-1nisPdkFyY_gV4x9XB3Y0pHdpfoJ60toURQOtqbYuvpJ1B6zDXYym0qmTVpNnh-fpWcbRA>`_
|
|
||||||
* **Sequence diagram** / ``seqdiag`` - `Scan for a Specific AP in All Channels <http://interactive.blockdiag.com/seqdiag/?compression=deflate&src=eJyVkU1PwzAMhu_7FdburUgQXMomTaPcKIdOIIRQlDVuG1EloUknPsR_J2s2rRsT2nKJ9drvY8ex-C4kr8AWXLFSt8waLBg38D0Cf3jh5Io7qRVMQGmFSS-jqJA1qCpXe51cXwTZGg-pUVa1W8tXQRVY8q5xzNbcoNdb3SmBYqk_9vOlVs7Kr3UJoQmMwgDGMMftWwK4QuU28ZOM7uQm3q_zYTQd5OGl4UtsJmMSE5jCXKtSVl2LUPgpXPvpb4Hj1-RUCPWQ3O_K-wKpX84WMLAcB9B-igCouVLYADnDTA_N9GRzHMdnNMoOG2Vb8-4b4CY6Zr4MT3zOF-k9Sx_TbMHy-Sxjtw9Z-mfRHjEA7hD0X8TPLxU91AQ>`_
|
|
||||||
* **Packet diagram** / ``packetdiag`` - `NVS Page Structure <http://interactive.blockdiag.com/packetdiag/?compression=deflate&src=eJxFkMFOwzAQRO_9ij2mh63idRKaSj1V_ACIE6DIxG4StTgh3oCg6r_j2JTs8c3szNqDqk-GdacasJ-uGlRjKsfjVPM0GriswE_dn786zS3sQRJAYLbXprpRkS-sNV3TcrAGqM1RTWeujr1l1_2Y2U6rIKUod_DIis2LTbJ1YBneeWY-Nj5ts-AtkudPdnJGQ0JppLRFKXZweDhIWrySsPDB95bHb3BzPLx1_K4GSCSt_-4vMizzmykNSuBlgWKuioJYBOHLROnbEBGe_ZfEh-7pNcolIdF_raA8rl5_AaqqWyE>`_
|
|
||||||
|
|
||||||
Try them out by modifying the source code and watching the diagram render instantly below.
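For orientation, blockdiag sources are plain text. A minimal block diagram might look like the following sketch (node names are placeholders)::

    blockdiag {
       A -> B -> C;
       B -> D;
    }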
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
There may be slight differences in the rendering of the font used by the `interactive shell`_ compared to the font used in the esp-idf documentation.
|
|
||||||
|
|
||||||
|
|
||||||
Add Notes
|
|
||||||
---------
|
|
||||||
|
|
||||||
While working on a document, you might need to:
|
|
||||||
|
|
||||||
- Place some suggestions on what should be added or modified in the future.
|
|
||||||
- Leave a reminder for yourself or somebody else to follow up.
|
|
||||||
|
|
||||||
In this case, add a todo note to your reST file using the directive ``.. todo::``. For example:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. todo::
|
|
||||||
|
|
||||||
Add a package diagram.
|
|
||||||
|
|
||||||
If you add ``.. todolist::`` to a reST file, the directive will be replaced by a list of all todo notes from the whole documentation.
|
|
||||||
|
|
||||||
By default, the directives ``.. todo::`` and ``.. todolist::`` are ignored by documentation builders. If you want the notes and the list of notes to be visible in your locally built documentation, do the following:
|
|
||||||
|
|
||||||
1. Open your local ``conf_common.py`` file.
|
|
||||||
2. Find the parameter ``todo_include_todos``.
|
|
||||||
3. Change its value from ``False`` to ``True``.
|
|
||||||
|
|
||||||
Before pushing your changes to origin, please set the value of ``todo_include_todos`` back to ``False``.
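With the change applied, the relevant line in your local ``conf_common.py`` would read as follows (a minimal sketch; remember to revert it before pushing)::

    todo_include_todos = True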
|
|
||||||
|
|
||||||
For more details about the extension, see `sphinx.ext.todo <https://www.sphinx-doc.org/en/master/usage/extensions/todo.html#directive-todolist>`_ documentation.
|
|
||||||
|
|
||||||
Writing generic documentation for multiple chips
|
|
||||||
------------------------------------------------
|
|
||||||
|
|
||||||
The documentation for all of Espressif's chips is built from the same files. To facilitate the writing of documents that can be re-used for multiple different chips (called "targets" below), we provide you with the following functionality:
|
|
||||||
|
|
||||||
Exclusion of content based on chip-target
|
|
||||||
""""""""""""""""""""""""""""""""""""""""""""
|
|
||||||
|
|
||||||
Occasionally there will be content that is only relevant for one of the targets. When this is the case, you can exclude that content by using the ``.. only:: TAG`` directive, where you replace ``TAG`` with one of the following names:
|
|
||||||
|
|
||||||
Chip name:
|
|
||||||
|
|
||||||
* esp32
|
|
||||||
* esp32s2
|
|
||||||
* esp32c3
|
|
||||||
|
|
||||||
Define identifiers from ``sdkconfig.h``, generated by the default menuconfig settings for the target, e.g.:
|
|
||||||
|
|
||||||
* CONFIG_FREERTOS_UNICORE
|
|
||||||
|
|
||||||
Define identifiers from the soc ``*_caps`` headers, e.g.:
|
|
||||||
|
|
||||||
* SOC_BT_SUPPORTED
|
|
||||||
* SOC_CAN_SUPPORTED
|
|
||||||
|
|
||||||
Example:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. only:: esp32
|
|
||||||
|
|
||||||
ESP32 specific content.
|
|
||||||
|
|
||||||
This directive also supports the boolean operators ``and``, ``or`` and ``not``. Example:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. only:: SOC_BT_SUPPORTED and CONFIG_FREERTOS_UNICORE
|
|
||||||
|
|
||||||
BT specific content only relevant for single-core targets.
|
|
||||||
|
|
||||||
This functionality is provided by the `Sphinx selective exclude <https://github.com/pfalcon/sphinx_selective_exclude>`_ extension.
|
|
||||||
|
|
||||||
A weakness in this extension is that it does not correctly handle the case where you exclude a section that is directly followed by a labeled new section. In these cases everything will render correctly, but the label will not correctly link to the section that follows. A temporary workaround for the cases where this can't be avoided is the following:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. only:: esp32
|
|
||||||
|
|
||||||
.. _section_1_label:
|
|
||||||
|
|
||||||
Section 1
|
|
||||||
^^^^^^^^^
|
|
||||||
|
|
||||||
Section one content
|
|
||||||
|
|
||||||
.. _section_2_label:
|
|
||||||
|
|
||||||
.. only:: not esp32
|
|
||||||
|
|
||||||
.. _section_2_label:
|
|
||||||
|
|
||||||
Section 2
|
|
||||||
^^^^^^^^^
|
|
||||||
Section 2 content
|
|
||||||
|
|
||||||
The ``:TAG:`` role is used for excluding content from a table of contents tree. For example:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
:esp32: configure-wrover
|
|
||||||
configure-other-jtag
|
|
||||||
|
|
||||||
When building the documents, Sphinx will use the above-mentioned directive and role to include or exclude content based on the target tag it was called with.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
If excluding an entire document from the toctree based on targets, it's necessary to also update the ``exclude_patterns`` list in :idf_file:`docs/conf_common.py` to exclude the file for other targets, or a Sphinx warning "WARNING: document isn't included in any toctree" will be generated.
|
|
||||||
|
|
||||||
The recommended way of doing this is adding the document to one of the lists that get included in ``conditional_include_dict`` in :idf_file:`docs/conf_common.py`, e.g. a document which should only be shown for BT-capable targets should be added to ``BT_DOCS``. :idf_file:`docs/idf_extensions/exclude_docs.py` will then take care of adding it to ``exclude_patterns`` if the corresponding tag is not set.
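For illustration, such an entry could look roughly like the following (a sketch only; the file names are placeholders and the exact shape of ``BT_DOCS`` may differ)::

    # in docs/conf_common.py
    BT_DOCS = ['api-reference/bluetooth/index.rst',
               'api-reference/bluetooth/esp_bt_defs.rst']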
|
|
||||||
|
|
||||||
If you need to exclude content inside a list or bullet points, then this should be done by using the ``:TAG:`` role inside the ``.. list::`` directive.
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. list::
|
|
||||||
|
|
||||||
:esp32: - ESP32 specific content
|
|
||||||
:SOC_BT_SUPPORTED: - BT specific content
|
|
||||||
- Common bullet point
|
|
||||||
- Also common bullet point
|
|
||||||
|
|
||||||
|
|
||||||
Substitution macros
|
|
||||||
"""""""""""""""""""
|
|
||||||
When you need to refer to the chip's name, toolchain name, path or other common names that depend on the target type, consider using the substitution macros supplied by :idf_file:`docs/idf_extensions/format_idf_target.py`.
|
|
||||||
|
|
||||||
For example, the following reStructuredText content:
|
|
||||||
|
|
||||||
This is a {\IDF_TARGET_NAME}, with /{\IDF_TARGET_PATH_NAME}/soc.c, compiled with `{\IDF_TARGET_TOOLCHAIN_PREFIX}-gcc` with `CONFIG_{\IDF_TARGET_CFG_PREFIX}_MULTI_DOC`
|
|
||||||
|
|
||||||
Would render in the documentation as:
|
|
||||||
|
|
||||||
This is a {IDF_TARGET_NAME}, with /{IDF_TARGET_PATH_NAME}/soc.c, compiled with `{IDF_TARGET_TOOLCHAIN_PREFIX}-gcc` with `CONFIG_{IDF_TARGET_CFG_PREFIX}_MULTI_DOC`.
|
|
||||||
|
|
||||||
This extension also supports markup for defining local (within a single source file) substitutions. Place a definition like the following into a single line of the RST file:
|
|
||||||
|
|
||||||
{\IDF_TARGET_SUFFIX:default="DEFAULT_VALUE", esp32="ESP32_VALUE", esp32s2="ESP32S2_VALUE", esp32c3="ESP32C3_VALUE"}
|
|
||||||
|
|
||||||
This will define a target-dependent substitution of the tag {\IDF_TARGET_SUFFIX} in the current RST file. For example:
|
|
||||||
|
|
||||||
{\IDF_TARGET_TX_PIN:default="IO3", esp32="IO4", esp32s2="IO5", esp32c3="IO6"}
|
|
||||||
|
|
||||||
Will define a substitution for the tag {\IDF_TARGET_TX_PIN}, which would be replaced by the text IO5 if Sphinx was called with the tag ``esp32s2``.
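Elsewhere in the same file, the tag can then be used directly in the running text, e.g. (a sketch based on the definition above)::

    The default TX pin is {\IDF_TARGET_TX_PIN}.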
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
These single-file definitions can be placed anywhere in the .rst file (on their own line), but the name of the definition must start with ``IDF_TARGET_``.
|
|
||||||
|
|
||||||
|
|
||||||
Put it all together
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
Once the documentation is ready, follow the instructions in :doc:`../api-reference/template` and create a single file that will merge all individual pieces of the prepared documentation. Finally, add a link to this file to the respective ``.. toctree::`` in the ``index.rst`` file located in the ``/docs`` folder or its subfolders.
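For example, a new toctree entry could look like this (the document name is a placeholder)::

    .. toctree::
       :maxdepth: 1

       your-new-document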
|
|
||||||
|
|
||||||
|
|
||||||
OK, but I am new to Sphinx!
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
1. No worries. All the software you need is well documented. It is also open source and free. Start by checking the `Sphinx`_ documentation. If you are not sure how to write using the rst markup language, see the `reStructuredText Primer <https://www.sphinx-doc.org/en/stable/rest.html>`_. You can also use markdown (.md) files, and find out more about the specific markdown syntax that we use on the `Recommonmark parser's documentation page <https://recommonmark.readthedocs.io/en/latest/>`_.
|
|
||||||
|
|
||||||
2. Check the source files of this documentation to understand what is behind what you see now on the screen. Sources are maintained on GitHub in the `espressif/esp-idf`_ repository, in the :idf:`docs` folder. You can go directly to the source file of this page by scrolling up and clicking the link in the top right corner. When on GitHub, to see what is really inside, open the source files by clicking the ``Raw`` button.
|
|
||||||
|
|
||||||
3. You will likely want to see how the documentation builds and looks before posting it on GitHub. There are two options to do so:
|
|
||||||
|
|
||||||
* Install `Sphinx`_, `Breathe`_, `Blockdiag <http://blockdiag.com/en/index.html>`_ and `Doxygen <http://doxygen.nl/>`_ to build it locally; see the chapter below.
|
|
||||||
|
|
||||||
* Set up an account on `Read the Docs <https://readthedocs.org/>`_ and build documentation in the cloud. Read the Docs provides document building and hosting for free, and their service works quickly and reliably.
|
|
||||||
|
|
||||||
4. To preview documentation before building, use the `Sublime Text <https://www.sublimetext.com/>`_ editor together with the `OmniMarkupPreviewer <https://github.com/timonwong/OmniMarkupPreviewer>`_ plugin.
|
|
||||||
|
|
||||||
|
|
||||||
.. _setup-for-building-documentation:
|
|
||||||
|
|
||||||
Setup for building documentation locally
|
|
||||||
----------------------------------------
|
|
||||||
|
|
||||||
Install Dependencies
|
|
||||||
""""""""""""""""""""
|
|
||||||
|
|
||||||
You can set up the environment to build documentation locally on your PC by installing:
|
|
||||||
|
|
||||||
1. Doxygen - http://doxygen.nl/
|
|
||||||
2. Sphinx - https://github.com/sphinx-doc/sphinx/#readme-for-sphinx
|
|
||||||
3. Breathe - https://github.com/michaeljones/breathe#breathe
|
|
||||||
4. Document theme "sphinx_idf_theme" - https://github.com/espressif/sphinx_idf_theme
|
|
||||||
5. Custom 404 page "sphinx-notfound-page" - https://github.com/readthedocs/sphinx-notfound-page
|
|
||||||
6. Blockdiag - http://blockdiag.com/en/index.html
|
|
||||||
7. Recommonmark - https://github.com/rtfd/recommonmark
|
|
||||||
|
|
||||||
The package "sphinx_idf_theme" is added to have the same "look and feel" of `ESP-IDF Programming Guide <https://docs.espressif.com/projects/esp-idf/en/latest/index.html>`_.
|
|
||||||
|
|
||||||
Do not worry about being confronted with several packages to install. Besides Doxygen, all remaining packages are written in pure Python. Therefore installation of all of them is combined into one simple step.
|
|
||||||
|
|
||||||
.. important:: Docs building now supports Python 3 only. Python 2 installations will not work.
|
|
||||||
|
|
||||||
Doxygen
|
|
||||||
@@@@@@@
|
|
||||||
|
|
||||||
Installation of Doxygen is OS dependent:
|
|
||||||
|
|
||||||
**Linux**
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
sudo apt-get install doxygen
|
|
||||||
|
|
||||||
**Windows** - install in MSYS2 console
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pacman -S doxygen
|
|
||||||
|
|
||||||
**MacOS**
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
brew install doxygen
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
If you are installing on a Windows MSYS2 system (Linux and MacOS users should skip this note; Windows users who don't use MSYS2 will need to find other alternatives), execute the two extra steps below **before** going further. These steps are required to install the dependencies of *blockdiag* discussed under :ref:`add-illustrations`.
|
|
||||||
|
|
||||||
1. Update all the system packages:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ pacman -Syu
|
|
||||||
|
|
||||||
This process will likely require restarting the MSYS2 MINGW32 console and repeating the above command until the update is complete.
|
|
||||||
|
|
||||||
2. Install *pillow*, which is one of the dependencies of *blockdiag*:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ pacman -S mingw32/mingw-w64-i686-python-pillow
|
|
||||||
|
|
||||||
Check in the log on the screen that ``mingw-w64-i686-python-pillow-4.3.0-1`` or newer is installed. Earlier versions of *pillow* will not work.
|
|
||||||
|
|
||||||
A downside of the Windows installation is that the fonts of the :ref:`blockdiag pictures <add-illustrations>` do not render correctly; you will see some random characters instead. Until this issue is fixed, you can use the `interactive shell`_ to see what the complete picture looks like.
|
|
||||||
|
|
||||||
|
|
||||||
Remaining applications
|
|
||||||
@@@@@@@@@@@@@@@@@@@@@@
|
|
||||||
|
|
||||||
|
|
||||||
All remaining applications are `Python <https://www.python.org/>`_ packages and you can install them in one step as follows:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
cd ~/esp/esp-idf/docs
|
|
||||||
pip install --user -r requirements.txt
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
The installation steps assume that ESP-IDF is placed in the ``~/esp/esp-idf`` directory, which is the default location of ESP-IDF used in this documentation.
|
|
||||||
|
|
||||||
Building Documentation
""""""""""""""""""""""

The documentation is built with the `esp-docs` Python package, which is a wrapper around `Sphinx <https://www.sphinx-doc.org/>`_.
|
||||||
|
|
||||||
To install it, simply run::

    cd ~/esp/esp-idf/docs
    pip install esp-docs
|
||||||
|
|
||||||
After a successful install, the documentation can be built from the docs folder with::

    build-docs build
|
||||||
|
|
||||||
This will build docs for all supported ESP-IDF languages & targets. This can take some time, although jobs will run in parallel up to the number of CPU cores you have (this can be modified with the ``--sphinx-parallel-builds`` option, see the ``--help`` output for details).

To build for a single language and target combination only::

    build-docs -t esp32 -l en build

For more in-depth documentation about `esp-docs` features, please see the `esp-docs` documentation.
|
||||||
|
|
||||||
Choices for language (``-l``) are ``en`` and ``zh_CN``. Choices for target (``-t``) are any supported ESP-IDF build system target (for example ``esp32`` and ``esp32s2``).
|
|
||||||
|
|
||||||
The built documentation will be placed in the ``_build/<language>/<target>/html`` folder. To see it, open ``index.html`` inside this directory in a web browser.
|
|
||||||
|
|
||||||
Building a subset of the documentation
|
|
||||||
""""""""""""""""""""""""""""""""""""""
|
|
||||||
Since building the full documentation can be quite slow, it might be useful to build just the subset of the documentation you are interested in.
|
|
||||||
|
|
||||||
This can be achieved by listing the document you want to build::
|
|
||||||
|
|
||||||
./build_docs.py -l en -t esp32 -i api-reference/peripherals/can.rst build
|
|
||||||
|
|
||||||
Building multiple documents is also possible::
|
|
||||||
|
|
||||||
./build_docs.py -l en -t esp32 -i api-reference/peripherals/can.rst api-reference/peripherals/adc.rst build
|
|
||||||
|
|
||||||
As well as wildcards::
|
|
||||||
|
|
||||||
./build_docs.py -l en -t esp32 -i api-reference/peripherals/* build
|
|
||||||
|
|
||||||
Note that this is a feature intended to simplify testing and debugging while writing documentation. The HTML output won't be perfect, i.e. it will not build a proper index that lists all the documents, and any references to documents that are not built will result in warnings.
|
|
||||||
|
|
||||||
Fast build
|
|
||||||
""""""""""
|
|
||||||
Another trick to speed up building is to skip including the Doxygen-generated API documentation in the Sphinx build process; skipping this drastically reduces build time.
|
|
||||||
|
|
||||||
This can be achieved by adding the fast-build argument::
|
|
||||||
|
|
||||||
./build_docs.py build -f
|
|
||||||
|
|
||||||
or by setting the environment variable ``DOCS_FAST_BUILD``. Note that the ``-f`` argument is a subargument to ``build`` and thus must be listed after ``build``.
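For example (assuming, as a sketch, that any non-empty value of the variable enables the fast build)::

    DOCS_FAST_BUILD=1 ./build_docs.py build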
|
|
||||||
|
|
||||||
Building PDF
|
|
||||||
""""""""""""
|
|
||||||
|
|
||||||
It is also possible to build LaTeX files and a PDF of the documentation using ``build_docs.py``. To do this, the following LaTeX packages are required to be installed:
|
|
||||||
|
|
||||||
* latexmk
|
|
||||||
* texlive-latex-recommended
|
|
||||||
* texlive-fonts-recommended
|
|
||||||
* texlive-xetex
|
|
||||||
|
|
||||||
The following fonts are also required to be installed (an example install command is shown after the list):
|
|
||||||
|
|
||||||
* Freefont Serif, Sans and Mono OpenType fonts, available as the package ``fonts-freefont-otf`` on Ubuntu
|
|
||||||
* Lmodern, available as the package ``fonts-lmodern`` on Ubuntu
|
|
||||||
* Fandol, which can be downloaded from `here <https://ctan.org/tex-archive/fonts/fandol>`_
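On Ubuntu, for example, the LaTeX packages and the first two fonts listed above can typically be installed with::

    sudo apt-get install latexmk texlive-latex-recommended texlive-fonts-recommended texlive-xetex fonts-freefont-otf fonts-lmodern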
|
|
||||||
|
|
||||||
Now you can build the PDF for a target by invoking::
|
|
||||||
|
|
||||||
./build_docs.py -bs latex -l en -t esp32 build
|
|
||||||
|
|
||||||
Or alternatively, build both HTML and PDF::
|
|
||||||
|
|
||||||
./build_docs.py -bs html latex -l en -t esp32 build
|
|
||||||
|
|
||||||
The LaTeX files and the PDF will be placed in the ``_build/<language>/<target>/latex`` folder.
|
|
||||||
|
|
||||||
Wrap up
-------
|
||||||
@ -594,12 +198,10 @@ Related Documents
|
|||||||
-----------------
|
||||||
|
|
||||||
* :doc:`../api-reference/template`
|
||||||
* :doc:`add-ons-reference`
|
|
||||||
|
|
||||||
|
|
||||||
.. _espressif/esp-idf: https://github.com/espressif/esp-idf/
|
||||||
|
|
||||||
.. _interactive shell: http://interactive.blockdiag.com/?compression=deflate&src=eJxlUMFOwzAMvecrrO3aITYQQirlAIIzEseJQ5q4TUSIq8TVGIh_J2m7jbKc7Ge_5_dSO1Lv2soWvoVYgieNoMh7VGzJR9FJtugZ7lYQ0UcKEbYNOY36rRQHZHUPT68vV5tceGLbWCUzPfeaFFMoBZzecVc56vWwJFnWMmJ59CCZg617xpOFbTSyw0pmvT_HJ7hxtFNGBr6wvuu5SCkchcrZ1vAeXZomznh5YgTqfcpR02cBO6vZVDeXBRjMjKEcFRbLh8f18-Z2UUBDnqP9wmp9ncRmSSfND2ldGo2h_zse407g0Mxc1q7HzJ3-4jzYYTJjtQH3iSV-fgFzx50J
|
||||||
|
|
||||||
.. _Sphinx: https://www.sphinx-doc.org/
|
|
||||||
.. _Breathe: https://breathe.readthedocs.io
|
||||||
|
@ -1 +0,0 @@
|
|||||||
See docs/en/contribute/add-ons-reference.rst (or in the IDF docs) for details.
|
|
@ -1,30 +0,0 @@
|
|||||||
# Embeds a google analytics tracking tag in every HTML page
|
|
||||||
def setup(app):
|
|
||||||
app.add_config_value('google_analytics_id', None, 'html')
|
|
||||||
app.connect('html-page-context', google_analytics_embed)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
def google_analytics_embed(app, pagename, templatename, context, doctree):
|
|
||||||
|
|
||||||
ga_id = app.config.google_analytics_id
|
|
||||||
if not ga_id:
|
|
||||||
return
|
|
||||||
|
|
||||||
metatags = context.get('metatags', '')
|
|
||||||
|
|
||||||
google_analytics_snippet = """
|
|
||||||
<!-- Global site tag (gtag.js) - Google Analytics -->
|
|
||||||
<script async src="https://www.googletagmanager.com/gtag/js?id={}"></script>
|
|
||||||
<script>
|
|
||||||
window.dataLayer = window.dataLayer || [];
|
|
||||||
function gtag(){{dataLayer.push(arguments);}}
|
|
||||||
gtag('js', new Date());
|
|
||||||
|
|
||||||
gtag('config', '{}');
|
|
||||||
</script>""".format(ga_id, ga_id)
|
|
||||||
|
|
||||||
# Prepend the google analytics to the HTML metatags (which will be passed to the sphinx templating engine)
|
|
||||||
metatags = google_analytics_snippet + metatags
|
|
||||||
context['metatags'] = metatags
|
|
@ -1,82 +0,0 @@
|
|||||||
# Copyright 2018-2019 Espressif Systems (Shanghai) PTE LTD
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
#
|
|
||||||
|
|
||||||
|
|
||||||
# Mechanism to generate static HTML redirect pages in the output
|
|
||||||
#
|
|
||||||
# Uses redirect_template.html and the list of pages given in
|
|
||||||
# the file conf.html_redirect_pages
|
|
||||||
#
|
|
||||||
# Redirections can be internal or absolute (i.e., external).
|
|
||||||
# - Internal redirects are supplied without quotation marks and must be relative to the document root
|
|
||||||
# - External redirects are wrapped in double quotation marks and are used verbatim
|
|
||||||
#
|
|
||||||
# Adapted from ideas in https://tech.signavio.com/2017/managing-sphinx-redirects
|
|
||||||
import os.path
|
|
||||||
|
|
||||||
from sphinx.builders.html import StandaloneHTMLBuilder
|
|
||||||
|
|
||||||
REDIRECT_TEMPLATE = """
|
|
||||||
<html>
|
|
||||||
<head>
|
|
||||||
<meta http-equiv="refresh" content="0; url=$NEWURL" />
|
|
||||||
<script>
|
|
||||||
window.location.href = "$NEWURL"
|
|
||||||
</script>
|
|
||||||
</head>
|
|
||||||
<body>
|
|
||||||
<p>Page has moved <a href="$NEWURL">here</a>.</p>
|
|
||||||
</body>
|
|
||||||
</html>
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
app.add_config_value('html_redirect_pages', [], 'html')
|
|
||||||
# attaching to this event is a hack, but it's a convenient stage in the build
|
|
||||||
# to create HTML redirects
|
|
||||||
app.connect('html-collect-pages', create_redirect_pages)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
def create_redirect_pages(app):
|
|
||||||
if not isinstance(app.builder, StandaloneHTMLBuilder):
|
|
||||||
return # only relevant for standalone HTML output
|
|
||||||
|
|
||||||
for (old_url, new_url) in app.config.html_redirect_pages:
|
|
||||||
if old_url.startswith('/'):
|
|
||||||
print('Stripping leading / from URL in config file...')
|
|
||||||
old_url = old_url[1:]
|
|
||||||
out_file = app.builder.get_outfilename(old_url)
|
|
||||||
|
|
||||||
if new_url.startswith('\"') and new_url.endswith('\"'):
|
|
||||||
# This is an absolute redirect. Slice away the surrounding quotation marks and copy the url verbatim
|
|
||||||
new_url = new_url[1:-1]
|
|
||||||
else:
|
|
||||||
# This is an internal redirect. Find the relative url to the target document
|
|
||||||
new_url = app.builder.get_relative_uri(old_url, new_url)
|
|
||||||
|
|
||||||
print('HTML file %s redirects to URL %s' % (out_file, new_url))
|
|
||||||
out_dir = os.path.dirname(out_file)
|
|
||||||
if not os.path.exists(out_dir):
|
|
||||||
os.makedirs(out_dir)
|
|
||||||
|
|
||||||
content = REDIRECT_TEMPLATE.replace('$NEWURL', new_url)
|
|
||||||
|
|
||||||
with open(out_file, 'w') as rp:
|
|
||||||
rp.write(content)
|
|
||||||
|
|
||||||
return []
|
|
@ -1,60 +0,0 @@
|
|||||||
import re
|
|
||||||
|
|
||||||
from docutils import nodes
|
|
||||||
from docutils.parsers.rst import Directive
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
app.add_directive('list', ListFilter)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
class ListFilter(Directive):
|
|
||||||
"""
|
|
||||||
Provides a list implementation directive that support clauses of the kind
|
|
||||||
|
|
||||||
.. list::
|
|
||||||
|
|
||||||
- Content
|
|
||||||
:filter: - Content
|
|
||||||
- Content
|
|
||||||
|
|
||||||
Where the :filter: part becomes selective to only include the content if
|
|
||||||
one of the provided tags is set, same as the logic used by the "only" directive.
|
|
||||||
|
|
||||||
The directive also works for numbered list.
|
|
||||||
|
|
||||||
"""
|
|
||||||
RE_PATTERN = re.compile(r'^\s*:(.+?):\s*(.+)$')
|
|
||||||
has_content = True
|
|
||||||
required_arguments = 0
|
|
||||||
optional_arguments = 0
|
|
||||||
final_argument_whitespace = True
|
|
||||||
option_spec = {}
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
# Raise an error if the directive does not have contents.
|
|
||||||
self.assert_has_content()
|
|
||||||
|
|
||||||
# Remove all list entries that should not be on display
|
|
||||||
env = self.state.document.settings.env
|
|
||||||
filt_data = [self.filter_entry(env, e) for e in self.content.data if e is not None]
|
|
||||||
|
|
||||||
# Clean up deleted values from content
|
|
||||||
self.content.data = [data for data in filt_data if data is not None]
|
|
||||||
self.content.items = [items for data, items in zip(filt_data, self.content.items) if data is not None]
|
|
||||||
|
|
||||||
# Parse the filtered content and return the new node
|
|
||||||
node = nodes.paragraph()
|
|
||||||
self.state.nested_parse(self.content, self.content_offset, node)
|
|
||||||
|
|
||||||
return [node]
|
|
||||||
|
|
||||||
def filter_entry(self, env, entry):
|
|
||||||
m = self.RE_PATTERN.match(entry)
|
|
||||||
if m is not None:
|
|
||||||
tag_filter, entry = m.groups()
|
|
||||||
if not env.app.builder.tags.eval_condition(tag_filter):
|
|
||||||
return None
|
|
||||||
return entry
|
|
@ -1,43 +0,0 @@
|
|||||||
# Based on https://stackoverflow.com/a/46600038 with some modifications
|
|
||||||
import re
|
|
||||||
|
|
||||||
from sphinx.directives.other import TocTree
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
app.add_directive('toctree', TocTreeFilt, override=True)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
class TocTreeFilt(TocTree):
|
|
||||||
"""
|
|
||||||
Override normal toctree directive to support clauses of the kind
|
|
||||||
|
|
||||||
:filter: Name <link>
|
|
||||||
|
|
||||||
Where the :filter: part becomes selective to only include the document if
|
|
||||||
one of the provided tags is set, same as the logic used by the "only" directive.
|
|
||||||
|
|
||||||
If no :filter: is supplied, works the same as default Sphinx :toctree:
|
|
||||||
|
|
||||||
Note that excluding via filter doesn't prevent Sphinx from finding these .rst files
|
|
||||||
when it scans the src/ directory, so it's also necessary to make sure that the files
|
|
||||||
are covered by the exclude_patterns list in conf.py
|
|
||||||
"""
|
|
||||||
RE_PATTERN = re.compile(r'^\s*:(.+?):\s*(.+)$')
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
# Remove all TOC entries that should not be on display
|
|
||||||
env = self.state.document.settings.env
|
|
||||||
self.content = [self.filter_entry(env, e) for e in self.content if e is not None]
|
|
||||||
|
|
||||||
return super(TocTreeFilt, self).run()
|
|
||||||
|
|
||||||
def filter_entry(self, env, entry):
|
|
||||||
m = self.RE_PATTERN.match(entry)
|
|
||||||
if m is not None:
|
|
||||||
tag_filter, entry = m.groups()
|
|
||||||
if not env.app.builder.tags.eval_condition(tag_filter):
|
|
||||||
return None
|
|
||||||
return entry
|
|
@ -1,15 +0,0 @@
|
|||||||
import subprocess
|
|
||||||
|
|
||||||
|
|
||||||
# Get revision used for constructing github URLs
|
|
||||||
def get_github_rev():
|
|
||||||
path = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode('utf-8')
|
|
||||||
try:
|
|
||||||
tag = subprocess.check_output(['git', 'describe', '--exact-match']).strip().decode('utf-8')
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
tag = None
|
|
||||||
print('Git commit ID: ', path)
|
|
||||||
if tag:
|
|
||||||
print('Git tag: ', tag)
|
|
||||||
return tag
|
|
||||||
return path
|
|
@ -1,2 +0,0 @@
|
|||||||
See docs/en/contribute/add-ons-reference.rst (or in the IDF docs) for details.
|
|
||||||
|
|
@ -1,6 +0,0 @@
|
|||||||
# The following lines of boilerplate have to be in your project's
|
|
||||||
# CMakeLists in this exact order for cmake to work correctly
|
|
||||||
cmake_minimum_required(VERSION 3.5)
|
|
||||||
|
|
||||||
include($ENV{IDF_PATH}/tools/cmake/project.cmake)
|
|
||||||
project(dummy_project)
|
|
@ -1,97 +0,0 @@
|
|||||||
# Sphinx extension to integrate IDF build system information
|
|
||||||
# into the Sphinx Build
|
|
||||||
#
|
|
||||||
# Runs early in the Sphinx process, runs CMake to generate the dummy IDF project
|
|
||||||
# in this directory - including resolving paths, etc.
|
|
||||||
#
|
|
||||||
# Then emits the new 'idf-info' event which has information read from IDF
|
|
||||||
# build system, that other extensions can use to generate relevant data.
|
|
||||||
import json
|
|
||||||
import os.path
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
|
|
||||||
# this directory also contains the dummy IDF project
|
|
||||||
project_path = os.path.abspath(os.path.dirname(__file__))
|
|
||||||
|
|
||||||
# Targets which needs --preview to build
|
|
||||||
PREVIEW_TARGETS = ['esp32s3']
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
# Setup some common paths
|
|
||||||
|
|
||||||
try:
|
|
||||||
build_dir = os.environ['BUILDDIR'] # TODO see if we can remove this
|
|
||||||
except KeyError:
|
|
||||||
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.mkdir(build_dir)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.mkdir(os.path.join(build_dir, 'inc'))
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# Fill in a default IDF_PATH if it's missing (ie when Read The Docs is building the docs)
|
|
||||||
try:
|
|
||||||
idf_path = os.environ['IDF_PATH']
|
|
||||||
except KeyError:
|
|
||||||
idf_path = os.path.realpath(os.path.join(os.path.dirname(__file__), '..'))
|
|
||||||
|
|
||||||
app.add_config_value('docs_root', os.path.join(idf_path, 'docs'), 'env')
|
|
||||||
app.add_config_value('idf_path', idf_path, 'env')
|
|
||||||
app.add_config_value('build_dir', build_dir, 'env') # not actually an IDF thing
|
|
||||||
app.add_event('idf-info')
|
|
||||||
|
|
||||||
# we want this to run early in the docs build but unclear exactly when
|
|
||||||
app.connect('config-inited', generate_idf_info)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_idf_info(app, config):
|
|
||||||
print('Running CMake on dummy project to get build info...')
|
|
||||||
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
|
|
||||||
cmake_build_dir = os.path.join(build_dir, 'build_dummy_project')
|
|
||||||
idf_py_path = os.path.join(app.config.idf_path, 'tools', 'idf.py')
|
|
||||||
print('Running idf.py...')
|
|
||||||
idf_py = [sys.executable,
|
|
||||||
idf_py_path,
|
|
||||||
'-B',
|
|
||||||
cmake_build_dir,
|
|
||||||
'-C',
|
|
||||||
project_path,
|
|
||||||
'-D',
|
|
||||||
'SDKCONFIG={}'.format(os.path.join(build_dir, 'dummy_project_sdkconfig'))
|
|
||||||
]
|
|
||||||
|
|
||||||
# force a clean idf.py build w/ new sdkconfig each time
|
|
||||||
# (not much slower than 'reconfigure', avoids any potential config & build versioning problems
|
|
||||||
shutil.rmtree(cmake_build_dir, ignore_errors=True)
|
|
||||||
print('Starting new dummy IDF project... ')
|
|
||||||
|
|
||||||
if (app.config.idf_target in PREVIEW_TARGETS):
|
|
||||||
subprocess.check_call(idf_py + ['--preview', 'set-target', app.config.idf_target])
|
|
||||||
else:
|
|
||||||
subprocess.check_call(idf_py + ['set-target', app.config.idf_target])
|
|
||||||
|
|
||||||
print('Running CMake on dummy project...')
|
|
||||||
subprocess.check_call(idf_py + ['reconfigure'])
|
|
||||||
|
|
||||||
with open(os.path.join(cmake_build_dir, 'project_description.json')) as f:
|
|
||||||
project_description = json.load(f)
|
|
||||||
if project_description['target'] != app.config.idf_target:
|
|
||||||
# this shouldn't really happen unless someone has been moving around directories inside _build, as
|
|
||||||
# the cmake_build_dir path should be target-specific
|
|
||||||
raise RuntimeError(('Error configuring the dummy IDF project for {}. ' +
|
|
||||||
'Target in project description is {}. ' +
|
|
||||||
'Is build directory contents corrupt?')
|
|
||||||
.format(app.config.idf_target, project_description['target']))
|
|
||||||
app.emit('idf-info', project_description)
|
|
||||||
|
|
||||||
return []
|
|
@ -1,14 +0,0 @@
|
|||||||
# Extension to generate esp_err definition as .rst
|
|
||||||
from .util import call_with_python, copy_if_modified
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
app.connect('idf-info', generate_err_defs)
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_err_defs(app, project_description):
|
|
||||||
# Generate 'esp_err_defs.inc' file with ESP_ERR_ error code definitions from inc file
|
|
||||||
esp_err_inc_path = '{}/inc/esp_err_defs.inc'.format(app.config.build_dir)
|
|
||||||
call_with_python('{}/tools/gen_esp_err_to_name.py --rst_output {}.in'.format(app.config.idf_path, esp_err_inc_path))
|
|
||||||
copy_if_modified(esp_err_inc_path + '.in', esp_err_inc_path)
|
|
@ -1,53 +0,0 @@
|
|||||||
from sphinx.util import get_matching_files
|
|
||||||
from sphinx.util.matching import compile_matchers
|
|
||||||
|
|
||||||
|
|
||||||
# Updates the excluded documents according to the conditional_include_dict {tag:documents}
|
|
||||||
def update_exclude_patterns(app, config):
|
|
||||||
|
|
||||||
# Default to building all if option not set
|
|
||||||
if config.docs_to_build:
|
|
||||||
build_subset(app, config)
|
|
||||||
|
|
||||||
include_set = set()
|
|
||||||
exclude_set = set()
|
|
||||||
|
|
||||||
for tag, docs in config.conditional_include_dict.items():
|
|
||||||
if not app.tags.has(tag):
|
|
||||||
exclude_set.update(docs)
|
|
||||||
else:
|
|
||||||
include_set.update(docs)
|
|
||||||
# Do not exclude docs that have been explicitly included, e.g. if a doc is listed in both
|
|
||||||
# ESP32_DOCS and ESP32S2_DOCS it will be included for those targets.
|
|
||||||
app.config.exclude_patterns.extend(exclude_set - include_set)
|
|
||||||
|
|
||||||
|
|
||||||
def build_subset(app, config):
|
|
||||||
# Convert to list of docs to build
|
|
||||||
docs_to_build = config.docs_to_build.split(',')
|
|
||||||
|
|
||||||
# Exclude all documents which were not set as docs_to_build when build_docs were called
|
|
||||||
exclude_docs = [filename for filename in get_matching_files(app.srcdir, compile_matchers(docs_to_build))]
|
|
||||||
docs = [filename for filename in get_matching_files(app.srcdir, compile_matchers(exclude_docs))]
|
|
||||||
|
|
||||||
app.config.exclude_patterns.extend(exclude_docs)
|
|
||||||
|
|
||||||
# Get all docs that will be built
|
|
||||||
docs = [filename for filename in get_matching_files(app.srcdir, compile_matchers(exclude_docs))]
|
|
||||||
if not docs:
|
|
||||||
raise ValueError('No documents to build')
|
|
||||||
print('Building a subset of the documents: {}'.format(docs))
|
|
||||||
|
|
||||||
# Sphinx requires a master document, if there is a document name 'index' then we pick that
|
|
||||||
index_docs = [doc for doc in docs if 'index' in doc]
|
|
||||||
if index_docs:
|
|
||||||
config.master_doc = index_docs[0].replace('.rst', '')
|
|
||||||
else:
|
|
||||||
config.master_doc = docs[0].replace('.rst', '')
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
# Tags are generated together with defines
|
|
||||||
app.connect('config-inited', update_exclude_patterns)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
@ -1,229 +0,0 @@
|
|||||||
import os
|
|
||||||
import os.path
|
|
||||||
import re
|
|
||||||
|
|
||||||
from docutils import io, nodes, statemachine, utils
|
|
||||||
from docutils.parsers.rst import directives
|
|
||||||
from docutils.utils.error_reporting import ErrorString, SafeString
|
|
||||||
from sphinx.directives.other import Include as BaseInclude
|
|
||||||
from sphinx.util import logging
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
sub = StringSubstituter()
|
|
||||||
|
|
||||||
# Config values not available when setup is called
|
|
||||||
app.connect('config-inited', lambda _, config: sub.init_sub_strings(config))
|
|
||||||
app.connect('source-read', sub.substitute_source_read_cb)
|
|
||||||
|
|
||||||
# Override the default include directive to include formatting with idf_target
|
|
||||||
# This is needed since there are no source-read events for includes
|
|
||||||
app.add_directive('include', FormatedInclude, override=True)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.2'}
|
|
||||||
|
|
||||||
|
|
||||||
def check_content(content, docname):
|
|
||||||
# Log warnings for any {IDF_TARGET} expressions that haven't been replaced
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
errors = re.findall(r'{IDF_TARGET.*?}', content)
|
|
||||||
|
|
||||||
for err in errors:
|
|
||||||
logger.warning('Badly formated string substitution: {}'.format(err), location=docname)
|
|
||||||
|
|
||||||
|
|
||||||
class StringSubstituter:
|
|
||||||
""" Allows for string substitution of target related strings
|
|
||||||
before any markup is parsed
|
|
||||||
|
|
||||||
Supports the following replacements (examples shown is for target=esp32s2):
|
|
||||||
{IDF_TARGET_NAME}, replaced with the current target name, e.g. ESP32-S2 Beta
|
|
||||||
{IDF_TARGET_TOOLCHAIN_PREFIX}, replaced with the toolchain prefix, e.g. xtensa-esp32-elf
|
|
||||||
{IDF_TARGET_PATH_NAME}, replaced with the path name, e.g. esp32s2
|
|
||||||
{IDF_TARGET_CFG_PREFIX}, replaced with the prefix used for config parameters, e.g. ESP32S2
|
|
||||||
{IDF_TARGET_TRM_EN_URL}, replaced with the url to the English technical reference manual
|
|
||||||
{IDF_TARGET_TRM_CH_URL}, replaced with the url to the Chinese technical reference manual
|
|
||||||
|
|
||||||
Also supports defines of local (single rst file) with the format:
|
|
||||||
{IDF_TARGET_TX_PIN:default="IO3",esp32="IO4",esp32s2="IO5"}
|
|
||||||
|
|
||||||
This will define a replacement of the tag {IDF_TARGET_TX_PIN} in the current rst-file, see e.g. uart.rst for example
|
|
||||||
|
|
||||||
"""
|
|
||||||
TARGET_NAMES = {'esp32': 'ESP32', 'esp32s2': 'ESP32-S2', 'esp32s3': 'ESP32-S3', 'esp32c3': 'ESP32-C3'}
|
|
||||||
TOOLCHAIN_PREFIX = {'esp32': 'xtensa-esp32-elf', 'esp32s2': 'xtensa-esp32s2-elf', 'esp32s3': 'xtensa-esp32s3-elf', 'esp32c3': 'riscv32-esp-elf'}
|
|
||||||
CONFIG_PREFIX = {'esp32': 'ESP32', 'esp32s2': 'ESP32S2', 'esp32s3': 'ESP32S3', 'esp32c3': 'ESP32C3'}
|
|
||||||
|
|
||||||
TRM_EN_URL = {'esp32': 'https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_en.pdf',
|
|
||||||
'esp32s2': 'https://www.espressif.com/sites/default/files/documentation/esp32-s2_technical_reference_manual_en.pdf',
|
|
||||||
'esp32s3': 'https://www.espressif.com/sites/default/files/documentation/esp32-s3_technical_reference_manual_en.pdf',
|
|
||||||
'esp32c3': 'https://www.espressif.com/sites/default/files/documentation/esp32-c3_technical_reference_manual_en.pdf'}
|
|
||||||
|
|
||||||
TRM_CN_URL = {'esp32': 'https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_cn.pdf',
|
|
||||||
'esp32s2': 'https://www.espressif.com/sites/default/files/documentation/esp32-s2_technical_reference_manual_cn.pdf',
|
|
||||||
'esp32s3': 'https://www.espressif.com/sites/default/files/documentation/esp32-s3_technical_reference_manual_cn.pdf',
|
|
||||||
'esp32c3': 'https://www.espressif.com/sites/default/files/documentation/esp32-c3_technical_reference_manual_cn.pdf'}
|
|
||||||
RE_PATTERN = re.compile(r'^\s*{IDF_TARGET_(\w+?):(.+?)}', re.MULTILINE)
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.substitute_strings = {}
|
|
||||||
self.local_sub_strings = {}
|
|
||||||
|
|
||||||
def add_pair(self, tag, replace_value):
|
|
||||||
self.substitute_strings[tag] = replace_value
|
|
||||||
|
|
||||||
def init_sub_strings(self, config):
|
|
||||||
self.target_name = config.idf_target
|
|
||||||
|
|
||||||
self.add_pair('{IDF_TARGET_NAME}', self.TARGET_NAMES[config.idf_target])
|
|
||||||
self.add_pair('{IDF_TARGET_PATH_NAME}', config.idf_target)
|
|
||||||
self.add_pair('{IDF_TARGET_TOOLCHAIN_PREFIX}', self.TOOLCHAIN_PREFIX[config.idf_target])
|
|
||||||
self.add_pair('{IDF_TARGET_CFG_PREFIX}', self.CONFIG_PREFIX[config.idf_target])
|
|
||||||
self.add_pair('{IDF_TARGET_TRM_EN_URL}', self.TRM_EN_URL[config.idf_target])
|
|
||||||
self.add_pair('{IDF_TARGET_TRM_CN_URL}', self.TRM_CN_URL[config.idf_target])
|
|
||||||
|
|
||||||
def add_local_subs(self, matches):
|
|
||||||
|
|
||||||
for sub_def in matches:
|
|
||||||
if len(sub_def) != 2:
|
|
||||||
raise ValueError('IDF_TARGET_X substitution define invalid, val={}'.format(sub_def))
|
|
||||||
|
|
||||||
tag = '{' + 'IDF_TARGET_{}'.format(sub_def[0]) + '}'
|
|
||||||
|
|
||||||
match_default = re.match(r'^\s*default(\s*)=(\s*)\"(.*?)\"', sub_def[1])
|
|
||||||
|
|
||||||
if match_default is None:
|
|
||||||
# There should always be a default value
|
|
||||||
raise ValueError('No default value in IDF_TARGET_X substitution define, val={}'.format(sub_def))
|
|
||||||
|
|
||||||
match_target = re.match(r'^.*{}(\s*)=(\s*)\"(.*?)\"'.format(self.target_name), sub_def[1])
|
|
||||||
|
|
||||||
if match_target is None:
|
|
||||||
sub_value = match_default.groups()[2]
|
|
||||||
else:
|
|
||||||
sub_value = match_target.groups()[2]
|
|
||||||
|
|
||||||
self.local_sub_strings[tag] = sub_value
|
|
||||||
|
|
||||||
def substitute(self, content):
|
|
||||||
# Add any new local tags that matches the reg.ex.
|
|
||||||
sub_defs = re.findall(self.RE_PATTERN, content)
|
|
||||||
|
|
||||||
if len(sub_defs) != 0:
|
|
||||||
self.add_local_subs(sub_defs)
|
|
||||||
|
|
||||||
# Remove the tag defines
|
|
||||||
content = re.sub(self.RE_PATTERN,'', content)
|
|
||||||
|
|
||||||
for key in self.local_sub_strings:
|
|
||||||
content = content.replace(key, self.local_sub_strings[key])
|
|
||||||
|
|
||||||
self.local_sub_strings = {}
|
|
||||||
|
|
||||||
for key in self.substitute_strings:
|
|
||||||
content = content.replace(key, self.substitute_strings[key])
|
|
||||||
|
|
||||||
return content
|
|
||||||
|
|
||||||
def substitute_source_read_cb(self, app, docname, source):
|
|
||||||
source[0] = self.substitute(source[0])
|
|
||||||
|
|
||||||
check_content(source[0], docname)
|
|
||||||
|
|
||||||
|
|
||||||
class FormatedInclude(BaseInclude):
|
|
||||||
|
|
||||||
"""
|
|
||||||
Include and format content read from a separate source file.
|
|
||||||
|
|
||||||
Code is based on the default include directive from docutils
|
|
||||||
but extended to also format the content according to IDF target.
|
|
||||||
|
|
||||||
"""
|
|
||||||
def run(self):
|
|
||||||
|
|
||||||
# For code or literal include blocks we run the normal include
|
|
||||||
if 'literal' in self.options or 'code' in self.options:
|
|
||||||
return super(FormatedInclude, self).run()
|
|
||||||
|
|
||||||
"""Include a file as part of the content of this reST file."""
|
|
||||||
if not self.state.document.settings.file_insertion_enabled:
|
|
||||||
raise self.warning('"%s" directive disabled.' % self.name)
|
|
||||||
source = self.state_machine.input_lines.source(
|
|
||||||
self.lineno - self.state_machine.input_offset - 1)
|
|
||||||
|
|
||||||
source_dir = os.path.dirname(os.path.abspath(source))
|
|
||||||
|
|
||||||
rel_filename, filename = self.env.relfn2path(self.arguments[0])
|
|
||||||
self.arguments[0] = filename
|
|
||||||
self.env.note_included(filename)
|
|
||||||
path = directives.path(self.arguments[0])
|
|
||||||
|
|
||||||
if path.startswith('<') and path.endswith('>'):
|
|
||||||
path = os.path.join(self.standard_include_path, path[1:-1])
|
|
||||||
path = os.path.normpath(os.path.join(source_dir, path))
|
|
||||||
|
|
||||||
path = utils.relative_path(None, path)
|
|
||||||
path = nodes.reprunicode(path)
|
|
||||||
|
|
||||||
encoding = self.options.get(
|
|
||||||
'encoding', self.state.document.settings.input_encoding)
|
|
||||||
e_handler = self.state.document.settings.input_encoding_error_handler
|
|
||||||
tab_width = self.options.get(
|
|
||||||
'tab-width', self.state.document.settings.tab_width)
|
|
||||||
try:
|
|
||||||
self.state.document.settings.record_dependencies.add(path)
|
|
||||||
include_file = io.FileInput(source_path=path,
|
|
||||||
encoding=encoding,
|
|
||||||
error_handler=e_handler)
|
|
||||||
except UnicodeEncodeError:
|
|
||||||
raise self.severe(u'Problems with "%s" directive path:\n'
|
|
||||||
'Cannot encode input file path "%s" '
|
|
||||||
'(wrong locale?).' %
|
|
||||||
(self.name, SafeString(path)))
|
|
||||||
except IOError as error:
|
|
||||||
raise self.severe(u'Problems with "%s" directive path:\n%s.' %
|
|
||||||
(self.name, ErrorString(error)))
|
|
||||||
startline = self.options.get('start-line', None)
|
|
||||||
endline = self.options.get('end-line', None)
|
|
||||||
try:
|
|
||||||
if startline or (endline is not None):
|
|
||||||
lines = include_file.readlines()
|
|
||||||
rawtext = ''.join(lines[startline:endline])
|
|
||||||
else:
|
|
||||||
rawtext = include_file.read()
|
|
||||||
except UnicodeError as error:
|
|
||||||
raise self.severe(u'Problem with "%s" directive:\n%s' %
|
|
||||||
(self.name, ErrorString(error)))
|
|
||||||
|
|
||||||
# Format input
|
|
||||||
sub = StringSubstituter()
|
|
||||||
config = self.state.document.settings.env.config
|
|
||||||
sub.init_sub_strings(config)
|
|
||||||
rawtext = sub.substitute(rawtext)
|
|
||||||
|
|
||||||
# start-after/end-before: no restrictions on newlines in match-text,
|
|
||||||
# and no restrictions on matching inside lines vs. line boundaries
|
|
||||||
after_text = self.options.get('start-after', None)
|
|
||||||
if after_text:
|
|
||||||
# skip content in rawtext before *and incl.* a matching text
|
|
||||||
after_index = rawtext.find(after_text)
|
|
||||||
if after_index < 0:
|
|
||||||
raise self.severe('Problem with "start-after" option of "%s" '
|
|
||||||
'directive:\nText not found.' % self.name)
|
|
||||||
rawtext = rawtext[after_index + len(after_text):]
|
|
||||||
before_text = self.options.get('end-before', None)
|
|
||||||
if before_text:
|
|
||||||
# skip content in rawtext after *and incl.* a matching text
|
|
||||||
before_index = rawtext.find(before_text)
|
|
||||||
if before_index < 0:
|
|
||||||
raise self.severe('Problem with "end-before" option of "%s" '
|
|
||||||
'directive:\nText not found.' % self.name)
|
|
||||||
rawtext = rawtext[:before_index]
|
|
||||||
|
|
||||||
include_lines = statemachine.string2lines(rawtext, tab_width,
|
|
||||||
convert_whitespace=True)
|
|
||||||
|
|
||||||
self.state_machine.insert_input(include_lines, path)
|
|
||||||
return []
|
|
@ -1,84 +0,0 @@
|
|||||||
# Sphinx extension to integrate defines into the Sphinx Build
|
|
||||||
#
|
|
||||||
# Runs after the IDF dummy project has been built
|
|
||||||
#
|
|
||||||
# Then emits the new 'idf-defines-generated' event which has a dictionary of raw text define values
|
|
||||||
# that other extensions can use to generate relevant data.
|
|
||||||
|
|
||||||
import glob
|
|
||||||
import os
|
|
||||||
import pprint
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
|
|
||||||
|
|
||||||
def generate_defines(app, project_description):
|
|
||||||
sdk_config_path = os.path.join(project_description['build_dir'], 'config')
|
|
||||||
|
|
||||||
# Parse kconfig macros to pass into doxygen
|
|
||||||
#
|
|
||||||
# TODO: this should use the set of "config which can't be changed" eventually,
|
|
||||||
# not the header
|
|
||||||
defines = get_defines(os.path.join(project_description['build_dir'],
|
|
||||||
'config', 'sdkconfig.h'), sdk_config_path)
|
|
||||||
|
|
||||||
# Add all SOC _caps.h headers and kconfig macros to the defines
|
|
||||||
#
|
|
||||||
# kind of a hack, be nicer to add a component info dict in project_description.json
|
|
||||||
soc_path = [p for p in project_description['build_component_paths'] if p.endswith('/soc')][0]
|
|
||||||
soc_headers = glob.glob(os.path.join(soc_path, project_description['target'],
|
|
||||||
'include', 'soc', '*_caps.h'))
|
|
||||||
assert len(soc_headers) > 0
|
|
||||||
|
|
||||||
for soc_header in soc_headers:
|
|
||||||
defines.update(get_defines(soc_header, sdk_config_path))
|
|
||||||
|
|
||||||
# write a list of definitions to make debugging easier
|
|
||||||
with open(os.path.join(app.config.build_dir, 'macro-definitions.txt'), 'w') as f:
|
|
||||||
pprint.pprint(defines, f)
|
|
||||||
print('Saved macro list to %s' % f.name)
|
|
||||||
|
|
||||||
add_tags(app, defines)
|
|
||||||
|
|
||||||
app.emit('idf-defines-generated', defines)
|
|
||||||
|
|
||||||
|
|
||||||
def get_defines(header_path, sdk_config_path):
|
|
||||||
defines = {}
|
|
||||||
# Note: we run C preprocessor here without any -I arguments (except "sdkconfig.h"), so assumption is
|
|
||||||
# that these headers are all self-contained and don't include any other headers
|
|
||||||
# not in the same directory
|
|
||||||
print('Reading macros from %s...' % (header_path))
|
|
||||||
processed_output = subprocess.check_output(['xtensa-esp32-elf-gcc', '-I', sdk_config_path,
|
|
||||||
'-dM', '-E', header_path]).decode()
|
|
||||||
for line in processed_output.split('\n'):
|
|
||||||
line = line.strip()
|
|
||||||
m = re.search('#define ([^ ]+) ?(.*)', line)
|
|
||||||
if m:
|
|
||||||
name = m.group(1)
|
|
||||||
value = m.group(2)
|
|
||||||
if name.startswith('_'):
|
|
||||||
continue # toolchain macro
|
|
||||||
if (' ' in value) or ('=' in value):
|
|
||||||
value = '' # macros that expand to multiple tokens (ie function macros) cause doxygen errors, so just mark as 'defined'
|
|
||||||
defines[name] = value
|
|
||||||
|
|
||||||
return defines
|
|
||||||
|
|
||||||
|
|
||||||
def add_tags(app, defines):
|
|
||||||
# try to parse define values as ints and add to tags
|
|
||||||
for name, value in defines.items():
|
|
||||||
try:
|
|
||||||
define_value = int(value.strip('()'))
|
|
||||||
if define_value > 0:
|
|
||||||
app.tags.add(name)
|
|
||||||
except ValueError:
|
|
||||||
continue
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
app.connect('idf-info', generate_defines)
|
|
||||||
app.add_event('idf-defines-generated')
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.2'}
|
|
@ -1,22 +0,0 @@
|
|||||||
# Generate toolchain download links from toolchain info makefile
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import os.path
|
|
||||||
|
|
||||||
from .util import call_with_python, copy_if_modified
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
# we don't actually need idf-info, just a convenient event to trigger this on
|
|
||||||
app.connect('idf-info', generate_idf_tools_links)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_idf_tools_links(app, project_description):
|
|
||||||
print('Generating IDF Tools list')
|
|
||||||
os.environ['IDF_MAINTAINER'] = '1'
|
|
||||||
tools_rst = os.path.join(app.config.build_dir, 'inc', 'idf-tools-inc.rst')
|
|
||||||
tools_rst_tmp = os.path.join(app.config.build_dir, 'idf-tools-inc.rst')
|
|
||||||
call_with_python('{}/tools/idf_tools.py gen-doc --output {}'.format(app.config.idf_path, tools_rst_tmp))
|
|
||||||
copy_if_modified(tools_rst_tmp, tools_rst)
|
|
@ -1,88 +0,0 @@
|
|||||||
# Generate toolchain download links from toolchain info makefile
|
|
||||||
from __future__ import print_function
|
|
||||||
|
|
||||||
import os.path
|
|
||||||
from collections import namedtuple
|
|
||||||
|
|
||||||
from .util import copy_if_modified
|
|
||||||
|
|
||||||
BASE_URL = 'https://dl.espressif.com/dl/'
|
|
||||||
|
|
||||||
PlatformInfo = namedtuple('PlatformInfo', [
|
|
||||||
'platform_name',
|
|
||||||
'platform_archive_suffix',
|
|
||||||
'extension',
|
|
||||||
'unpack_cmd',
|
|
||||||
'unpack_code'
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
# we don't actually need idf-info, just a convenient event to trigger this on
|
|
||||||
app.connect('idf-info', generate_toolchain_download_links)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_toolchain_download_links(app, project_description):
|
|
||||||
print('Generating toolchain download links')
|
|
||||||
toolchain_tmpdir = '{}/toolchain_inc'.format(app.config.build_dir)
|
|
||||||
toolchain_versions = os.path.join(app.config.idf_path, 'tools/toolchain_versions.mk')
|
|
||||||
gen_toolchain_links(toolchain_versions, toolchain_tmpdir)
|
|
||||||
copy_if_modified(toolchain_tmpdir, '{}/inc'.format(app.config.build_dir))
|
|
||||||
|
|
||||||
|
|
||||||
def gen_toolchain_links(versions_file, out_dir):
|
|
||||||
version_vars = {}
|
|
||||||
with open(versions_file) as f:
|
|
||||||
for line in f:
|
|
||||||
name, var = line.partition('=')[::2]
|
|
||||||
version_vars[name.strip()] = var.strip()
|
|
||||||
|
|
||||||
gcc_version = version_vars['CURRENT_TOOLCHAIN_GCC_VERSION']
|
|
||||||
toolchain_desc = version_vars['CURRENT_TOOLCHAIN_COMMIT_DESC_SHORT']
|
|
||||||
|
|
||||||
unpack_code_linux_macos = """
|
|
||||||
::
|
|
||||||
|
|
||||||
mkdir -p ~/esp
|
|
||||||
cd ~/esp
|
|
||||||
tar -x{}f ~/Downloads/{}
|
|
||||||
"""
|
|
||||||
|
|
||||||
scratch_build_code_linux_macos = """
|
|
||||||
::
|
|
||||||
|
|
||||||
git clone https://github.com/espressif/crosstool-NG.git
|
|
||||||
cd crosstool-NG
|
|
||||||
git checkout {}
|
|
||||||
git submodule update --init
|
|
||||||
./bootstrap && ./configure --enable-local && make
|
|
||||||
"""
|
|
||||||
|
|
||||||
platform_info = [
|
|
||||||
PlatformInfo('linux64', 'linux-amd64', 'tar.gz', 'z', unpack_code_linux_macos),
|
|
||||||
PlatformInfo('linux32', 'linux-i686', 'tar.gz', 'z', unpack_code_linux_macos),
|
|
||||||
PlatformInfo('osx', 'macos', 'tar.gz', 'z', unpack_code_linux_macos),
|
|
||||||
PlatformInfo('win32', 'win32', 'zip', None, None)
|
|
||||||
]
|
|
||||||
|
|
||||||
try:
|
|
||||||
os.mkdir(out_dir)
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
with open(os.path.join(out_dir, 'download-links.inc'), 'w') as links_file:
|
|
||||||
for p in platform_info:
|
|
||||||
archive_name = 'xtensa-esp32-elf-gcc{}-{}-{}.{}'.format(
|
|
||||||
gcc_version.replace('.', '_'), toolchain_desc, p.platform_archive_suffix, p.extension)
|
|
||||||
|
|
||||||
print('.. |download_link_{}| replace:: {}{}'.format(
|
|
||||||
p.platform_name, BASE_URL, archive_name), file=links_file)
|
|
||||||
|
|
||||||
if p.unpack_code is not None:
|
|
||||||
with open(os.path.join(out_dir, 'unpack-code-%s.inc' % p.platform_name), 'w') as f:
|
|
||||||
print(p.unpack_code.format(p.unpack_cmd, archive_name), file=f)
|
|
||||||
|
|
||||||
with open(os.path.join(out_dir, 'scratch-build-code.inc'), 'w') as code_file:
|
|
||||||
print(scratch_build_code_linux_macos.format(toolchain_desc), file=code_file)
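
# Illustrative sketch (editor-added): the archive name and RST substitution produced by
# gen_toolchain_links() above for the 'linux64' entry, using hypothetical version values.
example_gcc_version = '8.4.0'
example_toolchain_desc = 'esp-2021r1'
example_archive_name = 'xtensa-esp32-elf-gcc{}-{}-{}.{}'.format(
    example_gcc_version.replace('.', '_'), example_toolchain_desc, 'linux-amd64', 'tar.gz')
assert example_archive_name == 'xtensa-esp32-elf-gcc8_4_0-esp-2021r1-linux-amd64.tar.gz'
# The corresponding line written to download-links.inc would then read:
# .. |download_link_linux64| replace:: https://dl.espressif.com/dl/xtensa-esp32-elf-gcc8_4_0-esp-2021r1-linux-amd64.tar.gz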
|
|
@ -1,218 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
|
||||||
# Sphinx extension to generate ReSTructured Text .inc snippets
|
|
||||||
# with version-based content for this IDF version
|
|
||||||
|
|
||||||
from __future__ import print_function, unicode_literals
|
|
||||||
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
from io import open
|
|
||||||
|
|
||||||
from .util import copy_if_modified
|
|
||||||
|
|
||||||
TEMPLATES = {
|
|
||||||
'en': {
|
|
||||||
'git-clone-bash': """
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
mkdir -p ~/esp
|
|
||||||
cd ~/esp
|
|
||||||
git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git
|
|
||||||
""",
|
|
||||||
|
|
||||||
'git-clone-windows': """
|
|
||||||
.. code-block:: batch
|
|
||||||
|
|
||||||
mkdir %%userprofile%%\\esp
|
|
||||||
cd %%userprofile%%\\esp
|
|
||||||
git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git
|
|
||||||
""",
|
|
||||||
|
|
||||||
'git-clone-notes': {
|
|
||||||
'template': """
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
%(extra_note)s
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
%(zipfile_note)s
|
|
||||||
""",
|
|
||||||
'master': 'This command will clone the master branch, which has the latest development ("bleeding edge") '
|
|
||||||
'version of ESP-IDF. It is fully functional and updated on a weekly basis with the most recent features and bugfixes.',
|
|
||||||
'branch': 'The ``git clone`` option ``-b %(clone_arg)s`` tells git to clone the %(ver_type)s in the ESP-IDF repository '
|
|
||||||
'corresponding to this version of the documentation.',
|
|
||||||
'zipfile': {
|
|
||||||
'stable': 'As a fallback, it is also possible to download a zip file of this stable release from the `Releases page`_. '
|
|
||||||
'Do not download the "Source code" zip file(s) generated automatically by GitHub; they do not work with ESP-IDF.',
|
|
||||||
'unstable': 'GitHub\'s "Download zip file" feature does not work with ESP-IDF, a ``git clone`` is required. As a fallback, '
|
|
||||||
'`Stable version`_ can be installed without Git.'
|
|
||||||
}, # zipfile
|
|
||||||
}, # git-clone-notes
|
|
||||||
'version-note': {
|
|
||||||
'master': """
|
|
||||||
.. note::
|
|
||||||
This is documentation for the master branch (latest version) of ESP-IDF. This version is under continual development.
|
|
||||||
`Stable version`_ documentation is available, as well as other :doc:`/versions`.
|
|
||||||
""",
|
|
||||||
'stable': """
|
|
||||||
.. note::
|
|
||||||
This is documentation for stable version %s of ESP-IDF. Other :doc:`/versions` are also available.
|
|
||||||
""",
|
|
||||||
'branch': """
|
|
||||||
.. note::
|
|
||||||
This is documentation for %s ``%s`` of ESP-IDF. Other :doc:`/versions` are also available.
|
|
||||||
"""
|
|
||||||
}, # version-note
|
|
||||||
}, # en
|
|
||||||
'zh_CN': {
|
|
||||||
'git-clone-bash': """
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
mkdir -p ~/esp
|
|
||||||
cd ~/esp
|
|
||||||
git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git
|
|
||||||
""",
|
|
||||||
|
|
||||||
'git-clone-windows': """
|
|
||||||
.. code-block:: batch
|
|
||||||
|
|
||||||
mkdir %%userprofile%%\\esp
|
|
||||||
cd %%userprofile%%\\esp
|
|
||||||
git clone %(clone_args)s--recursive https://github.com/espressif/esp-idf.git
|
|
||||||
""",
|
|
||||||
|
|
||||||
'git-clone-notes': {
|
|
||||||
'template': """
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
%(extra_note)s
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
%(zipfile_note)s
|
|
||||||
""",
|
|
||||||
'master': '此命令将克隆 master 分支,该分支保存着 ESP-IDF 的最新版本,它功能齐全,每周都会更新一些新功能并修正一些错误。',
|
|
||||||
'branch': '``git clone`` 命令的 ``-b %(clone_arg)s`` 选项告诉 git 从 ESP-IDF 仓库中克隆与此版本的文档对应的分支。',
|
|
||||||
'zipfile': {
|
|
||||||
'stable': '作为备份,还可以从 `Releases page`_ 下载此稳定版本的 zip 文件。不要下载由 GitHub 自动生成的"源代码"的 zip 文件,它们不适用于 ESP-IDF。',
|
|
||||||
'unstable': 'GitHub 中"下载 zip 文档"的功能不适用于 ESP-IDF,所以需要使用 ``git clone`` 命令。作为备份,可以在没有安装 Git 的环境中下载 '
|
|
||||||
'`Stable version`_ 的 zip 归档文件。'
|
|
||||||
}, # zipfile
|
|
||||||
}, # git-clone
|
|
||||||
'version-note': {
|
|
||||||
'master': """
|
|
||||||
.. note::
|
|
||||||
这是ESP-IDF master 分支(最新版本)的文档,该版本在持续开发中。还有 `Stable version`_ 的文档,以及其他版本的文档 :doc:`/versions` 供参考。
|
|
||||||
""",
|
|
||||||
'stable': """
|
|
||||||
.. note::
|
|
||||||
这是ESP-IDF 稳定版本 %s 的文档,还有其他版本的文档 :doc:`/versions` 供参考。
|
|
||||||
""",
|
|
||||||
'branch': """
|
|
||||||
.. note::
|
|
||||||
这是ESP-IDF %s ``%s`` 版本的文档,还有其他版本的文档 :doc:`/versions` 供参考。
|
|
||||||
"""
|
|
||||||
}, # version-note
|
|
||||||
} # zh_CN
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
# doesn't need to be this event specifically, but this is roughly the right time
|
|
||||||
app.connect('idf-info', generate_version_specific_includes)
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_version_specific_includes(app, project_description):
|
|
||||||
language = app.config.language
|
|
||||||
tmp_out_dir = os.path.join(app.config.build_dir, 'version_inc')
|
|
||||||
if not os.path.exists(tmp_out_dir):
|
|
||||||
print('Creating directory %s' % tmp_out_dir)
|
|
||||||
os.mkdir(tmp_out_dir)
|
|
||||||
|
|
||||||
template = TEMPLATES[language]
|
|
||||||
|
|
||||||
version, ver_type, is_stable = get_version()
|
|
||||||
|
|
||||||
write_git_clone_inc_files(template, tmp_out_dir, version, ver_type, is_stable)
|
|
||||||
write_version_note(template['version-note'], tmp_out_dir, version, ver_type, is_stable)
|
|
||||||
copy_if_modified(tmp_out_dir, os.path.join(app.config.build_dir, 'inc'))
|
|
||||||
print('Done')
|
|
||||||
|
|
||||||
|
|
||||||
def write_git_clone_inc_files(templates, out_dir, version, ver_type, is_stable):
|
|
||||||
def out_file(basename):
|
|
||||||
p = os.path.join(out_dir, '%s.inc' % basename)
|
|
||||||
print('Writing %s...' % p)
|
|
||||||
return p
|
|
||||||
|
|
||||||
if version == 'master':
|
|
||||||
clone_args = ''
|
|
||||||
else:
|
|
||||||
clone_args = '-b %s ' % version
|
|
||||||
|
|
||||||
with open(out_file('git-clone-bash'), 'w', encoding='utf-8') as f:
|
|
||||||
f.write(templates['git-clone-bash'] % locals())
|
|
||||||
|
|
||||||
with open(out_file('git-clone-windows'), 'w', encoding='utf-8') as f:
|
|
||||||
f.write(templates['git-clone-windows'] % locals())
|
|
||||||
|
|
||||||
with open(out_file('git-clone-notes'), 'w', encoding='utf-8') as f:
|
|
||||||
template = templates['git-clone-notes']
|
|
||||||
|
|
||||||
zipfile = template['zipfile']
|
|
||||||
|
|
||||||
if version == 'master':
|
|
||||||
extra_note = template['master']
|
|
||||||
zipfile_note = zipfile['unstable']
|
|
||||||
else:
|
|
||||||
extra_note = template['branch'] % {'clone_arg': version, 'ver_type': ver_type}
|
|
||||||
zipfile_note = zipfile['stable'] if is_stable else zipfile['unstable']
|
|
||||||
|
|
||||||
f.write(template['template'] % locals())
|
|
||||||
|
|
||||||
print('Wrote git-clone-xxx.inc files')
|
|
||||||
|
|
||||||
|
|
||||||
def write_version_note(template, out_dir, version, ver_type, is_stable):
|
|
||||||
if version == 'master':
|
|
||||||
content = template['master']
|
|
||||||
elif ver_type == 'tag' and is_stable:
|
|
||||||
content = template['stable'] % version
|
|
||||||
else:
|
|
||||||
content = template['branch'] % (ver_type, version)
|
|
||||||
out_file = os.path.join(out_dir, 'version-note.inc')
|
|
||||||
with open(out_file, 'w', encoding='utf-8') as f:
|
|
||||||
f.write(content)
|
|
||||||
print('%s written' % out_file)
|
|
||||||
|
|
||||||
|
|
||||||
def get_version():
|
|
||||||
"""
|
|
||||||
Returns a tuple of (name of branch/tag/commit-id, type branch/tag/commit, is_stable)
|
|
||||||
"""
|
|
||||||
# Use git to look for a tag
|
|
||||||
try:
|
|
||||||
tag = subprocess.check_output(['git', 'describe', '--exact-match']).strip().decode('utf-8')
|
|
||||||
is_stable = re.match(r'v[0-9\.]+$', tag) is not None
|
|
||||||
return (tag, 'tag', is_stable)
|
|
||||||
except subprocess.CalledProcessError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# No tag, look at branch name from CI, this will give the correct branch name even if the ref for the branch we
|
|
||||||
# merge into has moved forward before the pipeline runs
|
|
||||||
branch = os.environ.get('CI_COMMIT_REF_NAME', None)
|
|
||||||
if branch is not None:
|
|
||||||
return (branch, 'branch', False)
|
|
||||||
|
|
||||||
# Try to find the branch name even if docs are built locally
|
|
||||||
branch = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip().decode('utf-8')
|
|
||||||
if branch != 'HEAD':
|
|
||||||
return (branch, 'branch', False)
|
|
||||||
|
|
||||||
# As a last resort we return commit SHA-1, should never happen in CI/docs that should be published
|
|
||||||
return (subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).strip().decode('utf-8'), 'commit', False)
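
# Illustrative sketch (editor-added, hypothetical version strings): how the tuple returned
# by get_version() selects a note template in write_version_note() above.
#   ('master', 'branch', False)        -> 'version-note'/'master' text
#   ('v4.4', 'tag', True)              -> 'stable' % 'v4.4'
#   ('v4.4-beta1', 'tag', False)       -> 'branch' % ('tag', 'v4.4-beta1')
#   ('release/v4.4', 'branch', False)  -> 'branch' % ('branch', 'release/v4.4')
# The stability check is the same regex used above:
assert re.match(r'v[0-9\.]+$', 'v4.4') is not None
assert re.match(r'v[0-9\.]+$', 'v4.4-beta1') is None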
|
|
@ -1,23 +0,0 @@
|
|||||||
import os.path
|
|
||||||
|
|
||||||
from docutils.parsers.rst import directives
|
|
||||||
from docutils.parsers.rst.directives.misc import Include as BaseInclude
|
|
||||||
from sphinx.util.docutils import SphinxDirective
|
|
||||||
|
|
||||||
|
|
||||||
class IncludeBuildFile(BaseInclude, SphinxDirective):
|
|
||||||
"""
|
|
||||||
Like the standard "Include" directive, but relative to the app
|
|
||||||
build directory
|
|
||||||
"""
|
|
||||||
def run(self):
|
|
||||||
abspath = os.path.join(self.env.config.build_dir, self.arguments[0])
|
|
||||||
self.arguments[0] = abspath
|
|
||||||
self.env.note_included(abspath)
|
|
||||||
return super(IncludeBuildFile, self).run()
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
directives.register_directive('include-build-file', IncludeBuildFile)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
@ -1,63 +0,0 @@
|
|||||||
# Extension to generate the KConfig reference list
|
|
||||||
import os.path
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from .util import copy_if_modified
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
# The idf_build_system extension will emit this event once it
|
|
||||||
# has parsed the IDF project's information
|
|
||||||
app.connect('idf-info', generate_reference)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_reference(app, project_description):
|
|
||||||
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
|
|
||||||
|
|
||||||
# Generate 'kconfig.inc' file from components' Kconfig files
|
|
||||||
print('Generating kconfig.inc from kconfig contents')
|
|
||||||
kconfig_inc_path = '{}/inc/kconfig.inc'.format(build_dir)
|
|
||||||
temp_sdkconfig_path = '{}/sdkconfig.tmp'.format(build_dir)
|
|
||||||
|
|
||||||
kconfigs = project_description['config_environment']['COMPONENT_KCONFIGS'].split(';')
|
|
||||||
kconfig_projbuilds = project_description['config_environment']['COMPONENT_KCONFIGS_PROJBUILD'].split(';')
|
|
||||||
|
|
||||||
sdkconfig_renames = set()
|
|
||||||
# TODO: this should be generated in project description as well, if possible
|
|
||||||
for k in kconfigs + kconfig_projbuilds:
|
|
||||||
component_dir = os.path.dirname(k)
|
|
||||||
sdkconfig_rename = os.path.join(component_dir, 'sdkconfig.rename')
|
|
||||||
if os.path.exists(sdkconfig_rename):
|
|
||||||
sdkconfig_renames.add(sdkconfig_rename)
|
|
||||||
|
|
||||||
kconfigs_source_path = '{}/inc/kconfigs_source.in'.format(build_dir)
|
|
||||||
kconfig_projbuilds_source_path = '{}/inc/kconfig_projbuilds_source.in'.format(build_dir)
|
|
||||||
|
|
||||||
prepare_kconfig_files_args = [sys.executable,
|
|
||||||
'{}/tools/kconfig_new/prepare_kconfig_files.py'.format(app.config.idf_path),
|
|
||||||
'--env', 'COMPONENT_KCONFIGS={}'.format(' '.join(kconfigs)),
|
|
||||||
'--env', 'COMPONENT_KCONFIGS_PROJBUILD={}'.format(' '.join(kconfig_projbuilds)),
|
|
||||||
'--env', 'COMPONENT_KCONFIGS_SOURCE_FILE={}'.format(kconfigs_source_path),
|
|
||||||
'--env', 'COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}'.format(kconfig_projbuilds_source_path),
|
|
||||||
]
|
|
||||||
subprocess.check_call(prepare_kconfig_files_args)
|
|
||||||
|
|
||||||
confgen_args = [sys.executable,
|
|
||||||
'{}/tools/kconfig_new/confgen.py'.format(app.config.idf_path),
|
|
||||||
'--kconfig', './Kconfig',
|
|
||||||
'--sdkconfig-rename', './sdkconfig.rename',
|
|
||||||
'--config', temp_sdkconfig_path,
|
|
||||||
'--env', 'COMPONENT_KCONFIGS={}'.format(' '.join(kconfigs)),
|
|
||||||
'--env', 'COMPONENT_KCONFIGS_PROJBUILD={}'.format(' '.join(kconfig_projbuilds)),
|
|
||||||
'--env', 'COMPONENT_SDKCONFIG_RENAMES={}'.format(' '.join(sdkconfig_renames)),
|
|
||||||
'--env', 'COMPONENT_KCONFIGS_SOURCE_FILE={}'.format(kconfigs_source_path),
|
|
||||||
'--env', 'COMPONENT_KCONFIGS_PROJBUILD_SOURCE_FILE={}'.format(kconfig_projbuilds_source_path),
|
|
||||||
'--env', 'IDF_PATH={}'.format(app.config.idf_path),
|
|
||||||
'--env', 'IDF_TARGET={}'.format(app.config.idf_target),
|
|
||||||
'--output', 'docs', kconfig_inc_path + '.in'
|
|
||||||
]
|
|
||||||
subprocess.check_call(confgen_args, cwd=app.config.idf_path)
|
|
||||||
copy_if_modified(kconfig_inc_path + '.in', kconfig_inc_path)
|
|
@ -1,56 +0,0 @@
|
|||||||
import os
|
|
||||||
|
|
||||||
from sphinx.builders.latex import LaTeXBuilder
|
|
||||||
|
|
||||||
|
|
||||||
# Overrides the default Sphinx latex build
|
|
||||||
class IdfLatexBuilder(LaTeXBuilder):
|
|
||||||
|
|
||||||
def __init__(self, app):
|
|
||||||
|
|
||||||
# Sets up the latex_documents config value, done here instead of conf.py since it depends on the runtime value 'idf_target'
|
|
||||||
self.init_latex_documents(app)
|
|
||||||
|
|
||||||
super().__init__(app)
|
|
||||||
|
|
||||||
def init_latex_documents(self, app):
|
|
||||||
|
|
||||||
file_name = app.config.pdf_file + '.tex'
|
|
||||||
|
|
||||||
if app.config.language == 'zh_CN':
|
|
||||||
latex_documents = [('index', file_name, u'ESP-IDF 编程指南', u'乐鑫信息科技', 'manual')]
|
|
||||||
else:
|
|
||||||
# Default to english naming
|
|
||||||
latex_documents = [('index', file_name, u'ESP-IDF Programming Guide', u'Espressif Systems', 'manual')]
|
|
||||||
|
|
||||||
app.config.latex_documents = latex_documents
|
|
||||||
|
|
||||||
def prepare_latex_macros(self, package_path, config):
|
|
||||||
|
|
||||||
PACKAGE_NAME = 'espidf.sty'
|
|
||||||
latex_package = ''
|
|
||||||
with open(package_path, 'r') as template:
|
|
||||||
|
|
||||||
latex_package = template.read()
|
|
||||||
|
|
||||||
idf_target_title = config.idf_target_title_dict[config.idf_target]
|
|
||||||
latex_package = latex_package.replace('<idf_target_title>', idf_target_title)
|
|
||||||
|
|
||||||
# Release name for the PDF front page, remove '_' as this is used for subscript in Latex
|
|
||||||
idf_release_name = 'Release {}'.format(config.version.replace('_', '-'))
|
|
||||||
latex_package = latex_package.replace('<idf_release_name>', idf_release_name)
|
|
||||||
|
|
||||||
with open(os.path.join(self.outdir, PACKAGE_NAME), 'w') as package_file:
|
|
||||||
package_file.write(latex_package)
|
|
||||||
|
|
||||||
def finish(self):
|
|
||||||
super().finish()
|
|
||||||
|
|
||||||
TEMPLATE_PATH = '../latex_templates/espidf.sty'
|
|
||||||
self.prepare_latex_macros(os.path.join(self.confdir,TEMPLATE_PATH), self.config)
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
app.add_builder(IdfLatexBuilder, override=True)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.1'}
|
|
@ -1,179 +0,0 @@
|
|||||||
# based on http://protips.readthedocs.io/link-roles.html
|
|
||||||
|
|
||||||
from __future__ import print_function, unicode_literals
|
|
||||||
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
from collections import namedtuple
|
|
||||||
|
|
||||||
from docutils import nodes
|
|
||||||
from get_github_rev import get_github_rev
|
|
||||||
from sphinx.transforms.post_transforms import SphinxPostTransform
|
|
||||||
|
|
||||||
|
|
||||||
# Creates a dict of all submodules with the format {submodule_path: (url relative to git root, commit)}
|
|
||||||
def get_submodules():
|
|
||||||
git_root = subprocess.check_output(['git', 'rev-parse', '--show-toplevel']).strip().decode('utf-8')
|
|
||||||
gitmodules_file = os.path.join(git_root, '.gitmodules')
|
|
||||||
|
|
||||||
submodules = subprocess.check_output(['git', 'submodule', 'status']).strip().decode('utf-8').split('\n')
|
|
||||||
|
|
||||||
submodule_dict = {}
|
|
||||||
Submodule = namedtuple('Submodule', 'url rev')
|
|
||||||
for sub in submodules:
|
|
||||||
sub_info = sub.lstrip().split(' ')
|
|
||||||
|
|
||||||
# Get short hash, 7 digits
|
|
||||||
rev = sub_info[0].lstrip('-')[0:7]
|
|
||||||
path = sub_info[1].lstrip('./')
|
|
||||||
|
|
||||||
config_key_arg = 'submodule.{}.url'.format(path)
|
|
||||||
rel_url = subprocess.check_output(['git', 'config', '--file', gitmodules_file, '--get', config_key_arg]).decode('utf-8').lstrip('./').rstrip('\n')
|
|
||||||
|
|
||||||
submodule_dict[path] = Submodule(rel_url, rev)
|
|
||||||
|
|
||||||
return submodule_dict
|
|
||||||
|
|
||||||
|
|
||||||
def url_join(*url_parts):
|
|
||||||
""" Make a URL out of multiple components, assume first part is the https:// part and
|
|
||||||
anything else is a path component """
|
|
||||||
result = '/'.join(url_parts)
|
|
||||||
result = re.sub(r'([^:])//+', r'\1/', result) # remove any // that isn't in the https:// part
|
|
||||||
return result
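
# Illustrative sketch (editor-added, hypothetical repository/revision values): duplicate
# slashes in the path are collapsed, while the scheme's '//' is preserved.
assert url_join('https://github.com/', 'espressif/esp-idf', 'tree', 'v4.4',
                '/components/esp_wifi') == \
    'https://github.com/espressif/esp-idf/tree/v4.4/components/esp_wifi'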
|
|
||||||
|
|
||||||
|
|
||||||
def github_link(link_type, idf_rev, submods, root_path, app_config):
|
|
||||||
def role(name, rawtext, text, lineno, inliner, options={}, content=[]):
|
|
||||||
msgs = []
|
|
||||||
BASE_URL = 'https://github.com/'
|
|
||||||
IDF_REPO = 'espressif/esp-idf'
|
|
||||||
|
|
||||||
def warning(msg):
|
|
||||||
system_msg = inliner.reporter.warning(msg)
|
|
||||||
system_msg.line = lineno
|
|
||||||
msgs.append(system_msg)
|
|
||||||
|
|
||||||
# Redirects to submodule repo if path is a submodule, else default to IDF repo
|
|
||||||
def redirect_submodule(path, submods, rev):
|
|
||||||
for key, value in submods.items():
|
|
||||||
# Add path separator to end of submodule path to ensure we are matching a directory
|
|
||||||
if path.lstrip('/').startswith(os.path.join(key, '')):
|
|
||||||
return value.url.replace('.git', ''), value.rev, re.sub('^/{}/'.format(key), '', path)
|
|
||||||
|
|
||||||
return IDF_REPO, rev, path
|
|
||||||
|
|
||||||
# search for a named link (:label<path>) with descriptive label vs a plain URL
|
|
||||||
m = re.search(r'(.*)\s*<(.*)>', text)
|
|
||||||
if m:
|
|
||||||
link_text = m.group(1)
|
|
||||||
link = m.group(2)
|
|
||||||
else:
|
|
||||||
link_text = text
|
|
||||||
link = text
|
|
||||||
|
|
||||||
rel_path = root_path + link
|
|
||||||
abs_path = os.path.join(app_config.idf_path, rel_path.lstrip('/'))
|
|
||||||
|
|
||||||
repo, repo_rev, rel_path = redirect_submodule(rel_path, submods, idf_rev)
|
|
||||||
|
|
||||||
line_no = None
|
|
||||||
url = url_join(BASE_URL, repo, link_type, repo_rev, rel_path)
|
|
||||||
|
|
||||||
if '#L' in abs_path:
|
|
||||||
# drop any URL line number from the file, line numbers take the form #Lnnn or #Lnnn-Lnnn for a range
|
|
||||||
abs_path, line_no = abs_path.split('#L')
|
|
||||||
line_no = re.search(r'^(\d+)(?:-L(\d+))?', line_no)
|
|
||||||
if line_no is None:
|
|
||||||
warning("Line number anchor in URL %s doesn't seem to be valid" % link)
|
|
||||||
else:
|
|
||||||
line_no = tuple(int(ln_group) for ln_group in line_no.groups() if ln_group) # tuple of (nnn,) or (nnn, NNN) for ranges
|
|
||||||
elif '#' in abs_path: # drop any other anchor from the line
|
|
||||||
abs_path = abs_path.split('#')[0]
|
|
||||||
warning('URL %s seems to contain an unusable anchor after the #, only line numbers are supported' % link)
|
|
||||||
|
|
||||||
is_dir = (link_type == 'tree')
|
|
||||||
|
|
||||||
if not os.path.exists(abs_path):
|
|
||||||
warning('IDF path %s does not appear to exist (absolute path %s)' % (rel_path, abs_path))
|
|
||||||
elif is_dir and not os.path.isdir(abs_path):
|
|
||||||
# note these "wrong type" warnings are not strictly needed as GitHub will apply a redirect,
|
|
||||||
# but they may become important in the future (plus they make for cleaner links)
|
|
||||||
warning('IDF path %s is not a directory but role :%s: is for linking to a directory, try :%s_file:' % (rel_path, name, name))
|
|
||||||
elif not is_dir and os.path.isdir(abs_path):
|
|
||||||
warning('IDF path %s is a directory but role :%s: is for linking to a file' % (rel_path, name))
|
|
||||||
|
|
||||||
# check the line number is valid
|
|
||||||
if line_no:
|
|
||||||
if is_dir:
|
|
||||||
warning('URL %s contains a line number anchor but role :%s: is for linking to a directory' % (rel_path, name))
|
|
||||||
elif os.path.exists(abs_path) and not os.path.isdir(abs_path):
|
|
||||||
with open(abs_path, 'r') as f:
|
|
||||||
lines = len(f.readlines())
|
|
||||||
if any(True for ln in line_no if ln > lines):
|
|
||||||
warning('URL %s specifies a range larger than file (file has %d lines)' % (rel_path, lines))
|
|
||||||
|
|
||||||
if tuple(sorted(line_no)) != line_no: # second line number comes before first one!
|
|
||||||
warning('URL %s specifies a backwards line number range' % rel_path)
|
|
||||||
|
|
||||||
node = nodes.reference(rawtext, link_text, refuri=url, **options)
|
|
||||||
return [node], msgs
|
|
||||||
return role
|
|
||||||
|
|
||||||
|
|
||||||
class translation_link(nodes.Element):
|
|
||||||
"""Node for "link_to_translation" role."""
|
|
||||||
|
|
||||||
|
|
||||||
# Linking to translation is done at the "writing" stage to avoid issues with the info being cached between builders
|
|
||||||
def link_to_translation(name, rawtext, text, lineno, inliner, options={}, content=[]):
|
|
||||||
node = translation_link()
|
|
||||||
node['expr'] = (rawtext, text, options)
|
|
||||||
return [node], []
|
|
||||||
|
|
||||||
|
|
||||||
class TranslationLinkNodeTransform(SphinxPostTransform):
|
|
||||||
# Transform needs to happen early to ensure the new reference node is also transformed
|
|
||||||
default_priority = 0
|
|
||||||
|
|
||||||
def run(self, **kwargs):
|
|
||||||
|
|
||||||
# Only output relative links if building HTML
|
|
||||||
for node in self.document.traverse(translation_link):
|
|
||||||
if 'html' in self.app.builder.name:
|
|
||||||
rawtext, text, options = node['expr']
|
|
||||||
(language, link_text) = text.split(':')
|
|
||||||
env = self.document.settings.env
|
|
||||||
docname = env.docname
|
|
||||||
doc_path = env.doc2path(docname, None, None)
|
|
||||||
return_path = '../' * doc_path.count('/') # path back to the root from 'docname'
|
|
||||||
# then take off 3 more paths for language/release/targetname and build the new URL
|
|
||||||
url = '{}.html'.format(os.path.join(return_path, '../../..', language, env.config.release,
|
|
||||||
env.config.idf_target, docname))
|
|
||||||
node.replace_self(nodes.reference(rawtext, link_text, refuri=url, **options))
|
|
||||||
else:
|
|
||||||
node.replace_self([])
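
# Illustrative sketch (editor-added, hypothetical document/release/target values): the
# relative URL built above when the 'en' build of release 'v4.4' for target 'esp32'
# links the page 'api-guides/build-system' to its 'zh_CN' translation.
_return_path = '../' * 'api-guides/build-system.rst'.count('/')
_url = '{}.html'.format(os.path.join(_return_path, '../../..', 'zh_CN', 'v4.4',
                                     'esp32', 'api-guides/build-system'))
assert _url == '../../../../zh_CN/v4.4/esp32/api-guides/build-system.html'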
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
rev = get_github_rev()
|
|
||||||
submods = get_submodules()
|
|
||||||
|
|
||||||
# links to files or folders on the GitHub
|
|
||||||
app.add_role('idf', github_link('tree', rev, submods, '/', app.config))
|
|
||||||
app.add_role('idf_file', github_link('blob', rev, submods, '/', app.config))
|
|
||||||
app.add_role('idf_raw', github_link('raw', rev, submods, '/', app.config))
|
|
||||||
app.add_role('component', github_link('tree', rev, submods, '/components/', app.config))
|
|
||||||
app.add_role('component_file', github_link('blob', rev, submods, '/components/', app.config))
|
|
||||||
app.add_role('component_raw', github_link('raw', rev, submods, '/components/', app.config))
|
|
||||||
app.add_role('example', github_link('tree', rev, submods, '/examples/', app.config))
|
|
||||||
app.add_role('example_file', github_link('blob', rev, submods, '/examples/', app.config))
|
|
||||||
app.add_role('example_raw', github_link('raw', rev, submods, '/examples/', app.config))
|
|
||||||
|
|
||||||
# link to the current documentation file in specific language version
|
|
||||||
app.add_role('link_to_translation', link_to_translation)
|
|
||||||
app.add_node(translation_link)
|
|
||||||
app.add_post_transform(TranslationLinkNodeTransform)
|
|
||||||
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.5'}
|
|
@ -1,315 +0,0 @@
|
|||||||
# Extension to generate Doxygen XML include files, with IDF config & soc macros included
|
|
||||||
from __future__ import print_function, unicode_literals
|
|
||||||
|
|
||||||
import os
|
|
||||||
import os.path
|
|
||||||
import re
|
|
||||||
import subprocess
|
|
||||||
from io import open
|
|
||||||
|
|
||||||
from .util import copy_if_modified
|
|
||||||
|
|
||||||
ALL_KINDS = [
|
|
||||||
('function', 'Functions'),
|
|
||||||
('union', 'Unions'),
|
|
||||||
('struct', 'Structures'),
|
|
||||||
('define', 'Macros'),
|
|
||||||
('typedef', 'Type Definitions'),
|
|
||||||
('enum', 'Enumerations')
|
|
||||||
]
|
|
||||||
"""list of items that will be generated for a single API file
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def setup(app):
|
|
||||||
# The idf_build_system extension will emit this event once it has generated documentation macro definitions
|
|
||||||
app.connect('idf-defines-generated', generate_doxygen)
|
|
||||||
return {'parallel_read_safe': True, 'parallel_write_safe': True, 'version': '0.2'}
|
|
||||||
|
|
||||||
|
|
||||||
def generate_doxygen(app, defines):
|
|
||||||
build_dir = os.path.dirname(app.doctreedir.rstrip(os.sep))
|
|
||||||
|
|
||||||
# Call Doxygen to get XML files from the header files
|
|
||||||
print('Calling Doxygen to generate latest XML files')
|
|
||||||
doxy_env = os.environ
|
|
||||||
doxy_env.update({
|
|
||||||
'ENV_DOXYGEN_DEFINES': ' '.join('{}={}'.format(key, value) for key, value in defines.items()),
|
|
||||||
'IDF_PATH': app.config.idf_path,
|
|
||||||
'IDF_TARGET': app.config.idf_target,
|
|
||||||
})
|
|
||||||
doxyfile_dir = os.path.join(app.config.docs_root, 'doxygen')
|
|
||||||
doxyfile_main = os.path.join(doxyfile_dir, 'Doxyfile_common')
|
|
||||||
doxyfile_target = os.path.join(doxyfile_dir, 'Doxyfile_' + app.config.idf_target)
|
|
||||||
print('Running doxygen with doxyfiles {} and {}'.format(doxyfile_main, doxyfile_target))
|
|
||||||
|
|
||||||
# It's possible to have doxygen log warnings to a file using WARN_LOGFILE directive,
|
|
||||||
# but in some cases it will still log an error to stderr and return success!
|
|
||||||
#
|
|
||||||
# So take all of stderr and redirect it to a logfile (will contain warnings and errors)
|
|
||||||
logfile = os.path.join(build_dir, 'doxygen-warning-log.txt')
|
|
||||||
|
|
||||||
with open(logfile, 'w') as f:
|
|
||||||
# note: run Doxygen in the build directory, so the xml & xml_in files end up in there
|
|
||||||
subprocess.check_call(['doxygen', doxyfile_main], env=doxy_env, cwd=build_dir, stderr=f)
|
|
||||||
|
|
||||||
# Doxygen has generated XML files in 'xml' directory.
|
|
||||||
# Copy them to 'xml_in', only touching the files which have changed.
|
|
||||||
copy_if_modified(os.path.join(build_dir, 'xml/'), os.path.join(build_dir, 'xml_in/'))
|
|
||||||
|
|
||||||
# Generate 'api_name.inc' files from the Doxygen XML files
|
|
||||||
doxygen_paths = [doxyfile_main, doxyfile_target]
|
|
||||||
convert_api_xml_to_inc(app, doxygen_paths)
|
|
||||||
|
|
||||||
|
|
||||||
def convert_api_xml_to_inc(app, doxyfiles):
|
|
||||||
""" Generate header_file.inc files
|
|
||||||
with API reference made of doxygen directives
|
|
||||||
for each header file
|
|
||||||
specified in the 'INPUT' statement of the Doxyfile.
|
|
||||||
"""
|
|
||||||
build_dir = app.config.build_dir
|
|
||||||
|
|
||||||
xml_directory_path = '{}/xml'.format(build_dir)
|
|
||||||
inc_directory_path = '{}/inc'.format(build_dir)
|
|
||||||
|
|
||||||
fast_build = os.environ.get('DOCS_FAST_BUILD', None)
|
|
||||||
|
|
||||||
if not os.path.isdir(xml_directory_path):
|
|
||||||
raise RuntimeError('Directory {} does not exist!'.format(xml_directory_path))
|
|
||||||
|
|
||||||
if not os.path.exists(inc_directory_path):
|
|
||||||
os.makedirs(inc_directory_path)
|
|
||||||
|
|
||||||
header_paths = [p for d in doxyfiles for p in get_doxyfile_input_paths(app, d)]
|
|
||||||
|
|
||||||
print("Generating 'api_name.inc' files with Doxygen directives")
|
|
||||||
for header_file_path in header_paths:
|
|
||||||
api_name = get_api_name(header_file_path)
|
|
||||||
inc_file_path = inc_directory_path + '/' + api_name + '.inc'
|
|
||||||
rst_output = generate_directives(header_file_path, xml_directory_path)
|
|
||||||
|
|
||||||
previous_rst_output = ''
|
|
||||||
if os.path.isfile(inc_file_path):
|
|
||||||
with open(inc_file_path, 'r', encoding='utf-8') as inc_file_old:
|
|
||||||
previous_rst_output = inc_file_old.read()
|
|
||||||
|
|
||||||
if previous_rst_output != rst_output:
|
|
||||||
with open(inc_file_path, 'w', encoding='utf-8') as inc_file:
|
|
||||||
inc_file.write(rst_output)
|
|
||||||
|
|
||||||
# For fast builds we wipe the doxygen api documentation.
|
|
||||||
# Parsing this output during the sphinx build process is
|
|
||||||
# what takes 95% of the build time
|
|
||||||
if fast_build:
|
|
||||||
with open(inc_file_path, 'w', encoding='utf-8') as inc_file:
|
|
||||||
inc_file.write('')
|
|
||||||
app.tags.add('fast_build')
|
|
||||||
|
|
||||||
|
|
||||||
def get_doxyfile_input_paths(app, doxyfile_path):
|
|
||||||
"""Get contents of Doxyfile's INPUT statement.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Contents of Doxyfile's INPUT.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if not os.path.isfile(doxyfile_path):
|
|
||||||
raise RuntimeError("Doxyfile '{}' does not exist!".format(doxyfile_path))
|
|
||||||
|
|
||||||
print("Getting Doxyfile's INPUT")
|
|
||||||
|
|
||||||
with open(doxyfile_path, 'r', encoding='utf-8') as input_file:
|
|
||||||
line = input_file.readline()
|
|
||||||
# read contents of Doxyfile until 'INPUT' statement
|
|
||||||
while line:
|
|
||||||
if line.find('INPUT') == 0:
|
|
||||||
break
|
|
||||||
line = input_file.readline()
|
|
||||||
|
|
||||||
doxyfile_INPUT = []
|
|
||||||
line = input_file.readline()
|
|
||||||
# skip input_file contents until end of 'INPUT' statement
|
|
||||||
while line:
|
|
||||||
if line.isspace():
|
|
||||||
# we have reached the end of 'INPUT' statement
|
|
||||||
break
|
|
||||||
# process only lines that are not comments
|
|
||||||
if line.find('#') == -1:
|
|
||||||
# extract header file path inside components folder
|
|
||||||
m = re.search('components/(.*\.h)', line) # noqa: W605 - regular expression
|
|
||||||
header_file_path = m.group(1)
|
|
||||||
|
|
||||||
# Replace env variable used for multi target header
|
|
||||||
header_file_path = header_file_path.replace('$(IDF_TARGET)', app.config.idf_target)
|
|
||||||
|
|
||||||
doxyfile_INPUT.append(header_file_path)
|
|
||||||
|
|
||||||
# proceed reading next line
|
|
||||||
line = input_file.readline()
|
|
||||||
|
|
||||||
return doxyfile_INPUT
|
|
||||||
|
|
||||||
|
|
||||||
def get_api_name(header_file_path):
|
|
||||||
"""Get name of API from header file path.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
header_file_path: path to the header file.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The name of API.
|
|
||||||
|
|
||||||
"""
|
|
||||||
api_name = ''
|
|
||||||
regex = r'.*/(.*)\.h'
|
|
||||||
m = re.search(regex, header_file_path)
|
|
||||||
if m:
|
|
||||||
api_name = m.group(1)
|
|
||||||
|
|
||||||
return api_name
|
|
||||||
|
|
||||||
|
|
||||||
def generate_directives(header_file_path, xml_directory_path):
|
|
||||||
"""Generate API reference with Doxygen directives for a header file.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
header_file_path: a path to the header file with API.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Doxygen directives for the header file.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
api_name = get_api_name(header_file_path)
|
|
||||||
|
|
||||||
# in the XML file name, each "_" in the API name is expanded by Doxygen to "__"
|
|
||||||
xlt_api_name = api_name.replace('_', '__')
|
|
||||||
xml_file_path = '%s/%s_8h.xml' % (xml_directory_path, xlt_api_name)
|
|
||||||
|
|
||||||
rst_output = ''
|
|
||||||
rst_output = ".. File automatically generated by 'gen-dxd.py'\n"
|
|
||||||
rst_output += '\n'
|
|
||||||
rst_output += get_rst_header('Header File')
|
|
||||||
rst_output += '* :component_file:`' + header_file_path + '`\n'
|
|
||||||
rst_output += '\n'
|
|
||||||
|
|
||||||
try:
|
|
||||||
import xml.etree.cElementTree as ET
|
|
||||||
except ImportError:
|
|
||||||
import xml.etree.ElementTree as ET
|
|
||||||
|
|
||||||
tree = ET.ElementTree(file=xml_file_path)
|
|
||||||
for kind, label in ALL_KINDS:
|
|
||||||
rst_output += get_directives(tree, kind)
|
|
||||||
|
|
||||||
return rst_output
|
|
||||||
|
|
||||||
|
|
||||||
def get_rst_header(header_name):
|
|
||||||
"""Get rst formatted code with a header.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
header_name: name of header.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Formatted rst code with the header.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
rst_output = ''
|
|
||||||
rst_output += header_name + '\n'
|
|
||||||
rst_output += '^' * len(header_name) + '\n'
|
|
||||||
rst_output += '\n'
|
|
||||||
|
|
||||||
return rst_output
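
# Illustrative sketch (editor-added): the underlined RST section header emitted between
# the groups of doxygen directives generated below.
assert get_rst_header('Macros') == 'Macros\n^^^^^^\n\n'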
|
|
||||||
|
|
||||||
|
|
||||||
def select_unions(innerclass_list):
|
|
||||||
"""Select unions from innerclass list.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
innerclass_list: raw list with unions and structures
|
|
||||||
extracted from Doxygen's XML file.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Doxygen directives with unions selected from the list.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
rst_output = ''
|
|
||||||
for line in innerclass_list.splitlines():
|
|
||||||
# union is denoted by "union" at the beginning of line
|
|
||||||
if line.find('union') == 0:
|
|
||||||
union_id, union_name = re.split(r'\t+', line)
|
|
||||||
rst_output += '.. doxygenunion:: '
|
|
||||||
rst_output += union_name
|
|
||||||
rst_output += '\n'
|
|
||||||
|
|
||||||
return rst_output
|
|
||||||
|
|
||||||
|
|
||||||
def select_structs(innerclass_list):
|
|
||||||
"""Select structures from innerclass list.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
innerclass_list: raw list with unions and structures
|
|
||||||
extracted from Doxygen's XML file.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Doxygen directives with structures selected from the list.
|
|
||||||
Note: some structures are excluded as described on code below.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
rst_output = ''
|
|
||||||
for line in innerclass_list.splitlines():
|
|
||||||
# structure is denoted by "struct" at the beginning of line
|
|
||||||
if line.find('struct') == 0:
|
|
||||||
# skip structures that are part of union
|
|
||||||
# they are documented by 'doxygenunion' directive
|
|
||||||
if line.find('::') > 0:
|
|
||||||
continue
|
|
||||||
struct_id, struct_name = re.split(r'\t+', line)
|
|
||||||
rst_output += '.. doxygenstruct:: '
|
|
||||||
rst_output += struct_name
|
|
||||||
rst_output += '\n'
|
|
||||||
rst_output += ' :members:\n'
|
|
||||||
rst_output += '\n'
|
|
||||||
|
|
||||||
return rst_output
|
|
||||||
|
|
||||||
|
|
||||||
def get_directives(tree, kind):
|
|
||||||
"""Get directives for specific 'kind'.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
tree: the ElementTree 'tree' of XML by Doxygen
|
|
||||||
kind: name of API "kind" to be generated
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Doxygen directives for selected 'kind'.
|
|
||||||
Note: the header with "kind" name is included.
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
rst_output = ''
|
|
||||||
if kind in ['union', 'struct']:
|
|
||||||
innerclass_list = ''
|
|
||||||
for elem in tree.iterfind('compounddef/innerclass'):
|
|
||||||
innerclass_list += elem.attrib['refid'] + '\t' + elem.text + '\n'
|
|
||||||
if kind == 'union':
|
|
||||||
rst_output += select_unions(innerclass_list)
|
|
||||||
else:
|
|
||||||
rst_output += select_structs(innerclass_list)
|
|
||||||
else:
|
|
||||||
for elem in tree.iterfind(
|
|
||||||
'compounddef/sectiondef/memberdef[@kind="%s"]' % kind):
|
|
||||||
name = elem.find('name')
|
|
||||||
rst_output += '.. doxygen%s:: ' % kind
|
|
||||||
rst_output += name.text + '\n'
|
|
||||||
if rst_output:
|
|
||||||
all_kinds_dict = dict(ALL_KINDS)
|
|
||||||
rst_output = get_rst_header(all_kinds_dict[kind]) + rst_output + '\n'
|
|
||||||
|
|
||||||
return rst_output
|
|
@ -1,81 +0,0 @@
|
|||||||
# Utility functions used in conf.py
|
|
||||||
#
|
|
||||||
# Copyright 2017 Espressif Systems (Shanghai) PTE LTD
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
from io import open
|
|
||||||
|
|
||||||
try:
|
|
||||||
import urllib.request
|
|
||||||
_urlretrieve = urllib.request.urlretrieve
|
|
||||||
except ImportError:
|
|
||||||
# Python 2 fallback
|
|
||||||
import urllib
|
|
||||||
_urlretrieve = urllib.urlretrieve
|
|
||||||
|
|
||||||
|
|
||||||
def files_equal(path_1, path_2):
|
|
||||||
if not os.path.exists(path_1) or not os.path.exists(path_2):
|
|
||||||
return False
|
|
||||||
file_1_contents = ''
|
|
||||||
with open(path_1, 'r', encoding='utf-8') as f_1:
|
|
||||||
file_1_contents = f_1.read()
|
|
||||||
file_2_contents = ''
|
|
||||||
with open(path_2, 'r', encoding='utf-8') as f_2:
|
|
||||||
file_2_contents = f_2.read()
|
|
||||||
return file_1_contents == file_2_contents
|
|
||||||
|
|
||||||
|
|
||||||
def copy_file_if_modified(src_file_path, dst_file_path):
|
|
||||||
if not files_equal(src_file_path, dst_file_path):
|
|
||||||
dst_dir_name = os.path.dirname(dst_file_path)
|
|
||||||
if not os.path.isdir(dst_dir_name):
|
|
||||||
os.makedirs(dst_dir_name)
|
|
||||||
shutil.copy(src_file_path, dst_file_path)
|
|
||||||
|
|
||||||
|
|
||||||
def copy_if_modified(src_path, dst_path):
|
|
||||||
if os.path.isfile(src_path):
|
|
||||||
copy_file_if_modified(src_path, dst_path)
|
|
||||||
return
|
|
||||||
|
|
||||||
src_path_len = len(src_path)
|
|
||||||
for root, dirs, files in os.walk(src_path):
|
|
||||||
for src_file_name in files:
|
|
||||||
src_file_path = os.path.join(root, src_file_name)
|
|
||||||
dst_file_path = os.path.join(dst_path + root[src_path_len:], src_file_name)
|
|
||||||
copy_file_if_modified(src_file_path, dst_file_path)
|
|
||||||
|
|
||||||
|
|
||||||
def download_file_if_missing(from_url, to_path):
|
|
||||||
filename_with_path = to_path + '/' + os.path.basename(from_url)
|
|
||||||
exists = os.path.isfile(filename_with_path)
|
|
||||||
if exists:
|
|
||||||
print("The file '%s' already exists" % (filename_with_path))
|
|
||||||
else:
|
|
||||||
tmp_file, header = _urlretrieve(from_url)
|
|
||||||
with open(filename_with_path, 'wb') as fobj:
|
|
||||||
with open(tmp_file, 'rb') as tmp:
|
|
||||||
fobj.write(tmp.read())
|
|
||||||
|
|
||||||
|
|
||||||
def call_with_python(cmd):
|
|
||||||
# using sys.executable ensures that the scripts are called with the same Python interpreter
|
|
||||||
if os.system('{} {}'.format(sys.executable, cmd)) != 0:
|
|
||||||
raise RuntimeError('{} failed'.format(cmd))
|
|
@ -1,7 +0,0 @@
|
|||||||
\NeedsTeXFormat{LaTeX2e}[1995/12/01]
|
|
||||||
\ProvidesPackage{espidf}[2020/03/25 v0.1.0 LaTeX package (ESP-IDF markup)]
|
|
||||||
|
|
||||||
\newcommand{\idfTarget}{<idf_target_title>}
|
|
||||||
\newcommand{\idfReleaseName}{<idf_release_name>}
|
|
||||||
|
|
||||||
\endinput
|
|
@ -1,129 +0,0 @@
|
|||||||
% package with esp-idf specific macros
|
|
||||||
\usepackage{espidf}
|
|
||||||
|
|
||||||
\setcounter{secnumdepth}{2}
|
|
||||||
\setcounter{tocdepth}{2}
|
|
||||||
|
|
||||||
\usepackage{amsmath,amsfonts,amssymb,amsthm}
|
|
||||||
\usepackage{graphicx}
|
|
||||||
%%% reduce spaces for Table of contents, figures and tables
|
|
||||||
%%% it is used "\addtocontents{toc}{\vskip -1.2cm}" etc. in the document
|
|
||||||
\usepackage[notlot,nottoc,notlof]{}
|
|
||||||
|
|
||||||
\usepackage{color}
|
|
||||||
\usepackage{transparent}
|
|
||||||
\usepackage{eso-pic}
|
|
||||||
\usepackage{lipsum}
|
|
||||||
|
|
||||||
%%% Needed for displaying Chinese in English documentation
|
|
||||||
\usepackage{xeCJK}
|
|
||||||
|
|
||||||
\usepackage{footnotebackref} %%link at the footnote to go to the place of footnote in the text
|
|
||||||
|
|
||||||
%% spacing between line
|
|
||||||
\usepackage{setspace}
|
|
||||||
\singlespacing
|
|
||||||
|
|
||||||
|
|
||||||
\definecolor{myred}{RGB}{229, 32, 26}
|
|
||||||
\definecolor{mygrayy}{RGB}{127, 127, 127}
|
|
||||||
\definecolor{myblack}{RGB}{64, 64, 64}
|
|
||||||
|
|
||||||
|
|
||||||
%%%%%%%%%%% datetime
|
|
||||||
\usepackage{datetime}
|
|
||||||
|
|
||||||
\newdateformat{MonthYearFormat}{%
|
|
||||||
\monthname[\THEMONTH], \THEYEAR}
|
|
||||||
|
|
||||||
|
|
||||||
%% RO, LE will not work for 'oneside' layout.
|
|
||||||
%% Change oneside to twoside in document class
|
|
||||||
\usepackage{fancyhdr}
|
|
||||||
\pagestyle{fancy}
|
|
||||||
\fancyhf{}
|
|
||||||
|
|
||||||
% Header and footer
|
|
||||||
\makeatletter
|
|
||||||
\fancypagestyle{normal}{
|
|
||||||
\fancyhf{}
|
|
||||||
\fancyhead[L]{\nouppercase{\leftmark}}
|
|
||||||
\fancyfoot[C]{\py@HeaderFamily\thepage \\ \href{https://www.espressif.com/en/company/documents/documentation_feedback?docId=4287§ions=&version=\idfReleaseName}{Submit Document Feedback}}
|
|
||||||
\fancyfoot[L]{Espressif Systems}
|
|
||||||
\fancyfoot[R]{\idfReleaseName}
|
|
||||||
\renewcommand{\headrulewidth}{0.4pt}
|
|
||||||
\renewcommand{\footrulewidth}{0.4pt}
|
|
||||||
}
|
|
||||||
\makeatother
|
|
||||||
|
|
||||||
\renewcommand{\headrulewidth}{0.5pt}
|
|
||||||
\renewcommand{\footrulewidth}{0.5pt}
|
|
||||||
|
|
||||||
|
|
||||||
% Define a spacing for section, subsection and subsubsection
|
|
||||||
% http://tex.stackexchange.com/questions/108684/spacing-before-and-after-section-titles
|
|
||||||
|
|
||||||
\titlespacing*{\section}{0pt}{6pt plus 0pt minus 0pt}{6pt plus 0pt minus 0pt}
|
|
||||||
\titlespacing*{\subsection}{0pt}{18pt plus 64pt minus 0pt}{0pt}
|
|
||||||
\titlespacing*{\subsubsection}{0pt}{12pt plus 0pt minus 0pt}{0pt}
|
|
||||||
\titlespacing*{\paragraph} {0pt}{3.25ex plus 1ex minus .2ex}{1.5ex plus .2ex}
|
|
||||||
\titlespacing*{\subparagraph} {0pt}{3.25ex plus 1ex minus .2ex}{1.5ex plus .2ex}
|
|
||||||
|
|
||||||
% Define the colors of table of contents
|
|
||||||
% This is helpful to understand http://tex.stackexchange.com/questions/110253/what-the-first-argument-for-lsubsection-actually-is
|
|
||||||
\definecolor{LochmaraColor}{HTML}{1020A0}
|
|
||||||
|
|
||||||
% Hyperlinks
|
|
||||||
\hypersetup{
|
|
||||||
colorlinks = true,
|
|
||||||
allcolors = {LochmaraColor},
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
\RequirePackage{tocbibind} %%% comment this to remove page number for following
|
|
||||||
\addto\captionsenglish{\renewcommand{\contentsname}{Table of contents}}
|
|
||||||
\addto\captionsenglish{\renewcommand{\listfigurename}{List of figures}}
|
|
||||||
\addto\captionsenglish{\renewcommand{\listtablename}{List of tables}}
|
|
||||||
% \addto\captionsenglish{\renewcommand{\chaptername}{Chapter}}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
%%reduce spacing for itemize
|
|
||||||
\usepackage{enumitem}
|
|
||||||
\setlist{nosep}
|
|
||||||
|
|
||||||
%%%%%%%%%%% Quote Styles at the top of chapter
|
|
||||||
\usepackage{epigraph}
|
|
||||||
\setlength{\epigraphwidth}{0.8\columnwidth}
|
|
||||||
\newcommand{\chapterquote}[2]{\epigraphhead[60]{\epigraph{\textit{#1}}{\textbf {\textit{--#2}}}}}
|
|
||||||
%%%%%%%%%%% Quote for all places except Chapter
|
|
||||||
\newcommand{\sectionquote}[2]{{\quote{\textit{``#1''}}{\textbf {\textit{--#2}}}}}
|
|
||||||
|
|
||||||
% Insert 22pt white space before the toc title. \titlespacing at line 65 changes it by -22 later on.
|
|
||||||
\renewcommand*\contentsname{\hspace{0pt}Contents}
|
|
||||||
|
|
||||||
|
|
||||||
% Define section, subsection and subsubsection font size and color
|
|
||||||
\usepackage{sectsty}
|
|
||||||
\definecolor{AllportsColor}{HTML}{A02010}
|
|
||||||
\allsectionsfont{\color{AllportsColor}}
|
|
||||||
|
|
||||||
\usepackage{titlesec}
|
|
||||||
\titleformat{\section}
|
|
||||||
{\color{AllportsColor}\LARGE\bfseries}{\thesection.}{1em}{}
|
|
||||||
|
|
||||||
\titleformat{\subsection}
|
|
||||||
{\color{AllportsColor}\Large\bfseries}{\thesubsection.}{1em}{}
|
|
||||||
|
|
||||||
\titleformat{\subsubsection}
|
|
||||||
{\color{AllportsColor}\large\bfseries}{\thesubsubsection.}{1em}{}
|
|
||||||
|
|
||||||
\titleformat{\paragraph}
|
|
||||||
{\color{AllportsColor}\large\bfseries}{\theparagraph}{1em}{}
|
|
||||||
|
|
||||||
\titleformat{\subparagraph}
|
|
||||||
{\normalfont\normalsize\bfseries}{\thesubparagraph}{1em}{}
|
|
||||||
|
|
||||||
\titleformat{\subsubparagraph}
|
|
||||||
{\normalfont\normalsize\bfseries}{\thesubsubparagraph}{1em}{}
|
|
@ -1,39 +0,0 @@
|
|||||||
\makeatletter
|
|
||||||
\newgeometry{left=0cm,right=0cm,bottom=2cm}
|
|
||||||
|
|
||||||
|
|
||||||
\cfoot{www.espressif.com}
|
|
||||||
|
|
||||||
\renewcommand{\headrulewidth}{0pt}
|
|
||||||
|
|
||||||
{\color{myred}\rule{30pt}{2.1cm}}
|
|
||||||
\hspace{0.2cm}
|
|
||||||
\begin{minipage}[b]{18cm}
|
|
||||||
{\fontsize{36pt}{48pt}\textbf{\idfTarget}}\\
|
|
||||||
|
|
||||||
{\fontsize{28pt}{18pt}\textbf{\color{mygrayy}\@title}}
|
|
||||||
\end{minipage}
|
|
||||||
\hspace{\stretch{1}}
|
|
||||||
|
|
||||||
\vspace{48em}
|
|
||||||
|
|
||||||
|
|
||||||
\begin{flushright}
|
|
||||||
\setlength\parindent{8em}
|
|
||||||
\begin{minipage}[b]{2cm}
|
|
||||||
\sphinxlogo
|
|
||||||
\end{minipage}
|
|
||||||
\hspace{0.2cm}
|
|
||||||
\rule{3pt}{1.9cm}
|
|
||||||
\hspace{0.2cm}
|
|
||||||
\begin{minipage}[b]{7cm}
|
|
||||||
{\large{\idfReleaseName}}\smallskip\newline
|
|
||||||
{\large{\@author}}\smallskip\newline
|
|
||||||
{\large{\@date}}\smallskip
|
|
||||||
\end{minipage}
|
|
||||||
{\color{myred}\rule{30pt}{1.9cm}}
|
|
||||||
\end{flushright}
|
|
||||||
|
|
||||||
|
|
||||||
\restoregeometry
|
|
||||||
\makeatother
|
|
@ -1,22 +1 @@
-# This is a list of python packages used to generate documentation. This file is used with pip:
-# pip install --user -r requirements.txt
-#
-# matplotlib is currently required only by the script generate_chart.py
-matplotlib==3.3.1 ; python_version>="3"
-matplotlib==2.0.1 ; python_version=="2.7"
-cairosvg==2.5.1 # required by sphinxcontrib-svg2pdfconverter[CairoSVG]
-sphinx==2.3.1
-breathe==4.14.1
-sphinx-copybutton==0.3.0
-sphinx-notfound-page
-sphinxcontrib-blockdiag==2.0.0
-sphinxcontrib-seqdiag==2.0.0
-sphinxcontrib-actdiag==2.0.0
-sphinxcontrib-nwdiag==2.0.0
-sphinxcontrib-wavedrom==2.0.0
-sphinxcontrib-svg2pdfconverter[CairoSVG]==1.1.0
-nwdiag==2.0.0
-recommonmark
-future>=0.16.0 # for ../tools/gen_esp_err_to_name.py
-sphinx_selective_exclude==1.0.3
-sphinx_idf_theme==0.3.0
+esp-docs==0.1.0
@ -1,43 +0,0 @@
|
|||||||
# Tiny Python module to sanitize a Git version into something that can be used in a URL
|
|
||||||
#
|
|
||||||
# (this is used in multiple places: conf_common.py and in tools/ci/docs_deploy
|
|
||||||
#
|
|
||||||
# Copyright 2020 Espressif Systems (Shanghai) PTE LTD
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
import os
|
|
||||||
|
|
||||||
|
|
||||||
def sanitize_version(original_version):
|
|
||||||
""" Given a version (probably output from 'git describe --always' or similar), return
|
|
||||||
a URL-safe sanitized version. (this is used as 'release' config variable when building
|
|
||||||
the docs.)
|
|
||||||
|
|
||||||
Will override the original version with the Gitlab CI CI_COMMIT_REF_NAME environment variable if
|
|
||||||
this is present.
|
|
||||||
|
|
||||||
Also follows the RTD-ism that master branch is named 'latest'
|
|
||||||
|
|
||||||
"""
|
|
||||||
|
|
||||||
try:
|
|
||||||
version = os.environ['CI_COMMIT_REF_NAME']
|
|
||||||
except KeyError:
|
|
||||||
version = original_version
|
|
||||||
|
|
||||||
if version == 'master':
|
|
||||||
return 'latest'
|
|
||||||
|
|
||||||
version = version.replace('/', '-')
|
|
||||||
|
|
||||||
return version
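
# Illustrative sketch (editor-added, hypothetical ref names), assuming CI_COMMIT_REF_NAME
# is not set in the environment:
assert sanitize_version('master') == 'latest'
assert sanitize_version('release/v4.4') == 'release-v4.4'
assert sanitize_version('v4.4.1') == 'v4.4.1'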
|
|
@ -1,25 +0,0 @@
|
|||||||
# Documentation Test Folder
|
|
||||||
|
|
||||||
This folder contains the files needed for running tests of the ESP-IDF documentation building system.
|
|
||||||
|
|
||||||
Tests are divided into two categories: unit tests and integration tests.
|
|
||||||
|
|
||||||
## Unit Tests
|
|
||||||
|
|
||||||
The Sphinx IDF extensions are unit-tested in [test_sphinx_idf_extensions.py](test_sphinx_idf_extensions.py)
|
|
||||||
|
|
||||||
## Integration Tests
|
|
||||||
Due to the tight integration with Sphinx, some functionality is difficult to test with simple unit tests.
|
|
||||||
|
|
||||||
To check that the output from the Sphinx build process is as expected [test_docs.py](test_docs.py) builds a test subset of the documentation, found in the [en](en/) folder. The HTML output is then checked to see that it contains the expected content.
|
|
||||||
|
|
||||||
# Running Tests
|
|
||||||
|
|
||||||
Both [test_sphinx_idf_extensions.py](test_sphinx_idf_extensions.py) and [test_docs.py](test_docs.py) are run as part of the `host_test` stage of the CI pipeline.
|
|
||||||
|
|
||||||
It's also possible to run the tests locally by running the following commands from the test folder:
|
|
||||||
|
|
||||||
* `./test_sphinx_idf_extensions.py`
|
|
||||||
* `./test_docs.py`
|
|
||||||
|
|
||||||
Note that [test_docs.py](test_docs.py) tries to build a test subset of the documentation, and thus requires your environment to be set up for building documents. See [Documenting Code](https://docs.espressif.com/projects/esp-idf/en/latest/contribute/documenting-code.html) for instructions on how to set up the `build_docs` environment.
|
|
@ -1,7 +0,0 @@
|
|||||||
Bluetooth
|
|
||||||
=========
|
|
||||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed iaculis, est ut blandit faucibus, dolor libero luctus tortor, finibus luctus neque elit et lacus.
|
|
||||||
|
|
||||||
Sed at enim sed felis vehicula vehicula. Etiam ex ante, lacinia non purus quis, luctus ornare nibh. Phasellus rhoncus massa vitae tincidunt semper.
|
|
||||||
|
|
||||||
Ut dapibus iaculis metus, vel consectetur diam euismod placerat. Maecenas nibh mauris, maximus et accumsan sit amet, lacinia at felis. Curabitur commodo eu lacus gravida volutpat. In hac habitasse platea dictumst. Quisque et tellus pulvinar, convallis nunc nec, sollicitudin mi. Curabitur et purus justo. Fusce non turpis quis nisi eleifend placerat a vitae.
|
|
@ -1,33 +0,0 @@
|
|||||||
# -*- coding: utf-8 -*-
|
|
||||||
#
|
|
||||||
# English Language RTD & Sphinx config file
|
|
||||||
#
|
|
||||||
# Uses ../conf_common.py for most non-language-specific settings.
|
|
||||||
# Importing conf_common adds all the non-language-specific
|
|
||||||
# parts to this conf module
|
|
||||||
try:
|
|
||||||
from conf_common import * # noqa: F403,F401
|
|
||||||
except ImportError:
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
sys.path.insert(0, os.path.abspath('../..'))
|
|
||||||
from conf_common import * # noqa: F403,F401
|
|
||||||
|
|
||||||
# General information about the project.
|
|
||||||
project = u'ESP-IDF Programming Guide'
|
|
||||||
copyright = u'2016 - 2020, Espressif Systems (Shanghai) CO., LTD'
|
|
||||||
|
|
||||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
|
||||||
# for a list of supported languages.
|
|
||||||
language = 'en'
|
|
||||||
html_copy_source = False
|
|
||||||
|
|
||||||
|
|
||||||
html_logo = None
|
|
||||||
latex_logo = None
|
|
||||||
html_static_path = []
|
|
||||||
|
|
||||||
conditional_include_dict = {'esp32':['esp32_page.rst'],
|
|
||||||
'esp32s2':['esp32s2_page.rst'],
|
|
||||||
'SOC_BT_SUPPORTED':['bt_page.rst'],
|
|
||||||
}
|
|
@ -1,3 +0,0 @@
|
|||||||
ESP32 Page
|
|
||||||
============
|
|
||||||
{ESP32_CONTENT}
|
|
@ -1,19 +0,0 @@
|
|||||||
ESP32S2 Page
|
|
||||||
============
|
|
||||||
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed iaculis, est ut blandit faucibus, dolor libero luctus tortor, finibus luctus neque elit et lacus.
|
|
||||||
|
|
||||||
Sed at enim sed felis vehicula vehicula. Etiam ex ante, lacinia non purus quis, luctus ornare nibh. Phasellus rhoncus massa vitae tincidunt semper.
|
|
||||||
|
|
||||||
Ut dapibus iaculis metus, vel consectetur diam euismod placerat. Maecenas nibh mauris, maximus et accumsan sit amet, lacinia at felis. Curabitur commodo eu lacus gravida volutpat. In hac habitasse platea dictumst. Quisque et tellus pulvinar, convallis nunc nec, sollicitudin mi. Curabitur et purus justo. Fusce non turpis quis nisi eleifend placerat a vitae.
|
|
||||||
|
|
||||||
.. only:: esp32
|
|
||||||
|
|
||||||
ESP32 Content. !ESP32_CONTENT!
|
|
||||||
|
|
||||||
.. only:: esp32s2
|
|
||||||
|
|
||||||
ESP32 S2 Content. !ESP32_S2_CONTENT!
|
|
||||||
|
|
||||||
.. only:: SOC_BT_SUPPORTED
|
|
||||||
|
|
||||||
Bluetooth Content. !BT_CONTENT!
|
|
@ -1,3 +0,0 @@
|
|||||||
IDF Target Format
|
|
||||||
=================
|
|
||||||
|
|
@ -1,10 +0,0 @@
|
|||||||
Build docs test index
|
|
||||||
=====================
|
|
||||||
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
|
|
||||||
:esp32s2: ESP32-S2 Page <esp32s2_page>
|
|
||||||
:esp32: ESP32 Page !ESP32_CONTENT! <esp32_page>
|
|
||||||
:SOC_BT_SUPPORTED: BT Page !BT_CONTENT! <bt_page.rst>
|
|
||||||
IDF Target Format <idf_target_format>
|
|
@ -1,7 +0,0 @@
|
|||||||
# Redirects from "old URL" "new URL"
|
|
||||||
#
|
|
||||||
# Space delimited
|
|
||||||
#
|
|
||||||
# New URL should be relative to document root only
|
|
||||||
#
|
|
||||||
# Empty lines and lines starting with # are ignored
|
|
@ -1,98 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import unittest
|
|
||||||
|
|
||||||
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
|
|
||||||
ESP32_DOC = 'esp32_page'
|
|
||||||
ESP32_S2_DOC = 'esp32s2_page'
|
|
||||||
BT_DOC = 'bt_page'
|
|
||||||
LINK_ROLES_DOC = 'link_roles'
|
|
||||||
IDF_FORMAT_DOC = 'idf_target_format'
|
|
||||||
|
|
||||||
|
|
||||||
class DocBuilder():
|
|
||||||
build_docs_py_path = os.path.join(CURRENT_DIR, '..', 'build_docs.py')
|
|
||||||
|
|
||||||
def __init__(self, src_dir, build_dir, target, language):
|
|
||||||
self.language = language
|
|
||||||
self.target = target
|
|
||||||
self.src_dir = src_dir
|
|
||||||
self.build_dir = build_dir
|
|
||||||
self.html_out_dir = os.path.join(CURRENT_DIR, build_dir, language, target, 'html')
|
|
||||||
|
|
||||||
def build(self, opt_args=[]):
|
|
||||||
args = [sys.executable, self.build_docs_py_path, '-b', self.build_dir, '-s', self.src_dir, '-t', self.target, '-l', self.language]
|
|
||||||
args.extend(opt_args)
|
|
||||||
return subprocess.call(args)
|
|
||||||
|
|
||||||
|
|
||||||
class TestDocs(unittest.TestCase):
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def setUpClass(cls):
|
|
||||||
cls.builder = DocBuilder('test', '_build/test_docs', 'esp32s2', 'en')
|
|
||||||
cls.build_ret_flag = cls.builder.build()
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
if self.build_ret_flag:
|
|
||||||
self.fail('Build docs failed with return: {}'.format(self.build_ret_flag))
|
|
||||||
|
|
||||||
def assert_str_not_in_doc(self, doc_name, str_to_find):
|
|
||||||
with open(os.path.join(self.builder.html_out_dir, doc_name)) as f:
|
|
||||||
content = f.read()
|
|
||||||
self.assertFalse(str_to_find in content, 'Found {} in {}'.format(str_to_find, doc_name))
|
|
||||||
|
|
||||||
def assert_str_in_doc(self, doc_name, str_to_find):
|
|
||||||
with open(os.path.join(self.builder.html_out_dir, doc_name)) as f:
|
|
||||||
content = f.read()
|
|
||||||
self.assertTrue(str_to_find in content, 'Did not find {} in {}'.format(str_to_find, doc_name))
|
|
||||||
|
|
||||||
def test_only_dir(self):
|
|
||||||
# Test that ESP32 content was excluded
|
|
||||||
self.assert_str_not_in_doc(ESP32_S2_DOC + '.html', '!ESP32_CONTENT!')
|
|
||||||
|
|
||||||
# Test that ESP32 S2 content was included
|
|
||||||
self.assert_str_in_doc(ESP32_S2_DOC + '.html', '!ESP32_S2_CONTENT!')
|
|
||||||
|
|
||||||
# Test that BT content was excluded
|
|
||||||
self.assert_str_not_in_doc(ESP32_S2_DOC + '.html', '!BT_CONTENT!')
|
|
||||||
|
|
||||||
def test_toctree_filter(self):
|
|
||||||
# ESP32 page should NOT be built
|
|
||||||
esp32_doc = os.path.join(self.builder.html_out_dir, ESP32_DOC + '.html')
|
|
||||||
self.assertFalse(os.path.isfile(esp32_doc), 'Found {}'.format(esp32_doc))
|
|
||||||
self.assert_str_not_in_doc('index.html', '!ESP32_CONTENT!')
|
|
||||||
|
|
||||||
esp32s2_doc = os.path.join(self.builder.html_out_dir, ESP32_S2_DOC + '.html')
|
|
||||||
self.assertTrue(os.path.isfile(esp32s2_doc), '{} not found'.format(esp32s2_doc))
|
|
||||||
|
|
||||||
# Spot check a few other tags
|
|
||||||
# No Bluetooth on ESP32 S2
|
|
||||||
bt_doc = os.path.join(self.builder.html_out_dir, BT_DOC + '.html')
|
|
||||||
self.assertFalse(os.path.isfile(bt_doc), 'Found {}'.format(bt_doc))
|
|
||||||
self.assert_str_not_in_doc('index.html', '!BT_CONTENT!')
|
|
||||||
|
|
||||||
def test_link_roles(self):
|
|
||||||
print('test')
|
|
||||||
|
|
||||||
|
|
||||||
class TestBuildSubset(unittest.TestCase):
|
|
||||||
def test_build_subset(self):
|
|
||||||
builder = DocBuilder('test', '_build/test_build_subset', 'esp32', 'en')
|
|
||||||
|
|
||||||
docs_to_build = 'esp32_page.rst'
|
|
||||||
|
|
||||||
self.assertFalse(builder.build(['-i', docs_to_build]))
|
|
||||||
|
|
||||||
# Check that we only built the input docs
|
|
||||||
bt_doc = os.path.join(builder.html_out_dir, BT_DOC + '.html')
|
|
||||||
esp32_doc = os.path.join(builder.html_out_dir, ESP32_DOC + '.html')
|
|
||||||
self.assertFalse(os.path.isfile(bt_doc), 'Found {}'.format(bt_doc))
|
|
||||||
self.assertTrue(os.path.isfile(esp32_doc), '{} not found'.format(esp32_doc))
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
@ -1,117 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import unittest
|
|
||||||
from tempfile import TemporaryDirectory
|
|
||||||
from unittest.mock import MagicMock
|
|
||||||
|
|
||||||
from sphinx.util import tags
|
|
||||||
|
|
||||||
try:
|
|
||||||
from idf_extensions import exclude_docs
|
|
||||||
except ImportError:
|
|
||||||
sys.path.append('..')
|
|
||||||
from idf_extensions import exclude_docs
|
|
||||||
|
|
||||||
from idf_extensions import format_idf_target, gen_idf_tools_links, link_roles
|
|
||||||
|
|
||||||
|
|
||||||
class TestFormatIdfTarget(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
self.str_sub = format_idf_target.StringSubstituter()
|
|
||||||
|
|
||||||
config = MagicMock()
|
|
||||||
config.idf_target = 'esp32'
|
|
||||||
self.str_sub.init_sub_strings(config)
|
|
||||||
|
|
||||||
def test_add_subs(self):
|
|
||||||
|
|
||||||
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_NAME}'], 'ESP32')
|
|
||||||
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_PATH_NAME}'], 'esp32')
|
|
||||||
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TOOLCHAIN_PREFIX}'], 'xtensa-esp32-elf')
|
|
||||||
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_CFG_PREFIX}'], 'ESP32')
|
|
||||||
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TRM_EN_URL}'],
|
|
||||||
'https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_en.pdf')
|
|
||||||
self.assertEqual(self.str_sub.substitute_strings['{IDF_TARGET_TRM_CN_URL}'],
|
|
||||||
'https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_cn.pdf')
|
|
||||||
|
|
||||||
def test_sub(self):
|
|
||||||
content = ('This is a {IDF_TARGET_NAME}, with {IDF_TARGET_PATH_NAME}/soc.c, compiled with '
|
|
||||||
'{IDF_TARGET_TOOLCHAIN_PREFIX}-gcc with CONFIG_{IDF_TARGET_CFG_PREFIX}_MULTI_DOC. '
|
|
||||||
'TRM can be found at {IDF_TARGET_TRM_EN_URL} or {IDF_TARGET_TRM_CN_URL}')
|
|
||||||
|
|
||||||
expected = ('This is a ESP32, with esp32/soc.c, compiled with xtensa-esp32-elf-gcc with CONFIG_ESP32_MULTI_DOC. '
|
|
||||||
'TRM can be found at https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_en.pdf '
|
|
||||||
'or https://www.espressif.com/sites/default/files/documentation/esp32_technical_reference_manual_cn.pdf')
|
|
||||||
|
|
||||||
self.assertEqual(self.str_sub.substitute(content), expected)
|
|
||||||
|
|
||||||
def test_local_sub(self):
|
|
||||||
content = ('{IDF_TARGET_TX_PIN:default="IO3", esp32="IO4", esp32s2="IO5"}'
|
|
||||||
'The {IDF_TARGET_NAME} UART {IDF_TARGET_TX_PIN} uses for TX')
|
|
||||||
|
|
||||||
expected = 'The ESP32 UART IO4 uses for TX'
|
|
||||||
self.assertEqual(self.str_sub.substitute(content), expected)
|
|
||||||
|
|
||||||
def test_local_sub_default(self):
|
|
||||||
content = ('{IDF_TARGET_TX_PIN:default="IO3", esp32s2="IO5"}'
|
|
||||||
'The {IDF_TARGET_NAME} UART {IDF_TARGET_TX_PIN} uses for TX')
|
|
||||||
|
|
||||||
expected = 'The ESP32 UART IO3 uses for TX'
|
|
||||||
self.assertEqual(self.str_sub.substitute(content), expected)
|
|
||||||
|
|
||||||
def test_local_sub_no_default(self):
|
|
||||||
content = ('{IDF_TARGET_TX_PIN: esp32="IO4", esp32s2="IO5"}'
|
|
||||||
'The {IDF_TARGET_NAME} UART {IDF_TARGET_TX_PIN} uses for TX')
|
|
||||||
|
|
||||||
self.assertRaises(ValueError, self.str_sub.substitute, content)
|
|
||||||
|
|
||||||
|
|
||||||
class TestExclude(unittest.TestCase):
|
|
||||||
|
|
||||||
def setUp(self):
|
|
||||||
self.app = MagicMock()
|
|
||||||
self.app.tags = tags.Tags()
|
|
||||||
self.app.config.conditional_include_dict = {'esp32':['esp32.rst', 'bt.rst'], 'esp32s2':['esp32s2.rst']}
|
|
||||||
self.app.config.docs_to_build = None
|
|
||||||
self.app.config.exclude_patterns = []
|
|
||||||
|
|
||||||
def test_update_exclude_pattern(self):
|
|
||||||
self.app.tags.add('esp32')
|
|
||||||
exclude_docs.update_exclude_patterns(self.app, self.app.config)
|
|
||||||
docs_to_build = set(self.app.config.conditional_include_dict['esp32'])
|
|
||||||
|
|
||||||
# Check that the set of docs to build and the set of docs to exclude do not overlap
|
|
||||||
self.assertFalse(docs_to_build & set(self.app.config.exclude_patterns))
|
|
||||||
|
|
||||||
|
|
||||||
class TestGenIDFToolLinks(unittest.TestCase):
|
|
||||||
def setUp(self):
|
|
||||||
self.app = MagicMock()
|
|
||||||
self.app.config.build_dir = '_build'
|
|
||||||
self.app.config.idf_path = os.environ['IDF_PATH']
|
|
||||||
|
|
||||||
def test_gen_idf_tool_links(self):
|
|
||||||
|
|
||||||
with TemporaryDirectory() as temp_dir:
|
|
||||||
self.app.config.build_dir = temp_dir
|
|
||||||
gen_idf_tools_links.generate_idf_tools_links(self.app, None)
|
|
||||||
self.assertTrue(os.path.isfile(os.path.join(self.app.config.build_dir, 'inc', 'idf-tools-inc.rst')))
|
|
||||||
|
|
||||||
|
|
||||||
class TestLinkRoles(unittest.TestCase):
|
|
||||||
def test_get_submodules(self):
|
|
||||||
submod_dict = link_roles.get_submodules()
|
|
||||||
|
|
||||||
# Test a known submodule to see if it's in the dict
|
|
||||||
test_submod_name = 'components/asio/asio'
|
|
||||||
self.assertIn(test_submod_name, submod_dict)
|
|
||||||
self.assertIsNotNone(submod_dict[test_submod_name].url)
|
|
||||||
self.assertIsNotNone(submod_dict[test_submod_name].rev)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
unittest.main()
|
|
@ -1,197 +0,0 @@
|
|||||||
文档的附加工具和扩展功能指南
|
|
||||||
=============================
|
|
||||||
|
|
||||||
:link_to_translation:`en:[English]`
|
|
||||||
|
|
||||||
ESP-IDF 文档由 `Sphinx <http://www.sphinx-doc.org/>`_ 应用程序生成,使用 Sphinx 对 :idf:`docs` 目录中的 `reStructuredText <https://en.wikipedia.org/wiki/ReStructuredText>`_ (``.rst``) 格式文档进行渲染。关于渲染过程的详细信息,请参阅 :doc:`documenting-code`。
|
|
||||||
|
|
||||||
除 Sphinx 外,我们也使用了其它几种可为用户提供格式精美、便于查找的文档的应用程序。:ref:`setup-for-building-documentation` 中列出了这些应用程序,:idf_file:`docs/requirements.txt` 中列出了其相应的版本号。
|
|
||||||
|
|
||||||
ESP-IDF 中包含多种芯片的双语文档(英文、简体中文)。运行 Sphinx 时不需直接调用 ``sphinx``,而是通过 Python 封装脚本 ``build_docs.py`` 来运行。
|
|
||||||
|
|
||||||
在此基础上,我们也开发了一些自定义的附加工具和扩展功能,旨在帮助整合 `ESP-IDF`_ 目录下的各个文档以及更好地查找和维护文档内容。
|
|
||||||
|
|
||||||
本章节主要帮您快速了解这些附加工具和扩展功能。
|
|
||||||
|
|
||||||
文件夹结构
|
|
||||||
--------------
|
|
||||||
|
|
||||||
* ESP-IDF 根目录下包含一个专门放置文档的文件夹 :idf:`docs`。
|
|
||||||
* ``docs`` 目录下的 :idf:`docs/en` (英文)和 :idf:`docs/zh_CN` (简体中文)子文件夹中包含本地化文档。
|
|
||||||
* 图像文件和本地化文档通用的字体包位于 :idf:`docs/_static` 子文件夹中。
|
|
||||||
* ``docs`` 根目录下以及 ``docs/en`` 和 ``docs/zh_CN`` 中的其它文件则提供了自动生成文档过程中所使用的配置和脚本,其中就包括本章节提到的附加工具和扩展功能。
|
|
||||||
* ``extensions`` 和 ``idf_extensions`` 两目录中提供了 Sphinx 的扩展功能。
|
|
||||||
* 使用 ``build_docs.py``,``docs`` 文件夹中将自动创建一个 ``_build`` 目录。这个目录不会被添加到 `ESP-IDF`_ 项目库中。
|
|
||||||
|
|
||||||
|
|
||||||
附加工具和扩展功能指南
|
|
||||||
--------------------------------
|
|
||||||
|
|
||||||
配置文件
|
|
||||||
^^^^^^^^^^^^
|
|
||||||
|
|
||||||
:idf_file:`docs/conf_common.py`
|
|
||||||
该文件中包含每个本地化文档(包括英文文档、中文文档)所通用的配置信息。在文档每一次的构建过程中,该文件中的内容都将被导入至相应语言文件夹(包括,``docs/en``、``docs/zh_CN``)下的标准 Sphinx 配置文件 ``conf.py`` 中。
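例如,语言专属的 ``conf.py`` 大致如下(取自本次改动中被删除的一个 ``en`` 配置文件并略作简化,仅作说明)::

    # docs/en/conf.py(示意):先导入所有通用配置,再设置语言相关选项
    try:
        from conf_common import *  # noqa: F403,F401
    except ImportError:
        import os
        import sys
        sys.path.insert(0, os.path.abspath('../..'))
        from conf_common import *  # noqa: F403,F401

    language = 'en'
    project = u'ESP-IDF Programming Guide'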
|
|
||||||
|
|
||||||
:idf_file:`docs/sphinx-known-warnings.txt`
|
|
||||||
Sphinx 中存在一些伪错误警报,这些警报只能通过更新 Sphinx 源代码本身来解决。针对这一情况,我们将这些伪错误警报列在了 ``sphinx-known-warnings.txt`` 文件中,每一次生成文档时系统都将检测该文件并忽略这些伪错误警报。
|
|
||||||
|
|
||||||
|
|
||||||
脚本
|
|
||||||
^^^^^^^
|
|
||||||
|
|
||||||
:idf_file:`docs/build_docs.py`
|
|
||||||
|
|
||||||
顶层可执行脚本,负责运行 Sphinx,为单个或多个语言/目标组合生成文档。运行 ``build_docs.py --help`` 可查看所有命令行选项。
|
|
||||||
|
|
||||||
当使用 ``build_docs.py`` 运行 Sphinx 时,脚本会设置 ``idf_target`` 配置变量,并添加一个与该变量同名的 Sphinx 标签,然后通过一些环境变量将相关路径传递给 :ref:`IDF-Specific Extensions`。
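其效果大致相当于向 Sphinx 传入如下参数(以下 Python 代码仅为概念性示意,参数的组织方式是假设,并非 ``build_docs.py`` 的实际实现)::

    import subprocess

    target, language = 'esp32', 'en'
    subprocess.check_call([
        'sphinx-build', '-b', 'html',
        '-t', target,                                   # 供 ".. only::" 等指令使用的标签
        '-D', 'idf_target={}'.format(target),           # idf_target 配置变量
        language,                                       # 源目录
        '_build/{}/{}/html'.format(language, target),   # 输出目录
    ])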
|
|
||||||
|
|
||||||
:idf_file:`docs/check_lang_folder_sync.sh`
|
|
||||||
同时更新双语文档时,语言文件夹 ``docs/en`` 和 ``docs/zh_CN`` 下的文档结构和文件名应保持一致,以减少两文档间的不一致。每一次生成文档时都将运行 ``check_lang_folder_sync.sh`` 脚本,检测是否出现上述不一致的情况。
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
若一个新的章节为英语版本,且暂时还没有中文翻译,那么 ``zh_CN`` 文件夹中相应的中文文件内应写入 ``.. include::`` 指令,路径指向英文源文件。这样,中文读者将也可以看到英文版源文件。例如,如果 ``docs/zh_CN/contribute/documenting-code.rst`` 这一文件还没有中文翻译,则该文件中应写入 ``.. include:: ../../en/contribute/documenting-code.rst``。
|
|
||||||
|
|
||||||
非文档脚本
|
|
||||||
^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
以下脚本除了生成文档之外,也可以用于其它用途:
|
|
||||||
|
|
||||||
:idf_file:`tools/gen_esp_err_to_name.py`
|
|
||||||
该脚本将检测整个 `ESP-IDF`_ 库,在源代码头文件中查找是否有错误代码和信息,然后在 :doc:`../api-reference/error-codes` 内生成一个 ``.inc`` 文件记录这些信息。
|
|
||||||
|
|
||||||
:idf_file:`tools/kconfig_new/confgen.py`
|
|
||||||
ESP-IDF :idf:`components` 的配置选项包含在每个组件目录下的 ``Kconfig`` 文件中,如 :idf_file:`components/bt/Kconfig`。该脚本将检测所有 ``component`` 目录并记录检测到的配置选项,然后在 :ref:`configuration-options-reference` 内生成一个 ``.inc`` 文件记录这些信息。
|
|
||||||
|
|
||||||
通用扩展
|
|
||||||
^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
以下是专为 IDF 开发的 Sphinx 扩展,这些扩展不依赖于任何特定的 IDF 文档操作或配置:
|
|
||||||
|
|
||||||
:idf_file:`docs/extensions/toctree_filter.py`
|
|
||||||
Sphinx 扩展功能,优先于 ``:toctree:`` 指令,允许系统根据是否有标签(如 ``:tagname: toctree_entry``)来过滤条目。完整描述请参考 Python 文件。
|
|
||||||
|
|
||||||
:idf_file:`docs/extensions/list_filter.py`
|
|
||||||
Sphinx 扩展功能,提供一个 ``.. list::`` 指令,允许系统根据是否有标签(如 ``:tagname: - list content``)来过滤条目列表。完整描述请参考 Python 文件。
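这两个扩展按标签过滤条目的思路,可用下面的简化代码示意(并非实际实现,仅帮助理解)::

    import re

    def filter_entries(entries, tags):
        """保留无标签前缀的条目;形如 ":tag: entry" 的条目仅在该标签被定义时保留。"""
        kept = []
        for entry in entries:
            m = re.match(r':(\w+):\s+(.*)', entry)
            if m is None:
                kept.append(entry)            # 普通条目,始终保留
            elif m.group(1) in tags:
                kept.append(m.group(2))       # 标签已定义,去掉前缀后保留
        return kept

    print(filter_entries([':esp32: configure-wrover', 'configure-other-jtag'], {'esp32'}))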
|
|
||||||
|
|
||||||
:idf_file:`docs/extensions/html_redirects.py`
|
|
||||||
在文档的维护过程中,一些源文件可能会转移位置或被重命名。这个 Sphinx 扩展功能便添加了一个重新导向机制,通过在 Sphinx 输出中生成静态 HTML 重新导向页面来为 URL 地址已改变的文档重新导向。该脚本与重新导向列表 ``html_redirect_pages`` 一起使用。``conf_common.py`` 将负责从 :idf_file:`docs/page_redirects.txt` 中生成这个重新导向列表。
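``page_redirects.txt`` 的格式解析逻辑大致如下(简化示意,非实际实现)::

    def parse_redirects(text):
        # 每行格式为 "旧 URL 新 URL"(空格分隔);空行和以 # 开头的行会被忽略
        redirects = []
        for line in text.splitlines():
            line = line.strip()
            if not line or line.startswith('#'):
                continue
            parts = line.split()
            if len(parts) == 2:
                redirects.append((parts[0], parts[1]))
        return redirects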
|
|
||||||
|
|
||||||
|
|
||||||
第三方扩展
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
- ``sphinxcontrib`` 为 blockdiag、seqdiag、actdiag、nwdiag、rackdiag & packetdiag 等图表的扩展
|
|
||||||
- `Sphinx selective exclude`_ 为 ``eager_only`` 的扩展
|
|
||||||
|
|
||||||
.. _idf-specific extensions:
|
|
||||||
|
|
||||||
IDF 专属扩展
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
构建系统集成
|
|
||||||
###################
|
|
||||||
|
|
||||||
:idf:`docs/idf_extensions/build_system/`
|
|
||||||
|
|
||||||
该 Python 包实现了一个 Sphinx 扩展功能,用于将 IDF 构建系统的信息引入文档构建:
|
|
||||||
|
|
||||||
* 创建一个 CMake IDF 项目模型,并运行 CMake 生成元数据。
|
|
||||||
* 注册一些新的配置变量并发出一个 Sphinx 新事件,这些信息都用于其它扩展功能中。
|
|
||||||
|
|
||||||
配置变量
|
|
||||||
@@@@@@@@@@@@@
|
|
||||||
|
|
||||||
* ``docs_root`` - $IDF_PATH/docs 目录的绝对路径
|
|
||||||
* ``idf_path`` - IDF_PATH 变量的值,未设置环境时为 IDF_PATH 的绝对路径
|
|
||||||
* ``build_dir`` - 运行 ``build_docs.py`` 时自动创建的文档生成目录,默认为 ``_build/<lang>/<target>``
|
|
||||||
* ``idf_target`` - IDF_TARGET 的值。``build_docs.py`` 应负责在 Sphinx 命令行中设置该值。
|
|
||||||
|
|
||||||
新事件
|
|
||||||
@@@@@@@@@
|
|
||||||
|
|
||||||
CMake 项目模型运行完成后,系统将在构建初期发出 ``idf-info`` 事件。
|
|
||||||
|
|
||||||
参数为 ``(app, project_description)``,其中 ``project_description`` 是一个字典,其中包含从 CMake 构建目录中的 ``project_description.json`` 内解析出的值。
|
|
||||||
|
|
||||||
其它 IDF 专属的扩展功能均订阅该事件,并使用该事件根据系统构建信息来设置一些文档参数。
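订阅该事件的扩展写法大致如下(示意,处理函数名称为假设)::

    def on_idf_info(app, project_description):
        # project_description 为从 CMake 构建目录下 project_description.json 解析出的字典
        print(sorted(project_description))   # 查看其中可用的字段

    def setup(app):
        # 在扩展的 setup() 中订阅 idf-info 事件
        app.connect('idf-info', on_idf_info)
        return {'parallel_read_safe': True}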
|
|
||||||
|
|
||||||
其它扩展
|
|
||||||
#############
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/include_build_file.py`
|
|
||||||
``include-build-file`` 指令与内置的 ``include-file`` 指令类似,区别在于文件路径是相对于 ``build_dir`` 解析的。
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/kconfig_reference.py`
|
|
||||||
订阅 ``idf-info`` 事件,并使用 confgen 从默认构建项目所使用的组件中生成 ``kconfig.inc`` 文件。之后,这个文件将被存储至 :doc:`/api-reference/kconfig` 中。
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/link_roles.py`
|
|
||||||
实现了若干自定义的 `Sphinx 角色 <https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html>`_,帮助从文档链接到 `ESP-IDF`_ 项目中具体的文件和文件夹。有关具体实现了哪些角色,请参阅 :ref:`link-custom-roles` 和 :ref:`link-language-versions`。
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/esp_err_definitions.py`
|
|
||||||
小扩展包,调用 ``gen_esp_err_to_name.py`` 并更新修改后的 .rst 文件。
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/gen_toolchain_links.py`
|
|
||||||
文档内许多地方提供了下载工具链的链接。为了整合这些链接,减少需要分别手动更新这些链接的时间,该脚本会根据 :idf_file:`tools/toolchain_versions.mk` 内的信息生成工具链下载链接和工具链解压代码片段。
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/gen_version_specific_includes.py`
|
|
||||||
也是一个自动生成 reStructuredText 文本 ``.inc`` 的扩展功能,其中内容是基于当前 ESP-IDF 版本所写。
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/util.py`
|
|
||||||
提供一系列实用功能,主要用于提高本地生成文档(请参见 :ref:`setup-for-building-documentation`)时增量构建的效率,节省后续重新生成文档所需的时间。
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/format_idf_target.py`
|
|
||||||
该扩展根据传入 Sphinx 命令行的 idf_target,替换文档中与目标相关的通用名称。例如:
|
|
||||||
|
|
||||||
This is a {\IDF_TARGET_NAME}, with /{\IDF_TARGET_PATH_NAME}/soc.c, compiled with `{\IDF_TARGET_TOOLCHAIN_PREFIX}-gcc` with `CONFIG_{\IDF_TARGET_CFG_PREFIX}_MULTI_DOC`
|
|
||||||
|
|
||||||
删掉反斜杠后,将被渲染为
|
|
||||||
|
|
||||||
This is a {IDF_TARGET_NAME}, with /{IDF_TARGET_PATH_NAME}/soc.c, compiled with `{IDF_TARGET_TOOLCHAIN_PREFIX}-gcc` with `CONFIG_{IDF_TARGET_CFG_PREFIX}_MULTI_DOC`.
|
|
||||||
|
|
||||||
同时,也支持使用以下语法在本地(单个 rst 文件内)定义替换:
|
|
||||||
{\IDF_TARGET_TX_PIN:default="IO3",esp32="IO4",esp32s2="IO5"}
|
|
||||||
|
|
||||||
这样将在当前的 rst 文件中定义标签 {\IDF_TARGET_TX_PIN} 的替换名称。
|
|
||||||
|
|
||||||
为了让被包含的内容也按照相同的规则进行替换,该扩展覆盖了默认的 ``.. include::`` 指令。
|
|
||||||
|
|
||||||
在依赖于字符排列方式的格式内无法使用这一替换方式,例如,表格内。
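其替换行为可参照本次改动中 ``test_sphinx_idf_extensions.py`` 里的用法,简化示意如下(假设在可导入 ``idf_extensions`` 的环境中运行)::

    from unittest.mock import MagicMock

    from idf_extensions import format_idf_target

    config = MagicMock()
    config.idf_target = 'esp32'

    str_sub = format_idf_target.StringSubstituter()
    str_sub.init_sub_strings(config)

    print(str_sub.substitute('This is a {IDF_TARGET_NAME}, '
                             'compiled with {IDF_TARGET_TOOLCHAIN_PREFIX}-gcc'))
    # -> This is a ESP32, compiled with xtensa-esp32-elf-gcc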
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/latex_builder.py`
|
|
||||||
一个为 latex 生成器添加 ESP-IDF 专属功能的扩展,它覆盖了默认的 Sphinx latex 生成器。
|
|
||||||
|
|
||||||
在输出目录内创建并添加 espidf.sty latex 宏包,其中包含一些运行时所需变量的宏,如 IDF Target 名称。
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/gen_defines.py`
|
|
||||||
Sphinx 扩展,将 IDF 中的定义整合入 Sphinx 构建过程中,在 IDF 项目模型创建完成后开始运行。
|
|
||||||
|
|
||||||
解析这些定义值,并将其添加为 Sphinx 标签。
|
|
||||||
|
|
||||||
发出新的 'idf-defines-generated' 事件,其中有一个包含所有原始定义值的字典,其它扩展功能可以使用这些原始值生成相关数据。
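其核心行为可用如下代码示意(非实际实现,仅帮助理解)::

    def add_defines_as_tags(app, defines):
        # defines:{宏名: 原始值} 字典,来自 IDF 构建系统生成的定义
        for name, value in defines.items():
            if value:                        # 仅为取值非零/非空的定义添加 Sphinx 标签
                app.tags.add(name)
        app.emit('idf-defines-generated', defines)   # 供其它扩展使用原始定义值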
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/exclude_docs.py`
|
|
||||||
Sphinx 扩展,根据 conditional_include_dict {tag:documents} 字典更新需要排除的文档:只有当对应标签被设置时,字典中列出的文档才会被纳入构建。
|
|
||||||
|
|
||||||
同时也负责在使用 config 值 ``docs_to_build`` 生成文档时,排除不相关文档。此时,未在 ``docs_to_build`` 列表内的文档都将被排除。
|
|
||||||
|
|
||||||
订阅 ``idf-defines-generated`` 事件,因为该扩展功能需要根据 Sphinx 标签来决定需排除哪些文档。
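结合本次改动中 ``test_sphinx_idf_extensions.py`` 的测试代码,其调用方式可简化示意如下(仅为说明,并非实际构建流程)::

    from unittest.mock import MagicMock
    from sphinx.util import tags

    from idf_extensions import exclude_docs

    app = MagicMock()
    app.tags = tags.Tags()
    app.config.conditional_include_dict = {'esp32': ['esp32.rst', 'bt.rst'],
                                           'esp32s2': ['esp32s2.rst']}
    app.config.docs_to_build = None
    app.config.exclude_patterns = []

    app.tags.add('esp32')                                  # 以 esp32 标签构建
    exclude_docs.update_exclude_patterns(app, app.config)
    print(app.config.exclude_patterns)                     # esp32 专属文档不会被排除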
|
|
||||||
|
|
||||||
:idf_file:`docs/idf_extensions/run_doxygen.py`
|
|
||||||
订阅 ``idf-defines-generated`` 事件,运行 Doxygen (:idf_file:`docs/doxygen/Doxyfile_common`) 生成描述相关头文件的 XML 文件,然后运行 Breathe 将这些文件转换为可直接包含进 API 参考页面的 ``.inc`` 文件。
|
|
||||||
|
|
||||||
向 Doxygen 传入一些与特定目标相关的自定义环境变量,包括项目默认 ``sdkconfig.h`` 文件内定义的所有宏,以及 ``soc`` 组件 ``xxx_caps.h`` 头文件中定义的所有宏。这意味着,公共 API 头文件可以依赖于特定目标的配置选项或 ``soc`` 功能头文件中的选项,例如头文件中的 ``#ifdef`` 和 ``#if`` 预处理器条件。
|
|
||||||
|
|
||||||
也就是说,我们可以根据生成文档的目标来生成不同的 Doxygen 文件。
|
|
||||||
|
|
||||||
有关这一流程的更多信息,请参考 :doc:`documenting-code` 和 :doc:`../api-reference/template` 中的 **API 参考** 章节。
|
|
||||||
|
|
||||||
相关文档
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
* :doc:`documenting-code`
|
|
||||||
|
|
||||||
|
|
||||||
.. _ESP-IDF: https://github.com/espressif/esp-idf/
|
|
||||||
.. _Sphinx selective exclude: https://github.com/pfalcon/sphinx_selective_exclude
|
|
@ -1,605 +1 @@
|
|||||||
编写代码文档
|
.. include:: ../../en/contribute/documenting-code.rst
|
||||||
============
|
|
||||||
|
|
||||||
:link_to_translation:`en:[English]`
|
|
||||||
|
|
||||||
本文简要介绍了 `espressif/esp-idf`_ 项目库采用的文件风格以及如何在项目库中添加新文件。
|
|
||||||
|
|
||||||
概述
|
|
||||||
----
|
|
||||||
|
|
||||||
在项目库内编写代码文档时,请遵循 `Doxygen 代码注释风格 <http://doxygen.nl/manual/docblocks.html#specialblock>`_。要采用这一风格,您可以将 ``@param`` 等特殊命令插入到标准注释块中,比如::
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @param ratio this is oxygen to air ratio
|
|
||||||
*/
|
|
||||||
|
|
||||||
Doxygen 会解析代码,提取命令和后续文本,生成代码文档。
|
|
||||||
|
|
||||||
注释块通常包含对功能的记述,如下所示。
|
|
||||||
|
|
||||||
.. image:: ../../_static/doc-code-documentation-inline.png
|
|
||||||
:align: center
|
|
||||||
:alt: 内联代码样本文档
|
|
||||||
|
|
||||||
Doxygen 支持多种排版风格,对于文档中可以包含的细节非常灵活。请参考数据丰富、条理清晰的 `Doxygen 手册 <http://doxygen.nl/manual/index.html>`_ 熟悉 Doxygen 特性。
|
|
||||||
|
|
||||||
|
|
||||||
为什么需要 Doxygen?
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
使用 Doxygen 的最终目的是确保所有代码编写风格一致,以便在代码变更时使用 `Sphinx`_ 和 `Breathe`_ 等工具协助筹备、自动更新 API 文档。
|
|
||||||
|
|
||||||
使用这类工具时,上文代码渲染后呈现效果如下:
|
|
||||||
|
|
||||||
.. image:: ../../_static/doc-code-documentation-rendered.png
|
|
||||||
:align: center
|
|
||||||
:alt: 渲染后的内联代码样本文档
|
|
||||||
|
|
||||||
|
|
||||||
尝试一下!
|
|
||||||
----------
|
|
||||||
|
|
||||||
在本项目库编写代码文档时,请遵守下列准则。
|
|
||||||
|
|
||||||
1. 写明代码的基本内容:函数、结构体、类型定义、枚举、宏等。请详细说明代码的用途、功能和限制,因为在阅读他人的文档时你也想看到这些信息。
|
|
||||||
|
|
||||||
2. 函数文档需简述该函数的功能,并解释输入参数和返回值的含义。
|
|
||||||
|
|
||||||
3. 请不要在参数或除空格外的其他字符前面添加数据类型。所有空格和换行符都会压缩为一个空格。如需换行,请执行换行操作两次。
|
|
||||||
|
|
||||||
.. image:: ../../_static/doc-code-function.png
|
|
||||||
:align: center
|
|
||||||
:alt: 内联函数样本文档及渲染后的效果
|
|
||||||
|
|
||||||
4. 如果函数没有输入参数或返回值,请跳过 ``@param`` 或 ``@return``。
|
|
||||||
|
|
||||||
.. image:: ../../_static/doc-code-void-function.png
|
|
||||||
:align: center
|
|
||||||
:alt: 隐式内联函数样本文档及渲染后的效果
|
|
||||||
|
|
||||||
5. 为 ``define``、``struct`` 和 ``enum`` 的成员编写文档时,请在每一项后添加注释,如下所示。
|
|
||||||
|
|
||||||
.. image:: ../../_static/doc-code-member.png
|
|
||||||
:align: center
|
|
||||||
:alt: 内联函数成员样本文档及渲染后的效果
|
|
||||||
|
|
||||||
6. 请在命令后换行(如下文中的 ``@return`` ),呈现排版精美的列表。 ::
|
|
||||||
|
|
||||||
*
|
|
||||||
* @return
|
|
||||||
* - ESP_OK if erase operation was successful
|
|
||||||
* - ESP_ERR_NVS_INVALID_HANDLE if handle has been closed or is NULL
|
|
||||||
* - ESP_ERR_NVS_READ_ONLY if handle was opened as read only
|
|
||||||
* - ESP_ERR_NVS_NOT_FOUND if the requested key doesn't exist
|
|
||||||
* - other error codes from the underlying storage driver
|
|
||||||
*
|
|
||||||
|
|
||||||
7. 头文件(或一组头文件)的功能概览应放在同一目录下单独的 ``README.rst`` 文件中。如果该目录包含多个不同 API 的头文件,应将文件命名为 ``apiname-readme.rst``。
|
|
||||||
|
|
||||||
|
|
||||||
进阶
|
|
||||||
----
|
|
||||||
|
|
||||||
以下小贴士可以帮助你进一步提高文档质量,增强可读性。
|
|
||||||
|
|
||||||
对于代码,请遵循下列准则:
|
|
||||||
|
|
||||||
1. 添加代码片段举例说明。请在片段前后添加 ``@code{c}`` 和 ``@endcode`` 命令。 ::
|
|
||||||
|
|
||||||
*
|
|
||||||
* @code{c}
|
|
||||||
* // Example of using nvs_get_i32:
|
|
||||||
* int32_t max_buffer_size = 4096; // default value
|
|
||||||
* esp_err_t err = nvs_get_i32(my_handle, "max_buffer_size", &max_buffer_size);
|
|
||||||
* assert(err == ESP_OK || err == ESP_ERR_NVS_NOT_FOUND);
|
|
||||||
* // if ESP_ERR_NVS_NOT_FOUND was returned, max_buffer_size will still
|
|
||||||
* // have its default value.
|
|
||||||
* @endcode
|
|
||||||
*
|
|
||||||
|
|
||||||
代码片段应放入所介绍功能的注释块中。
|
|
||||||
|
|
||||||
2. 使用 ``@attention`` 或 ``@note`` 命令高亮显示重要信息。 ::
|
|
||||||
|
|
||||||
*
|
|
||||||
* @attention
|
|
||||||
* 1. This API only impact WIFI_MODE_STA or WIFI_MODE_APSTA mode
|
|
||||||
* 2. If the ESP32 is connected to an AP, call esp_wifi_disconnect to disconnect.
|
|
||||||
*
|
|
||||||
|
|
||||||
上述例子介绍了如何使用编号列表。
|
|
||||||
|
|
||||||
3. 给相似的函数编写文档时,可在前后使用 ``/**@{*/`` 和 ``/**@}*/`` 标记命令。 ::
|
|
||||||
|
|
||||||
/**@{*/
|
|
||||||
/**
|
|
||||||
* @brief common description of similar functions
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
void first_similar_function (void);
|
|
||||||
void second_similar_function (void);
|
|
||||||
/**@}*/
|
|
||||||
|
|
||||||
示例请参照 :component_file:`nvs_flash/include/nvs.h`。
|
|
||||||
|
|
||||||
4. 如果想跳过重复的宏定义、枚举项等代码,不添加描述,请在代码前后添加 ``/** @cond */`` 和 ``/** @endcond */`` 命令。示例请参照 :component_file:`driver/include/driver/gpio.h`。
|
|
||||||
|
|
||||||
5. 使用 markdown 增强文档可读性,添加页眉、链接、表格及更多内容。 ::
|
|
||||||
|
|
||||||
*
|
|
||||||
* [{IDF_TARGET_NAME} 技术参考手册]({IDF_TARGET_TRM_CN_URL})
|
|
||||||
*
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
代码片段、注释、链接等内容如没有附在所述对象对应的注释块中,将不会添加到文档中。
|
|
||||||
|
|
||||||
6. 准备一个或更多完整的代码示例和描述,将描述放入单独的 ``README.md`` 文件中,置于 :idf:`examples` 目录的特定文件夹中。
|
|
||||||
|
|
||||||
统一文档格式
|
|
||||||
------------------
|
|
||||||
|
|
||||||
对于 Markdown (.md) 或 reST (.rst) 文档中的文本类信息,请遵守下列规范保证文档格式统一。
|
|
||||||
|
|
||||||
1. 请确保一个段落仅有一行,同段落中不用断行,如下图所示。通过断行来提高可读性的准则仅限用于书写代码。对于文本类信息,可以通过添加空白行来增加段落,从而提高可读性。
|
|
||||||
|
|
||||||
.. figure:: ../../_static/doc-format1-recommend.png
|
|
||||||
:align: center
|
|
||||||
:scale: 30%
|
|
||||||
:alt: 推荐一个段落仅有一行(点击放大)
|
|
||||||
|
|
||||||
一个段落仅有一行(点击放大)
|
|
||||||
|
|
||||||
.. figure:: ../../_static/doc-format2-notrecommend.png
|
|
||||||
:align: center
|
|
||||||
:scale: 30%
|
|
||||||
:alt: 不推荐段落内断行(点击放大)
|
|
||||||
|
|
||||||
不推荐段落内断行(点击放大)
|
|
||||||
|
|
||||||
2. 请确保中文与英文文档中的行号一一对应,如下图所示。这个方法可以提高日后更新文档的效率。当工程师需要更新文档时,可以迅速定位到对应中文或英文文档的同一行进行更新。对于翻译人员来说,如果文档仅更新了英文版本,译员可以在对应的中文版本中迅速找到需要更新的地方。此外,通过比较中英文文档中的总行数,可以快速判断中文文档是否进行了及时更新。
|
|
||||||
|
|
||||||
.. figure:: ../../_static/doc-format3-recommend.png
|
|
||||||
:align: center
|
|
||||||
:scale: 50%
|
|
||||||
:alt: 中英文文档中行号一一对应(点击放大)
|
|
||||||
|
|
||||||
中英文文档行号一一对应(点击放大)
|
|
||||||
|
|
||||||
.. _link-custom-roles:
|
|
||||||
|
|
||||||
链接到示例
|
|
||||||
----------
|
|
||||||
|
|
||||||
链接到 GitHub 上的示例时,请不要使用绝对 URLs 或硬编码 URLs。请使用 Docutils 自定义角色生成链接。自动生成的链接指向项目库中 git commit 编号(或标记)的 tree 或 blob。这种做法可以确保 master 分支上的文件移动或删除时,链接不会失效。Docutils 自定义角色将以透明的方式处理子模块中的文件,并使用正确的 commit ID 链接到子模块目录中。
|
|
||||||
|
|
||||||
有如下角色可以选择:
|
|
||||||
|
|
||||||
- ``:idf:`path``` - 指向 ESP-IDF 内的目录
|
|
||||||
- ``:idf_file:`path``` - 指向 ESP-IDF 内的文件
|
|
||||||
- ``:idf_raw:`path``` - 指向 ESP-IDF 内的原始格式文件
|
|
||||||
- ``:component:`path``` - 指向 ESP-IDF components 内的文件夹
|
|
||||||
- ``:component_file:`path``` - 指向 ESP-IDF components 内的文件
|
|
||||||
- ``:component_raw:`path``` - 指向 ESP-IDF components 内的原始格式文件
|
|
||||||
- ``:example:`path``` - 指向 ESP-IDF examples 内的文件夹
|
|
||||||
- ``:example_file:`path``` - 指向 ESP-IDF examples 内的文件
|
|
||||||
- ``:example_raw:`path``` - 指向 inside ESP-IDF examples 内的原始格式文件
|
|
||||||
|
|
||||||
示例::
|
|
||||||
|
|
||||||
* :example:`get-started/hello_world`
|
|
||||||
* :example:`Hello World! <get-started/hello_world>`
|
|
||||||
|
|
||||||
渲染效果:
|
|
||||||
|
|
||||||
* :example:`get-started/hello_world`
|
|
||||||
* :example:`Hello World! <get-started/hello_world>`
|
|
||||||
|
|
||||||
CI build 脚本中添加了检查功能,查找 RST 文件中的硬编码链接(通过 URL 的 tree/master、blob/master 或 raw/master 部分识别)。该功能可通过 ``cd docs`` 和 ``make gh-linkcheck`` 命令手动运行。
|
|
||||||
|
|
||||||
|
|
||||||
.. _link-language-versions:
|
|
||||||
|
|
||||||
链接到其他语言文档
|
|
||||||
------------------
|
|
||||||
|
|
||||||
要切换不同语言的文档,可使用 ``:link_to_translation:`` 自定义角色。文档页面中的角色提供其他语言版本的链接。下文的例子说明了如何在文档中添加中英文版本的链接::
|
|
||||||
|
|
||||||
:link_to_translation:`zh_CN:中文版`
|
|
||||||
:link_to_translation:`en:English`
|
|
||||||
|
|
||||||
语言用 ``en`` 或 ``zh_CN`` 等标准简写表示。最后一个分号后的文本非标准化内容,可根据链接的位置自行输入,如::
|
|
||||||
|
|
||||||
:link_to_translation:`en:see description in English`
|
|
||||||
|
|
||||||
|
|
||||||
.. _add-illustrations:
|
|
||||||
|
|
||||||
添加图例
|
|
||||||
--------
|
|
||||||
|
|
||||||
请考虑使用图表和图片解释表述的概念。
|
|
||||||
|
|
||||||
相比于长篇的表述,图例有时可以更好地描述复杂的理念、数据结构或算法。本项目库使用 `blockdiag <http://blockdiag.com/en/index.html>`_ 工具包由简单的文本文件生成图表。
|
|
||||||
|
|
||||||
工具包支持下列图表类型:
|
|
||||||
|
|
||||||
* `框图 <http://blockdiag.com/en/blockdiag/index.html>`_
|
|
||||||
* `时序图 <http://blockdiag.com/en/seqdiag/index.html>`_
|
|
||||||
* `活动图 <http://blockdiag.com/en/actdiag/index.html>`_
|
|
||||||
* `逻辑网络图 <http://blockdiag.com/en/nwdiag/index.html>`_
|
|
||||||
|
|
||||||
使用该工具包,可以将简单的文本(与 graphviz 的 DOT 格式类似)转换成美观的图片。图中内容自动排版。图表代码之后会转换为 ".png" 图片,在后台添加进 **Sphinx** 文档中。
|
|
||||||
|
|
||||||
要查看图表的渲染效果,可使用线上的 `interactive shell`_ 即时显示生成的图片。
|
|
||||||
|
|
||||||
下面是一些图表示例:
|
|
||||||
|
|
||||||
* 简单的 **框图** / ``blockdiag`` - `Wi-Fi Buffer 配置 <http://interactive.blockdiag.com/?compression=deflate&src=eJylUk1rwkAQvfsrBntpIUKiRQqSgK0VSj0EtCi0EjbJxCyuuyG7QW3pf-9m06hJeyg0t33zmHkfCZmItjElGwiLJME8IEwjRFHBA3WAj04H9HcFGyZCwoAoldOwUCgNzkWMwZ7GKgUXnKE9gjOcIt2kSuN39sigMiP8jDqX6GmF_Y3GmJCCqUCmJEM9yEXBY4xDcWjOE8GVpO9oztdaGQmRSRAJlMZysjOCKsVj358Fi_H8GV4Nze2Os4zRyvEbB0XktrseQWVktn_ym-wS-UFb0ilt0pa0N6Vn3i_KUEY5zcqrbXWTx_nDaZHjwYvEHGKiSNeC2q_r3FpQZekObAtMTi4XCi2IBBO5e0Rd5L7ppLG574GvO__PUuO7sXTgweTIyY5GcD1XOtToBhYruDf_VvuUad3tD-0_Xq1TLPPSI84xKvNrF9vzLnrTj1M7rYhrXv24cCPVkZUaOK47n1-lOvbk>`_
|
|
||||||
* 稍复杂的 **框图** - `Wi-Fi 编程模型 <http://interactive.blockdiag.com/?compression=deflate&src=eJyFk09P40AMxe98CqscIVILq72UIFX8kSoQWy0RHABFTuImFtOZaGYKuyC-O840bagaRI7Pfs7Pz0mmTP5cMJbwynNOa2tKi4sF6zJdmIIUvO_tgTz7UCqToQL03nK29OSCrqUpfeXCVxDD6Gg47tSKuKy8yL9b1dWov1E3E4atWtAcl8qnrsKapGDNUhdUZObfdr2UQp3mRhkrXdpoGq-BGwhQmJFaoSZns_Q2mZxdwUNQ44Eojxqcx_x5cAhzo73jN4pHv55WL7m4u0nSZHLbOeiFtBePR9dvmcxm19sWrGvFOXo2utd4CGH5eHQ8bGfcTy-n6fnfO9jMuOfoksV9bvmFbO-Lr27-JPAQ4oqbGJ62c8iN1pQ3EA4O-lOJTncXDvvupCGdu3vmqFQmSQqm3CIYBx0EWou6pADjQJbw3Bj-h3I4onxpsHrCQLnmoD0yVKgLJXuP1x3GsowPmUpfbay3yH5T7khPoi7NnpU-1nisPdkFyY_gV4x9XB3Y0pHdpfoJ60toURQOtqbYuvpJ1B6zDXYym0qmTVpNnh-fpWcbRA>`_
|
|
||||||
* **时序图** / ``seqdiag`` - `在所有信道中扫描特定 AP <http://interactive.blockdiag.com/seqdiag/?compression=deflate&src=eJyVkU1PwzAMhu_7FdburUgQXMomTaPcKIdOIIRQlDVuG1EloUknPsR_J2s2rRsT2nKJ9drvY8ex-C4kr8AWXLFSt8waLBg38D0Cf3jh5Io7qRVMQGmFSS-jqJA1qCpXe51cXwTZGg-pUVa1W8tXQRVY8q5xzNbcoNdb3SmBYqk_9vOlVs7Kr3UJoQmMwgDGMMftWwK4QuU28ZOM7uQm3q_zYTQd5OGl4UtsJmMSE5jCXKtSVl2LUPgpXPvpb4Hj1-RUCPWQ3O_K-wKpX84WMLAcB9B-igCouVLYADnDTA_N9GRzHMdnNMoOG2Vb8-4b4CY6Zr4MT3zOF-k9Sx_TbMHy-Sxjtw9Z-mfRHjEA7hD0X8TPLxU91AQ>`_
|
|
||||||
* **包图** / ``packetdiag`` - `NVS 页面结构 <http://interactive.blockdiag.com/packetdiag/?compression=deflate&src=eJxFkMFOwzAQRO_9ij2mh63idRKaSj1V_ACIE6DIxG4StTgh3oCg6r_j2JTs8c3szNqDqk-GdacasJ-uGlRjKsfjVPM0GriswE_dn786zS3sQRJAYLbXprpRkS-sNV3TcrAGqM1RTWeujr1l1_2Y2U6rIKUod_DIis2LTbJ1YBneeWY-Nj5ts-AtkudPdnJGQ0JppLRFKXZweDhIWrySsPDB95bHb3BzPLx1_K4GSCSt_-4vMizzmykNSuBlgWKuioJYBOHLROnbEBGe_ZfEh-7pNcolIdF_raA8rl5_AaqqWyE>`_
|
|
||||||
|
|
||||||
尝试修改源代码,看看图表会发生什么变化。
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
`interactive shell`_ 使用的字体和 esp-idf 文档使用的字体渲染后显示的效果略有不同。
|
|
||||||
|
|
||||||
|
|
||||||
添加注释
|
|
||||||
--------
|
|
||||||
|
|
||||||
写文档时,您可能需要:
|
|
||||||
|
|
||||||
- 留下建议,说明之后哪些内容需要添加或修改。
|
|
||||||
- 提醒自己或其他人跟进。
|
|
||||||
|
|
||||||
这时,您可以使用 ``.. todo::`` 命令在 reST 文件中添加待做事项。如:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. todo::
|
|
||||||
|
|
||||||
Add a package diagram.
|
|
||||||
|
|
||||||
如果在 reST 文件中添加 ``.. todolist::`` 命令,整篇文档中的所有待做事项将会罗列成表。
|
|
||||||
|
|
||||||
默认情况下,文档生成器会忽视 ``.. todo::`` 和 ``.. todolist::`` 命令。如果您想在本地生成的文档中显示注释和注释列表,请执行下列步骤:
|
|
||||||
|
|
||||||
1. 打开本地的 ``conf_common.py`` 文件。
|
|
||||||
2. 找到 ``todo_include_todos`` 参数。
|
|
||||||
3. 将该参数的值由 ``False`` 改为 ``True``。
|
|
||||||
|
|
||||||
将改动推送到远端分支之前,请把 ``todo_include_todos`` 的值重置为 ``False``。
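该参数在 ``conf_common.py`` 中的形式大致如下(节选示意)::

    # conf_common.py(节选示意)
    todo_include_todos = True   # 本地预览注释时临时设为 True,推送前请改回 False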
|
|
||||||
|
|
||||||
更多关于扩展的信息,请参阅 `sphinx.ext.todo <https://www.sphinx-doc.org/en/master/usage/extensions/todo.html#directive-todolist>`_ 的相关文档。
|
|
||||||
|
|
||||||
为不同芯片书写通用文档
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
乐鑫各芯片的文档是基于现有文档完成的。为提高文档写作效率,使所写文档可重复用于其它芯片(以下称“目标”)文档中,我们为您提供以下功能:
|
|
||||||
|
|
||||||
依据目标类型排除内容
|
|
||||||
"""""""""""""""""""""
|
|
||||||
|
|
||||||
有时会出现某一内容只适用于一个目标的情况。这种情况下,你可以使用 ``.. only:: TAG`` 指令将这部分内容设为某个目标的专属内容,``TAG`` 处替换为以下名称:
|
|
||||||
|
|
||||||
芯片名称:
|
|
||||||
|
|
||||||
* esp32
|
|
||||||
* esp32s2
|
|
||||||
* esp32c3
|
|
||||||
|
|
||||||
``sdkconfig.h`` 中定义的标识符,由目标的默认 menuconfig 设置生成,例如:
|
|
||||||
|
|
||||||
* CONFIG_FREERTOS_UNICORE
|
|
||||||
|
|
||||||
``soc`` 组件 ``*_caps`` 头文件中定义的标识符,例如:
|
|
||||||
|
|
||||||
* SOC_BT_SUPPORTED
|
|
||||||
* SOC_CAN_SUPPORTED
|
|
||||||
|
|
||||||
示例:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. only:: esp32
|
|
||||||
|
|
||||||
ESP32 specific content.
|
|
||||||
|
|
||||||
该指令也支持布尔逻辑操作符 ``and``、``or`` 和 ``not``。示例:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. only:: SOC_BT_SUPPORTED and CONFIG_FREERTOS_UNICORE
|
|
||||||
|
|
||||||
BT specific content only relevant for single-core targets.
|
|
||||||
|
|
||||||
该功能由 `Sphinx selective exclude <https://github.com/pfalcon/sphinx_selective_exclude>`_ 的扩展提供。
|
|
||||||
|
|
||||||
这个扩展有一个缺点:如果你想要排除的章节后面紧跟着一个带有标签的新章节,该功能将无法正确处理。这种情况下,章节标签将无法正确链接到下一章节,但其它内容都可以正常渲染。如遇这一情况,可暂时使用以下应急方法:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. only:: esp32
|
|
||||||
|
|
||||||
.. _section_1_label:
|
|
||||||
|
|
||||||
Section 1
|
|
||||||
^^^^^^^^^
|
|
||||||
|
|
||||||
Section one content
|
|
||||||
|
|
||||||
.. _section_2_label:
|
|
||||||
|
|
||||||
.. only:: not esp32
|
|
||||||
|
|
||||||
.. _section_2_label:
|
|
||||||
|
|
||||||
Section 2
|
|
||||||
^^^^^^^^^
|
|
||||||
Section 2 content
|
|
||||||
|
|
||||||
``:TAG:`` 角色用于从目录树 (toctree) 中排除特定条目。例如:
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 1
|
|
||||||
|
|
||||||
:esp32: configure-wrover
|
|
||||||
configure-other-jtag
|
|
||||||
|
|
||||||
生成文档时,Sphinx 会使用上述提到的指令和角色,根据其被调用的目标标签来添加或排除某些内容。
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
如希望根据目标的标签从 toctree 中排除一整个文档,则需同时更新 :idf_file:`docs/conf_common.py` 中的 ``exclude_patterns`` 列表,为其它目标排除该文档。否则,Sphinx 将发出一条错误警报:WARNING: document isn't included in any toctree。
|
|
||||||
|
|
||||||
对此推荐的解决方案是:将这个文档添加到 :idf_file:`docs/conf_common.py` ``conditional_include_dict`` 中的一个列表里,例如,一个仅供支持蓝牙的目标可见的文档应被添加至 ``BT_DOCS``。此后,如果该文档未设置对应的标签,则 :idf_file:`docs/idf_extensions/exclude_docs.py` 会将其添加至 ``exclude_patterns``。
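``conf_common.py`` 中这类配置的形式可简化示意如下(其中的文档路径仅为举例,并非实际列表)::

    # conf_common.py(简化示意)
    BT_DOCS = ['api-reference/bluetooth/index.rst']   # 仅供支持蓝牙的目标构建的文档(路径为示例)

    conditional_include_dict = {'SOC_BT_SUPPORTED': BT_DOCS}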
|
|
||||||
|
|
||||||
如果你需要从一个列表或项目符号条目中排除某一内容,应通过在 ``.. list::`` 指令中使用 ``:TAG:`` 角色来完成。
|
|
||||||
|
|
||||||
.. code-block:: none
|
|
||||||
|
|
||||||
.. list::
|
|
||||||
|
|
||||||
:esp32: - ESP32 specific content
|
|
||||||
:SOC_BT_SUPPORTED: - BT specific content
|
|
||||||
- Common bullet point
|
|
||||||
- Also common bullet point
|
|
||||||
|
|
||||||
|
|
||||||
替代宏
|
|
||||||
"""""""""""
|
|
||||||
如果你需要引用会随目标类型变化的芯片名称、工具链名称、路径名称或其它通用名称,可以使用 :idf_file:`docs/idf_extensions/format_idf_target.py` 提供的替代宏。
|
|
||||||
|
|
||||||
例如,以下 reStructuredText 内容:
|
|
||||||
|
|
||||||
This is a {\IDF_TARGET_NAME}, with /{\IDF_TARGET_PATH_NAME}/soc.c, compiled with `{\IDF_TARGET_TOOLCHAIN_PREFIX}-gcc` with `CONFIG_{\IDF_TARGET_CFG_PREFIX}_MULTI_DOC`
|
|
||||||
|
|
||||||
将在文档中渲染为:
|
|
||||||
|
|
||||||
This is a {IDF_TARGET_NAME}, with /{IDF_TARGET_PATH_NAME}/soc.c, compiled with `{IDF_TARGET_TOOLCHAIN_PREFIX}-gcc` with `CONFIG_{IDF_TARGET_CFG_PREFIX}_MULTI_DOC`.
|
|
||||||
|
|
||||||
这一扩展也支持在本地(单个源文件内)定义替代名称。请在 RST 文件中以单独一行插入如下定义:
|
|
||||||
|
|
||||||
{\IDF_TARGET_SUFFIX:default="DEFAULT_VALUE", esp32="ESP32_VALUE", esp32s2="ESP32S2_VALUE", esp32c3="ESP32C3_VALUE"}
|
|
||||||
|
|
||||||
这样将在当前的 RST 文件中根据目标类型为 {\IDF_TARGET_SUFFIX} 标签定义一个替代名称。例如:
|
|
||||||
|
|
||||||
{\IDF_TARGET_TX_PIN:default="IO3", esp32="IO4", esp32s2="IO5", esp32c3="IO6"}
|
|
||||||
|
|
||||||
上例将为 {\IDF_TARGET_TX_PIN} 标签定义一个替代名称,当使用 esp32s2 标签调用 sphinx 时,{\IDF_TARGET_TX_PIN} 将被替代为 "IO5"。
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
这样的单个文档定义指令可置于 .rst 文档中的任意位置(单独一行),指令名须以 ``IDF_TARGET_`` 为开头。
|
|
||||||
|
|
||||||
|
|
||||||
汇总文档
|
|
||||||
--------
|
|
||||||
|
|
||||||
文档准备好后,请参照 :doc:`../api-reference/template` 的要求创建一个文件,汇总所有准备好的文档。最后,将指向该文件的链接添加到 ``/docs`` 文件夹或其子文件夹下 ``index.rst`` 文件中相应的 ``.. toctree::`` 内。
|
|
||||||
|
|
||||||
|
|
||||||
Sphinx 新手怎么办
|
|
||||||
------------------
|
|
||||||
|
|
||||||
1. 不要担心。所有需要的软件均有详细文档,并且开源、免费。您可以先查看 `Sphinx`_ 文档。如果您不清楚如何用 rst markup 语言写作,请查看 `reStructuredText Primer <http://www.sphinx-doc.org/en/stable/rest.html>`_。您也可以使用 markdown (.md) 文件编写文档,其支持的特定 markdown 句法请参考 `Recommonmark parser 文档页面 <https://recommonmark.readthedocs.io/en/latest/>`_。
|
|
||||||
|
|
||||||
2. 查看本文档的源文件,了解本文档使用的代码。源文件存储于 GitHub `espressif/esp-idf`_ 项目库的 :idf:`docs` 文件夹下。您可以滑动到页面上方,点击右上角的链接,直接查看本页面的源文件。您也可以通过点击 ``Raw`` 按键打开源文件,在 GitHub 上查看文件的代码。
|
|
||||||
|
|
||||||
3. 想要查看在上传至 GitHub 前文档如何生成、呈现,有两种方式:
|
|
||||||
|
|
||||||
* 安装 `Sphinx`_、`Breathe`_、`Blockdiag <http://blockdiag.com/en/index.html>`_ 和 `Doxygen <http://doxygen.nl/>`_,在本地生成文档,具体可查看下文。
|
|
||||||
|
|
||||||
* 在 `Read the Docs <https://readthedocs.org/>`_ 建立账号,在云端生成文档。 Read the Docs 免费提供文档生成和存储,且速度快、质量高。
|
|
||||||
|
|
||||||
4. 在生成文档前预览,可使用 `Sublime Text <https://www.sublimetext.com/>`_ 编辑器和 `OmniMarkupPreviewer <https://github.com/timonwong/OmniMarkupPreviewer>`_ 插件。
|
|
||||||
|
|
||||||
|
|
||||||
.. _setup-for-building-documentation:
|
|
||||||
|
|
||||||
搭建环境本地生成文档
|
|
||||||
--------------------
|
|
||||||
|
|
||||||
安装依赖项
|
|
||||||
"""""""""""""
|
|
||||||
|
|
||||||
您可以安装下列包,通过搭建环境在电脑上本地生成文档:
|
|
||||||
|
|
||||||
1. Doxygen - http://doxygen.nl/
|
|
||||||
2. Sphinx - https://github.com/sphinx-doc/sphinx/#readme-for-sphinx
|
|
||||||
3. Breathe - https://github.com/michaeljones/breathe#breathe
|
|
||||||
4. "sphinx_idf_theme" 文档主题 - https://github.com/espressif/sphinx_idf_theme
|
|
||||||
5. "sphinx-notfound-page" 自定义 404 页面 - https://github.com/readthedocs/sphinx-notfound-page
|
|
||||||
6. Blockdiag - http://blockdiag.com/en/index.html
|
|
||||||
7. Recommonmark - https://github.com/rtfd/recommonmark
|
|
||||||
|
|
||||||
添加 "sphinx_idf_theme" 包之后,文档将与 `ESP-IDF 编程指南 <https://docs.espressif.com/projects/esp-idf/en/latest/index.html>`_ 的风格保持一致。
|
|
||||||
|
|
||||||
不用担心需要安装太多包。除 Doxygen 之外,其他包均使用纯 Python 语言,可一键安装。
|
|
||||||
|
|
||||||
.. important:: 目前仅支持 Python 3 版本生成文档,无法使用 Python 2。
|
|
||||||
|
|
||||||
Doxygen
|
|
||||||
@@@@@@@
|
|
||||||
|
|
||||||
Doxygen 的安装取决于操作系统:
|
|
||||||
|
|
||||||
**Linux**
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
sudo apt-get install doxygen
|
|
||||||
|
|
||||||
**Windows** - 在 MSYS2 控制台中安装
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
pacman -S doxygen
|
|
||||||
|
|
||||||
**MacOS**
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
brew install doxygen
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
如果您是在 Windows MSYS2 系统上安装(Linux 和 MacOS 用户可以跳过此说明,不使用 MSYS2 的 Windows 用户需找到其它可替代系统),在安装 **之前**,请完成以下两步。这是安装 :ref:`add-illustrations` 提到的 "blockdiag" 依赖项的必须步骤。
|
|
||||||
|
|
||||||
1. 更新所有系统包:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ pacman -Syu
|
|
||||||
|
|
||||||
该过程可能需要重启 MSYS2 MINGW32 控制台并重复上述命令,直至更新完成。
|
|
||||||
|
|
||||||
2. 安装 *blockdiag* 的依赖项之一 *pillow*:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ pacman -S mingw32/mingw-w64-i686-python-pillow
|
|
||||||
|
|
||||||
查看屏幕上的记录,确定 ``mingw-w64-i686-python-pillow-4.3.0-1`` 或更新的版本已安装。旧版本 *pillow* 无法运行。
|
|
||||||
|
|
||||||
Windows 安装 Doxygen 的缺点是 `blockdiag pictures <add-illustrations>`_ 字体不能正确加载,可能会存在乱码。在此问题解决之前,您可以使用 `interactive shell`_ 查看完整图片。
|
|
||||||
|
|
||||||
|
|
||||||
其它应用
|
|
||||||
@@@@@@@@@@@
|
|
||||||
|
|
||||||
|
|
||||||
其他所有应用都是 `Python <https://www.python.org/>`_ 包,可以按照下列步骤一键安装:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
cd ~/esp/esp-idf/docs
|
|
||||||
pip install --user -r requirements.txt
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
安装步骤设定将 ESP-IDF 放在 ``~/esp/esp-idf`` 目录下,这是文档中使用的 ESP-IDF 默认地址。
|
|
||||||
|
|
||||||
生成文档
|
|
||||||
"""""""""
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
cd ~/esp/esp-idf/docs
|
|
||||||
|
|
||||||
现在可以调用如下命令生成文档::
|
|
||||||
|
|
||||||
./build_docs.py build
|
|
||||||
|
|
||||||
运行该命令后,系统将为 ESP-IDF 支持的所有语言和目标生成文档。这一过程需要一些时间,但各个文档会并行构建,速度取决于系统中 CPU 内核的个数(可通过 ``--sphinx-parallel-builds`` 选项调整并行构建数量,详情可见 ``./build_docs.py --help``)。
|
|
||||||
|
|
||||||
如需生成某一目标 (esp32) 某一语种 (en) 的全部文档,即 ``en`` 文件夹下所有文档,运行::
|
|
||||||
|
|
||||||
./build_docs.py -l en -t esp32 build
|
|
||||||
|
|
||||||
其中,语言 (``-l``) 可选择 ``en`` 和 ``zh_CN``;目标 (``-t``) 可选择 ESP-IDF 中任意可支持的构建目标(如 ``esp32`` 和 ``esp32s2``)。
|
|
||||||
|
|
||||||
生成后的文档将位于 ``_build/<language>/<target>/html`` 文件夹中。如需查阅,请在网页浏览器中打开该目录里的 ``index.html``。
|
|
||||||
|
|
||||||
生成文档子集
|
|
||||||
""""""""""""""
|
|
||||||
编译某一语言的所有文档可能速度较慢,因此,也可以选择只生成所需的某个文档或部分所选文档。
|
|
||||||
|
|
||||||
在指令中列出你需要生成的文档名称即可::
|
|
||||||
|
|
||||||
./build_docs.py -l en -t esp32 -i api-reference/peripherals/can.rst build
|
|
||||||
|
|
||||||
也可以同时生成多个文档::
|
|
||||||
|
|
||||||
./build_docs.py -l en -t esp32 -i api-reference/peripherals/can.rst api-reference/peripherals/adc.rst build
|
|
||||||
|
|
||||||
还可以使用通配符,选择生成所有符合条件的文档::
|
|
||||||
|
|
||||||
./build_docs.py -l en -t esp32 -i api-reference/peripherals/* build
|
|
||||||
|
|
||||||
请注意,这一功能仅用于文档写作过程中的检查和测试。其生成的 HTML 页面并非渲染完成后的格式,比如,运行这一指令并不会生成一个列有所有文档的索引,而且如果其中涉及到任何还未生成的文档参考都将导致错误警报出现。
|
|
||||||
|
|
||||||
快速生成文档
|
|
||||||
""""""""""""""""
|
|
||||||
可以通过跳过 doxygen 生成的 API 文档直接进入 Sphinx 构建过程来加速文档生成,这样做可以大幅缩短文档构建时间。
|
|
||||||
|
|
||||||
可通过添加如下参数来实现::
|
|
||||||
|
|
||||||
./build_docs.py build -f
|
|
||||||
|
|
||||||
或者通过设置环境变量 ``DOCS_FAST_BUILD`` 来实现。请注意 ``-f`` 参数是 ``build`` 的一个子参数,因此必须放在 ``build`` 后面。
|
|
||||||
|
|
||||||
生成 PDF
|
|
||||||
""""""""""""
|
|
||||||
|
|
||||||
可以使用 ``build_docs.py`` 生成文档的 latex 和 PDF 格式,需安装以下 latex 工具包:
|
|
||||||
|
|
||||||
* latexmk
|
|
||||||
* texlive-latex-recommended
|
|
||||||
* texlive-fonts-recommended
|
|
||||||
* texlive-xetex
|
|
||||||
|
|
||||||
同时,也需要安装以下字体:
|
|
||||||
|
|
||||||
* Freefont Serif、Sans 和 Mono OpenType fonts,类似于 Ubuntu 上的 ``fonts-freefont-otf`` 包
|
|
||||||
* Lmodern,类似于 Ubuntu 上的 ``fonts-lmodern`` 包
|
|
||||||
* Fandol,可从 `这里 <https://ctan.org/tex-archive/fonts/fandol>`_ 下载
|
|
||||||
|
|
||||||
现在,可通过以下指令生成文档的 PDF 格式::
|
|
||||||
|
|
||||||
./build_docs.py -bs latex -l en -t esp32 build
|
|
||||||
|
|
||||||
或者,也可以同时生成 html 和 PDF 格式::
|
|
||||||
|
|
||||||
./build_docs.py -bs html latex -l en -t esp32 build
|
|
||||||
|
|
||||||
Latex 和 PDF 文件将位于 ``_build/<language>/<target>/latex`` 文件夹中。
|
|
||||||
|
|
||||||
大功告成
|
|
||||||
--------
|
|
||||||
|
|
||||||
我们喜欢可以做酷炫事情的好代码。
|
|
||||||
但我们更喜欢有清晰文档的好代码,可以让读者快速上手,做酷炫的事情。
|
|
||||||
|
|
||||||
尝试一下,贡献你的代码和文档!
|
|
||||||
|
|
||||||
|
|
||||||
相关文档
|
|
||||||
--------
|
|
||||||
|
|
||||||
* :doc:`../api-reference/template`
|
|
||||||
* :doc:`add-ons-reference`
|
|
||||||
|
|
||||||
|
|
||||||
.. _espressif/esp-idf: https://github.com/espressif/esp-idf/
|
|
||||||
|
|
||||||
.. _interactive shell: http://interactive.blockdiag.com/?compression=deflate&src=eJxlUMFOwzAMvecrrO3aITYQQirlAIIzEseJQ5q4TUSIq8TVGIh_J2m7jbKc7Ge_5_dSO1Lv2soWvoVYgieNoMh7VGzJR9FJtugZ7lYQ0UcKEbYNOY36rRQHZHUPT68vV5tceGLbWCUzPfeaFFMoBZzecVc56vWwJFnWMmJ59CCZg617xpOFbTSyw0pmvT_HJ7hxtFNGBr6wvuu5SCkchcrZ1vAeXZomznh5YgTqfcpR02cBO6vZVDeXBRjMjKEcFRbLh8f18-Z2UUBDnqP9wmp9ncRmSSfND2ldGo2h_zse407g0Mxc1q7HzJ3-4jzYYTJjtQH3iSV-fgFzx50J
|
|
||||||
|
|
||||||
.. _Sphinx: http://www.sphinx-doc.org/
|
|
||||||
.. _Breathe: https://breathe.readthedocs.io
|
|
||||||
|
@ -20,11 +20,7 @@ components/partition_table/test_gen_esp32part_host/gen_esp32part_tests.py
|
|||||||
components/spiffs/spiffsgen.py
|
components/spiffs/spiffsgen.py
|
||||||
components/spiffs/test_spiffsgen/test_spiffsgen.py
|
components/spiffs/test_spiffsgen/test_spiffsgen.py
|
||||||
components/ulp/esp32ulp_mapgen.py
|
components/ulp/esp32ulp_mapgen.py
|
||||||
docs/build_docs.py
|
|
||||||
docs/check_lang_folder_sync.sh
|
docs/check_lang_folder_sync.sh
|
||||||
docs/idf_extensions/gen_version_specific_includes.py
|
|
||||||
docs/test/test_docs.py
|
|
||||||
docs/test/test_sphinx_idf_extensions.py
|
|
||||||
examples/build_system/cmake/idf_as_lib/build-esp32.sh
|
examples/build_system/cmake/idf_as_lib/build-esp32.sh
|
||||||
examples/build_system/cmake/idf_as_lib/build.sh
|
examples/build_system/cmake/idf_as_lib/build.sh
|
||||||
examples/build_system/cmake/idf_as_lib/run-esp32.sh
|
examples/build_system/cmake/idf_as_lib/run-esp32.sh
|
||||||
|
@ -24,8 +24,6 @@ components/wifi_provisioning/python/wifi_constants_pb2.py
|
|||||||
components/wifi_provisioning/python/wifi_scan_pb2.py
|
components/wifi_provisioning/python/wifi_scan_pb2.py
|
||||||
components/xtensa/trax/traceparse.py
|
components/xtensa/trax/traceparse.py
|
||||||
docs/build_docs.py
|
docs/build_docs.py
|
||||||
docs/conf_common.py
|
|
||||||
docs/en/conf.py
|
|
||||||
docs/extensions/google_analytics.py
|
docs/extensions/google_analytics.py
|
||||||
docs/extensions/html_redirects.py
|
docs/extensions/html_redirects.py
|
||||||
docs/extensions/list_filter.py
|
docs/extensions/list_filter.py
|
||||||
|
Loading…
Reference in New Issue
Block a user