docs: support building a subset of the documentation with build_docs

Closes IDF-1688
Marius Vikhammer 2020-05-12 16:11:09 +08:00
parent 9f801fd2f5
commit a316082f53
5 changed files with 60 additions and 4 deletions

View File

@@ -84,6 +84,8 @@ def main():
help="Parallel Sphinx builds - number of independent Sphinx builds to run", default="auto")
parser.add_argument("--sphinx-parallel-jobs", "-j", choices=["auto"] + [str(x) for x in range(8)],
help="Sphinx parallel jobs argument - number of threads for each Sphinx build to use", default="1")
parser.add_argument("--input-docs", "-i", nargs='+', default=[""],
help="List of documents to build relative to the doc base folder, i.e. the language folder. Defaults to all documents")
action_parsers = parser.add_subparsers(dest='action')
@@ -153,7 +155,7 @@ def parallel_call(args, callback):
for target in targets:
for language in languages:
build_dir = os.path.realpath(os.path.join(args.build_dir, language, target))
entries.append((language, target, build_dir, args.sphinx_parallel_jobs, args.builders))
entries.append((language, target, build_dir, args.sphinx_parallel_jobs, args.builders, args.input_docs))
print(entries)
errcodes = pool.map(callback, entries)
@@ -175,7 +177,7 @@ def parallel_call(args, callback):
return 0
def sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername):
def sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername, input_docs):
# Note: because this runs in a multiprocessing Process, everything which happens here should be isolated to a single process
# (i.e. it doesn't matter if Sphinx is using global variables, as they're its own copy of the global variables)
@@ -201,6 +203,7 @@ def sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername):
"-w", SPHINX_WARN_LOG,
"-t", target,
"-D", "idf_target={}".format(target),
"-D", "docs_to_build={}".format(",". join(input_docs)),
os.path.join(os.path.abspath(os.path.dirname(__file__)), language), # srcdir for this language
os.path.join(build_dir, buildername) # build directory
]
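
For illustration, a minimal standalone sketch of the argument list a subset build ends up passing to Sphinx; the warning-log filename and directories below are placeholders rather than the commit's constants, and the only difference from a full build is the extra docs_to_build override:

# Hypothetical values for illustration; "sphinx-warning-log.txt" and the
# directories are placeholders, not constants taken from build_docs.py.
input_docs = ["api-reference/peripherals/can.rst", "api-reference/peripherals/adc.rst"]
target = "esp32"
language = "en"
sphinx_args = [
    "-b", "html",
    "-w", "sphinx-warning-log.txt",
    "-t", target,
    "-D", "idf_target={}".format(target),
    "-D", "docs_to_build={}".format(",".join(input_docs)),
    language,                                        # srcdir for this language
    "_build/{}/{}/html".format(language, target),    # build directory
]
# The override expands to:
#   -D docs_to_build=api-reference/peripherals/can.rst,api-reference/peripherals/adc.rst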
@@ -259,9 +262,9 @@ def action_build(args):
def call_build_docs(entry):
(language, target, build_dir, sphinx_parallel_jobs, builders) = entry
(language, target, build_dir, sphinx_parallel_jobs, builders, input_docs) = entry
for buildername in builders:
ret = sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername)
ret = sphinx_call(language, target, build_dir, sphinx_parallel_jobs, buildername, input_docs)
if ret != 0:
return ret

View File

@@ -383,6 +383,7 @@ def setup(app):
app.add_config_value('idf_targets', None, 'env')
app.add_config_value('conditional_include_dict', None, 'env')
app.add_config_value('docs_to_build', None, 'env')
# Breathe extension variables (depend on build_dir)
# note: we generate into xml_in and then copy_if_modified to xml dir
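
A minimal standalone sketch (not the commit's code) of why this registration matters: Sphinx only accepts a -D docs_to_build=... override for a config value that has been registered, and an extension can then read the comma-separated string from the config object:

# Sketch only; the callback name is hypothetical. When build_docs.py passes
# "-D docs_to_build=a.rst,b.rst", the value arrives here as the string
# "a.rst,b.rst"; None or "" means build everything.
def report_requested_docs(app, config):
    if config.docs_to_build:
        print('Requested docs:', config.docs_to_build.split(','))

def setup(app):
    app.add_config_value('docs_to_build', None, 'env')
    app.connect('config-inited', report_requested_docs)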

View File

@@ -167,6 +167,8 @@ Other Extensions
:idf_file:`docs/idf_extensions/exclude_docs.py`
Sphinx extension that updates the excluded documents according to the conditional_include_dict {tag:documents} mapping. If a tag is set, then its list of documents will be included.
Also responsible for excluding documents when building with the config value ``docs_to_build`` set; in that case, all documents not listed in ``docs_to_build`` will be excluded.
Subscribes to ``idf-defines-generated`` as it relies on the Sphinx tags to determine which documents to exclude.
:idf_file:`docs/idf_extensions/run_doxygen.py`

View File

@@ -511,6 +511,23 @@ Choices for language (``-l``) are ``en`` and ``zh_CN``. Choices for target (``-t
The built documentation will be placed in the ``_build/<language>/<target>/html`` folder. To view it, open ``index.html`` inside this directory in a web browser.
Building a subset of the documentation
""""""""""""""""""""""""""""""""""""""
Since building the full documentation can be quite slow, it can be useful to build just the subset of the documentation you are interested in.
This can be achieved by listing the document you want to build::
./build_docs.py -l en -t esp32 -i api-reference/peripherals/can.rst build
Building multiple documents is also possible::
./build_docs.py -l en -t esp32 -i api-reference/peripherals/can.rst api-reference/peripherals/adc.rst build
As well as wildcards::
./build_docs.py -l en -t esp32 -i api-reference/peripherals/* build
Note that this feature is intended to simplify testing and debugging while writing documentation. The HTML output will not be perfect, i.e. it will not build a proper index that lists all the documents, and any references to documents that are not built will result in warnings.
Building PDF
""""""""""""

View File

@@ -1,10 +1,43 @@
from sphinx.util import get_matching_files
from sphinx.util.matching import compile_matchers
# Updates the excluded documents according to the conditional_include_dict {tag:documents}
def update_exclude_patterns(app, config):
# Default to building all if option not set
if config.docs_to_build:
build_subset(app, config)
for tag, docs in config.conditional_include_dict.items():
if not app.tags.has(tag):
app.config.exclude_patterns.extend(docs)
def build_subset(app, config):
# Convert to list of docs to build
docs_to_build = config.docs_to_build.split(',')
# Exclude all documents which were not set as docs_to_build when build_docs was called
exclude_docs = [filename for filename in get_matching_files(app.srcdir, compile_matchers(docs_to_build))]
app.config.exclude_patterns.extend(exclude_docs)
# Get all docs that will be built
docs = [filename for filename in get_matching_files(app.srcdir, compile_matchers(exclude_docs))]
if not docs:
raise ValueError("No documents to build")
print("Building a subset of the documents: {}".format(docs))
# Sphinx requires a master document; if there is a document named 'index', then we pick that
index_docs = [doc for doc in docs if 'index' in doc]
if index_docs:
config.master_doc = index_docs[0].replace('.rst', '')
else:
config.master_doc = docs[0].replace('.rst', '')
def setup(app):
# Tags are generated together with defines
app.connect('config-inited', update_exclude_patterns)
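
To make the double use of get_matching_files() above easier to follow, here is a standalone sketch (an illustration assuming the Sphinx 2.x-era helpers imported at the top of this file, not part of the commit). get_matching_files(srcdir, matchers) yields every file under srcdir that does not match the given matchers, so matching against docs_to_build gives the exclude list, and matching against that exclude list gives back exactly the requested subset:

# Illustration only; "docs/en" and the document path are hypothetical.
from sphinx.util import get_matching_files
from sphinx.util.matching import compile_matchers

srcdir = "docs/en"
docs_to_build = ["api-reference/peripherals/can.rst"]

# 1st pass: everything except the requested docs -> the exclude list
exclude_docs = list(get_matching_files(srcdir, compile_matchers(docs_to_build)))

# 2nd pass: everything except the exclude list -> exactly the requested subset
docs = list(get_matching_files(srcdir, compile_matchers(exclude_docs)))

print(len(exclude_docs), "files excluded,", len(docs), "kept")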