Diffstat (limited to 'tools/docs')
-rwxr-xr-x  tools/docs/checktransupdate.py            307
-rwxr-xr-x  tools/docs/documentation-file-ref-check   245
-rwxr-xr-x  tools/docs/features-refresh.sh             98
-rwxr-xr-x  tools/docs/find-unused-docs.sh             62
-rwxr-xr-x  tools/docs/get_abi.py                      214
-rwxr-xr-x  tools/docs/get_feat.pl                     641
-rwxr-xr-x  tools/docs/list-arch.sh                     11
-rwxr-xr-x  tools/docs/sphinx-pre-install                2
-rwxr-xr-x  tools/docs/test_doc_build.py               513
9 files changed, 2092 insertions, 1 deletions
diff --git a/tools/docs/checktransupdate.py b/tools/docs/checktransupdate.py
new file mode 100755
index 000000000000..e894652369a5
--- /dev/null
+++ b/tools/docs/checktransupdate.py
@@ -0,0 +1,307 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0
+
+"""
+This script helps track the translation status of the documentation
+in different locales, e.g., zh_CN. More specifically, it uses `git log`
+to find the latest English commit reachable from the translation commit
+(ordered by author date) and the latest English commit from HEAD. If
+they differ, it reports the file and the commits that need to be updated.
+
+The usage is as follows:
+- tools/docs/checktransupdate.py -l zh_CN
+This will print all the files that need to be updated or translated in the zh_CN locale.
+- tools/docs/checktransupdate.py Documentation/translations/zh_CN/dev-tools/testing-overview.rst
+This will only print the status of the specified file.
+
+The output is something like:
+Documentation/dev-tools/kfence.rst
+No translation in the locale of zh_CN
+
+Documentation/translations/zh_CN/dev-tools/testing-overview.rst
+commit 42fb9cfd5b18 ("Documentation: dev-tools: Add link to RV docs")
+1 commit(s) need resolving in total
+"""
+
+import os
+import re
+import time
+import logging
+from argparse import ArgumentParser, ArgumentTypeError, BooleanOptionalAction
+from datetime import datetime
+
+
+def get_origin_path(file_path):
+ """Get the origin path from the translation path"""
+ paths = file_path.split("/")
+ tidx = paths.index("translations")
+ opaths = paths[:tidx]
+ opaths += paths[tidx + 2 :]
+ return "/".join(opaths)
+
+
+def get_latest_commit_from(file_path, commit):
+ """Get the latest commit from the specified commit for the specified file"""
+ command = f"git log --pretty=format:%H%n%aD%n%cD%n%n%B {commit} -1 -- {file_path}"
+ logging.debug(command)
+ pipe = os.popen(command)
+ result = pipe.read()
+ result = result.split("\n")
+ if len(result) <= 1:
+ return None
+
+ logging.debug("Result: %s", result[0])
+
+ return {
+ "hash": result[0],
+ "author_date": datetime.strptime(result[1], "%a, %d %b %Y %H:%M:%S %z"),
+ "commit_date": datetime.strptime(result[2], "%a, %d %b %Y %H:%M:%S %z"),
+ "message": result[4:],
+ }
+
+
+def get_origin_from_trans(origin_path, t_from_head):
+ """Get the latest origin commit from the translation commit"""
+ o_from_t = get_latest_commit_from(origin_path, t_from_head["hash"])
+ while o_from_t is not None and o_from_t["author_date"] > t_from_head["author_date"]:
+ o_from_t = get_latest_commit_from(origin_path, o_from_t["hash"] + "^")
+ if o_from_t is not None:
+ logging.debug("tracked origin commit id: %s", o_from_t["hash"])
+ return o_from_t
+
+
+def get_origin_from_trans_smartly(origin_path, t_from_head):
+ """Get the latest origin commit from the formatted translation commit:
+ (1) update to commit HASH (TITLE)
+ (2) Update the translation through commit HASH (TITLE)
+ """
+    # regex for an abbreviated 12-character commit hash
+ HASH = r'([0-9a-f]{12})'
+ # pattern 1: contains "update to commit HASH"
+ pat_update_to = re.compile(rf'update to commit {HASH}')
+ # pattern 2: contains "Update the translation through commit HASH"
+ pat_update_translation = re.compile(rf'Update the translation through commit {HASH}')
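+    # e.g. a commit message line such as
+    #   update to commit 42fb9cfd5b18 ("Documentation: dev-tools: Add link to RV docs")
+    # yields the origin commit hash 42fb9cfd5b18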
+
+ origin_commit_hash = None
+ for line in t_from_head["message"]:
+ # check if the line matches the first pattern
+ match = pat_update_to.search(line)
+ if match:
+ origin_commit_hash = match.group(1)
+ break
+ # check if the line matches the second pattern
+ match = pat_update_translation.search(line)
+ if match:
+ origin_commit_hash = match.group(1)
+ break
+ if origin_commit_hash is None:
+ return None
+ o_from_t = get_latest_commit_from(origin_path, origin_commit_hash)
+ if o_from_t is not None:
+ logging.debug("tracked origin commit id: %s", o_from_t["hash"])
+ return o_from_t
+
+
+def get_commits_count_between(opath, commit1, commit2):
+ """Get the commits count between two commits for the specified file"""
+ command = f"git log --pretty=format:%H {commit1}...{commit2} -- {opath}"
+ logging.debug(command)
+ pipe = os.popen(command)
+ result = pipe.read().split("\n")
+ # filter out empty lines
+ result = list(filter(lambda x: x != "", result))
+ return result
+
+
+def pretty_output(commit):
+ """Pretty print the commit message"""
+ command = f"git log --pretty='format:%h (\"%s\")' -1 {commit}"
+ logging.debug(command)
+ pipe = os.popen(command)
+ return pipe.read()
+
+
+def valid_commit(commit):
+ """Check if the commit is valid or not"""
+ msg = pretty_output(commit)
+ return "Merge tag" not in msg
+
+def check_per_file(file_path):
+ """Check the translation status for the specified file"""
+ opath = get_origin_path(file_path)
+
+ if not os.path.isfile(opath):
+ logging.error("Cannot find the origin path for {file_path}")
+ return
+
+ o_from_head = get_latest_commit_from(opath, "HEAD")
+ t_from_head = get_latest_commit_from(file_path, "HEAD")
+
+ if o_from_head is None or t_from_head is None:
+ logging.error("Cannot find the latest commit for %s", file_path)
+ return
+
+ o_from_t = get_origin_from_trans_smartly(opath, t_from_head)
+    # Note: o_from_t from get_*_smartly() is always more accurate than from get_*()
+ if o_from_t is None:
+ o_from_t = get_origin_from_trans(opath, t_from_head)
+
+ if o_from_t is None:
+ logging.error("Error: Cannot find the latest origin commit for %s", file_path)
+ return
+
+ if o_from_head["hash"] == o_from_t["hash"]:
+ logging.debug("No update needed for %s", file_path)
+ else:
+ logging.info(file_path)
+ commits = get_commits_count_between(
+ opath, o_from_t["hash"], o_from_head["hash"]
+ )
+ count = 0
+ for commit in commits:
+ if valid_commit(commit):
+ logging.info("commit %s", pretty_output(commit))
+ count += 1
+ logging.info("%d commits needs resolving in total\n", count)
+
+
+def valid_locales(locale):
+ """Check if the locale is valid or not"""
+ script_path = os.path.dirname(os.path.abspath(__file__))
+ linux_path = os.path.join(script_path, "../..")
+ if not os.path.isdir(f"{linux_path}/Documentation/translations/{locale}"):
+        raise ArgumentTypeError(f"Invalid locale: {locale}")
+ return locale
+
+
+def list_files_with_excluding_folders(folder, exclude_folders, include_suffix):
+ """List all files with the specified suffix in the folder and its subfolders"""
+ files = []
+ stack = [folder]
+
+ while stack:
+ pwd = stack.pop()
+ # filter out the exclude folders
+ if os.path.basename(pwd) in exclude_folders:
+ continue
+ # list all files and folders
+ for item in os.listdir(pwd):
+ ab_item = os.path.join(pwd, item)
+ if os.path.isdir(ab_item):
+ stack.append(ab_item)
+ else:
+ if ab_item.endswith(include_suffix):
+ files.append(ab_item)
+
+ return files
+
+
+class DmesgFormatter(logging.Formatter):
+ """Custom dmesg logging formatter"""
+ def format(self, record):
+ timestamp = time.time()
+ formatted_time = f"[{timestamp:>10.6f}]"
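+        # e.g. "[1754012345.123456]" (seconds since the epoch, dmesg-style; hypothetical value)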
+ log_message = f"{formatted_time} {record.getMessage()}"
+ return log_message
+
+
+def config_logging(log_level, log_file="checktransupdate.log"):
+ """configure logging based on the log level"""
+ # set up the root logger
+ logger = logging.getLogger()
+ logger.setLevel(log_level)
+
+ # Create console handler
+ console_handler = logging.StreamHandler()
+ console_handler.setLevel(log_level)
+
+ # Create file handler
+ file_handler = logging.FileHandler(log_file)
+ file_handler.setLevel(log_level)
+
+ # Create formatter and add it to the handlers
+ formatter = DmesgFormatter()
+ console_handler.setFormatter(formatter)
+ file_handler.setFormatter(formatter)
+
+ # Add the handler to the logger
+ logger.addHandler(console_handler)
+ logger.addHandler(file_handler)
+
+
+def main():
+ """Main function of the script"""
+ script_path = os.path.dirname(os.path.abspath(__file__))
+ linux_path = os.path.join(script_path, "../..")
+
+ parser = ArgumentParser(description="Check the translation update")
+ parser.add_argument(
+ "-l",
+ "--locale",
+ default="zh_CN",
+ type=valid_locales,
+ help="Locale to check when files are not specified",
+ )
+
+ parser.add_argument(
+ "--print-missing-translations",
+ action=BooleanOptionalAction,
+ default=True,
+ help="Print files that do not have translations",
+ )
+
+ parser.add_argument(
+ '--log',
+ default='INFO',
+ choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'],
+ help='Set the logging level')
+
+ parser.add_argument(
+ '--logfile',
+ default='checktransupdate.log',
+ help='Set the logging file (default: checktransupdate.log)')
+
+ parser.add_argument(
+ "files", nargs="*", help="Files to check, if not specified, check all files"
+ )
+ args = parser.parse_args()
+
+ # Configure logging based on the --log argument
+ log_level = getattr(logging, args.log.upper(), logging.INFO)
+    config_logging(log_level, args.logfile)
+
+ # Get files related to linux path
+ files = args.files
+ if len(files) == 0:
+        official_files = list_files_with_excluding_folders(
+ os.path.join(linux_path, "Documentation"), ["translations", "output"], "rst"
+ )
+
+        for file in official_files:
+ # split the path into parts
+ path_parts = file.split(os.sep)
+ # find the index of the "Documentation" directory
+ kindex = path_parts.index("Documentation")
+ # insert the translations and locale after the Documentation directory
+ new_path_parts = path_parts[:kindex + 1] + ["translations", args.locale] \
+ + path_parts[kindex + 1 :]
+ # join the path parts back together
+ new_file = os.sep.join(new_path_parts)
+ if os.path.isfile(new_file):
+ files.append(new_file)
+ else:
+ if args.print_missing_translations:
+ logging.info(os.path.relpath(os.path.abspath(file), linux_path))
+ logging.info("No translation in the locale of %s\n", args.locale)
+
+ files = list(map(lambda x: os.path.relpath(os.path.abspath(x), linux_path), files))
+
+ # cd to linux root directory
+ os.chdir(linux_path)
+
+ for file in files:
+ check_per_file(file)
+
+
+if __name__ == "__main__":
+ main()
diff --git a/tools/docs/documentation-file-ref-check b/tools/docs/documentation-file-ref-check
new file mode 100755
index 000000000000..0cad42f6943b
--- /dev/null
+++ b/tools/docs/documentation-file-ref-check
@@ -0,0 +1,245 @@
+#!/usr/bin/env perl
+# SPDX-License-Identifier: GPL-2.0
+#
+# Treewide grep for references to files under Documentation, reporting
+# non-existing files to stderr.
+
+use warnings;
+use strict;
+use Getopt::Long qw(:config no_auto_abbrev);
+
+# NOTE: only add things here when the file was gone, but the text wants
+# to mention a past documentation file, for example, to give credits for
+# the original work.
+my %false_positives = (
+ "Documentation/scsi/scsi_mid_low_api.rst" => "Documentation/Configure.help",
+ "drivers/vhost/vhost.c" => "Documentation/virtual/lguest/lguest.c",
+);
+
+my $scriptname = $0;
+$scriptname =~ s,tools/docs/([^/]+/),$1,;
+
+# Parse arguments
+my $help = 0;
+my $fix = 0;
+my $warn = 0;
+
+if (! -e ".git") {
+	printf "Warning: can't check if files exist, as this is not a git tree\n";
+ exit 0;
+}
+
+GetOptions(
+ 'fix' => \$fix,
+ 'warn' => \$warn,
+ 'h|help|usage' => \$help,
+);
+
+if ($help != 0) {
+	print "$scriptname [--help] [--warn] [--fix]\n";
+ exit -1;
+}
+
+# Step 1: find broken references
+print "Finding broken references. This may take a while... " if ($fix);
+
+my %broken_ref;
+
+my $doc_fix = 0;
+
+open IN, "git grep ':doc:\`' Documentation/|"
+ or die "Failed to run git grep";
+while (<IN>) {
+ next if (!m,^([^:]+):.*\:doc\:\`([^\`]+)\`,);
+ next if (m,sphinx/,);
+
+ my $file = $1;
+ my $d = $1;
+ my $doc_ref = $2;
+
+ my $f = $doc_ref;
+
+ $d =~ s,(.*/).*,$1,;
+ $f =~ s,.*\<([^\>]+)\>,$1,;
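+	# e.g. ":doc:`Some title <dir/file>`" leaves $f = "dir/file"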
+
+ if ($f =~ m,^/,) {
+ $f = "$f.rst";
+ $f =~ s,^/,Documentation/,;
+ } else {
+ $f = "$d$f.rst";
+ }
+
+ next if (grep -e, glob("$f"));
+
+ if ($fix && !$doc_fix) {
+ print STDERR "\nWARNING: Currently, can't fix broken :doc:`` fields\n";
+ }
+ $doc_fix++;
+
+ print STDERR "$file: :doc:`$doc_ref`\n";
+}
+close IN;
+
+open IN, "git grep 'Documentation/'|"
+ or die "Failed to run git grep";
+while (<IN>) {
+ next if (!m/^([^:]+):(.*)/);
+
+ my $f = $1;
+ my $ln = $2;
+
+ # On linux-next, discard the Next/ directory
+ next if ($f =~ m,^Next/,);
+
+ # Makefiles and scripts contain nasty expressions to parse docs
+ next if ($f =~ m/Makefile/ || $f =~ m/\.(sh|py|pl|~|rej|org|orig)$/);
+
+ # It doesn't make sense to parse hidden files
+ next if ($f =~ m#/\.#);
+
+ # Skip this script
+ next if ($f eq $scriptname);
+
+ # Ignore the dir where documentation will be built
+ next if ($ln =~ m,\b(\S*)Documentation/output,);
+
+ if ($ln =~ m,\b(\S*)(Documentation/[A-Za-z0-9\_\.\,\~/\*\[\]\?+-]*)(.*),) {
+ my $prefix = $1;
+ my $ref = $2;
+ my $base = $2;
+ my $extra = $3;
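+		# e.g. "Documentation/devicetree/dt-object-internal.txt[1]" is captured
+		# whole into $ref; the cleanups below strip the "[1]" footnote marker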
+
+ # some file references are like:
+ # /usr/src/linux/Documentation/DMA-{API,mapping}.txt
+ # For now, ignore them
+ next if ($extra =~ m/^{/);
+
+ # Remove footnotes at the end like:
+ # Documentation/devicetree/dt-object-internal.txt[1]
+ $ref =~ s/(txt|rst)\[\d+]$/$1/;
+
+ # Remove ending ']' without any '['
+ $ref =~ s/\].*// if (!($ref =~ m/\[/));
+
+		# Remove punctuation marks at the end
+ $ref =~ s/[\,\.]+$//;
+
+ my $fulref = "$prefix$ref";
+
+ $fulref =~ s/^(\<file|ref)://;
+ $fulref =~ s/^[\'\`]+//;
+ $fulref =~ s,^\$\(.*\)/,,;
+ $base =~ s,.*/,,;
+
+ # Remove URL false-positives
+ next if ($fulref =~ m/^http/);
+
+ # Remove sched-pelt false-positive
+ next if ($fulref =~ m,^Documentation/scheduler/sched-pelt$,);
+
+ # Discard some build examples from Documentation/target/tcm_mod_builder.rst
+ next if ($fulref =~ m,mnt/sdb/lio-core-2.6.git/Documentation/target,);
+
+ # Check if exists, evaluating wildcards
+ next if (grep -e, glob("$ref $fulref"));
+
+ # Accept relative Documentation patches for tools/
+ if ($f =~ m/tools/) {
+ my $path = $f;
+ $path =~ s,(.*)/.*,$1,;
+ $path =~ s,testing/selftests/bpf,bpf/bpftool,;
+ next if (grep -e, glob("$path/$ref $path/../$ref $path/$fulref"));
+ }
+
+ # Discard known false-positives
+ if (defined($false_positives{$f})) {
+ next if ($false_positives{$f} eq $fulref);
+ }
+
+ if ($fix) {
+ if (!($ref =~ m/(scripts|Kconfig|Kbuild)/)) {
+ $broken_ref{$ref}++;
+ }
+ } elsif ($warn) {
+ print STDERR "Warning: $f references a file that doesn't exist: $fulref\n";
+ } else {
+ print STDERR "$f: $fulref\n";
+ }
+ }
+}
+close IN;
+
+exit 0 if (!$fix);
+
+# Step 2: Seek for file name alternatives
+print "Auto-fixing broken references. Please double-check the results\n";
+
+foreach my $ref (keys %broken_ref) {
+	my $new = $ref;
+
+ my $basedir = ".";
+ # On translations, only seek inside the translations directory
+ $basedir = $1 if ($ref =~ m,(Documentation/translations/[^/]+),);
+
+ # get just the basename
+ $new =~ s,.*/,,;
+
+ my $f="";
+
+ # usual reason for breakage: DT file moved around
+ if ($ref =~ /devicetree/) {
+ # usual reason for breakage: DT file renamed to .yaml
+ if (!$f) {
+ my $new_ref = $ref;
+ $new_ref =~ s/\.txt$/.yaml/;
+ $f=$new_ref if (-f $new_ref);
+ }
+
+ if (!$f) {
+ my $search = $new;
+ $search =~ s,^.*/,,;
+ $f = qx(find Documentation/devicetree/ -iname "*$search*") if ($search);
+ if (!$f) {
+ # Manufacturer name may have changed
+ $search =~ s/^.*,//;
+ $f = qx(find Documentation/devicetree/ -iname "*$search*") if ($search);
+ }
+ }
+ }
+
+ # usual reason for breakage: file renamed to .rst
+ if (!$f) {
+ $new =~ s/\.txt$/.rst/;
+ $f=qx(find $basedir -iname $new) if ($new);
+ }
+
+ # usual reason for breakage: use dash or underline
+ if (!$f) {
+ $new =~ s/[-_]/[-_]/g;
+ $f=qx(find $basedir -iname $new) if ($new);
+ }
+
+ # Wild guess: seek for the same name on another place
+ if (!$f) {
+ $f = qx(find $basedir -iname $new) if ($new);
+ }
+
+ my @find = split /\s+/, $f;
+
+ if (!$f) {
+ print STDERR "ERROR: Didn't find a replacement for $ref\n";
+ } elsif (scalar(@find) > 1) {
+ print STDERR "WARNING: Won't auto-replace, as found multiple files close to $ref:\n";
+ foreach my $j (@find) {
+ $j =~ s,^./,,;
+ print STDERR " $j\n";
+ }
+ } else {
+ $f = $find[0];
+ $f =~ s,^./,,;
+		print "INFO: Replacing $ref with $f\n";
+ foreach my $j (qx(git grep -l $ref)) {
+ qx(sed "s\@$ref\@$f\@g" -i $j);
+ }
+ }
+}
diff --git a/tools/docs/features-refresh.sh b/tools/docs/features-refresh.sh
new file mode 100755
index 000000000000..c2288124e94a
--- /dev/null
+++ b/tools/docs/features-refresh.sh
@@ -0,0 +1,98 @@
+#
+# Small script that refreshes the kernel feature support status in place.
+#
+
+for F_FILE in Documentation/features/*/*/arch-support.txt; do
+ F=$(grep "^# Kconfig:" "$F_FILE" | cut -c26-)
+
+ #
+ # Each feature F is identified by a pair (O, K), where 'O' can
+ # be either the empty string (for 'nop') or "not" (the logical
+ # negation operator '!'); other operators are not supported.
+ #
+ O=""
+ K=$F
+ if [[ "$F" == !* ]]; then
+ O="not"
+ K=$(echo $F | sed -e 's/^!//g')
+ fi
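+	# E.g., a hypothetical "# Kconfig: !FOO" line yields O="not", K="FOO".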
+
+ #
+ # F := (O, K) is 'valid' iff there is a Kconfig file (for some
+ # arch) which contains K.
+ #
+ # Notice that this definition entails an 'asymmetry' between
+ # the case 'O = ""' and the case 'O = "not"'. E.g., F may be
+ # _invalid_ if:
+ #
+ # [case 'O = ""']
+ # 1) no arch provides support for F,
+ # 2) K does not exist (e.g., it was renamed/mis-typed);
+ #
+ # [case 'O = "not"']
+ # 3) all archs provide support for F,
+ # 4) as in (2).
+ #
+ # The rationale for adopting this definition (and, thus, for
+ # keeping the asymmetry) is:
+ #
+ # We want to be able to 'detect' (2) (or (4)).
+ #
+ # (1) and (3) may further warn the developers about the fact
+ # that K can be removed.
+ #
+ F_VALID="false"
+ for ARCH_DIR in arch/*/; do
+ K_FILES=$(find $ARCH_DIR -name "Kconfig*")
+ K_GREP=$(grep "$K" $K_FILES)
+ if [ ! -z "$K_GREP" ]; then
+ F_VALID="true"
+ break
+ fi
+ done
+ if [ "$F_VALID" = "false" ]; then
+ printf "WARNING: '%s' is not a valid Kconfig\n" "$F"
+ fi
+
+ T_FILE="$F_FILE.tmp"
+ grep "^#" $F_FILE > $T_FILE
+ echo " -----------------------" >> $T_FILE
+ echo " | arch |status|" >> $T_FILE
+ echo " -----------------------" >> $T_FILE
+ for ARCH_DIR in arch/*/; do
+ ARCH=$(echo $ARCH_DIR | sed -e 's/^arch//g' | sed -e 's/\///g')
+ K_FILES=$(find $ARCH_DIR -name "Kconfig*")
+ K_GREP=$(grep "$K" $K_FILES)
+ #
+ # Arch support status values for (O, K) are updated according
+ # to the following rules.
+ #
+ # - ("", K) is 'supported by a given arch', if there is a
+ # Kconfig file for that arch which contains K;
+ #
+ # - ("not", K) is 'supported by a given arch', if there is
+ # no Kconfig file for that arch which contains K;
+ #
+ # - otherwise: preserve the previous status value (if any),
+ # default to 'not yet supported'.
+ #
+	# Notice that, according to these rules, invalid features may be
+ # updated/modified.
+ #
+ if [ "$O" = "" ] && [ ! -z "$K_GREP" ]; then
+ printf " |%12s: | ok |\n" "$ARCH" >> $T_FILE
+ elif [ "$O" = "not" ] && [ -z "$K_GREP" ]; then
+ printf " |%12s: | ok |\n" "$ARCH" >> $T_FILE
+ else
+ S=$(grep -v "^#" "$F_FILE" | grep " $ARCH:")
+ if [ ! -z "$S" ]; then
+ echo "$S" >> $T_FILE
+ else
+ printf " |%12s: | TODO |\n" "$ARCH" \
+ >> $T_FILE
+ fi
+ fi
+ done
+ echo " -----------------------" >> $T_FILE
+ mv $T_FILE $F_FILE
+done
diff --git a/tools/docs/find-unused-docs.sh b/tools/docs/find-unused-docs.sh
new file mode 100755
index 000000000000..05552dbda5bc
--- /dev/null
+++ b/tools/docs/find-unused-docs.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+# (c) 2017, Jonathan Corbet <corbet@lwn.net>
+# sayli karnik <karniksayli1995@gmail.com>
+#
+# This script detects files with kernel-doc comments for exported functions
+# that are not included in documentation.
+#
+# usage: Run 'tools/docs/find-unused-docs.sh directory' from top level of kernel
+# tree.
+#
+# example: $ tools/docs/find-unused-docs.sh drivers/scsi
+#
+# Licensed under the terms of the GNU GPL License
+
+if ! [ -d "Documentation" ]; then
+ echo "Run from top level of kernel tree"
+ exit 1
+fi
+
+if [ "$#" -ne 1 ]; then
+ echo "Usage: tools/docs/find-unused-docs.sh directory"
+ exit 1
+fi
+
+if ! [ -d "$1" ]; then
+ echo "Directory $1 doesn't exist"
+ exit 1
+fi
+
+cd "$( dirname "${BASH_SOURCE[0]}" )"
+cd ..
+
+cd Documentation/
+
+echo "The following files contain kerneldoc comments for exported functions \
+that are not used in the formatted documentation"
+
+# FILES INCLUDED
+
+files_included=($(grep -rHR ".. kernel-doc" --include \*.rst | cut -d " " -f 3))
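+# Each entry is the path argument of a ".. kernel-doc::" directive; e.g. a
+# line like ".. kernel-doc:: drivers/scsi/scsi.c" (hypothetical) contributes
+# "drivers/scsi/scsi.c".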
+
+declare -A FILES_INCLUDED
+
+for each in "${files_included[@]}"; do
+ FILES_INCLUDED[$each]="$each"
+done
+
+cd ..
+
+# FILES NOT INCLUDED
+
+for file in `find $1 -name '*.c'`; do
+
+ if [[ ${FILES_INCLUDED[$file]+_} ]]; then
+ continue;
+ fi
+ str=$(PYTHONDONTWRITEBYTECODE=1 scripts/kernel-doc -export "$file" 2>/dev/null)
+ if [[ -n "$str" ]]; then
+ echo "$file"
+ fi
+done
+
diff --git a/tools/docs/get_abi.py b/tools/docs/get_abi.py
new file mode 100755
index 000000000000..7ce4748a46d2
--- /dev/null
+++ b/tools/docs/get_abi.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python3
+# pylint: disable=R0903
+# Copyright(c) 2025: Mauro Carvalho Chehab <mchehab@kernel.org>.
+# SPDX-License-Identifier: GPL-2.0
+
+"""
+Parse ABI documentation and produce results from it.
+"""
+
+import argparse
+import logging
+import os
+import sys
+
+# Import Python modules
+
+LIB_DIR = "lib/abi"
+SRC_DIR = os.path.dirname(os.path.realpath(__file__))
+
+sys.path.insert(0, os.path.join(SRC_DIR, LIB_DIR))
+
+from abi_parser import AbiParser # pylint: disable=C0413
+from abi_regex import AbiRegex # pylint: disable=C0413
+from helpers import ABI_DIR, DEBUG_HELP # pylint: disable=C0413
+from system_symbols import SystemSymbols # pylint: disable=C0413
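+# Example invocation (run from the top of a kernel tree):
+#   tools/docs/get_abi.py rest --enable-lineno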
+
+# Command line classes
+
+
+REST_DESC = """
+Produce output in ReST format.
+
+The output is done on two sections:
+
+- Symbols: show all parsed symbols in alphabetic order;
+- Files: cross reference the content of each file with the symbols on it.
+"""
+
+class AbiRest:
+ """Initialize an argparse subparser for rest output"""
+
+ def __init__(self, subparsers):
+ """Initialize argparse subparsers"""
+
+ parser = subparsers.add_parser("rest",
+ formatter_class=argparse.RawTextHelpFormatter,
+ description=REST_DESC)
+
+ parser.add_argument("--enable-lineno", action="store_true",
+ help="enable lineno")
+ parser.add_argument("--raw", action="store_true",
+ help="output text as contained in the ABI files. "
+ "It not used, output will contain dynamically"
+ " generated cross references when possible.")
+ parser.add_argument("--no-file", action="store_true",
+ help="Don't the files section")
+ parser.add_argument("--show-hints", help="Show-hints")
+
+ parser.set_defaults(func=self.run)
+
+ def run(self, args):
+ """Run subparser"""
+
+ parser = AbiParser(args.dir, debug=args.debug)
+ parser.parse_abi()
+ parser.check_issues()
+
+ for t in parser.doc(args.raw, not args.no_file):
+ if args.enable_lineno:
+ print (f".. LINENO {t[1]}#{t[2]}\n\n")
+
+ print(t[0])
+
+class AbiValidate:
+ """Initialize an argparse subparser for ABI validation"""
+
+ def __init__(self, subparsers):
+ """Initialize argparse subparsers"""
+
+ parser = subparsers.add_parser("validate",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+                                       description="validate the ABI files")
+
+ parser.set_defaults(func=self.run)
+
+ def run(self, args):
+ """Run subparser"""
+
+ parser = AbiParser(args.dir, debug=args.debug)
+ parser.parse_abi()
+ parser.check_issues()
+
+
+class AbiSearch:
+ """Initialize an argparse subparser for ABI search"""
+
+ def __init__(self, subparsers):
+ """Initialize argparse subparsers"""
+
+ parser = subparsers.add_parser("search",
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+ description="Search ABI using a regular expression")
+
+ parser.add_argument("expression",
+ help="Case-insensitive search pattern for the ABI symbol")
+
+ parser.set_defaults(func=self.run)
+
+ def run(self, args):
+ """Run subparser"""
+
+ parser = AbiParser(args.dir, debug=args.debug)
+ parser.parse_abi()
+ parser.search_symbols(args.expression)
+
+UNDEFINED_DESC="""
+Check undefined ABIs on local machine.
+
+Read sysfs devnodes and check if the devnodes there are defined inside
+ABI documentation.
+
+The search logic tries to minimize the number of regular expressions to
+search per each symbol.
+
+By default, it runs on a single CPU, as Python support for CPU threads
+is still experimental, and multi-process runs on Python is very slow.
+
+On experimental tests, if the number of ABI symbols to search per devnode
+is contained on a limit of ~150 regular expressions, using a single CPU
+is a lot faster than using multiple processes. However, if the number of
+regular expressions to check is at the order of ~30000, using multiple
+CPUs speeds up the check.
+"""
+
+class AbiUndefined:
+ """
+ Initialize an argparse subparser for logic to check undefined ABI at
+ the current machine's sysfs
+ """
+
+ def __init__(self, subparsers):
+ """Initialize argparse subparsers"""
+
+ parser = subparsers.add_parser("undefined",
+ formatter_class=argparse.RawTextHelpFormatter,
+ description=UNDEFINED_DESC)
+
+ parser.add_argument("-S", "--sysfs-dir", default="/sys",
+ help="directory where sysfs is mounted")
+ parser.add_argument("-s", "--search-string",
+ help="search string regular expression to limit symbol search")
+ parser.add_argument("-H", "--show-hints", action="store_true",
+ help="Hints about definitions for missing ABI symbols.")
+ parser.add_argument("-j", "--jobs", "--max-workers", type=int, default=1,
+ help="If bigger than one, enables multiprocessing.")
+ parser.add_argument("-c", "--max-chunk-size", type=int, default=50,
+ help="Maximum number of chunk size")
+ parser.add_argument("-f", "--found", action="store_true",
+ help="Also show found items. "
+ "Helpful to debug the parser."),
+ parser.add_argument("-d", "--dry-run", action="store_true",
+ help="Don't actually search for undefined. "
+ "Helpful to debug the parser."),
+
+ parser.set_defaults(func=self.run)
+
+ def run(self, args):
+ """Run subparser"""
+
+ abi = AbiRegex(args.dir, debug=args.debug,
+ search_string=args.search_string)
+
+ abi_symbols = SystemSymbols(abi=abi, hints=args.show_hints,
+ sysfs=args.sysfs_dir)
+
+ abi_symbols.check_undefined_symbols(dry_run=args.dry_run,
+ found=args.found,
+ max_workers=args.jobs,
+ chunk_size=args.max_chunk_size)
+
+
+def main():
+ """Main program"""
+
+ parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter)
+
+ parser.add_argument("-d", "--debug", type=int, default=0, help="debug level")
+ parser.add_argument("-D", "--dir", default=ABI_DIR, help=DEBUG_HELP)
+
+ subparsers = parser.add_subparsers()
+
+ AbiRest(subparsers)
+ AbiValidate(subparsers)
+ AbiSearch(subparsers)
+ AbiUndefined(subparsers)
+
+ args = parser.parse_args()
+
+ if args.debug:
+ level = logging.DEBUG
+ else:
+ level = logging.INFO
+
+ logging.basicConfig(level=level, format="[%(levelname)s] %(message)s")
+
+ if "func" in args:
+ args.func(args)
+ else:
+ sys.exit(f"Please specify a valid command for {sys.argv[0]}")
+
+
+# Call main method
+if __name__ == "__main__":
+ main()
diff --git a/tools/docs/get_feat.pl b/tools/docs/get_feat.pl
new file mode 100755
index 000000000000..d75e7c85dc85
--- /dev/null
+++ b/tools/docs/get_feat.pl
@@ -0,0 +1,641 @@
+#!/usr/bin/env perl
+# SPDX-License-Identifier: GPL-2.0
+
+use strict;
+use Pod::Usage;
+use Getopt::Long;
+use File::Find;
+use Fcntl ':mode';
+use Cwd 'abs_path';
+
+my $help;
+my $man;
+my $debug;
+my $arch;
+my $feat;
+my $enable_fname;
+
+my $basename = abs_path($0);
+$basename =~ s,/[^/]+$,/,;
+
+my $prefix=$basename . "../../Documentation/features";
+
+# Used only for the full features output. The script will auto-adjust
+# such values to the minimal possible values.
+my $status_size = 1;
+my $description_size = 1;
+
+GetOptions(
+ "debug|d+" => \$debug,
+ "dir=s" => \$prefix,
+ 'help|?' => \$help,
+ 'arch=s' => \$arch,
+ 'feat=s' => \$feat,
+ 'feature=s' => \$feat,
+ "enable-fname" => \$enable_fname,
+ man => \$man
+) or pod2usage(2);
+
+pod2usage(1) if $help;
+pod2usage(-exitstatus => 0, -verbose => 2) if $man;
+
+pod2usage(1) if (scalar @ARGV < 1 || @ARGV > 2);
+
+my ($cmd, $arg) = @ARGV;
+
+pod2usage(2) if ($cmd ne "current" && $cmd ne "rest" && $cmd ne "validate"
+ && $cmd ne "ls" && $cmd ne "list");
+
+require Data::Dumper if ($debug);
+
+my %data;
+my %archs;
+
+#
+# Displays an error message, printing file name and line
+#
+sub parse_error($$$$) {
+ my ($file, $ln, $msg, $data) = @_;
+
+ $data =~ s/\s+$/\n/;
+
+ print STDERR "Warning: file $file#$ln:\n\t$msg";
+
+ if ($data ne "") {
+ print STDERR ". Line\n\t\t$data";
+ } else {
+ print STDERR "\n";
+ }
+}
+
+#
+# Parse a features file, storing its contents at %data
+#
+
+my $h_name = "Feature";
+my $h_kconfig = "Kconfig";
+my $h_description = "Description";
+my $h_subsys = "Subsystem";
+my $h_status = "Status";
+my $h_arch = "Architecture";
+
+my $max_size_name = length($h_name);
+my $max_size_kconfig = length($h_kconfig);
+my $max_size_description = length($h_description);
+my $max_size_subsys = length($h_subsys);
+my $max_size_status = length($h_status);
+
+my $max_size_arch = 0;
+my $max_size_arch_with_header;
+my $max_description_word = 0;
+
+sub parse_feat {
+ my $file = $File::Find::name;
+
+ my $mode = (stat($file))[2];
+ return if ($mode & S_IFDIR);
+ return if ($file =~ m,($prefix)/arch-support.txt,);
+ return if (!($file =~ m,arch-support.txt$,));
+
+ if ($enable_fname) {
+ printf ".. FILE %s\n", abs_path($file);
+ }
+
+ my $subsys = "";
+ $subsys = $2 if ( m,.*($prefix)/([^/]+).*,);
+
+ if (length($subsys) > $max_size_subsys) {
+ $max_size_subsys = length($subsys);
+ }
+
+ my $name;
+ my $kconfig;
+ my $description;
+ my $comments = "";
+ my $last_status;
+ my $ln;
+ my %arch_table;
+
+ print STDERR "Opening $file\n" if ($debug > 1);
+ open IN, $file;
+
+ while(<IN>) {
+ $ln++;
+
+ if (m/^\#\s+Feature\s+name:\s*(.*\S)/) {
+ $name = $1;
+ if (length($name) > $max_size_name) {
+ $max_size_name = length($name);
+ }
+ next;
+ }
+ if (m/^\#\s+Kconfig:\s*(.*\S)/) {
+ $kconfig = $1;
+ if (length($kconfig) > $max_size_kconfig) {
+ $max_size_kconfig = length($kconfig);
+ }
+ next;
+ }
+ if (m/^\#\s+description:\s*(.*\S)/) {
+ $description = $1;
+ if (length($description) > $max_size_description) {
+ $max_size_description = length($description);
+ }
+
+ foreach my $word (split /\s+/, $description) {
+ if (length($word) > $max_description_word) {
+ $max_description_word = length($word);
+ }
+ }
+
+ next;
+ }
+		next if (m/^\s*$/);
+ next if (m/^\s*\-+\s*$/);
+ next if (m/^\s*\|\s*arch\s*\|\s*status\s*\|\s*$/);
+
+ if (m/^\#\s*(.*)/) {
+ $comments .= "$1\n";
+ next;
+ }
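+		# A table row looks like: " |         x86: |  ok  |"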
+ if (m/^\s*\|\s*(\S+):\s*\|\s*(\S+)\s*\|\s*$/) {
+ my $a = $1;
+ my $status = $2;
+
+ if (length($status) > $max_size_status) {
+ $max_size_status = length($status);
+ }
+ if (length($a) > $max_size_arch) {
+ $max_size_arch = length($a);
+ }
+
+ $status = "---" if ($status =~ m/^\.\.$/);
+
+ $archs{$a} = 1;
+ $arch_table{$a} = $status;
+ next;
+ }
+
+		# Everything else is an error
+ parse_error($file, $ln, "line is invalid", $_);
+ }
+ close IN;
+
+ if (!$name) {
+ parse_error($file, $ln, "Feature name not found", "");
+ return;
+ }
+
+ parse_error($file, $ln, "Subsystem not found", "") if (!$subsys);
+ parse_error($file, $ln, "Kconfig not found", "") if (!$kconfig);
+ parse_error($file, $ln, "Description not found", "") if (!$description);
+
+ if (!%arch_table) {
+ parse_error($file, $ln, "Architecture table not found", "");
+ return;
+ }
+
+ $data{$name}->{where} = $file;
+ $data{$name}->{subsys} = $subsys;
+ $data{$name}->{kconfig} = $kconfig;
+ $data{$name}->{description} = $description;
+ $data{$name}->{comments} = $comments;
+ $data{$name}->{table} = \%arch_table;
+
+ $max_size_arch_with_header = $max_size_arch + length($h_arch);
+}
+
+#
+# Output feature(s) for a given architecture
+#
+sub output_arch_table {
+ my $title = "Feature status on $arch architecture";
+
+ print "=" x length($title) . "\n";
+ print "$title\n";
+ print "=" x length($title) . "\n\n";
+
+ print "=" x $max_size_subsys;
+ print " ";
+ print "=" x $max_size_name;
+ print " ";
+ print "=" x $max_size_kconfig;
+ print " ";
+ print "=" x $max_size_status;
+ print " ";
+ print "=" x $max_size_description;
+ print "\n";
+ printf "%-${max_size_subsys}s ", $h_subsys;
+ printf "%-${max_size_name}s ", $h_name;
+ printf "%-${max_size_kconfig}s ", $h_kconfig;
+ printf "%-${max_size_status}s ", $h_status;
+ printf "%-${max_size_description}s\n", $h_description;
+ print "=" x $max_size_subsys;
+ print " ";
+ print "=" x $max_size_name;
+ print " ";
+ print "=" x $max_size_kconfig;
+ print " ";
+ print "=" x $max_size_status;
+ print " ";
+ print "=" x $max_size_description;
+ print "\n";
+
+ foreach my $name (sort {
+ ($data{$a}->{subsys} cmp $data{$b}->{subsys}) ||
+ ("\L$a" cmp "\L$b")
+ } keys %data) {
+ next if ($feat && $name ne $feat);
+
+ my %arch_table = %{$data{$name}->{table}};
+ printf "%-${max_size_subsys}s ", $data{$name}->{subsys};
+ printf "%-${max_size_name}s ", $name;
+ printf "%-${max_size_kconfig}s ", $data{$name}->{kconfig};
+ printf "%-${max_size_status}s ", $arch_table{$arch};
+ printf "%-s\n", $data{$name}->{description};
+ }
+
+ print "=" x $max_size_subsys;
+ print " ";
+ print "=" x $max_size_name;
+ print " ";
+ print "=" x $max_size_kconfig;
+ print " ";
+ print "=" x $max_size_status;
+ print " ";
+ print "=" x $max_size_description;
+ print "\n";
+}
+
+#
+# list feature(s) for a given architecture
+#
+sub list_arch_features {
+ print "#\n# Kernel feature support matrix of the '$arch' architecture:\n#\n";
+
+ foreach my $name (sort {
+ ($data{$a}->{subsys} cmp $data{$b}->{subsys}) ||
+ ("\L$a" cmp "\L$b")
+ } keys %data) {
+ next if ($feat && $name ne $feat);
+
+ my %arch_table = %{$data{$name}->{table}};
+
+ my $status = $arch_table{$arch};
+ $status = " " x ((4 - length($status)) / 2) . $status;
+
+ printf " %${max_size_subsys}s/ ", $data{$name}->{subsys};
+ printf "%-${max_size_name}s: ", $name;
+ printf "%-5s| ", $status;
+ printf "%${max_size_kconfig}s # ", $data{$name}->{kconfig};
+ printf " %s\n", $data{$name}->{description};
+ }
+}
+
+#
+# Output a feature on all architectures
+#
+sub output_feature {
+ my $title = "Feature $feat";
+
+ print "=" x length($title) . "\n";
+ print "$title\n";
+ print "=" x length($title) . "\n\n";
+
+ print ":Subsystem: $data{$feat}->{subsys} \n" if ($data{$feat}->{subsys});
+ print ":Kconfig: $data{$feat}->{kconfig} \n" if ($data{$feat}->{kconfig});
+
+ my $desc = $data{$feat}->{description};
+ $desc =~ s/^([a-z])/\U$1/;
+    $desc =~ s/\.?\s*$//;
+ print "\n$desc.\n\n";
+
+ my $com = $data{$feat}->{comments};
+ $com =~ s/^\s+//;
+ $com =~ s/\s+$//;
+ if ($com) {
+ print "Comments\n";
+ print "--------\n\n";
+ print "$com\n\n";
+ }
+
+ print "=" x $max_size_arch_with_header;
+ print " ";
+ print "=" x $max_size_status;
+ print "\n";
+
+ printf "%-${max_size_arch}s ", $h_arch;
+    printf "%-${max_size_status}s\n", $h_status;
+
+ print "=" x $max_size_arch_with_header;
+ print " ";
+ print "=" x $max_size_status;
+ print "\n";
+
+ my %arch_table = %{$data{$feat}->{table}};
+ foreach my $arch (sort keys %arch_table) {
+ printf "%-${max_size_arch}s ", $arch;
+ printf "%-${max_size_status}s\n", $arch_table{$arch};
+ }
+
+ print "=" x $max_size_arch_with_header;
+ print " ";
+ print "=" x $max_size_status;
+ print "\n";
+}
+
+#
+# Output all features for all architectures
+#
+
+sub matrix_lines($$$) {
+ my $desc_size = shift;
+ my $status_size = shift;
+ my $header = shift;
+ my $fill;
+ my $ln_marker;
+
+ if ($header) {
+ $ln_marker = "=";
+ } else {
+ $ln_marker = "-";
+ }
+
+ $fill = $ln_marker;
+
+ print "+";
+ print $fill x $max_size_name;
+ print "+";
+ print $fill x $desc_size;
+ print "+";
+ print $ln_marker x $status_size;
+ print "+\n";
+}
+
+sub output_matrix {
+ my $title = "Feature status on all architectures";
+ my $notcompat = "Not compatible";
+
+ print "=" x length($title) . "\n";
+ print "$title\n";
+ print "=" x length($title) . "\n\n";
+
+ my $desc_title = "$h_kconfig / $h_description";
+
+ my $desc_size = $max_size_kconfig + 4;
+ if (!$description_size) {
+ $desc_size = $max_size_description if ($max_size_description > $desc_size);
+ } else {
+ $desc_size = $description_size if ($description_size > $desc_size);
+ }
+ $desc_size = $max_description_word if ($max_description_word > $desc_size);
+
+ $desc_size = length($desc_title) if (length($desc_title) > $desc_size);
+
+ $max_size_status = length($notcompat) if (length($notcompat) > $max_size_status);
+
+ # Ensure that the status will fit
+ my $min_status_size = $max_size_status + $max_size_arch + 6;
+ $status_size = $min_status_size if ($status_size < $min_status_size);
+
+
+ my $cur_subsys = "";
+ foreach my $name (sort {
+ ($data{$a}->{subsys} cmp $data{$b}->{subsys}) or
+ ("\L$a" cmp "\L$b")
+ } keys %data) {
+
+ if ($cur_subsys ne $data{$name}->{subsys}) {
+ if ($cur_subsys ne "") {
+ printf "\n";
+ }
+
+ $cur_subsys = $data{$name}->{subsys};
+
+ my $title = "Subsystem: $cur_subsys";
+ print "$title\n";
+ print "=" x length($title) . "\n\n";
+
+
+ matrix_lines($desc_size, $status_size, 0);
+
+ printf "|%-${max_size_name}s", $h_name;
+ printf "|%-${desc_size}s", $desc_title;
+
+ printf "|%-${status_size}s|\n", "Status per architecture";
+ matrix_lines($desc_size, $status_size, 1);
+ }
+
+ my %arch_table = %{$data{$name}->{table}};
+ my $cur_status = "";
+
+ my (@lines, @descs);
+ my $line = "";
+ foreach my $arch (sort {
+ ($arch_table{$b} cmp $arch_table{$a}) or
+ ("\L$a" cmp "\L$b")
+ } keys %arch_table) {
+
+ my $status = $arch_table{$arch};
+
+ if ($status eq "---") {
+ $status = $notcompat;
+ }
+
+ if ($status ne $cur_status) {
+ if ($line ne "") {
+ push @lines, $line;
+ $line = "";
+ }
+ $line = "- **" . $status . "**: " . $arch;
+ } elsif (length($line) + length ($arch) + 2 < $status_size) {
+ $line .= ", " . $arch;
+ } else {
+ push @lines, $line;
+ $line = " " . $arch;
+ }
+ $cur_status = $status;
+ }
+ push @lines, $line if ($line ne "");
+
+ my $description = $data{$name}->{description};
+ while (length($description) > $desc_size) {
+ my $d = substr $description, 0, $desc_size;
+
+ # Ensure that it will end on a space
+ # if it can't, it means that the size is too small
+ # Instead of aborting it, let's print what we have
+ if (!($d =~ s/^(.*)\s+.*/$1/)) {
+ $d = substr $d, 0, -1;
+ push @descs, "$d\\";
+ $description =~ s/^\Q$d\E//;
+ } else {
+ push @descs, $d;
+ $description =~ s/^\Q$d\E\s+//;
+ }
+ }
+ push @descs, $description;
+
+ # Ensure that the full description will be printed
+ push @lines, "" while (scalar(@lines) < 2 + scalar(@descs));
+
+ my $ln = 0;
+ for my $line(@lines) {
+ if (!$ln) {
+ printf "|%-${max_size_name}s", $name;
+ printf "|%-${desc_size}s", "``" . $data{$name}->{kconfig} . "``";
+ } elsif ($ln >= 2 && scalar(@descs)) {
+ printf "|%-${max_size_name}s", "";
+ printf "|%-${desc_size}s", shift @descs;
+ } else {
+ printf "|%-${max_size_name}s", "";
+ printf "|%-${desc_size}s", "";
+ }
+
+ printf "|%-${status_size}s|\n", $line;
+
+ $ln++;
+ }
+ matrix_lines($desc_size, $status_size, 0);
+ }
+}
+
+
+#
+# Parses all feature files located at $prefix dir
+#
+find({wanted =>\&parse_feat, no_chdir => 1}, $prefix);
+
+print STDERR Data::Dumper->Dump([\%data], [qw(*data)]) if ($debug);
+
+#
+# Handles the command
+#
+if ($cmd eq "current") {
+ $arch = qx(uname -m | sed 's/x86_64/x86/' | sed 's/i386/x86/' | sed 's/s390x/s390/');
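+	# e.g. "x86_64" -> "x86", "i386" -> "x86", "s390x" -> "s390"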
+ $arch =~s/\s+$//;
+}
+
+if ($cmd eq "ls" or $cmd eq "list") {
+ if (!$arch) {
+ $arch = qx(uname -m | sed 's/x86_64/x86/' | sed 's/i386/x86/' | sed 's/s390x/s390/');
+ $arch =~s/\s+$//;
+ }
+
+ list_arch_features;
+
+ exit;
+}
+
+if ($cmd ne "validate") {
+ if ($arch) {
+ output_arch_table;
+ } elsif ($feat) {
+ output_feature;
+ } else {
+ output_matrix;
+ }
+}
+
+__END__
+
+=head1 NAME
+
+get_feat.pl - parse the Linux Feature files and produce a ReST book.
+
+=head1 SYNOPSIS
+
+B<get_feat.pl> [--debug] [--man] [--help] [--dir=<dir>] [--arch=<arch>]
+                 [--feature=<feature>|--feat=<feature>] <COMMAND> [<ARGUMENT>]
+
+Where <COMMAND> can be:
+
+=over 8
+
+B<current> - output table in ReST compatible ASCII format
+ with features for this machine's architecture
+
+B<rest> - output table(s) in ReST compatible ASCII format
+ with features in ReST markup language. The output
+ is affected by --arch or --feat/--feature flags.
+
+B<validate> - validate the contents of the files under
+ Documentation/features.
+
+B<ls> or B<list> - list features for this machine's architecture,
+ using an easier to parse format.
+ The output is affected by --arch flag.
+
+=back
+
+=head1 OPTIONS
+
+=over 8
+
+=item B<--arch>
+
+Output features for a specific architecture, optionally filtering for
+a single specific feature.
+
+=item B<--feat> or B<--feature>
+
+Output features for a single specific feature.
+
+=item B<--dir>
+
+Changes the location of the Feature files. By default, it uses
+the Documentation/features directory.
+
+=item B<--enable-fname>
+
+Prints the file name of the feature files. This can be used in order to
+track dependencies during documentation build.
+
+=item B<--debug>
+
+Put the script in verbose mode, useful for debugging. Can be called multiple
+times, to increase verbosity.
+
+=item B<--help>
+
+Prints a brief help message and exits.
+
+=item B<--man>
+
+Prints the manual page and exits.
+
+=back
+
+=head1 DESCRIPTION
+
+Parse the Linux feature files from Documentation/features (by default),
+optionally producing results in ReST format.
+
+It supports outputting data per architecture, per feature, or as a
+feature x arch matrix.
+
+When used with the B<rest> command, it will use one of these three formats:
+
+If neither the B<--arch> nor the B<--feature> argument is used, it will
+output a matrix with features per architecture.
+
+If B<--arch> argument is used, it will output the features availability for
+a given architecture.
+
+If B<--feat> argument is used, it will output the content of the feature
+file using ReStructured Text markup.
+
+=head1 BUGS
+
+Report bugs to Mauro Carvalho Chehab <mchehab+samsung@kernel.org>
+
+=head1 COPYRIGHT
+
+Copyright (c) 2019 by Mauro Carvalho Chehab <mchehab+samsung@kernel.org>.
+
+License GPLv2: GNU GPL version 2 <http://gnu.org/licenses/gpl.html>.
+
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+
+=cut
diff --git a/tools/docs/list-arch.sh b/tools/docs/list-arch.sh
new file mode 100755
index 000000000000..96fe83b7058b
--- /dev/null
+++ b/tools/docs/list-arch.sh
@@ -0,0 +1,11 @@
+# SPDX-License-Identifier: GPL-2.0
+#
+# Small script that visualizes the kernel feature support status
+# of an architecture.
+#
+# (If no arguments are given then it will print the host architecture's status.)
+#
+
+ARCH=${1:-$(uname -m | sed 's/x86_64/x86/' | sed 's/i386/x86/' | sed 's/s390x/s390/')}
+
+$(dirname $0)/get_feat.pl list --arch $ARCH
diff --git a/tools/docs/sphinx-pre-install b/tools/docs/sphinx-pre-install
index 698989584b6a..647e1f60357f 100755
--- a/tools/docs/sphinx-pre-install
+++ b/tools/docs/sphinx-pre-install
@@ -313,7 +313,7 @@ class MissingCheckers(AncillaryMethods):
Right now, we still need Perl for doc build, as it is required
by some tools called at docs or kernel build time, like:
- scripts/documentation-file-ref-check
+ tools/docs/documentation-file-ref-check
Also, checkpatch is on Perl.
"""
diff --git a/tools/docs/test_doc_build.py b/tools/docs/test_doc_build.py
new file mode 100755
index 000000000000..47b4606569f9
--- /dev/null
+++ b/tools/docs/test_doc_build.py
@@ -0,0 +1,513 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: GPL-2.0
+# Copyright(c) 2025: Mauro Carvalho Chehab <mchehab+huawei@kernel.org>
+#
+# pylint: disable=R0903,R0912,R0913,R0914,R0917,C0301
+
+"""
+Install minimal supported requirements for different Sphinx versions
+and optionally test the build.
+"""
+
+import argparse
+import asyncio
+import os.path
+import shutil
+import sys
+import time
+import subprocess
+
+# Minimal Python version supported by the build system.
+
+PYTHON = os.path.basename(sys.executable)
+
+min_python_bin = None
+
+for i in range(9, 13):
+ p = f"python3.{i}"
+ if shutil.which(p):
+ min_python_bin = p
+ break
+
+if not min_python_bin:
+ min_python_bin = PYTHON
+
+# Starting from 8.0, Python 3.9 is not supported anymore.
+PYTHON_VER_CHANGES = {(8, 0, 0): PYTHON}
+
+DEFAULT_VERSIONS_TO_TEST = [
+ (3, 4, 3), # Minimal supported version
+ (5, 3, 0), # CentOS Stream 9 / AlmaLinux 9
+ (6, 1, 1), # Debian 12
+ (7, 2, 1), # openSUSE Leap 15.6
+ (7, 2, 6), # Ubuntu 24.04 LTS
+ (7, 4, 7), # Ubuntu 24.10
+ (7, 3, 0), # openSUSE Tumbleweed
+ (8, 1, 3), # Fedora 42
+ (8, 2, 3) # Latest version - covers rolling distros
+]
+
+# Sphinx versions to be installed and their incremental requirements
+SPHINX_REQUIREMENTS = {
+ # Oldest versions we support for each package required by Sphinx 3.4.3
+ (3, 4, 3): {
+ "docutils": "0.16",
+ "alabaster": "0.7.12",
+ "babel": "2.8.0",
+ "certifi": "2020.6.20",
+ "docutils": "0.16",
+ "idna": "2.10",
+ "imagesize": "1.2.0",
+ "Jinja2": "2.11.2",
+ "MarkupSafe": "1.1.1",
+ "packaging": "20.4",
+ "Pygments": "2.6.1",
+ "PyYAML": "5.1",
+ "requests": "2.24.0",
+ "snowballstemmer": "2.0.0",
+ "sphinxcontrib-applehelp": "1.0.2",
+ "sphinxcontrib-devhelp": "1.0.2",
+ "sphinxcontrib-htmlhelp": "1.0.3",
+ "sphinxcontrib-jsmath": "1.0.1",
+ "sphinxcontrib-qthelp": "1.0.3",
+ "sphinxcontrib-serializinghtml": "1.1.4",
+ "urllib3": "1.25.9",
+ },
+
+    # Update package dependencies to a more modern base. The goal here
+    # is to avoid too many incremental changes for the next entries.
+ (3, 5, 0): {
+ "alabaster": "0.7.13",
+ "babel": "2.17.0",
+ "certifi": "2025.6.15",
+ "idna": "3.10",
+ "imagesize": "1.4.1",
+ "packaging": "25.0",
+ "Pygments": "2.8.1",
+ "requests": "2.32.4",
+ "snowballstemmer": "3.0.1",
+ "sphinxcontrib-applehelp": "1.0.4",
+ "sphinxcontrib-htmlhelp": "2.0.1",
+ "sphinxcontrib-serializinghtml": "1.1.5",
+ "urllib3": "2.0.0",
+ },
+
+ # Starting from here, ensure all docutils versions are covered with
+ # supported Sphinx versions. Other packages are upgraded only when
+ # required by pip
+ (4, 0, 0): {
+ "PyYAML": "5.1",
+ },
+ (4, 1, 0): {
+ "docutils": "0.17",
+ "Pygments": "2.19.1",
+ "Jinja2": "3.0.3",
+ "MarkupSafe": "2.0",
+ },
+ (4, 3, 0): {},
+ (4, 4, 0): {},
+ (4, 5, 0): {
+ "docutils": "0.17.1",
+ },
+ (5, 0, 0): {},
+ (5, 1, 0): {},
+ (5, 2, 0): {
+ "docutils": "0.18",
+ "Jinja2": "3.1.2",
+ "MarkupSafe": "2.0",
+ "PyYAML": "5.3.1",
+ },
+ (5, 3, 0): {
+ "docutils": "0.18.1",
+ },
+ (6, 0, 0): {},
+ (6, 1, 0): {},
+ (6, 2, 0): {
+ "PyYAML": "5.4.1",
+ },
+ (7, 0, 0): {},
+ (7, 1, 0): {},
+ (7, 2, 0): {
+ "docutils": "0.19",
+ "PyYAML": "6.0.1",
+ "sphinxcontrib-serializinghtml": "1.1.9",
+ },
+ (7, 2, 6): {
+ "docutils": "0.20",
+ },
+ (7, 3, 0): {
+ "alabaster": "0.7.14",
+ "PyYAML": "6.0.1",
+ "tomli": "2.0.1",
+ },
+ (7, 4, 0): {
+ "docutils": "0.20.1",
+ "PyYAML": "6.0.1",
+ },
+ (8, 0, 0): {
+ "docutils": "0.21",
+ },
+ (8, 1, 0): {
+ "docutils": "0.21.1",
+ "PyYAML": "6.0.1",
+ "sphinxcontrib-applehelp": "1.0.7",
+ "sphinxcontrib-devhelp": "1.0.6",
+ "sphinxcontrib-htmlhelp": "2.0.6",
+ "sphinxcontrib-qthelp": "1.0.6",
+ },
+ (8, 2, 0): {
+ "docutils": "0.21.2",
+ "PyYAML": "6.0.1",
+ "sphinxcontrib-serializinghtml": "1.1.9",
+ },
+}
+
+
+class AsyncCommands:
+ """Excecute command synchronously"""
+
+ def __init__(self, fp=None):
+
+ self.stdout = None
+ self.stderr = None
+ self.output = None
+ self.fp = fp
+
+ def log(self, out, verbose, is_info=True):
+ out = out.removesuffix('\n')
+
+ if verbose:
+ if is_info:
+ print(out)
+ else:
+ print(out, file=sys.stderr)
+
+ if self.fp:
+ self.fp.write(out + "\n")
+
+ async def _read(self, stream, verbose, is_info):
+ """Ancillary routine to capture while displaying"""
+
+ while stream is not None:
+ line = await stream.readline()
+ if line:
+ out = line.decode("utf-8", errors="backslashreplace")
+ self.log(out, verbose, is_info)
+ if is_info:
+ self.stdout += out
+ else:
+ self.stderr += out
+ else:
+ break
+
+ async def run(self, cmd, capture_output=False, check=False,
+ env=None, verbose=True):
+
+ """
+ Execute an arbitrary command, handling errors.
+
+ Please notice that this class is not thread safe
+ """
+
+ self.stdout = ""
+ self.stderr = ""
+
+ self.log("$ " + " ".join(cmd), verbose)
+
+ proc = await asyncio.create_subprocess_exec(cmd[0],
+ *cmd[1:],
+ env=env,
+ stdout=asyncio.subprocess.PIPE,
+ stderr=asyncio.subprocess.PIPE)
+
+ # Handle input and output in realtime
+ await asyncio.gather(
+ self._read(proc.stdout, verbose, True),
+ self._read(proc.stderr, verbose, False),
+ )
+
+ await proc.wait()
+
+ if check and proc.returncode > 0:
+ raise subprocess.CalledProcessError(returncode=proc.returncode,
+ cmd=" ".join(cmd),
+ output=self.stdout,
+ stderr=self.stderr)
+
+ if capture_output:
+ if proc.returncode > 0:
+ self.log(f"Error {proc.returncode}", verbose=True, is_info=False)
+ return ""
+
+ return self.output
+
+ ret = subprocess.CompletedProcess(args=cmd,
+ returncode=proc.returncode,
+ stdout=self.stdout,
+ stderr=self.stderr)
+
+ return ret
+
+
+class SphinxVenv:
+ """
+ Installs Sphinx on one virtual env per Sphinx version with a minimal
+ set of dependencies, adjusting them to each specific version.
+ """
+
+ def __init__(self):
+ """Initialize instance variables"""
+
+ self.built_time = {}
+ self.first_run = True
+
+ async def _handle_version(self, args, fp,
+ cur_ver, cur_requirements, python_bin):
+ """Handle a single Sphinx version"""
+
+ cmd = AsyncCommands(fp)
+
+ ver = ".".join(map(str, cur_ver))
+
+ if not self.first_run and args.wait_input and args.build:
+ ret = input("Press Enter to continue or 'a' to abort: ").strip().lower()
+ if ret == "a":
+ print("Aborted.")
+ sys.exit()
+ else:
+ self.first_run = False
+
+ venv_dir = f"Sphinx_{ver}"
+ req_file = f"requirements_{ver}.txt"
+
+ cmd.log(f"\nSphinx {ver} with {python_bin}", verbose=True)
+
+ # Create venv
+ await cmd.run([python_bin, "-m", "venv", venv_dir],
+ verbose=args.verbose, check=True)
+ pip = os.path.join(venv_dir, "bin/pip")
+
+ # Create install list
+ reqs = []
+ for pkg, verstr in cur_requirements.items():
+ reqs.append(f"{pkg}=={verstr}")
+
+ reqs.append(f"Sphinx=={ver}")
+
+ await cmd.run([pip, "install"] + reqs, check=True, verbose=args.verbose)
+
+ # Freeze environment
+ result = await cmd.run([pip, "freeze"], verbose=False, check=True)
+
+ # Pip install succeeded. Write requirements file
+ if args.req_file:
+ with open(req_file, "w", encoding="utf-8") as fp:
+ fp.write(result.stdout)
+
+ if args.build:
+ start_time = time.time()
+
+ # Prepare a venv environment
+ env = os.environ.copy()
+ bin_dir = os.path.join(venv_dir, "bin")
+ env["PATH"] = bin_dir + ":" + env["PATH"]
+ env["VIRTUAL_ENV"] = venv_dir
+ if "PYTHONHOME" in env:
+ del env["PYTHONHOME"]
+
+ # Test doc build
+ await cmd.run(["make", "cleandocs"], env=env, check=True)
+ make = ["make"]
+
+ if args.output:
+ sphinx_build = os.path.realpath(f"{bin_dir}/sphinx-build")
+ make += [f"O={args.output}", f"SPHINXBUILD={sphinx_build}"]
+
+ if args.make_args:
+ make += args.make_args
+
+ make += args.targets
+
+ if args.verbose:
+ cmd.log(f". {bin_dir}/activate", verbose=True)
+ await cmd.run(make, env=env, check=True, verbose=True)
+ if args.verbose:
+ cmd.log("deactivate", verbose=True)
+
+ end_time = time.time()
+ elapsed_time = end_time - start_time
+ hours, minutes = divmod(elapsed_time, 3600)
+ minutes, seconds = divmod(minutes, 60)
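+            # e.g. 8154 s elapsed -> hours=2, minutes=15, seconds=54 -> "02:15:54"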
+
+ hours = int(hours)
+ minutes = int(minutes)
+ seconds = int(seconds)
+
+ self.built_time[ver] = f"{hours:02d}:{minutes:02d}:{seconds:02d}"
+
+ cmd.log(f"Finished doc build for Sphinx {ver}. Elapsed time: {self.built_time[ver]}", verbose=True)
+
+ async def run(self, args):
+ """
+ Navigate though multiple Sphinx versions, handling each of them
+ on a loop.
+ """
+
+        if args.log:
+            fp = open(args.log, "w", encoding="utf-8")
+        else:
+            fp = None
+            if not args.verbose:
+                args.verbose = True
+
+ cur_requirements = {}
+ python_bin = min_python_bin
+
+ vers = set(SPHINX_REQUIREMENTS.keys()) | set(args.versions)
+
+ for cur_ver in sorted(vers):
+ if cur_ver in SPHINX_REQUIREMENTS:
+ new_reqs = SPHINX_REQUIREMENTS[cur_ver]
+ cur_requirements.update(new_reqs)
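+                # Requirements accumulate across versions; e.g. "docutils"
+                # stays at "0.16" (from 3.4.3) until (4, 1, 0) bumps it to "0.17"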
+
+ if cur_ver in PYTHON_VER_CHANGES: # pylint: disable=R1715
+ python_bin = PYTHON_VER_CHANGES[cur_ver]
+
+ if cur_ver not in args.versions:
+ continue
+
+ if args.min_version:
+ if cur_ver < args.min_version:
+ continue
+
+ if args.max_version:
+ if cur_ver > args.max_version:
+ break
+
+ await self._handle_version(args, fp, cur_ver, cur_requirements,
+ python_bin)
+
+ if args.build:
+ cmd = AsyncCommands(fp)
+ cmd.log("\nSummary:", verbose=True)
+ for ver, elapsed_time in sorted(self.built_time.items()):
+ cmd.log(f"\tSphinx {ver} elapsed time: {elapsed_time}",
+ verbose=True)
+
+ if fp:
+ fp.close()
+
+def parse_version(ver_str):
+ """Convert a version string into a tuple."""
+
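+    # e.g. parse_version("7.2.6") -> (7, 2, 6)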
+ return tuple(map(int, ver_str.split(".")))
+
+
+DEFAULT_VERS = " - "
+DEFAULT_VERS += "\n - ".join(map(lambda v: f"{v[0]}.{v[1]}.{v[2]}",
+ DEFAULT_VERSIONS_TO_TEST))
+
+SCRIPT = os.path.relpath(__file__)
+
+DESCRIPTION = f"""
+This tool allows creating Python virtual environments for different
+Sphinx versions that are supported by the Linux Kernel build system.
+
+Besides creating the virtual environment, it can also test building
+the documentation using "make htmldocs" (and/or other doc targets).
+
+If called without "--versions" argument, it covers the versions shipped
+on major distros, plus the lowest supported version:
+
+{DEFAULT_VERS}
+
+A typical usage is to run:
+
+ {SCRIPT} -m -l sphinx_builds.log
+
+This will create one virtual env for the default version set and run
+"make htmldocs" for each version, creating a log file with the
+executed commands in it.
+
+NOTE: The build time can be very long, especially on old versions. Also, there
+is a known bug with Sphinx version 6.0.x: each subprocess uses a lot of
+memory. That, together with "-jauto", may cause the OOM killer to produce
+build failures during doc generation. To minimize the risk, you may use the
+"-a" command line parameter to constrain the built directories and/or
+reduce the number of threads from "-jauto" to, for instance, "-j4":
+
+ {SCRIPT} -m -V 6.0.1 -a "SPHINXDIRS=process" "SPHINXOPTS='-j4'"
+
+"""
+
+MAKE_TARGETS = [
+ "htmldocs",
+ "texinfodocs",
+ "infodocs",
+ "latexdocs",
+ "pdfdocs",
+ "epubdocs",
+ "xmldocs",
+]
+
+async def main():
+ """Main program"""
+
+ parser = argparse.ArgumentParser(description=DESCRIPTION,
+ formatter_class=argparse.RawDescriptionHelpFormatter)
+
+ ver_group = parser.add_argument_group("Version range options")
+
+ ver_group.add_argument('-V', '--versions', nargs="*",
+                           default=DEFAULT_VERSIONS_TO_TEST, type=parse_version,
+ help='Sphinx versions to test')
+ ver_group.add_argument('--min-version', "--min", type=parse_version,
+ help='Sphinx minimal version')
+ ver_group.add_argument('--max-version', "--max", type=parse_version,
+ help='Sphinx maximum version')
+ ver_group.add_argument('-f', '--full', action='store_true',
+ help='Add all Sphinx (major,minor) supported versions to the version range')
+
+ build_group = parser.add_argument_group("Build options")
+
+ build_group.add_argument('-b', '--build', action='store_true',
+ help='Build documentation')
+ build_group.add_argument('-a', '--make-args', nargs="*",
+ help='extra arguments for make, like SPHINXDIRS=netlink/specs',
+ )
+ build_group.add_argument('-t', '--targets', nargs="+", choices=MAKE_TARGETS,
+ default=[MAKE_TARGETS[0]],
+ help="make build targets. Default: htmldocs.")
+ build_group.add_argument("-o", '--output',
+ help="output directory for the make O=OUTPUT")
+
+ other_group = parser.add_argument_group("Other options")
+
+ other_group.add_argument('-r', '--req-file', action='store_true',
+ help='write a requirements.txt file')
+ other_group.add_argument('-l', '--log',
+ help='Log command output on a file')
+ other_group.add_argument('-v', '--verbose', action='store_true',
+ help='Verbose all commands')
+ other_group.add_argument('-i', '--wait-input', action='store_true',
+ help='Wait for an enter before going to the next version')
+
+ args = parser.parse_args()
+
+ if not args.make_args:
+ args.make_args = []
+
+ sphinx_versions = sorted(list(SPHINX_REQUIREMENTS.keys()))
+
+ if args.full:
+ args.versions += list(SPHINX_REQUIREMENTS.keys())
+
+ venv = SphinxVenv()
+ await venv.run(args)
+
+
+# Call main method
+if __name__ == "__main__":
+ asyncio.run(main())