Diffstat (limited to 'scripts/clang-tools')
-rwxr-xr-x  scripts/clang-tools/gen_compile_commands.py | 155
-rwxr-xr-x  scripts/clang-tools/run-clang-tools.py      |  53
2 files changed, 177 insertions(+), 31 deletions(-)
diff --git a/scripts/clang-tools/gen_compile_commands.py b/scripts/clang-tools/gen_compile_commands.py
index 0227522959a4..6f4afa92a466 100755
--- a/scripts/clang-tools/gen_compile_commands.py
+++ b/scripts/clang-tools/gen_compile_commands.py
@@ -19,8 +19,14 @@ _DEFAULT_OUTPUT = 'compile_commands.json'
_DEFAULT_LOG_LEVEL = 'WARNING'
_FILENAME_PATTERN = r'^\..*\.cmd$'
-_LINE_PATTERN = r'^cmd_[^ ]*\.o := (.* )([^ ]*\.c) *(;|$)'
+_LINE_PATTERN = r'^(saved)?cmd_[^ ]*\.o := (?P<command_prefix>.* )(?P<file_path>[^ ]*\.[cS]) *(;|$)'
_VALID_LOG_LEVELS = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
+
+# Pre-compiled regexes for better performance
+_INCLUDE_PATTERN = re.compile(r'^\s*#\s*include\s*[<"]([^>"]*)[>"]')
+_C_INCLUDE_PATTERN = re.compile(r'^\s*#\s*include\s*"([^"]*\.c)"\s*$')
+_FILENAME_MATCHER = re.compile(_FILENAME_PATTERN)
+
# The tools/ directory adopts a different build system, and produces .cmd
# files in a different format. Do not support it.
_EXCLUDE_DIRS = ['.git', 'Documentation', 'include', 'tools']
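
For reference, a quick sketch of what the widened pattern accepts: newer kernels
write the variable as savedcmd_*, the source may be assembly (.S) as well as C,
and the two named groups replace the positional groups used further down in
main(). The sample line below is made up for illustration.

    import re

    _LINE_PATTERN = r'^(saved)?cmd_[^ ]*\.o := (?P<command_prefix>.* )(?P<file_path>[^ ]*\.[cS]) *(;|$)'
    line_matcher = re.compile(_LINE_PATTERN)

    # Hypothetical .cmd line; real ones carry the full compiler invocation.
    sample = 'savedcmd_init/main.o := gcc -Wp,-MMD,init/.main.o.d -c -o init/main.o init/main.c'
    result = line_matcher.match(sample)
    print(result.group('command_prefix'))  # 'gcc -Wp,-MMD,init/.main.o.d -c -o init/main.o '
    print(result.group('file_path'))       # 'init/main.c'
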
@@ -64,7 +70,7 @@ def parse_arguments():
args = parser.parse_args()
return (args.log_level,
- os.path.abspath(args.directory),
+ os.path.realpath(args.directory),
args.output,
args.ar,
args.paths if len(args.paths) > 0 else [args.directory])
@@ -82,7 +88,6 @@ def cmdfiles_in_dir(directory):
The path to a .cmd file.
"""
- filename_matcher = re.compile(_FILENAME_PATTERN)
exclude_dirs = [ os.path.join(directory, d) for d in _EXCLUDE_DIRS ]
for dirpath, dirnames, filenames in os.walk(directory, topdown=True):
@@ -92,7 +97,7 @@ def cmdfiles_in_dir(directory):
continue
for filename in filenames:
- if filename_matcher.match(filename):
+ if _FILENAME_MATCHER.match(filename):
yield os.path.join(dirpath, filename)
@@ -149,8 +154,87 @@ def cmdfiles_for_modorder(modorder):
yield to_cmdfile(mod_line.rstrip())
+def extract_includes_from_file(source_file, root_directory):
+ """Extract #include statements from a C file.
+
+ Args:
+ source_file: Path to the source .c file to analyze
+ root_directory: Root directory for resolving relative paths
+
+ Returns:
+ List of header files that should be included (without quotes/brackets)
+ """
+ includes = []
+ if not os.path.exists(source_file):
+ return includes
+
+ try:
+ with open(source_file, 'r') as f:
+ for line in f:
+ line = line.strip()
+ # Look for #include statements.
+ # Match both #include "header.h" and #include <header.h>.
+ match = _INCLUDE_PATTERN.match(line)
+ if match:
+ header = match.group(1)
+ # Skip including other .c files to avoid circular includes.
+ if not header.endswith('.c'):
+ # For relative includes (quoted), resolve path relative to source file.
+ if '"' in line:
+ src_dir = os.path.dirname(source_file)
+ header_path = os.path.join(src_dir, header)
+ if os.path.exists(header_path):
+ rel_header = os.path.relpath(header_path, root_directory)
+ includes.append(rel_header)
+ else:
+ includes.append(header)
+ else:
+ # System include like <linux/sched.h>.
+ includes.append(header)
+ except IOError:
+ pass
+
+ return includes
+
+
+def find_included_c_files(source_file, root_directory):
+ """Find .c files that are included by the given source file.
+
+ Args:
+ source_file: Path to the source .c file
+ root_directory: Root directory for resolving relative paths
+
+ Yields:
+ Full paths to included .c files
+ """
+ if not os.path.exists(source_file):
+ return
+
+ try:
+ with open(source_file, 'r') as f:
+ for line in f:
+ line = line.strip()
+ # Look for #include "*.c" patterns.
+ match = _C_INCLUDE_PATTERN.match(line)
+ if match:
+ included_file = match.group(1)
+ # Handle relative paths.
+ if not os.path.isabs(included_file):
+ src_dir = os.path.dirname(source_file)
+ included_file = os.path.join(src_dir, included_file)
+
+ # Normalize the path.
+ included_file = os.path.normpath(included_file)
+
+ # Check if the file exists.
+ if os.path.exists(included_file):
+ yield included_file
+ except IOError:
+ pass
+
+
def process_line(root_directory, command_prefix, file_path):
- """Extracts information from a .cmd line and creates an entry from it.
+ """Extracts information from a .cmd line and creates entries from it.
Args:
root_directory: The directory that was searched for .cmd files. Usually
@@ -160,27 +244,64 @@ def process_line(root_directory, command_prefix, file_path):
Usually relative to root_directory, but sometimes absolute.
Returns:
- An entry to append to compile_commands.
+ A list of entries to append to compile_commands (may include multiple
+ entries if the source file includes other .c files).
Raises:
ValueError: Could not find the extracted file based on file_path and
root_directory or file_directory.
"""
# The .cmd files are intended to be included directly by Make, so they
- # escape the pound sign '#', either as '\#' or '$(pound)' (depending on the
- # kernel version). The compile_commands.json file is not interepreted
- # by Make, so this code replaces the escaped version with '#'.
- prefix = command_prefix.replace('\#', '#').replace('$(pound)', '#')
+ # escape the pound sign '#' as '$(pound)'. The compile_commands.json file
+ # is not interpreted by Make, so this code replaces the escaped version
+ # with '#'.
+ prefix = command_prefix.replace('$(pound)', '#')
- # Use os.path.abspath() to normalize the path resolving '.' and '..' .
- abs_path = os.path.abspath(os.path.join(root_directory, file_path))
+ # Use os.path.realpath() to get the canonical path, eliminating any symbolic links encountered in the path.
+ abs_path = os.path.realpath(os.path.join(root_directory, file_path))
if not os.path.exists(abs_path):
raise ValueError('File %s not found' % abs_path)
- return {
+
+ entries = []
+
+ # Create entry for the main source file.
+ main_entry = {
'directory': root_directory,
'file': abs_path,
'command': prefix + file_path,
}
+ entries.append(main_entry)
+
+ # Find and create entries for included .c files.
+ for included_c_file in find_included_c_files(abs_path, root_directory):
+ # For included .c files, create a compilation command that:
+ # 1. Uses the same compilation flags as the parent file
+ # 2. But compiles the included file directly (not the parent)
+ # 3. Includes necessary headers from the parent file for proper macro resolution
+
+ # Convert absolute path to relative for the command.
+ rel_path = os.path.relpath(included_c_file, root_directory)
+
+ # Extract includes from the parent file to provide proper compilation context.
+ extra_includes = ''
+ try:
+ parent_includes = extract_includes_from_file(abs_path, root_directory)
+ if parent_includes:
+ extra_includes = ' ' + ' '.join('-include ' + inc for inc in parent_includes)
+ except IOError:
+ pass
+
+ included_entry = {
+ 'directory': root_directory,
+ 'file': included_c_file,
+ # Use the same compilation prefix but target the included file directly.
+ # Add extra headers for proper macro resolution.
+ 'command': prefix + extra_includes + ' ' + rel_path,
+ }
+ entries.append(included_entry)
+ logging.debug('Added entry for included file: %s', included_c_file)
+
+ return entries
def main():
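
The two helpers above drive the new multi-entry behaviour: find_included_c_files()
spots quoted "*.c" includes and process_line() emits an extra entry for each one,
reusing the parent's command prefix plus -include flags for the parent's headers.
A standalone sketch of the detection regex, with invented source lines:

    import re

    _C_INCLUDE_PATTERN = re.compile(r'^\s*#\s*include\s*"([^"]*\.c)"\s*$')

    # Only the quoted .c include matches; headers and system includes never
    # produce an extra compile_commands entry.
    for line in ['#include "sha256_generic.c"',
                 '#include <linux/module.h>',
                 '#include "internal.h"']:
        match = _C_INCLUDE_PATTERN.match(line)
        print(line, '->', match.group(1) if match else None)
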
@@ -213,15 +334,15 @@ def main():
result = line_matcher.match(f.readline())
if result:
try:
- entry = process_line(directory, result.group(1),
- result.group(2))
- compile_commands.append(entry)
+ entries = process_line(directory, result.group('command_prefix'),
+ result.group('file_path'))
+ compile_commands.extend(entries)
except ValueError as err:
logging.info('Could not add line from %s: %s',
cmdfile, err)
with open(output, 'wt') as f:
- json.dump(compile_commands, f, indent=2, sort_keys=True)
+ json.dump(sorted(compile_commands, key=lambda x: x["file"]), f, indent=2, sort_keys=True)
if __name__ == '__main__':
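
A small illustration of the new deterministic output: entries are sorted by their
"file" key before dumping, and json.dump still sorts the keys inside each entry.
The paths below are placeholders.

    import json

    compile_commands = [
        {'directory': '/kernel', 'file': '/kernel/kernel/fork.c', 'command': 'gcc ... kernel/fork.c'},
        {'directory': '/kernel', 'file': '/kernel/init/main.c', 'command': 'gcc ... init/main.c'},
    ]
    print(json.dumps(sorted(compile_commands, key=lambda x: x["file"]),
                     indent=2, sort_keys=True))
    # init/main.c now precedes kernel/fork.c regardless of .cmd discovery order.
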
diff --git a/scripts/clang-tools/run-clang-tools.py b/scripts/clang-tools/run-clang-tools.py
index 56f2ec8f0f40..f31ffd09e1ea 100755
--- a/scripts/clang-tools/run-clang-tools.py
+++ b/scripts/clang-tools/run-clang-tools.py
@@ -33,6 +33,11 @@ def parse_arguments():
path_help = "Path to the compilation database to parse"
parser.add_argument("path", type=str, help=path_help)
+ checks_help = "Checks to pass to the analysis"
+ parser.add_argument("-checks", type=str, default=None, help=checks_help)
+ header_filter_help = "Pass the -header-filter value to the tool"
+ parser.add_argument("-header-filter", type=str, default=None, help=header_filter_help)
+
return parser.parse_args()
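
Roughly how the new options surface on the command line. The sketch assumes the
script's existing positional "type" and "path" arguments and uses the '=' form so
the leading '-' of the checks list is not mistaken for another option.

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("type")    # "clang-tidy" or "clang-analyzer" in the real script
    parser.add_argument("path")    # path to compile_commands.json
    parser.add_argument("-checks", type=str, default=None)
    parser.add_argument("-header-filter", type=str, default=None)

    args = parser.parse_args(["clang-tidy", "compile_commands.json",
                              "-checks=-*,linuxkernel-*", "-header-filter=.*"])
    print(args.checks)         # '-*,linuxkernel-*'
    print(args.header_filter)  # '.*'
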
@@ -45,14 +50,27 @@ def init(l, a):
def run_analysis(entry):
# Disable all checks, then re-enable the ones we want
- checks = []
- checks.append("-checks=-*")
- if args.type == "clang-tidy":
- checks.append("linuxkernel-*")
+ global args
+ checks = None
+ if args.checks:
+ checks = args.checks.split(',')
else:
- checks.append("clang-analyzer-*")
- checks.append("-clang-analyzer-security.insecureAPI.DeprecatedOrUnsafeBufferHandling")
- p = subprocess.run(["clang-tidy", "-p", args.path, ",".join(checks), entry["file"]],
+ checks = ["-*"]
+ if args.type == "clang-tidy":
+ checks.append("linuxkernel-*")
+ else:
+ checks.append("clang-analyzer-*")
+ checks.append("-clang-analyzer-security.insecureAPI.DeprecatedOrUnsafeBufferHandling")
+ file = entry["file"]
+ if not file.endswith(".c") and not file.endswith(".cpp"):
+ with lock:
+ print(f"Skipping non-C file: '{file}'", file=sys.stderr)
+ return
+ pargs = ["clang-tidy", "-p", args.path, "-checks=" + ",".join(checks)]
+ if args.header_filter:
+ pargs.append("-header-filter=" + args.header_filter)
+ pargs.append(file)
+ p = subprocess.run(pargs,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
cwd=entry["directory"])
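
Putting the pieces together, for one hypothetical database entry, the default
clang-tidy checks and a -header-filter value, the argument list handed to
subprocess.run() ends up looking like this:

    # Assumed inputs; the real values come from the parsed arguments and the
    # compilation database.
    entry = {"directory": "/kernel", "file": "/kernel/kernel/fork.c",
             "command": "gcc ... kernel/fork.c"}
    path = "compile_commands.json"
    header_filter = ".*"                  # only appended when -header-filter is given

    checks = ["-*", "linuxkernel-*"]      # default when no -checks and type is clang-tidy
    pargs = ["clang-tidy", "-p", path, "-checks=" + ",".join(checks)]
    pargs.append("-header-filter=" + header_filter)
    pargs.append(entry["file"])
    print(pargs)
    # ['clang-tidy', '-p', 'compile_commands.json', '-checks=-*,linuxkernel-*',
    #  '-header-filter=.*', '/kernel/kernel/fork.c']
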
@@ -61,14 +79,21 @@ def run_analysis(entry):
def main():
- args = parse_arguments()
+ try:
+ args = parse_arguments()
- lock = multiprocessing.Lock()
- pool = multiprocessing.Pool(initializer=init, initargs=(lock, args))
- # Read JSON data into the datastore variable
- with open(args.path, "r") as f:
- datastore = json.load(f)
- pool.map(run_analysis, datastore)
+ lock = multiprocessing.Lock()
+ pool = multiprocessing.Pool(initializer=init, initargs=(lock, args))
+ # Read JSON data into the datastore variable
+ with open(args.path, "r") as f:
+ datastore = json.load(f)
+ pool.map(run_analysis, datastore)
+ except BrokenPipeError:
+ # Python flushes standard streams on exit; redirect remaining output
+ # to devnull to avoid another BrokenPipeError at shutdown
+ devnull = os.open(os.devnull, os.O_WRONLY)
+ os.dup2(devnull, sys.stdout.fileno())
+ sys.exit(1) # Python exits with error code 1 on EPIPE
if __name__ == "__main__":
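
The BrokenPipeError handling mirrors the pattern from the Python documentation's
note on SIGPIPE, useful when the script is piped into something like head: once
the reader has gone away, stdout is pointed at /dev/null so the interpreter's
implicit flush at exit cannot raise a second time. A standalone sketch:

    import os
    import sys

    def emit(lines):
        try:
            for line in lines:
                print(line)
            sys.stdout.flush()
        except BrokenPipeError:
            devnull = os.open(os.devnull, os.O_WRONLY)
            os.dup2(devnull, sys.stdout.fileno())
            sys.exit(1)  # conventionally exit 1 on EPIPE, as the script above does

    if __name__ == "__main__":
        emit(str(i) for i in range(100000))
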