#!/usr/bin/env python3
# Wireshark - Network traffic analyzer
# By Gerald Combs <gerald@wireshark.org>
# Copyright 1998 Gerald Combs
#
# SPDX-License-Identifier: GPL-2.0-or-later

import os
import re
import subprocess
import argparse
import signal

# This utility scans for tfs items, and works out if standard ones
# could have been used instead (from epan/tfs.c)
# TODO:
# - check how many of the definitions in epan/tfs.c are used in other dissectors
# - although even if unused, might be in external dissectors?
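
# Example invocations (assuming the script lives at tools/check_tfs.py and is
# run from the top of the Wireshark source tree, since it refers to epan/tfs.c
# and epan/dissectors/ by relative path):
#   tools/check_tfs.py                    # scan all dissector modules
#   tools/check_tfs.py --commits 3        # only dissectors touched by the last 3 commits
#   tools/check_tfs.py --open --common    # unstaged/staged dissectors; also report
#                                         # custom entries repeated across files
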
# Try to exit soon after Ctrl-C is pressed.
should_exit = False


def signal_handler(sig, frame):
    global should_exit
    should_exit = True
    print('You pressed Ctrl+C - exiting')


signal.signal(signal.SIGINT, signal_handler)


# Test for whether the given file was automatically generated.
def isGeneratedFile(filename):
    # Open file
    f_read = open(filename, 'r')
    lines_tested = 0
    for line in f_read:
        # The comment saying that it's generated is near the top, so give up
        # once we get a few lines down.
        if lines_tested > 10:
            f_read.close()
            return False
        if (line.find('Generated automatically') != -1 or
                line.find('Generated Automatically') != -1 or
                line.find('Autogenerated from') != -1 or
                line.find('is autogenerated') != -1 or
                line.find('automatically generated by Pidl') != -1 or
                line.find('Created by: The Qt Meta Object Compiler') != -1 or
                line.find('This file was generated') != -1 or
                line.find('This filter was automatically generated') != -1 or
                line.find('This file is auto generated, do not edit!') != -1 or
                line.find('This file is auto generated') != -1):
            f_read.close()
            return True
        lines_tested += 1

    # OK, looks like a hand-written file!
    f_read.close()
    return False


# Keep track of custom entries that might appear in multiple dissectors,
# so we can consider adding them to tfs.c
custom_tfs_entries = {}


def AddCustomEntry(val1, val2, file):
    global custom_tfs_entries
    if (val1, val2) in custom_tfs_entries:
        custom_tfs_entries[(val1, val2)].append(file)
    else:
        custom_tfs_entries[(val1, val2)] = [file]


class TFS:
    def __init__(self, file, name, val1, val2):
        self.file = file
        self.name = name
        self.val1 = val1
        self.val2 = val2

        # Do some extra checks on values (val1 is the true string, val2 the false string).
        if val1.startswith(' ') or val1.endswith(' '):
            print('N.B.: file=' + self.file + ' ' + self.name + ' - true val begins or ends with space \"' + self.val1 + '\"')
        if val2.startswith(' ') or val2.endswith(' '):
            print('N.B.: file=' + self.file + ' ' + self.name + ' - false val begins or ends with space \"' + self.val2 + '\"')

    def __str__(self):
        return '{' + '"' + self.val1 + '", "' + self.val2 + '"}'


class ValueString:
    def __init__(self, file, name, vals):
        self.file = file
        self.name = name
        self.raw_vals = vals
        self.parsed_vals = {}
        self.looks_like_tfs = True

        no_lines = self.raw_vals.count('{')
        if no_lines != 3:
            self.looks_like_tfs = False
            return

        # Now parse out each entry in the value_string
        matches = re.finditer(r'\{([\"a-zA-Z\s\d\,]*)\}', self.raw_vals)
        for m in matches:
            entry = m[1]
            # Check each entry looks like part of a TFS entry.
            match = re.match(r'\s*([01])\,\s*\"([a-zA-Z\d\s]*\s*)\"', entry)
            if match:
                if match[1] == '1':
                    self.parsed_vals[True] = match[2]
                else:
                    self.parsed_vals[False] = match[2]

                # Now have both entries
                if len(self.parsed_vals) == 2:
                    break
            else:
                self.looks_like_tfs = False
                break

    def __str__(self):
        return '{' + '"' + self.raw_vals + '"}'


def removeComments(code_string):
    code_string = re.sub(re.compile(r"/\*.*?\*/", re.DOTALL), "", code_string)  # C-style comment
    code_string = re.sub(re.compile(r"//.*?\n"), "", code_string)               # C++-style comment
    return code_string
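# A rough sketch of removeComments()'s effect (example string invented for
# illustration):
#   removeComments('int x; /* flag */ // note\n')  ->  'int x;  '

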
# Look for true_false_string items in a dissector file.
def findItems(filename):
    items = {}

    with open(filename, 'r') as f:
        contents = f.read()
        # Example: const true_false_string tfs_true_false = { "True", "False" };
        # Remove comments so as not to trip up RE.
        contents = removeComments(contents)
        matches = re.finditer(r'.*const\s*true_false_string\s*([a-z_]*)\s*=\s*{\s*\"([a-zA-Z_ ]*)\"\s*,\s*\"([a-zA-Z_ ]*)\"', contents)
        for m in matches:
            name = m.group(1)
            val1 = m.group(2)
            val2 = m.group(3)
            # Store this entry.
            items[name] = TFS(filename, name, val1, val2)

    return items
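# For the example declaration shown in findItems() above, the parsed entry
# would be items['tfs_true_false'] with val1 'True' (the true string) and
# val2 'False' (the false string).

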
# Look for value_string entries in a dissector file.
def findValueStrings(filename):
    items = {}

    # static const value_string radio_type_vals[] =
    # {
    #     { 0, "FDD"},
    #     { 1, "TDD"},
    #     { 0, NULL }
    # };

    with open(filename, 'r') as f:
        contents = f.read()
        # Remove comments so as not to trip up RE.
        contents = removeComments(contents)
        matches = re.finditer(r'.*const value_string\s*([a-zA-Z0-9_]*)\s*\[\s*\]\s*\=\s*\{([\{\}\d\,a-zA-Z\s\"]*)\};', contents)
        for m in matches:
            name = m.group(1)
            vals = m.group(2)
            items[name] = ValueString(filename, name, vals)

    return items


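# Dissector source files are named like packet-<proto>.c (e.g. packet-tcp.c).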
def is_dissector_file(filename):
    p = re.compile(r'.*packet-.*\.c')
    return p.match(filename)


def findDissectorFilesInFolder(folder):
    # Look at files in sorted order, to give some idea of how far through we are.
    files = []
    for f in sorted(os.listdir(folder)):
        if should_exit:
            # Return what has been found so far; the main loop exits promptly anyway.
            return files
        if is_dissector_file(f):
            filename = os.path.join(folder, f)
            files.append(filename)

    return files


warnings_found = 0
errors_found = 0

# Check the given dissector file.
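# 'tfs_items' holds the shared true_false_string entries parsed from epan/tfs.c.
# An exact match against a shared entry is reported as an error; a match that
# differs only in capitalisation is a warning.  With look_for_common set,
# unmatched custom entries are recorded so that strings repeated across
# dissectors can be reported at the end; with check_value_strings set,
# two-entry value_strings are also compared against the shared entries.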
def checkFile(filename, tfs_items, look_for_common=False, check_value_strings=False):
    global warnings_found
    global errors_found

    # Check file exists - e.g. may have been deleted in a recent commit.
    if not os.path.exists(filename):
        print(filename, 'does not exist!')
        return

    # Find items.
    items = findItems(filename)

    # See if any of these items already existed in tfs.c
    for i in items:
        for t in tfs_items:
            found = False

            #
            # Do not do this check for plugins; plugins cannot import
            # data values from libwireshark (functions, yes; data
            # values, no).
            #
            # os.path.commonprefix() returns an empty string when the file name
            # and "plugin/epan/" share no common prefix, i.e. when this is not
            # a plugin file, so only do the check when that prefix is empty.
            #
            if os.path.commonprefix([filename, 'plugin/epan/']) == '':
                exact_case = False
                if tfs_items[t].val1 == items[i].val1 and tfs_items[t].val2 == items[i].val2:
                    found = True
                    exact_case = True
                elif tfs_items[t].val1.upper() == items[i].val1.upper() and tfs_items[t].val2.upper() == items[i].val2.upper():
                    found = True

                if found:
                    print("Error:" if exact_case else "Warn: ", filename, i, "- could have used", t, 'from tfs.c instead: ', tfs_items[t],
                          '' if exact_case else ' (capitalisation differs)')
                    if exact_case:
                        errors_found += 1
                    else:
                        warnings_found += 1
                    break

        if not found:
            if look_for_common:
                AddCustomEntry(items[i].val1, items[i].val2, filename)

    if check_value_strings:
        vs = findValueStrings(filename)
        for v in vs:
            if vs[v].looks_like_tfs:
                found = False
                exact_case = False

                #print('Candidate', v, vs[v])
                for t in tfs_items:
                    found = False
                    # As above, don't do this check for plugins, which cannot
                    # import data values from libwireshark.
                    if os.path.commonprefix([filename, 'plugin/epan/']) == '':
                        exact_case = False
                        if tfs_items[t].val1 == vs[v].parsed_vals[True] and tfs_items[t].val2 == vs[v].parsed_vals[False]:
                            found = True
                            exact_case = True
                        elif tfs_items[t].val1.upper() == vs[v].parsed_vals[True].upper() and tfs_items[t].val2.upper() == vs[v].parsed_vals[False].upper():
                            found = True

                        if found:
                            print("Warn:" if exact_case else "Note:", filename, 'value_string', v, "- could have used", t, 'from tfs.c instead: ', tfs_items[t],
                                  '' if exact_case else ' (capitalisation differs)')
                            if exact_case:
                                warnings_found += 1
                            break


#################################################################
# Main logic.

# Command-line args.  Controls which dissector files should be checked.
# If no args given, will just scan epan/dissectors folder.
parser = argparse.ArgumentParser(description='Check calls in dissectors')
parser.add_argument('--file', action='append',
                    help='specify individual dissector file to test')
parser.add_argument('--commits', action='store',
                    help='last N commits to check')
parser.add_argument('--open', action='store_true',
                    help='check open files')
parser.add_argument('--check-value-strings', action='store_true',
                    help='check whether value_strings could have been tfs')
parser.add_argument('--common', action='store_true',
                    help='check for potential new entries for tfs.c')

args = parser.parse_args()

# Get files from wherever command-line args indicate.
files = []
if args.file:
    # Add specified file(s)
    for f in args.file:
        if not f.startswith('epan'):
            f = os.path.join('epan', 'dissectors', f)
        if not os.path.isfile(f):
            print('Chosen file', f, 'does not exist.')
            exit(1)
        else:
            files.append(f)
elif args.commits:
    # Get files affected by specified number of commits.
    command = ['git', 'diff', '--name-only', 'HEAD~' + args.commits]
    files = [f.decode('utf-8')
             for f in subprocess.check_output(command).splitlines()]
    # Will examine dissector files only
    files = list(filter(lambda f: is_dissector_file(f), files))
elif args.open:
    # Unstaged changes.
    command = ['git', 'diff', '--name-only']
    files = [f.decode('utf-8')
             for f in subprocess.check_output(command).splitlines()]
    # Only interested in dissector files.
    files = list(filter(lambda f: is_dissector_file(f), files))
    # Staged changes.
    command = ['git', 'diff', '--staged', '--name-only']
    files_staged = [f.decode('utf-8')
                    for f in subprocess.check_output(command).splitlines()]
    # Only interested in dissector files.
    files_staged = list(filter(lambda f: is_dissector_file(f), files_staged))
    for f in files_staged:
        if f not in files:
            files.append(f)
else:
    # Find all dissector files from folder.
    files = findDissectorFilesInFolder(os.path.join('epan', 'dissectors'))

# If scanning a subset of files, list them here.
print('Examining:')
if args.file or args.commits or args.open:
    if files:
        print(' '.join(files), '\n')
    else:
        print('No files to check.\n')
else:
    print('All dissector modules\n')

# Get standard/shared ones from epan/tfs.c.
tfs_entries = findItems(os.path.join('epan', 'tfs.c'))

# Now check the files to see if they could have used shared ones instead.
for f in files:
    if should_exit:
        exit(1)
    if not isGeneratedFile(f):
        checkFile(f, tfs_entries, look_for_common=args.common, check_value_strings=args.check_value_strings)

# Show summary.
print(warnings_found, 'warnings found')
if errors_found:
    print(errors_found, 'errors found')
    exit(1)

if args.common:
    # Looking for items that could potentially be moved to tfs.c
    for c in custom_tfs_entries:
        # Only want to see items that have 3 or more occurrences.
        # Even then, probably only want to consider ones that sound generic.
        if len(custom_tfs_entries[c]) > 2:
            print(c, 'appears', len(custom_tfs_entries[c]), 'times, in: ', custom_tfs_entries[c])
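
# A hypothetical example of a --common summary line (made-up file names and
# count, just to illustrate the format printed above):
#   ('Enabled', 'Disabled') appears 3 times, in:  ['epan/dissectors/packet-a.c', 'epan/dissectors/packet-b.c', 'epan/dissectors/packet-c.c']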