Fix issues discovered by common Python linters

Fix some of the issues flagged by common Python linters, including:
* Switch `None` comparisons to use `is` rather than `==`. Identity is
not the same as equality, and I have previously spent 40+ hours
tracking down a subtle bug caused by exactly this issue (a short
sketch of the hazard follows this list). Note that this may introduce
a problem if one of the scripts depends on the `==` behavior, in which
case the comparison should be changed to `True`/`False` rather than
`None`.
* Use `except Exception:`, as bare `except:` statements have been
discouraged for years. Ideally some of these would catch specific
exceptions, but for now I simply caught them all. Again, this could
introduce very subtle behavioral changes under Python 2, but as I
understand it that was all fixed in Python 3, so it is safe to move to
`except Exception:`.
* Replace `if not x in y` with the more idiomatic `if x not in y`.
* Use the idiomatic two blank lines between top-level definitions. I
only did this at the beginning, then stopped once I realized how
overwhelming it was going to be to apply everywhere.
* Add a TODO where an undefined function name is called, since that
code will fail whenever it is run.
* Use more idiomatic spacing around `:`. This is also only partially
cleaned up, as I gave up when I saw how clearly infatuated
`asn2wrs.py` is with the construct.
* Various other small cleanups: removed trailing whitespace, fixed
indentation that wasn't a multiple of four spaces, etc.
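
For the `None` comparison item, here is a minimal sketch (not taken
from this repository; the class and names are made up) of why `==` and
`is` can disagree, followed by the before/after forms of the other
idiom fixes listed above:

```python
# Identity vs. equality: a class that overrides __eq__ decides what `==`
# returns, while `is` always compares against the None singleton.
class Flexible:
    def __eq__(self, other):
        return True     # deliberately claims equality with everything

x = Flexible()
print(x == None)        # True  -- the custom __eq__ is consulted
print(x is None)        # False -- identity check against the None singleton

# Before/after forms of the other idioms touched by this commit:
value, items = None, [1, 2, 3]

if value != None:       # before (flagged by linters)
    pass
if value is not None:   # after
    pass

if not 4 in items:      # before
    pass
if 4 not in items:      # after
    pass

try:
    pass
except:                 # before: bare except, discouraged for years
    pass
try:
    pass
except Exception:       # after
    pass
```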

There is still _much_ to do, but I haven't been heavily involved with
this project before, so this seemed like a sufficient amount to put up
to see what the feedback is.

Linters I have enabled that highlighted some of these issues include
the following (a hypothetical annotated snippet follows the list):
* `pylint`
* `flake8`
* `pycodestyle`
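
As a rough guide to what each tool complains about, here is a
hypothetical file (not from this tree) reproducing the patterns fixed
above, annotated with the warning codes I would expect. The codes are
quoted from memory, so treat them as approximate and re-run the
linters to confirm.

```python
import sys


def check(value, allowed) :        # E203: whitespace before ':'
    if value == None:              # E711 / pylint C0121: comparison to None
        return False
    if not value in allowed:       # E713: membership test should use 'not in'
      return False                 # E111: indentation is not a multiple of four
    try:
        do_something(value)        # F821 / pylint E0602: undefined name
    except:                        # E722 / pylint W0702: bare 'except'
        return False
    return True
def main():                        # E302: expected 2 blank lines, got 0
    sys.exit(0 if check(1, [1]) else 1)
```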
Jeff Widman 2020-09-20 22:44:41 -07:00
parent c5926c51e7
commit 8d7ebc732e
22 changed files with 258 additions and 239 deletions

View File

@ -31,14 +31,11 @@ other script-based formates beside VBScript
from __future__ import print_function
import os
import sys
import signal
import re
import argparse
import time
import struct
import binascii
from threading import Thread
ERROR_USAGE = 0
@ -127,19 +124,19 @@ def extcap_config(interface, option):
args.append((11, '--radio', 'Radio Test', 'Radio Test Value', 'radio', '{group=Selection}'))
args.append((12, '--multi', 'MultiCheck Test', 'MultiCheck Test Value', 'multicheck', '{group=Selection}'))
if ( option == "remote" ):
if option == "remote":
values.append((3, "if1", "Remote Interface 1", "false"))
values.append((3, "if2", "Remote Interface 2", "true"))
values.append((3, "if3", "Remote Interface 3", "false"))
values.append((3, "if4", "Remote Interface 4", "false"))
if ( option == "radio" ):
if option == "radio":
values.append((11, "r1", "Radio Option 1", "false"))
values.append((11, "r2", "Radio Option 2", "false"))
values.append((11, "r3", "Radio Option 3", "true"))
if ( len(option) <= 0 ):
if len(option) <= 0:
for arg in args:
print("arg {number=%d}{call=%s}{display=%s}{tooltip=%s}{type=%s}%s" % arg)
@ -149,7 +146,7 @@ def extcap_config(interface, option):
values.append((11, "r1", "Radio1", "false"))
values.append((11, "r2", "Radio2", "true"))
if ( len(option) <= 0 ):
if len(option) <= 0:
multi_values.append(((12, "m1", "Checkable Parent 1", "false", "true"), None))
multi_values.append(((12, "m1c1", "Checkable Child 1", "false", "true"), "m1"))
multi_values.append(((12, "m1c1g1", "Uncheckable Grandchild", "false", "false"), "m1c1"))
@ -192,9 +189,9 @@ def extcap_interfaces():
def extcap_dlts(interface):
if ( interface == '1' ):
if interface == '1':
print("dlt {number=147}{name=USER0}{display=Demo Implementation for Extcap}")
elif ( interface == '2' ):
elif interface == '2':
print("dlt {number=148}{name=USER1}{display=Demo Implementation for Extcap}")
def validate_capture_filter(capture_filter):
@ -233,8 +230,8 @@ def pcap_fake_header():
# Calculates and returns the IP checksum based on the given IP Header
def ip_checksum(iph):
#split into bytes
words = splitN(''.join(iph.split()),4)
csum = 0;
words = splitN(''.join(iph.split()), 4) # TODO splitN() func undefined, this code will fail
csum = 0
for word in words:
csum += int(word, base=16)
csum += (csum >> 16)
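
Aside on the TODO above: the commit does not define `splitN()`.
Judging purely from the call site (`splitN(''.join(iph.split()), 4)`
feeding a 16-bit checksum loop), it presumably chops a hex string into
fixed-width chunks. A hypothetical stand-in, not part of this commit:

```python
def splitN(text, n):
    """Split 'text' into consecutive chunks of length n (assumed behavior)."""
    return [text[i:i + n] for i in range(0, len(text), n)]

# splitN("4500003c1c46", 4) -> ['4500', '003c', '1c46']  (three 16-bit words)
```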
@ -291,14 +288,14 @@ def control_read(fn):
else:
payload = ''
return arg, typ, payload
except:
except Exception:
return None, None, None
def control_read_thread(control_in, fn_out):
global initialized, message, delay, verify, button, button_disabled
with open(control_in, 'rb', 0) as fn:
arg = 0
while arg != None:
while arg is not None:
arg, typ, payload = control_read(fn)
log = ''
if typ == CTRL_CMD_INITIALIZED:
@ -318,7 +315,7 @@ def control_read_thread(control_in, fn_out):
elif arg == CTRL_ARG_BUTTON:
control_write(fn_out, CTRL_ARG_BUTTON, CTRL_CMD_DISABLE, "")
button_disabled = True
if button == True:
if button:
control_write(fn_out, CTRL_ARG_BUTTON, CTRL_CMD_SET, "Turn on")
button = False
log = "Button turned off"
@ -367,25 +364,25 @@ def extcap_capture(interface, fifo, control_in, control_out, in_delay, in_verify
with open(fifo, 'wb', 0) as fh:
fh.write(pcap_fake_header())
if control_out != None:
if control_out is not None:
fn_out = open(control_out, 'wb', 0)
control_write(fn_out, CTRL_ARG_LOGGER, CTRL_CMD_SET, "Log started at " + time.strftime("%c") + "\n")
if control_in != None:
if control_in is not None:
# Start reading thread
thread = Thread(target=control_read_thread, args=(control_in, fn_out))
thread.start()
if fn_out != None:
if fn_out is not None:
control_write_defaults(fn_out)
while True:
if fn_out != None:
if fn_out is not None:
log = "Received packet #" + str(counter) + "\n"
control_write(fn_out, CTRL_ARG_LOGGER, CTRL_CMD_ADD, log)
counter = counter + 1
if button_disabled == True:
if button_disabled:
control_write(fn_out, CTRL_ARG_BUTTON, CTRL_CMD_ENABLE, "")
control_write(fn_out, CTRL_ARG_NONE, CTRL_CMD_INFORMATION, "Turn action finished.")
button_disabled = False
@ -395,7 +392,7 @@ def extcap_capture(interface, fifo, control_in, control_out, in_delay, in_verify
time.sleep(delay)
thread.join()
if fn_out != None:
if fn_out is not None:
fn_out.close()
def extcap_close_fifo(fifo):
@ -452,32 +449,32 @@ if __name__ == '__main__':
fifo_found = 0
fifo = ""
for arg in sys.argv:
if (arg == "--fifo" or arg == "--extcap-fifo") :
if arg == "--fifo" or arg == "--extcap-fifo":
fifo_found = 1
elif ( fifo_found == 1 ):
elif fifo_found == 1:
fifo = arg
break
extcap_close_fifo(fifo)
sys.exit(ERROR_ARG)
if ( len(sys.argv) <= 1 ):
if len(sys.argv) <= 1:
parser.exit("No arguments given!")
if ( args.extcap_version and not args.extcap_interfaces ):
if args.extcap_version and not args.extcap_interfaces:
extcap_version()
sys.exit(0)
if ( args.extcap_interfaces == False and args.extcap_interface == None ):
if not args.extcap_interfaces and args.extcap_interface is None:
parser.exit("An interface must be provided or the selection must be displayed")
if ( args.extcap_capture_filter and not args.capture ):
if args.extcap_capture_filter and not args.capture:
validate_capture_filter(args.extcap_capture_filter)
sys.exit(0)
if ( args.extcap_interfaces == True or args.extcap_interface == None ):
if args.extcap_interfaces or args.extcap_interface is None:
extcap_interfaces()
sys.exit(0)
if ( len(unknown) > 1 ):
if len(unknown) > 1:
print("Extcap Example %d unknown arguments given" % len(unknown))
m = re.match('example(\d+)', args.extcap_interface)
@ -486,16 +483,16 @@ if __name__ == '__main__':
interface = m.group(1)
message = args.message
if ( args.message == None or len(args.message) == 0 ):
if args.message is None or len(args.message) == 0:
message = "Extcap Test"
fake_ip = args.fake_ip
if ( args.fake_ip == None or len(args.fake_ip) < 7 or len(args.fake_ip.split('.')) != 4 ):
if args.fake_ip is None or len(args.fake_ip) < 7 or len(args.fake_ip.split('.')) != 4:
fake_ip = "127.0.0.1"
ts = args.ts
if ( args.extcap_reload_option and len(args.extcap_reload_option) > 0 ):
if args.extcap_reload_option and len(args.extcap_reload_option) > 0:
option = args.extcap_reload_option
if args.extcap_config:

View File

@ -246,7 +246,7 @@ class _ExecutionScope(object):
for cleanup in self.finalizers:
try:
cleanup()
except:
except Exception:
exceptions.append(sys.exc_info()[1])
self.cache.clear()
self.finalizers.clear()

View File

@ -129,7 +129,7 @@ class SubprocessTestCase(unittest.TestCase):
for proc in self.processes:
try:
proc.kill()
except:
except Exception:
pass
def setUp(self):

View File

@ -131,7 +131,7 @@ def check_capture_fifo(cmd_dumpcap):
try:
# If a previous test left its fifo laying around, e.g. from a failure, remove it.
os.unlink(fifo_file)
except:
except Exception:
pass
os.mkfifo(fifo_file)
slow_dhcp_cmd = subprocesstest.cat_dhcp_command('slow')
@ -334,7 +334,7 @@ def check_dumpcap_pcapng_sections(cmd_dumpcap, cmd_tshark, capture_file):
# If a previous test left its fifo laying around, e.g. from a failure, remove it.
try:
os.unlink(fifo_file)
except: pass
except Exception: pass
os.mkfifo(fifo_file)
cat_cmd = subprocesstest.cat_cap_file_command(in_files)
fifo_procs.append(self.startProcess(('{0} > {1}'.format(cat_cmd, fifo_file)), shell=True))

View File

@ -164,7 +164,7 @@ class case_tshark_dump_glossaries(subprocesstest.SubprocessTestCase):
for glossary in glossaries:
try:
self.log_fd.truncate()
except:
except Exception:
pass
self.assertRun((cmd_tshark, '-G', glossary), env=base_env)
self.assertEqual(self.countOutput(count_stdout=False, count_stderr=True), 0, 'Found error output while printing glossary ' + glossary)

View File

@ -23,7 +23,7 @@ class PacketList:
items within it."""
def __init__(self, children=None):
if children == None:
if children is None:
self.children = []
else:
self.children = children
@ -61,7 +61,7 @@ class PacketList:
def get_items(self, name, items=None):
"""Return all items that match the name 'name'.
They are returned in order of a depth-first-search."""
if items == None:
if items is None:
top_level = 1
items = []
else:
@ -83,7 +83,7 @@ class PacketList:
before other protocols. For example, if you have an HTTP
protocol, you can find all tcp.dstport fields *before* that HTTP
protocol. This helps analyze in the presence of tunneled protocols."""
if items == None:
if items is None:
top_level = 1
items = []
else:

View File

@ -319,7 +319,7 @@ reserved_words = {
}
for k in list(static_tokens.keys()):
if static_tokens [k] == None:
if static_tokens [k] is None:
static_tokens [k] = k
StringTypes = ['Numeric', 'Printable', 'IA5', 'BMP', 'Universal', 'UTF8',
@ -1594,7 +1594,7 @@ class EthCtx:
name=self.eth_hf[f]['attr']['NAME']
try: # Python < 3
trantab = maketrans("- ", "__")
except:
except Exception:
trantab = str.maketrans("- ", "__")
name = name.translate(trantab)
namelower = name.lower()
@ -4132,7 +4132,7 @@ class SequenceOfType (SeqOfType):
# name, tag (None for no tag, EXPLICIT() for explicit), typ)
# or '' + (1,) for optional
sizestr = ''
if self.size_constr != None:
if self.size_constr is not None:
print("#Ignoring size constraint:", self.size_constr.subtype)
return "%sasn1.SEQUENCE_OF (%s%s)" % (ctx.spaces (),
self.val.to_python (ctx),
@ -4267,7 +4267,7 @@ class SequenceType (SeqType):
# CHOICE or SEQUENCE_OF (where should the SEQUENCE_OF name come
# from? for others, element or arm name would be fine)
seq_name = getattr (self, 'sequence_name', None)
if seq_name == None:
if seq_name is None:
seq_name = 'None'
else:
seq_name = "'" + seq_name + "'"
@ -4751,7 +4751,7 @@ class EnumeratedType (Type):
def eth_type_default_pars(self, ectx, tname):
pars = Type.eth_type_default_pars(self, ectx, tname)
(root_num, ext_num, map_table) = self.get_vals_etc(ectx)[1:]
if (self.ext != None):
if self.ext is not None:
ext = 'TRUE'
else:
ext = 'FALSE'
@ -4767,7 +4767,7 @@ class EnumeratedType (Type):
def eth_type_default_table(self, ectx, tname):
if (not ectx.Per() and not ectx.Oer()): return ''
map_table = self.get_vals_etc(ectx)[3]
if (map_table == None): return ''
if map_table is None: return ''
table = "static guint32 %(TABLE)s[%(ROOT_NUM)s+%(EXT_NUM)s] = {"
table += ", ".join([str(v) for v in map_table])
table += "};\n"
@ -8022,7 +8022,7 @@ def eth_main():
try:
data = data.decode(encoding)
break
except:
except Exception:
warnings.warn_explicit("Decoding %s as %s failed, trying next." % (fn, encoding), UserWarning, '', 0)
# Py2 compat, name.translate in eth_output_hf_arr fails with unicode
if not isinstance(data, str):

View File

@ -5,13 +5,14 @@
#
# SPDX-License-Identifier: GPL-2.0-or-later
import argparse
import os
import re
import requests
import shutil
import subprocess
import argparse
import signal
import subprocess
import requests
# This utility scans the dissector code for URLs, then attempts to
# fetch the links. The results are shown in stdout, but also, at
@ -38,6 +39,7 @@ def signal_handler(sig, frame):
should_exit = True
print('You pressed Ctrl+C - exiting')
signal.signal(signal.SIGINT, signal_handler)
@ -127,7 +129,7 @@ links = []
files = []
def findLinksInFile(filename):
def find_links_in_file(filename):
with open(filename, 'r') as f:
for line_number, line in enumerate(f, start=1):
# TODO: not matching
@ -149,13 +151,13 @@ def findLinksInFile(filename):
# Scan the given folder for links to test.
def findLinksInFolder(folder):
def find_links_in_folder(folder):
# Look at files in sorted order, to give some idea of how far through it
# is.
for filename in sorted(os.listdir(folder)):
if filename.endswith('.c'):
global links
findLinksInFile(os.path.join(folder, filename))
find_links_in_file(os.path.join(folder, filename))
#################################################################
@ -176,44 +178,44 @@ parser.add_argument('--verbose', action='store_true',
args = parser.parse_args()
def isDissectorFile(filename):
p = re.compile('epan/dissectors/packet-.*\.c')
def is_dissector_file(filename):
p = re.compile(r'epan/dissectors/packet-.*\.c')
return p.match(filename)
# Get files from wherever command-line args indicate.
if args.file:
# Fetch links from single file.
findLinksInFile(args.file)
find_links_in_file(args.file)
elif args.commits:
# Get files affected by specified number of commits.
command = ['git', 'diff', '--name-only', 'HEAD~' + args.commits]
files = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Fetch links from files (dissectors files only)
files = list(filter(lambda f : isDissectorFile(f), files))
files = list(filter(lambda f: is_dissector_file(f), files))
for f in files:
findLinksInFile(f)
find_links_in_file(f)
elif args.open:
# Unstaged changes.
command = ['git', 'diff', '--name-only']
files = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
files = list(filter(lambda f : isDissectorFile(f), files))
files = list(filter(lambda f: is_dissector_file(f), files))
# Staged changes.
command = ['git', 'diff', '--staged', '--name-only']
files_staged = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
files_staged = list(filter(lambda f : isDissectorFile(f), files_staged))
files_staged = list(filter(lambda f: is_dissector_file(f), files_staged))
for f in files:
findLinksInFile(f)
find_links_in_file(f)
for f in files_staged:
if not f in files:
findLinksInFile(f)
if f not in files:
find_links_in_file(f)
files.append(f)
else:
# Find links from dissector folder.
findLinksInFolder(os.path.join(os.path.dirname(
find_links_in_folder(os.path.join(os.path.dirname(
__file__), '..', 'epan', 'dissectors'))

View File

@ -335,13 +335,13 @@ elif args.open:
files = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Only interested in dissector files.
files = list(filter(lambda f : isDissectorFile(f), files))
files = list(filter(lambda f : is_dissector_file(f), files))
# Staged changes.
command = ['git', 'diff', '--staged', '--name-only']
files_staged = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Only interested in dissector files.
files_staged = list(filter(lambda f : isDissectorFile(f), files_staged))
files_staged = list(filter(lambda f : is_dissector_file(f), files_staged))
for f in files:
files.append(f)
for f in files_staged:

View File

@ -88,7 +88,7 @@ def findItems(filename):
def isDissectorFile(filename):
def is_dissector_file(filename):
p = re.compile('.*packet-.*\.c')
return p.match(filename)
@ -99,7 +99,7 @@ def findDissectorFilesInFolder(folder):
for f in sorted(os.listdir(folder)):
if should_exit:
return
if isDissectorFile(f):
if is_dissector_file(f):
filename = os.path.join(folder, f)
files.append(filename)
return files
@ -160,20 +160,20 @@ elif args.commits:
files = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Will examine dissector files only
files = list(filter(lambda f : isDissectorFile(f), files))
files = list(filter(lambda f : is_dissector_file(f), files))
elif args.open:
# Unstaged changes.
command = ['git', 'diff', '--name-only']
files = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Only interested in dissector files.
files = list(filter(lambda f : isDissectorFile(f), files))
files = list(filter(lambda f : is_dissector_file(f), files))
# Staged changes.
command = ['git', 'diff', '--staged', '--name-only']
files_staged = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Only interested in dissector files.
files_staged = list(filter(lambda f : isDissectorFile(f), files_staged))
files_staged = list(filter(lambda f : is_dissector_file(f), files_staged))
for f in files:
files.append(f)
for f in files_staged:

View File

@ -192,7 +192,7 @@ class Item:
return
n += 1
except:
except Exception:
# Sometimes, macro is used for item type so catch and keep going.
pass
@ -262,7 +262,7 @@ def find_items(filename, check_mask=False, check_label=False):
def isDissectorFile(filename):
def is_dissector_file(filename):
p = re.compile('.*packet-.*\.c')
return p.match(filename)
@ -282,7 +282,7 @@ def findDissectorFilesInFolder(folder, dissector_files=[], recursive=False):
filename = os.path.join(folder, f)
dissector_files.append(filename)
return [x for x in filter(isDissectorFile, dissector_files)]
return [x for x in filter(is_dissector_file, dissector_files)]
@ -332,20 +332,20 @@ elif args.commits:
files = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Will examine dissector files only
files = list(filter(lambda f : isDissectorFile(f), files))
files = list(filter(lambda f : is_dissector_file(f), files))
elif args.open:
# Unstaged changes.
command = ['git', 'diff', '--name-only']
files = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Only interested in dissector files.
files = list(filter(lambda f : isDissectorFile(f), files))
files = list(filter(lambda f : is_dissector_file(f), files))
# Staged changes.
command = ['git', 'diff', '--staged', '--name-only']
files_staged = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Only interested in dissector files.
files_staged = list(filter(lambda f : isDissectorFile(f), files_staged))
files_staged = list(filter(lambda f : is_dissector_file(f), files_staged))
for f in files:
files.append(f)
for f in files_staged:

View File

@ -16,9 +16,10 @@
# --protoshortname DUMB --protoabbrev dumb --license GPL-2.0-or-later --years "2019-2020"
#
import os
import argparse
from datetime import datetime
import os
parser = argparse.ArgumentParser(description='The Wireshark Dissector Generator')
parser.add_argument("--name", help="The author of the dissector", required=True)
@ -30,17 +31,21 @@ parser.add_argument("--license", help="The license for this dissector (please us
parser.add_argument("--years", help="Years of validity for the license. If omitted, the current year will be used")
parser.add_argument("-f", "--force", action='store_true', help="Force overwriting the dissector file if it already exists")
def wsdir():
return os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
def output_file(args):
return os.path.join(wsdir(), "epan/dissectors/packet-" + args.protoabbrev + ".c")
def read_skeleton():
skeletonfile = os.path.join(wsdir(), "doc/packet-PROTOABBREV.c")
print("Reading skeleton file: " + skeletonfile)
return open(skeletonfile).read()
def replace_fields(buffer, args):
print("Replacing fields in skeleton")
output = buffer\
@ -77,6 +82,7 @@ def replace_fields(buffer, args):
return output
def write_dissector(buffer, args):
ofile = output_file(args)
if os.path.isfile(ofile) and not args.force:
@ -84,6 +90,7 @@ def write_dissector(buffer, args):
print("Writing output file: " + ofile)
return open(ofile, "w").write(buffer)
def patch_makefile(args):
cmakefile = os.path.join(wsdir(), "epan/dissectors/CMakeLists.txt")
print("Patching makefile: " + cmakefile)
@ -104,6 +111,7 @@ def patch_makefile(args):
output += line
open(cmakefile, "w").write(output)
def print_header():
print("")
print("**************************************************")
@ -117,12 +125,14 @@ def print_header():
print("**************************************************")
print("")
def print_trailer(args):
print("")
print("The skeleton for the dissector of the " + args.protoshortname + " protocol has been generated.")
print("Please review/extend it to match your specific criterias.")
print("")
if __name__ == '__main__':
print_header()
args = parser.parse_args()

View File

@ -47,7 +47,7 @@ def get_url_lines(url):
exit_msg("URL error fetching {0}: {1}".format(url, err.reason))
except OSError as err:
exit_msg("OS error fetching {0}".format(url, err.strerror))
except:
except Exception:
exit_msg("Unexpected error:", sys.exc_info()[0])
return lines

View File

@ -24,7 +24,7 @@ from textwrap import TextWrapper
try:
from HTMLParser import HTMLParser
from htmlentitydefs import name2codepoint
except: # Python 3
except ImportError: # Python 3
from html.parser import HTMLParser
from html.entities import name2codepoint
unichr = chr # for html entity handling
@ -35,7 +35,7 @@ class TextHTMLParser(HTMLParser):
try:
# Python 3.4
HTMLParser. __init__(self, convert_charrefs=True)
except:
except Exception:
HTMLParser. __init__(self)
# All text, concatenated
self.output_buffer = ''

View File

@ -14,11 +14,9 @@ import sys
import ijson
import operator
import copy
import os
import binascii
import array
import argparse
import subprocess
import string
import random
import math
@ -135,13 +133,13 @@ def read_py_function(name):
ind = len(line) - len(line.lstrip())
if (line.find("def " + name) != -1):
if line.find("def " + name) != -1:
record = True
indent = ind
elif (record == True and indent == ind and len(line) > 1):
elif record and indent == ind and len(line) > 1:
record = False
if (record == True):
if record:
s = s + line
file.close()
@ -227,8 +225,8 @@ def py_generator(d, r, frame_name='frame_raw', frame_position=0):
for k, v in d.items():
# no recursion
if ( k.endswith("_raw") or ("_raw_" in k) ):
if (isinstance(v[1], (list, tuple)) or isinstance(v[2], (list, tuple)) ):
if k.endswith("_raw") or "_raw_" in k:
if isinstance(v[1], (list, tuple)) or isinstance(v[2], (list, tuple)):
#i = 1;
for _v in v:
h = _v[0]
@ -328,7 +326,7 @@ def lsb(x):
def multiply_strings(original_string, new_string, mask):
ret_string = new_string
if mask == None:
if mask is None:
return ret_string
for i in range(0, min(len(original_string), len(new_string), len(mask)), 2):
if mask[i:i + 2] == 'ff':
@ -389,14 +387,14 @@ def rewrite_frame(frame_raw, h, p, l, b, t, frame_amask = None):
# print "{0:08b}".format(M[i]),
# print
j = 0;
j = 0
for i in range(len(_H)):
if (M[i] != 0):
v = H[j] << lsb(M[i])
# print "Debug: {0:08b}".format(v),
_H[i] = (_H[i] & ~M[i]) | (v & M[i])
# print "Debug: " + str(_H[i]),
j = j + 1;
j = j + 1
# for i in range(len(_H)):
# print "{0:08b}".format(_H[i]),
@ -412,8 +410,8 @@ def rewrite_frame(frame_raw, h, p, l, b, t, frame_amask = None):
def assemble_frame(d, frame_time):
input = d['frame_raw'][1]
isFlat = False
linux_cooked_header = False;
while(isFlat == False):
linux_cooked_header = False
while not isFlat:
isFlat = True
_d = d.copy()
for key, val in _d.items():
@ -424,7 +422,7 @@ def assemble_frame(d, frame_time):
t = val[5] # type
if (key == "sll_raw"):
linux_cooked_header = True;
linux_cooked_header = True
# only if the node is not parent
isParent = False
@ -434,7 +432,7 @@ def assemble_frame(d, frame_time):
isFlat = False
break
if (isParent == False and val[0] is not None):
if not isParent and val[0] is not None:
d[val[0]][1] = rewrite_frame(d[val[0]][1], h, p, l, b, t)
del d[key]
@ -554,14 +552,14 @@ else:
anonymize = {}
if args.mask:
for m in args.mask:
if not '_raw' in m:
if '_raw' not in m:
print("Error: The specified fields by -m switch should be raw fields. " + m + " does not have _raw suffix")
sys.exit()
af = AnonymizedField(m, 0)
anonymize[af.field] = af
if args.anonymize:
for a in args.anonymize:
if not '_raw' in a:
if '_raw' not in a:
print("Error: The specified fields by -a switch should be raw fields. " + a + " does not have _raw suffix")
sys.exit()
af = AnonymizedField(a, 1)
@ -577,13 +575,13 @@ if salt is None:
salt = ''.join(random.SystemRandom().choice(string.ascii_letters + string.digits) for _ in range(10))
# Generate pcap
if args.python == False:
if args.python is False:
pcap_out = scapy.PcapWriter(outfile, append=False, sync=False)
# Iterate over packets in JSON
for packet in ijson.items(data_file, "item", buf_size=200000):
_list = []
linux_cooked_header = False;
linux_cooked_header = False
# get flat raw fields into _list
for raw in raw_flat_collector(packet['_source']['layers']):

View File

@ -52,7 +52,7 @@ def open_url(url):
req = urllib.request.Request(url, headers=req_headers)
response = urllib.request.urlopen(req)
body = response.read().decode('UTF-8', 'replace')
except:
except Exception:
exit_msg('Error opening ' + url)
return (body, dict(response.info()))
@ -208,7 +208,7 @@ def main():
try:
tmpl_fd = io.open(template_path, 'r', encoding='UTF-8')
except:
except Exception:
exit_msg("Couldn't open template file for reading ({}) ".format(template_path))
for tmpl_line in tmpl_fd:
tmpl_line = tmpl_line.strip()
@ -265,7 +265,7 @@ def main():
try:
manuf_fd = io.open(manuf_path, 'w', encoding='UTF-8')
except:
except Exception:
exit_msg("Couldn't open manuf file for reading ({}) ".format(manuf_path))
manuf_fd.write(u"# This file was generated by running ./tools/make-manuf.py.\n")

View File

@ -64,19 +64,19 @@ def parse_rows(svc_fd):
try:
sn_pos = headers.index('Service Name')
except:
except Exception:
sn_pos = 0
try:
pn_pos = headers.index('Port Number')
except:
except Exception:
pn_pos = 1
try:
tp_pos = headers.index('Transport Protocol')
except:
except Exception:
tp_pos = 2
try:
desc_pos = headers.index('Description')
except:
except Exception:
desc_pos = 3
services_map = {}
@ -176,7 +176,7 @@ def main(argv):
else:
req = urllib.request.urlopen(svc_url)
svc_fd = codecs.getreader('utf8')(req)
except:
except Exception:
exit_msg('Error opening ' + svc_url)
body = parse_rows(svc_fd)

View File

@ -173,7 +173,7 @@ class NamedList:
def Name(self, new_name = None):
"Get/Set name of list"
if new_name != None:
if new_name is not None:
self.name = new_name
return self.name
@ -183,7 +183,7 @@ class NamedList:
def Null(self):
"Is there no list (different from an empty list)?"
return self.list == None
return self.list is None
def Empty(self):
"It the list empty (different from a null list)?"
@ -253,7 +253,7 @@ class PTVC(NamedList):
ptvc_rec = PTVCRecord(field, length, endianness, var, repeat, req_cond, info_str, code)
if expected_offset == None:
if expected_offset is None:
expected_offset = offset
elif expected_offset == -1:
@ -381,7 +381,7 @@ class PTVCRecord:
req_cond = "NO_REQ_COND"
else:
req_cond = global_req_cond[self.req_cond]
assert req_cond != None
assert req_cond is not None
if isinstance(self.field, struct):
return self.field.ReferenceString(var, repeat, req_cond)
@ -489,7 +489,7 @@ class NCP:
def FunctionCode(self, part=None):
"Returns the function code for this NCP packet."
if part == None:
if part is None:
return self.__code__
elif part == 'high':
if self.HasSubFunction():
@ -685,7 +685,7 @@ class NCP:
realizes that because Python lists are the input and
output."""
if codes == None:
if codes is None:
return self.codes
# Sanity check
@ -729,7 +729,7 @@ def srec(field, endianness=None, **kw):
def _rec(start, length, field, endianness, kw):
# If endianness not explicitly given, use the field's
# default endiannes.
if endianness == None:
if endianness is None:
endianness = field.Endianness()
# Setting a var?
@ -804,7 +804,7 @@ class Type:
return self.ftype
def Display(self, newval=None):
if newval != None:
if newval is not None:
self.disp = newval
return self.disp
@ -6786,7 +6786,7 @@ static expert_field ei_ncp_address_type = EI_INIT;
req_cond_size = "NO_REQ_COND_SIZE"
else:
req_cond_size = pkt.ReqCondSize()
if req_cond_size == None:
if req_cond_size is None:
msg.write("NCP packet %s needs a ReqCondSize*() call\n" \
% (pkt.CName(),))
sys.exit(1)
@ -8626,7 +8626,7 @@ def main():
msg.write("Defined %d NCP types.\n" % (len(packets),))
produce_code()
except:
except Exception:
traceback.print_exc(20, msg)
try:
out_file.close()

View File

@ -13,6 +13,7 @@ import os
import stat
import time
class OutputFile:
TIMER_MAX = 99999.9
@ -28,11 +29,11 @@ class OutputFile:
def PrintPacket(self, timestamp, datalines):
# What do to with the timestamp? I need more data about what
# the netscreen timestamp is, then I can generate one for the text file.
# print "TS:", timestamp.group("time")
# print("TS:", timestamp.group("time"))
try:
timestamp = float(timestamp.group("time"))
except ValueError:
sys.exit("Unable to convert '%s' to floating point." % \
sys.exit("Unable to convert '%s' to floating point." %
(timestamp,))
# Did we wrap around the timeer max?
@ -63,12 +64,14 @@ class OutputFile:
# Blank line
print >> self.fh
# Find a timestamp line
re_timestamp = re.compile(r"^(?P<time>\d+\.\d): [\w/]+\((?P<io>.)\)(:| len=)")
# Find a hex dump line
re_hex_line = re.compile(r"(?P<hex>([0-9a-f]{2} ){1,16})\s+(?P<ascii>.){1,16}")
def run(input_filename, output_filename):
try:
ifh = open(input_filename, "r")
@ -122,11 +125,13 @@ def usage():
print >> sys.stderr, "Usage: netscreen2dump.py netscreen-dump-file new-dump-file"
sys.exit(1)
def main():
if len(sys.argv) != 3:
usage()
run(sys.argv[1], sys.argv[2])
if __name__ == "__main__":
main()

View File

@ -46,7 +46,7 @@ class BackTrace:
# what we expect it should be.
frame_num = int(m.group("num"))
if frame_num != frame_will_be:
sys.exit("Found frame %d instead of %d" % \
sys.exit("Found frame %d instead of %d" %
(frame_num, frame_will_be))
# Find the function name. XXX - need to handle '???'
@ -193,7 +193,7 @@ def run_gdb(*commands):
except IOError, err:
try:
os.unlink(fname)
except:
except Exception:
pass
sys.exit("Cannot close %s: %s" % (fname, err))
@ -207,17 +207,17 @@ def run_gdb(*commands):
except OSError, err:
try:
os.unlink(fname)
except:
except Exception:
pass
sys.exit("Cannot run gdb: %s" % (err,))
# Get gdb's output
result = pipe.readlines()
error = pipe.close()
if error != None:
if error is not None:
try:
os.unlink(fname)
except:
except Exception:
pass
sys.exit("gdb returned an exit value of %s" % (error,))
@ -225,7 +225,7 @@ def run_gdb(*commands):
# Remove the temp file and return the results
try:
os.unlink(fname)
except:
except Exception:
pass
return result
@ -341,7 +341,7 @@ def make_cap_file(pkt_data, lnk_t):
except IOError, err:
try:
os.unlink(fname)
except:
except Exception:
pass
sys.exit("Cannot close %s: %s" % (fname, err))
@ -354,14 +354,14 @@ def make_cap_file(pkt_data, lnk_t):
except OSError, err:
try:
os.unlink(fname)
except:
except Exception:
pass
sys.exit("Cannot run text2pcap: %s" % (err,))
# Remove the temp file
try:
os.unlink(fname)
except:
except Exception:
pass
if retval == 0:
@ -448,7 +448,7 @@ def main():
else:
assert 0
if output_file == None:
if output_file is None:
usage()
if len(args) != 2:

View File

@ -22,6 +22,7 @@ Ported to Python from rdps.c.
import sys
import os.path
def ps_clean_string(raw_str):
ps_str = ''
for c in raw_str:
@ -35,29 +36,35 @@ def ps_clean_string(raw_str):
ps_str += c
return ps_str
def start_code(fd, func):
script_name = os.path.split(__file__)[-1]
fd.write("void print_ps_%s(FILE *fd) {\n" % func)
def write_code(fd, raw_str):
ps_str = ps_clean_string(raw_str)
fd.write("\tfprintf(fd, \"%s\");\n" % ps_str)
def end_code(fd):
fd.write("}\n\n\n")
def exit_err(msg=None, *param):
if msg is not None:
sys.stderr.write(msg % param)
sys.exit(1)
# Globals
STATE_NULL = 'null'
STATE_PREAMBLE = 'preamble'
STATE_FINALE = 'finale'
def main():
state = STATE_NULL;
state = STATE_NULL
if len(sys.argv) != 3:
exit_err("%s: input_file output_file\n", __file__)

View File

@ -51,7 +51,7 @@ def main():
if not 'wireshark-' in tag_cp.stdout:
print('Wireshark release tag not found')
sys.exit(1)
except:
except Exception:
print('`git tag` returned {}:'.format(tag_cp.returncode))
raise
@ -61,7 +61,7 @@ def main():
check=True,
encoding='UTF-8',
stdout=subprocess.PIPE, stderr=subprocess.PIPE).stdout
except:
except Exception:
print('Unable to fetch most recent rc0.')
raise
@ -69,7 +69,7 @@ def main():
ver_m = re.match('v(\d+\.\d+)\.(\d+)rc0.*', cur_rc0)
maj_min = ver_m.group(1)
next_micro = ver_m.group(2)
except:
except Exception:
print('Unable to fetch major.minor version.')
raise
@ -90,7 +90,7 @@ def main():
encoding='UTF-8',
stdout=subprocess.PIPE, stderr=subprocess.PIPE).stdout.strip()
release_tag_l.append(release_tag_fmt.format(maj_min, micro, tag_date))
except:
except Exception:
print('Unable to fetch release tag')
raise