Mirror of https://gitlab.isc.org/isc-projects/bind9 (synced 2025-09-01 06:55:30 +00:00)
Merge branch 'ondrej/add-python-static-analysis-to-gitlab-ci' into 'master'
Add python static analysis to GitLab CI
See merge request isc-projects/bind9!3311

@@ -418,6 +418,37 @@ coccinelle:
     - util/check-cocci
     - if test "$(git status --porcelain | grep -Ev '\?\?' | wc -l)" -gt "0"; then git status --short; exit 1; fi
 
+flake8:
+  <<: *default_triggering_rules
+  <<: *base_image
+  stage: postcheck
+  needs:
+    - job: autoreconf
+      artifacts: true
+  before_script:
+    - pip3 install flake8
+  script:
+    - *configure
+    - flake8 --max-line-length=80 $(git ls-files '*.py' | grep -v 'ans\.py')
+  only:
+    - merge_requests
+
+pylint:
+  <<: *default_triggering_rules
+  <<: *base_image
+  stage: postcheck
+  needs:
+    - job: autoreconf
+      artifacts: true
+  before_script:
+    - pip3 install pylint
+    - PYTHONPATH="$PYTHONPATH:$CI_PROJECT_DIR/bin/python"
+  script:
+    - *configure
+    - pylint --rcfile $CI_PROJECT_DIR/.pylintrc $(git ls-files '*.py' | grep -v 'ans\.py')
+  only:
+    - merge_requests
+
 tarball-create:
   stage: precheck
   <<: *base_image
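
The two new jobs lint every git-tracked *.py file whose path does not contain "ans.py". A rough local equivalent of their script lines, sketched in Python rather than shell (not part of the commit; it assumes flake8 and pylint are installed and that it is run from the repository root):

    import subprocess

    # Collect the same file list the CI jobs use: git-tracked *.py files,
    # minus anything whose path contains "ans.py".
    FILES = [f for f in subprocess.run(["git", "ls-files", "*.py"],
                                       capture_output=True, text=True,
                                       check=True).stdout.split()
             if "ans.py" not in f]

    # Run the same two checkers with the same options as the new jobs;
    # check=True raises CalledProcessError if either checker reports problems.
    subprocess.run(["flake8", "--max-line-length=80", *FILES], check=True)
    subprocess.run(["pylint", "--rcfile", ".pylintrc", *FILES], check=True)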

.pylintrc (new file, 6 lines)
@@ -0,0 +1,6 @@
+[MASTER]
+disable=
+    C0114, # missing-module-docstring
+    C0115, # missing-class-docstring
+    C0116, # missing-function-docstring
+    R0801, # duplicate-code

@@ -9,25 +9,22 @@
 # information regarding copyright ownership.
 ############################################################################
 
-try:
-    import yaml
-except:
-    print("No python yaml module, skipping")
-    exit(1)
-
-import subprocess
-import pprint
 import sys
 
-f = open(sys.argv[1], "r")
-for item in yaml.safe_load_all(f):
-    for key in sys.argv[2:]:
-        try:
-            key = int(key)
-        except: pass
-        try:
-            item = item[key]
-        except:
-            print('error: index not found')
-            exit(1)
-print (item)
+try:
+    import yaml
+except (ModuleNotFoundError, ImportError):
+    print("No python yaml module, skipping")
+    sys.exit(1)
+
+with open(sys.argv[1], "r") as f:
+    for item in yaml.safe_load_all(f):
+        for key in sys.argv[2:]:
+            try:
+                key = int(key)
+            except ValueError:
+                pass
+            if key not in item:
+                print('error: index not found')
+                sys.exit(1)
+print(item)

@@ -9,18 +9,21 @@
 # information regarding copyright ownership.
 ############################################################################
 
+import sys
+
 try:
     import yaml
-except:
+except (ModuleNotFoundError, ImportError):
     print("No python yaml module, skipping")
-    exit(1)
+    sys.exit(1)
 
 import subprocess
 import pprint
-import sys
 
-DNSTAP_READ=sys.argv[1]
-DATAFILE=sys.argv[2]
+DNSTAP_READ = sys.argv[1]
+DATAFILE = sys.argv[2]
+ARGS = [DNSTAP_READ, '-y', DATAFILE]
 
-f = subprocess.Popen([DNSTAP_READ, '-y', DATAFILE], stdout=subprocess.PIPE)
-pprint.pprint([l for l in yaml.load_all(f.stdout)])
+with subprocess.Popen(ARGS, stdout=subprocess.PIPE) as f:
+    for l in yaml.load_all(f.stdout):
+        pprint.pprint(l)

@@ -10,31 +10,30 @@
 ############################################################################
 
 import sys
-sys.path.insert(0, '../../../python')
-from isc import *
+from isc import policy
 
-pp = policy.dnssec_policy()
+PP = policy.dnssec_policy()
 # print the unmodified default and a generated zone policy
-print(pp.named_policy['default'])
-print(pp.named_policy['global'])
-print(pp.policy('example.com'))
+print(PP.named_policy['default'])
+print(PP.named_policy['global'])
+print(PP.policy('example.com'))
 
 if len(sys.argv) > 0:
     for policy_file in sys.argv[1:]:
-        pp.load(policy_file)
+        PP.load(policy_file)
 
     # now print the modified default and generated zone policies
-    print(pp.named_policy['default'])
-    print(pp.policy('example.com'))
-    print(pp.policy('example.org'))
-    print(pp.policy('example.net'))
+    print(PP.named_policy['default'])
+    print(PP.policy('example.com'))
+    print(PP.policy('example.org'))
+    print(PP.policy('example.net'))
 
     # print algorithm policies
-    print(pp.alg_policy['RSASHA1'])
-    print(pp.alg_policy['RSASHA256'])
-    print(pp.alg_policy['ECDSAP256SHA256'])
+    print(PP.alg_policy['RSASHA1'])
+    print(PP.alg_policy['RSASHA256'])
+    print(PP.alg_policy['ECDSAP256SHA256'])
 
     # print another named policy
-    print(pp.named_policy['extra'])
+    print(PP.named_policy['extra'])
 else:
     print("ERROR: Please provide an input file")

@@ -133,7 +133,7 @@ n=`expr $n + 1`
 
 echo_i "checking policy.conf parser ($n)"
 ret=0
-${PYTHON} testpolicy.py policy.sample > policy.out
+PYTHONPATH="../../../python:$PYTHONPATH" ${PYTHON} testpolicy.py policy.sample > policy.out
 $DOS2UNIX policy.out > /dev/null 2>&1
 cmp -s policy.good policy.out || ret=1
 if [ $ret != 0 ]; then echo_i "failed"; fi
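
The test now exports PYTHONPATH instead of having testpolicy.py patch sys.path itself (the removed sys.path.insert() line above). A minimal illustrative sketch of the same idea, assuming python3 on PATH where the real test uses ${PYTHON}:

    import os
    import subprocess

    # Export PYTHONPATH so that "from isc import policy" inside testpolicy.py
    # resolves to the in-tree ../../../python package, as the shell line above does.
    ENV = dict(os.environ)
    ENV["PYTHONPATH"] = "../../../python:" + ENV.get("PYTHONPATH", "")
    subprocess.run(["python3", "testpolicy.py", "policy.sample"], env=ENV, check=True)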

@@ -21,17 +21,21 @@
 
 from xml.etree import cElementTree as ET
 from collections import defaultdict
-from isc import dnskey
+import re
 import ply.yacc as yacc
 import ply.lex as lex
-import re
+from isc import dnskey
 
+
 ############################################################################
 # Translate KASP duration values into seconds
 ############################################################################
-class kasptime:
-    class ktlex:
-        tokens = ( 'P', 'T', 'Y', 'M', 'D', 'H', 'S', 'NUM' )
+class KaspTime:
+    # pylint: disable=invalid-name
+    class KTLex:
+        # pylint: disable=invalid-name
+
+        tokens = ('P', 'T', 'Y', 'M', 'D', 'H', 'S', 'NUM')
 
         t_P = r'(?i)P'
         t_T = r'(?i)T'
@@ -41,12 +45,14 @@ class kasptime:
         t_H = r'(?i)H'
         t_S = r'(?i)S'
 
-        def t_NUM(self, t):
+        @staticmethod
+        def t_NUM(t):
             r'\d+'
             t.value = int(t.value)
             return t
 
-        def t_error(self, t):
+        @staticmethod
+        def t_error(t):
             print("Illegal character '%s'" % t.value[0])
             t.lexer.skip(1)
 
@@ -54,7 +60,7 @@ class kasptime:
             self.lexer = lex.lex(object=self)
 
     def __init__(self):
-        self.lexer = self.ktlex()
+        self.lexer = self.KTLex()
         self.tokens = self.lexer.tokens
         self.parser = yacc.yacc(debug=False, write_tables=False, module=self)
 
@@ -62,35 +68,43 @@ class kasptime:
         self.lexer.lexer.lineno = 0
         return self.parser.parse(text)
 
-    def p_ktime_4(self, p):
+    @staticmethod
+    def p_ktime_4(p):
         "ktime : P periods T times"
         p[0] = p[2] + p[4]
 
-    def p_ktime_3(self, p):
+    @staticmethod
+    def p_ktime_3(p):
         "ktime : P T times"
         p[0] = p[3]
 
-    def p_ktime_2(self, p):
+    @staticmethod
+    def p_ktime_2(p):
         "ktime : P periods"
         p[0] = p[2]
 
-    def p_periods_1(self, p):
+    @staticmethod
+    def p_periods_1(p):
         "periods : period"
         p[0] = p[1]
 
-    def p_periods_2(self, p):
+    @staticmethod
+    def p_periods_2(p):
         "periods : periods period"
         p[0] = p[1] + p[2]
 
-    def p_times_1(self, p):
+    @staticmethod
+    def p_times_1(p):
         "times : time"
         p[0] = p[1]
 
-    def p_times_2(self, p):
+    @staticmethod
+    def p_times_2(p):
         "times : times time"
         p[0] = p[1] + p[2]
 
-    def p_period(self, p):
+    @staticmethod
+    def p_period(p):
         '''period : NUM Y
            | NUM M
            | NUM D'''
@@ -101,7 +115,8 @@ class kasptime:
         elif p[2].lower() == 'd':
             p[0] += int(p[1]) * 86400
 
-    def p_time(self, p):
+    @staticmethod
+    def p_time(p):
         '''time : NUM H
            | NUM M
            | NUM S'''
@@ -112,24 +127,28 @@ class kasptime:
         elif p[2].lower() == 's':
             p[0] = int(p[1])
 
-    def p_error(self, p):
+    @staticmethod
+    def p_error():
         print("Syntax error")
 
+
 ############################################################################
 # Load the contents of a KASP XML file as a python dictionary
 ############################################################################
-class kasp():
+class Kasp():
+    # pylint: disable=invalid-name
+
     @staticmethod
     def _todict(t):
         d = {t.tag: {} if t.attrib else None}
         children = list(t)
         if children:
             dd = defaultdict(list)
-            for dc in map(kasp._todict, children):
+            for dc in map(Kasp._todict, children):
                 for k, v in dc.iteritems():
                     dd[k].append(v)
-            d = {t.tag:
-                 {k:v[0] if len(v) == 1 else v for k, v in dd.iteritems()}}
+            k = {k: v[0] if len(v) == 1 else v for k, v in dd.items()}
+            d = {t.tag: k}
         if t.attrib:
             d[t.tag].update(('@' + k, v) for k, v in t.attrib.iteritems())
         if t.text:
@@ -142,7 +161,7 @@ class kasp():
         return d
 
     def __init__(self, filename):
-        self._dict = kasp._todict(ET.parse(filename).getroot())
+        self._dict = Kasp._todict(ET.parse(filename).getroot())
 
     def __getitem__(self, key):
         return self._dict[key]
@@ -156,52 +175,54 @@ class kasp():
     def __repr__(self):
         return repr(self._dict)
 
+
 ############################################################################
 # Load the contents of a KASP XML file as a python dictionary
 ############################################################################
 if __name__ == "__main__":
-    from pprint import *
     import sys
 
     if len(sys.argv) < 2:
         print("Usage: kasp2policy <filename>")
-        exit(1)
+        sys.exit(1)
 
+    KINFO = Kasp(sys.argv[1])
     try:
-        kinfo = kasp(sys.argv[1])
-    except:
+        KINFO = Kasp(sys.argv[1])
+    except FileNotFoundError:
         print("%s: unable to load KASP file '%s'" % (sys.argv[0], sys.argv[1]))
-        exit(1)
+        sys.exit(1)
 
-    kt = kasptime()
-    first = True
+    KT = KaspTime()
+    FIRST = True
 
-    for p in kinfo['KASP']['Policy']:
-        if not p['@name'] or not p['Keys']: continue
-        if not first:
+    for policy in KINFO['KASP']['Policy']:
+        if not policy['@name'] or not policy['Keys']:
+            continue
+        if not FIRST:
             print("")
-        first = False
-        if p['Description']:
-            d = p['Description'].strip()
-            print("# %s" % re.sub(r"\n\s*", "\n# ", d))
-        print("policy %s {" % p['@name'])
-        ksk = p['Keys']['KSK']
-        zsk = p['Keys']['ZSK']
+        FIRST = False
+        if policy['Description']:
+            desc = policy['Description'].strip()
+            print("# %s" % re.sub(r"\n\s*", "\n# ", desc))
+        print("policy %s {" % policy['@name'])
+        ksk = policy['Keys']['KSK']
+        zsk = policy['Keys']['ZSK']
         kalg = ksk['Algorithm']
         zalg = zsk['Algorithm']
         algnum = kalg['#text'] or zalg['#text']
         if algnum:
             print("\talgorithm %s;" % dnskey.algstr(int(algnum)))
-        if p['Keys']['TTL']:
-            print("\tkeyttl %d;" % kt.parse(p['Keys']['TTL']))
+        if policy['Keys']['TTL']:
+            print("\tkeyttl %d;" % KT.parse(policy['Keys']['TTL']))
         if kalg['@length']:
             print("\tkey-size ksk %d;" % int(kalg['@length']))
         if zalg['@length']:
             print("\tkey-size zsk %d;" % int(zalg['@length']))
         if ksk['Lifetime']:
-            print("\troll-period ksk %d;" % kt.parse(ksk['Lifetime']))
+            print("\troll-period ksk %d;" % KT.parse(ksk['Lifetime']))
         if zsk['Lifetime']:
-            print("\troll-period zsk %d;" % kt.parse(zsk['Lifetime']))
+            print("\troll-period zsk %d;" % KT.parse(zsk['Lifetime']))
         if ksk['Standby']:
             print("\tstandby ksk %d;" % int(ksk['Standby']))
         if zsk['Standby']:
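
For orientation, the grammar above reduces a KASP duration string such as "P2DT30S" to seconds. The following standalone sketch is deliberately not the ply-based parser from this file; it only mirrors the reduction for the two multipliers visible in these hunks (86400 for 'D', the bare integer for 'S'):

    import re

    def duration_to_seconds(text):
        # Split the date part ("P...") from the time part ("T...") and apply
        # the same multipliers the p_period/p_time rules above use for D and S.
        date, _, time = text.upper().partition("T")
        days = sum(int(n) * 86400 for n in re.findall(r"(\d+)D", date))
        secs = sum(int(n) for n in re.findall(r"(\d+)S", time))
        return days + secs

    assert duration_to_seconds("P2DT30S") == 2 * 86400 + 30  # 172830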

@@ -15,16 +15,18 @@
 # "domain.example" can be represented in a catalog zone called
 # "catalog.example" by adding the following record:
 #
-# 5960775ba382e7a4e09263fc06e7c00569b6a05c.zones.catalog.example. IN PTR domain.example.
+# 5960775ba382e7a4e09263fc06e7c00569b6a05c.zones.catalog.example. \
+#     IN PTR domain.example.
 #
 # The label "5960775ba382e7a4e09263fc06e7c00569b6a05c" is the output of
 # this script when run with the argument "domain.example".
 
 import sys
-import dns.name
 import hashlib
+import dns.name
 
 if len(sys.argv) < 2:
     print("Usage: %s name" % sys.argv[0])
 
-print (hashlib.sha1(dns.name.from_text(sys.argv[1]).to_wire()).hexdigest())
+NAME = dns.name.from_text(sys.argv[1]).to_wire()
+print(hashlib.sha1(NAME).hexdigest())
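
The member-zone label described in the comment block can be reproduced in a few lines; a minimal sketch, assuming the dnspython package is installed (it is what the modified script itself imports):

    import hashlib
    import dns.name

    # SHA-1 over the wire-format owner name yields the catalog-zone member
    # label; for "domain.example" this is the value quoted in the comment above.
    WIRE = dns.name.from_text("domain.example").to_wire()
    print(hashlib.sha1(WIRE).hexdigest())  # 5960775ba382e7a4e09263fc06e7c00569b6a05c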

@@ -1,4 +1,5 @@
 ./.gitlab-ci.yml X 2018,2019,2020
+./.pylintrc X 2020
 ./.uncrustify.cfg X 2018,2019,2020
 ./CHANGES X 2000,2001,2002,2003,2004,2005,2006,2007,2008,2009,2010,2011,2012,2013,2014,2015,2016,2017,2018,2019,2020
 ./CODE_OF_CONDUCT X 2019,2020

@@ -10,10 +10,21 @@
 # information regarding copyright ownership.
 ############################################################################
 
-import sys, os, os.path, re
+"""Parse the ThreadSanizer reports, unify them and put them into unique dirs."""
+
+import sys
+import os
+import os.path
+import re
 from hashlib import sha256
 
+
 class State:
+    """Class that holds state of the TSAN parser."""
+
+    # pylint: disable=too-many-instance-attributes
+    # pylint: disable=too-few-public-methods
+
     inside = False
     block = ""
     last_line = None
@@ -25,10 +36,12 @@ class State:
     pointers = {}
     p_index = 1
 
-    def init(self):
+    def __init__(self):
         self.reset()
 
     def reset(self):
+        """Reset the object to initial state"""
+
         self.inside = False
         self.block = ""
 
@@ -41,72 +54,73 @@ class State:
         self.t_index = 1
         self.p_index = 1
 
-top = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+TOP = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
 
-out = os.path.join(top, "tsan")
+OUT = os.path.join(TOP, "tsan")
 
-if not os.path.isdir(out):
-    os.mkdir(out)
+if not os.path.isdir(OUT):
+    os.mkdir(OUT)
 
 # Regular Expressions
-mutex = re.compile(r"M\d+")
-thread = re.compile(r"T\d+")
-stack = re.compile(r"\s\(\S+\+0x\S+\)")
-pointer = re.compile(r"0x[0-9a-f]+")
-pid = re.compile(r"\(pid=\d+,?\)")
-tid = re.compile(r"tid=\d+,?\s*")
-worker = re.compile(r"\s+'(isc-worker|isc-net-)\d+'")
-path = re.compile(top + "/")
+MUTEX = re.compile(r"M\d+")
+THREAD = re.compile(r"T\d+")
+STACK = re.compile(r"\s\(\S+\+0x\S+\)")
+POINTER = re.compile(r"0x[0-9a-f]+")
+PID = re.compile(r"\(pid=\d+,?\)")
+TID = re.compile(r"tid=\d+,?\s*")
+WORKER = re.compile(r"\s+'(isc-worker|isc-net-)\d+'")
+PATH = re.compile(TOP + "/")
 
-s = State()
+S = State()
 
 with open(sys.argv[1], "r", encoding='utf-8') as f:
-    lines = f.readlines()
-    for line in lines:
+    for line in f.readlines():
         if line == "==================\n":
-            if not s.inside:
-                s.inside = True
+            if not S.inside:
+                S.inside = True
             else:
-                dname = os.path.join(out, sha256(s.last_line.encode('utf-8')).hexdigest())
-                if not os.path.isdir(dname):
-                    os.mkdir(dname)
-                fname = os.path.join(dname, sha256(s.block.encode('utf-8')).hexdigest() + ".tsan")
-                if not os.path.isfile(fname):
-                    with open(fname, "w", encoding='utf-8') as w:
-                        w.write(s.block)
-                s.reset()
+                dname = sha256(S.last_line.encode('utf-8')).hexdigest()
+                dname = os.path.join(OUT, dname)
+                if not os.path.isdir(dname):
+                    os.mkdir(dname)
+                fname = sha256(S.block.encode('utf-8')).hexdigest() + ".tsan"
+                fname = os.path.join(dname, fname)
+                if not os.path.isfile(fname):
+                    with open(fname, "w", encoding='utf-8') as w:
+                        w.write(S.block)
+                S.reset()
         else:
-            for m in mutex.finditer(line):
+            for m in MUTEX.finditer(line):
                 k = m.group()
-                if k not in s.mutexes:
-                    s.mutexes[k] = s.m_index
-                    s.m_index += 1
-            for m in thread.finditer(line):
+                if k not in S.mutexes:
+                    S.mutexes[k] = S.m_index
+                    S.m_index += 1
+            for m in THREAD.finditer(line):
                 k = m.group()
-                if k not in s.threads:
-                    s.threads[k] = s.t_index
-                    s.t_index += 1
-            for m in pointer.finditer(line):
+                if k not in S.threads:
+                    S.threads[k] = S.t_index
+                    S.t_index += 1
+            for m in POINTER.finditer(line):
                 k = m.group()
-                if k not in s.pointers:
-                    s.pointers[k] = s.p_index
-                    s.p_index += 1
-            for k, v in s.mutexes.items():
+                if k not in S.pointers:
+                    S.pointers[k] = S.p_index
+                    S.p_index += 1
+            for k, v in S.mutexes.items():
                 r = re.compile(k)
                 line = r.sub("M%s" % v, line)
-            for k, v in s.threads.items():
+            for k, v in S.threads.items():
                 r = re.compile(k)
                 line = r.sub("T%s" % v, line)
-            for k, v in s.pointers.items():
+            for k, v in S.pointers.items():
                 r = re.compile(k)
                 line = r.sub("0x%s" % str(v).zfill(12), line)
 
-            line = stack.sub("", line)
-            line = pid.sub("", line)
-            line = tid.sub("", line)
-            line = worker.sub("", line)
-            line = path.sub("", line)
+            line = STACK.sub("", line)
+            line = PID.sub("", line)
+            line = TID.sub("", line)
+            line = WORKER.sub("", line)
+            line = PATH.sub("", line)
 
-            s.block += line
-            s.last_line = line
+            S.block += line
+            S.last_line = line
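
The deduplication scheme above buckets reports by the hash of the last normalized line of a block (typically ThreadSanitizer's SUMMARY: line) and names each file after the hash of the whole normalized block. A small illustration with an invented sample line, not taken from a real report:

    from hashlib import sha256

    # Reports whose normalized last line hashes identically share a directory ...
    SUMMARY = "SUMMARY: ThreadSanitizer: data race in some_function\n"
    print(sha256(SUMMARY.encode("utf-8")).hexdigest())          # directory name

    # ... while the full normalized block distinguishes the files inside it.
    BLOCK = "WARNING: ThreadSanitizer: data race\n" + SUMMARY
    print(sha256(BLOCK.encode("utf-8")).hexdigest() + ".tsan")  # file name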