2
0
mirror of https://gitlab.isc.org/isc-projects/bind9 synced 2025-08-30 14:07:59 +00:00

Merge branch 'mnowak/qa-tools-update' into 'main'

Python formatting updates

See merge request isc-projects/bind9!7551
This commit is contained in:
Michal Nowak
2023-02-17 15:00:07 +00:00
12 changed files with 7 additions and 22 deletions

View File

@@ -265,7 +265,7 @@ def create_response(msg):
if wantsigs:
r.answer.append(sigs[-1])
else:
for (i, sig) in rrs:
for i, sig in rrs:
if sig and not wantsigs:
continue
elif sig:

View File

@@ -35,6 +35,7 @@ from dns.rdataclass import *
from dns.rdatatype import *
from dns.tsig import *
# Log query to file
def logquery(type, qname):
with open("qlog", "a") as f:
@@ -54,6 +55,7 @@ except:
dopass2 = False
############################################################################
#
# This server will serve valid and spoofed answers. A spoofed answer will

View File

@@ -25,6 +25,7 @@ from dns.rdataclass import *
from dns.rcode import *
from dns.name import *
# Log query to file
def logquery(type, qname):
with open("qlog", "a") as f:

View File

@@ -26,6 +26,7 @@ from dns.rdataclass import *
from dns.rcode import *
from dns.name import *
# Log query to file
def logquery(type, qname):
with open("qlog", "a") as f:

View File

@@ -233,7 +233,6 @@ def main():
# later by run.sh
print("export ALGORITHM_SET=error")
raise
else:
for name, value in algs_env.items():
print(f"export {name}={value}")

View File

@@ -35,7 +35,6 @@ class RawFormatHeader(dict):
def test_unsigned_serial_number():
"""
Check whether all signed zone files in the "ns8" subdirectory contain the
serial number of the unsigned version of the zone in the raw-format header.

View File

@@ -70,7 +70,6 @@ def do_work(named_proc, resolver, rndc_cmd, kill_method, n_workers, n_queries):
# We're going to execute queries in parallel by means of a thread pool.
# dnspython functions block, so we need to circumvent that.
with ThreadPoolExecutor(n_workers + 1) as executor:
# Helper dict, where keys=Future objects and values are tags used
# to process results later.
futures = {}

View File

@@ -58,7 +58,6 @@ def check_manykeys(name, zone=None):
def zone_mtime(zonedir, name):
try:
si = os.stat(os.path.join(zonedir, "{}.db".format(name)))
except FileNotFoundError:
@@ -70,7 +69,6 @@ def zone_mtime(zonedir, name):
def test_zone_timers_primary(fetch_zones, load_timers, **kwargs):
statsip = kwargs["statsip"]
statsport = kwargs["statsport"]
zonedir = kwargs["zonedir"]
@@ -84,7 +82,6 @@ def test_zone_timers_primary(fetch_zones, load_timers, **kwargs):
def test_zone_timers_secondary(fetch_zones, load_timers, **kwargs):
statsip = kwargs["statsip"]
statsport = kwargs["statsport"]
zonedir = kwargs["zonedir"]
@@ -98,7 +95,6 @@ def test_zone_timers_secondary(fetch_zones, load_timers, **kwargs):
def test_zone_with_many_keys(fetch_zones, load_zone, **kwargs):
statsip = kwargs["statsip"]
statsport = kwargs["statsport"]

View File

@@ -28,7 +28,6 @@ def create_msg(qname, qtype):
def udp_query(ip, port, msg):
ans = dns.query.udp(msg, ip, TIMEOUT, port=port)
assert ans.rcode() == dns.rcode.NOERROR
@@ -36,7 +35,6 @@ def udp_query(ip, port, msg):
def tcp_query(ip, port, msg):
ans = dns.query.tcp(msg, ip, TIMEOUT, port=port)
assert ans.rcode() == dns.rcode.NOERROR
@@ -90,7 +88,6 @@ def check_traffic(data, expected):
def test_traffic(fetch_traffic, **kwargs):
statsip = kwargs["statsip"]
statsport = kwargs["statsport"]
port = kwargs["port"]

View File

@@ -24,7 +24,6 @@ requests = pytest.importorskip("requests")
# JSON helper functions
def fetch_zones_json(statsip, statsport):
r = requests.get(
"http://{}:{}/json/v1/zones".format(statsip, statsport), timeout=600
)
@@ -35,7 +34,6 @@ def fetch_zones_json(statsip, statsport):
def fetch_traffic_json(statsip, statsport):
r = requests.get(
"http://{}:{}/json/v1/traffic".format(statsip, statsport), timeout=600
)
@@ -47,7 +45,6 @@ def fetch_traffic_json(statsip, statsport):
def load_timers_json(zone, primary=True):
name = zone["name"]
# Check if the primary zone timer exists

View File

@@ -25,7 +25,6 @@ requests = pytest.importorskip("requests")
# XML helper functions
def fetch_zones_xml(statsip, statsport):
r = requests.get(
"http://{}:{}/xml/v3/zones".format(statsip, statsport), timeout=600
)
@@ -75,7 +74,6 @@ def fetch_traffic_xml(statsip, statsport):
def load_timers_xml(zone, primary=True):
name = zone.attrib["name"]
loaded_el = zone.find("loaded")

View File

@@ -44,7 +44,6 @@ def create_socket(host, port):
def test_tcp_garbage(named_port):
with create_socket("10.53.0.7", named_port) as sock:
msg = create_msg("a.example.", "A")
(sbytes, stime) = dns.query.send_tcp(sock, msg, timeout())
(response, rtime) = dns.query.receive_tcp(sock, timeout())
@@ -67,7 +66,6 @@ def test_tcp_garbage(named_port):
def test_tcp_garbage_response(named_port):
with create_socket("10.53.0.7", named_port) as sock:
msg = create_msg("a.example.", "A")
(sbytes, stime) = dns.query.send_tcp(sock, msg, timeout())
(response, rtime) = dns.query.receive_tcp(sock, timeout())
@@ -91,7 +89,6 @@ def test_tcp_garbage_response(named_port):
# Regression test for CVE-2022-0396
def test_close_wait(named_port):
with create_socket("10.53.0.7", named_port) as sock:
msg = create_msg("a.example.", "A")
(sbytes, stime) = dns.query.send_tcp(sock, msg, timeout())
(response, rtime) = dns.query.receive_tcp(sock, timeout())
@@ -114,7 +111,6 @@ def test_close_wait(named_port):
# request. If it gets stuck in CLOSE_WAIT state, there is no connection
# available for the query below and it will time out.
with create_socket("10.53.0.7", named_port) as sock:
msg = create_msg("a.example.", "A")
(sbytes, stime) = dns.query.send_tcp(sock, msg, timeout())
(response, rtime) = dns.query.receive_tcp(sock, timeout())