Commit f011d004 authored by Petr Špaček's avatar Petr Špaček

Merge branch 'pylint-fixes' into 'master'

ci: pylint, flake8, mypy

See merge request !138
parents 4e6e22cc 1fd25156
Pipeline #41632 passed with stage
in 1 minute and 50 seconds
......@@ -5,66 +5,48 @@ variables:
stages:
- test
test:augeas:
.test: &test
stage: test
script:
- augparse pydnstest/deckard.aug
tags:
- docker
- linux
- amd64
test:pep8:
stage: test
test:augeas:
<<: *test
script:
- cp ci/common.sh /tmp
- cp ci/compare-pep8.sh /tmp
- /tmp/compare-pep8.sh
tags:
- docker
- linux
- amd64
- augparse pydnstest/deckard.aug
test:flake8:
<<: *test
script:
- python3 -m flake8 --max-line-length=100 . && echo "OK, no flake8 errors detected"
test:mypy:
<<: *test
script:
- ci/mypy-run.sh && echo "OK, no mypy error detected"
test:pylint:
stage: test
<<: *test
script:
- cp ci/common.sh /tmp
- cp ci/compare-pylint.sh /tmp
- /tmp/compare-pylint.sh
artifacts:
when: on_failure
expire_in: '1 hour'
paths:
- base.log
- head.log
tags:
- docker
- linux
- amd64
- ci/pylint-run.sh
test:rplint:
stage: test
<<: *test
script:
- cp ci/common.sh /tmp
- cp ci/compare-rplint.sh /tmp
- /tmp/compare-rplint.sh
tags:
- docker
- linux
- amd64
test:unittests:
stage: test
<<: *test
script:
- make check
tags:
- docker
- linux
- amd64
# changes in Deckard itself must not change result of tests
test:comparative:kresd:
stage: test
<<: *test
script:
# test kresd binary
- git clone --depth=1 https://gitlab.labs.nic.cz/knot/knot-resolver.git /tmp/kresd-local-build
......@@ -84,15 +66,11 @@ test:comparative:kresd:
- modified_tests
- base.xml
- head.xml
tags:
- docker
- linux
- amd64
# Run all tests on the latest kresd version to ensure that we do not push
# tests which do not work on the latest kresd; that would break kresd CI.
test:latest:kresd:
stage: test
<<: *test
script:
- git clone --depth=1 https://gitlab.labs.nic.cz/knot/knot-resolver.git kresd-local-build
- GIT_DIR=$(pwd)/kresd-local-build/.git git log -1
......@@ -105,16 +83,12 @@ test:latest:kresd:
expire_in: 1 week
paths:
- tmpdeckard*
tags:
- docker
- linux
- amd64
# sanity check that Unbound under Deckard still works
# I've selected the only tests which are working
# on kresd and Unbound 1.5.8 as well as 1.6.0
test:sanity:unbound:
stage: test
<<: *test
script:
- TMPDIR=$(pwd) ./unbound_run.sh -k sets/resolver/iter_hint_lame.rpl
- TMPDIR=$(pwd) ./unbound_run.sh -k sets/resolver/iter_lame_root.rpl
......@@ -126,16 +100,12 @@ test:sanity:unbound:
expire_in: 1 week
paths:
- tmpdeckard*
tags:
- docker
- linux
- amd64
# sanity check that PowerDNS recursor under Deckard still works
# I've selected couple tests which are working
# on kresd and PowerDNS recursor 4.0.0~alpha2 as well as 4.0.4
test:sanity:pdnsrecursor:
stage: test
<<: *test
script:
- TMPDIR=$(pwd) ./pdns_run.sh -k sets/resolver/iter_recurse.rpl
- TMPDIR=$(pwd) ./pdns_run.sh -k sets/resolver/iter_tcbit.rpl
......@@ -144,7 +114,3 @@ test:sanity:pdnsrecursor:
expire_in: 1 week
paths:
- tmpdeckard*
tags:
- docker
- linux
- amd64
#!/bin/bash
# Check only the lines touched by this merge request against PEP8.
# Pre-existing violations elsewhere in the tree do not fail the job.
set -o nounset -o xtrace -o errexit
# common.sh presumably defines MERGEBASE, HEAD and PYTHON -- confirm there.
source "$(dirname "$0")/common.sh"
# --diff restricts checking to the changed lines of the diff;
# W503 (line break before binary operator) is deliberately ignored.
git diff "${MERGEBASE}..${HEAD}" | ${PYTHON} -m pep8 --ignore=W503 --diff --show-source --max-line-length=100 && echo "OK, no PEP8 errors detected"
#!/bin/bash
# Compare pylint results between the merge base and the candidate version.
# Fails if the candidate introduces new pylint messages or critical errors.
set -o nounset -o xtrace -o errexit
# common.sh presumably defines MERGEBASE, HEAD and PYTHON -- confirm there.
source "$(dirname "$0")/common.sh"
# Collect Python sources: skip .git; print package roots (directories with
# __init__.py) and prune below them; otherwise pick *.py files and scripts
# with a python shebang.
# NOTE(review): "-path ./contrib -o" has no -prune, so find still descends
# into contrib and lints its files -- looks unintended, verify.
PYFILES=$(find . \
-path ./.git -prune -o \
-path ./contrib -o \
-type d -exec test -e '{}/__init__.py' \; -print -prune -o \
-name '*.py' -print -o \
-type f -exec grep -qsm1 '^#!.*\bpython' '{}' \; -print)
: check if version under test does not produce critical errors
# -E: errors-only pass; any critical error aborts the job via errexit.
${PYTHON} -m pylint -j $(nproc) -E ${PYFILES}
: no critical errors, compare score between versions
# Drop pylint's persistent per-user stats so the comparison starts clean.
rm -rf ~/.pylint.d
: get test results from common ancestor with master branch
git checkout --force --detach "${MERGEBASE}"
git clean -xdf
# set of Python files might have changed during checkout
PYFILES=$(find . \
-type d -exec test -e '{}/__init__.py' \; -print -prune -o \
-name '*.py' -print -o \
-type f -exec grep -qsm1 '^#!.*\bpython' '{}' \; -print)
# "|| : ..." tolerates a non-zero exit: the base version may already be dirty.
${PYTHON} -m pylint -j $(nproc) ${PYFILES} &> /tmp/base.log || : old version is not clear
LOGS[0]="/tmp/base.log"
echo ==================== merge base ====================
cat /tmp/base.log
echo ==================== merge base end ====================
: get test results from version under test
git checkout --force --detach "${HEAD}"
git clean -xdf
${PYTHON} -m pylint -j $(nproc) ${PYFILES} &> /tmp/head.log || : version under test is not clear
LOGS[1]="/tmp/head.log"
echo ==================== candidate version ====================
cat /tmp/head.log
echo ==================== candidate end ====================
: check if candidate version produced more messages than the base
# pylint's summary table marks increased counts with a trailing '+';
# any such row in head.log means new messages were introduced.
grep '^|\(convention\|refactor\|warning\|error\).*+' /tmp/head.log \
&& { \
	echo "New pylint message detected: Use diff base.log head.log and go fix it!"; \
	exit 1; \
} \
|| echo "OK, no new pylint messages detected"
......@@ -22,6 +22,7 @@ def parse_junit_xml(filename):
return results
new = sys.argv[1]
old = sys.argv[2]
modified_tests = [line.strip() for line in open(sys.argv[3]).readlines()]
......
#!/usr/bin/env bash
# Run mypy static type checks over all Python files in the repository.
# Any mypy error fails the job (errexit is in effect).
set -o nounset -o errexit
# common.sh presumably defines PYTHON -- confirm there.
source "$(dirname "$0")/common.sh"
# Collect Python sources:
#  - skip .git and contrib entirely (-prune stops descent; the bare
#    "-path ./contrib -o" form would still descend into contrib),
#  - print package roots (directories containing __init__.py) and prune
#    below them so packages are passed to mypy as a whole,
#  - otherwise pick up *.py files and scripts with a python shebang.
PYFILES=$(find . \
    -path ./.git -prune -o \
    -path ./contrib -prune -o \
    -type d -exec test -e '{}/__init__.py' \; -print -prune -o \
    -name '*.py' -print -o \
    -type f -exec grep -qsm1 '^#!.*\bpython' '{}' \; -print)
# --ignore-missing-imports: third-party type stubs are not installed in CI.
${PYTHON} -m mypy --ignore-missing-imports ${PYFILES}
#!/bin/bash
# Run pylint over all Python files in the repository using the project
# rcfile. Any reported message fails the job (errexit is in effect).
set -o nounset -o errexit
# common.sh presumably defines PYTHON -- confirm there.
source "$(dirname "$0")/common.sh"
# Collect Python sources:
#  - skip .git and contrib entirely (-prune stops descent; the bare
#    "-path ./contrib -o" form would still descend into contrib),
#  - print package roots (directories containing __init__.py) and prune
#    below them so packages are passed to pylint as a whole,
#  - otherwise pick up *.py files and scripts with a python shebang.
PYFILES=$(find . \
    -path ./.git -prune -o \
    -path ./contrib -prune -o \
    -type d -exec test -e '{}/__init__.py' \; -print -prune -o \
    -name '*.py' -print -o \
    -type f -exec grep -qsm1 '^#!.*\bpython' '{}' \; -print)
# -j 0: let pylint autodetect the number of parallel workers.
${PYTHON} -m pylint -j 0 --rcfile pylintrc ${PYFILES}
......@@ -17,7 +17,7 @@ def ordered_load(stream, Loader=yaml.Loader, object_pairs_hook=OrderedDict):
We need to ensure that it is ordered in the same way.
See https://github.com/pytest-dev/pytest/issues/1075.
"""
class OrderedLoader(Loader):
class OrderedLoader(Loader): # pylint: disable=too-many-ancestors
pass
def construct_mapping(loader, node):
......@@ -46,7 +46,7 @@ def config_sanity_check(config_dict, config_name):
% (cfg['name'], config_name))
for additional in cfg["additional"]:
assert type(additional) is str,\
assert isinstance(additional, str),\
"All additional arguments in yaml should be strings. (%s, %s)"\
% (cfg['name'], config_name)
......@@ -61,6 +61,7 @@ def get_qmin_config(path):
return True
if re.search(r"^\s*query-minimization:\s*(off|no)", line):
return False
return None
def scenarios(paths, configs):
......
......@@ -26,7 +26,7 @@ class DeckardUnderLoadError(Exception):
pass
class IfaceManager(object):
class IfaceManager:
"""
Network interface allocation manager
......@@ -250,7 +250,7 @@ def run_daemon(cfg, environ):
logging.getLogger('deckard.daemon.%s.argv' % name).debug('%s', args)
try:
proc = subprocess.Popen(args, stdout=daemon_log_file, stderr=subprocess.STDOUT,
cwd=cfg['dir'], preexec_fn=os.setsid, env=environ)
cwd=cfg['dir'], env=environ, start_new_session=True)
except subprocess.CalledProcessError:
logger = logging.getLogger('deckard.daemon_log.%s' % name)
logger.exception("Can't start '%s'", args)
......@@ -304,7 +304,7 @@ def process_file(path, qmin, prog_cfgs):
'test working directory %s', tmpdir)
else:
shutil.rmtree(tmpdir)
except:
except Exception:
logging.getLogger('deckard.hint').info(
'test failed, inspect working directory %s', tmpdir)
raise
......@@ -321,25 +321,25 @@ def setup_daemons(tmpdir, prog_cfgs, template_ctx, ta_files):
daemons.append({'proc': daemon_proc, 'cfg': prog_cfg})
try:
conncheck_daemon(daemon_proc, prog_cfg, template_ctx['_SOCKET_FAMILY'])
except:
except: # noqa -- bare except might be valid here?
daemon_proc.terminate()
raise
return daemons
def check_for_icmp():
""" Checks Deckards's PCAP for ICMP packets """
path = os.environ["SOCKET_WRAPPER_PCAP_FILE"]
with open(path, "rb") as f:
pcap = dpkt.pcap.Reader(f)
for _, packet in pcap:
try:
ip = dpkt.ip.IP(packet)
except dpkt.dpkt.UnpackError:
ip = dpkt.ip6.IP6(packet)
if isinstance(ip.data, dpkt.icmp.ICMP) or isinstance(ip.data, dpkt.icmp6.ICMP6):
return True
return False
""" Checks Deckards's PCAP for ICMP packets """
path = os.environ["SOCKET_WRAPPER_PCAP_FILE"]
with open(path, "rb") as f:
pcap = dpkt.pcap.Reader(f)
for _, packet in pcap:
try:
ip = dpkt.ip.IP(packet)
except dpkt.dpkt.UnpackError:
ip = dpkt.ip6.IP6(packet)
if isinstance(ip.data, (dpkt.icmp.ICMP, dpkt.icmp6.ICMP6)):
return True
return False
def run_testcase(daemons, case, root_addr, addr_family, prog_under_test_ip):
......
import logging
import os
import pytest
import subprocess
import sys
import pytest
import deckard
......@@ -24,6 +25,7 @@ def check_platform():
if sys.platform == 'windows':
pytest.exit('Not supported at all on Windows')
# Suppress extensive Augeas logging
logging.getLogger("augeas").setLevel(logging.ERROR)
......
......@@ -6,7 +6,6 @@ import posixpath
import logging
import os
import collections
import sys
from augeas import Augeas
......@@ -37,7 +36,7 @@ def join(*paths):
return posixpath.normpath(new_path)
class AugeasWrapper(object):
class AugeasWrapper:
"""python-augeas higher-level wrapper.
Load single augeas lens and configuration file.
......@@ -155,6 +154,14 @@ class AugeasNode(collections.MutableMapping):
log.debug('tree get: %s = %s', self._path, value)
return value
@value.setter
def value(self, value):
"""
set value of this node in Augeas tree
"""
log.debug('tree set: %s = %s', self._path, value)
self._aug.set(self._path, value)
@property
def span(self):
if self._span is None:
......@@ -165,14 +172,6 @@ class AugeasNode(collections.MutableMapping):
def char(self):
return self._aug.span(self._path)[5]
@value.setter
def value(self, value):
"""
set value of this node in Augeas tree
"""
log.debug('tree set: %s = %s', self._path, value)
self._aug.set(self._path, value)
def __len__(self):
"""
number of items matching this path
......
"""matchpart is used to compare two DNS messages using a single criterion"""
from typing import ( # noqa
Any, Hashable, Sequence, Tuple, Union)
import dns.edns
import dns.rcode
import dns.set
from typing import ( # noqa
Any, Hashable, Sequence, Tuple, Union)
MismatchValue = Union[str, Sequence[Any]]
......
from __future__ import absolute_import
import binascii
import calendar
import errno
......@@ -85,8 +83,10 @@ def sendto_msg(stream, message, addr=None):
raise
def replay_rrs(rrs, nqueries, destination, args=[]):
def replay_rrs(rrs, nqueries, destination, args=None):
""" Replay list of queries and report statistics. """
if args is None:
args = []
navail, queries = len(rrs), []
chunksize = 16
for i in range(nqueries if 'RAND' in args else navail):
......@@ -321,7 +321,7 @@ class Entry:
pass
if len(rcodes) > 1:
raise ValueError("Parse failed, too many rcode values.", rcodes)
if len(rcodes) == 0:
if not rcodes:
return None
return rcodes[0]
......@@ -339,7 +339,7 @@ class Entry:
pass
if len(opcodes) > 1:
raise ValueError("Parse failed, too many opcode values.")
if len(opcodes) == 0:
if not opcodes:
return None
return opcodes[0]
......@@ -446,7 +446,7 @@ class Range:
address = node["/address"].value
self.addresses = {address} if address is not None else set()
self.addresses |= set([a.value for a in node.match("/address/*")])
self.addresses |= {a.value for a in node.match("/address/*")}
self.stored = [Entry(n) for n in node.match("/entry")]
self.args = {}
self.received = 0
......@@ -467,9 +467,9 @@ class Range:
txt += 'RANGE_END\n\n'
return txt
def eligible(self, id, address):
def eligible(self, ident, address):
""" Return true if this range is eligible for fetching reply. """
if self.a <= id <= self.b:
if self.a <= ident <= self.b:
return (None is address
or set() == self.addresses
or address in self.addresses)
......@@ -577,9 +577,9 @@ class Step:
# Parse QUERY-specific parameters
choice, tcp, source = None, False, None
return self.__query(ctx, tcp=tcp, choice=choice, source=source)
elif self.type == 'CHECK_OUT_QUERY':
elif self.type == 'CHECK_OUT_QUERY': # ignore
self.log.info('')
pass # Ignore
return None
elif self.type == 'CHECK_ANSWER' or self.type == 'ANSWER':
self.log.info('')
return self.__check_answer(ctx)
......@@ -588,6 +588,7 @@ class Step:
return self.__time_passes()
elif self.type == 'REPLY' or self.type == 'MOCK':
self.log.info('')
return None
# Parser currently doesn't support step types LOG, REPLAY and ASSERT.
# No test uses them.
# elif self.type == 'LOG':
......@@ -748,8 +749,8 @@ class Scenario:
if self.info:
txt += ' {0}'.format(self.info)
txt += '\n'
for range in self.ranges:
txt += str(range)
for range_ in self.ranges:
txt += str(range_)
for step in self.steps:
txt += str(step)
txt += "\nSCENARIO_END"
......@@ -839,15 +840,15 @@ def get_next(file_in, skip_empty=True):
if not line:
return False
quoted, escaped = False, False
for i in range(len(line)):
if line[i] == '\\':
for i, char in enumerate(line):
if char == '\\':
escaped = not escaped
if not escaped and line[i] == '"':
if not escaped and char == '"':
quoted = not quoted
if line[i] in ';' and not quoted:
if char == ';' and not quoted:
line = line[0:i]
break
if line[i] != '\\':
if char != '\\':
escaped = False
tokens = ' '.join(line.strip().split()).split()
if not tokens:
......@@ -859,7 +860,7 @@ def get_next(file_in, skip_empty=True):
return op, tokens
def parse_config(scn_cfg, qmin, installdir):
def parse_config(scn_cfg, qmin, installdir): # FIXME: pylint: disable=too-many-statements
"""
Transform scene config (key, value) pairs into dict filled with defaults.
Returns tuple:
......
from __future__ import absolute_import
import argparse
import itertools
import logging
......@@ -13,7 +11,6 @@ import time
import dns.message
import dns.rdatatype
import dpkt
from pydnstest import scenario
......@@ -36,6 +33,7 @@ class TestServer:
self.cur_iface = self.start_iface
self.kroot_local = root_addr
self.addr_family = addr_family
self.undefined_answers = 0
def __del__(self):
""" Cleanup after deletion. """
......@@ -51,8 +49,8 @@ class TestServer:
raise Exception('TestServer already started')
with self.active_lock:
self.active = True
self.addr, _ = self.start_srv((self.kroot_local, port), self.addr_family)
self.start_srv(self.addr, self.addr_family, socket.IPPROTO_TCP)
addr, _ = self.start_srv((self.kroot_local, port), self.addr_family)
self.start_srv(addr, self.addr_family, socket.IPPROTO_TCP)
self._bind_sockets()
def stop(self):
......@@ -267,13 +265,13 @@ def standalone_self_test():
logging.info("[==========] Mirror server running at %s", server.address())
def exit(signum, frame):
def kill(signum, frame): # pylint: disable=unused-argument
logging.info("[==========] Shutdown.")
server.stop()
sys.exit(128 + signum)
signal.signal(signal.SIGINT, exit)
signal.signal(signal.SIGTERM, exit)
signal.signal(signal.SIGINT, kill)
signal.signal(signal.SIGTERM, kill)
while True:
time.sleep(0.5)
......
[MESSAGES CONTROL]
disable=
missing-docstring,
too-few-public-methods,
too-many-arguments,
too-many-locals,
too-many-branches,
too-many-instance-attributes,
fixme,
unused-import, # checked by flake8
line-too-long, # checked by flake8
invalid-name,
global-statement,
no-else-return,
[SIMILARITIES]
min-similarity-lines=6
ignore-comments=yes
ignore-docstrings=yes
ignore-imports=no
[DESIGN]
max-parents=10
#!/usr/bin/env python3
from contextlib import suppress
import glob
import itertools
import os
import sys
from typing import Any, Callable, Iterable, Iterator, Optional, List, Union, Set
import dns.name
from typing import Any, Callable, Iterable, Iterator, Optional, List, Union, Set # noqa
import pydnstest.augwrap
import pydnstest.matchpart
......@@ -315,7 +311,7 @@ def test_run_rplint(rpl_path: str) -> None:
raise RplintError(t.fails)
if __name__ == '__main__':
def main():
try:
test_path = sys.argv[1]
except IndexError:
......@@ -333,3 +329,7 @@ if __name__ == '__main__':
if passed:
sys.exit(0)
sys.exit(1)
if __name__ == '__main__':
main()
......@@ -41,7 +41,7 @@ def get_templates():
"""
Return empty objects for RANGE and ENTRY suitable as object templates.
"""
empty_case, empty_config = pydnstest.scenario.parse_file(os.path.realpath('empty.rpl'))
empty_case, _ = pydnstest.scenario.parse_file(os.path.realpath('empty.rpl'))
rng = copy.copy(empty_case.ranges[0])
......@@ -122,57 +122,61 @@ def generate_step_elapse(tstep, id_prefix):
return out
resolver_init()
rng_templ, entry_templ = get_templates()
ranges = []
# transform data in zones files into RANGEs
files = os.listdir()
files.sort()
for fn in files:
if not fn.endswith('.db') or not fn.startswith('20'):
continue
ranges.append(generate_range(fn, rng_templ, entry_templ))
# connect ranges
for i in range(1, len(ranges)):
ranges[i - 1].b = ranges[i].a - 1
ranges[-1].b = 99999999999999
# steps
steps = []
tstart = datetime.datetime(year=2017, month=7, day=1)
tend = datetime.datetime(year=2017, month=12, day=31, hour=23, minute=59, second=59)
tstep = datetime.timedelta(days=1)
tcurr = tstart
while tcurr < tend:
id_prefix = tcurr.strftime('%Y%m%d')
steps.append(generate_step_query(tcurr, id_prefix))
steps.append(generate_step_check(id_prefix))
steps.append(generate_step_elapse(tstep, id_prefix))
tcurr += tstep
# generate output
with open('keys/ds') as dsfile:
ta = dsfile.read().strip()
# constant RPL file header
print("""stub-addr: 2001:503:ba3e::2:30
trust-anchor: {ta}
val-override-date: 20170701000000
query-minimization: off
CONFIG_END
SCENARIO_BEGIN Simulation of successfull RFC 5011 KSK roll-over during 2017
""".format(ta=ta))
for rng in ranges:
print(rng)
for step in steps:
print(step)
# constant RPL file footer
print('''
SCENARIO_END
''')
def main():
resolver_init()
rng_templ, entry_templ = get_templates()
ranges = []
# transform data in zones files into RANGEs
files = os.listdir()
files.sort()
for fn in files:
if not fn.endswith('.db') or not fn.startswith('20'):
continue
ranges.append(generate_range(fn, rng_templ, entry_templ))
# connect ranges
for i in range(1, len(ranges)):
ranges[i - 1].b = ranges[i].a - 1
ranges[-1].b = 99999999999999
# steps
steps = []
tstart = datetime.datetime(year=2017, month=7, day=1)
tend = datetime.datetime(year=2017, month=12, day=31, hour=23, minute=59, second=59)
tstep = datetime.timedelta(days=1)
tcurr = tstart
while tcurr < tend:
id_prefix = tcurr.strftime('%Y%m%d')
steps.append(generate_step_query(tcurr, id_prefix))
steps.append(generate_step_check(id_prefix))
steps.append(generate_step_elapse(tstep, id_prefix))
tcurr += tstep
# generate output
with open('keys/ds') as dsfile:
ta = dsfile.read().strip()
# constant RPL file header
print("""stub-addr: 2001:503:ba3e::2:30
trust-anchor: {ta}