Commit 1cacea3

Merge pull request #3384 from gizmoguy/test-debug
Implement test debug mode
2 parents 58b52b0 + d41893b

3 files changed (+79, -17 lines)

Dockerfile.tests

Lines changed: 1 addition & 1 deletion

@@ -1,6 +1,6 @@
 ## Image name: faucet/tests

-FROM faucet/test-base:6.0.0
+FROM faucet/test-base:7.0.0

 COPY ./ /faucet-src/
 WORKDIR /faucet-src/

clib/clib_mininet_test_main.py

Lines changed: 32 additions & 13 deletions
@@ -455,6 +455,13 @@ def addSuccess(self, test):
         super(FaucetCleanupResult, self).addSuccess(test)


+def debug_exception_handler(etype, value, trace):
+    import traceback
+    import pdb
+    traceback.print_exception(etype, value, trace)
+    print()
+    pdb.pm()
+
 def test_runner(root_tmpdir, resultclass, failfast=False):
     resultclass.root_tmpdir = root_tmpdir
     return unittest.TextTestRunner(verbosity=255, resultclass=resultclass, failfast=failfast)
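
Note on the new helper: it is the standard pdb post-mortem pattern hung off sys.excepthook. A minimal standalone sketch of the same idea (not part of this commit; the failing expression is made up, and pdb.post_mortem(trace) is used here as the explicit-traceback variant of the pdb.pm() call above):

    # Illustrative sketch only: install a post-mortem hook so any unhandled
    # exception prints its traceback and then opens a pdb prompt in the
    # frame that raised.
    import pdb
    import sys
    import traceback


    def post_mortem_hook(etype, value, trace):
        traceback.print_exception(etype, value, trace)
        pdb.post_mortem(trace)      # explicit-traceback variant of pdb.pm()


    if __name__ == '__main__':
        sys.excepthook = post_mortem_hook
        1 / 0                       # made-up failure; lands at the (Pdb) prompt

Running the sketch prints the traceback and leaves you at a (Pdb) prompt where the failing frame can be inspected.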
@@ -472,13 +479,19 @@ def run_parallel_test_suites(root_tmpdir, resultclass, parallel_tests):
     return results


-def run_single_test_suites(root_tmpdir, resultclass, single_tests):
+def run_single_test_suites(debug, root_tmpdir, resultclass, single_tests):
     results = []
     # TODO: Tests that are serialized generally depend on hardcoded ports.
     # Make them use dynamic ports.
     if single_tests.countTestCases():
         single_runner = test_runner(root_tmpdir, resultclass)
-        results.append(single_runner.run(single_tests))
+        if debug:
+            oldexcepthook = sys.excepthook
+            sys.excepthook = debug_exception_handler
+            single_tests.debug()
+            sys.excepthook = oldexcepthook
+        else:
+            results.append(single_runner.run(single_tests))
     return results

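The reason the hook can fire at all is the switch from single_runner.run(single_tests) to single_tests.debug(): TestSuite.run() records failures in a TestResult, while TestSuite.debug() lets them propagate as ordinary exceptions. A small sketch of that difference (ExampleTest is a hypothetical test case, not Faucet code):

    import unittest


    class ExampleTest(unittest.TestCase):   # hypothetical test used for illustration
        def test_fails(self):
            self.assertEqual(1, 2)


    loader = unittest.TestLoader()

    # run(): the failure is captured in a TestResult and execution continues.
    result = unittest.TextTestRunner().run(loader.loadTestsFromTestCase(ExampleTest))
    print(result.wasSuccessful())           # False

    # debug(): the same failure escapes as a plain AssertionError, so an
    # installed sys.excepthook (or pdb) can take over at the failing frame.
    try:
        loader.loadTestsFromTestCase(ExampleTest).debug()
    except AssertionError as err:
        print('propagated:', err)
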
@@ -496,7 +509,10 @@ def report_tests(test_status, test_list, result):
         test_duration_secs = result.test_duration_secs[test_class.id()]
         tests_json.update({
             test_class.id(): {
-                'status': test_status, 'output': test_text, 'test_duration_secs': test_duration_secs}})
+                'status': test_status,
+                'output': test_text,
+                'test_duration_secs': test_duration_secs
+            }})
     return tests_json

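For reference, report_tests() builds one entry per test keyed by test_class.id(); the reflow above does not change that. A sketch of the resulting shape, with made-up values:

    # Illustrative values only; the keys match the ones built in report_tests().
    tests_json = {
        'FaucetUntaggedTest': {                      # test_class.id() (example name)
            'status': 'OK',                          # whatever test_status was passed in
            'output': '... captured test output ...',
            'test_duration_secs': 42,
        },
    }
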
@@ -529,14 +545,14 @@ def report_results(results, hw_config, report_json_filename):
         report_json_file.write(json.dumps(report_json))


-def run_test_suites(report_json_filename, hw_config, root_tmpdir,
+def run_test_suites(debug, report_json_filename, hw_config, root_tmpdir,
                     resultclass, single_tests, parallel_tests, sanity_result):
     print('running %u tests in parallel and %u tests serial' % (
         parallel_tests.countTestCases(), single_tests.countTestCases()))
     results = []
     results.append(sanity_result)
     results.extend(run_parallel_test_suites(root_tmpdir, resultclass, parallel_tests))
-    results.extend(run_single_test_suites(root_tmpdir, resultclass, single_tests))
+    results.extend(run_single_test_suites(debug, root_tmpdir, resultclass, single_tests))
     report_results(results, hw_config, report_json_filename)
     successful_results = [result for result in results if result.wasSuccessful()]
     return len(results) == len(successful_results)

@@ -612,7 +628,7 @@ def clean_test_dirs(root_tmpdir, all_successful, sanity, keep_logs, dumpfail):
                 dump_failed_test(test_name, test_dir)


-def run_tests(module, hw_config, requested_test_classes, dumpfail,
+def run_tests(module, hw_config, requested_test_classes, dumpfail, debug,
               keep_logs, serial, repeat, excluded_test_classes, report_json_filename,
               port_order):
     """Actually run the test suites, potentially in parallel."""

@@ -643,10 +659,11 @@ def run_tests(module, hw_config, requested_test_classes, dumpfail,
     sanity_result = run_sanity_test_suite(root_tmpdir, resultclass, sanity_tests)
     if sanity_result.wasSuccessful():
         while True:
-            all_successful = run_test_suites(
-                report_json_filename, hw_config, root_tmpdir,
-                resultclass, copy.deepcopy(single_tests),
-                copy.deepcopy(parallel_tests), sanity_result)
+            all_successful = run_test_suites(debug, report_json_filename,
+                                             hw_config, root_tmpdir, resultclass,
+                                             copy.deepcopy(single_tests),
+                                             copy.deepcopy(parallel_tests),
+                                             sanity_result)
             if not repeat:
                 break
             if not all_successful:

@@ -679,6 +696,8 @@ def parse_args():
         '-c', '--clean', action='store_true', help='run mininet cleanup')
     parser.add_argument(
         '-d', '--dumpfail', action='store_true', help='dump logs for failed tests')
+    parser.add_argument(
+        '--debug', action='store_true', help='enter debug breakpoint on assertion failure')
     parser.add_argument(
         '-k', '--keep_logs', action='store_true', help='keep logs even for OK tests')
     loglevels = ('debug', 'error', 'warning', 'info', 'output')

@@ -728,7 +747,7 @@ def parse_args():


     return (
-        requested_test_classes, args.clean, args.dumpfail,
+        requested_test_classes, args.clean, args.dumpfail, args.debug,
         args.keep_logs, args.nocheck, args.serial, args.repeat,
         excluded_test_classes, report_json_filename, port_order,
         args.loglevel, args.profile)

@@ -739,7 +758,7 @@ def test_main(module):

     print('testing module %s' % module)

-    (requested_test_classes, clean, dumpfail, keep_logs, nocheck,
+    (requested_test_classes, clean, dumpfail, debug, keep_logs, nocheck,
      serial, repeat, excluded_test_classes, report_json_filename, port_order,
      loglevel, profile) = parse_args()

@@ -769,7 +788,7 @@ def test_main(module):
         pr.enable()

     run_tests(
-        module, hw_config, requested_test_classes, dumpfail,
+        module, hw_config, requested_test_classes, dumpfail, debug,
         keep_logs, serial, repeat, excluded_test_classes, report_json_filename, port_order)

     if profile:

docs/testing.rst

Lines changed: 46 additions & 3 deletions
@@ -18,7 +18,8 @@ You can build and run the mininet tests with the following commands:
     sudo docker build --pull -t faucet/tests -f Dockerfile.tests .
     sudo apparmor_parser -R /etc/apparmor.d/usr.sbin.tcpdump
     sudo modprobe openvswitch
-    sudo docker run --sysctl net.ipv6.conf.all.disable_ipv6=0 --privileged --rm \
+    sudo docker run --name=faucet-tests \
+        --sysctl net.ipv6.conf.all.disable_ipv6=0 --privileged --rm \
         -v /var/local/lib/docker:/var/lib/docker \
         -v /tmp/faucet-pip-cache:/var/tmp/pip-cache \
         -ti faucet/tests

@@ -185,7 +186,8 @@ Then you can build and run the test suite:
 .. code:: console

     sudo docker build --pull -t faucet/tests -f Dockerfile.tests .
-    sudo docker run --privileged --rm --net=host --cap-add=NET_ADMIN \
+    sudo docker run --name=faucet-tests \
+        --privileged --rm --net=host --cap-add=NET_ADMIN \
         -v /var/local/lib/docker:/var/lib/docker \
         -v /tmp/faucet-pip-cache:/var/tmp/pip-cache \
         -v /etc/faucet:/etc/faucet \

@@ -272,7 +274,6 @@ the test suite will keep these files.

     -e FAUCET_TESTS="-k"

-
 Repeatedly running tests until failure
 --------------------------------------

@@ -282,3 +283,45 @@ Tests will continue to run forever until at least one fails or the test is interrupted
 .. code:: console

     -e FAUCET_TESTS="-r"
+
+Test debugging
+--------------
+
+Often while debugging a failed integration test it can be useful to pause the
+test suite at the point of the failure. The test can then be inspected live to
+narrow down the exact issue. To do this, run your test with the ``--debug``
+flag (replace ``TEST_NAME`` with the actual name of the test).
+
+.. code:: console
+
+    -e FAUCET_TESTS="--debug TEST_NAME"
+
+The test suite will now run in a mode where it ignores successful tests and
+drops into a pdb shell when a failure occurs inside a test.
+There are a number of different
+`pdb commands <https://docs.python.org/3/library/pdb.html#debugger-commands>`_
+that can be run to check the actual test code.
+
+It is also possible to log in to the virtual container environment and run
+interactive debug commands to inspect the state of the system.
+
+.. code:: console
+
+    sudo docker exec -it faucet-tests /bin/bash
+
+One useful thing can be to find the running mininet containers and execute
+commands inside of them, e.g. ping:
+
+.. code:: console
+
+    root@35b98943f736:/faucet-src# ps w | grep mininet:
+
+      995 pts/1    Ss+    0:00 bash --norc --noediting -is mininet:faucet-637
+      997 pts/2    Ss+    0:00 bash --norc --noediting -is mininet:u021
+     1001 pts/3    Ss+    0:00 bash --norc --noediting -is mininet:u022
+     1005 pts/4    Ss+    0:00 bash --norc --noediting -is mininet:u023
+     1009 pts/5    Ss+    0:00 bash --norc --noediting -is mininet:u024
+     1013 pts/6    Ss+    0:00 bash --norc --noediting -is mininet:s02
+     1077 pts/7    Ss+    0:00 bash --norc --noediting -is mininet:gauge-637
+
+    root@35b98943f736:/faucet-src# m u021 ping 127.0.0.1
