aboutsummaryrefslogtreecommitdiffstats
path: root/mgw
diff options
context:
space:
mode:
authorNeels Hofmeyr <neels@hofmeyr.de>2018-04-05 16:56:38 +0200
committerNeels Hofmeyr <neels@hofmeyr.de>2018-04-06 01:07:15 +0200
commit3cf797d7a59a9f5fc05bd22af32d21c23409e60d (patch)
treed3dd5b5741ba5e576814b8879319e8d85a076f7b /mgw
parentdef4ad4bcefbe065bc3446b20fb8b32769158425 (diff)
add compare-results.sh, call from start-testsuite.sh
Compare current test results to the expected results, and exit in error on discrepancies. Add compare-results.sh: (trivially) grep junit xml output to determine which tests passed and which didn't, and compare against an expected-results.log, another junit file from a previous run. Summarize and determine success. Include an "xfail" feature: tests that are expected to fail are marked as "xfail", unexpected failures as "FAIL". In various subdirs, copy the current jenkins jobs' junit xml outputs as expected-results.log, so that we will start getting useful output in both jenkins runs and manual local runs. In start-testsuite.sh, after running the tests, invoke the results comparison. Due to the single-line parsing nature, the script so far does not distinguish between error and failure. I doubt that we actually need to do that though. Related: OS#3136 Change-Id: I87d62a8be73d73a5eeff61a842e7c27a0066079d
Diffstat (limited to 'mgw')
-rw-r--r--mgw/expected-results.log36
1 files changed, 36 insertions, 0 deletions
diff --git a/mgw/expected-results.log b/mgw/expected-results.log
new file mode 100644
index 00000000..c67d5d91
--- /dev/null
+++ b/mgw/expected-results.log
@@ -0,0 +1,36 @@
+<?xml version="1.0"?>
+<testsuite name='Titan' tests='26' failures='1' errors='0' skipped='1' inconc='0' time='177.00'>
+ <testcase classname='MGCP_Test' name='TC_selftest' time='0.002990'>
+ <skipped>no verdict</skipped>
+ </testcase>
+ <testcase classname='MGCP_Test' name='TC_crcx' time='0.008097'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_noprefix' time='0.007103'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_unsupp_mode' time='0.006485'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_early_bidir_mode' time='0.023678'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_unsupp_param' time='0.005827'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_missing_callid' time='0.006864'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_missing_mode' time='0.006328'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_unsupp_packet_intv' time='0.005716'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_illegal_double_lco' time='0.014705'>
+ <failure type='fail-verdict'>
+ MGCP_Test.ttcn:887 MGCP_Test control part
+ MGCP_Test.ttcn:436 TC_crcx_illegal_double_lco testcase
+ </failure>
+ </testcase>
+ <testcase classname='MGCP_Test' name='TC_crcx_sdp' time='0.008481'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_wildcarded' time='0.006528'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_wildcarded_exhaust' time='0.033504'/>
+ <testcase classname='MGCP_Test' name='TC_mdcx_without_crcx' time='0.023214'/>
+ <testcase classname='MGCP_Test' name='TC_dlcx_without_crcx' time='0.006506'/>
+ <testcase classname='MGCP_Test' name='TC_mdcx_wildcarded' time='0.006675'/>
+ <testcase classname='MGCP_Test' name='TC_dlcx_wildcarded' time='0.022936'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_and_dlcx_ep_callid_connid' time='0.006310'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_and_dlcx_ep_callid' time='0.006509'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_and_dlcx_ep' time='0.006822'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_and_dlcx_ep_callid_inval' time='0.007519'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_and_dlcx_ep_callid_connid_inval' time='0.006912'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_and_dlcx_retrans' time='0.006681'/>
+ <testcase classname='MGCP_Test' name='TC_crcx_dlcx_30ep' time='0.032986'/>
+ <testcase classname='MGCP_Test' name='TC_rtpem_selftest' time='5.514538'/>
+ <testcase classname='MGCP_Test' name='TC_two_crcx_and_rtp' time='1.111828'/>
+</testsuite>