[benchmark] LogParser support for --verbose mode

LogParser doesn’t use `csv.reader` anymore.
Parsing is handled by a finite state machine. Each line is matched against a set of (mutually exclusive) regular expressions that represent the known states; when a match is found, the corresponding parsing action is taken (sketched below).
Pavol Vaskovic
2018-08-13 17:28:41 +02:00
parent 9852e9a32a
commit d0cdaee798
2 changed files with 135 additions and 22 deletions
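
The regex-per-state dispatch described in the commit message can be pictured with a minimal sketch like the following. This is not the code from this commit; the class name, state regular expressions, and handler methods are illustrative assumptions, shown only to make the "match a known state, then run its parsing action" idea concrete.

```python
import re


class VerboseLogParserSketch(object):
    """Minimal sketch of a regex-dispatch (state machine) line parser.

    All names here are hypothetical; the real LogParser lives in the
    benchmark comparison script that these tests exercise.
    """

    def __init__(self):
        self.samples = []   # (sample_index, runtime) tuples from --verbose
        self.results = []   # raw CSV result rows
        # Known states as mutually exclusive regular expressions, each
        # paired with the parsing action to take on a match.
        self.state_actions = [
            (re.compile(r'^#,TEST'), self._skip),             # header row
            (re.compile(r'^Totals,'), self._skip),            # totals row
            (re.compile(r'^\s*$'), self._skip),               # empty line
            (re.compile(r'^\s*Sample (\d+),(\d+)'), self._add_sample),
            (re.compile(r'^\d+,'), self._add_result),         # result row
        ]

    def parse_results(self, lines):
        for line in lines:
            for regex, action in self.state_actions:
                match = regex.match(line)
                if match:
                    action(match, line)
                    break   # states are mutually exclusive
            # lines matching no known state are simply ignored
        return self.results

    def _skip(self, match, line):
        pass

    def _add_sample(self, match, line):
        # e.g. "Sample 0,11812" from --verbose output
        self.samples.append((int(match.group(1)), int(match.group(2))))

    def _add_result(self, match, line):
        # e.g. "34,BitCount,20,3,4,4,0,4"
        self.results.append(line.split(','))
```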


@@ -173,25 +173,66 @@ class OldAndNewLog(unittest.TestCase):
         self.assertIn(text, report)
 
 
-class TestLogParser(FileSystemIntegration):
-    def test_load_from_csv(self):
+class TestLogParser(unittest.TestCase):
+    def test_parse_results_csv(self):
         """Ignores header row, empty lines and Totals row."""
-        log_file = self.write_temp_file('log.log',"""
-#,TEST,SAMPLES,MIN(us),MAX(us),MEAN(us),SD(us),MEDIAN(us)
+        log = """#,TEST,SAMPLES,MIN(us),MAX(us),MEAN(us),SD(us),MEDIAN(us)
 34,BitCount,20,3,4,4,0,4
 Totals,269
-""")
-        results = LogParser.load_from_csv(log_file)
-        self.assertEquals(results.keys(), ['BitCount'])
-        self.assertTrue(isinstance(results['BitCount'], PerformanceTestResult))
+"""
+        parser = LogParser()
+        results = parser.parse_results(log.splitlines())
+        self.assertTrue(isinstance(results[0], PerformanceTestResult))
+        self.assertEquals(results[0].name, 'BitCount')
 
-    def test_merge(self):
-        concatenated_logs = self.write_temp_file('concat.log', """
-4,ArrayAppend,20,23641,29000,24990,0,24990
-4,ArrayAppend,1,20000,20000,20000,0,20000
-""")
-        results = LogParser.load_from_csv(concatenated_logs)
+    def test_parse_results_verbose(self):
+        """Parse multiple performance test results with 2 sample formats:
+        single line for N = 1; two lines for N > 1.
+        """
+        verbose_log = """--- DATA ---
+#,TEST,SAMPLES,MIN(us),MAX(us),MEAN(us),SD(us),MEDIAN(us)
+Running AngryPhonebook for 3 samples.
+Measuring with scale 78.
+Sample 0,11812
+Measuring with scale 90.
+Sample 1,13898
+Measuring with scale 91.
+Sample 2,11467
+1,AngryPhonebook,3,11467,13898,12392,1315,11812
+Running Array2D for 3 samples.
+Sample 0,369900
+Sample 1,381039
+Sample 2,371043
+3,Array2D,3,369900,381039,373994,6127,371043
+Totals,2"""
+        parser = LogParser()
+        results = parser.parse_results(verbose_log.split('\n'))
+
+        r = results[0]
+        self.assertEquals(
+            (r.name, r.min, r.max, int(r.mean), int(r.sd), r.median),
+            ('AngryPhonebook', 11467, 13898, 12392, 1315, 11812)
+        )
+        self.assertEquals(r.samples, len(r.all_samples))
+        self.assertEquals(results[0].all_samples,
+                          [(0, 78, 11812), (1, 90, 13898), (2, 91, 11467)])
+
+        r = results[1]
+        self.assertEquals(
+            (r.name, r.min, r.max, int(r.mean), int(r.sd), r.median),
+            ('Array2D', 369900, 381039, 373994, 6127, 371043)
+        )
+        self.assertEquals(r.samples, len(r.all_samples))
+        self.assertEquals(results[1].all_samples,
+                          [(0, 1, 369900), (1, 1, 381039), (2, 1, 371043)])
+
+    def test_results_from_merge(self):
+        """Parsing concatenated log merges same PerformanceTestResults"""
+        concatenated_logs = """4,ArrayAppend,20,23641,29000,24990,0,24990
+4,ArrayAppend,1,20000,20000,20000,0,20000"""
+        results = LogParser.results_from_string(concatenated_logs)
         self.assertEquals(results.keys(), ['ArrayAppend'])
         result = results['ArrayAppend']
         self.assertTrue(isinstance(result, PerformanceTestResult))
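
Going by the tests above, the parser's entry points after this change look roughly like the usage sketch below. The module path and the sample log content are assumptions made for illustration; only the call shapes are taken from the tests.

```python
# Assumption: LogParser is importable from the module these tests cover.
from compare_perf_tests import LogParser

# parse_results() takes an iterable of lines and returns a list of
# PerformanceTestResult objects (per test_parse_results_csv above).
log = "34,BitCount,20,3,4,4,0,4"
results = LogParser().parse_results(log.splitlines())
print(results[0].name)  # BitCount

# results_from_string() keys results by test name and merges repeated
# entries for the same benchmark (per test_results_from_merge above).
merged = LogParser.results_from_string(
    '4,ArrayAppend,20,23641,29000,24990,0,24990\n'
    '4,ArrayAppend,1,20000,20000,20000,0,20000')
print(merged.keys())  # ['ArrayAppend'] (the tests assume Python 2 dict.keys())
```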