Mirror of https://github.com/dashpay/dash.git, synced 2024-12-26 12:32:48 +01:00.
Commit 405b86a92a: Instead of using lcov -r (which is extremely slow), first use a Python script to perform bulk cleanup of the /usr/include/* coverage, then use lcov -a to remove the duplicate entries. This has the same effect as lcov -r but runs significantly faster.
File: 25 lines, 844 B, Python, executable.
#!/usr/bin/env python3
"""Remove the coverage data from an lcov tracefile for all files matching a pattern.

Reads an lcov tracefile record by record.  A record starts at its ``SF:<path>``
line and ends at the ``end_of_record`` marker; when the pattern occurs in the
``SF:`` line, every line from there through the record's ``end_of_record``
(inclusive) is dropped.  All other lines are copied to the output unchanged.
"""
import argparse


def filter_tracefile(pattern, tracefile, outfile):
    """Copy *tracefile* to *outfile*, dropping records whose SF: line contains *pattern*.

    :param pattern: substring matched against each ``SF:`` line (plain substring, not a regex)
    :param tracefile: path of the lcov tracefile to read
    :param outfile: path the filtered tracefile is written to (overwritten if it exists)
    """
    in_remove = False
    with open(tracefile, 'r') as f, open(outfile, 'w') as wf:
        for line in f:
            # An SF: line containing the pattern starts a record we discard.
            if line.startswith("SF:") and pattern in line:
                in_remove = True
            if not in_remove:
                wf.write(line)
            # rstrip so a final end_of_record without a trailing newline
            # still terminates the removal state.
            if line.rstrip('\n') == 'end_of_record':
                in_remove = False


def main():
    parser = argparse.ArgumentParser(description='Remove the coverage data from a tracefile for all files matching the pattern.')
    parser.add_argument('pattern', help='the pattern of files to remove')
    parser.add_argument('tracefile', help='the tracefile to remove the coverage data from')
    parser.add_argument('outfile', help='filename for the output to be written to')
    args = parser.parse_args()
    filter_tracefile(args.pattern, args.tracefile, args.outfile)


# Guard so importing this module (e.g. for testing) does not parse sys.argv.
if __name__ == '__main__':
    main()