coverage.py
#!/usr/bin/env python3
#
# Parse and report coverage info from .info files generated by lcov
#
import os
import glob
import csv
import re
import sys
import collections as co
import bisect as b

INFO_PATHS = ['tests/*.toml.info']
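
# The lcov .info "tracefile" records parsed below are line-oriented, e.g.
# (illustrative excerpt, path/names/numbers are made up):
#
#   SF:lfs.c            source file path
#   FN:421,lfs_mount    function start line, function name
#   DA:433,12           line number, execution count
#   end_of_record
#
# Only the SF/FN/DA records are needed to reduce per-line hit counts to
# per-function coverage; other records (TN, FNDA, LF, LH, ...) don't match
# the regex below and are simply skipped.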
def collect(paths, **args):
    file = None
    funcs = []
    lines = co.defaultdict(lambda: 0)
    pattern = re.compile(
        '^(?P<file>SF:/?(?P<file_name>.*))$'
        '|^(?P<func>FN:(?P<func_lineno>[0-9]*),(?P<func_name>.*))$'
        '|^(?P<line>DA:(?P<line_lineno>[0-9]*),(?P<line_hits>[0-9]*))$')
    for path in paths:
        with open(path) as f:
            for line in f:
                m = pattern.match(line)
                if m and m.group('file'):
                    file = m.group('file_name')
                elif m and file and m.group('func'):
                    funcs.append((file, int(m.group('func_lineno')),
                        m.group('func_name')))
                elif m and file and m.group('line'):
                    lines[(file, int(m.group('line_lineno')))] += (
                        int(m.group('line_hits')))

    # map line numbers to functions by bisecting the sorted
    # (file, lineno, name) list
    funcs.sort()
    def func_from_lineno(file, lineno):
        i = b.bisect(funcs, (file, lineno))
        if i and funcs[i-1][0] == file:
            return funcs[i-1][2]
        else:
            return None
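
    # illustrative example with hypothetical data: if funcs is
    #   [('lfs.c', 10, 'lfs_mount'), ('lfs.c', 42, 'lfs_format')]
    # then a hit on ('lfs.c', 57) bisects to index 2, so funcs[1] is the
    # last function starting at or before line 57 and 'lfs_format' is
    # taken as the enclosing function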

    # reduce to function info
    reduced_funcs = co.defaultdict(lambda: (0, 0))
    for (file, line_lineno), line_hits in lines.items():
        func = func_from_lineno(file, line_lineno)
        if not func:
            continue
        hits, count = reduced_funcs[(file, func)]
        reduced_funcs[(file, func)] = (hits + (line_hits > 0), count + 1)

    results = []
    for (file, func), (hits, count) in reduced_funcs.items():
        # discard internal/testing functions (test_* injected with
        # internal testing)
        if not args.get('everything'):
            if func.startswith('__') or func.startswith('test_'):
                continue
        # discard numeric suffixes (e.g. .8449) created by the optimizer
        func = re.sub(r'\.[0-9]+', '', func)
        results.append((file, func, hits, count))

    return results
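
# Sketch of calling collect() directly rather than through main(); the path
# below is hypothetical and assumes lcov has already produced the .info file:
#
#   results = collect(['tests/test_dirs.toml.info'])
#   for file, func, hits, count in results:
#       print('%s:%s covers %d/%d lines' % (file, func, hits, count))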

def main(**args):
    # find coverage
    if not args.get('use'):
        # find *.info files
        paths = []
        for path in args['info_paths']:
            if os.path.isdir(path):
                path = path + '/*.info'

            for path in glob.glob(path):
                paths.append(path)

        if not paths:
            print('no .info files found in %r?' % args['info_paths'])
            sys.exit(-1)

        results = collect(paths, **args)
    else:
        with open(args['use']) as f:
            r = csv.DictReader(f)
            results = [
                (result['file'],
                 result['function'],
                 int(result['hits']),
                 int(result['count']))
                for result in r]

    total_hits, total_count = 0, 0
    for _, _, hits, count in results:
        total_hits += hits
        total_count += count

    # find previous results?
    if args.get('diff'):
        with open(args['diff']) as f:
            r = csv.DictReader(f)
            prev_results = [
                (result['file'],
                 result['function'],
                 int(result['hits']),
                 int(result['count']))
                for result in r]

        prev_total_hits, prev_total_count = 0, 0
        for _, _, hits, count in prev_results:
            prev_total_hits += hits
            prev_total_count += count

    # write results to CSV
    if args.get('output'):
        with open(args['output'], 'w') as f:
            w = csv.writer(f)
            w.writerow(['file', 'function', 'hits', 'count'])
            for file, func, hits, count in sorted(results):
                w.writerow((file, func, hits, count))
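
    # the CSV written above (also consumed by -u/--use and -d/--diff) has
    # one row per function, e.g. (illustrative values):
    #
    #   file,function,hits,count
    #   lfs.c,lfs_mount,42,48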

    # print results
    def dedup_entries(results, by='function'):
        entries = co.defaultdict(lambda: (0, 0))
        for file, func, hits, count in results:
            entry = (file if by == 'file' else func)
            entry_hits, entry_count = entries[entry]
            entries[entry] = (entry_hits + hits, entry_count + count)
        return entries

    def diff_entries(olds, news):
        diff = co.defaultdict(lambda: (0, 0, 0, 0, 0, 0, 0))
        for name, (new_hits, new_count) in news.items():
            diff[name] = (
                0, 0,
                new_hits, new_count,
                new_hits, new_count,
                (new_hits/new_count if new_count else 1.0) - 1.0)
        for name, (old_hits, old_count) in olds.items():
            _, _, new_hits, new_count, _, _, _ = diff[name]
            diff[name] = (
                old_hits, old_count,
                new_hits, new_count,
                new_hits-old_hits, new_count-old_count,
                ((new_hits/new_count if new_count else 1.0)
                    - (old_hits/old_count if old_count else 1.0)))
        return diff
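
    # each diff entry is a 7-tuple:
    #   (old_hits, old_count, new_hits, new_count,
    #    diff_hits, diff_count, change in hits/count ratio)
    # entries only present on one side keep zeroed old/new fields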

    def print_header(by=''):
        if not args.get('diff'):
            print('%-36s %19s' % (by, 'hits/line'))
        else:
            print('%-36s %19s %19s %11s' % (by, 'old', 'new', 'diff'))

    def print_entries(by='function'):
        entries = dedup_entries(results, by=by)

        if not args.get('diff'):
            print_header(by=by)
            for name, (hits, count) in sorted(entries.items()):
                print("%-36s %11s %7s" % (name,
                    '%d/%d' % (hits, count)
                    if count else '-',
                    '%.1f%%' % (100*hits/count)
                    if count else '-'))
        else:
            prev_entries = dedup_entries(prev_results, by=by)
            diff = diff_entries(prev_entries, entries)
            print_header(by='%s (%d added, %d removed)' % (by,
                sum(1 for _, old, _, _, _, _, _ in diff.values() if not old),
                sum(1 for _, _, _, new, _, _, _ in diff.values() if not new)))
            for name, (
                    old_hits, old_count,
                    new_hits, new_count,
                    diff_hits, diff_count, ratio) in sorted(diff.items(),
                        key=lambda x: (-x[1][6], x)):
                if ratio or args.get('all'):
                    print("%-36s %11s %7s %11s %7s %11s%s" % (name,
                        '%d/%d' % (old_hits, old_count)
                        if old_count else '-',
                        '%.1f%%' % (100*old_hits/old_count)
                        if old_count else '-',
                        '%d/%d' % (new_hits, new_count)
                        if new_count else '-',
                        '%.1f%%' % (100*new_hits/new_count)
                        if new_count else '-',
                        '%+d/%+d' % (diff_hits, diff_count),
                        ' (%+.1f%%)' % (100*ratio) if ratio else ''))

    def print_totals():
        if not args.get('diff'):
            print("%-36s %11s %7s" % ('TOTAL',
                '%d/%d' % (total_hits, total_count)
                if total_count else '-',
                '%.1f%%' % (100*total_hits/total_count)
                if total_count else '-'))
        else:
            ratio = ((total_hits/total_count
                    if total_count else 1.0)
                - (prev_total_hits/prev_total_count
                    if prev_total_count else 1.0))
            print("%-36s %11s %7s %11s %7s %11s%s" % ('TOTAL',
                '%d/%d' % (prev_total_hits, prev_total_count)
                if prev_total_count else '-',
                '%.1f%%' % (100*prev_total_hits/prev_total_count)
                if prev_total_count else '-',
                '%d/%d' % (total_hits, total_count)
                if total_count else '-',
                '%.1f%%' % (100*total_hits/total_count)
                if total_count else '-',
                '%+d/%+d' % (total_hits-prev_total_hits,
                    total_count-prev_total_count),
                ' (%+.1f%%)' % (100*ratio) if ratio else ''))

    if args.get('quiet'):
        pass
    elif args.get('summary'):
        print_header()
        print_totals()
    elif args.get('files'):
        print_entries(by='file')
        print_totals()
    else:
        print_entries(by='function')
        print_totals()

if __name__ == "__main__":
    import argparse
    import sys
    parser = argparse.ArgumentParser(
        description="Parse and report coverage info from .info files \
            generated by lcov")
    parser.add_argument('info_paths', nargs='*', default=INFO_PATHS,
        help="Description of where to find *.info files. May be a directory \
            or list of paths. *.info files will be merged to show the total \
            coverage. Defaults to %r." % INFO_PATHS)
    parser.add_argument('-v', '--verbose', action='store_true',
        help="Output commands that run behind the scenes.")
    parser.add_argument('-o', '--output',
        help="Specify CSV file to store results.")
    parser.add_argument('-u', '--use',
        help="Don't do any work, instead use this CSV file.")
    parser.add_argument('-d', '--diff',
        help="Specify CSV file to diff coverage against.")
    parser.add_argument('-a', '--all', action='store_true',
        help="Show all functions, not just the ones that changed.")
    parser.add_argument('-A', '--everything', action='store_true',
        help="Include builtin and libc specific symbols.")
    parser.add_argument('--files', action='store_true',
        help="Show file-level coverage.")
    parser.add_argument('--summary', action='store_true',
        help="Only show the total coverage.")
    parser.add_argument('-q', '--quiet', action='store_true',
        help="Don't show anything, useful with -o.")
    sys.exit(main(**vars(parser.parse_args())))
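
# Example invocations (paths are illustrative):
#
#   ./coverage.py tests/*.toml.info -o coverage.csv
#   ./coverage.py -u coverage.csv -d coverage-prev.csv --summary
#
# The first merges the given .info files and writes per-function results to
# coverage.csv; the second reuses that CSV and prints only the old/new/diff
# totals against a previous run's coverage-prev.csv.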