Coverage for coverage / html.py: 100.000%

415 statements  

« prev     ^ index     » next       coverage.py v7.12.1a0.dev1, created at 2025-11-30 17:57 +0000

1# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0 

2# For details: https://github.com/coveragepy/coveragepy/blob/main/NOTICE.txt 

3 

4"""HTML reporting for coverage.py.""" 

5 

6from __future__ import annotations 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

7 

8import collections 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

9import dataclasses 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

10import datetime 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

11import functools 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

12import json 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

13import os 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

14import re 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

15import string 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

16from collections.abc import Iterable 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

17from dataclasses import dataclass, field 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

18from typing import TYPE_CHECKING, Any 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

19 

20import coverage 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

21from coverage.data import CoverageData, add_data_to_hash 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

22from coverage.exceptions import NoDataError 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

23from coverage.files import flat_rootname 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

24from coverage.misc import ( 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

25 Hasher, 

26 ensure_dir, 

27 file_be_gone, 

28 format_local_datetime, 

29 human_sorted, 

30 isolate_module, 

31 plural, 

32 stdout_link, 

33) 

34from coverage.report_core import get_analysis_to_report 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

35from coverage.results import Analysis, AnalysisNarrower, Numbers 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

36from coverage.templite import Templite 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

37from coverage.types import TLineNo, TMorf 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

38from coverage.version import __url__ 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

39 

40if TYPE_CHECKING: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

41 from coverage import Coverage 

42 from coverage.plugins import FileReporter 

43 

44 

# Re-bind `os` through isolate_module (from coverage.misc) — presumably to
# shield this module's os calls from test-time monkeypatching; confirm in
# coverage.misc.
os = isolate_module(os)

46 

47 

def data_filename(fname: str) -> str:
    """Return the path to an "htmlfiles" data file of ours.

    The file is looked up next to this module, in the "htmlfiles" directory.
    """
    return os.path.join(os.path.dirname(__file__), "htmlfiles", fname)

53 

54 

def read_data(fname: str) -> str:
    """Return the contents of a data file of ours, decoded as UTF-8."""
    path = data_filename(fname)
    with open(path, encoding="utf-8") as f:
        return f.read()

59 

60 

def write_html(fname: str, html: str) -> None:
    """Write `html` to `fname`, properly encoded.

    Leading whitespace at the start of the text and trailing whitespace at
    line ends are stripped, and non-ASCII characters become HTML character
    references.
    """
    cleaned = re.sub(r"(\A\s+)|(\s+$)", "", html, flags=re.MULTILINE)
    payload = (cleaned + "\n").encode("ascii", "xmlcharrefreplace")
    with open(fname, "wb") as fout:
        fout.write(payload)

66 

67 

@dataclass
class LineData:
    """The data for each source line of HTML output."""

    # The syntax-highlighting tokens for the line, as (class, text) pairs.
    tokens: list[tuple[str, str]]
    # The 1-based line number.
    number: TLineNo
    # Coverage category: "exc", "mis", "par", "run" (or the "2"-suffixed
    # continuation-line variants), or "" for uncategorized lines.
    category: str
    # Measurement contexts that executed this line.
    contexts: list[str]
    # Short label summarizing the contexts for display.
    contexts_label: str
    # Contexts to show in the expandable list (empty when only the
    # default context applies).
    context_list: list[str]
    # Short branch annotations (e.g. destination line numbers, "exit").
    short_annotations: list[str]
    # Full-sentence branch annotations.
    long_annotations: list[str]
    # Fields below are filled in later while rendering the page.
    html: str = ""
    context_str: str | None = None
    annotate: str | None = None
    annotate_long: str | None = None
    css_class: str = ""

85 

86 

@dataclass
class FileData:
    """The data for each source file of HTML output."""

    # The file name relative to the reporting root.
    relative_filename: str
    # The coverage summary numbers for the file.
    nums: Numbers
    # Per-line display data, one LineData per source line.
    lines: list[LineData]

94 

95 

@dataclass
class IndexItem:
    """Information for each index entry, to render an index page."""

    # Link target for the entry's detail page.
    url: str = ""
    # The file name shown in the index row.
    file: str = ""
    # Extra description text (e.g. a region name).
    description: str = ""
    # Coverage numbers for this entry; default_factory avoids a shared
    # mutable default.
    nums: Numbers = field(default_factory=Numbers)

104 

105 

@dataclass
class IndexPage:
    """Data for each index page."""

    # Singular name of the region kind ("file", "class", "function", ...).
    noun: str
    # Plural form of `noun`, for page headings.
    plural: str
    # Output file name of this index page.
    filename: str
    # One IndexItem per reported entry.
    summaries: list[IndexItem]
    # Aggregated totals across all entries, including skipped ones.
    totals: Numbers
    # How many entries were skipped because they were fully covered.
    skipped_covered_count: int
    # How many entries were skipped because they had no statements.
    skipped_empty_count: int

117 

118 

class HtmlDataGeneration:
    """Generate structured data to be turned into HTML reports."""

    # Display label for the empty ("") measurement context.
    EMPTY = "(empty)"

    def __init__(self, cov: Coverage) -> None:
        self.coverage = cov
        self.config = self.coverage.config
        self.data = self.coverage.get_data()
        self.has_arcs = self.data.has_arcs()
        if self.config.show_contexts:
            # Contexts were requested for display, but only the default
            # empty context was ever recorded: warn the user.
            if self.data.measured_contexts() == {""}:
                self.coverage._warn("No contexts were measured")
        self.data.set_query_contexts(self.config.report_contexts)

    def data_for_file(self, fr: FileReporter, analysis: Analysis) -> FileData:
        """Produce the data needed for one file's report.

        Walks the tokenized source lines of `fr`, categorizing each line
        from `analysis` and attaching branch annotations and context info,
        and returns the assembled FileData.
        """
        if self.has_arcs:
            missing_branch_arcs = analysis.missing_branch_arcs()
            arcs_executed = analysis.arcs_executed
        else:
            # No branch data: use empty stand-ins so the loop below is uniform.
            missing_branch_arcs = {}
            arcs_executed = []

        if self.config.show_contexts:
            contexts_by_lineno = self.data.contexts_by_lineno(analysis.filename)

        lines = []
        branch_stats = analysis.branch_stats()
        # If the reporter can say so, multiline_map maps a continuation line
        # to the first line of its multi-line statement — presumably so
        # continuation lines inherit that line's category; confirm against
        # the FileReporter plugin interface.
        multiline_map = {}
        if hasattr(fr, "multiline_map"):
            multiline_map = fr.multiline_map()

        for lineno, tokens in enumerate(fr.source_token_lines(), start=1):
            # Figure out how to mark this line.
            category = category2 = ""
            short_annotations = []
            long_annotations = []

            if lineno in analysis.excluded:
                category = "exc"
            elif lineno in analysis.missing:
                category = "mis"
            elif self.has_arcs and lineno in missing_branch_arcs:
                # Partial branch: executed, but some branches never taken.
                category = "par"
                mba = missing_branch_arcs[lineno]
                if len(mba) == branch_stats[lineno][0]:
                    # None of the branches were taken from this line.
                    short_annotations.append("anywhere")
                    long_annotations.append(
                        f"line {lineno} didn't jump anywhere: it always raised an exception."
                    )
                else:
                    for b in missing_branch_arcs[lineno]:
                        # Negative destinations mean an exit from the function.
                        if b < 0:
                            short_annotations.append("exit")
                        else:
                            short_annotations.append(str(b))
                        long_annotations.append(
                            fr.missing_arc_description(lineno, b, arcs_executed)
                        )
            elif lineno in analysis.statements:
                category = "run"
            elif first_line := multiline_map.get(lineno):
                # A continuation line: mark it with the "2"-suffixed flavor
                # of its first line's category.
                if first_line in analysis.excluded:
                    category2 = "exc2"
                elif first_line in analysis.missing:
                    category2 = "mis2"
                elif self.has_arcs and first_line in missing_branch_arcs:
                    category2 = "par2"
                # I don't understand why this last condition is marked as
                # partial. If I add an else with an exception, the exception
                # is raised.
                elif first_line in analysis.statements:  # pragma: part covered
                    category2 = "run2"

            contexts = []
            contexts_label = ""
            context_list = []
            if category and self.config.show_contexts:
                # Substitute the EMPTY label for the default "" context.
                contexts = human_sorted(c or self.EMPTY for c in contexts_by_lineno.get(lineno, ()))
                if contexts == [self.EMPTY]:
                    contexts_label = self.EMPTY
                else:
                    contexts_label = f"{len(contexts)} ctx"
                    context_list = contexts

            lines.append(
                LineData(
                    tokens=tokens,
                    number=lineno,
                    category=category or category2,
                    contexts=contexts,
                    contexts_label=contexts_label,
                    context_list=context_list,
                    short_annotations=short_annotations,
                    long_annotations=long_annotations,
                )
            )

        file_data = FileData(
            relative_filename=fr.relative_filename(),
            nums=analysis.numbers,
            lines=lines,
        )

        return file_data

226 

227 

class FileToReport:
    """A file we're considering reporting.

    Bundles the FileReporter and its Analysis, and records the derived
    output file name plus prev/next navigation links.
    """

    def __init__(self, fr: FileReporter, analysis: Analysis) -> None:
        self.fr = fr
        self.analysis = analysis
        # The flattened root name determines the output page's file name.
        self.rootname = flat_rootname(fr.relative_filename())
        self.html_filename = f"{self.rootname}.html"
        # Neighbor links, filled in once the page ordering is known.
        self.prev_html = ""
        self.next_html = ""

237 

238 

# The digit alphabet for encode_int: characters safe to use in HTML ids.
HTML_SAFE = string.ascii_letters + string.digits + "!#$%'()*+,-./:;=?@[]^_`{|}~"


@functools.cache
def encode_int(n: int) -> str:
    """Create a short HTML-safe string from an integer, using HTML_SAFE.

    The digits come out least-significant first; zero encodes as the
    first alphabet character.
    """
    base = len(HTML_SAFE)
    digits = []
    # do-while shape: always emit at least one digit, so n == 0 needs no
    # special case.
    while True:
        digits.append(HTML_SAFE[n % base])
        n //= base
        if not n:
            break
    return "".join(digits)

253 

254 

def copy_with_cache_bust(src: str, dest_dir: str) -> str:
    """Copy `src` to `dest_dir`, adding a hash to the name.

    Returns the updated destination file name with hash.
    """
    with open(src, "rb") as fin:
        content = fin.read()
    hasher = Hasher()
    hasher.update(content)
    digest = hasher.hexdigest()[:8]
    # Note: str.replace inserts the hash at EVERY dot in the base name,
    # not just before the extension.
    base_name = os.path.basename(src)
    dest_name = base_name.replace(".", f"_cb_{digest}.")
    with open(os.path.join(dest_dir, dest_name), "wb") as fout:
        fout.write(content)
    return dest_name

270 

271 

class HtmlReporter:
    """HTML reporting."""

    # These files will be copied from the htmlfiles directory to the output
    # directory (with cache-busting hashes added to their names).
    STATIC_FILES = [
        "style.css",
        "coverage_html.js",
        "keybd_closed.png",
        "favicon_32.png",
    ]

283 

    def __init__(self, cov: Coverage) -> None:
        self.coverage = cov
        self.config = self.coverage.config
        self.directory = self.config.html_dir

        # The [html] skip_covered/skip_empty settings fall back to the
        # report-wide settings when not set explicitly.
        self.skip_covered = self.config.html_skip_covered
        if self.skip_covered is None:
            self.skip_covered = self.config.skip_covered
        self.skip_empty = self.config.html_skip_empty
        if self.skip_empty is None:
            self.skip_empty = self.config.skip_empty

        title = self.config.html_title

        self.extra_css = bool(self.config.extra_css)

        self.data = self.coverage.get_data()
        self.has_arcs = self.data.has_arcs()

        # Index pages keyed by region noun; the "file" page always exists,
        # other kinds are added as file reporters declare them.
        self.index_pages: dict[str, IndexPage] = {
            "file": self.new_index_page("file", "files"),
        }
        self.incr = IncrementalChecker(self.directory)
        self.datagen = HtmlDataGeneration(self.coverage)
        self.directory_was_empty = False
        self.first_fr = None
        self.final_fr = None

        self.template_globals = {
            # Functions available in the templates.
            "escape": escape,
            "pair": pair,
            "pretty_file": pretty_file,
            # Constants for this report.
            "__url__": __url__,
            "__version__": coverage.__version__,
            "title": title,
            "time_stamp": format_local_datetime(datetime.datetime.now()),
            "extra_css": self.extra_css,
            "has_arcs": self.has_arcs,
            # Cache-busted static file names, filled in by copy_static_file().
            "statics": {},
            # Constants for all reports.
            # These css classes determine which lines are highlighted by default.
            "category": {
                "exc": "exc show_exc",
                "mis": "mis show_mis",
                "par": "par run show_par",
                "run": "run",
                "exc2": "exc exc2 show_exc",
                "mis2": "mis mis2 show_mis",
                # NOTE(review): "ru2" looks like a typo for "run2" — confirm
                # against style.css before changing.
                "par2": "par par2 ru2 show_par",
                "run2": "run run2",
            },
        }
        self.index_tmpl = Templite(read_data("index.html"), self.template_globals)
        self.pyfile_html_source = read_data("pyfile.html")
        self.source_tmpl = Templite(self.pyfile_html_source, self.template_globals)

341 

342 def new_index_page(self, noun: str, plural_noun: str) -> IndexPage: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

343 """Create an IndexPage for a kind of region.""" 

344 return IndexPage( 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

345 noun=noun, 

346 plural=plural_noun, 

347 filename="index.html" if noun == "file" else f"{noun}_index.html", 

348 summaries=[], 

349 totals=Numbers(precision=self.config.precision), 

350 skipped_covered_count=0, 

351 skipped_empty_count=0, 

352 ) 

353 

    def report(self, morfs: Iterable[TMorf] | None) -> float:
        """Generate an HTML report for `morfs`.

        `morfs` is a list of modules or file names.

        Returns the total percent covered, or 0 when no statements were
        measured.  Raises NoDataError if there was no data to report.

        """
        # Read the status data and check that this run used the same
        # global data as the last run.
        self.incr.read()
        self.incr.check_global_data(self.config, self.pyfile_html_source)

        # Process all the files. For each page we need to supply a link
        # to the next and previous page.
        files_to_report = []

        have_data = False
        for fr, analysis in get_analysis_to_report(self.coverage, morfs):
            have_data = True
            ftr = FileToReport(fr, analysis)
            if self.should_report(analysis, self.index_pages["file"]):
                files_to_report.append(ftr)
            else:
                # Skipped files get their stale page removed, if present.
                file_be_gone(os.path.join(self.directory, ftr.html_filename))

        if not have_data:
            raise NoDataError("No data to report.")

        self.make_directory()
        self.make_local_static_report_files()

        if files_to_report:
            # Chain the pages together with prev/next links, wrapping
            # around to the index at each end.
            for ftr1, ftr2 in zip(files_to_report[:-1], files_to_report[1:]):
                ftr1.next_html = ftr2.html_filename
                ftr2.prev_html = ftr1.html_filename
            files_to_report[0].prev_html = "index.html"
            files_to_report[-1].next_html = "index.html"

        for ftr in files_to_report:
            self.write_html_page(ftr)
            # Register any new region kinds (classes, functions, ...) this
            # file's reporter knows about, so they get index pages too.
            for noun, plural_noun in ftr.fr.code_region_kinds():
                if noun not in self.index_pages:
                    self.index_pages[noun] = self.new_index_page(noun, plural_noun)

        # Write the index page.
        if files_to_report:
            first_html = files_to_report[0].html_filename
            final_html = files_to_report[-1].html_filename
        else:
            first_html = final_html = "index.html"
        self.write_file_index_page(first_html, final_html)

        # Write function and class index pages.
        self.write_region_index_pages(files_to_report)

        # 0 when there were no statements at all, else the percent covered.
        return (
            self.index_pages["file"].totals.n_statements
            and self.index_pages["file"].totals.pc_covered
        )

412 

413 def make_directory(self) -> None: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

414 """Make sure our htmlcov directory exists.""" 

415 ensure_dir(self.directory) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

416 if not os.listdir(self.directory): 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

417 self.directory_was_empty = True 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

418 

419 def copy_static_file(self, src: str, slug: str = "") -> None: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

420 """Copy a static file into the output directory with cache busting.""" 

421 dest = copy_with_cache_bust(src, self.directory) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

422 if not slug: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

423 slug = os.path.basename(src).replace(".", "_") 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

424 self.template_globals["statics"][slug] = dest # type: ignore 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

425 

426 def make_local_static_report_files(self) -> None: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

427 """Make local instances of static files for HTML report.""" 

428 

429 # The files we provide must always be copied. 

430 for static in self.STATIC_FILES: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

431 self.copy_static_file(data_filename(static)) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

432 

433 # The user may have extra CSS they want copied. 

434 if self.extra_css: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

435 assert self.config.extra_css is not None 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

436 self.copy_static_file(self.config.extra_css, slug="extra_css") 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

437 

438 # Only write the .gitignore file if the directory was originally empty. 

439 # .gitignore can't be copied from the source tree because if it was in 

440 # the source tree, it would stop the static files from being checked in. 

441 if self.directory_was_empty: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

442 with open(os.path.join(self.directory, ".gitignore"), "w", encoding="utf-8") as fgi: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

443 fgi.write("# Created by coverage.py\n*\n") 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

444 

445 def should_report(self, analysis: Analysis, index_page: IndexPage) -> bool: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

446 """Determine if we'll report this file or region.""" 

447 # Get the numbers for this file. 

448 nums = analysis.numbers 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

449 index_page.totals += nums 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

450 

451 if self.skip_covered: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

452 # Don't report on 100% files. 

453 no_missing_lines = (nums.n_missing == 0) # fmt: skip 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

454 no_missing_branches = (nums.n_partial_branches == 0) # fmt: skip 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

455 if no_missing_lines and no_missing_branches: 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

456 index_page.skipped_covered_count += 1 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

457 return False 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

458 

459 if self.skip_empty: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

460 # Don't report on empty files. 

461 if nums.n_statements == 0: 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

462 index_page.skipped_empty_count += 1 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

463 return False 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

464 

465 return True 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

466 

467 def write_html_page(self, ftr: FileToReport) -> None: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

468 """Generate an HTML page for one source file. 

469 

470 If the page on disk is already correct based on our incremental status 

471 checking, then the page doesn't have to be generated, and this function 

472 only does page summary bookkeeping. 

473 

474 """ 

475 # Find out if the page on disk is already correct. 

476 if self.incr.can_skip_file(self.data, ftr.fr, ftr.rootname): 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

477 self.index_pages["file"].summaries.append(self.incr.index_info(ftr.rootname)) 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

478 return 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

479 

480 # Write the HTML page for this source file. 

481 file_data = self.datagen.data_for_file(ftr.fr, ftr.analysis) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

482 

483 contexts = collections.Counter(c for cline in file_data.lines for c in cline.contexts) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

484 context_codes = {y: i for (i, y) in enumerate(x[0] for x in contexts.most_common())} 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

485 if context_codes: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

486 contexts_json = json.dumps( 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

487 {encode_int(v): k for (k, v) in context_codes.items()}, 

488 indent=2, 

489 ) 

490 else: 

491 contexts_json = None 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

492 

493 for ldata in file_data.lines: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

494 # Build the HTML for the line. 

495 html_parts = [] 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

496 for tok_type, tok_text in ldata.tokens: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

497 if tok_type == "ws": 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

498 html_parts.append(escape(tok_text)) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

499 else: 

500 tok_html = escape(tok_text) or "&nbsp;" 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

501 html_parts.append(f'<span class="{tok_type}">{tok_html}</span>') 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

502 ldata.html = "".join(html_parts) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

503 if ldata.context_list: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

504 encoded_contexts = [ 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

505 encode_int(context_codes[c_context]) for c_context in ldata.context_list 

506 ] 

507 code_width = max(len(ec) for ec in encoded_contexts) 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

508 ldata.context_str = str(code_width) + "".join( 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

509 ec.ljust(code_width) for ec in encoded_contexts 

510 ) 

511 else: 

512 ldata.context_str = "" 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

513 

514 if ldata.short_annotations: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

515 # 202F is NARROW NO-BREAK SPACE. 

516 # 219B is RIGHTWARDS ARROW WITH STROKE. 

517 ldata.annotate = ",&nbsp;&nbsp; ".join( 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

518 f"{ldata.number}&#x202F;&#x219B;&#x202F;{d}" for d in ldata.short_annotations 

519 ) 

520 else: 

521 ldata.annotate = None 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

522 

523 if ldata.long_annotations: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

524 longs = ldata.long_annotations 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

525 # A line can only have two branch destinations. If there were 

526 # two missing, we would have written one as "always raised." 

527 assert len(longs) == 1, ( 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

528 f"Had long annotations in {ftr.fr.relative_filename()}: {longs}" 

529 ) 

530 ldata.annotate_long = longs[0] 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

531 else: 

532 ldata.annotate_long = None 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

533 

534 css_classes = [] 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

535 if ldata.category: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

536 css_classes.append( 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

537 self.template_globals["category"][ldata.category], # type: ignore[index] 

538 ) 

539 ldata.css_class = " ".join(css_classes) or "pln" 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

540 

541 html_path = os.path.join(self.directory, ftr.html_filename) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

542 html = self.source_tmpl.render( 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

543 { 

544 **file_data.__dict__, 

545 "contexts_json": contexts_json, 

546 "prev_html": ftr.prev_html, 

547 "next_html": ftr.next_html, 

548 } 

549 ) 

550 write_html(html_path, html) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

551 

552 # Save this file's information for the index page. 

553 index_info = IndexItem( 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

554 url=ftr.html_filename, 

555 file=escape(ftr.fr.relative_filename()), 

556 nums=ftr.analysis.numbers, 

557 ) 

558 self.index_pages["file"].summaries.append(index_info) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

559 self.incr.set_index_info(ftr.rootname, index_info) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

560 

561 def write_file_index_page(self, first_html: str, final_html: str) -> None: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

562 """Write the file index page for this report.""" 

563 index_file = self.write_index_page( 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

564 self.index_pages["file"], 

565 first_html=first_html, 

566 final_html=final_html, 

567 ) 

568 

569 print_href = stdout_link(index_file, f"file://{os.path.abspath(index_file)}") 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

570 self.coverage._message(f"Wrote HTML report to {print_href}") 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

571 

572 # Write the latest hashes for next time. 

573 self.incr.write() 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

574 

    def write_region_index_pages(self, files_to_report: Iterable[FileToReport]) -> None:
        """Write the other index pages for this report.

        For each region kind a file reporter offers (its
        `code_region_kinds()`), accumulate per-region summary rows onto the
        matching index page, then write every index page except "file"
        (which `write_file_index_page` handles).

        """
        for ftr in files_to_report:
            region_nouns = [pair[0] for pair in ftr.fr.code_region_kinds()]
            num_lines = len(ftr.fr.source().splitlines())
            regions = ftr.fr.code_regions()

            for noun in region_nouns:
                page_data = self.index_pages[noun]

                # Collect the lines that belong to no region of this kind.
                outside_lines = set(range(1, num_lines + 1))
                for region in regions:
                    if region.kind != noun:
                        continue
                    outside_lines -= region.lines

                # All region line-sets (and the leftover lines) are registered
                # with the narrower before any narrowing happens below.
                narrower = AnalysisNarrower(ftr.analysis)
                narrower.add_regions(r.lines for r in regions if r.kind == noun)
                narrower.add_regions([outside_lines])

                for region in regions:
                    if region.kind != noun:
                        continue
                    analysis = narrower.narrow(region.lines)
                    # should_report also folds the numbers into page_data.totals.
                    if not self.should_report(analysis, page_data):
                        continue
                    # The sort key: last dotted component, leading underscores
                    # stripped, embedded as the <data> element's value.
                    sorting_name = region.name.rpartition(".")[-1].lstrip("_")
                    page_data.summaries.append(
                        IndexItem(
                            # Link into the source page at the region's start line.
                            url=f"{ftr.html_filename}#t{region.start}",
                            file=escape(ftr.fr.relative_filename()),
                            description=(
                                f"<data value='{escape(sorting_name)}'>"
                                + escape(region.name)
                                + "</data>"
                            ),
                            nums=analysis.numbers,
                        )
                    )

                # One more row for the code outside any region of this kind.
                analysis = narrower.narrow(outside_lines)
                if self.should_report(analysis, page_data):
                    page_data.summaries.append(
                        IndexItem(
                            url=ftr.html_filename,
                            file=escape(ftr.fr.relative_filename()),
                            description=(
                                "<data value=''>"
                                + f"<span class='no-noun'>(no {escape(noun)})</span>"
                                + "</data>"
                            ),
                            nums=analysis.numbers,
                        )
                    )

        for noun, index_page in self.index_pages.items():
            if noun != "file":
                self.write_index_page(index_page)

633 

634 def write_index_page(self, index_page: IndexPage, **kwargs: str) -> str: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

635 """Write an index page specified by `index_page`. 

636 

637 Returns the filename created. 

638 """ 

639 skipped_covered_msg = skipped_empty_msg = "" 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

640 if n := index_page.skipped_covered_count: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

641 word = plural(n, index_page.noun, index_page.plural) 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

642 skipped_covered_msg = f"{n} {word} skipped due to complete coverage." 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

643 if n := index_page.skipped_empty_count: 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

644 word = plural(n, index_page.noun, index_page.plural) 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

645 skipped_empty_msg = f"{n} empty {word} skipped." 1abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ01234

646 

647 index_buttons = [ 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

648 { 

649 "label": ip.plural.title(), 

650 "url": ip.filename if ip.noun != index_page.noun else "", 

651 "current": ip.noun == index_page.noun, 

652 } 

653 for ip in self.index_pages.values() 

654 ] 

655 render_data = { 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

656 "regions": index_page.summaries, 

657 "totals": index_page.totals, 

658 "noun": index_page.noun, 

659 "region_noun": index_page.noun if index_page.noun != "file" else "", 

660 "skip_covered": self.skip_covered, 

661 "skipped_covered_msg": skipped_covered_msg, 

662 "skipped_empty_msg": skipped_empty_msg, 

663 "first_html": "", 

664 "final_html": "", 

665 "index_buttons": index_buttons, 

666 } 

667 render_data.update(kwargs) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

668 html = self.index_tmpl.render(render_data) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

669 

670 index_file = os.path.join(self.directory, index_page.filename) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

671 write_html(index_file, html) 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

672 return index_file 1abcdefghijklmnopqrstuvwxyzABCDEF5GH6IJ7KL8MN9OP!QR#ST$UV%WX'YZ(01)234

673 

674 

@dataclass
class FileInfo:
    """Summary of the information from last rendering, to avoid duplicate work."""

    # Combined hash of the file's source text and its coverage data; compared
    # by IncrementalChecker.can_skip_file to decide if the page is stale.
    hash: str = ""
    # The index-page entry for this file, kept so the index can be rebuilt
    # even when the file's HTML page itself is reused.
    index: IndexItem = field(default_factory=IndexItem)

681 

682 

class IncrementalChecker:
    """Logic and data to support incremental reporting.

    When generating an HTML report, often only a few of the source files have
    changed since the last time we made the HTML report. This means previously
    created HTML pages can be reused without generating them again, speeding
    the command.

    This class manages a JSON data file that captures enough information to
    know whether an HTML page for a .py file needs to be regenerated or not.
    The data file also needs to store all the information needed to create the
    entry for the file on the index page so that if the HTML page is reused,
    the index page can still be created to refer to it.

    The data looks like::

        {
            "note": "This file is an internal implementation detail ...",
            // A fixed number indicating the data format. STATUS_FORMAT
            "format": 5,
            // The version of coverage.py
            "version": "7.4.4",
            // A hash of a number of global things, including the configuration
            // settings and the pyfile.html template itself.
            "globals": "540ee119c15d52a68a53fe6f0897346d",
            "files": {
                // An entry for each source file keyed by the flat_rootname().
                "z_7b071bdc2a35fa80___init___py": {
                    // Hash of the source, the text of the .py file.
                    "hash": "e45581a5b48f879f301c0f30bf77a50c",
                    // Information for the index.html file.
                    "index": {
                        "url": "z_7b071bdc2a35fa80___init___py.html",
                        "file": "cogapp/__init__.py",
                        "description": "",
                        // The Numbers for this file.
                        "nums": { "precision": 2, "n_files": 1, "n_statements": 43, ... }
                    }
                },
                ...
            }
        }

    """

    # Name of the status file, written into the report directory.
    STATUS_FILE = "status.json"
    # Bump this when the data format changes; older files are discarded.
    STATUS_FORMAT = 5
    NOTE = (
        "This file is an internal implementation detail to speed up HTML report"
        + " generation. Its format can change at any time. You might be looking"
        + " for the JSON report: https://coverage.rtfd.io/cmd.html#cmd-json"
    )

    def __init__(self, directory: str) -> None:
        # `directory` is where the HTML report (and the status file) lives.
        self.directory = directory
        self._reset()

    def _reset(self) -> None:
        """Initialize to empty. Causes all files to be reported."""
        self.globals = ""
        self.files: dict[str, FileInfo] = {}

    def read(self) -> None:
        """Read the information we stored last time."""
        try:
            status_file = os.path.join(self.directory, self.STATUS_FILE)
            with open(status_file, encoding="utf-8") as fstatus:
                status = json.load(fstatus)
        except (OSError, ValueError):
            # Status file is missing or malformed.
            usable = False
        else:
            # Any format or version mismatch invalidates the whole file.
            if status["format"] != self.STATUS_FORMAT:
                usable = False
            elif status["version"] != coverage.__version__:
                usable = False
            else:
                usable = True

        if usable:
            # Rebuild FileInfo/IndexItem/Numbers objects from the raw dicts.
            self.files = {}
            for filename, filedict in status["files"].items():
                indexdict = filedict["index"]
                index_item = IndexItem(**indexdict)
                index_item.nums = Numbers(**indexdict["nums"])
                fileinfo = FileInfo(
                    hash=filedict["hash"],
                    index=index_item,
                )
                self.files[filename] = fileinfo
            self.globals = status["globals"]
        else:
            # Start fresh: every file will be regenerated.
            self._reset()

    def write(self) -> None:
        """Write the current status."""
        status_file = os.path.join(self.directory, self.STATUS_FILE)
        status_data = {
            "note": self.NOTE,
            "format": self.STATUS_FORMAT,
            "version": coverage.__version__,
            "globals": self.globals,
            "files": {fname: dataclasses.asdict(finfo) for fname, finfo in self.files.items()},
        }
        with open(status_file, "w", encoding="utf-8") as fout:
            # Compact separators: this file is machine-read only.
            json.dump(status_data, fout, separators=(",", ":"))

    def check_global_data(self, *data: Any) -> None:
        """Check the global data that can affect incremental reporting.

        Pass in whatever global information could affect the content of the
        HTML pages. If the global data has changed since last time, this will
        clear the data so that all files are regenerated.

        """
        m = Hasher()
        for d in data:
            m.update(d)
        these_globals = m.hexdigest()
        if self.globals != these_globals:
            self._reset()
            self.globals = these_globals

    def can_skip_file(self, data: CoverageData, fr: FileReporter, rootname: str) -> bool:
        """Can we skip reporting this file?

        `data` is a CoverageData object, `fr` is a `FileReporter`, and
        `rootname` is the name being used for the file.

        Returns True if the HTML page is fine as-is, False if we need to recreate
        the HTML page.

        """
        # Hash both the source text and its coverage data: a change in either
        # means the page must be regenerated.
        m = Hasher()
        m.update(fr.source().encode("utf-8"))
        add_data_to_hash(data, fr.filename, m)
        this_hash = m.hexdigest()

        file_info = self.files.setdefault(rootname, FileInfo())

        if this_hash == file_info.hash:
            # Nothing has changed to require the file to be reported again.
            return True
        else:
            # File has changed, record the latest hash and force regeneration.
            file_info.hash = this_hash
            return False

    def index_info(self, fname: str) -> IndexItem:
        """Get the information for index.html for `fname`."""
        return self.files.get(fname, FileInfo()).index

    def set_index_info(self, fname: str, info: IndexItem) -> None:
        """Set the information for index.html for `fname`."""
        self.files.setdefault(fname, FileInfo()).index = info

838 

839 

840# Helpers for templates and generating HTML 

841 

842 

def escape(t: str) -> str:
    """HTML-escape the text in `t`.

    This is only suitable for HTML text, not attributes.

    """
    # Ampersands first, so the "&" in "&lt;" isn't escaped again.
    escaped = t.replace("&", "&amp;")
    return escaped.replace("<", "&lt;")

851 

852 

def pair(ratio: tuple[int, int]) -> str:
    """Format a pair of numbers so JavaScript can read them in an attribute."""
    first, second = ratio
    return f"{first} {second}"

856 

857 

def pretty_file(filename: str) -> str:
    """Return a prettier version of `filename` for display."""
    # Set off each path separator (forward or back slash) with thin spaces.
    thin = "\N{THIN SPACE}"
    return re.sub(r"[/\\]", lambda m: f"{thin}{m[0]}{thin}", filename)