Coverage for tests / test_documentation.py: 78%

93 statements  

« prev     ^ index     » next       coverage.py v7.13.0, created at 2026-04-03 18:53 +0000

1"""Documentation testing suite for Lintro. 

2 

3This module tests various aspects of the project documentation to ensure 

4consistency, accuracy, and completeness. 

5""" 

6 

7import re 

8import subprocess 

9from pathlib import Path 

10 

11import pytest 

12from assertpy import assert_that 

13 

14 

def test_scripts_have_help() -> None:
    """Test that all executable scripts support the ``--help`` flag.

    Every ``*.sh`` file under ``scripts/`` is invoked with ``--help`` and
    must exit with status 0 within 10 seconds. Library files that are
    sourced rather than executed are skipped.

    Raises:
        Failed: via ``pytest.fail`` with one line per failing script.
    """
    script_dir = Path("scripts")
    failed_scripts: list[str] = []

    # Sort so failure output is deterministic; rglob order is
    # filesystem-dependent and otherwise varies between machines.
    for script_file in sorted(script_dir.rglob("*.sh")):
        # Skip utility files that are sourced by other scripts rather
        # than executed directly; they have no --help entry point.
        if script_file.name in ("utils.sh", "install.sh"):
            continue

        try:
            result = subprocess.run(
                [str(script_file), "--help"],
                capture_output=True,
                text=True,
                timeout=10,
            )
        except subprocess.TimeoutExpired:
            failed_scripts.append(f"{script_file}: timeout")
        except Exception as e:  # best-effort: record any launch failure
            failed_scripts.append(f"{script_file}: {e}")
        else:
            if result.returncode != 0:
                failed_scripts.append(
                    f"{script_file}: exit code {result.returncode}",
                )

    if failed_scripts:
        pytest.fail(
            "Scripts without --help support:\n" + "\n".join(failed_scripts),
        )

43 

44 

45def test_scripts_readme_coverage() -> None: 

46 """Test that all scripts are documented in scripts/README.md.""" 

47 scripts_readme = Path("scripts/README.md") 

48 if not scripts_readme.exists(): 

49 pytest.skip("scripts/README.md not found") 

50 

51 with open(scripts_readme, encoding="utf-8") as f: 

52 content = f.read() 

53 

54 # Get all script files 

55 script_files = set() 

56 for script_file in Path("scripts").rglob("*.sh"): 

57 script_files.add(script_file.name) 

58 for script_file in Path("scripts").rglob("*.py"): 

59 if script_file.name != "__init__.py": # Exclude __init__.py files 

60 script_files.add(script_file.name) 

61 

62 # Find documented scripts 

63 documented_scripts = set() 

64 for script_name in script_files: 

65 if script_name in content: 

66 documented_scripts.add(script_name) 

67 

68 missing_docs = script_files - documented_scripts 

69 if missing_docs: 

70 pytest.fail( 

71 "Scripts not documented in scripts/README.md:\n" + "\n".join(missing_docs), 

72 ) 

73 

74 

def test_cli_help_works() -> None:
    """Test that ``lintro --help`` runs and lists the expected commands.

    Runs the CLI through ``uv run python -m lintro`` with a 10-second
    timeout and checks that the help text mentions each core command.
    """
    cmd = ["uv", "run", "python", "-m", "lintro", "--help"]
    try:
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=10,
        )
    except subprocess.TimeoutExpired:
        pytest.fail("lintro --help timed out")
    else:
        assert_that(result.returncode).is_equal_to(0)
        # Help output must advertise every core subcommand.
        for expected_command in ("check", "format", "list-tools"):
            assert_that(result.stdout).contains(expected_command)

90 

91 

def test_internal_doc_links() -> None:
    """Test that internal documentation links point at existing files.

    Only markdown links into ``docs/`` (optionally prefixed with ``./``)
    are validated; external URLs and other relative links are ignored.
    Anchor fragments are stripped before the existence check, so a link
    like ``docs/foo.md#usage`` validates the file ``docs/foo.md``.

    Raises:
        Failed: via ``pytest.fail`` listing each broken link.
    """
    doc_files = [
        "README.md",
        "docs/getting-started.md",
        "docs/contributing.md",
        "docs/docker.md",
        "docs/github-integration.md",
        "scripts/README.md",
    ]

    broken_links = []
    for doc_file in doc_files:
        if not Path(doc_file).exists():
            continue

        with open(doc_file, encoding="utf-8") as f:
            content = f.read()

        # Find markdown links of the form [text](url).
        links = re.findall(r"\[([^\]]+)\]\(([^)]+)\)", content)
        for link_text, link_url in links:
            if link_url.startswith("docs/") or link_url.startswith("./docs/"):
                # Internal documentation link
                link_path = link_url
                if link_path.startswith("./"):
                    link_path = link_path[2:]
                # Strip any "#anchor" fragment: Path("a.md#x").exists()
                # is False even when a.md exists, which previously made
                # valid section links report as broken.
                link_path = link_path.split("#", 1)[0]

                if not Path(link_path).exists():
                    broken_links.append(f"{doc_file}: {link_text} -> {link_url}")

    if broken_links:
        pytest.fail("Broken internal links:\n" + "\n".join(broken_links))

125 

126 

def test_all_docs_have_titles() -> None:
    """Test that each documentation file starts with a level-1 heading.

    A file passes when its very first line begins with ``# `` (a
    markdown H1). Files that do not exist are simply skipped.
    """
    candidates = [
        "README.md",
        "docs/getting-started.md",
        "docs/contributing.md",
        "docs/docker.md",
        "docs/github-integration.md",
        "docs/configuration.md",
        "scripts/README.md",
    ]

    def _starts_with_h1(doc: str) -> bool:
        # Only the first line matters; "# " marks a markdown H1 title.
        with open(doc, encoding="utf-8") as fh:
            return fh.readline().strip().startswith("# ")

    files_without_titles = [
        doc
        for doc in candidates
        if Path(doc).exists() and not _starts_with_h1(doc)
    ]

    if files_without_titles:
        pytest.fail(
            "Docs without titles:\n" + "\n".join(files_without_titles),
        )

152 

153 

def test_command_consistency() -> None:
    """Test that docs use the canonical CLI commands, not old aliases.

    Scans the main documentation files for deprecated command
    spellings and fails with one line per offending file/alias pair.
    """
    doc_files = [
        "README.md",
        "docs/getting-started.md",
        "docs/configuration.md",
    ]
    # Deprecated shorthand spellings that must not appear in the docs.
    old_aliases = ("lintro fmt", "lintro chk", "lintro ls")

    inconsistent_commands = []
    for doc_file in doc_files:
        doc_path = Path(doc_file)
        if not doc_path.exists():
            continue

        text = doc_path.read_text(encoding="utf-8")
        inconsistent_commands.extend(
            f"{doc_file}: uses old alias '{alias}'"
            for alias in old_aliases
            if alias in text
        )

    if inconsistent_commands:
        pytest.fail(
            "Inconsistent command usage:\n" + "\n".join(inconsistent_commands),
        )