@@ -256,7 +256,7 @@ fn matches_check(check: &WeightedCheck, readme_lower: &str, headings: &[String])
256256 let normalized_alias = normalize_phrase ( alias) ;
257257 headings
258258 . iter ( )
259- . any ( |heading| heading == & normalized_alias || heading . contains ( & normalized_alias) )
259+ . any ( |heading| contains_token_sequence ( heading, & normalized_alias) )
260260 } ) ;
261261
262262 if heading_match {
@@ -270,27 +270,45 @@ fn matches_check(check: &WeightedCheck, readme_lower: &str, headings: &[String])
270270}
271271
272272fn extract_normalized_headings ( readme_lower : & str ) -> Vec < String > {
273- readme_lower
274- . lines ( )
275- . filter_map ( |line| {
276- let trimmed = line. trim_start ( ) ;
277- if !trimmed. starts_with ( '#' ) {
278- return None ;
279- }
280-
281- let heading = trimmed. trim_start_matches ( '#' ) . trim ( ) ;
282- if heading. is_empty ( ) {
283- return None ;
284- }
285-
286- let normalized = normalize_phrase ( heading) ;
287- if normalized. is_empty ( ) {
288- None
289- } else {
290- Some ( normalized)
291- }
292- } )
293- . collect ( )
273+ let mut headings = Vec :: new ( ) ;
274+ let mut in_fenced_block = false ;
275+
276+ for line in readme_lower. lines ( ) {
277+ let trimmed = line. trim_start ( ) ;
278+ if trimmed. starts_with ( "```" ) || trimmed. starts_with ( "~~~" ) {
279+ in_fenced_block = !in_fenced_block;
280+ continue ;
281+ }
282+
283+ if in_fenced_block || !trimmed. starts_with ( '#' ) {
284+ continue ;
285+ }
286+
287+ let heading = trimmed. trim_start_matches ( '#' ) . trim ( ) ;
288+ if heading. is_empty ( ) {
289+ continue ;
290+ }
291+
292+ let normalized = normalize_phrase ( heading) ;
293+ if !normalized. is_empty ( ) {
294+ headings. push ( normalized) ;
295+ }
296+ }
297+
298+ headings
299+ }
300+
/// Returns `true` when the whitespace-separated tokens of `alias` occur as a
/// contiguous sub-sequence of the tokens of `heading`.
///
/// Whole-token matching prevents false positives from substring containment
/// (e.g. the alias "tests" must not match the heading "contests"). An empty
/// alias never matches.
fn contains_token_sequence(heading: &str, alias: &str) -> bool {
    let needle: Vec<&str> = alias.split_whitespace().collect();
    if needle.is_empty() {
        return false;
    }

    let haystack: Vec<&str> = heading.split_whitespace().collect();

    // `windows(n)` on a slice shorter than `n` yields nothing, so a heading
    // with fewer tokens than the alias simply produces no candidates.
    haystack.len() >= needle.len()
        && haystack
            .windows(needle.len())
            .any(|candidate| candidate == needle.as_slice())
}
295313
296314fn normalize_phrase ( value : & str ) -> String {
@@ -410,4 +428,33 @@ quickstart-for-agents.vercel.app/api/header.svg
410428 assert_eq ! ( regular. score, strict. score) ;
411429 assert_eq ! ( regular. missing_required, strict. missing_required) ;
412430 }
431+
#[test]
fn does_not_match_alias_inside_other_words() {
    // "Contests" contains "tests" as a substring, but token-sequence
    // matching must not treat it as the "Run Tests" section, so the
    // recommended check is still reported missing.
    let readme_text = "
## Features
## Quick Start
## Architecture
## License
## Contests
";

    let report = audit_repo(&example_repo(), Some(readme_text), 70, false);
    assert!(report.missing_recommended.contains(&"Run Tests"));
}
445+
#[test]
fn ignores_headings_inside_fenced_code_blocks() {
    // "## Features" only appears inside a fenced code sample, so the audit
    // must treat the real Features section as absent.
    let readme_text = "
```markdown
## Features
```
## Quick Start
## Architecture
## License
";

    let report = audit_repo(&example_repo(), Some(readme_text), 70, false);
    assert!(report.missing_required.contains(&"Features"));
}
413460}
0 commit comments