Ver código fonte

feat(config): define multiple global allowlists (#1777)

Richard Gomez 9 meses atrás
pai
commit
4451b455f3
60 arquivos alterados com 1134 adições e 763 exclusões
  1. 1 1
      .github/workflows/test.yml
  2. 15 6
      README.md
  3. 1 1
      cmd/detect.go
  4. 1 1
      cmd/directory.go
  5. 91 89
      cmd/generate/config/base/config.go
  6. 32 16
      cmd/generate/config/base/config_test.go
  7. 13 11
      cmd/generate/config/rules/config.tmpl
  8. 33 34
      config/allowlist.go
  9. 1 1
      config/allowlist_test.go
  10. 153 116
      config/config.go
  11. 368 166
      config/config_test.go
  12. 17 15
      config/gitleaks.toml
  13. 167 139
      detect/detect.go
  14. 150 119
      detect/detect_test.go
  15. 5 3
      detect/git.go
  16. 12 5
      sources/directory.go
  17. 2 2
      testdata/config/generic.toml
  18. 8 9
      testdata/config/generic_with_py_path.toml
  19. 2 0
      testdata/config/invalid/allowlist_global_empty.toml
  20. 4 0
      testdata/config/invalid/allowlist_global_old_and_new.toml
  21. 3 0
      testdata/config/invalid/allowlist_global_regextarget.toml
  22. 7 0
      testdata/config/invalid/allowlist_global_target_rule_id.toml
  23. 0 2
      testdata/config/invalid/allowlist_rule_empty.toml
  24. 0 2
      testdata/config/invalid/allowlist_rule_old_and_new.toml
  25. 0 2
      testdata/config/invalid/allowlist_rule_regextarget.toml
  26. 5 3
      testdata/config/invalid/extend_invalid_base.toml
  27. 0 0
      testdata/config/invalid/rule_bad_entropy_group.toml
  28. 0 0
      testdata/config/invalid/rule_missing_id.toml
  29. 0 0
      testdata/config/invalid/rule_no_regex_or_path.toml
  30. 10 0
      testdata/config/valid/allowlist_global_multiple.toml
  31. 2 0
      testdata/config/valid/allowlist_global_old_compat.toml
  32. 2 0
      testdata/config/valid/allowlist_global_regex.toml
  33. 20 0
      testdata/config/valid/allowlist_global_target_rules.toml
  34. 0 0
      testdata/config/valid/allowlist_rule_commit.toml
  35. 0 2
      testdata/config/valid/allowlist_rule_old_compat.toml
  36. 0 0
      testdata/config/valid/allowlist_rule_path.toml
  37. 0 0
      testdata/config/valid/allowlist_rule_regex.toml
  38. 1 3
      testdata/config/valid/extend.toml
  39. 1 1
      testdata/config/valid/extend_base_1.toml
  40. 0 0
      testdata/config/valid/extend_base_2.toml
  41. 0 0
      testdata/config/valid/extend_base_3.toml
  42. 1 1
      testdata/config/valid/extend_base_rule_including_keywords_with_attribute.toml
  43. 1 1
      testdata/config/valid/extend_disabled.toml
  44. 0 0
      testdata/config/valid/extend_disabled_base.toml
  45. 1 1
      testdata/config/valid/extend_rule_allowlist_and.toml
  46. 0 0
      testdata/config/valid/extend_rule_allowlist_base.toml
  47. 1 1
      testdata/config/valid/extend_rule_allowlist_or.toml
  48. 0 0
      testdata/config/valid/extend_rule_keywords_base.toml
  49. 1 1
      testdata/config/valid/extend_rule_new.toml
  50. 1 1
      testdata/config/valid/extend_rule_no_regexpath.toml
  51. 0 0
      testdata/config/valid/extend_rule_override_description.toml
  52. 0 0
      testdata/config/valid/extend_rule_override_entropy.toml
  53. 0 0
      testdata/config/valid/extend_rule_override_keywords.toml
  54. 0 0
      testdata/config/valid/extend_rule_override_path.toml
  55. 0 0
      testdata/config/valid/extend_rule_override_regex.toml
  56. 0 0
      testdata/config/valid/extend_rule_override_secret_group.toml
  57. 0 0
      testdata/config/valid/extend_rule_override_tags.toml
  58. 0 2
      testdata/config/valid/rule_entropy_group.toml
  59. 0 2
      testdata/config/valid/rule_path_only.toml
  60. 1 4
      testdata/config/valid/rule_regex_escaped_character_group.toml

+ 1 - 1
.github/workflows/test.yml

@@ -23,7 +23,7 @@ jobs:
           go-version: 1.23
           go-version: 1.23
 
 
       - name: Build
       - name: Build
-        run: go build -v ./...
+        run: go build ./...
 
 
       - name: Set up gotestsum
       - name: Set up gotestsum
         run: |
         run: |

+ 15 - 6
README.md

@@ -290,11 +290,10 @@ disabledRules = [ "generic-api-key"]
 # An array of tables that contain information that define instructions
 # An array of tables that contain information that define instructions
 # on how to detect secrets
 # on how to detect secrets
 [[rules]]
 [[rules]]
-
 # Unique identifier for this rule
 # Unique identifier for this rule
 id = "awesome-rule-1"
 id = "awesome-rule-1"
 
 
-# Short human readable description of the rule.
+# Short human-readable description of the rule.
 description = "awesome rule 1"
 description = "awesome rule 1"
 
 
 # Golang regular expression used to detect secrets. Note Golang's regex engine
 # Golang regular expression used to detect secrets. Note Golang's regex engine
@@ -368,19 +367,20 @@ id = "gitlab-pat"
     regexTarget = "line"
     regexTarget = "line"
     regexes = [ '''MY-glpat-''' ]
     regexes = [ '''MY-glpat-''' ]
 
 
-# This is a global allowlist which has a higher order of precedence than rule-specific allowlists.
+
+# ⚠️ In v8.25.0 `[allowlist]` was replaced with `[[allowlists]]`.
+# 
+# Global allowlists have a higher order of precedence than rule-specific allowlists.
 # If a commit listed in the `commits` field below is encountered then that commit will be skipped and no
 # If a commit listed in the `commits` field below is encountered then that commit will be skipped and no
 # secrets will be detected for said commit. The same logic applies for regexes and paths.
 # secrets will be detected for said commit. The same logic applies for regexes and paths.
-[allowlist]
+[[allowlists]]
 description = "global allow list"
 description = "global allow list"
 commits = [ "commit-A", "commit-B", "commit-C"]
 commits = [ "commit-A", "commit-B", "commit-C"]
 paths = [
 paths = [
   '''gitleaks\.toml''',
   '''gitleaks\.toml''',
   '''(.*?)(jpg|gif|doc)'''
   '''(.*?)(jpg|gif|doc)'''
 ]
 ]
-
 # note: (global) regexTarget defaults to check the _Secret_ in the finding.
 # note: (global) regexTarget defaults to check the _Secret_ in the finding.
-# if regexTarget is not specified then _Secret_ will be used.
 # Acceptable values for regexTarget are "match" and "line"
 # Acceptable values for regexTarget are "match" and "line"
 regexTarget = "match"
 regexTarget = "match"
 regexes = [
 regexes = [
@@ -394,6 +394,15 @@ stopwords = [
   '''client''',
   '''client''',
   '''endpoint''',
   '''endpoint''',
 ]
 ]
+
+# ⚠️ In v8.25.0, `[[allowlists]]` have a new field called `targetRules`.
+#
+# Common allowlists can be defined once and assigned to multiple rules using `targetRules`.
+# This will only run on the specified rules, not globally.
+[[allowlists]]
+targetRules = ["awesome-rule-1", "awesome-rule-2"]
+description = "Our test assets trigger false-positives in a couple rules."
+paths = ['''tests/expected/.*\.json$''']
 ```
 ```
 
 
 Refer to the default [gitleaks config](https://github.com/gitleaks/gitleaks/blob/master/config/gitleaks.toml) for examples or follow the [contributing guidelines](https://github.com/gitleaks/gitleaks/blob/master/CONTRIBUTING.md) if you would like to contribute to the default configuration. Additionally, you can check out [this gitleaks blog post](https://blog.gitleaks.io/stop-leaking-secrets-configuration-2-3-aeed293b1fbf) which covers advanced configuration setups.
 Refer to the default [gitleaks config](https://github.com/gitleaks/gitleaks/blob/master/config/gitleaks.toml) for examples or follow the [contributing guidelines](https://github.com/gitleaks/gitleaks/blob/master/CONTRIBUTING.md) if you would like to contribute to the default configuration. Additionally, you can check out [this gitleaks blog post](https://blog.gitleaks.io/stop-leaking-secrets-configuration-2-3-aeed293b1fbf) which covers advanced configuration setups.

+ 1 - 1
cmd/detect.go

@@ -78,7 +78,7 @@ func runDetect(cmd *cobra.Command, args []string) {
 			source,
 			source,
 			detector.Sema,
 			detector.Sema,
 			detector.FollowSymlinks,
 			detector.FollowSymlinks,
-			detector.Config.Allowlist.PathAllowed,
+			detector.Config.Allowlists,
 		)
 		)
 		if err != nil {
 		if err != nil {
 			logging.Fatal().Err(err).Send()
 			logging.Fatal().Err(err).Send()

+ 1 - 1
cmd/directory.go

@@ -60,7 +60,7 @@ func runDirectory(cmd *cobra.Command, args []string) {
 		source,
 		source,
 		detector.Sema,
 		detector.Sema,
 		detector.FollowSymlinks,
 		detector.FollowSymlinks,
-		detector.Config.Allowlist.PathAllowed,
+		detector.Config.Allowlists,
 	)
 	)
 	if err != nil {
 	if err != nil {
 		logging.Fatal().Err(err)
 		logging.Fatal().Err(err)

+ 91 - 89
cmd/generate/config/base/config.go

@@ -11,106 +11,108 @@ import (
 func CreateGlobalConfig() config.Config {
 func CreateGlobalConfig() config.Config {
 	return config.Config{
 	return config.Config{
 		Title: "gitleaks config",
 		Title: "gitleaks config",
-		Allowlist: &config.Allowlist{
-			Description: "global allow lists",
-			Regexes: []*regexp.Regexp{
-				// ----------- General placeholders -----------
-				regexp.MustCompile(`(?i)^true|false|null$`),
-				// Awkward workaround to detect repeated characters.
-				func() *regexp.Regexp {
-					var (
-						letters  = "abcdefghijklmnopqrstuvwxyz*."
-						patterns []string
-					)
-					for _, char := range letters {
-						if char == '*' || char == '.' {
-							patterns = append(patterns, fmt.Sprintf("\\%c+", char))
-						} else {
-							patterns = append(patterns, fmt.Sprintf("%c+", char))
+		Allowlists: []*config.Allowlist{
+			{
+				Description: "global allow lists",
+				Regexes: []*regexp.Regexp{
+					// ----------- General placeholders -----------
+					regexp.MustCompile(`(?i)^true|false|null$`),
+					// Awkward workaround to detect repeated characters.
+					func() *regexp.Regexp {
+						var (
+							letters  = "abcdefghijklmnopqrstuvwxyz*."
+							patterns []string
+						)
+						for _, char := range letters {
+							if char == '*' || char == '.' {
+								patterns = append(patterns, fmt.Sprintf("\\%c+", char))
+							} else {
+								patterns = append(patterns, fmt.Sprintf("%c+", char))
+							}
 						}
 						}
-					}
-					return regexp.MustCompile("^(?i:" + strings.Join(patterns, "|") + ")$")
-				}(),
+						return regexp.MustCompile("^(?i:" + strings.Join(patterns, "|") + ")$")
+					}(),
 
 
-				// ----------- Environment Variables -----------
-				regexp.MustCompile(`^\$(?:\d+|{\d+})$`),
-				regexp.MustCompile(`^\$(?:[A-Z_]+|[a-z_]+)$`),
-				regexp.MustCompile(`^\${(?:[A-Z_]+|[a-z_]+)}$`),
+					// ----------- Environment Variables -----------
+					regexp.MustCompile(`^\$(?:\d+|{\d+})$`),
+					regexp.MustCompile(`^\$(?:[A-Z_]+|[a-z_]+)$`),
+					regexp.MustCompile(`^\${(?:[A-Z_]+|[a-z_]+)}$`),
 
 
-				// ----------- Interpolated Variables -----------
-				// Ansible (https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html)
-				regexp.MustCompile(`^\{\{[ \t]*[\w ().|]+[ \t]*}}$`),
-				// GitHub Actions
-				// https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables
-				// https://docs.github.com/en/actions/security-for-github-actions/security-guides/using-secrets-in-github-actions
-				regexp.MustCompile(`^\$\{\{[ \t]*(?:(?:env|github|secrets|vars)(?:\.[A-Za-z]\w+)+[\w "'&./=|]*)[ \t]*}}$`),
-				// NuGet (https://learn.microsoft.com/en-us/nuget/reference/nuget-config-file#using-environment-variables)
-				regexp.MustCompile(`^%(?:[A-Z_]+|[a-z_]+)%$`),
-				// String formatting.
-				regexp.MustCompile(`^%[+\-# 0]?[bcdeEfFgGoOpqstTUvxX]$`), // Golang (https://pkg.go.dev/fmt)
-				regexp.MustCompile(`^\{\d{0,2}}$`),                       // Python (https://docs.python.org/3/tutorial/inputoutput.html)
-				// Urban Code Deploy (https://www.ibm.com/support/pages/replace-token-step-replaces-replacement-values-windows-variables)
-				regexp.MustCompile(`^@(?:[A-Z_]+|[a-z_]+)@$`),
+					// ----------- Interpolated Variables -----------
+					// Ansible (https://docs.ansible.com/ansible/latest/playbook_guide/playbooks_variables.html)
+					regexp.MustCompile(`^\{\{[ \t]*[\w ().|]+[ \t]*}}$`),
+					// GitHub Actions
+					// https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables
+					// https://docs.github.com/en/actions/security-for-github-actions/security-guides/using-secrets-in-github-actions
+					regexp.MustCompile(`^\$\{\{[ \t]*(?:(?:env|github|secrets|vars)(?:\.[A-Za-z]\w+)+[\w "'&./=|]*)[ \t]*}}$`),
+					// NuGet (https://learn.microsoft.com/en-us/nuget/reference/nuget-config-file#using-environment-variables)
+					regexp.MustCompile(`^%(?:[A-Z_]+|[a-z_]+)%$`),
+					// String formatting.
+					regexp.MustCompile(`^%[+\-# 0]?[bcdeEfFgGoOpqstTUvxX]$`), // Golang (https://pkg.go.dev/fmt)
+					regexp.MustCompile(`^\{\d{0,2}}$`),                       // Python (https://docs.python.org/3/tutorial/inputoutput.html)
+					// Urban Code Deploy (https://www.ibm.com/support/pages/replace-token-step-replaces-replacement-values-windows-variables)
+					regexp.MustCompile(`^@(?:[A-Z_]+|[a-z_]+)@$`),
 
 
-				// ----------- Miscellaneous -----------
-				// File paths
-				regexp.MustCompile(`^/Users/(?i)[a-z0-9]+/[\w .-/]+$`),              // MacOS
-				regexp.MustCompile(`^/(?:bin|etc|home|opt|tmp|usr|var)/[\w ./-]+$`), // Linux
-				// 11980 Jps -Dapplication.home=D:\develop_tools\jdk\jdk1.8.0_131 -Xms8m
-				//regexp.MustCompile(`^$`), // Windows
-			},
-			Paths: []*regexp.Regexp{
-				regexp.MustCompile(`gitleaks\.toml`),
+					// ----------- Miscellaneous -----------
+					// File paths
+					regexp.MustCompile(`^/Users/(?i)[a-z0-9]+/[\w .-/]+$`),              // MacOS
+					regexp.MustCompile(`^/(?:bin|etc|home|opt|tmp|usr|var)/[\w ./-]+$`), // Linux
+					// 11980 Jps -Dapplication.home=D:\develop_tools\jdk\jdk1.8.0_131 -Xms8m
+					//regexp.MustCompile(`^$`), // Windows
+				},
+				Paths: []*regexp.Regexp{
+					regexp.MustCompile(`gitleaks\.toml`),
 
 
-				// ----------- Documents and media -----------
-				regexp.MustCompile(`(?i)\.(?:bmp|gif|jpe?g|png|svg|tiff?)$`), // Images
-				regexp.MustCompile(`(?i)\.(?:eot|[ot]tf|woff2?)$`),           // Fonts
-				regexp.MustCompile(`(?i)\.(?:docx?|xlsx?|pdf|bin|socket|vsidx|v2|suo|wsuo|.dll|pdb|exe|gltf|zip)$`),
+					// ----------- Documents and media -----------
+					regexp.MustCompile(`(?i)\.(?:bmp|gif|jpe?g|png|svg|tiff?)$`), // Images
+					regexp.MustCompile(`(?i)\.(?:eot|[ot]tf|woff2?)$`),           // Fonts
+					regexp.MustCompile(`(?i)\.(?:docx?|xlsx?|pdf|bin|socket|vsidx|v2|suo|wsuo|.dll|pdb|exe|gltf|zip)$`),
 
 
-				// ----------- Golang files -----------
-				regexp.MustCompile(`go\.(?:mod|sum|work(?:\.sum)?)$`),
-				regexp.MustCompile(`(?:^|/)vendor/modules\.txt$`),
-				regexp.MustCompile(`(?:^|/)vendor/(?:github\.com|golang\.org/x|google\.golang\.org|gopkg\.in|istio\.io|k8s\.io|sigs\.k8s\.io)(?:/.*)?$`),
+					// ----------- Golang files -----------
+					regexp.MustCompile(`go\.(?:mod|sum|work(?:\.sum)?)$`),
+					regexp.MustCompile(`(?:^|/)vendor/modules\.txt$`),
+					regexp.MustCompile(`(?:^|/)vendor/(?:github\.com|golang\.org/x|google\.golang\.org|gopkg\.in|istio\.io|k8s\.io|sigs\.k8s\.io)(?:/.*)?$`),
 
 
-				// ----------- Java files -----------
-				// Gradle
-				regexp.MustCompile(`(?:^|/)gradlew(?:\.bat)?$`),
-				regexp.MustCompile(`(?:^|/)gradle\.lockfile$`),
-				regexp.MustCompile(`(?:^|/)mvnw(?:\.cmd)?$`),
-				regexp.MustCompile(`(?:^|/)\.mvn/wrapper/MavenWrapperDownloader\.java$`),
+					// ----------- Java files -----------
+					// Gradle
+					regexp.MustCompile(`(?:^|/)gradlew(?:\.bat)?$`),
+					regexp.MustCompile(`(?:^|/)gradle\.lockfile$`),
+					regexp.MustCompile(`(?:^|/)mvnw(?:\.cmd)?$`),
+					regexp.MustCompile(`(?:^|/)\.mvn/wrapper/MavenWrapperDownloader\.java$`),
 
 
-				// ----------- JavaScript files -----------
-				// Dependencies and lock files.
-				regexp.MustCompile(`(?:^|/)node_modules(?:/.*)?$`),
-				regexp.MustCompile(`(?:^|/)(?:deno\.lock|npm-shrinkwrap\.json|package-lock\.json|pnpm-lock\.yaml|yarn\.lock)$`),
-				regexp.MustCompile(`(?:^|/)bower_components(?:/.*)?$`),
-				// TODO: Add more common static assets, such as swagger-ui.
-				regexp.MustCompile(`(?:^|/)(?:angular|bootstrap|jquery(?:-?ui)?|plotly|swagger-?ui)[a-zA-Z0-9.-]*(?:\.min)?\.js(?:\.map)?$`),
-				regexp.MustCompile(`(?:^|/)javascript\.json$`),
+					// ----------- JavaScript files -----------
+					// Dependencies and lock files.
+					regexp.MustCompile(`(?:^|/)node_modules(?:/.*)?$`),
+					regexp.MustCompile(`(?:^|/)(?:deno\.lock|npm-shrinkwrap\.json|package-lock\.json|pnpm-lock\.yaml|yarn\.lock)$`),
+					regexp.MustCompile(`(?:^|/)bower_components(?:/.*)?$`),
+					// TODO: Add more common static assets, such as swagger-ui.
+					regexp.MustCompile(`(?:^|/)(?:angular|bootstrap|jquery(?:-?ui)?|plotly|swagger-?ui)[a-zA-Z0-9.-]*(?:\.min)?\.js(?:\.map)?$`),
+					regexp.MustCompile(`(?:^|/)javascript\.json$`),
 
 
-				// ----------- Python files -----------
-				// Dependencies and lock files.
-				regexp.MustCompile(`(?:^|/)(?:Pipfile|poetry)\.lock$`),
-				// Virtual environments
-				regexp.MustCompile(`(?i)(?:^|/)(?:v?env|virtualenv)/lib(?:64)?(?:/.*)?$`),
-				regexp.MustCompile(`(?i)(?:^|/)(?:lib(?:64)?/python[23](?:\.\d{1,2})+|python/[23](?:\.\d{1,2})+/lib(?:64)?)(?:/.*)?$`),
-				// dist-info directory (https://py-pkgs.org/04-package-structure.html#building-sdists-and-wheels)
-				regexp.MustCompile(`(?i)(?:^|/)[a-z0-9_.]+-[0-9.]+\.dist-info(?:/.+)?$`),
+					// ----------- Python files -----------
+					// Dependencies and lock files.
+					regexp.MustCompile(`(?:^|/)(?:Pipfile|poetry)\.lock$`),
+					// Virtual environments
+					regexp.MustCompile(`(?i)(?:^|/)(?:v?env|virtualenv)/lib(?:64)?(?:/.*)?$`),
+					regexp.MustCompile(`(?i)(?:^|/)(?:lib(?:64)?/python[23](?:\.\d{1,2})+|python/[23](?:\.\d{1,2})+/lib(?:64)?)(?:/.*)?$`),
+					// dist-info directory (https://py-pkgs.org/04-package-structure.html#building-sdists-and-wheels)
+					regexp.MustCompile(`(?i)(?:^|/)[a-z0-9_.]+-[0-9.]+\.dist-info(?:/.+)?$`),
 
 
-				// ----------- Ruby files -----------
-				regexp.MustCompile(`(?:^|/)vendor/(?:bundle|ruby)(?:/.*?)?$`),
-				regexp.MustCompile(`\.gem$`), // tar archive
+					// ----------- Ruby files -----------
+					regexp.MustCompile(`(?:^|/)vendor/(?:bundle|ruby)(?:/.*?)?$`),
+					regexp.MustCompile(`\.gem$`), // tar archive
 
 
-				// Misc
-				regexp.MustCompile(`verification-metadata\.xml`),
-				regexp.MustCompile(`Database.refactorlog`),
-				// regexp.MustCompile(`vendor`),
-			},
-			StopWords: []string{
-				"abcdefghijklmnopqrstuvwxyz", // character range
-				// ----------- Secrets -----------
-				// Checkmarx client secret. (https://github.com/checkmarx-ts/checkmarx-python-sdk/blob/86560f6e2a3e46d16322101294da10d5d190312d/README.md?plain=1#L56)
-				"014df517-39d1-4453-b7b3-9930c563627c",
+					// Misc
+					regexp.MustCompile(`verification-metadata\.xml`),
+					regexp.MustCompile(`Database.refactorlog`),
+					// regexp.MustCompile(`vendor`),
+				},
+				StopWords: []string{
+					"abcdefghijklmnopqrstuvwxyz", // character range
+					// ----------- Secrets -----------
+					// Checkmarx client secret. (https://github.com/checkmarx-ts/checkmarx-python-sdk/blob/86560f6e2a3e46d16322101294da10d5d190312d/README.md?plain=1#L56)
+					"014df517-39d1-4453-b7b3-9930c563627c",
+				},
 			},
 			},
 		},
 		},
 	}
 	}

+ 32 - 16
cmd/generate/config/base/config_test.go

@@ -75,18 +75,22 @@ var allowlistRegexTests = map[string]struct {
 
 
 func TestConfigAllowlistRegexes(t *testing.T) {
 func TestConfigAllowlistRegexes(t *testing.T) {
 	cfg := CreateGlobalConfig()
 	cfg := CreateGlobalConfig()
-	allowlist := cfg.Allowlist
+	allowlists := cfg.Allowlists
 	for name, cases := range allowlistRegexTests {
 	for name, cases := range allowlistRegexTests {
 		t.Run(name, func(t *testing.T) {
 		t.Run(name, func(t *testing.T) {
 			for _, c := range cases.invalid {
 			for _, c := range cases.invalid {
-				if !allowlist.RegexAllowed(c) {
-					t.Errorf("invalid value not marked as allowed: %s", c)
+				for _, a := range allowlists {
+					if !a.RegexAllowed(c) {
+						t.Errorf("invalid value not marked as allowed: %s", c)
+					}
 				}
 				}
 			}
 			}
 
 
 			for _, c := range cases.valid {
 			for _, c := range cases.valid {
-				if allowlist.RegexAllowed(c) {
-					t.Errorf("valid value marked as allowed: %s", c)
+				for _, a := range allowlists {
+					if a.RegexAllowed(c) {
+						t.Errorf("valid value marked as allowed: %s", c)
+					}
 				}
 				}
 			}
 			}
 		})
 		})
@@ -95,15 +99,19 @@ func TestConfigAllowlistRegexes(t *testing.T) {
 
 
 func BenchmarkConfigAllowlistRegexes(b *testing.B) {
 func BenchmarkConfigAllowlistRegexes(b *testing.B) {
 	cfg := CreateGlobalConfig()
 	cfg := CreateGlobalConfig()
-	allowlist := cfg.Allowlist
+	allowlists := cfg.Allowlists
 	for n := 0; n < b.N; n++ {
 	for n := 0; n < b.N; n++ {
 		for _, cases := range allowlistRegexTests {
 		for _, cases := range allowlistRegexTests {
 			for _, c := range cases.invalid {
 			for _, c := range cases.invalid {
-				allowlist.RegexAllowed(c)
+				for _, a := range allowlists {
+					a.RegexAllowed(c)
+				}
 			}
 			}
 
 
 			for _, c := range cases.valid {
 			for _, c := range cases.valid {
-				allowlist.RegexAllowed(c)
+				for _, a := range allowlists {
+					a.RegexAllowed(c)
+				}
 			}
 			}
 		}
 		}
 	}
 	}
@@ -152,18 +160,22 @@ var allowlistPathsTests = map[string]struct {
 
 
 func TestConfigAllowlistPaths(t *testing.T) {
 func TestConfigAllowlistPaths(t *testing.T) {
 	cfg := CreateGlobalConfig()
 	cfg := CreateGlobalConfig()
-	allowlist := cfg.Allowlist
+	allowlists := cfg.Allowlists
 	for name, cases := range allowlistPathsTests {
 	for name, cases := range allowlistPathsTests {
 		t.Run(name, func(t *testing.T) {
 		t.Run(name, func(t *testing.T) {
 			for _, c := range cases.invalid {
 			for _, c := range cases.invalid {
-				if !allowlist.PathAllowed(c) {
-					t.Errorf("invalid path not marked as allowed: %s", c)
+				for _, a := range allowlists {
+					if !a.PathAllowed(c) {
+						t.Errorf("invalid path not marked as allowed: %s", c)
+					}
 				}
 				}
 			}
 			}
 
 
 			for _, c := range cases.valid {
 			for _, c := range cases.valid {
-				if allowlist.PathAllowed(c) {
-					t.Errorf("valid path marked as allowed: %s", c)
+				for _, a := range allowlists {
+					if a.PathAllowed(c) {
+						t.Errorf("valid path marked as allowed: %s", c)
+					}
 				}
 				}
 			}
 			}
 		})
 		})
@@ -172,15 +184,19 @@ func TestConfigAllowlistPaths(t *testing.T) {
 
 
 func BenchmarkConfigAllowlistPaths(b *testing.B) {
 func BenchmarkConfigAllowlistPaths(b *testing.B) {
 	cfg := CreateGlobalConfig()
 	cfg := CreateGlobalConfig()
-	allowlist := cfg.Allowlist
+	allowlists := cfg.Allowlists
 	for n := 0; n < b.N; n++ {
 	for n := 0; n < b.N; n++ {
 		for _, cases := range allowlistPathsTests {
 		for _, cases := range allowlistPathsTests {
 			for _, c := range cases.invalid {
 			for _, c := range cases.invalid {
-				allowlist.PathAllowed(c)
+				for _, a := range allowlists {
+					a.PathAllowed(c)
+				}
 			}
 			}
 
 
 			for _, c := range cases.valid {
 			for _, c := range cases.valid {
-				allowlist.PathAllowed(c)
+				for _, a := range allowlists {
+					a.PathAllowed(c)
+				}
 			}
 			}
 		}
 		}
 	}
 	}

+ 13 - 11
cmd/generate/config/rules/config.tmpl

@@ -13,21 +13,22 @@
 
 
 title = "{{.Title}}"
 title = "{{.Title}}"
 
 
-[allowlist]
-description = "{{.Allowlist.Description}}"
-{{- with .Allowlist.RegexTarget }}
-regexTarget = "{{ . }}"{{ end -}}
-{{- with .Allowlist.Regexes }}
-regexes = [{{ range $i, $regex := . }}
-    '''{{ $regex }}''',{{ end }}
+{{ with .Allowlists }}{{ range $i, $allowlist := . }}{{ if or $allowlist.Regexes $allowlist.Paths $allowlist.Commits $allowlist.StopWords }}# TODO: change to [[allowlists]]{{println}}[allowlist]
+{{- with .Description }}{{println}}description = "{{ . }}"{{ end }}
+{{- with .MatchCondition }}{{println}}condition = "{{ .String }}"{{ end }}
+{{- with .Commits -}}{{println}}commits = [
+    {{ range $j, $commit := . }}"{{ $commit }}",{{ end }}
 ]{{ end }}
 ]{{ end }}
-{{- with .Allowlist.Paths }}
-paths = [{{ range $i, $path := . }}
+{{- with .Paths }}{{println}}paths = [{{ range $j, $path := . }}
     '''{{ $path }}''',{{ end }}
     '''{{ $path }}''',{{ end }}
 ]{{ end }}
 ]{{ end }}
-{{- with .Allowlist.StopWords }}{{println}}stopwords = [{{ range $j, $stopword := . }}
+{{- if and .RegexTarget .Regexes }}{{println}}regexTarget = "{{ .RegexTarget }}"{{ end -}}
+{{- with .Regexes }}{{println}}regexes = [{{ range $i, $regex := . }}
+    '''{{ $regex }}''',{{ end }}
+]{{ end }}
+{{- with .StopWords }}{{println}}stopwords = [{{ range $j, $stopword := . }}
     "{{ $stopword }}",{{ end }}
     "{{ $stopword }}",{{ end }}
-]{{ end }}{{println}}
+]{{ end }}{{ end }}{{ end }}{{ end }}{{println}}
 
 
 {{- range $i, $rule := .Rules }}{{println}}[[rules]]
 {{- range $i, $rule := .Rules }}{{println}}[[rules]]
 id = "{{$rule.RuleID}}"
 id = "{{$rule.RuleID}}"
@@ -51,6 +52,7 @@ tags = [
     {{ range $j, $tag := . }}"{{ $tag }}",{{ end }}
     {{ range $j, $tag := . }}"{{ $tag }}",{{ end }}
 ]{{ end }}
 ]{{ end }}
 {{- with $rule.Allowlists }}{{ range $i, $allowlist := . }}{{ if or $allowlist.Regexes $allowlist.Paths $allowlist.Commits $allowlist.StopWords }}{{println}}[[rules.allowlists]]
 {{- with $rule.Allowlists }}{{ range $i, $allowlist := . }}{{ if or $allowlist.Regexes $allowlist.Paths $allowlist.Commits $allowlist.StopWords }}{{println}}[[rules.allowlists]]
+{{- with .Description }}{{println}}description = "{{ . }}"{{ end }}
 {{- with .MatchCondition }}{{println}}condition = "{{ .String }}"{{ end }}
 {{- with .MatchCondition }}{{println}}condition = "{{ .String }}"{{ end }}
 {{- with .Commits -}}{{println}}commits = [
 {{- with .Commits -}}{{println}}commits = [
     {{ range $j, $commit := . }}"{{ $commit }}",{{ end }}
     {{ range $j, $commit := . }}"{{ $commit }}",{{ end }}

+ 33 - 34
config/allowlist.go

@@ -59,6 +59,39 @@ type Allowlist struct {
 	validated bool
 	validated bool
 }
 }
 
 
+func (a *Allowlist) Validate() error {
+	if a.validated {
+		return nil
+	}
+
+	// Disallow empty allowlists.
+	if len(a.Commits) == 0 &&
+		len(a.Paths) == 0 &&
+		len(a.Regexes) == 0 &&
+		len(a.StopWords) == 0 {
+		return fmt.Errorf("must contain at least one check for: commits, paths, regexes, or stopwords")
+	}
+
+	// Deduplicate commits and stopwords.
+	if len(a.Commits) > 0 {
+		uniqueCommits := make(map[string]struct{})
+		for _, commit := range a.Commits {
+			uniqueCommits[commit] = struct{}{}
+		}
+		a.Commits = maps.Keys(uniqueCommits)
+	}
+	if len(a.StopWords) > 0 {
+		uniqueStopwords := make(map[string]struct{})
+		for _, stopWord := range a.StopWords {
+			uniqueStopwords[stopWord] = struct{}{}
+		}
+		a.StopWords = maps.Keys(uniqueStopwords)
+	}
+
+	a.validated = true
+	return nil
+}
+
 // CommitAllowed returns true if the commit is allowed to be ignored.
 // CommitAllowed returns true if the commit is allowed to be ignored.
 func (a *Allowlist) CommitAllowed(c string) (bool, string) {
 func (a *Allowlist) CommitAllowed(c string) (bool, string) {
 	if a == nil || c == "" {
 	if a == nil || c == "" {
@@ -102,37 +135,3 @@ func (a *Allowlist) ContainsStopWord(s string) (bool, string) {
 	}
 	}
 	return false, ""
 	return false, ""
 }
 }
-
-func (a *Allowlist) Validate() error {
-	if a.validated {
-		return nil
-	}
-
-	// Disallow empty allowlists.
-	if len(a.Commits) == 0 &&
-		len(a.Paths) == 0 &&
-		len(a.Regexes) == 0 &&
-		len(a.StopWords) == 0 {
-		return fmt.Errorf("[[rules.allowlists]] must contain at least one check for: commits, paths, regexes, or stopwords")
-	}
-
-	// Deduplicate commits and stopwords.
-	if len(a.Commits) > 0 {
-		uniqueCommits := make(map[string]struct{})
-		for _, commit := range a.Commits {
-			uniqueCommits[commit] = struct{}{}
-		}
-		a.Commits = maps.Keys(uniqueCommits)
-	}
-
-	if len(a.StopWords) > 0 {
-		uniqueStopwords := make(map[string]struct{})
-		for _, stopWord := range a.StopWords {
-			uniqueStopwords[stopWord] = struct{}{}
-		}
-		a.StopWords = maps.Keys(uniqueStopwords)
-	}
-
-	a.validated = true
-	return nil
-}

+ 1 - 1
config/allowlist_test.go

@@ -105,7 +105,7 @@ func TestValidate(t *testing.T) {
 	}{
 	}{
 		"empty conditions": {
 		"empty conditions": {
 			input:   Allowlist{},
 			input:   Allowlist{},
-			wantErr: errors.New("[[rules.allowlists]] must contain at least one check for: commits, paths, regexes, or stopwords"),
+			wantErr: errors.New("must contain at least one check for: commits, paths, regexes, or stopwords"),
 		},
 		},
 		"deduplicated commits and stopwords": {
 		"deduplicated commits and stopwords": {
 			input: Allowlist{
 			input: Allowlist{

+ 153 - 116
config/config.go

@@ -2,6 +2,7 @@ package config
 
 
 import (
 import (
 	_ "embed"
 	_ "embed"
+	"errors"
 	"fmt"
 	"fmt"
 	"sort"
 	"sort"
 	"strings"
 	"strings"
@@ -12,12 +13,14 @@ import (
 	"github.com/zricethezav/gitleaks/v8/regexp"
 	"github.com/zricethezav/gitleaks/v8/regexp"
 )
 )
 
 
-//go:embed gitleaks.toml
-var DefaultConfig string
+var (
+	//go:embed gitleaks.toml
+	DefaultConfig string
 
 
-// use to keep track of how many configs we can extend
-// yea I know, globals bad
-var extendDepth int
+	// use to keep track of how many configs we can extend
+	// yea I know, globals bad
+	extendDepth int
+)
 
 
 const maxExtendDepth = 2
 const maxExtendDepth = 2
 
 
@@ -25,29 +28,28 @@ const maxExtendDepth = 2
 // to parse the config file. This struct does not include regular expressions.
 // to parse the config file. This struct does not include regular expressions.
 // It is used as an intermediary to convert the Viper config to the Config struct.
 // It is used as an intermediary to convert the Viper config to the Config struct.
 type ViperConfig struct {
 type ViperConfig struct {
+	Title       string
 	Description string
 	Description string
 	Extend      Extend
 	Extend      Extend
 	Rules       []struct {
 	Rules       []struct {
 		ID          string
 		ID          string
 		Description string
 		Description string
+		Path        string
 		Regex       string
 		Regex       string
 		SecretGroup int
 		SecretGroup int
 		Entropy     float64
 		Entropy     float64
 		Keywords    []string
 		Keywords    []string
-		Path        string
 		Tags        []string
 		Tags        []string
 
 
-		// Deprecated: this is a shim for backwards-compatibility. It should be removed in 9.x.
+		// Deprecated: this is a shim for backwards-compatibility.
+		// TODO: Remove this in 9.x.
 		AllowList  *viperRuleAllowlist
 		AllowList  *viperRuleAllowlist
-		Allowlists []viperRuleAllowlist
-	}
-	Allowlist struct {
-		Commits     []string
-		Paths       []string
-		RegexTarget string
-		Regexes     []string
-		StopWords   []string
+		Allowlists []*viperRuleAllowlist
 	}
 	}
+	// Deprecated: this is a shim for backwards-compatibility.
+	// TODO: Remove this in 9.x.
+	AllowList  *viperGlobalAllowlist
+	Allowlists []*viperGlobalAllowlist
 }
 }
 
 
 type viperRuleAllowlist struct {
 type viperRuleAllowlist struct {
@@ -60,6 +62,11 @@ type viperRuleAllowlist struct {
 	StopWords   []string
 	StopWords   []string
 }
 }
 
 
+type viperGlobalAllowlist struct {
+	TargetRules        []string
+	viperRuleAllowlist `mapstructure:",squash"`
+}
+
 // Config is a configuration struct that contains rules and an allowlist if present.
 // Config is a configuration struct that contains rules and an allowlist if present.
 type Config struct {
 type Config struct {
 	Title       string
 	Title       string
@@ -67,11 +74,10 @@ type Config struct {
 	Path        string
 	Path        string
 	Description string
 	Description string
 	Rules       map[string]Rule
 	Rules       map[string]Rule
-	Allowlist   *Allowlist
 	Keywords    map[string]struct{}
 	Keywords    map[string]struct{}
-
 	// used to keep sarif results consistent
 	// used to keep sarif results consistent
 	OrderedRules []string
 	OrderedRules []string
+	Allowlists   []*Allowlist
 }
 }
 
 
 // Extend is a struct that allows users to define how they want their
 // Extend is a struct that allows users to define how they want their
@@ -85,13 +91,24 @@ type Extend struct {
 
 
 func (vc *ViperConfig) Translate() (Config, error) {
 func (vc *ViperConfig) Translate() (Config, error) {
 	var (
 	var (
-		keywords     = make(map[string]struct{})
-		orderedRules []string
-		rulesMap     = make(map[string]Rule)
+		keywords       = make(map[string]struct{})
+		orderedRules   []string
+		rulesMap       = make(map[string]Rule)
+		ruleAllowlists = make(map[string][]*Allowlist)
 	)
 	)
 
 
 	// Validate individual rules.
 	// Validate individual rules.
 	for _, vr := range vc.Rules {
 	for _, vr := range vc.Rules {
+		var (
+			pathPat  *regexp.Regexp
+			regexPat *regexp.Regexp
+		)
+		if vr.Path != "" {
+			pathPat = regexp.MustCompile(vr.Path)
+		}
+		if vr.Regex != "" {
+			regexPat = regexp.MustCompile(vr.Regex)
+		}
 		if vr.Keywords == nil {
 		if vr.Keywords == nil {
 			vr.Keywords = []string{}
 			vr.Keywords = []string{}
 		} else {
 		} else {
@@ -101,115 +118,85 @@ func (vc *ViperConfig) Translate() (Config, error) {
 				vr.Keywords[i] = keyword
 				vr.Keywords[i] = keyword
 			}
 			}
 		}
 		}
-
 		if vr.Tags == nil {
 		if vr.Tags == nil {
 			vr.Tags = []string{}
 			vr.Tags = []string{}
 		}
 		}
-
-		var configRegex *regexp.Regexp
-		var configPathRegex *regexp.Regexp
-		if vr.Regex != "" {
-			configRegex = regexp.MustCompile(vr.Regex)
-		}
-		if vr.Path != "" {
-			configPathRegex = regexp.MustCompile(vr.Path)
-		}
-
 		cr := Rule{
 		cr := Rule{
 			RuleID:      vr.ID,
 			RuleID:      vr.ID,
 			Description: vr.Description,
 			Description: vr.Description,
-			Regex:       configRegex,
+			Regex:       regexPat,
 			SecretGroup: vr.SecretGroup,
 			SecretGroup: vr.SecretGroup,
 			Entropy:     vr.Entropy,
 			Entropy:     vr.Entropy,
-			Path:        configPathRegex,
+			Path:        pathPat,
 			Keywords:    vr.Keywords,
 			Keywords:    vr.Keywords,
 			Tags:        vr.Tags,
 			Tags:        vr.Tags,
 		}
 		}
-		// Parse the allowlist, including the older format for backwards compatibility.
+
+		// Parse the rule allowlists, including the older format for backwards compatibility.
 		if vr.AllowList != nil {
 		if vr.AllowList != nil {
+			// TODO: Remove this in v9.
 			if len(vr.Allowlists) > 0 {
 			if len(vr.Allowlists) > 0 {
 				return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
 				return Config{}, fmt.Errorf("%s: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]", cr.RuleID)
 			}
 			}
-			vr.Allowlists = append(vr.Allowlists, *vr.AllowList)
+			vr.Allowlists = append(vr.Allowlists, vr.AllowList)
 		}
 		}
 		for _, a := range vr.Allowlists {
 		for _, a := range vr.Allowlists {
-			var condition AllowlistMatchCondition
-			c := strings.ToUpper(a.Condition)
-			switch c {
-			case "AND", "&&":
-				condition = AllowlistMatchAnd
-			case "", "OR", "||":
-				condition = AllowlistMatchOr
-			default:
-				return Config{}, fmt.Errorf("%s: unknown allowlist condition '%s' (expected 'and', 'or')", cr.RuleID, c)
-			}
-
-			// Validate the target.
-			if a.RegexTarget != "" {
-				switch a.RegexTarget {
-				case "secret":
-					a.RegexTarget = ""
-				case "match", "line":
-					// do nothing
-				default:
-					return Config{}, fmt.Errorf("%s: unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", cr.RuleID, a.RegexTarget)
-				}
-			}
-			var allowlistRegexes []*regexp.Regexp
-			for _, a := range a.Regexes {
-				allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
-			}
-			var allowlistPaths []*regexp.Regexp
-			for _, a := range a.Paths {
-				allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
-			}
-
-			allowlist := &Allowlist{
-				Description:    a.Description,
-				MatchCondition: condition,
-				Commits:        a.Commits,
-				Paths:          allowlistPaths,
-				RegexTarget:    a.RegexTarget,
-				Regexes:        allowlistRegexes,
-				StopWords:      a.StopWords,
+			allowlist, err := parseAllowlist(a)
+			if err != nil {
+				return Config{}, fmt.Errorf("%s: [[rules.allowlists]] %w", cr.RuleID, err)
 			}
 			}
 			cr.Allowlists = append(cr.Allowlists, allowlist)
 			cr.Allowlists = append(cr.Allowlists, allowlist)
 		}
 		}
 		orderedRules = append(orderedRules, cr.RuleID)
 		orderedRules = append(orderedRules, cr.RuleID)
 		rulesMap[cr.RuleID] = cr
 		rulesMap[cr.RuleID] = cr
 	}
 	}
-	var allowlistRegexes []*regexp.Regexp
-	for _, a := range vc.Allowlist.Regexes {
-		allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
-	}
-	var allowlistPaths []*regexp.Regexp
-	for _, a := range vc.Allowlist.Paths {
-		allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
-	}
+
+	// Assemble the config.
 	c := Config{
 	c := Config{
-		Description: vc.Description,
-		Extend:      vc.Extend,
-		Rules:       rulesMap,
-		Allowlist: &Allowlist{
-			RegexTarget: vc.Allowlist.RegexTarget,
-			Regexes:     allowlistRegexes,
-			Paths:       allowlistPaths,
-			Commits:     vc.Allowlist.Commits,
-			StopWords:   vc.Allowlist.StopWords,
-		},
+		Title:        vc.Title,
+		Description:  vc.Description,
+		Extend:       vc.Extend,
+		Rules:        rulesMap,
 		Keywords:     keywords,
 		Keywords:     keywords,
 		OrderedRules: orderedRules,
 		OrderedRules: orderedRules,
 	}
 	}
+	// Parse the config allowlists, including the older format for backwards compatibility.
+	if vc.AllowList != nil {
+		// TODO: Remove this in v9.
+		if len(vc.Allowlists) > 0 {
+			return Config{}, errors.New("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]")
+		}
+		vc.Allowlists = append(vc.Allowlists, vc.AllowList)
+	}
+	for _, a := range vc.Allowlists {
+		allowlist, err := parseAllowlist(&a.viperRuleAllowlist)
+		if err != nil {
+			return Config{}, fmt.Errorf("[[allowlists]] %w", err)
+		}
+		// Allowlists with |targetRules| aren't added to the global list.
+		if len(a.TargetRules) > 0 {
+			for _, ruleID := range a.TargetRules {
+				// It's not possible to validate |ruleID| until after extend.
+				ruleAllowlists[ruleID] = append(ruleAllowlists[ruleID], allowlist)
+			}
+		} else {
+			c.Allowlists = append(c.Allowlists, allowlist)
+		}
+	}
 
 
 	if maxExtendDepth != extendDepth {
 	if maxExtendDepth != extendDepth {
 		// disallow both usedefault and path from being set
 		// disallow both usedefault and path from being set
 		if c.Extend.Path != "" && c.Extend.UseDefault {
 		if c.Extend.Path != "" && c.Extend.UseDefault {
-			logging.Fatal().Msg("unable to load config due to extend.path and extend.useDefault being set")
+			return Config{}, errors.New("unable to load config due to extend.path and extend.useDefault being set")
 		}
 		}
 		if c.Extend.UseDefault {
 		if c.Extend.UseDefault {
-			c.extendDefault()
+			if err := c.extendDefault(); err != nil {
+				return Config{}, err
+			}
 		} else if c.Extend.Path != "" {
 		} else if c.Extend.Path != "" {
-			c.extendPath()
+			if err := c.extendPath(); err != nil {
+				return Config{}, err
+			}
 		}
 		}
 	}
 	}
 
 
@@ -220,11 +207,68 @@ func (vc *ViperConfig) Translate() (Config, error) {
 				return Config{}, err
 				return Config{}, err
 			}
 			}
 		}
 		}
+
+		// Populate targeted configs.
+		for ruleID, allowlists := range ruleAllowlists {
+			rule, ok := c.Rules[ruleID]
+			if !ok {
+				return Config{}, fmt.Errorf("[[allowlists]] target rule ID '%s' does not exist", ruleID)
+			}
+			rule.Allowlists = append(rule.Allowlists, allowlists...)
+			c.Rules[ruleID] = rule
+		}
 	}
 	}
 
 
 	return c, nil
 	return c, nil
 }
 }
 
 
+func parseAllowlist(a *viperRuleAllowlist) (*Allowlist, error) {
+	var matchCondition AllowlistMatchCondition
+	switch strings.ToUpper(a.Condition) {
+	case "AND", "&&":
+		matchCondition = AllowlistMatchAnd
+	case "", "OR", "||":
+		matchCondition = AllowlistMatchOr
+	default:
+		return nil, fmt.Errorf("unknown allowlist |condition| '%s' (expected 'and', 'or')", a.Condition)
+	}
+
+	// Validate the target.
+	regexTarget := a.RegexTarget
+	if regexTarget != "" {
+		switch regexTarget {
+		case "secret":
+			regexTarget = ""
+		case "match", "line":
+			// do nothing
+		default:
+			return nil, fmt.Errorf("unknown allowlist |regexTarget| '%s' (expected 'match', 'line')", regexTarget)
+		}
+	}
+	var allowlistRegexes []*regexp.Regexp
+	for _, a := range a.Regexes {
+		allowlistRegexes = append(allowlistRegexes, regexp.MustCompile(a))
+	}
+	var allowlistPaths []*regexp.Regexp
+	for _, a := range a.Paths {
+		allowlistPaths = append(allowlistPaths, regexp.MustCompile(a))
+	}
+
+	allowlist := &Allowlist{
+		Description:    a.Description,
+		MatchCondition: matchCondition,
+		Commits:        a.Commits,
+		Paths:          allowlistPaths,
+		RegexTarget:    regexTarget,
+		Regexes:        allowlistRegexes,
+		StopWords:      a.StopWords,
+	}
+	if err := allowlist.Validate(); err != nil {
+		return nil, err
+	}
+	return allowlist, nil
+}
+
 func (c *Config) GetOrderedRules() []Rule {
 func (c *Config) GetOrderedRules() []Rule {
 	var orderedRules []Rule
 	var orderedRules []Rule
 	for _, id := range c.OrderedRules {
 	for _, id := range c.OrderedRules {
@@ -235,47 +279,43 @@ func (c *Config) GetOrderedRules() []Rule {
 	return orderedRules
 	return orderedRules
 }
 }
 
 
-func (c *Config) extendDefault() {
+func (c *Config) extendDefault() error {
 	extendDepth++
 	extendDepth++
 	viper.SetConfigType("toml")
 	viper.SetConfigType("toml")
 	if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
 	if err := viper.ReadConfig(strings.NewReader(DefaultConfig)); err != nil {
-		logging.Fatal().Msgf("failed to load extended config, err: %s", err)
-		return
+		return fmt.Errorf("failed to load extended default config, err: %w", err)
 	}
 	}
 	defaultViperConfig := ViperConfig{}
 	defaultViperConfig := ViperConfig{}
 	if err := viper.Unmarshal(&defaultViperConfig); err != nil {
 	if err := viper.Unmarshal(&defaultViperConfig); err != nil {
-		logging.Fatal().Msgf("failed to load extended config, err: %s", err)
-		return
+		return fmt.Errorf("failed to load extended default config, err: %w", err)
 	}
 	}
 	cfg, err := defaultViperConfig.Translate()
 	cfg, err := defaultViperConfig.Translate()
 	if err != nil {
 	if err != nil {
-		logging.Fatal().Msgf("failed to load extended config, err: %s", err)
-		return
+		return fmt.Errorf("failed to load extended default config, err: %w", err)
+
 	}
 	}
 	logging.Debug().Msg("extending config with default config")
 	logging.Debug().Msg("extending config with default config")
 	c.extend(cfg)
 	c.extend(cfg)
-
+	return nil
 }
 }
 
 
-func (c *Config) extendPath() {
+func (c *Config) extendPath() error {
 	extendDepth++
 	extendDepth++
 	viper.SetConfigFile(c.Extend.Path)
 	viper.SetConfigFile(c.Extend.Path)
 	if err := viper.ReadInConfig(); err != nil {
 	if err := viper.ReadInConfig(); err != nil {
-		logging.Fatal().Msgf("failed to load extended config, err: %s", err)
-		return
+		return fmt.Errorf("failed to load extended config, err: %w", err)
 	}
 	}
 	extensionViperConfig := ViperConfig{}
 	extensionViperConfig := ViperConfig{}
 	if err := viper.Unmarshal(&extensionViperConfig); err != nil {
 	if err := viper.Unmarshal(&extensionViperConfig); err != nil {
-		logging.Fatal().Msgf("failed to load extended config, err: %s", err)
-		return
+		return fmt.Errorf("failed to load extended config, err: %w", err)
 	}
 	}
 	cfg, err := extensionViperConfig.Translate()
 	cfg, err := extensionViperConfig.Translate()
 	if err != nil {
 	if err != nil {
-		logging.Fatal().Msgf("failed to load extended config, err: %s", err)
-		return
+		return fmt.Errorf("failed to load extended config, err: %w", err)
 	}
 	}
 	logging.Debug().Msgf("extending config with %s", c.Extend.Path)
 	logging.Debug().Msgf("extending config with %s", c.Extend.Path)
 	c.extend(cfg)
 	c.extend(cfg)
+	return nil
 }
 }
 
 
 func (c *Config) extendURL() {
 func (c *Config) extendURL() {
@@ -351,12 +391,9 @@ func (c *Config) extend(extensionConfig Config) {
 	}
 	}
 
 
 	// append allowlists, not attempting to merge
 	// append allowlists, not attempting to merge
-	c.Allowlist.Commits = append(c.Allowlist.Commits,
-		extensionConfig.Allowlist.Commits...)
-	c.Allowlist.Paths = append(c.Allowlist.Paths,
-		extensionConfig.Allowlist.Paths...)
-	c.Allowlist.Regexes = append(c.Allowlist.Regexes,
-		extensionConfig.Allowlist.Regexes...)
+	for _, a := range extensionConfig.Allowlists {
+		c.Allowlists = append(c.Allowlists, a)
+	}
 
 
 	// sort to keep extended rules in order
 	// sort to keep extended rules in order
 	sort.Strings(c.OrderedRules)
 	sort.Strings(c.OrderedRules)

+ 368 - 166
config/config_test.go

@@ -2,10 +2,10 @@ package config
 
 
 import (
 import (
 	"fmt"
 	"fmt"
-	"github.com/google/go-cmp/cmp/cmpopts"
 	"testing"
 	"testing"
 
 
 	"github.com/google/go-cmp/cmp"
 	"github.com/google/go-cmp/cmp"
+	"github.com/google/go-cmp/cmp/cmpopts"
 	"github.com/spf13/viper"
 	"github.com/spf13/viper"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 	"github.com/stretchr/testify/require"
@@ -15,19 +15,218 @@ import (
 
 
 const configPath = "../testdata/config/"
 const configPath = "../testdata/config/"
 
 
+var regexComparer = func(x, y *regexp.Regexp) bool {
+	if x == nil || y == nil {
+		return x == y
+	}
+	return x.String() == y.String()
+}
+
+type translateCase struct {
+	// Configuration file basename to load, from `../testdata/config/`.
+	cfgName string
+	// Expected result.
+	cfg Config
+	// Rules to compare.
+	rules []string
+	// Error to expect.
+	wantError error
+}
+
 func TestTranslate(t *testing.T) {
 func TestTranslate(t *testing.T) {
-	tests := []struct {
-		// Configuration file basename to load, from `../testdata/config/`.
-		cfgName string
-		// Expected result.
-		cfg Config
-		// Rules to compare.
-		rules []string
-		// Error to expect.
-		wantError error
-	}{
+	tests := []translateCase{
+		// Valid
+		{
+			cfgName: "generic",
+			cfg: Config{
+				Title: "gitleaks config",
+				Rules: map[string]Rule{"generic-api-key": {
+					RuleID:      "generic-api-key",
+					Description: "Generic API Key",
+					Regex:       regexp.MustCompile(`(?i)(?:key|api|token|secret|client|passwd|password|auth|access)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|\"|\s|=|\x60){0,5}([0-9a-z\-_.=]{10,150})(?:['|\"|\n|\r|\s|\x60|;]|$)`),
+					Entropy:     3.5,
+					Keywords:    []string{"key", "api", "token", "secret", "client", "passwd", "password", "auth", "access"},
+					Tags:        []string{},
+				}},
+			},
+		},
+		{
+			cfgName: "valid/rule_path_only",
+			cfg: Config{
+				Rules: map[string]Rule{"python-files-only": {
+					RuleID:      "python-files-only",
+					Description: "Python Files",
+					Path:        regexp.MustCompile(`.py`),
+					Keywords:    []string{},
+					Tags:        []string{},
+				}},
+			},
+		},
+		{
+			cfgName: "valid/rule_regex_escaped_character_group",
+			cfg: Config{
+				Rules: map[string]Rule{"pypi-upload-token": {
+					RuleID:      "pypi-upload-token",
+					Description: "PyPI upload token",
+					Regex:       regexp.MustCompile(`pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}`),
+					Keywords:    []string{},
+					Tags:        []string{"key", "pypi"},
+				}},
+			},
+		},
+		{
+			cfgName: "valid/rule_entropy_group",
+			cfg: Config{
+				Rules: map[string]Rule{"discord-api-key": {
+					RuleID:      "discord-api-key",
+					Description: "Discord API key",
+					Regex:       regexp.MustCompile(`(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]`),
+					Entropy:     3.5,
+					SecretGroup: 3,
+					Keywords:    []string{},
+					Tags:        []string{},
+				}},
+			},
+		},
+
+		// Invalid
+		{
+			cfgName:   "invalid/rule_missing_id",
+			cfg:       Config{},
+			wantError: fmt.Errorf("rule |id| is missing or empty, regex: (?i)(discord[a-z0-9_ .\\-,]{0,25})(=|>|:=|\\|\\|:|<=|=>|:).{0,5}['\\\"]([a-h0-9]{64})['\\\"]"),
+		},
+		{
+			cfgName:   "invalid/rule_no_regex_or_path",
+			cfg:       Config{},
+			wantError: fmt.Errorf("discord-api-key: both |regex| and |path| are empty, this rule will have no effect"),
+		},
+		{
+			cfgName:   "invalid/rule_bad_entropy_group",
+			cfg:       Config{},
+			wantError: fmt.Errorf("discord-api-key: invalid regex secret group 5, max regex secret group 3"),
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.cfgName, func(t *testing.T) {
+			testTranslate(t, tt)
+		})
+	}
+}
+
+func TestTranslateAllowlists(t *testing.T) {
+	tests := []translateCase{
+		// Global
+		{
+			cfgName: "valid/allowlist_global_old_compat",
+			cfg: Config{
+				Rules: map[string]Rule{},
+				Allowlists: []*Allowlist{
+					{
+						StopWords: []string{"0989c462-69c9-49fa-b7d2-30dc5c576a97"},
+					},
+				},
+			},
+		},
+		{
+			cfgName: "valid/allowlist_global_multiple",
+			cfg: Config{
+				Rules: map[string]Rule{
+					"test": {
+						RuleID:   "test",
+						Regex:    regexp.MustCompile(`token = "(.+)"`),
+						Keywords: []string{},
+						Tags:     []string{},
+					},
+				},
+				Allowlists: []*Allowlist{
+					{
+						Regexes: []*regexp.Regexp{regexp.MustCompile("^changeit$")},
+					},
+					{
+						MatchCondition: AllowlistMatchAnd,
+						Paths:          []*regexp.Regexp{regexp.MustCompile("^node_modules/.*")},
+						StopWords:      []string{"mock"},
+					},
+				},
+			},
+		},
+		{
+			cfgName: "valid/allowlist_global_target_rules",
+			cfg: Config{
+				Rules: map[string]Rule{
+					"github-app-token": {
+						RuleID:   "github-app-token",
+						Regex:    regexp.MustCompile(`(?:ghu|ghs)_[0-9a-zA-Z]{36}`),
+						Tags:     []string{},
+						Keywords: []string{},
+						Allowlists: []*Allowlist{
+							{
+								Paths: []*regexp.Regexp{regexp.MustCompile(`(?:^|/)@octokit/auth-token/README\.md$`)},
+							},
+						},
+					},
+					"github-oauth": {
+						RuleID:     "github-oauth",
+						Regex:      regexp.MustCompile(`gho_[0-9a-zA-Z]{36}`),
+						Tags:       []string{},
+						Keywords:   []string{},
+						Allowlists: nil,
+					},
+					"github-pat": {
+						RuleID:   "github-pat",
+						Regex:    regexp.MustCompile(`ghp_[0-9a-zA-Z]{36}`),
+						Tags:     []string{},
+						Keywords: []string{},
+						Allowlists: []*Allowlist{
+							{
+								Paths: []*regexp.Regexp{regexp.MustCompile(`(?:^|/)@octokit/auth-token/README\.md$`)},
+							},
+						},
+					},
+				},
+				Allowlists: []*Allowlist{
+					{
+						Regexes: []*regexp.Regexp{regexp.MustCompile(".*fake.*")},
+					},
+				},
+			},
+		},
+		{
+			cfgName: "valid/allowlist_global_regex",
+			cfg: Config{
+				Rules: map[string]Rule{},
+				Allowlists: []*Allowlist{
+					{
+						MatchCondition: AllowlistMatchOr,
+						Regexes:        []*regexp.Regexp{regexp.MustCompile("AKIALALEM.L33243OLIA")},
+					},
+				},
+			},
+		},
+		{
+			cfgName:   "invalid/allowlist_global_empty",
+			cfg:       Config{},
+			wantError: fmt.Errorf("[[allowlists]] must contain at least one check for: commits, paths, regexes, or stopwords"),
+		},
+		{
+			cfgName:   "invalid/allowlist_global_old_and_new",
+			cfg:       Config{},
+			wantError: fmt.Errorf("[allowlist] is deprecated, it cannot be used alongside [[allowlists]]"),
+		},
+		{
+			cfgName:   "invalid/allowlist_global_target_rule_id",
+			cfg:       Config{},
+			wantError: fmt.Errorf("[[allowlists]] target rule ID 'github-pat' does not exist"),
+		},
+		{
+			cfgName:   "invalid/allowlist_global_regextarget",
+			cfg:       Config{},
+			wantError: fmt.Errorf("[[allowlists]] unknown allowlist |regexTarget| 'mtach' (expected 'match', 'line')"),
+		},
+
+		// Rule
 		{
 		{
-			cfgName: "allowlist_old_compat",
+			cfgName: "valid/allowlist_rule_old_compat",
 			cfg: Config{
 			cfg: Config{
 				Rules: map[string]Rule{"example": {
 				Rules: map[string]Rule{"example": {
 					RuleID:   "example",
 					RuleID:   "example",
@@ -44,23 +243,9 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName:   "allowlist_invalid_empty",
-			cfg:       Config{},
-			wantError: fmt.Errorf("example: [[rules.allowlists]] must contain at least one check for: commits, paths, regexes, or stopwords"),
-		},
-		{
-			cfgName:   "allowlist_invalid_old_and_new",
-			cfg:       Config{},
-			wantError: fmt.Errorf("example: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]"),
-		},
-		{
-			cfgName:   "allowlist_invalid_regextarget",
-			cfg:       Config{},
-			wantError: fmt.Errorf("example: unknown allowlist |regexTarget| 'mtach' (expected 'match', 'line')"),
-		},
-		{
-			cfgName: "allow_aws_re",
+			cfgName: "valid/allowlist_rule_regex",
 			cfg: Config{
 			cfg: Config{
+				Title: "simple config with allowlist for aws",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
@@ -77,8 +262,9 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "allow_commit",
+			cfgName: "valid/allowlist_rule_commit",
 			cfg: Config{
 			cfg: Config{
+				Title: "simple config with allowlist for a specific commit",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
@@ -95,8 +281,9 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "allow_path",
+			cfgName: "valid/allowlist_rule_path",
 			cfg: Config{
 			cfg: Config{
+				Title: "simple config with allowlist for .go files",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
@@ -113,36 +300,34 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "entropy_group",
-			cfg: Config{
-				Rules: map[string]Rule{"discord-api-key": {
-					RuleID:      "discord-api-key",
-					Description: "Discord API key",
-					Regex:       regexp.MustCompile(`(?i)(discord[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([a-h0-9]{64})['\"]`),
-					Entropy:     3.5,
-					SecretGroup: 3,
-					Keywords:    []string{},
-					Tags:        []string{},
-				}},
-			},
-		},
-		{
-			cfgName:   "missing_id",
+			cfgName:   "invalid/allowlist_rule_empty",
 			cfg:       Config{},
 			cfg:       Config{},
-			wantError: fmt.Errorf("rule |id| is missing or empty, regex: (?i)(discord[a-z0-9_ .\\-,]{0,25})(=|>|:=|\\|\\|:|<=|=>|:).{0,5}['\\\"]([a-h0-9]{64})['\\\"]"),
+			wantError: fmt.Errorf("example: [[rules.allowlists]] must contain at least one check for: commits, paths, regexes, or stopwords"),
 		},
 		},
 		{
 		{
-			cfgName:   "no_regex_or_path",
+			cfgName:   "invalid/allowlist_rule_old_and_new",
 			cfg:       Config{},
 			cfg:       Config{},
-			wantError: fmt.Errorf("discord-api-key: both |regex| and |path| are empty, this rule will have no effect"),
+			wantError: fmt.Errorf("example: [rules.allowlist] is deprecated, it cannot be used alongside [[rules.allowlist]]"),
 		},
 		},
 		{
 		{
-			cfgName:   "bad_entropy_group",
+			cfgName:   "invalid/allowlist_rule_regextarget",
 			cfg:       Config{},
 			cfg:       Config{},
-			wantError: fmt.Errorf("discord-api-key: invalid regex secret group 5, max regex secret group 3"),
+			wantError: fmt.Errorf("example: [[rules.allowlists]] unknown allowlist |regexTarget| 'mtach' (expected 'match', 'line')"),
 		},
 		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.cfgName, func(t *testing.T) {
+			testTranslate(t, tt)
+		})
+	}
+}
+
+func TestTranslateExtend(t *testing.T) {
+	tests := []translateCase{
+		// Valid
 		{
 		{
-			cfgName: "base",
+			cfgName: "valid/extend",
 			cfg: Config{
 			cfg: Config{
 				Rules: map[string]Rule{
 				Rules: map[string]Rule{
 					"aws-access-key": {
 					"aws-access-key": {
@@ -170,63 +355,27 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "extend_rule_allowlist_or",
+			cfgName: "valid/extend_disabled",
 			cfg: Config{
 			cfg: Config{
+				Title: "gitleaks extend disable",
 				Rules: map[string]Rule{
 				Rules: map[string]Rule{
-					"aws-secret-key-again-again": {
-						RuleID:      "aws-secret-key-again-again",
-						Description: "AWS Secret Key",
-						Regex:       regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
-						Keywords:    []string{},
-						Tags:        []string{"key", "AWS"},
-						Allowlists: []*Allowlist{
-							{
-								MatchCondition: AllowlistMatchOr,
-								StopWords:      []string{"fake"},
-							},
-							{
-								MatchCondition: AllowlistMatchOr,
-								Commits:        []string{"abcdefg1"},
-								Paths:          []*regexp.Regexp{regexp.MustCompile(`ignore\.xaml`)},
-								Regexes:        []*regexp.Regexp{regexp.MustCompile(`foo.+bar`)},
-								RegexTarget:    "line",
-								StopWords:      []string{"example"},
-							},
-						},
+					"aws-secret-key": {
+						RuleID:   "aws-secret-key",
+						Regex:    regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
+						Tags:     []string{"key", "AWS"},
+						Keywords: []string{},
 					},
 					},
-				},
-			},
-		},
-		{
-			cfgName: "extend_rule_allowlist_and",
-			cfg: Config{
-				Rules: map[string]Rule{
-					"aws-secret-key-again-again": {
-						RuleID:      "aws-secret-key-again-again",
-						Description: "AWS Secret Key",
-						Regex:       regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
-						Keywords:    []string{},
-						Tags:        []string{"key", "AWS"},
-						Allowlists: []*Allowlist{
-							{
-								MatchCondition: AllowlistMatchOr,
-								StopWords:      []string{"fake"},
-							},
-							{
-								MatchCondition: AllowlistMatchAnd,
-								Commits:        []string{"abcdefg1"},
-								Paths:          []*regexp.Regexp{regexp.MustCompile(`ignore\.xaml`)},
-								Regexes:        []*regexp.Regexp{regexp.MustCompile(`foo.+bar`)},
-								RegexTarget:    "line",
-								StopWords:      []string{"example"},
-							},
-						},
+					"pypi-upload-token": {
+						RuleID:   "pypi-upload-token",
+						Regex:    regexp.MustCompile(`pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}`),
+						Tags:     []string{},
+						Keywords: []string{},
 					},
 					},
 				},
 				},
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "extend_empty_regexpath",
+			cfgName: "valid/extend_rule_no_regexpath",
 			cfg: Config{
 			cfg: Config{
 				Rules: map[string]Rule{
 				Rules: map[string]Rule{
 					"aws-secret-key-again-again": {
 					"aws-secret-key-again-again": {
@@ -247,9 +396,10 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "override_description",
+			cfgName: "valid/extend_rule_override_description",
 			rules:   []string{"aws-access-key"},
 			rules:   []string{"aws-access-key"},
 			cfg: Config{
 			cfg: Config{
+				Title: "override a built-in rule's description",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "Puppy Doggy",
 					Description: "Puppy Doggy",
@@ -261,14 +411,15 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "override_entropy",
+			cfgName: "valid/extend_rule_override_path",
 			rules:   []string{"aws-access-key"},
 			rules:   []string{"aws-access-key"},
 			cfg: Config{
 			cfg: Config{
+				Title: "override a built-in rule's path",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
 					Regex:       regexp.MustCompile("(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}"),
 					Regex:       regexp.MustCompile("(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}"),
-					Entropy:     999.0,
+					Path:        regexp.MustCompile("(?:puppy)"),
 					Keywords:    []string{},
 					Keywords:    []string{},
 					Tags:        []string{"key", "AWS"},
 					Tags:        []string{"key", "AWS"},
 				},
 				},
@@ -276,14 +427,14 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "override_secret_group",
+			cfgName: "valid/extend_rule_override_regex",
 			rules:   []string{"aws-access-key"},
 			rules:   []string{"aws-access-key"},
 			cfg: Config{
 			cfg: Config{
+				Title: "override a built-in rule's regex",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
-					Regex:       regexp.MustCompile("(?:a)(?:a)"),
-					SecretGroup: 2,
+					Regex:       regexp.MustCompile("(?:a)"),
 					Keywords:    []string{},
 					Keywords:    []string{},
 					Tags:        []string{"key", "AWS"},
 					Tags:        []string{"key", "AWS"},
 				},
 				},
@@ -291,13 +442,15 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "override_regex",
+			cfgName: "valid/extend_rule_override_secret_group",
 			rules:   []string{"aws-access-key"},
 			rules:   []string{"aws-access-key"},
 			cfg: Config{
 			cfg: Config{
+				Title: "override a built-in rule's secretGroup",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
-					Regex:       regexp.MustCompile("(?:a)"),
+					Regex:       regexp.MustCompile("(?:a)(?:a)"),
+					SecretGroup: 2,
 					Keywords:    []string{},
 					Keywords:    []string{},
 					Tags:        []string{"key", "AWS"},
 					Tags:        []string{"key", "AWS"},
 				},
 				},
@@ -305,14 +458,15 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "override_path",
+			cfgName: "valid/extend_rule_override_entropy",
 			rules:   []string{"aws-access-key"},
 			rules:   []string{"aws-access-key"},
 			cfg: Config{
 			cfg: Config{
+				Title: "override a built-in rule's entropy",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
 					Regex:       regexp.MustCompile("(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}"),
 					Regex:       regexp.MustCompile("(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}"),
-					Path:        regexp.MustCompile("(?:puppy)"),
+					Entropy:     999.0,
 					Keywords:    []string{},
 					Keywords:    []string{},
 					Tags:        []string{"key", "AWS"},
 					Tags:        []string{"key", "AWS"},
 				},
 				},
@@ -320,97 +474,145 @@ func TestTranslate(t *testing.T) {
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "override_tags",
+			cfgName: "valid/extend_rule_override_keywords",
 			rules:   []string{"aws-access-key"},
 			rules:   []string{"aws-access-key"},
 			cfg: Config{
 			cfg: Config{
+				Title: "override a built-in rule's keywords",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
 					Regex:       regexp.MustCompile("(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}"),
 					Regex:       regexp.MustCompile("(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}"),
-					Keywords:    []string{},
-					Tags:        []string{"key", "AWS", "puppy"},
+					Keywords:    []string{"puppy"},
+					Tags:        []string{"key", "AWS"},
 				},
 				},
 				},
 				},
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "override_keywords",
+			cfgName: "valid/extend_rule_override_tags",
 			rules:   []string{"aws-access-key"},
 			rules:   []string{"aws-access-key"},
 			cfg: Config{
 			cfg: Config{
+				Title: "override a built-in rule's tags",
 				Rules: map[string]Rule{"aws-access-key": {
 				Rules: map[string]Rule{"aws-access-key": {
 					RuleID:      "aws-access-key",
 					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
 					Regex:       regexp.MustCompile("(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}"),
 					Regex:       regexp.MustCompile("(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}"),
-					Keywords:    []string{"puppy"},
-					Tags:        []string{"key", "AWS"},
+					Keywords:    []string{},
+					Tags:        []string{"key", "AWS", "puppy"},
 				},
 				},
 				},
 				},
 			},
 			},
 		},
 		},
 		{
 		{
-			cfgName: "extend_disabled",
+			cfgName: "valid/extend_rule_allowlist_or",
 			cfg: Config{
 			cfg: Config{
+				Title: "gitleaks extended 3",
 				Rules: map[string]Rule{
 				Rules: map[string]Rule{
-					"aws-secret-key": {
-						RuleID:   "aws-secret-key",
-						Regex:    regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
-						Tags:     []string{"key", "AWS"},
-						Keywords: []string{},
+					"aws-secret-key-again-again": {
+						RuleID:      "aws-secret-key-again-again",
+						Description: "AWS Secret Key",
+						Regex:       regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
+						Keywords:    []string{},
+						Tags:        []string{"key", "AWS"},
+						Allowlists: []*Allowlist{
+							{
+								MatchCondition: AllowlistMatchOr,
+								StopWords:      []string{"fake"},
+							},
+							{
+								MatchCondition: AllowlistMatchOr,
+								Commits:        []string{"abcdefg1"},
+								Paths:          []*regexp.Regexp{regexp.MustCompile(`ignore\.xaml`)},
+								Regexes:        []*regexp.Regexp{regexp.MustCompile(`foo.+bar`)},
+								RegexTarget:    "line",
+								StopWords:      []string{"example"},
+							},
+						},
 					},
 					},
-					"pypi-upload-token": {
-						RuleID:   "pypi-upload-token",
-						Regex:    regexp.MustCompile(`pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}`),
-						Tags:     []string{},
-						Keywords: []string{},
+				},
+			},
+		},
+		{
+			cfgName: "valid/extend_rule_allowlist_and",
+			cfg: Config{
+				Title: "gitleaks extended 3",
+				Rules: map[string]Rule{
+					"aws-secret-key-again-again": {
+						RuleID:      "aws-secret-key-again-again",
+						Description: "AWS Secret Key",
+						Regex:       regexp.MustCompile(`(?i)aws_(.{0,20})?=?.[\'\"0-9a-zA-Z\/+]{40}`),
+						Keywords:    []string{},
+						Tags:        []string{"key", "AWS"},
+						Allowlists: []*Allowlist{
+							{
+								MatchCondition: AllowlistMatchOr,
+								StopWords:      []string{"fake"},
+							},
+							{
+								MatchCondition: AllowlistMatchAnd,
+								Commits:        []string{"abcdefg1"},
+								Paths:          []*regexp.Regexp{regexp.MustCompile(`ignore\.xaml`)},
+								Regexes:        []*regexp.Regexp{regexp.MustCompile(`foo.+bar`)},
+								RegexTarget:    "line",
+								StopWords:      []string{"example"},
+							},
+						},
 					},
 					},
 				},
 				},
 			},
 			},
 		},
 		},
+
+		// Invalid
 	}
 	}
 
 
 	for _, tt := range tests {
 	for _, tt := range tests {
 		t.Run(tt.cfgName, func(t *testing.T) {
 		t.Run(tt.cfgName, func(t *testing.T) {
-			t.Cleanup(func() {
-				extendDepth = 0
-				viper.Reset()
-			})
+			testTranslate(t, tt)
+		})
+	}
+}
 
 
-			viper.AddConfigPath(configPath)
-			viper.SetConfigName(tt.cfgName)
-			viper.SetConfigType("toml")
-			err := viper.ReadInConfig()
-			require.NoError(t, err)
+func testTranslate(t *testing.T, test translateCase) {
+	t.Helper()
+	t.Cleanup(func() {
+		extendDepth = 0
+		viper.Reset()
+	})
 
 
-			var vc ViperConfig
-			err = viper.Unmarshal(&vc)
-			require.NoError(t, err)
-			cfg, err := vc.Translate()
-			if err != nil && !assert.EqualError(t, tt.wantError, err.Error()) {
-				return
-			}
+	viper.AddConfigPath(configPath)
+	viper.SetConfigName(test.cfgName)
+	viper.SetConfigType("toml")
+	err := viper.ReadInConfig()
+	require.NoError(t, err)
 
 
-			if len(tt.rules) > 0 {
-				rules := make(map[string]Rule)
-				for _, name := range tt.rules {
-					rules[name] = cfg.Rules[name]
-				}
-				cfg.Rules = rules
-			}
+	var vc ViperConfig
+	err = viper.Unmarshal(&vc)
+	require.NoError(t, err)
+	cfg, err := vc.Translate()
+	if err != nil && !assert.EqualError(t, err, test.wantError.Error()) {
+		return
+	}
 
 
-			var regexComparer = func(x, y *regexp.Regexp) bool {
-				if x == nil || y == nil {
-					return x == y
-				}
-				return x.String() == y.String()
-			}
-			opts := cmp.Options{
-				cmp.Comparer(regexComparer),
-				cmpopts.IgnoreUnexported(Rule{}, Allowlist{}),
-			}
-			if diff := cmp.Diff(tt.cfg.Rules, cfg.Rules, opts); diff != "" {
-				t.Errorf("%s diff: (-want +got)\n%s", tt.cfgName, diff)
-			}
-		})
+	if len(test.rules) > 0 {
+		rules := make(map[string]Rule)
+		for _, name := range test.rules {
+			rules[name] = cfg.Rules[name]
+		}
+		cfg.Rules = rules
+	}
+
+	opts := cmp.Options{
+		cmp.Comparer(regexComparer),
+		cmpopts.IgnoreUnexported(Rule{}, Allowlist{}),
+	}
+	if diff := cmp.Diff(test.cfg.Title, cfg.Title); diff != "" {
+		t.Errorf("%s diff: (-want +got)\n%s", test.cfgName, diff)
+	}
+	if diff := cmp.Diff(test.cfg.Rules, cfg.Rules, opts); diff != "" {
+		t.Errorf("%s diff: (-want +got)\n%s", test.cfgName, diff)
+	}
+	if diff := cmp.Diff(test.cfg.Allowlists, cfg.Allowlists, opts); diff != "" {
+		t.Errorf("%s diff: (-want +got)\n%s", test.cfgName, diff)
 	}
 	}
 }
 }
 
 
@@ -422,12 +624,12 @@ func TestExtendedRuleKeywordsAreDowncase(t *testing.T) {
 	}{
 	}{
 		{
 		{
 			name:             "Extend base rule that includes AWS keyword with new attribute",
 			name:             "Extend base rule that includes AWS keyword with new attribute",
-			cfgName:          "extend_base_rule_including_keysword_with_attribute",
+			cfgName:          "valid/extend_base_rule_including_keywords_with_attribute",
 			expectedKeywords: "aws",
 			expectedKeywords: "aws",
 		},
 		},
 		{
 		{
 			name:             "Extend base with a new rule with CMS keyword",
 			name:             "Extend base with a new rule with CMS keyword",
-			cfgName:          "extend_with_new_rule",
+			cfgName:          "valid/extend_rule_new",
 			expectedKeywords: "cms",
 			expectedKeywords: "cms",
 		},
 		},
 	}
 	}

+ 17 - 15
config/gitleaks.toml

@@ -13,23 +13,9 @@
 
 
 title = "gitleaks config"
 title = "gitleaks config"
 
 
+# TODO: change to [[allowlists]]
 [allowlist]
 [allowlist]
 description = "global allow lists"
 description = "global allow lists"
-regexes = [
-    '''(?i)^true|false|null$''',
-    '''^(?i:a+|b+|c+|d+|e+|f+|g+|h+|i+|j+|k+|l+|m+|n+|o+|p+|q+|r+|s+|t+|u+|v+|w+|x+|y+|z+|\*+|\.+)$''',
-    '''^\$(?:\d+|{\d+})$''',
-    '''^\$(?:[A-Z_]+|[a-z_]+)$''',
-    '''^\${(?:[A-Z_]+|[a-z_]+)}$''',
-    '''^\{\{[ \t]*[\w ().|]+[ \t]*}}$''',
-    '''^\$\{\{[ \t]*(?:(?:env|github|secrets|vars)(?:\.[A-Za-z]\w+)+[\w "'&./=|]*)[ \t]*}}$''',
-    '''^%(?:[A-Z_]+|[a-z_]+)%$''',
-    '''^%[+\-# 0]?[bcdeEfFgGoOpqstTUvxX]$''',
-    '''^\{\d{0,2}}$''',
-    '''^@(?:[A-Z_]+|[a-z_]+)@$''',
-    '''^/Users/(?i)[a-z0-9]+/[\w .-/]+$''',
-    '''^/(?:bin|etc|home|opt|tmp|usr|var)/[\w ./-]+$''',
-]
 paths = [
 paths = [
     '''gitleaks\.toml''',
     '''gitleaks\.toml''',
     '''(?i)\.(?:bmp|gif|jpe?g|png|svg|tiff?)$''',
     '''(?i)\.(?:bmp|gif|jpe?g|png|svg|tiff?)$''',
@@ -56,6 +42,21 @@ paths = [
     '''verification-metadata\.xml''',
     '''verification-metadata\.xml''',
     '''Database.refactorlog''',
     '''Database.refactorlog''',
 ]
 ]
+regexes = [
+    '''(?i)^true|false|null$''',
+    '''^(?i:a+|b+|c+|d+|e+|f+|g+|h+|i+|j+|k+|l+|m+|n+|o+|p+|q+|r+|s+|t+|u+|v+|w+|x+|y+|z+|\*+|\.+)$''',
+    '''^\$(?:\d+|{\d+})$''',
+    '''^\$(?:[A-Z_]+|[a-z_]+)$''',
+    '''^\${(?:[A-Z_]+|[a-z_]+)}$''',
+    '''^\{\{[ \t]*[\w ().|]+[ \t]*}}$''',
+    '''^\$\{\{[ \t]*(?:(?:env|github|secrets|vars)(?:\.[A-Za-z]\w+)+[\w "'&./=|]*)[ \t]*}}$''',
+    '''^%(?:[A-Z_]+|[a-z_]+)%$''',
+    '''^%[+\-# 0]?[bcdeEfFgGoOpqstTUvxX]$''',
+    '''^\{\d{0,2}}$''',
+    '''^@(?:[A-Z_]+|[a-z_]+)@$''',
+    '''^/Users/(?i)[a-z0-9]+/[\w .-/]+$''',
+    '''^/(?:bin|etc|home|opt|tmp|usr|var)/[\w ./-]+$''',
+]
 stopwords = [
 stopwords = [
     "abcdefghijklmnopqrstuvwxyz",
     "abcdefghijklmnopqrstuvwxyz",
     "014df517-39d1-4453-b7b3-9930c563627c",
     "014df517-39d1-4453-b7b3-9930c563627c",
@@ -596,6 +597,7 @@ regexes = [
     '''^[a-zA-Z_.-]+$''',
     '''^[a-zA-Z_.-]+$''',
 ]
 ]
 [[rules.allowlists]]
 [[rules.allowlists]]
+description = "Allowlist for Generic API Keys"
 regexTarget = "match"
 regexTarget = "match"
 regexes = [
 regexes = [
     '''(?i)(?:access(?:ibility|or)|access[_.-]?id|random[_.-]?access|api[_.-]?(?:id|name|version)|rapid|capital|[a-z0-9-]*?api[a-z0-9-]*?:jar:|author|X-MS-Exchange-Organization-Auth|Authentication-Results|(?:credentials?[_.-]?id|withCredentials)|(?:bucket|foreign|hot|idx|natural|primary|pub(?:lic)?|schema|sequence)[_.-]?key|(?:turkey)|key[_.-]?(?:alias|board|code|frame|id|length|mesh|name|pair|press(?:ed)?|ring|selector|signature|size|stone|storetype|word|up|down|left|right)|key[_.-]?vault[_.-]?(?:id|name)|keyVaultToStoreSecrets|key(?:store|tab)[_.-]?(?:file|path)|issuerkeyhash|(?-i:[DdMm]onkey|[DM]ONKEY)|keying|(?:secret)[_.-]?(?:length|name|size)|UserSecretsId|(?:csrf)[_.-]?token|(?:io\.jsonwebtoken[ \t]?:[ \t]?[\w-]+)|(?:api|credentials|token)[_.-]?(?:endpoint|ur[il])|public[_.-]?token|(?:key|token)[_.-]?file|(?-i:(?:[A-Z_]+=\n[A-Z_]+=|[a-z_]+=\n[a-z_]+=)(?:\n|\z))|(?-i:(?:[A-Z.]+=\n[A-Z.]+=|[a-z.]+=\n[a-z.]+=)(?:\n|\z)))''',
     '''(?i)(?:access(?:ibility|or)|access[_.-]?id|random[_.-]?access|api[_.-]?(?:id|name|version)|rapid|capital|[a-z0-9-]*?api[a-z0-9-]*?:jar:|author|X-MS-Exchange-Organization-Auth|Authentication-Results|(?:credentials?[_.-]?id|withCredentials)|(?:bucket|foreign|hot|idx|natural|primary|pub(?:lic)?|schema|sequence)[_.-]?key|(?:turkey)|key[_.-]?(?:alias|board|code|frame|id|length|mesh|name|pair|press(?:ed)?|ring|selector|signature|size|stone|storetype|word|up|down|left|right)|key[_.-]?vault[_.-]?(?:id|name)|keyVaultToStoreSecrets|key(?:store|tab)[_.-]?(?:file|path)|issuerkeyhash|(?-i:[DdMm]onkey|[DM]ONKEY)|keying|(?:secret)[_.-]?(?:length|name|size)|UserSecretsId|(?:csrf)[_.-]?token|(?:io\.jsonwebtoken[ \t]?:[ \t]?[\w-]+)|(?:api|credentials|token)[_.-]?(?:endpoint|ur[il])|public[_.-]?token|(?:key|token)[_.-]?file|(?-i:(?:[A-Z_]+=\n[A-Z_]+=|[a-z_]+=\n[a-z_]+=)(?:\n|\z))|(?-i:(?:[A-Z.]+=\n[A-Z.]+=|[a-z.]+=\n[a-z.]+=)(?:\n|\z)))''',

+ 167 - 139
detect/detect.go

@@ -212,17 +212,30 @@ func (d *Detector) Detect(fragment Fragment) []report.Finding {
 	}
 	}
 	d.TotalBytes.Add(uint64(len(fragment.Bytes)))
 	d.TotalBytes.Add(uint64(len(fragment.Bytes)))
 
 
-	var findings []report.Finding
+	var (
+		findings []report.Finding
+		logger   = func() zerolog.Logger {
+			l := logging.With().Str("path", fragment.FilePath)
+			if fragment.CommitSHA != "" {
+				l = l.Str("commit", fragment.CommitSHA)
+			}
+			return l.Logger()
+		}()
+	)
 
 
 	// check if filepath is allowed
 	// check if filepath is allowed
 	if fragment.FilePath != "" {
 	if fragment.FilePath != "" {
 		// is the path our config or baseline file?
 		// is the path our config or baseline file?
-		if fragment.FilePath == d.Config.Path || (d.baselinePath != "" && fragment.FilePath == d.baselinePath) ||
-			// is the path excluded by the global allowlist?
-			(d.Config.Allowlist.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && d.Config.Allowlist.PathAllowed(fragment.WindowsFilePath))) {
+		if fragment.FilePath == d.Config.Path || (d.baselinePath != "" && fragment.FilePath == d.baselinePath) {
+			logging.Trace().Msg("skipping file: matches config or baseline path")
 			return findings
 			return findings
 		}
 		}
 	}
 	}
+	// check if commit or filepath is allowed.
+	if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, d.Config.Allowlists); isAllowed {
+		event.Msg("skipping file: global allowlist")
+		return findings
+	}
 
 
 	// add newline indices for location calculation in detectRule
 	// add newline indices for location calculation in detectRule
 	fragment.newlineIndices = newLineRegexp.FindAllStringIndex(fragment.Raw, -1)
 	fragment.newlineIndices = newLineRegexp.FindAllStringIndex(fragment.Raw, -1)
@@ -292,46 +305,10 @@ func (d *Detector) detectRule(fragment Fragment, currentRaw string, r config.Rul
 		}()
 		}()
 	)
 	)
 
 
-	// check if filepath or commit is allowed for this rule
-	for _, a := range r.Allowlists {
-		var (
-			isAllowed             bool
-			commitAllowed, commit = a.CommitAllowed(fragment.CommitSHA)
-			pathAllowed           = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
-		)
-		if a.MatchCondition == config.AllowlistMatchAnd {
-			// Determine applicable checks.
-			var allowlistChecks []bool
-			if len(a.Commits) > 0 {
-				allowlistChecks = append(allowlistChecks, commitAllowed)
-			}
-			if len(a.Paths) > 0 {
-				allowlistChecks = append(allowlistChecks, pathAllowed)
-			}
-			// These will be checked later.
-			if len(a.Regexes) > 0 {
-				allowlistChecks = append(allowlistChecks, false)
-			}
-			if len(a.StopWords) > 0 {
-				allowlistChecks = append(allowlistChecks, false)
-			}
-
-			// Check if allowed.
-			isAllowed = allTrue(allowlistChecks)
-		} else {
-			isAllowed = commitAllowed || pathAllowed
-		}
-		if isAllowed {
-			event := logger.Trace().Str("condition", a.MatchCondition.String())
-			if commitAllowed {
-				event.Str("allowed-commit", commit)
-			}
-			if pathAllowed {
-				event.Bool("allowed-path", pathAllowed)
-			}
-			event.Msg("skipping file: rule allowlist")
-			return findings
-		}
+	// check if commit or file is allowed for this rule.
+	if isAllowed, event := checkCommitOrPathAllowed(logger, fragment, r.Allowlists); isAllowed {
+		event.Msg("skipping file: rule allowlist")
+		return findings
 	}
 	}
 
 
 	if r.Path != nil {
 	if r.Path != nil {
@@ -377,7 +354,6 @@ func (d *Detector) detectRule(fragment Fragment, currentRaw string, r config.Rul
 
 
 	// use currentRaw instead of fragment.Raw since this represents the current
 	// use currentRaw instead of fragment.Raw since this represents the current
 	// decoding pass on the text
 	// decoding pass on the text
-MatchLoop:
 	for _, matchIndex := range r.Regex.FindAllStringIndex(currentRaw, -1) {
 	for _, matchIndex := range r.Regex.FindAllStringIndex(currentRaw, -1) {
 		// Extract secret from match
 		// Extract secret from match
 		secret := strings.Trim(currentRaw[matchIndex[0]:matchIndex[1]], "\n")
 		secret := strings.Trim(currentRaw[matchIndex[0]:matchIndex[1]], "\n")
@@ -474,109 +450,22 @@ MatchLoop:
 			}
 			}
 		}
 		}
 
 
-		if d.Config.Allowlist != nil {
-			// check if the regexTarget is defined in the allowlist "regexes" entry
-			// or if the secret is in the list of stopwords
-			globalAllowlistTarget := finding.Secret
-			switch d.Config.Allowlist.RegexTarget {
-			case "match":
-				globalAllowlistTarget = finding.Match
-			case "line":
-				globalAllowlistTarget = currentLine
-			}
-			if d.Config.Allowlist.RegexAllowed(globalAllowlistTarget) {
-				logger.Trace().
-					Str("finding", globalAllowlistTarget).
-					Msg("skipping finding: global allowlist regex")
-				continue
-			} else if ok, word := d.Config.Allowlist.ContainsStopWord(finding.Secret); ok {
-				logger.Trace().
-					Str("finding", finding.Secret).
-					Str("allowed-stopword", word).
-					Msg("skipping finding: global allowlist stopword")
-				continue
-			}
+		// check if the result matches any of the global allowlists.
+		if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, d.Config.Allowlists); isAllowed {
+			event.Msg("skipping finding: global allowlist")
+			continue
 		}
 		}
 
 
 		// check if the result matches any of the rule allowlists.
 		// check if the result matches any of the rule allowlists.
-		for _, a := range r.Allowlists {
-			allowlistTarget := finding.Secret
-			switch a.RegexTarget {
-			case "match":
-				allowlistTarget = finding.Match
-			case "line":
-				allowlistTarget = currentLine
-			}
-
-			var (
-				isAllowed              bool
-				commitAllowed          bool
-				commit                 string
-				pathAllowed            bool
-				regexAllowed           = a.RegexAllowed(allowlistTarget)
-				containsStopword, word = a.ContainsStopWord(finding.Secret)
-			)
-			// check if the secret is in the list of stopwords
-			if a.MatchCondition == config.AllowlistMatchAnd {
-				// Determine applicable checks.
-				var allowlistChecks []bool
-				if len(a.Commits) > 0 {
-					commitAllowed, commit = a.CommitAllowed(fragment.CommitSHA)
-					allowlistChecks = append(allowlistChecks, commitAllowed)
-				}
-				if len(a.Paths) > 0 {
-					pathAllowed = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
-					allowlistChecks = append(allowlistChecks, pathAllowed)
-				}
-				if len(a.Regexes) > 0 {
-					allowlistChecks = append(allowlistChecks, regexAllowed)
-				}
-				if len(a.StopWords) > 0 {
-					allowlistChecks = append(allowlistChecks, containsStopword)
-				}
-
-				// Check if allowed.
-				isAllowed = allTrue(allowlistChecks)
-			} else {
-				isAllowed = regexAllowed || containsStopword
-			}
-
-			if isAllowed {
-				event := logger.Trace().
-					Str("finding", finding.Secret).
-					Str("condition", a.MatchCondition.String())
-				if commitAllowed {
-					event.Str("allowed-commit", commit)
-				}
-				if pathAllowed {
-					event.Bool("allowed-path", pathAllowed)
-				}
-				if regexAllowed {
-					event.Bool("allowed-regex", regexAllowed)
-				}
-				if containsStopword {
-					event.Str("allowed-stopword", word)
-				}
-				event.Msg("skipping finding: rule allowlist")
-				continue MatchLoop
-			}
+		if isAllowed, event := checkFindingAllowed(logger, finding, fragment, currentLine, r.Allowlists); isAllowed {
+			event.Msg("skipping finding: rule allowlist")
+			continue
 		}
 		}
 		findings = append(findings, finding)
 		findings = append(findings, finding)
 	}
 	}
 	return findings
 	return findings
 }
 }
 
 
-func allTrue(bools []bool) bool {
-	allMatch := true
-	for _, check := range bools {
-		if !check {
-			allMatch = false
-			break
-		}
-	}
-	return allMatch
-}
-
 // AddFinding synchronously adds a finding to the findings slice
 // AddFinding synchronously adds a finding to the findings slice
 func (d *Detector) AddFinding(finding report.Finding) {
 func (d *Detector) AddFinding(finding report.Finding) {
 	globalFingerprint := fmt.Sprintf("%s:%s:%d", finding.File, finding.RuleID, finding.StartLine)
 	globalFingerprint := fmt.Sprintf("%s:%s:%d", finding.File, finding.RuleID, finding.StartLine)
@@ -627,3 +516,142 @@ func (d *Detector) Findings() []report.Finding {
 func (d *Detector) addCommit(commit string) {
 func (d *Detector) addCommit(commit string) {
 	d.commitMap[commit] = true
 	d.commitMap[commit] = true
 }
 }
+
+// checkCommitOrPathAllowed evaluates |fragment| against all provided |allowlists|.
+//
+// If the match condition is "OR", only commit and path are checked.
+// Otherwise, if regexes or stopwords are defined this will fail.
+func checkCommitOrPathAllowed(
+	logger zerolog.Logger,
+	fragment Fragment,
+	allowlists []*config.Allowlist,
+) (bool, *zerolog.Event) {
+	if fragment.FilePath == "" && fragment.CommitSHA == "" {
+		return false, nil
+	}
+
+	for _, a := range allowlists {
+		var (
+			isAllowed        bool
+			allowlistChecks  []bool
+			commitAllowed, _ = a.CommitAllowed(fragment.CommitSHA)
+			pathAllowed      = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
+		)
+		// If the condition is "AND" we need to check all conditions.
+		if a.MatchCondition == config.AllowlistMatchAnd {
+			if len(a.Commits) > 0 {
+				allowlistChecks = append(allowlistChecks, commitAllowed)
+			}
+			if len(a.Paths) > 0 {
+				allowlistChecks = append(allowlistChecks, pathAllowed)
+			}
+			// These will be checked later.
+			if len(a.Regexes) > 0 {
+				return false, nil
+			}
+			if len(a.StopWords) > 0 {
+				return false, nil
+			}
+
+			isAllowed = allTrue(allowlistChecks)
+		} else {
+			isAllowed = commitAllowed || pathAllowed
+		}
+		if isAllowed {
+			event := logger.Trace().Str("condition", a.MatchCondition.String())
+			if commitAllowed {
+				event.Bool("allowed-commit", commitAllowed)
+			}
+			if pathAllowed {
+				event.Bool("allowed-path", pathAllowed)
+			}
+			return true, event
+		}
+	}
+	return false, nil
+}
+
+// checkFindingAllowed evaluates |finding| against all provided |allowlists|.
+//
+// If the match condition is "OR", only regex and stopwords are run. (Commit and path should be handled separately).
+// Otherwise, all conditions are checked.
+//
+// TODO: The method signature is awkward. I can't think of a better way to log helpful info.
+func checkFindingAllowed(
+	logger zerolog.Logger,
+	finding report.Finding,
+	fragment Fragment,
+	currentLine string,
+	allowlists []*config.Allowlist,
+) (bool, *zerolog.Event) {
+	for _, a := range allowlists {
+		allowlistTarget := finding.Secret
+		switch a.RegexTarget {
+		case "match":
+			allowlistTarget = finding.Match
+		case "line":
+			allowlistTarget = currentLine
+		}
+
+		var (
+			checks                 []bool
+			isAllowed              bool
+			commitAllowed          bool
+			commit                 string
+			pathAllowed            bool
+			regexAllowed           = a.RegexAllowed(allowlistTarget)
+			containsStopword, word = a.ContainsStopWord(finding.Secret)
+		)
+		// If the condition is "AND" we need to check all conditions.
+		if a.MatchCondition == config.AllowlistMatchAnd {
+			// Determine applicable checks.
+			if len(a.Commits) > 0 {
+				commitAllowed, commit = a.CommitAllowed(fragment.CommitSHA)
+				checks = append(checks, commitAllowed)
+			}
+			if len(a.Paths) > 0 {
+				pathAllowed = a.PathAllowed(fragment.FilePath) || (fragment.WindowsFilePath != "" && a.PathAllowed(fragment.WindowsFilePath))
+				checks = append(checks, pathAllowed)
+			}
+			if len(a.Regexes) > 0 {
+				checks = append(checks, regexAllowed)
+			}
+			if len(a.StopWords) > 0 {
+				checks = append(checks, containsStopword)
+			}
+
+			isAllowed = allTrue(checks)
+		} else {
+			isAllowed = regexAllowed || containsStopword
+		}
+
+		if isAllowed {
+			event := logger.Trace().
+				Str("finding", finding.Secret).
+				Str("condition", a.MatchCondition.String())
+			if commitAllowed {
+				event.Str("allowed-commit", commit)
+			}
+			if pathAllowed {
+				event.Bool("allowed-path", pathAllowed)
+			}
+			if regexAllowed {
+				event.Bool("allowed-regex", regexAllowed)
+			}
+			if containsStopword {
+				event.Str("allowed-stopword", word)
+			}
+			return true, event
+		}
+	}
+	return false, nil
+}
+
+func allTrue(bools []bool) bool {
+	for _, check := range bools {
+		if !check {
+			return false
+		}
+	}
+	return true
+}

+ 150 - 119
detect/detect_test.go

@@ -53,7 +53,8 @@ password="bFJxQkstejVrZjQtcGxlYXNlLWlnbm9yZS1tZS1YLVhJSk0yUGRkdw=="
 `
 `
 
 
 func TestDetect(t *testing.T) {
 func TestDetect(t *testing.T) {
-	tests := []struct {
+	logging.Logger = logging.Logger.Level(zerolog.TraceLevel)
+	tests := map[string]struct {
 		cfgName      string
 		cfgName      string
 		baselinePath string
 		baselinePath string
 		fragment     Fragment
 		fragment     Fragment
@@ -65,14 +66,15 @@ func TestDetect(t *testing.T) {
 		expectedFindings []report.Finding
 		expectedFindings []report.Finding
 		wantError        error
 		wantError        error
 	}{
 	}{
-		{
+		// General
+		"valid allow comment (1)": {
 			cfgName: "simple",
 			cfgName: "simple",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      `awsToken := \"AKIALALEMEL33243OKIA\ // gitleaks:allow"`,
 				Raw:      `awsToken := \"AKIALALEMEL33243OKIA\ // gitleaks:allow"`,
 				FilePath: "tmp.go",
 				FilePath: "tmp.go",
 			},
 			},
 		},
 		},
-		{
+		"valid allow comment (2)": {
 			cfgName: "simple",
 			cfgName: "simple",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw: `awsToken := \
 				Raw: `awsToken := \
@@ -83,7 +85,7 @@ func TestDetect(t *testing.T) {
 				FilePath: "tmp.go",
 				FilePath: "tmp.go",
 			},
 			},
 		},
 		},
-		{
+		"invalid allow comment": {
 			cfgName: "simple",
 			cfgName: "simple",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw: `awsToken := \"AKIALALEMEL33243OKIA\"
 				Raw: `awsToken := \"AKIALALEMEL33243OKIA\"
@@ -110,30 +112,7 @@ func TestDetect(t *testing.T) {
 				},
 				},
 			},
 			},
 		},
 		},
-		{
-			cfgName: "escaped_character_group",
-			fragment: Fragment{
-				Raw:      `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
-				FilePath: "tmp.go",
-			},
-			expectedFindings: []report.Finding{
-				{
-					Description: "PyPI upload token",
-					Secret:      "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
-					Match:       "pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB",
-					Line:        `pypi-AgEIcHlwaS5vcmcAAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAA-AAAAAAAAAAB`,
-					File:        "tmp.go",
-					RuleID:      "pypi-upload-token",
-					Tags:        []string{"key", "pypi"},
-					StartLine:   0,
-					EndLine:     0,
-					StartColumn: 1,
-					EndColumn:   86,
-					Entropy:     1.9606875,
-				},
-			},
-		},
-		{
+		"detect finding - aws": {
 			cfgName: "simple",
 			cfgName: "simple",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
 				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
@@ -141,22 +120,22 @@ func TestDetect(t *testing.T) {
 			},
 			},
 			expectedFindings: []report.Finding{
 			expectedFindings: []report.Finding{
 				{
 				{
+					RuleID:      "aws-access-key",
 					Description: "AWS Access Key",
 					Description: "AWS Access Key",
-					Secret:      "AKIALALEMEL33243OLIA",
-					Match:       "AKIALALEMEL33243OLIA",
-					Line:        `awsToken := \"AKIALALEMEL33243OLIA\"`,
 					File:        "tmp.go",
 					File:        "tmp.go",
-					RuleID:      "aws-access-key",
-					Tags:        []string{"key", "AWS"},
+					Line:        `awsToken := \"AKIALALEMEL33243OLIA\"`,
+					Match:       "AKIALALEMEL33243OLIA",
+					Secret:      "AKIALALEMEL33243OLIA",
+					Entropy:     3.0841837,
 					StartLine:   0,
 					StartLine:   0,
 					EndLine:     0,
 					EndLine:     0,
 					StartColumn: 15,
 					StartColumn: 15,
 					EndColumn:   34,
 					EndColumn:   34,
-					Entropy:     3.0841837,
+					Tags:        []string{"key", "AWS"},
 				},
 				},
 			},
 			},
 		},
 		},
-		{
+		"detect finding - sidekiq env var": {
 			cfgName: "simple",
 			cfgName: "simple",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
 				Raw:      `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
@@ -164,22 +143,22 @@ func TestDetect(t *testing.T) {
 			},
 			},
 			expectedFindings: []report.Finding{
 			expectedFindings: []report.Finding{
 				{
 				{
+					RuleID:      "sidekiq-secret",
 					Description: "Sidekiq Secret",
 					Description: "Sidekiq Secret",
+					File:        "tmp.sh",
+					Line:        `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
 					Match:       "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;",
 					Match:       "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;",
 					Secret:      "cafebabe:deadbeef",
 					Secret:      "cafebabe:deadbeef",
-					Line:        `export BUNDLE_ENTERPRISE__CONTRIBSYS__COM=cafebabe:deadbeef;`,
-					File:        "tmp.sh",
-					RuleID:      "sidekiq-secret",
-					Tags:        []string{},
 					Entropy:     2.6098502,
 					Entropy:     2.6098502,
 					StartLine:   0,
 					StartLine:   0,
 					EndLine:     0,
 					EndLine:     0,
 					StartColumn: 8,
 					StartColumn: 8,
 					EndColumn:   60,
 					EndColumn:   60,
+					Tags:        []string{},
 				},
 				},
 			},
 			},
 		},
 		},
-		{
+		"detect finding - sidekiq env var, semicolon": {
 			cfgName: "simple",
 			cfgName: "simple",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
 				Raw:      `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
@@ -187,22 +166,22 @@ func TestDetect(t *testing.T) {
 			},
 			},
 			expectedFindings: []report.Finding{
 			expectedFindings: []report.Finding{
 				{
 				{
+					RuleID:      "sidekiq-secret",
 					Description: "Sidekiq Secret",
 					Description: "Sidekiq Secret",
-					Match:       "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=\"cafebabe:deadbeef\"",
-					Secret:      "cafebabe:deadbeef",
 					File:        "tmp.sh",
 					File:        "tmp.sh",
 					Line:        `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
 					Line:        `echo hello1; export BUNDLE_ENTERPRISE__CONTRIBSYS__COM="cafebabe:deadbeef" && echo hello2`,
-					RuleID:      "sidekiq-secret",
-					Tags:        []string{},
+					Match:       "BUNDLE_ENTERPRISE__CONTRIBSYS__COM=\"cafebabe:deadbeef\"",
+					Secret:      "cafebabe:deadbeef",
 					Entropy:     2.6098502,
 					Entropy:     2.6098502,
 					StartLine:   0,
 					StartLine:   0,
 					EndLine:     0,
 					EndLine:     0,
 					StartColumn: 21,
 					StartColumn: 21,
 					EndColumn:   74,
 					EndColumn:   74,
+					Tags:        []string{},
 				},
 				},
 			},
 			},
 		},
 		},
-		{
+		"detect finding - sidekiq url": {
 			cfgName: "simple",
 			cfgName: "simple",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
 				Raw:      `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
@@ -210,150 +189,203 @@ func TestDetect(t *testing.T) {
 			},
 			},
 			expectedFindings: []report.Finding{
 			expectedFindings: []report.Finding{
 				{
 				{
+					RuleID:      "sidekiq-sensitive-url",
 					Description: "Sidekiq Sensitive URL",
 					Description: "Sidekiq Sensitive URL",
-					Match:       "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:",
-					Secret:      "cafeb4b3:d3adb33f",
 					File:        "tmp.sh",
 					File:        "tmp.sh",
 					Line:        `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
 					Line:        `url = "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:80/path?param1=true&param2=false#heading1"`,
-					RuleID:      "sidekiq-sensitive-url",
-					Tags:        []string{},
+					Match:       "http://cafeb4b3:d3adb33f@enterprise.contribsys.com:",
+					Secret:      "cafeb4b3:d3adb33f",
 					Entropy:     2.984234,
 					Entropy:     2.984234,
 					StartLine:   0,
 					StartLine:   0,
 					EndLine:     0,
 					EndLine:     0,
 					StartColumn: 8,
 					StartColumn: 8,
 					EndColumn:   58,
 					EndColumn:   58,
+					Tags:        []string{},
 				},
 				},
 			},
 			},
 		},
 		},
-		{
-			cfgName: "allow_aws_re",
+		"ignore finding - our config file": {
+			cfgName: "simple",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
 				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
-				FilePath: "tmp.go",
+				FilePath: filepath.Join(configPath, "simple.toml"),
 			},
 			},
 		},
 		},
-		{
-			cfgName: "allow_path",
+		"ignore finding - doesn't match path": {
+			cfgName: "generic_with_py_path",
 			fragment: Fragment{
 			fragment: Fragment{
-				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
+				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
 				FilePath: "tmp.go",
 				FilePath: "tmp.go",
 			},
 			},
 		},
 		},
-		{
-			cfgName: "allow_commit",
-			fragment: Fragment{
-				Raw:       `awsToken := \"AKIALALEMEL33243OLIA\"`,
-				FilePath:  "tmp.go",
-				CommitSHA: "allowthiscommit",
-			},
-		},
-		{
-			cfgName: "entropy_group",
+		"detect finding - matches path,regex,entropy": {
+			cfgName: "generic_with_py_path",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
 				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
-				FilePath: "tmp.go",
+				FilePath: "tmp.py",
 			},
 			},
 			expectedFindings: []report.Finding{
 			expectedFindings: []report.Finding{
 				{
 				{
-					Description: "Discord API key",
-					Match:       "Discord_Public_Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
-					Secret:      "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
+					RuleID:      "generic-api-key",
+					Description: "Generic API Key",
+					File:        "tmp.py",
 					Line:        `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
 					Line:        `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
-					File:        "tmp.go",
-					RuleID:      "discord-api-key",
-					Tags:        []string{},
+					Match:       "Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
+					Secret:      "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
 					Entropy:     3.7906237,
 					Entropy:     3.7906237,
 					StartLine:   0,
 					StartLine:   0,
 					EndLine:     0,
 					EndLine:     0,
-					StartColumn: 7,
+					StartColumn: 22,
 					EndColumn:   93,
 					EndColumn:   93,
+					Tags:        []string{},
 				},
 				},
 			},
 			},
 		},
 		},
-		{
+		"ignore finding - allowlist regex": {
 			cfgName: "generic_with_py_path",
 			cfgName: "generic_with_py_path",
+			fragment: Fragment{
+				Raw:      `const Discord_Public_Key = "load2523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
+				FilePath: "tmp.py",
+			},
+		},
+
+		// Rule
+		"rule - ignore path": {
+			cfgName:      "valid/rule_path_only",
+			baselinePath: ".baseline.json",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
 				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
-				FilePath: "tmp.go",
+				FilePath: ".baseline.json",
 			},
 			},
 		},
 		},
-		{
-			cfgName: "generic_with_py_path",
+		"rule - detect path ": {
+			cfgName: "valid/rule_path_only",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
 				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
 				FilePath: "tmp.py",
 				FilePath: "tmp.py",
 			},
 			},
 			expectedFindings: []report.Finding{
 			expectedFindings: []report.Finding{
 				{
 				{
-					Description: "Generic API Key",
-					Match:       "Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
-					Secret:      "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
-					Line:        `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
+					Description: "Python Files",
+					Match:       "file detected: tmp.py",
 					File:        "tmp.py",
 					File:        "tmp.py",
-					RuleID:      "generic-api-key",
+					RuleID:      "python-files-only",
 					Tags:        []string{},
 					Tags:        []string{},
+				},
+			},
+		},
+		"rule - match based on entropy": {
+			cfgName: "valid/rule_entropy_group",
+			fragment: Fragment{
+				Raw: `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"
+//const Discord_Public_Key = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
+`,
+				FilePath: "tmp.go",
+			},
+			expectedFindings: []report.Finding{
+				{
+					RuleID:      "discord-api-key",
+					Description: "Discord API key",
+					File:        "tmp.go",
+					Line:        `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
+					Match:       "Discord_Public_Key = \"e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5\"",
+					Secret:      "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5",
 					Entropy:     3.7906237,
 					Entropy:     3.7906237,
 					StartLine:   0,
 					StartLine:   0,
 					EndLine:     0,
 					EndLine:     0,
-					StartColumn: 22,
+					StartColumn: 7,
 					EndColumn:   93,
 					EndColumn:   93,
+					Tags:        []string{},
 				},
 				},
 			},
 			},
 		},
 		},
-		{
-			cfgName: "path_only",
+
+		// Allowlists
+		"global allowlist - ignore regex": {
+			cfgName: "valid/allowlist_global_regex",
 			fragment: Fragment{
 			fragment: Fragment{
-				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
-				FilePath: "tmp.py",
+				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
+				FilePath: "tmp.go",
+			},
+		},
+		"global allowlist - detect, doesn't match all conditions": {
+			cfgName: "valid/allowlist_global_multiple",
+			fragment: Fragment{
+				Raw: `
+const token = "mockSecret";
+// const token = "changeit";`,
+				FilePath: "config.txt",
 			},
 			},
 			expectedFindings: []report.Finding{
 			expectedFindings: []report.Finding{
 				{
 				{
-					Description: "Python Files",
-					Match:       "file detected: tmp.py",
-					File:        "tmp.py",
-					RuleID:      "python-files-only",
+					RuleID:      "test",
+					File:        "config.txt",
+					Line:        "\nconst token = \"mockSecret\";",
+					Match:       `token = "mockSecret"`,
+					Secret:      "mockSecret",
+					Entropy:     2.9219282,
+					StartLine:   1,
+					EndLine:     1,
+					StartColumn: 8,
+					EndColumn:   27,
 					Tags:        []string{},
 					Tags:        []string{},
 				},
 				},
 			},
 			},
 		},
 		},
-		{
-			cfgName: "bad_entropy_group",
+		"global allowlist - ignore, matches all conditions": {
+			cfgName: "valid/allowlist_global_multiple",
 			fragment: Fragment{
 			fragment: Fragment{
-				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
-				FilePath: "tmp.go",
+				Raw:      `token := "mockSecret";`,
+				FilePath: "node_modules/config.txt",
 			},
 			},
-			wantError: fmt.Errorf("discord-api-key: invalid regex secret group 5, max regex secret group 3"),
 		},
 		},
-		{
-			cfgName: "simple",
+		"global allowlist - detect path, doesn't match all conditions": {
+			cfgName: "valid/allowlist_global_multiple",
 			fragment: Fragment{
 			fragment: Fragment{
-				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
-				FilePath: filepath.Join(configPath, "simple.toml"),
+				Raw:      `var token = "fakeSecret";`,
+				FilePath: "node_modules/config.txt",
+			},
+			expectedFindings: []report.Finding{
+				{
+					RuleID:      "test",
+					File:        "node_modules/config.txt",
+					Line:        "var token = \"fakeSecret\";",
+					Match:       `token = "fakeSecret"`,
+					Secret:      "fakeSecret",
+					Entropy:     2.8464394,
+					StartLine:   0,
+					EndLine:     0,
+					StartColumn: 5,
+					EndColumn:   24,
+					Tags:        []string{},
+				},
 			},
 			},
 		},
 		},
-		{
-			cfgName: "allow_global_aws_re",
+		"allowlist - ignore commit": {
+			cfgName: "valid/allowlist_rule_commit",
 			fragment: Fragment{
 			fragment: Fragment{
-				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
-				FilePath: "tmp.go",
+				Raw:       `awsToken := \"AKIALALEMEL33243OLIA\"`,
+				FilePath:  "tmp.go",
+				CommitSHA: "allowthiscommit",
 			},
 			},
 		},
 		},
-		{
-			cfgName: "generic_with_py_path",
+		"allowlist - ignore path": {
+			cfgName: "valid/allowlist_rule_path",
 			fragment: Fragment{
 			fragment: Fragment{
-				Raw:      `const Discord_Public_Key = "load2523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
-				FilePath: "tmp.py",
+				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
+				FilePath: "tmp.go",
 			},
 			},
 		},
 		},
-		{
-			cfgName:      "path_only",
-			baselinePath: ".baseline.json",
+		"allowlist - ignore regex": {
+			cfgName: "valid/allowlist_rule_regex",
 			fragment: Fragment{
 			fragment: Fragment{
-				Raw:      `const Discord_Public_Key = "e7322523fb86ed64c836a979cf8465fbd436378c653c1db38f9ae87bc62a6fd5"`,
-				FilePath: ".baseline.json",
+				Raw:      `awsToken := \"AKIALALEMEL33243OLIA\"`,
+				FilePath: "tmp.go",
 			},
 			},
 		},
 		},
-		{
+
+		// Base64-decoding
+		"detect base64": {
 			cfgName: "base64_encoded",
 			cfgName: "base64_encoded",
 			fragment: Fragment{
 			fragment: Fragment{
 				Raw:      b64TestValues,
 				Raw:      b64TestValues,
@@ -490,8 +522,8 @@ func TestDetect(t *testing.T) {
 		},
 		},
 	}
 	}
 
 
-	for _, tt := range tests {
-		t.Run(fmt.Sprintf("%s - %s", tt.cfgName, tt.fragment.FilePath), func(t *testing.T) {
+	for name, tt := range tests {
+		t.Run(name, func(t *testing.T) {
 			viper.Reset()
 			viper.Reset()
 			viper.AddConfigPath(configPath)
 			viper.AddConfigPath(configPath)
 			viper.SetConfigName(tt.cfgName)
 			viper.SetConfigName(tt.cfgName)
@@ -829,7 +861,7 @@ func TestFromFiles(t *testing.T) {
 			require.NoError(t, err)
 			require.NoError(t, err)
 
 
 			detector.FollowSymlinks = true
 			detector.FollowSymlinks = true
-			paths, err := sources.DirectoryTargets(tt.source, detector.Sema, true, cfg.Allowlist.PathAllowed)
+			paths, err := sources.DirectoryTargets(tt.source, detector.Sema, true, cfg.Allowlists)
 			require.NoError(t, err)
 			require.NoError(t, err)
 
 
 			findings, err := detector.DetectFiles(paths)
 			findings, err := detector.DetectFiles(paths)
@@ -903,7 +935,7 @@ func TestDetectWithSymlinks(t *testing.T) {
 		cfg, _ := vc.Translate()
 		cfg, _ := vc.Translate()
 		detector := NewDetector(cfg)
 		detector := NewDetector(cfg)
 		detector.FollowSymlinks = true
 		detector.FollowSymlinks = true
-		paths, err := sources.DirectoryTargets(tt.source, detector.Sema, true, cfg.Allowlist.PathAllowed)
+		paths, err := sources.DirectoryTargets(tt.source, detector.Sema, true, cfg.Allowlists)
 		require.NoError(t, err)
 		require.NoError(t, err)
 
 
 		findings, err := detector.DetectFiles(paths)
 		findings, err := detector.DetectFiles(paths)
@@ -1189,7 +1221,6 @@ func TestNormalizeGitleaksIgnorePaths(t *testing.T) {
 }
 }
 
 
 func TestWindowsFileSeparator_RulePath(t *testing.T) {
 func TestWindowsFileSeparator_RulePath(t *testing.T) {
-	logging.Logger = logging.Logger.Level(zerolog.TraceLevel)
 	unixRule := config.Rule{
 	unixRule := config.Rule{
 		RuleID: "test-rule",
 		RuleID: "test-rule",
 		Path:   regexp.MustCompile(`(^|/)\.m2/settings\.xml`),
 		Path:   regexp.MustCompile(`(^|/)\.m2/settings\.xml`),

+ 5 - 3
detect/git.go

@@ -42,9 +42,11 @@ func (d *Detector) DetectGit(cmd *sources.GitCmd, remote *RemoteInfo) ([]report.
 			commitSHA := ""
 			commitSHA := ""
 			if gitdiffFile.PatchHeader != nil {
 			if gitdiffFile.PatchHeader != nil {
 				commitSHA = gitdiffFile.PatchHeader.SHA
 				commitSHA = gitdiffFile.PatchHeader.SHA
-				if ok, c := d.Config.Allowlist.CommitAllowed(gitdiffFile.PatchHeader.SHA); ok {
-					logging.Trace().Str("allowed-commit", c).Msg("skipping commit: global allowlist")
-					continue
+				for _, a := range d.Config.Allowlists {
+					if ok, c := a.CommitAllowed(gitdiffFile.PatchHeader.SHA); ok {
+						logging.Trace().Str("allowed-commit", c).Msg("skipping commit: global allowlist")
+						continue
+					}
 				}
 				}
 			}
 			}
 			d.addCommit(commitSHA)
 			d.addCommit(commitSHA)

+ 12 - 5
sources/directory.go

@@ -8,6 +8,7 @@ import (
 
 
 	"github.com/fatih/semgroup"
 	"github.com/fatih/semgroup"
 
 
+	"github.com/zricethezav/gitleaks/v8/config"
 	"github.com/zricethezav/gitleaks/v8/logging"
 	"github.com/zricethezav/gitleaks/v8/logging"
 )
 )
 
 
@@ -18,7 +19,7 @@ type ScanTarget struct {
 
 
 var isWindows = runtime.GOOS == "windows"
 var isWindows = runtime.GOOS == "windows"
 
 
-func DirectoryTargets(source string, s *semgroup.Group, followSymlinks bool, shouldSkip func(string) bool) (<-chan ScanTarget, error) {
+func DirectoryTargets(source string, s *semgroup.Group, followSymlinks bool, allowlists []*config.Allowlist) (<-chan ScanTarget, error) {
 	paths := make(chan ScanTarget)
 	paths := make(chan ScanTarget)
 	s.Go(func() error {
 	s.Go(func() error {
 		defer close(paths)
 		defer close(paths)
@@ -66,10 +67,16 @@ func DirectoryTargets(source string, s *semgroup.Group, followSymlinks bool, sho
 				}
 				}
 
 
 				// TODO: Also run this check against the resolved symlink?
 				// TODO: Also run this check against the resolved symlink?
-				skip := shouldSkip(path) ||
-					// TODO: Remove this in v9.
-					// This is an awkward hack to mitigate https://github.com/gitleaks/gitleaks/issues/1641.
-					(isWindows && shouldSkip(filepath.ToSlash(path)))
+				var skip bool
+				for _, a := range allowlists {
+					skip = a.PathAllowed(path) ||
+						// TODO: Remove this in v9.
+						// This is an awkward hack to mitigate https://github.com/gitleaks/gitleaks/issues/1641.
+						(isWindows && a.PathAllowed(filepath.ToSlash(path)))
+					if skip {
+						break
+					}
+				}
 				if fInfo.IsDir() {
 				if fInfo.IsDir() {
 					// Directory
 					// Directory
 					if skip {
 					if skip {

+ 2 - 2
testdata/config/generic.toml

@@ -1,10 +1,10 @@
 title = "gitleaks config"
 title = "gitleaks config"
 
 
 [[rules]]
 [[rules]]
-description = "Generic API Key"
 id = "generic-api-key"
 id = "generic-api-key"
+description = "Generic API Key"
 regex = '''(?i)(?:key|api|token|secret|client|passwd|password|auth|access)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|\"|\s|=|\x60){0,5}([0-9a-z\-_.=]{10,150})(?:['|\"|\n|\r|\s|\x60|;]|$)'''
 regex = '''(?i)(?:key|api|token|secret|client|passwd|password|auth|access)(?:[0-9a-z\-_\t .]{0,20})(?:[\s|']|[\s|"]){0,3}(?:=|>|:{1,3}=|\|\|:|<=|=>|:|\?=)(?:'|\"|\s|=|\x60){0,5}([0-9a-z\-_.=]{10,150})(?:['|\"|\n|\r|\s|\x60|;]|$)'''
 entropy = 3.5
 entropy = 3.5
 keywords = [
 keywords = [
     "key","api","token","secret","client","passwd","password","auth","access",
     "key","api","token","secret","client","passwd","password","auth","access",
-]
+]

+ 8 - 9
testdata/config/generic_with_py_path.toml

@@ -1,15 +1,20 @@
 title = "gitleaks config"
 title = "gitleaks config"
 
 
 [[rules]]
 [[rules]]
-description = "Generic API Key"
 id = "generic-api-key"
 id = "generic-api-key"
-regex = '''(?i)((key|api|token|secret|password)[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9a-zA-Z\-_=]{8,64})['\"]'''
+description = "Generic API Key"
 path = '''.py'''
 path = '''.py'''
-entropy = 3.7
+regex = '''(?i)((key|api|token|secret|password)[a-z0-9_ .\-,]{0,25})(=|>|:=|\|\|:|<=|=>|:).{0,5}['\"]([0-9a-zA-Z\-_=]{8,64})['\"]'''
 secretGroup = 4
 secretGroup = 4
+entropy = 3.7
 
 
 [allowlist]
 [allowlist]
 description = "global allow lists"
 description = "global allow lists"
+paths = [
+	'''gitleaks.toml''',
+	'''(.*?)(jpg|gif|doc|pdf|bin|svg|socket)$''',
+	'''(go.mod|go.sum)$'''
+]
 regexes = [
 regexes = [
     '''219-09-9999''', 
     '''219-09-9999''', 
     '''078-05-1120''', 
     '''078-05-1120''', 
@@ -27,10 +32,4 @@ regexes = [
 	'''api\_key''',
 	'''api\_key''',
 	'''apikey''',
 	'''apikey''',
 	'''api\-key''',
 	'''api\-key''',
-    ]
-paths = [
-    '''gitleaks.toml''',
-    '''(.*?)(jpg|gif|doc|pdf|bin|svg|socket)$''',
-    '''(go.mod|go.sum)$'''
 ]
 ]
-

+ 2 - 0
testdata/config/invalid/allowlist_global_empty.toml

@@ -0,0 +1,2 @@
+
+[[allowlists]]

+ 4 - 0
testdata/config/invalid/allowlist_global_old_and_new.toml

@@ -0,0 +1,4 @@
+[allowlist]
+regexes = ['''123''']
+[[allowlists]]
+regexes = ['''456''']

+ 3 - 0
testdata/config/invalid/allowlist_global_regextarget.toml

@@ -0,0 +1,3 @@
+[[allowlists]]
+regexTarget = "mtach"
+regexes = ['''456''']

+ 7 - 0
testdata/config/invalid/allowlist_global_target_rule_id.toml

@@ -0,0 +1,7 @@
+[[rules]]
+id = "github-app-token"
+regex = '''(?:ghu|ghs)_[0-9a-zA-Z]{36}'''
+
+[[allowlists]]
+targetRules = ["github-app-token", "github-pat"]
+regexes = ['''.*fake.*''']

+ 0 - 2
testdata/config/allowlist_invalid_empty.toml → testdata/config/invalid/allowlist_rule_empty.toml

@@ -1,5 +1,3 @@
-title = "simple config with allowlist for aws"
-
 [[rules]]
 [[rules]]
 id = "example"
 id = "example"
 regex = '''example\d+'''
 regex = '''example\d+'''

+ 0 - 2
testdata/config/allowlist_invalid_old_and_new.toml → testdata/config/invalid/allowlist_rule_old_and_new.toml

@@ -1,5 +1,3 @@
-title = "simple config with allowlist for aws"
-
 [[rules]]
 [[rules]]
 id = "example"
 id = "example"
 regex = '''example\d+'''
 regex = '''example\d+'''

+ 0 - 2
testdata/config/allowlist_invalid_regextarget.toml → testdata/config/invalid/allowlist_rule_regextarget.toml

@@ -1,5 +1,3 @@
-title = "simple config with allowlist for aws"
-
 [[rules]]
 [[rules]]
 id = "example"
 id = "example"
 regex = '''example\d+'''
 regex = '''example\d+'''

+ 5 - 3
testdata/config/allow_global_aws_re.toml → testdata/config/invalid/extend_invalid_base.toml

@@ -1,8 +1,10 @@
+title = "gitleaks extended 1"
+
+[extend]
+path="../testdata/config/invalid/does_not_exist.toml"
+
 [[rules]]
 [[rules]]
     description = "AWS Access Key"
     description = "AWS Access Key"
     id = "aws-access-key"
     id = "aws-access-key"
     regex = '''(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}'''
     regex = '''(?:A3T[A-Z0-9]|AKIA|ASIA|ABIA|ACCA)[A-Z0-9]{16}'''
     tags = ["key", "AWS"]
     tags = ["key", "AWS"]
-
-[allowlist]
-    regexes = ['''AKIALALEMEL33243OLIA''']

+ 0 - 0
testdata/config/bad_entropy_group.toml → testdata/config/invalid/rule_bad_entropy_group.toml


+ 0 - 0
testdata/config/missing_id.toml → testdata/config/invalid/rule_missing_id.toml


+ 0 - 0
testdata/config/no_regex_or_path.toml → testdata/config/invalid/rule_no_regex_or_path.toml


+ 10 - 0
testdata/config/valid/allowlist_global_multiple.toml

@@ -0,0 +1,10 @@
+[[rules]]
+id = "test"
+regex = '''token = "(.+)"'''
+
+[[allowlists]]
+regexes = ["^changeit$"]
+[[allowlists]]
+condition = "AND"
+paths = ["^node_modules/.*"]
+stopwords = ["mock"]

+ 2 - 0
testdata/config/valid/allowlist_global_old_compat.toml

@@ -0,0 +1,2 @@
+[allowlist]
+stopwords = ["0989c462-69c9-49fa-b7d2-30dc5c576a97"]

+ 2 - 0
testdata/config/valid/allowlist_global_regex.toml

@@ -0,0 +1,2 @@
+[allowlist]
+    regexes = ['''AKIALALEM.L33243OLIA''']

+ 20 - 0
testdata/config/valid/allowlist_global_target_rules.toml

@@ -0,0 +1,20 @@
+[[rules]]
+id = "github-app-token"
+regex = '''(?:ghu|ghs)_[0-9a-zA-Z]{36}'''
+
+[[rules]]
+id = "github-oauth"
+regex = '''gho_[0-9a-zA-Z]{36}'''
+
+[[rules]]
+id = "github-pat"
+regex = '''ghp_[0-9a-zA-Z]{36}'''
+
+
+[[allowlists]]
+regexes = ['''.*fake.*''']
+[[allowlists]]
+targetRules = ["github-app-token", "github-pat"]
+paths = [
+    '''(?:^|/)@octokit/auth-token/README\.md$''',
+]

+ 0 - 0
testdata/config/allow_commit.toml → testdata/config/valid/allowlist_rule_commit.toml


+ 0 - 2
testdata/config/allowlist_old_compat.toml → testdata/config/valid/allowlist_rule_old_compat.toml

@@ -1,5 +1,3 @@
-title = "simple config with allowlist for aws"
-
 [[rules]]
 [[rules]]
     id = "example"
     id = "example"
     regex = '''example\d+'''
     regex = '''example\d+'''

+ 0 - 0
testdata/config/allow_path.toml → testdata/config/valid/allowlist_rule_path.toml


+ 0 - 0
testdata/config/allow_aws_re.toml → testdata/config/valid/allowlist_rule_regex.toml


+ 1 - 3
testdata/config/base.toml → testdata/config/valid/extend.toml

@@ -1,7 +1,5 @@
-title = "gitleaks config"
-
 [extend]
 [extend]
-path="../testdata/config/extend_1.toml"
+path="../testdata/config/valid/extend_base_1.toml"
 
 
 [[rules]]
 [[rules]]
     description = "AWS Secret Key"
     description = "AWS Secret Key"

+ 1 - 1
testdata/config/extend_1.toml → testdata/config/valid/extend_base_1.toml

@@ -1,7 +1,7 @@
 title = "gitleaks extended 1"
 title = "gitleaks extended 1"
 
 
 [extend]
 [extend]
-path="../testdata/config/extend_2.toml"
+path="../testdata/config/valid/extend_base_2.toml"
 
 
 [[rules]]
 [[rules]]
     description = "AWS Access Key"
     description = "AWS Access Key"

+ 0 - 0
testdata/config/extend_2.toml → testdata/config/valid/extend_base_2.toml


+ 0 - 0
testdata/config/extend_3.toml → testdata/config/valid/extend_base_3.toml


+ 1 - 1
testdata/config/extend_base_rule_including_keysword_with_attribute.toml → testdata/config/valid/extend_base_rule_including_keywords_with_attribute.toml

@@ -1,7 +1,7 @@
 title = "gitleaks extended 3"
 title = "gitleaks extended 3"
 
 
 [extend]
 [extend]
-path="../testdata/config/extend_rule_keywords_base.toml"
+path="../testdata/config/valid/extend_rule_keywords_base.toml"
 
 
 [[rules]]
 [[rules]]
     id = "aws-secret-key-again-again"
     id = "aws-secret-key-again-again"

+ 1 - 1
testdata/config/extend_disabled.toml → testdata/config/valid/extend_disabled.toml

@@ -1,7 +1,7 @@
 title = "gitleaks extend disable"
 title = "gitleaks extend disable"
 
 
 [extend]
 [extend]
-path = "../testdata/config/extend_disabled_base.toml"
+path = "../testdata/config/valid/extend_disabled_base.toml"
 disabledRules = [
 disabledRules = [
     'custom-rule1'
     'custom-rule1'
 ]
 ]

+ 0 - 0
testdata/config/extend_disabled_base.toml → testdata/config/valid/extend_disabled_base.toml


+ 1 - 1
testdata/config/extend_rule_allowlist_and.toml → testdata/config/valid/extend_rule_allowlist_and.toml

@@ -1,7 +1,7 @@
 title = "gitleaks extended 3"
 title = "gitleaks extended 3"
 
 
 [extend]
 [extend]
-path="../testdata/config/extend_rule_allowlist_base.toml"
+path="../testdata/config/valid/extend_rule_allowlist_base.toml"
 
 
 [[rules]]
 [[rules]]
     id = "aws-secret-key-again-again"
     id = "aws-secret-key-again-again"

+ 0 - 0
testdata/config/extend_rule_allowlist_base.toml → testdata/config/valid/extend_rule_allowlist_base.toml


+ 1 - 1
testdata/config/extend_rule_allowlist_or.toml → testdata/config/valid/extend_rule_allowlist_or.toml

@@ -1,7 +1,7 @@
 title = "gitleaks extended 3"
 title = "gitleaks extended 3"
 
 
 [extend]
 [extend]
-path="../testdata/config/extend_rule_allowlist_base.toml"
+path="../testdata/config/valid/extend_rule_allowlist_base.toml"
 
 
 [[rules]]
 [[rules]]
     id = "aws-secret-key-again-again"
     id = "aws-secret-key-again-again"

+ 0 - 0
testdata/config/extend_rule_keywords_base.toml → testdata/config/valid/extend_rule_keywords_base.toml


+ 1 - 1
testdata/config/extend_with_new_rule.toml → testdata/config/valid/extend_rule_new.toml

@@ -1,7 +1,7 @@
 title = "gitleaks extended 3"
 title = "gitleaks extended 3"
 
 
 [extend]
 [extend]
-path="../testdata/config/extend_rule_keywords_base.toml"
+path="../testdata/config/valid/extend_rule_keywords_base.toml"
 
 
 [[rules]]
 [[rules]]
     id = "aws-rule-that-is-not-in-base"
     id = "aws-rule-that-is-not-in-base"

+ 1 - 1
testdata/config/extend_empty_regexpath.toml → testdata/config/valid/extend_rule_no_regexpath.toml

@@ -1,5 +1,5 @@
 [extend]
 [extend]
-path="../testdata/config/extend_3.toml"
+path="../testdata/config/valid/extend_base_3.toml"
 
 
 [[rules]]
 [[rules]]
 id = "aws-secret-key-again-again"
 id = "aws-secret-key-again-again"

+ 0 - 0
testdata/config/override_description.toml → testdata/config/valid/extend_rule_override_description.toml


+ 0 - 0
testdata/config/override_entropy.toml → testdata/config/valid/extend_rule_override_entropy.toml


+ 0 - 0
testdata/config/override_keywords.toml → testdata/config/valid/extend_rule_override_keywords.toml


+ 0 - 0
testdata/config/override_path.toml → testdata/config/valid/extend_rule_override_path.toml


+ 0 - 0
testdata/config/override_regex.toml → testdata/config/valid/extend_rule_override_regex.toml


+ 0 - 0
testdata/config/override_secret_group.toml → testdata/config/valid/extend_rule_override_secret_group.toml


+ 0 - 0
testdata/config/override_tags.toml → testdata/config/valid/extend_rule_override_tags.toml


+ 0 - 2
testdata/config/entropy_group.toml → testdata/config/valid/rule_entropy_group.toml

@@ -1,5 +1,3 @@
-title = "gitleaks config"
-
 [[rules]]
 [[rules]]
 id = "discord-api-key"
 id = "discord-api-key"
 description = "Discord API key"
 description = "Discord API key"

+ 0 - 2
testdata/config/path_only.toml → testdata/config/valid/rule_path_only.toml

@@ -1,5 +1,3 @@
-title = "gitleaks config"
-
 [[rules]]
 [[rules]]
 description = "Python Files"
 description = "Python Files"
 id = "python-files-only"
 id = "python-files-only"

+ 1 - 4
testdata/config/escaped_character_group.toml → testdata/config/valid/rule_regex_escaped_character_group.toml

@@ -1,8 +1,5 @@
-title = "gitleaks config"
-# https://learnxinyminutes.com/docs/toml/ for toml reference
-
 [[rules]]
 [[rules]]
     id = "pypi-upload-token"
     id = "pypi-upload-token"
     description = "PyPI upload token"
     description = "PyPI upload token"
     regex = '''pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}'''
     regex = '''pypi-AgEIcHlwaS5vcmc[A-Za-z0-9\-_]{50,1000}'''
-    tags = ["key", "pypi"]
+    tags = ["key", "pypi"]