detect.go

package detect

import (
	"bufio"
	"context"
	"fmt"
	"os"
	"regexp"
	"strings"
	"sync"

	"github.com/zricethezav/gitleaks/v8/config"
	"github.com/zricethezav/gitleaks/v8/report"

	ahocorasick "github.com/BobuSumisu/aho-corasick"
	"github.com/fatih/semgroup"
	"github.com/rs/zerolog/log"
	"github.com/spf13/viper"
)

const (
	gitleaksAllowSignature = "gitleaks:allow"
	chunkSize              = 10 * 1_000 // 10kb
)

// Detector is the main detector struct
type Detector struct {
	// Config is the configuration for the detector
	Config config.Config
	// Redact is a flag to redact findings. This is exported
	// so users using gitleaks as a library can set this flag
	// without calling `detector.Start(cmd *cobra.Command)`
	Redact uint
	// Verbose is a flag to print findings
	Verbose bool
	// files larger than this will be skipped
	MaxTargetMegaBytes int
	// FollowSymlinks is a flag to enable scanning symlink files
	FollowSymlinks bool
	// NoColor is a flag to disable color output
	NoColor bool
	// IgnoreGitleaksAllow is a flag to ignore gitleaks:allow comments.
	IgnoreGitleaksAllow bool
	// commitMap is used to keep track of commits that have been scanned.
	// This is only used for logging purposes and git scans.
	commitMap map[string]bool
	// findingMutex is to prevent concurrent access to the
	// findings slice when adding findings.
	findingMutex *sync.Mutex
	// findings is a slice of report.Findings. This is the result
	// of the detector's scan which can then be used to generate a
	// report.
	findings []report.Finding
	// prefilter is an ahocorasick trie used for efficient string
	// matching against a set of words (keywords from the rules in the config)
	prefilter ahocorasick.Trie
	// a list of known findings that should be ignored
	baseline []report.Finding
	// path to baseline
	baselinePath string
	// gitleaksIgnore is a set of fingerprints loaded from a
	// .gitleaksignore file; matching findings are skipped
	gitleaksIgnore map[string]bool
	// Sema (https://github.com/fatih/semgroup) controls the concurrency
	Sema *semgroup.Group
}

// Fragment contains the data to be scanned
type Fragment struct {
	// Raw is the raw content of the fragment
	Raw string
	// FilePath is the path to the file if applicable
	FilePath    string
	SymlinkFile string
	// CommitSHA is the SHA of the commit if applicable
	CommitSHA string
	// newlineIndices is a list of indices of newlines in the raw content.
	// This is used to calculate the line location of a finding
	newlineIndices [][]int
	// keywords is a map of all the keywords contained within the contents
	// of this fragment
	keywords map[string]bool
}

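// Illustrative sketch (not part of the original file): a caller scanning file
// content directly might fill in Raw and FilePath and hand the Fragment to
// Detect. The path, content, and `detector` variable below are hypothetical.
//
//	raw, _ := os.ReadFile("config/dev.env") // hypothetical path
//	findings := detector.Detect(Fragment{
//		Raw:      string(raw),
//		FilePath: "config/dev.env",
//	})
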
// NewDetector creates a new detector with the given config
func NewDetector(cfg config.Config) *Detector {
	return &Detector{
		commitMap:      make(map[string]bool),
		gitleaksIgnore: make(map[string]bool),
		findingMutex:   &sync.Mutex{},
		findings:       make([]report.Finding, 0),
		Config:         cfg,
		prefilter:      *ahocorasick.NewTrieBuilder().AddStrings(cfg.Keywords).Build(),
		Sema:           semgroup.NewGroup(context.Background(), 40),
	}
}

// NewDetectorDefaultConfig creates a new detector with the default config
func NewDetectorDefaultConfig() (*Detector, error) {
	viper.SetConfigType("toml")
	err := viper.ReadConfig(strings.NewReader(config.DefaultConfig))
	if err != nil {
		return nil, err
	}
	var vc config.ViperConfig
	err = viper.Unmarshal(&vc)
	if err != nil {
		return nil, err
	}
	cfg, err := vc.Translate()
	if err != nil {
		return nil, err
	}
	return NewDetector(cfg), nil
}

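// Illustrative sketch (not part of the original file): a library consumer might
// build a detector from the bundled default config and scan an in-memory string.
// The sample content below is hypothetical.
//
//	detector, err := NewDetectorDefaultConfig()
//	if err != nil {
//		log.Fatal().Err(err).Msg("could not load default gitleaks config")
//	}
//	for _, f := range detector.DetectString(`aws_key = "AKIAIOSFODNN7EXAMPLE"`) {
//		fmt.Printf("%s: %s (line %d)\n", f.RuleID, f.Secret, f.StartLine)
//	}
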
// AddGitleaksIgnore loads fingerprints from the given .gitleaksignore file
// into the detector's ignore set.
func (d *Detector) AddGitleaksIgnore(gitleaksIgnorePath string) error {
	log.Debug().Msgf("found .gitleaksignore file: %s", gitleaksIgnorePath)
	file, err := os.Open(gitleaksIgnorePath)
	if err != nil {
		return err
	}
	// https://github.com/securego/gosec/issues/512
	defer func() {
		if err := file.Close(); err != nil {
			log.Warn().Msgf("Error closing .gitleaksignore file: %s\n", err)
		}
	}()
	scanner := bufio.NewScanner(file)
	for scanner.Scan() {
		line := strings.TrimSpace(scanner.Text())
		// skip empty lines and comments
		if line != "" && !strings.HasPrefix(line, "#") {
			d.gitleaksIgnore[line] = true
		}
	}
	return nil
}

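// Illustrative sketch (not part of the original file): each non-comment line of
// .gitleaksignore is stored verbatim as a fingerprint key. Matching the formats
// built in addFinding below, entries look like the following (paths, rule IDs,
// and line numbers are hypothetical):
//
//	# <file>:<rule-id>:<start-line>
//	testdata/config.env:generic-api-key:12
//	# <commit>:<file>:<rule-id>:<start-line>
//	d0cd0cd0cd0cd0cd0cd0cd0cd0cd0cd0cd0cd0cd:main.go:aws-access-key:3
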
// DetectBytes scans the given bytes and returns a list of findings
func (d *Detector) DetectBytes(content []byte) []report.Finding {
	return d.DetectString(string(content))
}

// DetectString scans the given string and returns a list of findings
func (d *Detector) DetectString(content string) []report.Finding {
	return d.Detect(Fragment{
		Raw: content,
	})
}

// Detect scans the given fragment and returns a list of findings
func (d *Detector) Detect(fragment Fragment) []report.Finding {
	var findings []report.Finding

	// initialize fragment keywords
	fragment.keywords = make(map[string]bool)

	// check if filepath is allowed
	if fragment.FilePath != "" && (d.Config.Allowlist.PathAllowed(fragment.FilePath) ||
		fragment.FilePath == d.Config.Path || (d.baselinePath != "" && fragment.FilePath == d.baselinePath)) {
		return findings
	}

	// add newline indices for location calculation in detectRule
	fragment.newlineIndices = regexp.MustCompile("\n").FindAllStringIndex(fragment.Raw, -1)

	// build keyword map for prefiltering rules
	normalizedRaw := strings.ToLower(fragment.Raw)
	matches := d.prefilter.MatchString(normalizedRaw)
	for _, m := range matches {
		fragment.keywords[normalizedRaw[m.Pos():int(m.Pos())+len(m.Match())]] = true
	}

	for _, rule := range d.Config.Rules {
		if len(rule.Keywords) == 0 {
			// if no keywords are associated with the rule, always scan the
			// fragment using the rule
			findings = append(findings, d.detectRule(fragment, rule)...)
			continue
		}
		fragmentContainsKeyword := false
		// check if keywords are in the fragment
		for _, k := range rule.Keywords {
			if _, ok := fragment.keywords[strings.ToLower(k)]; ok {
				fragmentContainsKeyword = true
			}
		}
		if fragmentContainsKeyword {
			findings = append(findings, d.detectRule(fragment, rule)...)
		}
	}
	return filter(findings, d.Redact)
}

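// Illustrative sketch (not part of the original file): the keyword prefilter above
// means a rule is only handed to detectRule when one of its (lowercased) keywords
// appears in the lowercased fragment. Assuming a hypothetical rule that is part of
// the detector's config, so its keyword is in the prefilter trie:
//
//	rule := config.Rule{
//		RuleID:   "aws-access-key",  // hypothetical
//		Keywords: []string{"akia"},  // hypothetical
//		Regex:    regexp.MustCompile(`AKIA[0-9A-Z]{16}`),
//	}
//
// a fragment with Raw = `password = "hunter2"` never runs the regex, while
// Raw = `key = "AKIAIOSFODNN7EXAMPLE"` records keywords["akia"] = true and does.
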
// detectRule scans the given fragment for the given rule and returns a list of findings
func (d *Detector) detectRule(fragment Fragment, rule config.Rule) []report.Finding {
	var findings []report.Finding

	// check if filepath or commit is allowed for this rule
	if rule.Allowlist.CommitAllowed(fragment.CommitSHA) ||
		rule.Allowlist.PathAllowed(fragment.FilePath) {
		return findings
	}

	if rule.Path != nil && rule.Regex == nil {
		// Path _only_ rule
		if rule.Path.MatchString(fragment.FilePath) {
			finding := report.Finding{
				Description: rule.Description,
				File:        fragment.FilePath,
				SymlinkFile: fragment.SymlinkFile,
				RuleID:      rule.RuleID,
				Match:       fmt.Sprintf("file detected: %s", fragment.FilePath),
				Tags:        rule.Tags,
			}
			return append(findings, finding)
		}
	} else if rule.Path != nil {
		// if path is set _and_ a regex is set, then we need to check both
		// so if the path does not match, then we should return early and not
		// consider the regex
		if !rule.Path.MatchString(fragment.FilePath) {
			return findings
		}
	}

	// if path only rule, skip content checks
	if rule.Regex == nil {
		return findings
	}

	// if the max target size flag is configured and the raw data is larger
	// than the limit, skip this fragment
	if d.MaxTargetMegaBytes > 0 {
		rawLength := len(fragment.Raw) / 1000000
		if rawLength > d.MaxTargetMegaBytes {
			log.Debug().Msgf("skipping file: %s scan due to size: %d", fragment.FilePath, rawLength)
			return findings
		}
	}

	matchIndices := rule.Regex.FindAllStringIndex(fragment.Raw, -1)
	for _, matchIndex := range matchIndices {
		// extract secret from match
		secret := strings.Trim(fragment.Raw[matchIndex[0]:matchIndex[1]], "\n")

		// Fixes: https://github.com/gitleaks/gitleaks/issues/1352
		// The regex may capture a trailing newline; trim it and adjust the
		// match end index so the location calculation below stays correct.
		matchIndex[1] = matchIndex[0] + len(secret)

		// determine location of match. Note that the location
		// in the finding will be the line/column numbers of the _match_
		// not the _secret_, which will be different if the secretGroup
		// value is set for this rule
		loc := location(fragment, matchIndex)

		if matchIndex[1] > loc.endLineIndex {
			loc.endLineIndex = matchIndex[1]
		}

		finding := report.Finding{
			Description: rule.Description,
			File:        fragment.FilePath,
			SymlinkFile: fragment.SymlinkFile,
			RuleID:      rule.RuleID,
			StartLine:   loc.startLine,
			EndLine:     loc.endLine,
			StartColumn: loc.startColumn,
			EndColumn:   loc.endColumn,
			Secret:      secret,
			Match:       secret,
			Tags:        rule.Tags,
			Line:        fragment.Raw[loc.startLineIndex:loc.endLineIndex],
		}

		if strings.Contains(fragment.Raw[loc.startLineIndex:loc.endLineIndex],
			gitleaksAllowSignature) && !d.IgnoreGitleaksAllow {
			continue
		}

		// Set the value of |secret|, if the pattern contains at least one capture group.
		// (The first element is the full match, hence we check >= 2.)
		groups := rule.Regex.FindStringSubmatch(finding.Secret)
		if len(groups) >= 2 {
			if rule.SecretGroup > 0 {
				if len(groups) <= rule.SecretGroup {
					// Config validation should prevent this
					continue
				}
				finding.Secret = groups[rule.SecretGroup]
			} else {
				// If |secretGroup| is not set, we will use the first suitable capture group.
				if len(groups) == 2 {
					// Use the only group.
					finding.Secret = groups[1]
				} else {
					// Use the first non-empty group.
					for _, s := range groups[1:] {
						if len(s) > 0 {
							finding.Secret = s
							break
						}
					}
				}
			}
		}

		// check if the regexTarget is defined in the allowlist "regexes" entry
		allowlistTarget := finding.Secret
		switch rule.Allowlist.RegexTarget {
		case "match":
			allowlistTarget = finding.Match
		case "line":
			allowlistTarget = finding.Line
		}

		globalAllowlistTarget := finding.Secret
		switch d.Config.Allowlist.RegexTarget {
		case "match":
			globalAllowlistTarget = finding.Match
		case "line":
			globalAllowlistTarget = finding.Line
		}
		if rule.Allowlist.RegexAllowed(allowlistTarget) ||
			d.Config.Allowlist.RegexAllowed(globalAllowlistTarget) {
			continue
		}

		// check if the secret is in the list of stopwords
		if rule.Allowlist.ContainsStopWord(finding.Secret) ||
			d.Config.Allowlist.ContainsStopWord(finding.Secret) {
			continue
		}

		// check entropy
		entropy := shannonEntropy(finding.Secret)
		finding.Entropy = float32(entropy)
		if rule.Entropy != 0.0 {
			if entropy <= rule.Entropy {
				// entropy is too low, skip this finding
				continue
			}
			// NOTE: this is a goofy hack to get around the fact that Go's regex engine
			// does not support positive lookaheads. Ideally we would want to add a
			// restriction on generic rules' regexes requiring that the secret match group
			// contains both numbers and alphabetical characters, not just alphabetical characters.
			// What this bit of code does is check if the rule id is prefixed with "generic"
			// and, if so, enforces that the secret contains both digits and alphabetical characters.
			// TODO: this should be replaced with stop words
			if strings.HasPrefix(rule.RuleID, "generic") {
				if !containsDigit(finding.Secret) {
					continue
				}
			}
		}
		findings = append(findings, finding)
	}
	return findings
}

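// Illustrative sketch (not part of the original file): when a rule's regex has a
// capture group, the logic above narrows the reported secret from the full match
// to that group. Assuming a hypothetical rule:
//
//	rule := config.Rule{
//		RuleID:      "example-token", // hypothetical
//		Regex:       regexp.MustCompile(`token\s*=\s*"([a-z0-9]{16})"`),
//		SecretGroup: 1,
//	}
//
// a fragment line `token = "abcd1234abcd1234"` yields a finding whose Match is the
// whole assignment but whose Secret is just the 16-character group.
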
// addFinding synchronously adds a finding to the findings slice
func (d *Detector) addFinding(finding report.Finding) {
	globalFingerprint := fmt.Sprintf("%s:%s:%d", finding.File, finding.RuleID, finding.StartLine)
	if finding.Commit != "" {
		finding.Fingerprint = fmt.Sprintf("%s:%s:%s:%d", finding.Commit, finding.File, finding.RuleID, finding.StartLine)
	} else {
		finding.Fingerprint = globalFingerprint
	}

	// check if we should ignore this finding
	if _, ok := d.gitleaksIgnore[globalFingerprint]; ok {
		log.Debug().Msgf("ignoring finding with global Fingerprint %s",
			finding.Fingerprint)
		return
	} else if finding.Commit != "" {
		// Awkward nested if because I'm not sure how to chain these two conditions.
		if _, ok := d.gitleaksIgnore[finding.Fingerprint]; ok {
			log.Debug().Msgf("ignoring finding with Fingerprint %s",
				finding.Fingerprint)
			return
		}
	}

	if d.baseline != nil && !IsNew(finding, d.baseline) {
		log.Debug().Msgf("baseline duplicate -- ignoring finding with Fingerprint %s", finding.Fingerprint)
		return
	}

	d.findingMutex.Lock()
	d.findings = append(d.findings, finding)
	if d.Verbose {
		printFinding(finding, d.NoColor)
	}
	d.findingMutex.Unlock()
}

// addCommit synchronously adds a commit to the commit map
func (d *Detector) addCommit(commit string) {
	d.commitMap[commit] = true
}