detect.go 16 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559
  1. package detect
  2. import (
  3. "bufio"
  4. "context"
  5. "fmt"
  6. "os"
  7. "regexp"
  8. "strings"
  9. "sync"
  10. "sync/atomic"
  11. "github.com/zricethezav/gitleaks/v8/config"
  12. "github.com/zricethezav/gitleaks/v8/logging"
  13. "github.com/zricethezav/gitleaks/v8/report"
  14. ahocorasick "github.com/BobuSumisu/aho-corasick"
  15. "github.com/fatih/semgroup"
  16. "github.com/rs/zerolog"
  17. "github.com/spf13/viper"
  18. "golang.org/x/exp/maps"
  19. )
  20. const (
  21. gitleaksAllowSignature = "gitleaks:allow"
  22. chunkSize = 100 * 1_000 // 100kb
  23. )
  24. var newLineRegexp = regexp.MustCompile("\n")
// Detector is the main detector struct
type Detector struct {
	// Config is the configuration for the detector
	Config config.Config

	// Redact is a flag to redact findings. This is exported
	// so users using gitleaks as a library can set this flag
	// without calling `detector.Start(cmd *cobra.Command)`
	Redact uint

	// Verbose is a flag to print findings
	Verbose bool

	// MaxDecodeDepth limits how many recursive decoding passes are allowed
	MaxDecodeDepth int

	// MaxTargetMegaBytes: files larger than this will be skipped
	MaxTargetMegaBytes int

	// FollowSymlinks is a flag to enable scanning symlink files
	FollowSymlinks bool

	// NoColor is a flag to disable color output
	NoColor bool

	// IgnoreGitleaksAllow is a flag to ignore gitleaks:allow comments.
	IgnoreGitleaksAllow bool

	// commitMap is used to keep track of commits that have been scanned.
	// This is only used for logging purposes and git scans.
	commitMap map[string]bool

	// findingMutex is to prevent concurrent access to the
	// findings slice when adding findings.
	findingMutex *sync.Mutex

	// findings is a slice of report.Findings. This is the result
	// of the detector's scan which can then be used to generate a
	// report.
	findings []report.Finding

	// prefilter is an ahocorasick trie used for doing efficient string
	// matching given a set of words (keywords from the rules in the config)
	prefilter ahocorasick.Trie

	// baseline is a list of known findings that should be ignored
	baseline []report.Finding

	// baselinePath is the path to the baseline file; fragments from this
	// path are skipped in Detect so the baseline cannot flag itself.
	baselinePath string

	// gitleaksIgnore holds fingerprints loaded from a .gitleaksignore file;
	// findings whose fingerprint is present here are dropped in addFinding.
	gitleaksIgnore map[string]bool

	// Sema (https://github.com/fatih/semgroup) controls the concurrency
	Sema *semgroup.Group

	// report-related settings.
	ReportPath string
	Reporter   report.Reporter

	// TotalBytes counts the bytes submitted for scanning (see Detect).
	TotalBytes atomic.Uint64
}
// Fragment contains the data to be scanned
type Fragment struct {
	// Raw is the raw content of the fragment
	Raw string

	// Bytes is an alternative byte payload; when nil, Detect counts
	// len(Raw) toward TotalBytes instead.
	Bytes []byte

	// FilePath is the path to the file if applicable
	FilePath string

	// SymlinkFile is the symlink path that resolved to this file, if the
	// fragment was reached through a symlink.
	SymlinkFile string

	// CommitSHA is the SHA of the commit if applicable
	CommitSHA string

	// newlineIndices is a list of indices of newlines in the raw content.
	// This is used to calculate the line location of a finding
	newlineIndices [][]int
}
  85. // NewDetector creates a new detector with the given config
  86. func NewDetector(cfg config.Config) *Detector {
  87. return &Detector{
  88. commitMap: make(map[string]bool),
  89. gitleaksIgnore: make(map[string]bool),
  90. findingMutex: &sync.Mutex{},
  91. findings: make([]report.Finding, 0),
  92. Config: cfg,
  93. prefilter: *ahocorasick.NewTrieBuilder().AddStrings(maps.Keys(cfg.Keywords)).Build(),
  94. Sema: semgroup.NewGroup(context.Background(), 40),
  95. }
  96. }
  97. // NewDetectorDefaultConfig creates a new detector with the default config
  98. func NewDetectorDefaultConfig() (*Detector, error) {
  99. viper.SetConfigType("toml")
  100. err := viper.ReadConfig(strings.NewReader(config.DefaultConfig))
  101. if err != nil {
  102. return nil, err
  103. }
  104. var vc config.ViperConfig
  105. err = viper.Unmarshal(&vc)
  106. if err != nil {
  107. return nil, err
  108. }
  109. cfg, err := vc.Translate()
  110. if err != nil {
  111. return nil, err
  112. }
  113. return NewDetector(cfg), nil
  114. }
  115. func (d *Detector) AddGitleaksIgnore(gitleaksIgnorePath string) error {
  116. logging.Debug().Msgf("found .gitleaksignore file: %s", gitleaksIgnorePath)
  117. file, err := os.Open(gitleaksIgnorePath)
  118. if err != nil {
  119. return err
  120. }
  121. // https://github.com/securego/gosec/issues/512
  122. defer func() {
  123. if err := file.Close(); err != nil {
  124. logging.Warn().Msgf("Error closing .gitleaksignore file: %s\n", err)
  125. }
  126. }()
  127. scanner := bufio.NewScanner(file)
  128. for scanner.Scan() {
  129. line := strings.TrimSpace(scanner.Text())
  130. // Skip lines that start with a comment
  131. if line != "" && !strings.HasPrefix(line, "#") {
  132. d.gitleaksIgnore[line] = true
  133. }
  134. }
  135. return nil
  136. }
  137. // DetectBytes scans the given bytes and returns a list of findings
  138. func (d *Detector) DetectBytes(content []byte) []report.Finding {
  139. return d.DetectString(string(content))
  140. }
  141. // DetectString scans the given string and returns a list of findings
  142. func (d *Detector) DetectString(content string) []report.Finding {
  143. return d.Detect(Fragment{
  144. Raw: content,
  145. })
  146. }
// Detect scans the given fragment and returns a list of findings
func (d *Detector) Detect(fragment Fragment) []report.Finding {
	// Count the fragment toward TotalBytes exactly once: Raw when no byte
	// payload was provided, otherwise Bytes (len(nil) adds 0).
	if fragment.Bytes == nil {
		d.TotalBytes.Add(uint64(len(fragment.Raw)))
	}
	d.TotalBytes.Add(uint64(len(fragment.Bytes)))

	var findings []report.Finding

	// check if filepath is allowed: skip globally-allowlisted paths, the
	// config file itself, and the baseline file (it would flag itself).
	if fragment.FilePath != "" && (d.Config.Allowlist.PathAllowed(fragment.FilePath) ||
		fragment.FilePath == d.Config.Path || (d.baselinePath != "" && fragment.FilePath == d.baselinePath)) {
		return findings
	}

	// add newline indices for location calculation in detectRule
	fragment.newlineIndices = newLineRegexp.FindAllStringIndex(fragment.Raw, -1)

	// setup variables to handle different decoding passes
	currentRaw := fragment.Raw
	encodedSegments := []EncodedSegment{}
	currentDecodeDepth := 0
	decoder := NewDecoder()

	for {
		// build keyword map for prefiltering rules: every aho-corasick hit
		// in the lowercased text becomes a key, so a rule only runs when at
		// least one of its keywords is present (or it has no keywords).
		keywords := make(map[string]bool)
		normalizedRaw := strings.ToLower(currentRaw)
		matches := d.prefilter.MatchString(normalizedRaw)
		for _, m := range matches {
			keywords[normalizedRaw[m.Pos():int(m.Pos())+len(m.Match())]] = true
		}

		for _, rule := range d.Config.Rules {
			if len(rule.Keywords) == 0 {
				// if no keywords are associated with the rule always scan the
				// fragment using the rule
				findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
				continue
			}

			// check if keywords are in the fragment
			for _, k := range rule.Keywords {
				if _, ok := keywords[strings.ToLower(k)]; ok {
					findings = append(findings, d.detectRule(fragment, currentRaw, rule, encodedSegments)...)
					break
				}
			}
		}

		// increment the depth by 1 as we start our decoding pass
		currentDecodeDepth++

		// stop the loop if we've hit our max decoding depth
		if currentDecodeDepth > d.MaxDecodeDepth {
			break
		}

		// decode the currentRaw for the next pass
		currentRaw, encodedSegments = decoder.decode(currentRaw, encodedSegments)

		// stop the loop when there's nothing else to decode
		if len(encodedSegments) == 0 {
			break
		}
	}

	return filter(findings, d.Redact)
}
  204. // detectRule scans the given fragment for the given rule and returns a list of findings
  205. func (d *Detector) detectRule(fragment Fragment, currentRaw string, r config.Rule, encodedSegments []EncodedSegment) []report.Finding {
  206. var (
  207. findings []report.Finding
  208. logger = func() zerolog.Logger {
  209. l := logging.With().Str("rule-id", r.RuleID)
  210. if fragment.CommitSHA != "" {
  211. l = l.Str("commit", fragment.CommitSHA)
  212. }
  213. l = l.Str("path", fragment.FilePath)
  214. return l.Logger()
  215. }()
  216. )
  217. // check if filepath or commit is allowed for this rule
  218. for _, a := range r.Allowlists {
  219. var (
  220. isAllowed bool
  221. commitAllowed = a.CommitAllowed(fragment.CommitSHA)
  222. pathAllowed = a.PathAllowed(fragment.FilePath)
  223. )
  224. if a.MatchCondition == config.AllowlistMatchAnd {
  225. // Determine applicable checks.
  226. var allowlistChecks []bool
  227. if len(a.Commits) > 0 {
  228. allowlistChecks = append(allowlistChecks, commitAllowed)
  229. }
  230. if len(a.Paths) > 0 {
  231. allowlistChecks = append(allowlistChecks, pathAllowed)
  232. }
  233. // These will be checked later.
  234. if len(a.Regexes) > 0 {
  235. allowlistChecks = append(allowlistChecks, false)
  236. }
  237. if len(a.StopWords) > 0 {
  238. allowlistChecks = append(allowlistChecks, false)
  239. }
  240. // Check if allowed.
  241. isAllowed = allTrue(allowlistChecks)
  242. } else {
  243. isAllowed = commitAllowed || pathAllowed
  244. }
  245. if isAllowed {
  246. logger.Trace().
  247. Str("condition", a.MatchCondition.String()).
  248. Bool("commit-allowed", commitAllowed).
  249. Bool("path-allowed", commitAllowed).
  250. Msg("Skipping fragment due to rule allowlist")
  251. return findings
  252. }
  253. }
  254. if r.Path != nil && r.Regex == nil && len(encodedSegments) == 0 {
  255. // Path _only_ rule
  256. if r.Path.MatchString(fragment.FilePath) {
  257. finding := report.Finding{
  258. Description: r.Description,
  259. File: fragment.FilePath,
  260. SymlinkFile: fragment.SymlinkFile,
  261. RuleID: r.RuleID,
  262. Match: fmt.Sprintf("file detected: %s", fragment.FilePath),
  263. Tags: r.Tags,
  264. }
  265. return append(findings, finding)
  266. }
  267. } else if r.Path != nil {
  268. // if path is set _and_ a regex is set, then we need to check both
  269. // so if the path does not match, then we should return early and not
  270. // consider the regex
  271. if !r.Path.MatchString(fragment.FilePath) {
  272. return findings
  273. }
  274. }
  275. // if path only rule, skip content checks
  276. if r.Regex == nil {
  277. return findings
  278. }
  279. // if flag configure and raw data size bigger then the flag
  280. if d.MaxTargetMegaBytes > 0 {
  281. rawLength := len(currentRaw) / 1000000
  282. if rawLength > d.MaxTargetMegaBytes {
  283. logging.Debug().Msgf("skipping file: %s scan due to size: %d", fragment.FilePath, rawLength)
  284. return findings
  285. }
  286. }
  287. // use currentRaw instead of fragment.Raw since this represents the current
  288. // decoding pass on the text
  289. MatchLoop:
  290. for _, matchIndex := range r.Regex.FindAllStringIndex(currentRaw, -1) {
  291. // Extract secret from match
  292. secret := strings.Trim(currentRaw[matchIndex[0]:matchIndex[1]], "\n")
  293. // For any meta data from decoding
  294. var metaTags []string
  295. // Check if the decoded portions of the segment overlap with the match
  296. // to see if its potentially a new match
  297. if len(encodedSegments) > 0 {
  298. if segment := segmentWithDecodedOverlap(encodedSegments, matchIndex[0], matchIndex[1]); segment != nil {
  299. matchIndex = segment.adjustMatchIndex(matchIndex)
  300. metaTags = append(metaTags, segment.tags()...)
  301. } else {
  302. // This item has already been added to a finding
  303. continue
  304. }
  305. } else {
  306. // Fixes: https://github.com/gitleaks/gitleaks/issues/1352
  307. // removes the incorrectly following line that was detected by regex expression '\n'
  308. matchIndex[1] = matchIndex[0] + len(secret)
  309. }
  310. // determine location of match. Note that the location
  311. // in the finding will be the line/column numbers of the _match_
  312. // not the _secret_, which will be different if the secretGroup
  313. // value is set for this rule
  314. loc := location(fragment, matchIndex)
  315. if matchIndex[1] > loc.endLineIndex {
  316. loc.endLineIndex = matchIndex[1]
  317. }
  318. finding := report.Finding{
  319. Description: r.Description,
  320. File: fragment.FilePath,
  321. SymlinkFile: fragment.SymlinkFile,
  322. RuleID: r.RuleID,
  323. StartLine: loc.startLine,
  324. EndLine: loc.endLine,
  325. StartColumn: loc.startColumn,
  326. EndColumn: loc.endColumn,
  327. Secret: secret,
  328. Match: secret,
  329. Tags: append(r.Tags, metaTags...),
  330. Line: fragment.Raw[loc.startLineIndex:loc.endLineIndex],
  331. }
  332. if !d.IgnoreGitleaksAllow &&
  333. strings.Contains(fragment.Raw[loc.startLineIndex:loc.endLineIndex], gitleaksAllowSignature) {
  334. logger.Trace().
  335. Str("finding", finding.Secret).
  336. Msg("Skipping finding due to 'gitleaks:allow' signature")
  337. continue
  338. }
  339. // Set the value of |secret|, if the pattern contains at least one capture group.
  340. // (The first element is the full match, hence we check >= 2.)
  341. groups := r.Regex.FindStringSubmatch(finding.Secret)
  342. if len(groups) >= 2 {
  343. if r.SecretGroup > 0 {
  344. if len(groups) <= r.SecretGroup {
  345. // Config validation should prevent this
  346. continue
  347. }
  348. finding.Secret = groups[r.SecretGroup]
  349. } else {
  350. // If |secretGroup| is not set, we will use the first suitable capture group.
  351. for _, s := range groups[1:] {
  352. if len(s) > 0 {
  353. finding.Secret = s
  354. break
  355. }
  356. }
  357. }
  358. }
  359. // check entropy
  360. entropy := shannonEntropy(finding.Secret)
  361. finding.Entropy = float32(entropy)
  362. if r.Entropy != 0.0 {
  363. if entropy <= r.Entropy {
  364. logger.Trace().
  365. Float32("entropy", finding.Entropy).
  366. Msg("Skipping finding due to low entropy")
  367. // entropy is too low, skip this finding
  368. continue
  369. }
  370. }
  371. // check if the regexTarget is defined in the allowlist "regexes" entry
  372. // or if the secret is in the list of stopwords
  373. globalAllowlistTarget := finding.Secret
  374. switch d.Config.Allowlist.RegexTarget {
  375. case "match":
  376. globalAllowlistTarget = finding.Match
  377. case "line":
  378. globalAllowlistTarget = finding.Line
  379. }
  380. if d.Config.Allowlist.RegexAllowed(globalAllowlistTarget) {
  381. logger.Trace().
  382. Str("finding", globalAllowlistTarget).
  383. Msg("Skipping finding due to global allowlist regex")
  384. continue
  385. } else if d.Config.Allowlist.ContainsStopWord(finding.Secret) {
  386. logger.Trace().
  387. Str("finding", finding.Secret).
  388. Msg("Skipping finding due to global allowlist stopword")
  389. continue
  390. }
  391. // check if the result matches any of the rule allowlists.
  392. for _, a := range r.Allowlists {
  393. allowlistTarget := finding.Secret
  394. switch a.RegexTarget {
  395. case "match":
  396. allowlistTarget = finding.Match
  397. case "line":
  398. allowlistTarget = finding.Line
  399. }
  400. var (
  401. isAllowed bool
  402. regexAllowed = a.RegexAllowed(allowlistTarget)
  403. containsStopword = a.ContainsStopWord(finding.Secret)
  404. )
  405. // check if the secret is in the list of stopwords
  406. if a.MatchCondition == config.AllowlistMatchAnd {
  407. // Determine applicable checks.
  408. var allowlistChecks []bool
  409. if len(a.Commits) > 0 {
  410. allowlistChecks = append(allowlistChecks, a.CommitAllowed(fragment.CommitSHA))
  411. }
  412. if len(a.Paths) > 0 {
  413. allowlistChecks = append(allowlistChecks, a.PathAllowed(fragment.FilePath))
  414. }
  415. if len(a.Regexes) > 0 {
  416. allowlistChecks = append(allowlistChecks, regexAllowed)
  417. }
  418. if len(a.StopWords) > 0 {
  419. allowlistChecks = append(allowlistChecks, containsStopword)
  420. }
  421. // Check if allowed.
  422. isAllowed = allTrue(allowlistChecks)
  423. } else {
  424. isAllowed = regexAllowed || containsStopword
  425. }
  426. if isAllowed {
  427. logger.Trace().
  428. Str("finding", finding.Secret).
  429. Str("condition", a.MatchCondition.String()).
  430. Bool("regex-allowed", regexAllowed).
  431. Bool("contains-stopword", containsStopword).
  432. Msg("Skipping finding due to rule allowlist")
  433. continue MatchLoop
  434. }
  435. }
  436. findings = append(findings, finding)
  437. }
  438. return findings
  439. }
  440. func allTrue(bools []bool) bool {
  441. allMatch := true
  442. for _, check := range bools {
  443. if !check {
  444. allMatch = false
  445. break
  446. }
  447. }
  448. return allMatch
  449. }
  450. // addFinding synchronously adds a finding to the findings slice
  451. func (d *Detector) addFinding(finding report.Finding) {
  452. globalFingerprint := fmt.Sprintf("%s:%s:%d", finding.File, finding.RuleID, finding.StartLine)
  453. if finding.Commit != "" {
  454. finding.Fingerprint = fmt.Sprintf("%s:%s:%s:%d", finding.Commit, finding.File, finding.RuleID, finding.StartLine)
  455. } else {
  456. finding.Fingerprint = globalFingerprint
  457. }
  458. // check if we should ignore this finding
  459. if _, ok := d.gitleaksIgnore[globalFingerprint]; ok {
  460. logging.Debug().Msgf("ignoring finding with global Fingerprint %s",
  461. finding.Fingerprint)
  462. return
  463. } else if finding.Commit != "" {
  464. // Awkward nested if because I'm not sure how to chain these two conditions.
  465. if _, ok := d.gitleaksIgnore[finding.Fingerprint]; ok {
  466. logging.Debug().Msgf("ignoring finding with Fingerprint %s",
  467. finding.Fingerprint)
  468. return
  469. }
  470. }
  471. if d.baseline != nil && !IsNew(finding, d.baseline) {
  472. logging.Debug().Msgf("baseline duplicate -- ignoring finding with Fingerprint %s", finding.Fingerprint)
  473. return
  474. }
  475. d.findingMutex.Lock()
  476. d.findings = append(d.findings, finding)
  477. if d.Verbose {
  478. printFinding(finding, d.NoColor)
  479. }
  480. d.findingMutex.Unlock()
  481. }
// addCommit synchronously adds a commit to the commit map. Per the field's
// documentation, commitMap is only used for logging during git scans.
// NOTE(review): this map write is not mutex-guarded here — presumably
// callers serialize access; confirm before invoking concurrently.
func (d *Detector) addCommit(commit string) {
	d.commitMap[commit] = true
}