detect.go 13 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471
  1. package detect
  2. import (
  3. "bufio"
  4. "context"
  5. "fmt"
  6. "os"
  7. "path/filepath"
  8. "regexp"
  9. "strings"
  10. "sync"
  11. "github.com/zricethezav/gitleaks/v8/config"
  12. "github.com/zricethezav/gitleaks/v8/detect/git"
  13. "github.com/zricethezav/gitleaks/v8/report"
  14. "github.com/fatih/semgroup"
  15. "github.com/gitleaks/go-gitdiff/gitdiff"
  16. "github.com/h2non/filetype"
  17. ahocorasick "github.com/petar-dambovaliev/aho-corasick"
  18. "github.com/rs/zerolog/log"
  19. "github.com/spf13/viper"
  20. )
  21. // Type used to differentiate between git scan types:
  22. // $ gitleaks detect
  23. // $ gitleaks protect
  24. // $ gitleaks protect staged
  25. type GitScanType int
  26. const (
  27. DetectType GitScanType = iota
  28. ProtectType
  29. ProtectStagedType
  30. gitleaksAllowSignature = "gitleaks:allow"
  31. )
// Detector is the main detector struct
type Detector struct {
	// Config is the configuration for the detector
	Config config.Config
	// Redact is a flag to redact findings. This is exported
	// so users using gitleaks as a library can set this flag
	// without calling `detector.Start(cmd *cobra.Command)`
	Redact bool
	// Verbose is a flag to print findings as they are added.
	Verbose bool
	// commitMap is used to keep track of commits that have been scanned.
	// This is only used for logging purposes and git scans.
	commitMap map[string]bool
	// findingMutex is to prevent concurrent access to the
	// findings slice when adding findings.
	findingMutex *sync.Mutex
	// findings is a slice of report.Findings. This is the result
	// of the detector's scan which can then be used to generate a
	// report.
	findings []report.Finding
	// prefilter is a ahocorasick struct used for doing efficient string
	// matching given a set of words (keywords from the rules in the config)
	prefilter ahocorasick.AhoCorasick
	// gitleaksIgnore is the set of finding fingerprints to skip, loaded
	// from a .gitleaksignore file by AddGitleaksIgnore (one per line).
	gitleaksIgnore map[string]bool
}
// Fragment contains the data to be scanned
type Fragment struct {
	// Raw is the raw content of the fragment
	Raw string
	// FilePath is the path to the file if applicable
	FilePath string
	// CommitSHA is the SHA of the commit if applicable
	CommitSHA string
	// newlineIndices is a list of indices of newlines in the raw content.
	// This is used to calculate the line location of a finding
	newlineIndices [][]int
	// keywords is a set of all the lowercased keywords contained within
	// the contents of this fragment, populated by Detect for prefiltering
	// rules before running their regexes.
	keywords map[string]bool
}
  73. // NewDetector creates a new detector with the given config
  74. func NewDetector(cfg config.Config) *Detector {
  75. builder := ahocorasick.NewAhoCorasickBuilder(ahocorasick.Opts{
  76. AsciiCaseInsensitive: true,
  77. MatchOnlyWholeWords: false,
  78. MatchKind: ahocorasick.LeftMostLongestMatch,
  79. DFA: true,
  80. })
  81. return &Detector{
  82. commitMap: make(map[string]bool),
  83. gitleaksIgnore: make(map[string]bool),
  84. findingMutex: &sync.Mutex{},
  85. findings: make([]report.Finding, 0),
  86. Config: cfg,
  87. prefilter: builder.Build(cfg.Keywords),
  88. }
  89. }
  90. // NewDetectorDefaultConfig creates a new detector with the default config
  91. func NewDetectorDefaultConfig() (*Detector, error) {
  92. viper.SetConfigType("toml")
  93. err := viper.ReadConfig(strings.NewReader(config.DefaultConfig))
  94. if err != nil {
  95. return nil, err
  96. }
  97. var vc config.ViperConfig
  98. err = viper.Unmarshal(&vc)
  99. if err != nil {
  100. return nil, err
  101. }
  102. cfg, err := vc.Translate()
  103. if err != nil {
  104. return nil, err
  105. }
  106. return NewDetector(cfg), nil
  107. }
  108. func (d *Detector) AddGitleaksIgnore(gitleaksIgnorePath string) error {
  109. log.Debug().Msg("found .gitleaksignore file")
  110. file, err := os.Open(gitleaksIgnorePath)
  111. if err != nil {
  112. return err
  113. }
  114. defer file.Close()
  115. scanner := bufio.NewScanner(file)
  116. for scanner.Scan() {
  117. d.gitleaksIgnore[scanner.Text()] = true
  118. }
  119. return nil
  120. }
  121. // DetectBytes scans the given bytes and returns a list of findings
  122. func (d *Detector) DetectBytes(content []byte) []report.Finding {
  123. return d.DetectString(string(content))
  124. }
  125. // DetectString scans the given string and returns a list of findings
  126. func (d *Detector) DetectString(content string) []report.Finding {
  127. return d.Detect(Fragment{
  128. Raw: content,
  129. })
  130. }
// detectRule scans the given fragment for the given rule and returns a list of findings
func (d *Detector) detectRule(fragment Fragment, rule config.Rule) []report.Finding {
	var findings []report.Finding

	// check if filepath or commit is allowed for this rule
	if rule.Allowlist.CommitAllowed(fragment.CommitSHA) ||
		rule.Allowlist.PathAllowed(fragment.FilePath) {
		return findings
	}

	if rule.Path != nil && rule.Regex == nil {
		// Path _only_ rule: the file path itself is the finding; no content
		// is inspected and at most one finding is produced.
		if rule.Path.Match([]byte(fragment.FilePath)) {
			finding := report.Finding{
				Description: rule.Description,
				File:        fragment.FilePath,
				RuleID:      rule.RuleID,
				Match:       fmt.Sprintf("file detected: %s", fragment.FilePath),
				Tags:        rule.Tags,
			}
			return append(findings, finding)
		}
	} else if rule.Path != nil {
		// if path is set _and_ a regex is set, then we need to check both
		// so if the path does not match, then we should return early and not
		// consider the regex
		if !rule.Path.Match([]byte(fragment.FilePath)) {
			return findings
		}
	}

	// if path only rule, skip content checks
	if rule.Regex == nil {
		return findings
	}

	matchIndices := rule.Regex.FindAllStringIndex(fragment.Raw, -1)
	for _, matchIndex := range matchIndices {
		// extract secret from match; surrounding newlines are trimmed so the
		// reported secret stays on one logical line
		secret := strings.Trim(fragment.Raw[matchIndex[0]:matchIndex[1]], "\n")

		// determine location of match. Note that the location
		// in the finding will be the line/column numbers of the _match_
		// not the _secret_, which will be different if the secretGroup
		// value is set for this rule
		loc := location(fragment, matchIndex)

		finding := report.Finding{
			Description: rule.Description,
			File:        fragment.FilePath,
			RuleID:      rule.RuleID,
			StartLine:   loc.startLine,
			EndLine:     loc.endLine,
			StartColumn: loc.startColumn,
			EndColumn:   loc.endColumn,
			Secret:      secret,
			Match:       secret,
			Tags:        rule.Tags,
		}

		// honor the inline "gitleaks:allow" annotation: if it appears
		// anywhere on the line(s) spanned by the match, skip the finding
		if strings.Contains(fragment.Raw[loc.startLineIndex:loc.endLineIndex],
			gitleaksAllowSignature) {
			continue
		}

		// check if the secret is in the allowlist.
		// NOTE(review): this runs against the full match, before the
		// secret-group extraction below narrows finding.Secret.
		if rule.Allowlist.RegexAllowed(finding.Secret) ||
			d.Config.Allowlist.RegexAllowed(finding.Secret) {
			continue
		}

		// extract secret from secret group if set
		if rule.SecretGroup != 0 {
			groups := rule.Regex.FindStringSubmatch(secret)
			if len(groups) <= rule.SecretGroup || len(groups) == 0 {
				// Config validation should prevent this
				continue
			}
			secret = groups[rule.SecretGroup]
			finding.Secret = secret
		}

		// check if the secret is in the list of stopwords
		if rule.Allowlist.ContainsStopWord(finding.Secret) ||
			d.Config.Allowlist.ContainsStopWord(finding.Secret) {
			continue
		}

		// check entropy: a finding is dropped when the rule sets a non-zero
		// entropy threshold and the secret's Shannon entropy does not exceed it
		entropy := shannonEntropy(finding.Secret)
		finding.Entropy = float32(entropy)
		if rule.Entropy != 0.0 {
			if entropy <= rule.Entropy {
				// entropy is too low, skip this finding
				continue
			}
			// NOTE: this is a goofy hack to get around the fact there golang's regex engine
			// does not support positive lookaheads. Ideally we would want to add a
			// restriction on generic rules regex that requires the secret match group
			// contains both numbers and alphabetical characters, not just alphabetical characters.
			// What this bit of code does is check if the ruleid is prepended with "generic" and enforces the
			// secret contains both digits and alphabetical characters.
			// TODO: this should be replaced with stop words
			if strings.HasPrefix(rule.RuleID, "generic") {
				if !containsDigit(secret) {
					continue
				}
			}
		}

		findings = append(findings, finding)
	}
	return findings
}
  233. // GitScan accepts a *gitdiff.File channel which contents a git history generated from
  234. // the output of `git log -p ...`. startGitScan will look at each file (patch) in the history
  235. // and determine if the patch contains any findings.
  236. func (d *Detector) DetectGit(source string, logOpts string, gitScanType GitScanType) ([]report.Finding, error) {
  237. var (
  238. gitdiffFiles <-chan *gitdiff.File
  239. err error
  240. )
  241. switch gitScanType {
  242. case DetectType:
  243. gitdiffFiles, err = git.GitLog(source, logOpts)
  244. if err != nil {
  245. return d.findings, err
  246. }
  247. case ProtectType:
  248. gitdiffFiles, err = git.GitDiff(source, false)
  249. if err != nil {
  250. return d.findings, err
  251. }
  252. case ProtectStagedType:
  253. gitdiffFiles, err = git.GitDiff(source, true)
  254. if err != nil {
  255. return d.findings, err
  256. }
  257. }
  258. s := semgroup.NewGroup(context.Background(), 4)
  259. for gitdiffFile := range gitdiffFiles {
  260. gitdiffFile := gitdiffFile
  261. // skip binary files
  262. if gitdiffFile.IsBinary || gitdiffFile.IsDelete {
  263. continue
  264. }
  265. // Check if commit is allowed
  266. commitSHA := ""
  267. if gitdiffFile.PatchHeader != nil {
  268. commitSHA = gitdiffFile.PatchHeader.SHA
  269. if d.Config.Allowlist.CommitAllowed(gitdiffFile.PatchHeader.SHA) {
  270. continue
  271. }
  272. }
  273. d.addCommit(commitSHA)
  274. s.Go(func() error {
  275. for _, textFragment := range gitdiffFile.TextFragments {
  276. if textFragment == nil {
  277. return nil
  278. }
  279. fragment := Fragment{
  280. Raw: textFragment.Raw(gitdiff.OpAdd),
  281. CommitSHA: commitSHA,
  282. FilePath: gitdiffFile.NewName,
  283. }
  284. for _, finding := range d.Detect(fragment) {
  285. d.addFinding(augmentGitFinding(finding, textFragment, gitdiffFile))
  286. }
  287. }
  288. return nil
  289. })
  290. }
  291. if err := s.Wait(); err != nil {
  292. return d.findings, err
  293. }
  294. log.Debug().Msgf("%d commits scanned. Note: this number might be smaller than expected due to commits with no additions", len(d.commitMap))
  295. return d.findings, nil
  296. }
// DetectFiles accepts a path to a source directory or file and begins a scan of the
// file or directory. One worker goroutine walks the tree and feeds regular-file
// paths into a channel (closed when the walk finishes); sibling workers read,
// filter, and scan each file. Findings are accumulated through the
// mutex-protected addFinding and returned after all workers complete.
func (d *Detector) DetectFiles(source string) ([]report.Finding, error) {
	s := semgroup.NewGroup(context.Background(), 4)
	paths := make(chan string)
	s.Go(func() error {
		// closing paths unblocks the consumer loop below once the walk ends
		defer close(paths)
		return filepath.Walk(source,
			func(path string, fInfo os.FileInfo, err error) error {
				if err != nil {
					return err
				}
				// never descend into .git directories
				if fInfo.Name() == ".git" && fInfo.IsDir() {
					return filepath.SkipDir
				}
				// only regular files are scanned (no symlinks, devices, dirs)
				if fInfo.Mode().IsRegular() {
					paths <- path
				}
				return nil
			})
	})
	for pa := range paths {
		p := pa
		s.Go(func() error {
			// whole file is read into memory; large files are not streamed
			b, err := os.ReadFile(p)
			if err != nil {
				return err
			}
			mimetype, err := filetype.Match(b)
			if err != nil {
				return err
			}
			if mimetype.MIME.Type == "application" {
				return nil // skip binary files
			}

			fragment := Fragment{
				Raw:      string(b),
				FilePath: p,
			}
			for _, finding := range d.Detect(fragment) {
				// need to add 1 since line counting starts at 1
				finding.EndLine++
				finding.StartLine++
				d.addFinding(finding)
			}
			return nil
		})
	}

	if err := s.Wait(); err != nil {
		return d.findings, err
	}
	return d.findings, nil
}
  350. // Detect scans the given fragment and returns a list of findings
  351. func (d *Detector) Detect(fragment Fragment) []report.Finding {
  352. var findings []report.Finding
  353. // initiate fragment keywords
  354. fragment.keywords = make(map[string]bool)
  355. // check if filepath is allowed
  356. if fragment.FilePath != "" && (d.Config.Allowlist.PathAllowed(fragment.FilePath) ||
  357. fragment.FilePath == d.Config.Path) {
  358. return findings
  359. }
  360. // add newline indices for location calculation in detectRule
  361. fragment.newlineIndices = regexp.MustCompile("\n").FindAllStringIndex(fragment.Raw, -1)
  362. // build keyword map for prefiltering rules
  363. normalizedRaw := strings.ToLower(fragment.Raw)
  364. matches := d.prefilter.FindAll(normalizedRaw)
  365. for _, m := range matches {
  366. fragment.keywords[normalizedRaw[m.Start():m.End()]] = true
  367. }
  368. for _, rule := range d.Config.Rules {
  369. if len(rule.Keywords) == 0 {
  370. // if not keywords are associated with the rule always scan the
  371. // fragment using the rule
  372. findings = append(findings, d.detectRule(fragment, rule)...)
  373. continue
  374. }
  375. fragmentContainsKeyword := false
  376. // check if keywords are in the fragment
  377. for _, k := range rule.Keywords {
  378. if _, ok := fragment.keywords[strings.ToLower(k)]; ok {
  379. fragmentContainsKeyword = true
  380. }
  381. }
  382. if fragmentContainsKeyword {
  383. findings = append(findings, d.detectRule(fragment, rule)...)
  384. }
  385. }
  386. return filter(findings, d.Redact)
  387. }
  388. // addFinding synchronously adds a finding to the findings slice
  389. func (d *Detector) addFinding(finding report.Finding) {
  390. // check if we should ignore this finding
  391. if _, ok := d.gitleaksIgnore[finding.Fingerprint]; ok {
  392. log.Debug().Msgf("ignoring finding with Fingerprint %s",
  393. finding.Fingerprint)
  394. return
  395. }
  396. d.findingMutex.Lock()
  397. d.findings = append(d.findings, finding)
  398. if d.Verbose {
  399. printFinding(finding)
  400. }
  401. d.findingMutex.Unlock()
  402. }
// addCommit records a commit SHA in commitMap (used only for the scan-count
// log line in DetectGit). NOTE(review): the map write itself is not
// mutex-protected — DetectGit calls this from its single consumer loop before
// spawning workers, so callers must not invoke it concurrently.
func (d *Detector) addCommit(commit string) {
	d.commitMap[commit] = true
}