directory.go 5.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231
  1. package detect
  2. import (
  3. "bufio"
  4. "bytes"
  5. "io"
  6. "os"
  7. "path/filepath"
  8. "strings"
  9. "github.com/h2non/filetype"
  10. "github.com/zricethezav/gitleaks/v8/logging"
  11. "github.com/zricethezav/gitleaks/v8/report"
  12. "github.com/zricethezav/gitleaks/v8/sources"
  13. )
// maxPeekSize caps how many extra bytes readUntilSafeBoundary may consume past
// a chunk boundary while searching for a safe (blank-line) split point: 25 KB.
const maxPeekSize = 25 * 1_000 // 25 KB
  15. // DetectFiles schedules each ScanTarget—file or archive—for concurrent scanning.
  16. func (d *Detector) DetectFiles(paths <-chan sources.ScanTarget) ([]report.Finding, error) {
  17. for pa := range paths {
  18. d.Sema.Go(func() error {
  19. return d.detectScanTarget(pa)
  20. })
  21. }
  22. if err := d.Sema.Wait(); err != nil {
  23. return d.findings, err
  24. }
  25. return d.findings, nil
  26. }
// detectScanTarget handles one ScanTarget: it unpacks archives recursively
// or scans a regular file, always using VirtualPath for reporting.
//
// Per-target problems (permission denied, broken archive, binary content)
// are logged and swallowed so one bad target does not abort the whole scan;
// other errors are returned to the semaphore group.
func (d *Detector) detectScanTarget(scanTarget sources.ScanTarget) error {
	// Choose display path: either VirtualPath (archive chain) or on-disk path.
	display := scanTarget.Path
	if scanTarget.VirtualPath != "" {
		display = scanTarget.VirtualPath
	}
	logger := logging.With().Str("path", display).Logger()
	logger.Trace().Msg("Scanning path")

	// --- Archive branch: extract, then recurse on each extracted file. ---
	if isArchive(scanTarget.Path) {
		logger.Debug().Msg("Found archive")
		targets, tmpArchiveDir, err := extractArchive(scanTarget.Path)
		if err != nil {
			// Best effort: a corrupt archive is logged, not fatal.
			logger.Warn().Err(err).Msg("Failed to extract archive")
			return nil
		}
		// Schedule each extracted file for its own scan, carrying forward VirtualPath.
		for _, t := range targets {
			t := t // per-iteration copy for the closure below (pre-Go-1.22 loop semantics)
			// compute path INSIDE this archive
			rel, rerr := filepath.Rel(tmpArchiveDir, t.Path)
			if rerr != nil {
				// Fall back to the bare file name if Rel can't relate the paths.
				rel = filepath.Base(t.Path)
			}
			rel = filepath.ToSlash(rel)
			// prepend existing chain or archive base name
			if scanTarget.VirtualPath != "" {
				t.VirtualPath = scanTarget.VirtualPath + "/" + rel
			} else {
				t.VirtualPath = filepath.Base(scanTarget.Path) + "/" + rel
			}
			d.Sema.Go(func() error {
				return d.detectScanTarget(t)
			})
		}
		return nil
	}

	// --- Regular file branch ---
	f, err := os.Open(scanTarget.Path)
	if err != nil {
		if os.IsPermission(err) {
			logger.Warn().Msg("Skipping file: permission denied")
			return nil
		}
		return err
	}
	defer f.Close()

	// Skip binary files by sniffing the header. 261 bytes is the header size
	// the filetype package uses for its magic-number matching. The ReadFull
	// error is intentionally ignored: a short read (small file) still yields
	// n usable bytes, and n == 0 simply skips the sniff.
	head := make([]byte, 261)
	if n, _ := io.ReadFull(f, head); n > 0 {
		if kind, _ := filetype.Match(head[:n]); kind != filetype.Unknown {
			logger.Debug().Str("kind", kind.Extension).Msg("Skipping binary")
			return nil
		}
	}
	// Rewind so the scan below sees the sniffed bytes again.
	if _, err := f.Seek(0, io.SeekStart); err != nil {
		return err
	}

	// Read the file in chunkSize pieces, extending each chunk to a safe
	// boundary so secrets are not split across two fragments.
	reader := bufio.NewReader(f)
	buf := make([]byte, chunkSize)
	totalLines := 0 // lines consumed in previous chunks; offsets finding line numbers
	for {
		n, err := reader.Read(buf)
		if n > 0 {
			// Extend the chunk past n up to a blank-line boundary (bounded by maxPeekSize).
			peekBuf := bytes.NewBuffer(buf[:n])
			if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
				return readErr
			}

			chunk := peekBuf.String()
			linesInChunk := strings.Count(chunk, "\n")
			// build fragment and set FilePath to our display chain
			fragment := Fragment{
				Raw:   chunk,
				Bytes: peekBuf.Bytes(),
			}
			fragment.FilePath = display
			// if this file was itself a symlink
			if scanTarget.Symlink != "" {
				fragment.SymlinkFile = scanTarget.Symlink
			}
			// On Windows, report slash-normalized paths but keep the native
			// path in WindowsFilePath.
			if isWindows {
				fragment.FilePath = filepath.ToSlash(scanTarget.Path)
				fragment.SymlinkFile = filepath.ToSlash(fragment.SymlinkFile)
				fragment.WindowsFilePath = scanTarget.Path
			}

			// Run detection and shift chunk-relative line numbers into file
			// coordinates. NOTE(review): the +1 assumes Detect reports 0-based
			// lines within the fragment — confirm against Detect's contract.
			for _, finding := range d.Detect(fragment) {
				finding.StartLine += totalLines + 1
				finding.EndLine += totalLines + 1
				// We have to augment the finding if the source is coming
				// from an archive committed in Git.
				if scanTarget.Source == "github-archive" {
					finding.Author = scanTarget.GitInfo.Author
					finding.Commit = scanTarget.GitInfo.Commit
					finding.Email = scanTarget.GitInfo.Email
					finding.Date = scanTarget.GitInfo.Date
					finding.Message = scanTarget.GitInfo.Message
				}
				d.AddFinding(finding)
			}
			totalLines += linesInChunk
		}
		if err != nil {
			if err == io.EOF {
				return nil // normal end of file
			}
			return err
		}
	}
}
  138. // readUntilSafeBoundary consumes |f| until it finds two consecutive `\n` characters, up to |maxPeekSize|.
  139. // This hopefully avoids splitting. (https://github.com/gitleaks/gitleaks/issues/1651)
  140. func readUntilSafeBoundary(r *bufio.Reader, n int, maxPeekSize int, peekBuf *bytes.Buffer) error {
  141. if peekBuf.Len() == 0 {
  142. return nil
  143. }
  144. // Does the buffer end in consecutive newlines?
  145. var (
  146. data = peekBuf.Bytes()
  147. lastChar = data[len(data)-1]
  148. newlineCount = 0 // Tracks consecutive newlines
  149. )
  150. if isWhitespace(lastChar) {
  151. for i := len(data) - 1; i >= 0; i-- {
  152. lastChar = data[i]
  153. if lastChar == '\n' {
  154. newlineCount++
  155. // Stop if two consecutive newlines are found
  156. if newlineCount >= 2 {
  157. return nil
  158. }
  159. } else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
  160. // The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
  161. // (Intentionally do nothing.)
  162. } else {
  163. break
  164. }
  165. }
  166. }
  167. // If not, read ahead until we (hopefully) find some.
  168. newlineCount = 0
  169. for {
  170. data = peekBuf.Bytes()
  171. // Check if the last character is a newline.
  172. lastChar = data[len(data)-1]
  173. if lastChar == '\n' {
  174. newlineCount++
  175. // Stop if two consecutive newlines are found
  176. if newlineCount >= 2 {
  177. break
  178. }
  179. } else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
  180. // The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
  181. // (Intentionally do nothing.)
  182. } else {
  183. newlineCount = 0 // Reset if a non-newline character is found
  184. }
  185. // Stop growing the buffer if it reaches maxSize
  186. if (peekBuf.Len() - n) >= maxPeekSize {
  187. break
  188. }
  189. // Read additional data into a temporary buffer
  190. b, err := r.ReadByte()
  191. if err != nil {
  192. if err == io.EOF {
  193. break
  194. }
  195. return err
  196. }
  197. peekBuf.WriteByte(b)
  198. }
  199. return nil
  200. }