directory.go 5.8 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229
  1. package detect
  2. import (
  3. "bufio"
  4. "bytes"
  5. "io"
  6. "os"
  7. "path/filepath"
  8. "strings"
  9. "github.com/h2non/filetype"
  10. "github.com/zricethezav/gitleaks/v8/logging"
  11. "github.com/zricethezav/gitleaks/v8/report"
  12. "github.com/zricethezav/gitleaks/v8/sources"
  13. )
// maxPeekSize bounds how far readUntilSafeBoundary may read past the end of a
// chunk while hunting for a blank-line boundary: 25,000 bytes (~25 kB).
const maxPeekSize = 25 * 1_000
  15. // DetectFiles schedules each ScanTarget—file or archive—for concurrent scanning.
  16. func (d *Detector) DetectFiles(paths <-chan sources.ScanTarget) ([]report.Finding, error) {
  17. for pa := range paths {
  18. d.Sema.Go(func() error {
  19. return d.detectScanTarget(pa)
  20. })
  21. }
  22. if err := d.Sema.Wait(); err != nil {
  23. return d.findings, err
  24. }
  25. return d.findings, nil
  26. }
// detectScanTarget handles one ScanTarget: it unpacks archives recursively
// or scans a regular file, always using VirtualPath for reporting.
//
// Archive branch: every extracted entry is rescheduled on d.Sema as its own
// target, with VirtualPath extended so the full archive chain appears in
// reports. File branch: the file is read in chunkSize pieces, each piece is
// extended to a "safe" boundary via readUntilSafeBoundary, scanned with
// d.Detect, and finding line numbers are offset by the newlines consumed in
// earlier chunks. Per-target problems (extraction failure, permission denied,
// binary header) are logged and swallowed (return nil) so one bad file does
// not abort the whole scan; other errors propagate.
func (d *Detector) detectScanTarget(scanTarget sources.ScanTarget) error {
	// Choose display path: either VirtualPath (archive chain) or on-disk path.
	display := scanTarget.Path
	if scanTarget.VirtualPath != "" {
		display = scanTarget.VirtualPath
	}
	logger := logging.With().Str("path", display).Logger()
	logger.Trace().Msg("Scanning path")
	// skipping windows archives for now
	if isArchive(scanTarget.Path) && !isWindows {
		logger.Debug().Msg("Found archive")
		targets, tmpArchiveDir, err := extractArchive(scanTarget.Path)
		if err != nil {
			// Extraction failure skips only this archive, not the whole scan.
			logger.Warn().Err(err).Msg("Failed to extract archive")
			return nil
		}
		// Schedule each extracted file for its own scan, carrying forward VirtualPath.
		for _, t := range targets {
			t := t // per-iteration copy for the closure below (pre-Go 1.22 semantics)
			// compute path INSIDE this archive
			rel, rerr := filepath.Rel(tmpArchiveDir, t.Path)
			if rerr != nil {
				// Fall back to the bare file name if t.Path is not relative
				// to the extraction directory.
				rel = filepath.Base(t.Path)
			}
			rel = filepath.ToSlash(rel)
			// prepend existing chain or archive base name
			if scanTarget.VirtualPath != "" {
				t.VirtualPath = scanTarget.VirtualPath + "/" + rel
			} else {
				t.VirtualPath = filepath.Base(scanTarget.Path) + "/" + rel
			}
			d.Sema.Go(func() error {
				return d.detectScanTarget(t)
			})
		}
		return nil
	}
	// --- Regular file branch ---
	f, err := os.Open(scanTarget.Path)
	if err != nil {
		if os.IsPermission(err) {
			logger.Warn().Msg("Skipping file: permission denied")
			return nil
		}
		return err
	}
	defer f.Close()
	// Skip binary files by sniffing header.
	// 261 bytes: presumably the maximum header length the filetype package
	// needs to match — TODO confirm against the filetype docs.
	head := make([]byte, 261)
	if n, _ := io.ReadFull(f, head); n > 0 {
		// io.ReadFull's error is deliberately discarded: a file shorter than
		// 261 bytes still yields n > 0 and a usable partial header.
		if kind, _ := filetype.Match(head[:n]); kind != filetype.Unknown {
			logger.Debug().Str("kind", kind.Extension).Msg("Skipping binary")
			return nil
		}
	}
	// Rewind so the scan below also covers the sniffed header bytes.
	if _, err := f.Seek(0, io.SeekStart); err != nil {
		return err
	}
	reader := bufio.NewReader(f)
	buf := make([]byte, chunkSize)
	// totalLines counts newlines consumed in previous chunks; it offsets the
	// fragment-relative line numbers reported by Detect.
	totalLines := 0
	for {
		n, err := reader.Read(buf)
		if n > 0 {
			// Extend the chunk to a blank-line boundary so a secret is not
			// split across two chunks (see readUntilSafeBoundary).
			peekBuf := bytes.NewBuffer(buf[:n])
			if readErr := readUntilSafeBoundary(reader, n, maxPeekSize, peekBuf); readErr != nil {
				return readErr
			}
			chunk := peekBuf.String()
			linesInChunk := strings.Count(chunk, "\n")
			// build fragment and set FilePath to our display chain
			fragment := Fragment{
				Raw:   chunk,
				Bytes: peekBuf.Bytes(),
			}
			fragment.FilePath = display
			// if this file was itself a symlink
			if scanTarget.Symlink != "" {
				fragment.SymlinkFile = scanTarget.Symlink
			}
			if isWindows {
				fragment.WindowsFilePath = scanTarget.Path
			}
			// run detection and adjust line numbers
			for _, finding := range d.Detect(fragment) {
				// NOTE(review): the +1 assumes Detect reports 0-based line
				// numbers within the fragment — confirm against Detect's contract.
				finding.StartLine += totalLines + 1
				finding.EndLine += totalLines + 1
				// We have to augment the finding if the source is coming
				// from a archive committed in Git
				if scanTarget.Source == "github-archive" {
					finding.Author = scanTarget.GitInfo.Author
					finding.Commit = scanTarget.GitInfo.Commit
					finding.Email = scanTarget.GitInfo.Email
					finding.Date = scanTarget.GitInfo.Date
					finding.Message = scanTarget.GitInfo.Message
				}
				d.AddFinding(finding)
			}
			totalLines += linesInChunk
		}
		if err != nil {
			if err == io.EOF {
				// Normal termination of the chunk loop.
				return nil
			}
			return err
		}
	}
}
  137. // readUntilSafeBoundary consumes |f| until it finds two consecutive `\n` characters, up to |maxPeekSize|.
  138. // This hopefully avoids splitting. (https://github.com/gitleaks/gitleaks/issues/1651)
  139. func readUntilSafeBoundary(r *bufio.Reader, n int, maxPeekSize int, peekBuf *bytes.Buffer) error {
  140. if peekBuf.Len() == 0 {
  141. return nil
  142. }
  143. // Does the buffer end in consecutive newlines?
  144. var (
  145. data = peekBuf.Bytes()
  146. lastChar = data[len(data)-1]
  147. newlineCount = 0 // Tracks consecutive newlines
  148. )
  149. if isWhitespace(lastChar) {
  150. for i := len(data) - 1; i >= 0; i-- {
  151. lastChar = data[i]
  152. if lastChar == '\n' {
  153. newlineCount++
  154. // Stop if two consecutive newlines are found
  155. if newlineCount >= 2 {
  156. return nil
  157. }
  158. } else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
  159. // The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
  160. // (Intentionally do nothing.)
  161. } else {
  162. break
  163. }
  164. }
  165. }
  166. // If not, read ahead until we (hopefully) find some.
  167. newlineCount = 0
  168. for {
  169. data = peekBuf.Bytes()
  170. // Check if the last character is a newline.
  171. lastChar = data[len(data)-1]
  172. if lastChar == '\n' {
  173. newlineCount++
  174. // Stop if two consecutive newlines are found
  175. if newlineCount >= 2 {
  176. break
  177. }
  178. } else if lastChar == '\r' || lastChar == ' ' || lastChar == '\t' {
  179. // The presence of other whitespace characters (`\r`, ` `, `\t`) shouldn't reset the count.
  180. // (Intentionally do nothing.)
  181. } else {
  182. newlineCount = 0 // Reset if a non-newline character is found
  183. }
  184. // Stop growing the buffer if it reaches maxSize
  185. if (peekBuf.Len() - n) >= maxPeekSize {
  186. break
  187. }
  188. // Read additional data into a temporary buffer
  189. b, err := r.ReadByte()
  190. if err != nil {
  191. if err == io.EOF {
  192. break
  193. }
  194. return err
  195. }
  196. peekBuf.WriteByte(b)
  197. }
  198. return nil
  199. }