Ver código fonte

go mod and even more work

zach rice 6 anos atrás
pai
commit
e79ffc6ae8
49 arquivos alterados com 1003 adições e 8132 exclusões
  1. 0 266
      Gopkg.lock
  2. 0 62
      Gopkg.toml
  3. 0 890
      gitleaks_test.go
  4. 24 0
      go.mod
  5. 107 0
      go.sum
  6. 2 638
      main.go
  7. 1 70
      src/config.go
  8. 1 0
      src/config_test.go
  9. 77 0
      src/constants.go
  10. 141 0
      src/core.go
  11. 1 1
      src/entropy.go
  12. 3 3
      src/github.go
  13. 3 3
      src/gitlab.go
  14. 1 1
      src/options.go
  15. 319 0
      src/repo.go
  16. 189 0
      src/utils.go
  17. 0 21
      vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING
  18. 0 21
      vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING
  19. 0 21
      vendor/github.com/BurntSushi/toml/cmd/tomlv/COPYING
  20. 0 22
      vendor/github.com/franela/goblin/.gitignore
  21. 0 7
      vendor/github.com/franela/goblin/.travis.yml
  22. 0 19
      vendor/github.com/franela/goblin/LICENSE
  23. 0 3
      vendor/github.com/franela/goblin/Makefile
  24. 0 149
      vendor/github.com/franela/goblin/README.md
  25. 0 70
      vendor/github.com/franela/goblin/assertions.go
  26. 0 36
      vendor/github.com/franela/goblin/go.snippets
  27. 0 337
      vendor/github.com/franela/goblin/goblin.go
  28. BIN
      vendor/github.com/franela/goblin/goblin_logo.jpg
  29. BIN
      vendor/github.com/franela/goblin/goblin_output.png
  30. 0 30
      vendor/github.com/franela/goblin/mono_reporter.go
  31. 0 153
      vendor/github.com/franela/goblin/reporting.go
  32. 0 21
      vendor/github.com/franela/goblin/resolver.go
  33. 3 3
      vendor/github.com/jessevdk/go-flags/check_crosscompile.sh
  34. 4 4
      vendor/github.com/src-d/gcfg/scanner/scanner.go
  35. 0 369
      vendor/golang.org/x/sys/unix/mkall.sh
  36. 0 333
      vendor/golang.org/x/text/internal/gen/gen.go
  37. 0 58
      vendor/golang.org/x/text/internal/triegen/compact.go
  38. 0 251
      vendor/golang.org/x/text/internal/triegen/print.go
  39. 0 494
      vendor/golang.org/x/text/internal/triegen/triegen.go
  40. 0 371
      vendor/golang.org/x/text/internal/ucd/ucd.go
  41. 0 105
      vendor/golang.org/x/text/unicode/cldr/base.go
  42. 0 130
      vendor/golang.org/x/text/unicode/cldr/cldr.go
  43. 0 359
      vendor/golang.org/x/text/unicode/cldr/collate.go
  44. 0 171
      vendor/golang.org/x/text/unicode/cldr/decode.go
  45. 0 400
      vendor/golang.org/x/text/unicode/cldr/makexml.go
  46. 0 602
      vendor/golang.org/x/text/unicode/cldr/resolve.go
  47. 0 144
      vendor/golang.org/x/text/unicode/cldr/slice.go
  48. 0 1494
      vendor/golang.org/x/text/unicode/cldr/xml.go
  49. 127 0
      vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto

+ 0 - 266
Gopkg.lock

@@ -1,266 +0,0 @@
-# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
-
-
-[[projects]]
-  name = "github.com/BurntSushi/toml"
-  packages = ["."]
-  revision = "3012a1dbe2e4bd1391d42b32f0577cb7bbc7f005"
-  version = "v0.3.1"
-
-[[projects]]
-  name = "github.com/emirpasic/gods"
-  packages = [
-    "containers",
-    "lists",
-    "lists/arraylist",
-    "trees",
-    "trees/binaryheap",
-    "utils"
-  ]
-  revision = "1615341f118ae12f353cc8a983f35b584342c9b3"
-  version = "v1.12.0"
-
-[[projects]]
-  name = "github.com/franela/goblin"
-  packages = ["."]
-  revision = "cd5d08fb4ede9eaac1812fdb513552e7404eae2e"
-  version = "0.0.2"
-
-[[projects]]
-  name = "github.com/golang/protobuf"
-  packages = ["proto"]
-  revision = "aa810b61a9c79d51363740d207bb46cf8e620ed5"
-  version = "v1.2.0"
-
-[[projects]]
-  name = "github.com/google/go-github"
-  packages = ["github"]
-  revision = "e48060a28fac52d0f1cb758bc8b87c07bac4a87d"
-  version = "v15.0.0"
-
-[[projects]]
-  name = "github.com/google/go-querystring"
-  packages = ["query"]
-  revision = "44c6ddd0a2342c386950e880b658017258da92fc"
-  version = "v1.0.0"
-
-[[projects]]
-  name = "github.com/hako/durafmt"
-  packages = ["."]
-  revision = "7b7ae1e72eade09dbc9c2cfba3e6c4bae7b8bcac"
-  version = "1.0.0"
-
-[[projects]]
-  branch = "master"
-  name = "github.com/jbenet/go-context"
-  packages = ["io"]
-  revision = "d14ea06fba99483203c19d92cfcd13ebe73135f4"
-
-[[projects]]
-  name = "github.com/jessevdk/go-flags"
-  packages = ["."]
-  revision = "c6ca198ec95c841fdb89fc0de7496fed11ab854e"
-  version = "v1.4.0"
-
-[[projects]]
-  name = "github.com/kevinburke/ssh_config"
-  packages = ["."]
-  revision = "81db2a75821ed34e682567d48be488a1c3121088"
-  version = "0.5"
-
-[[projects]]
-  name = "github.com/mitchellh/go-homedir"
-  packages = ["."]
-  revision = "ae18d6b8b3205b561c79e8e5f69bff09736185f4"
-  version = "v1.0.0"
-
-[[projects]]
-  name = "github.com/pelletier/go-buffruneio"
-  packages = ["."]
-  revision = "c37440a7cf42ac63b919c752ca73a85067e05992"
-  version = "v0.2.0"
-
-[[projects]]
-  name = "github.com/sergi/go-diff"
-  packages = ["diffmatchpatch"]
-  revision = "1744e2970ca51c86172c8190fadad617561ed6e7"
-  version = "v1.0.0"
-
-[[projects]]
-  name = "github.com/sirupsen/logrus"
-  packages = ["."]
-  revision = "3e01752db0189b9157070a0e1668a620f9a85da2"
-  version = "v1.0.6"
-
-[[projects]]
-  name = "github.com/src-d/gcfg"
-  packages = [
-    ".",
-    "scanner",
-    "token",
-    "types"
-  ]
-  revision = "f187355171c936ac84a82793659ebb4936bc1c23"
-  version = "v1.3.0"
-
-[[projects]]
-  name = "github.com/xanzy/go-gitlab"
-  packages = ["."]
-  revision = "183a80bb43ec9746d72f7cb37116e756d075fb91"
-  version = "v0.11.3"
-
-[[projects]]
-  name = "github.com/xanzy/ssh-agent"
-  packages = ["."]
-  revision = "640f0ab560aeb89d523bb6ac322b1244d5c3796c"
-  version = "v0.2.0"
-
-[[projects]]
-  branch = "master"
-  name = "golang.org/x/crypto"
-  packages = [
-    "cast5",
-    "curve25519",
-    "ed25519",
-    "ed25519/internal/edwards25519",
-    "internal/chacha20",
-    "internal/subtle",
-    "openpgp",
-    "openpgp/armor",
-    "openpgp/elgamal",
-    "openpgp/errors",
-    "openpgp/packet",
-    "openpgp/s2k",
-    "poly1305",
-    "ssh",
-    "ssh/agent",
-    "ssh/knownhosts",
-    "ssh/terminal"
-  ]
-  revision = "0e37d006457bf46f9e6692014ba72ef82c33022c"
-
-[[projects]]
-  branch = "master"
-  name = "golang.org/x/net"
-  packages = [
-    "context",
-    "context/ctxhttp"
-  ]
-  revision = "f04abc6bdfa7a0171a8a0c9fd2ada9391044d056"
-
-[[projects]]
-  branch = "master"
-  name = "golang.org/x/oauth2"
-  packages = [
-    ".",
-    "internal"
-  ]
-  revision = "d2e6202438beef2727060aa7cabdd924d92ebfd9"
-
-[[projects]]
-  branch = "master"
-  name = "golang.org/x/sys"
-  packages = [
-    "unix",
-    "windows"
-  ]
-  revision = "b09afc3d579e346c4a7e4705953acaf6f9e551bd"
-
-[[projects]]
-  name = "golang.org/x/text"
-  packages = [
-    "internal/gen",
-    "internal/triegen",
-    "internal/ucd",
-    "transform",
-    "unicode/cldr",
-    "unicode/norm"
-  ]
-  revision = "f21a4dfb5e38f5895301dc265a8def02365cc3d0"
-  version = "v0.3.0"
-
-[[projects]]
-  name = "google.golang.org/appengine"
-  packages = [
-    "internal",
-    "internal/base",
-    "internal/datastore",
-    "internal/log",
-    "internal/remote_api",
-    "internal/urlfetch",
-    "urlfetch"
-  ]
-  revision = "ae0ab99deb4dc413a2b4bd6c8bdd0eb67f1e4d06"
-  version = "v1.2.0"
-
-[[projects]]
-  name = "gopkg.in/src-d/go-billy.v4"
-  packages = [
-    ".",
-    "helper/chroot",
-    "helper/polyfill",
-    "osfs",
-    "util"
-  ]
-  revision = "982626487c60a5252e7d0b695ca23fb0fa2fd670"
-  version = "v4.3.0"
-
-[[projects]]
-  name = "gopkg.in/src-d/go-git.v4"
-  packages = [
-    ".",
-    "config",
-    "internal/revision",
-    "plumbing",
-    "plumbing/cache",
-    "plumbing/filemode",
-    "plumbing/format/config",
-    "plumbing/format/diff",
-    "plumbing/format/gitignore",
-    "plumbing/format/idxfile",
-    "plumbing/format/index",
-    "plumbing/format/objfile",
-    "plumbing/format/packfile",
-    "plumbing/format/pktline",
-    "plumbing/object",
-    "plumbing/protocol/packp",
-    "plumbing/protocol/packp/capability",
-    "plumbing/protocol/packp/sideband",
-    "plumbing/revlist",
-    "plumbing/storer",
-    "plumbing/transport",
-    "plumbing/transport/client",
-    "plumbing/transport/file",
-    "plumbing/transport/git",
-    "plumbing/transport/http",
-    "plumbing/transport/internal/common",
-    "plumbing/transport/server",
-    "plumbing/transport/ssh",
-    "storage",
-    "storage/filesystem",
-    "storage/filesystem/dotgit",
-    "storage/memory",
-    "utils/binary",
-    "utils/diff",
-    "utils/ioutil",
-    "utils/merkletrie",
-    "utils/merkletrie/filesystem",
-    "utils/merkletrie/index",
-    "utils/merkletrie/internal/frame",
-    "utils/merkletrie/noder"
-  ]
-  revision = "a1f6ef44dfed1253ef7f3bc049f66b15f8fc2ab2"
-  version = "v4.9.1"
-
-[[projects]]
-  name = "gopkg.in/warnings.v0"
-  packages = ["."]
-  revision = "ec4a0fea49c7b46c2aeb0b51aac55779c607e52b"
-  version = "v0.1.2"
-
-[solve-meta]
-  analyzer-name = "dep"
-  analyzer-version = 1
-  inputs-digest = "94efaa76ee4e1f9c4d3a38138dc1ceebf7e8f50c8cdc56721c2dc122f810b8c5"
-  solver-name = "gps-cdcl"
-  solver-version = 1

+ 0 - 62
Gopkg.toml

@@ -1,62 +0,0 @@
-# Gopkg.toml example
-#
-# Refer to https://github.com/golang/dep/blob/master/docs/Gopkg.toml.md
-# for detailed Gopkg.toml documentation.
-#
-# required = ["github.com/user/thing/cmd/thing"]
-# ignored = ["github.com/user/project/pkgX", "bitbucket.org/user/project/pkgA/pkgY"]
-#
-# [[constraint]]
-#   name = "github.com/user/project"
-#   version = "1.0.0"
-#
-# [[constraint]]
-#   name = "github.com/user/project2"
-#   branch = "dev"
-#   source = "github.com/myfork/project2"
-#
-# [[override]]
-#   name = "github.com/x/y"
-#   version = "2.4.0"
-#
-# [prune]
-#   non-go = false
-#   go-tests = true
-#   unused-packages = true
-
-
-[[constraint]]
-  name = "github.com/BurntSushi/toml"
-  version = "0.3.0"
-
-[[constraint]]
-  name = "github.com/franela/goblin"
-  version = "0.0.2"
-
-[[constraint]]
-  name = "github.com/google/go-github"
-  version = "15.0.0"
-
-[[constraint]]
-  name = "github.com/jessevdk/go-flags"
-  version = "1.4.0"
-
-[[constraint]]
-  name = "github.com/sirupsen/logrus"
-  version = "1.0.5"
-
-[[constraint]]
-  branch = "master"
-  name = "golang.org/x/oauth2"
-
-[[constraint]]
-  name = "gopkg.in/src-d/go-git.v4"
-  version = "4.9.1"
-
-[prune]
-  go-tests = true
-  unused-packages = true
-
-[[constraint]]
-  name = "github.com/xanzy/go-gitlab"
-  version = "0.11.3"

+ 0 - 890
gitleaks_test.go

@@ -1,890 +0,0 @@
-package main
-
-import (
-	"fmt"
-	"io/ioutil"
-	"os"
-	"path"
-	"regexp"
-	"strings"
-	"testing"
-	"time"
-
-	"github.com/franela/goblin"
-	git "gopkg.in/src-d/go-git.v4"
-	"gopkg.in/src-d/go-git.v4/storage/memory"
-)
-
-const testWhitelistCommit = `
-[[regexes]]
-description = "AWS"
-regex = '''AKIA[0-9A-Z]{16}'''
-
-[whitelist]
-commits = [
-  "eaeffdc65b4c73ccb67e75d96bd8743be2c85973",
-]
-`
-const testWhitelistFile = `
-[[regexes]]
-description = "AWS"
-regex = '''AKIA[0-9A-Z]{16}'''
-
-[whitelist]
-files = [
-  ".go",
-]
-`
-
-const testWhitelistRegex = `
-[[regexes]]
-description = "AWS"
-regex = '''AKIA[0-9A-Z]{16}'''
-
-[whitelist]
-regexes= [
-  "AKIA",
-]
-`
-
-const testWhitelistRepo = `
-[[regexes]]
-description = "AWS"
-regex = '''AKIA[0-9A-Z]{16}'''
-
-[whitelist]
-repos = [
-  "gronit",
-]
-`
-
-const testEntropyRange = `
-[misc]
-entropy = [
-  "7.5-8.0",
-  "3.3-3.4",
-]
-`
-const testBadEntropyRange = `
-[misc]
-entropy = [
-  "8.0-3.0",
-]
-`
-const testBadEntropyRange2 = `
-[misc]
-entropy = [
-  "8.0-8.9",
-]
-`
-
-func TestGetRepo(t *testing.T) {
-	var err error
-	dir, err = ioutil.TempDir("", "gitleaksTestRepo")
-	defer os.RemoveAll(dir)
-	if err != nil {
-		panic(err)
-	}
-	_, err = git.PlainClone(dir, false, &git.CloneOptions{
-		URL: "https://github.com/gitleakstest/gronit",
-	})
-
-	if err != nil {
-		panic(err)
-	}
-
-	var tests = []struct {
-		testOpts       Options
-		description    string
-		expectedErrMsg string
-	}{
-		{
-			testOpts: Options{
-				Repo: "https://github.com/gitleakstest/gronit",
-			},
-			description:    "test plain clone remote repo",
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				Repo: "https://github.com/gitleakstest/gronit",
-				Disk: true,
-			},
-			description:    "test on disk clone remote repo",
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				RepoPath: dir,
-			},
-			description:    "test local clone repo",
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				Repo: "https://github.com/gitleakstest/nope",
-			},
-			description:    "test no repo",
-			expectedErrMsg: "authentication required",
-		},
-		{
-			testOpts: Options{
-				Repo: "https://github.com/gitleakstest/private",
-			},
-			description:    "test private repo",
-			expectedErrMsg: "invalid auth method",
-		},
-		{
-			testOpts: Options{
-				Repo: "https://github.com/gitleakstest/private",
-				Disk: true,
-			},
-			description:    "test private repo",
-			expectedErrMsg: "invalid auth method",
-		},
-	}
-	g := goblin.Goblin(t)
-	for _, test := range tests {
-		g.Describe("TestGetRepo", func() {
-			g.It(test.description, func() {
-				opts = test.testOpts
-				_, err := cloneRepo()
-				if err != nil {
-					g.Assert(err.Error()).Equal(test.expectedErrMsg)
-				}
-			})
-		})
-	}
-}
-func TestRun(t *testing.T) {
-	var err error
-	configsDir := testTomlLoader()
-
-	dir, err = ioutil.TempDir("", "gitleaksTestOwner")
-	defer os.RemoveAll(dir)
-	if err != nil {
-		panic(err)
-	}
-	git.PlainClone(dir+"/gronit", false, &git.CloneOptions{
-		URL: "https://github.com/gitleakstest/gronit",
-	})
-	git.PlainClone(dir+"/h1domains", false, &git.CloneOptions{
-		URL: "https://github.com/gitleakstest/h1domains",
-	})
-	var tests = []struct {
-		testOpts       Options
-		description    string
-		expectedErrMsg string
-		whiteListRepos []string
-		whiteListFiles []*regexp.Regexp
-		numLeaks       int
-		configPath     string
-		commitPerPage  int
-	}{
-		{
-			testOpts: Options{
-				GitLabUser: "gitleakstest",
-			},
-			description:    "test github user",
-			numLeaks:       2,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				GithubUser: "gitleakstest",
-			},
-			description:    "test github user",
-			numLeaks:       2,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				GithubUser: "gitleakstest",
-				Disk:       true,
-			},
-			description:    "test github user on disk ",
-			numLeaks:       2,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				GithubOrg: "gitleakstestorg",
-			},
-			description:    "test github org",
-			numLeaks:       2,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				GithubOrg: "gitleakstestorg",
-				Disk:      true,
-			},
-			description:    "test org on disk",
-			numLeaks:       2,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				OwnerPath: dir,
-			},
-			description:    "test owner path",
-			numLeaks:       2,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				Repo:   "git@github.com:gitleakstest/gronit.git",
-				SSHKey: "trash",
-			},
-			description:    "test leak",
-			numLeaks:       0,
-			expectedErrMsg: "unable to generate ssh key: open trash: no such file or directory",
-		},
-		{
-			testOpts: Options{
-				Repo: "https://github.com/gitleakstest/gronit.git",
-			},
-			description:    "test leak",
-			numLeaks:       2,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				Repo: "https://github.com/gitleakstest/h1domains.git",
-			},
-			description:    "test clean",
-			numLeaks:       0,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				Repo: "https://github.com/gitleakstest/empty.git",
-			},
-			description:    "test empty",
-			numLeaks:       0,
-			expectedErrMsg: "reference not found",
-		},
-		{
-			testOpts: Options{
-				GithubOrg: "gitleakstestorg",
-			},
-			description:    "test github org, whitelist repo",
-			numLeaks:       0,
-			expectedErrMsg: "",
-			configPath:     path.Join(configsDir, "repo"),
-		},
-		{
-			testOpts: Options{
-				GithubOrg:    "gitleakstestorg",
-				ExcludeForks: true,
-			},
-			description:    "test github org, exclude forks",
-			numLeaks:       0,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				GithubPR: "https://github.com/gitleakstest/gronit/pull/1",
-			},
-			description:    "test github pr",
-			numLeaks:       4,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				GithubPR: "https://github.com/gitleakstest/gronit/pull/1",
-			},
-			description:    "test github pr",
-			numLeaks:       4,
-			expectedErrMsg: "",
-			commitPerPage:  1,
-		},
-		{
-			testOpts: Options{
-				GithubPR: "https://github.com/gitleakstest/gronit/pull/1",
-			},
-			description:    "test github pr with whitelisted files",
-			numLeaks:       0,
-			expectedErrMsg: "",
-			commitPerPage:  1,
-			whiteListFiles: []*regexp.Regexp{
-				regexp.MustCompile("main.go"),
-			},
-		},
-		{
-			testOpts: Options{
-				GithubPR: "https://github.com/gitleakstest/gronit/pull/2",
-			},
-			description:    "test github pr with commits without patch info",
-			numLeaks:       0,
-			expectedErrMsg: "",
-			commitPerPage:  1,
-		},
-	}
-	g := goblin.Goblin(t)
-	for _, test := range tests {
-		g.Describe("TestRun", func() {
-			g.It(test.description, func() {
-				if test.configPath != "" {
-					os.Setenv("GITLEAKS_CONFIG", test.configPath)
-				}
-				if test.commitPerPage != 0 {
-					githubPages = test.commitPerPage
-				}
-				if test.whiteListFiles != nil {
-					whiteListFiles = test.whiteListFiles
-				} else {
-					whiteListFiles = nil
-				}
-				opts = test.testOpts
-				leaks, err := run()
-				if err != nil {
-					g.Assert(err.Error()).Equal(test.expectedErrMsg)
-				}
-				g.Assert(len(leaks)).Equal(test.numLeaks)
-				githubPages = 100
-			})
-		})
-	}
-}
-
-func TestWriteReport(t *testing.T) {
-	tmpDir, _ := ioutil.TempDir("", "reportDir")
-	reportJSON := path.Join(tmpDir, "report.json")
-	reportJASON := path.Join(tmpDir, "report.jason")
-	reportVOID := path.Join("thereIsNoWay", "thisReportWillGetWritten.json")
-	reportCSV := path.Join(tmpDir, "report.csv")
-	defer os.RemoveAll(tmpDir)
-	leaks := []Leak{
-		{
-			Line:     "eat",
-			Commit:   "your",
-			Offender: "veggies",
-			Type:     "and",
-			Message:  "get",
-			Author:   "some",
-			File:     "sleep",
-			Date:     time.Now(),
-		},
-	}
-
-	var tests = []struct {
-		leaks          []Leak
-		reportFile     string
-		fileName       string
-		description    string
-		testOpts       Options
-		expectedErrMsg string
-	}{
-		{
-			leaks:       leaks,
-			reportFile:  reportJSON,
-			fileName:    "report.json",
-			description: "can we write a json file",
-			testOpts: Options{
-				Report: reportJSON,
-			},
-		},
-		{
-			leaks:       leaks,
-			reportFile:  reportCSV,
-			fileName:    "report.csv",
-			description: "can we write a csv file",
-			testOpts: Options{
-				Report: reportCSV,
-			},
-		},
-		{
-			leaks:          leaks,
-			reportFile:     reportJASON,
-			fileName:       "report.jason",
-			description:    "bad file",
-			expectedErrMsg: "Report should be a .json or .csv file",
-			testOpts: Options{
-				Report: reportJASON,
-			},
-		},
-		{
-			leaks:          leaks,
-			reportFile:     reportVOID,
-			fileName:       "report.jason",
-			description:    "bad dir",
-			expectedErrMsg: "thereIsNoWay does not exist",
-			testOpts: Options{
-				Report: reportVOID,
-			},
-		},
-	}
-	g := goblin.Goblin(t)
-	for _, test := range tests {
-		g.Describe("TestWriteReport", func() {
-			g.It(test.description, func() {
-				opts = test.testOpts
-				err := optsGuard()
-				if err != nil {
-					g.Assert(err.Error()).Equal(test.expectedErrMsg)
-				} else {
-					writeReport(test.leaks)
-					f, _ := os.Stat(test.reportFile)
-					g.Assert(f.Name()).Equal(test.fileName)
-				}
-			})
-		})
-	}
-
-}
-
-func testTomlLoader() string {
-	tmpDir, _ := ioutil.TempDir("", "whiteListConfigs")
-	ioutil.WriteFile(path.Join(tmpDir, "regex"), []byte(testWhitelistRegex), 0644)
-	ioutil.WriteFile(path.Join(tmpDir, "commit"), []byte(testWhitelistCommit), 0644)
-	ioutil.WriteFile(path.Join(tmpDir, "file"), []byte(testWhitelistFile), 0644)
-	ioutil.WriteFile(path.Join(tmpDir, "repo"), []byte(testWhitelistRepo), 0644)
-	ioutil.WriteFile(path.Join(tmpDir, "entropy"), []byte(testEntropyRange), 0644)
-	ioutil.WriteFile(path.Join(tmpDir, "badEntropy"), []byte(testBadEntropyRange), 0644)
-	ioutil.WriteFile(path.Join(tmpDir, "badEntropy2"), []byte(testBadEntropyRange2), 0644)
-	return tmpDir
-}
-
-func TestAuditRepo(t *testing.T) {
-	var leaks []Leak
-	err := loadToml()
-	configsDir := testTomlLoader()
-	defer os.RemoveAll(configsDir)
-
-	if err != nil {
-		panic(err)
-	}
-	leaksR, err := git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
-		URL: "https://github.com/gitleakstest/gronit.git",
-	})
-	if err != nil {
-		panic(err)
-	}
-	leaksRepo := &RepoDescriptor{
-		repository: leaksR,
-		name:       "gronit",
-	}
-
-	cleanR, err := git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
-		URL: "https://github.com/gitleakstest/h1domains.git",
-	})
-	if err != nil {
-		panic(err)
-	}
-	cleanRepo := &RepoDescriptor{
-		repository: cleanR,
-		name:       "h1domains",
-	}
-
-	var tests = []struct {
-		testOpts         Options
-		description      string
-		expectedErrMsg   string
-		numLeaks         int
-		repo             *RepoDescriptor
-		whiteListFiles   []*regexp.Regexp
-		whiteListCommits map[string]bool
-		whiteListRepos   []*regexp.Regexp
-		whiteListRegexes []*regexp.Regexp
-		configPath       string
-	}{
-		{
-			repo:        leaksRepo,
-			description: "pinned config",
-			numLeaks:    0,
-			testOpts: Options{
-				RepoConfig: true,
-			},
-		},
-		{
-			repo:        leaksRepo,
-			description: "commit depth = 1, one leak",
-			numLeaks:    1,
-			testOpts: Options{
-				Depth: 1,
-			},
-		},
-		{
-			repo:        leaksRepo,
-			description: "two leaks present",
-			numLeaks:    2,
-		},
-		{
-			repo:        leaksRepo,
-			description: "two leaks present limit goroutines",
-			numLeaks:    2,
-			testOpts: Options{
-				Threads: 4,
-			},
-		},
-		{
-			repo:        leaksRepo,
-			description: "two leaks present whitelist AWS.. no leaks",
-			whiteListRegexes: []*regexp.Regexp{
-				regexp.MustCompile("AKIA"),
-			},
-			numLeaks: 0,
-		},
-		{
-			repo:        leaksRepo,
-			description: "two leaks present limit goroutines",
-			numLeaks:    2,
-		},
-		{
-			repo:        cleanRepo,
-			description: "no leaks present",
-			numLeaks:    0,
-		},
-		{
-			repo:        leaksRepo,
-			description: "two leaks present whitelist go files",
-			whiteListFiles: []*regexp.Regexp{
-				regexp.MustCompile(".go"),
-			},
-			numLeaks: 0,
-		},
-		{
-			repo:        leaksRepo,
-			description: "two leaks present whitelist bad commit",
-			whiteListCommits: map[string]bool{
-				"eaeffdc65b4c73ccb67e75d96bd8743be2c85973": true,
-			},
-			numLeaks: 1,
-		},
-		{
-			repo:        leaksRepo,
-			description: "redact",
-			testOpts: Options{
-				Redact: true,
-			},
-			numLeaks: 2,
-		},
-		{
-			repo:        leaksRepo,
-			description: "Audit a specific commit",
-			numLeaks:    1,
-			testOpts: Options{
-				Commit: "cb5599aeed261b2c038aa4729e2d53ca050a4988",
-			},
-		},
-		{
-			repo:        leaksRepo,
-			description: "Audit a specific commit no leaks",
-			numLeaks:    0,
-			testOpts: Options{
-				Commit: "2b033e012eee364fc41b4ab7c5db1497399b8e67",
-			},
-		},
-		{
-			repo:        leaksRepo,
-			description: "toml whitelist regex",
-			configPath:  path.Join(configsDir, "regex"),
-			numLeaks:    0,
-		},
-		{
-			repo:        leaksRepo,
-			description: "toml whitelist file",
-			configPath:  path.Join(configsDir, "file"),
-			numLeaks:    0,
-		},
-		{
-			repo:        leaksRepo,
-			description: "toml whitelist commit",
-			configPath:  path.Join(configsDir, "commit"),
-			numLeaks:    1,
-		},
-		{
-			repo:        leaksRepo,
-			description: "audit whitelist repo",
-			numLeaks:    0,
-			whiteListRepos: []*regexp.Regexp{
-				regexp.MustCompile("gronit"),
-			},
-		},
-		{
-			repo:        leaksRepo,
-			description: "toml whitelist repo",
-			numLeaks:    0,
-			configPath:  path.Join(configsDir, "repo"),
-		},
-		{
-			repo:        leaksRepo,
-			description: "leaks present with entropy",
-			testOpts: Options{
-				Entropy: 4.7,
-			},
-			numLeaks: 6,
-		},
-		{
-			repo:        leaksRepo,
-			description: "leaks present with entropy",
-			testOpts: Options{
-				Entropy:        4.7,
-				NoiseReduction: true,
-			},
-			numLeaks: 2,
-		},
-		{
-			repo:        leaksRepo,
-			description: "Audit until specific commit",
-			numLeaks:    2,
-			testOpts: Options{
-				CommitStop: "f6839959b7bbdcd23008f1fb16f797f35bcd3a0c",
-			},
-		},
-		{
-			repo:        leaksRepo,
-			description: "commit depth = 2, two leaks",
-			numLeaks:    2,
-			testOpts: Options{
-				Depth: 2,
-			},
-		},
-		{
-			repo:        leaksRepo,
-			description: "toml entropy range",
-			numLeaks:    298,
-			configPath:  path.Join(configsDir, "entropy"),
-		},
-		{
-			repo: leaksRepo,
-			testOpts: Options{
-				NoiseReduction: true,
-			},
-			description: "toml entropy range",
-			numLeaks:    58,
-			configPath:  path.Join(configsDir, "entropy"),
-		},
-		{
-			repo:           leaksRepo,
-			description:    "toml bad entropy range",
-			numLeaks:       0,
-			configPath:     path.Join(configsDir, "badEntropy"),
-			expectedErrMsg: "entropy range must be ascending",
-		},
-		{
-			repo:           leaksRepo,
-			description:    "toml bad entropy2 range",
-			numLeaks:       0,
-			configPath:     path.Join(configsDir, "badEntropy2"),
-			expectedErrMsg: "invalid entropy ranges, must be within 0.0-8.0",
-		},
-	}
-	whiteListCommits = make(map[string]bool)
-	g := goblin.Goblin(t)
-	for _, test := range tests {
-		g.Describe("TestAuditRepo", func() {
-			g.It(test.description, func() {
-				auditDone = false
-				opts = test.testOpts
-				// settin da globs
-				if test.whiteListFiles != nil {
-					whiteListFiles = test.whiteListFiles
-				} else {
-					whiteListFiles = nil
-				}
-				if test.whiteListCommits != nil {
-					whiteListCommits = test.whiteListCommits
-				} else {
-					whiteListCommits = nil
-				}
-				if test.whiteListRegexes != nil {
-					whiteListRegexes = test.whiteListRegexes
-				} else {
-					whiteListRegexes = nil
-				}
-				if test.whiteListRepos != nil {
-					whiteListRepos = test.whiteListRepos
-				} else {
-					whiteListRepos = nil
-				}
-				skip := false
-				totalCommits = 0
-				// config paths
-				if test.configPath != "" {
-					os.Setenv("GITLEAKS_CONFIG", test.configPath)
-					err := loadToml()
-					if err != nil {
-						g.Assert(err.Error()).Equal(test.expectedErrMsg)
-						skip = true
-					}
-				}
-				if !skip {
-					leaks, err = auditGitRepo(test.repo)
-					if test.testOpts.Depth != 0 {
-						g.Assert(totalCommits).Equal(test.testOpts.Depth)
-					} else {
-						if opts.Redact {
-							g.Assert(leaks[0].Offender).Equal("REDACTED")
-						}
-						g.Assert(len(leaks)).Equal(test.numLeaks)
-					}
-				}
-			})
-		})
-	}
-}
-
-func TestOptionGuard(t *testing.T) {
-	var tests = []struct {
-		testOpts            Options
-		githubToken         bool
-		description         string
-		expectedErrMsg      string
-		expectedErrMsgFuzzy string
-	}{
-		{
-			testOpts:       Options{},
-			description:    "default no opts",
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				GithubUser: "fakeUser",
-				GithubOrg:  "fakeOrg",
-			},
-			description:    "double owner",
-			expectedErrMsg: "github user and organization set",
-		},
-		{
-			testOpts: Options{
-				GithubOrg: "fakeOrg",
-				OwnerPath: "/dev/null",
-			},
-			description:    "local and remote target",
-			expectedErrMsg: "github organization set and local owner path",
-		},
-		{
-			testOpts: Options{
-				GithubUser: "fakeUser",
-				OwnerPath:  "/dev/null",
-			},
-			description:    "local and remote target",
-			expectedErrMsg: "github user set and local owner path",
-		},
-		{
-			testOpts: Options{
-				GithubUser:   "fakeUser",
-				SingleSearch: "*/./....",
-			},
-			description:         "single search invalid regex gaurd",
-			expectedErrMsgFuzzy: "unable to compile regex: */./...., ",
-		},
-		{
-			testOpts: Options{
-				GithubUser:   "fakeUser",
-				SingleSearch: "mystring",
-			},
-			description:    "single search regex gaurd",
-			expectedErrMsg: "",
-		},
-		{
-			testOpts: Options{
-				GithubOrg: "fakeOrg",
-				Entropy:   9,
-			},
-			description:    "Invalid entropy level guard",
-			expectedErrMsg: "The maximum level of entropy is 8",
-		},
-	}
-	g := goblin.Goblin(t)
-	for _, test := range tests {
-		g.Describe("Test Option Gaurd", func() {
-			g.It(test.description, func() {
-				os.Clearenv()
-				opts = test.testOpts
-				if test.githubToken {
-					os.Setenv("GITHUB_TOKEN", "fakeToken")
-				}
-				err := optsGuard()
-				if err != nil {
-					if test.expectedErrMsgFuzzy != "" {
-						g.Assert(strings.Contains(err.Error(), test.expectedErrMsgFuzzy)).Equal(true)
-					} else {
-						g.Assert(err.Error()).Equal(test.expectedErrMsg)
-					}
-				} else {
-					g.Assert("").Equal(test.expectedErrMsg)
-				}
-
-			})
-		})
-	}
-}
-
-func TestLoadToml(t *testing.T) {
-	tmpDir, _ := ioutil.TempDir("", "gitleaksTestConfigDir")
-	defer os.RemoveAll(tmpDir)
-	err := ioutil.WriteFile(path.Join(tmpDir, "gitleaksConfig"), []byte(defaultConfig), 0644)
-	if err != nil {
-		panic(err)
-	}
-
-	configPath := path.Join(tmpDir, "gitleaksConfig")
-	noConfigPath := path.Join(tmpDir, "gitleaksConfigNope")
-
-	var tests = []struct {
-		testOpts       Options
-		description    string
-		configPath     string
-		expectedErrMsg string
-		singleSearch   bool
-	}{
-		{
-			testOpts: Options{
-				ConfigPath: configPath,
-			},
-			description: "path to config",
-		},
-		{
-			testOpts:     Options{},
-			description:  "env var path to no config",
-			singleSearch: true,
-		},
-		{
-			testOpts: Options{
-				ConfigPath: noConfigPath,
-			},
-			description:    "no path to config",
-			expectedErrMsg: fmt.Sprintf("no gitleaks config at %s", noConfigPath),
-		},
-		{
-			testOpts:       Options{},
-			description:    "env var path to config",
-			configPath:     configPath,
-			expectedErrMsg: "",
-		},
-		{
-			testOpts:       Options{},
-			description:    "env var path to no config",
-			configPath:     noConfigPath,
-			expectedErrMsg: fmt.Sprintf("problem loading config: open %s: no such file or directory", noConfigPath),
-		},
-	}
-
-	g := goblin.Goblin(t)
-	for _, test := range tests {
-		g.Describe("TestLoadToml", func() {
-			g.It(test.description, func() {
-				opts = test.testOpts
-				if test.singleSearch {
-					singleSearchRegex = regexp.MustCompile("test")
-				} else {
-					singleSearchRegex = nil
-				}
-				if test.configPath != "" {
-					os.Setenv("GITLEAKS_CONFIG", test.configPath)
-				} else {
-					os.Clearenv()
-				}
-				err := loadToml()
-				if err != nil {
-					g.Assert(err.Error()).Equal(test.expectedErrMsg)
-				} else {
-					g.Assert("").Equal(test.expectedErrMsg)
-				}
-			})
-		})
-	}
-}

+ 24 - 0
go.mod

@@ -0,0 +1,24 @@
+module github.com/zricethezav/gitleaks
+
+require (
+	github.com/BurntSushi/toml v0.3.1
+	github.com/emirpasic/gods v1.12.0 // indirect
+	github.com/google/go-github v15.0.0+incompatible
+	github.com/google/go-querystring v1.0.0 // indirect
+	github.com/hako/durafmt v0.0.0-20180520121703-7b7ae1e72ead
+	github.com/jessevdk/go-flags v1.4.0
+	github.com/onsi/ginkgo v1.8.0 // indirect
+	github.com/onsi/gomega v1.5.0 // indirect
+	github.com/sirupsen/logrus v1.0.6
+	github.com/xanzy/go-gitlab v0.11.3
+	golang.org/x/crypto v0.0.0-20180910181607-0e37d006457b // indirect
+	golang.org/x/net v0.0.0-20180925072008-f04abc6bdfa7 // indirect
+	golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be
+	golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 // indirect
+	golang.org/x/sys v0.0.0-20180925112736-b09afc3d579e // indirect
+	google.golang.org/appengine v1.2.0 // indirect
+	gopkg.in/airbrake/gobrake.v2 v2.0.9 // indirect
+	gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2 // indirect
+	gopkg.in/src-d/go-billy.v4 v4.3.0 // indirect
+	gopkg.in/src-d/go-git.v4 v4.9.1
+)

+ 107 - 0
go.sum

@@ -0,0 +1,107 @@
+github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
+github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7 h1:uSoVVbwJiQipAclBbw+8quDsfcvFjOpI5iCf4p/cqCs=
+github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs=
+github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239 h1:kFOfPq6dUM1hTo4JG6LR5AXSUEsOjtdm0kw0FtQtMJA=
+github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
+github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
+github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
+github.com/emirpasic/gods v1.9.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
+github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
+github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
+github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
+github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
+github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
+github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
+github.com/gliderlabs/ssh v0.1.1 h1:j3L6gSLQalDETeEg/Jg0mGY0/y/N6zI2xX1978P0Uqw=
+github.com/gliderlabs/ssh v0.1.1/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
+github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM=
+github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
+github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ=
+github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
+github.com/google/go-github v15.0.0+incompatible h1:jlPg2Cpsxb/FyEV/MFiIE9tW/2RAevQNZDPeHbf5a94=
+github.com/google/go-github v15.0.0+incompatible/go.mod h1:zLgOLi98H3fifZn+44m+umXrS52loVEgC2AApnigrVQ=
+github.com/google/go-querystring v1.0.0 h1:Xkwi/a1rcvNg1PPYe5vI8GbeBY/jrVuDX5ASuANWTrk=
+github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck=
+github.com/hako/durafmt v0.0.0-20180520121703-7b7ae1e72ead h1:Y9WOGZY2nw5ksbEf5AIpk+vK52Tdg/VN/rHFRfEeeGQ=
+github.com/hako/durafmt v0.0.0-20180520121703-7b7ae1e72ead/go.mod h1:5Scbynm8dF1XAPwIwkGPqzkM/shndPm79Jd1003hTjE=
+github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
+github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
+github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
+github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
+github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
+github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
+github.com/kevinburke/ssh_config v0.0.0-20180830205328-81db2a75821e h1:RgQk53JHp/Cjunrr1WlsXSZpqXn+uREuHvUVcK82CV8=
+github.com/kevinburke/ssh_config v0.0.0-20180830205328-81db2a75821e/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
+github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
+github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
+github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
+github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
+github.com/mitchellh/go-homedir v1.0.0 h1:vKb8ShqSby24Yrqr/yDYkuFz8d0WUjys40rvnGC8aR0=
+github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
+github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/ginkgo v1.8.0 h1:VkHVNpR4iVnU8XQR6DBm8BqYjN7CRzw+xKUbVVbbW9w=
+github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
+github.com/onsi/gomega v1.5.0 h1:izbySO9zDPmjJ8rDjLvkA2zJHIo+HkYXHnf7eN7SSyo=
+github.com/onsi/gomega v1.5.0/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
+github.com/pelletier/go-buffruneio v0.2.0 h1:U4t4R6YkofJ5xHm3dJzuRpPZ0mr5MMCoAWooScCR7aA=
+github.com/pelletier/go-buffruneio v0.2.0/go.mod h1:JkE26KsDizTr40EUHkXVtNPvgGtbSNq5BcowyYOWdKo=
+github.com/pkg/errors v0.8.0 h1:WdK/asTD0HN+q6hsWO3/vpuAkAr+tw6aNJNDFFf0+qw=
+github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
+github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
+github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
+github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
+github.com/sirupsen/logrus v1.0.6 h1:hcP1GmhGigz/O7h1WVUM5KklBp1JoNS9FggWKdj/j3s=
+github.com/sirupsen/logrus v1.0.6/go.mod h1:pMByvHTf9Beacp5x1UXfOR9xyW/9antXMhjMPG0dEzc=
+github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4=
+github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI=
+github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
+github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
+github.com/xanzy/go-gitlab v0.11.3 h1:gSYcSb+pCx3fco6/O3w784/omQVTcrgxRzyf14SBvUQ=
+github.com/xanzy/go-gitlab v0.11.3/go.mod h1:CRKHkvFWNU6C3AEfqLWjnCNnAs4nj8Zk95rX2S3X6Mw=
+github.com/xanzy/ssh-agent v0.2.0 h1:Adglfbi5p9Z0BmK2oKU9nTG+zKfniSfnaMYB+ULd+Ro=
+github.com/xanzy/ssh-agent v0.2.0/go.mod h1:0NyE30eGUDliuLEHJgYte/zncp2zdTStcOnWhgSqHD8=
+golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/crypto v0.0.0-20180910181607-0e37d006457b h1:2b9XGzhjiYsYPnKXoEfL7klWZQIt8IfyRCz62gCqqlQ=
+golang.org/x/crypto v0.0.0-20180910181607-0e37d006457b/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
+golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/net v0.0.0-20180925072008-f04abc6bdfa7 h1:zKzVgSQ8WOSHzD7I4k8LQjrHUUCNOlBsgc0PcYLVNnY=
+golang.org/x/net v0.0.0-20180925072008-f04abc6bdfa7/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be h1:vEDujvNQGv4jgYKudGeI/+DAX4Jffq6hpD55MmoEvKs=
+golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
+golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6 h1:bjcUS9ztw9kFmmIxJInhon/0Is3p+EHBKNgquIzo1OI=
+golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sys v0.0.0-20180903190138-2b024373dcd9/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20180925112736-b09afc3d579e h1:LSlw/Dbj0MkNvPYAAkGinYmGliq+aqS7eKPYlE4oWC4=
+golang.org/x/sys v0.0.0-20180925112736-b09afc3d579e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
+golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
+google.golang.org/appengine v1.2.0 h1:S0iUepdCWODXRvtE+gcRDd15L+k+k1AiHlMiMjefH24=
+google.golang.org/appengine v1.2.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
+gopkg.in/airbrake/gobrake.v2 v2.0.9 h1:7z2uVWwn7oVeeugY1DtlPAy5H+KYgB1KeKTnqjNatLo=
+gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
+gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
+gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
+gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4=
+gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
+gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2 h1:OAj3g0cR6Dx/R07QgQe8wkA9RNjB2u4i700xBkIT4e0=
+gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=
+gopkg.in/src-d/go-billy.v4 v4.2.1/go.mod h1:tm33zBoOwxjYHZIE+OV8bxTWFMJLrconzFMd38aARFk=
+gopkg.in/src-d/go-billy.v4 v4.3.0 h1:KtlZ4c1OWbIs4jCv5ZXrTqG8EQocr0g/d4DjNg70aek=
+gopkg.in/src-d/go-billy.v4 v4.3.0/go.mod h1:tm33zBoOwxjYHZIE+OV8bxTWFMJLrconzFMd38aARFk=
+gopkg.in/src-d/go-git-fixtures.v3 v3.1.1 h1:XWW/s5W18RaJpmo1l0IYGqXKuJITWRFuA45iOf1dKJs=
+gopkg.in/src-d/go-git-fixtures.v3 v3.1.1/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g=
+gopkg.in/src-d/go-git.v4 v4.9.1 h1:0oKHJZY8tM7B71378cfTg2c5jmWyNlXvestTT6WfY+4=
+gopkg.in/src-d/go-git.v4 v4.9.1/go.mod h1:Vtut8izDyrM8BUVQnzJ+YvmNcem2J89EmfZYCkLokZk=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
+gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
+gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
+gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
+gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
+gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

+ 2 - 638
main.go

@@ -1,643 +1,7 @@
 package main
 
-import (
-	"crypto/md5"
-	"encoding/csv"
-	"encoding/json"
-	"fmt"
-	"io/ioutil"
-	"os"
-	"path"
-	"path/filepath"
-	"regexp"
-	"strings"
-	"sync"
-	"time"
-
-	"gopkg.in/src-d/go-git.v4/plumbing"
-
-	"github.com/google/go-github/github"
-	"github.com/hako/durafmt"
-	log "github.com/sirupsen/logrus"
-	"gopkg.in/src-d/go-git.v4"
-	diffType "gopkg.in/src-d/go-git.v4/plumbing/format/diff"
-	"gopkg.in/src-d/go-git.v4/plumbing/object"
-	"gopkg.in/src-d/go-git.v4/plumbing/storer"
-	"gopkg.in/src-d/go-git.v4/storage/memory"
-)
-
-// Leak represents a leaked secret or regex match.
-type Leak struct {
-	Line     string    `json:"line"`
-	Commit   string    `json:"commit"`
-	Offender string    `json:"offender"`
-	Type     string    `json:"reason"`
-	Message  string    `json:"commitMsg"`
-	Author   string    `json:"author"`
-	File     string    `json:"file"`
-	Repo     string    `json:"repo"`
-	Date     time.Time `json:"date"`
-}
-
-// RepoDescriptor contains a src-d git repository and other data about the repo
-type RepoDescriptor struct {
-	path       string
-	url        string
-	name       string
-	repository *git.Repository
-	err        error
-}
-
-type gitDiff struct {
-	content      string
-	commit       *object.Commit
-	filePath     string
-	repoName     string
-	githubCommit *github.RepositoryCommit
-	sha          string
-	message      string
-	author       string
-	date         time.Time
-}
-
-const defaultGithubURL = "https://api.github.com/"
-const version = "1.24.0"
-const errExit = 2
-const leakExit = 1
-
-var (
-	opts              *Options
-	config            *Config
-	singleSearchRegex *regexp.Regexp
-	dir               string
-	threads           int
-	totalCommits      int64
-	commitMap         = make(map[string]bool)
-	cMutex            = &sync.Mutex{}
-	auditDone         bool
-)
-
-func init() {
-	log.SetOutput(os.Stdout)
-	// threads = runtime.GOMAXPROCS(0) / 2
-	threads = 1
-}
+import "github.com/zricethezav/gitleaks/src"
 
 func main() {
-	var err error
-	opts, err = setupOpts()
-	if err != nil {
-		log.Fatal(err)
-	}
-	config, err = newConfig()
-	if err != nil {
-		log.Fatal(err)
-	}
-
-	now := time.Now()
-	leaks, err := run()
-	if err != nil {
-		if strings.Contains(err.Error(), "whitelisted") {
-			log.Info(err.Error())
-			os.Exit(0)
-		}
-		log.Error(err)
-		os.Exit(errExit)
-	}
-
-	if opts.Report != "" {
-		writeReport(leaks)
-	}
-
-	if len(leaks) != 0 {
-		log.Warnf("%d leaks detected. %d commits inspected in %s", len(leaks), totalCommits, durafmt.Parse(time.Now().Sub(now)).String())
-		os.Exit(leakExit)
-	} else {
-		log.Infof("%d leaks detected. %d commits inspected in %s", len(leaks), totalCommits, durafmt.Parse(time.Now().Sub(now)).String())
-	}
-}
-
-// run parses options and kicks off the audit
-func run() ([]Leak, error) {
-	var (
-		leaks []Leak
-		err   error
-	)
-
-	if opts.Disk {
-		// temporary directory where all the gitleaks plain clones will reside
-		dir, err = ioutil.TempDir("", "gitleaks")
-		defer os.RemoveAll(dir)
-		if err != nil {
-			return nil, err
-		}
-	}
-
-	// start audits
-	if opts.Repo != "" || opts.RepoPath != "" {
-		// Audit a single remote repo or a local repo.
-		repo, err := cloneRepo()
-		if err != nil {
-			return leaks, err
-		}
-		return auditGitRepo(repo)
-	} else if opts.OwnerPath != "" {
-		// Audit local repos. Gitleaks will look for all child directories of OwnerPath for
-		// git repos and perform an audit on said repos.
-		repos, err := discoverRepos(opts.OwnerPath)
-		if err != nil {
-			return leaks, err
-		}
-		for _, repo := range repos {
-			leaksFromRepo, err := auditGitRepo(repo)
-			if err != nil {
-				return leaks, err
-			}
-			leaks = append(leaksFromRepo, leaks...)
-		}
-	} else if opts.GithubOrg != "" || opts.GithubUser != "" {
-		// Audit a github owner -- a user or organization.
-		leaks, err = auditGithubRepos()
-		if err != nil {
-			return leaks, err
-		}
-	} else if opts.GitLabOrg != "" || opts.GitLabUser != "" {
-		leaks, err = auditGitlabRepos()
-		if err != nil {
-			return leaks, err
-		}
-	} else if opts.GithubPR != "" {
-		return auditGithubPR()
-	}
-	return leaks, nil
-}
-
-// writeReport writes a report to a file specified in the --report= option.
-// Default format for report is JSON. You can use the --csv option to write the report as a csv
-func writeReport(leaks []Leak) error {
-	var err error
-
-	if len(leaks) == 0 {
-		return nil
-	}
-
-	log.Infof("writing report to %s", opts.Report)
-	if strings.HasSuffix(opts.Report, ".csv") {
-		f, err := os.Create(opts.Report)
-		if err != nil {
-			return err
-		}
-		defer f.Close()
-		w := csv.NewWriter(f)
-		w.Write([]string{"repo", "line", "commit", "offender", "reason", "commitMsg", "author", "file", "date"})
-		for _, leak := range leaks {
-			w.Write([]string{leak.Repo, leak.Line, leak.Commit, leak.Offender, leak.Type, leak.Message, leak.Author, leak.File, leak.Date.Format(time.RFC3339)})
-		}
-		w.Flush()
-	} else {
-		var (
-			f       *os.File
-			encoder *json.Encoder
-		)
-		f, err := os.Create(opts.Report)
-		if err != nil {
-			return err
-		}
-		defer f.Close()
-		encoder = json.NewEncoder(f)
-		encoder.SetIndent("", "\t")
-		if _, err := f.WriteString("[\n"); err != nil {
-			return err
-		}
-		for i := 0; i < len(leaks); i++ {
-			if err := encoder.Encode(leaks[i]); err != nil {
-				return err
-			}
-			// for all but the last leak, seek back and overwrite the newline appended by Encode() with comma & newline
-			if i+1 < len(leaks) {
-				if _, err := f.Seek(-1, 1); err != nil {
-					return err
-				}
-				if _, err := f.WriteString(",\n"); err != nil {
-					return err
-				}
-			}
-		}
-		if _, err := f.WriteString("]"); err != nil {
-			return err
-		}
-		if err := f.Sync(); err != nil {
-			log.Error(err)
-			return err
-		}
-	}
-	return err
-}
-
-// cloneRepo clones a repo to memory(default) or to disk if the --disk option is set.
-func cloneRepo() (*RepoDescriptor, error) {
-	var (
-		err  error
-		repo *git.Repository
-	)
-	// check if repo is whitelisted
-	for _, re := range config.WhiteList.repos {
-		if re.FindString(opts.Repo) != "" {
-			return nil, fmt.Errorf("skipping %s, whitelisted", opts.Repo)
-		}
-	}
-
-	// check if cloning to disk
-	if opts.Disk {
-		log.Infof("cloning %s to disk", opts.Repo)
-		cloneTarget := fmt.Sprintf("%s/%x", dir, md5.Sum([]byte(fmt.Sprintf("%s%s", opts.GithubUser, opts.Repo))))
-		if strings.HasPrefix(opts.Repo, "git") {
-			// private
-			repo, err = git.PlainClone(cloneTarget, false, &git.CloneOptions{
-				URL:      opts.Repo,
-				Progress: os.Stdout,
-				Auth:     config.sshAuth,
-			})
-		} else {
-			// non-private
-			repo, err = git.PlainClone(cloneTarget, false, &git.CloneOptions{
-				URL:      opts.Repo,
-				Progress: os.Stdout,
-			})
-		}
-	} else if opts.RepoPath != "" {
-		// local repo
-		log.Infof("opening %s", opts.RepoPath)
-		repo, err = git.PlainOpen(opts.RepoPath)
-	} else {
-		// cloning to memory
-		log.Infof("cloning %s", opts.Repo)
-		if strings.HasPrefix(opts.Repo, "git") {
-			repo, err = git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
-				URL:      opts.Repo,
-				Progress: os.Stdout,
-				Auth:     config.sshAuth,
-			})
-		} else {
-			repo, err = git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
-				URL:      opts.Repo,
-				Progress: os.Stdout,
-			})
-		}
-	}
-	return &RepoDescriptor{
-		repository: repo,
-		path:       opts.RepoPath,
-		url:        opts.Repo,
-		name:       filepath.Base(opts.Repo),
-		err:        err,
-	}, nil
-}
-
-// auditGitRepo beings an audit on a git repository
-func auditGitRepo(repo *RepoDescriptor) ([]Leak, error) {
-	var (
-		err   error
-		leaks []Leak
-	)
-	for _, re := range config.WhiteList.repos {
-		if re.FindString(repo.name) != "" {
-			return leaks, fmt.Errorf("skipping %s, whitelisted", repo.name)
-		}
-	}
-
-	// check if target contains an external gitleaks toml
-	if opts.RepoConfig {
-		err := config.updateFromRepo(repo)
-		if err != nil {
-			return leaks, nil
-		}
-	}
-
-	// clear commit cache
-	commitMap = make(map[string]bool)
-
-	refs, err := repo.repository.Storer.IterReferences()
-	if err != nil {
-		return leaks, err
-	}
-	err = refs.ForEach(func(ref *plumbing.Reference) error {
-		if ref.Name().IsTag() {
-			return nil
-		}
-		branchLeaks := auditGitReference(repo, ref)
-		for _, leak := range branchLeaks {
-			leaks = append(leaks, leak)
-		}
-		return nil
-	})
-	return leaks, err
-}
-
-// auditGitReference beings the audit for a git reference. This function will
-// traverse the git reference and audit each line of each diff.
-func auditGitReference(repo *RepoDescriptor, ref *plumbing.Reference) []Leak {
-	var (
-		err         error
-		repoName    string
-		leaks       []Leak
-		commitCount int64
-		commitWg    sync.WaitGroup
-		mutex       = &sync.Mutex{}
-		semaphore   chan bool
-	)
-	if auditDone {
-		return nil
-	}
-	repoName = repo.name
-	if opts.Threads != 0 {
-		threads = opts.Threads
-	}
-	if opts.RepoPath != "" {
-		threads = 1
-	}
-	semaphore = make(chan bool, threads)
-
-	cIter, err := repo.repository.Log(&git.LogOptions{From: ref.Hash()})
-	if err != nil {
-		return nil
-	}
-	err = cIter.ForEach(func(c *object.Commit) error {
-		if c == nil || (opts.Depth != 0 && commitCount == opts.Depth) || auditDone {
-			if commitCount == opts.Depth {
-				auditDone = true
-			}
-			return storer.ErrStop
-		}
-		commitCount = commitCount + 1
-		if config.WhiteList.commits[c.Hash.String()] {
-			log.Infof("skipping commit: %s\n", c.Hash.String())
-			return nil
-		}
-
-		// commits w/o parent (root of git the git ref) or option for single commit is not empty str
-		if len(c.ParentHashes) == 0 || opts.Commit == c.Hash.String() {
-			if commitMap[c.Hash.String()] {
-				return nil
-			}
-
-			if opts.Commit == c.Hash.String() {
-				auditDone = true
-			}
-
-			cMutex.Lock()
-			commitMap[c.Hash.String()] = true
-			cMutex.Unlock()
-			totalCommits = totalCommits + 1
-
-			fIter, err := c.Files()
-			if err != nil {
-				return nil
-			}
-			err = fIter.ForEach(func(f *object.File) error {
-				bin, err := f.IsBinary()
-				if bin || err != nil {
-					return nil
-				}
-				for _, re := range config.WhiteList.files {
-					if re.FindString(f.Name) != "" {
-						log.Debugf("skipping whitelisted file (matched regex '%s'): %s", re.String(), f.Name)
-						return nil
-					}
-				}
-				content, err := f.Contents()
-				if err != nil {
-					return nil
-				}
-				diff := gitDiff{
-					repoName: repoName,
-					filePath: f.Name,
-					content:  content,
-					sha:      c.Hash.String(),
-					author:   c.Author.String(),
-					message:  strings.Replace(c.Message, "\n", " ", -1),
-					date:     c.Author.When,
-				}
-				fileLeaks := inspect(diff)
-				mutex.Lock()
-				leaks = append(leaks, fileLeaks...)
-				mutex.Unlock()
-				return nil
-			})
-			return nil
-		}
-
-		// single commit
-		if opts.Commit != "" {
-			return nil
-		}
-
-		skipCount := false
-		err = c.Parents().ForEach(func(parent *object.Commit) error {
-			// check if we've seen this diff before
-			if commitMap[c.Hash.String()+parent.Hash.String()] {
-				return nil
-			}
-			cMutex.Lock()
-			commitMap[c.Hash.String()+parent.Hash.String()] = true
-			cMutex.Unlock()
-
-			if !skipCount {
-				totalCommits = totalCommits + 1
-				skipCount = true
-			}
-
-			commitWg.Add(1)
-			semaphore <- true
-			go func(c *object.Commit, parent *object.Commit) {
-				var (
-					filePath string
-					skipFile bool
-				)
-				defer func() {
-					commitWg.Done()
-					<-semaphore
-					if r := recover(); r != nil {
-						log.Warnf("recovering from panic on commit %s, likely large diff causing panic", c.Hash.String())
-					}
-				}()
-				patch, err := c.Patch(parent)
-				if err != nil {
-					log.Warnf("problem generating patch for commit: %s\n", c.Hash.String())
-					return
-				}
-				for _, f := range patch.FilePatches() {
-					if f.IsBinary() {
-						continue
-					}
-					skipFile = false
-					from, to := f.Files()
-					filePath = "???"
-					if from != nil {
-						filePath = from.Path()
-					} else if to != nil {
-						filePath = to.Path()
-					}
-					for _, re := range config.WhiteList.files {
-						if re.FindString(filePath) != "" {
-							log.Debugf("skipping whitelisted file (matched regex '%s'): %s", re.String(), filePath)
-							skipFile = true
-							break
-						}
-					}
-					if skipFile {
-						continue
-					}
-					chunks := f.Chunks()
-					for _, chunk := range chunks {
-						if chunk.Type() == diffType.Add || chunk.Type() == diffType.Delete {
-							diff := gitDiff{
-								repoName: repoName,
-								filePath: filePath,
-								content:  chunk.Content(),
-								sha:      c.Hash.String(),
-								author:   c.Author.String(),
-								message:  strings.Replace(c.Message, "\n", " ", -1),
-								date:     c.Author.When,
-							}
-							chunkLeaks := inspect(diff)
-							for _, leak := range chunkLeaks {
-								mutex.Lock()
-								leaks = append(leaks, leak)
-								mutex.Unlock()
-							}
-						}
-					}
-				}
-			}(c, parent)
-
-			return nil
-		})
-
-		// stop audit if we are at commitStop
-		if c.Hash.String() == opts.CommitStop {
-			auditDone = true
-			return storer.ErrStop
-		}
-
-		return nil
-	})
-	commitWg.Wait()
-	return leaks
-}
-
-// inspect will parse each line of the git diff's content against a set of regexes or
-// a set of regexes set by the config (see gitleaks.toml for example). This function
-// will skip lines that include a whitelisted regex. A list of leaks is returned.
-// If verbose mode (-v/--verbose) is set, then checkDiff will log leaks as they are discovered.
-func inspect(diff gitDiff) []Leak {
-	var (
-		leaks    []Leak
-		skipLine bool
-	)
-
-	lines := strings.Split(diff.content, "\n")
-
-	for _, line := range lines {
-		skipLine = false
-		for _, re := range config.Regexes {
-			match := re.regex.FindString(line)
-			if match == "" {
-				continue
-			}
-			if skipLine = isLineWhitelisted(line); skipLine {
-				break
-			}
-			leaks = addLeak(leaks, line, match, re.description, diff)
-		}
-
-		if !skipLine && (opts.Entropy > 0 || len(config.Entropy.entropyRanges) != 0) {
-			words := strings.Fields(line)
-			for _, word := range words {
-				entropy := getShannonEntropy(word)
-				// Only check entropyRegexes and whiteListRegexes once per line, and only if an entropy leak type
-				// was found above, since regex checks are expensive.
-				if !entropyIsHighEnough(entropy) {
-					continue
-				}
-				// If either the line is whitelisted or the line fails the noiseReduction check (when enabled),
-				// then we can skip checking the rest of the line for high entropy words.
-				if skipLine = !highEntropyLineIsALeak(line) || isLineWhitelisted(line); skipLine {
-					break
-				}
-				leaks = addLeak(leaks, line, word, fmt.Sprintf("Entropy: %.2f", entropy), diff)
-			}
-		}
-	}
-	return leaks
-}
-
-// isLineWhitelisted returns true iff the line is matched by at least one of the whiteListRegexes.
-func isLineWhitelisted(line string) bool {
-	for _, wRe := range config.WhiteList.regexes {
-		whitelistMatch := wRe.FindString(line)
-		if whitelistMatch != "" {
-			return true
-		}
-	}
-	return false
-}
-
-// addLeak is helper for func inspect() to append leaks if found during a diff check.
-func addLeak(leaks []Leak, line string, offender string, leakType string, diff gitDiff) []Leak {
-	leak := Leak{
-		Line:     line,
-		Commit:   diff.sha,
-		Offender: offender,
-		Type:     leakType,
-		Author:   diff.author,
-		File:     diff.filePath,
-		Repo:     diff.repoName,
-		Message:  diff.message,
-		Date:     diff.date,
-	}
-	if opts.Redact {
-		leak.Offender = "REDACTED"
-		leak.Line = strings.Replace(line, offender, "REDACTED", -1)
-	}
-
-	if opts.Verbose {
-		leak.log()
-	}
-
-	leaks = append(leaks, leak)
-	return leaks
-}
-
-// discoverRepos walks all the children of `path`. If a child directory
-// contain a .git file then that repo will be added to the list of repos returned
-func discoverRepos(ownerPath string) ([]*RepoDescriptor, error) {
-	var (
-		err   error
-		repos []*RepoDescriptor
-	)
-	files, err := ioutil.ReadDir(ownerPath)
-	if err != nil {
-		return repos, err
-	}
-	for _, f := range files {
-		if f.IsDir() {
-			repoPath := path.Join(ownerPath, f.Name())
-			r, err := git.PlainOpen(repoPath)
-			if err != nil {
-				continue
-			}
-			repos = append(repos, &RepoDescriptor{
-				repository: r,
-				name:       f.Name(),
-				path:       repoPath,
-			})
-		}
-	}
-	return repos, err
-}
-
-func (leak Leak) log() {
-	b, _ := json.MarshalIndent(leak, "", "   ")
-	fmt.Println(string(b))
+	gitleaks.Run()
 }

+ 1 - 70
config.go → src/config.go

@@ -1,4 +1,4 @@
-package main
+package gitleaks
 
 import (
 	"fmt"
@@ -12,75 +12,6 @@ import (
 	"gopkg.in/src-d/go-git.v4/plumbing/transport/ssh"
 )
 
-const defaultConfig = `
-# This is a sample config file for gitleaks. You can configure gitleaks what to search for and what to whitelist.
-# The output you are seeing here is the default gitleaks config. If GITLEAKS_CONFIG environment variable
-# is set, gitleaks will load configurations from that path. If option --config-path is set, gitleaks will load
-# configurations from that path. Gitleaks does not whitelist anything by default.
-
-title = "gitleaks config"
-# add regexes to the regex table
-[[regexes]]
-description = "AWS"
-regex = '''AKIA[0-9A-Z]{16}'''
-[[regexes]]
-description = "PKCS8"
-regex = '''-----BEGIN PRIVATE KEY-----'''
-[[regexes]]
-description = "RSA"
-regex = '''-----BEGIN RSA PRIVATE KEY-----'''
-[[regexes]]
-description = "SSH"
-regex = '''-----BEGIN OPENSSH PRIVATE KEY-----'''
-[[regexes]]
-description = "PGP"
-regex = '''-----BEGIN PGP PRIVATE KEY BLOCK-----'''
-[[regexes]]
-description = "Facebook"
-regex = '''(?i)facebook(.{0,4})?['\"][0-9a-f]{32}['\"]'''
-[[regexes]]
-description = "Twitter"
-regex = '''(?i)twitter(.{0,4})?['\"][0-9a-zA-Z]{35,44}['\"]'''
-[[regexes]]
-description = "Github"
-regex = '''(?i)github(.{0,4})?['\"][0-9a-zA-Z]{35,40}['\"]'''
-[[regexes]]
-description = "Slack"
-regex = '''xox[baprs]-([0-9a-zA-Z]{10,48})?'''
-
-[entropy]
-lineregexes = [
-	"api",
-	"key",
-	"signature",
-	"secret",
-	"password",
-	"pass",
-	"pwd",
-	"token",
-	"curl",
-	"wget",
-	"https?",
-]
-
-[whitelist]
-files = [
-  "(.*?)(jpg|gif|doc|pdf|bin)$"
-]
-#commits = [
-#  "BADHA5H1",
-#  "BADHA5H2",
-#]
-#repos = [
-#	"mygoodrepo"
-#]
-[misc]
-#entropy = [
-#	"3.3-4.30"
-#	"6.0-8.0
-#]
-`
-
 type entropyRange struct {
 	v1 float64
 	v2 float64

+ 1 - 0
src/config_test.go

@@ -0,0 +1 @@
+package gitleaks

+ 77 - 0
src/constants.go

@@ -0,0 +1,77 @@
+package gitleaks
+
+const version = "1.24.0"
+
+const defaultGithubURL = "https://api.github.com/"
+const defaultThreadNum = 1
+const errExit = 2
+const leakExit = 1
+
+const defaultConfig = `
+# This is a sample config file for gitleaks. You can configure gitleaks what to search for and what to whitelist.
+# The output you are seeing here is the default gitleaks config. If GITLEAKS_CONFIG environment variable
+# is set, gitleaks will load configurations from that path. If option --config-path is set, gitleaks will load
+# configurations from that path. Gitleaks does not whitelist anything by default.
+
+title = "gitleaks config"
+# add regexes to the regex table
+[[regexes]]
+description = "AWS"
+regex = '''AKIA[0-9A-Z]{16}'''
+[[regexes]]
+description = "PKCS8"
+regex = '''-----BEGIN PRIVATE KEY-----'''
+[[regexes]]
+description = "RSA"
+regex = '''-----BEGIN RSA PRIVATE KEY-----'''
+[[regexes]]
+description = "SSH"
+regex = '''-----BEGIN OPENSSH PRIVATE KEY-----'''
+[[regexes]]
+description = "PGP"
+regex = '''-----BEGIN PGP PRIVATE KEY BLOCK-----'''
+[[regexes]]
+description = "Facebook"
+regex = '''(?i)facebook(.{0,4})?['\"][0-9a-f]{32}['\"]'''
+[[regexes]]
+description = "Twitter"
+regex = '''(?i)twitter(.{0,4})?['\"][0-9a-zA-Z]{35,44}['\"]'''
+[[regexes]]
+description = "Github"
+regex = '''(?i)github(.{0,4})?['\"][0-9a-zA-Z]{35,40}['\"]'''
+[[regexes]]
+description = "Slack"
+regex = '''xox[baprs]-([0-9a-zA-Z]{10,48})?'''
+
+[entropy]
+lineregexes = [
+	"api",
+	"key",
+	"signature",
+	"secret",
+	"password",
+	"pass",
+	"pwd",
+	"token",
+	"curl",
+	"wget",
+	"https?",
+]
+
+[whitelist]
+files = [
+  "(.*?)(jpg|gif|doc|pdf|bin)$"
+]
+#commits = [
+#  "BADHA5H1",
+#  "BADHA5H2",
+#]
+#repos = [
+#	"mygoodrepo"
+#]
+[misc]
+#entropy = [
+#	"3.3-4.30"
+#	"6.0-8.0
+#]
+`

+ 141 - 0
src/core.go

@@ -0,0 +1,141 @@
+package gitleaks
+
+import (
+	"io/ioutil"
+	"os"
+	"regexp"
+	"strings"
+	"sync"
+	"time"
+
+	"github.com/google/go-github/github"
+	"github.com/hako/durafmt"
+	log "github.com/sirupsen/logrus"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+)
+
+// Leak represents a leaked secret or regex match.
+type Leak struct {
+	Line     string    `json:"line"`
+	Commit   string    `json:"commit"`
+	Offender string    `json:"offender"`
+	Type     string    `json:"reason"`
+	Message  string    `json:"commitMsg"`
+	Author   string    `json:"author"`
+	File     string    `json:"file"`
+	Repo     string    `json:"repo"`
+	Date     time.Time `json:"date"`
+}
+
+type gitDiff struct {
+	content      string
+	commit       *object.Commit
+	filePath     string
+	repoName     string
+	githubCommit *github.RepositoryCommit
+	sha          string
+	message      string
+	author       string
+	date         time.Time
+}
+
+var (
+	opts              *Options
+	config            *Config
+	singleSearchRegex *regexp.Regexp
+	dir               string
+	threads           int
+	totalCommits      int64
+	commitMap         = make(map[string]bool)
+	cMutex            = &sync.Mutex{}
+	auditDone         bool
+)
+
+func init() {
+	log.SetOutput(os.Stdout)
+	threads = defaultThreadNum
+}
+
+// Run is the entry point for gitleaks
+func Run() {
+	var (
+		leaks []Leak
+		err   error
+	)
+	opts, err = setupOpts()
+	if err != nil {
+		log.Fatal(err)
+	}
+	config, err = newConfig()
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	now := time.Now()
+
+	if opts.Disk {
+		// temporary directory where all the gitleaks plain clones will reside
+		dir, err = ioutil.TempDir("", "gitleaks")
+		defer os.RemoveAll(dir)
+		if err != nil {
+			goto postAudit
+		}
+	}
+
+	// start audits
+	if opts.Repo != "" || opts.RepoPath != "" {
+		repoD, err := newRepoD()
+		if err != nil {
+			goto postAudit
+		}
+		err = repoD.clone()
+		if err != nil {
+			goto postAudit
+		}
+		leaks, err = repoD.audit()
+	} else if opts.OwnerPath != "" {
+		repoDs, err := discoverRepos(opts.OwnerPath)
+		if err != nil {
+			goto postAudit
+		}
+		for _, repoD := range repoDs {
+			err = repoD.clone()
+			if err != nil {
+				continue
+			}
+			leaksFromRepo, err := repoD.audit()
+
+			if err != nil {
+				log.Warnf("error occured auditing repo: %s, continuing", repoD.name)
+			}
+			leaks = append(leaksFromRepo, leaks...)
+		}
+	} else if opts.GithubOrg != "" || opts.GithubUser != "" {
+		leaks, err = auditGithubRepos()
+	} else if opts.GitLabOrg != "" || opts.GitLabUser != "" {
+		leaks, err = auditGitlabRepos()
+	} else if opts.GithubPR != "" {
+		leaks, err = auditGithubPR()
+	}
+
+postAudit:
+	if err != nil {
+		if strings.Contains(err.Error(), "whitelisted") {
+			log.Info(err.Error())
+			os.Exit(0)
+		}
+		log.Error(err)
+		os.Exit(errExit)
+	}
+
+	if opts.Report != "" {
+		writeReport(leaks)
+	}
+
+	if len(leaks) != 0 {
+		log.Warnf("%d leaks detected. %d commits inspected in %s", len(leaks), totalCommits, durafmt.Parse(time.Now().Sub(now)).String())
+		os.Exit(leakExit)
+	} else {
+		log.Infof("%d leaks detected. %d commits inspected in %s", len(leaks), totalCommits, durafmt.Parse(time.Now().Sub(now)).String())
+	}
+}

+ 1 - 1
entropy.go → src/entropy.go

@@ -1,4 +1,4 @@
-package main
+package gitleaks
 
 import "math"
 

+ 3 - 3
github.go → src/github.go

@@ -1,4 +1,4 @@
-package main
+package gitleaks
 
 import (
 	"context"
@@ -164,12 +164,12 @@ func auditGithubRepos() ([]Leak, error) {
 		ownerDir, _ = ioutil.TempDir(dir, opts.GithubUser)
 	}
 	for _, githubRepo := range githubRepos {
-		repo, err := cloneGithubRepo(githubRepo)
+		repoD, err := cloneGithubRepo(githubRepo)
 		if err != nil {
 			log.Warn(err)
 			continue
 		}
-		leaksFromRepo, err := auditGitRepo(repo)
+		leaksFromRepo, err := repoD.audit()
 		if opts.Disk {
 			os.RemoveAll(fmt.Sprintf("%s/%s", ownerDir, *githubRepo.Name))
 		}

+ 3 - 3
gitlab.go → src/gitlab.go

@@ -1,4 +1,4 @@
-package main
+package gitleaks
 
 import (
 	"fmt"
@@ -78,13 +78,13 @@ func auditGitlabRepos() ([]Leak, error) {
 	}
 
 	for _, p := range repos {
-		repo, err := cloneGitlabRepo(tempDir, p)
+		repoD, err := cloneGitlabRepo(tempDir, p)
 		if err != nil {
 			log.Warn(err)
 			continue
 		}
 
-		leaksFromRepo, err := auditGitRepo(repo)
+		leaksFromRepo, err := repoD.audit()
 		if err != nil {
 			log.Warn(err)
 		}

+ 1 - 1
options.go → src/options.go

@@ -1,4 +1,4 @@
-package main
+package gitleaks
 
 import (
 	"fmt"

+ 319 - 0
src/repo.go

@@ -0,0 +1,319 @@
+package gitleaks
+
+import (
+	"crypto/md5"
+	"fmt"
+	"os"
+	"path/filepath"
+	"strings"
+	"sync"
+
+	log "github.com/sirupsen/logrus"
+	git "gopkg.in/src-d/go-git.v4"
+	"gopkg.in/src-d/go-git.v4/plumbing"
+	diffType "gopkg.in/src-d/go-git.v4/plumbing/format/diff"
+	"gopkg.in/src-d/go-git.v4/plumbing/object"
+	"gopkg.in/src-d/go-git.v4/plumbing/storer"
+	"gopkg.in/src-d/go-git.v4/storage/memory"
+)
+
+// RepoDescriptor contains a src-d git repository and other data about the repo
+type RepoDescriptor struct {
+	path       string
+	url        string
+	name       string
+	repository *git.Repository
+	err        error
+}
+
+func newRepoD() (*RepoDescriptor, error) {
+	for _, re := range config.WhiteList.repos {
+		if re.FindString(opts.Repo) != "" {
+			return nil, fmt.Errorf("skipping %s, whitelisted", opts.Repo)
+		}
+	}
+	return &RepoDescriptor{
+		path: opts.RepoPath,
+		url:  opts.Repo,
+		name: filepath.Base(opts.Repo),
+	}, nil
+}
+
+func (repoD *RepoDescriptor) clone() error {
+	var (
+		err  error
+		repo *git.Repository
+	)
+
+	// check if cloning to disk
+	if opts.Disk {
+		log.Infof("cloning %s to disk", opts.Repo)
+		cloneTarget := fmt.Sprintf("%s/%x", dir, md5.Sum([]byte(fmt.Sprintf("%s%s", opts.GithubUser, opts.Repo))))
+		if strings.HasPrefix(opts.Repo, "git") {
+			// private
+			repo, err = git.PlainClone(cloneTarget, false, &git.CloneOptions{
+				URL:      opts.Repo,
+				Progress: os.Stdout,
+				Auth:     config.sshAuth,
+			})
+		} else {
+			// non-private
+			repo, err = git.PlainClone(cloneTarget, false, &git.CloneOptions{
+				URL:      opts.Repo,
+				Progress: os.Stdout,
+			})
+		}
+	} else if repoD.path != "" {
+		log.Infof("opening %s", opts.RepoPath)
+		repo, err = git.PlainOpen(repoD.path)
+	} else {
+		// cloning to memory
+		log.Infof("cloning %s", opts.Repo)
+		if strings.HasPrefix(opts.Repo, "git") {
+			repo, err = git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
+				URL:      opts.Repo,
+				Progress: os.Stdout,
+				Auth:     config.sshAuth,
+			})
+		} else {
+			repo, err = git.Clone(memory.NewStorage(), nil, &git.CloneOptions{
+				URL:      opts.Repo,
+				Progress: os.Stdout,
+			})
+		}
+	}
+	repoD.repository = repo
+	repoD.err = err
+	return err
+}
+
+func (repoD *RepoDescriptor) audit() ([]Leak, error) {
+	var (
+		err   error
+		leaks []Leak
+	)
+	for _, re := range config.WhiteList.repos {
+		if re.FindString(repoD.name) != "" {
+			return leaks, fmt.Errorf("skipping %s, whitelisted", repoD.name)
+		}
+	}
+
+	// check if target contains an external gitleaks toml
+	if opts.RepoConfig {
+		err := config.updateFromRepo(repoD)
+		if err != nil {
+			return leaks, nil
+		}
+	}
+
+	// clear commit cache
+	commitMap = make(map[string]bool)
+
+	refs, err := repoD.repository.Storer.IterReferences()
+	if err != nil {
+		return leaks, err
+	}
+	err = refs.ForEach(func(ref *plumbing.Reference) error {
+		if ref.Name().IsTag() {
+			return nil
+		}
+		branchLeaks := repoD.auditRef(ref)
+		for _, leak := range branchLeaks {
+			leaks = append(leaks, leak)
+		}
+		return nil
+	})
+	return leaks, err
+}
+
+// auditRef begins the audit for a git reference. This function
+// traverses the git reference and audits each line of each diff.
+func (repoD *RepoDescriptor) auditRef(ref *plumbing.Reference) []Leak {
+	var (
+		err         error
+		repoName    string
+		leaks       []Leak
+		commitCount int64
+		commitWg    sync.WaitGroup
+		mutex       = &sync.Mutex{}
+		semaphore   chan bool
+	)
+	if auditDone {
+		return nil
+	}
+	repoName = repoD.name
+	if opts.Threads != 0 {
+		threads = opts.Threads
+	}
+	if opts.RepoPath != "" {
+		threads = 1
+	}
+	semaphore = make(chan bool, threads)
+
+	cIter, err := repoD.repository.Log(&git.LogOptions{From: ref.Hash()})
+	if err != nil {
+		return nil
+	}
+	err = cIter.ForEach(func(c *object.Commit) error {
+		if c == nil || (opts.Depth != 0 && commitCount == opts.Depth) || auditDone {
+			if commitCount == opts.Depth {
+				auditDone = true
+			}
+			return storer.ErrStop
+		}
+		commitCount = commitCount + 1
+		if config.WhiteList.commits[c.Hash.String()] {
+			log.Infof("skipping commit: %s\n", c.Hash.String())
+			return nil
+		}
+
+		// commits w/o parent (root of the git ref), or the single-commit option matches this commit
+		if len(c.ParentHashes) == 0 || opts.Commit == c.Hash.String() {
+			if commitMap[c.Hash.String()] {
+				return nil
+			}
+
+			if opts.Commit == c.Hash.String() {
+				auditDone = true
+			}
+
+			cMutex.Lock()
+			commitMap[c.Hash.String()] = true
+			cMutex.Unlock()
+			totalCommits = totalCommits + 1
+
+			fIter, err := c.Files()
+			if err != nil {
+				return nil
+			}
+			err = fIter.ForEach(func(f *object.File) error {
+				bin, err := f.IsBinary()
+				if bin || err != nil {
+					return nil
+				}
+				for _, re := range config.WhiteList.files {
+					if re.FindString(f.Name) != "" {
+						log.Debugf("skipping whitelisted file (matched regex '%s'): %s", re.String(), f.Name)
+						return nil
+					}
+				}
+				content, err := f.Contents()
+				if err != nil {
+					return nil
+				}
+				diff := gitDiff{
+					repoName: repoName,
+					filePath: f.Name,
+					content:  content,
+					sha:      c.Hash.String(),
+					author:   c.Author.String(),
+					message:  strings.Replace(c.Message, "\n", " ", -1),
+					date:     c.Author.When,
+				}
+				fileLeaks := inspect(diff)
+				mutex.Lock()
+				leaks = append(leaks, fileLeaks...)
+				mutex.Unlock()
+				return nil
+			})
+			return nil
+		}
+
+		// single commit
+		if opts.Commit != "" {
+			return nil
+		}
+
+		skipCount := false
+		err = c.Parents().ForEach(func(parent *object.Commit) error {
+			// check if we've seen this diff before
+			if commitMap[c.Hash.String()+parent.Hash.String()] {
+				return nil
+			}
+			cMutex.Lock()
+			commitMap[c.Hash.String()+parent.Hash.String()] = true
+			cMutex.Unlock()
+
+			if !skipCount {
+				totalCommits = totalCommits + 1
+				skipCount = true
+			}
+
+			commitWg.Add(1)
+			semaphore <- true
+			go func(c *object.Commit, parent *object.Commit) {
+				var (
+					filePath string
+					skipFile bool
+				)
+				defer func() {
+					commitWg.Done()
+					<-semaphore
+					if r := recover(); r != nil {
+						log.Warnf("recovering from panic on commit %s, likely large diff causing panic", c.Hash.String())
+					}
+				}()
+				patch, err := c.Patch(parent)
+				if err != nil {
+					log.Warnf("problem generating patch for commit: %s\n", c.Hash.String())
+					return
+				}
+				for _, f := range patch.FilePatches() {
+					if f.IsBinary() {
+						continue
+					}
+					skipFile = false
+					from, to := f.Files()
+					filePath = "???"
+					if from != nil {
+						filePath = from.Path()
+					} else if to != nil {
+						filePath = to.Path()
+					}
+					for _, re := range config.WhiteList.files {
+						if re.FindString(filePath) != "" {
+							log.Debugf("skipping whitelisted file (matched regex '%s'): %s", re.String(), filePath)
+							skipFile = true
+							break
+						}
+					}
+					if skipFile {
+						continue
+					}
+					chunks := f.Chunks()
+					for _, chunk := range chunks {
+						if chunk.Type() == diffType.Add || chunk.Type() == diffType.Delete {
+							diff := gitDiff{
+								repoName: repoName,
+								filePath: filePath,
+								content:  chunk.Content(),
+								sha:      c.Hash.String(),
+								author:   c.Author.String(),
+								message:  strings.Replace(c.Message, "\n", " ", -1),
+								date:     c.Author.When,
+							}
+							chunkLeaks := inspect(diff)
+							for _, leak := range chunkLeaks {
+								mutex.Lock()
+								leaks = append(leaks, leak)
+								mutex.Unlock()
+							}
+						}
+					}
+				}
+			}(c, parent)
+
+			return nil
+		})
+
+		// stop audit if we are at commitStop
+		if c.Hash.String() == opts.CommitStop {
+			auditDone = true
+			return storer.ErrStop
+		}
+
+		return nil
+	})
+	commitWg.Wait()
+	return leaks
+}

+ 189 - 0
src/utils.go

@@ -0,0 +1,189 @@
+package gitleaks
+
+import (
+	"encoding/csv"
+	"encoding/json"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"path"
+	"strings"
+	"time"
+
+	log "github.com/sirupsen/logrus"
+)
+
+// writeReport writes a report to a file specified in the --report= option.
+// Default format for report is JSON. You can use the --csv option to write the report as a csv
+func writeReport(leaks []Leak) error {
+	if len(leaks) == 0 {
+		return nil
+	}
+
+	log.Infof("writing report to %s", opts.Report)
+	if strings.HasSuffix(opts.Report, ".csv") {
+		f, err := os.Create(opts.Report)
+		if err != nil {
+			return err
+		}
+		defer f.Close()
+		w := csv.NewWriter(f)
+		w.Write([]string{"repo", "line", "commit", "offender", "reason", "commitMsg", "author", "file", "date"})
+		for _, leak := range leaks {
+			w.Write([]string{leak.Repo, leak.Line, leak.Commit, leak.Offender, leak.Type, leak.Message, leak.Author, leak.File, leak.Date.Format(time.RFC3339)})
+		}
+		w.Flush()
+	} else {
+		f, err := os.Create(opts.Report)
+		if err != nil {
+			return err
+		}
+		defer f.Close()
+		encoder := json.NewEncoder(f)
+		encoder.SetIndent("", "\t")
+		if _, err := f.WriteString("[\n"); err != nil {
+			return err
+		}
+		for i := 0; i < len(leaks); i++ {
+			if err := encoder.Encode(leaks[i]); err != nil {
+				return err
+			}
+			// for all but the last leak, seek back and overwrite the newline appended by Encode() with comma & newline
+			if i+1 < len(leaks) {
+				if _, err := f.Seek(-1, 1); err != nil {
+					return err
+				}
+				if _, err := f.WriteString(",\n"); err != nil {
+					return err
+				}
+			}
+		}
+		if _, err := f.WriteString("]"); err != nil {
+			return err
+		}
+		if err := f.Sync(); err != nil {
+			log.Error(err)
+			return err
+		}
+	}
+	return nil
+}
+
+// inspect will parse each line of the git diff's content against a set of regexes or
+// a set of regexes set by the config (see gitleaks.toml for example). This function
+// will skip lines that include a whitelisted regex. A list of leaks is returned.
+// If verbose mode (-v/--verbose) is set, then inspect will log leaks as they are discovered.
+func inspect(diff gitDiff) []Leak {
+	var (
+		leaks    []Leak
+		skipLine bool
+	)
+
+	lines := strings.Split(diff.content, "\n")
+
+	for _, line := range lines {
+		skipLine = false
+		for _, re := range config.Regexes {
+			match := re.regex.FindString(line)
+			if match == "" {
+				continue
+			}
+			if skipLine = isLineWhitelisted(line); skipLine {
+				break
+			}
+			leaks = addLeak(leaks, line, match, re.description, diff)
+		}
+
+		if !skipLine && (opts.Entropy > 0 || len(config.Entropy.entropyRanges) != 0) {
+			words := strings.Fields(line)
+			for _, word := range words {
+				entropy := getShannonEntropy(word)
+				// Only check entropyRegexes and whiteListRegexes once per line, and only if an entropy leak type
+				// was found above, since regex checks are expensive.
+				if !entropyIsHighEnough(entropy) {
+					continue
+				}
+				// If either the line is whitelisted or the line fails the noiseReduction check (when enabled),
+				// then we can skip checking the rest of the line for high entropy words.
+				if skipLine = !highEntropyLineIsALeak(line) || isLineWhitelisted(line); skipLine {
+					break
+				}
+				leaks = addLeak(leaks, line, word, fmt.Sprintf("Entropy: %.2f", entropy), diff)
+			}
+		}
+	}
+	return leaks
+}
+
+// isLineWhitelisted returns true iff the line is matched by at least one of the whiteListRegexes.
+func isLineWhitelisted(line string) bool {
+	for _, wRe := range config.WhiteList.regexes {
+		whitelistMatch := wRe.FindString(line)
+		if whitelistMatch != "" {
+			return true
+		}
+	}
+	return false
+}
+
+// addLeak is helper for func inspect() to append leaks if found during a diff check.
+func addLeak(leaks []Leak, line string, offender string, leakType string, diff gitDiff) []Leak {
+	leak := Leak{
+		Line:     line,
+		Commit:   diff.sha,
+		Offender: offender,
+		Type:     leakType,
+		Author:   diff.author,
+		File:     diff.filePath,
+		Repo:     diff.repoName,
+		Message:  diff.message,
+		Date:     diff.date,
+	}
+	if opts.Redact {
+		leak.Offender = "REDACTED"
+		leak.Line = strings.Replace(line, offender, "REDACTED", -1)
+	}
+
+	if opts.Verbose {
+		leak.log()
+	}
+
+	leaks = append(leaks, leak)
+	return leaks
+}
+
+// discoverRepos walks all the children of `path`. If a child directory
+// contains a .git subdirectory, that repo is added to the returned list of repos.
+func discoverRepos(ownerPath string) ([]*RepoDescriptor, error) {
+	var (
+		err    error
+		repoDs []*RepoDescriptor
+	)
+	files, err := ioutil.ReadDir(ownerPath)
+	if err != nil {
+		return repoDs, err
+	}
+	for _, f := range files {
+		repoPath := path.Join(ownerPath, f.Name())
+		if f.IsDir() && containsGit(repoPath) {
+			repoDs = append(repoDs, &RepoDescriptor{
+				name: f.Name(),
+				path: repoPath,
+			})
+		}
+	}
+	return repoDs, err
+}
+
+func (leak Leak) log() {
+	b, _ := json.MarshalIndent(leak, "", "   ")
+	fmt.Println(string(b))
+}
+
+func containsGit(repoPath string) bool {
+	if _, err := os.Stat(repoPath); os.IsNotExist(err) {
+		// path/to/whatever does not exist
+		return false
+	}
+	return true
+}

+ 0 - 21
vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING

@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2013 TOML authors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.

+ 0 - 21
vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING

@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2013 TOML authors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.

+ 0 - 21
vendor/github.com/BurntSushi/toml/cmd/tomlv/COPYING

@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2013 TOML authors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.

+ 0 - 22
vendor/github.com/franela/goblin/.gitignore

@@ -1,22 +0,0 @@
-# Compiled Object files, Static and Dynamic libs (Shared Objects)
-*.o
-*.a
-*.so
-
-# Folders
-_obj
-_test
-
-# Architecture specific extensions/prefixes
-*.[568vq]
-[568vq].out
-
-*.cgo1.go
-*.cgo2.c
-_cgo_defun.c
-_cgo_gotypes.go
-_cgo_export.*
-
-_testmain.go
-
-*.exe

+ 0 - 7
vendor/github.com/franela/goblin/.travis.yml

@@ -1,7 +0,0 @@
-language: go
-go:
- - 1.6.3
-notifications:
-  email:
-      - ionathan@gmail.com
-      - marcosnils@gmail.com

+ 0 - 19
vendor/github.com/franela/goblin/LICENSE

@@ -1,19 +0,0 @@
-Copyright (c) 2013 Marcos Lilljedahl and Jonathan Leibiusky
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.

+ 0 - 3
vendor/github.com/franela/goblin/Makefile

@@ -1,3 +0,0 @@
-export GOPATH=$(shell pwd)
-test:
-	go test -v

+ 0 - 149
vendor/github.com/franela/goblin/README.md

@@ -1,149 +0,0 @@
-Goblin
-======
-
-[![Build Status](https://travis-ci.org/franela/goblin.svg)](https://travis-ci.org/franela/goblin)
-[![Go Reportcard](https://goreportcard.com/badge/github.com/franela/goblin)](https://goreportcard.com/report/github.com/franela/goblin)
-[![GoDoc](https://godoc.org/github.com/franela/goblin?status.svg)](https://godoc.org/github.com/franela/goblin)
-[![License](https://img.shields.io/github/license/franela/goblin.svg)](https://github.com/franela/goblin/blob/master/LICENSE.md)
-[![Release](https://img.shields.io/github/release/franela/goblin.svg)](https://github.com/franela/goblin/releases/latest)
-
-
-A [Mocha](http://mochajs.org/) like BDD testing framework written in Go that requires no additional dependencies. Requires no extensive documentation nor complicated steps to get it running.
-
-![](https://github.com/marcosnils/goblin/blob/master/goblin_logo.jpg?raw=true)
-
-Why Goblin?
------------
-
-Inspired by the flexibility and simplicity of Node BDD and frustrated by the
-rigorousness of Go way of testing, we wanted to bring a new tool to
-write self-describing and comprehensive code.
-
-
-
-What do I get with it?
-----------------------
-
-- Run tests as usual with `go test`
-- Colorful reports and beautiful syntax
-- Preserve the exact same syntax and behaviour as Node's Mocha
-- Nest as many `Describe` and `It` blocks as you want
-- Use `Before`, `BeforeEach`, `After` and `AfterEach` for setup and teardown your tests
-- No need to remember confusing parameters in `Describe` and `It` blocks
-- Use a declarative and expressive language to write your tests
-- Plug different assertion libraries
- - [Gomega](https://github.com/onsi/gomega) (supported so far)
-- Skip your tests the same way as you would do in Mocha
-- Automatic terminal support for colored outputs
-- Two line setup is all you need to get up running
-
-
-
-How do I use it?
-----------------
-
-Since ```go test``` is not currently extensive, you will have to hook Goblin to it. You do that by
-adding a single test method in your test file. All your goblin tests will be implemented inside this function.
-
-```go
-package foobar
-
-import (
-    "testing"
-    . "github.com/franela/goblin"
-)
-
-func Test(t *testing.T) {
-    g := Goblin(t)
-    g.Describe("Numbers", func() {
-        // Passing Test
-        g.It("Should add two numbers ", func() {
-            g.Assert(1+1).Equal(2)
-        })
-        // Failing Test
-        g.It("Should match equal numbers", func() {
-            g.Assert(2).Equal(4)
-        })
-        // Pending Test
-        g.It("Should substract two numbers")
-        // Excluded Test
-        g.XIt("Should add two numbers ", func() {
-            g.Assert(3+1).Equal(4)
-        })
-    })
-}
-```
-
-Ouput will be something like:
-
-![](https://github.com/marcosnils/goblin/blob/master/goblin_output.png?raw=true)
-
-Nice and easy, right?
-
-Can I do asynchronous tests?
-----------------------------
-
-Yes! Goblin will help you to test asynchronous things, like goroutines, etc. You just need to add a ```done``` parameter to the handler function of your ```It```. This handler function should be called when your test passes.
-
-```go
-  ...
-  g.Describe("Numbers", func() {
-      g.It("Should add two numbers asynchronously", func(done Done) {
-          go func() {
-              g.Assert(1+1).Equal(2)
-              done()
-          }()
-      })
-  })
-  ...
-```
-
-Goblin will wait for the ```done``` call, a ```Fail``` call or any false assertion.
-
-How do I use it with Gomega?
-----------------------------
-
-Gomega is a nice assertion framework. But it doesn't provide a nice way to hook it to testing frameworks. It should just panic instead of requiring a fail function. There is an issue about that [here](https://github.com/onsi/gomega/issues/5).
-While this is being discussed and hopefully fixed, the way to use Gomega with Goblin is:
-
-```go
-package foobar
-
-import (
-    "testing"
-    . "github.com/franela/goblin"
-    . "github.com/onsi/gomega"
-)
-
-func Test(t *testing.T) {
-    g := Goblin(t)
-
-    //special hook for gomega
-    RegisterFailHandler(func(m string, _ ...int) { g.Fail(m) })
-
-    g.Describe("lala", func() {
-        g.It("lslslslsls", func() {
-            Expect(1).To(Equal(10))
-        })
-    })
-}
-```
-
-
-FAQ
-----
-
-### How do I run specific tests?
-
-If `-goblin.run=$REGES` is supplied to the `go test` command then only tests that match the supplied regex will run
-
-
-Contributing
------
-
-We do have a couple of [issues](https://github.com/franela/goblin/issues) pending.  Feel free to contribute and send us PRs (with tests please :smile:).
-
-Special Thanks
-------------
-
-Special thanks to [Leandro Reox](https://github.com/leandroreox) (Leitan) for the goblin logo.

+ 0 - 70
vendor/github.com/franela/goblin/assertions.go

@@ -1,70 +0,0 @@
-package goblin
-
-import (
-	"fmt"
-	"reflect"
-	"strings"
-)
-
-// Assertion represents a fact stated about a source object. It contains the source object and function to call
-type Assertion struct {
-	src  interface{}
-	fail func(interface{})
-}
-
-func objectsAreEqual(a, b interface{}) bool {
-	if reflect.TypeOf(a) != reflect.TypeOf(b) {
-		return false
-	}
-
-	if reflect.DeepEqual(a, b) {
-		return true
-	}
-
-	if fmt.Sprintf("%#v", a) == fmt.Sprintf("%#v", b) {
-		return true
-	}
-
-	return false
-}
-
-func formatMessages(messages ...string) string {
-	if len(messages) > 0 {
-		return ", " + strings.Join(messages, " ")
-	}
-	return ""
-}
-
-// Eql is a shorthand alias of Equal for convenience
-func (a *Assertion) Eql(dst interface{}) {
-	a.Equal(dst)
-}
-
-// Equal takes a destination object and asserts that a source object and
-// destination object are equal to one another. It will fail the assertion and
-// print a corresponding message if the objects are not equivalent.
-func (a *Assertion) Equal(dst interface{}) {
-	if !objectsAreEqual(a.src, dst) {
-		a.fail(fmt.Sprintf("%#v %s %#v", a.src, "does not equal", dst))
-	}
-}
-
-// IsTrue asserts that a source is equal to true. Optional messages can be
-// provided for inclusion in the displayed message if the assertion fails. It
-// will fail the assertion if the source does not resolve to true.
-func (a *Assertion) IsTrue(messages ...string) {
-	if !objectsAreEqual(a.src, true) {
-		message := fmt.Sprintf("%v %s%s", a.src, "expected false to be truthy", formatMessages(messages...))
-		a.fail(message)
-	}
-}
-
-// IsFalse asserts that a source is equal to false. Optional messages can be
-// provided for inclusion in the displayed message if the assertion fails. It
-// will fail the assertion if the source does not resolve to false.
-func (a *Assertion) IsFalse(messages ...string) {
-	if !objectsAreEqual(a.src, false) {
-		message := fmt.Sprintf("%v %s%s", a.src, "expected true to be falsey", formatMessages(messages...))
-		a.fail(message)
-	}
-}

+ 0 - 36
vendor/github.com/franela/goblin/go.snippets

@@ -1,36 +0,0 @@
-snippet gd
-	g.Describe("${1:name}", func() {
-		${2}
-	})
-	${0}
-snippet git 
-	g.It("${1:name}", func() {
-		${2}
-	})
-	${0}
-snippet gait 
-	g.It("${1:name}", func(done Done) {
-		done()
-		${2}
-	})
-	${0}
-snippet gb 
-	g.Before(func() {
-		${1}
-	})
-	${0}
-snippet gbe 
-	g.BeforeEach(func() {
-		${1}
-	})
-	${0}
-snippet ga 
-	g.After(func() {
-		${1}
-	})
-	${0}
-snippet gae 
-	g.AfterEach(func() {
-		${1}
-	})
-	${0}

+ 0 - 337
vendor/github.com/franela/goblin/goblin.go

@@ -1,337 +0,0 @@
-package goblin
-
-import (
-	"flag"
-	"fmt"
-	"regexp"
-	"runtime"
-	"sync"
-	"testing"
-	"time"
-)
-
-type Done func(error ...interface{})
-
-type Runnable interface {
-	run(*G) bool
-}
-
-type Itable interface {
-	run(*G) bool
-	failed(string, []string)
-}
-
-func (g *G) Describe(name string, h func()) {
-	d := &Describe{name: name, h: h, parent: g.parent}
-
-	if d.parent != nil {
-		d.parent.children = append(d.parent.children, Runnable(d))
-	}
-
-	g.parent = d
-
-	h()
-
-	g.parent = d.parent
-
-	if g.parent == nil && d.hasTests {
-		g.reporter.begin()
-		if d.run(g) {
-			g.t.Fail()
-		}
-		g.reporter.end()
-	}
-}
-func (g *G) Timeout(time time.Duration) {
-	g.timeout = time
-	g.timer.Reset(time)
-}
-
-type Describe struct {
-	name       string
-	h          func()
-	children   []Runnable
-	befores    []func()
-	afters     []func()
-	afterEach  []func()
-	beforeEach []func()
-	hasTests   bool
-	parent     *Describe
-}
-
-func (d *Describe) runBeforeEach() {
-	if d.parent != nil {
-		d.parent.runBeforeEach()
-	}
-
-	for _, b := range d.beforeEach {
-		b()
-	}
-}
-
-func (d *Describe) runAfterEach() {
-
-	if d.parent != nil {
-		d.parent.runAfterEach()
-	}
-
-	for _, a := range d.afterEach {
-		a()
-	}
-}
-
-func (d *Describe) run(g *G) bool {
-	failed := false
-	if d.hasTests {
-		g.reporter.beginDescribe(d.name)
-
-		for _, b := range d.befores {
-			b()
-		}
-
-		for _, r := range d.children {
-			if r.run(g) {
-				failed = true
-			}
-		}
-
-		for _, a := range d.afters {
-			a()
-		}
-
-		g.reporter.endDescribe()
-	}
-
-	return failed
-}
-
-type Failure struct {
-	stack    []string
-	testName string
-	message  string
-}
-
-type It struct {
-	h        interface{}
-	name     string
-	parent   *Describe
-	failure  *Failure
-	reporter Reporter
-	isAsync  bool
-}
-
-func (it *It) run(g *G) bool {
-	g.currentIt = it
-
-	if it.h == nil {
-		g.reporter.itIsPending(it.name)
-		return false
-	}
-	//TODO: should handle errors for beforeEach
-	it.parent.runBeforeEach()
-
-	runIt(g, it.h)
-
-	it.parent.runAfterEach()
-
-	failed := false
-	if it.failure != nil {
-		failed = true
-	}
-
-	if failed {
-		g.reporter.itFailed(it.name)
-		g.reporter.failure(it.failure)
-	} else {
-		g.reporter.itPassed(it.name)
-	}
-	return failed
-}
-
-func (it *It) failed(msg string, stack []string) {
-	it.failure = &Failure{stack: stack, message: msg, testName: it.parent.name + " " + it.name}
-}
-
-type Xit struct {
-	h        interface{}
-	name     string
-	parent   *Describe
-	failure  *Failure
-	reporter Reporter
-	isAsync  bool
-}
-
-func (xit *Xit) run(g *G) bool {
-	g.currentIt = xit
-
-	g.reporter.itIsExcluded(xit.name)
-	return false
-}
-
-func (xit *Xit) failed(msg string, stack []string) {
-	xit.failure = nil
-}
-
-func parseFlags() {
-	//Flag parsing
-	flag.Parse()
-	if *regexParam != "" {
-		runRegex = regexp.MustCompile(*regexParam)
-	} else {
-		runRegex = nil
-	}
-}
-
-var timeout = flag.Duration("goblin.timeout", 5*time.Second, "Sets default timeouts for all tests")
-var isTty = flag.Bool("goblin.tty", true, "Sets the default output format (color / monochrome)")
-var regexParam = flag.String("goblin.run", "", "Runs only tests which match the supplied regex")
-var runRegex *regexp.Regexp
-
-func Goblin(t *testing.T, arguments ...string) *G {
-	if !flag.Parsed() {
-		parseFlags()
-	}
-	g := &G{t: t, timeout: *timeout}
-	var fancy TextFancier
-	if *isTty {
-		fancy = &TerminalFancier{}
-	} else {
-		fancy = &Monochrome{}
-	}
-
-	g.reporter = Reporter(&DetailedReporter{fancy: fancy})
-	return g
-}
-
-func runIt(g *G, h interface{}) {
-	defer timeTrack(time.Now(), g)
-	g.mutex.Lock()
-	g.timedOut = false
-	g.mutex.Unlock()
-	g.timer = time.NewTimer(g.timeout)
-	g.shouldContinue = make(chan bool)
-	if call, ok := h.(func()); ok {
-		// the test is synchronous
-		go func(c chan bool) { call(); c <- true }(g.shouldContinue)
-	} else if call, ok := h.(func(Done)); ok {
-		doneCalled := 0
-		go func(c chan bool) {
-			call(func(msg ...interface{}) {
-				if len(msg) > 0 {
-					g.Fail(msg)
-				} else {
-					doneCalled++
-					if doneCalled > 1 {
-						g.Fail("Done called multiple times")
-					}
-					c <- true
-				}
-			})
-		}(g.shouldContinue)
-	} else {
-		panic("Not implemented.")
-	}
-	select {
-	case <-g.shouldContinue:
-	case <-g.timer.C:
-		//Set to nil as it shouldn't continue
-		g.shouldContinue = nil
-		g.timedOut = true
-		g.Fail("Test exceeded " + fmt.Sprintf("%s", g.timeout))
-	}
-	// Reset timeout value
-	g.timeout = *timeout
-}
-
-type G struct {
-	t              *testing.T
-	parent         *Describe
-	currentIt      Itable
-	timeout        time.Duration
-	reporter       Reporter
-	timedOut       bool
-	shouldContinue chan bool
-	mutex          sync.Mutex
-	timer          *time.Timer
-}
-
-func (g *G) SetReporter(r Reporter) {
-	g.reporter = r
-}
-
-func (g *G) It(name string, h ...interface{}) {
-	if matchesRegex(name) {
-		it := &It{name: name, parent: g.parent, reporter: g.reporter}
-		notifyParents(g.parent)
-		if len(h) > 0 {
-			it.h = h[0]
-		}
-		g.parent.children = append(g.parent.children, Runnable(it))
-	}
-}
-
-func (g *G) Xit(name string, h ...interface{}) {
-	if matchesRegex(name) {
-		xit := &Xit{name: name, parent: g.parent, reporter: g.reporter}
-		notifyParents(g.parent)
-		if len(h) > 0 {
-			xit.h = h[0]
-		}
-		g.parent.children = append(g.parent.children, Runnable(xit))
-	}
-}
-
-func matchesRegex(value string) bool {
-	if runRegex != nil {
-		return runRegex.MatchString(value)
-	}
-	return true
-}
-
-func notifyParents(d *Describe) {
-	d.hasTests = true
-	if d.parent != nil {
-		notifyParents(d.parent)
-	}
-}
-
-func (g *G) Before(h func()) {
-	g.parent.befores = append(g.parent.befores, h)
-}
-
-func (g *G) BeforeEach(h func()) {
-	g.parent.beforeEach = append(g.parent.beforeEach, h)
-}
-
-func (g *G) After(h func()) {
-	g.parent.afters = append(g.parent.afters, h)
-}
-
-func (g *G) AfterEach(h func()) {
-	g.parent.afterEach = append(g.parent.afterEach, h)
-}
-
-func (g *G) Assert(src interface{}) *Assertion {
-	return &Assertion{src: src, fail: g.Fail}
-}
-
-func timeTrack(start time.Time, g *G) {
-	g.reporter.itTook(time.Since(start))
-}
-
-func (g *G) Fail(error interface{}) {
-	//Skips 7 stacks due to the functions between the stack and the test
-	stack := ResolveStack(7)
-	message := fmt.Sprintf("%v", error)
-	g.currentIt.failed(message, stack)
-	if g.shouldContinue != nil {
-		g.shouldContinue <- true
-	}
-	g.mutex.Lock()
-	defer g.mutex.Unlock()
-	if !g.timedOut {
-		//Stop test function execution
-		runtime.Goexit()
-	}
-
-}

BIN
vendor/github.com/franela/goblin/goblin_logo.jpg


BIN
vendor/github.com/franela/goblin/goblin_output.png


+ 0 - 30
vendor/github.com/franela/goblin/mono_reporter.go

@@ -1,30 +0,0 @@
-package goblin
-
-import ()
-
-type Monochrome struct {
-}
-
-func (self *Monochrome) Red(text string) string {
-	return "!" + text
-}
-
-func (self *Monochrome) Gray(text string) string {
-	return text
-}
-
-func (self *Monochrome) Cyan(text string) string {
-	return text
-}
-
-func (self *Monochrome) WithCheck(text string) string {
-	return ">>>" + text
-}
-
-func (self *Monochrome) Green(text string) string {
-	return text
-}
-
-func (self *Monochrome) Yellow(text string) string {
-	return text
-}

+ 0 - 153
vendor/github.com/franela/goblin/reporting.go

@@ -1,153 +0,0 @@
-package goblin
-
-import (
-	"fmt"
-	"strconv"
-	"strings"
-	"time"
-)
-
-type Reporter interface {
-	beginDescribe(string)
-	endDescribe()
-	begin()
-	end()
-	failure(*Failure)
-	itTook(time.Duration)
-	itFailed(string)
-	itPassed(string)
-	itIsPending(string)
-	itIsExcluded(string)
-}
-
-type TextFancier interface {
-	Red(text string) string
-	Gray(text string) string
-	Cyan(text string) string
-	Green(text string) string
-	Yellow(text string) string
-	WithCheck(text string) string
-}
-
-type DetailedReporter struct {
-	level, failed, passed, pending, excluded int
-	failures                                 []*Failure
-	executionTime, totalExecutionTime        time.Duration
-	fancy                                    TextFancier
-}
-
-func (r *DetailedReporter) SetTextFancier(f TextFancier) {
-	r.fancy = f
-}
-
-type TerminalFancier struct {
-}
-
-func (self *TerminalFancier) Red(text string) string {
-	return "\033[31m" + text + "\033[0m"
-}
-
-func (self *TerminalFancier) Gray(text string) string {
-	return "\033[90m" + text + "\033[0m"
-}
-
-func (self *TerminalFancier) Cyan(text string) string {
-	return "\033[36m" + text + "\033[0m"
-}
-
-func (self *TerminalFancier) Green(text string) string {
-	return "\033[32m" + text + "\033[0m"
-}
-
-func (self *TerminalFancier) Yellow(text string) string {
-	return "\033[33m" + text + "\033[0m"
-}
-
-func (self *TerminalFancier) WithCheck(text string) string {
-	return "\033[32m\u2713\033[0m " + text
-}
-
-func (r *DetailedReporter) getSpace() string {
-	return strings.Repeat(" ", (r.level+1)*2)
-}
-
-func (r *DetailedReporter) failure(failure *Failure) {
-	r.failures = append(r.failures, failure)
-}
-
-func (r *DetailedReporter) print(text string) {
-	fmt.Printf("%v%v\n", r.getSpace(), text)
-}
-
-func (r *DetailedReporter) printWithCheck(text string) {
-	fmt.Printf("%v%v\n", r.getSpace(), r.fancy.WithCheck(text))
-}
-
-func (r *DetailedReporter) beginDescribe(name string) {
-	fmt.Println("")
-	r.print(name)
-	r.level++
-}
-
-func (r *DetailedReporter) endDescribe() {
-	r.level--
-}
-
-func (r *DetailedReporter) itTook(duration time.Duration) {
-	r.executionTime = duration
-	r.totalExecutionTime += duration
-}
-
-func (r *DetailedReporter) itFailed(name string) {
-	r.failed++
-	r.print(r.fancy.Red(strconv.Itoa(r.failed) + ") " + name))
-}
-
-func (r *DetailedReporter) itPassed(name string) {
-	r.passed++
-	r.printWithCheck(r.fancy.Gray(name))
-}
-
-func (r *DetailedReporter) itIsPending(name string) {
-	r.pending++
-	r.print(r.fancy.Cyan("- " + name))
-}
-
-func (r *DetailedReporter) itIsExcluded(name string) {
-	r.excluded++
-	r.print(r.fancy.Yellow("- " + name))
-}
-
-func (r *DetailedReporter) begin() {
-}
-
-func (r *DetailedReporter) end() {
-	comp := fmt.Sprintf("%d tests complete", r.passed)
-	t := fmt.Sprintf("(%d ms)", r.totalExecutionTime/time.Millisecond)
-
-	//fmt.Printf("\n\n \033[32m%d tests complete\033[0m \033[90m(%d ms)\033[0m\n", r.passed, r.totalExecutionTime/time.Millisecond)
-	fmt.Printf("\n\n %v %v\n", r.fancy.Green(comp), r.fancy.Gray(t))
-
-	if r.pending > 0 {
-		pend := fmt.Sprintf("%d test(s) pending", r.pending)
-		fmt.Printf(" %v\n\n", r.fancy.Cyan(pend))
-	}
-
-	if r.excluded > 0 {
-		excl := fmt.Sprintf("%d test(s) excluded", r.excluded)
-		fmt.Printf(" %v\n\n", r.fancy.Yellow(excl))
-	}
-
-	if len(r.failures) > 0 {
-		fmt.Printf("%s \n\n", r.fancy.Red(fmt.Sprintf(" %d tests failed:", len(r.failures))))
-
-	}
-
-	for i, failure := range r.failures {
-		fmt.Printf("  %d) %s:\n\n", i+1, failure.testName)
-		fmt.Printf("    %s\n", r.fancy.Red(failure.message))
-		for _, stackItem := range failure.stack {
-			fmt.Printf("    %s\n", r.fancy.Gray(stackItem))
-		}
-	}
-}

+ 0 - 21
vendor/github.com/franela/goblin/resolver.go

@@ -1,21 +0,0 @@
-package goblin
-
-import (
-	"runtime/debug"
-	"strings"
-)
-
-func ResolveStack(skip int) []string {
-	return cleanStack(debug.Stack(), skip)
-}
-
-func cleanStack(stack []byte, skip int) []string {
-	arrayStack := strings.Split(string(stack), "\n")
-	var finalStack []string
-	for i := skip; i < len(arrayStack); i++ {
-		if strings.Contains(arrayStack[i], ".go") {
-			finalStack = append(finalStack, arrayStack[i])
-		}
-	}
-	return finalStack
-}

+ 3 - 3
vendor/github.com/jessevdk/go-flags/check_crosscompile.sh

@@ -12,7 +12,7 @@ import (
 	"gopkg.in/warnings.v0"
 )
 
-var unescape = map[rune]rune{'\\': '\\', '"': '"', 'n': '\n', 't': '\t'}
+var unescape = map[rune]rune{'\\': '\\', '"': '"', 'n': '\n', 't': '\t', 'b': '\b'}
 
 // no error: invalid literals should be caught by scanner
 func unquote(s string) string {
@@ -48,7 +48,7 @@ func unquote(s string) string {
 	return string(u)
 }
 
-func read(c *warnings.Collector, callback func(string,string,string,string,bool)error,
+func read(c *warnings.Collector, callback func(string, string, string, string, bool) error,
 	fset *token.FileSet, file *token.File, src []byte) error {
 	//
 	var s scanner.Scanner
@@ -223,7 +223,7 @@ func readInto(config interface{}, fset *token.FileSet, file *token.File,
 // (as opposed to set to empty string).
 //
 // If callback returns an error, ReadWithCallback terminates with an error too.
-func ReadWithCallback(reader io.Reader, callback func(string,string,string,string,bool)error) error {
+func ReadWithCallback(reader io.Reader, callback func(string, string, string, string, bool) error) error {
 	src, err := ioutil.ReadAll(reader)
 	if err != nil {
 		return err

+ 4 - 4
vendor/github.com/src-d/gcfg/scanner/scanner.go

@@ -170,7 +170,7 @@ func (s *Scanner) scanEscape(val bool) {
 	switch ch {
 	case '\\', '"':
 		// ok
-	case 'n', 't':
+	case 'n', 't', 'b':
 		if val {
 			break // ok
 		}
@@ -232,10 +232,10 @@ loop:
 				s.next()
 			}
 			if s.ch != '\n' {
-				s.error(offs, "unquoted '\\' must be followed by new line")
-				break loop
+				s.scanEscape(true)
+			} else {
+				s.next()
 			}
-			s.next()
 		case ch == '"':
 			inQuote = !inQuote
 		case ch == '\r':

+ 0 - 369
vendor/golang.org/x/sys/unix/mkall.sh

@@ -1,369 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package gen
-
-import (
-	"bytes"
-	"encoding/gob"
-	"fmt"
-	"hash"
-	"hash/fnv"
-	"io"
-	"log"
-	"os"
-	"reflect"
-	"strings"
-	"unicode"
-	"unicode/utf8"
-)
-
-// This file contains utilities for generating code.
-
-// TODO: other write methods like:
-// - slices, maps, types, etc.
-
-// CodeWriter is a utility for writing structured code. It computes the content
-// hash and size of written content. It ensures there are newlines between
-// written code blocks.
-type CodeWriter struct {
-	buf  bytes.Buffer
-	Size int
-	Hash hash.Hash32 // content hash
-	gob  *gob.Encoder
-	// For comments we skip the usual one-line separator if they are followed by
-	// a code block.
-	skipSep bool
-}
-
-func (w *CodeWriter) Write(p []byte) (n int, err error) {
-	return w.buf.Write(p)
-}
-
-// NewCodeWriter returns a new CodeWriter.
-func NewCodeWriter() *CodeWriter {
-	h := fnv.New32()
-	return &CodeWriter{Hash: h, gob: gob.NewEncoder(h)}
-}
-
-// WriteGoFile appends the buffer with the total size of all created structures
-// and writes it as a Go file to the the given file with the given package name.
-func (w *CodeWriter) WriteGoFile(filename, pkg string) {
-	f, err := os.Create(filename)
-	if err != nil {
-		log.Fatalf("Could not create file %s: %v", filename, err)
-	}
-	defer f.Close()
-	if _, err = w.WriteGo(f, pkg, ""); err != nil {
-		log.Fatalf("Error writing file %s: %v", filename, err)
-	}
-}
-
-// WriteVersionedGoFile appends the buffer with the total size of all created
-// structures and writes it as a Go file to the the given file with the given
-// package name and build tags for the current Unicode version,
-func (w *CodeWriter) WriteVersionedGoFile(filename, pkg string) {
-	tags := buildTags()
-	if tags != "" {
-		filename = insertVersion(filename, UnicodeVersion())
-	}
-	f, err := os.Create(filename)
-	if err != nil {
-		log.Fatalf("Could not create file %s: %v", filename, err)
-	}
-	defer f.Close()
-	if _, err = w.WriteGo(f, pkg, tags); err != nil {
-		log.Fatalf("Error writing file %s: %v", filename, err)
-	}
-}
-
-// WriteGo appends the buffer with the total size of all created structures and
-// writes it as a Go file to the the given writer with the given package name.
-func (w *CodeWriter) WriteGo(out io.Writer, pkg, tags string) (n int, err error) {
-	sz := w.Size
-	w.WriteComment("Total table size %d bytes (%dKiB); checksum: %X\n", sz, sz/1024, w.Hash.Sum32())
-	defer w.buf.Reset()
-	return WriteGo(out, pkg, tags, w.buf.Bytes())
-}
-
-func (w *CodeWriter) printf(f string, x ...interface{}) {
-	fmt.Fprintf(w, f, x...)
-}
-
-func (w *CodeWriter) insertSep() {
-	if w.skipSep {
-		w.skipSep = false
-		return
-	}
-	// Use at least two newlines to ensure a blank space between the previous
-	// block. WriteGoFile will remove extraneous newlines.
-	w.printf("\n\n")
-}
-
-// WriteComment writes a comment block. All line starts are prefixed with "//".
-// Initial empty lines are gobbled. The indentation for the first line is
-// stripped from consecutive lines.
-func (w *CodeWriter) WriteComment(comment string, args ...interface{}) {
-	s := fmt.Sprintf(comment, args...)
-	s = strings.Trim(s, "\n")
-
-	// Use at least two newlines to ensure a blank space between the previous
-	// block. WriteGoFile will remove extraneous newlines.
-	w.printf("\n\n// ")
-	w.skipSep = true
-
-	// strip first indent level.
-	sep := "\n"
-	for ; len(s) > 0 && (s[0] == '\t' || s[0] == ' '); s = s[1:] {
-		sep += s[:1]
-	}
-
-	strings.NewReplacer(sep, "\n// ", "\n", "\n// ").WriteString(w, s)
-
-	w.printf("\n")
-}
-
-func (w *CodeWriter) writeSizeInfo(size int) {
-	w.printf("// Size: %d bytes\n", size)
-}
-
-// WriteConst writes a constant of the given name and value.
-func (w *CodeWriter) WriteConst(name string, x interface{}) {
-	w.insertSep()
-	v := reflect.ValueOf(x)
-
-	switch v.Type().Kind() {
-	case reflect.String:
-		w.printf("const %s %s = ", name, typeName(x))
-		w.WriteString(v.String())
-		w.printf("\n")
-	default:
-		w.printf("const %s = %#v\n", name, x)
-	}
-}
-
-// WriteVar writes a variable of the given name and value.
-func (w *CodeWriter) WriteVar(name string, x interface{}) {
-	w.insertSep()
-	v := reflect.ValueOf(x)
-	oldSize := w.Size
-	sz := int(v.Type().Size())
-	w.Size += sz
-
-	switch v.Type().Kind() {
-	case reflect.String:
-		w.printf("var %s %s = ", name, typeName(x))
-		w.WriteString(v.String())
-	case reflect.Struct:
-		w.gob.Encode(x)
-		fallthrough
-	case reflect.Slice, reflect.Array:
-		w.printf("var %s = ", name)
-		w.writeValue(v)
-		w.writeSizeInfo(w.Size - oldSize)
-	default:
-		w.printf("var %s %s = ", name, typeName(x))
-		w.gob.Encode(x)
-		w.writeValue(v)
-		w.writeSizeInfo(w.Size - oldSize)
-	}
-	w.printf("\n")
-}
-
-func (w *CodeWriter) writeValue(v reflect.Value) {
-	x := v.Interface()
-	switch v.Kind() {
-	case reflect.String:
-		w.WriteString(v.String())
-	case reflect.Array:
-		// Don't double count: callers of WriteArray count on the size being
-		// added, so we need to discount it here.
-		w.Size -= int(v.Type().Size())
-		w.writeSlice(x, true)
-	case reflect.Slice:
-		w.writeSlice(x, false)
-	case reflect.Struct:
-		w.printf("%s{\n", typeName(v.Interface()))
-		t := v.Type()
-		for i := 0; i < v.NumField(); i++ {
-			w.printf("%s: ", t.Field(i).Name)
-			w.writeValue(v.Field(i))
-			w.printf(",\n")
-		}
-		w.printf("}")
-	default:
-		w.printf("%#v", x)
-	}
-}
-
-// WriteString writes a string literal.
-func (w *CodeWriter) WriteString(s string) {
-	s = strings.Replace(s, `\`, `\\`, -1)
-	io.WriteString(w.Hash, s) // content hash
-	w.Size += len(s)
-
-	const maxInline = 40
-	if len(s) <= maxInline {
-		w.printf("%q", s)
-		return
-	}
-
-	// We will render the string as a multi-line string.
-	const maxWidth = 80 - 4 - len(`"`) - len(`" +`)
-
-	// When starting on its own line, go fmt indents line 2+ an extra level.
-	n, max := maxWidth, maxWidth-4
-
-	// As per https://golang.org/issue/18078, the compiler has trouble
-	// compiling the concatenation of many strings, s0 + s1 + s2 + ... + sN,
-	// for large N. We insert redundant, explicit parentheses to work around
-	// that, lowering the N at any given step: (s0 + s1 + ... + s63) + (s64 +
-	// ... + s127) + etc + (etc + ... + sN).
-	explicitParens, extraComment := len(s) > 128*1024, ""
-	if explicitParens {
-		w.printf(`(`)
-		extraComment = "; the redundant, explicit parens are for https://golang.org/issue/18078"
-	}
-
-	// Print "" +\n, if a string does not start on its own line.
-	b := w.buf.Bytes()
-	if p := len(bytes.TrimRight(b, " \t")); p > 0 && b[p-1] != '\n' {
-		w.printf("\"\" + // Size: %d bytes%s\n", len(s), extraComment)
-		n, max = maxWidth, maxWidth
-	}
-
-	w.printf(`"`)
-
-	for sz, p, nLines := 0, 0, 0; p < len(s); {
-		var r rune
-		r, sz = utf8.DecodeRuneInString(s[p:])
-		out := s[p : p+sz]
-		chars := 1
-		if !unicode.IsPrint(r) || r == utf8.RuneError || r == '"' {
-			switch sz {
-			case 1:
-				out = fmt.Sprintf("\\x%02x", s[p])
-			case 2, 3:
-				out = fmt.Sprintf("\\u%04x", r)
-			case 4:
-				out = fmt.Sprintf("\\U%08x", r)
-			}
-			chars = len(out)
-		}
-		if n -= chars; n < 0 {
-			nLines++
-			if explicitParens && nLines&63 == 63 {
-				w.printf("\") + (\"")
-			}
-			w.printf("\" +\n\"")
-			n = max - len(out)
-		}
-		w.printf("%s", out)
-		p += sz
-	}
-	w.printf(`"`)
-	if explicitParens {
-		w.printf(`)`)
-	}
-}
-
-// WriteSlice writes a slice value.
-func (w *CodeWriter) WriteSlice(x interface{}) {
-	w.writeSlice(x, false)
-}
-
-// WriteArray writes an array value.
-func (w *CodeWriter) WriteArray(x interface{}) {
-	w.writeSlice(x, true)
-}
-
-func (w *CodeWriter) writeSlice(x interface{}, isArray bool) {
-	v := reflect.ValueOf(x)
-	w.gob.Encode(v.Len())
-	w.Size += v.Len() * int(v.Type().Elem().Size())
-	name := typeName(x)
-	if isArray {
-		name = fmt.Sprintf("[%d]%s", v.Len(), name[strings.Index(name, "]")+1:])
-	}
-	if isArray {
-		w.printf("%s{\n", name)
-	} else {
-		w.printf("%s{ // %d elements\n", name, v.Len())
-	}
-
-	switch kind := v.Type().Elem().Kind(); kind {
-	case reflect.String:
-		for _, s := range x.([]string) {
-			w.WriteString(s)
-			w.printf(",\n")
-		}
-	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64,
-		reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
-		// nLine and nBlock are the number of elements per line and block.
-		nLine, nBlock, format := 8, 64, "%d,"
-		switch kind {
-		case reflect.Uint8:
-			format = "%#02x,"
-		case reflect.Uint16:
-			format = "%#04x,"
-		case reflect.Uint32:
-			nLine, nBlock, format = 4, 32, "%#08x,"
-		case reflect.Uint, reflect.Uint64:
-			nLine, nBlock, format = 4, 32, "%#016x,"
-		case reflect.Int8:
-			nLine = 16
-		}
-		n := nLine
-		for i := 0; i < v.Len(); i++ {
-			if i%nBlock == 0 && v.Len() > nBlock {
-				w.printf("// Entry %X - %X\n", i, i+nBlock-1)
-			}
-			x := v.Index(i).Interface()
-			w.gob.Encode(x)
-			w.printf(format, x)
-			if n--; n == 0 {
-				n = nLine
-				w.printf("\n")
-			}
-		}
-		w.printf("\n")
-	case reflect.Struct:
-		zero := reflect.Zero(v.Type().Elem()).Interface()
-		for i := 0; i < v.Len(); i++ {
-			x := v.Index(i).Interface()
-			w.gob.EncodeValue(v)
-			if !reflect.DeepEqual(zero, x) {
-				line := fmt.Sprintf("%#v,\n", x)
-				line = line[strings.IndexByte(line, '{'):]
-				w.printf("%d: ", i)
-				w.printf(line)
-			}
-		}
-	case reflect.Array:
-		for i := 0; i < v.Len(); i++ {
-			w.printf("%d: %#v,\n", i, v.Index(i).Interface())
-		}
-	default:
-		panic("gen: slice elem type not supported")
-	}
-	w.printf("}")
-}
-
-// WriteType writes a definition of the type of the given value and returns the
-// type name.
-func (w *CodeWriter) WriteType(x interface{}) string {
-	t := reflect.TypeOf(x)
-	w.printf("type %s struct {\n", t.Name())
-	for i := 0; i < t.NumField(); i++ {
-		w.printf("\t%s %s\n", t.Field(i).Name, t.Field(i).Type)
-	}
-	w.printf("}\n")
-	return t.Name()
-}
-
-// typeName returns the name of the go type of x.
-func typeName(x interface{}) string {
-	t := reflect.ValueOf(x).Type()
-	return strings.Replace(fmt.Sprint(t), "main.", "", 1)
-}

+ 0 - 333
vendor/golang.org/x/text/internal/gen/gen.go

@@ -1,333 +0,0 @@
-// Copyright 2015 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package gen contains common code for the various code generation tools in the
-// text repository. Its usage ensures consistency between tools.
-//
-// This package defines command line flags that are common to most generation
-// tools. The flags allow for specifying specific Unicode and CLDR versions
-// in the public Unicode data repository (http://www.unicode.org/Public).
-//
-// A local Unicode data mirror can be set through the flag -local or the
-// environment variable UNICODE_DIR. The former takes precedence. The local
-// directory should follow the same structure as the public repository.
-//
-// IANA data can also optionally be mirrored by putting it in the iana directory
-// rooted at the top of the local mirror. Beware, though, that IANA data is not
-// versioned. So it is up to the developer to use the right version.
-package gen // import "golang.org/x/text/internal/gen"
-
-import (
-	"bytes"
-	"flag"
-	"fmt"
-	"go/build"
-	"go/format"
-	"io"
-	"io/ioutil"
-	"log"
-	"net/http"
-	"os"
-	"path"
-	"path/filepath"
-	"strings"
-	"sync"
-	"unicode"
-
-	"golang.org/x/text/unicode/cldr"
-)
-
-var (
-	url = flag.String("url",
-		"http://www.unicode.org/Public",
-		"URL of Unicode database directory")
-	iana = flag.String("iana",
-		"http://www.iana.org",
-		"URL of the IANA repository")
-	unicodeVersion = flag.String("unicode",
-		getEnv("UNICODE_VERSION", unicode.Version),
-		"unicode version to use")
-	cldrVersion = flag.String("cldr",
-		getEnv("CLDR_VERSION", cldr.Version),
-		"cldr version to use")
-)
-
-func getEnv(name, def string) string {
-	if v := os.Getenv(name); v != "" {
-		return v
-	}
-	return def
-}
-
-// Init performs common initialization for a gen command. It parses the flags
-// and sets up the standard logging parameters.
-func Init() {
-	log.SetPrefix("")
-	log.SetFlags(log.Lshortfile)
-	flag.Parse()
-}
-
-const header = `// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
-
-`
-
-// UnicodeVersion reports the requested Unicode version.
-func UnicodeVersion() string {
-	return *unicodeVersion
-}
-
-// CLDRVersion reports the requested CLDR version.
-func CLDRVersion() string {
-	return *cldrVersion
-}
-
-var tags = []struct{ version, buildTags string }{
-	{"10.0.0", "go1.10"},
-	{"", "!go1.10"},
-}
-
-// buildTags reports the build tags used for the current Unicode version.
-func buildTags() string {
-	v := UnicodeVersion()
-	for _, x := range tags {
-		// We should do a numeric comparison, but including the collate package
-		// would create an import cycle. We approximate it by assuming that
-		// longer version strings are later.
-		if len(x.version) <= len(v) {
-			return x.buildTags
-		}
-		if len(x.version) == len(v) && x.version <= v {
-			return x.buildTags
-		}
-	}
-	return tags[0].buildTags
-}
-
-// IsLocal reports whether data files are available locally.
-func IsLocal() bool {
-	dir, err := localReadmeFile()
-	if err != nil {
-		return false
-	}
-	if _, err = os.Stat(dir); err != nil {
-		return false
-	}
-	return true
-}
-
-// OpenUCDFile opens the requested UCD file. The file is specified relative to
-// the public Unicode root directory. It will call log.Fatal if there are any
-// errors.
-func OpenUCDFile(file string) io.ReadCloser {
-	return openUnicode(path.Join(*unicodeVersion, "ucd", file))
-}
-
-// OpenCLDRCoreZip opens the CLDR core zip file. It will call log.Fatal if there
-// are any errors.
-func OpenCLDRCoreZip() io.ReadCloser {
-	return OpenUnicodeFile("cldr", *cldrVersion, "core.zip")
-}
-
-// OpenUnicodeFile opens the requested file of the requested category from the
-// root of the Unicode data archive. The file is specified relative to the
-// public Unicode root directory. If version is "", it will use the default
-// Unicode version. It will call log.Fatal if there are any errors.
-func OpenUnicodeFile(category, version, file string) io.ReadCloser {
-	if version == "" {
-		version = UnicodeVersion()
-	}
-	return openUnicode(path.Join(category, version, file))
-}
-
-// OpenIANAFile opens the requested IANA file. The file is specified relative
-// to the IANA root, which is typically either http://www.iana.org or the
-// iana directory in the local mirror. It will call log.Fatal if there are any
-// errors.
-func OpenIANAFile(path string) io.ReadCloser {
-	return Open(*iana, "iana", path)
-}
-
-var (
-	dirMutex sync.Mutex
-	localDir string
-)
-
-const permissions = 0755
-
-func localReadmeFile() (string, error) {
-	p, err := build.Import("golang.org/x/text", "", build.FindOnly)
-	if err != nil {
-		return "", fmt.Errorf("Could not locate package: %v", err)
-	}
-	return filepath.Join(p.Dir, "DATA", "README"), nil
-}
-
-func getLocalDir() string {
-	dirMutex.Lock()
-	defer dirMutex.Unlock()
-
-	readme, err := localReadmeFile()
-	if err != nil {
-		log.Fatal(err)
-	}
-	dir := filepath.Dir(readme)
-	if _, err := os.Stat(readme); err != nil {
-		if err := os.MkdirAll(dir, permissions); err != nil {
-			log.Fatalf("Could not create directory: %v", err)
-		}
-		ioutil.WriteFile(readme, []byte(readmeTxt), permissions)
-	}
-	return dir
-}
-
-const readmeTxt = `Generated by golang.org/x/text/internal/gen. DO NOT EDIT.
-
-This directory contains downloaded files used to generate the various tables
-in the golang.org/x/text subrepo.
-
-Note that the language subtag repo (iana/assignments/language-subtag-registry)
-and all other times in the iana subdirectory are not versioned and will need
-to be periodically manually updated. The easiest way to do this is to remove
-the entire iana directory. This is mostly of concern when updating the language
-package.
-`
-
-// Open opens subdir/path if a local directory is specified and the file exists,
-// where subdir is a directory relative to the local root, or fetches it from
-// urlRoot/path otherwise. It will call log.Fatal if there are any errors.
-func Open(urlRoot, subdir, path string) io.ReadCloser {
-	file := filepath.Join(getLocalDir(), subdir, filepath.FromSlash(path))
-	return open(file, urlRoot, path)
-}
-
-func openUnicode(path string) io.ReadCloser {
-	file := filepath.Join(getLocalDir(), filepath.FromSlash(path))
-	return open(file, *url, path)
-}
-
-// TODO: automatically periodically update non-versioned files.
-
-func open(file, urlRoot, path string) io.ReadCloser {
-	if f, err := os.Open(file); err == nil {
-		return f
-	}
-	r := get(urlRoot, path)
-	defer r.Close()
-	b, err := ioutil.ReadAll(r)
-	if err != nil {
-		log.Fatalf("Could not download file: %v", err)
-	}
-	os.MkdirAll(filepath.Dir(file), permissions)
-	if err := ioutil.WriteFile(file, b, permissions); err != nil {
-		log.Fatalf("Could not create file: %v", err)
-	}
-	return ioutil.NopCloser(bytes.NewReader(b))
-}
-
-func get(root, path string) io.ReadCloser {
-	url := root + "/" + path
-	fmt.Printf("Fetching %s...", url)
-	defer fmt.Println(" done.")
-	resp, err := http.Get(url)
-	if err != nil {
-		log.Fatalf("HTTP GET: %v", err)
-	}
-	if resp.StatusCode != 200 {
-		log.Fatalf("Bad GET status for %q: %q", url, resp.Status)
-	}
-	return resp.Body
-}
-
-// TODO: use Write*Version in all applicable packages.
-
-// WriteUnicodeVersion writes a constant for the Unicode version from which the
-// tables are generated.
-func WriteUnicodeVersion(w io.Writer) {
-	fmt.Fprintf(w, "// UnicodeVersion is the Unicode version from which the tables in this package are derived.\n")
-	fmt.Fprintf(w, "const UnicodeVersion = %q\n\n", UnicodeVersion())
-}
-
-// WriteCLDRVersion writes a constant for the CLDR version from which the
-// tables are generated.
-func WriteCLDRVersion(w io.Writer) {
-	fmt.Fprintf(w, "// CLDRVersion is the CLDR version from which the tables in this package are derived.\n")
-	fmt.Fprintf(w, "const CLDRVersion = %q\n\n", CLDRVersion())
-}
-
-// WriteGoFile prepends a standard file comment and package statement to the
-// given bytes, applies gofmt, and writes them to a file with the given name.
-// It will call log.Fatal if there are any errors.
-func WriteGoFile(filename, pkg string, b []byte) {
-	w, err := os.Create(filename)
-	if err != nil {
-		log.Fatalf("Could not create file %s: %v", filename, err)
-	}
-	defer w.Close()
-	if _, err = WriteGo(w, pkg, "", b); err != nil {
-		log.Fatalf("Error writing file %s: %v", filename, err)
-	}
-}
-
-func insertVersion(filename, version string) string {
-	suffix := ".go"
-	if strings.HasSuffix(filename, "_test.go") {
-		suffix = "_test.go"
-	}
-	return fmt.Sprint(filename[:len(filename)-len(suffix)], version, suffix)
-}
-
-// WriteVersionedGoFile prepends a standard file comment, adds build tags to
-// version the file for the current Unicode version, and package statement to
-// the given bytes, applies gofmt, and writes them to a file with the given
-// name. It will call log.Fatal if there are any errors.
-func WriteVersionedGoFile(filename, pkg string, b []byte) {
-	tags := buildTags()
-	if tags != "" {
-		filename = insertVersion(filename, UnicodeVersion())
-	}
-	w, err := os.Create(filename)
-	if err != nil {
-		log.Fatalf("Could not create file %s: %v", filename, err)
-	}
-	defer w.Close()
-	if _, err = WriteGo(w, pkg, tags, b); err != nil {
-		log.Fatalf("Error writing file %s: %v", filename, err)
-	}
-}
-
-// WriteGo prepends a standard file comment and package statement to the given
-// bytes, applies gofmt, and writes them to w.
-func WriteGo(w io.Writer, pkg, tags string, b []byte) (n int, err error) {
-	src := []byte(header)
-	if tags != "" {
-		src = append(src, fmt.Sprintf("// +build %s\n\n", tags)...)
-	}
-	src = append(src, fmt.Sprintf("package %s\n\n", pkg)...)
-	src = append(src, b...)
-	formatted, err := format.Source(src)
-	if err != nil {
-		// Print the generated code even in case of an error so that the
-		// returned error can be meaningfully interpreted.
-		n, _ = w.Write(src)
-		return n, err
-	}
-	return w.Write(formatted)
-}
-
-// Repackage rewrites a Go file from belonging to package main to belonging to
-// the given package.
-func Repackage(inFile, outFile, pkg string) {
-	src, err := ioutil.ReadFile(inFile)
-	if err != nil {
-		log.Fatalf("reading %s: %v", inFile, err)
-	}
-	const toDelete = "package main\n\n"
-	i := bytes.Index(src, []byte(toDelete))
-	if i < 0 {
-		log.Fatalf("Could not find %q in %s.", toDelete, inFile)
-	}
-	w := &bytes.Buffer{}
-	w.Write(src[i+len(toDelete):])
-	WriteGoFile(outFile, pkg, w.Bytes())
-}

+ 0 - 58
vendor/golang.org/x/text/internal/triegen/compact.go

@@ -1,58 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package triegen
-
-// This file defines Compacter and its implementations.
-
-import "io"
-
-// A Compacter generates an alternative, more space-efficient way to store a
-// trie value block. A trie value block holds all possible values for the last
-// byte of a UTF-8 encoded rune. Excluding ASCII characters, a trie value block
-// always has 64 values, as a UTF-8 encoding ends with a byte in [0x80, 0xC0).
-type Compacter interface {
-	// Size returns whether the Compacter could encode the given block as well
-	// as its size in case it can. len(v) is always 64.
-	Size(v []uint64) (sz int, ok bool)
-
-	// Store stores the block using the Compacter's compression method.
-	// It returns a handle with which the block can be retrieved.
-	// len(v) is always 64.
-	Store(v []uint64) uint32
-
-	// Print writes the data structures associated to the given store to w.
-	Print(w io.Writer) error
-
-	// Handler returns the name of a function that gets called during trie
-	// lookup for blocks generated by the Compacter. The function should be of
-	// the form func (n uint32, b byte) uint64, where n is the index returned by
-	// the Compacter's Store method and b is the last byte of the UTF-8
-	// encoding, where 0x80 <= b < 0xC0, for which to do the lookup in the
-	// block.
-	Handler() string
-}
-
-// simpleCompacter is the default Compacter used by builder. It implements a
-// normal trie block.
-type simpleCompacter builder
-
-func (b *simpleCompacter) Size([]uint64) (sz int, ok bool) {
-	return blockSize * b.ValueSize, true
-}
-
-func (b *simpleCompacter) Store(v []uint64) uint32 {
-	h := uint32(len(b.ValueBlocks) - blockOffset)
-	b.ValueBlocks = append(b.ValueBlocks, v)
-	return h
-}
-
-func (b *simpleCompacter) Print(io.Writer) error {
-	// Structures are printed in print.go.
-	return nil
-}
-
-func (b *simpleCompacter) Handler() string {
-	panic("Handler should be special-cased for this Compacter")
-}

+ 0 - 251
vendor/golang.org/x/text/internal/triegen/print.go

@@ -1,251 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package triegen
-
-import (
-	"bytes"
-	"fmt"
-	"io"
-	"strings"
-	"text/template"
-)
-
-// print writes all the data structures as well as the code necessary to use the
-// trie to w.
-func (b *builder) print(w io.Writer) error {
-	b.Stats.NValueEntries = len(b.ValueBlocks) * blockSize
-	b.Stats.NValueBytes = len(b.ValueBlocks) * blockSize * b.ValueSize
-	b.Stats.NIndexEntries = len(b.IndexBlocks) * blockSize
-	b.Stats.NIndexBytes = len(b.IndexBlocks) * blockSize * b.IndexSize
-	b.Stats.NHandleBytes = len(b.Trie) * 2 * b.IndexSize
-
-	// If we only have one root trie, all starter blocks are at position 0 and
-	// we can access the arrays directly.
-	if len(b.Trie) == 1 {
-		// At this point we cannot refer to the generated tables directly.
-		b.ASCIIBlock = b.Name + "Values"
-		b.StarterBlock = b.Name + "Index"
-	} else {
-		// Otherwise we need to have explicit starter indexes in the trie
-		// structure.
-		b.ASCIIBlock = "t.ascii"
-		b.StarterBlock = "t.utf8Start"
-	}
-
-	b.SourceType = "[]byte"
-	if err := lookupGen.Execute(w, b); err != nil {
-		return err
-	}
-
-	b.SourceType = "string"
-	if err := lookupGen.Execute(w, b); err != nil {
-		return err
-	}
-
-	if err := trieGen.Execute(w, b); err != nil {
-		return err
-	}
-
-	for _, c := range b.Compactions {
-		if err := c.c.Print(w); err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
-
-func printValues(n int, values []uint64) string {
-	w := &bytes.Buffer{}
-	boff := n * blockSize
-	fmt.Fprintf(w, "\t// Block %#x, offset %#x", n, boff)
-	var newline bool
-	for i, v := range values {
-		if i%6 == 0 {
-			newline = true
-		}
-		if v != 0 {
-			if newline {
-				fmt.Fprintf(w, "\n")
-				newline = false
-			}
-			fmt.Fprintf(w, "\t%#02x:%#04x, ", boff+i, v)
-		}
-	}
-	return w.String()
-}
-
-func printIndex(b *builder, nr int, n *node) string {
-	w := &bytes.Buffer{}
-	boff := nr * blockSize
-	fmt.Fprintf(w, "\t// Block %#x, offset %#x", nr, boff)
-	var newline bool
-	for i, c := range n.children {
-		if i%8 == 0 {
-			newline = true
-		}
-		if c != nil {
-			v := b.Compactions[c.index.compaction].Offset + uint32(c.index.index)
-			if v != 0 {
-				if newline {
-					fmt.Fprintf(w, "\n")
-					newline = false
-				}
-				fmt.Fprintf(w, "\t%#02x:%#02x, ", boff+i, v)
-			}
-		}
-	}
-	return w.String()
-}
-
-var (
-	trieGen = template.Must(template.New("trie").Funcs(template.FuncMap{
-		"printValues": printValues,
-		"printIndex":  printIndex,
-		"title":       strings.Title,
-		"dec":         func(x int) int { return x - 1 },
-		"psize": func(n int) string {
-			return fmt.Sprintf("%d bytes (%.2f KiB)", n, float64(n)/1024)
-		},
-	}).Parse(trieTemplate))
-	lookupGen = template.Must(template.New("lookup").Parse(lookupTemplate))
-)
-
-// TODO: consider the return type of lookup. It could be uint64, even if the
-// internal value type is smaller. We will have to verify this with the
-// performance of unicode/norm, which is very sensitive to such changes.
-const trieTemplate = `{{$b := .}}{{$multi := gt (len .Trie) 1}}
-// {{.Name}}Trie. Total size: {{psize .Size}}. Checksum: {{printf "%08x" .Checksum}}.
-type {{.Name}}Trie struct { {{if $multi}}
-	ascii []{{.ValueType}} // index for ASCII bytes
-	utf8Start  []{{.IndexType}} // index for UTF-8 bytes >= 0xC0
-{{end}}}
-
-func new{{title .Name}}Trie(i int) *{{.Name}}Trie { {{if $multi}}
-	h := {{.Name}}TrieHandles[i]
-	return &{{.Name}}Trie{ {{.Name}}Values[uint32(h.ascii)<<6:], {{.Name}}Index[uint32(h.multi)<<6:] }
-}
-
-type {{.Name}}TrieHandle struct {
-	ascii, multi {{.IndexType}}
-}
-
-// {{.Name}}TrieHandles: {{len .Trie}} handles, {{.Stats.NHandleBytes}} bytes
-var {{.Name}}TrieHandles = [{{len .Trie}}]{{.Name}}TrieHandle{
-{{range .Trie}}	{ {{.ASCIIIndex}}, {{.StarterIndex}} }, // {{printf "%08x" .Checksum}}: {{.Name}}
-{{end}}}{{else}}
-	return &{{.Name}}Trie{}
-}
-{{end}}
-// lookupValue determines the type of block n and looks up the value for b.
-func (t *{{.Name}}Trie) lookupValue(n uint32, b byte) {{.ValueType}}{{$last := dec (len .Compactions)}} {
-	switch { {{range $i, $c := .Compactions}}
-		{{if eq $i $last}}default{{else}}case n < {{$c.Cutoff}}{{end}}:{{if ne $i 0}}
-			n -= {{$c.Offset}}{{end}}
-			return {{print $b.ValueType}}({{$c.Handler}}){{end}}
-	}
-}
-
-// {{.Name}}Values: {{len .ValueBlocks}} blocks, {{.Stats.NValueEntries}} entries, {{.Stats.NValueBytes}} bytes
-// The third block is the zero block.
-var {{.Name}}Values = [{{.Stats.NValueEntries}}]{{.ValueType}} {
-{{range $i, $v := .ValueBlocks}}{{printValues $i $v}}
-{{end}}}
-
-// {{.Name}}Index: {{len .IndexBlocks}} blocks, {{.Stats.NIndexEntries}} entries, {{.Stats.NIndexBytes}} bytes
-// Block 0 is the zero block.
-var {{.Name}}Index = [{{.Stats.NIndexEntries}}]{{.IndexType}} {
-{{range $i, $v := .IndexBlocks}}{{printIndex $b $i $v}}
-{{end}}}
-`
-
-// TODO: consider allowing zero-length strings after evaluating performance with
-// unicode/norm.
-const lookupTemplate = `
-// lookup{{if eq .SourceType "string"}}String{{end}} returns the trie value for the first UTF-8 encoding in s and
-// the width in bytes of this encoding. The size will be 0 if s does not
-// hold enough bytes to complete the encoding. len(s) must be greater than 0.
-func (t *{{.Name}}Trie) lookup{{if eq .SourceType "string"}}String{{end}}(s {{.SourceType}}) (v {{.ValueType}}, sz int) {
-	c0 := s[0]
-	switch {
-	case c0 < 0x80: // is ASCII
-		return {{.ASCIIBlock}}[c0], 1
-	case c0 < 0xC2:
-		return 0, 1  // Illegal UTF-8: not a starter, not ASCII.
-	case c0 < 0xE0: // 2-byte UTF-8
-		if len(s) < 2 {
-			return 0, 0
-		}
-		i := {{.StarterBlock}}[c0]
-		c1 := s[1]
-		if c1 < 0x80 || 0xC0 <= c1 {
-			return 0, 1 // Illegal UTF-8: not a continuation byte.
-		}
-		return t.lookupValue(uint32(i), c1), 2
-	case c0 < 0xF0: // 3-byte UTF-8
-		if len(s) < 3 {
-			return 0, 0
-		}
-		i := {{.StarterBlock}}[c0]
-		c1 := s[1]
-		if c1 < 0x80 || 0xC0 <= c1 {
-			return 0, 1 // Illegal UTF-8: not a continuation byte.
-		}
-		o := uint32(i)<<6 + uint32(c1)
-		i = {{.Name}}Index[o]
-		c2 := s[2]
-		if c2 < 0x80 || 0xC0 <= c2 {
-			return 0, 2 // Illegal UTF-8: not a continuation byte.
-		}
-		return t.lookupValue(uint32(i), c2), 3
-	case c0 < 0xF8: // 4-byte UTF-8
-		if len(s) < 4 {
-			return 0, 0
-		}
-		i := {{.StarterBlock}}[c0]
-		c1 := s[1]
-		if c1 < 0x80 || 0xC0 <= c1 {
-			return 0, 1 // Illegal UTF-8: not a continuation byte.
-		}
-		o := uint32(i)<<6 + uint32(c1)
-		i = {{.Name}}Index[o]
-		c2 := s[2]
-		if c2 < 0x80 || 0xC0 <= c2 {
-			return 0, 2 // Illegal UTF-8: not a continuation byte.
-		}
-		o = uint32(i)<<6 + uint32(c2)
-		i = {{.Name}}Index[o]
-		c3 := s[3]
-		if c3 < 0x80 || 0xC0 <= c3 {
-			return 0, 3 // Illegal UTF-8: not a continuation byte.
-		}
-		return t.lookupValue(uint32(i), c3), 4
-	}
-	// Illegal rune
-	return 0, 1
-}
-
-// lookup{{if eq .SourceType "string"}}String{{end}}Unsafe returns the trie value for the first UTF-8 encoding in s.
-// s must start with a full and valid UTF-8 encoded rune.
-func (t *{{.Name}}Trie) lookup{{if eq .SourceType "string"}}String{{end}}Unsafe(s {{.SourceType}}) {{.ValueType}} {
-	c0 := s[0]
-	if c0 < 0x80 { // is ASCII
-		return {{.ASCIIBlock}}[c0]
-	}
-	i := {{.StarterBlock}}[c0]
-	if c0 < 0xE0 { // 2-byte UTF-8
-		return t.lookupValue(uint32(i), s[1])
-	}
-	i = {{.Name}}Index[uint32(i)<<6+uint32(s[1])]
-	if c0 < 0xF0 { // 3-byte UTF-8
-		return t.lookupValue(uint32(i), s[2])
-	}
-	i = {{.Name}}Index[uint32(i)<<6+uint32(s[2])]
-	if c0 < 0xF8 { // 4-byte UTF-8
-		return t.lookupValue(uint32(i), s[3])
-	}
-	return 0
-}
-`

+ 0 - 494
vendor/golang.org/x/text/internal/triegen/triegen.go

@@ -1,494 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package triegen implements a code generator for a trie for associating
-// unsigned integer values with UTF-8 encoded runes.
-//
-// Many of the go.text packages use tries for storing per-rune information.  A
-// trie is especially useful if many of the runes have the same value. If this
-// is the case, many blocks can be expected to be shared allowing for
-// information on many runes to be stored in little space.
-//
-// As most of the lookups are done directly on []byte slices, the tries use the
-// UTF-8 bytes directly for the lookup. This saves a conversion from UTF-8 to
-// runes and contributes a little bit to better performance. It also naturally
-// provides a fast path for ASCII.
-//
-// Space is also an issue. There are many code points defined in Unicode and as
-// a result tables can get quite large. So every byte counts. The triegen
-// package automatically chooses the smallest integer values to represent the
-// tables. Compacters allow further compression of the trie by allowing for
-// alternative representations of individual trie blocks.
-//
-// triegen allows generating multiple tries as a single structure. This is
-// useful when, for example, one wants to generate tries for several languages
-// that have a lot of values in common. Some existing libraries for
-// internationalization store all per-language data as a dynamically loadable
-// chunk. The go.text packages are designed with the assumption that the user
-// typically wants to compile in support for all supported languages, in line
-// with the approach common to Go to create a single standalone binary. The
-// multi-root trie approach can give significant storage savings in this
-// scenario.
-//
-// triegen generates both tables and code. The code is optimized to use the
-// automatically chosen data types. The following code is generated for a Trie
-// or multiple Tries named "foo":
-//	- type fooTrie
-//		The trie type.
-//
-//	- func newFooTrie(x int) *fooTrie
-//		Trie constructor, where x is the index of the trie passed to Gen.
-//
-//	- func (t *fooTrie) lookup(s []byte) (v uintX, sz int)
-//		The lookup method, where uintX is automatically chosen.
-//
-//	- func lookupString, lookupUnsafe and lookupStringUnsafe
-//		Variants of the above.
-//
-//	- var fooValues and fooIndex and any tables generated by Compacters.
-//		The core trie data.
-//
-//	- var fooTrieHandles
-//		Indexes of starter blocks in case of multiple trie roots.
-//
-// It is recommended that users test the generated trie by checking the returned
-// value for every rune. Such exhaustive tests are possible as the the number of
-// runes in Unicode is limited.
-package triegen // import "golang.org/x/text/internal/triegen"
-
-// TODO: Arguably, the internally optimized data types would not have to be
-// exposed in the generated API. We could also investigate not generating the
-// code, but using it through a package. We would have to investigate the impact
-// on performance of making such change, though. For packages like unicode/norm,
-// small changes like this could tank performance.
-
-import (
-	"encoding/binary"
-	"fmt"
-	"hash/crc64"
-	"io"
-	"log"
-	"unicode/utf8"
-)
-
-// builder builds a set of tries for associating values with runes. The set of
-// tries can share common index and value blocks.
-type builder struct {
-	Name string
-
-	// ValueType is the type of the trie values looked up.
-	ValueType string
-
-	// ValueSize is the byte size of the ValueType.
-	ValueSize int
-
-	// IndexType is the type of trie index values used for all UTF-8 bytes of
-	// a rune except the last one.
-	IndexType string
-
-	// IndexSize is the byte size of the IndexType.
-	IndexSize int
-
-	// SourceType is used when generating the lookup functions. If the user
-	// requests StringSupport, all lookup functions will be generated for
-	// string input as well.
-	SourceType string
-
-	Trie []*Trie
-
-	IndexBlocks []*node
-	ValueBlocks [][]uint64
-	Compactions []compaction
-	Checksum    uint64
-
-	ASCIIBlock   string
-	StarterBlock string
-
-	indexBlockIdx map[uint64]int
-	valueBlockIdx map[uint64]nodeIndex
-	asciiBlockIdx map[uint64]int
-
-	// Stats are used to fill out the template.
-	Stats struct {
-		NValueEntries int
-		NValueBytes   int
-		NIndexEntries int
-		NIndexBytes   int
-		NHandleBytes  int
-	}
-
-	err error
-}
-
-// A nodeIndex encodes the index of a node, which is defined by the compaction
-// which stores it and an index within the compaction. For internal nodes, the
-// compaction is always 0.
-type nodeIndex struct {
-	compaction int
-	index      int
-}
-
-// compaction keeps track of stats used for the compaction.
-type compaction struct {
-	c         Compacter
-	blocks    []*node
-	maxHandle uint32
-	totalSize int
-
-	// Used by template-based generator and thus exported.
-	Cutoff  uint32
-	Offset  uint32
-	Handler string
-}
-
-func (b *builder) setError(err error) {
-	if b.err == nil {
-		b.err = err
-	}
-}
-
-// An Option can be passed to Gen.
-type Option func(b *builder) error
-
-// Compact configures the trie generator to use the given Compacter.
-func Compact(c Compacter) Option {
-	return func(b *builder) error {
-		b.Compactions = append(b.Compactions, compaction{
-			c:       c,
-			Handler: c.Handler() + "(n, b)"})
-		return nil
-	}
-}
-
-// Gen writes Go code for a shared trie lookup structure to w for the given
-// Tries. The generated trie type will be called nameTrie. newNameTrie(x) will
-// return the *nameTrie for tries[x]. A value can be looked up by using one of
-// the various lookup methods defined on nameTrie. It returns the table size of
-// the generated trie.
-func Gen(w io.Writer, name string, tries []*Trie, opts ...Option) (sz int, err error) {
-	// The index contains two dummy blocks, followed by the zero block. The zero
-	// block is at offset 0x80, so that the offset for the zero block for
-	// continuation bytes is 0.
-	b := &builder{
-		Name:        name,
-		Trie:        tries,
-		IndexBlocks: []*node{{}, {}, {}},
-		Compactions: []compaction{{
-			Handler: name + "Values[n<<6+uint32(b)]",
-		}},
-		// The 0 key in indexBlockIdx and valueBlockIdx is the hash of the zero
-		// block.
-		indexBlockIdx: map[uint64]int{0: 0},
-		valueBlockIdx: map[uint64]nodeIndex{0: {}},
-		asciiBlockIdx: map[uint64]int{},
-	}
-	b.Compactions[0].c = (*simpleCompacter)(b)
-
-	for _, f := range opts {
-		if err := f(b); err != nil {
-			return 0, err
-		}
-	}
-	b.build()
-	if b.err != nil {
-		return 0, b.err
-	}
-	if err = b.print(w); err != nil {
-		return 0, err
-	}
-	return b.Size(), nil
-}
-
-// A Trie represents a single root node of a trie. A builder may build several
-// overlapping tries at once.
-type Trie struct {
-	root *node
-
-	hiddenTrie
-}
-
-// hiddenTrie contains values we want to be visible to the template generator,
-// but hidden from the API documentation.
-type hiddenTrie struct {
-	Name         string
-	Checksum     uint64
-	ASCIIIndex   int
-	StarterIndex int
-}
-
-// NewTrie returns a new trie root.
-func NewTrie(name string) *Trie {
-	return &Trie{
-		&node{
-			children: make([]*node, blockSize),
-			values:   make([]uint64, utf8.RuneSelf),
-		},
-		hiddenTrie{Name: name},
-	}
-}
-
-// Gen is a convenience wrapper around the Gen func passing t as the only trie
-// and uses the name passed to NewTrie. It returns the size of the generated
-// tables.
-func (t *Trie) Gen(w io.Writer, opts ...Option) (sz int, err error) {
-	return Gen(w, t.Name, []*Trie{t}, opts...)
-}
-
-// node is a node of the intermediate trie structure.
-type node struct {
-	// children holds this node's children. It is always of length 64.
-	// A child node may be nil.
-	children []*node
-
-	// values contains the values of this node. If it is non-nil, this node is
-	// either a root or leaf node:
-	// For root nodes, len(values) == 128 and it maps the bytes in [0x00, 0x7F].
-	// For leaf nodes, len(values) ==  64 and it maps the bytes in [0x80, 0xBF].
-	values []uint64
-
-	index nodeIndex
-}
-
-// Insert associates value with the given rune. Insert will panic if a non-zero
-// value is passed for an invalid rune.
-func (t *Trie) Insert(r rune, value uint64) {
-	if value == 0 {
-		return
-	}
-	s := string(r)
-	if []rune(s)[0] != r && value != 0 {
-		// Note: The UCD tables will always assign what amounts to a zero value
-		// to a surrogate. Allowing a zero value for an illegal rune allows
-		// users to iterate over [0..MaxRune] without having to explicitly
-		// exclude surrogates, which would be tedious.
-		panic(fmt.Sprintf("triegen: non-zero value for invalid rune %U", r))
-	}
-	if len(s) == 1 {
-		// It is a root node value (ASCII).
-		t.root.values[s[0]] = value
-		return
-	}
-
-	n := t.root
-	for ; len(s) > 1; s = s[1:] {
-		if n.children == nil {
-			n.children = make([]*node, blockSize)
-		}
-		p := s[0] % blockSize
-		c := n.children[p]
-		if c == nil {
-			c = &node{}
-			n.children[p] = c
-		}
-		if len(s) > 2 && c.values != nil {
-			log.Fatalf("triegen: insert(%U): found internal node with values", r)
-		}
-		n = c
-	}
-	if n.values == nil {
-		n.values = make([]uint64, blockSize)
-	}
-	if n.children != nil {
-		log.Fatalf("triegen: insert(%U): found leaf node that also has child nodes", r)
-	}
-	n.values[s[0]-0x80] = value
-}
-
-// Size returns the number of bytes the generated trie will take to store. It
-// needs to be exported as it is used in the templates.
-func (b *builder) Size() int {
-	// Index blocks.
-	sz := len(b.IndexBlocks) * blockSize * b.IndexSize
-
-	// Skip the first compaction, which represents the normal value blocks, as
-	// its totalSize does not account for the ASCII blocks, which are managed
-	// separately.
-	sz += len(b.ValueBlocks) * blockSize * b.ValueSize
-	for _, c := range b.Compactions[1:] {
-		sz += c.totalSize
-	}
-
-	// TODO: this computation does not account for the fixed overhead of a using
-	// a compaction, either code or data. As for data, though, the typical
-	// overhead of data is in the order of bytes (2 bytes for cases). Further,
-	// the savings of using a compaction should anyway be substantial for it to
-	// be worth it.
-
-	// For multi-root tries, we also need to account for the handles.
-	if len(b.Trie) > 1 {
-		sz += 2 * b.IndexSize * len(b.Trie)
-	}
-	return sz
-}
-
-func (b *builder) build() {
-	// Compute the sizes of the values.
-	var vmax uint64
-	for _, t := range b.Trie {
-		vmax = maxValue(t.root, vmax)
-	}
-	b.ValueType, b.ValueSize = getIntType(vmax)
-
-	// Compute all block allocations.
-	// TODO: first compute the ASCII blocks for all tries and then the other
-	// nodes. ASCII blocks are more restricted in placement, as they require two
-	// blocks to be placed consecutively. Processing them first may improve
-	// sharing (at least one zero block can be expected to be saved.)
-	for _, t := range b.Trie {
-		b.Checksum += b.buildTrie(t)
-	}
-
-	// Compute the offsets for all the Compacters.
-	offset := uint32(0)
-	for i := range b.Compactions {
-		c := &b.Compactions[i]
-		c.Offset = offset
-		offset += c.maxHandle + 1
-		c.Cutoff = offset
-	}
-
-	// Compute the sizes of indexes.
-	// TODO: different byte positions could have different sizes. So far we have
-	// not found a case where this is beneficial.
-	imax := uint64(b.Compactions[len(b.Compactions)-1].Cutoff)
-	for _, ib := range b.IndexBlocks {
-		if x := uint64(ib.index.index); x > imax {
-			imax = x
-		}
-	}
-	b.IndexType, b.IndexSize = getIntType(imax)
-}
-
-func maxValue(n *node, max uint64) uint64 {
-	if n == nil {
-		return max
-	}
-	for _, c := range n.children {
-		max = maxValue(c, max)
-	}
-	for _, v := range n.values {
-		if max < v {
-			max = v
-		}
-	}
-	return max
-}
-
-func getIntType(v uint64) (string, int) {
-	switch {
-	case v < 1<<8:
-		return "uint8", 1
-	case v < 1<<16:
-		return "uint16", 2
-	case v < 1<<32:
-		return "uint32", 4
-	}
-	return "uint64", 8
-}
-
-const (
-	blockSize = 64
-
-	// Subtract two blocks to offset 0x80, the first continuation byte.
-	blockOffset = 2
-
-	// Subtract three blocks to offset 0xC0, the first non-ASCII starter.
-	rootBlockOffset = 3
-)
-
-var crcTable = crc64.MakeTable(crc64.ISO)
-
-func (b *builder) buildTrie(t *Trie) uint64 {
-	n := t.root
-
-	// Get the ASCII offset. For the first trie, the ASCII block will be at
-	// position 0.
-	hasher := crc64.New(crcTable)
-	binary.Write(hasher, binary.BigEndian, n.values)
-	hash := hasher.Sum64()
-
-	v, ok := b.asciiBlockIdx[hash]
-	if !ok {
-		v = len(b.ValueBlocks)
-		b.asciiBlockIdx[hash] = v
-
-		b.ValueBlocks = append(b.ValueBlocks, n.values[:blockSize], n.values[blockSize:])
-		if v == 0 {
-			// Add the zero block at position 2 so that it will be assigned a
-			// zero reference in the lookup blocks.
-			// TODO: always do this? This would allow us to remove a check from
-			// the trie lookup, but at the expense of extra space. Analyze
-			// performance for unicode/norm.
-			b.ValueBlocks = append(b.ValueBlocks, make([]uint64, blockSize))
-		}
-	}
-	t.ASCIIIndex = v
-
-	// Compute remaining offsets.
-	t.Checksum = b.computeOffsets(n, true)
-	// We already subtracted the normal blockOffset from the index. Subtract the
-	// difference for starter bytes.
-	t.StarterIndex = n.index.index - (rootBlockOffset - blockOffset)
-	return t.Checksum
-}
-
-func (b *builder) computeOffsets(n *node, root bool) uint64 {
-	// For the first trie, the root lookup block will be at position 3, which is
-	// the offset for UTF-8 non-ASCII starter bytes.
-	first := len(b.IndexBlocks) == rootBlockOffset
-	if first {
-		b.IndexBlocks = append(b.IndexBlocks, n)
-	}
-
-	// We special-case the cases where all values recursively are 0. This allows
-	// for the use of a zero block to which all such values can be directed.
-	hash := uint64(0)
-	if n.children != nil || n.values != nil {
-		hasher := crc64.New(crcTable)
-		for _, c := range n.children {
-			var v uint64
-			if c != nil {
-				v = b.computeOffsets(c, false)
-			}
-			binary.Write(hasher, binary.BigEndian, v)
-		}
-		binary.Write(hasher, binary.BigEndian, n.values)
-		hash = hasher.Sum64()
-	}
-
-	if first {
-		b.indexBlockIdx[hash] = rootBlockOffset - blockOffset
-	}
-
-	// Compacters don't apply to internal nodes.
-	if n.children != nil {
-		v, ok := b.indexBlockIdx[hash]
-		if !ok {
-			v = len(b.IndexBlocks) - blockOffset
-			b.IndexBlocks = append(b.IndexBlocks, n)
-			b.indexBlockIdx[hash] = v
-		}
-		n.index = nodeIndex{0, v}
-	} else {
-		h, ok := b.valueBlockIdx[hash]
-		if !ok {
-			bestI, bestSize := 0, blockSize*b.ValueSize
-			for i, c := range b.Compactions[1:] {
-				if sz, ok := c.c.Size(n.values); ok && bestSize > sz {
-					bestI, bestSize = i+1, sz
-				}
-			}
-			c := &b.Compactions[bestI]
-			c.totalSize += bestSize
-			v := c.c.Store(n.values)
-			if c.maxHandle < v {
-				c.maxHandle = v
-			}
-			h = nodeIndex{bestI, int(v)}
-			b.valueBlockIdx[hash] = h
-		}
-		n.index = h
-	}
-	return hash
-}

+ 0 - 371
vendor/golang.org/x/text/internal/ucd/ucd.go

@@ -1,371 +0,0 @@
-// Copyright 2014 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// Package ucd provides a parser for Unicode Character Database files, the
-// format of which is defined in http://www.unicode.org/reports/tr44/. See
-// http://www.unicode.org/Public/UCD/latest/ucd/ for example files.
-//
-// It currently does not support substitutions of missing fields.
-package ucd // import "golang.org/x/text/internal/ucd"
-
-import (
-	"bufio"
-	"errors"
-	"fmt"
-	"io"
-	"log"
-	"regexp"
-	"strconv"
-	"strings"
-)
-
-// UnicodeData.txt fields.
-const (
-	CodePoint = iota
-	Name
-	GeneralCategory
-	CanonicalCombiningClass
-	BidiClass
-	DecompMapping
-	DecimalValue
-	DigitValue
-	NumericValue
-	BidiMirrored
-	Unicode1Name
-	ISOComment
-	SimpleUppercaseMapping
-	SimpleLowercaseMapping
-	SimpleTitlecaseMapping
-)
-
-// Parse calls f for each entry in the given reader of a UCD file. It will close
-// the reader upon return. It will call log.Fatal if any error occurred.
-//
-// This implements the most common usage pattern of using Parser.
-func Parse(r io.ReadCloser, f func(p *Parser)) {
-	defer r.Close()
-
-	p := New(r)
-	for p.Next() {
-		f(p)
-	}
-	if err := p.Err(); err != nil {
-		r.Close() // os.Exit will cause defers not to be called.
-		log.Fatal(err)
-	}
-}
-
-// An Option is used to configure a Parser.
-type Option func(p *Parser)
-
-func keepRanges(p *Parser) {
-	p.keepRanges = true
-}
-
-var (
-	// KeepRanges prevents the expansion of ranges. The raw ranges can be
-	// obtained by calling Range(0) on the parser.
-	KeepRanges Option = keepRanges
-)
-
-// The Part option register a handler for lines starting with a '@'. The text
-// after a '@' is available as the first field. Comments are handled as usual.
-func Part(f func(p *Parser)) Option {
-	return func(p *Parser) {
-		p.partHandler = f
-	}
-}
-
-// The CommentHandler option passes comments that are on a line by itself to
-// a given handler.
-func CommentHandler(f func(s string)) Option {
-	return func(p *Parser) {
-		p.commentHandler = f
-	}
-}
-
-// A Parser parses Unicode Character Database (UCD) files.
-type Parser struct {
-	scanner *bufio.Scanner
-
-	keepRanges bool // Don't expand rune ranges in field 0.
-
-	err     error
-	comment string
-	field   []string
-	// parsedRange is needed in case Range(0) is called more than once for one
-	// field. In some cases this requires scanning ahead.
-	line                 int
-	parsedRange          bool
-	rangeStart, rangeEnd rune
-
-	partHandler    func(p *Parser)
-	commentHandler func(s string)
-}
-
-func (p *Parser) setError(err error, msg string) {
-	if p.err == nil && err != nil {
-		if msg == "" {
-			p.err = fmt.Errorf("ucd:line:%d: %v", p.line, err)
-		} else {
-			p.err = fmt.Errorf("ucd:line:%d:%s: %v", p.line, msg, err)
-		}
-	}
-}
-
-func (p *Parser) getField(i int) string {
-	if i >= len(p.field) {
-		return ""
-	}
-	return p.field[i]
-}
-
-// Err returns a non-nil error if any error occurred during parsing.
-func (p *Parser) Err() error {
-	return p.err
-}
-
-// New returns a Parser for the given Reader.
-func New(r io.Reader, o ...Option) *Parser {
-	p := &Parser{
-		scanner: bufio.NewScanner(r),
-	}
-	for _, f := range o {
-		f(p)
-	}
-	return p
-}
-
-// Next parses the next line in the file. It returns true if a line was parsed
-// and false if it reached the end of the file.
-func (p *Parser) Next() bool {
-	if !p.keepRanges && p.rangeStart < p.rangeEnd {
-		p.rangeStart++
-		return true
-	}
-	p.comment = ""
-	p.field = p.field[:0]
-	p.parsedRange = false
-
-	for p.scanner.Scan() && p.err == nil {
-		p.line++
-		s := p.scanner.Text()
-		if s == "" {
-			continue
-		}
-		if s[0] == '#' {
-			if p.commentHandler != nil {
-				p.commentHandler(strings.TrimSpace(s[1:]))
-			}
-			continue
-		}
-
-		// Parse line
-		if i := strings.IndexByte(s, '#'); i != -1 {
-			p.comment = strings.TrimSpace(s[i+1:])
-			s = s[:i]
-		}
-		if s[0] == '@' {
-			if p.partHandler != nil {
-				p.field = append(p.field, strings.TrimSpace(s[1:]))
-				p.partHandler(p)
-				p.field = p.field[:0]
-			}
-			p.comment = ""
-			continue
-		}
-		for {
-			i := strings.IndexByte(s, ';')
-			if i == -1 {
-				p.field = append(p.field, strings.TrimSpace(s))
-				break
-			}
-			p.field = append(p.field, strings.TrimSpace(s[:i]))
-			s = s[i+1:]
-		}
-		if !p.keepRanges {
-			p.rangeStart, p.rangeEnd = p.getRange(0)
-		}
-		return true
-	}
-	p.setError(p.scanner.Err(), "scanner failed")
-	return false
-}
-
-func parseRune(b string) (rune, error) {
-	if len(b) > 2 && b[0] == 'U' && b[1] == '+' {
-		b = b[2:]
-	}
-	x, err := strconv.ParseUint(b, 16, 32)
-	return rune(x), err
-}
-
-func (p *Parser) parseRune(s string) rune {
-	x, err := parseRune(s)
-	p.setError(err, "failed to parse rune")
-	return x
-}
-
-// Rune parses and returns field i as a rune.
-func (p *Parser) Rune(i int) rune {
-	if i > 0 || p.keepRanges {
-		return p.parseRune(p.getField(i))
-	}
-	return p.rangeStart
-}
-
-// Runes interprets and returns field i as a sequence of runes.
-func (p *Parser) Runes(i int) (runes []rune) {
-	add := func(s string) {
-		if s = strings.TrimSpace(s); len(s) > 0 {
-			runes = append(runes, p.parseRune(s))
-		}
-	}
-	for b := p.getField(i); ; {
-		i := strings.IndexByte(b, ' ')
-		if i == -1 {
-			add(b)
-			break
-		}
-		add(b[:i])
-		b = b[i+1:]
-	}
-	return
-}
-
-var (
-	errIncorrectLegacyRange = errors.New("ucd: unmatched <* First>")
-
-	// reRange matches one line of a legacy rune range.
-	reRange = regexp.MustCompile("^([0-9A-F]*);<([^,]*), ([^>]*)>(.*)$")
-)
-
-// Range parses and returns field i as a rune range. A range is inclusive at
-// both ends. If the field only has one rune, first and last will be identical.
-// It supports the legacy format for ranges used in UnicodeData.txt.
-func (p *Parser) Range(i int) (first, last rune) {
-	if !p.keepRanges {
-		return p.rangeStart, p.rangeStart
-	}
-	return p.getRange(i)
-}
-
-func (p *Parser) getRange(i int) (first, last rune) {
-	b := p.getField(i)
-	if k := strings.Index(b, ".."); k != -1 {
-		return p.parseRune(b[:k]), p.parseRune(b[k+2:])
-	}
-	// The first field may not be a rune, in which case we may ignore any error
-	// and set the range as 0..0.
-	x, err := parseRune(b)
-	if err != nil {
-		// Disable range parsing henceforth. This ensures that an error will be
-		// returned if the user subsequently will try to parse this field as
-		// a Rune.
-		p.keepRanges = true
-	}
-	// Special case for UnicodeData that was retained for backwards compatibility.
-	if i == 0 && len(p.field) > 1 && strings.HasSuffix(p.field[1], "First>") {
-		if p.parsedRange {
-			return p.rangeStart, p.rangeEnd
-		}
-		mf := reRange.FindStringSubmatch(p.scanner.Text())
-		p.line++
-		if mf == nil || !p.scanner.Scan() {
-			p.setError(errIncorrectLegacyRange, "")
-			return x, x
-		}
-		// Using Bytes would be more efficient here, but Text is a lot easier
-		// and this is not a frequent case.
-		ml := reRange.FindStringSubmatch(p.scanner.Text())
-		if ml == nil || mf[2] != ml[2] || ml[3] != "Last" || mf[4] != ml[4] {
-			p.setError(errIncorrectLegacyRange, "")
-			return x, x
-		}
-		p.rangeStart, p.rangeEnd = x, p.parseRune(p.scanner.Text()[:len(ml[1])])
-		p.parsedRange = true
-		return p.rangeStart, p.rangeEnd
-	}
-	return x, x
-}
-
-// bools recognizes all valid UCD boolean values.
-var bools = map[string]bool{
-	"":      false,
-	"N":     false,
-	"No":    false,
-	"F":     false,
-	"False": false,
-	"Y":     true,
-	"Yes":   true,
-	"T":     true,
-	"True":  true,
-}
-
-// Bool parses and returns field i as a boolean value.
-func (p *Parser) Bool(i int) bool {
-	f := p.getField(i)
-	for s, v := range bools {
-		if f == s {
-			return v
-		}
-	}
-	p.setError(strconv.ErrSyntax, "error parsing bool")
-	return false
-}
-
-// Int parses and returns field i as an integer value.
-func (p *Parser) Int(i int) int {
-	x, err := strconv.ParseInt(string(p.getField(i)), 10, 64)
-	p.setError(err, "error parsing int")
-	return int(x)
-}
-
-// Uint parses and returns field i as an unsigned integer value.
-func (p *Parser) Uint(i int) uint {
-	x, err := strconv.ParseUint(string(p.getField(i)), 10, 64)
-	p.setError(err, "error parsing uint")
-	return uint(x)
-}
-
-// Float parses and returns field i as a decimal value.
-func (p *Parser) Float(i int) float64 {
-	x, err := strconv.ParseFloat(string(p.getField(i)), 64)
-	p.setError(err, "error parsing float")
-	return x
-}
-
-// String parses and returns field i as a string value.
-func (p *Parser) String(i int) string {
-	return string(p.getField(i))
-}
-
-// Strings parses and returns field i as a space-separated list of strings.
-func (p *Parser) Strings(i int) []string {
-	ss := strings.Split(string(p.getField(i)), " ")
-	for i, s := range ss {
-		ss[i] = strings.TrimSpace(s)
-	}
-	return ss
-}
-
-// Comment returns the comments for the current line.
-func (p *Parser) Comment() string {
-	return string(p.comment)
-}
-
-var errUndefinedEnum = errors.New("ucd: undefined enum value")
-
-// Enum interprets and returns field i as a value that must be one of the values
-// in enum.
-func (p *Parser) Enum(i int, enum ...string) string {
-	f := p.getField(i)
-	for _, s := range enum {
-		if f == s {
-			return s
-		}
-	}
-	p.setError(errUndefinedEnum, "error parsing enum")
-	return ""
-}

+ 0 - 105
vendor/golang.org/x/text/unicode/cldr/base.go

@@ -1,105 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package cldr
-
-import (
-	"encoding/xml"
-	"regexp"
-	"strconv"
-)
-
-// Elem is implemented by every XML element.
-type Elem interface {
-	setEnclosing(Elem)
-	setName(string)
-	enclosing() Elem
-
-	GetCommon() *Common
-}
-
-type hidden struct {
-	CharData string `xml:",chardata"`
-	Alias    *struct {
-		Common
-		Source string `xml:"source,attr"`
-		Path   string `xml:"path,attr"`
-	} `xml:"alias"`
-	Def *struct {
-		Common
-		Choice string `xml:"choice,attr,omitempty"`
-		Type   string `xml:"type,attr,omitempty"`
-	} `xml:"default"`
-}
-
-// Common holds several of the most common attributes and sub elements
-// of an XML element.
-type Common struct {
-	XMLName         xml.Name
-	name            string
-	enclElem        Elem
-	Type            string `xml:"type,attr,omitempty"`
-	Reference       string `xml:"reference,attr,omitempty"`
-	Alt             string `xml:"alt,attr,omitempty"`
-	ValidSubLocales string `xml:"validSubLocales,attr,omitempty"`
-	Draft           string `xml:"draft,attr,omitempty"`
-	hidden
-}
-
-// Default returns the default type to select from the enclosed list
-// or "" if no default value is specified.
-func (e *Common) Default() string {
-	if e.Def == nil {
-		return ""
-	}
-	if e.Def.Choice != "" {
-		return e.Def.Choice
-	} else if e.Def.Type != "" {
-		// Type is still used by the default element in collation.
-		return e.Def.Type
-	}
-	return ""
-}
-
-// Element returns the XML element name.
-func (e *Common) Element() string {
-	return e.name
-}
-
-// GetCommon returns e. It is provided such that Common implements Elem.
-func (e *Common) GetCommon() *Common {
-	return e
-}
-
-// Data returns the character data accumulated for this element.
-func (e *Common) Data() string {
-	e.CharData = charRe.ReplaceAllStringFunc(e.CharData, replaceUnicode)
-	return e.CharData
-}
-
-func (e *Common) setName(s string) {
-	e.name = s
-}
-
-func (e *Common) enclosing() Elem {
-	return e.enclElem
-}
-
-func (e *Common) setEnclosing(en Elem) {
-	e.enclElem = en
-}
-
-// Escape characters that can be escaped without further escaping the string.
-var charRe = regexp.MustCompile(`&#x[0-9a-fA-F]*;|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|\\x[0-9a-fA-F]{2}|\\[0-7]{3}|\\[abtnvfr]`)
-
-// replaceUnicode converts hexadecimal Unicode codepoint notations to a one-rune string.
-// It assumes the input string is correctly formatted.
-func replaceUnicode(s string) string {
-	if s[1] == '#' {
-		r, _ := strconv.ParseInt(s[3:len(s)-1], 16, 32)
-		return string(r)
-	}
-	r, _, _, _ := strconv.UnquoteChar(s, 0)
-	return string(r)
-}

+ 0 - 130
vendor/golang.org/x/text/unicode/cldr/cldr.go

@@ -1,130 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-//go:generate go run makexml.go -output xml.go
-
-// Package cldr provides a parser for LDML and related XML formats.
-// This package is intended to be used by the table generation tools
-// for the various internationalization-related packages.
-// As the XML types are generated from the CLDR DTD, and as the CLDR standard
-// is periodically amended, this package may change considerably over time.
-// This mostly means that data may appear and disappear between versions.
-// That is, old code should keep compiling for newer versions, but data
-// may have moved or changed.
-// CLDR version 22 is the first version supported by this package.
-// Older versions may not work.
-package cldr // import "golang.org/x/text/unicode/cldr"
-
-import (
-	"fmt"
-	"sort"
-)
-
-// CLDR provides access to parsed data of the Unicode Common Locale Data Repository.
-type CLDR struct {
-	parent   map[string][]string
-	locale   map[string]*LDML
-	resolved map[string]*LDML
-	bcp47    *LDMLBCP47
-	supp     *SupplementalData
-}
-
-func makeCLDR() *CLDR {
-	return &CLDR{
-		parent:   make(map[string][]string),
-		locale:   make(map[string]*LDML),
-		resolved: make(map[string]*LDML),
-		bcp47:    &LDMLBCP47{},
-		supp:     &SupplementalData{},
-	}
-}
-
-// BCP47 returns the parsed BCP47 LDML data. If no such data was parsed, nil is returned.
-func (cldr *CLDR) BCP47() *LDMLBCP47 {
-	return nil
-}
-
-// Draft indicates the draft level of an element.
-type Draft int
-
-const (
-	Approved Draft = iota
-	Contributed
-	Provisional
-	Unconfirmed
-)
-
-var drafts = []string{"unconfirmed", "provisional", "contributed", "approved", ""}
-
-// ParseDraft returns the Draft value corresponding to the given string. The
-// empty string corresponds to Approved.
-func ParseDraft(level string) (Draft, error) {
-	if level == "" {
-		return Approved, nil
-	}
-	for i, s := range drafts {
-		if level == s {
-			return Unconfirmed - Draft(i), nil
-		}
-	}
-	return Approved, fmt.Errorf("cldr: unknown draft level %q", level)
-}
-
-func (d Draft) String() string {
-	return drafts[len(drafts)-1-int(d)]
-}
-
-// SetDraftLevel sets which draft levels to include in the evaluated LDML.
-// Any draft element for which the draft level is higher than lev will be excluded.
-// If multiple draft levels are available for a single element, the one with the
-// lowest draft level will be selected, unless preferDraft is true, in which case
-// the highest draft will be chosen.
-// It is assumed that the underlying LDML is canonicalized.
-func (cldr *CLDR) SetDraftLevel(lev Draft, preferDraft bool) {
-	// TODO: implement
-	cldr.resolved = make(map[string]*LDML)
-}
-
-// RawLDML returns the LDML XML for id in unresolved form.
-// id must be one of the strings returned by Locales.
-func (cldr *CLDR) RawLDML(loc string) *LDML {
-	return cldr.locale[loc]
-}
-
-// LDML returns the fully resolved LDML XML for loc, which must be one of
-// the strings returned by Locales.
-func (cldr *CLDR) LDML(loc string) (*LDML, error) {
-	return cldr.resolve(loc)
-}
-
-// Supplemental returns the parsed supplemental data. If no such data was parsed,
-// nil is returned.
-func (cldr *CLDR) Supplemental() *SupplementalData {
-	return cldr.supp
-}
-
-// Locales returns the locales for which there exist files.
-// Valid sublocales for which there is no file are not included.
-// The root locale is always sorted first.
-func (cldr *CLDR) Locales() []string {
-	loc := []string{"root"}
-	hasRoot := false
-	for l, _ := range cldr.locale {
-		if l == "root" {
-			hasRoot = true
-			continue
-		}
-		loc = append(loc, l)
-	}
-	sort.Strings(loc[1:])
-	if !hasRoot {
-		return loc[1:]
-	}
-	return loc
-}
-
-// Get fills in the fields of x based on the XPath path.
-func Get(e Elem, path string) (res Elem, err error) {
-	return walkXPath(e, path)
-}

+ 0 - 359
vendor/golang.org/x/text/unicode/cldr/collate.go

@@ -1,359 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package cldr
-
-import (
-	"bufio"
-	"encoding/xml"
-	"errors"
-	"fmt"
-	"strconv"
-	"strings"
-	"unicode"
-	"unicode/utf8"
-)
-
-// RuleProcessor can be passed to Collator's Process method, which
-// parses the rules and calls the respective method for each rule found.
-type RuleProcessor interface {
-	Reset(anchor string, before int) error
-	Insert(level int, str, context, extend string) error
-	Index(id string)
-}
-
-const (
-	// cldrIndex is a Unicode-reserved sentinel value used to mark the start
-	// of a grouping within an index.
-	// We ignore any rule that starts with this rune.
-	// See http://unicode.org/reports/tr35/#Collation_Elements for details.
-	cldrIndex = "\uFDD0"
-
-	// specialAnchor is the format in which to represent logical reset positions,
-	// such as "first tertiary ignorable".
-	specialAnchor = "<%s/>"
-)
-
-// Process parses the rules for the tailorings of this collation
-// and calls the respective methods of p for each rule found.
-func (c Collation) Process(p RuleProcessor) (err error) {
-	if len(c.Cr) > 0 {
-		if len(c.Cr) > 1 {
-			return fmt.Errorf("multiple cr elements, want 0 or 1")
-		}
-		return processRules(p, c.Cr[0].Data())
-	}
-	if c.Rules.Any != nil {
-		return c.processXML(p)
-	}
-	return errors.New("no tailoring data")
-}
-
-// processRules parses rules in the Collation Rule Syntax defined in
-// http://www.unicode.org/reports/tr35/tr35-collation.html#Collation_Tailorings.
-func processRules(p RuleProcessor, s string) (err error) {
-	chk := func(s string, e error) string {
-		if err == nil {
-			err = e
-		}
-		return s
-	}
-	i := 0 // Save the line number for use after the loop.
-	scanner := bufio.NewScanner(strings.NewReader(s))
-	for ; scanner.Scan() && err == nil; i++ {
-		for s := skipSpace(scanner.Text()); s != "" && s[0] != '#'; s = skipSpace(s) {
-			level := 5
-			var ch byte
-			switch ch, s = s[0], s[1:]; ch {
-			case '&': // followed by <anchor> or '[' <key> ']'
-				if s = skipSpace(s); consume(&s, '[') {
-					s = chk(parseSpecialAnchor(p, s))
-				} else {
-					s = chk(parseAnchor(p, 0, s))
-				}
-			case '<': // sort relation '<'{1,4}, optionally followed by '*'.
-				for level = 1; consume(&s, '<'); level++ {
-				}
-				if level > 4 {
-					err = fmt.Errorf("level %d > 4", level)
-				}
-				fallthrough
-			case '=': // identity relation, optionally followed by *.
-				if consume(&s, '*') {
-					s = chk(parseSequence(p, level, s))
-				} else {
-					s = chk(parseOrder(p, level, s))
-				}
-			default:
-				chk("", fmt.Errorf("illegal operator %q", ch))
-				break
-			}
-		}
-	}
-	if chk("", scanner.Err()); err != nil {
-		return fmt.Errorf("%d: %v", i, err)
-	}
-	return nil
-}
-
-// parseSpecialAnchor parses the anchor syntax which is either of the form
-//    ['before' <level>] <anchor>
-// or
-//    [<label>]
-// The starting should already be consumed.
-func parseSpecialAnchor(p RuleProcessor, s string) (tail string, err error) {
-	i := strings.IndexByte(s, ']')
-	if i == -1 {
-		return "", errors.New("unmatched bracket")
-	}
-	a := strings.TrimSpace(s[:i])
-	s = s[i+1:]
-	if strings.HasPrefix(a, "before ") {
-		l, err := strconv.ParseUint(skipSpace(a[len("before "):]), 10, 3)
-		if err != nil {
-			return s, err
-		}
-		return parseAnchor(p, int(l), s)
-	}
-	return s, p.Reset(fmt.Sprintf(specialAnchor, a), 0)
-}
-
-func parseAnchor(p RuleProcessor, level int, s string) (tail string, err error) {
-	anchor, s, err := scanString(s)
-	if err != nil {
-		return s, err
-	}
-	return s, p.Reset(anchor, level)
-}
-
-func parseOrder(p RuleProcessor, level int, s string) (tail string, err error) {
-	var value, context, extend string
-	if value, s, err = scanString(s); err != nil {
-		return s, err
-	}
-	if strings.HasPrefix(value, cldrIndex) {
-		p.Index(value[len(cldrIndex):])
-		return
-	}
-	if consume(&s, '|') {
-		if context, s, err = scanString(s); err != nil {
-			return s, errors.New("missing string after context")
-		}
-	}
-	if consume(&s, '/') {
-		if extend, s, err = scanString(s); err != nil {
-			return s, errors.New("missing string after extension")
-		}
-	}
-	return s, p.Insert(level, value, context, extend)
-}
-
-// scanString scans a single input string.
-func scanString(s string) (str, tail string, err error) {
-	if s = skipSpace(s); s == "" {
-		return s, s, errors.New("missing string")
-	}
-	buf := [16]byte{} // small but enough to hold most cases.
-	value := buf[:0]
-	for s != "" {
-		if consume(&s, '\'') {
-			i := strings.IndexByte(s, '\'')
-			if i == -1 {
-				return "", "", errors.New(`unmatched single quote`)
-			}
-			if i == 0 {
-				value = append(value, '\'')
-			} else {
-				value = append(value, s[:i]...)
-			}
-			s = s[i+1:]
-			continue
-		}
-		r, sz := utf8.DecodeRuneInString(s)
-		if unicode.IsSpace(r) || strings.ContainsRune("&<=#", r) {
-			break
-		}
-		value = append(value, s[:sz]...)
-		s = s[sz:]
-	}
-	return string(value), skipSpace(s), nil
-}
-
-func parseSequence(p RuleProcessor, level int, s string) (tail string, err error) {
-	if s = skipSpace(s); s == "" {
-		return s, errors.New("empty sequence")
-	}
-	last := rune(0)
-	for s != "" {
-		r, sz := utf8.DecodeRuneInString(s)
-		s = s[sz:]
-
-		if r == '-' {
-			// We have a range. The first element was already written.
-			if last == 0 {
-				return s, errors.New("range without starter value")
-			}
-			r, sz = utf8.DecodeRuneInString(s)
-			s = s[sz:]
-			if r == utf8.RuneError || r < last {
-				return s, fmt.Errorf("invalid range %q-%q", last, r)
-			}
-			for i := last + 1; i <= r; i++ {
-				if err := p.Insert(level, string(i), "", ""); err != nil {
-					return s, err
-				}
-			}
-			last = 0
-			continue
-		}
-
-		if unicode.IsSpace(r) || unicode.IsPunct(r) {
-			break
-		}
-
-		// normal case
-		if err := p.Insert(level, string(r), "", ""); err != nil {
-			return s, err
-		}
-		last = r
-	}
-	return s, nil
-}
-
-func skipSpace(s string) string {
-	return strings.TrimLeftFunc(s, unicode.IsSpace)
-}
-
-// consumes returns whether the next byte is ch. If so, it gobbles it by
-// updating s.
-func consume(s *string, ch byte) (ok bool) {
-	if *s == "" || (*s)[0] != ch {
-		return false
-	}
-	*s = (*s)[1:]
-	return true
-}
-
-// The following code parses Collation rules of CLDR version 24 and before.
-
-var lmap = map[byte]int{
-	'p': 1,
-	's': 2,
-	't': 3,
-	'i': 5,
-}
-
-type rulesElem struct {
-	Rules struct {
-		Common
-		Any []*struct {
-			XMLName xml.Name
-			rule
-		} `xml:",any"`
-	} `xml:"rules"`
-}
-
-type rule struct {
-	Value  string `xml:",chardata"`
-	Before string `xml:"before,attr"`
-	Any    []*struct {
-		XMLName xml.Name
-		rule
-	} `xml:",any"`
-}
-
-var emptyValueError = errors.New("cldr: empty rule value")
-
-func (r *rule) value() (string, error) {
-	// Convert hexadecimal Unicode codepoint notation to a string.
-	s := charRe.ReplaceAllStringFunc(r.Value, replaceUnicode)
-	r.Value = s
-	if s == "" {
-		if len(r.Any) != 1 {
-			return "", emptyValueError
-		}
-		r.Value = fmt.Sprintf(specialAnchor, r.Any[0].XMLName.Local)
-		r.Any = nil
-	} else if len(r.Any) != 0 {
-		return "", fmt.Errorf("cldr: XML elements found in collation rule: %v", r.Any)
-	}
-	return r.Value, nil
-}
-
-func (r rule) process(p RuleProcessor, name, context, extend string) error {
-	v, err := r.value()
-	if err != nil {
-		return err
-	}
-	switch name {
-	case "p", "s", "t", "i":
-		if strings.HasPrefix(v, cldrIndex) {
-			p.Index(v[len(cldrIndex):])
-			return nil
-		}
-		if err := p.Insert(lmap[name[0]], v, context, extend); err != nil {
-			return err
-		}
-	case "pc", "sc", "tc", "ic":
-		level := lmap[name[0]]
-		for _, s := range v {
-			if err := p.Insert(level, string(s), context, extend); err != nil {
-				return err
-			}
-		}
-	default:
-		return fmt.Errorf("cldr: unsupported tag: %q", name)
-	}
-	return nil
-}
-
-// processXML parses the format of CLDR versions 24 and older.
-func (c Collation) processXML(p RuleProcessor) (err error) {
-	// Collation is generated and defined in xml.go.
-	var v string
-	for _, r := range c.Rules.Any {
-		switch r.XMLName.Local {
-		case "reset":
-			level := 0
-			switch r.Before {
-			case "primary", "1":
-				level = 1
-			case "secondary", "2":
-				level = 2
-			case "tertiary", "3":
-				level = 3
-			case "":
-			default:
-				return fmt.Errorf("cldr: unknown level %q", r.Before)
-			}
-			v, err = r.value()
-			if err == nil {
-				err = p.Reset(v, level)
-			}
-		case "x":
-			var context, extend string
-			for _, r1 := range r.Any {
-				v, err = r1.value()
-				switch r1.XMLName.Local {
-				case "context":
-					context = v
-				case "extend":
-					extend = v
-				}
-			}
-			for _, r1 := range r.Any {
-				if t := r1.XMLName.Local; t == "context" || t == "extend" {
-					continue
-				}
-				r1.rule.process(p, r1.XMLName.Local, context, extend)
-			}
-		default:
-			err = r.rule.process(p, r.XMLName.Local, "", "")
-		}
-		if err != nil {
-			return err
-		}
-	}
-	return nil
-}

+ 0 - 171
vendor/golang.org/x/text/unicode/cldr/decode.go

@@ -1,171 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package cldr
-
-import (
-	"archive/zip"
-	"bytes"
-	"encoding/xml"
-	"fmt"
-	"io"
-	"io/ioutil"
-	"log"
-	"os"
-	"path/filepath"
-	"regexp"
-)
-
-// A Decoder loads an archive of CLDR data.
-type Decoder struct {
-	dirFilter     []string
-	sectionFilter []string
-	loader        Loader
-	cldr          *CLDR
-	curLocale     string
-}
-
-// SetSectionFilter takes a list top-level LDML element names to which
-// evaluation of LDML should be limited.  It automatically calls SetDirFilter.
-func (d *Decoder) SetSectionFilter(filter ...string) {
-	d.sectionFilter = filter
-	// TODO: automatically set dir filter
-}
-
-// SetDirFilter limits the loading of LDML XML files of the specied directories.
-// Note that sections may be split across directories differently for different CLDR versions.
-// For more robust code, use SetSectionFilter.
-func (d *Decoder) SetDirFilter(dir ...string) {
-	d.dirFilter = dir
-}
-
-// A Loader provides access to the files of a CLDR archive.
-type Loader interface {
-	Len() int
-	Path(i int) string
-	Reader(i int) (io.ReadCloser, error)
-}
-
-var fileRe = regexp.MustCompile(`.*[/\\](.*)[/\\](.*)\.xml`)
-
-// Decode loads and decodes the files represented by l.
-func (d *Decoder) Decode(l Loader) (cldr *CLDR, err error) {
-	d.cldr = makeCLDR()
-	for i := 0; i < l.Len(); i++ {
-		fname := l.Path(i)
-		if m := fileRe.FindStringSubmatch(fname); m != nil {
-			if len(d.dirFilter) > 0 && !in(d.dirFilter, m[1]) {
-				continue
-			}
-			var r io.Reader
-			if r, err = l.Reader(i); err == nil {
-				err = d.decode(m[1], m[2], r)
-			}
-			if err != nil {
-				return nil, err
-			}
-		}
-	}
-	d.cldr.finalize(d.sectionFilter)
-	return d.cldr, nil
-}
-
-func (d *Decoder) decode(dir, id string, r io.Reader) error {
-	var v interface{}
-	var l *LDML
-	cldr := d.cldr
-	switch {
-	case dir == "supplemental":
-		v = cldr.supp
-	case dir == "transforms":
-		return nil
-	case dir == "bcp47":
-		v = cldr.bcp47
-	case dir == "validity":
-		return nil
-	default:
-		ok := false
-		if v, ok = cldr.locale[id]; !ok {
-			l = &LDML{}
-			v, cldr.locale[id] = l, l
-		}
-	}
-	x := xml.NewDecoder(r)
-	if err := x.Decode(v); err != nil {
-		log.Printf("%s/%s: %v", dir, id, err)
-		return err
-	}
-	if l != nil {
-		if l.Identity == nil {
-			return fmt.Errorf("%s/%s: missing identity element", dir, id)
-		}
-		// TODO: verify when CLDR bug http://unicode.org/cldr/trac/ticket/8970
-		// is resolved.
-		// path := strings.Split(id, "_")
-		// if lang := l.Identity.Language.Type; lang != path[0] {
-		// 	return fmt.Errorf("%s/%s: language was %s; want %s", dir, id, lang, path[0])
-		// }
-	}
-	return nil
-}
-
-type pathLoader []string
-
-func makePathLoader(path string) (pl pathLoader, err error) {
-	err = filepath.Walk(path, func(path string, _ os.FileInfo, err error) error {
-		pl = append(pl, path)
-		return err
-	})
-	return pl, err
-}
-
-func (pl pathLoader) Len() int {
-	return len(pl)
-}
-
-func (pl pathLoader) Path(i int) string {
-	return pl[i]
-}
-
-func (pl pathLoader) Reader(i int) (io.ReadCloser, error) {
-	return os.Open(pl[i])
-}
-
-// DecodePath loads CLDR data from the given path.
-func (d *Decoder) DecodePath(path string) (cldr *CLDR, err error) {
-	loader, err := makePathLoader(path)
-	if err != nil {
-		return nil, err
-	}
-	return d.Decode(loader)
-}
-
-type zipLoader struct {
-	r *zip.Reader
-}
-
-func (zl zipLoader) Len() int {
-	return len(zl.r.File)
-}
-
-func (zl zipLoader) Path(i int) string {
-	return zl.r.File[i].Name
-}
-
-func (zl zipLoader) Reader(i int) (io.ReadCloser, error) {
-	return zl.r.File[i].Open()
-}
-
-// DecodeZip loads CLDR data from the zip archive for which r is the source.
-func (d *Decoder) DecodeZip(r io.Reader) (cldr *CLDR, err error) {
-	buffer, err := ioutil.ReadAll(r)
-	if err != nil {
-		return nil, err
-	}
-	archive, err := zip.NewReader(bytes.NewReader(buffer), int64(len(buffer)))
-	if err != nil {
-		return nil, err
-	}
-	return d.Decode(zipLoader{archive})
-}

+ 0 - 400
vendor/golang.org/x/text/unicode/cldr/makexml.go

@@ -1,400 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// +build ignore
-
-// This tool generates types for the various XML formats of CLDR.
-package main
-
-import (
-	"archive/zip"
-	"bytes"
-	"encoding/xml"
-	"flag"
-	"fmt"
-	"io"
-	"io/ioutil"
-	"log"
-	"os"
-	"regexp"
-	"strings"
-
-	"golang.org/x/text/internal/gen"
-)
-
-var outputFile = flag.String("output", "xml.go", "output file name")
-
-func main() {
-	flag.Parse()
-
-	r := gen.OpenCLDRCoreZip()
-	buffer, err := ioutil.ReadAll(r)
-	if err != nil {
-		log.Fatal("Could not read zip file")
-	}
-	r.Close()
-	z, err := zip.NewReader(bytes.NewReader(buffer), int64(len(buffer)))
-	if err != nil {
-		log.Fatalf("Could not read zip archive: %v", err)
-	}
-
-	var buf bytes.Buffer
-
-	version := gen.CLDRVersion()
-
-	for _, dtd := range files {
-		for _, f := range z.File {
-			if strings.HasSuffix(f.Name, dtd.file+".dtd") {
-				r, err := f.Open()
-				failOnError(err)
-
-				b := makeBuilder(&buf, dtd)
-				b.parseDTD(r)
-				b.resolve(b.index[dtd.top[0]])
-				b.write()
-				if b.version != "" && version != b.version {
-					println(f.Name)
-					log.Fatalf("main: inconsistent versions: found %s; want %s", b.version, version)
-				}
-				break
-			}
-		}
-	}
-	fmt.Fprintln(&buf, "// Version is the version of CLDR from which the XML definitions are generated.")
-	fmt.Fprintf(&buf, "const Version = %q\n", version)
-
-	gen.WriteGoFile(*outputFile, "cldr", buf.Bytes())
-}
-
-func failOnError(err error) {
-	if err != nil {
-		log.New(os.Stderr, "", log.Lshortfile).Output(2, err.Error())
-		os.Exit(1)
-	}
-}
-
-// configuration data per DTD type
-type dtd struct {
-	file string   // base file name
-	root string   // Go name of the root XML element
-	top  []string // create a different type for this section
-
-	skipElem    []string // hard-coded or deprecated elements
-	skipAttr    []string // attributes to exclude
-	predefined  []string // hard-coded elements exist of the form <name>Elem
-	forceRepeat []string // elements to make slices despite DTD
-}
-
-var files = []dtd{
-	{
-		file: "ldmlBCP47",
-		root: "LDMLBCP47",
-		top:  []string{"ldmlBCP47"},
-		skipElem: []string{
-			"cldrVersion", // deprecated, not used
-		},
-	},
-	{
-		file: "ldmlSupplemental",
-		root: "SupplementalData",
-		top:  []string{"supplementalData"},
-		skipElem: []string{
-			"cldrVersion", // deprecated, not used
-		},
-		forceRepeat: []string{
-			"plurals", // data defined in plurals.xml and ordinals.xml
-		},
-	},
-	{
-		file: "ldml",
-		root: "LDML",
-		top: []string{
-			"ldml", "collation", "calendar", "timeZoneNames", "localeDisplayNames", "numbers",
-		},
-		skipElem: []string{
-			"cp",       // not used anywhere
-			"special",  // not used anywhere
-			"fallback", // deprecated, not used
-			"alias",    // in Common
-			"default",  // in Common
-		},
-		skipAttr: []string{
-			"hiraganaQuarternary", // typo in DTD, correct version included as well
-		},
-		predefined: []string{"rules"},
-	},
-}
-
-var comments = map[string]string{
-	"ldmlBCP47": `
-// LDMLBCP47 holds information on allowable values for various variables in LDML.
-`,
-	"supplementalData": `
-// SupplementalData holds information relevant for internationalization
-// and proper use of CLDR, but that is not contained in the locale hierarchy.
-`,
-	"ldml": `
-// LDML is the top-level type for locale-specific data.
-`,
-	"collation": `
-// Collation contains rules that specify a certain sort-order,
-// as a tailoring of the root order. 
-// The parsed rules are obtained by passing a RuleProcessor to Collation's
-// Process method.
-`,
-	"calendar": `
-// Calendar specifies the fields used for formatting and parsing dates and times.
-// The month and quarter names are identified numerically, starting at 1.
-// The day (of the week) names are identified with short strings, since there is
-// no universally-accepted numeric designation.
-`,
-	"dates": `
-// Dates contains information regarding the format and parsing of dates and times.
-`,
-	"localeDisplayNames": `
-// LocaleDisplayNames specifies localized display names for for scripts, languages,
-// countries, currencies, and variants.
-`,
-	"numbers": `
-// Numbers supplies information for formatting and parsing numbers and currencies.
-`,
-}
-
-type element struct {
-	name      string // XML element name
-	category  string // elements contained by this element
-	signature string // category + attrKey*
-
-	attr []*attribute // attributes supported by this element.
-	sub  []struct {   // parsed and evaluated sub elements of this element.
-		e      *element
-		repeat bool // true if the element needs to be a slice
-	}
-
-	resolved bool // prevent multiple resolutions of this element.
-}
-
-type attribute struct {
-	name string
-	key  string
-	list []string
-
-	tag string // Go tag
-}
-
-var (
-	reHead  = regexp.MustCompile(` *(\w+) +([\w\-]+)`)
-	reAttr  = regexp.MustCompile(` *(\w+) *(?:(\w+)|\(([\w\- \|]+)\)) *(?:#([A-Z]*) *(?:\"([\.\d+])\")?)? *("[\w\-:]*")?`)
-	reElem  = regexp.MustCompile(`^ *(EMPTY|ANY|\(.*\)[\*\+\?]?) *$`)
-	reToken = regexp.MustCompile(`\w\-`)
-)
-
-// builder is used to read in the DTD files from CLDR and generate Go code
-// to be used with the encoding/xml package.
-type builder struct {
-	w       io.Writer
-	index   map[string]*element
-	elem    []*element
-	info    dtd
-	version string
-}
-
-func makeBuilder(w io.Writer, d dtd) builder {
-	return builder{
-		w:     w,
-		index: make(map[string]*element),
-		elem:  []*element{},
-		info:  d,
-	}
-}
-
-// parseDTD parses a DTD file.
-func (b *builder) parseDTD(r io.Reader) {
-	for d := xml.NewDecoder(r); ; {
-		t, err := d.Token()
-		if t == nil {
-			break
-		}
-		failOnError(err)
-		dir, ok := t.(xml.Directive)
-		if !ok {
-			continue
-		}
-		m := reHead.FindSubmatch(dir)
-		dir = dir[len(m[0]):]
-		ename := string(m[2])
-		el, elementFound := b.index[ename]
-		switch string(m[1]) {
-		case "ELEMENT":
-			if elementFound {
-				log.Fatal("parseDTD: duplicate entry for element %q", ename)
-			}
-			m := reElem.FindSubmatch(dir)
-			if m == nil {
-				log.Fatalf("parseDTD: invalid element %q", string(dir))
-			}
-			if len(m[0]) != len(dir) {
-				log.Fatal("parseDTD: invalid element %q", string(dir), len(dir), len(m[0]), string(m[0]))
-			}
-			s := string(m[1])
-			el = &element{
-				name:     ename,
-				category: s,
-			}
-			b.index[ename] = el
-		case "ATTLIST":
-			if !elementFound {
-				log.Fatalf("parseDTD: unknown element %q", ename)
-			}
-			s := string(dir)
-			m := reAttr.FindStringSubmatch(s)
-			if m == nil {
-				log.Fatal(fmt.Errorf("parseDTD: invalid attribute %q", string(dir)))
-			}
-			if m[4] == "FIXED" {
-				b.version = m[5]
-			} else {
-				switch m[1] {
-				case "draft", "references", "alt", "validSubLocales", "standard" /* in Common */ :
-				case "type", "choice":
-				default:
-					el.attr = append(el.attr, &attribute{
-						name: m[1],
-						key:  s,
-						list: reToken.FindAllString(m[3], -1),
-					})
-					el.signature = fmt.Sprintf("%s=%s+%s", el.signature, m[1], m[2])
-				}
-			}
-		}
-	}
-}
-
-var reCat = regexp.MustCompile(`[ ,\|]*(?:(\(|\)|\#?[\w_-]+)([\*\+\?]?))?`)
-
-// resolve takes a parsed element and converts it into structured data
-// that can be used to generate the XML code.
-func (b *builder) resolve(e *element) {
-	if e.resolved {
-		return
-	}
-	b.elem = append(b.elem, e)
-	e.resolved = true
-	s := e.category
-	found := make(map[string]bool)
-	sequenceStart := []int{}
-	for len(s) > 0 {
-		m := reCat.FindStringSubmatch(s)
-		if m == nil {
-			log.Fatalf("%s: invalid category string %q", e.name, s)
-		}
-		repeat := m[2] == "*" || m[2] == "+" || in(b.info.forceRepeat, m[1])
-		switch m[1] {
-		case "":
-		case "(":
-			sequenceStart = append(sequenceStart, len(e.sub))
-		case ")":
-			if len(sequenceStart) == 0 {
-				log.Fatalf("%s: unmatched closing parenthesis", e.name)
-			}
-			for i := sequenceStart[len(sequenceStart)-1]; i < len(e.sub); i++ {
-				e.sub[i].repeat = e.sub[i].repeat || repeat
-			}
-			sequenceStart = sequenceStart[:len(sequenceStart)-1]
-		default:
-			if in(b.info.skipElem, m[1]) {
-			} else if sub, ok := b.index[m[1]]; ok {
-				if !found[sub.name] {
-					e.sub = append(e.sub, struct {
-						e      *element
-						repeat bool
-					}{sub, repeat})
-					found[sub.name] = true
-					b.resolve(sub)
-				}
-			} else if m[1] == "#PCDATA" || m[1] == "ANY" {
-			} else if m[1] != "EMPTY" {
-				log.Fatalf("resolve:%s: element %q not found", e.name, m[1])
-			}
-		}
-		s = s[len(m[0]):]
-	}
-}
-
-// return true if s is contained in set.
-func in(set []string, s string) bool {
-	for _, v := range set {
-		if v == s {
-			return true
-		}
-	}
-	return false
-}
-
-var repl = strings.NewReplacer("-", " ", "_", " ")
-
-// title puts the first character or each character following '_' in title case and
-// removes all occurrences of '_'.
-func title(s string) string {
-	return strings.Replace(strings.Title(repl.Replace(s)), " ", "", -1)
-}
-
-// writeElem generates Go code for a single element, recursively.
-func (b *builder) writeElem(tab int, e *element) {
-	p := func(f string, x ...interface{}) {
-		f = strings.Replace(f, "\n", "\n"+strings.Repeat("\t", tab), -1)
-		fmt.Fprintf(b.w, f, x...)
-	}
-	if len(e.sub) == 0 && len(e.attr) == 0 {
-		p("Common")
-		return
-	}
-	p("struct {")
-	tab++
-	p("\nCommon")
-	for _, attr := range e.attr {
-		if !in(b.info.skipAttr, attr.name) {
-			p("\n%s string `xml:\"%s,attr\"`", title(attr.name), attr.name)
-		}
-	}
-	for _, sub := range e.sub {
-		if in(b.info.predefined, sub.e.name) {
-			p("\n%sElem", sub.e.name)
-			continue
-		}
-		if in(b.info.skipElem, sub.e.name) {
-			continue
-		}
-		p("\n%s ", title(sub.e.name))
-		if sub.repeat {
-			p("[]")
-		}
-		p("*")
-		if in(b.info.top, sub.e.name) {
-			p(title(sub.e.name))
-		} else {
-			b.writeElem(tab, sub.e)
-		}
-		p(" `xml:\"%s\"`", sub.e.name)
-	}
-	tab--
-	p("\n}")
-}
-
-// write generates the Go XML code.
-func (b *builder) write() {
-	for i, name := range b.info.top {
-		e := b.index[name]
-		if e != nil {
-			fmt.Fprintf(b.w, comments[name])
-			name := title(e.name)
-			if i == 0 {
-				name = b.info.root
-			}
-			fmt.Fprintf(b.w, "type %s ", name)
-			b.writeElem(0, e)
-			fmt.Fprint(b.w, "\n")
-		}
-	}
-}

+ 0 - 602
vendor/golang.org/x/text/unicode/cldr/resolve.go

@@ -1,602 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package cldr
-
-// This file implements the various inheritance constructs defined by LDML.
-// See http://www.unicode.org/reports/tr35/#Inheritance_and_Validity
-// for more details.
-
-import (
-	"fmt"
-	"log"
-	"reflect"
-	"regexp"
-	"sort"
-	"strings"
-)
-
-// fieldIter iterates over fields in a struct. It includes
-// fields of embedded structs.
-type fieldIter struct {
-	v        reflect.Value
-	index, n []int
-}
-
-func iter(v reflect.Value) fieldIter {
-	if v.Kind() != reflect.Struct {
-		log.Panicf("value %v must be a struct", v)
-	}
-	i := fieldIter{
-		v:     v,
-		index: []int{0},
-		n:     []int{v.NumField()},
-	}
-	i.descent()
-	return i
-}
-
-func (i *fieldIter) descent() {
-	for f := i.field(); f.Anonymous && f.Type.NumField() > 0; f = i.field() {
-		i.index = append(i.index, 0)
-		i.n = append(i.n, f.Type.NumField())
-	}
-}
-
-func (i *fieldIter) done() bool {
-	return len(i.index) == 1 && i.index[0] >= i.n[0]
-}
-
-func skip(f reflect.StructField) bool {
-	return !f.Anonymous && (f.Name[0] < 'A' || f.Name[0] > 'Z')
-}
-
-func (i *fieldIter) next() {
-	for {
-		k := len(i.index) - 1
-		i.index[k]++
-		if i.index[k] < i.n[k] {
-			if !skip(i.field()) {
-				break
-			}
-		} else {
-			if k == 0 {
-				return
-			}
-			i.index = i.index[:k]
-			i.n = i.n[:k]
-		}
-	}
-	i.descent()
-}
-
-func (i *fieldIter) value() reflect.Value {
-	return i.v.FieldByIndex(i.index)
-}
-
-func (i *fieldIter) field() reflect.StructField {
-	return i.v.Type().FieldByIndex(i.index)
-}
-
-type visitor func(v reflect.Value) error
-
-var stopDescent = fmt.Errorf("do not recurse")
-
-func (f visitor) visit(x interface{}) error {
-	return f.visitRec(reflect.ValueOf(x))
-}
-
-// visit recursively calls f on all nodes in v.
-func (f visitor) visitRec(v reflect.Value) error {
-	if v.Kind() == reflect.Ptr {
-		if v.IsNil() {
-			return nil
-		}
-		return f.visitRec(v.Elem())
-	}
-	if err := f(v); err != nil {
-		if err == stopDescent {
-			return nil
-		}
-		return err
-	}
-	switch v.Kind() {
-	case reflect.Struct:
-		for i := iter(v); !i.done(); i.next() {
-			if err := f.visitRec(i.value()); err != nil {
-				return err
-			}
-		}
-	case reflect.Slice:
-		for i := 0; i < v.Len(); i++ {
-			if err := f.visitRec(v.Index(i)); err != nil {
-				return err
-			}
-		}
-	}
-	return nil
-}
-
-// getPath is used for error reporting purposes only.
-func getPath(e Elem) string {
-	if e == nil {
-		return "<nil>"
-	}
-	if e.enclosing() == nil {
-		return e.GetCommon().name
-	}
-	if e.GetCommon().Type == "" {
-		return fmt.Sprintf("%s.%s", getPath(e.enclosing()), e.GetCommon().name)
-	}
-	return fmt.Sprintf("%s.%s[type=%s]", getPath(e.enclosing()), e.GetCommon().name, e.GetCommon().Type)
-}
-
-// xmlName returns the xml name of the element or attribute
-func xmlName(f reflect.StructField) (name string, attr bool) {
-	tags := strings.Split(f.Tag.Get("xml"), ",")
-	for _, s := range tags {
-		attr = attr || s == "attr"
-	}
-	return tags[0], attr
-}
-
-func findField(v reflect.Value, key string) (reflect.Value, error) {
-	v = reflect.Indirect(v)
-	for i := iter(v); !i.done(); i.next() {
-		if n, _ := xmlName(i.field()); n == key {
-			return i.value(), nil
-		}
-	}
-	return reflect.Value{}, fmt.Errorf("cldr: no field %q in element %#v", key, v.Interface())
-}
-
-var xpathPart = regexp.MustCompile(`(\pL+)(?:\[@(\pL+)='([\w-]+)'\])?`)
-
-func walkXPath(e Elem, path string) (res Elem, err error) {
-	for _, c := range strings.Split(path, "/") {
-		if c == ".." {
-			if e = e.enclosing(); e == nil {
-				panic("path ..")
-				return nil, fmt.Errorf(`cldr: ".." moves past root in path %q`, path)
-			}
-			continue
-		} else if c == "" {
-			continue
-		}
-		m := xpathPart.FindStringSubmatch(c)
-		if len(m) == 0 || len(m[0]) != len(c) {
-			return nil, fmt.Errorf("cldr: syntax error in path component %q", c)
-		}
-		v, err := findField(reflect.ValueOf(e), m[1])
-		if err != nil {
-			return nil, err
-		}
-		switch v.Kind() {
-		case reflect.Slice:
-			i := 0
-			if m[2] != "" || v.Len() > 1 {
-				if m[2] == "" {
-					m[2] = "type"
-					if m[3] = e.GetCommon().Default(); m[3] == "" {
-						return nil, fmt.Errorf("cldr: type selector or default value needed for element %s", m[1])
-					}
-				}
-				for ; i < v.Len(); i++ {
-					vi := v.Index(i)
-					key, err := findField(vi.Elem(), m[2])
-					if err != nil {
-						return nil, err
-					}
-					key = reflect.Indirect(key)
-					if key.Kind() == reflect.String && key.String() == m[3] {
-						break
-					}
-				}
-			}
-			if i == v.Len() || v.Index(i).IsNil() {
-				return nil, fmt.Errorf("no %s found with %s==%s", m[1], m[2], m[3])
-			}
-			e = v.Index(i).Interface().(Elem)
-		case reflect.Ptr:
-			if v.IsNil() {
-				return nil, fmt.Errorf("cldr: element %q not found within element %q", m[1], e.GetCommon().name)
-			}
-			var ok bool
-			if e, ok = v.Interface().(Elem); !ok {
-				return nil, fmt.Errorf("cldr: %q is not an XML element", m[1])
-			} else if m[2] != "" || m[3] != "" {
-				return nil, fmt.Errorf("cldr: no type selector allowed for element %s", m[1])
-			}
-		default:
-			return nil, fmt.Errorf("cldr: %q is not an XML element", m[1])
-		}
-	}
-	return e, nil
-}
-
-const absPrefix = "//ldml/"
-
-func (cldr *CLDR) resolveAlias(e Elem, src, path string) (res Elem, err error) {
-	if src != "locale" {
-		if !strings.HasPrefix(path, absPrefix) {
-			return nil, fmt.Errorf("cldr: expected absolute path, found %q", path)
-		}
-		path = path[len(absPrefix):]
-		if e, err = cldr.resolve(src); err != nil {
-			return nil, err
-		}
-	}
-	return walkXPath(e, path)
-}
-
-func (cldr *CLDR) resolveAndMergeAlias(e Elem) error {
-	alias := e.GetCommon().Alias
-	if alias == nil {
-		return nil
-	}
-	a, err := cldr.resolveAlias(e, alias.Source, alias.Path)
-	if err != nil {
-		return fmt.Errorf("%v: error evaluating path %q: %v", getPath(e), alias.Path, err)
-	}
-	// Ensure alias node was already evaluated. TODO: avoid double evaluation.
-	err = cldr.resolveAndMergeAlias(a)
-	v := reflect.ValueOf(e).Elem()
-	for i := iter(reflect.ValueOf(a).Elem()); !i.done(); i.next() {
-		if vv := i.value(); vv.Kind() != reflect.Ptr || !vv.IsNil() {
-			if _, attr := xmlName(i.field()); !attr {
-				v.FieldByIndex(i.index).Set(vv)
-			}
-		}
-	}
-	return err
-}
-
-func (cldr *CLDR) aliasResolver() visitor {
-	return func(v reflect.Value) (err error) {
-		if e, ok := v.Addr().Interface().(Elem); ok {
-			err = cldr.resolveAndMergeAlias(e)
-			if err == nil && blocking[e.GetCommon().name] {
-				return stopDescent
-			}
-		}
-		return err
-	}
-}
-
-// elements within blocking elements do not inherit.
-// Taken from CLDR's supplementalMetaData.xml.
-var blocking = map[string]bool{
-	"identity":         true,
-	"supplementalData": true,
-	"cldrTest":         true,
-	"collation":        true,
-	"transform":        true,
-}
-
-// Distinguishing attributes affect inheritance; two elements with different
-// distinguishing attributes are treated as different for purposes of inheritance,
-// except when such attributes occur in the indicated elements.
-// Taken from CLDR's supplementalMetaData.xml.
-var distinguishing = map[string][]string{
-	"key":        nil,
-	"request_id": nil,
-	"id":         nil,
-	"registry":   nil,
-	"alt":        nil,
-	"iso4217":    nil,
-	"iso3166":    nil,
-	"mzone":      nil,
-	"from":       nil,
-	"to":         nil,
-	"type": []string{
-		"abbreviationFallback",
-		"default",
-		"mapping",
-		"measurementSystem",
-		"preferenceOrdering",
-	},
-	"numberSystem": nil,
-}
-
-func in(set []string, s string) bool {
-	for _, v := range set {
-		if v == s {
-			return true
-		}
-	}
-	return false
-}
-
-// attrKey computes a key based on the distinguishable attributes of
-// an element and it's values.
-func attrKey(v reflect.Value, exclude ...string) string {
-	parts := []string{}
-	ename := v.Interface().(Elem).GetCommon().name
-	v = v.Elem()
-	for i := iter(v); !i.done(); i.next() {
-		if name, attr := xmlName(i.field()); attr {
-			if except, ok := distinguishing[name]; ok && !in(exclude, name) && !in(except, ename) {
-				v := i.value()
-				if v.Kind() == reflect.Ptr {
-					v = v.Elem()
-				}
-				if v.IsValid() {
-					parts = append(parts, fmt.Sprintf("%s=%s", name, v.String()))
-				}
-			}
-		}
-	}
-	sort.Strings(parts)
-	return strings.Join(parts, ";")
-}
-
-// Key returns a key for e derived from all distinguishing attributes
-// except those specified by exclude.
-func Key(e Elem, exclude ...string) string {
-	return attrKey(reflect.ValueOf(e), exclude...)
-}
-
-// linkEnclosing sets the enclosing element as well as the name
-// for all sub-elements of child, recursively.
-func linkEnclosing(parent, child Elem) {
-	child.setEnclosing(parent)
-	v := reflect.ValueOf(child).Elem()
-	for i := iter(v); !i.done(); i.next() {
-		vf := i.value()
-		if vf.Kind() == reflect.Slice {
-			for j := 0; j < vf.Len(); j++ {
-				linkEnclosing(child, vf.Index(j).Interface().(Elem))
-			}
-		} else if vf.Kind() == reflect.Ptr && !vf.IsNil() && vf.Elem().Kind() == reflect.Struct {
-			linkEnclosing(child, vf.Interface().(Elem))
-		}
-	}
-}
-
-func setNames(e Elem, name string) {
-	e.setName(name)
-	v := reflect.ValueOf(e).Elem()
-	for i := iter(v); !i.done(); i.next() {
-		vf := i.value()
-		name, _ = xmlName(i.field())
-		if vf.Kind() == reflect.Slice {
-			for j := 0; j < vf.Len(); j++ {
-				setNames(vf.Index(j).Interface().(Elem), name)
-			}
-		} else if vf.Kind() == reflect.Ptr && !vf.IsNil() && vf.Elem().Kind() == reflect.Struct {
-			setNames(vf.Interface().(Elem), name)
-		}
-	}
-}
-
-// deepCopy copies elements of v recursively.  All elements of v that may
-// be modified by inheritance are explicitly copied.
-func deepCopy(v reflect.Value) reflect.Value {
-	switch v.Kind() {
-	case reflect.Ptr:
-		if v.IsNil() || v.Elem().Kind() != reflect.Struct {
-			return v
-		}
-		nv := reflect.New(v.Elem().Type())
-		nv.Elem().Set(v.Elem())
-		deepCopyRec(nv.Elem(), v.Elem())
-		return nv
-	case reflect.Slice:
-		nv := reflect.MakeSlice(v.Type(), v.Len(), v.Len())
-		for i := 0; i < v.Len(); i++ {
-			deepCopyRec(nv.Index(i), v.Index(i))
-		}
-		return nv
-	}
-	panic("deepCopy: must be called with pointer or slice")
-}
-
-// deepCopyRec is only called by deepCopy.
-func deepCopyRec(nv, v reflect.Value) {
-	if v.Kind() == reflect.Struct {
-		t := v.Type()
-		for i := 0; i < v.NumField(); i++ {
-			if name, attr := xmlName(t.Field(i)); name != "" && !attr {
-				deepCopyRec(nv.Field(i), v.Field(i))
-			}
-		}
-	} else {
-		nv.Set(deepCopy(v))
-	}
-}
-
-// newNode is used to insert a missing node during inheritance.
-func (cldr *CLDR) newNode(v, enc reflect.Value) reflect.Value {
-	n := reflect.New(v.Type())
-	for i := iter(v); !i.done(); i.next() {
-		if name, attr := xmlName(i.field()); name == "" || attr {
-			n.Elem().FieldByIndex(i.index).Set(i.value())
-		}
-	}
-	n.Interface().(Elem).GetCommon().setEnclosing(enc.Addr().Interface().(Elem))
-	return n
-}
-
-// v, parent must be pointers to struct
-func (cldr *CLDR) inheritFields(v, parent reflect.Value) (res reflect.Value, err error) {
-	t := v.Type()
-	nv := reflect.New(t)
-	nv.Elem().Set(v)
-	for i := iter(v); !i.done(); i.next() {
-		vf := i.value()
-		f := i.field()
-		name, attr := xmlName(f)
-		if name == "" || attr {
-			continue
-		}
-		pf := parent.FieldByIndex(i.index)
-		if blocking[name] {
-			if vf.IsNil() {
-				vf = pf
-			}
-			nv.Elem().FieldByIndex(i.index).Set(deepCopy(vf))
-			continue
-		}
-		switch f.Type.Kind() {
-		case reflect.Ptr:
-			if f.Type.Elem().Kind() == reflect.Struct {
-				if !vf.IsNil() {
-					if vf, err = cldr.inheritStructPtr(vf, pf); err != nil {
-						return reflect.Value{}, err
-					}
-					vf.Interface().(Elem).setEnclosing(nv.Interface().(Elem))
-					nv.Elem().FieldByIndex(i.index).Set(vf)
-				} else if !pf.IsNil() {
-					n := cldr.newNode(pf.Elem(), v)
-					if vf, err = cldr.inheritStructPtr(n, pf); err != nil {
-						return reflect.Value{}, err
-					}
-					vf.Interface().(Elem).setEnclosing(nv.Interface().(Elem))
-					nv.Elem().FieldByIndex(i.index).Set(vf)
-				}
-			}
-		case reflect.Slice:
-			vf, err := cldr.inheritSlice(nv.Elem(), vf, pf)
-			if err != nil {
-				return reflect.Zero(t), err
-			}
-			nv.Elem().FieldByIndex(i.index).Set(vf)
-		}
-	}
-	return nv, nil
-}
-
-func root(e Elem) *LDML {
-	for ; e.enclosing() != nil; e = e.enclosing() {
-	}
-	return e.(*LDML)
-}
-
-// inheritStructPtr first merges possible aliases in with v and then inherits
-// any underspecified elements from parent.
-func (cldr *CLDR) inheritStructPtr(v, parent reflect.Value) (r reflect.Value, err error) {
-	if !v.IsNil() {
-		e := v.Interface().(Elem).GetCommon()
-		alias := e.Alias
-		if alias == nil && !parent.IsNil() {
-			alias = parent.Interface().(Elem).GetCommon().Alias
-		}
-		if alias != nil {
-			a, err := cldr.resolveAlias(v.Interface().(Elem), alias.Source, alias.Path)
-			if a != nil {
-				if v, err = cldr.inheritFields(v.Elem(), reflect.ValueOf(a).Elem()); err != nil {
-					return reflect.Value{}, err
-				}
-			}
-		}
-		if !parent.IsNil() {
-			return cldr.inheritFields(v.Elem(), parent.Elem())
-		}
-	} else if parent.IsNil() {
-		panic("should not reach here")
-	}
-	return v, nil
-}
-
-// Must be slice of struct pointers.
-func (cldr *CLDR) inheritSlice(enc, v, parent reflect.Value) (res reflect.Value, err error) {
-	t := v.Type()
-	index := make(map[string]reflect.Value)
-	if !v.IsNil() {
-		for i := 0; i < v.Len(); i++ {
-			vi := v.Index(i)
-			key := attrKey(vi)
-			index[key] = vi
-		}
-	}
-	if !parent.IsNil() {
-		for i := 0; i < parent.Len(); i++ {
-			vi := parent.Index(i)
-			key := attrKey(vi)
-			if w, ok := index[key]; ok {
-				index[key], err = cldr.inheritStructPtr(w, vi)
-			} else {
-				n := cldr.newNode(vi.Elem(), enc)
-				index[key], err = cldr.inheritStructPtr(n, vi)
-			}
-			index[key].Interface().(Elem).setEnclosing(enc.Addr().Interface().(Elem))
-			if err != nil {
-				return v, err
-			}
-		}
-	}
-	keys := make([]string, 0, len(index))
-	for k, _ := range index {
-		keys = append(keys, k)
-	}
-	sort.Strings(keys)
-	sl := reflect.MakeSlice(t, len(index), len(index))
-	for i, k := range keys {
-		sl.Index(i).Set(index[k])
-	}
-	return sl, nil
-}
-
-func parentLocale(loc string) string {
-	parts := strings.Split(loc, "_")
-	if len(parts) == 1 {
-		return "root"
-	}
-	parts = parts[:len(parts)-1]
-	key := strings.Join(parts, "_")
-	return key
-}
-
-func (cldr *CLDR) resolve(loc string) (res *LDML, err error) {
-	if r := cldr.resolved[loc]; r != nil {
-		return r, nil
-	}
-	x := cldr.RawLDML(loc)
-	if x == nil {
-		return nil, fmt.Errorf("cldr: unknown locale %q", loc)
-	}
-	var v reflect.Value
-	if loc == "root" {
-		x = deepCopy(reflect.ValueOf(x)).Interface().(*LDML)
-		linkEnclosing(nil, x)
-		err = cldr.aliasResolver().visit(x)
-	} else {
-		key := parentLocale(loc)
-		var parent *LDML
-		for ; cldr.locale[key] == nil; key = parentLocale(key) {
-		}
-		if parent, err = cldr.resolve(key); err != nil {
-			return nil, err
-		}
-		v, err = cldr.inheritFields(reflect.ValueOf(x).Elem(), reflect.ValueOf(parent).Elem())
-		x = v.Interface().(*LDML)
-		linkEnclosing(nil, x)
-	}
-	if err != nil {
-		return nil, err
-	}
-	cldr.resolved[loc] = x
-	return x, err
-}
-
-// finalize finalizes the initialization of the raw LDML structs.  It also
-// removed unwanted fields, as specified by filter, so that they will not
-// be unnecessarily evaluated.
-func (cldr *CLDR) finalize(filter []string) {
-	for _, x := range cldr.locale {
-		if filter != nil {
-			v := reflect.ValueOf(x).Elem()
-			t := v.Type()
-			for i := 0; i < v.NumField(); i++ {
-				f := t.Field(i)
-				name, _ := xmlName(f)
-				if name != "" && name != "identity" && !in(filter, name) {
-					v.Field(i).Set(reflect.Zero(f.Type))
-				}
-			}
-		}
-		linkEnclosing(nil, x) // for resolving aliases and paths
-		setNames(x, "ldml")
-	}
-}

+ 0 - 144
vendor/golang.org/x/text/unicode/cldr/slice.go

@@ -1,144 +0,0 @@
-// Copyright 2013 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-package cldr
-
-import (
-	"fmt"
-	"reflect"
-	"sort"
-)
-
-// Slice provides utilities for modifying slices of elements.
-// It can be wrapped around any slice of which the element type implements
-// interface Elem.
-type Slice struct {
-	ptr reflect.Value
-	typ reflect.Type
-}
-
-// Value returns the reflect.Value of the underlying slice.
-func (s *Slice) Value() reflect.Value {
-	return s.ptr.Elem()
-}
-
-// MakeSlice wraps a pointer to a slice of Elems.
-// It replaces the array pointed to by the slice so that subsequent modifications
-// do not alter the data in a CLDR type.
-// It panics if an incorrect type is passed.
-func MakeSlice(slicePtr interface{}) Slice {
-	ptr := reflect.ValueOf(slicePtr)
-	if ptr.Kind() != reflect.Ptr {
-		panic(fmt.Sprintf("MakeSlice: argument must be pointer to slice, found %v", ptr.Type()))
-	}
-	sl := ptr.Elem()
-	if sl.Kind() != reflect.Slice {
-		panic(fmt.Sprintf("MakeSlice: argument must point to a slice, found %v", sl.Type()))
-	}
-	intf := reflect.TypeOf((*Elem)(nil)).Elem()
-	if !sl.Type().Elem().Implements(intf) {
-		panic(fmt.Sprintf("MakeSlice: element type of slice (%v) does not implement Elem", sl.Type().Elem()))
-	}
-	nsl := reflect.MakeSlice(sl.Type(), sl.Len(), sl.Len())
-	reflect.Copy(nsl, sl)
-	sl.Set(nsl)
-	return Slice{
-		ptr: ptr,
-		typ: sl.Type().Elem().Elem(),
-	}
-}
-
-func (s Slice) indexForAttr(a string) []int {
-	for i := iter(reflect.Zero(s.typ)); !i.done(); i.next() {
-		if n, _ := xmlName(i.field()); n == a {
-			return i.index
-		}
-	}
-	panic(fmt.Sprintf("MakeSlice: no attribute %q for type %v", a, s.typ))
-}
-
-// Filter filters s to only include elements for which fn returns true.
-func (s Slice) Filter(fn func(e Elem) bool) {
-	k := 0
-	sl := s.Value()
-	for i := 0; i < sl.Len(); i++ {
-		vi := sl.Index(i)
-		if fn(vi.Interface().(Elem)) {
-			sl.Index(k).Set(vi)
-			k++
-		}
-	}
-	sl.Set(sl.Slice(0, k))
-}
-
-// Group finds elements in s for which fn returns the same value and groups
-// them in a new Slice.
-func (s Slice) Group(fn func(e Elem) string) []Slice {
-	m := make(map[string][]reflect.Value)
-	sl := s.Value()
-	for i := 0; i < sl.Len(); i++ {
-		vi := sl.Index(i)
-		key := fn(vi.Interface().(Elem))
-		m[key] = append(m[key], vi)
-	}
-	keys := []string{}
-	for k, _ := range m {
-		keys = append(keys, k)
-	}
-	sort.Strings(keys)
-	res := []Slice{}
-	for _, k := range keys {
-		nsl := reflect.New(sl.Type())
-		nsl.Elem().Set(reflect.Append(nsl.Elem(), m[k]...))
-		res = append(res, MakeSlice(nsl.Interface()))
-	}
-	return res
-}
-
-// SelectAnyOf filters s to contain only elements for which attr matches
-// any of the values.
-func (s Slice) SelectAnyOf(attr string, values ...string) {
-	index := s.indexForAttr(attr)
-	s.Filter(func(e Elem) bool {
-		vf := reflect.ValueOf(e).Elem().FieldByIndex(index)
-		return in(values, vf.String())
-	})
-}
-
-// SelectOnePerGroup filters s to include at most one element e per group of
-// elements matching Key(attr), where e has an attribute a that matches any
-// the values in v.
-// If more than one element in a group matches a value in v preference
-// is given to the element that matches the first value in v.
-func (s Slice) SelectOnePerGroup(a string, v []string) {
-	index := s.indexForAttr(a)
-	grouped := s.Group(func(e Elem) string { return Key(e, a) })
-	sl := s.Value()
-	sl.Set(sl.Slice(0, 0))
-	for _, g := range grouped {
-		e := reflect.Value{}
-		found := len(v)
-		gsl := g.Value()
-		for i := 0; i < gsl.Len(); i++ {
-			vi := gsl.Index(i).Elem().FieldByIndex(index)
-			j := 0
-			for ; j < len(v) && v[j] != vi.String(); j++ {
-			}
-			if j < found {
-				found = j
-				e = gsl.Index(i)
-			}
-		}
-		if found < len(v) {
-			sl.Set(reflect.Append(sl, e))
-		}
-	}
-}
-
-// SelectDraft drops all elements from the list with a draft level smaller than d
-// and selects the highest draft level of the remaining.
-// This method assumes that the input CLDR is canonicalized.
-func (s Slice) SelectDraft(d Draft) {
-	s.SelectOnePerGroup("draft", drafts[len(drafts)-2-int(d):])
-}

+ 0 - 1494
vendor/golang.org/x/text/unicode/cldr/xml.go

@@ -1,1494 +0,0 @@
-// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT.
-
-package cldr
-
-// LDMLBCP47 holds information on allowable values for various variables in LDML.
-type LDMLBCP47 struct {
-	Common
-	Version *struct {
-		Common
-		Number string `xml:"number,attr"`
-	} `xml:"version"`
-	Generation *struct {
-		Common
-		Date string `xml:"date,attr"`
-	} `xml:"generation"`
-	Keyword []*struct {
-		Common
-		Key []*struct {
-			Common
-			Extension   string `xml:"extension,attr"`
-			Name        string `xml:"name,attr"`
-			Description string `xml:"description,attr"`
-			Deprecated  string `xml:"deprecated,attr"`
-			Preferred   string `xml:"preferred,attr"`
-			Alias       string `xml:"alias,attr"`
-			ValueType   string `xml:"valueType,attr"`
-			Since       string `xml:"since,attr"`
-			Type        []*struct {
-				Common
-				Name        string `xml:"name,attr"`
-				Description string `xml:"description,attr"`
-				Deprecated  string `xml:"deprecated,attr"`
-				Preferred   string `xml:"preferred,attr"`
-				Alias       string `xml:"alias,attr"`
-				Since       string `xml:"since,attr"`
-			} `xml:"type"`
-		} `xml:"key"`
-	} `xml:"keyword"`
-	Attribute []*struct {
-		Common
-		Name        string `xml:"name,attr"`
-		Description string `xml:"description,attr"`
-		Deprecated  string `xml:"deprecated,attr"`
-		Preferred   string `xml:"preferred,attr"`
-		Since       string `xml:"since,attr"`
-	} `xml:"attribute"`
-}
-
-// SupplementalData holds information relevant for internationalization
-// and proper use of CLDR, but that is not contained in the locale hierarchy.
-type SupplementalData struct {
-	Common
-	Version *struct {
-		Common
-		Number string `xml:"number,attr"`
-	} `xml:"version"`
-	Generation *struct {
-		Common
-		Date string `xml:"date,attr"`
-	} `xml:"generation"`
-	CurrencyData *struct {
-		Common
-		Fractions []*struct {
-			Common
-			Info []*struct {
-				Common
-				Iso4217      string `xml:"iso4217,attr"`
-				Digits       string `xml:"digits,attr"`
-				Rounding     string `xml:"rounding,attr"`
-				CashDigits   string `xml:"cashDigits,attr"`
-				CashRounding string `xml:"cashRounding,attr"`
-			} `xml:"info"`
-		} `xml:"fractions"`
-		Region []*struct {
-			Common
-			Iso3166  string `xml:"iso3166,attr"`
-			Currency []*struct {
-				Common
-				Before       string `xml:"before,attr"`
-				From         string `xml:"from,attr"`
-				To           string `xml:"to,attr"`
-				Iso4217      string `xml:"iso4217,attr"`
-				Digits       string `xml:"digits,attr"`
-				Rounding     string `xml:"rounding,attr"`
-				CashRounding string `xml:"cashRounding,attr"`
-				Tender       string `xml:"tender,attr"`
-				Alternate    []*struct {
-					Common
-					Iso4217 string `xml:"iso4217,attr"`
-				} `xml:"alternate"`
-			} `xml:"currency"`
-		} `xml:"region"`
-	} `xml:"currencyData"`
-	TerritoryContainment *struct {
-		Common
-		Group []*struct {
-			Common
-			Contains string `xml:"contains,attr"`
-			Grouping string `xml:"grouping,attr"`
-			Status   string `xml:"status,attr"`
-		} `xml:"group"`
-	} `xml:"territoryContainment"`
-	SubdivisionContainment *struct {
-		Common
-		Subgroup []*struct {
-			Common
-			Subtype  string `xml:"subtype,attr"`
-			Contains string `xml:"contains,attr"`
-		} `xml:"subgroup"`
-	} `xml:"subdivisionContainment"`
-	LanguageData *struct {
-		Common
-		Language []*struct {
-			Common
-			Scripts     string `xml:"scripts,attr"`
-			Territories string `xml:"territories,attr"`
-			Variants    string `xml:"variants,attr"`
-		} `xml:"language"`
-	} `xml:"languageData"`
-	TerritoryInfo *struct {
-		Common
-		Territory []*struct {
-			Common
-			Gdp                string `xml:"gdp,attr"`
-			LiteracyPercent    string `xml:"literacyPercent,attr"`
-			Population         string `xml:"population,attr"`
-			LanguagePopulation []*struct {
-				Common
-				LiteracyPercent   string `xml:"literacyPercent,attr"`
-				WritingPercent    string `xml:"writingPercent,attr"`
-				PopulationPercent string `xml:"populationPercent,attr"`
-				OfficialStatus    string `xml:"officialStatus,attr"`
-			} `xml:"languagePopulation"`
-		} `xml:"territory"`
-	} `xml:"territoryInfo"`
-	PostalCodeData *struct {
-		Common
-		PostCodeRegex []*struct {
-			Common
-			TerritoryId string `xml:"territoryId,attr"`
-		} `xml:"postCodeRegex"`
-	} `xml:"postalCodeData"`
-	CalendarData *struct {
-		Common
-		Calendar []*struct {
-			Common
-			Territories    string  `xml:"territories,attr"`
-			CalendarSystem *Common `xml:"calendarSystem"`
-			Eras           *struct {
-				Common
-				Era []*struct {
-					Common
-					Start string `xml:"start,attr"`
-					End   string `xml:"end,attr"`
-				} `xml:"era"`
-			} `xml:"eras"`
-		} `xml:"calendar"`
-	} `xml:"calendarData"`
-	CalendarPreferenceData *struct {
-		Common
-		CalendarPreference []*struct {
-			Common
-			Territories string `xml:"territories,attr"`
-			Ordering    string `xml:"ordering,attr"`
-		} `xml:"calendarPreference"`
-	} `xml:"calendarPreferenceData"`
-	WeekData *struct {
-		Common
-		MinDays []*struct {
-			Common
-			Count       string `xml:"count,attr"`
-			Territories string `xml:"territories,attr"`
-		} `xml:"minDays"`
-		FirstDay []*struct {
-			Common
-			Day         string `xml:"day,attr"`
-			Territories string `xml:"territories,attr"`
-		} `xml:"firstDay"`
-		WeekendStart []*struct {
-			Common
-			Day         string `xml:"day,attr"`
-			Territories string `xml:"territories,attr"`
-		} `xml:"weekendStart"`
-		WeekendEnd []*struct {
-			Common
-			Day         string `xml:"day,attr"`
-			Territories string `xml:"territories,attr"`
-		} `xml:"weekendEnd"`
-		WeekOfPreference []*struct {
-			Common
-			Locales  string `xml:"locales,attr"`
-			Ordering string `xml:"ordering,attr"`
-		} `xml:"weekOfPreference"`
-	} `xml:"weekData"`
-	TimeData *struct {
-		Common
-		Hours []*struct {
-			Common
-			Allowed   string `xml:"allowed,attr"`
-			Preferred string `xml:"preferred,attr"`
-			Regions   string `xml:"regions,attr"`
-		} `xml:"hours"`
-	} `xml:"timeData"`
-	MeasurementData *struct {
-		Common
-		MeasurementSystem []*struct {
-			Common
-			Category    string `xml:"category,attr"`
-			Territories string `xml:"territories,attr"`
-		} `xml:"measurementSystem"`
-		PaperSize []*struct {
-			Common
-			Territories string `xml:"territories,attr"`
-		} `xml:"paperSize"`
-	} `xml:"measurementData"`
-	UnitPreferenceData *struct {
-		Common
-		UnitPreferences []*struct {
-			Common
-			Category       string `xml:"category,attr"`
-			Usage          string `xml:"usage,attr"`
-			Scope          string `xml:"scope,attr"`
-			UnitPreference []*struct {
-				Common
-				Regions string `xml:"regions,attr"`
-			} `xml:"unitPreference"`
-		} `xml:"unitPreferences"`
-	} `xml:"unitPreferenceData"`
-	TimezoneData *struct {
-		Common
-		MapTimezones []*struct {
-			Common
-			OtherVersion string `xml:"otherVersion,attr"`
-			TypeVersion  string `xml:"typeVersion,attr"`
-			MapZone      []*struct {
-				Common
-				Other     string `xml:"other,attr"`
-				Territory string `xml:"territory,attr"`
-			} `xml:"mapZone"`
-		} `xml:"mapTimezones"`
-		ZoneFormatting []*struct {
-			Common
-			Multizone   string `xml:"multizone,attr"`
-			TzidVersion string `xml:"tzidVersion,attr"`
-			ZoneItem    []*struct {
-				Common
-				Territory string `xml:"territory,attr"`
-				Aliases   string `xml:"aliases,attr"`
-			} `xml:"zoneItem"`
-		} `xml:"zoneFormatting"`
-	} `xml:"timezoneData"`
-	Characters *struct {
-		Common
-		CharacterFallback []*struct {
-			Common
-			Character []*struct {
-				Common
-				Value      string    `xml:"value,attr"`
-				Substitute []*Common `xml:"substitute"`
-			} `xml:"character"`
-		} `xml:"character-fallback"`
-	} `xml:"characters"`
-	Transforms *struct {
-		Common
-		Transform []*struct {
-			Common
-			Source        string    `xml:"source,attr"`
-			Target        string    `xml:"target,attr"`
-			Variant       string    `xml:"variant,attr"`
-			Direction     string    `xml:"direction,attr"`
-			Alias         string    `xml:"alias,attr"`
-			BackwardAlias string    `xml:"backwardAlias,attr"`
-			Visibility    string    `xml:"visibility,attr"`
-			Comment       []*Common `xml:"comment"`
-			TRule         []*Common `xml:"tRule"`
-		} `xml:"transform"`
-	} `xml:"transforms"`
-	Metadata *struct {
-		Common
-		AttributeOrder *Common `xml:"attributeOrder"`
-		ElementOrder   *Common `xml:"elementOrder"`
-		SerialElements *Common `xml:"serialElements"`
-		Suppress       *struct {
-			Common
-			Attributes []*struct {
-				Common
-				Element        string `xml:"element,attr"`
-				Attribute      string `xml:"attribute,attr"`
-				AttributeValue string `xml:"attributeValue,attr"`
-			} `xml:"attributes"`
-		} `xml:"suppress"`
-		Validity *struct {
-			Common
-			Variable []*struct {
-				Common
-				Id string `xml:"id,attr"`
-			} `xml:"variable"`
-			AttributeValues []*struct {
-				Common
-				Dtds       string `xml:"dtds,attr"`
-				Elements   string `xml:"elements,attr"`
-				Attributes string `xml:"attributes,attr"`
-				Order      string `xml:"order,attr"`
-			} `xml:"attributeValues"`
-		} `xml:"validity"`
-		Alias *struct {
-			Common
-			LanguageAlias []*struct {
-				Common
-				Replacement string `xml:"replacement,attr"`
-				Reason      string `xml:"reason,attr"`
-			} `xml:"languageAlias"`
-			ScriptAlias []*struct {
-				Common
-				Replacement string `xml:"replacement,attr"`
-				Reason      string `xml:"reason,attr"`
-			} `xml:"scriptAlias"`
-			TerritoryAlias []*struct {
-				Common
-				Replacement string `xml:"replacement,attr"`
-				Reason      string `xml:"reason,attr"`
-			} `xml:"territoryAlias"`
-			SubdivisionAlias []*struct {
-				Common
-				Replacement string `xml:"replacement,attr"`
-				Reason      string `xml:"reason,attr"`
-			} `xml:"subdivisionAlias"`
-			VariantAlias []*struct {
-				Common
-				Replacement string `xml:"replacement,attr"`
-				Reason      string `xml:"reason,attr"`
-			} `xml:"variantAlias"`
-			ZoneAlias []*struct {
-				Common
-				Replacement string `xml:"replacement,attr"`
-				Reason      string `xml:"reason,attr"`
-			} `xml:"zoneAlias"`
-		} `xml:"alias"`
-		Deprecated *struct {
-			Common
-			DeprecatedItems []*struct {
-				Common
-				Elements   string `xml:"elements,attr"`
-				Attributes string `xml:"attributes,attr"`
-				Values     string `xml:"values,attr"`
-			} `xml:"deprecatedItems"`
-		} `xml:"deprecated"`
-		Distinguishing *struct {
-			Common
-			DistinguishingItems []*struct {
-				Common
-				Exclude    string `xml:"exclude,attr"`
-				Elements   string `xml:"elements,attr"`
-				Attributes string `xml:"attributes,attr"`
-			} `xml:"distinguishingItems"`
-		} `xml:"distinguishing"`
-		Blocking *struct {
-			Common
-			BlockingItems []*struct {
-				Common
-				Elements string `xml:"elements,attr"`
-			} `xml:"blockingItems"`
-		} `xml:"blocking"`
-		CoverageAdditions *struct {
-			Common
-			LanguageCoverage []*struct {
-				Common
-				Values string `xml:"values,attr"`
-			} `xml:"languageCoverage"`
-			ScriptCoverage []*struct {
-				Common
-				Values string `xml:"values,attr"`
-			} `xml:"scriptCoverage"`
-			TerritoryCoverage []*struct {
-				Common
-				Values string `xml:"values,attr"`
-			} `xml:"territoryCoverage"`
-			CurrencyCoverage []*struct {
-				Common
-				Values string `xml:"values,attr"`
-			} `xml:"currencyCoverage"`
-			TimezoneCoverage []*struct {
-				Common
-				Values string `xml:"values,attr"`
-			} `xml:"timezoneCoverage"`
-		} `xml:"coverageAdditions"`
-		SkipDefaultLocale *struct {
-			Common
-			Services string `xml:"services,attr"`
-		} `xml:"skipDefaultLocale"`
-		DefaultContent *struct {
-			Common
-			Locales string `xml:"locales,attr"`
-		} `xml:"defaultContent"`
-	} `xml:"metadata"`
-	CodeMappings *struct {
-		Common
-		LanguageCodes []*struct {
-			Common
-			Alpha3 string `xml:"alpha3,attr"`
-		} `xml:"languageCodes"`
-		TerritoryCodes []*struct {
-			Common
-			Numeric  string `xml:"numeric,attr"`
-			Alpha3   string `xml:"alpha3,attr"`
-			Fips10   string `xml:"fips10,attr"`
-			Internet string `xml:"internet,attr"`
-		} `xml:"territoryCodes"`
-		CurrencyCodes []*struct {
-			Common
-			Numeric string `xml:"numeric,attr"`
-		} `xml:"currencyCodes"`
-	} `xml:"codeMappings"`
-	ParentLocales *struct {
-		Common
-		ParentLocale []*struct {
-			Common
-			Parent  string `xml:"parent,attr"`
-			Locales string `xml:"locales,attr"`
-		} `xml:"parentLocale"`
-	} `xml:"parentLocales"`
-	LikelySubtags *struct {
-		Common
-		LikelySubtag []*struct {
-			Common
-			From string `xml:"from,attr"`
-			To   string `xml:"to,attr"`
-		} `xml:"likelySubtag"`
-	} `xml:"likelySubtags"`
-	MetazoneInfo *struct {
-		Common
-		Timezone []*struct {
-			Common
-			UsesMetazone []*struct {
-				Common
-				From  string `xml:"from,attr"`
-				To    string `xml:"to,attr"`
-				Mzone string `xml:"mzone,attr"`
-			} `xml:"usesMetazone"`
-		} `xml:"timezone"`
-	} `xml:"metazoneInfo"`
-	Plurals []*struct {
-		Common
-		PluralRules []*struct {
-			Common
-			Locales    string `xml:"locales,attr"`
-			PluralRule []*struct {
-				Common
-				Count string `xml:"count,attr"`
-			} `xml:"pluralRule"`
-		} `xml:"pluralRules"`
-		PluralRanges []*struct {
-			Common
-			Locales     string `xml:"locales,attr"`
-			PluralRange []*struct {
-				Common
-				Start  string `xml:"start,attr"`
-				End    string `xml:"end,attr"`
-				Result string `xml:"result,attr"`
-			} `xml:"pluralRange"`
-		} `xml:"pluralRanges"`
-	} `xml:"plurals"`
-	TelephoneCodeData *struct {
-		Common
-		CodesByTerritory []*struct {
-			Common
-			Territory            string `xml:"territory,attr"`
-			TelephoneCountryCode []*struct {
-				Common
-				Code string `xml:"code,attr"`
-				From string `xml:"from,attr"`
-				To   string `xml:"to,attr"`
-			} `xml:"telephoneCountryCode"`
-		} `xml:"codesByTerritory"`
-	} `xml:"telephoneCodeData"`
-	NumberingSystems *struct {
-		Common
-		NumberingSystem []*struct {
-			Common
-			Id     string `xml:"id,attr"`
-			Radix  string `xml:"radix,attr"`
-			Digits string `xml:"digits,attr"`
-			Rules  string `xml:"rules,attr"`
-		} `xml:"numberingSystem"`
-	} `xml:"numberingSystems"`
-	Bcp47KeywordMappings *struct {
-		Common
-		MapKeys *struct {
-			Common
-			KeyMap []*struct {
-				Common
-				Bcp47 string `xml:"bcp47,attr"`
-			} `xml:"keyMap"`
-		} `xml:"mapKeys"`
-		MapTypes []*struct {
-			Common
-			TypeMap []*struct {
-				Common
-				Bcp47 string `xml:"bcp47,attr"`
-			} `xml:"typeMap"`
-		} `xml:"mapTypes"`
-	} `xml:"bcp47KeywordMappings"`
-	Gender *struct {
-		Common
-		PersonList []*struct {
-			Common
-			Locales string `xml:"locales,attr"`
-		} `xml:"personList"`
-	} `xml:"gender"`
-	References *struct {
-		Common
-		Reference []*struct {
-			Common
-			Uri string `xml:"uri,attr"`
-		} `xml:"reference"`
-	} `xml:"references"`
-	LanguageMatching *struct {
-		Common
-		LanguageMatches []*struct {
-			Common
-			ParadigmLocales []*struct {
-				Common
-				Locales string `xml:"locales,attr"`
-			} `xml:"paradigmLocales"`
-			MatchVariable []*struct {
-				Common
-				Id    string `xml:"id,attr"`
-				Value string `xml:"value,attr"`
-			} `xml:"matchVariable"`
-			LanguageMatch []*struct {
-				Common
-				Desired   string `xml:"desired,attr"`
-				Supported string `xml:"supported,attr"`
-				Percent   string `xml:"percent,attr"`
-				Distance  string `xml:"distance,attr"`
-				Oneway    string `xml:"oneway,attr"`
-			} `xml:"languageMatch"`
-		} `xml:"languageMatches"`
-	} `xml:"languageMatching"`
-	DayPeriodRuleSet []*struct {
-		Common
-		DayPeriodRules []*struct {
-			Common
-			Locales       string `xml:"locales,attr"`
-			DayPeriodRule []*struct {
-				Common
-				At     string `xml:"at,attr"`
-				After  string `xml:"after,attr"`
-				Before string `xml:"before,attr"`
-				From   string `xml:"from,attr"`
-				To     string `xml:"to,attr"`
-			} `xml:"dayPeriodRule"`
-		} `xml:"dayPeriodRules"`
-	} `xml:"dayPeriodRuleSet"`
-	MetaZones *struct {
-		Common
-		MetazoneInfo *struct {
-			Common
-			Timezone []*struct {
-				Common
-				UsesMetazone []*struct {
-					Common
-					From  string `xml:"from,attr"`
-					To    string `xml:"to,attr"`
-					Mzone string `xml:"mzone,attr"`
-				} `xml:"usesMetazone"`
-			} `xml:"timezone"`
-		} `xml:"metazoneInfo"`
-		MapTimezones *struct {
-			Common
-			OtherVersion string `xml:"otherVersion,attr"`
-			TypeVersion  string `xml:"typeVersion,attr"`
-			MapZone      []*struct {
-				Common
-				Other     string `xml:"other,attr"`
-				Territory string `xml:"territory,attr"`
-			} `xml:"mapZone"`
-		} `xml:"mapTimezones"`
-	} `xml:"metaZones"`
-	PrimaryZones *struct {
-		Common
-		PrimaryZone []*struct {
-			Common
-			Iso3166 string `xml:"iso3166,attr"`
-		} `xml:"primaryZone"`
-	} `xml:"primaryZones"`
-	WindowsZones *struct {
-		Common
-		MapTimezones *struct {
-			Common
-			OtherVersion string `xml:"otherVersion,attr"`
-			TypeVersion  string `xml:"typeVersion,attr"`
-			MapZone      []*struct {
-				Common
-				Other     string `xml:"other,attr"`
-				Territory string `xml:"territory,attr"`
-			} `xml:"mapZone"`
-		} `xml:"mapTimezones"`
-	} `xml:"windowsZones"`
-	CoverageLevels *struct {
-		Common
-		ApprovalRequirements *struct {
-			Common
-			ApprovalRequirement []*struct {
-				Common
-				Votes   string `xml:"votes,attr"`
-				Locales string `xml:"locales,attr"`
-				Paths   string `xml:"paths,attr"`
-			} `xml:"approvalRequirement"`
-		} `xml:"approvalRequirements"`
-		CoverageVariable []*struct {
-			Common
-			Key   string `xml:"key,attr"`
-			Value string `xml:"value,attr"`
-		} `xml:"coverageVariable"`
-		CoverageLevel []*struct {
-			Common
-			InLanguage  string `xml:"inLanguage,attr"`
-			InScript    string `xml:"inScript,attr"`
-			InTerritory string `xml:"inTerritory,attr"`
-			Value       string `xml:"value,attr"`
-			Match       string `xml:"match,attr"`
-		} `xml:"coverageLevel"`
-	} `xml:"coverageLevels"`
-	IdValidity *struct {
-		Common
-		Id []*struct {
-			Common
-			IdStatus string `xml:"idStatus,attr"`
-		} `xml:"id"`
-	} `xml:"idValidity"`
-	RgScope *struct {
-		Common
-		RgPath []*struct {
-			Common
-			Path string `xml:"path,attr"`
-		} `xml:"rgPath"`
-	} `xml:"rgScope"`
-	LanguageGroups *struct {
-		Common
-		LanguageGroup []*struct {
-			Common
-			Parent string `xml:"parent,attr"`
-		} `xml:"languageGroup"`
-	} `xml:"languageGroups"`
-}
-
-// LDML is the top-level type for locale-specific data.
-type LDML struct {
-	Common
-	Version  string `xml:"version,attr"`
-	Identity *struct {
-		Common
-		Version *struct {
-			Common
-			Number string `xml:"number,attr"`
-		} `xml:"version"`
-		Generation *struct {
-			Common
-			Date string `xml:"date,attr"`
-		} `xml:"generation"`
-		Language  *Common `xml:"language"`
-		Script    *Common `xml:"script"`
-		Territory *Common `xml:"territory"`
-		Variant   *Common `xml:"variant"`
-	} `xml:"identity"`
-	LocaleDisplayNames *LocaleDisplayNames `xml:"localeDisplayNames"`
-	Layout             *struct {
-		Common
-		Orientation []*struct {
-			Common
-			Characters     string    `xml:"characters,attr"`
-			Lines          string    `xml:"lines,attr"`
-			CharacterOrder []*Common `xml:"characterOrder"`
-			LineOrder      []*Common `xml:"lineOrder"`
-		} `xml:"orientation"`
-		InList []*struct {
-			Common
-			Casing string `xml:"casing,attr"`
-		} `xml:"inList"`
-		InText []*Common `xml:"inText"`
-	} `xml:"layout"`
-	ContextTransforms *struct {
-		Common
-		ContextTransformUsage []*struct {
-			Common
-			ContextTransform []*Common `xml:"contextTransform"`
-		} `xml:"contextTransformUsage"`
-	} `xml:"contextTransforms"`
-	Characters *struct {
-		Common
-		ExemplarCharacters []*Common `xml:"exemplarCharacters"`
-		Ellipsis           []*Common `xml:"ellipsis"`
-		MoreInformation    []*Common `xml:"moreInformation"`
-		Stopwords          []*struct {
-			Common
-			StopwordList []*Common `xml:"stopwordList"`
-		} `xml:"stopwords"`
-		IndexLabels []*struct {
-			Common
-			IndexSeparator           []*Common `xml:"indexSeparator"`
-			CompressedIndexSeparator []*Common `xml:"compressedIndexSeparator"`
-			IndexRangePattern        []*Common `xml:"indexRangePattern"`
-			IndexLabelBefore         []*Common `xml:"indexLabelBefore"`
-			IndexLabelAfter          []*Common `xml:"indexLabelAfter"`
-			IndexLabel               []*struct {
-				Common
-				IndexSource string `xml:"indexSource,attr"`
-				Priority    string `xml:"priority,attr"`
-			} `xml:"indexLabel"`
-		} `xml:"indexLabels"`
-		Mapping []*struct {
-			Common
-			Registry string `xml:"registry,attr"`
-		} `xml:"mapping"`
-		ParseLenients []*struct {
-			Common
-			Scope        string `xml:"scope,attr"`
-			Level        string `xml:"level,attr"`
-			ParseLenient []*struct {
-				Common
-				Sample string `xml:"sample,attr"`
-			} `xml:"parseLenient"`
-		} `xml:"parseLenients"`
-	} `xml:"characters"`
-	Delimiters *struct {
-		Common
-		QuotationStart          []*Common `xml:"quotationStart"`
-		QuotationEnd            []*Common `xml:"quotationEnd"`
-		AlternateQuotationStart []*Common `xml:"alternateQuotationStart"`
-		AlternateQuotationEnd   []*Common `xml:"alternateQuotationEnd"`
-	} `xml:"delimiters"`
-	Measurement *struct {
-		Common
-		MeasurementSystem []*Common `xml:"measurementSystem"`
-		PaperSize         []*struct {
-			Common
-			Height []*Common `xml:"height"`
-			Width  []*Common `xml:"width"`
-		} `xml:"paperSize"`
-	} `xml:"measurement"`
-	Dates *struct {
-		Common
-		LocalizedPatternChars []*Common `xml:"localizedPatternChars"`
-		DateRangePattern      []*Common `xml:"dateRangePattern"`
-		Calendars             *struct {
-			Common
-			Calendar []*Calendar `xml:"calendar"`
-		} `xml:"calendars"`
-		Fields *struct {
-			Common
-			Field []*struct {
-				Common
-				DisplayName []*struct {
-					Common
-					Count string `xml:"count,attr"`
-				} `xml:"displayName"`
-				Relative     []*Common `xml:"relative"`
-				RelativeTime []*struct {
-					Common
-					RelativeTimePattern []*struct {
-						Common
-						Count string `xml:"count,attr"`
-					} `xml:"relativeTimePattern"`
-				} `xml:"relativeTime"`
-				RelativePeriod []*Common `xml:"relativePeriod"`
-			} `xml:"field"`
-		} `xml:"fields"`
-		TimeZoneNames *TimeZoneNames `xml:"timeZoneNames"`
-	} `xml:"dates"`
-	Numbers *Numbers `xml:"numbers"`
-	Units   *struct {
-		Common
-		Unit []*struct {
-			Common
-			DisplayName []*struct {
-				Common
-				Count string `xml:"count,attr"`
-			} `xml:"displayName"`
-			UnitPattern []*struct {
-				Common
-				Count string `xml:"count,attr"`
-			} `xml:"unitPattern"`
-			PerUnitPattern []*Common `xml:"perUnitPattern"`
-		} `xml:"unit"`
-		UnitLength []*struct {
-			Common
-			CompoundUnit []*struct {
-				Common
-				CompoundUnitPattern []*Common `xml:"compoundUnitPattern"`
-			} `xml:"compoundUnit"`
-			Unit []*struct {
-				Common
-				DisplayName []*struct {
-					Common
-					Count string `xml:"count,attr"`
-				} `xml:"displayName"`
-				UnitPattern []*struct {
-					Common
-					Count string `xml:"count,attr"`
-				} `xml:"unitPattern"`
-				PerUnitPattern []*Common `xml:"perUnitPattern"`
-			} `xml:"unit"`
-			CoordinateUnit []*struct {
-				Common
-				CoordinateUnitPattern []*Common `xml:"coordinateUnitPattern"`
-			} `xml:"coordinateUnit"`
-		} `xml:"unitLength"`
-		DurationUnit []*struct {
-			Common
-			DurationUnitPattern []*Common `xml:"durationUnitPattern"`
-		} `xml:"durationUnit"`
-	} `xml:"units"`
-	ListPatterns *struct {
-		Common
-		ListPattern []*struct {
-			Common
-			ListPatternPart []*Common `xml:"listPatternPart"`
-		} `xml:"listPattern"`
-	} `xml:"listPatterns"`
-	Collations *struct {
-		Common
-		Version          string       `xml:"version,attr"`
-		DefaultCollation *Common      `xml:"defaultCollation"`
-		Collation        []*Collation `xml:"collation"`
-	} `xml:"collations"`
-	Posix *struct {
-		Common
-		Messages []*struct {
-			Common
-			Yesstr  []*Common `xml:"yesstr"`
-			Nostr   []*Common `xml:"nostr"`
-			Yesexpr []*Common `xml:"yesexpr"`
-			Noexpr  []*Common `xml:"noexpr"`
-		} `xml:"messages"`
-	} `xml:"posix"`
-	CharacterLabels *struct {
-		Common
-		CharacterLabelPattern []*struct {
-			Common
-			Count string `xml:"count,attr"`
-		} `xml:"characterLabelPattern"`
-		CharacterLabel []*Common `xml:"characterLabel"`
-	} `xml:"characterLabels"`
-	Segmentations *struct {
-		Common
-		Segmentation []*struct {
-			Common
-			Variables *struct {
-				Common
-				Variable []*struct {
-					Common
-					Id string `xml:"id,attr"`
-				} `xml:"variable"`
-			} `xml:"variables"`
-			SegmentRules *struct {
-				Common
-				Rule []*struct {
-					Common
-					Id string `xml:"id,attr"`
-				} `xml:"rule"`
-			} `xml:"segmentRules"`
-			Exceptions *struct {
-				Common
-				Exception []*Common `xml:"exception"`
-			} `xml:"exceptions"`
-			Suppressions *struct {
-				Common
-				Suppression []*Common `xml:"suppression"`
-			} `xml:"suppressions"`
-		} `xml:"segmentation"`
-	} `xml:"segmentations"`
-	Rbnf *struct {
-		Common
-		RulesetGrouping []*struct {
-			Common
-			Ruleset []*struct {
-				Common
-				Access        string `xml:"access,attr"`
-				AllowsParsing string `xml:"allowsParsing,attr"`
-				Rbnfrule      []*struct {
-					Common
-					Value  string `xml:"value,attr"`
-					Radix  string `xml:"radix,attr"`
-					Decexp string `xml:"decexp,attr"`
-				} `xml:"rbnfrule"`
-			} `xml:"ruleset"`
-		} `xml:"rulesetGrouping"`
-	} `xml:"rbnf"`
-	Annotations *struct {
-		Common
-		Annotation []*struct {
-			Common
-			Cp  string `xml:"cp,attr"`
-			Tts string `xml:"tts,attr"`
-		} `xml:"annotation"`
-	} `xml:"annotations"`
-	Metadata *struct {
-		Common
-		CasingData *struct {
-			Common
-			CasingItem []*struct {
-				Common
-				Override   string `xml:"override,attr"`
-				ForceError string `xml:"forceError,attr"`
-			} `xml:"casingItem"`
-		} `xml:"casingData"`
-	} `xml:"metadata"`
-	References *struct {
-		Common
-		Reference []*struct {
-			Common
-			Uri string `xml:"uri,attr"`
-		} `xml:"reference"`
-	} `xml:"references"`
-}
-
-// Collation contains rules that specify a certain sort-order,
-// as a tailoring of the root order.
-// The parsed rules are obtained by passing a RuleProcessor to Collation's
-// Process method.
-type Collation struct {
-	Common
-	Visibility string  `xml:"visibility,attr"`
-	Base       *Common `xml:"base"`
-	Import     []*struct {
-		Common
-		Source string `xml:"source,attr"`
-	} `xml:"import"`
-	Settings *struct {
-		Common
-		Strength           string `xml:"strength,attr"`
-		Alternate          string `xml:"alternate,attr"`
-		Backwards          string `xml:"backwards,attr"`
-		Normalization      string `xml:"normalization,attr"`
-		CaseLevel          string `xml:"caseLevel,attr"`
-		CaseFirst          string `xml:"caseFirst,attr"`
-		HiraganaQuaternary string `xml:"hiraganaQuaternary,attr"`
-		MaxVariable        string `xml:"maxVariable,attr"`
-		Numeric            string `xml:"numeric,attr"`
-		Private            string `xml:"private,attr"`
-		VariableTop        string `xml:"variableTop,attr"`
-		Reorder            string `xml:"reorder,attr"`
-	} `xml:"settings"`
-	SuppressContractions *Common   `xml:"suppress_contractions"`
-	Optimize             *Common   `xml:"optimize"`
-	Cr                   []*Common `xml:"cr"`
-	rulesElem
-}
-
-// Calendar specifies the fields used for formatting and parsing dates and times.
-// The month and quarter names are identified numerically, starting at 1.
-// The day (of the week) names are identified with short strings, since there is
-// no universally-accepted numeric designation.
-type Calendar struct {
-	Common
-	Months *struct {
-		Common
-		MonthContext []*struct {
-			Common
-			MonthWidth []*struct {
-				Common
-				Month []*struct {
-					Common
-					Yeartype string `xml:"yeartype,attr"`
-				} `xml:"month"`
-			} `xml:"monthWidth"`
-		} `xml:"monthContext"`
-	} `xml:"months"`
-	MonthNames *struct {
-		Common
-		Month []*struct {
-			Common
-			Yeartype string `xml:"yeartype,attr"`
-		} `xml:"month"`
-	} `xml:"monthNames"`
-	MonthAbbr *struct {
-		Common
-		Month []*struct {
-			Common
-			Yeartype string `xml:"yeartype,attr"`
-		} `xml:"month"`
-	} `xml:"monthAbbr"`
-	MonthPatterns *struct {
-		Common
-		MonthPatternContext []*struct {
-			Common
-			MonthPatternWidth []*struct {
-				Common
-				MonthPattern []*Common `xml:"monthPattern"`
-			} `xml:"monthPatternWidth"`
-		} `xml:"monthPatternContext"`
-	} `xml:"monthPatterns"`
-	Days *struct {
-		Common
-		DayContext []*struct {
-			Common
-			DayWidth []*struct {
-				Common
-				Day []*Common `xml:"day"`
-			} `xml:"dayWidth"`
-		} `xml:"dayContext"`
-	} `xml:"days"`
-	DayNames *struct {
-		Common
-		Day []*Common `xml:"day"`
-	} `xml:"dayNames"`
-	DayAbbr *struct {
-		Common
-		Day []*Common `xml:"day"`
-	} `xml:"dayAbbr"`
-	Quarters *struct {
-		Common
-		QuarterContext []*struct {
-			Common
-			QuarterWidth []*struct {
-				Common
-				Quarter []*Common `xml:"quarter"`
-			} `xml:"quarterWidth"`
-		} `xml:"quarterContext"`
-	} `xml:"quarters"`
-	Week *struct {
-		Common
-		MinDays []*struct {
-			Common
-			Count string `xml:"count,attr"`
-		} `xml:"minDays"`
-		FirstDay []*struct {
-			Common
-			Day string `xml:"day,attr"`
-		} `xml:"firstDay"`
-		WeekendStart []*struct {
-			Common
-			Day  string `xml:"day,attr"`
-			Time string `xml:"time,attr"`
-		} `xml:"weekendStart"`
-		WeekendEnd []*struct {
-			Common
-			Day  string `xml:"day,attr"`
-			Time string `xml:"time,attr"`
-		} `xml:"weekendEnd"`
-	} `xml:"week"`
-	Am         []*Common `xml:"am"`
-	Pm         []*Common `xml:"pm"`
-	DayPeriods *struct {
-		Common
-		DayPeriodContext []*struct {
-			Common
-			DayPeriodWidth []*struct {
-				Common
-				DayPeriod []*Common `xml:"dayPeriod"`
-			} `xml:"dayPeriodWidth"`
-		} `xml:"dayPeriodContext"`
-	} `xml:"dayPeriods"`
-	Eras *struct {
-		Common
-		EraNames *struct {
-			Common
-			Era []*Common `xml:"era"`
-		} `xml:"eraNames"`
-		EraAbbr *struct {
-			Common
-			Era []*Common `xml:"era"`
-		} `xml:"eraAbbr"`
-		EraNarrow *struct {
-			Common
-			Era []*Common `xml:"era"`
-		} `xml:"eraNarrow"`
-	} `xml:"eras"`
-	CyclicNameSets *struct {
-		Common
-		CyclicNameSet []*struct {
-			Common
-			CyclicNameContext []*struct {
-				Common
-				CyclicNameWidth []*struct {
-					Common
-					CyclicName []*Common `xml:"cyclicName"`
-				} `xml:"cyclicNameWidth"`
-			} `xml:"cyclicNameContext"`
-		} `xml:"cyclicNameSet"`
-	} `xml:"cyclicNameSets"`
-	DateFormats *struct {
-		Common
-		DateFormatLength []*struct {
-			Common
-			DateFormat []*struct {
-				Common
-				Pattern []*struct {
-					Common
-					Numbers string `xml:"numbers,attr"`
-					Count   string `xml:"count,attr"`
-				} `xml:"pattern"`
-				DisplayName []*struct {
-					Common
-					Count string `xml:"count,attr"`
-				} `xml:"displayName"`
-			} `xml:"dateFormat"`
-		} `xml:"dateFormatLength"`
-	} `xml:"dateFormats"`
-	TimeFormats *struct {
-		Common
-		TimeFormatLength []*struct {
-			Common
-			TimeFormat []*struct {
-				Common
-				Pattern []*struct {
-					Common
-					Numbers string `xml:"numbers,attr"`
-					Count   string `xml:"count,attr"`
-				} `xml:"pattern"`
-				DisplayName []*struct {
-					Common
-					Count string `xml:"count,attr"`
-				} `xml:"displayName"`
-			} `xml:"timeFormat"`
-		} `xml:"timeFormatLength"`
-	} `xml:"timeFormats"`
-	DateTimeFormats *struct {
-		Common
-		DateTimeFormatLength []*struct {
-			Common
-			DateTimeFormat []*struct {
-				Common
-				Pattern []*struct {
-					Common
-					Numbers string `xml:"numbers,attr"`
-					Count   string `xml:"count,attr"`
-				} `xml:"pattern"`
-				DisplayName []*struct {
-					Common
-					Count string `xml:"count,attr"`
-				} `xml:"displayName"`
-			} `xml:"dateTimeFormat"`
-		} `xml:"dateTimeFormatLength"`
-		AvailableFormats []*struct {
-			Common
-			DateFormatItem []*struct {
-				Common
-				Id    string `xml:"id,attr"`
-				Count string `xml:"count,attr"`
-			} `xml:"dateFormatItem"`
-		} `xml:"availableFormats"`
-		AppendItems []*struct {
-			Common
-			AppendItem []*struct {
-				Common
-				Request string `xml:"request,attr"`
-			} `xml:"appendItem"`
-		} `xml:"appendItems"`
-		IntervalFormats []*struct {
-			Common
-			IntervalFormatFallback []*Common `xml:"intervalFormatFallback"`
-			IntervalFormatItem     []*struct {
-				Common
-				Id                 string `xml:"id,attr"`
-				GreatestDifference []*struct {
-					Common
-					Id string `xml:"id,attr"`
-				} `xml:"greatestDifference"`
-			} `xml:"intervalFormatItem"`
-		} `xml:"intervalFormats"`
-	} `xml:"dateTimeFormats"`
-	Fields []*struct {
-		Common
-		Field []*struct {
-			Common
-			DisplayName []*struct {
-				Common
-				Count string `xml:"count,attr"`
-			} `xml:"displayName"`
-			Relative     []*Common `xml:"relative"`
-			RelativeTime []*struct {
-				Common
-				RelativeTimePattern []*struct {
-					Common
-					Count string `xml:"count,attr"`
-				} `xml:"relativeTimePattern"`
-			} `xml:"relativeTime"`
-			RelativePeriod []*Common `xml:"relativePeriod"`
-		} `xml:"field"`
-	} `xml:"fields"`
-}
-type TimeZoneNames struct {
-	Common
-	HourFormat           []*Common `xml:"hourFormat"`
-	HoursFormat          []*Common `xml:"hoursFormat"`
-	GmtFormat            []*Common `xml:"gmtFormat"`
-	GmtZeroFormat        []*Common `xml:"gmtZeroFormat"`
-	RegionFormat         []*Common `xml:"regionFormat"`
-	FallbackFormat       []*Common `xml:"fallbackFormat"`
-	FallbackRegionFormat []*Common `xml:"fallbackRegionFormat"`
-	AbbreviationFallback []*Common `xml:"abbreviationFallback"`
-	PreferenceOrdering   []*Common `xml:"preferenceOrdering"`
-	SingleCountries      []*struct {
-		Common
-		List string `xml:"list,attr"`
-	} `xml:"singleCountries"`
-	Zone []*struct {
-		Common
-		Long []*struct {
-			Common
-			Generic  []*Common `xml:"generic"`
-			Standard []*Common `xml:"standard"`
-			Daylight []*Common `xml:"daylight"`
-		} `xml:"long"`
-		Short []*struct {
-			Common
-			Generic  []*Common `xml:"generic"`
-			Standard []*Common `xml:"standard"`
-			Daylight []*Common `xml:"daylight"`
-		} `xml:"short"`
-		CommonlyUsed []*struct {
-			Common
-			Used string `xml:"used,attr"`
-		} `xml:"commonlyUsed"`
-		ExemplarCity []*Common `xml:"exemplarCity"`
-	} `xml:"zone"`
-	Metazone []*struct {
-		Common
-		Long []*struct {
-			Common
-			Generic  []*Common `xml:"generic"`
-			Standard []*Common `xml:"standard"`
-			Daylight []*Common `xml:"daylight"`
-		} `xml:"long"`
-		Short []*struct {
-			Common
-			Generic  []*Common `xml:"generic"`
-			Standard []*Common `xml:"standard"`
-			Daylight []*Common `xml:"daylight"`
-		} `xml:"short"`
-		CommonlyUsed []*struct {
-			Common
-			Used string `xml:"used,attr"`
-		} `xml:"commonlyUsed"`
-	} `xml:"metazone"`
-}
-
-// LocaleDisplayNames specifies localized display names for for scripts, languages,
-// countries, currencies, and variants.
-type LocaleDisplayNames struct {
-	Common
-	LocaleDisplayPattern *struct {
-		Common
-		LocalePattern        []*Common `xml:"localePattern"`
-		LocaleSeparator      []*Common `xml:"localeSeparator"`
-		LocaleKeyTypePattern []*Common `xml:"localeKeyTypePattern"`
-	} `xml:"localeDisplayPattern"`
-	Languages *struct {
-		Common
-		Language []*Common `xml:"language"`
-	} `xml:"languages"`
-	Scripts *struct {
-		Common
-		Script []*Common `xml:"script"`
-	} `xml:"scripts"`
-	Territories *struct {
-		Common
-		Territory []*Common `xml:"territory"`
-	} `xml:"territories"`
-	Subdivisions *struct {
-		Common
-		Subdivision []*Common `xml:"subdivision"`
-	} `xml:"subdivisions"`
-	Variants *struct {
-		Common
-		Variant []*Common `xml:"variant"`
-	} `xml:"variants"`
-	Keys *struct {
-		Common
-		Key []*Common `xml:"key"`
-	} `xml:"keys"`
-	Types *struct {
-		Common
-		Type []*struct {
-			Common
-			Key string `xml:"key,attr"`
-		} `xml:"type"`
-	} `xml:"types"`
-	TransformNames *struct {
-		Common
-		TransformName []*Common `xml:"transformName"`
-	} `xml:"transformNames"`
-	MeasurementSystemNames *struct {
-		Common
-		MeasurementSystemName []*Common `xml:"measurementSystemName"`
-	} `xml:"measurementSystemNames"`
-	CodePatterns *struct {
-		Common
-		CodePattern []*Common `xml:"codePattern"`
-	} `xml:"codePatterns"`
-}
-
-// Numbers supplies information for formatting and parsing numbers and currencies.
-type Numbers struct {
-	Common
-	DefaultNumberingSystem []*Common `xml:"defaultNumberingSystem"`
-	OtherNumberingSystems  []*struct {
-		Common
-		Native      []*Common `xml:"native"`
-		Traditional []*Common `xml:"traditional"`
-		Finance     []*Common `xml:"finance"`
-	} `xml:"otherNumberingSystems"`
-	MinimumGroupingDigits []*Common `xml:"minimumGroupingDigits"`
-	Symbols               []*struct {
-		Common
-		NumberSystem string `xml:"numberSystem,attr"`
-		Decimal      []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"decimal"`
-		Group []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"group"`
-		List []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"list"`
-		PercentSign []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"percentSign"`
-		NativeZeroDigit []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"nativeZeroDigit"`
-		PatternDigit []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"patternDigit"`
-		PlusSign []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"plusSign"`
-		MinusSign []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"minusSign"`
-		Exponential []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"exponential"`
-		SuperscriptingExponent []*Common `xml:"superscriptingExponent"`
-		PerMille               []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"perMille"`
-		Infinity []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"infinity"`
-		Nan []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"nan"`
-		CurrencyDecimal []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"currencyDecimal"`
-		CurrencyGroup []*struct {
-			Common
-			NumberSystem string `xml:"numberSystem,attr"`
-		} `xml:"currencyGroup"`
-		TimeSeparator []*Common `xml:"timeSeparator"`
-	} `xml:"symbols"`
-	DecimalFormats []*struct {
-		Common
-		NumberSystem        string `xml:"numberSystem,attr"`
-		DecimalFormatLength []*struct {
-			Common
-			DecimalFormat []*struct {
-				Common
-				Pattern []*struct {
-					Common
-					Numbers string `xml:"numbers,attr"`
-					Count   string `xml:"count,attr"`
-				} `xml:"pattern"`
-			} `xml:"decimalFormat"`
-		} `xml:"decimalFormatLength"`
-	} `xml:"decimalFormats"`
-	ScientificFormats []*struct {
-		Common
-		NumberSystem           string `xml:"numberSystem,attr"`
-		ScientificFormatLength []*struct {
-			Common
-			ScientificFormat []*struct {
-				Common
-				Pattern []*struct {
-					Common
-					Numbers string `xml:"numbers,attr"`
-					Count   string `xml:"count,attr"`
-				} `xml:"pattern"`
-			} `xml:"scientificFormat"`
-		} `xml:"scientificFormatLength"`
-	} `xml:"scientificFormats"`
-	PercentFormats []*struct {
-		Common
-		NumberSystem        string `xml:"numberSystem,attr"`
-		PercentFormatLength []*struct {
-			Common
-			PercentFormat []*struct {
-				Common
-				Pattern []*struct {
-					Common
-					Numbers string `xml:"numbers,attr"`
-					Count   string `xml:"count,attr"`
-				} `xml:"pattern"`
-			} `xml:"percentFormat"`
-		} `xml:"percentFormatLength"`
-	} `xml:"percentFormats"`
-	CurrencyFormats []*struct {
-		Common
-		NumberSystem    string `xml:"numberSystem,attr"`
-		CurrencySpacing []*struct {
-			Common
-			BeforeCurrency []*struct {
-				Common
-				CurrencyMatch    []*Common `xml:"currencyMatch"`
-				SurroundingMatch []*Common `xml:"surroundingMatch"`
-				InsertBetween    []*Common `xml:"insertBetween"`
-			} `xml:"beforeCurrency"`
-			AfterCurrency []*struct {
-				Common
-				CurrencyMatch    []*Common `xml:"currencyMatch"`
-				SurroundingMatch []*Common `xml:"surroundingMatch"`
-				InsertBetween    []*Common `xml:"insertBetween"`
-			} `xml:"afterCurrency"`
-		} `xml:"currencySpacing"`
-		CurrencyFormatLength []*struct {
-			Common
-			CurrencyFormat []*struct {
-				Common
-				Pattern []*struct {
-					Common
-					Numbers string `xml:"numbers,attr"`
-					Count   string `xml:"count,attr"`
-				} `xml:"pattern"`
-			} `xml:"currencyFormat"`
-		} `xml:"currencyFormatLength"`
-		UnitPattern []*struct {
-			Common
-			Count string `xml:"count,attr"`
-		} `xml:"unitPattern"`
-	} `xml:"currencyFormats"`
-	Currencies *struct {
-		Common
-		Currency []*struct {
-			Common
-			Pattern []*struct {
-				Common
-				Numbers string `xml:"numbers,attr"`
-				Count   string `xml:"count,attr"`
-			} `xml:"pattern"`
-			DisplayName []*struct {
-				Common
-				Count string `xml:"count,attr"`
-			} `xml:"displayName"`
-			Symbol  []*Common `xml:"symbol"`
-			Decimal []*struct {
-				Common
-				NumberSystem string `xml:"numberSystem,attr"`
-			} `xml:"decimal"`
-			Group []*struct {
-				Common
-				NumberSystem string `xml:"numberSystem,attr"`
-			} `xml:"group"`
-		} `xml:"currency"`
-	} `xml:"currencies"`
-	MiscPatterns []*struct {
-		Common
-		NumberSystem string `xml:"numberSystem,attr"`
-		Pattern      []*struct {
-			Common
-			Numbers string `xml:"numbers,attr"`
-			Count   string `xml:"count,attr"`
-		} `xml:"pattern"`
-	} `xml:"miscPatterns"`
-	MinimalPairs []*struct {
-		Common
-		PluralMinimalPairs []*struct {
-			Common
-			Count string `xml:"count,attr"`
-		} `xml:"pluralMinimalPairs"`
-		OrdinalMinimalPairs []*struct {
-			Common
-			Ordinal string `xml:"ordinal,attr"`
-		} `xml:"ordinalMinimalPairs"`
-	} `xml:"minimalPairs"`
-}
-
-// Version is the version of CLDR from which the XML definitions are generated.
-const Version = "32"

+ 127 - 0
vendor/google.golang.org/appengine/internal/datastore/datastore_v3.proto

@@ -0,0 +1,127 @@
+# github.com/BurntSushi/toml v0.3.1
+github.com/BurntSushi/toml
+# github.com/emirpasic/gods v1.12.0
+github.com/emirpasic/gods/trees/binaryheap
+github.com/emirpasic/gods/containers
+github.com/emirpasic/gods/lists/arraylist
+github.com/emirpasic/gods/trees
+github.com/emirpasic/gods/utils
+github.com/emirpasic/gods/lists
+# github.com/golang/protobuf v1.2.0
+github.com/golang/protobuf/proto
+# github.com/google/go-github v15.0.0+incompatible
+github.com/google/go-github/github
+# github.com/google/go-querystring v1.0.0
+github.com/google/go-querystring/query
+# github.com/hako/durafmt v0.0.0-20180520121703-7b7ae1e72ead
+github.com/hako/durafmt
+# github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99
+github.com/jbenet/go-context/io
+# github.com/jessevdk/go-flags v1.4.0
+github.com/jessevdk/go-flags
+# github.com/kevinburke/ssh_config v0.0.0-20180830205328-81db2a75821e
+github.com/kevinburke/ssh_config
+# github.com/mitchellh/go-homedir v1.0.0
+github.com/mitchellh/go-homedir
+# github.com/pelletier/go-buffruneio v0.2.0
+github.com/pelletier/go-buffruneio
+# github.com/sergi/go-diff v1.0.0
+github.com/sergi/go-diff/diffmatchpatch
+# github.com/sirupsen/logrus v1.0.6
+github.com/sirupsen/logrus
+# github.com/src-d/gcfg v1.4.0
+github.com/src-d/gcfg
+github.com/src-d/gcfg/scanner
+github.com/src-d/gcfg/token
+github.com/src-d/gcfg/types
+# github.com/xanzy/go-gitlab v0.11.3
+github.com/xanzy/go-gitlab
+# github.com/xanzy/ssh-agent v0.2.0
+github.com/xanzy/ssh-agent
+# golang.org/x/crypto v0.0.0-20180910181607-0e37d006457b
+golang.org/x/crypto/ssh/terminal
+golang.org/x/crypto/openpgp
+golang.org/x/crypto/ssh
+golang.org/x/crypto/ssh/knownhosts
+golang.org/x/crypto/openpgp/armor
+golang.org/x/crypto/openpgp/errors
+golang.org/x/crypto/openpgp/packet
+golang.org/x/crypto/openpgp/s2k
+golang.org/x/crypto/ssh/agent
+golang.org/x/crypto/curve25519
+golang.org/x/crypto/ed25519
+golang.org/x/crypto/internal/chacha20
+golang.org/x/crypto/poly1305
+golang.org/x/crypto/cast5
+golang.org/x/crypto/openpgp/elgamal
+golang.org/x/crypto/ed25519/internal/edwards25519
+golang.org/x/crypto/internal/subtle
+# golang.org/x/net v0.0.0-20180925072008-f04abc6bdfa7
+golang.org/x/net/context
+golang.org/x/net/context/ctxhttp
+# golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be
+golang.org/x/oauth2
+golang.org/x/oauth2/internal
+# golang.org/x/sys v0.0.0-20180925112736-b09afc3d579e
+golang.org/x/sys/unix
+golang.org/x/sys/windows
+# golang.org/x/text v0.3.0
+golang.org/x/text/unicode/norm
+golang.org/x/text/transform
+# google.golang.org/appengine v1.2.0
+google.golang.org/appengine/urlfetch
+google.golang.org/appengine/internal
+google.golang.org/appengine/internal/urlfetch
+google.golang.org/appengine/internal/base
+google.golang.org/appengine/internal/datastore
+google.golang.org/appengine/internal/log
+google.golang.org/appengine/internal/remote_api
+# gopkg.in/src-d/go-billy.v4 v4.3.0
+gopkg.in/src-d/go-billy.v4
+gopkg.in/src-d/go-billy.v4/osfs
+gopkg.in/src-d/go-billy.v4/util
+gopkg.in/src-d/go-billy.v4/helper/chroot
+gopkg.in/src-d/go-billy.v4/helper/polyfill
+# gopkg.in/src-d/go-git.v4 v4.9.1
+gopkg.in/src-d/go-git.v4
+gopkg.in/src-d/go-git.v4/plumbing
+gopkg.in/src-d/go-git.v4/plumbing/format/diff
+gopkg.in/src-d/go-git.v4/plumbing/object
+gopkg.in/src-d/go-git.v4/plumbing/storer
+gopkg.in/src-d/go-git.v4/plumbing/transport/http
+gopkg.in/src-d/go-git.v4/plumbing/transport/ssh
+gopkg.in/src-d/go-git.v4/storage/memory
+gopkg.in/src-d/go-git.v4/config
+gopkg.in/src-d/go-git.v4/internal/revision
+gopkg.in/src-d/go-git.v4/plumbing/cache
+gopkg.in/src-d/go-git.v4/plumbing/filemode
+gopkg.in/src-d/go-git.v4/plumbing/format/gitignore
+gopkg.in/src-d/go-git.v4/plumbing/format/index
+gopkg.in/src-d/go-git.v4/plumbing/format/packfile
+gopkg.in/src-d/go-git.v4/plumbing/protocol/packp
+gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/capability
+gopkg.in/src-d/go-git.v4/plumbing/protocol/packp/sideband
+gopkg.in/src-d/go-git.v4/plumbing/revlist
+gopkg.in/src-d/go-git.v4/plumbing/transport
+gopkg.in/src-d/go-git.v4/plumbing/transport/client
+gopkg.in/src-d/go-git.v4/storage
+gopkg.in/src-d/go-git.v4/storage/filesystem
+gopkg.in/src-d/go-git.v4/utils/diff
+gopkg.in/src-d/go-git.v4/utils/ioutil
+gopkg.in/src-d/go-git.v4/utils/merkletrie
+gopkg.in/src-d/go-git.v4/utils/merkletrie/filesystem
+gopkg.in/src-d/go-git.v4/utils/merkletrie/index
+gopkg.in/src-d/go-git.v4/utils/merkletrie/noder
+gopkg.in/src-d/go-git.v4/utils/binary
+gopkg.in/src-d/go-git.v4/plumbing/format/pktline
+gopkg.in/src-d/go-git.v4/plumbing/transport/internal/common
+gopkg.in/src-d/go-git.v4/plumbing/format/config
+gopkg.in/src-d/go-git.v4/plumbing/format/idxfile
+gopkg.in/src-d/go-git.v4/plumbing/transport/file
+gopkg.in/src-d/go-git.v4/plumbing/transport/git
+gopkg.in/src-d/go-git.v4/plumbing/format/objfile
+gopkg.in/src-d/go-git.v4/storage/filesystem/dotgit
+gopkg.in/src-d/go-git.v4/utils/merkletrie/internal/frame
+gopkg.in/src-d/go-git.v4/plumbing/transport/server
+# gopkg.in/warnings.v0 v0.1.2
+gopkg.in/warnings.v0