diff --git a/Makefile b/Makefile
index 24086ef65..b7eaf36d5 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,5 @@
 # Go parameters
-GOCMD=go
+GOCMD?=go
 GOENV=GO111MODULE=on GOOS=linux GOARCH=amd64 CGO_ENABLED=0
 GOBUILD=$(GOCMD) build -mod=vendor
 GOTEST=$(GOCMD) test -mod=vendor
@@ -17,13 +17,17 @@ build:
 	$(GOENV) $(GOBUILD) -o ./out/$(ARCHIVE_NAME)/cacher -a ./cmd/cacher
 	$(GOENV) $(GOBUILD) -o ./out/$(ARCHIVE_NAME)/launcher -a ./cmd/launcher
 
+imports:
+	$(GOCMD) install -mod=vendor golang.org/x/tools/cmd/goimports
+	test -z $$(goimports -l -w -local github.com/buildpack/lifecycle $$(find . -type f -name '*.go' -not -path "./vendor/*"))
+
 format:
 	test -z $$($(GOCMD) fmt ./...)
 
 vet:
 	$(GOCMD) vet $$($(GOCMD) list ./... | grep -v /testdata/)
 
-test: format vet
+test: format imports vet
 	$(GOTEST) -v ./...
 
 clean:
diff --git a/README.md b/README.md
index 3e71b5841..8e2c605cc 100644
--- a/README.md
+++ b/README.md
@@ -23,12 +23,12 @@ A reference implementation of [Buildpack API v3](https://github.com/buildpack/sp
 
 ### Cache
 
-* `retriever` - restores cache
+* `restorer` - restores cache
 * `cacher` - updates cache
 
 ## Notes
 
-Cache implementations (`retriever` and `cacher`) are intended to be interchangable and platform-specific.
+Cache implementations (`restorer` and `cacher`) are intended to be interchangeable and platform-specific.
 A platform may choose not to deduplicate cache layers.
 
 ## Development
diff --git a/analyzer.go b/analyzer.go
index 4c23d9746..9a92906df 100644
--- a/analyzer.go
+++ b/analyzer.go
@@ -11,74 +11,101 @@ import (
 )
 
 type Analyzer struct {
-	Buildpacks []*Buildpack
-	AppDir     string
-	LayersDir  string
-	In         []byte
-	Out, Err   *log.Logger
-	UID        int
-	GID        int
+	AnalyzedPath string
+	AppDir       string
+	Buildpacks   []*Buildpack
+	GID, UID     int
+	LayersDir    string
+	Out, Err     *log.Logger
+	SkipLayers   bool
 }
 
-func (a *Analyzer) Analyze(image imgutil.Image) error {
+func (a *Analyzer) Analyze(image imgutil.Image) (metadata.AnalyzedMetadata, error) {
+	imageID, err := a.getImageIdentifier(image)
+	if err != nil {
+		return metadata.AnalyzedMetadata{}, errors.Wrap(err, "retrieve image identifier")
+	}
+
 	data, err := metadata.GetAppMetadata(image)
 	if err != nil {
-		return err
+		return metadata.AnalyzedMetadata{}, err
 	}
 
-	for _, buildpack := range a.Buildpacks {
-		cache, err := readBuildpackLayersDir(a.LayersDir, *buildpack)
-		if err != nil {
-			return err
-		}
-		metadataLayers := data.MetadataForBuildpack(buildpack.ID).Layers
-		for _, cachedLayer := range cache.layers {
-			cacheType := cachedLayer.classifyCache(metadataLayers)
-			switch cacheType {
-			case cacheStaleNoMetadata:
-				a.Out.Printf("removing stale cached launch layer '%s', not in metadata \n", cachedLayer.Identifier())
-				if err := cachedLayer.remove(); err != nil {
-					return err
-				}
-			case cacheStaleWrongSHA:
-				a.Out.Printf("removing stale cached launch layer '%s'", cachedLayer.Identifier())
-				if err := cachedLayer.remove(); err != nil {
-					return err
-				}
-			case cacheMalformed:
-				a.Out.Printf("removing malformed cached layer '%s'", cachedLayer.Identifier())
-				if err := cachedLayer.remove(); err != nil {
-					return err
-				}
-			case cacheNotForLaunch:
-				a.Out.Printf("using cached layer '%s'", cachedLayer.Identifier())
-			case cacheValid:
-				a.Out.Printf("using cached launch layer '%s'", cachedLayer.Identifier())
-				a.Out.Printf("rewriting metadata for layer '%s'", cachedLayer.Identifier())
-				if err := cachedLayer.writeMetadata(metadataLayers); err != nil {
-					return err
+	if !a.SkipLayers {
+		for _, buildpack := range a.Buildpacks {
+			bpLayersDir, err := readBuildpackLayersDir(a.LayersDir, *buildpack)
+			if err != nil {
+				return metadata.AnalyzedMetadata{}, err
+			}
+
+			metadataLayers := data.MetadataForBuildpack(buildpack.ID).Layers
+			for _, cachedLayer := range bpLayersDir.layers {
+				cacheType := cachedLayer.classifyCache(metadataLayers)
+				switch cacheType {
+				case cacheStaleNoMetadata:
+					a.Out.Printf("Removing stale cached launch layer '%s', not in metadata \n", cachedLayer.Identifier())
+					if err := cachedLayer.remove(); err != nil {
+						return metadata.AnalyzedMetadata{}, err
+					}
+				case cacheStaleWrongSHA:
+					a.Out.Printf("Removing stale cached launch layer '%s'", cachedLayer.Identifier())
+					if err := cachedLayer.remove(); err != nil {
+						return metadata.AnalyzedMetadata{}, err
+					}
+				case cacheMalformed:
+					a.Out.Printf("Removing malformed cached layer '%s'", cachedLayer.Identifier())
+					if err := cachedLayer.remove(); err != nil {
+						return metadata.AnalyzedMetadata{}, err
+					}
+				case cacheNotForLaunch:
+					a.Out.Printf("Using cached layer '%s'", cachedLayer.Identifier())
+				case cacheValid:
+					a.Out.Printf("Using cached launch layer '%s'", cachedLayer.Identifier())
+					a.Out.Printf("Rewriting metadata for layer '%s'", cachedLayer.Identifier())
+					if err := cachedLayer.writeMetadata(metadataLayers); err != nil {
+						return metadata.AnalyzedMetadata{}, err
+					}
 				}
 			}
-		}
-		for lmd, data := range metadataLayers {
-			if !data.Build && !data.Cache {
-				layer := cache.newBPLayer(lmd)
-				a.Out.Printf("writing metadata for uncached layer '%s'", layer.Identifier())
-				if err := layer.writeMetadata(metadataLayers); err != nil {
-					return err
+			for lmd, data := range metadataLayers {
+				if !data.Build && !data.Cache {
+					layer := bpLayersDir.newBPLayer(lmd)
+					a.Out.Printf("Writing metadata for uncached layer '%s'", layer.Identifier())
+					if err := layer.writeMetadata(metadataLayers); err != nil {
+						return metadata.AnalyzedMetadata{}, err
+					}
 				}
 			}
 		}
+	} else {
+		a.Out.Printf("Skipping buildpack layer analysis")
 	}
 
 	// if analyzer is running as root it needs to fix the ownership of the layers dir
-	if current := os.Getuid(); err != nil {
-		return err
-	} else if current == 0 {
+	if current := os.Getuid(); current == 0 {
 		if err := recursiveChown(a.LayersDir, a.UID, a.GID); err != nil {
-			return errors.Wrapf(err, "chowning layers dir to '%d/%d'", a.UID, a.GID)
+			return metadata.AnalyzedMetadata{}, errors.Wrapf(err, "chowning layers dir to '%d/%d'", a.UID, a.GID)
 		}
 	}
-	return nil
+
+	return metadata.AnalyzedMetadata{
+		Image:    imageID,
+		Metadata: data,
+	}, nil
+}
+
+func (a *Analyzer) getImageIdentifier(image imgutil.Image) (*metadata.ImageIdentifier, error) {
+	if !image.Found() {
+		a.Out.Printf("Image '%s' not found", image.Name())
+		return nil, nil
+	}
+	identifier, err := image.Identifier()
+	if err != nil {
+		return nil, err
+	}
+	a.Out.Printf("Analyzing image '%s'", identifier.String())
+	return &metadata.ImageIdentifier{
+		Reference: identifier.String(),
+	}, nil
 }
diff --git a/analyzer_test.go b/analyzer_test.go
index beca917f8..b1ac02bf9 100644
--- a/analyzer_test.go
+++ b/analyzer_test.go
@@ -2,6 +2,7 @@ package lifecycle_test
 
 import (
 	"bytes"
+	"encoding/json"
 	"io/ioutil"
 	"log"
 	"os"
@@ -10,11 +11,13 @@ import (
 	"testing"
 
 	"github.com/buildpack/imgutil/fakes"
+	"github.com/buildpack/imgutil/local"
 	"github.com/golang/mock/gomock"
 	"github.com/sclevine/spec"
 	"github.com/sclevine/spec/report"
 
 	"github.com/buildpack/lifecycle"
+	"github.com/buildpack/lifecycle/metadata"
 	h "github.com/buildpack/lifecycle/testhelpers"
 	"github.com/buildpack/lifecycle/testmock"
 )
@@ -33,43 +36,50
@@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { stdout, stderr *bytes.Buffer layerDir string appDir string + tmpDir string ) it.Before(func() { var err error + + tmpDir, err = ioutil.TempDir("", "analyzer-tests") + h.AssertNil(t, err) + layerDir, err = ioutil.TempDir("", "lifecycle-layer-dir") - if err != nil { - t.Fatalf("Error: %s\n", err) - } - appDir = filepath.Join(layerDir, "some-app-dir") + h.AssertNil(t, err) + appDir = filepath.Join(layerDir, "some-app-dir") stdout, stderr = &bytes.Buffer{}, &bytes.Buffer{} analyzer = &lifecycle.Analyzer{ - Buildpacks: []*lifecycle.Buildpack{{ID: "metdata.buildpack"}, {ID: "no.cache.buildpack"}, {ID: "no.metadata.buildpack"}}, - AppDir: appDir, - LayersDir: layerDir, - Out: log.New(stdout, "", 0), - Err: log.New(stderr, "", 0), - UID: 1234, - GID: 4321, + Buildpacks: []*lifecycle.Buildpack{{ID: "metdata.buildpack"}, {ID: "no.cache.buildpack"}, {ID: "no.metadata.buildpack"}}, + AppDir: appDir, + LayersDir: layerDir, + AnalyzedPath: filepath.Join(tmpDir, "some-previous-file.toml"), + Out: log.New(stdout, "", 0), + Err: log.New(stderr, "", 0), + UID: 1234, + GID: 4321, } mockCtrl = gomock.NewController(t) }) it.After(func() { + os.RemoveAll(tmpDir) os.RemoveAll(layerDir) mockCtrl.Finish() }) - when("Analyze", func() { + when("#Analyze", func() { var ( - image *fakes.Image - ref *testmock.MockReference + image *fakes.Image + appImageMetadata metadata.AppImageMetadata + ref *testmock.MockReference ) it.Before(func() { - image = fakes.NewImage("image-repo-name", "", "") - + image = fakes.NewImage("image-repo-name", "", local.IDIdentifier{ + ImageID: "s0m3D1g3sT", + }) ref = testmock.NewMockReference(mockCtrl) ref.EXPECT().Name().AnyTimes() }) @@ -81,7 +91,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { when("image exists", func() { when("image label has compatible metadata", func() { it.Before(func() { - image.SetLabel("io.buildpacks.lifecycle.metadata", `{ + metadataLabel := `{ "buildpacks": [ { "key": "metdata.buildpack", @@ -139,11 +149,17 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { } } ] -}`) +}` + h.AssertNil(t, + image.SetLabel( + "io.buildpacks.lifecycle.metadata", + metadataLabel, + )) + h.AssertNil(t, json.Unmarshal([]byte(metadataLabel), &appImageMetadata)) }) it("should use labels to populate the layer dir", func() { - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -167,7 +183,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { it("should only write layer TOML files that correspond to detected buildpacks", func() { analyzer.Buildpacks = []*lifecycle.Buildpack{{ID: "no.cache.buildpack"}} - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -183,9 +199,17 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { } }) + it("should return the analyzed metadata", func() { + md, err := analyzer.Analyze(image) + h.AssertNil(t, err) + + h.AssertEq(t, md.Image.Reference, "s0m3D1g3sT") + h.AssertEq(t, md.Metadata, appImageMetadata) + }) + when("there is a launch/build layer that isn't cached", func() { it("should not restore the metadata", func() { - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } if _, err := ioutil.ReadFile(filepath.Join(layerDir, "metdata.buildpack/stale-launch-build.toml")); !os.IsNotExist(err) { @@ -196,7 +220,7 @@ func 
testAnalyzer(t *testing.T, when spec.G, it spec.S) { when("there is a cache=true layer in the metadata but not in the cache", func() { it("should not restore the metadata", func() { - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } if _, err := ioutil.ReadFile(filepath.Join(layerDir, "metdata.buildpack", "launch-cache.toml")); !os.IsNotExist(err) { @@ -210,7 +234,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -227,7 +251,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -251,7 +275,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -275,7 +299,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -292,7 +316,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -318,7 +342,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -340,7 +364,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -364,7 +388,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -388,7 +412,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -404,7 +428,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := 
analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -422,7 +446,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -441,7 +465,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { // copy to layerDir h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - if err := analyzer.Analyze(image); err != nil { + if _, err := analyzer.Analyze(image); err != nil { t.Fatalf("Error: %s\n", err) } @@ -459,29 +483,57 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { when("analyzer is running as root", func() { it.Before(func() { if os.Getuid() != 0 { - t.Skip() + t.Skip("Skipped when not running as root") } }) it("chowns new files to CNB_USER_ID:CNB_GROUP_ID", func() { - h.AssertNil(t, analyzer.Analyze(image)) + _, err := analyzer.Analyze(image) + h.AssertNil(t, err) h.AssertUidGid(t, layerDir, 1234, 4321) h.AssertUidGid(t, filepath.Join(layerDir, "metdata.buildpack", "valid-launch.toml"), 1234, 4321) h.AssertUidGid(t, filepath.Join(layerDir, "no.cache.buildpack"), 1234, 4321) h.AssertUidGid(t, filepath.Join(layerDir, "no.cache.buildpack", "go.toml"), 1234, 4321) }) }) + + when("skip-layers is true", func() { + it.Before(func() { + analyzer.SkipLayers = true + }) + + it("should return the analyzed metadata", func() { + md, err := analyzer.Analyze(image) + h.AssertNil(t, err) + + h.AssertEq(t, md.Image.Reference, "s0m3D1g3sT") + h.AssertEq(t, md.Metadata, appImageMetadata) + }) + + it("does not write buildpack layers", func() { + _, err := analyzer.Analyze(image) + h.AssertNil(t, err) + + fileInfos, err := ioutil.ReadDir(layerDir) + h.AssertNil(t, err) + + h.AssertEq(t, len(fileInfos), 0) + }) + }) }) }) - when("the image cannot found", func() { + when("the image cannot be found", func() { + var notFoundImage *fakes.Image + it.Before(func() { - h.AssertNil(t, image.Delete()) + notFoundImage = fakes.NewImage("image-repo-name", "", nil) + h.AssertNil(t, notFoundImage.Delete()) }) it("clears the cached launch layers", func() { h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - err := analyzer.Analyze(image) + _, err := analyzer.Analyze(notFoundImage) assertNil(t, err) if _, err := ioutil.ReadDir(filepath.Join(layerDir, "no.metadata.buildpack", "launchlayer")); !os.IsNotExist(err) { @@ -494,6 +546,14 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { t.Fatalf("Missing some-app-dir") } }) + + it("should return a nil image in the analyzed metadata", func() { + md, err := analyzer.Analyze(notFoundImage) + h.AssertNil(t, err) + + h.AssertNil(t, md.Image) + h.AssertEq(t, md.Metadata, metadata.AppImageMetadata{}) + }) }) when("the image does not have the required label", func() { @@ -504,7 +564,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { it("returns", func() { h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - err := analyzer.Analyze(image) + _, err := analyzer.Analyze(image) assertNil(t, err) if _, err := ioutil.ReadDir(filepath.Join(layerDir, "no.metadata.buildpack", "launchlayer")); !os.IsNotExist(err) { @@ -527,7 +587,7 @@ func testAnalyzer(t *testing.T, when spec.G, it spec.S) { it("returns", func() { h.RecursiveCopy(t, filepath.Join("testdata", "analyzer", "cached-layers"), layerDir) - err 
:= analyzer.Analyze(image) + _, err := analyzer.Analyze(image) assertNil(t, err) if _, err := ioutil.ReadDir(filepath.Join(layerDir, "no.metadata.buildpack", "launchlayer")); !os.IsNotExist(err) { diff --git a/cache/image_cache.go b/cache/image_cache.go index 37129b5ed..a1f4cfdda 100644 --- a/cache/image_cache.go +++ b/cache/image_cache.go @@ -75,8 +75,7 @@ func (c *ImageCache) Commit() error { } c.committed = true - _, err := c.newImage.Save() - if err != nil { + if err := c.newImage.Save(); err != nil { return errors.Wrapf(err, "saving image '%s'", c.newImage.Name()) } diff --git a/cache/image_cache_test.go b/cache/image_cache_test.go index 97b4e4376..3d4a0b298 100644 --- a/cache/image_cache_test.go +++ b/cache/image_cache_test.go @@ -39,8 +39,8 @@ func testImageCache(t *testing.T, when spec.G, it spec.S) { tmpDir, err = ioutil.TempDir("", "") h.AssertNil(t, err) - fakeOriginalImage = fakes.NewImage("fake-image", "", "") - fakeNewImage = fakes.NewImage("fake-image", "", "") + fakeOriginalImage = fakes.NewImage("fake-image", "", nil) + fakeNewImage = fakes.NewImage("fake-image", "", nil) subject = cache.NewImageCache(fakeOriginalImage, fakeNewImage) @@ -77,11 +77,13 @@ func testImageCache(t *testing.T, when spec.G, it spec.S) { Version: "1.2.3", Layers: map[string]metadata.LayerMetadata{ "some-layer": { - SHA: "some-sha", - Data: "some-data", - Build: true, - Launch: false, - Cache: true, + SHA: "some-sha", + LayerMetadataFile: metadata.LayerMetadataFile{ + Data: "some-data", + Build: true, + Launch: false, + Cache: true, + }, }, }, }}, diff --git a/cache/volume_cache_test.go b/cache/volume_cache_test.go index 2a8ccbe6f..33539cf74 100644 --- a/cache/volume_cache_test.go +++ b/cache/volume_cache_test.go @@ -151,11 +151,13 @@ func testVolumeCache(t *testing.T, when spec.G, it spec.S) { Version: "1.2.3", Layers: map[string]metadata.LayerMetadata{ "some-layer": { - SHA: "some-sha", - Data: "some-data", - Build: true, - Launch: false, - Cache: true, + SHA: "some-sha", + LayerMetadataFile: metadata.LayerMetadataFile{ + Data: "some-data", + Build: true, + Launch: false, + Cache: true, + }, }, }, }}, diff --git a/caching_image.go b/caching_image.go index 5e0e79551..038d2ad56 100644 --- a/caching_image.go +++ b/caching_image.go @@ -80,15 +80,32 @@ func (c *cachingImage) GetLayer(sha string) (io.ReadCloser, error) { } } -func (c *cachingImage) Save() (string, error) { - sha, err := c.image.Save() - if err != nil { - return "", err +func (c *cachingImage) Save(additionalNames ...string) error { + err := c.image.Save(additionalNames...) 
+ + if saveSucceededFor(c.Name(), err) { + if err := c.cache.Commit(); err != nil { + return errors.Wrap(err, "failed to commit cache") + } } - if err := c.cache.Commit(); err != nil { - return "", err + + return err +} + +func saveSucceededFor(imageName string, err error) bool { + if err == nil { + return true + } + + if saveErr, isSaveErr := err.(imgutil.SaveError); isSaveErr { + for _, d := range saveErr.Errors { + if d.ImageName == imageName { + return false + } + } + return true } - return sha, nil + return false } // delegates to image @@ -101,8 +118,8 @@ func (c *cachingImage) Rename(name string) { c.image.Rename(name) } -func (c *cachingImage) Digest() (string, error) { - return c.image.Digest() +func (c *cachingImage) Identifier() (imgutil.Identifier, error) { + return c.image.Identifier() } func (c *cachingImage) Label(label string) (string, error) { diff --git a/caching_image_test.go b/caching_image_test.go index 893dfab65..557009461 100644 --- a/caching_image_test.go +++ b/caching_image_test.go @@ -37,7 +37,7 @@ func testCachingImage(t *testing.T, when spec.G, it spec.S) { it.Before(func() { var err error - fakeImage = fakes.NewImage("some-image", "", "") + fakeImage = fakes.NewImage("some-image", "", nil) tmpDir, err = ioutil.TempDir("", "") h.AssertNil(t, err) volumeCache, err = cache.NewVolumeCache(tmpDir) @@ -54,10 +54,9 @@ func testCachingImage(t *testing.T, when spec.G, it spec.S) { it("adds the layer to the cache and the image", func() { h.AssertNil(t, subject.AddLayer(layerPath)) - _, err := subject.Save() - h.AssertNil(t, err) + h.AssertNil(t, subject.Save()) - _, err = fakeImage.GetLayer(layerSHA) + _, err := fakeImage.GetLayer(layerSHA) h.AssertNil(t, err) _, err = volumeCache.RetrieveLayer(layerSHA) @@ -81,20 +80,18 @@ func testCachingImage(t *testing.T, when spec.G, it spec.S) { it("adds the layer from the cache to the image", func() { h.AssertNil(t, subject.ReuseLayer(layerSHA)) - _, err := subject.Save() - h.AssertNil(t, err) + h.AssertNil(t, subject.Save()) - _, err = fakeImage.GetLayer(layerSHA) + _, err := fakeImage.GetLayer(layerSHA) h.AssertNil(t, err) }) it("keeps the layer in the cache", func() { h.AssertNil(t, subject.ReuseLayer(layerSHA)) - _, err := subject.Save() - h.AssertNil(t, err) + h.AssertNil(t, subject.Save()) - _, err = volumeCache.RetrieveLayerFile(layerSHA) + _, err := volumeCache.RetrieveLayerFile(layerSHA) h.AssertNil(t, err) }) }) @@ -107,8 +104,7 @@ func testCachingImage(t *testing.T, when spec.G, it spec.S) { it("reuses the layer from the image", func() { h.AssertNil(t, subject.ReuseLayer(layerSHA)) - _, err := subject.Save() - h.AssertNil(t, err) + h.AssertNil(t, subject.Save()) for _, reusedSHA := range fakeImage.ReusedLayers() { if reusedSHA == layerSHA { @@ -121,10 +117,9 @@ func testCachingImage(t *testing.T, when spec.G, it spec.S) { it("adds the layer to the cache", func() { h.AssertNil(t, subject.ReuseLayer(layerSHA)) - _, err := subject.Save() - h.AssertNil(t, err) + h.AssertNil(t, subject.Save()) - _, err = volumeCache.RetrieveLayer(layerSHA) + _, err := volumeCache.RetrieveLayer(layerSHA) h.AssertNil(t, err) }) }) @@ -150,8 +145,8 @@ func testCachingImage(t *testing.T, when spec.G, it spec.S) { when("the layer does not exist in the cache", func() { it.Before(func() { h.AssertNil(t, fakeImage.AddLayer(layerPath)) - _, err := fakeImage.Save() - h.AssertNil(t, err) + + h.AssertNil(t, subject.Save()) }) it("gets it from the image", func() { diff --git a/cmd/analyzer/main.go b/cmd/analyzer/main.go index 89a0b3f4b..4ff6a0714 100644 
--- a/cmd/analyzer/main.go +++ b/cmd/analyzer/main.go @@ -1,7 +1,6 @@ package main import ( - "errors" "flag" "fmt" "io/ioutil" @@ -11,6 +10,9 @@ import ( "github.com/BurntSushi/toml" "github.com/buildpack/imgutil" + "github.com/buildpack/imgutil/local" + "github.com/buildpack/imgutil/remote" + "github.com/pkg/errors" "github.com/buildpack/lifecycle" "github.com/buildpack/lifecycle/cmd" @@ -19,24 +21,28 @@ import ( ) var ( - repoName string - layersDir string - appDir string - groupPath string - useDaemon bool - useHelpers bool - uid int - gid int + analyzedPath string + appDir string + gid int + groupPath string + layersDir string + repoName string + skipLayers bool + uid int + useDaemon bool + useHelpers bool ) func init() { - cmd.FlagLayersDir(&layersDir) + cmd.FlagAnalyzedPath(&analyzedPath) cmd.FlagAppDir(&appDir) + cmd.FlagGID(&gid) cmd.FlagGroupPath(&groupPath) + cmd.FlagLayersDir(&layersDir) + cmd.FlagUID(&uid) cmd.FlagUseDaemon(&useDaemon) cmd.FlagUseCredHelpers(&useHelpers) - cmd.FlagUID(&uid) - cmd.FlagGID(&gid) + cmd.FlagSkipLayers(&skipLayers) } func main() { @@ -67,37 +73,52 @@ func analyzer() error { } analyzer := &lifecycle.Analyzer{ - Buildpacks: group.Buildpacks, - AppDir: appDir, - LayersDir: layersDir, - Out: log.New(os.Stdout, "", 0), - Err: log.New(os.Stderr, "", 0), - UID: uid, - GID: gid, + Buildpacks: group.Buildpacks, + AppDir: appDir, + LayersDir: layersDir, + AnalyzedPath: analyzedPath, + Out: log.New(os.Stdout, "", 0), + Err: log.New(os.Stderr, "", 0), + UID: uid, + GID: gid, + SkipLayers: skipLayers, } var err error - var previousImage imgutil.Image + var img imgutil.Image if useDaemon { dockerClient, err := docker.DefaultClient() if err != nil { return cmd.FailErr(err, "create docker client") } - previousImage, err = imgutil.NewLocalImage(repoName, dockerClient) + img, err = local.NewImage( + repoName, + dockerClient, + local.FromBaseImage(repoName), + ) if err != nil { return cmd.FailErr(err, "access previous image") } } else { - previousImage, err = imgutil.NewRemoteImage(repoName, auth.DefaultEnvKeychain()) + img, err = remote.NewImage( + repoName, + auth.DefaultEnvKeychain(), + remote.FromBaseImage(repoName), + ) if err != nil { - return err + return cmd.FailErr(err, "access previous image") } } - if err := analyzer.Analyze(previousImage); err != nil { + md, err := analyzer.Analyze(img) + if err != nil { return cmd.FailErrCode(err, cmd.CodeFailed, "analyze") } + if err := lifecycle.WriteTOML(analyzedPath, md); err != nil { + return errors.Wrap(err, "write analyzed.toml") + } + return nil } diff --git a/cmd/cacher/main.go b/cmd/cacher/main.go index 27c8b5375..6ce8c626f 100644 --- a/cmd/cacher/main.go +++ b/cmd/cacher/main.go @@ -8,7 +8,7 @@ import ( "os" "github.com/BurntSushi/toml" - "github.com/buildpack/imgutil" + "github.com/buildpack/imgutil/local" "github.com/buildpack/lifecycle" "github.com/buildpack/lifecycle/cache" @@ -76,14 +76,19 @@ func doCache() error { return cmd.FailErr(err, "create docker client") } - origCacheImage, err := imgutil.NewLocalImage(cacheImageTag, dockerClient) + origCacheImage, err := local.NewImage(cacheImageTag, dockerClient, local.FromBaseImage(cacheImageTag)) if err != nil { return cmd.FailErr(err, "access cache image") } + emptyImage, err := local.NewImage(cacheImageTag, dockerClient, local.WithPreviousImage(cacheImageTag)) + if err != nil { + return cmd.FailErr(err, "creating new cache image") + } + cacheStore = cache.NewImageCache( origCacheImage, - imgutil.EmptyLocalImage(origCacheImage.Name(), dockerClient), + 
emptyImage, ) } else { var err error diff --git a/cmd/cmd.go b/cmd/cmd.go index f50cb3af2..875d8a3bc 100644 --- a/cmd/cmd.go +++ b/cmd/cmd.go @@ -17,12 +17,14 @@ const ( DefaultOrderPath = "/buildpacks/order.toml" DefaultGroupPath = "./group.toml" DefaultStackPath = "/buildpacks/stack.toml" + DefaultAnalyzedPath = "./analyzed.toml" DefaultPlanPath = "./plan.toml" EnvLayersDir = "CNB_LAYERS_DIR" EnvAppDir = "CNB_APP_DIR" EnvBuildpacksDir = "CNB_BUILDPACKS_DIR" EnvPlatformDir = "CNB_PLATFORM_DIR" + EnvAnalyzedPath = "CNB_ANALYZED_PATH" EnvOrderPath = "CNB_ORDER_PATH" EnvGroupPath = "CNB_GROUP_PATH" EnvStackPath = "CNB_STACK_PATH" @@ -36,81 +38,92 @@ const ( EnvUID = "CNB_USER_ID" EnvGID = "CNB_GROUP_ID" EnvRegistryAuth = "CNB_REGISTRY_AUTH" + EnvSkipLayers = "CNB_ANALYZE_SKIP_LAYERS" // defaults to false ) -func FlagLayersDir(dir *string) { - flag.StringVar(dir, "layers", envWithDefault(EnvLayersDir, DefaultLayersDir), "path to layers directory") +func FlagAnalyzedPath(dir *string) { + flag.StringVar(dir, "analyzed", envOrDefault(EnvAnalyzedPath, DefaultAnalyzedPath), "path to analyzed.toml") } func FlagAppDir(dir *string) { - flag.StringVar(dir, "app", envWithDefault(EnvAppDir, DefaultAppDir), "path to app directory") + flag.StringVar(dir, "app", envOrDefault(EnvAppDir, DefaultAppDir), "path to app directory") } func FlagBuildpacksDir(dir *string) { - flag.StringVar(dir, "buildpacks", envWithDefault(EnvBuildpacksDir, DefaultBuildpacksDir), "path to buildpacks directory") + flag.StringVar(dir, "buildpacks", envOrDefault(EnvBuildpacksDir, DefaultBuildpacksDir), "path to buildpacks directory") } -func FlagPlatformDir(dir *string) { - flag.StringVar(dir, "platform", envWithDefault(EnvPlatformDir, DefaultPlatformDir), "path to platform directory") +func FlagCacheDir(dir *string) { + flag.StringVar(dir, "path", os.Getenv(EnvCacheDir), "path to cache directory") } -func FlagOrderPath(path *string) { - flag.StringVar(path, "order", envWithDefault(EnvOrderPath, DefaultOrderPath), "path to order.toml") +func FlagCacheImage(image *string) { + flag.StringVar(image, "image", os.Getenv(EnvCacheImage), "cache image tag name") } -func FlagGroupPath(path *string) { - flag.StringVar(path, "group", envWithDefault(EnvGroupPath, DefaultGroupPath), "path to group.toml") +func FlagGID(gid *int) { + flag.IntVar(gid, "gid", intEnv(EnvGID), "GID of user's group in the stack's build and run images") } -func FlagStackPath(path *string) { - flag.StringVar(path, "stack", envWithDefault(EnvStackPath, DefaultStackPath), "path to stack.toml") +func FlagGroupPath(path *string) { + flag.StringVar(path, "group", envOrDefault(EnvGroupPath, DefaultGroupPath), "path to group.toml") } func FlagLaunchCacheDir(dir *string) { flag.StringVar(dir, "launch-cache", os.Getenv(EnvLaunchCacheDir), "path to launch cache directory") } +func FlagLayersDir(dir *string) { + flag.StringVar(dir, "layers", envOrDefault(EnvLayersDir, DefaultLayersDir), "path to layers directory") +} + +func FlagOrderPath(path *string) { + flag.StringVar(path, "order", envOrDefault(EnvOrderPath, DefaultOrderPath), "path to order.toml") +} + func FlagPlanPath(path *string) { - flag.StringVar(path, "plan", envWithDefault(EnvPlanPath, DefaultPlanPath), "path to plan.toml") + flag.StringVar(path, "plan", envOrDefault(EnvPlanPath, DefaultPlanPath), "path to plan.toml") } -func FlagRunImage(image *string) { - flag.StringVar(image, "image", os.Getenv(EnvRunImage), "reference to run image") +func FlagPlatformDir(dir *string) { + flag.StringVar(dir, "platform", 
envOrDefault(EnvPlatformDir, DefaultPlatformDir), "path to platform directory") } -func FlagCacheImage(image *string) { - flag.StringVar(image, "image", os.Getenv(EnvCacheImage), "cache image tag name") +func FlagRunImage(image *string) { + flag.StringVar(image, "image", os.Getenv(EnvRunImage), "reference to run image") } -func FlagCacheDir(dir *string) { - flag.StringVar(dir, "path", os.Getenv(EnvCacheDir), "path to cache directory") +func FlagStackPath(path *string) { + flag.StringVar(path, "stack", envOrDefault(EnvStackPath, DefaultStackPath), "path to stack.toml") } -func FlagUseDaemon(use *bool) { - flag.BoolVar(use, "daemon", boolEnv(EnvUseDaemon), "export to docker daemon") +func FlagUID(uid *int) { + flag.IntVar(uid, "uid", intEnv(EnvUID), "UID of user in the stack's build and run images") } func FlagUseCredHelpers(use *bool) { flag.BoolVar(use, "helpers", boolEnv(EnvUseHelpers), "use credential helpers") } -func FlagUID(uid *int) { - flag.IntVar(uid, "uid", intEnv(EnvUID), "UID of user in the stack's build and run images") +func FlagUseDaemon(use *bool) { + flag.BoolVar(use, "daemon", boolEnv(EnvUseDaemon), "export to docker daemon") } -func FlagGID(gid *int) { - flag.IntVar(gid, "gid", intEnv(EnvGID), "GID of user's group in the stack's build and run images") +func FlagSkipLayers(skip *bool) { + flag.BoolVar(skip, "skip-layers", boolEnv(EnvSkipLayers), "do not provide layer metadata to buildpacks") } const ( - CodeFailed = 1 - CodeInvalidArgs = iota + 2 - CodeInvalidEnv - CodeNotFound - CodeFailedDetect - CodeFailedBuild - CodeFailedLaunch - CodeFailedUpdate + CodeFailed = 1 + // 2: reserved + CodeInvalidArgs = 3 + // 4: CodeInvalidEnv + // 5: CodeNotFound + CodeFailedDetect = 6 + CodeFailedBuild = 7 + CodeFailedLaunch = 8 + // 9: CodeFailedUpdate + CodeFailedSave = 10 ) type ErrorFail struct { @@ -173,7 +186,7 @@ func boolEnv(k string) bool { return b } -func envWithDefault(key string, defaultVal string) string { +func envOrDefault(key string, defaultVal string) string { if envVal := os.Getenv(key); envVal != "" { return envVal } diff --git a/cmd/exporter/main.go b/cmd/exporter/main.go index 1137d74ab..3554c7bbe 100644 --- a/cmd/exporter/main.go +++ b/cmd/exporter/main.go @@ -1,7 +1,6 @@ package main import ( - "errors" "flag" "fmt" "io/ioutil" @@ -11,6 +10,9 @@ import ( "github.com/BurntSushi/toml" "github.com/buildpack/imgutil" + "github.com/buildpack/imgutil/local" + "github.com/buildpack/imgutil/remote" + "github.com/pkg/errors" "github.com/buildpack/lifecycle" "github.com/buildpack/lifecycle/cache" @@ -22,11 +24,12 @@ import ( ) var ( - repoName string + imageNames []string runImageRef string layersDir string appDir string groupPath string + analyzedPath string stackPath string launchCacheDir string useDaemon bool @@ -42,6 +45,7 @@ func init() { cmd.FlagLayersDir(&layersDir) cmd.FlagAppDir(&appDir) cmd.FlagGroupPath(&groupPath) + cmd.FlagAnalyzedPath(&analyzedPath) cmd.FlagStackPath(&stackPath) cmd.FlagLaunchCacheDir(&launchCacheDir) cmd.FlagUseDaemon(&useDaemon) @@ -55,18 +59,17 @@ func main() { log.SetOutput(ioutil.Discard) flag.Parse() - if flag.NArg() > 1 { - cmd.Exit(cmd.FailErrCode(fmt.Errorf("received %d args expected 1", flag.NArg()), cmd.CodeInvalidArgs, "parse arguments")) - } - if flag.Arg(0) == "" { - cmd.Exit(cmd.FailErrCode(errors.New("image argument is required"), cmd.CodeInvalidArgs, "parse arguments")) + + imageNames = flag.Args() + + if len(imageNames) == 0 { + cmd.Exit(cmd.FailErrCode(errors.New("at least one image argument is required"), 
cmd.CodeInvalidArgs, "parse arguments")) } if launchCacheDir != "" && !useDaemon { cmd.Exit(cmd.FailErrCode(errors.New("launch cache can only be used when exporting to a Docker daemon"), cmd.CodeInvalidArgs, "parse arguments")) } - repoName = flag.Arg(0) cmd.Exit(export()) } @@ -95,37 +98,61 @@ func export() error { ArtifactsDir: artifactsDir, } - var stack metadata.StackMetadata - _, err = toml.DecodeFile(stackPath, &stack) + analyzedMD, err := parseOptionalAnalyzedMD(outLog, analyzedPath) + if err != nil { + return cmd.FailErrCode(err, cmd.CodeInvalidArgs, "parse analyzed metadata") + } + + var registry string + if registry, err = image.EnsureSingleRegistry(imageNames...); err != nil { + return cmd.FailErrCode(err, cmd.CodeInvalidArgs, "parse arguments") + } + + var stackMD metadata.StackMetadata + _, err = toml.DecodeFile(stackPath, &stackMD) if err != nil { - outLog.Printf("no stack.toml found at path '%s', stack metadata will not be exported\n", stackPath) + outLog.Printf("no stack metadata found at path '%s', stack metadata will not be exported\n", stackPath) } if runImageRef == "" { - if stack.RunImage.Image == "" { + if stackMD.RunImage.Image == "" { return cmd.FailErrCode(errors.New("-image is required when there is no stack metadata available"), cmd.CodeInvalidArgs, "parse arguments") } - runImageRef, err = runImageFromStackToml(stack) + runImageRef, err = runImageFromStackToml(stackMD, registry) if err != nil { return err } } if useHelpers { - if err := lifecycle.SetupCredHelpers(filepath.Join(os.Getenv("HOME"), ".docker"), repoName, runImageRef); err != nil { + if err := lifecycle.SetupCredHelpers(filepath.Join(os.Getenv("HOME"), ".docker"), imageNames[0], runImageRef); err != nil { return cmd.FailErr(err, "setup credential helpers") } } - var runImage, origImage imgutil.Image + var appImage imgutil.Image if useDaemon { dockerClient, err := docker.DefaultClient() if err != nil { return err } - runImage, err = imgutil.NewLocalImage(runImageRef, dockerClient) + var opts = []local.ImageOption{ + local.FromBaseImage(runImageRef), + } + + if analyzedMD.Image != nil { + outLog.Printf("Reusing layers from image with id '%s'", analyzedMD.Image.Reference) + opts = append(opts, local.WithPreviousImage(analyzedMD.Image.Reference)) + } + + appImage, err = local.NewImage( + imageNames[0], + dockerClient, + opts..., + ) + if err != nil { return cmd.FailErr(err, "access run image") } @@ -135,37 +162,63 @@ func export() error { if err != nil { return cmd.FailErr(err, "create launch cache") } - runImage = lifecycle.NewCachingImage(runImage, volumeCache) + appImage = lifecycle.NewCachingImage(appImage, volumeCache) + } + } else { + var opts = []remote.ImageOption{ + remote.FromBaseImage(runImageRef), } - origImage, err = imgutil.NewLocalImage(repoName, dockerClient) - if err != nil { - return cmd.FailErr(err, "access previous image") + if analyzedMD.Image != nil { + outLog.Printf("Reusing layers from image '%s'", analyzedMD.Image.Reference) + analyzedRegistry, err := image.ParseRegistry(analyzedMD.Image.Reference) + if err != nil { + return cmd.FailErr(err, "parse analyzed registry") + } + if analyzedRegistry != registry { + return fmt.Errorf("analyzed image is on a different registry %s from the exported image %s", analyzedRegistry, registry) + } + opts = append(opts, remote.WithPreviousImage(analyzedMD.Image.Reference)) } - } else { - runImage, err = imgutil.NewRemoteImage(runImageRef, auth.DefaultEnvKeychain()) + + appImage, err = remote.NewImage( + imageNames[0], + auth.DefaultEnvKeychain(), 
+ opts..., + ) if err != nil { return cmd.FailErr(err, "access run image") } - origImage, err = imgutil.NewRemoteImage(repoName, auth.DefaultEnvKeychain()) - if err != nil { - return cmd.FailErr(err, "access previous image") - } } - if err := exporter.Export(layersDir, appDir, runImage, origImage, launcherPath, stack); err != nil { + if err := exporter.Export(layersDir, appDir, appImage, analyzedMD.Metadata, imageNames[1:], launcherPath, stackMD); err != nil { + if _, isSaveError := err.(*imgutil.SaveError); isSaveError { + return cmd.FailErrCode(err, cmd.CodeFailedSave, "export") + } + return cmd.FailErr(err, "export") } return nil } -func runImageFromStackToml(stack metadata.StackMetadata) (string, error) { - registry, err := image.ParseRegistry(repoName) +func parseOptionalAnalyzedMD(logger *log.Logger, path string) (metadata.AnalyzedMetadata, error) { + var analyzedMD metadata.AnalyzedMetadata + + _, err := toml.DecodeFile(path, &analyzedMD) if err != nil { - return "", cmd.FailErrCode(err, cmd.CodeInvalidArgs, "parse image name") + if os.IsNotExist(err) { + logger.Printf("Warning: analyzed TOML file not found at '%s'", path) + return metadata.AnalyzedMetadata{}, nil + } + + return metadata.AnalyzedMetadata{}, err } + return analyzedMD, nil +} + +func runImageFromStackToml(stack metadata.StackMetadata, registry string) (string, error) { runImageMirrors := []string{stack.RunImage.Image} runImageMirrors = append(runImageMirrors, stack.RunImage.Mirrors...) runImageRef, err := image.ByRegistry(registry, runImageMirrors) diff --git a/cmd/restorer/main.go b/cmd/restorer/main.go index 214f18762..ab810330e 100644 --- a/cmd/restorer/main.go +++ b/cmd/restorer/main.go @@ -8,7 +8,7 @@ import ( "os" "github.com/BurntSushi/toml" - "github.com/buildpack/imgutil" + "github.com/buildpack/imgutil/local" "github.com/buildpack/lifecycle" "github.com/buildpack/lifecycle/cache" @@ -70,14 +70,15 @@ func restore() error { return cmd.FailErr(err, "create docker client") } - cacheImage, err := imgutil.NewLocalImage(cacheImageTag, dockerClient) + origCacheImage, err := local.NewImage(cacheImageTag, dockerClient, local.FromBaseImage(cacheImageTag)) if err != nil { return cmd.FailErr(err, "access cache image") } + emptyImage, err := local.NewImage(cacheImageTag, dockerClient, local.WithPreviousImage(cacheImageTag)) cacheStore = cache.NewImageCache( - cacheImage, - imgutil.EmptyLocalImage(cacheImage.Name(), dockerClient), + origCacheImage, + emptyImage, ) } else { var err error diff --git a/cred_helpers.go b/cred_helpers.go index 5ac128776..bf10ebc5f 100644 --- a/cred_helpers.go +++ b/cred_helpers.go @@ -3,10 +3,11 @@ package lifecycle import ( "encoding/json" "errors" - "github.com/google/go-containerregistry/pkg/name" "os" "path/filepath" "regexp" + + "github.com/google/go-containerregistry/pkg/name" ) func SetupCredHelpers(dockerPath string, refs ...string) error { diff --git a/cred_helpers_test.go b/cred_helpers_test.go index 7d6aca696..25712ebe0 100644 --- a/cred_helpers_test.go +++ b/cred_helpers_test.go @@ -2,14 +2,16 @@ package lifecycle_test import ( "encoding/json" - "github.com/buildpack/lifecycle" - h "github.com/buildpack/lifecycle/testhelpers" - "github.com/sclevine/spec" - "github.com/sclevine/spec/report" "io/ioutil" "os" "path/filepath" "testing" + + "github.com/sclevine/spec" + "github.com/sclevine/spec/report" + + "github.com/buildpack/lifecycle" + h "github.com/buildpack/lifecycle/testhelpers" ) func TestCredHelpers(t *testing.T) { diff --git a/exporter.go b/exporter.go index 
d432902db..2d1e2a122 100644 --- a/exporter.go +++ b/exporter.go @@ -7,6 +7,8 @@ import ( "path/filepath" "github.com/buildpack/imgutil" + "github.com/buildpack/imgutil/local" + "github.com/buildpack/imgutil/remote" "github.com/pkg/errors" "github.com/buildpack/lifecycle/archive" @@ -22,42 +24,47 @@ type Exporter struct { UID, GID int } -func (e *Exporter) Export(layersDir, appDir string, runImage, origImage imgutil.Image, launcher string, stack metadata.StackMetadata) error { +func (e *Exporter) Export( + layersDir, + appDir string, + workingImage imgutil.Image, + origMetadata metadata.AppImageMetadata, + additionalNames []string, + launcher string, + stack metadata.StackMetadata, +) error { var err error meta := metadata.AppImageMetadata{} - meta.RunImage.TopLayer, err = runImage.TopLayer() + meta.RunImage.TopLayer, err = workingImage.TopLayer() if err != nil { return errors.Wrap(err, "get run image top layer SHA") } - meta.RunImage.SHA, err = runImage.Digest() + identifier, err := workingImage.Identifier() if err != nil { - return errors.Wrap(err, "get run image digest") + return errors.Wrap(err, "get run image id or digest") } - meta.Stack = stack - - origMetadata, err := metadata.GetAppMetadata(origImage) + meta.RunImage.Reference = identifier.String() if err != nil { - return errors.Wrap(err, "metadata for previous image") + return errors.Wrap(err, "get run image digest") } - runImage.Rename(origImage.Name()) - appImage := runImage + meta.Stack = stack - meta.App.SHA, err = e.addOrReuseLayer(appImage, &layer{path: appDir, identifier: "app"}, origMetadata.App.SHA) + meta.App.SHA, err = e.addOrReuseLayer(workingImage, &layer{path: appDir, identifier: "app"}, origMetadata.App.SHA) if err != nil { return errors.Wrap(err, "exporting app layer") } - meta.Config.SHA, err = e.addOrReuseLayer(appImage, &layer{path: filepath.Join(layersDir, "config"), identifier: "config"}, origMetadata.Config.SHA) + meta.Config.SHA, err = e.addOrReuseLayer(workingImage, &layer{path: filepath.Join(layersDir, "config"), identifier: "config"}, origMetadata.Config.SHA) if err != nil { return errors.Wrap(err, "exporting config layer") } - meta.Launcher.SHA, err = e.addOrReuseLayer(appImage, &layer{path: launcher, identifier: "launcher"}, origMetadata.Launcher.SHA) + meta.Launcher.SHA, err = e.addOrReuseLayer(workingImage, &layer{path: launcher, identifier: "launcher"}, origMetadata.Launcher.SHA) if err != nil { return errors.Wrap(err, "exporting launcher layer") } @@ -77,7 +84,7 @@ func (e *Exporter) Export(layersDir, appDir string, runImage, origImage imgutil. if layer.hasLocalContents() { origLayerMetadata := origMetadata.MetadataForBuildpack(bp.ID).Layers[layer.name()] - lmd.SHA, err = e.addOrReuseLayer(appImage, &layer, origLayerMetadata.SHA) + lmd.SHA, err = e.addOrReuseLayer(workingImage, &layer, origLayerMetadata.SHA) if err != nil { return err } @@ -91,7 +98,7 @@ func (e *Exporter) Export(layersDir, appDir string, runImage, origImage imgutil. } e.Out.Printf("Reusing layer '%s' with SHA %s\n", layer.Identifier(), origLayerMetadata.SHA) - if err := appImage.ReuseLayer(origLayerMetadata.SHA); err != nil { + if err := workingImage.ReuseLayer(origLayerMetadata.SHA); err != nil { return errors.Wrapf(err, "reusing layer: '%s'", layer.Identifier()) } lmd.SHA = origLayerMetadata.SHA @@ -114,33 +121,28 @@ func (e *Exporter) Export(layersDir, appDir string, runImage, origImage imgutil. 
if err != nil { return errors.Wrap(err, "marshall metadata") } - if err := appImage.SetLabel(metadata.AppMetadataLabel, string(data)); err != nil { + + if err = workingImage.SetLabel(metadata.AppMetadataLabel, string(data)); err != nil { return errors.Wrap(err, "set app image metadata label") } - if err := appImage.SetEnv(cmd.EnvLayersDir, layersDir); err != nil { + if err = workingImage.SetEnv(cmd.EnvLayersDir, layersDir); err != nil { return errors.Wrapf(err, "set app image env %s", cmd.EnvLayersDir) } - if err := appImage.SetEnv(cmd.EnvAppDir, appDir); err != nil { + if err = workingImage.SetEnv(cmd.EnvAppDir, appDir); err != nil { return errors.Wrapf(err, "set app image env %s", cmd.EnvAppDir) } - if err := appImage.SetEntrypoint(launcher); err != nil { + if err = workingImage.SetEntrypoint(launcher); err != nil { return errors.Wrap(err, "setting entrypoint") } - if err := appImage.SetCmd(); err != nil { // Note: Command intentionally empty + if err = workingImage.SetCmd(); err != nil { // Note: Command intentionally empty return errors.Wrap(err, "setting cmd") } - sha, err := appImage.Save() - if err != nil { - return errors.Wrap(err, "saving") - } - e.Out.Printf("\n*** Image: %s@%s\n", runImage.Name(), sha) - - return nil + return e.saveImage(workingImage, additionalNames) } func (e *Exporter) addOrReuseLayer(image imgutil.Image, layer identifiableLayer, previousSha string) (string, error) { @@ -156,3 +158,61 @@ func (e *Exporter) addOrReuseLayer(image imgutil.Image, layer identifiableLayer, e.Out.Printf("Exporting layer '%s' with SHA %s\n", layer.Identifier(), sha) return sha, image.AddLayer(tarPath) } + +func (e *Exporter) saveImage(image imgutil.Image, additionalNames []string) error { + var saveErr error + if err := image.Save(additionalNames...); err != nil { + var ok bool + if saveErr, ok = err.(imgutil.SaveError); !ok { + return errors.Wrap(err, "saving image") + } + } + + e.Out.Println("*** Images:") + for _, n := range append([]string{image.Name()}, additionalNames...) 
{ + e.Out.Printf(" %s - %s\n", n, getSaveStatus(saveErr, n)) + } + + id, idErr := image.Identifier() + if idErr != nil { + if saveErr != nil { + return &MultiError{Errors: []error{idErr, saveErr}} + } + return idErr + } + + e.logReference(id) + return saveErr +} + +func (e *Exporter) logReference(identifier imgutil.Identifier) { + switch v := identifier.(type) { + case local.IDIdentifier: + e.Out.Printf("\n*** Image ID: %s\n", v.String()) + case remote.DigestIdentifier: + e.Out.Printf("\n*** Digest: %s\n", v.Digest.DigestStr()) + default: + e.Out.Printf("\n*** Reference: %s\n", v.String()) + } +} + +type MultiError struct { + Errors []error +} + +func (me *MultiError) Error() string { + return fmt.Sprintf("failed with multiple errors %+v", me.Errors) +} + +func getSaveStatus(err error, imageName string) string { + if err != nil { + if saveErr, ok := err.(imgutil.SaveError); ok { + for _, d := range saveErr.Errors { + if d.ImageName == imageName { + return d.Cause.Error() + } + } + } + } + return "succeeded" +} diff --git a/exporter_test.go b/exporter_test.go index caad853b6..682829265 100644 --- a/exporter_test.go +++ b/exporter_test.go @@ -11,11 +11,13 @@ import ( "math/rand" "os" "path/filepath" - "strings" "testing" "time" "github.com/buildpack/imgutil/fakes" + "github.com/buildpack/imgutil/local" + "github.com/buildpack/imgutil/remote" + "github.com/google/go-containerregistry/pkg/name" "github.com/sclevine/spec" "github.com/sclevine/spec/report" @@ -31,17 +33,18 @@ func TestExporter(t *testing.T) { func testExporter(t *testing.T, when spec.G, it spec.S) { var ( - exporter *lifecycle.Exporter - fakeImage *fakes.Image - stderr bytes.Buffer - stdout bytes.Buffer - layersDir string - tmpDir string - appDir string - launcherPath string - uid = 1234 - gid = 4321 - stack = metadata.StackMetadata{} + exporter *lifecycle.Exporter + fakeAppImage *fakes.Image + stderr bytes.Buffer + stdout bytes.Buffer + layersDir string + tmpDir string + appDir string + launcherPath string + uid = 1234 + gid = 4321 + stack = metadata.StackMetadata{} + additionalNames []string ) it.Before(func() { @@ -57,7 +60,15 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, os.Mkdir(layersDir, 0777)) h.AssertNil(t, ioutil.WriteFile(filepath.Join(tmpDir, "launcher"), []byte("some-launcher"), 0777)) - fakeImage = fakes.NewImage("runImageName", "some-top-layer-sha", "some-run-image-digest") + fakeAppImage = fakes.NewImage( + "some-repo/app-image", + "some-top-layer-sha", + local.IDIdentifier{ + ImageID: "some-image-id", + }, + ) + + additionalNames = []string{"some-repo/app-image:foo", "some-repo/app-image:bar"} exporter = &lifecycle.Exporter{ ArtifactsDir: tmpDir, @@ -73,7 +84,7 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it.After(func() { - fakeImage.Cleanup() + fakeAppImage.Cleanup() if err := os.RemoveAll(tmpDir); err != nil { t.Fatal(err) @@ -82,7 +93,10 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { when("#Export", func() { when("previous image exists", func() { - var fakeOriginalImage *fakes.Image + var ( + fakeOriginalImage *fakes.Image + fakeImageMetadata metadata.AppImageMetadata + ) it.Before(func() { h.RecursiveCopy(t, filepath.Join("testdata", "exporter", "previous-image-exists", "layers"), layersDir) @@ -95,11 +109,14 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { localReusableLayerSha := h.ComputeSHA256ForPath(t, localReusableLayerPath, uid, gid) launcherSHA := h.ComputeSHA256ForPath(t, launcherPath, uid, gid) - fakeOriginalImage = 
fakes.NewImage("app/original-Image-Name", "original-top-layer-sha", "some-original-run-image-digest") - fakeImage.AddPreviousLayer("sha256:"+localReusableLayerSha, "") - fakeImage.AddPreviousLayer("sha256:"+launcherSHA, "") - fakeImage.AddPreviousLayer("sha256:orig-launch-layer-no-local-dir-sha", "") - _ = fakeOriginalImage.SetLabel("io.buildpacks.lifecycle.metadata", + fakeOriginalImage = fakes.NewImage("app/original-image", "original-top-layer-sha", + local.IDIdentifier{ImageID: "some-original-run-image-digest"}, + ) + fakeAppImage.AddPreviousLayer("sha256:"+localReusableLayerSha, "") + fakeAppImage.AddPreviousLayer("sha256:"+launcherSHA, "") + fakeAppImage.AddPreviousLayer("sha256:orig-launch-layer-no-local-dir-sha", "") + + h.AssertNil(t, fakeOriginalImage.SetLabel("io.buildpacks.lifecycle.metadata", fmt.Sprintf(`{ "buildpacks": [ { @@ -131,7 +148,10 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { "launcher": { "sha": "sha256:%s" } - }`, localReusableLayerSha, launcherSHA)) + }`, localReusableLayerSha, launcherSHA))) + + fakeImageMetadata, err = metadata.GetAppMetadata(fakeOriginalImage) + h.AssertNil(t, err) }) it.After(func() { @@ -139,9 +159,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("creates app layer on Run image", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - appLayerPath := fakeImage.AppLayerPath() + appLayerPath := fakeAppImage.AppLayerPath() assertTarFileContents(t, appLayerPath, filepath.Join(appDir, ".hidden.txt"), "some-hidden-text\n") assertTarFileOwner(t, appLayerPath, appDir, uid, gid) @@ -149,9 +169,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("creates config layer on Run image", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - configLayerPath := fakeImage.ConfigLayerPath() + configLayerPath := fakeAppImage.ConfigLayerPath() assertTarFileContents(t, configLayerPath, @@ -164,31 +184,31 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { it("reuses launcher layer if the sha matches the sha in the metadata", func() { launcherLayerSHA := h.ComputeSHA256ForPath(t, launcherPath, uid, gid) - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) - h.AssertContains(t, fakeImage.ReusedLayers(), "sha256:"+launcherLayerSHA) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) + h.AssertContains(t, fakeAppImage.ReusedLayers(), "sha256:"+launcherLayerSHA) assertReuseLayerLog(t, stdout, "launcher", launcherLayerSHA) }) it("reuses launch layers when only layer.toml is present", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - h.AssertContains(t, fakeImage.ReusedLayers(), "sha256:orig-launch-layer-no-local-dir-sha") + h.AssertContains(t, fakeAppImage.ReusedLayers(), "sha256:orig-launch-layer-no-local-dir-sha") assertReuseLayerLog(t, stdout, "buildpack.id:launch-layer-no-local-dir", "orig-launch-layer-no-local-dir-sha") }) it("reuses 
cached launch layers if the local sha matches the sha in the metadata", func() { layer5sha := h.ComputeSHA256ForPath(t, filepath.Join(layersDir, "other.buildpack.id/local-reusable-layer"), uid, gid) - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - h.AssertContains(t, fakeImage.ReusedLayers(), "sha256:"+layer5sha) + h.AssertContains(t, fakeAppImage.ReusedLayers(), "sha256:"+layer5sha) assertReuseLayerLog(t, stdout, "other.buildpack.id:local-reusable-layer", layer5sha) }) it("adds new launch layers", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - layer2Path, err := fakeImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/new-launch-layer")) + layer2Path, err := fakeAppImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/new-launch-layer")) h.AssertNil(t, err) assertTarFileContents(t, @@ -200,9 +220,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("adds new launch layers from a second buildpack", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - layer3Path, err := fakeImage.FindLayerWithPath(filepath.Join(layersDir, "other.buildpack.id/new-launch-layer")) + layer3Path, err := fakeAppImage.FindLayerWithPath(filepath.Join(layersDir, "other.buildpack.id/new-launch-layer")) h.AssertNil(t, err) assertTarFileContents(t, @@ -214,37 +234,37 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("only creates expected layers", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) var applayer, configLayer, layer2, layer3 = 1, 1, 1, 1 - h.AssertEq(t, fakeImage.NumberOfAddedLayers(), applayer+configLayer+layer2+layer3) + h.AssertEq(t, fakeAppImage.NumberOfAddedLayers(), applayer+configLayer+layer2+layer3) }) it("only reuses expected layers", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) var launcherLayer, layer1, layer5 = 1, 1, 1 - h.AssertEq(t, len(fakeImage.ReusedLayers()), launcherLayer+layer1+layer5) + h.AssertEq(t, len(fakeAppImage.ReusedLayers()), launcherLayer+layer1+layer5) }) it("saves lifecycle metadata with layer info", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - appLayerPath := fakeImage.AppLayerPath() + appLayerPath := fakeAppImage.AppLayerPath() appLayerSHA := h.ComputeSHA256ForFile(t, appLayerPath) - configLayerPath := fakeImage.ConfigLayerPath() + configLayerPath := fakeAppImage.ConfigLayerPath() configLayerSHA := h.ComputeSHA256ForFile(t, configLayerPath) - newLayerPath, err := 
fakeImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/new-launch-layer")) + newLayerPath, err := fakeAppImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/new-launch-layer")) h.AssertNil(t, err) newLayerSHA := h.ComputeSHA256ForFile(t, newLayerPath) - secondBPLayerPath, err := fakeImage.FindLayerWithPath(filepath.Join(layersDir, "other.buildpack.id/new-launch-layer")) + secondBPLayerPath, err := fakeAppImage.FindLayerWithPath(filepath.Join(layersDir, "other.buildpack.id/new-launch-layer")) h.AssertNil(t, err) secondBPLayerPathSHA := h.ComputeSHA256ForFile(t, secondBPLayerPath) - metadataJSON, err := fakeImage.Label("io.buildpacks.lifecycle.metadata") + metadataJSON, err := fakeAppImage.Label("io.buildpacks.lifecycle.metadata") h.AssertNil(t, err) var meta metadata.AppImageMetadata @@ -254,7 +274,7 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { t.Log("adds run image metadata to label") h.AssertEq(t, meta.RunImage.TopLayer, "some-top-layer-sha") - h.AssertEq(t, meta.RunImage.SHA, "some-run-image-digest") + h.AssertEq(t, meta.RunImage.Reference, "some-image-id") t.Log("adds layer shas to metadata label") h.AssertEq(t, meta.App.SHA, "sha256:"+appLayerSHA) @@ -286,9 +306,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { Mirrors: []string{"registry.example.com/some/run", "other.example.com/some/run"}, }, } - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - metadataJSON, err := fakeImage.Label("io.buildpacks.lifecycle.metadata") + metadataJSON, err := fakeAppImage.Label("io.buildpacks.lifecycle.metadata") h.AssertNil(t, err) var meta metadata.AppImageMetadata @@ -301,66 +321,140 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("sets CNB_LAYERS_DIR", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - val, err := fakeImage.Env("CNB_LAYERS_DIR") + val, err := fakeAppImage.Env("CNB_LAYERS_DIR") h.AssertNil(t, err) h.AssertEq(t, val, layersDir) }) it("sets CNB_APP_DIR", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - val, err := fakeImage.Env("CNB_APP_DIR") + val, err := fakeAppImage.Env("CNB_APP_DIR") h.AssertNil(t, err) h.AssertEq(t, val, appDir) }) it("sets ENTRYPOINT to launcher", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - val, err := fakeImage.Entrypoint() + val, err := fakeAppImage.Entrypoint() h.AssertNil(t, err) h.AssertEq(t, val, []string{launcherPath}) }) it("sets empty CMD", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - val, err := fakeImage.Cmd() + val, err := fakeAppImage.Cmd() h.AssertNil(t, err) h.AssertEq(t, val, []string(nil)) }) - it("sets name to match old run image", func() { - 
h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + it("saves run image", func() { + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - h.AssertEq(t, fakeImage.Name(), "app/original-Image-Name") + h.AssertEq(t, fakeAppImage.IsSaved(), true) }) - it("saves run image", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + when("image has a digest identifier", func() { + var fakeRemoteDigest = "sha256:c27a27006b74a056bed5d9edcebc394783880abe8691a8c87c78b7cffa6fa5ad" + + it.Before(func() { + digestRef, err := name.NewDigest("some-repo/app-image@" + fakeRemoteDigest) + h.AssertNil(t, err) + fakeAppImage.SetIdentifier(remote.DigestIdentifier{ + Digest: digestRef, + }) + }) + + it("outputs the digest", func() { + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - h.AssertEq(t, fakeImage.IsSaved(), true) + h.AssertStringContains(t, + stdout.String(), + `*** Digest: `+fakeRemoteDigest, + ) + }) }) - it("outputs image name and digest", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + when("image has an ID identifier", func() { + it("outputs the image ID", func() { + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - if !strings.Contains(stdout.String(), "Image: app/original-Image-Name@saved-digest-from-fake-run-image") { - t.Fatalf("output should contain Image: app/original-Image-Name@some-digest, got '%s'", stdout.String()) - } + h.AssertStringContains(t, + stdout.String(), + `*** Image ID: some-image-id`, + ) + }) + }) + + it("outputs image names", func() { + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) + + h.AssertStringContains(t, + stdout.String(), + fmt.Sprintf(`*** Images: + %s - succeeded + %s - succeeded + %s - succeeded +`, + fakeAppImage.Name(), + additionalNames[0], + additionalNames[1], + ), + ) + }) + + when("one of the additional names fails", func() { + it("outputs identifier and image name with error", func() { + failingName := "not.a.tag@reference" + + err := exporter.Export( + layersDir, + appDir, + fakeAppImage, + fakeImageMetadata, + append(additionalNames, failingName), + launcherPath, + stack, + ) + + h.AssertError(t, err, fmt.Sprintf("failed to write image to the following tags: [%s]", failingName)) + + h.AssertStringContains(t, + stdout.String(), + fmt.Sprintf( + `*** Images: + %s - succeeded + %s - succeeded + %s - succeeded + %s - could not parse reference + +*** Image ID: some-image-id`, + fakeAppImage.Name(), + additionalNames[0], + additionalNames[1], + failingName, + ), + ) + }) }) when("previous image metadata is missing buildpack for reused layer", func() { it.Before(func() { _ = fakeOriginalImage.SetLabel("io.buildpacks.lifecycle.metadata", `{"buildpacks":[{}]}`) + + var err error + fakeImageMetadata, err = metadata.GetAppMetadata(fakeOriginalImage) + h.AssertNil(t, err) }) it("returns an error", func() { h.AssertError( t, - exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack), + exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack), "cannot reuse 'buildpack.id:launch-layer-no-local-dir', previous image has no 
metadata for layer 'buildpack.id:launch-layer-no-local-dir'", ) }) @@ -371,16 +465,25 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { _ = fakeOriginalImage.SetLabel( "io.buildpacks.lifecycle.metadata", `{"buildpacks":[{"key": "buildpack.id", "layers": {}}]}`) + + var err error + fakeImageMetadata, err = metadata.GetAppMetadata(fakeOriginalImage) + h.AssertNil(t, err) }) it("returns an error", func() { h.AssertError( t, - exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack), + exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack), "cannot reuse 'buildpack.id:launch-layer-no-local-dir', previous image has no metadata for layer 'buildpack.id:launch-layer-no-local-dir'", ) }) }) + + it("saves the image for all provided additionalNames", func() { + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) + h.AssertContains(t, fakeAppImage.SavedNames(), append(additionalNames, fakeAppImage.Name())...) + }) }) when("previous image doesn't exist", func() { @@ -395,7 +498,7 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) // TODO : this is an hacky way to create a non-existing image and should be improved in imgutil - nonExistingOriginalImage = fakes.NewImage("app/original-Image-Name", "", "") + nonExistingOriginalImage = fakes.NewImage("app/original-image", "", nil) nonExistingOriginalImage.Delete() }) @@ -404,9 +507,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("creates app layer on Run image", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) - appLayerPath := fakeImage.AppLayerPath() + appLayerPath := fakeAppImage.AppLayerPath() assertTarFileContents(t, appLayerPath, filepath.Join(appDir, ".hidden.txt"), "some-hidden-text\n") assertTarFileOwner(t, appLayerPath, appDir, uid, gid) @@ -414,9 +517,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("creates config layer on Run image", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) - configLayerPath := fakeImage.ConfigLayerPath() + configLayerPath := fakeAppImage.ConfigLayerPath() assertTarFileContents(t, configLayerPath, @@ -428,9 +531,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("creates a launcher layer", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) - launcherLayerPath, err := fakeImage.FindLayerWithPath(launcherPath) + launcherLayerPath, err := fakeAppImage.FindLayerWithPath(launcherPath) h.AssertNil(t, err) assertTarFileContents(t, launcherLayerPath, @@ -441,9 +544,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("adds launch layers", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, 
launcherPath, stack)) - layer1Path, err := fakeImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/layer1")) + layer1Path, err := fakeAppImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/layer1")) h.AssertNil(t, err) assertTarFileContents(t, layer1Path, @@ -452,7 +555,7 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { assertTarFileOwner(t, layer1Path, filepath.Join(layersDir, "buildpack.id/layer1"), uid, gid) assertAddLayerLog(t, stdout, "buildpack.id:layer1", layer1Path) - layer2Path, err := fakeImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/layer2")) + layer2Path, err := fakeAppImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/layer2")) h.AssertNil(t, err) assertTarFileContents(t, layer2Path, @@ -463,34 +566,34 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("only creates expected layers", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) var applayer, configLayer, launcherLayer, layer1, layer2 = 1, 1, 1, 1, 1 - h.AssertEq(t, fakeImage.NumberOfAddedLayers(), applayer+configLayer+launcherLayer+layer1+layer2) + h.AssertEq(t, fakeAppImage.NumberOfAddedLayers(), applayer+configLayer+launcherLayer+layer1+layer2) }) it("saves metadata with layer info", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) - appLayerPath := fakeImage.AppLayerPath() + appLayerPath := fakeAppImage.AppLayerPath() appLayerSHA := h.ComputeSHA256ForFile(t, appLayerPath) - configLayerPath := fakeImage.ConfigLayerPath() + configLayerPath := fakeAppImage.ConfigLayerPath() configLayerSHA := h.ComputeSHA256ForFile(t, configLayerPath) - launcherLayerPath, err := fakeImage.FindLayerWithPath(launcherPath) + launcherLayerPath, err := fakeAppImage.FindLayerWithPath(launcherPath) h.AssertNil(t, err) launcherLayerSHA := h.ComputeSHA256ForFile(t, launcherLayerPath) - layer1Path, err := fakeImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/layer1")) + layer1Path, err := fakeAppImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/layer1")) h.AssertNil(t, err) buildpackLayer1SHA := h.ComputeSHA256ForFile(t, layer1Path) - layer2Path, err := fakeImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/layer2")) + layer2Path, err := fakeAppImage.FindLayerWithPath(filepath.Join(layersDir, "buildpack.id/layer2")) h.AssertNil(t, err) buildpackLayer2SHA := h.ComputeSHA256ForFile(t, layer2Path) - metadataJSON, err := fakeImage.Label("io.buildpacks.lifecycle.metadata") + metadataJSON, err := fakeAppImage.Label("io.buildpacks.lifecycle.metadata") h.AssertNil(t, err) var meta metadata.AppImageMetadata @@ -500,7 +603,7 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { t.Log("adds run image metadata to label") h.AssertEq(t, meta.RunImage.TopLayer, "some-top-layer-sha") - h.AssertEq(t, meta.RunImage.SHA, "some-run-image-digest") + h.AssertEq(t, meta.RunImage.Reference, "some-image-id") t.Log("adds layer shas to metadata label") h.AssertEq(t, meta.App.SHA, "sha256:"+appLayerSHA) @@ -516,53 +619,47 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("sets CNB_LAYERS_DIR", func() { - h.AssertNil(t, 
exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) - val, err := fakeImage.Env("CNB_LAYERS_DIR") + val, err := fakeAppImage.Env("CNB_LAYERS_DIR") h.AssertNil(t, err) h.AssertEq(t, val, layersDir) }) it("sets CNB_APP_DIR", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) - val, err := fakeImage.Env("CNB_APP_DIR") + val, err := fakeAppImage.Env("CNB_APP_DIR") h.AssertNil(t, err) h.AssertEq(t, val, appDir) }) it("sets ENTRYPOINT to launcher", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) - val, err := fakeImage.Entrypoint() + val, err := fakeAppImage.Entrypoint() h.AssertNil(t, err) h.AssertEq(t, val, []string{launcherPath}) }) it("sets empty CMD", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) - val, err := fakeImage.Cmd() + val, err := fakeAppImage.Cmd() h.AssertNil(t, err) h.AssertEq(t, val, []string(nil)) }) - it("sets name to match original image", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) - - h.AssertEq(t, fakeImage.Name(), "app/original-Image-Name") - }) - - it("saves run image", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack)) - - h.AssertEq(t, fakeImage.IsSaved(), true) + it("saves the image for all provided additionalNames", func() { + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack)) + h.AssertContains(t, fakeAppImage.SavedNames(), append(additionalNames, fakeAppImage.Name())...) 
}) }) when("buildpack requires an escaped id", func() { var ( fakeOriginalImage *fakes.Image + fakeImageMetadata metadata.AppImageMetadata ) it.Before(func() { @@ -576,11 +673,14 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { appDir, err = filepath.Abs(filepath.Join("testdata", "exporter", "escaped-bpid", "layers", "app")) h.AssertNil(t, err) - fakeOriginalImage = fakes.NewImage("app/original", "original-top-sha", "run-digest") - fakeOriginalImage.SetLabel( + fakeOriginalImage = fakes.NewImage("app/original", "original-top-sha", local.IDIdentifier{ImageID: "run-digest"}) + h.AssertNil(t, fakeOriginalImage.SetLabel( "io.buildpacks.lifecycle.metadata", `{"buildpacks":[{"key": "some/escaped/bp/id", "layers": {"layer": {"sha": "original-layer-sha"}}}]}`, - ) + )) + + fakeImageMetadata, err = metadata.GetAppMetadata(fakeOriginalImage) + h.AssertNil(t, err) }) it.After(func() { @@ -588,9 +688,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("exports layers from the escaped id path", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - layerPath, err := fakeImage.FindLayerWithPath(filepath.Join(layersDir, "some_escaped_bp_id/some-layer")) + layerPath, err := fakeAppImage.FindLayerWithPath(filepath.Join(layersDir, "some_escaped_bp_id/some-layer")) h.AssertNil(t, err) assertTarFileContents(t, @@ -602,9 +702,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { }) it("exports buildpack metadata with unescaped id", func() { - h.AssertNil(t, exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack)) + h.AssertNil(t, exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack)) - metadataJSON, err := fakeImage.Label("io.buildpacks.lifecycle.metadata") + metadataJSON, err := fakeAppImage.Label("io.buildpacks.lifecycle.metadata") h.AssertNil(t, err) var meta metadata.AppImageMetadata @@ -630,7 +730,7 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { h.AssertNil(t, err) // TODO : this is an hacky way to create a non-existing image and should be improved in imgutil - nonExistingOriginalImage = fakes.NewImage("app/original-Image-Name", "", "") + nonExistingOriginalImage = fakes.NewImage("app/original-image", "", nil) nonExistingOriginalImage.Delete() }) @@ -641,7 +741,7 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { it("returns an error", func() { h.AssertError( t, - exporter.Export(layersDir, appDir, fakeImage, nonExistingOriginalImage, launcherPath, stack), + exporter.Export(layersDir, appDir, fakeAppImage, metadata.AppImageMetadata{}, additionalNames, launcherPath, stack), "failed to parse metadata for layers '[buildpack.id:bad-layer]'", ) }) @@ -650,6 +750,7 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { when("there is a launch=true cache=true layer without contents", func() { var ( fakeOriginalImage *fakes.Image + fakeImageMetadata metadata.AppImageMetadata ) it.Before(func() { @@ -658,7 +759,11 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { appDir, err = filepath.Abs(filepath.Join("testdata", "exporter", "cache-layer-no-contents", "layers", "app")) h.AssertNil(t, err) - fakeOriginalImage = fakes.NewImage("app/original-Image-Name", "original-top-layer-sha", "some-original-run-image-digest") + fakeOriginalImage = fakes.NewImage( + "app/original-image", + 
"original-top-layer-sha", + local.IDIdentifier{ImageID: "some-original-image-id"}, + ) _ = fakeOriginalImage.SetLabel("io.buildpacks.lifecycle.metadata", `{ "buildpacks": [ { @@ -672,6 +777,9 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { } ] }`) + + fakeImageMetadata, err = metadata.GetAppMetadata(fakeOriginalImage) + h.AssertNil(t, err) }) it.After(func() { @@ -681,7 +789,7 @@ func testExporter(t *testing.T, when spec.G, it spec.S) { it("returns an error", func() { h.AssertError( t, - exporter.Export(layersDir, appDir, fakeImage, fakeOriginalImage, launcherPath, stack), + exporter.Export(layersDir, appDir, fakeAppImage, fakeImageMetadata, additionalNames, launcherPath, stack), "layer 'buildpack.id:cache-layer-no-contents' is cache=true but has no contents", ) }) @@ -694,17 +802,13 @@ func assertAddLayerLog(t *testing.T, stdout bytes.Buffer, name, layerPath string layerSHA := h.ComputeSHA256ForFile(t, layerPath) expected := fmt.Sprintf("Exporting layer '%s' with SHA sha256:%s", name, layerSHA) - if !strings.Contains(stdout.String(), expected) { - t.Fatalf("Expected output \n'%s' to contain \n'%s'", stdout.String(), expected) - } + h.AssertStringContains(t, stdout.String(), expected) } func assertReuseLayerLog(t *testing.T, stdout bytes.Buffer, name, sha string) { t.Helper() expected := fmt.Sprintf("Reusing layer '%s' with SHA sha256:%s", name, sha) - if !strings.Contains(stdout.String(), expected) { - t.Fatalf("Expected output \n\"%s\"\n to contain \n\"%s\"", stdout.String(), expected) - } + h.AssertStringContains(t, stdout.String(), expected) } func assertTarFileContents(t *testing.T, tarfile, path, expected string) { diff --git a/go.mod b/go.mod index 15c5c99b8..242d573a1 100644 --- a/go.mod +++ b/go.mod @@ -2,7 +2,7 @@ module github.com/buildpack/lifecycle require ( github.com/BurntSushi/toml v0.3.1 - github.com/buildpack/imgutil v0.0.0-20190509214933-76de939dfb34 + github.com/buildpack/imgutil v0.0.0-20190702194220-aa6eb76474d3 github.com/docker/docker v0.7.3-0.20190307005417-54dddadc7d5d github.com/docker/go-connections v0.4.0 github.com/docker/go-units v0.4.0 // indirect @@ -14,6 +14,7 @@ require ( github.com/sclevine/spec v1.2.0 golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c // indirect golang.org/x/sys v0.0.0-20190509141414-a5b02f93d862 // indirect + golang.org/x/tools v0.0.0-20190624190245-7f2218787638 google.golang.org/genproto v0.0.0-20190508193815-b515fa19cec8 // indirect google.golang.org/grpc v1.20.1 // indirect ) diff --git a/go.sum b/go.sum index dd5650519..6139138ee 100644 --- a/go.sum +++ b/go.sum @@ -5,10 +5,8 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/Microsoft/go-winio v0.4.12 h1:xAfWHN1IrQ0NJ9TBC0KBZoqLjzDTr1ML+4MywiUOryc= github.com/Microsoft/go-winio v0.4.12/go.mod h1:VhR8bwka0BXejwEJY73c50VrPtXAaKcyvVC4A4RozmA= -github.com/buildpack/imgutil v0.0.0-20190509214311-ef7b838ea54e h1:+XO65Ne4CWNdWdtlmiSb31qRsFsCkMAAI8DyQ5mcWOk= -github.com/buildpack/imgutil v0.0.0-20190509214311-ef7b838ea54e/go.mod h1:hucbKwghPRl30WTj6YxhsoOYTLzpLo3hDSSMPmCAYf4= -github.com/buildpack/imgutil v0.0.0-20190509214933-76de939dfb34 h1:cCzM7734kk/1RmfIY8zDvOQDaqBaUoGpGpesl6i5nD0= -github.com/buildpack/imgutil v0.0.0-20190509214933-76de939dfb34/go.mod h1:hucbKwghPRl30WTj6YxhsoOYTLzpLo3hDSSMPmCAYf4= +github.com/buildpack/imgutil v0.0.0-20190702194220-aa6eb76474d3 h1:aSZAFY7Fux4lds6QQwN13SuzJFF7iUe1T0JkO7HLnAA= 
+github.com/buildpack/imgutil v0.0.0-20190702194220-aa6eb76474d3/go.mod h1:X0yFLGZtSyTXsEJq9NaPP9Rdnaq+3wouZ7xaTgw1680= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= @@ -98,8 +96,6 @@ golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6Zh golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190412183630-56d357773e84 h1:IqXQ59gzdXv58Jmm2xn0tSOR9i6HqroaOFRQ3wR/dJQ= -golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -122,6 +118,8 @@ golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGm golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190624190245-7f2218787638 h1:uIfBkD8gLczr4XDgYpt/qJYds2YJwZRNw4zs7wSnNhk= +golang.org/x/tools v0.0.0-20190624190245-7f2218787638/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= diff --git a/image/image.go b/image/image.go index 65171566b..1340e88b5 100644 --- a/image/image.go +++ b/image/image.go @@ -1,9 +1,8 @@ package image import ( - "errors" - "github.com/google/go-containerregistry/pkg/name" + "github.com/pkg/errors" ) func ByRegistry(registry string, images []string) (string, error) { @@ -30,3 +29,26 @@ func ParseRegistry(imageName string) (string, error) { } return ref.Context().RegistryStr(), nil } + +func EnsureSingleRegistry(repoNames ...string) (string, error) { + set := make(map[string]interface{}) + + var ( + err error + registry string + ) + + for _, repoName := range repoNames { + registry, err = ParseRegistry(repoName) + if err != nil { + return "", errors.Wrapf(err, "parsing registry from repo '%s'", repoName) + } + set[registry] = nil + } + + if len(set) != 1 { + return "", errors.New("exporting to multiple registries is unsupported") + } + + return registry, nil +} diff --git a/image/image_test.go b/image/image_test.go index 26287bb21..4616613d3 100644 --- a/image/image_test.go +++ b/image/image_test.go @@ -69,4 +69,22 @@ func testImage(t *testing.T, when spec.G, it spec.S) { }) }) }) + + when("#EnsureSingleRegistry", func() { + when("multiple registries are provided", func() { + it("errors as unsupported", func() { + _, err := 
image.EnsureSingleRegistry("some/repo", "gcr.io/other-repo:latest", "example.com/final-repo") + h.AssertError(t, err, "exporting to multiple registries is unsupported") + }) + }) + + when("a single registry is provided", func() { + it("does not return an error and", func() { + registry, err := image.EnsureSingleRegistry("gcr.io/some/repo", "gcr.io/other-repo:latest", "gcr.io/final-repo") + h.AssertNil(t, err) + + h.AssertEq(t, registry, "gcr.io") + }) + }) + }) } diff --git a/layers.go b/layers.go index e4dac3687..2605f43aa 100644 --- a/layers.go +++ b/layers.go @@ -164,7 +164,7 @@ func (bp *bpLayer) writeMetadata(metadataLayers map[string]metadata.LayerMetadat return err } defer fh.Close() - return toml.NewEncoder(fh).Encode(layerMetadata) + return toml.NewEncoder(fh).Encode(layerMetadata.LayerMetadataFile) } func (bp *bpLayer) hasLocalContents() bool { @@ -174,7 +174,7 @@ func (bp *bpLayer) hasLocalContents() bool { } func (bp *bpLayer) writeSha(sha string) error { - if err := ioutil.WriteFile(filepath.Join(bp.path+".sha"), []byte(sha), 0777); err != nil { + if err := ioutil.WriteFile(bp.path+".sha", []byte(sha), 0777); err != nil { return err } return nil diff --git a/metadata/metadata.go b/metadata/metadata.go index 23998c988..3fc5a6c0d 100644 --- a/metadata/metadata.go +++ b/metadata/metadata.go @@ -10,34 +10,48 @@ import ( const AppMetadataLabel = "io.buildpacks.lifecycle.metadata" type AppImageMetadata struct { - App AppMetadata `json:"app"` - Config ConfigMetadata `json:"config"` - Launcher LauncherMetadata `json:"launcher"` - Buildpacks []BuildpackMetadata `json:"buildpacks"` - RunImage RunImageMetadata `json:"runImage"` - Stack StackMetadata `json:"stack"` + App AppMetadata `json:"app" toml:"app"` + Config ConfigMetadata `json:"config" toml:"config"` + Launcher LauncherMetadata `json:"launcher" toml:"launcher"` + Buildpacks []BuildpackMetadata `json:"buildpacks" toml:"buildpacks"` + RunImage RunImageMetadata `json:"runImage" toml:"run-image"` + Stack StackMetadata `json:"stack" toml:"stack"` } type AppMetadata struct { - SHA string `json:"sha"` + SHA string `json:"sha" toml:"sha"` +} + +type AnalyzedMetadata struct { + Image *ImageIdentifier `toml:"image"` + Metadata AppImageMetadata `toml:"metadata"` +} + +//FIXME: fix key names to be accurate in the daemon case +type ImageIdentifier struct { + Reference string `toml:"reference"` } type ConfigMetadata struct { - SHA string `json:"sha"` + SHA string `json:"sha" toml:"sha"` } type LauncherMetadata struct { - SHA string `json:"sha"` + SHA string `json:"sha" toml:"sha"` } type BuildpackMetadata struct { - ID string `json:"key"` - Version string `json:"version"` - Layers map[string]LayerMetadata `json:"layers"` + ID string `json:"key" toml:"key"` + Version string `json:"version" toml:"version"` + Layers map[string]LayerMetadata `json:"layers" toml:"layers"` } type LayerMetadata struct { - SHA string `json:"sha" toml:"-"` + SHA string `json:"sha" toml:"sha"` + LayerMetadataFile +} + +type LayerMetadataFile struct { Data interface{} `json:"data" toml:"metadata"` Build bool `json:"build" toml:"build"` Launch bool `json:"launch" toml:"launch"` @@ -45,12 +59,12 @@ type LayerMetadata struct { } type RunImageMetadata struct { - TopLayer string `json:"topLayer"` - SHA string `json:"sha"` + TopLayer string `json:"topLayer" toml:"top-layer"` + Reference string `json:"reference" toml:"reference"` } type StackMetadata struct { - RunImage StackRunImageMetadata `toml:"run-image" json:"runImage"` + RunImage StackRunImageMetadata `json:"runImage" 
toml:"run-image"` } type StackRunImageMetadata struct { diff --git a/restorer.go b/restorer.go index 8c8a0eb88..8cfabbc42 100644 --- a/restorer.go +++ b/restorer.go @@ -25,7 +25,7 @@ func (r *Restorer) Restore(cache Cache) error { } if len(meta.Buildpacks) == 0 { - r.Out.Printf("cache '%s': metadata not found, nothing to restore", cache.Name()) + r.Out.Printf("Cache '%s': metadata not found, nothing to restore", cache.Name()) return nil } @@ -60,7 +60,7 @@ func (r *Restorer) Restore(cache Cache) error { func (r *Restorer) restoreLayer(name string, bpMD metadata.BuildpackMetadata, layer metadata.LayerMetadata, layersDir bpLayersDir, cache Cache) error { bpLayer := layersDir.newBPLayer(name) - r.Out.Printf("restoring cached layer '%s'", bpLayer.Identifier()) + r.Out.Printf("Restoring cached layer '%s'", bpLayer.Identifier()) if err := bpLayer.writeMetadata(bpMD.Layers); err != nil { return err } diff --git a/testhelpers/testhelpers.go b/testhelpers/testhelpers.go index 5182061a1..0c691ca15 100644 --- a/testhelpers/testhelpers.go +++ b/testhelpers/testhelpers.go @@ -14,6 +14,7 @@ import ( "os" "os/exec" "path/filepath" + "reflect" "strings" "sync" "syscall" @@ -43,15 +44,26 @@ func AssertEq(t *testing.T, actual, expected interface{}) { } } -func AssertContains(t *testing.T, slice []string, expected string) { +func AssertContains(t *testing.T, slice []string, elements ...string) { t.Helper() - for _, actual := range slice { - if diff := cmp.Diff(actual, expected); diff == "" { - return + +outer: + for _, el := range elements { + for _, actual := range slice { + if diff := cmp.Diff(actual, el); diff == "" { + continue outer + } } + + t.Fatalf("Expected %+v to contain: %s", slice, el) } - t.Fatalf("Expected %+v to contain: %s", slice, expected) +} +func AssertStringContains(t *testing.T, str string, expected string) { + t.Helper() + if !strings.Contains(str, expected) { + t.Fatalf("Expected %s to contain: %s\nDiff:\n%s", str, expected, cmp.Diff(str, expected)) + } } func AssertError(t *testing.T, actual error, expected string) { @@ -66,11 +78,15 @@ func AssertError(t *testing.T, actual error, expected string) { func AssertNil(t *testing.T, actual interface{}) { t.Helper() - if actual != nil { + if !isNil(actual) { t.Fatalf("Expected nil: %s", actual) } } +func isNil(value interface{}) bool { + return value == nil || (reflect.TypeOf(value).Kind() == reflect.Ptr && reflect.ValueOf(value).IsNil()) +} + func AssertUidGid(t *testing.T, path string, uid, gid int) { fi, err := os.Stat(path) AssertNil(t, err) diff --git a/testmock/cache.go b/testmock/cache.go index 715fc4994..a3318341b 100644 --- a/testmock/cache.go +++ b/testmock/cache.go @@ -5,10 +5,12 @@ package testmock import ( - cache "github.com/buildpack/lifecycle/cache" - gomock "github.com/golang/mock/gomock" io "io" reflect "reflect" + + gomock "github.com/golang/mock/gomock" + + cache "github.com/buildpack/lifecycle/cache" ) // MockCache is a mock of Cache interface diff --git a/testmock/env.go b/testmock/env.go index b1d7c13fd..fce5b7cb9 100644 --- a/testmock/env.go +++ b/testmock/env.go @@ -5,8 +5,9 @@ package testmock import ( - gomock "github.com/golang/mock/gomock" reflect "reflect" + + gomock "github.com/golang/mock/gomock" ) // MockBuildEnv is a mock of BuildEnv interface diff --git a/testmock/image.go b/testmock/image.go index b413087f3..6f62dad91 100644 --- a/testmock/image.go +++ b/testmock/image.go @@ -5,10 +5,11 @@ package testmock import ( + reflect "reflect" + gomock "github.com/golang/mock/gomock" v1 
"github.com/google/go-containerregistry/pkg/v1" types "github.com/google/go-containerregistry/pkg/v1/types" - reflect "reflect" ) // GGCRImage is a mock of Image interface diff --git a/testmock/ref.go b/testmock/ref.go index e5cd57acd..8436df3ba 100644 --- a/testmock/ref.go +++ b/testmock/ref.go @@ -5,9 +5,10 @@ package testmock import ( + reflect "reflect" + gomock "github.com/golang/mock/gomock" name "github.com/google/go-containerregistry/pkg/name" - reflect "reflect" ) // MockReference is a mock of Reference interface diff --git a/tools/tools.go b/tools/tools.go new file mode 100644 index 000000000..0a53843ad --- /dev/null +++ b/tools/tools.go @@ -0,0 +1,7 @@ +// +build tools + +package tools + +import ( + _ "golang.org/x/tools/cmd/goimports" +) diff --git a/vendor/github.com/buildpack/imgutil/Makefile b/vendor/github.com/buildpack/imgutil/Makefile new file mode 100644 index 000000000..9d31a5e08 --- /dev/null +++ b/vendor/github.com/buildpack/imgutil/Makefile @@ -0,0 +1,18 @@ +# Go parameters +GOCMD?=go +GOTEST=$(GOCMD) test -mod=vendor + +all: test + +imports: + $(GOCMD) install -mod=vendor golang.org/x/tools/cmd/goimports + test -z $$(goimports -l -w -local github.com/buildpack/imgutil $$(find . -type f -name '*.go' -not -path "./vendor/*")) + +format: + test -z $$($(GOCMD) fmt ./...) + +vet: + $(GOCMD) vet $$($(GOCMD) list ./... | grep -v /testdata/) + +test: format imports vet + $(GOTEST) -parallel=1 -count=1 -v ./... diff --git a/vendor/github.com/buildpack/imgutil/README.md b/vendor/github.com/buildpack/imgutil/README.md index 8157198b5..bd34cc8b9 100644 --- a/vendor/github.com/buildpack/imgutil/README.md +++ b/vendor/github.com/buildpack/imgutil/README.md @@ -6,8 +6,14 @@ Helpful utilities for working with images ## Development +To format: + +```bash +$ make format imports +``` + To run tests: ```bash -$ go test +$ make test ``` \ No newline at end of file diff --git a/vendor/github.com/buildpack/imgutil/fakes/image.go b/vendor/github.com/buildpack/imgutil/fakes/image.go index 54ba1ffbd..a3809dd63 100644 --- a/vendor/github.com/buildpack/imgutil/fakes/image.go +++ b/vendor/github.com/buildpack/imgutil/fakes/image.go @@ -12,28 +12,28 @@ import ( "strings" "time" + "github.com/google/go-containerregistry/pkg/name" "github.com/pkg/errors" "github.com/buildpack/imgutil" ) -func NewImage(name, topLayerSha, digest string) *Image { +func NewImage(name, topLayerSha string, identifier imgutil.Identifier) *Image { return &Image{ - alreadySaved: false, labels: map[string]string{}, env: map[string]string{}, topLayerSha: topLayerSha, - digest: digest, + identifier: identifier, name: name, cmd: []string{"initialCMD"}, layersMap: map[string]string{}, prevLayersMap: map[string]string{}, createdAt: time.Now(), + savedNames: map[string]bool{}, } } type Image struct { - alreadySaved bool deleted bool layers []string layersMap map[string]string @@ -42,7 +42,7 @@ type Image struct { labels map[string]string env map[string]string topLayerSha string - digest string + identifier imgutil.Identifier name string entryPoint []string cmd []string @@ -50,74 +50,75 @@ type Image struct { createdAt time.Time layerDir string workingDir string + savedNames map[string]bool } -func (f *Image) CreatedAt() (time.Time, error) { - return f.createdAt, nil +func (i *Image) CreatedAt() (time.Time, error) { + return i.createdAt, nil } -func (f *Image) Label(key string) (string, error) { - return f.labels[key], nil +func (i *Image) Label(key string) (string, error) { + return i.labels[key], nil } -func (f *Image) 
Rename(name string) { - f.name = name +func (i *Image) Rename(name string) { + i.name = name } -func (f *Image) Name() string { - return f.name +func (i *Image) Name() string { + return i.name } -func (f *Image) Digest() (string, error) { - return f.digest, nil +func (i *Image) Identifier() (imgutil.Identifier, error) { + return i.identifier, nil } -func (f *Image) Rebase(baseTopLayer string, newBase imgutil.Image) error { - f.base = newBase.Name() +func (i *Image) Rebase(baseTopLayer string, newBase imgutil.Image) error { + i.base = newBase.Name() return nil } -func (f *Image) SetLabel(k string, v string) error { - f.labels[k] = v +func (i *Image) SetLabel(k string, v string) error { + i.labels[k] = v return nil } -func (f *Image) SetEnv(k string, v string) error { - f.env[k] = v +func (i *Image) SetEnv(k string, v string) error { + i.env[k] = v return nil } -func (f *Image) SetWorkingDir(dir string) error { - f.workingDir = dir +func (i *Image) SetWorkingDir(dir string) error { + i.workingDir = dir return nil } -func (f *Image) SetEntrypoint(v ...string) error { - f.entryPoint = v +func (i *Image) SetEntrypoint(v ...string) error { + i.entryPoint = v return nil } -func (f *Image) SetCmd(v ...string) error { - f.cmd = v +func (i *Image) SetCmd(v ...string) error { + i.cmd = v return nil } -func (f *Image) Env(k string) (string, error) { - return f.env[k], nil +func (i *Image) Env(k string) (string, error) { + return i.env[k], nil } -func (f *Image) TopLayer() (string, error) { - return f.topLayerSha, nil +func (i *Image) TopLayer() (string, error) { + return i.topLayerSha, nil } -func (f *Image) AddLayer(path string) error { +func (i *Image) AddLayer(path string) error { sha, err := shaForFile(path) if err != nil { return err } - f.layersMap["sha256:"+sha] = path - f.layers = append(f.layers, path) + i.layersMap["sha256:"+sha] = path + i.layers = append(i.layers, path) return nil } @@ -135,8 +136,8 @@ func shaForFile(path string) (string, error) { return hex.EncodeToString(hasher.Sum(make([]byte, 0, hasher.Size()))), nil } -func (f *Image) GetLayer(sha string) (io.ReadCloser, error) { - path, ok := f.layersMap[sha] +func (i *Image) GetLayer(sha string) (io.ReadCloser, error) { + path, ok := i.layersMap[sha] if !ok { return nil, fmt.Errorf("failed to get layer with sha '%s'", sha) } @@ -144,40 +145,54 @@ func (f *Image) GetLayer(sha string) (io.ReadCloser, error) { return os.Open(path) } -func (f *Image) ReuseLayer(sha string) error { - prevLayer, ok := f.prevLayersMap[sha] +func (i *Image) ReuseLayer(sha string) error { + prevLayer, ok := i.prevLayersMap[sha] if !ok { return fmt.Errorf("image does not have previous layer with sha '%s'", sha) } - f.reusedLayers = append(f.reusedLayers, sha) - f.layersMap[sha] = prevLayer + i.reusedLayers = append(i.reusedLayers, sha) + i.layersMap[sha] = prevLayer return nil } -func (f *Image) Save() (string, error) { - f.alreadySaved = true - +func (i *Image) Save(additionalNames ...string) error { var err error - f.layerDir, err = ioutil.TempDir("", "fake-image") + i.layerDir, err = ioutil.TempDir("", "fake-image") if err != nil { - return "", errors.Wrap(err, "failed to create tmpDir") + return err + } + + for sha, path := range i.layersMap { + newPath := filepath.Join(i.layerDir, filepath.Base(path)) + i.copyLayer(path, newPath) + i.layersMap[sha] = newPath } - for sha, path := range f.layersMap { - newPath := filepath.Join(f.layerDir, filepath.Base(path)) - f.copyLayer(path, newPath) - f.layersMap[sha] = newPath + for l := range i.layers { + layerPath 
:= i.layers[l] + i.layers[l] = filepath.Join(i.layerDir, filepath.Base(layerPath)) + } + + allNames := append([]string{i.name}, additionalNames...) + + var errs []imgutil.SaveDiagnostic + for _, n := range allNames { + _, err := name.ParseReference(n, name.WeakValidation) + if err != nil { + errs = append(errs, imgutil.SaveDiagnostic{ImageName: n, Cause: err}) + } else { + i.savedNames[n] = true + } } - for i := range f.layers { - layerPath := f.layers[i] - f.layers[i] = filepath.Join(f.layerDir, filepath.Base(layerPath)) + if len(errs) > 0 { + return imgutil.SaveError{Errors: errs} } - return "saved-digest-from-fake-run-image", nil + return nil } -func (f *Image) copyLayer(path, newPath string) error { +func (i *Image) copyLayer(path, newPath string) error { src, err := os.Open(path) if err != nil { return errors.Wrap(err, "opening layer during copy") @@ -197,53 +212,57 @@ func (f *Image) copyLayer(path, newPath string) error { return nil } -func (f *Image) Delete() error { - f.deleted = true +func (i *Image) Delete() error { + i.deleted = true return nil } -func (f *Image) Found() bool { - return !f.deleted +func (i *Image) Found() bool { + return !i.deleted } // test methods -func (f *Image) Cleanup() error { - return os.RemoveAll(f.layerDir) +func (i *Image) SetIdentifier(identifier imgutil.Identifier) { + i.identifier = identifier +} + +func (i *Image) Cleanup() error { + return os.RemoveAll(i.layerDir) } -func (f *Image) AppLayerPath() string { - return f.layers[0] +func (i *Image) AppLayerPath() string { + return i.layers[0] } -func (f *Image) Entrypoint() ([]string, error) { - return f.entryPoint, nil +func (i *Image) Entrypoint() ([]string, error) { + return i.entryPoint, nil } -func (f *Image) Cmd() ([]string, error) { - return f.cmd, nil +func (i *Image) Cmd() ([]string, error) { + return i.cmd, nil } -func (f *Image) ConfigLayerPath() string { - return f.layers[1] +func (i *Image) ConfigLayerPath() string { + return i.layers[1] } -func (f *Image) ReusedLayers() []string { - return f.reusedLayers +func (i *Image) ReusedLayers() []string { + return i.reusedLayers } -func (f *Image) WorkingDir() string { - return f.workingDir +func (i *Image) WorkingDir() string { + return i.workingDir } -func (f *Image) AddPreviousLayer(sha, path string) { - f.prevLayersMap[sha] = path +func (i *Image) AddPreviousLayer(sha, path string) { + i.prevLayersMap[sha] = path } -func (f *Image) FindLayerWithPath(path string) (string, error) { +func (i *Image) FindLayerWithPath(path string) (string, error) { // we iterate backwards over the layer array b/c later layers could replace a file with a given path - for i := len(f.layers) - 1; i >= 0; i-- { - tarPath := f.layers[i] + for idx := len(i.layers) - 1; idx >= 0; idx-- { + tarPath := i.layers[idx] r, _ := os.Open(tarPath) defer r.Close() @@ -261,12 +280,12 @@ func (f *Image) FindLayerWithPath(path string) (string, error) { } } } - return "", fmt.Errorf("Could not find %s in any layer. \n \n %s", path, f.tarContents()) + return "", fmt.Errorf("Could not find %s in any layer. 
\n \n %s", path, i.tarContents()) } -func (f *Image) tarContents() string { +func (i *Image) tarContents() string { var strBuilder = strings.Builder{} - for _, tarPath := range f.layers { + for _, tarPath := range i.layers { strBuilder.WriteString(fmt.Sprintf("layer %s --- \n Contents: \n", filepath.Base(tarPath))) r, _ := os.Open(tarPath) @@ -289,14 +308,23 @@ func (f *Image) tarContents() string { return strBuilder.String() } -func (f *Image) NumberOfAddedLayers() int { - return len(f.layers) +func (i *Image) NumberOfAddedLayers() int { + return len(i.layers) } -func (f *Image) IsSaved() bool { - return f.alreadySaved +func (i *Image) IsSaved() bool { + return len(i.savedNames) > 0 } -func (f *Image) Base() string { - return f.base +func (i *Image) Base() string { + return i.base +} + +func (i *Image) SavedNames() []string { + var names []string + for k := range i.savedNames { + names = append(names, k) + } + + return names } diff --git a/vendor/github.com/buildpack/imgutil/go.mod b/vendor/github.com/buildpack/imgutil/go.mod index db797e547..889927f97 100644 --- a/vendor/github.com/buildpack/imgutil/go.mod +++ b/vendor/github.com/buildpack/imgutil/go.mod @@ -19,9 +19,10 @@ require ( github.com/sirupsen/logrus v1.4.1 // indirect github.com/stretchr/testify v1.3.0 // indirect golang.org/x/net v0.0.0-20190415214537-1da14a5a36f2 // indirect - golang.org/x/sync v0.0.0-20190412183630-56d357773e84 // indirect + golang.org/x/sync v0.0.0-20190423024810-112230192c58 golang.org/x/sys v0.0.0-20190416152802-12500544f89f // indirect golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 // indirect + golang.org/x/tools v0.0.0-20190624190245-7f2218787638 google.golang.org/grpc v1.20.0 // indirect gotest.tools v2.2.0+incompatible // indirect ) diff --git a/vendor/github.com/buildpack/imgutil/go.sum b/vendor/github.com/buildpack/imgutil/go.sum index 41ce4db68..e8e62a5b3 100644 --- a/vendor/github.com/buildpack/imgutil/go.sum +++ b/vendor/github.com/buildpack/imgutil/go.sum @@ -30,8 +30,6 @@ github.com/golang/protobuf v1.2.0 h1:P3YflyNX/ehuJFLhxviNdFxQPkGK5cDcApsge1SqnvM github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/google/go-cmp v0.2.0 h1:+dTQ8DZQJz0Mb/HjFlkptS1FeQ4cWSnN941F8aEG4SQ= github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-containerregistry v0.0.0-20190110221225-f514e780f7cd h1:u7HssFPC/BNGAttHPr1mcz5lWlqLlvV99SqrulqNhXY= -github.com/google/go-containerregistry v0.0.0-20190110221225-f514e780f7cd/go.mod h1:yZAFP63pRshzrEYLXLGPmUt0Ay+2zdjmMN1loCnRLUk= github.com/google/go-containerregistry v0.0.0-20190503220729-1c6c7f61e8a5 h1:wXZCVr9/0naJbZhAzKDzj/sgnduf8qn9ldjkI/CND9k= github.com/google/go-containerregistry v0.0.0-20190503220729-1c6c7f61e8a5/go.mod h1:yZAFP63pRshzrEYLXLGPmUt0Ay+2zdjmMN1loCnRLUk= github.com/gorilla/mux v1.7.1 h1:Dw4jY2nghMMRsh1ol8dv1axHkDwMQK2DHerMNJsIpJU= @@ -69,8 +67,8 @@ golang.org/x/net v0.0.0-20190415214537-1da14a5a36f2/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f h1:wMNYb4v58l5UBM7MYRLPG6ZhfOqbKu7X5eyFl8ZhKvA= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190412183630-56d357773e84 h1:IqXQ59gzdXv58Jmm2xn0tSOR9i6HqroaOFRQ3wR/dJQ= -golang.org/x/sync v0.0.0-20190412183630-56d357773e84/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58 h1:8gQV6CLnAEikrhgkHFbMAEhagSSnXWGV915qUMm9mrU= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190416152802-12500544f89f h1:1ZH9RnjNgLzh6YrsRp/c6ddZ8Lq0fq9xztNOoWJ2sz4= @@ -81,6 +79,8 @@ golang.org/x/time v0.0.0-20190308202827-9d24e82272b4 h1:SvFZT6jyqRaOeXpc5h/JSfZe golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190624190245-7f2218787638 h1:uIfBkD8gLczr4XDgYpt/qJYds2YJwZRNw4zs7wSnNhk= +golang.org/x/tools v0.0.0-20190624190245-7f2218787638/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8 h1:Nw54tB0rB7hY/N0NQvRW8DG4Yk3Q6T9cu9RcFQDu1tc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= diff --git a/vendor/github.com/buildpack/imgutil/image.go b/vendor/github.com/buildpack/imgutil/image.go index f561c0a2e..ef8f78d06 100644 --- a/vendor/github.com/buildpack/imgutil/image.go +++ b/vendor/github.com/buildpack/imgutil/image.go @@ -1,14 +1,31 @@ package imgutil import ( + "fmt" "io" "time" ) +type SaveDiagnostic struct { + ImageName string + Cause error +} + +type SaveError struct { + Errors []SaveDiagnostic +} + +func (e SaveError) Error() string { + var failed []string + for _, d := range e.Errors { + failed = append(failed, d.ImageName) + } + return fmt.Sprintf("failed to write image to the following tags: %+v", failed) +} + type Image interface { Name() string Rename(name string) - Digest() (string, error) Label(string) (string, error) SetLabel(string, string) error Env(key string) (string, error) @@ -20,9 +37,14 @@ type Image interface { AddLayer(path string) error ReuseLayer(sha string) error TopLayer() (string, error) - Save() (string, error) + // Save saves the image as `Name()` and any additional names provided to this method. + Save(additionalNames ...string) error + // Found tells whether the image exists in the repository by `Name()`. 
Found() bool - GetLayer(string) (io.ReadCloser, error) + GetLayer(sha string) (io.ReadCloser, error) Delete() error CreatedAt() (time.Time, error) + Identifier() (Identifier, error) } + +type Identifier fmt.Stringer diff --git a/vendor/github.com/buildpack/imgutil/local.go b/vendor/github.com/buildpack/imgutil/local.go deleted file mode 100644 index 543ba109b..000000000 --- a/vendor/github.com/buildpack/imgutil/local.go +++ /dev/null @@ -1,550 +0,0 @@ -package imgutil - -import ( - "archive/tar" - "context" - "crypto/sha256" - "encoding/hex" - "encoding/json" - "fmt" - "io" - "io/ioutil" - "os" - "path/filepath" - "strings" - "sync" - "time" - - "github.com/docker/docker/api/types" - "github.com/docker/docker/api/types/container" - "github.com/docker/docker/client" - "github.com/google/go-containerregistry/pkg/name" - "github.com/pkg/errors" -) - -type localImage struct { - repoName string - docker *client.Client - inspect types.ImageInspect - layerPaths []string - currentTempImage string - prevDir string - prevMap map[string]string - prevOnce *sync.Once - easyAddLayers []string -} - -func EmptyLocalImage(repoName string, dockerClient *client.Client) Image { - inspect := types.ImageInspect{} - inspect.Config = &container.Config{ - Labels: map[string]string{}, - } - return &localImage{ - repoName: repoName, - docker: dockerClient, - inspect: inspect, - prevOnce: &sync.Once{}, - } -} - -func NewLocalImage(repoName string, dockerClient *client.Client) (Image, error) { - inspect, _, err := dockerClient.ImageInspectWithRaw(context.Background(), repoName) - if err != nil && !client.IsErrNotFound(err) { - return nil, err - } - - return &localImage{ - docker: dockerClient, - repoName: repoName, - inspect: inspect, - layerPaths: make([]string, len(inspect.RootFS.Layers)), - prevOnce: &sync.Once{}, - }, nil -} - -func (l *localImage) Label(key string) (string, error) { - if l.inspect.Config == nil { - return "", nil - } - labels := l.inspect.Config.Labels - return labels[key], nil -} - -func (l *localImage) Env(key string) (string, error) { - if l.inspect.Config == nil { - return "", nil - } - for _, envVar := range l.inspect.Config.Env { - parts := strings.Split(envVar, "=") - if parts[0] == key { - return parts[1], nil - } - } - return "", nil -} - -func (l *localImage) Rename(name string) { - l.easyAddLayers = nil - if prevInspect, _, err := l.docker.ImageInspectWithRaw(context.TODO(), name); err == nil { - if l.sameBase(prevInspect) { - l.easyAddLayers = prevInspect.RootFS.Layers[len(l.inspect.RootFS.Layers):] - } - } - - l.repoName = name -} - -func (l *localImage) sameBase(prevInspect types.ImageInspect) bool { - if len(prevInspect.RootFS.Layers) < len(l.inspect.RootFS.Layers) { - return false - } - for i, baseLayer := range l.inspect.RootFS.Layers { - if baseLayer != prevInspect.RootFS.Layers[i] { - return false - } - } - return true -} - -func (l *localImage) Name() string { - return l.repoName -} - -func (l *localImage) Found() bool { - return l.inspect.Config != nil -} - -func (l *localImage) Digest() (string, error) { - if !l.Found() { - return "", fmt.Errorf("failed to get digest, image '%s' does not exist", l.repoName) - } - if len(l.inspect.RepoDigests) == 0 { - return "", nil - } - parts := strings.Split(l.inspect.RepoDigests[0], "@") - if len(parts) != 2 { - return "", fmt.Errorf("failed to get digest, image '%s' has malformed digest '%s'", l.repoName, l.inspect.RepoDigests[0]) - } - return parts[1], nil -} - -func (l *localImage) CreatedAt() (time.Time, error) { - createdAtTime := 
l.inspect.Created - createdTime, err := time.Parse(time.RFC3339Nano, createdAtTime) - - if err != nil { - return time.Time{}, err - } - return createdTime, nil -} - -func (l *localImage) Rebase(baseTopLayer string, newBase Image) error { - ctx := context.Background() - - // FIND TOP LAYER - keepLayers := -1 - for i, diffID := range l.inspect.RootFS.Layers { - if diffID == baseTopLayer { - keepLayers = len(l.inspect.RootFS.Layers) - i - 1 - break - } - } - if keepLayers == -1 { - return fmt.Errorf("'%s' not found in '%s' during rebase", baseTopLayer, l.repoName) - } - - // SWITCH BASE LAYERS - newBaseInspect, _, err := l.docker.ImageInspectWithRaw(ctx, newBase.Name()) - if err != nil { - return errors.Wrap(err, "analyze read previous image config") - } - l.inspect.RootFS.Layers = newBaseInspect.RootFS.Layers - l.layerPaths = make([]string, len(l.inspect.RootFS.Layers)) - - // SAVE CURRENT IMAGE TO DISK - if err := l.prevDownload(); err != nil { - return err - } - - // READ MANIFEST.JSON - b, err := ioutil.ReadFile(filepath.Join(l.prevDir, "manifest.json")) - if err != nil { - return err - } - var manifest []struct{ Layers []string } - if err := json.Unmarshal(b, &manifest); err != nil { - return err - } - if len(manifest) != 1 { - return fmt.Errorf("expected 1 image received %d", len(manifest)) - } - - // ADD EXISTING LAYERS - for _, filename := range manifest[0].Layers[(len(manifest[0].Layers) - keepLayers):] { - if err := l.AddLayer(filepath.Join(l.prevDir, filename)); err != nil { - return err - } - } - - return nil -} - -func (l *localImage) SetLabel(key, val string) error { - if l.inspect.Config == nil { - return fmt.Errorf("failed to set label, image '%s' does not exist", l.repoName) - } - l.inspect.Config.Labels[key] = val - return nil -} - -func (l *localImage) SetEnv(key, val string) error { - if l.inspect.Config == nil { - return fmt.Errorf("failed to set env var, image '%s' does not exist", l.repoName) - } - l.inspect.Config.Env = append(l.inspect.Config.Env, fmt.Sprintf("%s=%s", key, val)) - return nil -} - -func (l *localImage) SetWorkingDir(dir string) error { - if l.inspect.Config == nil { - return fmt.Errorf("failed to set working dir, image '%s' does not exist", l.repoName) - } - l.inspect.Config.WorkingDir = dir - return nil -} - -func (l *localImage) SetEntrypoint(ep ...string) error { - if l.inspect.Config == nil { - return fmt.Errorf("failed to set entrypoint, image '%s' does not exist", l.repoName) - } - l.inspect.Config.Entrypoint = ep - return nil -} - -func (l *localImage) SetCmd(cmd ...string) error { - if l.inspect.Config == nil { - return fmt.Errorf("failed to set cmd, image '%s' does not exist", l.repoName) - } - l.inspect.Config.Cmd = cmd - return nil -} - -func (l *localImage) TopLayer() (string, error) { - all := l.inspect.RootFS.Layers - topLayer := all[len(all)-1] - return topLayer, nil -} - -func (l *localImage) GetLayer(sha string) (io.ReadCloser, error) { - if err := l.prevDownload(); err != nil { - return nil, err - } - - layerID, ok := l.prevMap[sha] - if !ok { - return nil, fmt.Errorf("image '%s' does not contain layer with diff ID '%s'", l.repoName, sha) - } - return os.Open(filepath.Join(l.prevDir, layerID)) -} - -func (l *localImage) AddLayer(path string) error { - f, err := os.Open(path) - if err != nil { - return errors.Wrapf(err, "AddLayer: open layer: %s", path) - } - defer f.Close() - hasher := sha256.New() - if _, err := io.Copy(hasher, f); err != nil { - return errors.Wrapf(err, "AddLayer: calculate checksum: %s", path) - } - sha := 
hex.EncodeToString(hasher.Sum(make([]byte, 0, hasher.Size()))) - - l.inspect.RootFS.Layers = append(l.inspect.RootFS.Layers, "sha256:"+sha) - l.layerPaths = append(l.layerPaths, path) - l.easyAddLayers = nil - - return nil -} - -func (l *localImage) ReuseLayer(sha string) error { - if len(l.easyAddLayers) > 0 && l.easyAddLayers[0] == sha { - l.inspect.RootFS.Layers = append(l.inspect.RootFS.Layers, sha) - l.layerPaths = append(l.layerPaths, "") - l.easyAddLayers = l.easyAddLayers[1:] - return nil - } - - if err := l.prevDownload(); err != nil { - return err - } - - reuseLayer, ok := l.prevMap[sha] - if !ok { - return fmt.Errorf("SHA %s was not found in %s", sha, l.repoName) - } - - return l.AddLayer(filepath.Join(l.prevDir, reuseLayer)) -} - -func (l *localImage) Save() (string, error) { - ctx := context.Background() - done := make(chan error) - - t, err := name.NewTag(l.repoName, name.WeakValidation) - if err != nil { - return "", err - } - repoName := t.String() - - pr, pw := io.Pipe() - defer pw.Close() - go func() { - res, err := l.docker.ImageLoad(ctx, pr, true) - if err != nil { - done <- err - return - } - defer res.Body.Close() - io.Copy(ioutil.Discard, res.Body) - - done <- nil - }() - - tw := tar.NewWriter(pw) - defer tw.Close() - - configFile, err := l.configFile() - if err != nil { - return "", errors.Wrap(err, "generate config file") - } - - imgID := fmt.Sprintf("%x", sha256.Sum256(configFile)) - if err := addTextToTar(tw, imgID+".json", configFile); err != nil { - return "", err - } - - var layerPaths []string - for _, path := range l.layerPaths { - if path == "" { - layerPaths = append(layerPaths, "") - continue - } - layerName := fmt.Sprintf("/%x.tar", sha256.Sum256([]byte(path))) - f, err := os.Open(path) - if err != nil { - return "", err - } - defer f.Close() - if err := addFileToTar(tw, layerName, f); err != nil { - return "", err - } - f.Close() - layerPaths = append(layerPaths, layerName) - - } - - manifest, err := json.Marshal([]map[string]interface{}{ - { - "Config": imgID + ".json", - "RepoTags": []string{repoName}, - "Layers": layerPaths, - }, - }) - if err != nil { - return "", err - } - - if err := addTextToTar(tw, "manifest.json", manifest); err != nil { - return "", err - } - - tw.Close() - pw.Close() - err = <-done - - if l.prevDir != "" { - os.RemoveAll(l.prevDir) - l.prevDir = "" - l.prevMap = nil - l.prevOnce = &sync.Once{} - } - - if _, _, err = l.docker.ImageInspectWithRaw(context.Background(), imgID); err != nil { - if client.IsErrNotFound(err) { - return "", fmt.Errorf("save image '%s'", l.repoName) - } - return "", err - } - - return imgID, err -} - -func (l *localImage) configFile() ([]byte, error) { - imgConfig := map[string]interface{}{ - "os": "linux", - "created": time.Now().Format(time.RFC3339), - "config": l.inspect.Config, - "rootfs": map[string][]string{ - "diff_ids": l.inspect.RootFS.Layers, - }, - "history": make([]struct{}, len(l.inspect.RootFS.Layers)), - } - return json.Marshal(imgConfig) -} - -func (l *localImage) Delete() error { - if !l.Found() { - return nil - } - options := types.ImageRemoveOptions{ - Force: true, - PruneChildren: true, - } - _, err := l.docker.ImageRemove(context.Background(), l.inspect.ID, options) - return err -} - -func (l *localImage) prevDownload() error { - var outerErr error - l.prevOnce.Do(func() { - ctx := context.Background() - - tarFile, err := l.docker.ImageSave(ctx, []string{l.repoName}) - if err != nil { - outerErr = err - return - } - defer tarFile.Close() - - l.prevDir, err = ioutil.TempDir("", 
"imgutil.local.reuse-layer.") - if err != nil { - outerErr = errors.Wrap(err, "local reuse-layer create temp dir") - return - } - - err = untar(tarFile, l.prevDir) - if err != nil { - outerErr = err - return - } - - mf, err := os.Open(filepath.Join(l.prevDir, "manifest.json")) - if err != nil { - outerErr = err - return - } - defer mf.Close() - - var manifest []struct { - Config string - Layers []string - } - if err := json.NewDecoder(mf).Decode(&manifest); err != nil { - outerErr = err - return - } - - if len(manifest) != 1 { - outerErr = fmt.Errorf("manifest.json had unexpected number of entries: %d", len(manifest)) - return - } - - df, err := os.Open(filepath.Join(l.prevDir, manifest[0].Config)) - if err != nil { - outerErr = err - return - } - defer df.Close() - - var details struct { - RootFS struct { - DiffIDs []string `json:"diff_ids"` - } `json:"rootfs"` - } - - if err = json.NewDecoder(df).Decode(&details); err != nil { - outerErr = err - return - } - - if len(manifest[0].Layers) != len(details.RootFS.DiffIDs) { - outerErr = fmt.Errorf("layers and diff IDs do not match, there are %d layers and %d diffIDs", len(manifest[0].Layers), len(details.RootFS.DiffIDs)) - return - } - - l.prevMap = make(map[string]string, len(manifest[0].Layers)) - for i, diffID := range details.RootFS.DiffIDs { - layerID := manifest[0].Layers[i] - l.prevMap[diffID] = layerID - } - }) - return outerErr -} - -func addTextToTar(tw *tar.Writer, name string, contents []byte) error { - hdr := &tar.Header{Name: name, Mode: 0644, Size: int64(len(contents))} - if err := tw.WriteHeader(hdr); err != nil { - return err - } - _, err := tw.Write(contents) - return err -} - -func addFileToTar(tw *tar.Writer, name string, contents *os.File) error { - fi, err := contents.Stat() - if err != nil { - return err - } - hdr := &tar.Header{Name: name, Mode: 0644, Size: int64(fi.Size())} - if err := tw.WriteHeader(hdr); err != nil { - return err - } - _, err = io.Copy(tw, contents) - return err -} - -func untar(r io.Reader, dest string) error { - tr := tar.NewReader(r) - for { - hdr, err := tr.Next() - if err == io.EOF { - // end of tar archive - return nil - } - if err != nil { - return err - } - - path := filepath.Join(dest, hdr.Name) - - switch hdr.Typeflag { - case tar.TypeDir: - if err := os.MkdirAll(path, hdr.FileInfo().Mode()); err != nil { - return err - } - case tar.TypeReg, tar.TypeRegA: - _, err := os.Stat(filepath.Dir(path)) - if os.IsNotExist(err) { - if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { - return err - } - } - - fh, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY, hdr.FileInfo().Mode()) - if err != nil { - return err - } - if _, err := io.Copy(fh, tr); err != nil { - fh.Close() - return err - } - fh.Close() - case tar.TypeSymlink: - if err := os.Symlink(hdr.Linkname, path); err != nil { - return err - } - default: - return fmt.Errorf("unknown file type in tar %d", hdr.Typeflag) - } - } -} diff --git a/vendor/github.com/buildpack/imgutil/local/identifier.go b/vendor/github.com/buildpack/imgutil/local/identifier.go new file mode 100644 index 000000000..dd24fba1b --- /dev/null +++ b/vendor/github.com/buildpack/imgutil/local/identifier.go @@ -0,0 +1,9 @@ +package local + +type IDIdentifier struct { + ImageID string +} + +func (i IDIdentifier) String() string { + return i.ImageID +} diff --git a/vendor/github.com/buildpack/imgutil/local/local.go b/vendor/github.com/buildpack/imgutil/local/local.go new file mode 100644 index 000000000..5adbb0a0d --- /dev/null +++ 
b/vendor/github.com/buildpack/imgutil/local/local.go @@ -0,0 +1,616 @@ +package local + +import ( + "archive/tar" + "context" + "crypto/sha256" + "encoding/hex" + "encoding/json" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" + "time" + + "github.com/docker/docker/api/types" + "github.com/docker/docker/api/types/container" + "github.com/docker/docker/client" + "github.com/google/go-containerregistry/pkg/name" + "github.com/pkg/errors" + "golang.org/x/sync/singleflight" + + "github.com/buildpack/imgutil" +) + +type Image struct { + repoName string + docker *client.Client + inspect types.ImageInspect + layerPaths []string + currentTempImage string + requestGroup singleflight.Group + prevName string + easyAddLayers []string +} + +type FileSystemLocalImage struct { + dir string + layersMap map[string]string +} + +type ImageOption func(image *Image) (*Image, error) + +func WithPreviousImage(imageName string) ImageOption { + return func(i *Image) (*Image, error) { + if _, err := inspectOptionalImage(i.docker, imageName); err != nil { + return i, err + } + + i.prevName = imageName + + return i, nil + } +} + +func FromBaseImage(imageName string) ImageOption { + return func(i *Image) (*Image, error) { + var ( + err error + inspect types.ImageInspect + ) + + if inspect, err = inspectOptionalImage(i.docker, imageName); err != nil { + return i, err + } + + i.inspect = inspect + i.layerPaths = make([]string, len(i.inspect.RootFS.Layers)) + + return i, nil + } +} + +func NewImage(repoName string, dockerClient *client.Client, ops ...ImageOption) (imgutil.Image, error) { + inspect := defaultInspect() + + image := &Image{ + docker: dockerClient, + repoName: repoName, + inspect: inspect, + layerPaths: make([]string, len(inspect.RootFS.Layers)), + } + + var err error + for _, v := range ops { + image, err = v(image) + if err != nil { + return nil, err + } + } + + return image, nil +} + +func (i *Image) Label(key string) (string, error) { + labels := i.inspect.Config.Labels + return labels[key], nil +} + +func (i *Image) Env(key string) (string, error) { + for _, envVar := range i.inspect.Config.Env { + parts := strings.Split(envVar, "=") + if parts[0] == key { + return parts[1], nil + } + } + return "", nil +} + +func (i *Image) Rename(name string) { + i.easyAddLayers = nil + if prevInspect, _, err := i.docker.ImageInspectWithRaw(context.TODO(), name); err == nil { + if i.sameBase(prevInspect) { + i.easyAddLayers = prevInspect.RootFS.Layers[len(i.inspect.RootFS.Layers):] + } + } + + i.repoName = name +} + +func (i *Image) sameBase(prevInspect types.ImageInspect) bool { + if len(prevInspect.RootFS.Layers) < len(i.inspect.RootFS.Layers) { + return false + } + for i, baseLayer := range i.inspect.RootFS.Layers { + if baseLayer != prevInspect.RootFS.Layers[i] { + return false + } + } + return true +} + +func (i *Image) Name() string { + return i.repoName +} + +func (i *Image) Found() bool { + return i.inspect.ID != "" +} + +func (i *Image) Identifier() (imgutil.Identifier, error) { + return IDIdentifier{ + ImageID: strings.TrimPrefix(i.inspect.ID, "sha256:"), + }, nil +} + +func (i *Image) CreatedAt() (time.Time, error) { + createdAtTime := i.inspect.Created + createdTime, err := time.Parse(time.RFC3339Nano, createdAtTime) + + if err != nil { + return time.Time{}, err + } + return createdTime, nil +} + +func (i *Image) Rebase(baseTopLayer string, newBase imgutil.Image) error { + ctx := context.Background() + + // FIND TOP LAYER + keepLayers := -1 + for idx, diffID := range 
i.inspect.RootFS.Layers { + if diffID == baseTopLayer { + keepLayers = len(i.inspect.RootFS.Layers) - idx - 1 + break + } + } + if keepLayers == -1 { + return fmt.Errorf("'%s' not found in '%s' during rebase", baseTopLayer, i.repoName) + } + + // SWITCH BASE LAYERS + newBaseInspect, _, err := i.docker.ImageInspectWithRaw(ctx, newBase.Name()) + if err != nil { + return errors.Wrap(err, "analyze read previous image config") + } + i.inspect.RootFS.Layers = newBaseInspect.RootFS.Layers + i.layerPaths = make([]string, len(i.inspect.RootFS.Layers)) + + // DOWNLOAD IMAGE + fsImage, err := i.downloadImageOnce(i.repoName) + if err != nil { + return err + } + + // READ MANIFEST.JSON + b, err := ioutil.ReadFile(filepath.Join(fsImage.dir, "manifest.json")) + if err != nil { + return err + } + var manifest []struct{ Layers []string } + if err := json.Unmarshal(b, &manifest); err != nil { + return err + } + if len(manifest) != 1 { + return fmt.Errorf("expected 1 image received %d", len(manifest)) + } + + // ADD EXISTING LAYERS + for _, filename := range manifest[0].Layers[(len(manifest[0].Layers) - keepLayers):] { + if err := i.AddLayer(filepath.Join(fsImage.dir, filename)); err != nil { + return err + } + } + + return nil +} + +func (i *Image) SetLabel(key, val string) error { + if i.inspect.Config.Labels == nil { + i.inspect.Config.Labels = map[string]string{} + } + + i.inspect.Config.Labels[key] = val + return nil +} + +func (i *Image) SetEnv(key, val string) error { + i.inspect.Config.Env = append(i.inspect.Config.Env, fmt.Sprintf("%s=%s", key, val)) + return nil +} + +func (i *Image) SetWorkingDir(dir string) error { + i.inspect.Config.WorkingDir = dir + return nil +} + +func (i *Image) SetEntrypoint(ep ...string) error { + i.inspect.Config.Entrypoint = ep + return nil +} + +func (i *Image) SetCmd(cmd ...string) error { + i.inspect.Config.Cmd = cmd + return nil +} + +func (i *Image) TopLayer() (string, error) { + all := i.inspect.RootFS.Layers + + if len(all) == 0 { + return "", fmt.Errorf("image '%s' has no layers", i.repoName) + } + + topLayer := all[len(all)-1] + return topLayer, nil +} + +func (i *Image) GetLayer(sha string) (io.ReadCloser, error) { + fsImage, err := i.downloadImageOnce(i.repoName) + if err != nil { + return nil, err + } + + layerID, ok := fsImage.layersMap[sha] + if !ok { + return nil, fmt.Errorf("image '%s' does not contain layer with diff ID '%s'", i.repoName, sha) + } + return os.Open(filepath.Join(fsImage.dir, layerID)) +} + +func (i *Image) AddLayer(path string) error { + f, err := os.Open(path) + if err != nil { + return errors.Wrapf(err, "AddLayer: open layer: %s", path) + } + defer f.Close() + hasher := sha256.New() + if _, err := io.Copy(hasher, f); err != nil { + return errors.Wrapf(err, "AddLayer: calculate checksum: %s", path) + } + sha := hex.EncodeToString(hasher.Sum(make([]byte, 0, hasher.Size()))) + + i.inspect.RootFS.Layers = append(i.inspect.RootFS.Layers, "sha256:"+sha) + i.layerPaths = append(i.layerPaths, path) + i.easyAddLayers = nil + + return nil +} + +func (i *Image) ReuseLayer(sha string) error { + if len(i.easyAddLayers) > 0 && i.easyAddLayers[0] == sha { + i.inspect.RootFS.Layers = append(i.inspect.RootFS.Layers, sha) + i.layerPaths = append(i.layerPaths, "") + i.easyAddLayers = i.easyAddLayers[1:] + return nil + } + + if i.prevName == "" { + return errors.New("no previous image provided to reuse layers from") + } + + fsImage, err := i.downloadImageOnce(i.prevName) + if err != nil { + return err + } + + reuseLayer, ok := fsImage.layersMap[sha] + if 
!ok { + return fmt.Errorf("SHA %s was not found in %s", sha, i.repoName) + } + + return i.AddLayer(filepath.Join(fsImage.dir, reuseLayer)) +} + +func (i *Image) Save(additionalNames ...string) error { + inspect, err := i.doSave() + if err != nil { + saveErr := imgutil.SaveError{} + for _, n := range append([]string{i.Name()}, additionalNames...) { + saveErr.Errors = append(saveErr.Errors, imgutil.SaveDiagnostic{ImageName: n, Cause: err}) + } + return saveErr + } + i.inspect = inspect + + var errs []imgutil.SaveDiagnostic + for _, n := range additionalNames { + if err := i.docker.ImageTag(context.Background(), i.repoName, n); err != nil { + errs = append(errs, imgutil.SaveDiagnostic{ImageName: n, Cause: err}) + } + } + + if len(errs) > 0 { + return imgutil.SaveError{Errors: errs} + } + + return nil +} + +func (i *Image) doSave() (types.ImageInspect, error) { + ctx := context.Background() + done := make(chan error) + + t, err := name.NewTag(i.repoName, name.WeakValidation) + if err != nil { + return types.ImageInspect{}, err + } + repoName := t.String() + + pr, pw := io.Pipe() + defer pw.Close() + go func() { + res, err := i.docker.ImageLoad(ctx, pr, true) + if err != nil { + done <- err + return + } + defer res.Body.Close() + io.Copy(ioutil.Discard, res.Body) + + done <- nil + }() + + tw := tar.NewWriter(pw) + defer tw.Close() + + configFile, err := i.newConfigFile() + if err != nil { + return types.ImageInspect{}, errors.Wrap(err, "generate config file") + } + + id := fmt.Sprintf("%x", sha256.Sum256(configFile)) + if err := addTextToTar(tw, id+".json", configFile); err != nil { + return types.ImageInspect{}, err + } + + var layerPaths []string + for _, path := range i.layerPaths { + if path == "" { + layerPaths = append(layerPaths, "") + continue + } + layerName := fmt.Sprintf("/%x.tar", sha256.Sum256([]byte(path))) + f, err := os.Open(path) + if err != nil { + return types.ImageInspect{}, err + } + defer f.Close() + if err := addFileToTar(tw, layerName, f); err != nil { + return types.ImageInspect{}, err + } + f.Close() + layerPaths = append(layerPaths, layerName) + + } + + manifest, err := json.Marshal([]map[string]interface{}{ + { + "Config": id + ".json", + "RepoTags": []string{repoName}, + "Layers": layerPaths, + }, + }) + if err != nil { + return types.ImageInspect{}, err + } + + if err := addTextToTar(tw, "manifest.json", manifest); err != nil { + return types.ImageInspect{}, err + } + + tw.Close() + pw.Close() + err = <-done + + i.requestGroup.Forget(i.repoName) + + inspect, _, err := i.docker.ImageInspectWithRaw(context.Background(), id) + if err != nil { + if client.IsErrNotFound(err) { + return types.ImageInspect{}, errors.Wrapf(err, "save image '%s'", i.repoName) + } + return types.ImageInspect{}, err + } + + return inspect, nil +} + +func (i *Image) newConfigFile() ([]byte, error) { + imgConfig := map[string]interface{}{ + "os": "linux", + "created": time.Now().Format(time.RFC3339), + "config": i.inspect.Config, + "rootfs": map[string][]string{ + "diff_ids": i.inspect.RootFS.Layers, + }, + "history": make([]struct{}, len(i.inspect.RootFS.Layers)), + } + return json.Marshal(imgConfig) +} + +func (i *Image) Delete() error { + if !i.Found() { + return nil + } + options := types.ImageRemoveOptions{ + Force: true, + PruneChildren: true, + } + _, err := i.docker.ImageRemove(context.Background(), i.inspect.ID, options) + return err +} + +func (i *Image) downloadImageOnce(imageName string) (*FileSystemLocalImage, error) { + v, err, _ := i.requestGroup.Do(imageName, func() (details 
interface{}, err error) { + return downloadImage(i.docker, imageName) + }) + + if err != nil { + return nil, err + } + + return v.(*FileSystemLocalImage), nil +} + +func downloadImage(docker *client.Client, imageName string) (*FileSystemLocalImage, error) { + ctx := context.Background() + + tarFile, err := docker.ImageSave(ctx, []string{imageName}) + if err != nil { + return nil, err + } + defer tarFile.Close() + + tmpDir, err := ioutil.TempDir("", "imgutil.local.image.") + if err != nil { + return nil, errors.Wrap(err, "local reuse-layer create temp dir") + } + + err = untar(tarFile, tmpDir) + if err != nil { + return nil, err + } + + mf, err := os.Open(filepath.Join(tmpDir, "manifest.json")) + if err != nil { + return nil, err + } + defer mf.Close() + + var manifest []struct { + Config string + Layers []string + } + if err := json.NewDecoder(mf).Decode(&manifest); err != nil { + return nil, err + } + + if len(manifest) != 1 { + return nil, fmt.Errorf("manifest.json had unexpected number of entries: %d", len(manifest)) + } + + df, err := os.Open(filepath.Join(tmpDir, manifest[0].Config)) + if err != nil { + return nil, err + } + defer df.Close() + + var details struct { + RootFS struct { + DiffIDs []string `json:"diff_ids"` + } `json:"rootfs"` + } + + if err = json.NewDecoder(df).Decode(&details); err != nil { + return nil, err + } + + if len(manifest[0].Layers) != len(details.RootFS.DiffIDs) { + return nil, fmt.Errorf("layers and diff IDs do not match, there are %d layers and %d diffIDs", len(manifest[0].Layers), len(details.RootFS.DiffIDs)) + } + + layersMap := make(map[string]string, len(manifest[0].Layers)) + for i, diffID := range details.RootFS.DiffIDs { + layerID := manifest[0].Layers[i] + layersMap[diffID] = layerID + } + + return &FileSystemLocalImage{ + dir: tmpDir, + layersMap: layersMap, + }, nil +} + +func addTextToTar(tw *tar.Writer, name string, contents []byte) error { + hdr := &tar.Header{Name: name, Mode: 0644, Size: int64(len(contents))} + if err := tw.WriteHeader(hdr); err != nil { + return err + } + _, err := tw.Write(contents) + return err +} + +func addFileToTar(tw *tar.Writer, name string, contents *os.File) error { + fi, err := contents.Stat() + if err != nil { + return err + } + hdr := &tar.Header{Name: name, Mode: 0644, Size: int64(fi.Size())} + if err := tw.WriteHeader(hdr); err != nil { + return err + } + _, err = io.Copy(tw, contents) + return err +} + +func untar(r io.Reader, dest string) error { + tr := tar.NewReader(r) + for { + hdr, err := tr.Next() + if err == io.EOF { + // end of tar archive + return nil + } + if err != nil { + return err + } + + path := filepath.Join(dest, hdr.Name) + + switch hdr.Typeflag { + case tar.TypeDir: + if err := os.MkdirAll(path, hdr.FileInfo().Mode()); err != nil { + return err + } + case tar.TypeReg, tar.TypeRegA: + _, err := os.Stat(filepath.Dir(path)) + if os.IsNotExist(err) { + if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil { + return err + } + } + + fh, err := os.OpenFile(path, os.O_CREATE|os.O_WRONLY, hdr.FileInfo().Mode()) + if err != nil { + return err + } + if _, err := io.Copy(fh, tr); err != nil { + fh.Close() + return err + } + fh.Close() + case tar.TypeSymlink: + if err := os.Symlink(hdr.Linkname, path); err != nil { + return err + } + default: + return fmt.Errorf("unknown file type in tar %d", hdr.Typeflag) + } + } +} + +func inspectOptionalImage(docker *client.Client, imageName string) (types.ImageInspect, error) { + var ( + err error + inspect types.ImageInspect + ) + + if inspect, _, err = 
docker.ImageInspectWithRaw(context.Background(), imageName); err != nil { + if client.IsErrNotFound(err) { + return defaultInspect(), nil + } + + return types.ImageInspect{}, errors.Wrapf(err, "verifying image '%s'", imageName) + } + + return inspect, nil +} + +func defaultInspect() types.ImageInspect { + return types.ImageInspect{ + Config: &container.Config{}, + } +} diff --git a/vendor/github.com/buildpack/imgutil/remote/identifier.go b/vendor/github.com/buildpack/imgutil/remote/identifier.go new file mode 100644 index 000000000..563890535 --- /dev/null +++ b/vendor/github.com/buildpack/imgutil/remote/identifier.go @@ -0,0 +1,13 @@ +package remote + +import ( + "github.com/google/go-containerregistry/pkg/name" +) + +type DigestIdentifier struct { + Digest name.Digest +} + +func (d DigestIdentifier) String() string { + return d.Digest.String() +} diff --git a/vendor/github.com/buildpack/imgutil/remote.go b/vendor/github.com/buildpack/imgutil/remote/remote.go similarity index 56% rename from vendor/github.com/buildpack/imgutil/remote.go rename to vendor/github.com/buildpack/imgutil/remote/remote.go index d51dd4630..9e65d2d02 100644 --- a/vendor/github.com/buildpack/imgutil/remote.go +++ b/vendor/github.com/buildpack/imgutil/remote/remote.go @@ -1,16 +1,15 @@ -package imgutil +package remote import ( "fmt" "io" "net/http" "strings" - "sync" "time" "github.com/google/go-containerregistry/pkg/authn" "github.com/google/go-containerregistry/pkg/name" - "github.com/google/go-containerregistry/pkg/v1" + v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/google/go-containerregistry/pkg/v1/mutate" "github.com/google/go-containerregistry/pkg/v1/random" "github.com/google/go-containerregistry/pkg/v1/remote" @@ -18,28 +17,69 @@ import ( "github.com/google/go-containerregistry/pkg/v1/tarball" "github.com/google/go-containerregistry/pkg/v1/types" "github.com/pkg/errors" + + "github.com/buildpack/imgutil" ) -type remoteImage struct { +type Image struct { keychain authn.Keychain repoName string image v1.Image prevLayers []v1.Layer - prevOnce *sync.Once } -func NewRemoteImage(repoName string, keychain authn.Keychain) (Image, error) { - image, err := newV1Image(keychain, repoName) +type ImageOption func(*Image) (*Image, error) + +func WithPreviousImage(imageName string) ImageOption { + return func(r *Image) (*Image, error) { + var err error + + prevImage, err := newV1Image(r.keychain, imageName) + if err != nil { + return nil, err + } + + prevLayers, err := prevImage.Layers() + if err != nil { + return nil, errors.Wrapf(err, "failed to get layers for previous image with repo name '%s'", imageName) + } + + r.prevLayers = prevLayers + return r, nil + } +} + +func FromBaseImage(imageName string) ImageOption { + return func(r *Image) (*Image, error) { + var err error + r.image, err = newV1Image(r.keychain, imageName) + if err != nil { + return nil, err + } + return r, nil + } +} + +func NewImage(repoName string, keychain authn.Keychain, ops ...ImageOption) (imgutil.Image, error) { + image, err := emptyImage() if err != nil { return nil, err } - return &remoteImage{ + ri := &Image{ keychain: keychain, repoName: repoName, image: image, - prevOnce: &sync.Once{}, - }, nil + } + + for _, op := range ops { + ri, err = op(ri) + if err != nil { + return nil, err + } + } + + return ri, nil } func newV1Image(keychain authn.Keychain, repoName string) (v1.Image, error) { @@ -52,7 +92,7 @@ func newV1Image(keychain authn.Keychain, repoName string) (v1.Image, error) { if transportErr, ok := 
err.(*transport.Error); ok && len(transportErr.Errors) > 0 { switch transportErr.Errors[0].Code { case transport.UnauthorizedErrorCode, transport.ManifestUnknownErrorCode: - return emptyRemoteImage() + return emptyImage() } } return nil, fmt.Errorf("connect to repo store '%s': %s", repoName, err.Error()) @@ -60,7 +100,7 @@ func newV1Image(keychain authn.Keychain, repoName string) (v1.Image, error) { return image, nil } -func emptyRemoteImage() (v1.Image, error) { +func emptyImage() (v1.Image, error) { return random.Image(0, 0) } @@ -78,20 +118,20 @@ func referenceForRepoName(keychain authn.Keychain, ref string) (name.Reference, return r, auth, nil } -func (r *remoteImage) Label(key string) (string, error) { - cfg, err := r.image.ConfigFile() +func (i *Image) Label(key string) (string, error) { + cfg, err := i.image.ConfigFile() if err != nil || cfg == nil { - return "", fmt.Errorf("failed to get config file for image '%s'", r.repoName) + return "", fmt.Errorf("failed to get config file for image '%s'", i.repoName) } labels := cfg.Config.Labels return labels[key], nil } -func (r *remoteImage) Env(key string) (string, error) { - cfg, err := r.image.ConfigFile() +func (i *Image) Env(key string) (string, error) { + cfg, err := i.image.ConfigFile() if err != nil || cfg == nil { - return "", fmt.Errorf("failed to get config file for image '%s'", r.repoName) + return "", fmt.Errorf("failed to get config file for image '%s'", i.repoName) } for _, envVar := range cfg.Config.Env { parts := strings.Split(envVar, "=") @@ -102,16 +142,16 @@ func (r *remoteImage) Env(key string) (string, error) { return "", nil } -func (r *remoteImage) Rename(name string) { - r.repoName = name +func (i *Image) Rename(name string) { + i.repoName = name } -func (r *remoteImage) Name() string { - return r.repoName +func (i *Image) Name() string { + return i.repoName } -func (r *remoteImage) Found() bool { - ref, auth, err := referenceForRepoName(r.keychain, r.repoName) +func (i *Image) Found() bool { + ref, auth, err := referenceForRepoName(i.keychain, i.repoName) if err != nil { return false } @@ -122,38 +162,46 @@ func (r *remoteImage) Found() bool { return true } -func (r *remoteImage) Digest() (string, error) { - hash, err := r.image.Digest() +func (i *Image) Identifier() (imgutil.Identifier, error) { + hash, err := i.image.Digest() + if err != nil { + return nil, fmt.Errorf("failed to get digest for image '%s': %s", i.repoName, err) + } + + digestRef, err := name.NewDigest(i.repoName+"@"+hash.String(), name.WeakValidation) if err != nil { - return "", fmt.Errorf("failed to get digest for image '%s': %s", r.repoName, err) + return nil, errors.Wrap(err, "creating digest reference") } - return hash.String(), nil + + return DigestIdentifier{ + Digest: digestRef, + }, nil } -func (r *remoteImage) CreatedAt() (time.Time, error) { - configFile, err := r.image.ConfigFile() +func (i *Image) CreatedAt() (time.Time, error) { + configFile, err := i.image.ConfigFile() if err != nil { - return time.Time{}, fmt.Errorf("failed to get createdAt time for image '%s': %s", r.repoName, err) + return time.Time{}, fmt.Errorf("failed to get createdAt time for image '%s': %s", i.repoName, err) } return configFile.Created.UTC(), nil } -func (r *remoteImage) Rebase(baseTopLayer string, newBase Image) error { - newBaseRemote, ok := newBase.(*remoteImage) +func (i *Image) Rebase(baseTopLayer string, newBase imgutil.Image) error { + newBaseRemote, ok := newBase.(*Image) if !ok { return errors.New("expected new base to be a remote image") } - 
newImage, err := mutate.Rebase(r.image, &subImage{img: r.image, topSHA: baseTopLayer}, newBaseRemote.image) + newImage, err := mutate.Rebase(i.image, &subImage{img: i.image, topSHA: baseTopLayer}, newBaseRemote.image) if err != nil { return errors.Wrap(err, "rebase") } - r.image = newImage + i.image = newImage return nil } -func (r *remoteImage) SetLabel(key, val string) error { - configFile, err := r.image.ConfigFile() +func (i *Image) SetLabel(key, val string) error { + configFile, err := i.image.ConfigFile() if err != nil { return err } @@ -162,21 +210,21 @@ func (r *remoteImage) SetLabel(key, val string) error { config.Labels = map[string]string{} } config.Labels[key] = val - r.image, err = mutate.Config(r.image, config) + i.image, err = mutate.Config(i.image, config) return err } -func (r *remoteImage) SetEnv(key, val string) error { - configFile, err := r.image.ConfigFile() +func (i *Image) SetEnv(key, val string) error { + configFile, err := i.image.ConfigFile() if err != nil { return err } config := *configFile.Config.DeepCopy() - for i, e := range config.Env { + for idx, e := range config.Env { parts := strings.Split(e, "=") if parts[0] == key { - config.Env[i] = fmt.Sprintf("%s=%s", key, val) - r.image, err = mutate.Config(r.image, config) + config.Env[idx] = fmt.Sprintf("%s=%s", key, val) + i.image, err = mutate.Config(i.image, config) if err != nil { return err } @@ -184,48 +232,51 @@ func (r *remoteImage) SetEnv(key, val string) error { } } config.Env = append(config.Env, fmt.Sprintf("%s=%s", key, val)) - r.image, err = mutate.Config(r.image, config) + i.image, err = mutate.Config(i.image, config) return err } -func (r *remoteImage) SetWorkingDir(dir string) error { - configFile, err := r.image.ConfigFile() +func (i *Image) SetWorkingDir(dir string) error { + configFile, err := i.image.ConfigFile() if err != nil { return err } config := *configFile.Config.DeepCopy() config.WorkingDir = dir - r.image, err = mutate.Config(r.image, config) + i.image, err = mutate.Config(i.image, config) return err } -func (r *remoteImage) SetEntrypoint(ep ...string) error { - configFile, err := r.image.ConfigFile() +func (i *Image) SetEntrypoint(ep ...string) error { + configFile, err := i.image.ConfigFile() if err != nil { return err } config := *configFile.Config.DeepCopy() config.Entrypoint = ep - r.image, err = mutate.Config(r.image, config) + i.image, err = mutate.Config(i.image, config) return err } -func (r *remoteImage) SetCmd(cmd ...string) error { - configFile, err := r.image.ConfigFile() +func (i *Image) SetCmd(cmd ...string) error { + configFile, err := i.image.ConfigFile() if err != nil { return err } config := *configFile.Config.DeepCopy() config.Cmd = cmd - r.image, err = mutate.Config(r.image, config) + i.image, err = mutate.Config(i.image, config) return err } -func (r *remoteImage) TopLayer() (string, error) { - all, err := r.image.Layers() +func (i *Image) TopLayer() (string, error) { + all, err := i.image.Layers() if err != nil { return "", err } + if len(all) == 0 { + return "", fmt.Errorf("image %s has no layers", i.Name()) + } topLayer := all[len(all)-1] hex, err := topLayer.DiffID() if err != nil { @@ -234,48 +285,38 @@ func (r *remoteImage) TopLayer() (string, error) { return hex.String(), nil } -func (r *remoteImage) GetLayer(string) (io.ReadCloser, error) { - panic("not implemented") +func (i *Image) GetLayer(sha string) (io.ReadCloser, error) { + layers, err := i.image.Layers() + if err != nil { + return nil, err + } + + layer, err := findLayerWithSha(layers, sha) + if 
err != nil { + return nil, err + } + + return layer.Compressed() } -func (r *remoteImage) AddLayer(path string) error { +func (i *Image) AddLayer(path string) error { layer, err := tarball.LayerFromFile(path) if err != nil { return err } - r.image, err = mutate.AppendLayers(r.image, layer) + i.image, err = mutate.AppendLayers(i.image, layer) if err != nil { return errors.Wrap(err, "add layer") } return nil } -func (r *remoteImage) ReuseLayer(sha string) error { - var outerErr error - - r.prevOnce.Do(func() { - prevImage, err := newV1Image(r.keychain, r.repoName) - if err != nil { - outerErr = err - return - } - r.prevLayers, err = prevImage.Layers() - if err != nil { - outerErr = fmt.Errorf("failed to get layers for previous image with repo name '%s': %s", r.repoName, err) - } - }) - if outerErr != nil { - return outerErr - } - if len(r.prevLayers) == 0 { - return fmt.Errorf("there is no previous image with name '%s'", r.repoName) - } - - layer, err := findLayerWithSha(r.prevLayers, sha) +func (i *Image) ReuseLayer(sha string) error { + layer, err := findLayerWithSha(i.prevLayers, sha) if err != nil { return err } - r.image, err = mutate.AppendLayers(r.image, layer) + i.image, err = mutate.AppendLayers(i.image, layer) return err } @@ -292,30 +333,43 @@ func findLayerWithSha(layers []v1.Layer, sha string) (v1.Layer, error) { return nil, fmt.Errorf(`previous image did not have layer with sha '%s'`, sha) } -func (r *remoteImage) Save() (string, error) { - ref, auth, err := referenceForRepoName(r.keychain, r.repoName) - if err != nil { - return "", err - } +func (i *Image) Save(additionalNames ...string) error { + var err error + + allNames := append([]string{i.repoName}, additionalNames...) - r.image, err = mutate.CreatedAt(r.image, v1.Time{Time: time.Now()}) + i.image, err = mutate.CreatedAt(i.image, v1.Time{Time: time.Now()}) if err != nil { - return "", err + return errors.Wrap(err, "set creation time") } - if err := remote.Write(ref, r.image, remote.WithAuth(auth)); err != nil { - return "", err + var diagnostics []imgutil.SaveDiagnostic + for _, n := range allNames { + if err := i.doSave(n); err != nil { + diagnostics = append(diagnostics, imgutil.SaveDiagnostic{ImageName: n, Cause: err}) + } + } + if len(diagnostics) > 0 { + return imgutil.SaveError{Errors: diagnostics} } - hex, err := r.image.Digest() + return nil +} + +func (i *Image) doSave(imageName string) error { + ref, auth, err := referenceForRepoName(i.keychain, imageName) if err != nil { - return "", err + return err } - return hex.String(), nil + if err := remote.Write(ref, i.image, remote.WithAuth(auth)); err != nil { + return err + } + + return nil } -func (r *remoteImage) Delete() error { +func (i *Image) Delete() error { return errors.New("remote image does not implement Delete") } diff --git a/vendor/github.com/golang/mock/mockgen/model/model.go b/vendor/github.com/golang/mock/mockgen/model/model.go deleted file mode 100644 index 8113e3d39..000000000 --- a/vendor/github.com/golang/mock/mockgen/model/model.go +++ /dev/null @@ -1,461 +0,0 @@ -// Copyright 2012 Google Inc. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-// See the License for the specific language governing permissions and -// limitations under the License. - -// Package model contains the data model necessary for generating mock implementations. -package model - -import ( - "encoding/gob" - "fmt" - "io" - "reflect" - "strings" -) - -// pkgPath is the importable path for package model -const pkgPath = "github.com/golang/mock/mockgen/model" - -// Package is a Go package. It may be a subset. -type Package struct { - Name string - Interfaces []*Interface - DotImports []string -} - -func (pkg *Package) Print(w io.Writer) { - fmt.Fprintf(w, "package %s\n", pkg.Name) - for _, intf := range pkg.Interfaces { - intf.Print(w) - } -} - -// Imports returns the imports needed by the Package as a set of import paths. -func (pkg *Package) Imports() map[string]bool { - im := make(map[string]bool) - for _, intf := range pkg.Interfaces { - intf.addImports(im) - } - return im -} - -// Interface is a Go interface. -type Interface struct { - Name string - Methods []*Method -} - -func (intf *Interface) Print(w io.Writer) { - fmt.Fprintf(w, "interface %s\n", intf.Name) - for _, m := range intf.Methods { - m.Print(w) - } -} - -func (intf *Interface) addImports(im map[string]bool) { - for _, m := range intf.Methods { - m.addImports(im) - } -} - -// Method is a single method of an interface. -type Method struct { - Name string - In, Out []*Parameter - Variadic *Parameter // may be nil -} - -func (m *Method) Print(w io.Writer) { - fmt.Fprintf(w, " - method %s\n", m.Name) - if len(m.In) > 0 { - fmt.Fprintf(w, " in:\n") - for _, p := range m.In { - p.Print(w) - } - } - if m.Variadic != nil { - fmt.Fprintf(w, " ...:\n") - m.Variadic.Print(w) - } - if len(m.Out) > 0 { - fmt.Fprintf(w, " out:\n") - for _, p := range m.Out { - p.Print(w) - } - } -} - -func (m *Method) addImports(im map[string]bool) { - for _, p := range m.In { - p.Type.addImports(im) - } - if m.Variadic != nil { - m.Variadic.Type.addImports(im) - } - for _, p := range m.Out { - p.Type.addImports(im) - } -} - -// Parameter is an argument or return parameter of a method. -type Parameter struct { - Name string // may be empty - Type Type -} - -func (p *Parameter) Print(w io.Writer) { - n := p.Name - if n == "" { - n = `""` - } - fmt.Fprintf(w, " - %v: %v\n", n, p.Type.String(nil, "")) -} - -// Type is a Go type. -type Type interface { - String(pm map[string]string, pkgOverride string) string - addImports(im map[string]bool) -} - -func init() { - gob.Register(&ArrayType{}) - gob.Register(&ChanType{}) - gob.Register(&FuncType{}) - gob.Register(&MapType{}) - gob.Register(&NamedType{}) - gob.Register(&PointerType{}) - - // Call gob.RegisterName to make sure it has the consistent name registered - // for both gob decoder and encoder. - // - // For a non-pointer type, gob.Register will try to get package full path by - // calling rt.PkgPath() for a name to register. If your project has vendor - // directory, it is possible that PkgPath will get a path like this: - // ../../../vendor/github.com/golang/mock/mockgen/model - gob.RegisterName(pkgPath+".PredeclaredType", PredeclaredType("")) -} - -// ArrayType is an array or slice type. 
-type ArrayType struct { - Len int // -1 for slices, >= 0 for arrays - Type Type -} - -func (at *ArrayType) String(pm map[string]string, pkgOverride string) string { - s := "[]" - if at.Len > -1 { - s = fmt.Sprintf("[%d]", at.Len) - } - return s + at.Type.String(pm, pkgOverride) -} - -func (at *ArrayType) addImports(im map[string]bool) { at.Type.addImports(im) } - -// ChanType is a channel type. -type ChanType struct { - Dir ChanDir // 0, 1 or 2 - Type Type -} - -func (ct *ChanType) String(pm map[string]string, pkgOverride string) string { - s := ct.Type.String(pm, pkgOverride) - if ct.Dir == RecvDir { - return "<-chan " + s - } - if ct.Dir == SendDir { - return "chan<- " + s - } - return "chan " + s -} - -func (ct *ChanType) addImports(im map[string]bool) { ct.Type.addImports(im) } - -// ChanDir is a channel direction. -type ChanDir int - -const ( - RecvDir ChanDir = 1 - SendDir ChanDir = 2 -) - -// FuncType is a function type. -type FuncType struct { - In, Out []*Parameter - Variadic *Parameter // may be nil -} - -func (ft *FuncType) String(pm map[string]string, pkgOverride string) string { - args := make([]string, len(ft.In)) - for i, p := range ft.In { - args[i] = p.Type.String(pm, pkgOverride) - } - if ft.Variadic != nil { - args = append(args, "..."+ft.Variadic.Type.String(pm, pkgOverride)) - } - rets := make([]string, len(ft.Out)) - for i, p := range ft.Out { - rets[i] = p.Type.String(pm, pkgOverride) - } - retString := strings.Join(rets, ", ") - if nOut := len(ft.Out); nOut == 1 { - retString = " " + retString - } else if nOut > 1 { - retString = " (" + retString + ")" - } - return "func(" + strings.Join(args, ", ") + ")" + retString -} - -func (ft *FuncType) addImports(im map[string]bool) { - for _, p := range ft.In { - p.Type.addImports(im) - } - if ft.Variadic != nil { - ft.Variadic.Type.addImports(im) - } - for _, p := range ft.Out { - p.Type.addImports(im) - } -} - -// MapType is a map type. -type MapType struct { - Key, Value Type -} - -func (mt *MapType) String(pm map[string]string, pkgOverride string) string { - return "map[" + mt.Key.String(pm, pkgOverride) + "]" + mt.Value.String(pm, pkgOverride) -} - -func (mt *MapType) addImports(im map[string]bool) { - mt.Key.addImports(im) - mt.Value.addImports(im) -} - -// NamedType is an exported type in a package. -type NamedType struct { - Package string // may be empty - Type string // TODO: should this be typed Type? -} - -func (nt *NamedType) String(pm map[string]string, pkgOverride string) string { - // TODO: is this right? - if pkgOverride == nt.Package { - return nt.Type - } - prefix := pm[nt.Package] - if prefix != "" { - return prefix + "." + nt.Type - } else { - return nt.Type - } -} -func (nt *NamedType) addImports(im map[string]bool) { - if nt.Package != "" { - im[nt.Package] = true - } -} - -// PointerType is a pointer to another type. -type PointerType struct { - Type Type -} - -func (pt *PointerType) String(pm map[string]string, pkgOverride string) string { - return "*" + pt.Type.String(pm, pkgOverride) -} -func (pt *PointerType) addImports(im map[string]bool) { pt.Type.addImports(im) } - -// PredeclaredType is a predeclared type such as "int". -type PredeclaredType string - -func (pt PredeclaredType) String(pm map[string]string, pkgOverride string) string { return string(pt) } -func (pt PredeclaredType) addImports(im map[string]bool) {} - -// The following code is intended to be called by the program generated by ../reflect.go. 
- -func InterfaceFromInterfaceType(it reflect.Type) (*Interface, error) { - if it.Kind() != reflect.Interface { - return nil, fmt.Errorf("%v is not an interface", it) - } - intf := &Interface{} - - for i := 0; i < it.NumMethod(); i++ { - mt := it.Method(i) - // TODO: need to skip unexported methods? or just raise an error? - m := &Method{ - Name: mt.Name, - } - - var err error - m.In, m.Variadic, m.Out, err = funcArgsFromType(mt.Type) - if err != nil { - return nil, err - } - - intf.Methods = append(intf.Methods, m) - } - - return intf, nil -} - -// t's Kind must be a reflect.Func. -func funcArgsFromType(t reflect.Type) (in []*Parameter, variadic *Parameter, out []*Parameter, err error) { - nin := t.NumIn() - if t.IsVariadic() { - nin-- - } - var p *Parameter - for i := 0; i < nin; i++ { - p, err = parameterFromType(t.In(i)) - if err != nil { - return - } - in = append(in, p) - } - if t.IsVariadic() { - p, err = parameterFromType(t.In(nin).Elem()) - if err != nil { - return - } - variadic = p - } - for i := 0; i < t.NumOut(); i++ { - p, err = parameterFromType(t.Out(i)) - if err != nil { - return - } - out = append(out, p) - } - return -} - -func parameterFromType(t reflect.Type) (*Parameter, error) { - tt, err := typeFromType(t) - if err != nil { - return nil, err - } - return &Parameter{Type: tt}, nil -} - -var errorType = reflect.TypeOf((*error)(nil)).Elem() - -var byteType = reflect.TypeOf(byte(0)) - -func typeFromType(t reflect.Type) (Type, error) { - // Hack workaround for https://golang.org/issue/3853. - // This explicit check should not be necessary. - if t == byteType { - return PredeclaredType("byte"), nil - } - - if imp := t.PkgPath(); imp != "" { - return &NamedType{ - Package: impPath(imp), - Type: t.Name(), - }, nil - } - - // only unnamed or predeclared types after here - - // Lots of types have element types. Let's do the parsing and error checking for all of them. - var elemType Type - switch t.Kind() { - case reflect.Array, reflect.Chan, reflect.Map, reflect.Ptr, reflect.Slice: - var err error - elemType, err = typeFromType(t.Elem()) - if err != nil { - return nil, err - } - } - - switch t.Kind() { - case reflect.Array: - return &ArrayType{ - Len: t.Len(), - Type: elemType, - }, nil - case reflect.Bool, reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr, - reflect.Float32, reflect.Float64, reflect.Complex64, reflect.Complex128, reflect.String: - return PredeclaredType(t.Kind().String()), nil - case reflect.Chan: - var dir ChanDir - switch t.ChanDir() { - case reflect.RecvDir: - dir = RecvDir - case reflect.SendDir: - dir = SendDir - } - return &ChanType{ - Dir: dir, - Type: elemType, - }, nil - case reflect.Func: - in, variadic, out, err := funcArgsFromType(t) - if err != nil { - return nil, err - } - return &FuncType{ - In: in, - Out: out, - Variadic: variadic, - }, nil - case reflect.Interface: - // Two special interfaces. 
- if t.NumMethod() == 0 { - return PredeclaredType("interface{}"), nil - } - if t == errorType { - return PredeclaredType("error"), nil - } - case reflect.Map: - kt, err := typeFromType(t.Key()) - if err != nil { - return nil, err - } - return &MapType{ - Key: kt, - Value: elemType, - }, nil - case reflect.Ptr: - return &PointerType{ - Type: elemType, - }, nil - case reflect.Slice: - return &ArrayType{ - Len: -1, - Type: elemType, - }, nil - case reflect.Struct: - if t.NumField() == 0 { - return PredeclaredType("struct{}"), nil - } - } - - // TODO: Struct, UnsafePointer - return nil, fmt.Errorf("can't yet turn %v (%v) into a model.Type", t, t.Kind()) -} - -// impPath sanitizes the package path returned by `PkgPath` method of a reflect Type so that -// it is importable. PkgPath might return a path that includes "vendor". These paths do not -// compile, so we need to remove everything up to and including "/vendor/". -// See https://github.com/golang/go/issues/12019. -func impPath(imp string) string { - if strings.HasPrefix(imp, "vendor/") { - imp = "/" + imp - } - if i := strings.LastIndex(imp, "/vendor/"); i != -1 { - imp = imp[i+len("/vendor/"):] - } - return imp -} diff --git a/vendor/golang.org/x/sync/singleflight/singleflight.go b/vendor/golang.org/x/sync/singleflight/singleflight.go new file mode 100644 index 000000000..97a1aa4bb --- /dev/null +++ b/vendor/golang.org/x/sync/singleflight/singleflight.go @@ -0,0 +1,120 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package singleflight provides a duplicate function call suppression +// mechanism. +package singleflight // import "golang.org/x/sync/singleflight" + +import "sync" + +// call is an in-flight or completed singleflight.Do call +type call struct { + wg sync.WaitGroup + + // These fields are written once before the WaitGroup is done + // and are only read after the WaitGroup is done. + val interface{} + err error + + // forgotten indicates whether Forget was called with this call's key + // while the call was still in flight. + forgotten bool + + // These fields are read and written with the singleflight + // mutex held before the WaitGroup is done, and are read but + // not written after the WaitGroup is done. + dups int + chans []chan<- Result +} + +// Group represents a class of work and forms a namespace in +// which units of work can be executed with duplicate suppression. +type Group struct { + mu sync.Mutex // protects m + m map[string]*call // lazily initialized +} + +// Result holds the results of Do, so they can be passed +// on a channel. +type Result struct { + Val interface{} + Err error + Shared bool +} + +// Do executes and returns the results of the given function, making +// sure that only one execution is in-flight for a given key at a +// time. If a duplicate comes in, the duplicate caller waits for the +// original to complete and receives the same results. +// The return value shared indicates whether v was given to multiple callers. 
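As an aside, a minimal, self-contained sketch of how a caller might use this newly vendored singleflight package to collapse duplicate work, which is the pattern the local image code above relies on for downloads. The key and the "download" step here are hypothetical illustrations, not taken from this patch:

package main

import (
	"fmt"
	"sync"

	"golang.org/x/sync/singleflight"
)

func main() {
	var g singleflight.Group // zero value is ready to use
	var wg sync.WaitGroup

	for n := 0; n < 3; n++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			// All three goroutines ask for the same key, so fn runs at most once;
			// the duplicate callers block and receive the same value and error.
			v, err, shared := g.Do("example/image:latest", func() (interface{}, error) {
				return "pretend this is an expensive image download", nil
			})
			fmt.Println(v, err, shared)
		}()
	}
	wg.Wait()
}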
+func (g *Group) Do(key string, fn func() (interface{}, error)) (v interface{}, err error, shared bool) { + g.mu.Lock() + if g.m == nil { + g.m = make(map[string]*call) + } + if c, ok := g.m[key]; ok { + c.dups++ + g.mu.Unlock() + c.wg.Wait() + return c.val, c.err, true + } + c := new(call) + c.wg.Add(1) + g.m[key] = c + g.mu.Unlock() + + g.doCall(c, key, fn) + return c.val, c.err, c.dups > 0 +} + +// DoChan is like Do but returns a channel that will receive the +// results when they are ready. +func (g *Group) DoChan(key string, fn func() (interface{}, error)) <-chan Result { + ch := make(chan Result, 1) + g.mu.Lock() + if g.m == nil { + g.m = make(map[string]*call) + } + if c, ok := g.m[key]; ok { + c.dups++ + c.chans = append(c.chans, ch) + g.mu.Unlock() + return ch + } + c := &call{chans: []chan<- Result{ch}} + c.wg.Add(1) + g.m[key] = c + g.mu.Unlock() + + go g.doCall(c, key, fn) + + return ch +} + +// doCall handles the single call for a key. +func (g *Group) doCall(c *call, key string, fn func() (interface{}, error)) { + c.val, c.err = fn() + c.wg.Done() + + g.mu.Lock() + if !c.forgotten { + delete(g.m, key) + } + for _, ch := range c.chans { + ch <- Result{c.val, c.err, c.dups > 0} + } + g.mu.Unlock() +} + +// Forget tells the singleflight to forget about a key. Future calls +// to Do for this key will call the function rather than waiting for +// an earlier call to complete. +func (g *Group) Forget(key string) { + g.mu.Lock() + if c, ok := g.m[key]; ok { + c.forgotten = true + } + delete(g.m, key) + g.mu.Unlock() +} diff --git a/vendor/golang.org/x/tools/AUTHORS b/vendor/golang.org/x/tools/AUTHORS new file mode 100644 index 000000000..15167cd74 --- /dev/null +++ b/vendor/golang.org/x/tools/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/golang.org/x/tools/CONTRIBUTORS b/vendor/golang.org/x/tools/CONTRIBUTORS new file mode 100644 index 000000000..1c4577e96 --- /dev/null +++ b/vendor/golang.org/x/tools/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/vendor/golang.org/x/tools/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS new file mode 100644 index 000000000..733099041 --- /dev/null +++ b/vendor/golang.org/x/tools/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/tools/cmd/goimports/doc.go b/vendor/golang.org/x/tools/cmd/goimports/doc.go new file mode 100644 index 000000000..7033e4d4c --- /dev/null +++ b/vendor/golang.org/x/tools/cmd/goimports/doc.go @@ -0,0 +1,43 @@ +/* + +Command goimports updates your Go import lines, +adding missing ones and removing unreferenced ones. + + $ go get golang.org/x/tools/cmd/goimports + +In addition to fixing imports, goimports also formats +your code in the same style as gofmt so it can be used +as a replacement for your editor's gofmt-on-save hook. + +For emacs, make sure you have the latest go-mode.el: + https://github.com/dominikh/go-mode.el +Then in your .emacs file: + (setq gofmt-command "goimports") + (add-hook 'before-save-hook 'gofmt-before-save) + +For vim, set "gofmt_command" to "goimports": + https://golang.org/change/39c724dd7f252 + https://golang.org/wiki/IDEsAndTextEditorPlugins + etc + +For GoSublime, follow the steps described here: + http://michaelwhatcott.com/gosublime-goimports/ + +For other editors, you probably know what to do. + +To exclude directories in your $GOPATH from being scanned for Go +files, goimports respects a configuration file at +$GOPATH/src/.goimportsignore which may contain blank lines, comment +lines (beginning with '#'), or lines naming a directory relative to +the configuration file to ignore when scanning. No globbing or regex +patterns are allowed. Use the "-v" verbose flag to verify it's +working and see what goimports is doing. 
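For orientation, a short sketch of driving the same import-fixing logic from Go rather than via the command line. It assumes the public golang.org/x/tools/imports wrapper (the command vendored here uses an internal implementation), so treat the package path and option fields as an assumption rather than part of this patch:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/imports"
)

func main() {
	// A source fragment that is missing its "fmt" import.
	src := []byte("package demo\n\nfunc hello() { fmt.Println(\"hi\") }\n")

	opts := &imports.Options{Comments: true, TabIndent: true, TabWidth: 8}

	// Process adds the missing import and gofmt-formats the result,
	// mirroring what the goimports command does for a file on disk.
	out, err := imports.Process("demo.go", src, opts)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Print(string(out))
}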
+ +File bugs or feature requests at: + + https://golang.org/issues/new?title=x/tools/cmd/goimports:+ + +Happy hacking! + +*/ +package main // import "golang.org/x/tools/cmd/goimports" diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports.go b/vendor/golang.org/x/tools/cmd/goimports/goimports.go new file mode 100644 index 000000000..a476a7f3c --- /dev/null +++ b/vendor/golang.org/x/tools/cmd/goimports/goimports.go @@ -0,0 +1,377 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package main + +import ( + "bufio" + "bytes" + "errors" + "flag" + "fmt" + "go/build" + "go/scanner" + "io" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "runtime" + "runtime/pprof" + "strings" + + "golang.org/x/tools/internal/imports" +) + +var ( + // main operation modes + list = flag.Bool("l", false, "list files whose formatting differs from goimport's") + write = flag.Bool("w", false, "write result to (source) file instead of stdout") + doDiff = flag.Bool("d", false, "display diffs instead of rewriting files") + srcdir = flag.String("srcdir", "", "choose imports as if source code is from `dir`. When operating on a single file, dir may instead be the complete file name.") + + verbose bool // verbose logging + + cpuProfile = flag.String("cpuprofile", "", "CPU profile output") + memProfile = flag.String("memprofile", "", "memory profile output") + memProfileRate = flag.Int("memrate", 0, "if > 0, sets runtime.MemProfileRate") + + options = &imports.Options{ + TabWidth: 8, + TabIndent: true, + Comments: true, + Fragment: true, + // This environment, and its caches, will be reused for the whole run. + Env: &imports.ProcessEnv{ + GOPATH: build.Default.GOPATH, + GOROOT: build.Default.GOROOT, + }, + } + exitCode = 0 +) + +func init() { + flag.BoolVar(&options.AllErrors, "e", false, "report all errors (not just the first 10 on different lines)") + flag.StringVar(&options.Env.LocalPrefix, "local", "", "put imports beginning with this string after 3rd-party packages; comma-separated list") + flag.BoolVar(&options.FormatOnly, "format-only", false, "if true, don't fix imports and only format. In this mode, goimports is effectively gofmt, with the addition that imports are grouped into sections.") +} + +func report(err error) { + scanner.PrintError(os.Stderr, err) + exitCode = 2 +} + +func usage() { + fmt.Fprintf(os.Stderr, "usage: goimports [flags] [path ...]\n") + flag.PrintDefaults() + os.Exit(2) +} + +func isGoFile(f os.FileInfo) bool { + // ignore non-Go files + name := f.Name() + return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go") +} + +// argumentType is which mode goimports was invoked as. +type argumentType int + +const ( + // fromStdin means the user is piping their source into goimports. + fromStdin argumentType = iota + + // singleArg is the common case from editors, when goimports is run on + // a single file. + singleArg + + // multipleArg is when the user ran "goimports file1.go file2.go" + // or ran goimports on a directory tree. 
+ multipleArg +) + +func processFile(filename string, in io.Reader, out io.Writer, argType argumentType) error { + opt := options + if argType == fromStdin { + nopt := *options + nopt.Fragment = true + opt = &nopt + } + + if in == nil { + f, err := os.Open(filename) + if err != nil { + return err + } + defer f.Close() + in = f + } + + src, err := ioutil.ReadAll(in) + if err != nil { + return err + } + + target := filename + if *srcdir != "" { + // Determine whether the provided -srcdirc is a directory or file + // and then use it to override the target. + // + // See https://github.com/dominikh/go-mode.el/issues/146 + if isFile(*srcdir) { + if argType == multipleArg { + return errors.New("-srcdir value can't be a file when passing multiple arguments or when walking directories") + } + target = *srcdir + } else if argType == singleArg && strings.HasSuffix(*srcdir, ".go") && !isDir(*srcdir) { + // For a file which doesn't exist on disk yet, but might shortly. + // e.g. user in editor opens $DIR/newfile.go and newfile.go doesn't yet exist on disk. + // The goimports on-save hook writes the buffer to a temp file + // first and runs goimports before the actual save to newfile.go. + // The editor's buffer is named "newfile.go" so that is passed to goimports as: + // goimports -srcdir=/gopath/src/pkg/newfile.go /tmp/gofmtXXXXXXXX.go + // and then the editor reloads the result from the tmp file and writes + // it to newfile.go. + target = *srcdir + } else { + // Pretend that file is from *srcdir in order to decide + // visible imports correctly. + target = filepath.Join(*srcdir, filepath.Base(filename)) + } + } + + res, err := imports.Process(target, src, opt) + if err != nil { + return err + } + + if !bytes.Equal(src, res) { + // formatting has changed + if *list { + fmt.Fprintln(out, filename) + } + if *write { + if argType == fromStdin { + // filename is "" + return errors.New("can't use -w on stdin") + } + err = ioutil.WriteFile(filename, res, 0) + if err != nil { + return err + } + } + if *doDiff { + if argType == fromStdin { + filename = "stdin.go" // because .orig looks silly + } + data, err := diff(src, res, filename) + if err != nil { + return fmt.Errorf("computing diff: %s", err) + } + fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename)) + out.Write(data) + } + } + + if !*list && !*write && !*doDiff { + _, err = out.Write(res) + } + + return err +} + +func visitFile(path string, f os.FileInfo, err error) error { + if err == nil && isGoFile(f) { + err = processFile(path, nil, os.Stdout, multipleArg) + } + if err != nil { + report(err) + } + return nil +} + +func walkDir(path string) { + filepath.Walk(path, visitFile) +} + +func main() { + runtime.GOMAXPROCS(runtime.NumCPU()) + + // call gofmtMain in a separate function + // so that it can use defer and have them + // run before the exit. + gofmtMain() + os.Exit(exitCode) +} + +// parseFlags parses command line flags and returns the paths to process. +// It's a var so that custom implementations can replace it in other files. 
+var parseFlags = func() []string { + flag.BoolVar(&verbose, "v", false, "verbose logging") + + flag.Parse() + return flag.Args() +} + +func bufferedFileWriter(dest string) (w io.Writer, close func()) { + f, err := os.Create(dest) + if err != nil { + log.Fatal(err) + } + bw := bufio.NewWriter(f) + return bw, func() { + if err := bw.Flush(); err != nil { + log.Fatalf("error flushing %v: %v", dest, err) + } + if err := f.Close(); err != nil { + log.Fatal(err) + } + } +} + +func gofmtMain() { + flag.Usage = usage + paths := parseFlags() + + if *cpuProfile != "" { + bw, flush := bufferedFileWriter(*cpuProfile) + pprof.StartCPUProfile(bw) + defer flush() + defer pprof.StopCPUProfile() + } + // doTrace is a conditionally compiled wrapper around runtime/trace. It is + // used to allow goimports to compile under gccgo, which does not support + // runtime/trace. See https://golang.org/issue/15544. + defer doTrace()() + if *memProfileRate > 0 { + runtime.MemProfileRate = *memProfileRate + bw, flush := bufferedFileWriter(*memProfile) + defer func() { + runtime.GC() // materialize all statistics + if err := pprof.WriteHeapProfile(bw); err != nil { + log.Fatal(err) + } + flush() + }() + } + + if verbose { + log.SetFlags(log.LstdFlags | log.Lmicroseconds) + options.Env.Debug = true + } + if options.TabWidth < 0 { + fmt.Fprintf(os.Stderr, "negative tabwidth %d\n", options.TabWidth) + exitCode = 2 + return + } + + if len(paths) == 0 { + if err := processFile("", os.Stdin, os.Stdout, fromStdin); err != nil { + report(err) + } + return + } + + argType := singleArg + if len(paths) > 1 { + argType = multipleArg + } + + for _, path := range paths { + switch dir, err := os.Stat(path); { + case err != nil: + report(err) + case dir.IsDir(): + walkDir(path) + default: + if err := processFile(path, nil, os.Stdout, argType); err != nil { + report(err) + } + } + } +} + +func writeTempFile(dir, prefix string, data []byte) (string, error) { + file, err := ioutil.TempFile(dir, prefix) + if err != nil { + return "", err + } + _, err = file.Write(data) + if err1 := file.Close(); err == nil { + err = err1 + } + if err != nil { + os.Remove(file.Name()) + return "", err + } + return file.Name(), nil +} + +func diff(b1, b2 []byte, filename string) (data []byte, err error) { + f1, err := writeTempFile("", "gofmt", b1) + if err != nil { + return + } + defer os.Remove(f1) + + f2, err := writeTempFile("", "gofmt", b2) + if err != nil { + return + } + defer os.Remove(f2) + + cmd := "diff" + if runtime.GOOS == "plan9" { + cmd = "/bin/ape/diff" + } + + data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput() + if len(data) > 0 { + // diff exits with a non-zero status when the files don't match. + // Ignore that failure as long as we get output. + return replaceTempFilename(data, filename) + } + return +} + +// replaceTempFilename replaces temporary filenames in diff with actual one. +// +// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500 +// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500 +// ... +// -> +// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500 +// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500 +// ... +func replaceTempFilename(diff []byte, filename string) ([]byte, error) { + bs := bytes.SplitN(diff, []byte{'\n'}, 3) + if len(bs) < 3 { + return nil, fmt.Errorf("got unexpected diff for %s", filename) + } + // Preserve timestamps. 
+ var t0, t1 []byte + if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 { + t0 = bs[0][i:] + } + if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 { + t1 = bs[1][i:] + } + // Always print filepath with slash separator. + f := filepath.ToSlash(filename) + bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0)) + bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1)) + return bytes.Join(bs, []byte{'\n'}), nil +} + +// isFile reports whether name is a file. +func isFile(name string) bool { + fi, err := os.Stat(name) + return err == nil && fi.Mode().IsRegular() +} + +// isDir reports whether name is a directory. +func isDir(name string) bool { + fi, err := os.Stat(name) + return err == nil && fi.IsDir() +} diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go b/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go new file mode 100644 index 000000000..21d867eaa --- /dev/null +++ b/vendor/golang.org/x/tools/cmd/goimports/goimports_gc.go @@ -0,0 +1,26 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build gc + +package main + +import ( + "flag" + "runtime/trace" +) + +var traceProfile = flag.String("trace", "", "trace profile output") + +func doTrace() func() { + if *traceProfile != "" { + bw, flush := bufferedFileWriter(*traceProfile) + trace.Start(bw) + return func() { + flush() + trace.Stop() + } + } + return func() {} +} diff --git a/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go b/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go new file mode 100644 index 000000000..f5531ceb3 --- /dev/null +++ b/vendor/golang.org/x/tools/cmd/goimports/goimports_not_gc.go @@ -0,0 +1,11 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !gc + +package main + +func doTrace() func() { + return func() {} +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go new file mode 100644 index 000000000..6b7052b89 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go @@ -0,0 +1,627 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +// This file defines utilities for working with source positions. + +import ( + "fmt" + "go/ast" + "go/token" + "sort" +) + +// PathEnclosingInterval returns the node that encloses the source +// interval [start, end), and all its ancestors up to the AST root. +// +// The definition of "enclosing" used by this function considers +// additional whitespace abutting a node to be enclosed by it. +// In this example: +// +// z := x + y // add them +// <-A-> +// <----B-----> +// +// the ast.BinaryExpr(+) node is considered to enclose interval B +// even though its [Pos()..End()) is actually only interval A. +// This behaviour makes user interfaces more tolerant of imperfect +// input. +// +// This function treats tokens as nodes, though they are not included +// in the result. e.g. PathEnclosingInterval("+") returns the +// enclosing ast.BinaryExpr("x + y"). +// +// If start==end, the 1-char interval following start is used instead. +// +// The 'exact' result is true if the interval contains only path[0] +// and perhaps some adjacent whitespace. 
It is false if the interval +// overlaps multiple children of path[0], or if it contains only +// interior whitespace of path[0]. +// In this example: +// +// z := x + y // add them +// <--C--> <---E--> +// ^ +// D +// +// intervals C, D and E are inexact. C is contained by the +// z-assignment statement, because it spans three of its children (:=, +// x, +). So too is the 1-char interval D, because it contains only +// interior whitespace of the assignment. E is considered interior +// whitespace of the BlockStmt containing the assignment. +// +// Precondition: [start, end) both lie within the same file as root. +// TODO(adonovan): return (nil, false) in this case and remove precond. +// Requires FileSet; see loader.tokenFileContainsPos. +// +// Postcondition: path is never nil; it always contains at least 'root'. +// +func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) { + // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging + + // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end). + var visit func(node ast.Node) bool + visit = func(node ast.Node) bool { + path = append(path, node) + + nodePos := node.Pos() + nodeEnd := node.End() + + // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging + + // Intersect [start, end) with interval of node. + if start < nodePos { + start = nodePos + } + if end > nodeEnd { + end = nodeEnd + } + + // Find sole child that contains [start, end). + children := childrenOf(node) + l := len(children) + for i, child := range children { + // [childPos, childEnd) is unaugmented interval of child. + childPos := child.Pos() + childEnd := child.End() + + // [augPos, augEnd) is whitespace-augmented interval of child. + augPos := childPos + augEnd := childEnd + if i > 0 { + augPos = children[i-1].End() // start of preceding whitespace + } + if i < l-1 { + nextChildPos := children[i+1].Pos() + // Does [start, end) lie between child and next child? + if start >= augEnd && end <= nextChildPos { + return false // inexact match + } + augEnd = nextChildPos // end of following whitespace + } + + // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n", + // i, augPos, augEnd, start, end) // debugging + + // Does augmented child strictly contain [start, end)? + if augPos <= start && end <= augEnd { + _, isToken := child.(tokenNode) + return isToken || visit(child) + } + + // Does [start, end) overlap multiple children? + // i.e. left-augmented child contains start + // but LR-augmented child does not contain end. + if start < childEnd && end > augEnd { + break + } + } + + // No single child contained [start, end), + // so node is the result. Is it exact? + + // (It's tempting to put this condition before the + // child loop, but it gives the wrong result in the + // case where a node (e.g. ExprStmt) and its sole + // child have equal intervals.) + if start == nodePos && end == nodeEnd { + return true // exact match + } + + return false // inexact: overlaps multiple children + } + + if start > end { + start, end = end, start + } + + if start < root.End() && end > root.Pos() { + if start == end { + end = start + 1 // empty interval => interval of size 1 + } + exact = visit(root) + + // Reverse the path: + for i, l := 0, len(path); i < l/2; i++ { + path[i], path[l-1-i] = path[l-1-i], path[i] + } + } else { + // Selection lies within whitespace preceding the + // first (or following the last) declaration in the file. 
+ // The result nonetheless always includes the ast.File. + path = append(path, root) + } + + return +} + +// tokenNode is a dummy implementation of ast.Node for a single token. +// They are used transiently by PathEnclosingInterval but never escape +// this package. +// +type tokenNode struct { + pos token.Pos + end token.Pos +} + +func (n tokenNode) Pos() token.Pos { + return n.pos +} + +func (n tokenNode) End() token.Pos { + return n.end +} + +func tok(pos token.Pos, len int) ast.Node { + return tokenNode{pos, pos + token.Pos(len)} +} + +// childrenOf returns the direct non-nil children of ast.Node n. +// It may include fake ast.Node implementations for bare tokens. +// it is not safe to call (e.g.) ast.Walk on such nodes. +// +func childrenOf(n ast.Node) []ast.Node { + var children []ast.Node + + // First add nodes for all true subtrees. + ast.Inspect(n, func(node ast.Node) bool { + if node == n { // push n + return true // recur + } + if node != nil { // push child + children = append(children, node) + } + return false // no recursion + }) + + // Then add fake Nodes for bare tokens. + switch n := n.(type) { + case *ast.ArrayType: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Elt.End(), len("]"))) + + case *ast.AssignStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.BasicLit: + children = append(children, + tok(n.ValuePos, len(n.Value))) + + case *ast.BinaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.BlockStmt: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("}"))) + + case *ast.BranchStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.CallExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + if n.Ellipsis != 0 { + children = append(children, tok(n.Ellipsis, len("..."))) + } + + case *ast.CaseClause: + if n.List == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.ChanType: + switch n.Dir { + case ast.RECV: + children = append(children, tok(n.Begin, len("<-chan"))) + case ast.SEND: + children = append(children, tok(n.Begin, len("chan<-"))) + case ast.RECV | ast.SEND: + children = append(children, tok(n.Begin, len("chan"))) + } + + case *ast.CommClause: + if n.Comm == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.Comment: + // nop + + case *ast.CommentGroup: + // nop + + case *ast.CompositeLit: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("{"))) + + case *ast.DeclStmt: + // nop + + case *ast.DeferStmt: + children = append(children, + tok(n.Defer, len("defer"))) + + case *ast.Ellipsis: + children = append(children, + tok(n.Ellipsis, len("..."))) + + case *ast.EmptyStmt: + // nop + + case *ast.ExprStmt: + // nop + + case *ast.Field: + // TODO(adonovan): Field.{Doc,Comment,Tag}? + + case *ast.FieldList: + children = append(children, + tok(n.Opening, len("(")), + tok(n.Closing, len(")"))) + + case *ast.File: + // TODO test: Doc + children = append(children, + tok(n.Package, len("package"))) + + case *ast.ForStmt: + children = append(children, + tok(n.For, len("for"))) + + case *ast.FuncDecl: + // TODO(adonovan): FuncDecl.Comment? 
+ + // Uniquely, FuncDecl breaks the invariant that + // preorder traversal yields tokens in lexical order: + // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func. + // + // As a workaround, we inline the case for FuncType + // here and order things correctly. + // + children = nil // discard ast.Walk(FuncDecl) info subtrees + children = append(children, tok(n.Type.Func, len("func"))) + if n.Recv != nil { + children = append(children, n.Recv) + } + children = append(children, n.Name) + if n.Type.Params != nil { + children = append(children, n.Type.Params) + } + if n.Type.Results != nil { + children = append(children, n.Type.Results) + } + if n.Body != nil { + children = append(children, n.Body) + } + + case *ast.FuncLit: + // nop + + case *ast.FuncType: + if n.Func != 0 { + children = append(children, + tok(n.Func, len("func"))) + } + + case *ast.GenDecl: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + if n.Lparen != 0 { + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + } + + case *ast.GoStmt: + children = append(children, + tok(n.Go, len("go"))) + + case *ast.Ident: + children = append(children, + tok(n.NamePos, len(n.Name))) + + case *ast.IfStmt: + children = append(children, + tok(n.If, len("if"))) + + case *ast.ImportSpec: + // TODO(adonovan): ImportSpec.{Doc,EndPos}? + + case *ast.IncDecStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.IndexExpr: + children = append(children, + tok(n.Lbrack, len("{")), + tok(n.Rbrack, len("}"))) + + case *ast.InterfaceType: + children = append(children, + tok(n.Interface, len("interface"))) + + case *ast.KeyValueExpr: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.LabeledStmt: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.MapType: + children = append(children, + tok(n.Map, len("map"))) + + case *ast.ParenExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.RangeStmt: + children = append(children, + tok(n.For, len("for")), + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.ReturnStmt: + children = append(children, + tok(n.Return, len("return"))) + + case *ast.SelectStmt: + children = append(children, + tok(n.Select, len("select"))) + + case *ast.SelectorExpr: + // nop + + case *ast.SendStmt: + children = append(children, + tok(n.Arrow, len("<-"))) + + case *ast.SliceExpr: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *ast.StarExpr: + children = append(children, tok(n.Star, len("*"))) + + case *ast.StructType: + children = append(children, tok(n.Struct, len("struct"))) + + case *ast.SwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.TypeAssertExpr: + children = append(children, + tok(n.Lparen-1, len(".")), + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.TypeSpec: + // TODO(adonovan): TypeSpec.{Doc,Comment}? + + case *ast.TypeSwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.UnaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.ValueSpec: + // TODO(adonovan): ValueSpec.{Doc,Comment}? + + case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt: + // nop + } + + // TODO(adonovan): opt: merge the logic of ast.Inspect() into + // the switch above so we can make interleaved callbacks for + // both Nodes and Tokens in the right order and avoid the need + // to sort. 
+ sort.Sort(byPos(children)) + + return children +} + +type byPos []ast.Node + +func (sl byPos) Len() int { + return len(sl) +} +func (sl byPos) Less(i, j int) bool { + return sl[i].Pos() < sl[j].Pos() +} +func (sl byPos) Swap(i, j int) { + sl[i], sl[j] = sl[j], sl[i] +} + +// NodeDescription returns a description of the concrete type of n suitable +// for a user interface. +// +// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident, +// StarExpr) we could be much more specific given the path to the AST +// root. Perhaps we should do that. +// +func NodeDescription(n ast.Node) string { + switch n := n.(type) { + case *ast.ArrayType: + return "array type" + case *ast.AssignStmt: + return "assignment" + case *ast.BadDecl: + return "bad declaration" + case *ast.BadExpr: + return "bad expression" + case *ast.BadStmt: + return "bad statement" + case *ast.BasicLit: + return "basic literal" + case *ast.BinaryExpr: + return fmt.Sprintf("binary %s operation", n.Op) + case *ast.BlockStmt: + return "block" + case *ast.BranchStmt: + switch n.Tok { + case token.BREAK: + return "break statement" + case token.CONTINUE: + return "continue statement" + case token.GOTO: + return "goto statement" + case token.FALLTHROUGH: + return "fall-through statement" + } + case *ast.CallExpr: + if len(n.Args) == 1 && !n.Ellipsis.IsValid() { + return "function call (or conversion)" + } + return "function call" + case *ast.CaseClause: + return "case clause" + case *ast.ChanType: + return "channel type" + case *ast.CommClause: + return "communication clause" + case *ast.Comment: + return "comment" + case *ast.CommentGroup: + return "comment group" + case *ast.CompositeLit: + return "composite literal" + case *ast.DeclStmt: + return NodeDescription(n.Decl) + " statement" + case *ast.DeferStmt: + return "defer statement" + case *ast.Ellipsis: + return "ellipsis" + case *ast.EmptyStmt: + return "empty statement" + case *ast.ExprStmt: + return "expression statement" + case *ast.Field: + // Can be any of these: + // struct {x, y int} -- struct field(s) + // struct {T} -- anon struct field + // interface {I} -- interface embedding + // interface {f()} -- interface method + // func (A) func(B) C -- receiver, param(s), result(s) + return "field/method/parameter" + case *ast.FieldList: + return "field/method/parameter list" + case *ast.File: + return "source file" + case *ast.ForStmt: + return "for loop" + case *ast.FuncDecl: + return "function declaration" + case *ast.FuncLit: + return "function literal" + case *ast.FuncType: + return "function type" + case *ast.GenDecl: + switch n.Tok { + case token.IMPORT: + return "import declaration" + case token.CONST: + return "constant declaration" + case token.TYPE: + return "type declaration" + case token.VAR: + return "variable declaration" + } + case *ast.GoStmt: + return "go statement" + case *ast.Ident: + return "identifier" + case *ast.IfStmt: + return "if statement" + case *ast.ImportSpec: + return "import specification" + case *ast.IncDecStmt: + if n.Tok == token.INC { + return "increment statement" + } + return "decrement statement" + case *ast.IndexExpr: + return "index expression" + case *ast.InterfaceType: + return "interface type" + case *ast.KeyValueExpr: + return "key/value association" + case *ast.LabeledStmt: + return "statement label" + case *ast.MapType: + return "map type" + case *ast.Package: + return "package" + case *ast.ParenExpr: + return "parenthesized " + NodeDescription(n.X) + case *ast.RangeStmt: + return "range loop" + case *ast.ReturnStmt: + return 
"return statement" + case *ast.SelectStmt: + return "select statement" + case *ast.SelectorExpr: + return "selector" + case *ast.SendStmt: + return "channel send" + case *ast.SliceExpr: + return "slice expression" + case *ast.StarExpr: + return "*-operation" // load/store expr or pointer type + case *ast.StructType: + return "struct type" + case *ast.SwitchStmt: + return "switch statement" + case *ast.TypeAssertExpr: + return "type assertion" + case *ast.TypeSpec: + return "type specification" + case *ast.TypeSwitchStmt: + return "type switch" + case *ast.UnaryExpr: + return fmt.Sprintf("unary %s operation", n.Op) + case *ast.ValueSpec: + return "value specification" + + } + panic(fmt.Sprintf("unexpected node type: %T", n)) +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go new file mode 100644 index 000000000..3e4b19536 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go @@ -0,0 +1,481 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package astutil contains common utilities for working with the Go AST. +package astutil // import "golang.org/x/tools/go/ast/astutil" + +import ( + "fmt" + "go/ast" + "go/token" + "strconv" + "strings" +) + +// AddImport adds the import path to the file f, if absent. +func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) { + return AddNamedImport(fset, f, "", path) +} + +// AddNamedImport adds the import with the given name and path to the file f, if absent. +// If name is not empty, it is used to rename the import. +// +// For example, calling +// AddNamedImport(fset, f, "pathpkg", "path") +// adds +// import pathpkg "path" +func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) { + if imports(f, name, path) { + return false + } + + newImport := &ast.ImportSpec{ + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: strconv.Quote(path), + }, + } + if name != "" { + newImport.Name = &ast.Ident{Name: name} + } + + // Find an import decl to add to. + // The goal is to find an existing import + // whose import path has the longest shared + // prefix with path. + var ( + bestMatch = -1 // length of longest shared prefix + lastImport = -1 // index in f.Decls of the file's final import decl + impDecl *ast.GenDecl // import decl containing the best match + impIndex = -1 // spec index in impDecl containing the best match + + isThirdPartyPath = isThirdParty(path) + ) + for i, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if ok && gen.Tok == token.IMPORT { + lastImport = i + // Do not add to import "C", to avoid disrupting the + // association with its doc comment, breaking cgo. + if declImports(gen, "C") { + continue + } + + // Match an empty import decl if that's all that is available. + if len(gen.Specs) == 0 && bestMatch == -1 { + impDecl = gen + } + + // Compute longest shared prefix with imports in this group and find best + // matched import spec. + // 1. Always prefer import spec with longest shared prefix. + // 2. While match length is 0, + // - for stdlib package: prefer first import spec. + // - for third party package: prefer first third party import spec. + // We cannot use last import spec as best match for third party package + // because grouped imports are usually placed last by goimports -local + // flag. + // See issue #19190. 
+ seenAnyThirdParty := false + for j, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + p := importPath(impspec) + n := matchLen(p, path) + if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) { + bestMatch = n + impDecl = gen + impIndex = j + } + seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p) + } + } + } + + // If no import decl found, add one after the last import. + if impDecl == nil { + impDecl = &ast.GenDecl{ + Tok: token.IMPORT, + } + if lastImport >= 0 { + impDecl.TokPos = f.Decls[lastImport].End() + } else { + // There are no existing imports. + // Our new import, preceded by a blank line, goes after the package declaration + // and after the comment, if any, that starts on the same line as the + // package declaration. + impDecl.TokPos = f.Package + + file := fset.File(f.Package) + pkgLine := file.Line(f.Package) + for _, c := range f.Comments { + if file.Line(c.Pos()) > pkgLine { + break + } + // +2 for a blank line + impDecl.TokPos = c.End() + 2 + } + } + f.Decls = append(f.Decls, nil) + copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:]) + f.Decls[lastImport+1] = impDecl + } + + // Insert new import at insertAt. + insertAt := 0 + if impIndex >= 0 { + // insert after the found import + insertAt = impIndex + 1 + } + impDecl.Specs = append(impDecl.Specs, nil) + copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:]) + impDecl.Specs[insertAt] = newImport + pos := impDecl.Pos() + if insertAt > 0 { + // If there is a comment after an existing import, preserve the comment + // position by adding the new import after the comment. + if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil { + pos = spec.Comment.End() + } else { + // Assign same position as the previous import, + // so that the sorter sees it as being in the same block. + pos = impDecl.Specs[insertAt-1].Pos() + } + } + if newImport.Name != nil { + newImport.Name.NamePos = pos + } + newImport.Path.ValuePos = pos + newImport.EndPos = pos + + // Clean up parens. impDecl contains at least one spec. + if len(impDecl.Specs) == 1 { + // Remove unneeded parens. + impDecl.Lparen = token.NoPos + } else if !impDecl.Lparen.IsValid() { + // impDecl needs parens added. + impDecl.Lparen = impDecl.Specs[0].Pos() + } + + f.Imports = append(f.Imports, newImport) + + if len(f.Decls) <= 1 { + return true + } + + // Merge all the import declarations into the first one. + var first *ast.GenDecl + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") { + continue + } + if first == nil { + first = gen + continue // Don't touch the first one. + } + // We now know there is more than one package in this import + // declaration. Ensure that it ends up parenthesized. + first.Lparen = first.Pos() + // Move the imports of the other import declaration to the first one. + for _, spec := range gen.Specs { + spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() + first.Specs = append(first.Specs, spec) + } + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + i-- + } + + return true +} + +func isThirdParty(importPath string) bool { + // Third party package import path usually contains "." (".com", ".org", ...) + // This logic is taken from golang.org/x/tools/imports package. + return strings.Contains(importPath, ".") +} + +// DeleteImport deletes the import path from the file f, if present. +// If there are duplicate import declarations, all matching ones are deleted. 
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) { + return DeleteNamedImport(fset, f, "", path) +} + +// DeleteNamedImport deletes the import with the given name and path from the file f, if present. +// If there are duplicate import declarations, all matching ones are deleted. +func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) { + var delspecs []*ast.ImportSpec + var delcomments []*ast.CommentGroup + + // Find the import nodes that import path, if any. + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT { + continue + } + for j := 0; j < len(gen.Specs); j++ { + spec := gen.Specs[j] + impspec := spec.(*ast.ImportSpec) + if importName(impspec) != name || importPath(impspec) != path { + continue + } + + // We found an import spec that imports path. + // Delete it. + delspecs = append(delspecs, impspec) + deleted = true + copy(gen.Specs[j:], gen.Specs[j+1:]) + gen.Specs = gen.Specs[:len(gen.Specs)-1] + + // If this was the last import spec in this decl, + // delete the decl, too. + if len(gen.Specs) == 0 { + copy(f.Decls[i:], f.Decls[i+1:]) + f.Decls = f.Decls[:len(f.Decls)-1] + i-- + break + } else if len(gen.Specs) == 1 { + if impspec.Doc != nil { + delcomments = append(delcomments, impspec.Doc) + } + if impspec.Comment != nil { + delcomments = append(delcomments, impspec.Comment) + } + for _, cg := range f.Comments { + // Found comment on the same line as the import spec. + if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line { + delcomments = append(delcomments, cg) + break + } + } + + spec := gen.Specs[0].(*ast.ImportSpec) + + // Move the documentation right after the import decl. + if spec.Doc != nil { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + } + for _, cg := range f.Comments { + if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + break + } + } + } + if j > 0 { + lastImpspec := gen.Specs[j-1].(*ast.ImportSpec) + lastLine := fset.Position(lastImpspec.Path.ValuePos).Line + line := fset.Position(impspec.Path.ValuePos).Line + + // We deleted an entry but now there may be + // a blank line-sized hole where the import was. + if line-lastLine > 1 { + // There was a blank line immediately preceding the deleted import, + // so there's no need to close the hole. + // Do nothing. + } else if line != fset.File(gen.Rparen).LineCount() { + // There was no blank line. Close the hole. + fset.File(gen.Rparen).MergeLine(line) + } + } + j-- + } + } + + // Delete imports from f.Imports. + for i := 0; i < len(f.Imports); i++ { + imp := f.Imports[i] + for j, del := range delspecs { + if imp == del { + copy(f.Imports[i:], f.Imports[i+1:]) + f.Imports = f.Imports[:len(f.Imports)-1] + copy(delspecs[j:], delspecs[j+1:]) + delspecs = delspecs[:len(delspecs)-1] + i-- + break + } + } + } + + // Delete comments from f.Comments. 
+ for i := 0; i < len(f.Comments); i++ { + cg := f.Comments[i] + for j, del := range delcomments { + if cg == del { + copy(f.Comments[i:], f.Comments[i+1:]) + f.Comments = f.Comments[:len(f.Comments)-1] + copy(delcomments[j:], delcomments[j+1:]) + delcomments = delcomments[:len(delcomments)-1] + i-- + break + } + } + } + + if len(delspecs) > 0 { + panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs)) + } + + return +} + +// RewriteImport rewrites any import of path oldPath to path newPath. +func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) { + for _, imp := range f.Imports { + if importPath(imp) == oldPath { + rewrote = true + // record old End, because the default is to compute + // it using the length of imp.Path.Value. + imp.EndPos = imp.End() + imp.Path.Value = strconv.Quote(newPath) + } + } + return +} + +// UsesImport reports whether a given import is used. +func UsesImport(f *ast.File, path string) (used bool) { + spec := importSpec(f, path) + if spec == nil { + return + } + + name := spec.Name.String() + switch name { + case "": + // If the package name is not explicitly specified, + // make an educated guess. This is not guaranteed to be correct. + lastSlash := strings.LastIndex(path, "/") + if lastSlash == -1 { + name = path + } else { + name = path[lastSlash+1:] + } + case "_", ".": + // Not sure if this import is used - err on the side of caution. + return true + } + + ast.Walk(visitFn(func(n ast.Node) { + sel, ok := n.(*ast.SelectorExpr) + if ok && isTopName(sel.X, name) { + used = true + } + }), f) + + return +} + +type visitFn func(node ast.Node) + +func (fn visitFn) Visit(node ast.Node) ast.Visitor { + fn(node) + return fn +} + +// imports reports whether f has an import with the specified name and path. +func imports(f *ast.File, name, path string) bool { + for _, s := range f.Imports { + if importName(s) == name && importPath(s) == path { + return true + } + } + return false +} + +// importSpec returns the import spec if f imports path, +// or nil otherwise. +func importSpec(f *ast.File, path string) *ast.ImportSpec { + for _, s := range f.Imports { + if importPath(s) == path { + return s + } + } + return nil +} + +// importName returns the name of s, +// or "" if the import is not named. +func importName(s *ast.ImportSpec) string { + if s.Name == nil { + return "" + } + return s.Name.Name +} + +// importPath returns the unquoted import path of s, +// or "" if the path is not properly quoted. +func importPath(s *ast.ImportSpec) string { + t, err := strconv.Unquote(s.Path.Value) + if err != nil { + return "" + } + return t +} + +// declImports reports whether gen contains an import of path. +func declImports(gen *ast.GenDecl, path string) bool { + if gen.Tok != token.IMPORT { + return false + } + for _, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + if importPath(impspec) == path { + return true + } + } + return false +} + +// matchLen returns the length of the longest path segment prefix shared by x and y. +func matchLen(x, y string) int { + n := 0 + for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ { + if x[i] == '/' { + n++ + } + } + return n +} + +// isTopName returns true if n is a top-level unresolved identifier with the given name. +func isTopName(n ast.Expr, name string) bool { + id, ok := n.(*ast.Ident) + return ok && id.Name == name && id.Obj == nil +} + +// Imports returns the file imports grouped by paragraph. 
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec { + var groups [][]*ast.ImportSpec + + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || genDecl.Tok != token.IMPORT { + break + } + + group := []*ast.ImportSpec{} + + var lastLine int + for _, spec := range genDecl.Specs { + importSpec := spec.(*ast.ImportSpec) + pos := importSpec.Path.ValuePos + line := fset.Position(pos).Line + if lastLine > 0 && pos > 0 && line-lastLine > 1 { + groups = append(groups, group) + group = []*ast.ImportSpec{} + } + group = append(group, importSpec) + lastLine = line + } + groups = append(groups, group) + } + + return groups +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go new file mode 100644 index 000000000..cf72ea990 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go @@ -0,0 +1,477 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import ( + "fmt" + "go/ast" + "reflect" + "sort" +) + +// An ApplyFunc is invoked by Apply for each node n, even if n is nil, +// before and/or after the node's children, using a Cursor describing +// the current node and providing operations on it. +// +// The return value of ApplyFunc controls the syntax tree traversal. +// See Apply for details. +type ApplyFunc func(*Cursor) bool + +// Apply traverses a syntax tree recursively, starting with root, +// and calling pre and post for each node as described below. +// Apply returns the syntax tree, possibly modified. +// +// If pre is not nil, it is called for each node before the node's +// children are traversed (pre-order). If pre returns false, no +// children are traversed, and post is not called for that node. +// +// If post is not nil, and a prior call of pre didn't return false, +// post is called for each node after its children are traversed +// (post-order). If post returns false, traversal is terminated and +// Apply returns immediately. +// +// Only fields that refer to AST nodes are considered children; +// i.e., token.Pos, Scopes, Objects, and fields of basic types +// (strings, etc.) are ignored. +// +// Children are traversed in the order in which they appear in the +// respective node's struct definition. A package's files are +// traversed in the filenames' alphabetical order. +// +func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) { + parent := &struct{ ast.Node }{root} + defer func() { + if r := recover(); r != nil && r != abort { + panic(r) + } + result = parent.Node + }() + a := &application{pre: pre, post: post} + a.apply(parent, "Node", nil, root) + return +} + +var abort = new(int) // singleton, to signal termination of Apply + +// A Cursor describes a node encountered during Apply. +// Information about the node and its parent is available +// from the Node, Parent, Name, and Index methods. +// +// If p is a variable of type and value of the current parent node +// c.Parent(), and f is the field identifier with name c.Name(), +// the following invariants hold: +// +// p.f == c.Node() if c.Index() < 0 +// p.f[c.Index()] == c.Node() if c.Index() >= 0 +// +// The methods Replace, Delete, InsertBefore, and InsertAfter +// can be used to change the AST without disrupting Apply. +type Cursor struct { + parent ast.Node + name string + iter *iterator // valid if non-nil + node ast.Node +} + +// Node returns the current Node. 
+func (c *Cursor) Node() ast.Node { return c.node } + +// Parent returns the parent of the current Node. +func (c *Cursor) Parent() ast.Node { return c.parent } + +// Name returns the name of the parent Node field that contains the current Node. +// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns +// the filename for the current Node. +func (c *Cursor) Name() string { return c.name } + +// Index reports the index >= 0 of the current Node in the slice of Nodes that +// contains it, or a value < 0 if the current Node is not part of a slice. +// The index of the current node changes if InsertBefore is called while +// processing the current node. +func (c *Cursor) Index() int { + if c.iter != nil { + return c.iter.index + } + return -1 +} + +// field returns the current node's parent field value. +func (c *Cursor) field() reflect.Value { + return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name) +} + +// Replace replaces the current Node with n. +// The replacement node is not walked by Apply. +func (c *Cursor) Replace(n ast.Node) { + if _, ok := c.node.(*ast.File); ok { + file, ok := n.(*ast.File) + if !ok { + panic("attempt to replace *ast.File with non-*ast.File") + } + c.parent.(*ast.Package).Files[c.name] = file + return + } + + v := c.field() + if i := c.Index(); i >= 0 { + v = v.Index(i) + } + v.Set(reflect.ValueOf(n)) +} + +// Delete deletes the current Node from its containing slice. +// If the current Node is not part of a slice, Delete panics. +// As a special case, if the current node is a package file, +// Delete removes it from the package's Files map. +func (c *Cursor) Delete() { + if _, ok := c.node.(*ast.File); ok { + delete(c.parent.(*ast.Package).Files, c.name) + return + } + + i := c.Index() + if i < 0 { + panic("Delete node not contained in slice") + } + v := c.field() + l := v.Len() + reflect.Copy(v.Slice(i, l), v.Slice(i+1, l)) + v.Index(l - 1).Set(reflect.Zero(v.Type().Elem())) + v.SetLen(l - 1) + c.iter.step-- +} + +// InsertAfter inserts n after the current Node in its containing slice. +// If the current Node is not part of a slice, InsertAfter panics. +// Apply does not walk n. +func (c *Cursor) InsertAfter(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertAfter node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l)) + v.Index(i + 1).Set(reflect.ValueOf(n)) + c.iter.step++ +} + +// InsertBefore inserts n before the current Node in its containing slice. +// If the current Node is not part of a slice, InsertBefore panics. +// Apply will not walk n. +func (c *Cursor) InsertBefore(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertBefore node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+1, l), v.Slice(i, l)) + v.Index(i).Set(reflect.ValueOf(n)) + c.iter.index++ +} + +// application carries all the shared data so we can pass it around cheaply. 
+type application struct { + pre, post ApplyFunc + cursor Cursor + iter iterator +} + +func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) { + // convert typed nil into untyped nil + if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() { + n = nil + } + + // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead + saved := a.cursor + a.cursor.parent = parent + a.cursor.name = name + a.cursor.iter = iter + a.cursor.node = n + + if a.pre != nil && !a.pre(&a.cursor) { + a.cursor = saved + return + } + + // walk children + // (the order of the cases matches the order of the corresponding node types in go/ast) + switch n := n.(type) { + case nil: + // nothing to do + + // Comments and fields + case *ast.Comment: + // nothing to do + + case *ast.CommentGroup: + if n != nil { + a.applyList(n, "List") + } + + case *ast.Field: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.FieldList: + a.applyList(n, "List") + + // Expressions + case *ast.BadExpr, *ast.Ident, *ast.BasicLit: + // nothing to do + + case *ast.Ellipsis: + a.apply(n, "Elt", nil, n.Elt) + + case *ast.FuncLit: + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + case *ast.CompositeLit: + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Elts") + + case *ast.ParenExpr: + a.apply(n, "X", nil, n.X) + + case *ast.SelectorExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Sel", nil, n.Sel) + + case *ast.IndexExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Index", nil, n.Index) + + case *ast.SliceExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Low", nil, n.Low) + a.apply(n, "High", nil, n.High) + a.apply(n, "Max", nil, n.Max) + + case *ast.TypeAssertExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Type", nil, n.Type) + + case *ast.CallExpr: + a.apply(n, "Fun", nil, n.Fun) + a.applyList(n, "Args") + + case *ast.StarExpr: + a.apply(n, "X", nil, n.X) + + case *ast.UnaryExpr: + a.apply(n, "X", nil, n.X) + + case *ast.BinaryExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Y", nil, n.Y) + + case *ast.KeyValueExpr: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + // Types + case *ast.ArrayType: + a.apply(n, "Len", nil, n.Len) + a.apply(n, "Elt", nil, n.Elt) + + case *ast.StructType: + a.apply(n, "Fields", nil, n.Fields) + + case *ast.FuncType: + a.apply(n, "Params", nil, n.Params) + a.apply(n, "Results", nil, n.Results) + + case *ast.InterfaceType: + a.apply(n, "Methods", nil, n.Methods) + + case *ast.MapType: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + case *ast.ChanType: + a.apply(n, "Value", nil, n.Value) + + // Statements + case *ast.BadStmt: + // nothing to do + + case *ast.DeclStmt: + a.apply(n, "Decl", nil, n.Decl) + + case *ast.EmptyStmt: + // nothing to do + + case *ast.LabeledStmt: + a.apply(n, "Label", nil, n.Label) + a.apply(n, "Stmt", nil, n.Stmt) + + case *ast.ExprStmt: + a.apply(n, "X", nil, n.X) + + case *ast.SendStmt: + a.apply(n, "Chan", nil, n.Chan) + a.apply(n, "Value", nil, n.Value) + + case *ast.IncDecStmt: + a.apply(n, "X", nil, n.X) + + case *ast.AssignStmt: + a.applyList(n, "Lhs") + a.applyList(n, "Rhs") + + case *ast.GoStmt: + a.apply(n, "Call", nil, n.Call) + + case *ast.DeferStmt: + a.apply(n, "Call", nil, n.Call) + + case *ast.ReturnStmt: + a.applyList(n, "Results") + + case *ast.BranchStmt: + a.apply(n, "Label", nil, n.Label) + + case *ast.BlockStmt: + 
a.applyList(n, "List") + + case *ast.IfStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Body", nil, n.Body) + a.apply(n, "Else", nil, n.Else) + + case *ast.CaseClause: + a.applyList(n, "List") + a.applyList(n, "Body") + + case *ast.SwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Body", nil, n.Body) + + case *ast.TypeSwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Assign", nil, n.Assign) + a.apply(n, "Body", nil, n.Body) + + case *ast.CommClause: + a.apply(n, "Comm", nil, n.Comm) + a.applyList(n, "Body") + + case *ast.SelectStmt: + a.apply(n, "Body", nil, n.Body) + + case *ast.ForStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Post", nil, n.Post) + a.apply(n, "Body", nil, n.Body) + + case *ast.RangeStmt: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + a.apply(n, "X", nil, n.X) + a.apply(n, "Body", nil, n.Body) + + // Declarations + case *ast.ImportSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Path", nil, n.Path) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.ValueSpec: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Values") + a.apply(n, "Comment", nil, n.Comment) + + case *ast.TypeSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.BadDecl: + // nothing to do + + case *ast.GenDecl: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Specs") + + case *ast.FuncDecl: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Recv", nil, n.Recv) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + // Files and packages + case *ast.File: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.applyList(n, "Decls") + // Don't walk n.Comments; they have either been walked already if + // they are Doc comments, or they can be easily walked explicitly. + + case *ast.Package: + // collect and sort names for reproducible behavior + var names []string + for name := range n.Files { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + a.apply(n, name, nil, n.Files[name]) + } + + default: + panic(fmt.Sprintf("Apply: unexpected node type %T", n)) + } + + if a.post != nil && !a.post(&a.cursor) { + panic(abort) + } + + a.cursor = saved +} + +// An iterator controls iteration over a slice of nodes. 
+type iterator struct { + index, step int +} + +func (a *application) applyList(parent ast.Node, name string) { + // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead + saved := a.iter + a.iter.index = 0 + for { + // must reload parent.name each time, since cursor modifications might change it + v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name) + if a.iter.index >= v.Len() { + break + } + + // element x may be nil in a bad AST - be cautious + var x ast.Node + if e := v.Index(a.iter.index); e.IsValid() { + x = e.Interface().(ast.Node) + } + + a.iter.step = 1 + a.apply(parent, name, &a.iter, x) + a.iter.index += a.iter.step + } + a.iter = saved +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go new file mode 100644 index 000000000..763062982 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go @@ -0,0 +1,14 @@ +package astutil + +import "go/ast" + +// Unparen returns e with any enclosing parentheses stripped. +func Unparen(e ast.Expr) ast.Expr { + for { + p, ok := e.(*ast.ParenExpr) + if !ok { + return e + } + e = p.X + } +} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go new file mode 100644 index 000000000..98b3987b9 --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go @@ -0,0 +1,109 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gcexportdata provides functions for locating, reading, and +// writing export data files containing type information produced by the +// gc compiler. This package supports go1.7 export data format and all +// later versions. +// +// Although it might seem convenient for this package to live alongside +// go/types in the standard library, this would cause version skew +// problems for developer tools that use it, since they must be able to +// consume the outputs of the gc compiler both before and after a Go +// update such as from Go 1.7 to Go 1.8. Because this package lives in +// golang.org/x/tools, sites can update their version of this repo some +// time before the Go 1.8 release and rebuild and redeploy their +// developer tools, which will then be able to consume both Go 1.7 and +// Go 1.8 export data files, so they will work before and after the +// Go update. (See discussion at https://golang.org/issue/15651.) +// +package gcexportdata // import "golang.org/x/tools/go/gcexportdata" + +import ( + "bufio" + "bytes" + "fmt" + "go/token" + "go/types" + "io" + "io/ioutil" + + "golang.org/x/tools/go/internal/gcimporter" +) + +// Find returns the name of an object (.o) or archive (.a) file +// containing type information for the specified import path, +// using the workspace layout conventions of go/build. +// If no file was found, an empty filename is returned. +// +// A relative srcDir is interpreted relative to the current working directory. +// +// Find also returns the package's resolved (canonical) import path, +// reflecting the effects of srcDir and vendoring on importPath. +func Find(importPath, srcDir string) (filename, path string) { + return gcimporter.FindPkg(importPath, srcDir) +} + +// NewReader returns a reader for the export data section of an object +// (.o) or archive (.a) file read from r. The new reader may provide +// additional trailing data beyond the end of the export data. 
+func NewReader(r io.Reader) (io.Reader, error) { + buf := bufio.NewReader(r) + _, err := gcimporter.FindExportData(buf) + // If we ever switch to a zip-like archive format with the ToC + // at the end, we can return the correct portion of export data, + // but for now we must return the entire rest of the file. + return buf, err +} + +// Read reads export data from in, decodes it, and returns type +// information for the package. +// The package name is specified by path. +// File position information is added to fset. +// +// Read may inspect and add to the imports map to ensure that references +// within the export data to other packages are consistent. The caller +// must ensure that imports[path] does not exist, or exists but is +// incomplete (see types.Package.Complete), and Read inserts the +// resulting package into this map entry. +// +// On return, the state of the reader is undefined. +func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package, path string) (*types.Package, error) { + data, err := ioutil.ReadAll(in) + if err != nil { + return nil, fmt.Errorf("reading export data for %q: %v", path, err) + } + + if bytes.HasPrefix(data, []byte("!")) { + return nil, fmt.Errorf("can't read export data for %q directly from an archive file (call gcexportdata.NewReader first to extract export data)", path) + } + + // The App Engine Go runtime v1.6 uses the old export data format. + // TODO(adonovan): delete once v1.7 has been around for a while. + if bytes.HasPrefix(data, []byte("package ")) { + return gcimporter.ImportData(imports, path, path, bytes.NewReader(data)) + } + + // The indexed export format starts with an 'i'; the older + // binary export format starts with a 'c', 'd', or 'v' + // (from "version"). Select appropriate importer. + if len(data) > 0 && data[0] == 'i' { + _, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path) + return pkg, err + } + + _, pkg, err := gcimporter.BImportData(fset, imports, data, path) + return pkg, err +} + +// Write writes encoded type information for the specified package to out. +// The FileSet provides file position information for named objects. +func Write(out io.Writer, fset *token.FileSet, pkg *types.Package) error { + b, err := gcimporter.BExportData(fset, pkg) + if err != nil { + return err + } + _, err = out.Write(b) + return err +} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/importer.go b/vendor/golang.org/x/tools/go/gcexportdata/importer.go new file mode 100644 index 000000000..efe221e7e --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcexportdata/importer.go @@ -0,0 +1,73 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package gcexportdata + +import ( + "fmt" + "go/token" + "go/types" + "os" +) + +// NewImporter returns a new instance of the types.Importer interface +// that reads type information from export data files written by gc. +// The Importer also satisfies types.ImporterFrom. +// +// Export data files are located using "go build" workspace conventions +// and the build.Default context. +// +// Use this importer instead of go/importer.For("gc", ...) to avoid the +// version-skew problems described in the documentation of this package, +// or to control the FileSet or access the imports map populated during +// package loading. 
+// +func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom { + return importer{fset, imports} +} + +type importer struct { + fset *token.FileSet + imports map[string]*types.Package +} + +func (imp importer) Import(importPath string) (*types.Package, error) { + return imp.ImportFrom(importPath, "", 0) +} + +func (imp importer) ImportFrom(importPath, srcDir string, mode types.ImportMode) (_ *types.Package, err error) { + filename, path := Find(importPath, srcDir) + if filename == "" { + if importPath == "unsafe" { + // Even for unsafe, call Find first in case + // the package was vendored. + return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %s", importPath) + } + + if pkg, ok := imp.imports[path]; ok && pkg.Complete() { + return pkg, nil // cache hit + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + f.Close() + if err != nil { + // add file name to error + err = fmt.Errorf("reading export data: %s: %v", filename, err) + } + }() + + r, err := NewReader(f) + if err != nil { + return nil, err + } + + return Read(r, imp.fset, imp.imports, path) +} diff --git a/vendor/golang.org/x/tools/go/gcexportdata/main.go b/vendor/golang.org/x/tools/go/gcexportdata/main.go new file mode 100644 index 000000000..2713dce64 --- /dev/null +++ b/vendor/golang.org/x/tools/go/gcexportdata/main.go @@ -0,0 +1,99 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build ignore + +// The gcexportdata command is a diagnostic tool that displays the +// contents of gc export data files. +package main + +import ( + "flag" + "fmt" + "go/token" + "go/types" + "log" + "os" + + "golang.org/x/tools/go/gcexportdata" + "golang.org/x/tools/go/types/typeutil" +) + +var packageFlag = flag.String("package", "", "alternative package to print") + +func main() { + log.SetPrefix("gcexportdata: ") + log.SetFlags(0) + flag.Usage = func() { + fmt.Fprintln(os.Stderr, "usage: gcexportdata [-package path] file.a") + } + flag.Parse() + if flag.NArg() != 1 { + flag.Usage() + os.Exit(2) + } + filename := flag.Args()[0] + + f, err := os.Open(filename) + if err != nil { + log.Fatal(err) + } + + r, err := gcexportdata.NewReader(f) + if err != nil { + log.Fatalf("%s: %s", filename, err) + } + + // Decode the package. + const primary = "" + imports := make(map[string]*types.Package) + fset := token.NewFileSet() + pkg, err := gcexportdata.Read(r, fset, imports, primary) + if err != nil { + log.Fatalf("%s: %s", filename, err) + } + + // Optionally select an indirectly mentioned package. + if *packageFlag != "" { + pkg = imports[*packageFlag] + if pkg == nil { + fmt.Fprintf(os.Stderr, "export data file %s does not mention %s; has:\n", + filename, *packageFlag) + for p := range imports { + if p != primary { + fmt.Fprintf(os.Stderr, "\t%s\n", p) + } + } + os.Exit(1) + } + } + + // Print all package-level declarations, including non-exported ones. + fmt.Printf("package %s\n", pkg.Name()) + for _, imp := range pkg.Imports() { + fmt.Printf("import %q\n", imp.Path()) + } + qual := func(p *types.Package) string { + if pkg == p { + return "" + } + return p.Name() + } + scope := pkg.Scope() + for _, name := range scope.Names() { + obj := scope.Lookup(name) + fmt.Printf("%s: %s\n", + fset.Position(obj.Pos()), + types.ObjectString(obj, qual)) + + // For types, print each method. 
+ if _, ok := obj.(*types.TypeName); ok { + for _, method := range typeutil.IntuitiveMethodSet(obj.Type(), nil) { + fmt.Printf("%s: %s\n", + fset.Position(method.Obj().Pos()), + types.SelectionString(method, qual)) + } + } + } +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go new file mode 100644 index 000000000..a807d0aaa --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go @@ -0,0 +1,852 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Binary package export. +// This file was derived from $GOROOT/src/cmd/compile/internal/gc/bexport.go; +// see that file for specification of the format. + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "fmt" + "go/ast" + "go/constant" + "go/token" + "go/types" + "math" + "math/big" + "sort" + "strings" +) + +// If debugFormat is set, each integer and string value is preceded by a marker +// and position information in the encoding. This mechanism permits an importer +// to recognize immediately when it is out of sync. The importer recognizes this +// mode automatically (i.e., it can import export data produced with debugging +// support even if debugFormat is not set at the time of import). This mode will +// lead to massively larger export data (by a factor of 2 to 3) and should only +// be enabled during development and debugging. +// +// NOTE: This flag is the first flag to enable if importing dies because of +// (suspected) format errors, and whenever a change is made to the format. +const debugFormat = false // default: false + +// If trace is set, debugging output is printed to std out. +const trace = false // default: false + +// Current export format version. Increase with each format change. +// Note: The latest binary (non-indexed) export format is at version 6. +// This exporter is still at level 4, but it doesn't matter since +// the binary importer can handle older versions just fine. +// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE +// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMEMTED HERE +// 4: type name objects support type aliases, uses aliasTag +// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used) +// 2: removed unused bool in ODCL export (compiler only) +// 1: header format change (more regular), export package for _ struct fields +// 0: Go1.7 encoding +const exportVersion = 4 + +// trackAllTypes enables cycle tracking for all types, not just named +// types. The existing compiler invariants assume that unnamed types +// that are not completely set up are not used, or else there are spurious +// errors. +// If disabled, only named types are tracked, possibly leading to slightly +// less efficient encoding in rare cases. It also prevents the export of +// some corner-case type declarations (but those are not handled correctly +// with with the textual export format either). 
+// TODO(gri) enable and remove once issues caused by it are fixed +const trackAllTypes = false + +type exporter struct { + fset *token.FileSet + out bytes.Buffer + + // object -> index maps, indexed in order of serialization + strIndex map[string]int + pkgIndex map[*types.Package]int + typIndex map[types.Type]int + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + + // debugging support + written int // bytes written + indent int // for trace +} + +// internalError represents an error generated inside this package. +type internalError string + +func (e internalError) Error() string { return "gcimporter: " + string(e) } + +func internalErrorf(format string, args ...interface{}) error { + return internalError(fmt.Sprintf(format, args...)) +} + +// BExportData returns binary export data for pkg. +// If no file set is provided, position info will be missing. +func BExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) { + defer func() { + if e := recover(); e != nil { + if ierr, ok := e.(internalError); ok { + err = ierr + return + } + // Not an internal error; panic again. + panic(e) + } + }() + + p := exporter{ + fset: fset, + strIndex: map[string]int{"": 0}, // empty string is mapped to 0 + pkgIndex: make(map[*types.Package]int), + typIndex: make(map[types.Type]int), + posInfoFormat: true, // TODO(gri) might become a flag, eventually + } + + // write version info + // The version string must start with "version %d" where %d is the version + // number. Additional debugging information may follow after a blank; that + // text is ignored by the importer. + p.rawStringln(fmt.Sprintf("version %d", exportVersion)) + var debug string + if debugFormat { + debug = "debug" + } + p.rawStringln(debug) // cannot use p.bool since it's affected by debugFormat; also want to see this clearly + p.bool(trackAllTypes) + p.bool(p.posInfoFormat) + + // --- generic export data --- + + // populate type map with predeclared "known" types + for index, typ := range predeclared() { + p.typIndex[typ] = index + } + if len(p.typIndex) != len(predeclared()) { + return nil, internalError("duplicate entries in type map?") + } + + // write package data + p.pkg(pkg, true) + if trace { + p.tracef("\n") + } + + // write objects + objcount := 0 + scope := pkg.Scope() + for _, name := range scope.Names() { + if !ast.IsExported(name) { + continue + } + if trace { + p.tracef("\n") + } + p.obj(scope.Lookup(name)) + objcount++ + } + + // indicate end of list + if trace { + p.tracef("\n") + } + p.tag(endTag) + + // for self-verification only (redundant) + p.int(objcount) + + if trace { + p.tracef("\n") + } + + // --- end of export data --- + + return p.out.Bytes(), nil +} + +func (p *exporter) pkg(pkg *types.Package, emptypath bool) { + if pkg == nil { + panic(internalError("unexpected nil pkg")) + } + + // if we saw the package before, write its index (>= 0) + if i, ok := p.pkgIndex[pkg]; ok { + p.index('P', i) + return + } + + // otherwise, remember the package, write the package tag (< 0) and package data + if trace { + p.tracef("P%d = { ", len(p.pkgIndex)) + defer p.tracef("} ") + } + p.pkgIndex[pkg] = len(p.pkgIndex) + + p.tag(packageTag) + p.string(pkg.Name()) + if emptypath { + p.string("") + } else { + p.string(pkg.Path()) + } +} + +func (p *exporter) obj(obj types.Object) { + switch obj := obj.(type) { + case *types.Const: + p.tag(constTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + p.value(obj.Val()) + + case *types.TypeName: + if obj.IsAlias() { + 
p.tag(aliasTag) + p.pos(obj) + p.qualifiedName(obj) + } else { + p.tag(typeTag) + } + p.typ(obj.Type()) + + case *types.Var: + p.tag(varTag) + p.pos(obj) + p.qualifiedName(obj) + p.typ(obj.Type()) + + case *types.Func: + p.tag(funcTag) + p.pos(obj) + p.qualifiedName(obj) + sig := obj.Type().(*types.Signature) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + + default: + panic(internalErrorf("unexpected object %v (%T)", obj, obj)) + } +} + +func (p *exporter) pos(obj types.Object) { + if !p.posInfoFormat { + return + } + + file, line := p.fileLine(obj) + if file == p.prevFile { + // common case: write line delta + // delta == 0 means different file or no line change + delta := line - p.prevLine + p.int(delta) + if delta == 0 { + p.int(-1) // -1 means no file change + } + } else { + // different file + p.int(0) + // Encode filename as length of common prefix with previous + // filename, followed by (possibly empty) suffix. Filenames + // frequently share path prefixes, so this can save a lot + // of space and make export data size less dependent on file + // path length. The suffix is unlikely to be empty because + // file names tend to end in ".go". + n := commonPrefixLen(p.prevFile, file) + p.int(n) // n >= 0 + p.string(file[n:]) // write suffix only + p.prevFile = file + p.int(line) + } + p.prevLine = line +} + +func (p *exporter) fileLine(obj types.Object) (file string, line int) { + if p.fset != nil { + pos := p.fset.Position(obj.Pos()) + file = pos.Filename + line = pos.Line + } + return +} + +func commonPrefixLen(a, b string) int { + if len(a) > len(b) { + a, b = b, a + } + // len(a) <= len(b) + i := 0 + for i < len(a) && a[i] == b[i] { + i++ + } + return i +} + +func (p *exporter) qualifiedName(obj types.Object) { + p.string(obj.Name()) + p.pkg(obj.Pkg(), false) +} + +func (p *exporter) typ(t types.Type) { + if t == nil { + panic(internalError("nil type")) + } + + // Possible optimization: Anonymous pointer types *T where + // T is a named type are common. We could canonicalize all + // such types *T to a single type PT = *T. This would lead + // to at most one *T entry in typIndex, and all future *T's + // would be encoded as the respective index directly. Would + // save 1 byte (pointerTag) per *T and reduce the typIndex + // size (at the cost of a canonicalization map). We can do + // this later, without encoding format change. 
+ + // if we saw the type before, write its index (>= 0) + if i, ok := p.typIndex[t]; ok { + p.index('T', i) + return + } + + // otherwise, remember the type, write the type tag (< 0) and type data + if trackAllTypes { + if trace { + p.tracef("T%d = {>\n", len(p.typIndex)) + defer p.tracef("<\n} ") + } + p.typIndex[t] = len(p.typIndex) + } + + switch t := t.(type) { + case *types.Named: + if !trackAllTypes { + // if we don't track all types, track named types now + p.typIndex[t] = len(p.typIndex) + } + + p.tag(namedTag) + p.pos(t.Obj()) + p.qualifiedName(t.Obj()) + p.typ(t.Underlying()) + if !types.IsInterface(t) { + p.assocMethods(t) + } + + case *types.Array: + p.tag(arrayTag) + p.int64(t.Len()) + p.typ(t.Elem()) + + case *types.Slice: + p.tag(sliceTag) + p.typ(t.Elem()) + + case *dddSlice: + p.tag(dddTag) + p.typ(t.elem) + + case *types.Struct: + p.tag(structTag) + p.fieldList(t) + + case *types.Pointer: + p.tag(pointerTag) + p.typ(t.Elem()) + + case *types.Signature: + p.tag(signatureTag) + p.paramList(t.Params(), t.Variadic()) + p.paramList(t.Results(), false) + + case *types.Interface: + p.tag(interfaceTag) + p.iface(t) + + case *types.Map: + p.tag(mapTag) + p.typ(t.Key()) + p.typ(t.Elem()) + + case *types.Chan: + p.tag(chanTag) + p.int(int(3 - t.Dir())) // hack + p.typ(t.Elem()) + + default: + panic(internalErrorf("unexpected type %T: %s", t, t)) + } +} + +func (p *exporter) assocMethods(named *types.Named) { + // Sort methods (for determinism). + var methods []*types.Func + for i := 0; i < named.NumMethods(); i++ { + methods = append(methods, named.Method(i)) + } + sort.Sort(methodsByName(methods)) + + p.int(len(methods)) + + if trace && methods != nil { + p.tracef("associated methods {>\n") + } + + for i, m := range methods { + if trace && i > 0 { + p.tracef("\n") + } + + p.pos(m) + name := m.Name() + p.string(name) + if !exported(name) { + p.pkg(m.Pkg(), false) + } + + sig := m.Type().(*types.Signature) + p.paramList(types.NewTuple(sig.Recv()), false) + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) + p.int(0) // dummy value for go:nointerface pragma - ignored by importer + } + + if trace && methods != nil { + p.tracef("<\n} ") + } +} + +type methodsByName []*types.Func + +func (x methodsByName) Len() int { return len(x) } +func (x methodsByName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x methodsByName) Less(i, j int) bool { return x[i].Name() < x[j].Name() } + +func (p *exporter) fieldList(t *types.Struct) { + if trace && t.NumFields() > 0 { + p.tracef("fields {>\n") + defer p.tracef("<\n} ") + } + + p.int(t.NumFields()) + for i := 0; i < t.NumFields(); i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.field(t.Field(i)) + p.string(t.Tag(i)) + } +} + +func (p *exporter) field(f *types.Var) { + if !f.IsField() { + panic(internalError("field expected")) + } + + p.pos(f) + p.fieldName(f) + p.typ(f.Type()) +} + +func (p *exporter) iface(t *types.Interface) { + // TODO(gri): enable importer to load embedded interfaces, + // then emit Embeddeds and ExplicitMethods separately here. 
+ p.int(0) + + n := t.NumMethods() + if trace && n > 0 { + p.tracef("methods {>\n") + defer p.tracef("<\n} ") + } + p.int(n) + for i := 0; i < n; i++ { + if trace && i > 0 { + p.tracef("\n") + } + p.method(t.Method(i)) + } +} + +func (p *exporter) method(m *types.Func) { + sig := m.Type().(*types.Signature) + if sig.Recv() == nil { + panic(internalError("method expected")) + } + + p.pos(m) + p.string(m.Name()) + if m.Name() != "_" && !ast.IsExported(m.Name()) { + p.pkg(m.Pkg(), false) + } + + // interface method; no need to encode receiver. + p.paramList(sig.Params(), sig.Variadic()) + p.paramList(sig.Results(), false) +} + +func (p *exporter) fieldName(f *types.Var) { + name := f.Name() + + if f.Anonymous() { + // anonymous field - we distinguish between 3 cases: + // 1) field name matches base type name and is exported + // 2) field name matches base type name and is not exported + // 3) field name doesn't match base type name (alias name) + bname := basetypeName(f.Type()) + if name == bname { + if ast.IsExported(name) { + name = "" // 1) we don't need to know the field name or package + } else { + name = "?" // 2) use unexported name "?" to force package export + } + } else { + // 3) indicate alias and export name as is + // (this requires an extra "@" but this is a rare case) + p.string("@") + } + } + + p.string(name) + if name != "" && !ast.IsExported(name) { + p.pkg(f.Pkg(), false) + } +} + +func basetypeName(typ types.Type) string { + switch typ := deref(typ).(type) { + case *types.Basic: + return typ.Name() + case *types.Named: + return typ.Obj().Name() + default: + return "" // unnamed type + } +} + +func (p *exporter) paramList(params *types.Tuple, variadic bool) { + // use negative length to indicate unnamed parameters + // (look at the first parameter only since either all + // names are present or all are absent) + n := params.Len() + if n > 0 && params.At(0).Name() == "" { + n = -n + } + p.int(n) + for i := 0; i < params.Len(); i++ { + q := params.At(i) + t := q.Type() + if variadic && i == params.Len()-1 { + t = &dddSlice{t.(*types.Slice).Elem()} + } + p.typ(t) + if n > 0 { + name := q.Name() + p.string(name) + if name != "_" { + p.pkg(q.Pkg(), false) + } + } + p.string("") // no compiler-specific info + } +} + +func (p *exporter) value(x constant.Value) { + if trace { + p.tracef("= ") + } + + switch x.Kind() { + case constant.Bool: + tag := falseTag + if constant.BoolVal(x) { + tag = trueTag + } + p.tag(tag) + + case constant.Int: + if v, exact := constant.Int64Val(x); exact { + // common case: x fits into an int64 - use compact encoding + p.tag(int64Tag) + p.int64(v) + return + } + // uncommon case: large x - use float encoding + // (powers of 2 will be encoded efficiently with exponent) + p.tag(floatTag) + p.float(constant.ToFloat(x)) + + case constant.Float: + p.tag(floatTag) + p.float(x) + + case constant.Complex: + p.tag(complexTag) + p.float(constant.Real(x)) + p.float(constant.Imag(x)) + + case constant.String: + p.tag(stringTag) + p.string(constant.StringVal(x)) + + case constant.Unknown: + // package contains type errors + p.tag(unknownTag) + + default: + panic(internalErrorf("unexpected value %v (%T)", x, x)) + } +} + +func (p *exporter) float(x constant.Value) { + if x.Kind() != constant.Float { + panic(internalErrorf("unexpected constant %v, want float", x)) + } + // extract sign (there is no -0) + sign := constant.Sign(x) + if sign == 0 { + // x == 0 + p.int(0) + return + } + // x != 0 + + var f big.Float + if v, exact := constant.Float64Val(x); exact { + // 
float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + r := valueToRat(num) + f.SetRat(r.Quo(r, valueToRat(denom))) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + f.SetFloat64(math.MaxFloat64) // FIXME + } + + // extract exponent such that 0.5 <= m < 1.0 + var m big.Float + exp := f.MantExp(&m) + + // extract mantissa as *big.Int + // - set exponent large enough so mant satisfies mant.IsInt() + // - get *big.Int from mant + m.SetMantExp(&m, int(m.MinPrec())) + mant, acc := m.Int(nil) + if acc != big.Exact { + panic(internalError("internal error")) + } + + p.int(sign) + p.int(exp) + p.string(string(mant.Bytes())) +} + +func valueToRat(x constant.Value) *big.Rat { + // Convert little-endian to big-endian. + // I can't believe this is necessary. + bytes := constant.Bytes(x) + for i := 0; i < len(bytes)/2; i++ { + bytes[i], bytes[len(bytes)-1-i] = bytes[len(bytes)-1-i], bytes[i] + } + return new(big.Rat).SetInt(new(big.Int).SetBytes(bytes)) +} + +func (p *exporter) bool(b bool) bool { + if trace { + p.tracef("[") + defer p.tracef("= %v] ", b) + } + + x := 0 + if b { + x = 1 + } + p.int(x) + return b +} + +// ---------------------------------------------------------------------------- +// Low-level encoders + +func (p *exporter) index(marker byte, index int) { + if index < 0 { + panic(internalError("invalid index < 0")) + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%c%d ", marker, index) + } + p.rawInt64(int64(index)) +} + +func (p *exporter) tag(tag int) { + if tag >= 0 { + panic(internalError("invalid tag >= 0")) + } + if debugFormat { + p.marker('t') + } + if trace { + p.tracef("%s ", tagString[-tag]) + } + p.rawInt64(int64(tag)) +} + +func (p *exporter) int(x int) { + p.int64(int64(x)) +} + +func (p *exporter) int64(x int64) { + if debugFormat { + p.marker('i') + } + if trace { + p.tracef("%d ", x) + } + p.rawInt64(x) +} + +func (p *exporter) string(s string) { + if debugFormat { + p.marker('s') + } + if trace { + p.tracef("%q ", s) + } + // if we saw the string before, write its index (>= 0) + // (the empty string is mapped to 0) + if i, ok := p.strIndex[s]; ok { + p.rawInt64(int64(i)) + return + } + // otherwise, remember string and write its negative length and bytes + p.strIndex[s] = len(p.strIndex) + p.rawInt64(-int64(len(s))) + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } +} + +// marker emits a marker byte and position information which makes +// it easy for a reader to detect if it is "out of sync". Used for +// debugFormat format only. +func (p *exporter) marker(m byte) { + p.rawByte(m) + // Enable this for help tracking down the location + // of an incorrect marker when running in debugFormat. + if false && trace { + p.tracef("#%d ", p.written) + } + p.rawInt64(int64(p.written)) +} + +// rawInt64 should only be used by low-level encoders. +func (p *exporter) rawInt64(x int64) { + var tmp [binary.MaxVarintLen64]byte + n := binary.PutVarint(tmp[:], x) + for i := 0; i < n; i++ { + p.rawByte(tmp[i]) + } +} + +// rawStringln should only be used to emit the initial version string. +func (p *exporter) rawStringln(s string) { + for i := 0; i < len(s); i++ { + p.rawByte(s[i]) + } + p.rawByte('\n') +} + +// rawByte is the bottleneck interface to write to p.out. 
+// rawByte escapes b as follows (any encoding does that +// hides '$'): +// +// '$' => '|' 'S' +// '|' => '|' '|' +// +// Necessary so other tools can find the end of the +// export data by searching for "$$". +// rawByte should only be used by low-level encoders. +func (p *exporter) rawByte(b byte) { + switch b { + case '$': + // write '$' as '|' 'S' + b = 'S' + fallthrough + case '|': + // write '|' as '|' '|' + p.out.WriteByte('|') + p.written++ + } + p.out.WriteByte(b) + p.written++ +} + +// tracef is like fmt.Printf but it rewrites the format string +// to take care of indentation. +func (p *exporter) tracef(format string, args ...interface{}) { + if strings.ContainsAny(format, "<>\n") { + var buf bytes.Buffer + for i := 0; i < len(format); i++ { + // no need to deal with runes + ch := format[i] + switch ch { + case '>': + p.indent++ + continue + case '<': + p.indent-- + continue + } + buf.WriteByte(ch) + if ch == '\n' { + for j := p.indent; j > 0; j-- { + buf.WriteString(". ") + } + } + } + format = buf.String() + } + fmt.Printf(format, args...) +} + +// Debugging support. +// (tagString is only used when tracing is enabled) +var tagString = [...]string{ + // Packages + -packageTag: "package", + + // Types + -namedTag: "named type", + -arrayTag: "array", + -sliceTag: "slice", + -dddTag: "ddd", + -structTag: "struct", + -pointerTag: "pointer", + -signatureTag: "signature", + -interfaceTag: "interface", + -mapTag: "map", + -chanTag: "chan", + + // Values + -falseTag: "false", + -trueTag: "true", + -int64Tag: "int64", + -floatTag: "float", + -fractionTag: "fraction", + -complexTag: "complex", + -stringTag: "string", + -unknownTag: "unknown", + + // Type aliases + -aliasTag: "alias", +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go new file mode 100644 index 000000000..e3c310782 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/bimport.go @@ -0,0 +1,1036 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/bimport.go. + +package gcimporter + +import ( + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "go/types" + "sort" + "strconv" + "strings" + "sync" + "unicode" + "unicode/utf8" +) + +type importer struct { + imports map[string]*types.Package + data []byte + importpath string + buf []byte // for reading strings + version int // export format version + + // object lists + strList []string // in order of appearance + pathList []string // in order of appearance + pkgList []*types.Package // in order of appearance + typList []types.Type // in order of appearance + interfaceList []*types.Interface // for delayed completion only + trackAllTypes bool + + // position encoding + posInfoFormat bool + prevFile string + prevLine int + fake fakeFileSet + + // debugging support + debugFormat bool + read int // bytes read +} + +// BImportData imports a package from the serialized package data +// and returns the number of bytes consumed and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. 
+func BImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { + // catch panics and return them as errors + const currentVersion = 6 + version := -1 // unknown version + defer func() { + if e := recover(); e != nil { + // Return a (possibly nil or incomplete) package unchanged (see #16088). + if version > currentVersion { + err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) + } else { + err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + } + } + }() + + p := importer{ + imports: imports, + data: data, + importpath: path, + version: version, + strList: []string{""}, // empty string is mapped to 0 + pathList: []string{""}, // empty string is mapped to 0 + fake: fakeFileSet{ + fset: fset, + files: make(map[string]*token.File), + }, + } + + // read version info + var versionstr string + if b := p.rawByte(); b == 'c' || b == 'd' { + // Go1.7 encoding; first byte encodes low-level + // encoding format (compact vs debug). + // For backward-compatibility only (avoid problems with + // old installed packages). Newly compiled packages use + // the extensible format string. + // TODO(gri) Remove this support eventually; after Go1.8. + if b == 'd' { + p.debugFormat = true + } + p.trackAllTypes = p.rawByte() == 'a' + p.posInfoFormat = p.int() != 0 + versionstr = p.string() + if versionstr == "v1" { + version = 0 + } + } else { + // Go1.8 extensible encoding + // read version string and extract version number (ignore anything after the version number) + versionstr = p.rawStringln(b) + if s := strings.SplitN(versionstr, " ", 3); len(s) >= 2 && s[0] == "version" { + if v, err := strconv.Atoi(s[1]); err == nil && v > 0 { + version = v + } + } + } + p.version = version + + // read version specific flags - extend as necessary + switch p.version { + // case currentVersion: + // ... + // fallthrough + case currentVersion, 5, 4, 3, 2, 1: + p.debugFormat = p.rawStringln(p.rawByte()) == "debug" + p.trackAllTypes = p.int() != 0 + p.posInfoFormat = p.int() != 0 + case 0: + // Go1.7 encoding format - nothing to do here + default: + errorf("unknown bexport format version %d (%q)", p.version, versionstr) + } + + // --- generic export data --- + + // populate typList with predeclared "known" types + p.typList = append(p.typList, predeclared()...) + + // read package data + pkg = p.pkg() + + // read objects of phase 1 only (see cmd/compile/internal/gc/bexport.go) + objcount := 0 + for { + tag := p.tagOrIndex() + if tag == endTag { + break + } + p.obj(tag) + objcount++ + } + + // self-verification + if count := p.int(); count != objcount { + errorf("got %d objects; want %d", objcount, count) + } + + // ignore compiler-specific import data + + // complete interfaces + // TODO(gri) re-investigate if we still need to do this in a delayed fashion + for _, typ := range p.interfaceList { + typ.Complete() + } + + // record all referenced packages as imports + list := append(([]*types.Package)(nil), p.pkgList[1:]...) 
+ sort.Sort(byPath(list)) + pkg.SetImports(list) + + // package was imported completely and without errors + pkg.MarkComplete() + + return p.read, pkg, nil +} + +func errorf(format string, args ...interface{}) { + panic(fmt.Sprintf(format, args...)) +} + +func (p *importer) pkg() *types.Package { + // if the package was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.pkgList[i] + } + + // otherwise, i is the package tag (< 0) + if i != packageTag { + errorf("unexpected package tag %d version %d", i, p.version) + } + + // read package data + name := p.string() + var path string + if p.version >= 5 { + path = p.path() + } else { + path = p.string() + } + if p.version >= 6 { + p.int() // package height; unused by go/types + } + + // we should never see an empty package name + if name == "" { + errorf("empty package name in import") + } + + // an empty path denotes the package we are currently importing; + // it must be the first package we see + if (path == "") != (len(p.pkgList) == 0) { + errorf("package path %q for pkg index %d", path, len(p.pkgList)) + } + + // if the package was imported before, use that one; otherwise create a new one + if path == "" { + path = p.importpath + } + pkg := p.imports[path] + if pkg == nil { + pkg = types.NewPackage(path, name) + p.imports[path] = pkg + } else if pkg.Name() != name { + errorf("conflicting names %s and %s for package %q", pkg.Name(), name, path) + } + p.pkgList = append(p.pkgList, pkg) + + return pkg +} + +// objTag returns the tag value for each object kind. +func objTag(obj types.Object) int { + switch obj.(type) { + case *types.Const: + return constTag + case *types.TypeName: + return typeTag + case *types.Var: + return varTag + case *types.Func: + return funcTag + default: + errorf("unexpected object: %v (%T)", obj, obj) // panics + panic("unreachable") + } +} + +func sameObj(a, b types.Object) bool { + // Because unnamed types are not canonicalized, we cannot simply compare types for + // (pointer) identity. + // Ideally we'd check equality of constant values as well, but this is good enough. + return objTag(a) == objTag(b) && types.Identical(a.Type(), b.Type()) +} + +func (p *importer) declare(obj types.Object) { + pkg := obj.Pkg() + if alt := pkg.Scope().Insert(obj); alt != nil { + // This can only trigger if we import a (non-type) object a second time. + // Excluding type aliases, this cannot happen because 1) we only import a package + // once; and b) we ignore compiler-specific export data which may contain + // functions whose inlined function bodies refer to other functions that + // were already imported. + // However, type aliases require reexporting the original type, so we need + // to allow it (see also the comment in cmd/compile/internal/gc/bimport.go, + // method importer.obj, switch case importing functions). + // TODO(gri) review/update this comment once the gc compiler handles type aliases. 
+ if !sameObj(obj, alt) { + errorf("inconsistent import:\n\t%v\npreviously imported as:\n\t%v\n", obj, alt) + } + } +} + +func (p *importer) obj(tag int) { + switch tag { + case constTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + val := p.value() + p.declare(types.NewConst(pos, pkg, name, typ, val)) + + case aliasTag: + // TODO(gri) verify type alias hookup is correct + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + p.declare(types.NewTypeName(pos, pkg, name, typ)) + + case typeTag: + p.typ(nil, nil) + + case varTag: + pos := p.pos() + pkg, name := p.qualifiedName() + typ := p.typ(nil, nil) + p.declare(types.NewVar(pos, pkg, name, typ)) + + case funcTag: + pos := p.pos() + pkg, name := p.qualifiedName() + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(nil, params, result, isddd) + p.declare(types.NewFunc(pos, pkg, name, sig)) + + default: + errorf("unexpected object tag %d", tag) + } +} + +const deltaNewFile = -64 // see cmd/compile/internal/gc/bexport.go + +func (p *importer) pos() token.Pos { + if !p.posInfoFormat { + return token.NoPos + } + + file := p.prevFile + line := p.prevLine + delta := p.int() + line += delta + if p.version >= 5 { + if delta == deltaNewFile { + if n := p.int(); n >= 0 { + // file changed + file = p.path() + line = n + } + } + } else { + if delta == 0 { + if n := p.int(); n >= 0 { + // file changed + file = p.prevFile[:n] + p.string() + line = p.int() + } + } + } + p.prevFile = file + p.prevLine = line + + return p.fake.pos(file, line) +} + +// Synthesize a token.Pos +type fakeFileSet struct { + fset *token.FileSet + files map[string]*token.File +} + +func (s *fakeFileSet) pos(file string, line int) token.Pos { + // Since we don't know the set of needed file positions, we + // reserve maxlines positions per file. + const maxlines = 64 * 1024 + f := s.files[file] + if f == nil { + f = s.fset.AddFile(file, -1, maxlines) + s.files[file] = f + // Allocate the fake linebreak indices on first use. + // TODO(adonovan): opt: save ~512KB using a more complex scheme? + fakeLinesOnce.Do(func() { + fakeLines = make([]int, maxlines) + for i := range fakeLines { + fakeLines[i] = i + } + }) + f.SetLines(fakeLines) + } + + if line > maxlines { + line = 1 + } + + // Treat the file as if it contained only newlines + // and column=1: use the line number as the offset. + return f.Pos(line - 1) +} + +var ( + fakeLines []int + fakeLinesOnce sync.Once +) + +func (p *importer) qualifiedName() (pkg *types.Package, name string) { + name = p.string() + pkg = p.pkg() + return +} + +func (p *importer) record(t types.Type) { + p.typList = append(p.typList, t) +} + +// A dddSlice is a types.Type representing ...T parameters. +// It only appears for parameter types and does not escape +// the importer. +type dddSlice struct { + elem types.Type +} + +func (t *dddSlice) Underlying() types.Type { return t } +func (t *dddSlice) String() string { return "..." + t.elem.String() } + +// parent is the package which declared the type; parent == nil means +// the package currently imported. The parent package is needed for +// exported struct fields and interface methods which don't contain +// explicit package information in the export data. +// +// A non-nil tname is used as the "owner" of the result type; i.e., +// the result type is the underlying type of tname. tname is used +// to give interface methods a named receiver type where possible. 
+func (p *importer) typ(parent *types.Package, tname *types.Named) types.Type { + // if the type was seen before, i is its index (>= 0) + i := p.tagOrIndex() + if i >= 0 { + return p.typList[i] + } + + // otherwise, i is the type tag (< 0) + switch i { + case namedTag: + // read type object + pos := p.pos() + parent, name := p.qualifiedName() + scope := parent.Scope() + obj := scope.Lookup(name) + + // if the object doesn't exist yet, create and insert it + if obj == nil { + obj = types.NewTypeName(pos, parent, name, nil) + scope.Insert(obj) + } + + if _, ok := obj.(*types.TypeName); !ok { + errorf("pkg = %s, name = %s => %s", parent, name, obj) + } + + // associate new named type with obj if it doesn't exist yet + t0 := types.NewNamed(obj.(*types.TypeName), nil, nil) + + // but record the existing type, if any + tname := obj.Type().(*types.Named) // tname is either t0 or the existing type + p.record(tname) + + // read underlying type + t0.SetUnderlying(p.typ(parent, t0)) + + // interfaces don't have associated methods + if types.IsInterface(t0) { + return tname + } + + // read associated methods + for i := p.int(); i > 0; i-- { + // TODO(gri) replace this with something closer to fieldName + pos := p.pos() + name := p.string() + if !exported(name) { + p.pkg() + } + + recv, _ := p.paramList() // TODO(gri) do we need a full param list for the receiver? + params, isddd := p.paramList() + result, _ := p.paramList() + p.int() // go:nointerface pragma - discarded + + sig := types.NewSignature(recv.At(0), params, result, isddd) + t0.AddMethod(types.NewFunc(pos, parent, name, sig)) + } + + return tname + + case arrayTag: + t := new(types.Array) + if p.trackAllTypes { + p.record(t) + } + + n := p.int64() + *t = *types.NewArray(p.typ(parent, nil), n) + return t + + case sliceTag: + t := new(types.Slice) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewSlice(p.typ(parent, nil)) + return t + + case dddTag: + t := new(dddSlice) + if p.trackAllTypes { + p.record(t) + } + + t.elem = p.typ(parent, nil) + return t + + case structTag: + t := new(types.Struct) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewStruct(p.fieldList(parent)) + return t + + case pointerTag: + t := new(types.Pointer) + if p.trackAllTypes { + p.record(t) + } + + *t = *types.NewPointer(p.typ(parent, nil)) + return t + + case signatureTag: + t := new(types.Signature) + if p.trackAllTypes { + p.record(t) + } + + params, isddd := p.paramList() + result, _ := p.paramList() + *t = *types.NewSignature(nil, params, result, isddd) + return t + + case interfaceTag: + // Create a dummy entry in the type list. This is safe because we + // cannot expect the interface type to appear in a cycle, as any + // such cycle must contain a named type which would have been + // first defined earlier. + // TODO(gri) Is this still true now that we have type aliases? + // See issue #23225. 
+ n := len(p.typList) + if p.trackAllTypes { + p.record(nil) + } + + var embeddeds []types.Type + for n := p.int(); n > 0; n-- { + p.pos() + embeddeds = append(embeddeds, p.typ(parent, nil)) + } + + t := newInterface(p.methodList(parent, tname), embeddeds) + p.interfaceList = append(p.interfaceList, t) + if p.trackAllTypes { + p.typList[n] = t + } + return t + + case mapTag: + t := new(types.Map) + if p.trackAllTypes { + p.record(t) + } + + key := p.typ(parent, nil) + val := p.typ(parent, nil) + *t = *types.NewMap(key, val) + return t + + case chanTag: + t := new(types.Chan) + if p.trackAllTypes { + p.record(t) + } + + dir := chanDir(p.int()) + val := p.typ(parent, nil) + *t = *types.NewChan(dir, val) + return t + + default: + errorf("unexpected type tag %d", i) // panics + panic("unreachable") + } +} + +func chanDir(d int) types.ChanDir { + // tag values must match the constants in cmd/compile/internal/gc/go.go + switch d { + case 1 /* Crecv */ : + return types.RecvOnly + case 2 /* Csend */ : + return types.SendOnly + case 3 /* Cboth */ : + return types.SendRecv + default: + errorf("unexpected channel dir %d", d) + return 0 + } +} + +func (p *importer) fieldList(parent *types.Package) (fields []*types.Var, tags []string) { + if n := p.int(); n > 0 { + fields = make([]*types.Var, n) + tags = make([]string, n) + for i := range fields { + fields[i], tags[i] = p.field(parent) + } + } + return +} + +func (p *importer) field(parent *types.Package) (*types.Var, string) { + pos := p.pos() + pkg, name, alias := p.fieldName(parent) + typ := p.typ(parent, nil) + tag := p.string() + + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + errorf("named base type expected") + } + anonymous = true + } else if alias { + // anonymous field: we have an explicit name because it's an alias + anonymous = true + } + + return types.NewField(pos, pkg, name, typ, anonymous), tag +} + +func (p *importer) methodList(parent *types.Package, baseType *types.Named) (methods []*types.Func) { + if n := p.int(); n > 0 { + methods = make([]*types.Func, n) + for i := range methods { + methods[i] = p.method(parent, baseType) + } + } + return +} + +func (p *importer) method(parent *types.Package, baseType *types.Named) *types.Func { + pos := p.pos() + pkg, name, _ := p.fieldName(parent) + // If we don't have a baseType, use a nil receiver. + // A receiver using the actual interface type (which + // we don't know yet) will be filled in when we call + // types.Interface.Complete. 
+ var recv *types.Var + if baseType != nil { + recv = types.NewVar(token.NoPos, parent, "", baseType) + } + params, isddd := p.paramList() + result, _ := p.paramList() + sig := types.NewSignature(recv, params, result, isddd) + return types.NewFunc(pos, pkg, name, sig) +} + +func (p *importer) fieldName(parent *types.Package) (pkg *types.Package, name string, alias bool) { + name = p.string() + pkg = parent + if pkg == nil { + // use the imported package instead + pkg = p.pkgList[0] + } + if p.version == 0 && name == "_" { + // version 0 didn't export a package for _ fields + return + } + switch name { + case "": + // 1) field name matches base type name and is exported: nothing to do + case "?": + // 2) field name matches base type name and is not exported: need package + name = "" + pkg = p.pkg() + case "@": + // 3) field name doesn't match type name (alias) + name = p.string() + alias = true + fallthrough + default: + if !exported(name) { + pkg = p.pkg() + } + } + return +} + +func (p *importer) paramList() (*types.Tuple, bool) { + n := p.int() + if n == 0 { + return nil, false + } + // negative length indicates unnamed parameters + named := true + if n < 0 { + n = -n + named = false + } + // n > 0 + params := make([]*types.Var, n) + isddd := false + for i := range params { + params[i], isddd = p.param(named) + } + return types.NewTuple(params...), isddd +} + +func (p *importer) param(named bool) (*types.Var, bool) { + t := p.typ(nil, nil) + td, isddd := t.(*dddSlice) + if isddd { + t = types.NewSlice(td.elem) + } + + var pkg *types.Package + var name string + if named { + name = p.string() + if name == "" { + errorf("expected named parameter") + } + if name != "_" { + pkg = p.pkg() + } + if i := strings.Index(name, "·"); i > 0 { + name = name[:i] // cut off gc-specific parameter numbering + } + } + + // read and discard compiler-specific info + p.string() + + return types.NewVar(token.NoPos, pkg, name, t), isddd +} + +func exported(name string) bool { + ch, _ := utf8.DecodeRuneInString(name) + return unicode.IsUpper(ch) +} + +func (p *importer) value() constant.Value { + switch tag := p.tagOrIndex(); tag { + case falseTag: + return constant.MakeBool(false) + case trueTag: + return constant.MakeBool(true) + case int64Tag: + return constant.MakeInt64(p.int64()) + case floatTag: + return p.float() + case complexTag: + re := p.float() + im := p.float() + return constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + case stringTag: + return constant.MakeString(p.string()) + case unknownTag: + return constant.MakeUnknown() + default: + errorf("unexpected value tag %d", tag) // panics + panic("unreachable") + } +} + +func (p *importer) float() constant.Value { + sign := p.int() + if sign == 0 { + return constant.MakeInt64(0) + } + + exp := p.int() + mant := []byte(p.string()) // big endian + + // remove leading 0's if any + for len(mant) > 0 && mant[0] == 0 { + mant = mant[1:] + } + + // convert to little endian + // TODO(gri) go/constant should have a more direct conversion function + // (e.g., once it supports a big.Float based implementation) + for i, j := 0, len(mant)-1; i < j; i, j = i+1, j-1 { + mant[i], mant[j] = mant[j], mant[i] + } + + // adjust exponent (constant.MakeFromBytes creates an integer value, + // but mant represents the mantissa bits such that 0.5 <= mant < 1.0) + exp -= len(mant) << 3 + if len(mant) > 0 { + for msd := mant[len(mant)-1]; msd&0x80 == 0; msd <<= 1 { + exp++ + } + } + + x := constant.MakeFromBytes(mant) + switch { + case exp < 0: + d := 
constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) + x = constant.BinaryOp(x, token.QUO, d) + case exp > 0: + x = constant.Shift(x, token.SHL, uint(exp)) + } + + if sign < 0 { + x = constant.UnaryOp(token.SUB, x, 0) + } + return x +} + +// ---------------------------------------------------------------------------- +// Low-level decoders + +func (p *importer) tagOrIndex() int { + if p.debugFormat { + p.marker('t') + } + + return int(p.rawInt64()) +} + +func (p *importer) int() int { + x := p.int64() + if int64(int(x)) != x { + errorf("exported integer too large") + } + return int(x) +} + +func (p *importer) int64() int64 { + if p.debugFormat { + p.marker('i') + } + + return p.rawInt64() +} + +func (p *importer) path() string { + if p.debugFormat { + p.marker('p') + } + // if the path was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.pathList[i] + } + // otherwise, i is the negative path length (< 0) + a := make([]string, -i) + for n := range a { + a[n] = p.string() + } + s := strings.Join(a, "/") + p.pathList = append(p.pathList, s) + return s +} + +func (p *importer) string() string { + if p.debugFormat { + p.marker('s') + } + // if the string was seen before, i is its index (>= 0) + // (the empty string is at index 0) + i := p.rawInt64() + if i >= 0 { + return p.strList[i] + } + // otherwise, i is the negative string length (< 0) + if n := int(-i); n <= cap(p.buf) { + p.buf = p.buf[:n] + } else { + p.buf = make([]byte, n) + } + for i := range p.buf { + p.buf[i] = p.rawByte() + } + s := string(p.buf) + p.strList = append(p.strList, s) + return s +} + +func (p *importer) marker(want byte) { + if got := p.rawByte(); got != want { + errorf("incorrect marker: got %c; want %c (pos = %d)", got, want, p.read) + } + + pos := p.read + if n := int(p.rawInt64()); n != pos { + errorf("incorrect position: got %d; want %d", n, pos) + } +} + +// rawInt64 should only be used by low-level decoders. +func (p *importer) rawInt64() int64 { + i, err := binary.ReadVarint(p) + if err != nil { + errorf("read error: %v", err) + } + return i +} + +// rawStringln should only be used to read the initial version string. +func (p *importer) rawStringln(b byte) string { + p.buf = p.buf[:0] + for b != '\n' { + p.buf = append(p.buf, b) + b = p.rawByte() + } + return string(p.buf) +} + +// needed for binary.ReadVarint in rawInt64 +func (p *importer) ReadByte() (byte, error) { + return p.rawByte(), nil +} + +// byte is the bottleneck interface for reading p.data. +// It unescapes '|' 'S' to '$' and '|' '|' to '|'. +// rawByte should only be used by low-level decoders. +func (p *importer) rawByte() byte { + b := p.data[0] + r := 1 + if b == '|' { + b = p.data[1] + r = 2 + switch b { + case 'S': + b = '$' + case '|': + // nothing to do + default: + errorf("unexpected escape sequence in export data") + } + } + p.data = p.data[r:] + p.read += r + return b + +} + +// ---------------------------------------------------------------------------- +// Export format + +// Tags. Must be < 0. 
+const ( + // Objects + packageTag = -(iota + 1) + constTag + typeTag + varTag + funcTag + endTag + + // Types + namedTag + arrayTag + sliceTag + dddTag + structTag + pointerTag + signatureTag + interfaceTag + mapTag + chanTag + + // Values + falseTag + trueTag + int64Tag + floatTag + fractionTag // not used by gc + complexTag + stringTag + nilTag // only used by gc (appears in exported inlined function bodies) + unknownTag // not used by gc (only appears in packages with errors) + + // Type aliases + aliasTag +) + +var predecl []types.Type // initialized lazily + +func predeclared() []types.Type { + if predecl == nil { + // initialize lazily to be sure that all + // elements have been initialized before + predecl = []types.Type{ // basic types + types.Typ[types.Bool], + types.Typ[types.Int], + types.Typ[types.Int8], + types.Typ[types.Int16], + types.Typ[types.Int32], + types.Typ[types.Int64], + types.Typ[types.Uint], + types.Typ[types.Uint8], + types.Typ[types.Uint16], + types.Typ[types.Uint32], + types.Typ[types.Uint64], + types.Typ[types.Uintptr], + types.Typ[types.Float32], + types.Typ[types.Float64], + types.Typ[types.Complex64], + types.Typ[types.Complex128], + types.Typ[types.String], + + // basic type aliases + types.Universe.Lookup("byte").Type(), + types.Universe.Lookup("rune").Type(), + + // error + types.Universe.Lookup("error").Type(), + + // untyped types + types.Typ[types.UntypedBool], + types.Typ[types.UntypedInt], + types.Typ[types.UntypedRune], + types.Typ[types.UntypedFloat], + types.Typ[types.UntypedComplex], + types.Typ[types.UntypedString], + types.Typ[types.UntypedNil], + + // package unsafe + types.Typ[types.UnsafePointer], + + // invalid type + types.Typ[types.Invalid], // only appears in packages with errors + + // used internally by gc; never used by this package or in .a files + anyType{}, + } + } + return predecl +} + +type anyType struct{} + +func (t anyType) Underlying() types.Type { return t } +func (t anyType) String() string { return "any" } diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go new file mode 100644 index 000000000..f33dc5613 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/exportdata.go @@ -0,0 +1,93 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/exportdata.go. + +// This file implements FindExportData. + +package gcimporter + +import ( + "bufio" + "fmt" + "io" + "strconv" + "strings" +) + +func readGopackHeader(r *bufio.Reader) (name string, size int, err error) { + // See $GOROOT/include/ar.h. + hdr := make([]byte, 16+12+6+6+8+10+2) + _, err = io.ReadFull(r, hdr) + if err != nil { + return + } + // leave for debugging + if false { + fmt.Printf("header: %s", hdr) + } + s := strings.TrimSpace(string(hdr[16+12+6+6+8:][:10])) + size, err = strconv.Atoi(s) + if err != nil || hdr[len(hdr)-2] != '`' || hdr[len(hdr)-1] != '\n' { + err = fmt.Errorf("invalid archive header") + return + } + name = strings.TrimSpace(string(hdr[:16])) + return +} + +// FindExportData positions the reader r at the beginning of the +// export data section of an underlying GC-created object/archive +// file by reading from it. The reader must be positioned at the +// start of the file before calling this function. 
The hdr result +// is the string before the export data, either "$$" or "$$B". +// +func FindExportData(r *bufio.Reader) (hdr string, err error) { + // Read first line to make sure this is an object file. + line, err := r.ReadSlice('\n') + if err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + + if string(line) == "!\n" { + // Archive file. Scan to __.PKGDEF. + var name string + if name, _, err = readGopackHeader(r); err != nil { + return + } + + // First entry should be __.PKGDEF. + if name != "__.PKGDEF" { + err = fmt.Errorf("go archive is missing __.PKGDEF") + return + } + + // Read first line of __.PKGDEF data, so that line + // is once again the first line of the input. + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + + // Now at __.PKGDEF in archive or still at beginning of file. + // Either way, line should begin with "go object ". + if !strings.HasPrefix(string(line), "go object ") { + err = fmt.Errorf("not a Go object file") + return + } + + // Skip over object header to export data. + // Begins after first line starting with $$. + for line[0] != '$' { + if line, err = r.ReadSlice('\n'); err != nil { + err = fmt.Errorf("can't find export data (%v)", err) + return + } + } + hdr = string(line) + + return +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go new file mode 100644 index 000000000..9cf186605 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go @@ -0,0 +1,1078 @@ +// Copyright 2011 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file is a modified copy of $GOROOT/src/go/internal/gcimporter/gcimporter.go, +// but it also contains the original source-based importer code for Go1.6. +// Once we stop supporting 1.6, we can remove that code. + +// Package gcimporter provides various functions for reading +// gc-generated object files that can be used to implement the +// Importer interface defined by the Go 1.5 standard library package. +package gcimporter // import "golang.org/x/tools/go/internal/gcimporter" + +import ( + "bufio" + "errors" + "fmt" + "go/build" + "go/constant" + "go/token" + "go/types" + "io" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "text/scanner" +) + +// debugging/development support +const debug = false + +var pkgExts = [...]string{".a", ".o"} + +// FindPkg returns the filename and unique package id for an import +// path based on package information provided by build.Import (using +// the build.Default build.Context). A relative srcDir is interpreted +// relative to the current working directory. +// If no file was found, an empty filename is returned. +// +func FindPkg(path, srcDir string) (filename, id string) { + if path == "" { + return + } + + var noext string + switch { + default: + // "x" -> "$GOPATH/pkg/$GOOS_$GOARCH/x.ext", "x" + // Don't require the source files to be present. 
+ if abs, err := filepath.Abs(srcDir); err == nil { // see issue 14282 + srcDir = abs + } + bp, _ := build.Import(path, srcDir, build.FindOnly|build.AllowBinary) + if bp.PkgObj == "" { + id = path // make sure we have an id to print in error message + return + } + noext = strings.TrimSuffix(bp.PkgObj, ".a") + id = bp.ImportPath + + case build.IsLocalImport(path): + // "./x" -> "/this/directory/x.ext", "/this/directory/x" + noext = filepath.Join(srcDir, path) + id = noext + + case filepath.IsAbs(path): + // for completeness only - go/build.Import + // does not support absolute imports + // "/x" -> "/x.ext", "/x" + noext = path + id = path + } + + if false { // for debugging + if path != id { + fmt.Printf("%s -> %s\n", path, id) + } + } + + // try extensions + for _, ext := range pkgExts { + filename = noext + ext + if f, err := os.Stat(filename); err == nil && !f.IsDir() { + return + } + } + + filename = "" // not found + return +} + +// ImportData imports a package by reading the gc-generated export data, +// adds the corresponding package object to the packages map indexed by id, +// and returns the object. +// +// The packages map must contains all packages already imported. The data +// reader position must be the beginning of the export data section. The +// filename is only used in error messages. +// +// If packages[id] contains the completely imported package, that package +// can be used directly, and there is no need to call this function (but +// there is also no harm but for extra time used). +// +func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) { + // support for parser error handling + defer func() { + switch r := recover().(type) { + case nil: + // nothing to do + case importError: + err = r + default: + panic(r) // internal error + } + }() + + var p parser + p.init(filename, id, data, packages) + pkg = p.parseExport() + + return +} + +// Import imports a gc-generated package given its import path and srcDir, adds +// the corresponding package object to the packages map, and returns the object. +// The packages map must contain all packages already imported. +// +func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) { + var rc io.ReadCloser + var filename, id string + if lookup != nil { + // With custom lookup specified, assume that caller has + // converted path to a canonical import path for use in the map. + if path == "unsafe" { + return types.Unsafe, nil + } + id = path + + // No need to re-import if the package was imported completely before. 
+ if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + f, err := lookup(path) + if err != nil { + return nil, err + } + rc = f + } else { + filename, id = FindPkg(path, srcDir) + if filename == "" { + if path == "unsafe" { + return types.Unsafe, nil + } + return nil, fmt.Errorf("can't find import: %q", id) + } + + // no need to re-import if the package was imported completely before + if pkg = packages[id]; pkg != nil && pkg.Complete() { + return + } + + // open file + f, err := os.Open(filename) + if err != nil { + return nil, err + } + defer func() { + if err != nil { + // add file name to error + err = fmt.Errorf("%s: %v", filename, err) + } + }() + rc = f + } + defer rc.Close() + + var hdr string + buf := bufio.NewReader(rc) + if hdr, err = FindExportData(buf); err != nil { + return + } + + switch hdr { + case "$$\n": + // Work-around if we don't have a filename; happens only if lookup != nil. + // Either way, the filename is only needed for importer error messages, so + // this is fine. + if filename == "" { + filename = path + } + return ImportData(packages, filename, id, buf) + + case "$$B\n": + var data []byte + data, err = ioutil.ReadAll(buf) + if err != nil { + break + } + + // TODO(gri): allow clients of go/importer to provide a FileSet. + // Or, define a new standard go/types/gcexportdata package. + fset := token.NewFileSet() + + // The indexed export format starts with an 'i'; the older + // binary export format starts with a 'c', 'd', or 'v' + // (from "version"). Select appropriate importer. + if len(data) > 0 && data[0] == 'i' { + _, pkg, err = IImportData(fset, packages, data[1:], id) + } else { + _, pkg, err = BImportData(fset, packages, data, id) + } + + default: + err = fmt.Errorf("unknown export data header: %q", hdr) + } + + return +} + +// ---------------------------------------------------------------------------- +// Parser + +// TODO(gri) Imported objects don't have position information. +// Ideally use the debug table line info; alternatively +// create some fake position (or the position of the +// import). That way error messages referring to imported +// objects can print meaningful information. + +// parser parses the exports inside a gc compiler-produced +// object/archive file and populates its scope with the results. 
+type parser struct { + scanner scanner.Scanner + tok rune // current token + lit string // literal string; only valid for Ident, Int, String tokens + id string // package id of imported package + sharedPkgs map[string]*types.Package // package id -> package object (across importer) + localPkgs map[string]*types.Package // package id -> package object (just this package) +} + +func (p *parser) init(filename, id string, src io.Reader, packages map[string]*types.Package) { + p.scanner.Init(src) + p.scanner.Error = func(_ *scanner.Scanner, msg string) { p.error(msg) } + p.scanner.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanChars | scanner.ScanStrings | scanner.ScanComments | scanner.SkipComments + p.scanner.Whitespace = 1<<'\t' | 1<<' ' + p.scanner.Filename = filename // for good error messages + p.next() + p.id = id + p.sharedPkgs = packages + if debug { + // check consistency of packages map + for _, pkg := range packages { + if pkg.Name() == "" { + fmt.Printf("no package name for %s\n", pkg.Path()) + } + } + } +} + +func (p *parser) next() { + p.tok = p.scanner.Scan() + switch p.tok { + case scanner.Ident, scanner.Int, scanner.Char, scanner.String, '·': + p.lit = p.scanner.TokenText() + default: + p.lit = "" + } + if debug { + fmt.Printf("%s: %q -> %q\n", scanner.TokenString(p.tok), p.scanner.TokenText(), p.lit) + } +} + +func declTypeName(pkg *types.Package, name string) *types.TypeName { + scope := pkg.Scope() + if obj := scope.Lookup(name); obj != nil { + return obj.(*types.TypeName) + } + obj := types.NewTypeName(token.NoPos, pkg, name, nil) + // a named type may be referred to before the underlying type + // is known - set it up + types.NewNamed(obj, nil, nil) + scope.Insert(obj) + return obj +} + +// ---------------------------------------------------------------------------- +// Error handling + +// Internal errors are boxed as importErrors. +type importError struct { + pos scanner.Position + err error +} + +func (e importError) Error() string { + return fmt.Sprintf("import error %s (byte offset = %d): %s", e.pos, e.pos.Offset, e.err) +} + +func (p *parser) error(err interface{}) { + if s, ok := err.(string); ok { + err = errors.New(s) + } + // panic with a runtime.Error if err is not an error + panic(importError{p.scanner.Pos(), err.(error)}) +} + +func (p *parser) errorf(format string, args ...interface{}) { + p.error(fmt.Sprintf(format, args...)) +} + +func (p *parser) expect(tok rune) string { + lit := p.lit + if p.tok != tok { + p.errorf("expected %s, got %s (%s)", scanner.TokenString(tok), scanner.TokenString(p.tok), lit) + } + p.next() + return lit +} + +func (p *parser) expectSpecial(tok string) { + sep := 'x' // not white space + i := 0 + for i < len(tok) && p.tok == rune(tok[i]) && sep > ' ' { + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + i++ + } + if i < len(tok) { + p.errorf("expected %q, got %q", tok, tok[0:i]) + } +} + +func (p *parser) expectKeyword(keyword string) { + lit := p.expect(scanner.Ident) + if lit != keyword { + p.errorf("expected keyword %s, got %q", keyword, lit) + } +} + +// ---------------------------------------------------------------------------- +// Qualified and unqualified names + +// PackageId = string_lit . 
+// +func (p *parser) parsePackageId() string { + id, err := strconv.Unquote(p.expect(scanner.String)) + if err != nil { + p.error(err) + } + // id == "" stands for the imported package id + // (only known at time of package installation) + if id == "" { + id = p.id + } + return id +} + +// PackageName = ident . +// +func (p *parser) parsePackageName() string { + return p.expect(scanner.Ident) +} + +// dotIdentifier = ( ident | '·' ) { ident | int | '·' } . +func (p *parser) parseDotIdent() string { + ident := "" + if p.tok != scanner.Int { + sep := 'x' // not white space + for (p.tok == scanner.Ident || p.tok == scanner.Int || p.tok == '·') && sep > ' ' { + ident += p.lit + sep = p.scanner.Peek() // if sep <= ' ', there is white space before the next token + p.next() + } + } + if ident == "" { + p.expect(scanner.Ident) // use expect() for error handling + } + return ident +} + +// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) . +// +func (p *parser) parseQualifiedName() (id, name string) { + p.expect('@') + id = p.parsePackageId() + p.expect('.') + // Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields. + if p.tok == '?' { + p.next() + } else { + name = p.parseDotIdent() + } + return +} + +// getPkg returns the package for a given id. If the package is +// not found, create the package and add it to the p.localPkgs +// and p.sharedPkgs maps. name is the (expected) name of the +// package. If name == "", the package name is expected to be +// set later via an import clause in the export data. +// +// id identifies a package, usually by a canonical package path like +// "encoding/json" but possibly by a non-canonical import path like +// "./json". +// +func (p *parser) getPkg(id, name string) *types.Package { + // package unsafe is not in the packages maps - handle explicitly + if id == "unsafe" { + return types.Unsafe + } + + pkg := p.localPkgs[id] + if pkg == nil { + // first import of id from this package + pkg = p.sharedPkgs[id] + if pkg == nil { + // first import of id by this importer; + // add (possibly unnamed) pkg to shared packages + pkg = types.NewPackage(id, name) + p.sharedPkgs[id] = pkg + } + // add (possibly unnamed) pkg to local packages + if p.localPkgs == nil { + p.localPkgs = make(map[string]*types.Package) + } + p.localPkgs[id] = pkg + } else if name != "" { + // package exists already and we have an expected package name; + // make sure names match or set package name if necessary + if pname := pkg.Name(); pname == "" { + pkg.SetName(name) + } else if pname != name { + p.errorf("%s package name mismatch: %s (given) vs %s (expected)", id, pname, name) + } + } + return pkg +} + +// parseExportedName is like parseQualifiedName, but +// the package id is resolved to an imported *types.Package. +// +func (p *parser) parseExportedName() (pkg *types.Package, name string) { + id, name := p.parseQualifiedName() + pkg = p.getPkg(id, "") + return +} + +// ---------------------------------------------------------------------------- +// Types + +// BasicType = identifier . +// +func (p *parser) parseBasicType() types.Type { + id := p.expect(scanner.Ident) + obj := types.Universe.Lookup(id) + if obj, ok := obj.(*types.TypeName); ok { + return obj.Type() + } + p.errorf("not a basic type: %s", id) + return nil +} + +// ArrayType = "[" int_lit "]" Type . 
+// +func (p *parser) parseArrayType(parent *types.Package) types.Type { + // "[" already consumed and lookahead known not to be "]" + lit := p.expect(scanner.Int) + p.expect(']') + elem := p.parseType(parent) + n, err := strconv.ParseInt(lit, 10, 64) + if err != nil { + p.error(err) + } + return types.NewArray(elem, n) +} + +// MapType = "map" "[" Type "]" Type . +// +func (p *parser) parseMapType(parent *types.Package) types.Type { + p.expectKeyword("map") + p.expect('[') + key := p.parseType(parent) + p.expect(']') + elem := p.parseType(parent) + return types.NewMap(key, elem) +} + +// Name = identifier | "?" | QualifiedName . +// +// For unqualified and anonymous names, the returned package is the parent +// package unless parent == nil, in which case the returned package is the +// package being imported. (The parent package is not nil if the the name +// is an unqualified struct field or interface method name belonging to a +// type declared in another package.) +// +// For qualified names, the returned package is nil (and not created if +// it doesn't exist yet) unless materializePkg is set (which creates an +// unnamed package with valid package path). In the latter case, a +// subsequent import clause is expected to provide a name for the package. +// +func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) { + pkg = parent + if pkg == nil { + pkg = p.sharedPkgs[p.id] + } + switch p.tok { + case scanner.Ident: + name = p.lit + p.next() + case '?': + // anonymous + p.next() + case '@': + // exported name prefixed with package path + pkg = nil + var id string + id, name = p.parseQualifiedName() + if materializePkg { + pkg = p.getPkg(id, "") + } + default: + p.error("name expected") + } + return +} + +func deref(typ types.Type) types.Type { + if p, _ := typ.(*types.Pointer); p != nil { + return p.Elem() + } + return typ +} + +// Field = Name Type [ string_lit ] . +// +func (p *parser) parseField(parent *types.Package) (*types.Var, string) { + pkg, name := p.parseName(parent, true) + + if name == "_" { + // Blank fields should be package-qualified because they + // are unexported identifiers, but gc does not qualify them. + // Assuming that the ident belongs to the current package + // causes types to change during re-exporting, leading + // to spurious "can't assign A to B" errors from go/types. + // As a workaround, pretend all blank fields belong + // to the same unique dummy package. + const blankpkg = "<_>" + pkg = p.getPkg(blankpkg, blankpkg) + } + + typ := p.parseType(parent) + anonymous := false + if name == "" { + // anonymous field - typ must be T or *T and T must be a type name + switch typ := deref(typ).(type) { + case *types.Basic: // basic types are named types + pkg = nil // objects defined in Universe scope have no package + name = typ.Name() + case *types.Named: + name = typ.Obj().Name() + default: + p.errorf("anonymous field expected") + } + anonymous = true + } + tag := "" + if p.tok == scanner.String { + s := p.expect(scanner.String) + var err error + tag, err = strconv.Unquote(s) + if err != nil { + p.errorf("invalid struct tag %s: %s", s, err) + } + } + return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag +} + +// StructType = "struct" "{" [ FieldList ] "}" . +// FieldList = Field { ";" Field } . 
+// +func (p *parser) parseStructType(parent *types.Package) types.Type { + var fields []*types.Var + var tags []string + + p.expectKeyword("struct") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + fld, tag := p.parseField(parent) + if tag != "" && tags == nil { + tags = make([]string, i) + } + if tags != nil { + tags = append(tags, tag) + } + fields = append(fields, fld) + } + p.expect('}') + + return types.NewStruct(fields, tags) +} + +// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] . +// +func (p *parser) parseParameter() (par *types.Var, isVariadic bool) { + _, name := p.parseName(nil, false) + // remove gc-specific parameter numbering + if i := strings.Index(name, "·"); i >= 0 { + name = name[:i] + } + if p.tok == '.' { + p.expectSpecial("...") + isVariadic = true + } + typ := p.parseType(nil) + if isVariadic { + typ = types.NewSlice(typ) + } + // ignore argument tag (e.g. "noescape") + if p.tok == scanner.String { + p.next() + } + // TODO(gri) should we provide a package? + par = types.NewVar(token.NoPos, nil, name, typ) + return +} + +// Parameters = "(" [ ParameterList ] ")" . +// ParameterList = { Parameter "," } Parameter . +// +func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) { + p.expect('(') + for p.tok != ')' && p.tok != scanner.EOF { + if len(list) > 0 { + p.expect(',') + } + par, variadic := p.parseParameter() + list = append(list, par) + if variadic { + if isVariadic { + p.error("... not on final argument") + } + isVariadic = true + } + } + p.expect(')') + + return +} + +// Signature = Parameters [ Result ] . +// Result = Type | Parameters . +// +func (p *parser) parseSignature(recv *types.Var) *types.Signature { + params, isVariadic := p.parseParameters() + + // optional result type + var results []*types.Var + if p.tok == '(' { + var variadic bool + results, variadic = p.parseParameters() + if variadic { + p.error("... not permitted on result type") + } + } + + return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic) +} + +// InterfaceType = "interface" "{" [ MethodList ] "}" . +// MethodList = Method { ";" Method } . +// Method = Name Signature . +// +// The methods of embedded interfaces are always "inlined" +// by the compiler and thus embedded interfaces are never +// visible in the export data. +// +func (p *parser) parseInterfaceType(parent *types.Package) types.Type { + var methods []*types.Func + + p.expectKeyword("interface") + p.expect('{') + for i := 0; p.tok != '}' && p.tok != scanner.EOF; i++ { + if i > 0 { + p.expect(';') + } + pkg, name := p.parseName(parent, true) + sig := p.parseSignature(nil) + methods = append(methods, types.NewFunc(token.NoPos, pkg, name, sig)) + } + p.expect('}') + + // Complete requires the type's embedded interfaces to be fully defined, + // but we do not define any + return types.NewInterface(methods, nil).Complete() +} + +// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type . 
+// +func (p *parser) parseChanType(parent *types.Package) types.Type { + dir := types.SendRecv + if p.tok == scanner.Ident { + p.expectKeyword("chan") + if p.tok == '<' { + p.expectSpecial("<-") + dir = types.SendOnly + } + } else { + p.expectSpecial("<-") + p.expectKeyword("chan") + dir = types.RecvOnly + } + elem := p.parseType(parent) + return types.NewChan(dir, elem) +} + +// Type = +// BasicType | TypeName | ArrayType | SliceType | StructType | +// PointerType | FuncType | InterfaceType | MapType | ChanType | +// "(" Type ")" . +// +// BasicType = ident . +// TypeName = ExportedName . +// SliceType = "[" "]" Type . +// PointerType = "*" Type . +// FuncType = "func" Signature . +// +func (p *parser) parseType(parent *types.Package) types.Type { + switch p.tok { + case scanner.Ident: + switch p.lit { + default: + return p.parseBasicType() + case "struct": + return p.parseStructType(parent) + case "func": + // FuncType + p.next() + return p.parseSignature(nil) + case "interface": + return p.parseInterfaceType(parent) + case "map": + return p.parseMapType(parent) + case "chan": + return p.parseChanType(parent) + } + case '@': + // TypeName + pkg, name := p.parseExportedName() + return declTypeName(pkg, name).Type() + case '[': + p.next() // look ahead + if p.tok == ']' { + // SliceType + p.next() + return types.NewSlice(p.parseType(parent)) + } + return p.parseArrayType(parent) + case '*': + // PointerType + p.next() + return types.NewPointer(p.parseType(parent)) + case '<': + return p.parseChanType(parent) + case '(': + // "(" Type ")" + p.next() + typ := p.parseType(parent) + p.expect(')') + return typ + } + p.errorf("expected type, got %s (%q)", scanner.TokenString(p.tok), p.lit) + return nil +} + +// ---------------------------------------------------------------------------- +// Declarations + +// ImportDecl = "import" PackageName PackageId . +// +func (p *parser) parseImportDecl() { + p.expectKeyword("import") + name := p.parsePackageName() + p.getPkg(p.parsePackageId(), name) +} + +// int_lit = [ "+" | "-" ] { "0" ... "9" } . +// +func (p *parser) parseInt() string { + s := "" + switch p.tok { + case '-': + s = "-" + p.next() + case '+': + p.next() + } + return s + p.expect(scanner.Int) +} + +// number = int_lit [ "p" int_lit ] . +// +func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) { + // mantissa + mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0) + if mant == nil { + panic("invalid mantissa") + } + + if p.lit == "p" { + // exponent (base 2) + p.next() + exp, err := strconv.ParseInt(p.parseInt(), 10, 0) + if err != nil { + p.error(err) + } + if exp < 0 { + denom := constant.MakeInt64(1) + denom = constant.Shift(denom, token.SHL, uint(-exp)) + typ = types.Typ[types.UntypedFloat] + val = constant.BinaryOp(mant, token.QUO, denom) + return + } + if exp > 0 { + mant = constant.Shift(mant, token.SHL, uint(exp)) + } + typ = types.Typ[types.UntypedFloat] + val = mant + return + } + + typ = types.Typ[types.UntypedInt] + val = mant + return +} + +// ConstDecl = "const" ExportedName [ Type ] "=" Literal . +// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit . +// bool_lit = "true" | "false" . +// complex_lit = "(" float_lit "+" float_lit "i" ")" . +// rune_lit = "(" int_lit "+" int_lit ")" . +// string_lit = `"` { unicode_char } `"` . 
+// +func (p *parser) parseConstDecl() { + p.expectKeyword("const") + pkg, name := p.parseExportedName() + + var typ0 types.Type + if p.tok != '=' { + // constant types are never structured - no need for parent type + typ0 = p.parseType(nil) + } + + p.expect('=') + var typ types.Type + var val constant.Value + switch p.tok { + case scanner.Ident: + // bool_lit + if p.lit != "true" && p.lit != "false" { + p.error("expected true or false") + } + typ = types.Typ[types.UntypedBool] + val = constant.MakeBool(p.lit == "true") + p.next() + + case '-', scanner.Int: + // int_lit + typ, val = p.parseNumber() + + case '(': + // complex_lit or rune_lit + p.next() + if p.tok == scanner.Char { + p.next() + p.expect('+') + typ = types.Typ[types.UntypedRune] + _, val = p.parseNumber() + p.expect(')') + break + } + _, re := p.parseNumber() + p.expect('+') + _, im := p.parseNumber() + p.expectKeyword("i") + p.expect(')') + typ = types.Typ[types.UntypedComplex] + val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + + case scanner.Char: + // rune_lit + typ = types.Typ[types.UntypedRune] + val = constant.MakeFromLiteral(p.lit, token.CHAR, 0) + p.next() + + case scanner.String: + // string_lit + typ = types.Typ[types.UntypedString] + val = constant.MakeFromLiteral(p.lit, token.STRING, 0) + p.next() + + default: + p.errorf("expected literal got %s", scanner.TokenString(p.tok)) + } + + if typ0 == nil { + typ0 = typ + } + + pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val)) +} + +// TypeDecl = "type" ExportedName Type . +// +func (p *parser) parseTypeDecl() { + p.expectKeyword("type") + pkg, name := p.parseExportedName() + obj := declTypeName(pkg, name) + + // The type object may have been imported before and thus already + // have a type associated with it. We still need to parse the type + // structure, but throw it away if the object already has a type. + // This ensures that all imports refer to the same type object for + // a given type declaration. + typ := p.parseType(pkg) + + if name := obj.Type().(*types.Named); name.Underlying() == nil { + name.SetUnderlying(typ) + } +} + +// VarDecl = "var" ExportedName Type . +// +func (p *parser) parseVarDecl() { + p.expectKeyword("var") + pkg, name := p.parseExportedName() + typ := p.parseType(pkg) + pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ)) +} + +// Func = Signature [ Body ] . +// Body = "{" ... "}" . +// +func (p *parser) parseFunc(recv *types.Var) *types.Signature { + sig := p.parseSignature(recv) + if p.tok == '{' { + p.next() + for i := 1; i > 0; p.next() { + switch p.tok { + case '{': + i++ + case '}': + i-- + } + } + } + return sig +} + +// MethodDecl = "func" Receiver Name Func . +// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" . +// +func (p *parser) parseMethodDecl() { + // "func" already consumed + p.expect('(') + recv, _ := p.parseParameter() // receiver + p.expect(')') + + // determine receiver base type object + base := deref(recv.Type()).(*types.Named) + + // parse method name, signature, and possibly inlined body + _, name := p.parseName(nil, false) + sig := p.parseFunc(recv) + + // methods always belong to the same package as the base type object + pkg := base.Obj().Pkg() + + // add method to type unless type was imported before + // and method exists already + // TODO(gri) This leads to a quadratic algorithm - ok for now because method counts are small. + base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig)) +} + +// FuncDecl = "func" ExportedName Func . 
+// +func (p *parser) parseFuncDecl() { + // "func" already consumed + pkg, name := p.parseExportedName() + typ := p.parseFunc(nil) + pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ)) +} + +// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" . +// +func (p *parser) parseDecl() { + if p.tok == scanner.Ident { + switch p.lit { + case "import": + p.parseImportDecl() + case "const": + p.parseConstDecl() + case "type": + p.parseTypeDecl() + case "var": + p.parseVarDecl() + case "func": + p.next() // look ahead + if p.tok == '(' { + p.parseMethodDecl() + } else { + p.parseFuncDecl() + } + } + } + p.expect('\n') +} + +// ---------------------------------------------------------------------------- +// Export + +// Export = "PackageClause { Decl } "$$" . +// PackageClause = "package" PackageName [ "safe" ] "\n" . +// +func (p *parser) parseExport() *types.Package { + p.expectKeyword("package") + name := p.parsePackageName() + if p.tok == scanner.Ident && p.lit == "safe" { + // package was compiled with -u option - ignore + p.next() + } + p.expect('\n') + + pkg := p.getPkg(p.id, name) + + for p.tok != '$' && p.tok != scanner.EOF { + p.parseDecl() + } + + if ch := p.scanner.Peek(); p.tok != '$' || ch != '$' { + // don't call next()/expect() since reading past the + // export data may cause scanner errors (e.g. NUL chars) + p.errorf("expected '$$', got %s %c", scanner.TokenString(p.tok), ch) + } + + if n := p.scanner.ErrorCount; n != 0 { + p.errorf("expected no scanner errors, got %d", n) + } + + // Record all locally referenced packages as imports. + var imports []*types.Package + for id, pkg2 := range p.localPkgs { + if pkg2.Name() == "" { + p.errorf("%s package has no name", id) + } + if id == p.id { + continue // avoid self-edge + } + imports = append(imports, pkg2) + } + sort.Sort(byPath(imports)) + pkg.SetImports(imports) + + // package was imported completely and without errors + pkg.MarkComplete() + + return pkg +} + +type byPath []*types.Package + +func (a byPath) Len() int { return len(a) } +func (a byPath) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a byPath) Less(i, j int) bool { return a[i].Path() < a[j].Path() } diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go new file mode 100644 index 000000000..be671c79b --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go @@ -0,0 +1,723 @@ +// Copyright 2019 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Indexed binary package export. +// This file was derived from $GOROOT/src/cmd/compile/internal/gc/iexport.go; +// see that file for specification of the format. + +// +build go1.11 + +package gcimporter + +import ( + "bytes" + "encoding/binary" + "go/ast" + "go/constant" + "go/token" + "go/types" + "io" + "math/big" + "reflect" + "sort" +) + +// Current indexed export format version. Increase with each format change. +// 0: Go1.11 encoding +const iexportVersion = 0 + +// IExportData returns the binary export data for pkg. +// If no file set is provided, position info will be missing. +func IExportData(fset *token.FileSet, pkg *types.Package) (b []byte, err error) { + defer func() { + if e := recover(); e != nil { + if ierr, ok := e.(internalError); ok { + err = ierr + return + } + // Not an internal error; panic again. 
+ panic(e) + } + }() + + p := iexporter{ + out: bytes.NewBuffer(nil), + fset: fset, + allPkgs: map[*types.Package]bool{}, + stringIndex: map[string]uint64{}, + declIndex: map[types.Object]uint64{}, + typIndex: map[types.Type]uint64{}, + } + + for i, pt := range predeclared() { + p.typIndex[pt] = uint64(i) + } + if len(p.typIndex) > predeclReserved { + panic(internalErrorf("too many predeclared types: %d > %d", len(p.typIndex), predeclReserved)) + } + + // Initialize work queue with exported declarations. + scope := pkg.Scope() + for _, name := range scope.Names() { + if ast.IsExported(name) { + p.pushDecl(scope.Lookup(name)) + } + } + + // Loop until no more work. + for !p.declTodo.empty() { + p.doDecl(p.declTodo.popHead()) + } + + // Append indices to data0 section. + dataLen := uint64(p.data0.Len()) + w := p.newWriter() + w.writeIndex(p.declIndex, pkg) + w.flush() + + // Assemble header. + var hdr intWriter + hdr.WriteByte('i') + hdr.uint64(iexportVersion) + hdr.uint64(uint64(p.strings.Len())) + hdr.uint64(dataLen) + + // Flush output. + io.Copy(p.out, &hdr) + io.Copy(p.out, &p.strings) + io.Copy(p.out, &p.data0) + + return p.out.Bytes(), nil +} + +// writeIndex writes out an object index. mainIndex indicates whether +// we're writing out the main index, which is also read by +// non-compiler tools and includes a complete package description +// (i.e., name and height). +func (w *exportWriter) writeIndex(index map[types.Object]uint64, localpkg *types.Package) { + // Build a map from packages to objects from that package. + pkgObjs := map[*types.Package][]types.Object{} + + // For the main index, make sure to include every package that + // we reference, even if we're not exporting (or reexporting) + // any symbols from it. + pkgObjs[localpkg] = nil + for pkg := range w.p.allPkgs { + pkgObjs[pkg] = nil + } + + for obj := range index { + pkgObjs[obj.Pkg()] = append(pkgObjs[obj.Pkg()], obj) + } + + var pkgs []*types.Package + for pkg, objs := range pkgObjs { + pkgs = append(pkgs, pkg) + + sort.Slice(objs, func(i, j int) bool { + return objs[i].Name() < objs[j].Name() + }) + } + + sort.Slice(pkgs, func(i, j int) bool { + return pkgs[i].Path() < pkgs[j].Path() + }) + + w.uint64(uint64(len(pkgs))) + for _, pkg := range pkgs { + w.string(pkg.Path()) + w.string(pkg.Name()) + w.uint64(uint64(0)) // package height is not needed for go/types + + objs := pkgObjs[pkg] + w.uint64(uint64(len(objs))) + for _, obj := range objs { + w.string(obj.Name()) + w.uint64(index[obj]) + } + } +} + +type iexporter struct { + fset *token.FileSet + out *bytes.Buffer + + // allPkgs tracks all packages that have been referenced by + // the export data, so we can ensure to include them in the + // main index. + allPkgs map[*types.Package]bool + + declTodo objQueue + + strings intWriter + stringIndex map[string]uint64 + + data0 intWriter + declIndex map[types.Object]uint64 + typIndex map[types.Type]uint64 +} + +// stringOff returns the offset of s within the string section. +// If not already present, it's added to the end. +func (p *iexporter) stringOff(s string) uint64 { + off, ok := p.stringIndex[s] + if !ok { + off = uint64(p.strings.Len()) + p.stringIndex[s] = off + + p.strings.uint64(uint64(len(s))) + p.strings.WriteString(s) + } + return off +} + +// pushDecl adds n to the declaration work queue, if not already present. +func (p *iexporter) pushDecl(obj types.Object) { + // Package unsafe is known to the compiler and predeclared. 
+ assert(obj.Pkg() != types.Unsafe) + + if _, ok := p.declIndex[obj]; ok { + return + } + + p.declIndex[obj] = ^uint64(0) // mark n present in work queue + p.declTodo.pushTail(obj) +} + +// exportWriter handles writing out individual data section chunks. +type exportWriter struct { + p *iexporter + + data intWriter + currPkg *types.Package + prevFile string + prevLine int64 +} + +func (p *iexporter) doDecl(obj types.Object) { + w := p.newWriter() + w.setPkg(obj.Pkg(), false) + + switch obj := obj.(type) { + case *types.Var: + w.tag('V') + w.pos(obj.Pos()) + w.typ(obj.Type(), obj.Pkg()) + + case *types.Func: + sig, _ := obj.Type().(*types.Signature) + if sig.Recv() != nil { + panic(internalErrorf("unexpected method: %v", sig)) + } + w.tag('F') + w.pos(obj.Pos()) + w.signature(sig) + + case *types.Const: + w.tag('C') + w.pos(obj.Pos()) + w.value(obj.Type(), obj.Val()) + + case *types.TypeName: + if obj.IsAlias() { + w.tag('A') + w.pos(obj.Pos()) + w.typ(obj.Type(), obj.Pkg()) + break + } + + // Defined type. + w.tag('T') + w.pos(obj.Pos()) + + underlying := obj.Type().Underlying() + w.typ(underlying, obj.Pkg()) + + t := obj.Type() + if types.IsInterface(t) { + break + } + + named, ok := t.(*types.Named) + if !ok { + panic(internalErrorf("%s is not a defined type", t)) + } + + n := named.NumMethods() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + m := named.Method(i) + w.pos(m.Pos()) + w.string(m.Name()) + sig, _ := m.Type().(*types.Signature) + w.param(sig.Recv()) + w.signature(sig) + } + + default: + panic(internalErrorf("unexpected object: %v", obj)) + } + + p.declIndex[obj] = w.flush() +} + +func (w *exportWriter) tag(tag byte) { + w.data.WriteByte(tag) +} + +func (w *exportWriter) pos(pos token.Pos) { + p := w.p.fset.Position(pos) + file := p.Filename + line := int64(p.Line) + + // When file is the same as the last position (common case), + // we can save a few bytes by delta encoding just the line + // number. + // + // Note: Because data objects may be read out of order (or not + // at all), we can only apply delta encoding within a single + // object. This is handled implicitly by tracking prevFile and + // prevLine as fields of exportWriter. + + if file == w.prevFile { + delta := line - w.prevLine + w.int64(delta) + if delta == deltaNewFile { + w.int64(-1) + } + } else { + w.int64(deltaNewFile) + w.int64(line) // line >= 0 + w.string(file) + w.prevFile = file + } + w.prevLine = line +} + +func (w *exportWriter) pkg(pkg *types.Package) { + // Ensure any referenced packages are declared in the main index. + w.p.allPkgs[pkg] = true + + w.string(pkg.Path()) +} + +func (w *exportWriter) qualifiedIdent(obj types.Object) { + // Ensure any referenced declarations are written out too. 
+ w.p.pushDecl(obj) + + w.string(obj.Name()) + w.pkg(obj.Pkg()) +} + +func (w *exportWriter) typ(t types.Type, pkg *types.Package) { + w.data.uint64(w.p.typOff(t, pkg)) +} + +func (p *iexporter) newWriter() *exportWriter { + return &exportWriter{p: p} +} + +func (w *exportWriter) flush() uint64 { + off := uint64(w.p.data0.Len()) + io.Copy(&w.p.data0, &w.data) + return off +} + +func (p *iexporter) typOff(t types.Type, pkg *types.Package) uint64 { + off, ok := p.typIndex[t] + if !ok { + w := p.newWriter() + w.doTyp(t, pkg) + off = predeclReserved + w.flush() + p.typIndex[t] = off + } + return off +} + +func (w *exportWriter) startType(k itag) { + w.data.uint64(uint64(k)) +} + +func (w *exportWriter) doTyp(t types.Type, pkg *types.Package) { + switch t := t.(type) { + case *types.Named: + w.startType(definedType) + w.qualifiedIdent(t.Obj()) + + case *types.Pointer: + w.startType(pointerType) + w.typ(t.Elem(), pkg) + + case *types.Slice: + w.startType(sliceType) + w.typ(t.Elem(), pkg) + + case *types.Array: + w.startType(arrayType) + w.uint64(uint64(t.Len())) + w.typ(t.Elem(), pkg) + + case *types.Chan: + w.startType(chanType) + // 1 RecvOnly; 2 SendOnly; 3 SendRecv + var dir uint64 + switch t.Dir() { + case types.RecvOnly: + dir = 1 + case types.SendOnly: + dir = 2 + case types.SendRecv: + dir = 3 + } + w.uint64(dir) + w.typ(t.Elem(), pkg) + + case *types.Map: + w.startType(mapType) + w.typ(t.Key(), pkg) + w.typ(t.Elem(), pkg) + + case *types.Signature: + w.startType(signatureType) + w.setPkg(pkg, true) + w.signature(t) + + case *types.Struct: + w.startType(structType) + w.setPkg(pkg, true) + + n := t.NumFields() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + f := t.Field(i) + w.pos(f.Pos()) + w.string(f.Name()) + w.typ(f.Type(), pkg) + w.bool(f.Embedded()) + w.string(t.Tag(i)) // note (or tag) + } + + case *types.Interface: + w.startType(interfaceType) + w.setPkg(pkg, true) + + n := t.NumEmbeddeds() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + f := t.Embedded(i) + w.pos(f.Obj().Pos()) + w.typ(f.Obj().Type(), f.Obj().Pkg()) + } + + n = t.NumExplicitMethods() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + m := t.ExplicitMethod(i) + w.pos(m.Pos()) + w.string(m.Name()) + sig, _ := m.Type().(*types.Signature) + w.signature(sig) + } + + default: + panic(internalErrorf("unexpected type: %v, %v", t, reflect.TypeOf(t))) + } +} + +func (w *exportWriter) setPkg(pkg *types.Package, write bool) { + if write { + w.pkg(pkg) + } + + w.currPkg = pkg +} + +func (w *exportWriter) signature(sig *types.Signature) { + w.paramList(sig.Params()) + w.paramList(sig.Results()) + if sig.Params().Len() > 0 { + w.bool(sig.Variadic()) + } +} + +func (w *exportWriter) paramList(tup *types.Tuple) { + n := tup.Len() + w.uint64(uint64(n)) + for i := 0; i < n; i++ { + w.param(tup.At(i)) + } +} + +func (w *exportWriter) param(obj types.Object) { + w.pos(obj.Pos()) + w.localIdent(obj) + w.typ(obj.Type(), obj.Pkg()) +} + +func (w *exportWriter) value(typ types.Type, v constant.Value) { + w.typ(typ, nil) + + switch v.Kind() { + case constant.Bool: + w.bool(constant.BoolVal(v)) + case constant.Int: + var i big.Int + if i64, exact := constant.Int64Val(v); exact { + i.SetInt64(i64) + } else if ui64, exact := constant.Uint64Val(v); exact { + i.SetUint64(ui64) + } else { + i.SetString(v.ExactString(), 10) + } + w.mpint(&i, typ) + case constant.Float: + f := constantToFloat(v) + w.mpfloat(f, typ) + case constant.Complex: + w.mpfloat(constantToFloat(constant.Real(v)), typ) + w.mpfloat(constantToFloat(constant.Imag(v)), 
typ) + case constant.String: + w.string(constant.StringVal(v)) + case constant.Unknown: + // package contains type errors + default: + panic(internalErrorf("unexpected value %v (%T)", v, v)) + } +} + +// constantToFloat converts a constant.Value with kind constant.Float to a +// big.Float. +func constantToFloat(x constant.Value) *big.Float { + assert(x.Kind() == constant.Float) + // Use the same floating-point precision (512) as cmd/compile + // (see Mpprec in cmd/compile/internal/gc/mpfloat.go). + const mpprec = 512 + var f big.Float + f.SetPrec(mpprec) + if v, exact := constant.Float64Val(x); exact { + // float64 + f.SetFloat64(v) + } else if num, denom := constant.Num(x), constant.Denom(x); num.Kind() == constant.Int { + // TODO(gri): add big.Rat accessor to constant.Value. + n := valueToRat(num) + d := valueToRat(denom) + f.SetRat(n.Quo(n, d)) + } else { + // Value too large to represent as a fraction => inaccessible. + // TODO(gri): add big.Float accessor to constant.Value. + _, ok := f.SetString(x.ExactString()) + assert(ok) + } + return &f +} + +// mpint exports a multi-precision integer. +// +// For unsigned types, small values are written out as a single +// byte. Larger values are written out as a length-prefixed big-endian +// byte string, where the length prefix is encoded as its complement. +// For example, bytes 0, 1, and 2 directly represent the integer +// values 0, 1, and 2; while bytes 255, 254, and 253 indicate a 1-, +// 2-, and 3-byte big-endian string follow. +// +// Encoding for signed types use the same general approach as for +// unsigned types, except small values use zig-zag encoding and the +// bottom bit of length prefix byte for large values is reserved as a +// sign bit. +// +// The exact boundary between small and large encodings varies +// according to the maximum number of bytes needed to encode a value +// of type typ. As a special case, 8-bit types are always encoded as a +// single byte. +// +// TODO(mdempsky): Is this level of complexity really worthwhile? +func (w *exportWriter) mpint(x *big.Int, typ types.Type) { + basic, ok := typ.Underlying().(*types.Basic) + if !ok { + panic(internalErrorf("unexpected type %v (%T)", typ.Underlying(), typ.Underlying())) + } + + signed, maxBytes := intSize(basic) + + negative := x.Sign() < 0 + if !signed && negative { + panic(internalErrorf("negative unsigned integer; type %v, value %v", typ, x)) + } + + b := x.Bytes() + if len(b) > 0 && b[0] == 0 { + panic(internalErrorf("leading zeros")) + } + if uint(len(b)) > maxBytes { + panic(internalErrorf("bad mpint length: %d > %d (type %v, value %v)", len(b), maxBytes, typ, x)) + } + + maxSmall := 256 - maxBytes + if signed { + maxSmall = 256 - 2*maxBytes + } + if maxBytes == 1 { + maxSmall = 256 + } + + // Check if x can use small value encoding. + if len(b) <= 1 { + var ux uint + if len(b) == 1 { + ux = uint(b[0]) + } + if signed { + ux <<= 1 + if negative { + ux-- + } + } + if ux < maxSmall { + w.data.WriteByte(byte(ux)) + return + } + } + + n := 256 - uint(len(b)) + if signed { + n = 256 - 2*uint(len(b)) + if negative { + n |= 1 + } + } + if n < maxSmall || n >= 256 { + panic(internalErrorf("encoding mistake: %d, %v, %v => %d", len(b), signed, negative, n)) + } + + w.data.WriteByte(byte(n)) + w.data.Write(b) +} + +// mpfloat exports a multi-precision floating point number. +// +// The number's value is decomposed into mantissa × 2**exponent, where +// mantissa is an integer. 
The value is written out as mantissa (as a +// multi-precision integer) and then the exponent, except exponent is +// omitted if mantissa is zero. +func (w *exportWriter) mpfloat(f *big.Float, typ types.Type) { + if f.IsInf() { + panic("infinite constant") + } + + // Break into f = mant × 2**exp, with 0.5 <= mant < 1. + var mant big.Float + exp := int64(f.MantExp(&mant)) + + // Scale so that mant is an integer. + prec := mant.MinPrec() + mant.SetMantExp(&mant, int(prec)) + exp -= int64(prec) + + manti, acc := mant.Int(nil) + if acc != big.Exact { + panic(internalErrorf("mantissa scaling failed for %f (%s)", f, acc)) + } + w.mpint(manti, typ) + if manti.Sign() != 0 { + w.int64(exp) + } +} + +func (w *exportWriter) bool(b bool) bool { + var x uint64 + if b { + x = 1 + } + w.uint64(x) + return b +} + +func (w *exportWriter) int64(x int64) { w.data.int64(x) } +func (w *exportWriter) uint64(x uint64) { w.data.uint64(x) } +func (w *exportWriter) string(s string) { w.uint64(w.p.stringOff(s)) } + +func (w *exportWriter) localIdent(obj types.Object) { + // Anonymous parameters. + if obj == nil { + w.string("") + return + } + + name := obj.Name() + if name == "_" { + w.string("_") + return + } + + w.string(name) +} + +type intWriter struct { + bytes.Buffer +} + +func (w *intWriter) int64(x int64) { + var buf [binary.MaxVarintLen64]byte + n := binary.PutVarint(buf[:], x) + w.Write(buf[:n]) +} + +func (w *intWriter) uint64(x uint64) { + var buf [binary.MaxVarintLen64]byte + n := binary.PutUvarint(buf[:], x) + w.Write(buf[:n]) +} + +func assert(cond bool) { + if !cond { + panic("internal error: assertion failed") + } +} + +// The below is copied from go/src/cmd/compile/internal/gc/syntax.go. + +// objQueue is a FIFO queue of types.Object. The zero value of objQueue is +// a ready-to-use empty queue. +type objQueue struct { + ring []types.Object + head, tail int +} + +// empty returns true if q contains no Nodes. +func (q *objQueue) empty() bool { + return q.head == q.tail +} + +// pushTail appends n to the tail of the queue. +func (q *objQueue) pushTail(obj types.Object) { + if len(q.ring) == 0 { + q.ring = make([]types.Object, 16) + } else if q.head+len(q.ring) == q.tail { + // Grow the ring. + nring := make([]types.Object, len(q.ring)*2) + // Copy the old elements. + part := q.ring[q.head%len(q.ring):] + if q.tail-q.head <= len(part) { + part = part[:q.tail-q.head] + copy(nring, part) + } else { + pos := copy(nring, part) + copy(nring[pos:], q.ring[:q.tail%len(q.ring)]) + } + q.ring, q.head, q.tail = nring, 0, q.tail-q.head + } + + q.ring[q.tail%len(q.ring)] = obj + q.tail++ +} + +// popHead pops a node from the head of the queue. It panics if q is empty. +func (q *objQueue) popHead() types.Object { + if q.empty() { + panic("dequeue empty") + } + obj := q.ring[q.head%len(q.ring)] + q.head++ + return obj +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go new file mode 100644 index 000000000..3cb7ae5b9 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go @@ -0,0 +1,606 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Indexed package import. +// See cmd/compile/internal/gc/iexport.go for the export data format. + +// This file is a copy of $GOROOT/src/go/internal/gcimporter/iimport.go. 
+ +package gcimporter + +import ( + "bytes" + "encoding/binary" + "fmt" + "go/constant" + "go/token" + "go/types" + "io" + "sort" +) + +type intReader struct { + *bytes.Reader + path string +} + +func (r *intReader) int64() int64 { + i, err := binary.ReadVarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +func (r *intReader) uint64() uint64 { + i, err := binary.ReadUvarint(r.Reader) + if err != nil { + errorf("import %q: read varint error: %v", r.path, err) + } + return i +} + +const predeclReserved = 32 + +type itag uint64 + +const ( + // Types + definedType itag = iota + pointerType + sliceType + arrayType + chanType + mapType + signatureType + structType + interfaceType +) + +// IImportData imports a package from the serialized package data +// and returns the number of bytes consumed and a reference to the package. +// If the export data version is not recognized or the format is otherwise +// compromised, an error is returned. +func IImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) { + const currentVersion = 0 + version := -1 + defer func() { + if e := recover(); e != nil { + if version > currentVersion { + err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e) + } else { + err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e) + } + } + }() + + r := &intReader{bytes.NewReader(data), path} + + version = int(r.uint64()) + switch version { + case currentVersion: + default: + errorf("unknown iexport format version %d", version) + } + + sLen := int64(r.uint64()) + dLen := int64(r.uint64()) + + whence, _ := r.Seek(0, io.SeekCurrent) + stringData := data[whence : whence+sLen] + declData := data[whence+sLen : whence+sLen+dLen] + r.Seek(sLen+dLen, io.SeekCurrent) + + p := iimporter{ + ipath: path, + + stringData: stringData, + stringCache: make(map[uint64]string), + pkgCache: make(map[uint64]*types.Package), + + declData: declData, + pkgIndex: make(map[*types.Package]map[string]uint64), + typCache: make(map[uint64]types.Type), + + fake: fakeFileSet{ + fset: fset, + files: make(map[string]*token.File), + }, + } + + for i, pt := range predeclared() { + p.typCache[uint64(i)] = pt + } + + pkgList := make([]*types.Package, r.uint64()) + for i := range pkgList { + pkgPathOff := r.uint64() + pkgPath := p.stringAt(pkgPathOff) + pkgName := p.stringAt(r.uint64()) + _ = r.uint64() // package height; unused by go/types + + if pkgPath == "" { + pkgPath = path + } + pkg := imports[pkgPath] + if pkg == nil { + pkg = types.NewPackage(pkgPath, pkgName) + imports[pkgPath] = pkg + } else if pkg.Name() != pkgName { + errorf("conflicting names %s and %s for package %q", pkg.Name(), pkgName, path) + } + + p.pkgCache[pkgPathOff] = pkg + + nameIndex := make(map[string]uint64) + for nSyms := r.uint64(); nSyms > 0; nSyms-- { + name := p.stringAt(r.uint64()) + nameIndex[name] = r.uint64() + } + + p.pkgIndex[pkg] = nameIndex + pkgList[i] = pkg + } + var localpkg *types.Package + for _, pkg := range pkgList { + if pkg.Path() == path { + localpkg = pkg + } + } + + names := make([]string, 0, len(p.pkgIndex[localpkg])) + for name := range p.pkgIndex[localpkg] { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + p.doDecl(localpkg, name) + } + + for _, typ := range p.interfaceList { + typ.Complete() + } + + // record all referenced packages as imports + list := 
append(([]*types.Package)(nil), pkgList[1:]...) + sort.Sort(byPath(list)) + localpkg.SetImports(list) + + // package was imported completely and without errors + localpkg.MarkComplete() + + consumed, _ := r.Seek(0, io.SeekCurrent) + return int(consumed), localpkg, nil +} + +type iimporter struct { + ipath string + + stringData []byte + stringCache map[uint64]string + pkgCache map[uint64]*types.Package + + declData []byte + pkgIndex map[*types.Package]map[string]uint64 + typCache map[uint64]types.Type + + fake fakeFileSet + interfaceList []*types.Interface +} + +func (p *iimporter) doDecl(pkg *types.Package, name string) { + // See if we've already imported this declaration. + if obj := pkg.Scope().Lookup(name); obj != nil { + return + } + + off, ok := p.pkgIndex[pkg][name] + if !ok { + errorf("%v.%v not in index", pkg, name) + } + + r := &importReader{p: p, currPkg: pkg} + r.declReader.Reset(p.declData[off:]) + + r.obj(name) +} + +func (p *iimporter) stringAt(off uint64) string { + if s, ok := p.stringCache[off]; ok { + return s + } + + slen, n := binary.Uvarint(p.stringData[off:]) + if n <= 0 { + errorf("varint failed") + } + spos := off + uint64(n) + s := string(p.stringData[spos : spos+slen]) + p.stringCache[off] = s + return s +} + +func (p *iimporter) pkgAt(off uint64) *types.Package { + if pkg, ok := p.pkgCache[off]; ok { + return pkg + } + path := p.stringAt(off) + errorf("missing package %q in %q", path, p.ipath) + return nil +} + +func (p *iimporter) typAt(off uint64, base *types.Named) types.Type { + if t, ok := p.typCache[off]; ok && (base == nil || !isInterface(t)) { + return t + } + + if off < predeclReserved { + errorf("predeclared type missing from cache: %v", off) + } + + r := &importReader{p: p} + r.declReader.Reset(p.declData[off-predeclReserved:]) + t := r.doType(base) + + if base == nil || !isInterface(t) { + p.typCache[off] = t + } + return t +} + +type importReader struct { + p *iimporter + declReader bytes.Reader + currPkg *types.Package + prevFile string + prevLine int64 +} + +func (r *importReader) obj(name string) { + tag := r.byte() + pos := r.pos() + + switch tag { + case 'A': + typ := r.typ() + + r.declare(types.NewTypeName(pos, r.currPkg, name, typ)) + + case 'C': + typ, val := r.value() + + r.declare(types.NewConst(pos, r.currPkg, name, typ, val)) + + case 'F': + sig := r.signature(nil) + + r.declare(types.NewFunc(pos, r.currPkg, name, sig)) + + case 'T': + // Types can be recursive. We need to setup a stub + // declaration before recursing. 
+ obj := types.NewTypeName(pos, r.currPkg, name, nil) + named := types.NewNamed(obj, nil, nil) + r.declare(obj) + + underlying := r.p.typAt(r.uint64(), named).Underlying() + named.SetUnderlying(underlying) + + if !isInterface(underlying) { + for n := r.uint64(); n > 0; n-- { + mpos := r.pos() + mname := r.ident() + recv := r.param() + msig := r.signature(recv) + + named.AddMethod(types.NewFunc(mpos, r.currPkg, mname, msig)) + } + } + + case 'V': + typ := r.typ() + + r.declare(types.NewVar(pos, r.currPkg, name, typ)) + + default: + errorf("unexpected tag: %v", tag) + } +} + +func (r *importReader) declare(obj types.Object) { + obj.Pkg().Scope().Insert(obj) +} + +func (r *importReader) value() (typ types.Type, val constant.Value) { + typ = r.typ() + + switch b := typ.Underlying().(*types.Basic); b.Info() & types.IsConstType { + case types.IsBoolean: + val = constant.MakeBool(r.bool()) + + case types.IsString: + val = constant.MakeString(r.string()) + + case types.IsInteger: + val = r.mpint(b) + + case types.IsFloat: + val = r.mpfloat(b) + + case types.IsComplex: + re := r.mpfloat(b) + im := r.mpfloat(b) + val = constant.BinaryOp(re, token.ADD, constant.MakeImag(im)) + + default: + if b.Kind() == types.Invalid { + val = constant.MakeUnknown() + return + } + errorf("unexpected type %v", typ) // panics + panic("unreachable") + } + + return +} + +func intSize(b *types.Basic) (signed bool, maxBytes uint) { + if (b.Info() & types.IsUntyped) != 0 { + return true, 64 + } + + switch b.Kind() { + case types.Float32, types.Complex64: + return true, 3 + case types.Float64, types.Complex128: + return true, 7 + } + + signed = (b.Info() & types.IsUnsigned) == 0 + switch b.Kind() { + case types.Int8, types.Uint8: + maxBytes = 1 + case types.Int16, types.Uint16: + maxBytes = 2 + case types.Int32, types.Uint32: + maxBytes = 4 + default: + maxBytes = 8 + } + + return +} + +func (r *importReader) mpint(b *types.Basic) constant.Value { + signed, maxBytes := intSize(b) + + maxSmall := 256 - maxBytes + if signed { + maxSmall = 256 - 2*maxBytes + } + if maxBytes == 1 { + maxSmall = 256 + } + + n, _ := r.declReader.ReadByte() + if uint(n) < maxSmall { + v := int64(n) + if signed { + v >>= 1 + if n&1 != 0 { + v = ^v + } + } + return constant.MakeInt64(v) + } + + v := -n + if signed { + v = -(n &^ 1) >> 1 + } + if v < 1 || uint(v) > maxBytes { + errorf("weird decoding: %v, %v => %v", n, signed, v) + } + + buf := make([]byte, v) + io.ReadFull(&r.declReader, buf) + + // convert to little endian + // TODO(gri) go/constant should have a more direct conversion function + // (e.g., once it supports a big.Float based implementation) + for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 { + buf[i], buf[j] = buf[j], buf[i] + } + + x := constant.MakeFromBytes(buf) + if signed && n&1 != 0 { + x = constant.UnaryOp(token.SUB, x, 0) + } + return x +} + +func (r *importReader) mpfloat(b *types.Basic) constant.Value { + x := r.mpint(b) + if constant.Sign(x) == 0 { + return x + } + + exp := r.int64() + switch { + case exp > 0: + x = constant.Shift(x, token.SHL, uint(exp)) + case exp < 0: + d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp)) + x = constant.BinaryOp(x, token.QUO, d) + } + return x +} + +func (r *importReader) ident() string { + return r.string() +} + +func (r *importReader) qualifiedIdent() (*types.Package, string) { + name := r.string() + pkg := r.pkg() + return pkg, name +} + +func (r *importReader) pos() token.Pos { + delta := r.int64() + if delta != deltaNewFile { + r.prevLine += delta + } else if l := 
r.int64(); l == -1 { + r.prevLine += deltaNewFile + } else { + r.prevFile = r.string() + r.prevLine = l + } + + if r.prevFile == "" && r.prevLine == 0 { + return token.NoPos + } + + return r.p.fake.pos(r.prevFile, int(r.prevLine)) +} + +func (r *importReader) typ() types.Type { + return r.p.typAt(r.uint64(), nil) +} + +func isInterface(t types.Type) bool { + _, ok := t.(*types.Interface) + return ok +} + +func (r *importReader) pkg() *types.Package { return r.p.pkgAt(r.uint64()) } +func (r *importReader) string() string { return r.p.stringAt(r.uint64()) } + +func (r *importReader) doType(base *types.Named) types.Type { + switch k := r.kind(); k { + default: + errorf("unexpected kind tag in %q: %v", r.p.ipath, k) + return nil + + case definedType: + pkg, name := r.qualifiedIdent() + r.p.doDecl(pkg, name) + return pkg.Scope().Lookup(name).(*types.TypeName).Type() + case pointerType: + return types.NewPointer(r.typ()) + case sliceType: + return types.NewSlice(r.typ()) + case arrayType: + n := r.uint64() + return types.NewArray(r.typ(), int64(n)) + case chanType: + dir := chanDir(int(r.uint64())) + return types.NewChan(dir, r.typ()) + case mapType: + return types.NewMap(r.typ(), r.typ()) + case signatureType: + r.currPkg = r.pkg() + return r.signature(nil) + + case structType: + r.currPkg = r.pkg() + + fields := make([]*types.Var, r.uint64()) + tags := make([]string, len(fields)) + for i := range fields { + fpos := r.pos() + fname := r.ident() + ftyp := r.typ() + emb := r.bool() + tag := r.string() + + fields[i] = types.NewField(fpos, r.currPkg, fname, ftyp, emb) + tags[i] = tag + } + return types.NewStruct(fields, tags) + + case interfaceType: + r.currPkg = r.pkg() + + embeddeds := make([]types.Type, r.uint64()) + for i := range embeddeds { + _ = r.pos() + embeddeds[i] = r.typ() + } + + methods := make([]*types.Func, r.uint64()) + for i := range methods { + mpos := r.pos() + mname := r.ident() + + // TODO(mdempsky): Matches bimport.go, but I + // don't agree with this. + var recv *types.Var + if base != nil { + recv = types.NewVar(token.NoPos, r.currPkg, "", base) + } + + msig := r.signature(recv) + methods[i] = types.NewFunc(mpos, r.currPkg, mname, msig) + } + + typ := newInterface(methods, embeddeds) + r.p.interfaceList = append(r.p.interfaceList, typ) + return typ + } +} + +func (r *importReader) kind() itag { + return itag(r.uint64()) +} + +func (r *importReader) signature(recv *types.Var) *types.Signature { + params := r.paramList() + results := r.paramList() + variadic := params.Len() > 0 && r.bool() + return types.NewSignature(recv, params, results, variadic) +} + +func (r *importReader) paramList() *types.Tuple { + xs := make([]*types.Var, r.uint64()) + for i := range xs { + xs[i] = r.param() + } + return types.NewTuple(xs...) 
+} + +func (r *importReader) param() *types.Var { + pos := r.pos() + name := r.ident() + typ := r.typ() + return types.NewParam(pos, r.currPkg, name, typ) +} + +func (r *importReader) bool() bool { + return r.uint64() != 0 +} + +func (r *importReader) int64() int64 { + n, err := binary.ReadVarint(&r.declReader) + if err != nil { + errorf("readVarint: %v", err) + } + return n +} + +func (r *importReader) uint64() uint64 { + n, err := binary.ReadUvarint(&r.declReader) + if err != nil { + errorf("readUvarint: %v", err) + } + return n +} + +func (r *importReader) byte() byte { + x, err := r.declReader.ReadByte() + if err != nil { + errorf("declReader.ReadByte: %v", err) + } + return x +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go new file mode 100644 index 000000000..463f25227 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface10.go @@ -0,0 +1,21 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build !go1.11 + +package gcimporter + +import "go/types" + +func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { + named := make([]*types.Named, len(embeddeds)) + for i, e := range embeddeds { + var ok bool + named[i], ok = e.(*types.Named) + if !ok { + panic("embedding of non-defined interfaces in interfaces is not supported before Go 1.11") + } + } + return types.NewInterface(methods, named) +} diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go new file mode 100644 index 000000000..ab28b95cb --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/gcimporter/newInterface11.go @@ -0,0 +1,13 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build go1.11 + +package gcimporter + +import "go/types" + +func newInterface(methods []*types.Func, embeddeds []types.Type) *types.Interface { + return types.NewInterfaceType(methods, embeddeds) +} diff --git a/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go new file mode 100644 index 000000000..fdc7da056 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go @@ -0,0 +1,160 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package packagesdriver fetches type sizes for go/packages and go/analysis. +package packagesdriver + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "go/types" + "log" + "os" + "os/exec" + "strings" + "time" +) + +var debug = false + +// GetSizes returns the sizes used by the underlying driver with the given parameters. +func GetSizes(ctx context.Context, buildFlags, env []string, dir string, usesExportData bool) (types.Sizes, error) { + // TODO(matloob): Clean this up. This code is mostly a copy of packages.findExternalDriver. 
+ const toolPrefix = "GOPACKAGESDRIVER=" + tool := "" + for _, env := range env { + if val := strings.TrimPrefix(env, toolPrefix); val != env { + tool = val + } + } + + if tool == "" { + var err error + tool, err = exec.LookPath("gopackagesdriver") + if err != nil { + // We did not find the driver, so use "go list". + tool = "off" + } + } + + if tool == "off" { + return GetSizesGolist(ctx, buildFlags, env, dir, usesExportData) + } + + req, err := json.Marshal(struct { + Command string `json:"command"` + Env []string `json:"env"` + BuildFlags []string `json:"build_flags"` + }{ + Command: "sizes", + Env: env, + BuildFlags: buildFlags, + }) + if err != nil { + return nil, fmt.Errorf("failed to encode message to driver tool: %v", err) + } + + buf := new(bytes.Buffer) + cmd := exec.CommandContext(ctx, tool) + cmd.Dir = dir + cmd.Env = env + cmd.Stdin = bytes.NewReader(req) + cmd.Stdout = buf + cmd.Stderr = new(bytes.Buffer) + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr) + } + var response struct { + // Sizes, if not nil, is the types.Sizes to use when type checking. + Sizes *types.StdSizes + } + if err := json.Unmarshal(buf.Bytes(), &response); err != nil { + return nil, err + } + return response.Sizes, nil +} + +func GetSizesGolist(ctx context.Context, buildFlags, env []string, dir string, usesExportData bool) (types.Sizes, error) { + args := []string{"list", "-f", "{{context.GOARCH}} {{context.Compiler}}"} + args = append(args, buildFlags...) + args = append(args, "--", "unsafe") + stdout, err := InvokeGo(ctx, env, dir, usesExportData, args...) + if err != nil { + return nil, err + } + fields := strings.Fields(stdout.String()) + if len(fields) < 2 { + return nil, fmt.Errorf("could not determine GOARCH and Go compiler") + } + goarch := fields[0] + compiler := fields[1] + return types.SizesFor(compiler, goarch), nil +} + +// InvokeGo returns the stdout of a go command invocation. +func InvokeGo(ctx context.Context, env []string, dir string, usesExportData bool, args ...string) (*bytes.Buffer, error) { + if debug { + defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(env, args...)) }(time.Now()) + } + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + cmd := exec.CommandContext(ctx, "go", args...) + // On darwin the cwd gets resolved to the real path, which breaks anything that + // expects the working directory to keep the original path, including the + // go command when dealing with modules. + // The Go stdlib has a special feature where if the cwd and the PWD are the + // same node then it trusts the PWD, so by setting it in the env for the child + // process we fix up all the paths returned by the go command. + cmd.Env = append(append([]string{}, env...), "PWD="+dir) + cmd.Dir = dir + cmd.Stdout = stdout + cmd.Stderr = stderr + if err := cmd.Run(); err != nil { + exitErr, ok := err.(*exec.ExitError) + if !ok { + // Catastrophic error: + // - executable not found + // - context cancellation + return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err) + } + + // Export mode entails a build. + // If that build fails, errors appear on stderr + // (despite the -e flag) and the Export field is blank. + // Do not fail in that case. + if !usesExportData { + return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr) + } + } + + // As of writing, go list -export prints some non-fatal compilation + // errors to stderr, even with -e set. 
We would prefer that it put + // them in the Package.Error JSON (see https://golang.org/issue/26319). + // In the meantime, there's nowhere good to put them, but they can + // be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS + // is set. + if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" { + fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(env, args...), stderr) + } + + // debugging + if false { + fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(env, args...), stdout) + } + + return stdout, nil +} + +func cmdDebugStr(envlist []string, args ...string) string { + env := make(map[string]string) + for _, kv := range envlist { + split := strings.Split(kv, "=") + k, v := split[0], split[1] + env[k] = v + } + + return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["PWD"], args) +} diff --git a/vendor/golang.org/x/tools/go/packages/doc.go b/vendor/golang.org/x/tools/go/packages/doc.go new file mode 100644 index 000000000..3799f8ed8 --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/doc.go @@ -0,0 +1,222 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +/* +Package packages loads Go packages for inspection and analysis. + +The Load function takes as input a list of patterns and return a list of Package +structs describing individual packages matched by those patterns. +The LoadMode controls the amount of detail in the loaded packages. + +Load passes most patterns directly to the underlying build tool, +but all patterns with the prefix "query=", where query is a +non-empty string of letters from [a-z], are reserved and may be +interpreted as query operators. + +Two query operators are currently supported: "file" and "pattern". + +The query "file=path/to/file.go" matches the package or packages enclosing +the Go source file path/to/file.go. For example "file=~/go/src/fmt/print.go" +might return the packages "fmt" and "fmt [fmt.test]". + +The query "pattern=string" causes "string" to be passed directly to +the underlying build tool. In most cases this is unnecessary, +but an application can use Load("pattern=" + x) as an escaping mechanism +to ensure that x is not interpreted as a query operator if it contains '='. + +All other query operators are reserved for future use and currently +cause Load to report an error. + +The Package struct provides basic information about the package, including + + - ID, a unique identifier for the package in the returned set; + - GoFiles, the names of the package's Go source files; + - Imports, a map from source import strings to the Packages they name; + - Types, the type information for the package's exported symbols; + - Syntax, the parsed syntax trees for the package's source code; and + - TypeInfo, the result of a complete type-check of the package syntax trees. + +(See the documentation for type Package for the complete list of fields +and more detailed descriptions.) + +For example, + + Load(nil, "bytes", "unicode...") + +returns four Package structs describing the standard library packages +bytes, unicode, unicode/utf16, and unicode/utf8. Note that one pattern +can match multiple packages and that a package might be matched by +multiple patterns: in general it is not possible to determine which +packages correspond to which patterns. 
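
A minimal usage sketch (not part of the vendored file) of the Load/Config/Package API described above. It assumes only identifiers that appear in this vendor drop; the patterns are the illustrative ones from the preceding paragraph.

    package main

    import (
        "fmt"
        "log"

        "golang.org/x/tools/go/packages"
    )

    func main() {
        // Request only names and file lists; richer modes (NeedTypes, NeedSyntax, ...)
        // cost proportionally more work in the driver and the loader.
        cfg := &packages.Config{Mode: packages.NeedName | packages.NeedFiles}
        pkgs, err := packages.Load(cfg, "bytes", "unicode...")
        if err != nil {
            log.Fatal(err) // invalid pattern or a failed driver invocation
        }
        for _, pkg := range pkgs {
            // Per-package problems are recorded in pkg.Errors, not in Load's error.
            fmt.Println(pkg.ID, len(pkg.GoFiles), "Go files")
        }
    }
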
+ +Note that the list returned by Load contains only the packages matched +by the patterns. Their dependencies can be found by walking the import +graph using the Imports fields. + +The Load function can be configured by passing a pointer to a Config as +the first argument. A nil Config is equivalent to the zero Config, which +causes Load to run in LoadFiles mode, collecting minimal information. +See the documentation for type Config for details. + +As noted earlier, the Config.Mode controls the amount of detail +reported about the loaded packages, with each mode returning all the data of the +previous mode with some extra added. See the documentation for type LoadMode +for details. + +Most tools should pass their command-line arguments (after any flags) +uninterpreted to the loader, so that the loader can interpret them +according to the conventions of the underlying build system. +See the Example function for typical usage. + +*/ +package packages // import "golang.org/x/tools/go/packages" + +/* + +Motivation and design considerations + +The new package's design solves problems addressed by two existing +packages: go/build, which locates and describes packages, and +golang.org/x/tools/go/loader, which loads, parses and type-checks them. +The go/build.Package structure encodes too much of the 'go build' way +of organizing projects, leaving us in need of a data type that describes a +package of Go source code independent of the underlying build system. +We wanted something that works equally well with go build and vgo, and +also other build systems such as Bazel and Blaze, making it possible to +construct analysis tools that work in all these environments. +Tools such as errcheck and staticcheck were essentially unavailable to +the Go community at Google, and some of Google's internal tools for Go +are unavailable externally. +This new package provides a uniform way to obtain package metadata by +querying each of these build systems, optionally supporting their +preferred command-line notations for packages, so that tools integrate +neatly with users' build environments. The Metadata query function +executes an external query tool appropriate to the current workspace. + +Loading packages always returns the complete import graph "all the way down", +even if all you want is information about a single package, because the query +mechanisms of all the build systems we currently support ({go,vgo} list, and +blaze/bazel aspect-based query) cannot provide detailed information +about one package without visiting all its dependencies too, so there is +no additional asymptotic cost to providing transitive information. +(This property might not be true of a hypothetical 5th build system.) + +In calls to TypeCheck, all initial packages, and any package that +transitively depends on one of them, must be loaded from source. +Consider A->B->C->D->E: if A,C are initial, A,B,C must be loaded from +source; D may be loaded from export data, and E may not be loaded at all +(though it's possible that D's export data mentions it, so a +types.Package may be created for it and exposed.) + +The old loader had a feature to suppress type-checking of function +bodies on a per-package basis, primarily intended to reduce the work of +obtaining type information for imported packages. Now that imports are +satisfied by export data, the optimization no longer seems necessary. + +Despite some early attempts, the old loader did not exploit export data, +instead always using the equivalent of WholeProgram mode. 
This was due +to the complexity of mixing source and export data packages (now +resolved by the upward traversal mentioned above), and because export data +files were nearly always missing or stale. Now that 'go build' supports +caching, all the underlying build systems can guarantee to produce +export data in a reasonable (amortized) time. + +Test "main" packages synthesized by the build system are now reported as +first-class packages, avoiding the need for clients (such as go/ssa) to +reinvent this generation logic. + +One way in which go/packages is simpler than the old loader is in its +treatment of in-package tests. In-package tests are packages that +consist of all the files of the library under test, plus the test files. +The old loader constructed in-package tests by a two-phase process of +mutation called "augmentation": first it would construct and type check +all the ordinary library packages and type-check the packages that +depend on them; then it would add more (test) files to the package and +type-check again. This two-phase approach had four major problems: +1) in processing the tests, the loader modified the library package, + leaving no way for a client application to see both the test + package and the library package; one would mutate into the other. +2) because test files can declare additional methods on types defined in + the library portion of the package, the dispatch of method calls in + the library portion was affected by the presence of the test files. + This should have been a clue that the packages were logically + different. +3) this model of "augmentation" assumed at most one in-package test + per library package, which is true of projects using 'go build', + but not other build systems. +4) because of the two-phase nature of test processing, all packages that + import the library package had to be processed before augmentation, + forcing a "one-shot" API and preventing the client from calling Load + in several times in sequence as is now possible in WholeProgram mode. + (TypeCheck mode has a similar one-shot restriction for a different reason.) + +Early drafts of this package supported "multi-shot" operation. +Although it allowed clients to make a sequence of calls (or concurrent +calls) to Load, building up the graph of Packages incrementally, +it was of marginal value: it complicated the API +(since it allowed some options to vary across calls but not others), +it complicated the implementation, +it cannot be made to work in Types mode, as explained above, +and it was less efficient than making one combined call (when this is possible). +Among the clients we have inspected, none made multiple calls to load +but could not be easily and satisfactorily modified to make only a single call. +However, applications changes may be required. +For example, the ssadump command loads the user-specified packages +and in addition the runtime package. It is tempting to simply append +"runtime" to the user-provided list, but that does not work if the user +specified an ad-hoc package such as [a.go b.go]. +Instead, ssadump no longer requests the runtime package, +but seeks it among the dependencies of the user-specified packages, +and emits an error if it is not found. + +Overlays: The Overlay field in the Config allows providing alternate contents +for Go source files, by providing a mapping from file path to contents. +go/packages will pull in new imports added in overlay files when go/packages +is run in LoadImports mode or greater. 
+Overlay support for the go list driver isn't complete yet: if the file doesn't +exist on disk, it will only be recognized in an overlay if it is a non-test file +and the package would be reported even without the overlay. + +Questions & Tasks + +- Add GOARCH/GOOS? + They are not portable concepts, but could be made portable. + Our goal has been to allow users to express themselves using the conventions + of the underlying build system: if the build system honors GOARCH + during a build and during a metadata query, then so should + applications built atop that query mechanism. + Conversely, if the target architecture of the build is determined by + command-line flags, the application can pass the relevant + flags through to the build system using a command such as: + myapp -query_flag="--cpu=amd64" -query_flag="--os=darwin" + However, this approach is low-level, unwieldy, and non-portable. + GOOS and GOARCH seem important enough to warrant a dedicated option. + +- How should we handle partial failures such as a mixture of good and + malformed patterns, existing and non-existent packages, successful and + failed builds, import failures, import cycles, and so on, in a call to + Load? + +- Support bazel, blaze, and go1.10 list, not just go1.11 list. + +- Handle (and test) various partial success cases, e.g. + a mixture of good packages and: + invalid patterns + nonexistent packages + empty packages + packages with malformed package or import declarations + unreadable files + import cycles + other parse errors + type errors + Make sure we record errors at the correct place in the graph. + +- Missing packages among initial arguments are not reported. + Return bogus packages for them, like golist does. + +- "undeclared name" errors (for example) are reported out of source file + order. I suspect this is due to the breadth-first resolution now used + by go/types. Is that a bug? Discuss with gri. + +*/ diff --git a/vendor/golang.org/x/tools/go/packages/external.go b/vendor/golang.org/x/tools/go/packages/external.go new file mode 100644 index 000000000..22ff769ef --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/external.go @@ -0,0 +1,79 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// This file enables an external tool to intercept package requests. +// If the tool is present then its results are used in preference to +// the go list command. + +package packages + +import ( + "bytes" + "encoding/json" + "fmt" + "os/exec" + "strings" +) + +// Driver +type driverRequest struct { + Command string `json:"command"` + Mode LoadMode `json:"mode"` + Env []string `json:"env"` + BuildFlags []string `json:"build_flags"` + Tests bool `json:"tests"` + Overlay map[string][]byte `json:"overlay"` +} + +// findExternalDriver returns the file path of a tool that supplies +// the build system package structure, or "" if not found." +// If GOPACKAGESDRIVER is set in the environment findExternalTool returns its +// value, otherwise it searches for a binary named gopackagesdriver on the PATH. 
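
Not part of the vendored file: a sketch of the other side of the protocol described above, i.e. what a conforming gopackagesdriver binary could look like. It assumes only the driverRequest/driverResponse shapes included in this diff; package patterns arrive as command-line arguments, a "sizes" query arrives with the Command field set (see packagesdriver.GetSizes earlier in this diff), and a real driver would answer from its own build system rather than returning an empty response.

    package main

    import (
        "encoding/json"
        "log"
        "os"
    )

    func main() {
        // Subset of driverRequest; the full request also carries Mode, Tests, and Overlay.
        var req struct {
            Command    string   `json:"command"`
            Env        []string `json:"env"`
            BuildFlags []string `json:"build_flags"`
        }
        if err := json.NewDecoder(os.Stdin).Decode(&req); err != nil {
            log.Fatal(err)
        }

        // Shape mirrors driverResponse (Roots + Packages); empty but well-formed here.
        resp := struct {
            Roots    []string      `json:"Roots,omitempty"`
            Packages []interface{} `json:"Packages"`
        }{}
        if err := json.NewEncoder(os.Stdout).Encode(&resp); err != nil {
            log.Fatal(err)
        }
    }
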
+func findExternalDriver(cfg *Config) driver { + const toolPrefix = "GOPACKAGESDRIVER=" + tool := "" + for _, env := range cfg.Env { + if val := strings.TrimPrefix(env, toolPrefix); val != env { + tool = val + } + } + if tool != "" && tool == "off" { + return nil + } + if tool == "" { + var err error + tool, err = exec.LookPath("gopackagesdriver") + if err != nil { + return nil + } + } + return func(cfg *Config, words ...string) (*driverResponse, error) { + req, err := json.Marshal(driverRequest{ + Mode: cfg.Mode, + Env: cfg.Env, + BuildFlags: cfg.BuildFlags, + Tests: cfg.Tests, + Overlay: cfg.Overlay, + }) + if err != nil { + return nil, fmt.Errorf("failed to encode message to driver tool: %v", err) + } + + buf := new(bytes.Buffer) + cmd := exec.CommandContext(cfg.Context, tool, words...) + cmd.Dir = cfg.Dir + cmd.Env = cfg.Env + cmd.Stdin = bytes.NewReader(req) + cmd.Stdout = buf + cmd.Stderr = new(bytes.Buffer) + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("%v: %v: %s", tool, err, cmd.Stderr) + } + var response driverResponse + if err := json.Unmarshal(buf.Bytes(), &response); err != nil { + return nil, err + } + return &response, nil + } +} diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go new file mode 100644 index 000000000..04c6396d5 --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/golist.go @@ -0,0 +1,866 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packages + +import ( + "bytes" + "encoding/json" + "fmt" + "go/types" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "reflect" + "regexp" + "strconv" + "strings" + "sync" + "time" + + "golang.org/x/tools/go/internal/packagesdriver" + "golang.org/x/tools/internal/gopathwalk" + "golang.org/x/tools/internal/semver" +) + +// debug controls verbose logging. +var debug, _ = strconv.ParseBool(os.Getenv("GOPACKAGESDEBUG")) + +// A goTooOldError reports that the go command +// found by exec.LookPath is too old to use the new go list behavior. +type goTooOldError struct { + error +} + +// responseDeduper wraps a driverResponse, deduplicating its contents. +type responseDeduper struct { + seenRoots map[string]bool + seenPackages map[string]*Package + dr *driverResponse +} + +// init fills in r with a driverResponse. +func (r *responseDeduper) init(dr *driverResponse) { + r.dr = dr + r.seenRoots = map[string]bool{} + r.seenPackages = map[string]*Package{} + for _, pkg := range dr.Packages { + r.seenPackages[pkg.ID] = pkg + } + for _, root := range dr.Roots { + r.seenRoots[root] = true + } +} + +func (r *responseDeduper) addPackage(p *Package) { + if r.seenPackages[p.ID] != nil { + return + } + r.seenPackages[p.ID] = p + r.dr.Packages = append(r.dr.Packages, p) +} + +func (r *responseDeduper) addRoot(id string) { + if r.seenRoots[id] { + return + } + r.seenRoots[id] = true + r.dr.Roots = append(r.dr.Roots, id) +} + +// goListDriver uses the go list command to interpret the patterns and produce +// the build system package structure. +// See driver for more details. 
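
Not part of the vendored file: the go list driver defined next is also what applies Config.Overlay (via processGolistOverlay in golist_overlay.go later in this diff). A hedged sketch of the caller's side of that mechanism, combining the Overlay field documented in packages.go with the "file=" query from doc.go; the path and file contents are hypothetical, and per doc.go this overlay support is still incomplete for files that do not exist on disk.

    package main

    import (
        "fmt"
        "log"

        "golang.org/x/tools/go/packages"
    )

    func main() {
        // Hypothetical absolute path; Overlay keys must be absolute file paths.
        const overlayPath = "/home/user/project/foo/foo_gen.go"

        cfg := &packages.Config{
            Mode: packages.NeedName | packages.NeedFiles | packages.NeedImports | packages.NeedDeps,
            // Contents served in place of (or in the absence of) the on-disk file.
            Overlay: map[string][]byte{
                overlayPath: []byte("package foo\n\nfunc Generated() int { return 42 }\n"),
            },
        }
        // The "file=" query resolves the package(s) enclosing that file.
        pkgs, err := packages.Load(cfg, "file="+overlayPath)
        if err != nil {
            log.Fatal(err)
        }
        for _, pkg := range pkgs {
            fmt.Println(pkg.ID, pkg.GoFiles)
        }
    }
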
+func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) { + var sizes types.Sizes + var sizeserr error + var sizeswg sync.WaitGroup + if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 { + sizeswg.Add(1) + go func() { + sizes, sizeserr = getSizes(cfg) + sizeswg.Done() + }() + } + + // Determine files requested in contains patterns + var containFiles []string + var packagesNamed []string + restPatterns := make([]string, 0, len(patterns)) + // Extract file= and other [querytype]= patterns. Report an error if querytype + // doesn't exist. +extractQueries: + for _, pattern := range patterns { + eqidx := strings.Index(pattern, "=") + if eqidx < 0 { + restPatterns = append(restPatterns, pattern) + } else { + query, value := pattern[:eqidx], pattern[eqidx+len("="):] + switch query { + case "file": + containFiles = append(containFiles, value) + case "pattern": + restPatterns = append(restPatterns, value) + case "iamashamedtousethedisabledqueryname": + packagesNamed = append(packagesNamed, value) + case "": // not a reserved query + restPatterns = append(restPatterns, pattern) + default: + for _, rune := range query { + if rune < 'a' || rune > 'z' { // not a reserved query + restPatterns = append(restPatterns, pattern) + continue extractQueries + } + } + // Reject all other patterns containing "=" + return nil, fmt.Errorf("invalid query type %q in query pattern %q", query, pattern) + } + } + } + + response := &responseDeduper{} + var err error + + // See if we have any patterns to pass through to go list. Zero initial + // patterns also requires a go list call, since it's the equivalent of + // ".". + if len(restPatterns) > 0 || len(patterns) == 0 { + dr, err := golistDriver(cfg, restPatterns...) + if err != nil { + return nil, err + } + response.init(dr) + } else { + response.init(&driverResponse{}) + } + + sizeswg.Wait() + if sizeserr != nil { + return nil, sizeserr + } + // types.SizesFor always returns nil or a *types.StdSizes + response.dr.Sizes, _ = sizes.(*types.StdSizes) + + var containsCandidates []string + + if len(containFiles) != 0 { + if err := runContainsQueries(cfg, golistDriver, response, containFiles); err != nil { + return nil, err + } + } + + if len(packagesNamed) != 0 { + if err := runNamedQueries(cfg, golistDriver, response, packagesNamed); err != nil { + return nil, err + } + } + + modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response) + if err != nil { + return nil, err + } + if len(containFiles) > 0 { + containsCandidates = append(containsCandidates, modifiedPkgs...) + containsCandidates = append(containsCandidates, needPkgs...) + } + if err := addNeededOverlayPackages(cfg, golistDriver, response, needPkgs); err != nil { + return nil, err + } + // Check candidate packages for containFiles. + if len(containFiles) > 0 { + for _, id := range containsCandidates { + pkg, ok := response.seenPackages[id] + if !ok { + response.addPackage(&Package{ + ID: id, + Errors: []Error{ + { + Kind: ListError, + Msg: fmt.Sprintf("package %s expected but not seen", id), + }, + }, + }) + continue + } + for _, f := range containFiles { + for _, g := range pkg.GoFiles { + if sameFile(f, g) { + response.addRoot(id) + } + } + } + } + } + + return response.dr, nil +} + +func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string) error { + if len(pkgs) == 0 { + return nil + } + dr, err := driver(cfg, pkgs...) 
+ if err != nil { + return err + } + for _, pkg := range dr.Packages { + response.addPackage(pkg) + } + _, needPkgs, err := processGolistOverlay(cfg, response) + if err != nil { + return err + } + addNeededOverlayPackages(cfg, driver, response, needPkgs) + return nil +} + +func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error { + for _, query := range queries { + // TODO(matloob): Do only one query per directory. + fdir := filepath.Dir(query) + // Pass absolute path of directory to go list so that it knows to treat it as a directory, + // not a package path. + pattern, err := filepath.Abs(fdir) + if err != nil { + return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err) + } + dirResponse, err := driver(cfg, pattern) + if err != nil { + // Couldn't find a package for the directory. Try to load the file as an ad-hoc package. + var queryErr error + dirResponse, queryErr = driver(cfg, query) + if queryErr != nil { + // Return the original error if the attempt to fall back failed. + return err + } + } + isRoot := make(map[string]bool, len(dirResponse.Roots)) + for _, root := range dirResponse.Roots { + isRoot[root] = true + } + for _, pkg := range dirResponse.Packages { + // Add any new packages to the main set + // We don't bother to filter packages that will be dropped by the changes of roots, + // that will happen anyway during graph construction outside this function. + // Over-reporting packages is not a problem. + response.addPackage(pkg) + // if the package was not a root one, it cannot have the file + if !isRoot[pkg.ID] { + continue + } + for _, pkgFile := range pkg.GoFiles { + if filepath.Base(query) == filepath.Base(pkgFile) { + response.addRoot(pkg.ID) + break + } + } + } + } + return nil +} + +// modCacheRegexp splits a path in a module cache into module, module version, and package. +var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`) + +func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error { + // calling `go env` isn't free; bail out if there's nothing to do. + if len(queries) == 0 { + return nil + } + // Determine which directories are relevant to scan. + roots, modRoot, err := roots(cfg) + if err != nil { + return err + } + + // Scan the selected directories. Simple matches, from GOPATH/GOROOT + // or the local module, can simply be "go list"ed. Matches from the + // module cache need special treatment. + var matchesMu sync.Mutex + var simpleMatches, modCacheMatches []string + add := func(root gopathwalk.Root, dir string) { + // Walk calls this concurrently; protect the result slices. + matchesMu.Lock() + defer matchesMu.Unlock() + + path := dir + if dir != root.Path { + path = dir[len(root.Path)+1:] + } + if pathMatchesQueries(path, queries) { + switch root.Type { + case gopathwalk.RootModuleCache: + modCacheMatches = append(modCacheMatches, path) + case gopathwalk.RootCurrentModule: + // We'd need to read go.mod to find the full + // import path. Relative's easier. + rel, err := filepath.Rel(cfg.Dir, dir) + if err != nil { + // This ought to be impossible, since + // we found dir in the current module. 
+ panic(err) + } + simpleMatches = append(simpleMatches, "./"+rel) + case gopathwalk.RootGOPATH, gopathwalk.RootGOROOT: + simpleMatches = append(simpleMatches, path) + } + } + } + + startWalk := time.Now() + gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug}) + if debug { + log.Printf("%v for walk", time.Since(startWalk)) + } + + // Weird special case: the top-level package in a module will be in + // whatever directory the user checked the repository out into. It's + // more reasonable for that to not match the package name. So, if there + // are any Go files in the mod root, query it just to be safe. + if modRoot != "" { + rel, err := filepath.Rel(cfg.Dir, modRoot) + if err != nil { + panic(err) // See above. + } + + files, err := ioutil.ReadDir(modRoot) + for _, f := range files { + if strings.HasSuffix(f.Name(), ".go") { + simpleMatches = append(simpleMatches, rel) + break + } + } + } + + addResponse := func(r *driverResponse) { + for _, pkg := range r.Packages { + response.addPackage(pkg) + for _, name := range queries { + if pkg.Name == name { + response.addRoot(pkg.ID) + break + } + } + } + } + + if len(simpleMatches) != 0 { + resp, err := driver(cfg, simpleMatches...) + if err != nil { + return err + } + addResponse(resp) + } + + // Module cache matches are tricky. We want to avoid downloading new + // versions of things, so we need to use the ones present in the cache. + // go list doesn't accept version specifiers, so we have to write out a + // temporary module, and do the list in that module. + if len(modCacheMatches) != 0 { + // Collect all the matches, deduplicating by major version + // and preferring the newest. + type modInfo struct { + mod string + major string + } + mods := make(map[modInfo]string) + var imports []string + for _, modPath := range modCacheMatches { + matches := modCacheRegexp.FindStringSubmatch(modPath) + mod, ver := filepath.ToSlash(matches[1]), matches[2] + importPath := filepath.ToSlash(filepath.Join(matches[1], matches[3])) + + major := semver.Major(ver) + if prevVer, ok := mods[modInfo{mod, major}]; !ok || semver.Compare(ver, prevVer) > 0 { + mods[modInfo{mod, major}] = ver + } + + imports = append(imports, importPath) + } + + // Build the temporary module. + var gomod bytes.Buffer + gomod.WriteString("module modquery\nrequire (\n") + for mod, version := range mods { + gomod.WriteString("\t" + mod.mod + " " + version + "\n") + } + gomod.WriteString(")\n") + + tmpCfg := *cfg + + // We're only trying to look at stuff in the module cache, so + // disable the network. This should speed things up, and has + // prevented errors in at least one case, #28518. + tmpCfg.Env = append(append([]string{"GOPROXY=off"}, cfg.Env...)) + + var err error + tmpCfg.Dir, err = ioutil.TempDir("", "gopackages-modquery") + if err != nil { + return err + } + defer os.RemoveAll(tmpCfg.Dir) + + if err := ioutil.WriteFile(filepath.Join(tmpCfg.Dir, "go.mod"), gomod.Bytes(), 0777); err != nil { + return fmt.Errorf("writing go.mod for module cache query: %v", err) + } + + // Run the query, using the import paths calculated from the matches above. + resp, err := driver(&tmpCfg, imports...) 
+ if err != nil { + return fmt.Errorf("querying module cache matches: %v", err) + } + addResponse(resp) + } + + return nil +} + +func getSizes(cfg *Config) (types.Sizes, error) { + return packagesdriver.GetSizesGolist(cfg.Context, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg)) +} + +// roots selects the appropriate paths to walk based on the passed-in configuration, +// particularly the environment and the presence of a go.mod in cfg.Dir's parents. +func roots(cfg *Config) ([]gopathwalk.Root, string, error) { + stdout, err := invokeGo(cfg, "env", "GOROOT", "GOPATH", "GOMOD") + if err != nil { + return nil, "", err + } + + fields := strings.Split(stdout.String(), "\n") + if len(fields) != 4 || len(fields[3]) != 0 { + return nil, "", fmt.Errorf("go env returned unexpected output: %q", stdout.String()) + } + goroot, gopath, gomod := fields[0], filepath.SplitList(fields[1]), fields[2] + var modDir string + if gomod != "" { + modDir = filepath.Dir(gomod) + } + + var roots []gopathwalk.Root + // Always add GOROOT. + roots = append(roots, gopathwalk.Root{filepath.Join(goroot, "/src"), gopathwalk.RootGOROOT}) + // If modules are enabled, scan the module dir. + if modDir != "" { + roots = append(roots, gopathwalk.Root{modDir, gopathwalk.RootCurrentModule}) + } + // Add either GOPATH/src or GOPATH/pkg/mod, depending on module mode. + for _, p := range gopath { + if modDir != "" { + roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache}) + } else { + roots = append(roots, gopathwalk.Root{filepath.Join(p, "/src"), gopathwalk.RootGOPATH}) + } + } + + return roots, modDir, nil +} + +// These functions were copied from goimports. See further documentation there. + +// pathMatchesQueries is adapted from pkgIsCandidate. +// TODO: is it reasonable to do Contains here, rather than an exact match on a path component? +func pathMatchesQueries(path string, queries []string) bool { + lastTwo := lastTwoComponents(path) + for _, query := range queries { + if strings.Contains(lastTwo, query) { + return true + } + if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(query) { + lastTwo = lowerASCIIAndRemoveHyphen(lastTwo) + if strings.Contains(lastTwo, query) { + return true + } + } + } + return false +} + +// lastTwoComponents returns at most the last two path components +// of v, using either / or \ as the path separator. +func lastTwoComponents(v string) string { + nslash := 0 + for i := len(v) - 1; i >= 0; i-- { + if v[i] == '/' || v[i] == '\\' { + nslash++ + if nslash == 2 { + return v[i:] + } + } + } + return v +} + +func hasHyphenOrUpperASCII(s string) bool { + for i := 0; i < len(s); i++ { + b := s[i] + if b == '-' || ('A' <= b && b <= 'Z') { + return true + } + } + return false +} + +func lowerASCIIAndRemoveHyphen(s string) (ret string) { + buf := make([]byte, 0, len(s)) + for i := 0; i < len(s); i++ { + b := s[i] + switch { + case b == '-': + continue + case 'A' <= b && b <= 'Z': + buf = append(buf, b+('a'-'A')) + default: + buf = append(buf, b) + } + } + return string(buf) +} + +// Fields must match go list; +// see $GOROOT/src/cmd/go/internal/load/pkg.go. 
+type jsonPackage struct { + ImportPath string + Dir string + Name string + Export string + GoFiles []string + CompiledGoFiles []string + CFiles []string + CgoFiles []string + CXXFiles []string + MFiles []string + HFiles []string + FFiles []string + SFiles []string + SwigFiles []string + SwigCXXFiles []string + SysoFiles []string + Imports []string + ImportMap map[string]string + Deps []string + TestGoFiles []string + TestImports []string + XTestGoFiles []string + XTestImports []string + ForTest string // q in a "p [q.test]" package, else "" + DepOnly bool + + Error *jsonPackageError +} + +type jsonPackageError struct { + ImportStack []string + Pos string + Err string +} + +func otherFiles(p *jsonPackage) [][]string { + return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles} +} + +// golistDriver uses the "go list" command to expand the pattern +// words and return metadata for the specified packages. dir may be +// "" and env may be nil, as per os/exec.Command. +func golistDriver(cfg *Config, words ...string) (*driverResponse, error) { + // go list uses the following identifiers in ImportPath and Imports: + // + // "p" -- importable package or main (command) + // "q.test" -- q's test executable + // "p [q.test]" -- variant of p as built for q's test executable + // "q_test [q.test]" -- q's external test package + // + // The packages p that are built differently for a test q.test + // are q itself, plus any helpers used by the external test q_test, + // typically including "testing" and all its dependencies. + + // Run "go list" for complete + // information on the specified packages. + buf, err := invokeGo(cfg, golistargs(cfg, words)...) + if err != nil { + return nil, err + } + seen := make(map[string]*jsonPackage) + // Decode the JSON and convert it to Package form. + var response driverResponse + for dec := json.NewDecoder(buf); dec.More(); { + p := new(jsonPackage) + if err := dec.Decode(p); err != nil { + return nil, fmt.Errorf("JSON decoding failed: %v", err) + } + + if p.ImportPath == "" { + // The documentation for go list says that “[e]rroneous packages will have + // a non-empty ImportPath”. If for some reason it comes back empty, we + // prefer to error out rather than silently discarding data or handing + // back a package without any way to refer to it. + if p.Error != nil { + return nil, Error{ + Pos: p.Error.Pos, + Msg: p.Error.Err, + } + } + return nil, fmt.Errorf("package missing import path: %+v", p) + } + + if old, found := seen[p.ImportPath]; found { + if !reflect.DeepEqual(p, old) { + return nil, fmt.Errorf("internal error: go list gives conflicting information for package %v", p.ImportPath) + } + // skip the duplicate + continue + } + seen[p.ImportPath] = p + + pkg := &Package{ + Name: p.Name, + ID: p.ImportPath, + GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles), + CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles), + OtherFiles: absJoin(p.Dir, otherFiles(p)...), + } + + // Work around https://golang.org/issue/28749: + // cmd/go puts assembly, C, and C++ files in CompiledGoFiles. + // Filter out any elements of CompiledGoFiles that are also in OtherFiles. + // We have to keep this workaround in place until go1.12 is a distant memory. 
+ if len(pkg.OtherFiles) > 0 { + other := make(map[string]bool, len(pkg.OtherFiles)) + for _, f := range pkg.OtherFiles { + other[f] = true + } + + out := pkg.CompiledGoFiles[:0] + for _, f := range pkg.CompiledGoFiles { + if other[f] { + continue + } + out = append(out, f) + } + pkg.CompiledGoFiles = out + } + + // Extract the PkgPath from the package's ID. + if i := strings.IndexByte(pkg.ID, ' '); i >= 0 { + pkg.PkgPath = pkg.ID[:i] + } else { + pkg.PkgPath = pkg.ID + } + + if pkg.PkgPath == "unsafe" { + pkg.GoFiles = nil // ignore fake unsafe.go file + } + + // Assume go list emits only absolute paths for Dir. + if p.Dir != "" && !filepath.IsAbs(p.Dir) { + log.Fatalf("internal error: go list returned non-absolute Package.Dir: %s", p.Dir) + } + + if p.Export != "" && !filepath.IsAbs(p.Export) { + pkg.ExportFile = filepath.Join(p.Dir, p.Export) + } else { + pkg.ExportFile = p.Export + } + + // imports + // + // Imports contains the IDs of all imported packages. + // ImportsMap records (path, ID) only where they differ. + ids := make(map[string]bool) + for _, id := range p.Imports { + ids[id] = true + } + pkg.Imports = make(map[string]*Package) + for path, id := range p.ImportMap { + pkg.Imports[path] = &Package{ID: id} // non-identity import + delete(ids, id) + } + for id := range ids { + if id == "C" { + continue + } + + pkg.Imports[id] = &Package{ID: id} // identity import + } + if !p.DepOnly { + response.Roots = append(response.Roots, pkg.ID) + } + + // Work around for pre-go.1.11 versions of go list. + // TODO(matloob): they should be handled by the fallback. + // Can we delete this? + if len(pkg.CompiledGoFiles) == 0 { + pkg.CompiledGoFiles = pkg.GoFiles + } + + if p.Error != nil { + pkg.Errors = append(pkg.Errors, Error{ + Pos: p.Error.Pos, + Msg: strings.TrimSpace(p.Error.Err), // Trim to work around golang.org/issue/32363. + }) + } + + response.Packages = append(response.Packages, pkg) + } + + return &response, nil +} + +// absJoin absolutizes and flattens the lists of files. +func absJoin(dir string, fileses ...[]string) (res []string) { + for _, files := range fileses { + for _, file := range files { + if !filepath.IsAbs(file) { + file = filepath.Join(dir, file) + } + res = append(res, file) + } + } + return res +} + +func golistargs(cfg *Config, words []string) []string { + const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo + fullargs := []string{ + "list", "-e", "-json", + fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypesInfo|NeedTypesSizes) != 0), + fmt.Sprintf("-test=%t", cfg.Tests), + fmt.Sprintf("-export=%t", usesExportData(cfg)), + fmt.Sprintf("-deps=%t", cfg.Mode&NeedDeps != 0), + // go list doesn't let you pass -test and -find together, + // probably because you'd just get the TestMain. + fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0), + } + fullargs = append(fullargs, cfg.BuildFlags...) + fullargs = append(fullargs, "--") + fullargs = append(fullargs, words...) + return fullargs +} + +// invokeGo returns the stdout of a go command invocation. +func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) { + stdout := new(bytes.Buffer) + stderr := new(bytes.Buffer) + cmd := exec.CommandContext(cfg.Context, "go", args...) + // On darwin the cwd gets resolved to the real path, which breaks anything that + // expects the working directory to keep the original path, including the + // go command when dealing with modules. 
+ // The Go stdlib has a special feature where if the cwd and the PWD are the + // same node then it trusts the PWD, so by setting it in the env for the child + // process we fix up all the paths returned by the go command. + cmd.Env = append(append([]string{}, cfg.Env...), "PWD="+cfg.Dir) + cmd.Dir = cfg.Dir + cmd.Stdout = stdout + cmd.Stderr = stderr + if debug { + defer func(start time.Time) { + log.Printf("%s for %v, stderr: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr) + }(time.Now()) + } + + if err := cmd.Run(); err != nil { + // Check for 'go' executable not being found. + if ee, ok := err.(*exec.Error); ok && ee.Err == exec.ErrNotFound { + return nil, fmt.Errorf("'go list' driver requires 'go', but %s", exec.ErrNotFound) + } + + exitErr, ok := err.(*exec.ExitError) + if !ok { + // Catastrophic error: + // - context cancellation + return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err) + } + + // Old go version? + if strings.Contains(stderr.String(), "flag provided but not defined") { + return nil, goTooOldError{fmt.Errorf("unsupported version of go: %s: %s", exitErr, stderr)} + } + + // This error only appears in stderr. See golang.org/cl/166398 for a fix in go list to show + // the error in the Err section of stdout in case -e option is provided. + // This fix is provided for backwards compatibility. + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "named files must be .go files") { + output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + strings.Trim(stderr.String(), "\n")) + return bytes.NewBufferString(output), nil + } + + // Workaround for #29280: go list -e has incorrect behavior when an ad-hoc package doesn't exist. + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no such file or directory") { + output := fmt.Sprintf(`{"ImportPath": "command-line-arguments","Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + strings.Trim(stderr.String(), "\n")) + return bytes.NewBufferString(output), nil + } + + // Workaround for an instance of golang.org/issue/26755: go list -e will return a non-zero exit + // status if there's a dependency on a package that doesn't exist. But it should return + // a zero exit status and set an error on that package. + if len(stderr.String()) > 0 && strings.Contains(stderr.String(), "no Go files in") { + // try to extract package name from string + stderrStr := stderr.String() + var importPath string + colon := strings.Index(stderrStr, ":") + if colon > 0 && strings.HasPrefix(stderrStr, "go build ") { + importPath = stderrStr[len("go build "):colon] + } + output := fmt.Sprintf(`{"ImportPath": %q,"Incomplete": true,"Error": {"Pos": "","Err": %q}}`, + importPath, strings.Trim(stderrStr, "\n")) + return bytes.NewBufferString(output), nil + } + + // Export mode entails a build. + // If that build fails, errors appear on stderr + // (despite the -e flag) and the Export field is blank. + // Do not fail in that case. + // The same is true if an ad-hoc package given to go list doesn't exist. + // TODO(matloob): Remove these once we can depend on go list to exit with a zero status with -e even when + // packages don't exist or a build fails. + if !usesExportData(cfg) && !containsGoFile(args) { + return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr) + } + } + + // As of writing, go list -export prints some non-fatal compilation + // errors to stderr, even with -e set. 
We would prefer that it put + // them in the Package.Error JSON (see https://golang.org/issue/26319). + // In the meantime, there's nowhere good to put them, but they can + // be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS + // is set. + if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" { + fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd, args...), stderr) + } + + // debugging + if false { + fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(cmd, args...), stdout) + } + + return stdout, nil +} + +func containsGoFile(s []string) bool { + for _, f := range s { + if strings.HasSuffix(f, ".go") { + return true + } + } + return false +} + +func cmdDebugStr(cmd *exec.Cmd, args ...string) string { + env := make(map[string]string) + for _, kv := range cmd.Env { + split := strings.Split(kv, "=") + k, v := split[0], split[1] + env[k] = v + } + var quotedArgs []string + for _, arg := range args { + quotedArgs = append(quotedArgs, strconv.Quote(arg)) + } + + return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v PWD=%v go %s", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["PWD"], strings.Join(quotedArgs, " ")) +} diff --git a/vendor/golang.org/x/tools/go/packages/golist_overlay.go b/vendor/golang.org/x/tools/go/packages/golist_overlay.go new file mode 100644 index 000000000..b35399ef1 --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/golist_overlay.go @@ -0,0 +1,290 @@ +package packages + +import ( + "bytes" + "encoding/json" + "fmt" + "go/parser" + "go/token" + "path" + "path/filepath" + "strconv" + "strings" + "sync" +) + +// processGolistOverlay provides rudimentary support for adding +// files that don't exist on disk to an overlay. The results can be +// sometimes incorrect. +// TODO(matloob): Handle unsupported cases, including the following: +// - determining the correct package to add given a new import path +func processGolistOverlay(cfg *Config, response *responseDeduper) (modifiedPkgs, needPkgs []string, err error) { + havePkgs := make(map[string]string) // importPath -> non-test package ID + needPkgsSet := make(map[string]bool) + modifiedPkgsSet := make(map[string]bool) + + for _, pkg := range response.dr.Packages { + // This is an approximation of import path to id. This can be + // wrong for tests, vendored packages, and a number of other cases. + havePkgs[pkg.PkgPath] = pkg.ID + } + + var rootDirs map[string]string + var onceGetRootDirs sync.Once + + for opath, contents := range cfg.Overlay { + base := filepath.Base(opath) + dir := filepath.Dir(opath) + var pkg *Package + var testVariantOf *Package // if opath is a test file, this is the package it is testing + var fileExists bool + isTest := strings.HasSuffix(opath, "_test.go") + pkgName, ok := extractPackageName(opath, contents) + if !ok { + // Don't bother adding a file that doesn't even have a parsable package statement + // to the overlay. + continue + } + nextPackage: + for _, p := range response.dr.Packages { + if pkgName != p.Name { + continue + } + for _, f := range p.GoFiles { + if !sameFile(filepath.Dir(f), dir) { + continue + } + if isTest && !hasTestFiles(p) { + // TODO(matloob): Are there packages other than the 'production' variant + // of a package that this can match? This shouldn't match the test main package + // because the file is generated in another directory. 
+ testVariantOf = p + continue nextPackage + } + pkg = p + if filepath.Base(f) == base { + fileExists = true + } + } + } + // The overlay could have included an entirely new package. + if pkg == nil { + onceGetRootDirs.Do(func() { + rootDirs = determineRootDirs(cfg) + }) + // Try to find the module or gopath dir the file is contained in. + // Then for modules, add the module opath to the beginning. + var pkgPath string + for rdir, rpath := range rootDirs { + // TODO(matloob): This doesn't properly handle symlinks. + r, err := filepath.Rel(rdir, dir) + if err != nil { + continue + } + pkgPath = filepath.ToSlash(r) + if rpath != "" { + pkgPath = path.Join(rpath, pkgPath) + } + // We only create one new package even it can belong in multiple modules or GOPATH entries. + // This is okay because tools (such as the LSP) that use overlays will recompute the overlay + // once the file is saved, and golist will do the right thing. + // TODO(matloob): Implement module tiebreaking? + break + } + if pkgPath == "" { + continue + } + isXTest := strings.HasSuffix(pkgName, "_test") + if isXTest { + pkgPath += "_test" + } + id := pkgPath + if isTest && !isXTest { + id = fmt.Sprintf("%s [%s.test]", pkgPath, pkgPath) + } + // Try to reclaim a package with the same id if it exists in the response. + for _, p := range response.dr.Packages { + if reclaimPackage(p, id, opath, contents) { + pkg = p + break + } + } + // Otherwise, create a new package + if pkg == nil { + pkg = &Package{PkgPath: pkgPath, ID: id, Name: pkgName, Imports: make(map[string]*Package)} + response.addPackage(pkg) + havePkgs[pkg.PkgPath] = id + // Add the production package's sources for a test variant. + if isTest && !isXTest && testVariantOf != nil { + pkg.GoFiles = append(pkg.GoFiles, testVariantOf.GoFiles...) + pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, testVariantOf.CompiledGoFiles...) + } + } + } + if !fileExists { + pkg.GoFiles = append(pkg.GoFiles, opath) + // TODO(matloob): Adding the file to CompiledGoFiles can exhibit the wrong behavior + // if the file will be ignored due to its build tags. + pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, opath) + modifiedPkgsSet[pkg.ID] = true + } + imports, err := extractImports(opath, contents) + if err != nil { + // Let the parser or type checker report errors later. + continue + } + for _, imp := range imports { + _, found := pkg.Imports[imp] + if !found { + // TODO(matloob): Handle cases when the following block isn't correct. + // These include imports of test variants, imports of vendored packages, etc. + id, ok := havePkgs[imp] + if !ok { + id = imp + } + pkg.Imports[imp] = &Package{ID: id} + } + } + continue + } + + // toPkgPath tries to guess the package path given the id. + // This isn't always correct -- it's certainly wrong for + // vendored packages' paths. + toPkgPath := func(id string) string { + // TODO(matloob): Handle vendor paths. + i := strings.IndexByte(id, ' ') + if i >= 0 { + return id[:i] + } + return id + } + + // Do another pass now that new packages have been created to determine the + // set of missing packages. 
+ for _, pkg := range response.dr.Packages { + for _, imp := range pkg.Imports { + pkgPath := toPkgPath(imp.ID) + if _, ok := havePkgs[pkgPath]; !ok { + needPkgsSet[pkgPath] = true + } + } + } + + needPkgs = make([]string, 0, len(needPkgsSet)) + for pkg := range needPkgsSet { + needPkgs = append(needPkgs, pkg) + } + modifiedPkgs = make([]string, 0, len(modifiedPkgsSet)) + for pkg := range modifiedPkgsSet { + modifiedPkgs = append(modifiedPkgs, pkg) + } + return modifiedPkgs, needPkgs, err +} + +func hasTestFiles(p *Package) bool { + for _, f := range p.GoFiles { + if strings.HasSuffix(f, "_test.go") { + return true + } + } + return false +} + +// determineRootDirs returns a mapping from directories code can be contained in to the +// corresponding import path prefixes of those directories. +// Its result is used to try to determine the import path for a package containing +// an overlay file. +func determineRootDirs(cfg *Config) map[string]string { + // Assume modules first: + out, err := invokeGo(cfg, "list", "-m", "-json", "all") + if err != nil { + return determineRootDirsGOPATH(cfg) + } + m := map[string]string{} + type jsonMod struct{ Path, Dir string } + for dec := json.NewDecoder(out); dec.More(); { + mod := new(jsonMod) + if err := dec.Decode(mod); err != nil { + return m // Give up and return an empty map. Package won't be found for overlay. + } + if mod.Dir != "" && mod.Path != "" { + // This is a valid module; add it to the map. + m[mod.Dir] = mod.Path + } + } + return m +} + +func determineRootDirsGOPATH(cfg *Config) map[string]string { + m := map[string]string{} + out, err := invokeGo(cfg, "env", "GOPATH") + if err != nil { + // Could not determine root dir mapping. Everything is best-effort, so just return an empty map. + // When we try to find the import path for a directory, there will be no root-dir match and + // we'll give up. + return m + } + for _, p := range filepath.SplitList(string(bytes.TrimSpace(out.Bytes()))) { + m[filepath.Join(p, "src")] = "" + } + return m +} + +func extractImports(filename string, contents []byte) ([]string, error) { + f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.ImportsOnly) // TODO(matloob): reuse fileset? + if err != nil { + return nil, err + } + var res []string + for _, imp := range f.Imports { + quotedPath := imp.Path.Value + path, err := strconv.Unquote(quotedPath) + if err != nil { + return nil, err + } + res = append(res, path) + } + return res, nil +} + +// reclaimPackage attempts to reuse a package that failed to load in an overlay. +// +// If the package has errors and has no Name, GoFiles, or Imports, +// then it's possible that it doesn't yet exist on disk. +func reclaimPackage(pkg *Package, id string, filename string, contents []byte) bool { + // TODO(rstambler): Check the message of the actual error? + // It differs between $GOPATH and module mode. + if pkg.ID != id { + return false + } + if len(pkg.Errors) != 1 { + return false + } + if pkg.Name != "" || pkg.ExportFile != "" { + return false + } + if len(pkg.GoFiles) > 0 || len(pkg.CompiledGoFiles) > 0 || len(pkg.OtherFiles) > 0 { + return false + } + if len(pkg.Imports) > 0 { + return false + } + pkgName, ok := extractPackageName(filename, contents) + if !ok { + return false + } + pkg.Name = pkgName + pkg.Errors = nil + return true +} + +func extractPackageName(filename string, contents []byte) (string, bool) { + // TODO(rstambler): Check the message of the actual error? + // It differs between $GOPATH and module mode. 
+ f, err := parser.ParseFile(token.NewFileSet(), filename, contents, parser.PackageClauseOnly) // TODO(matloob): reuse fileset? + if err != nil { + return "", false + } + return f.Name.Name, true +} diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go new file mode 100644 index 000000000..cd151469a --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/packages.go @@ -0,0 +1,1071 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package packages + +// See doc.go for package documentation and implementation notes. + +import ( + "context" + "encoding/json" + "fmt" + "go/ast" + "go/parser" + "go/scanner" + "go/token" + "go/types" + "io/ioutil" + "log" + "os" + "path/filepath" + "strings" + "sync" + + "golang.org/x/tools/go/gcexportdata" +) + +// A LoadMode controls the amount of detail to return when loading. +// The bits below can be combined to specify which fields should be +// filled in the result packages. +// The zero value is a special case, equivalent to combining +// the NeedName, NeedFiles, and NeedCompiledGoFiles bits. +// ID and Errors (if present) will always be filled. +// Load may return more information than requested. +type LoadMode int + +const ( + // NeedName adds Name and PkgPath. + NeedName LoadMode = 1 << iota + + // NeedFiles adds GoFiles and OtherFiles. + NeedFiles + + // NeedCompiledGoFiles adds CompiledGoFiles. + NeedCompiledGoFiles + + // NeedImports adds Imports. If NeedDeps is not set, the Imports field will contain + // "placeholder" Packages with only the ID set. + NeedImports + + // NeedDeps adds the fields requested by the LoadMode in the packages in Imports. If NeedImports + // is not set NeedDeps has no effect. + NeedDeps + + // NeedExportsFile adds ExportsFile. + NeedExportsFile + + // NeedTypes adds Types, Fset, and IllTyped. + NeedTypes + + // NeedSyntax adds Syntax. + NeedSyntax + + // NeedTypesInfo adds TypesInfo. + NeedTypesInfo + + // NeedTypesSizes adds TypesSizes. + NeedTypesSizes +) + +const ( + // Deprecated: LoadFiles exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadFiles = NeedName | NeedFiles | NeedCompiledGoFiles + + // Deprecated: LoadImports exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadImports = LoadFiles | NeedImports | NeedDeps + + // Deprecated: LoadTypes exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadTypes = LoadImports | NeedTypes | NeedTypesSizes + + // Deprecated: LoadSyntax exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadSyntax = LoadTypes | NeedSyntax | NeedTypesInfo + + // Deprecated: LoadAllSyntax exists for historical compatibility + // and should not be used. Please directly specify the needed fields using the Need values. + LoadAllSyntax = LoadSyntax +) + +// A Config specifies details about how packages should be loaded. +// The zero value is a valid configuration. +// Calls to Load do not modify this struct. +type Config struct { + // Mode controls the level of information returned for each package. + Mode LoadMode + + // Context specifies the context for the load operation. 
+ // If the context is cancelled, the loader may stop early + // and return an ErrCancelled error. + // If Context is nil, the load cannot be cancelled. + Context context.Context + + // Dir is the directory in which to run the build system's query tool + // that provides information about the packages. + // If Dir is empty, the tool is run in the current directory. + Dir string + + // Env is the environment to use when invoking the build system's query tool. + // If Env is nil, the current environment is used. + // As in os/exec's Cmd, only the last value in the slice for + // each environment key is used. To specify the setting of only + // a few variables, append to the current environment, as in: + // + // opt.Env = append(os.Environ(), "GOOS=plan9", "GOARCH=386") + // + Env []string + + // BuildFlags is a list of command-line flags to be passed through to + // the build system's query tool. + BuildFlags []string + + // Fset provides source position information for syntax trees and types. + // If Fset is nil, Load will use a new fileset, but preserve Fset's value. + Fset *token.FileSet + + // ParseFile is called to read and parse each file + // when preparing a package's type-checked syntax tree. + // It must be safe to call ParseFile simultaneously from multiple goroutines. + // If ParseFile is nil, the loader will uses parser.ParseFile. + // + // ParseFile should parse the source from src and use filename only for + // recording position information. + // + // An application may supply a custom implementation of ParseFile + // to change the effective file contents or the behavior of the parser, + // or to modify the syntax tree. For example, selectively eliminating + // unwanted function bodies can significantly accelerate type checking. + ParseFile func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) + + // If Tests is set, the loader includes not just the packages + // matching a particular pattern but also any related test packages, + // including test-only variants of the package and the test executable. + // + // For example, when using the go command, loading "fmt" with Tests=true + // returns four packages, with IDs "fmt" (the standard package), + // "fmt [fmt.test]" (the package as compiled for the test), + // "fmt_test" (the test functions from source files in package fmt_test), + // and "fmt.test" (the test binary). + // + // In build systems with explicit names for tests, + // setting Tests may have no effect. + Tests bool + + // Overlay provides a mapping of absolute file paths to file contents. + // If the file with the given path already exists, the parser will use the + // alternative file contents provided by the map. + // + // Overlays provide incomplete support for when a given file doesn't + // already exist on disk. See the package doc above for more details. + Overlay map[string][]byte +} + +// driver is the type for functions that query the build system for the +// packages named by the patterns. +type driver func(cfg *Config, patterns ...string) (*driverResponse, error) + +// driverResponse contains the results for a driver query. +type driverResponse struct { + // Sizes, if not nil, is the types.Sizes to use when type checking. + Sizes *types.StdSizes + + // Roots is the set of package IDs that make up the root packages. + // We have to encode this separately because when we encode a single package + // we cannot know if it is one of the roots as that requires knowledge of the + // graph it is part of. 
+ Roots []string `json:",omitempty"` + + // Packages is the full set of packages in the graph. + // The packages are not connected into a graph. + // The Imports if populated will be stubs that only have their ID set. + // Imports will be connected and then type and syntax information added in a + // later pass (see refine). + Packages []*Package +} + +// Load loads and returns the Go packages named by the given patterns. +// +// Config specifies loading options; +// nil behaves the same as an empty Config. +// +// Load returns an error if any of the patterns was invalid +// as defined by the underlying build system. +// It may return an empty list of packages without an error, +// for instance for an empty expansion of a valid wildcard. +// Errors associated with a particular package are recorded in the +// corresponding Package's Errors list, and do not cause Load to +// return an error. Clients may need to handle such errors before +// proceeding with further analysis. The PrintErrors function is +// provided for convenient display of all errors. +func Load(cfg *Config, patterns ...string) ([]*Package, error) { + l := newLoader(cfg) + response, err := defaultDriver(&l.Config, patterns...) + if err != nil { + return nil, err + } + l.sizes = response.Sizes + return l.refine(response.Roots, response.Packages...) +} + +// defaultDriver is a driver that looks for an external driver binary, and if +// it does not find it falls back to the built in go list driver. +func defaultDriver(cfg *Config, patterns ...string) (*driverResponse, error) { + driver := findExternalDriver(cfg) + if driver == nil { + driver = goListDriver + } + return driver(cfg, patterns...) +} + +// A Package describes a loaded Go package. +type Package struct { + // ID is a unique identifier for a package, + // in a syntax provided by the underlying build system. + // + // Because the syntax varies based on the build system, + // clients should treat IDs as opaque and not attempt to + // interpret them. + ID string + + // Name is the package name as it appears in the package source code. + Name string + + // PkgPath is the package path as used by the go/types package. + PkgPath string + + // Errors contains any errors encountered querying the metadata + // of the package, or while parsing or type-checking its files. + Errors []Error + + // GoFiles lists the absolute file paths of the package's Go source files. + GoFiles []string + + // CompiledGoFiles lists the absolute file paths of the package's source + // files that were presented to the compiler. + // This may differ from GoFiles if files are processed before compilation. + CompiledGoFiles []string + + // OtherFiles lists the absolute file paths of the package's non-Go source files, + // including assembly, C, C++, Fortran, Objective-C, SWIG, and so on. + OtherFiles []string + + // ExportFile is the absolute path to a file containing type + // information for the package as provided by the build system. + ExportFile string + + // Imports maps import paths appearing in the package's Go source files + // to corresponding loaded Packages. + Imports map[string]*Package + + // Types provides type information for the package. + // The NeedTypes LoadMode bit sets this field for packages matching the + // patterns; type information for dependencies may be missing or incomplete, + // unless NeedDeps and NeedImports are also set. + Types *types.Package + + // Fset provides position information for Types, TypesInfo, and Syntax. + // It is set only when Types is set. 
+ Fset *token.FileSet + + // IllTyped indicates whether the package or any dependency contains errors. + // It is set only when Types is set. + IllTyped bool + + // Syntax is the package's syntax trees, for the files listed in CompiledGoFiles. + // + // The NeedSyntax LoadMode bit populates this field for packages matching the patterns. + // If NeedDeps and NeedImports are also set, this field will also be populated + // for dependencies. + Syntax []*ast.File + + // TypesInfo provides type information about the package's syntax trees. + // It is set only when Syntax is set. + TypesInfo *types.Info + + // TypesSizes provides the effective size function for types in TypesInfo. + TypesSizes types.Sizes +} + +// An Error describes a problem with a package's metadata, syntax, or types. +type Error struct { + Pos string // "file:line:col" or "file:line" or "" or "-" + Msg string + Kind ErrorKind +} + +// ErrorKind describes the source of the error, allowing the user to +// differentiate between errors generated by the driver, the parser, or the +// type-checker. +type ErrorKind int + +const ( + UnknownError ErrorKind = iota + ListError + ParseError + TypeError +) + +func (err Error) Error() string { + pos := err.Pos + if pos == "" { + pos = "-" // like token.Position{}.String() + } + return pos + ": " + err.Msg +} + +// flatPackage is the JSON form of Package +// It drops all the type and syntax fields, and transforms the Imports +// +// TODO(adonovan): identify this struct with Package, effectively +// publishing the JSON protocol. +type flatPackage struct { + ID string + Name string `json:",omitempty"` + PkgPath string `json:",omitempty"` + Errors []Error `json:",omitempty"` + GoFiles []string `json:",omitempty"` + CompiledGoFiles []string `json:",omitempty"` + OtherFiles []string `json:",omitempty"` + ExportFile string `json:",omitempty"` + Imports map[string]string `json:",omitempty"` +} + +// MarshalJSON returns the Package in its JSON form. +// For the most part, the structure fields are written out unmodified, and +// the type and syntax fields are skipped. +// The imports are written out as just a map of path to package id. +// The errors are written using a custom type that tries to preserve the +// structure of error types we know about. +// +// This method exists to enable support for additional build systems. It is +// not intended for use by clients of the API and we may change the format. +func (p *Package) MarshalJSON() ([]byte, error) { + flat := &flatPackage{ + ID: p.ID, + Name: p.Name, + PkgPath: p.PkgPath, + Errors: p.Errors, + GoFiles: p.GoFiles, + CompiledGoFiles: p.CompiledGoFiles, + OtherFiles: p.OtherFiles, + ExportFile: p.ExportFile, + } + if len(p.Imports) > 0 { + flat.Imports = make(map[string]string, len(p.Imports)) + for path, ipkg := range p.Imports { + flat.Imports[path] = ipkg.ID + } + } + return json.Marshal(flat) +} + +// UnmarshalJSON reads in a Package from its JSON format. +// See MarshalJSON for details about the format accepted. 
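Aside for reviewers unfamiliar with this vendored API: a minimal, hypothetical sketch of the intended call pattern, grounded only in the declarations in this file. A Config selects fields via the Need bits, Load resolves the patterns, and PrintErrors (added in visit.go further down) reports per-package problems. The "./..." pattern is only a placeholder.

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	// Request only names and file lists; other fields are cleared by the loader.
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedFiles,
	}
	pkgs, err := packages.Load(cfg, "./...") // "./..." is a placeholder pattern
	if err != nil {
		log.Fatal(err) // the patterns themselves were invalid
	}
	// Per-package problems are recorded on each Package, not returned by Load.
	if packages.PrintErrors(pkgs) > 0 {
		log.Fatal("packages contain errors")
	}
	for _, pkg := range pkgs {
		fmt.Println(pkg.ID, pkg.Name, len(pkg.GoFiles), "Go files")
	}
}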
+func (p *Package) UnmarshalJSON(b []byte) error { + flat := &flatPackage{} + if err := json.Unmarshal(b, &flat); err != nil { + return err + } + *p = Package{ + ID: flat.ID, + Name: flat.Name, + PkgPath: flat.PkgPath, + Errors: flat.Errors, + GoFiles: flat.GoFiles, + CompiledGoFiles: flat.CompiledGoFiles, + OtherFiles: flat.OtherFiles, + ExportFile: flat.ExportFile, + } + if len(flat.Imports) > 0 { + p.Imports = make(map[string]*Package, len(flat.Imports)) + for path, id := range flat.Imports { + p.Imports[path] = &Package{ID: id} + } + } + return nil +} + +func (p *Package) String() string { return p.ID } + +// loaderPackage augments Package with state used during the loading phase +type loaderPackage struct { + *Package + importErrors map[string]error // maps each bad import to its error + loadOnce sync.Once + color uint8 // for cycle detection + needsrc bool // load from source (Mode >= LoadTypes) + needtypes bool // type information is either requested or depended on + initial bool // package was matched by a pattern +} + +// loader holds the working state of a single call to load. +type loader struct { + pkgs map[string]*loaderPackage + Config + sizes types.Sizes + parseCache map[string]*parseValue + parseCacheMu sync.Mutex + exportMu sync.Mutex // enforces mutual exclusion of exportdata operations + + // TODO(matloob): Add an implied mode here and use that instead of mode. + // Implied mode would contain all the fields we need the data for so we can + // get the actually requested fields. We'll zero them out before returning + // packages to the user. This will make it easier for us to get the conditions + // where we need certain modes right. +} + +type parseValue struct { + f *ast.File + err error + ready chan struct{} +} + +func newLoader(cfg *Config) *loader { + ld := &loader{ + parseCache: map[string]*parseValue{}, + } + if cfg != nil { + ld.Config = *cfg + } + if ld.Config.Mode == 0 { + ld.Config.Mode = NeedName | NeedFiles | NeedCompiledGoFiles // Preserve zero behavior of Mode for backwards compatibility. + } + if ld.Config.Env == nil { + ld.Config.Env = os.Environ() + } + if ld.Context == nil { + ld.Context = context.Background() + } + if ld.Dir == "" { + if dir, err := os.Getwd(); err == nil { + ld.Dir = dir + } + } + + if ld.Mode&NeedTypes != 0 { + if ld.Fset == nil { + ld.Fset = token.NewFileSet() + } + + // ParseFile is required even in LoadTypes mode + // because we load source if export data is missing. + if ld.ParseFile == nil { + ld.ParseFile = func(fset *token.FileSet, filename string, src []byte) (*ast.File, error) { + const mode = parser.AllErrors | parser.ParseComments + return parser.ParseFile(fset, filename, src, mode) + } + } + } + return ld +} + +// refine connects the supplied packages into a graph and then adds type and +// and syntax information as requested by the LoadMode. 
+func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) { + rootMap := make(map[string]int, len(roots)) + for i, root := range roots { + rootMap[root] = i + } + ld.pkgs = make(map[string]*loaderPackage) + // first pass, fixup and build the map and roots + var initial = make([]*loaderPackage, len(roots)) + for _, pkg := range list { + rootIndex := -1 + if i, found := rootMap[pkg.ID]; found { + rootIndex = i + } + lpkg := &loaderPackage{ + Package: pkg, + needtypes: (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && rootIndex < 0) || rootIndex >= 0, + needsrc: (ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && rootIndex < 0) || rootIndex >= 0 || + len(ld.Overlay) > 0 || // Overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files + pkg.ExportFile == "" && pkg.PkgPath != "unsafe", + } + ld.pkgs[lpkg.ID] = lpkg + if rootIndex >= 0 { + initial[rootIndex] = lpkg + lpkg.initial = true + } + } + for i, root := range roots { + if initial[i] == nil { + return nil, fmt.Errorf("root package %v is missing", root) + } + } + + // Materialize the import graph. + + const ( + white = 0 // new + grey = 1 // in progress + black = 2 // complete + ) + + // visit traverses the import graph, depth-first, + // and materializes the graph as Packages.Imports. + // + // Valid imports are saved in the Packages.Import map. + // Invalid imports (cycles and missing nodes) are saved in the importErrors map. + // Thus, even in the presence of both kinds of errors, the Import graph remains a DAG. + // + // visit returns whether the package needs src or has a transitive + // dependency on a package that does. These are the only packages + // for which we load source code. + var stack []*loaderPackage + var visit func(lpkg *loaderPackage) bool + var srcPkgs []*loaderPackage + visit = func(lpkg *loaderPackage) bool { + switch lpkg.color { + case black: + return lpkg.needsrc + case grey: + panic("internal error: grey node") + } + lpkg.color = grey + stack = append(stack, lpkg) // push + stubs := lpkg.Imports // the structure form has only stubs with the ID in the Imports + lpkg.Imports = make(map[string]*Package, len(stubs)) + for importPath, ipkg := range stubs { + var importErr error + imp := ld.pkgs[ipkg.ID] + if imp == nil { + // (includes package "C" when DisableCgo) + importErr = fmt.Errorf("missing package: %q", ipkg.ID) + } else if imp.color == grey { + importErr = fmt.Errorf("import cycle: %s", stack) + } + if importErr != nil { + if lpkg.importErrors == nil { + lpkg.importErrors = make(map[string]error) + } + lpkg.importErrors[importPath] = importErr + continue + } + + if visit(imp) { + lpkg.needsrc = true + } + lpkg.Imports[importPath] = imp.Package + } + if lpkg.needsrc { + srcPkgs = append(srcPkgs, lpkg) + } + if ld.Mode&NeedTypesSizes != 0 { + lpkg.TypesSizes = ld.sizes + } + stack = stack[:len(stack)-1] // pop + lpkg.color = black + + return lpkg.needsrc + } + + if ld.Mode&(NeedImports|NeedDeps) == 0 { + // We do this to drop the stub import packages that we are not even going to try to resolve. + for _, lpkg := range initial { + lpkg.Imports = nil + } + } else { + // For each initial package, create its import DAG. + for _, lpkg := range initial { + visit(lpkg) + } + } + if ld.Mode&NeedDeps != 0 { // TODO(matloob): This is only the case if NeedTypes is also set, right? + for _, lpkg := range srcPkgs { + // Complete type information is required for the + // immediate dependencies of each source package. 
+ for _, ipkg := range lpkg.Imports { + imp := ld.pkgs[ipkg.ID] + imp.needtypes = true + } + } + } + // Load type data if needed, starting at + // the initial packages (roots of the import DAG). + if ld.Mode&NeedTypes != 0 { + var wg sync.WaitGroup + for _, lpkg := range initial { + wg.Add(1) + go func(lpkg *loaderPackage) { + ld.loadRecursive(lpkg) + wg.Done() + }(lpkg) + } + wg.Wait() + } + + result := make([]*Package, len(initial)) + importPlaceholders := make(map[string]*Package) + for i, lpkg := range initial { + result[i] = lpkg.Package + } + for i := range ld.pkgs { + // Clear all unrequested fields, for extra de-Hyrum-ization. + if ld.Mode&NeedName == 0 { + ld.pkgs[i].Name = "" + ld.pkgs[i].PkgPath = "" + } + if ld.Mode&NeedFiles == 0 { + ld.pkgs[i].GoFiles = nil + ld.pkgs[i].OtherFiles = nil + } + if ld.Mode&NeedCompiledGoFiles == 0 { + ld.pkgs[i].CompiledGoFiles = nil + } + if ld.Mode&NeedImports == 0 { + ld.pkgs[i].Imports = nil + } + if ld.Mode&NeedExportsFile == 0 { + ld.pkgs[i].ExportFile = "" + } + if ld.Mode&NeedTypes == 0 { + ld.pkgs[i].Types = nil + ld.pkgs[i].Fset = nil + ld.pkgs[i].IllTyped = false + } + if ld.Mode&NeedSyntax == 0 { + ld.pkgs[i].Syntax = nil + } + if ld.Mode&NeedTypesInfo == 0 { + ld.pkgs[i].TypesInfo = nil + } + if ld.Mode&NeedTypesSizes == 0 { + ld.pkgs[i].TypesSizes = nil + } + if ld.Mode&NeedDeps == 0 { + for j, pkg := range ld.pkgs[i].Imports { + ph, ok := importPlaceholders[pkg.ID] + if !ok { + ph = &Package{ID: pkg.ID} + importPlaceholders[pkg.ID] = ph + } + ld.pkgs[i].Imports[j] = ph + } + } + } + return result, nil +} + +// loadRecursive loads the specified package and its dependencies, +// recursively, in parallel, in topological order. +// It is atomic and idempotent. +// Precondition: ld.Mode&NeedTypes. +func (ld *loader) loadRecursive(lpkg *loaderPackage) { + lpkg.loadOnce.Do(func() { + // Load the direct dependencies, in parallel. + var wg sync.WaitGroup + for _, ipkg := range lpkg.Imports { + imp := ld.pkgs[ipkg.ID] + wg.Add(1) + go func(imp *loaderPackage) { + ld.loadRecursive(imp) + wg.Done() + }(imp) + } + wg.Wait() + + ld.loadPackage(lpkg) + }) +} + +// loadPackage loads the specified package. +// It must be called only once per Package, +// after immediate dependencies are loaded. +// Precondition: ld.Mode & NeedTypes. +func (ld *loader) loadPackage(lpkg *loaderPackage) { + if lpkg.PkgPath == "unsafe" { + // Fill in the blanks to avoid surprises. + lpkg.Types = types.Unsafe + lpkg.Fset = ld.Fset + lpkg.Syntax = []*ast.File{} + lpkg.TypesInfo = new(types.Info) + lpkg.TypesSizes = ld.sizes + return + } + + // Call NewPackage directly with explicit name. + // This avoids skew between golist and go/types when the files' + // package declarations are inconsistent. + lpkg.Types = types.NewPackage(lpkg.PkgPath, lpkg.Name) + lpkg.Fset = ld.Fset + + // Subtle: we populate all Types fields with an empty Package + // before loading export data so that export data processing + // never has to create a types.Package for an indirect dependency, + // which would then require that such created packages be explicitly + // inserted back into the Import graph as a final step after export data loading. + // The Diamond test exercises this case. + if !lpkg.needtypes { + return + } + if !lpkg.needsrc { + ld.loadFromExportData(lpkg) + return // not a source package, don't get syntax trees + } + + appendError := func(err error) { + // Convert various error types into the one true Error. 
+ var errs []Error + switch err := err.(type) { + case Error: + // from driver + errs = append(errs, err) + + case *os.PathError: + // from parser + errs = append(errs, Error{ + Pos: err.Path + ":1", + Msg: err.Err.Error(), + Kind: ParseError, + }) + + case scanner.ErrorList: + // from parser + for _, err := range err { + errs = append(errs, Error{ + Pos: err.Pos.String(), + Msg: err.Msg, + Kind: ParseError, + }) + } + + case types.Error: + // from type checker + errs = append(errs, Error{ + Pos: err.Fset.Position(err.Pos).String(), + Msg: err.Msg, + Kind: TypeError, + }) + + default: + // unexpected impoverished error from parser? + errs = append(errs, Error{ + Pos: "-", + Msg: err.Error(), + Kind: UnknownError, + }) + + // If you see this error message, please file a bug. + log.Printf("internal error: error %q (%T) without position", err, err) + } + + lpkg.Errors = append(lpkg.Errors, errs...) + } + + files, errs := ld.parseFiles(lpkg.CompiledGoFiles) + for _, err := range errs { + appendError(err) + } + + lpkg.Syntax = files + + lpkg.TypesInfo = &types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + } + lpkg.TypesSizes = ld.sizes + + importer := importerFunc(func(path string) (*types.Package, error) { + if path == "unsafe" { + return types.Unsafe, nil + } + + // The imports map is keyed by import path. + ipkg := lpkg.Imports[path] + if ipkg == nil { + if err := lpkg.importErrors[path]; err != nil { + return nil, err + } + // There was skew between the metadata and the + // import declarations, likely due to an edit + // race, or because the ParseFile feature was + // used to supply alternative file contents. + return nil, fmt.Errorf("no metadata for %s", path) + } + + if ipkg.Types != nil && ipkg.Types.Complete() { + return ipkg.Types, nil + } + log.Fatalf("internal error: nil Pkg importing %q from %q", path, lpkg) + panic("unreachable") + }) + + // type-check + tc := &types.Config{ + Importer: importer, + + // Type-check bodies of functions only in non-initial packages. + // Example: for import graph A->B->C and initial packages {A,C}, + // we can ignore function bodies in B. + IgnoreFuncBodies: (ld.Mode&(NeedDeps|NeedTypesInfo) == 0) && !lpkg.initial, + + Error: appendError, + Sizes: ld.sizes, + } + types.NewChecker(tc, ld.Fset, lpkg.Types, lpkg.TypesInfo).Files(lpkg.Syntax) + + lpkg.importErrors = nil // no longer needed + + // If !Cgo, the type-checker uses FakeImportC mode, so + // it doesn't invoke the importer for import "C", + // nor report an error for the import, + // or for any undefined C.f reference. + // We must detect this explicitly and correctly + // mark the package as IllTyped (by reporting an error). + // TODO(adonovan): if these errors are annoying, + // we could just set IllTyped quietly. + if tc.FakeImportC { + outer: + for _, f := range lpkg.Syntax { + for _, imp := range f.Imports { + if imp.Path.Value == `"C"` { + err := types.Error{Fset: ld.Fset, Pos: imp.Pos(), Msg: `import "C" ignored`} + appendError(err) + break outer + } + } + } + } + + // Record accumulated errors. 
+ illTyped := len(lpkg.Errors) > 0 + if !illTyped { + for _, imp := range lpkg.Imports { + if imp.IllTyped { + illTyped = true + break + } + } + } + lpkg.IllTyped = illTyped +} + +// An importFunc is an implementation of the single-method +// types.Importer interface based on a function value. +type importerFunc func(path string) (*types.Package, error) + +func (f importerFunc) Import(path string) (*types.Package, error) { return f(path) } + +// We use a counting semaphore to limit +// the number of parallel I/O calls per process. +var ioLimit = make(chan bool, 20) + +func (ld *loader) parseFile(filename string) (*ast.File, error) { + ld.parseCacheMu.Lock() + v, ok := ld.parseCache[filename] + if ok { + // cache hit + ld.parseCacheMu.Unlock() + <-v.ready + } else { + // cache miss + v = &parseValue{ready: make(chan struct{})} + ld.parseCache[filename] = v + ld.parseCacheMu.Unlock() + + var src []byte + for f, contents := range ld.Config.Overlay { + if sameFile(f, filename) { + src = contents + } + } + var err error + if src == nil { + ioLimit <- true // wait + src, err = ioutil.ReadFile(filename) + <-ioLimit // signal + } + if err != nil { + v.err = err + } else { + v.f, v.err = ld.ParseFile(ld.Fset, filename, src) + } + + close(v.ready) + } + return v.f, v.err +} + +// parseFiles reads and parses the Go source files and returns the ASTs +// of the ones that could be at least partially parsed, along with a +// list of I/O and parse errors encountered. +// +// Because files are scanned in parallel, the token.Pos +// positions of the resulting ast.Files are not ordered. +// +func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) { + var wg sync.WaitGroup + n := len(filenames) + parsed := make([]*ast.File, n) + errors := make([]error, n) + for i, file := range filenames { + if ld.Config.Context.Err() != nil { + parsed[i] = nil + errors[i] = ld.Config.Context.Err() + continue + } + wg.Add(1) + go func(i int, filename string) { + parsed[i], errors[i] = ld.parseFile(filename) + wg.Done() + }(i, file) + } + wg.Wait() + + // Eliminate nils, preserving order. + var o int + for _, f := range parsed { + if f != nil { + parsed[o] = f + o++ + } + } + parsed = parsed[:o] + + o = 0 + for _, err := range errors { + if err != nil { + errors[o] = err + o++ + } + } + errors = errors[:o] + + return parsed, errors +} + +// sameFile returns true if x and y have the same basename and denote +// the same file. +// +func sameFile(x, y string) bool { + if x == y { + // It could be the case that y doesn't exist. + // For instance, it may be an overlay file that + // hasn't been written to disk. To handle that case + // let x == y through. (We added the exact absolute path + // string to the CompiledGoFiles list, so the unwritten + // overlay case implies x==y.) + return true + } + if strings.EqualFold(filepath.Base(x), filepath.Base(y)) { // (optimisation) + if xi, err := os.Stat(x); err == nil { + if yi, err := os.Stat(y); err == nil { + return os.SameFile(xi, yi) + } + } + } + return false +} + +// loadFromExportData returns type information for the specified +// package, loading it from an export data file on the first request. 
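The ioLimit channel above is a counting semaphore that caps concurrent ReadFile calls at 20. A self-contained sketch of the same idiom, with an arbitrary limit of 3 and placeholder file names:

package main

import (
	"fmt"
	"io/ioutil"
	"sync"
)

// sem is a counting semaphore: at most cap(sem) goroutines read at once.
var sem = make(chan bool, 3)

func main() {
	files := []string{"a.go", "b.go", "c.go", "d.go"} // placeholder names
	var wg sync.WaitGroup
	for _, name := range files {
		wg.Add(1)
		go func(name string) {
			defer wg.Done()
			sem <- true // wait for a slot before touching the disk
			data, err := ioutil.ReadFile(name)
			<-sem // signal that the slot is free again
			fmt.Println(name, len(data), err)
		}(name)
	}
	wg.Wait()
}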
+func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error) { + if lpkg.PkgPath == "" { + log.Fatalf("internal error: Package %s has no PkgPath", lpkg) + } + + // Because gcexportdata.Read has the potential to create or + // modify the types.Package for each node in the transitive + // closure of dependencies of lpkg, all exportdata operations + // must be sequential. (Finer-grained locking would require + // changes to the gcexportdata API.) + // + // The exportMu lock guards the Package.Pkg field and the + // types.Package it points to, for each Package in the graph. + // + // Not all accesses to Package.Pkg need to be protected by exportMu: + // graph ordering ensures that direct dependencies of source + // packages are fully loaded before the importer reads their Pkg field. + ld.exportMu.Lock() + defer ld.exportMu.Unlock() + + if tpkg := lpkg.Types; tpkg != nil && tpkg.Complete() { + return tpkg, nil // cache hit + } + + lpkg.IllTyped = true // fail safe + + if lpkg.ExportFile == "" { + // Errors while building export data will have been printed to stderr. + return nil, fmt.Errorf("no export data file") + } + f, err := os.Open(lpkg.ExportFile) + if err != nil { + return nil, err + } + defer f.Close() + + // Read gc export data. + // + // We don't currently support gccgo export data because all + // underlying workspaces use the gc toolchain. (Even build + // systems that support gccgo don't use it for workspace + // queries.) + r, err := gcexportdata.NewReader(f) + if err != nil { + return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err) + } + + // Build the view. + // + // The gcexportdata machinery has no concept of package ID. + // It identifies packages by their PkgPath, which although not + // globally unique is unique within the scope of one invocation + // of the linker, type-checker, or gcexportdata. + // + // So, we must build a PkgPath-keyed view of the global + // (conceptually ID-keyed) cache of packages and pass it to + // gcexportdata. The view must contain every existing + // package that might possibly be mentioned by the + // current package---its transitive closure. + // + // In loadPackage, we unconditionally create a types.Package for + // each dependency so that export data loading does not + // create new ones. + // + // TODO(adonovan): it would be simpler and more efficient + // if the export data machinery invoked a callback to + // get-or-create a package instead of a map. + // + view := make(map[string]*types.Package) // view seen by gcexportdata + seen := make(map[*loaderPackage]bool) // all visited packages + var visit func(pkgs map[string]*Package) + visit = func(pkgs map[string]*Package) { + for _, p := range pkgs { + lpkg := ld.pkgs[p.ID] + if !seen[lpkg] { + seen[lpkg] = true + view[lpkg.PkgPath] = lpkg.Types + visit(lpkg.Imports) + } + } + } + visit(lpkg.Imports) + + viewLen := len(view) + 1 // adding the self package + // Parse the export data. + // (May modify incomplete packages in view but not create new ones.) 
+ tpkg, err := gcexportdata.Read(r, ld.Fset, view, lpkg.PkgPath) + if err != nil { + return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err) + } + if viewLen != len(view) { + log.Fatalf("Unexpected package creation during export data loading") + } + + lpkg.Types = tpkg + lpkg.IllTyped = false + + return tpkg, nil +} + +func usesExportData(cfg *Config) bool { + return cfg.Mode&NeedExportsFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedTypesInfo == 0 +} diff --git a/vendor/golang.org/x/tools/go/packages/visit.go b/vendor/golang.org/x/tools/go/packages/visit.go new file mode 100644 index 000000000..b13cb081f --- /dev/null +++ b/vendor/golang.org/x/tools/go/packages/visit.go @@ -0,0 +1,55 @@ +package packages + +import ( + "fmt" + "os" + "sort" +) + +// Visit visits all the packages in the import graph whose roots are +// pkgs, calling the optional pre function the first time each package +// is encountered (preorder), and the optional post function after a +// package's dependencies have been visited (postorder). +// The boolean result of pre(pkg) determines whether +// the imports of package pkg are visited. +func Visit(pkgs []*Package, pre func(*Package) bool, post func(*Package)) { + seen := make(map[*Package]bool) + var visit func(*Package) + visit = func(pkg *Package) { + if !seen[pkg] { + seen[pkg] = true + + if pre == nil || pre(pkg) { + paths := make([]string, 0, len(pkg.Imports)) + for path := range pkg.Imports { + paths = append(paths, path) + } + sort.Strings(paths) // Imports is a map, this makes visit stable + for _, path := range paths { + visit(pkg.Imports[path]) + } + } + + if post != nil { + post(pkg) + } + } + } + for _, pkg := range pkgs { + visit(pkg) + } +} + +// PrintErrors prints to os.Stderr the accumulated errors of all +// packages in the import graph rooted at pkgs, dependencies first. +// PrintErrors returns the number of errors printed. +func PrintErrors(pkgs []*Package) int { + var n int + Visit(pkgs, nil, func(pkg *Package) { + for _, err := range pkg.Errors { + fmt.Fprintln(os.Stderr, err) + n++ + } + }) + return n +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go new file mode 100644 index 000000000..7219c8e9f --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go @@ -0,0 +1,196 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package fastwalk provides a faster version of filepath.Walk for file system +// scanning tools. +package fastwalk + +import ( + "errors" + "os" + "path/filepath" + "runtime" + "sync" +) + +// TraverseLink is used as a return value from WalkFuncs to indicate that the +// symlink named in the call may be traversed. +var TraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory") + +// SkipFiles is a used as a return value from WalkFuncs to indicate that the +// callback should not be called for any other files in the current directory. +// Child directories will still be traversed. +var SkipFiles = errors.New("fastwalk: skip remaining files in directory") + +// Walk is a faster implementation of filepath.Walk. +// +// filepath.Walk's design necessarily calls os.Lstat on each file, +// even if the caller needs less info. +// Many tools need only the type of each file. 
+// On some platforms, this information is provided directly by the readdir +// system call, avoiding the need to stat each file individually. +// fastwalk_unix.go contains a fork of the syscall routines. +// +// See golang.org/issue/16399 +// +// Walk walks the file tree rooted at root, calling walkFn for +// each file or directory in the tree, including root. +// +// If fastWalk returns filepath.SkipDir, the directory is skipped. +// +// Unlike filepath.Walk: +// * file stat calls must be done by the user. +// The only provided metadata is the file type, which does not include +// any permission bits. +// * multiple goroutines stat the filesystem concurrently. The provided +// walkFn must be safe for concurrent use. +// * fastWalk can follow symlinks if walkFn returns the TraverseLink +// sentinel error. It is the walkFn's responsibility to prevent +// fastWalk from going into symlink cycles. +func Walk(root string, walkFn func(path string, typ os.FileMode) error) error { + // TODO(bradfitz): make numWorkers configurable? We used a + // minimum of 4 to give the kernel more info about multiple + // things we want, in hopes its I/O scheduling can take + // advantage of that. Hopefully most are in cache. Maybe 4 is + // even too low of a minimum. Profile more. + numWorkers := 4 + if n := runtime.NumCPU(); n > numWorkers { + numWorkers = n + } + + // Make sure to wait for all workers to finish, otherwise + // walkFn could still be called after returning. This Wait call + // runs after close(e.donec) below. + var wg sync.WaitGroup + defer wg.Wait() + + w := &walker{ + fn: walkFn, + enqueuec: make(chan walkItem, numWorkers), // buffered for performance + workc: make(chan walkItem, numWorkers), // buffered for performance + donec: make(chan struct{}), + + // buffered for correctness & not leaking goroutines: + resc: make(chan error, numWorkers), + } + defer close(w.donec) + + for i := 0; i < numWorkers; i++ { + wg.Add(1) + go w.doWork(&wg) + } + todo := []walkItem{{dir: root}} + out := 0 + for { + workc := w.workc + var workItem walkItem + if len(todo) == 0 { + workc = nil + } else { + workItem = todo[len(todo)-1] + } + select { + case workc <- workItem: + todo = todo[:len(todo)-1] + out++ + case it := <-w.enqueuec: + todo = append(todo, it) + case err := <-w.resc: + out-- + if err != nil { + return err + } + if out == 0 && len(todo) == 0 { + // It's safe to quit here, as long as the buffered + // enqueue channel isn't also readable, which might + // happen if the worker sends both another unit of + // work and its result before the other select was + // scheduled and both w.resc and w.enqueuec were + // readable. + select { + case it := <-w.enqueuec: + todo = append(todo, it) + default: + return nil + } + } + } + } +} + +// doWork reads directories as instructed (via workc) and runs the +// user's callback function. 
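Because the walkFn documented above runs on multiple goroutines and receives only the entry's type bits, callers must do their own locking and any stat calls they need. A hedged sketch of that contract (fastwalk is an internal package, so this only compiles from within x/tools, and the root path is a placeholder):

package main

import (
	"fmt"
	"os"
	"strings"
	"sync"

	"golang.org/x/tools/internal/fastwalk" // internal: importable only from inside x/tools
)

func main() {
	var mu sync.Mutex // walkFn is called concurrently, so guard shared state
	var goFiles []string
	err := fastwalk.Walk("/some/root", func(path string, typ os.FileMode) error {
		if typ == os.ModeSymlink {
			return nil // this sketch does not follow symlinks
		}
		if typ.IsRegular() && strings.HasSuffix(path, ".go") {
			mu.Lock()
			goFiles = append(goFiles, path)
			mu.Unlock()
		}
		return nil
	})
	fmt.Println(len(goFiles), "Go files", err)
}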
+func (w *walker) doWork(wg *sync.WaitGroup) { + defer wg.Done() + for { + select { + case <-w.donec: + return + case it := <-w.workc: + select { + case <-w.donec: + return + case w.resc <- w.walk(it.dir, !it.callbackDone): + } + } + } +} + +type walker struct { + fn func(path string, typ os.FileMode) error + + donec chan struct{} // closed on fastWalk's return + workc chan walkItem // to workers + enqueuec chan walkItem // from workers + resc chan error // from workers +} + +type walkItem struct { + dir string + callbackDone bool // callback already called; don't do it again +} + +func (w *walker) enqueue(it walkItem) { + select { + case w.enqueuec <- it: + case <-w.donec: + } +} + +func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error { + joined := dirName + string(os.PathSeparator) + baseName + if typ == os.ModeDir { + w.enqueue(walkItem{dir: joined}) + return nil + } + + err := w.fn(joined, typ) + if typ == os.ModeSymlink { + if err == TraverseLink { + // Set callbackDone so we don't call it twice for both the + // symlink-as-symlink and the symlink-as-directory later: + w.enqueue(walkItem{dir: joined, callbackDone: true}) + return nil + } + if err == filepath.SkipDir { + // Permit SkipDir on symlinks too. + return nil + } + } + return err +} + +func (w *walker) walk(root string, runUserCallback bool) error { + if runUserCallback { + err := w.fn(root, os.ModeDir) + if err == filepath.SkipDir { + return nil + } + if err != nil { + return err + } + } + + return readDir(root, w.onDirEnt) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go new file mode 100644 index 000000000..ccffec5ad --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_fileno.go @@ -0,0 +1,13 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build freebsd openbsd netbsd + +package fastwalk + +import "syscall" + +func direntInode(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Fileno) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go new file mode 100644 index 000000000..ab7fbc0a9 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_ino.go @@ -0,0 +1,14 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux darwin +// +build !appengine + +package fastwalk + +import "syscall" + +func direntInode(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Ino) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go new file mode 100644 index 000000000..a3b26a7ba --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_bsd.go @@ -0,0 +1,13 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +// +build darwin freebsd openbsd netbsd + +package fastwalk + +import "syscall" + +func direntNamlen(dirent *syscall.Dirent) uint64 { + return uint64(dirent.Namlen) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go new file mode 100644 index 000000000..e880d358b --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_dirent_namlen_linux.go @@ -0,0 +1,29 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux +// +build !appengine + +package fastwalk + +import ( + "bytes" + "syscall" + "unsafe" +) + +func direntNamlen(dirent *syscall.Dirent) uint64 { + const fixedHdr = uint16(unsafe.Offsetof(syscall.Dirent{}.Name)) + nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0])) + const nameBufLen = uint16(len(nameBuf)) + limit := dirent.Reclen - fixedHdr + if limit > nameBufLen { + limit = nameBufLen + } + nameLen := bytes.IndexByte(nameBuf[:limit], 0) + if nameLen < 0 { + panic("failed to find terminating 0 byte in dirent") + } + return uint64(nameLen) +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go new file mode 100644 index 000000000..a906b8759 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go @@ -0,0 +1,37 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build appengine !linux,!darwin,!freebsd,!openbsd,!netbsd + +package fastwalk + +import ( + "io/ioutil" + "os" +) + +// readDir calls fn for each directory entry in dirName. +// It does not descend into directories or follow symlinks. +// If fn returns a non-nil error, readDir returns with that error +// immediately. +func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error { + fis, err := ioutil.ReadDir(dirName) + if err != nil { + return err + } + skipFiles := false + for _, fi := range fis { + if fi.Mode().IsRegular() && skipFiles { + continue + } + if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil { + if err == SkipFiles { + skipFiles = true + continue + } + return err + } + } + return nil +} diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go new file mode 100644 index 000000000..3369b1a0b --- /dev/null +++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go @@ -0,0 +1,127 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// +build linux darwin freebsd openbsd netbsd +// +build !appengine + +package fastwalk + +import ( + "fmt" + "os" + "syscall" + "unsafe" +) + +const blockSize = 8 << 10 + +// unknownFileMode is a sentinel (and bogus) os.FileMode +// value used to represent a syscall.DT_UNKNOWN Dirent.Type. 
+const unknownFileMode os.FileMode = os.ModeNamedPipe | os.ModeSocket | os.ModeDevice + +func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) error) error { + fd, err := syscall.Open(dirName, 0, 0) + if err != nil { + return &os.PathError{Op: "open", Path: dirName, Err: err} + } + defer syscall.Close(fd) + + // The buffer must be at least a block long. + buf := make([]byte, blockSize) // stack-allocated; doesn't escape + bufp := 0 // starting read position in buf + nbuf := 0 // end valid data in buf + skipFiles := false + for { + if bufp >= nbuf { + bufp = 0 + nbuf, err = syscall.ReadDirent(fd, buf) + if err != nil { + return os.NewSyscallError("readdirent", err) + } + if nbuf <= 0 { + return nil + } + } + consumed, name, typ := parseDirEnt(buf[bufp:nbuf]) + bufp += consumed + if name == "" || name == "." || name == ".." { + continue + } + // Fallback for filesystems (like old XFS) that don't + // support Dirent.Type and have DT_UNKNOWN (0) there + // instead. + if typ == unknownFileMode { + fi, err := os.Lstat(dirName + "/" + name) + if err != nil { + // It got deleted in the meantime. + if os.IsNotExist(err) { + continue + } + return err + } + typ = fi.Mode() & os.ModeType + } + if skipFiles && typ.IsRegular() { + continue + } + if err := fn(dirName, name, typ); err != nil { + if err == SkipFiles { + skipFiles = true + continue + } + return err + } + } +} + +func parseDirEnt(buf []byte) (consumed int, name string, typ os.FileMode) { + // golang.org/issue/15653 + dirent := (*syscall.Dirent)(unsafe.Pointer(&buf[0])) + if v := unsafe.Offsetof(dirent.Reclen) + unsafe.Sizeof(dirent.Reclen); uintptr(len(buf)) < v { + panic(fmt.Sprintf("buf size of %d smaller than dirent header size %d", len(buf), v)) + } + if len(buf) < int(dirent.Reclen) { + panic(fmt.Sprintf("buf size %d < record length %d", len(buf), dirent.Reclen)) + } + consumed = int(dirent.Reclen) + if direntInode(dirent) == 0 { // File absent in directory. + return + } + switch dirent.Type { + case syscall.DT_REG: + typ = 0 + case syscall.DT_DIR: + typ = os.ModeDir + case syscall.DT_LNK: + typ = os.ModeSymlink + case syscall.DT_BLK: + typ = os.ModeDevice + case syscall.DT_FIFO: + typ = os.ModeNamedPipe + case syscall.DT_SOCK: + typ = os.ModeSocket + case syscall.DT_UNKNOWN: + typ = unknownFileMode + default: + // Skip weird things. + // It's probably a DT_WHT (http://lwn.net/Articles/325369/) + // or something. Revisit if/when this package is moved outside + // of goimports. goimports only cares about regular files, + // symlinks, and directories. + return + } + + nameBuf := (*[unsafe.Sizeof(dirent.Name)]byte)(unsafe.Pointer(&dirent.Name[0])) + nameLen := direntNamlen(dirent) + + // Special cases for common things: + if nameLen == 1 && nameBuf[0] == '.' { + name = "." + } else if nameLen == 2 && nameBuf[0] == '.' && nameBuf[1] == '.' { + name = ".." + } else { + name = string(nameBuf[:nameLen]) + } + return +} diff --git a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go new file mode 100644 index 000000000..04bb96a36 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go @@ -0,0 +1,250 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package gopathwalk is like filepath.Walk but specialized for finding Go +// packages, particularly in $GOPATH and $GOROOT. 
+package gopathwalk + +import ( + "bufio" + "bytes" + "fmt" + "go/build" + "io/ioutil" + "log" + "os" + "path/filepath" + "strings" + + "golang.org/x/tools/internal/fastwalk" +) + +// Options controls the behavior of a Walk call. +type Options struct { + Debug bool // Enable debug logging + ModulesEnabled bool // Search module caches. Also disables legacy goimports ignore rules. +} + +// RootType indicates the type of a Root. +type RootType int + +const ( + RootUnknown RootType = iota + RootGOROOT + RootGOPATH + RootCurrentModule + RootModuleCache + RootOther +) + +// A Root is a starting point for a Walk. +type Root struct { + Path string + Type RootType +} + +// SrcDirsRoots returns the roots from build.Default.SrcDirs(). Not modules-compatible. +func SrcDirsRoots(ctx *build.Context) []Root { + var roots []Root + roots = append(roots, Root{filepath.Join(ctx.GOROOT, "src"), RootGOROOT}) + for _, p := range filepath.SplitList(ctx.GOPATH) { + roots = append(roots, Root{filepath.Join(p, "src"), RootGOPATH}) + } + return roots +} + +// Walk walks Go source directories ($GOROOT, $GOPATH, etc) to find packages. +// For each package found, add will be called (concurrently) with the absolute +// paths of the containing source directory and the package directory. +// add will be called concurrently. +func Walk(roots []Root, add func(root Root, dir string), opts Options) { + for _, root := range roots { + walkDir(root, add, opts) + } +} + +func walkDir(root Root, add func(Root, string), opts Options) { + if _, err := os.Stat(root.Path); os.IsNotExist(err) { + if opts.Debug { + log.Printf("skipping nonexistant directory: %v", root.Path) + } + return + } + if opts.Debug { + log.Printf("scanning %s", root.Path) + } + w := &walker{ + root: root, + add: add, + opts: opts, + } + w.init() + if err := fastwalk.Walk(root.Path, w.walk); err != nil { + log.Printf("gopathwalk: scanning directory %v: %v", root.Path, err) + } + + if opts.Debug { + log.Printf("scanned %s", root.Path) + } +} + +// walker is the callback for fastwalk.Walk. +type walker struct { + root Root // The source directory to scan. + add func(Root, string) // The callback that will be invoked for every possible Go package dir. + opts Options // Options passed to Walk by the user. + + ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files. +} + +// init initializes the walker based on its Options. +func (w *walker) init() { + var ignoredPaths []string + if w.root.Type == RootModuleCache { + ignoredPaths = []string{"cache"} + } + if !w.opts.ModulesEnabled && w.root.Type == RootGOPATH { + ignoredPaths = w.getIgnoredDirs(w.root.Path) + ignoredPaths = append(ignoredPaths, "v", "mod") + } + + for _, p := range ignoredPaths { + full := filepath.Join(w.root.Path, p) + if fi, err := os.Stat(full); err == nil { + w.ignoredDirs = append(w.ignoredDirs, fi) + if w.opts.Debug { + log.Printf("Directory added to ignore list: %s", full) + } + } else if w.opts.Debug { + log.Printf("Error statting ignored directory: %v", err) + } + } +} + +// getIgnoredDirs reads an optional config file at /.goimportsignore +// of relative directories to ignore when scanning for go files. +// The provided path is one of the $GOPATH entries with "src" appended. 
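A sketch of how a caller inside x/tools would drive this walker, using only what is declared above (gopathwalk is internal, so this is illustrative rather than copy-pasteable): SrcDirsRoots derives the roots from a build.Context, and Walk invokes the add callback concurrently for every candidate package directory.

package main

import (
	"fmt"
	"go/build"
	"sync"

	"golang.org/x/tools/internal/gopathwalk" // internal: importable only from inside x/tools
)

func main() {
	roots := gopathwalk.SrcDirsRoots(&build.Default) // $GOROOT/src plus each $GOPATH/src
	var mu sync.Mutex
	dirs := map[string]gopathwalk.RootType{}
	gopathwalk.Walk(roots, func(root gopathwalk.Root, dir string) {
		mu.Lock() // the add callback runs concurrently
		dirs[dir] = root.Type
		mu.Unlock()
	}, gopathwalk.Options{Debug: false, ModulesEnabled: false})
	fmt.Println("found", len(dirs), "candidate package directories")
}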
+func (w *walker) getIgnoredDirs(path string) []string { + file := filepath.Join(path, ".goimportsignore") + slurp, err := ioutil.ReadFile(file) + if w.opts.Debug { + if err != nil { + log.Print(err) + } else { + log.Printf("Read %s", file) + } + } + if err != nil { + return nil + } + + var ignoredDirs []string + bs := bufio.NewScanner(bytes.NewReader(slurp)) + for bs.Scan() { + line := strings.TrimSpace(bs.Text()) + if line == "" || strings.HasPrefix(line, "#") { + continue + } + ignoredDirs = append(ignoredDirs, line) + } + return ignoredDirs +} + +func (w *walker) shouldSkipDir(fi os.FileInfo) bool { + for _, ignoredDir := range w.ignoredDirs { + if os.SameFile(fi, ignoredDir) { + return true + } + } + return false +} + +func (w *walker) walk(path string, typ os.FileMode) error { + dir := filepath.Dir(path) + if typ.IsRegular() { + if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) { + // Doesn't make sense to have regular files + // directly in your $GOPATH/src or $GOROOT/src. + return fastwalk.SkipFiles + } + if !strings.HasSuffix(path, ".go") { + return nil + } + + w.add(w.root, dir) + return fastwalk.SkipFiles + } + if typ == os.ModeDir { + base := filepath.Base(path) + if base == "" || base[0] == '.' || base[0] == '_' || + base == "testdata" || + (w.root.Type == RootGOROOT && w.opts.ModulesEnabled && base == "vendor") || + (!w.opts.ModulesEnabled && base == "node_modules") { + return filepath.SkipDir + } + fi, err := os.Lstat(path) + if err == nil && w.shouldSkipDir(fi) { + return filepath.SkipDir + } + return nil + } + if typ == os.ModeSymlink { + base := filepath.Base(path) + if strings.HasPrefix(base, ".#") { + // Emacs noise. + return nil + } + fi, err := os.Lstat(path) + if err != nil { + // Just ignore it. + return nil + } + if w.shouldTraverse(dir, fi) { + return fastwalk.TraverseLink + } + } + return nil +} + +// shouldTraverse reports whether the symlink fi, found in dir, +// should be followed. It makes sure symlinks were never visited +// before to avoid symlink loops. +func (w *walker) shouldTraverse(dir string, fi os.FileInfo) bool { + path := filepath.Join(dir, fi.Name()) + target, err := filepath.EvalSymlinks(path) + if err != nil { + return false + } + ts, err := os.Stat(target) + if err != nil { + fmt.Fprintln(os.Stderr, err) + return false + } + if !ts.IsDir() { + return false + } + if w.shouldSkipDir(ts) { + return false + } + // Check for symlink loops by statting each directory component + // and seeing if any are the same file as ts. + for { + parent := filepath.Dir(path) + if parent == path { + // Made it to the root without seeing a cycle. + // Use this symlink. + return true + } + parentInfo, err := os.Stat(parent) + if err != nil { + return false + } + if os.SameFile(ts, parentInfo) { + // Cycle. Don't traverse. + return false + } + path = parent + } + +} diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go new file mode 100644 index 000000000..d9ba3b786 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/fix.go @@ -0,0 +1,1259 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package imports + +import ( + "bytes" + "context" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io/ioutil" + "log" + "os" + "os/exec" + "path" + "path/filepath" + "sort" + "strconv" + "strings" + "sync" + "time" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/packages" + "golang.org/x/tools/internal/gopathwalk" +) + +// importToGroup is a list of functions which map from an import path to +// a group number. +var importToGroup = []func(env *ProcessEnv, importPath string) (num int, ok bool){ + func(env *ProcessEnv, importPath string) (num int, ok bool) { + if env.LocalPrefix == "" { + return + } + for _, p := range strings.Split(env.LocalPrefix, ",") { + if strings.HasPrefix(importPath, p) || strings.TrimSuffix(p, "/") == importPath { + return 3, true + } + } + return + }, + func(_ *ProcessEnv, importPath string) (num int, ok bool) { + if strings.HasPrefix(importPath, "appengine") { + return 2, true + } + return + }, + func(_ *ProcessEnv, importPath string) (num int, ok bool) { + if strings.Contains(importPath, ".") { + return 1, true + } + return + }, +} + +func importGroup(env *ProcessEnv, importPath string) int { + for _, fn := range importToGroup { + if n, ok := fn(env, importPath); ok { + return n + } + } + return 0 +} + +// An importInfo represents a single import statement. +type importInfo struct { + importPath string // import path, e.g. "crypto/rand". + name string // import name, e.g. "crand", or "" if none. +} + +// A packageInfo represents what's known about a package. +type packageInfo struct { + name string // real package name, if known. + exports map[string]bool // known exports. +} + +// parseOtherFiles parses all the Go files in srcDir except filename, including +// test files if filename looks like a test. +func parseOtherFiles(fset *token.FileSet, srcDir, filename string) []*ast.File { + // This could use go/packages but it doesn't buy much, and it fails + // with https://golang.org/issue/26296 in LoadFiles mode in some cases. + considerTests := strings.HasSuffix(filename, "_test.go") + + fileBase := filepath.Base(filename) + packageFileInfos, err := ioutil.ReadDir(srcDir) + if err != nil { + return nil + } + + var files []*ast.File + for _, fi := range packageFileInfos { + if fi.Name() == fileBase || !strings.HasSuffix(fi.Name(), ".go") { + continue + } + if !considerTests && strings.HasSuffix(fi.Name(), "_test.go") { + continue + } + + f, err := parser.ParseFile(fset, filepath.Join(srcDir, fi.Name()), nil, 0) + if err != nil { + continue + } + + files = append(files, f) + } + + return files +} + +// addGlobals puts the names of package vars into the provided map. +func addGlobals(f *ast.File, globals map[string]bool) { + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok { + continue + } + + for _, spec := range genDecl.Specs { + valueSpec, ok := spec.(*ast.ValueSpec) + if !ok { + continue + } + globals[valueSpec.Names[0].Name] = true + } + } +} + +// collectReferences builds a map of selector expressions, from +// left hand side (X) to a set of right hand sides (Sel). +func collectReferences(f *ast.File) references { + refs := references{} + + var visitor visitFn + visitor = func(node ast.Node) ast.Visitor { + if node == nil { + return visitor + } + switch v := node.(type) { + case *ast.SelectorExpr: + xident, ok := v.X.(*ast.Ident) + if !ok { + break + } + if xident.Obj != nil { + // If the parser can resolve it, it's not a package ref. 
+ break + } + if !ast.IsExported(v.Sel.Name) { + // Whatever this is, it's not exported from a package. + break + } + pkgName := xident.Name + r := refs[pkgName] + if r == nil { + r = make(map[string]bool) + refs[pkgName] = r + } + r[v.Sel.Name] = true + } + return visitor + } + ast.Walk(visitor, f) + return refs +} + +// collectImports returns all the imports in f, keyed by their package name as +// determined by pathToName. Unnamed imports (., _) and "C" are ignored. +func collectImports(f *ast.File) []*importInfo { + var imports []*importInfo + for _, imp := range f.Imports { + var name string + if imp.Name != nil { + name = imp.Name.Name + } + if imp.Path.Value == `"C"` || name == "_" || name == "." { + continue + } + path := strings.Trim(imp.Path.Value, `"`) + imports = append(imports, &importInfo{ + name: name, + importPath: path, + }) + } + return imports +} + +// findMissingImport searches pass's candidates for an import that provides +// pkg, containing all of syms. +func (p *pass) findMissingImport(pkg string, syms map[string]bool) *importInfo { + for _, candidate := range p.candidates { + pkgInfo, ok := p.knownPackages[candidate.importPath] + if !ok { + continue + } + if p.importIdentifier(candidate) != pkg { + continue + } + + allFound := true + for right := range syms { + if !pkgInfo.exports[right] { + allFound = false + break + } + } + + if allFound { + return candidate + } + } + return nil +} + +// references is set of references found in a Go file. The first map key is the +// left hand side of a selector expression, the second key is the right hand +// side, and the value should always be true. +type references map[string]map[string]bool + +// A pass contains all the inputs and state necessary to fix a file's imports. +// It can be modified in some ways during use; see comments below. +type pass struct { + // Inputs. These must be set before a call to load, and not modified after. + fset *token.FileSet // fset used to parse f and its siblings. + f *ast.File // the file being fixed. + srcDir string // the directory containing f. + env *ProcessEnv // the environment to use for go commands, etc. + loadRealPackageNames bool // if true, load package names from disk rather than guessing them. + otherFiles []*ast.File // sibling files. + + // Intermediate state, generated by load. + existingImports map[string]*importInfo + allRefs references + missingRefs references + + // Inputs to fix. These can be augmented between successive fix calls. + lastTry bool // indicates that this is the last call and fix should clean up as best it can. + candidates []*importInfo // candidate imports in priority order. + knownPackages map[string]*packageInfo // information about all known packages. +} + +// loadPackageNames saves the package names for everything referenced by imports. +func (p *pass) loadPackageNames(imports []*importInfo) error { + var unknown []string + for _, imp := range imports { + if _, ok := p.knownPackages[imp.importPath]; ok { + continue + } + unknown = append(unknown, imp.importPath) + } + + names, err := p.env.getResolver().loadPackageNames(unknown, p.srcDir) + if err != nil { + return err + } + + for path, name := range names { + p.knownPackages[path] = &packageInfo{ + name: name, + exports: map[string]bool{}, + } + } + return nil +} + +// importIdentifier returns the identifier that imp will introduce. It will +// guess if the package name has not been loaded, e.g. because the source +// is not available. 
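To make the references shape defined above concrete, a small hypothetical example: for a file whose body mentions fmt.Println, fmt.Errorf, and os.Getenv, collectReferences yields a map keyed by the selector's left-hand side, with the set of exported right-hand sides as values. The type below mirrors the unexported references type so the sketch stands alone.

package main

import "fmt"

// references mirrors the unexported type above:
// selector left-hand side -> set of exported names used on it.
type references map[string]map[string]bool

func main() {
	// Roughly what collectReferences would produce for a file that
	// uses fmt.Println, fmt.Errorf, and os.Getenv (hypothetical values).
	refs := references{
		"fmt": {"Println": true, "Errorf": true},
		"os":  {"Getenv": true},
	}
	for left, rights := range refs {
		for right := range rights {
			fmt.Println(left + "." + right)
		}
	}
}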
+func (p *pass) importIdentifier(imp *importInfo) string { + if imp.name != "" { + return imp.name + } + known := p.knownPackages[imp.importPath] + if known != nil && known.name != "" { + return known.name + } + return importPathToAssumedName(imp.importPath) +} + +// load reads in everything necessary to run a pass, and reports whether the +// file already has all the imports it needs. It fills in p.missingRefs with the +// file's missing symbols, if any, or removes unused imports if not. +func (p *pass) load() bool { + p.knownPackages = map[string]*packageInfo{} + p.missingRefs = references{} + p.existingImports = map[string]*importInfo{} + + // Load basic information about the file in question. + p.allRefs = collectReferences(p.f) + + // Load stuff from other files in the same package: + // global variables so we know they don't need resolving, and imports + // that we might want to mimic. + globals := map[string]bool{} + for _, otherFile := range p.otherFiles { + // Don't load globals from files that are in the same directory + // but a different package. Using them to suggest imports is OK. + if p.f.Name.Name == otherFile.Name.Name { + addGlobals(otherFile, globals) + } + p.candidates = append(p.candidates, collectImports(otherFile)...) + } + + // Resolve all the import paths we've seen to package names, and store + // f's imports by the identifier they introduce. + imports := collectImports(p.f) + if p.loadRealPackageNames { + err := p.loadPackageNames(append(imports, p.candidates...)) + if err != nil { + if p.env.Debug { + log.Printf("loading package names: %v", err) + } + return false + } + } + for _, imp := range imports { + p.existingImports[p.importIdentifier(imp)] = imp + } + + // Find missing references. + for left, rights := range p.allRefs { + if globals[left] { + continue + } + _, ok := p.existingImports[left] + if !ok { + p.missingRefs[left] = rights + continue + } + } + if len(p.missingRefs) != 0 { + return false + } + + return p.fix() +} + +// fix attempts to satisfy missing imports using p.candidates. If it finds +// everything, or if p.lastTry is true, it adds the imports it found, +// removes anything unused, and returns true. +func (p *pass) fix() bool { + // Find missing imports. + var selected []*importInfo + for left, rights := range p.missingRefs { + if imp := p.findMissingImport(left, rights); imp != nil { + selected = append(selected, imp) + } + } + + if !p.lastTry && len(selected) != len(p.missingRefs) { + return false + } + + // Found everything, or giving up. Add the new imports and remove any unused. + for _, imp := range p.existingImports { + // We deliberately ignore globals here, because we can't be sure + // they're in the same package. People do things like put multiple + // main packages in the same directory, and we don't want to + // remove imports if they happen to have the same name as a var in + // a different package. 
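+ // An existing import is deleted only when nothing in the file references the identifier it introduces.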
+ if _, ok := p.allRefs[p.importIdentifier(imp)]; !ok { + astutil.DeleteNamedImport(p.fset, p.f, imp.name, imp.importPath) + } + } + + for _, imp := range selected { + astutil.AddNamedImport(p.fset, p.f, imp.name, imp.importPath) + } + + if p.loadRealPackageNames { + for _, imp := range p.f.Imports { + if imp.Name != nil { + continue + } + path := strings.Trim(imp.Path.Value, `""`) + ident := p.importIdentifier(&importInfo{importPath: path}) + if ident != importPathToAssumedName(path) { + imp.Name = &ast.Ident{Name: ident, NamePos: imp.Pos()} + } + } + } + + return true +} + +// assumeSiblingImportsValid assumes that siblings' use of packages is valid, +// adding the exports they use. +func (p *pass) assumeSiblingImportsValid() { + for _, f := range p.otherFiles { + refs := collectReferences(f) + imports := collectImports(f) + importsByName := map[string]*importInfo{} + for _, imp := range imports { + importsByName[p.importIdentifier(imp)] = imp + } + for left, rights := range refs { + if imp, ok := importsByName[left]; ok { + if _, ok := stdlib[imp.importPath]; ok { + // We have the stdlib in memory; no need to guess. + rights = stdlib[imp.importPath] + } + p.addCandidate(imp, &packageInfo{ + // no name; we already know it. + exports: rights, + }) + } + } + } +} + +// addCandidate adds a candidate import to p, and merges in the information +// in pkg. +func (p *pass) addCandidate(imp *importInfo, pkg *packageInfo) { + p.candidates = append(p.candidates, imp) + if existing, ok := p.knownPackages[imp.importPath]; ok { + if existing.name == "" { + existing.name = pkg.name + } + for export := range pkg.exports { + existing.exports[export] = true + } + } else { + p.knownPackages[imp.importPath] = pkg + } +} + +// fixImports adds and removes imports from f so that all its references are +// satisfied and there are no unused imports. +// +// This is declared as a variable rather than a function so goimports can +// easily be extended by adding a file with an init function. +var fixImports = fixImportsDefault + +func fixImportsDefault(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv) error { + abs, err := filepath.Abs(filename) + if err != nil { + return err + } + srcDir := filepath.Dir(abs) + if env.Debug { + log.Printf("fixImports(filename=%q), abs=%q, srcDir=%q ...", filename, abs, srcDir) + } + + // First pass: looking only at f, and using the naive algorithm to + // derive package names from import paths, see if the file is already + // complete. We can't add any imports yet, because we don't know + // if missing references are actually package vars. + p := &pass{fset: fset, f: f, srcDir: srcDir} + if p.load() { + return nil + } + + otherFiles := parseOtherFiles(fset, srcDir, filename) + + // Second pass: add information from other files in the same package, + // like their package vars and imports. + p.otherFiles = otherFiles + if p.load() { + return nil + } + + // Now we can try adding imports from the stdlib. + p.assumeSiblingImportsValid() + addStdlibCandidates(p, p.missingRefs) + if p.fix() { + return nil + } + + // Third pass: get real package names where we had previously used + // the naive algorithm. This is the first step that will use the + // environment, so we provide it here for the first time. 
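+ // The same fset, file and sibling files are reused; the only change is that package names now come from the resolver instead of being guessed from import paths.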
+ p = &pass{fset: fset, f: f, srcDir: srcDir, env: env} + p.loadRealPackageNames = true + p.otherFiles = otherFiles + if p.load() { + return nil + } + + addStdlibCandidates(p, p.missingRefs) + p.assumeSiblingImportsValid() + if p.fix() { + return nil + } + + // Go look for candidates in $GOPATH, etc. We don't necessarily load + // the real exports of sibling imports, so keep assuming their contents. + if err := addExternalCandidates(p, p.missingRefs, filename); err != nil { + return err + } + + p.lastTry = true + p.fix() + return nil +} + +// ProcessEnv contains environment variables and settings that affect the use of +// the go command, the go/build package, etc. +type ProcessEnv struct { + LocalPrefix string + Debug bool + + // If non-empty, these will be used instead of the + // process-wide values. + GOPATH, GOROOT, GO111MODULE, GOPROXY, GOFLAGS, GOSUMDB string + WorkingDir string + + // If true, use go/packages regardless of the environment. + ForceGoPackages bool + + resolver resolver +} + +func (e *ProcessEnv) env() []string { + env := os.Environ() + add := func(k, v string) { + if v != "" { + env = append(env, k+"="+v) + } + } + add("GOPATH", e.GOPATH) + add("GOROOT", e.GOROOT) + add("GO111MODULE", e.GO111MODULE) + add("GOPROXY", e.GOPROXY) + add("GOFLAGS", e.GOFLAGS) + add("GOSUMDB", e.GOSUMDB) + if e.WorkingDir != "" { + add("PWD", e.WorkingDir) + } + return env +} + +func (e *ProcessEnv) getResolver() resolver { + if e.resolver != nil { + return e.resolver + } + if e.ForceGoPackages { + e.resolver = &goPackagesResolver{env: e} + return e.resolver + } + + out, err := e.invokeGo("env", "GOMOD") + if err != nil || len(bytes.TrimSpace(out.Bytes())) == 0 { + e.resolver = &gopathResolver{env: e} + return e.resolver + } + e.resolver = &moduleResolver{env: e} + return e.resolver +} + +func (e *ProcessEnv) newPackagesConfig(mode packages.LoadMode) *packages.Config { + return &packages.Config{ + Mode: mode, + Dir: e.WorkingDir, + Env: e.env(), + } +} + +func (e *ProcessEnv) buildContext() *build.Context { + ctx := build.Default + ctx.GOROOT = e.GOROOT + ctx.GOPATH = e.GOPATH + return &ctx +} + +func (e *ProcessEnv) invokeGo(args ...string) (*bytes.Buffer, error) { + cmd := exec.Command("go", args...) + stdout := &bytes.Buffer{} + stderr := &bytes.Buffer{} + cmd.Stdout = stdout + cmd.Stderr = stderr + cmd.Env = e.env() + cmd.Dir = e.WorkingDir + + if e.Debug { + defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now()) + } + if err := cmd.Run(); err != nil { + return nil, fmt.Errorf("running go: %v (stderr:\n%s)", err, stderr) + } + return stdout, nil +} + +func cmdDebugStr(cmd *exec.Cmd) string { + env := make(map[string]string) + for _, kv := range cmd.Env { + split := strings.Split(kv, "=") + k, v := split[0], split[1] + env[k] = v + } + + return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], cmd.Args) +} + +func addStdlibCandidates(pass *pass, refs references) { + add := func(pkg string) { + pass.addCandidate( + &importInfo{importPath: pkg}, + &packageInfo{name: path.Base(pkg), exports: stdlib[pkg]}) + } + for left := range refs { + if left == "rand" { + // Make sure we try crypto/rand before math/rand. + add("crypto/rand") + add("math/rand") + continue + } + for importPath := range stdlib { + if path.Base(importPath) == left { + add(importPath) + } + } + } +} + +// A resolver does the build-system-specific parts of goimports. 
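+// Three implementations follow: goPackagesResolver (go/packages), gopathResolver (plain GOPATH) and moduleResolver (module mode, in mod.go).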
+type resolver interface { + // loadPackageNames loads the package names in importPaths. + loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) + // scan finds (at least) the packages satisfying refs. The returned slice is unordered. + scan(refs references) ([]*pkg, error) +} + +// gopathResolver implements resolver for GOPATH and module workspaces using go/packages. +type goPackagesResolver struct { + env *ProcessEnv +} + +func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { + cfg := r.env.newPackagesConfig(packages.LoadFiles) + pkgs, err := packages.Load(cfg, importPaths...) + if err != nil { + return nil, err + } + names := map[string]string{} + for _, pkg := range pkgs { + names[VendorlessPath(pkg.PkgPath)] = pkg.Name + } + // We may not have found all the packages. Guess the rest. + for _, path := range importPaths { + if _, ok := names[path]; ok { + continue + } + names[path] = importPathToAssumedName(path) + } + return names, nil + +} + +func (r *goPackagesResolver) scan(refs references) ([]*pkg, error) { + var loadQueries []string + for pkgName := range refs { + loadQueries = append(loadQueries, "iamashamedtousethedisabledqueryname="+pkgName) + } + sort.Strings(loadQueries) + cfg := r.env.newPackagesConfig(packages.LoadFiles) + goPackages, err := packages.Load(cfg, loadQueries...) + if err != nil { + return nil, err + } + + var scan []*pkg + for _, goPackage := range goPackages { + scan = append(scan, &pkg{ + dir: filepath.Dir(goPackage.CompiledGoFiles[0]), + importPathShort: VendorlessPath(goPackage.PkgPath), + goPackage: goPackage, + }) + } + return scan, nil +} + +func addExternalCandidates(pass *pass, refs references, filename string) error { + dirScan, err := pass.env.getResolver().scan(refs) + if err != nil { + return err + } + + // Search for imports matching potential package references. + type result struct { + imp *importInfo + pkg *packageInfo + } + results := make(chan result, len(refs)) + + ctx, cancel := context.WithCancel(context.TODO()) + var wg sync.WaitGroup + defer func() { + cancel() + wg.Wait() + }() + var ( + firstErr error + firstErrOnce sync.Once + ) + for pkgName, symbols := range refs { + wg.Add(1) + go func(pkgName string, symbols map[string]bool) { + defer wg.Done() + + found, err := findImport(ctx, pass, dirScan, pkgName, symbols, filename) + + if err != nil { + firstErrOnce.Do(func() { + firstErr = err + cancel() + }) + return + } + + if found == nil { + return // No matching package. + } + + imp := &importInfo{ + importPath: found.importPathShort, + } + + pkg := &packageInfo{ + name: pkgName, + exports: symbols, + } + results <- result{imp, pkg} + }(pkgName, symbols) + } + go func() { + wg.Wait() + close(results) + }() + + for result := range results { + pass.addCandidate(result.imp, result.pkg) + } + return firstErr +} + +// notIdentifier reports whether ch is an invalid identifier character. +func notIdentifier(ch rune) bool { + return !('a' <= ch && ch <= 'z' || 'A' <= ch && ch <= 'Z' || + '0' <= ch && ch <= '9' || + ch == '_' || + ch >= utf8.RuneSelf && (unicode.IsLetter(ch) || unicode.IsDigit(ch))) +} + +// importPathToAssumedName returns the assumed package name of an import path. +// It does this using only string parsing of the import path. +// It picks the last element of the path that does not look like a major +// version, and then picks the valid identifier off the start of that element. 
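+// For example, "github.com/go-yaml/yaml/v2" is assumed to be package "yaml", and "gopkg.in/yaml.v2" is also assumed to be package "yaml".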
+// It is used to determine if a local rename should be added to an import for +// clarity. +// This function could be moved to a standard package and exported if we want +// for use in other tools. +func importPathToAssumedName(importPath string) string { + base := path.Base(importPath) + if strings.HasPrefix(base, "v") { + if _, err := strconv.Atoi(base[1:]); err == nil { + dir := path.Dir(importPath) + if dir != "." { + base = path.Base(dir) + } + } + } + base = strings.TrimPrefix(base, "go-") + if i := strings.IndexFunc(base, notIdentifier); i >= 0 { + base = base[:i] + } + return base +} + +// gopathResolver implements resolver for GOPATH workspaces. +type gopathResolver struct { + env *ProcessEnv +} + +func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { + names := map[string]string{} + for _, path := range importPaths { + names[path] = importPathToName(r.env, path, srcDir) + } + return names, nil +} + +// importPathToNameGoPath finds out the actual package name, as declared in its .go files. +// If there's a problem, it returns "". +func importPathToName(env *ProcessEnv, importPath, srcDir string) (packageName string) { + // Fast path for standard library without going to disk. + if _, ok := stdlib[importPath]; ok { + return path.Base(importPath) // stdlib packages always match their paths. + } + + buildPkg, err := env.buildContext().Import(importPath, srcDir, build.FindOnly) + if err != nil { + return "" + } + pkgName, err := packageDirToName(buildPkg.Dir) + if err != nil { + return "" + } + return pkgName +} + +// packageDirToName is a faster version of build.Import if +// the only thing desired is the package name. It uses build.FindOnly +// to find the directory and then only parses one file in the package, +// trusting that the files in the directory are consistent. +func packageDirToName(dir string) (packageName string, err error) { + d, err := os.Open(dir) + if err != nil { + return "", err + } + names, err := d.Readdirnames(-1) + d.Close() + if err != nil { + return "", err + } + sort.Strings(names) // to have predictable behavior + var lastErr error + var nfile int + for _, name := range names { + if !strings.HasSuffix(name, ".go") { + continue + } + if strings.HasSuffix(name, "_test.go") { + continue + } + nfile++ + fullFile := filepath.Join(dir, name) + + fset := token.NewFileSet() + f, err := parser.ParseFile(fset, fullFile, nil, parser.PackageClauseOnly) + if err != nil { + lastErr = err + continue + } + pkgName := f.Name.Name + if pkgName == "documentation" { + // Special case from go/build.ImportDir, not + // handled by ctx.MatchFile. + continue + } + if pkgName == "main" { + // Also skip package main, assuming it's a +build ignore generator or example. + // Since you can't import a package main anyway, there's no harm here. + continue + } + return pkgName, nil + } + if lastErr != nil { + return "", lastErr + } + return "", fmt.Errorf("no importable package found in %d Go files", nfile) +} + +type pkg struct { + goPackage *packages.Package + dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http") + importPathShort string // vendorless import path ("net/http", "a/b") +} + +type pkgDistance struct { + pkg *pkg + distance int // relative distance to target +} + +// byDistanceOrImportPathShortLength sorts by relative distance breaking ties +// on the short import path length and then the import string itself. 
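+// A distance of -1 (no relative path could be computed) sorts after everything else.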
+type byDistanceOrImportPathShortLength []pkgDistance + +func (s byDistanceOrImportPathShortLength) Len() int { return len(s) } +func (s byDistanceOrImportPathShortLength) Less(i, j int) bool { + di, dj := s[i].distance, s[j].distance + if di == -1 { + return false + } + if dj == -1 { + return true + } + if di != dj { + return di < dj + } + + vi, vj := s[i].pkg.importPathShort, s[j].pkg.importPathShort + if len(vi) != len(vj) { + return len(vi) < len(vj) + } + return vi < vj +} +func (s byDistanceOrImportPathShortLength) Swap(i, j int) { s[i], s[j] = s[j], s[i] } + +func distance(basepath, targetpath string) int { + p, err := filepath.Rel(basepath, targetpath) + if err != nil { + return -1 + } + if p == "." { + return 0 + } + return strings.Count(p, string(filepath.Separator)) + 1 +} + +func (r *gopathResolver) scan(_ references) ([]*pkg, error) { + dupCheck := make(map[string]bool) + var result []*pkg + + var mu sync.Mutex + + add := func(root gopathwalk.Root, dir string) { + mu.Lock() + defer mu.Unlock() + + if _, dup := dupCheck[dir]; dup { + return + } + dupCheck[dir] = true + importpath := filepath.ToSlash(dir[len(root.Path)+len("/"):]) + result = append(result, &pkg{ + importPathShort: VendorlessPath(importpath), + dir: dir, + }) + } + gopathwalk.Walk(gopathwalk.SrcDirsRoots(r.env.buildContext()), add, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: false}) + return result, nil +} + +// VendorlessPath returns the devendorized version of the import path ipath. +// For example, VendorlessPath("foo/bar/vendor/a/b") returns "a/b". +func VendorlessPath(ipath string) string { + // Devendorize for use in import statement. + if i := strings.LastIndex(ipath, "/vendor/"); i >= 0 { + return ipath[i+len("/vendor/"):] + } + if strings.HasPrefix(ipath, "vendor/") { + return ipath[len("vendor/"):] + } + return ipath +} + +// loadExports returns the set of exported symbols in the package at dir. +// It returns nil on error or if the package name in dir does not match expectPackage. +func loadExports(ctx context.Context, env *ProcessEnv, expectPackage string, pkg *pkg) (map[string]bool, error) { + if env.Debug { + log.Printf("loading exports in dir %s (seeking package %s)", pkg.dir, expectPackage) + } + if pkg.goPackage != nil { + exports := map[string]bool{} + fset := token.NewFileSet() + for _, fname := range pkg.goPackage.CompiledGoFiles { + f, err := parser.ParseFile(fset, fname, nil, 0) + if err != nil { + return nil, fmt.Errorf("parsing %s: %v", fname, err) + } + for name := range f.Scope.Objects { + if ast.IsExported(name) { + exports[name] = true + } + } + } + return exports, nil + } + + exports := make(map[string]bool) + + // Look for non-test, buildable .go files which could provide exports. 
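+ // Build-constraint filtering (file name suffixes and build tags) is delegated to MatchFile below.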
+ all, err := ioutil.ReadDir(pkg.dir) + if err != nil { + return nil, err + } + var files []os.FileInfo + for _, fi := range all { + name := fi.Name() + if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") { + continue + } + match, err := env.buildContext().MatchFile(pkg.dir, fi.Name()) + if err != nil || !match { + continue + } + files = append(files, fi) + } + + if len(files) == 0 { + return nil, fmt.Errorf("dir %v contains no buildable, non-test .go files", pkg.dir) + } + + fset := token.NewFileSet() + for _, fi := range files { + select { + case <-ctx.Done(): + return nil, ctx.Err() + default: + } + + fullFile := filepath.Join(pkg.dir, fi.Name()) + f, err := parser.ParseFile(fset, fullFile, nil, 0) + if err != nil { + return nil, fmt.Errorf("parsing %s: %v", fullFile, err) + } + pkgName := f.Name.Name + if pkgName == "documentation" { + // Special case from go/build.ImportDir, not + // handled by MatchFile above. + continue + } + if pkgName != expectPackage { + return nil, fmt.Errorf("scan of dir %v is not expected package %v (actually %v)", pkg.dir, expectPackage, pkgName) + } + for name := range f.Scope.Objects { + if ast.IsExported(name) { + exports[name] = true + } + } + } + + if env.Debug { + exportList := make([]string, 0, len(exports)) + for k := range exports { + exportList = append(exportList, k) + } + sort.Strings(exportList) + log.Printf("loaded exports in dir %v (package %v): %v", pkg.dir, expectPackage, strings.Join(exportList, ", ")) + } + return exports, nil +} + +// findImport searches for a package with the given symbols. +// If no package is found, findImport returns ("", false, nil) +func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string, symbols map[string]bool, filename string) (*pkg, error) { + pkgDir, err := filepath.Abs(filename) + if err != nil { + return nil, err + } + pkgDir = filepath.Dir(pkgDir) + + // Find candidate packages, looking only at their directory names first. + var candidates []pkgDistance + for _, pkg := range dirScan { + if pkg.dir == pkgDir && pass.f.Name.Name == pkgName { + // The candidate is in the same directory and has the + // same package name. Don't try to import ourselves. + continue + } + if pkgIsCandidate(filename, pkgName, pkg) { + candidates = append(candidates, pkgDistance{ + pkg: pkg, + distance: distance(pkgDir, pkg.dir), + }) + } + } + + // Sort the candidates by their import package length, + // assuming that shorter package names are better than long + // ones. Note that this sorts by the de-vendored name, so + // there's no "penalty" for vendoring. + sort.Sort(byDistanceOrImportPathShortLength(candidates)) + if pass.env.Debug { + for i, c := range candidates { + log.Printf("%s candidate %d/%d: %v in %v", pkgName, i+1, len(candidates), c.pkg.importPathShort, c.pkg.dir) + } + } + + // Collect exports for packages with matching names. 
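+ // Candidates are probed concurrently, a few at a time, but results are read back in candidate order so the best-ranked match still wins.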
+ + rescv := make([]chan *pkg, len(candidates)) + for i := range candidates { + rescv[i] = make(chan *pkg, 1) + } + const maxConcurrentPackageImport = 4 + loadExportsSem := make(chan struct{}, maxConcurrentPackageImport) + + ctx, cancel := context.WithCancel(ctx) + var wg sync.WaitGroup + defer func() { + cancel() + wg.Wait() + }() + + wg.Add(1) + go func() { + defer wg.Done() + for i, c := range candidates { + select { + case loadExportsSem <- struct{}{}: + case <-ctx.Done(): + return + } + + wg.Add(1) + go func(c pkgDistance, resc chan<- *pkg) { + defer func() { + <-loadExportsSem + wg.Done() + }() + + exports, err := loadExports(ctx, pass.env, pkgName, c.pkg) + if err != nil { + if pass.env.Debug { + log.Printf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err) + } + resc <- nil + return + } + + // If it doesn't have the right + // symbols, send nil to mean no match. + for symbol := range symbols { + if !exports[symbol] { + resc <- nil + return + } + } + resc <- c.pkg + }(c, rescv[i]) + } + }() + + for _, resc := range rescv { + pkg := <-resc + if pkg == nil { + continue + } + return pkg, nil + } + return nil, nil +} + +// pkgIsCandidate reports whether pkg is a candidate for satisfying the +// finding which package pkgIdent in the file named by filename is trying +// to refer to. +// +// This check is purely lexical and is meant to be as fast as possible +// because it's run over all $GOPATH directories to filter out poor +// candidates in order to limit the CPU and I/O later parsing the +// exports in candidate packages. +// +// filename is the file being formatted. +// pkgIdent is the package being searched for, like "client" (if +// searching for "client.New") +func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool { + // Check "internal" and "vendor" visibility: + if !canUse(filename, pkg.dir) { + return false + } + + // Speed optimization to minimize disk I/O: + // the last two components on disk must contain the + // package name somewhere. + // + // This permits mismatch naming like directory + // "go-foo" being package "foo", or "pkg.v3" being "pkg", + // or directory "google.golang.org/api/cloudbilling/v1" + // being package "cloudbilling", but doesn't + // permit a directory "foo" to be package + // "bar", which is strongly discouraged + // anyway. There's no reason goimports needs + // to be slow just to accommodate that. + lastTwo := lastTwoComponents(pkg.importPathShort) + if strings.Contains(lastTwo, pkgIdent) { + return true + } + if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) { + lastTwo = lowerASCIIAndRemoveHyphen(lastTwo) + if strings.Contains(lastTwo, pkgIdent) { + return true + } + } + + return false +} + +func hasHyphenOrUpperASCII(s string) bool { + for i := 0; i < len(s); i++ { + b := s[i] + if b == '-' || ('A' <= b && b <= 'Z') { + return true + } + } + return false +} + +func lowerASCIIAndRemoveHyphen(s string) (ret string) { + buf := make([]byte, 0, len(s)) + for i := 0; i < len(s); i++ { + b := s[i] + switch { + case b == '-': + continue + case 'A' <= b && b <= 'Z': + buf = append(buf, b+('a'-'A')) + default: + buf = append(buf, b) + } + } + return string(buf) +} + +// canUse reports whether the package in dir is usable from filename, +// respecting the Go "internal" and "vendor" visibility rules. +func canUse(filename, dir string) bool { + // Fast path check, before any allocations. 
If it doesn't contain vendor + // or internal, it's not tricky: + // Note that this can false-negative on directories like "notinternal", + // but we check it correctly below. This is just a fast path. + if !strings.Contains(dir, "vendor") && !strings.Contains(dir, "internal") { + return true + } + + dirSlash := filepath.ToSlash(dir) + if !strings.Contains(dirSlash, "/vendor/") && !strings.Contains(dirSlash, "/internal/") && !strings.HasSuffix(dirSlash, "/internal") { + return true + } + // Vendor or internal directory only visible from children of parent. + // That means the path from the current directory to the target directory + // can contain ../vendor or ../internal but not ../foo/vendor or ../foo/internal + // or bar/vendor or bar/internal. + // After stripping all the leading ../, the only okay place to see vendor or internal + // is at the very beginning of the path. + absfile, err := filepath.Abs(filename) + if err != nil { + return false + } + absdir, err := filepath.Abs(dir) + if err != nil { + return false + } + rel, err := filepath.Rel(absfile, absdir) + if err != nil { + return false + } + relSlash := filepath.ToSlash(rel) + if i := strings.LastIndex(relSlash, "../"); i >= 0 { + relSlash = relSlash[i+len("../"):] + } + return !strings.Contains(relSlash, "/vendor/") && !strings.Contains(relSlash, "/internal/") && !strings.HasSuffix(relSlash, "/internal") +} + +// lastTwoComponents returns at most the last two path components +// of v, using either / or \ as the path separator. +func lastTwoComponents(v string) string { + nslash := 0 + for i := len(v) - 1; i >= 0; i-- { + if v[i] == '/' || v[i] == '\\' { + nslash++ + if nslash == 2 { + return v[i:] + } + } + } + return v +} + +type visitFn func(node ast.Node) ast.Visitor + +func (fn visitFn) Visit(node ast.Node) ast.Visitor { + return fn(node) +} diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go new file mode 100644 index 000000000..625333760 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/imports.go @@ -0,0 +1,303 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run mkstdlib.go + +// Package imports implements a Go pretty-printer (like package "go/format") +// that also adds or removes import statements as necessary. +package imports + +import ( + "bufio" + "bytes" + "fmt" + "go/ast" + "go/format" + "go/parser" + "go/printer" + "go/token" + "io" + "io/ioutil" + "regexp" + "strconv" + "strings" + + "golang.org/x/tools/go/ast/astutil" +) + +// Options is golang.org/x/tools/imports.Options with extra internal-only options. +type Options struct { + Env *ProcessEnv // The environment to use. Note: this contains the cached module and filesystem state. + + Fragment bool // Accept fragment of a source file (no package statement) + AllErrors bool // Report all errors (not just the first 10 on different lines) + + Comments bool // Print comments (true if nil *Options provided) + TabIndent bool // Use tabs for indent (true if nil *Options provided) + TabWidth int // Tab width (8 if nil *Options provided) + + FormatOnly bool // Disable the insertion and deletion of imports +} + +// Process implements golang.org/x/tools/imports.Process with explicit context in env. 
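+// If src is nil the source is read from filename, and the result is always run through format.Source before being returned. A typical call (illustrative; env is a *ProcessEnv prepared by the caller) looks like:
+//
+//	out, err := Process("main.go", src, &Options{Comments: true, TabIndent: true, TabWidth: 8, Env: env})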
+func Process(filename string, src []byte, opt *Options) ([]byte, error) { + if src == nil { + b, err := ioutil.ReadFile(filename) + if err != nil { + return nil, err + } + src = b + } + + fileSet := token.NewFileSet() + file, adjust, err := parse(fileSet, filename, src, opt) + if err != nil { + return nil, err + } + + if !opt.FormatOnly { + if err := fixImports(fileSet, file, filename, opt.Env); err != nil { + return nil, err + } + } + + sortImports(opt.Env, fileSet, file) + imps := astutil.Imports(fileSet, file) + var spacesBefore []string // import paths we need spaces before + for _, impSection := range imps { + // Within each block of contiguous imports, see if any + // import lines are in different group numbers. If so, + // we'll need to put a space between them so it's + // compatible with gofmt. + lastGroup := -1 + for _, importSpec := range impSection { + importPath, _ := strconv.Unquote(importSpec.Path.Value) + groupNum := importGroup(opt.Env, importPath) + if groupNum != lastGroup && lastGroup != -1 { + spacesBefore = append(spacesBefore, importPath) + } + lastGroup = groupNum + } + + } + + printerMode := printer.UseSpaces + if opt.TabIndent { + printerMode |= printer.TabIndent + } + printConfig := &printer.Config{Mode: printerMode, Tabwidth: opt.TabWidth} + + var buf bytes.Buffer + err = printConfig.Fprint(&buf, fileSet, file) + if err != nil { + return nil, err + } + out := buf.Bytes() + if adjust != nil { + out = adjust(src, out) + } + if len(spacesBefore) > 0 { + out, err = addImportSpaces(bytes.NewReader(out), spacesBefore) + if err != nil { + return nil, err + } + } + + out, err = format.Source(out) + if err != nil { + return nil, err + } + return out, nil +} + +// parse parses src, which was read from filename, +// as a Go source file or statement list. +func parse(fset *token.FileSet, filename string, src []byte, opt *Options) (*ast.File, func(orig, src []byte) []byte, error) { + parserMode := parser.Mode(0) + if opt.Comments { + parserMode |= parser.ParseComments + } + if opt.AllErrors { + parserMode |= parser.AllErrors + } + + // Try as whole source file. + file, err := parser.ParseFile(fset, filename, src, parserMode) + if err == nil { + return file, nil, nil + } + // If the error is that the source file didn't begin with a + // package line and we accept fragmented input, fall through to + // try as a source fragment. Stop and return on any other error. + if !opt.Fragment || !strings.Contains(err.Error(), "expected 'package'") { + return nil, nil, err + } + + // If this is a declaration list, make it a source file + // by inserting a package clause. + // Insert using a ;, not a newline, so that parse errors are on + // the correct line. + const prefix = "package main;" + psrc := append([]byte(prefix), src...) + file, err = parser.ParseFile(fset, filename, psrc, parserMode) + if err == nil { + // Gofmt will turn the ; into a \n. + // Do that ourselves now and update the file contents, + // so that positions and line numbers are correct going forward. + psrc[len(prefix)-1] = '\n' + fset.File(file.Package).SetLinesForContent(psrc) + + // If a main function exists, we will assume this is a main + // package and leave the file. + if containsMainFunc(file) { + return file, nil, nil + } + + adjust := func(orig, src []byte) []byte { + // Remove the package clause. + src = src[len(prefix):] + return matchSpace(orig, src) + } + return file, adjust, nil + } + // If the error is that the source file didn't begin with a + // declaration, fall through to try as a statement list. 
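+ // (a bare fragment such as `x := 1` ends up here)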
+ // Stop and return on any other error. + if !strings.Contains(err.Error(), "expected declaration") { + return nil, nil, err + } + + // If this is a statement list, make it a source file + // by inserting a package clause and turning the list + // into a function body. This handles expressions too. + // Insert using a ;, not a newline, so that the line numbers + // in fsrc match the ones in src. + fsrc := append(append([]byte("package p; func _() {"), src...), '}') + file, err = parser.ParseFile(fset, filename, fsrc, parserMode) + if err == nil { + adjust := func(orig, src []byte) []byte { + // Remove the wrapping. + // Gofmt has turned the ; into a \n\n. + src = src[len("package p\n\nfunc _() {"):] + src = src[:len(src)-len("}\n")] + // Gofmt has also indented the function body one level. + // Remove that indent. + src = bytes.Replace(src, []byte("\n\t"), []byte("\n"), -1) + return matchSpace(orig, src) + } + return file, adjust, nil + } + + // Failed, and out of options. + return nil, nil, err +} + +// containsMainFunc checks if a file contains a function declaration with the +// function signature 'func main()' +func containsMainFunc(file *ast.File) bool { + for _, decl := range file.Decls { + if f, ok := decl.(*ast.FuncDecl); ok { + if f.Name.Name != "main" { + continue + } + + if len(f.Type.Params.List) != 0 { + continue + } + + if f.Type.Results != nil && len(f.Type.Results.List) != 0 { + continue + } + + return true + } + } + + return false +} + +func cutSpace(b []byte) (before, middle, after []byte) { + i := 0 + for i < len(b) && (b[i] == ' ' || b[i] == '\t' || b[i] == '\n') { + i++ + } + j := len(b) + for j > 0 && (b[j-1] == ' ' || b[j-1] == '\t' || b[j-1] == '\n') { + j-- + } + if i <= j { + return b[:i], b[i:j], b[j:] + } + return nil, nil, b[j:] +} + +// matchSpace reformats src to use the same space context as orig. +// 1) If orig begins with blank lines, matchSpace inserts them at the beginning of src. +// 2) matchSpace copies the indentation of the first non-blank line in orig +// to every non-blank line in src. +// 3) matchSpace copies the trailing space from orig and uses it in place +// of src's trailing space. 
+func matchSpace(orig []byte, src []byte) []byte { + before, _, after := cutSpace(orig) + i := bytes.LastIndex(before, []byte{'\n'}) + before, indent := before[:i+1], before[i+1:] + + _, src, _ = cutSpace(src) + + var b bytes.Buffer + b.Write(before) + for len(src) > 0 { + line := src + if i := bytes.IndexByte(line, '\n'); i >= 0 { + line, src = line[:i+1], line[i+1:] + } else { + src = nil + } + if len(line) > 0 && line[0] != '\n' { // not blank + b.Write(indent) + } + b.Write(line) + } + b.Write(after) + return b.Bytes() +} + +var impLine = regexp.MustCompile(`^\s+(?:[\w\.]+\s+)?"(.+)"`) + +func addImportSpaces(r io.Reader, breaks []string) ([]byte, error) { + var out bytes.Buffer + in := bufio.NewReader(r) + inImports := false + done := false + for { + s, err := in.ReadString('\n') + if err == io.EOF { + break + } else if err != nil { + return nil, err + } + + if !inImports && !done && strings.HasPrefix(s, "import") { + inImports = true + } + if inImports && (strings.HasPrefix(s, "var") || + strings.HasPrefix(s, "func") || + strings.HasPrefix(s, "const") || + strings.HasPrefix(s, "type")) { + done = true + inImports = false + } + if inImports && len(breaks) > 0 { + if m := impLine.FindStringSubmatch(s); m != nil { + if m[1] == breaks[0] { + out.WriteByte('\n') + breaks = breaks[1:] + } + } + } + + fmt.Fprint(&out, s) + } + return out.Bytes(), nil +} diff --git a/vendor/golang.org/x/tools/internal/imports/mkindex.go b/vendor/golang.org/x/tools/internal/imports/mkindex.go new file mode 100644 index 000000000..ef8c0d287 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/mkindex.go @@ -0,0 +1,173 @@ +// +build ignore + +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Command mkindex creates the file "pkgindex.go" containing an index of the Go +// standard library. The file is intended to be built as part of the imports +// package, so that the package may be used in environments where a GOROOT is +// not available (such as App Engine). +package imports + +import ( + "bytes" + "fmt" + "go/ast" + "go/build" + "go/format" + "go/parser" + "go/token" + "io/ioutil" + "log" + "os" + "path" + "path/filepath" + "strings" +) + +var ( + pkgIndex = make(map[string][]pkg) + exports = make(map[string]map[string]bool) +) + +func main() { + // Don't use GOPATH. + ctx := build.Default + ctx.GOPATH = "" + + // Populate pkgIndex global from GOROOT. + for _, path := range ctx.SrcDirs() { + f, err := os.Open(path) + if err != nil { + log.Print(err) + continue + } + children, err := f.Readdir(-1) + f.Close() + if err != nil { + log.Print(err) + continue + } + for _, child := range children { + if child.IsDir() { + loadPkg(path, child.Name()) + } + } + } + // Populate exports global. + for _, ps := range pkgIndex { + for _, p := range ps { + e := loadExports(p.dir) + if e != nil { + exports[p.dir] = e + } + } + } + + // Construct source file. + var buf bytes.Buffer + fmt.Fprint(&buf, pkgIndexHead) + fmt.Fprintf(&buf, "var pkgIndexMaster = %#v\n", pkgIndex) + fmt.Fprintf(&buf, "var exportsMaster = %#v\n", exports) + src := buf.Bytes() + + // Replace main.pkg type name with pkg. + src = bytes.Replace(src, []byte("main.pkg"), []byte("pkg"), -1) + // Replace actual GOROOT with "/go". + src = bytes.Replace(src, []byte(ctx.GOROOT), []byte("/go"), -1) + // Add some line wrapping. 
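+ // The wrapping here is crude; format.Source below cleans up the final layout.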
+ src = bytes.Replace(src, []byte("}, "), []byte("},\n"), -1) + src = bytes.Replace(src, []byte("true, "), []byte("true,\n"), -1) + + var err error + src, err = format.Source(src) + if err != nil { + log.Fatal(err) + } + + // Write out source file. + err = ioutil.WriteFile("pkgindex.go", src, 0644) + if err != nil { + log.Fatal(err) + } +} + +const pkgIndexHead = `package imports + +func init() { + pkgIndexOnce.Do(func() { + pkgIndex.m = pkgIndexMaster + }) + loadExports = func(dir string) map[string]bool { + return exportsMaster[dir] + } +} +` + +type pkg struct { + importpath string // full pkg import path, e.g. "net/http" + dir string // absolute file path to pkg directory e.g. "/usr/lib/go/src/fmt" +} + +var fset = token.NewFileSet() + +func loadPkg(root, importpath string) { + shortName := path.Base(importpath) + if shortName == "testdata" { + return + } + + dir := filepath.Join(root, importpath) + pkgIndex[shortName] = append(pkgIndex[shortName], pkg{ + importpath: importpath, + dir: dir, + }) + + pkgDir, err := os.Open(dir) + if err != nil { + return + } + children, err := pkgDir.Readdir(-1) + pkgDir.Close() + if err != nil { + return + } + for _, child := range children { + name := child.Name() + if name == "" { + continue + } + if c := name[0]; c == '.' || ('0' <= c && c <= '9') { + continue + } + if child.IsDir() { + loadPkg(root, filepath.Join(importpath, name)) + } + } +} + +func loadExports(dir string) map[string]bool { + exports := make(map[string]bool) + buildPkg, err := build.ImportDir(dir, 0) + if err != nil { + if strings.Contains(err.Error(), "no buildable Go source files in") { + return nil + } + log.Printf("could not import %q: %v", dir, err) + return nil + } + for _, file := range buildPkg.GoFiles { + f, err := parser.ParseFile(fset, filepath.Join(dir, file), nil, 0) + if err != nil { + log.Printf("could not parse %q: %v", file, err) + continue + } + for name := range f.Scope.Objects { + if ast.IsExported(name) { + exports[name] = true + } + } + } + return exports +} diff --git a/vendor/golang.org/x/tools/internal/imports/mkstdlib.go b/vendor/golang.org/x/tools/internal/imports/mkstdlib.go new file mode 100644 index 000000000..f67b5c1ed --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/mkstdlib.go @@ -0,0 +1,132 @@ +// +build ignore + +// mkstdlib generates the zstdlib.go file, containing the Go standard +// library API symbols. It's baked into the binary to avoid scanning +// GOPATH in the common case. +package imports + +import ( + "bufio" + "bytes" + "fmt" + "go/format" + "io" + "io/ioutil" + "log" + "os" + "os/exec" + "path/filepath" + "regexp" + "runtime" + "sort" + "strings" +) + +func mustOpen(name string) io.Reader { + f, err := os.Open(name) + if err != nil { + log.Fatal(err) + } + return f +} + +func api(base string) string { + return filepath.Join(runtime.GOROOT(), "api", base) +} + +var sym = regexp.MustCompile(`^pkg (\S+).*?, (?:var|func|type|const) ([A-Z]\w*)`) + +var unsafeSyms = map[string]bool{"Alignof": true, "ArbitraryType": true, "Offsetof": true, "Pointer": true, "Sizeof": true} + +func main() { + var buf bytes.Buffer + outf := func(format string, args ...interface{}) { + fmt.Fprintf(&buf, format, args...) + } + outf("// Code generated by mkstdlib.go. 
DO NOT EDIT.\n\n") + outf("package imports\n") + outf("var stdlib = map[string]map[string]bool{\n") + f := io.MultiReader( + mustOpen(api("go1.txt")), + mustOpen(api("go1.1.txt")), + mustOpen(api("go1.2.txt")), + mustOpen(api("go1.3.txt")), + mustOpen(api("go1.4.txt")), + mustOpen(api("go1.5.txt")), + mustOpen(api("go1.6.txt")), + mustOpen(api("go1.7.txt")), + mustOpen(api("go1.8.txt")), + mustOpen(api("go1.9.txt")), + mustOpen(api("go1.10.txt")), + mustOpen(api("go1.11.txt")), + mustOpen(api("go1.12.txt")), + + // The API of the syscall/js package needs to be computed explicitly, + // because it's not included in the GOROOT/api/go1.*.txt files at this time. + syscallJSAPI(), + ) + sc := bufio.NewScanner(f) + + pkgs := map[string]map[string]bool{ + "unsafe": unsafeSyms, + } + paths := []string{"unsafe"} + + for sc.Scan() { + l := sc.Text() + has := func(v string) bool { return strings.Contains(l, v) } + if has("struct, ") || has("interface, ") || has(", method (") { + continue + } + if m := sym.FindStringSubmatch(l); m != nil { + path, sym := m[1], m[2] + + if _, ok := pkgs[path]; !ok { + pkgs[path] = map[string]bool{} + paths = append(paths, path) + } + pkgs[path][sym] = true + } + } + if err := sc.Err(); err != nil { + log.Fatal(err) + } + sort.Strings(paths) + for _, path := range paths { + outf("\t%q: map[string]bool{\n", path) + pkg := pkgs[path] + var syms []string + for sym := range pkg { + syms = append(syms, sym) + } + sort.Strings(syms) + for _, sym := range syms { + outf("\t\t%q: true,\n", sym) + } + outf("},\n") + } + outf("}\n") + fmtbuf, err := format.Source(buf.Bytes()) + if err != nil { + log.Fatal(err) + } + err = ioutil.WriteFile("zstdlib.go", fmtbuf, 0666) + if err != nil { + log.Fatal(err) + } +} + +// syscallJSAPI returns the API of the syscall/js package. +// It's computed from the contents of $(go env GOROOT)/src/syscall/js. +func syscallJSAPI() io.Reader { + var exeSuffix string + if runtime.GOOS == "windows" { + exeSuffix = ".exe" + } + cmd := exec.Command("go"+exeSuffix, "run", "cmd/api", "-contexts", "js-wasm", "syscall/js") + out, err := cmd.Output() + if err != nil { + log.Fatalln(err) + } + return bytes.NewReader(out) +} diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go new file mode 100644 index 000000000..a072214ee --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/mod.go @@ -0,0 +1,355 @@ +package imports + +import ( + "bytes" + "encoding/json" + "io/ioutil" + "log" + "os" + "path" + "path/filepath" + "regexp" + "sort" + "strconv" + "strings" + "sync" + "time" + + "golang.org/x/tools/internal/gopathwalk" + "golang.org/x/tools/internal/module" +) + +// moduleResolver implements resolver for modules using the go command as little +// as feasible. +type moduleResolver struct { + env *ProcessEnv + + initialized bool + main *moduleJSON + modsByModPath []*moduleJSON // All modules, ordered by # of path components in module Path... + modsByDir []*moduleJSON // ...or Dir. +} + +type moduleJSON struct { + Path string // module path + Version string // module version + Versions []string // available module versions (with -versions) + Replace *moduleJSON // replaced by this module + Time *time.Time // time version was created + Update *moduleJSON // available update, if any (with -u) + Main bool // is this the main module? + Indirect bool // is this module only an indirect dependency of main module? 
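+ // Dir is empty when the module has not been downloaded; init() below skips such modules.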
+ Dir string // directory holding files for this module, if any + GoMod string // path to go.mod file for this module, if any + Error *moduleErrorJSON // error loading module +} + +type moduleErrorJSON struct { + Err string // the error itself +} + +func (r *moduleResolver) init() error { + if r.initialized { + return nil + } + stdout, err := r.env.invokeGo("list", "-m", "-json", "...") + if err != nil { + return err + } + for dec := json.NewDecoder(stdout); dec.More(); { + mod := &moduleJSON{} + if err := dec.Decode(mod); err != nil { + return err + } + if mod.Dir == "" { + if r.env.Debug { + log.Printf("module %v has not been downloaded and will be ignored", mod.Path) + } + // Can't do anything with a module that's not downloaded. + continue + } + r.modsByModPath = append(r.modsByModPath, mod) + r.modsByDir = append(r.modsByDir, mod) + if mod.Main { + r.main = mod + } + } + + sort.Slice(r.modsByModPath, func(i, j int) bool { + count := func(x int) int { + return strings.Count(r.modsByModPath[x].Path, "/") + } + return count(j) < count(i) // descending order + }) + sort.Slice(r.modsByDir, func(i, j int) bool { + count := func(x int) int { + return strings.Count(r.modsByDir[x].Dir, "/") + } + return count(j) < count(i) // descending order + }) + + r.initialized = true + return nil +} + +// findPackage returns the module and directory that contains the package at +// the given import path, or returns nil, "" if no module is in scope. +func (r *moduleResolver) findPackage(importPath string) (*moduleJSON, string) { + for _, m := range r.modsByModPath { + if !strings.HasPrefix(importPath, m.Path) { + continue + } + pathInModule := importPath[len(m.Path):] + pkgDir := filepath.Join(m.Dir, pathInModule) + if dirIsNestedModule(pkgDir, m) { + continue + } + + pkgFiles, err := ioutil.ReadDir(pkgDir) + if err != nil { + continue + } + + // A module only contains a package if it has buildable go + // files in that directory. If not, it could be provided by an + // outer module. See #29736. + for _, fi := range pkgFiles { + if ok, _ := r.env.buildContext().MatchFile(pkgDir, fi.Name()); ok { + return m, pkgDir + } + } + } + return nil, "" +} + +// findModuleByDir returns the module that contains dir, or nil if no such +// module is in scope. +func (r *moduleResolver) findModuleByDir(dir string) *moduleJSON { + // This is quite tricky and may not be correct. dir could be: + // - a package in the main module. + // - a replace target underneath the main module's directory. + // - a nested module in the above. + // - a replace target somewhere totally random. + // - a nested module in the above. + // - in the mod cache. + // - in /vendor/ in -mod=vendor mode. + // - nested module? Dunno. + // Rumor has it that replace targets cannot contain other replace targets. + for _, m := range r.modsByDir { + if !strings.HasPrefix(dir, m.Dir) { + continue + } + + if dirIsNestedModule(dir, m) { + continue + } + + return m + } + return nil +} + +// dirIsNestedModule reports if dir is contained in a nested module underneath +// mod, not actually in mod. 
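+// It decides by finding the nearest go.mod above dir and comparing its directory with mod's root.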
+func dirIsNestedModule(dir string, mod *moduleJSON) bool { + if !strings.HasPrefix(dir, mod.Dir) { + return false + } + mf := findModFile(dir) + if mf == "" { + return false + } + return filepath.Dir(mf) != mod.Dir +} + +func findModFile(dir string) string { + for { + f := filepath.Join(dir, "go.mod") + info, err := os.Stat(f) + if err == nil && !info.IsDir() { + return f + } + d := filepath.Dir(dir) + if len(d) >= len(dir) { + return "" // reached top of file system, no go.mod + } + dir = d + } +} + +func (r *moduleResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) { + if err := r.init(); err != nil { + return nil, err + } + names := map[string]string{} + for _, path := range importPaths { + _, packageDir := r.findPackage(path) + if packageDir == "" { + continue + } + name, err := packageDirToName(packageDir) + if err != nil { + continue + } + names[path] = name + } + return names, nil +} + +func (r *moduleResolver) scan(_ references) ([]*pkg, error) { + if err := r.init(); err != nil { + return nil, err + } + + // Walk GOROOT, GOPATH/pkg/mod, and the main module. + roots := []gopathwalk.Root{ + {filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT}, + } + if r.main != nil { + roots = append(roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule}) + } + for _, p := range filepath.SplitList(r.env.GOPATH) { + roots = append(roots, gopathwalk.Root{filepath.Join(p, "/pkg/mod"), gopathwalk.RootModuleCache}) + } + + // Walk replace targets, just in case they're not in any of the above. + for _, mod := range r.modsByModPath { + if mod.Replace != nil { + roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther}) + } + } + + var result []*pkg + dupCheck := make(map[string]bool) + var mu sync.Mutex + + gopathwalk.Walk(roots, func(root gopathwalk.Root, dir string) { + mu.Lock() + defer mu.Unlock() + + if _, dup := dupCheck[dir]; dup { + return + } + + dupCheck[dir] = true + + subdir := "" + if dir != root.Path { + subdir = dir[len(root.Path)+len("/"):] + } + importPath := filepath.ToSlash(subdir) + if strings.HasPrefix(importPath, "vendor/") { + // Ignore vendor dirs. If -mod=vendor is on, then things + // should mostly just work, but when it's not vendor/ + // is a mess. There's no easy way to tell if it's on. + // We can still find things in the mod cache and + // map them into /vendor when -mod=vendor is on. + return + } + switch root.Type { + case gopathwalk.RootCurrentModule: + importPath = path.Join(r.main.Path, filepath.ToSlash(subdir)) + case gopathwalk.RootModuleCache: + matches := modCacheRegexp.FindStringSubmatch(subdir) + modPath, err := module.DecodePath(filepath.ToSlash(matches[1])) + if err != nil { + if r.env.Debug { + log.Printf("decoding module cache path %q: %v", subdir, err) + } + return + } + importPath = path.Join(modPath, filepath.ToSlash(matches[3])) + case gopathwalk.RootGOROOT: + importPath = subdir + } + + // Check if the directory is underneath a module that's in scope. + if mod := r.findModuleByDir(dir); mod != nil { + // It is. If dir is the target of a replace directive, + // our guessed import path is wrong. Use the real one. + if mod.Dir == dir { + importPath = mod.Path + } else { + dirInMod := dir[len(mod.Dir)+len("/"):] + importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod)) + } + } else { + // The package is in an unknown module. Check that it's + // not obviously impossible to import. 
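+ // "Obviously impossible" means the nearest go.mod declares a module path that is not a prefix of the import path guessed from the directory.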
+ var modFile string + switch root.Type { + case gopathwalk.RootModuleCache: + matches := modCacheRegexp.FindStringSubmatch(subdir) + modFile = filepath.Join(matches[1], "@", matches[2], "go.mod") + default: + modFile = findModFile(dir) + } + + modBytes, err := ioutil.ReadFile(modFile) + if err == nil && !strings.HasPrefix(importPath, modulePath(modBytes)) { + // The module's declared path does not match + // its expected path. It probably needs a + // replace directive we don't have. + return + } + } + // We may have discovered a package that has a different version + // in scope already. Canonicalize to that one if possible. + if _, canonicalDir := r.findPackage(importPath); canonicalDir != "" { + dir = canonicalDir + } + + result = append(result, &pkg{ + importPathShort: VendorlessPath(importPath), + dir: dir, + }) + }, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true}) + return result, nil +} + +// modCacheRegexp splits a path in a module cache into module, module version, and package. +var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`) + +var ( + slashSlash = []byte("//") + moduleStr = []byte("module") +) + +// modulePath returns the module path from the gomod file text. +// If it cannot find a module path, it returns an empty string. +// It is tolerant of unrelated problems in the go.mod file. +// +// Copied from cmd/go/internal/modfile. +func modulePath(mod []byte) string { + for len(mod) > 0 { + line := mod + mod = nil + if i := bytes.IndexByte(line, '\n'); i >= 0 { + line, mod = line[:i], line[i+1:] + } + if i := bytes.Index(line, slashSlash); i >= 0 { + line = line[:i] + } + line = bytes.TrimSpace(line) + if !bytes.HasPrefix(line, moduleStr) { + continue + } + line = line[len(moduleStr):] + n := len(line) + line = bytes.TrimSpace(line) + if len(line) == n || len(line) == 0 { + continue + } + + if line[0] == '"' || line[0] == '`' { + p, err := strconv.Unquote(string(line)) + if err != nil { + return "" // malformed quoted string or multiline module path + } + return p + } + + return string(line) + } + return "" // missing module path +} diff --git a/vendor/golang.org/x/tools/internal/imports/sortimports.go b/vendor/golang.org/x/tools/internal/imports/sortimports.go new file mode 100644 index 000000000..0a156fe2e --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/sortimports.go @@ -0,0 +1,233 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Hacked up copy of go/ast/import.go + +package imports + +import ( + "go/ast" + "go/token" + "sort" + "strconv" +) + +// sortImports sorts runs of consecutive import lines in import blocks in f. +// It also removes duplicate imports when it is possible to do so without data loss. +func sortImports(env *ProcessEnv, fset *token.FileSet, f *ast.File) { + for i, d := range f.Decls { + d, ok := d.(*ast.GenDecl) + if !ok || d.Tok != token.IMPORT { + // Not an import declaration, so we're done. + // Imports are always first. + break + } + + if len(d.Specs) == 0 { + // Empty import block, remove it. + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + } + + if !d.Lparen.IsValid() { + // Not a block: sorted by default. + continue + } + + // Identify and sort runs of specs on successive lines. + i := 0 + specs := d.Specs[:0] + for j, s := range d.Specs { + if j > i && fset.Position(s.Pos()).Line > 1+fset.Position(d.Specs[j-1].End()).Line { + // j begins a new run. End this one. 
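+ // Runs are separated by blank lines; each run is sorted and deduplicated independently.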
+ specs = append(specs, sortSpecs(env, fset, f, d.Specs[i:j])...) + i = j + } + } + specs = append(specs, sortSpecs(env, fset, f, d.Specs[i:])...) + d.Specs = specs + + // Deduping can leave a blank line before the rparen; clean that up. + if len(d.Specs) > 0 { + lastSpec := d.Specs[len(d.Specs)-1] + lastLine := fset.Position(lastSpec.Pos()).Line + if rParenLine := fset.Position(d.Rparen).Line; rParenLine > lastLine+1 { + fset.File(d.Rparen).MergeLine(rParenLine - 1) + } + } + } +} + +func importPath(s ast.Spec) string { + t, err := strconv.Unquote(s.(*ast.ImportSpec).Path.Value) + if err == nil { + return t + } + return "" +} + +func importName(s ast.Spec) string { + n := s.(*ast.ImportSpec).Name + if n == nil { + return "" + } + return n.Name +} + +func importComment(s ast.Spec) string { + c := s.(*ast.ImportSpec).Comment + if c == nil { + return "" + } + return c.Text() +} + +// collapse indicates whether prev may be removed, leaving only next. +func collapse(prev, next ast.Spec) bool { + if importPath(next) != importPath(prev) || importName(next) != importName(prev) { + return false + } + return prev.(*ast.ImportSpec).Comment == nil +} + +type posSpan struct { + Start token.Pos + End token.Pos +} + +func sortSpecs(env *ProcessEnv, fset *token.FileSet, f *ast.File, specs []ast.Spec) []ast.Spec { + // Can't short-circuit here even if specs are already sorted, + // since they might yet need deduplication. + // A lone import, however, may be safely ignored. + if len(specs) <= 1 { + return specs + } + + // Record positions for specs. + pos := make([]posSpan, len(specs)) + for i, s := range specs { + pos[i] = posSpan{s.Pos(), s.End()} + } + + // Identify comments in this range. + // Any comment from pos[0].Start to the final line counts. + lastLine := fset.Position(pos[len(pos)-1].End).Line + cstart := len(f.Comments) + cend := len(f.Comments) + for i, g := range f.Comments { + if g.Pos() < pos[0].Start { + continue + } + if i < cstart { + cstart = i + } + if fset.Position(g.End()).Line > lastLine { + cend = i + break + } + } + comments := f.Comments[cstart:cend] + + // Assign each comment to the import spec preceding it. + importComment := map[*ast.ImportSpec][]*ast.CommentGroup{} + specIndex := 0 + for _, g := range comments { + for specIndex+1 < len(specs) && pos[specIndex+1].Start <= g.Pos() { + specIndex++ + } + s := specs[specIndex].(*ast.ImportSpec) + importComment[s] = append(importComment[s], g) + } + + // Sort the import specs by import path. + // Remove duplicates, when possible without data loss. + // Reassign the import paths to have the same position sequence. + // Reassign each comment to abut the end of its spec. + // Sort the comments by new position. + sort.Sort(byImportSpec{env, specs}) + + // Dedup. Thanks to our sorting, we can just consider + // adjacent pairs of imports. 
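+ // The earlier of two identical specs is dropped, but only if it carries no comment (see collapse above).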
+ deduped := specs[:0] + for i, s := range specs { + if i == len(specs)-1 || !collapse(s, specs[i+1]) { + deduped = append(deduped, s) + } else { + p := s.Pos() + fset.File(p).MergeLine(fset.Position(p).Line) + } + } + specs = deduped + + // Fix up comment positions + for i, s := range specs { + s := s.(*ast.ImportSpec) + if s.Name != nil { + s.Name.NamePos = pos[i].Start + } + s.Path.ValuePos = pos[i].Start + s.EndPos = pos[i].End + nextSpecPos := pos[i].End + + for _, g := range importComment[s] { + for _, c := range g.List { + c.Slash = pos[i].End + nextSpecPos = c.End() + } + } + if i < len(specs)-1 { + pos[i+1].Start = nextSpecPos + pos[i+1].End = nextSpecPos + } + } + + sort.Sort(byCommentPos(comments)) + + // Fixup comments can insert blank lines, because import specs are on different lines. + // We remove those blank lines here by merging import spec to the first import spec line. + firstSpecLine := fset.Position(specs[0].Pos()).Line + for _, s := range specs[1:] { + p := s.Pos() + line := fset.File(p).Line(p) + for previousLine := line - 1; previousLine >= firstSpecLine; { + fset.File(p).MergeLine(previousLine) + previousLine-- + } + } + return specs +} + +type byImportSpec struct { + env *ProcessEnv + specs []ast.Spec // slice of *ast.ImportSpec +} + +func (x byImportSpec) Len() int { return len(x.specs) } +func (x byImportSpec) Swap(i, j int) { x.specs[i], x.specs[j] = x.specs[j], x.specs[i] } +func (x byImportSpec) Less(i, j int) bool { + ipath := importPath(x.specs[i]) + jpath := importPath(x.specs[j]) + + igroup := importGroup(x.env, ipath) + jgroup := importGroup(x.env, jpath) + if igroup != jgroup { + return igroup < jgroup + } + + if ipath != jpath { + return ipath < jpath + } + iname := importName(x.specs[i]) + jname := importName(x.specs[j]) + + if iname != jname { + return iname < jname + } + return importComment(x.specs[i]) < importComment(x.specs[j]) +} + +type byCommentPos []*ast.CommentGroup + +func (x byCommentPos) Len() int { return len(x) } +func (x byCommentPos) Swap(i, j int) { x[i], x[j] = x[j], x[i] } +func (x byCommentPos) Less(i, j int) bool { return x[i].Pos() < x[j].Pos() } diff --git a/vendor/golang.org/x/tools/internal/imports/zstdlib.go b/vendor/golang.org/x/tools/internal/imports/zstdlib.go new file mode 100644 index 000000000..d81b8c530 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/imports/zstdlib.go @@ -0,0 +1,10325 @@ +// Code generated by mkstdlib.go. DO NOT EDIT. 
+ +package imports + +var stdlib = map[string]map[string]bool{ + "archive/tar": map[string]bool{ + "ErrFieldTooLong": true, + "ErrHeader": true, + "ErrWriteAfterClose": true, + "ErrWriteTooLong": true, + "FileInfoHeader": true, + "Format": true, + "FormatGNU": true, + "FormatPAX": true, + "FormatUSTAR": true, + "FormatUnknown": true, + "Header": true, + "NewReader": true, + "NewWriter": true, + "Reader": true, + "TypeBlock": true, + "TypeChar": true, + "TypeCont": true, + "TypeDir": true, + "TypeFifo": true, + "TypeGNULongLink": true, + "TypeGNULongName": true, + "TypeGNUSparse": true, + "TypeLink": true, + "TypeReg": true, + "TypeRegA": true, + "TypeSymlink": true, + "TypeXGlobalHeader": true, + "TypeXHeader": true, + "Writer": true, + }, + "archive/zip": map[string]bool{ + "Compressor": true, + "Decompressor": true, + "Deflate": true, + "ErrAlgorithm": true, + "ErrChecksum": true, + "ErrFormat": true, + "File": true, + "FileHeader": true, + "FileInfoHeader": true, + "NewReader": true, + "NewWriter": true, + "OpenReader": true, + "ReadCloser": true, + "Reader": true, + "RegisterCompressor": true, + "RegisterDecompressor": true, + "Store": true, + "Writer": true, + }, + "bufio": map[string]bool{ + "ErrAdvanceTooFar": true, + "ErrBufferFull": true, + "ErrFinalToken": true, + "ErrInvalidUnreadByte": true, + "ErrInvalidUnreadRune": true, + "ErrNegativeAdvance": true, + "ErrNegativeCount": true, + "ErrTooLong": true, + "MaxScanTokenSize": true, + "NewReadWriter": true, + "NewReader": true, + "NewReaderSize": true, + "NewScanner": true, + "NewWriter": true, + "NewWriterSize": true, + "ReadWriter": true, + "Reader": true, + "ScanBytes": true, + "ScanLines": true, + "ScanRunes": true, + "ScanWords": true, + "Scanner": true, + "SplitFunc": true, + "Writer": true, + }, + "bytes": map[string]bool{ + "Buffer": true, + "Compare": true, + "Contains": true, + "ContainsAny": true, + "ContainsRune": true, + "Count": true, + "Equal": true, + "EqualFold": true, + "ErrTooLarge": true, + "Fields": true, + "FieldsFunc": true, + "HasPrefix": true, + "HasSuffix": true, + "Index": true, + "IndexAny": true, + "IndexByte": true, + "IndexFunc": true, + "IndexRune": true, + "Join": true, + "LastIndex": true, + "LastIndexAny": true, + "LastIndexByte": true, + "LastIndexFunc": true, + "Map": true, + "MinRead": true, + "NewBuffer": true, + "NewBufferString": true, + "NewReader": true, + "Reader": true, + "Repeat": true, + "Replace": true, + "ReplaceAll": true, + "Runes": true, + "Split": true, + "SplitAfter": true, + "SplitAfterN": true, + "SplitN": true, + "Title": true, + "ToLower": true, + "ToLowerSpecial": true, + "ToTitle": true, + "ToTitleSpecial": true, + "ToUpper": true, + "ToUpperSpecial": true, + "Trim": true, + "TrimFunc": true, + "TrimLeft": true, + "TrimLeftFunc": true, + "TrimPrefix": true, + "TrimRight": true, + "TrimRightFunc": true, + "TrimSpace": true, + "TrimSuffix": true, + }, + "compress/bzip2": map[string]bool{ + "NewReader": true, + "StructuralError": true, + }, + "compress/flate": map[string]bool{ + "BestCompression": true, + "BestSpeed": true, + "CorruptInputError": true, + "DefaultCompression": true, + "HuffmanOnly": true, + "InternalError": true, + "NewReader": true, + "NewReaderDict": true, + "NewWriter": true, + "NewWriterDict": true, + "NoCompression": true, + "ReadError": true, + "Reader": true, + "Resetter": true, + "WriteError": true, + "Writer": true, + }, + "compress/gzip": map[string]bool{ + "BestCompression": true, + "BestSpeed": true, + "DefaultCompression": true, + "ErrChecksum": 
true, + "ErrHeader": true, + "Header": true, + "HuffmanOnly": true, + "NewReader": true, + "NewWriter": true, + "NewWriterLevel": true, + "NoCompression": true, + "Reader": true, + "Writer": true, + }, + "compress/lzw": map[string]bool{ + "LSB": true, + "MSB": true, + "NewReader": true, + "NewWriter": true, + "Order": true, + }, + "compress/zlib": map[string]bool{ + "BestCompression": true, + "BestSpeed": true, + "DefaultCompression": true, + "ErrChecksum": true, + "ErrDictionary": true, + "ErrHeader": true, + "HuffmanOnly": true, + "NewReader": true, + "NewReaderDict": true, + "NewWriter": true, + "NewWriterLevel": true, + "NewWriterLevelDict": true, + "NoCompression": true, + "Resetter": true, + "Writer": true, + }, + "container/heap": map[string]bool{ + "Fix": true, + "Init": true, + "Interface": true, + "Pop": true, + "Push": true, + "Remove": true, + }, + "container/list": map[string]bool{ + "Element": true, + "List": true, + "New": true, + }, + "container/ring": map[string]bool{ + "New": true, + "Ring": true, + }, + "context": map[string]bool{ + "Background": true, + "CancelFunc": true, + "Canceled": true, + "Context": true, + "DeadlineExceeded": true, + "TODO": true, + "WithCancel": true, + "WithDeadline": true, + "WithTimeout": true, + "WithValue": true, + }, + "crypto": map[string]bool{ + "BLAKE2b_256": true, + "BLAKE2b_384": true, + "BLAKE2b_512": true, + "BLAKE2s_256": true, + "Decrypter": true, + "DecrypterOpts": true, + "Hash": true, + "MD4": true, + "MD5": true, + "MD5SHA1": true, + "PrivateKey": true, + "PublicKey": true, + "RIPEMD160": true, + "RegisterHash": true, + "SHA1": true, + "SHA224": true, + "SHA256": true, + "SHA384": true, + "SHA3_224": true, + "SHA3_256": true, + "SHA3_384": true, + "SHA3_512": true, + "SHA512": true, + "SHA512_224": true, + "SHA512_256": true, + "Signer": true, + "SignerOpts": true, + }, + "crypto/aes": map[string]bool{ + "BlockSize": true, + "KeySizeError": true, + "NewCipher": true, + }, + "crypto/cipher": map[string]bool{ + "AEAD": true, + "Block": true, + "BlockMode": true, + "NewCBCDecrypter": true, + "NewCBCEncrypter": true, + "NewCFBDecrypter": true, + "NewCFBEncrypter": true, + "NewCTR": true, + "NewGCM": true, + "NewGCMWithNonceSize": true, + "NewGCMWithTagSize": true, + "NewOFB": true, + "Stream": true, + "StreamReader": true, + "StreamWriter": true, + }, + "crypto/des": map[string]bool{ + "BlockSize": true, + "KeySizeError": true, + "NewCipher": true, + "NewTripleDESCipher": true, + }, + "crypto/dsa": map[string]bool{ + "ErrInvalidPublicKey": true, + "GenerateKey": true, + "GenerateParameters": true, + "L1024N160": true, + "L2048N224": true, + "L2048N256": true, + "L3072N256": true, + "ParameterSizes": true, + "Parameters": true, + "PrivateKey": true, + "PublicKey": true, + "Sign": true, + "Verify": true, + }, + "crypto/ecdsa": map[string]bool{ + "GenerateKey": true, + "PrivateKey": true, + "PublicKey": true, + "Sign": true, + "Verify": true, + }, + "crypto/elliptic": map[string]bool{ + "Curve": true, + "CurveParams": true, + "GenerateKey": true, + "Marshal": true, + "P224": true, + "P256": true, + "P384": true, + "P521": true, + "Unmarshal": true, + }, + "crypto/hmac": map[string]bool{ + "Equal": true, + "New": true, + }, + "crypto/md5": map[string]bool{ + "BlockSize": true, + "New": true, + "Size": true, + "Sum": true, + }, + "crypto/rand": map[string]bool{ + "Int": true, + "Prime": true, + "Read": true, + "Reader": true, + }, + "crypto/rc4": map[string]bool{ + "Cipher": true, + "KeySizeError": true, + "NewCipher": true, + }, + 
"crypto/rsa": map[string]bool{ + "CRTValue": true, + "DecryptOAEP": true, + "DecryptPKCS1v15": true, + "DecryptPKCS1v15SessionKey": true, + "EncryptOAEP": true, + "EncryptPKCS1v15": true, + "ErrDecryption": true, + "ErrMessageTooLong": true, + "ErrVerification": true, + "GenerateKey": true, + "GenerateMultiPrimeKey": true, + "OAEPOptions": true, + "PKCS1v15DecryptOptions": true, + "PSSOptions": true, + "PSSSaltLengthAuto": true, + "PSSSaltLengthEqualsHash": true, + "PrecomputedValues": true, + "PrivateKey": true, + "PublicKey": true, + "SignPKCS1v15": true, + "SignPSS": true, + "VerifyPKCS1v15": true, + "VerifyPSS": true, + }, + "crypto/sha1": map[string]bool{ + "BlockSize": true, + "New": true, + "Size": true, + "Sum": true, + }, + "crypto/sha256": map[string]bool{ + "BlockSize": true, + "New": true, + "New224": true, + "Size": true, + "Size224": true, + "Sum224": true, + "Sum256": true, + }, + "crypto/sha512": map[string]bool{ + "BlockSize": true, + "New": true, + "New384": true, + "New512_224": true, + "New512_256": true, + "Size": true, + "Size224": true, + "Size256": true, + "Size384": true, + "Sum384": true, + "Sum512": true, + "Sum512_224": true, + "Sum512_256": true, + }, + "crypto/subtle": map[string]bool{ + "ConstantTimeByteEq": true, + "ConstantTimeCompare": true, + "ConstantTimeCopy": true, + "ConstantTimeEq": true, + "ConstantTimeLessOrEq": true, + "ConstantTimeSelect": true, + }, + "crypto/tls": map[string]bool{ + "Certificate": true, + "CertificateRequestInfo": true, + "Client": true, + "ClientAuthType": true, + "ClientHelloInfo": true, + "ClientSessionCache": true, + "ClientSessionState": true, + "Config": true, + "Conn": true, + "ConnectionState": true, + "CurveID": true, + "CurveP256": true, + "CurveP384": true, + "CurveP521": true, + "Dial": true, + "DialWithDialer": true, + "ECDSAWithP256AndSHA256": true, + "ECDSAWithP384AndSHA384": true, + "ECDSAWithP521AndSHA512": true, + "ECDSAWithSHA1": true, + "Listen": true, + "LoadX509KeyPair": true, + "NewLRUClientSessionCache": true, + "NewListener": true, + "NoClientCert": true, + "PKCS1WithSHA1": true, + "PKCS1WithSHA256": true, + "PKCS1WithSHA384": true, + "PKCS1WithSHA512": true, + "PSSWithSHA256": true, + "PSSWithSHA384": true, + "PSSWithSHA512": true, + "RecordHeaderError": true, + "RenegotiateFreelyAsClient": true, + "RenegotiateNever": true, + "RenegotiateOnceAsClient": true, + "RenegotiationSupport": true, + "RequestClientCert": true, + "RequireAndVerifyClientCert": true, + "RequireAnyClientCert": true, + "Server": true, + "SignatureScheme": true, + "TLS_AES_128_GCM_SHA256": true, + "TLS_AES_256_GCM_SHA384": true, + "TLS_CHACHA20_POLY1305_SHA256": true, + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA": true, + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256": true, + "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256": true, + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA": true, + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384": true, + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305": true, + "TLS_ECDHE_ECDSA_WITH_RC4_128_SHA": true, + "TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA": true, + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA": true, + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256": true, + "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256": true, + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA": true, + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384": true, + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305": true, + "TLS_ECDHE_RSA_WITH_RC4_128_SHA": true, + "TLS_FALLBACK_SCSV": true, + "TLS_RSA_WITH_3DES_EDE_CBC_SHA": true, + "TLS_RSA_WITH_AES_128_CBC_SHA": true, + "TLS_RSA_WITH_AES_128_CBC_SHA256": true, + 
"TLS_RSA_WITH_AES_128_GCM_SHA256": true, + "TLS_RSA_WITH_AES_256_CBC_SHA": true, + "TLS_RSA_WITH_AES_256_GCM_SHA384": true, + "TLS_RSA_WITH_RC4_128_SHA": true, + "VerifyClientCertIfGiven": true, + "VersionSSL30": true, + "VersionTLS10": true, + "VersionTLS11": true, + "VersionTLS12": true, + "VersionTLS13": true, + "X25519": true, + "X509KeyPair": true, + }, + "crypto/x509": map[string]bool{ + "CANotAuthorizedForExtKeyUsage": true, + "CANotAuthorizedForThisName": true, + "CertPool": true, + "Certificate": true, + "CertificateInvalidError": true, + "CertificateRequest": true, + "ConstraintViolationError": true, + "CreateCertificate": true, + "CreateCertificateRequest": true, + "DSA": true, + "DSAWithSHA1": true, + "DSAWithSHA256": true, + "DecryptPEMBlock": true, + "ECDSA": true, + "ECDSAWithSHA1": true, + "ECDSAWithSHA256": true, + "ECDSAWithSHA384": true, + "ECDSAWithSHA512": true, + "EncryptPEMBlock": true, + "ErrUnsupportedAlgorithm": true, + "Expired": true, + "ExtKeyUsage": true, + "ExtKeyUsageAny": true, + "ExtKeyUsageClientAuth": true, + "ExtKeyUsageCodeSigning": true, + "ExtKeyUsageEmailProtection": true, + "ExtKeyUsageIPSECEndSystem": true, + "ExtKeyUsageIPSECTunnel": true, + "ExtKeyUsageIPSECUser": true, + "ExtKeyUsageMicrosoftCommercialCodeSigning": true, + "ExtKeyUsageMicrosoftKernelCodeSigning": true, + "ExtKeyUsageMicrosoftServerGatedCrypto": true, + "ExtKeyUsageNetscapeServerGatedCrypto": true, + "ExtKeyUsageOCSPSigning": true, + "ExtKeyUsageServerAuth": true, + "ExtKeyUsageTimeStamping": true, + "HostnameError": true, + "IncompatibleUsage": true, + "IncorrectPasswordError": true, + "InsecureAlgorithmError": true, + "InvalidReason": true, + "IsEncryptedPEMBlock": true, + "KeyUsage": true, + "KeyUsageCRLSign": true, + "KeyUsageCertSign": true, + "KeyUsageContentCommitment": true, + "KeyUsageDataEncipherment": true, + "KeyUsageDecipherOnly": true, + "KeyUsageDigitalSignature": true, + "KeyUsageEncipherOnly": true, + "KeyUsageKeyAgreement": true, + "KeyUsageKeyEncipherment": true, + "MD2WithRSA": true, + "MD5WithRSA": true, + "MarshalECPrivateKey": true, + "MarshalPKCS1PrivateKey": true, + "MarshalPKCS1PublicKey": true, + "MarshalPKCS8PrivateKey": true, + "MarshalPKIXPublicKey": true, + "NameConstraintsWithoutSANs": true, + "NameMismatch": true, + "NewCertPool": true, + "NotAuthorizedToSign": true, + "PEMCipher": true, + "PEMCipher3DES": true, + "PEMCipherAES128": true, + "PEMCipherAES192": true, + "PEMCipherAES256": true, + "PEMCipherDES": true, + "ParseCRL": true, + "ParseCertificate": true, + "ParseCertificateRequest": true, + "ParseCertificates": true, + "ParseDERCRL": true, + "ParseECPrivateKey": true, + "ParsePKCS1PrivateKey": true, + "ParsePKCS1PublicKey": true, + "ParsePKCS8PrivateKey": true, + "ParsePKIXPublicKey": true, + "PublicKeyAlgorithm": true, + "RSA": true, + "SHA1WithRSA": true, + "SHA256WithRSA": true, + "SHA256WithRSAPSS": true, + "SHA384WithRSA": true, + "SHA384WithRSAPSS": true, + "SHA512WithRSA": true, + "SHA512WithRSAPSS": true, + "SignatureAlgorithm": true, + "SystemCertPool": true, + "SystemRootsError": true, + "TooManyConstraints": true, + "TooManyIntermediates": true, + "UnconstrainedName": true, + "UnhandledCriticalExtension": true, + "UnknownAuthorityError": true, + "UnknownPublicKeyAlgorithm": true, + "UnknownSignatureAlgorithm": true, + "VerifyOptions": true, + }, + "crypto/x509/pkix": map[string]bool{ + "AlgorithmIdentifier": true, + "AttributeTypeAndValue": true, + "AttributeTypeAndValueSET": true, + "CertificateList": true, + "Extension": 
true, + "Name": true, + "RDNSequence": true, + "RelativeDistinguishedNameSET": true, + "RevokedCertificate": true, + "TBSCertificateList": true, + }, + "database/sql": map[string]bool{ + "ColumnType": true, + "Conn": true, + "DB": true, + "DBStats": true, + "Drivers": true, + "ErrConnDone": true, + "ErrNoRows": true, + "ErrTxDone": true, + "IsolationLevel": true, + "LevelDefault": true, + "LevelLinearizable": true, + "LevelReadCommitted": true, + "LevelReadUncommitted": true, + "LevelRepeatableRead": true, + "LevelSerializable": true, + "LevelSnapshot": true, + "LevelWriteCommitted": true, + "Named": true, + "NamedArg": true, + "NullBool": true, + "NullFloat64": true, + "NullInt64": true, + "NullString": true, + "Open": true, + "OpenDB": true, + "Out": true, + "RawBytes": true, + "Register": true, + "Result": true, + "Row": true, + "Rows": true, + "Scanner": true, + "Stmt": true, + "Tx": true, + "TxOptions": true, + }, + "database/sql/driver": map[string]bool{ + "Bool": true, + "ColumnConverter": true, + "Conn": true, + "ConnBeginTx": true, + "ConnPrepareContext": true, + "Connector": true, + "DefaultParameterConverter": true, + "Driver": true, + "DriverContext": true, + "ErrBadConn": true, + "ErrRemoveArgument": true, + "ErrSkip": true, + "Execer": true, + "ExecerContext": true, + "Int32": true, + "IsScanValue": true, + "IsValue": true, + "IsolationLevel": true, + "NamedValue": true, + "NamedValueChecker": true, + "NotNull": true, + "Null": true, + "Pinger": true, + "Queryer": true, + "QueryerContext": true, + "Result": true, + "ResultNoRows": true, + "Rows": true, + "RowsAffected": true, + "RowsColumnTypeDatabaseTypeName": true, + "RowsColumnTypeLength": true, + "RowsColumnTypeNullable": true, + "RowsColumnTypePrecisionScale": true, + "RowsColumnTypeScanType": true, + "RowsNextResultSet": true, + "SessionResetter": true, + "Stmt": true, + "StmtExecContext": true, + "StmtQueryContext": true, + "String": true, + "Tx": true, + "TxOptions": true, + "Value": true, + "ValueConverter": true, + "Valuer": true, + }, + "debug/dwarf": map[string]bool{ + "AddrType": true, + "ArrayType": true, + "Attr": true, + "AttrAbstractOrigin": true, + "AttrAccessibility": true, + "AttrAddrClass": true, + "AttrAllocated": true, + "AttrArtificial": true, + "AttrAssociated": true, + "AttrBaseTypes": true, + "AttrBitOffset": true, + "AttrBitSize": true, + "AttrByteSize": true, + "AttrCallColumn": true, + "AttrCallFile": true, + "AttrCallLine": true, + "AttrCalling": true, + "AttrCommonRef": true, + "AttrCompDir": true, + "AttrConstValue": true, + "AttrContainingType": true, + "AttrCount": true, + "AttrDataLocation": true, + "AttrDataMemberLoc": true, + "AttrDeclColumn": true, + "AttrDeclFile": true, + "AttrDeclLine": true, + "AttrDeclaration": true, + "AttrDefaultValue": true, + "AttrDescription": true, + "AttrDiscr": true, + "AttrDiscrList": true, + "AttrDiscrValue": true, + "AttrEncoding": true, + "AttrEntrypc": true, + "AttrExtension": true, + "AttrExternal": true, + "AttrFrameBase": true, + "AttrFriend": true, + "AttrHighpc": true, + "AttrIdentifierCase": true, + "AttrImport": true, + "AttrInline": true, + "AttrIsOptional": true, + "AttrLanguage": true, + "AttrLocation": true, + "AttrLowerBound": true, + "AttrLowpc": true, + "AttrMacroInfo": true, + "AttrName": true, + "AttrNamelistItem": true, + "AttrOrdering": true, + "AttrPriority": true, + "AttrProducer": true, + "AttrPrototyped": true, + "AttrRanges": true, + "AttrReturnAddr": true, + "AttrSegment": true, + "AttrSibling": true, + "AttrSpecification": true, 
+ "AttrStartScope": true, + "AttrStaticLink": true, + "AttrStmtList": true, + "AttrStride": true, + "AttrStrideSize": true, + "AttrStringLength": true, + "AttrTrampoline": true, + "AttrType": true, + "AttrUpperBound": true, + "AttrUseLocation": true, + "AttrUseUTF8": true, + "AttrVarParam": true, + "AttrVirtuality": true, + "AttrVisibility": true, + "AttrVtableElemLoc": true, + "BasicType": true, + "BoolType": true, + "CharType": true, + "Class": true, + "ClassAddress": true, + "ClassBlock": true, + "ClassConstant": true, + "ClassExprLoc": true, + "ClassFlag": true, + "ClassLinePtr": true, + "ClassLocListPtr": true, + "ClassMacPtr": true, + "ClassRangeListPtr": true, + "ClassReference": true, + "ClassReferenceAlt": true, + "ClassReferenceSig": true, + "ClassString": true, + "ClassStringAlt": true, + "ClassUnknown": true, + "CommonType": true, + "ComplexType": true, + "Data": true, + "DecodeError": true, + "DotDotDotType": true, + "Entry": true, + "EnumType": true, + "EnumValue": true, + "ErrUnknownPC": true, + "Field": true, + "FloatType": true, + "FuncType": true, + "IntType": true, + "LineEntry": true, + "LineFile": true, + "LineReader": true, + "LineReaderPos": true, + "New": true, + "Offset": true, + "PtrType": true, + "QualType": true, + "Reader": true, + "StructField": true, + "StructType": true, + "Tag": true, + "TagAccessDeclaration": true, + "TagArrayType": true, + "TagBaseType": true, + "TagCatchDwarfBlock": true, + "TagClassType": true, + "TagCommonDwarfBlock": true, + "TagCommonInclusion": true, + "TagCompileUnit": true, + "TagCondition": true, + "TagConstType": true, + "TagConstant": true, + "TagDwarfProcedure": true, + "TagEntryPoint": true, + "TagEnumerationType": true, + "TagEnumerator": true, + "TagFileType": true, + "TagFormalParameter": true, + "TagFriend": true, + "TagImportedDeclaration": true, + "TagImportedModule": true, + "TagImportedUnit": true, + "TagInheritance": true, + "TagInlinedSubroutine": true, + "TagInterfaceType": true, + "TagLabel": true, + "TagLexDwarfBlock": true, + "TagMember": true, + "TagModule": true, + "TagMutableType": true, + "TagNamelist": true, + "TagNamelistItem": true, + "TagNamespace": true, + "TagPackedType": true, + "TagPartialUnit": true, + "TagPointerType": true, + "TagPtrToMemberType": true, + "TagReferenceType": true, + "TagRestrictType": true, + "TagRvalueReferenceType": true, + "TagSetType": true, + "TagSharedType": true, + "TagStringType": true, + "TagStructType": true, + "TagSubprogram": true, + "TagSubrangeType": true, + "TagSubroutineType": true, + "TagTemplateAlias": true, + "TagTemplateTypeParameter": true, + "TagTemplateValueParameter": true, + "TagThrownType": true, + "TagTryDwarfBlock": true, + "TagTypeUnit": true, + "TagTypedef": true, + "TagUnionType": true, + "TagUnspecifiedParameters": true, + "TagUnspecifiedType": true, + "TagVariable": true, + "TagVariant": true, + "TagVariantPart": true, + "TagVolatileType": true, + "TagWithStmt": true, + "Type": true, + "TypedefType": true, + "UcharType": true, + "UintType": true, + "UnspecifiedType": true, + "VoidType": true, + }, + "debug/elf": map[string]bool{ + "ARM_MAGIC_TRAMP_NUMBER": true, + "COMPRESS_HIOS": true, + "COMPRESS_HIPROC": true, + "COMPRESS_LOOS": true, + "COMPRESS_LOPROC": true, + "COMPRESS_ZLIB": true, + "Chdr32": true, + "Chdr64": true, + "Class": true, + "CompressionType": true, + "DF_BIND_NOW": true, + "DF_ORIGIN": true, + "DF_STATIC_TLS": true, + "DF_SYMBOLIC": true, + "DF_TEXTREL": true, + "DT_BIND_NOW": true, + "DT_DEBUG": true, + "DT_ENCODING": true, + 
"DT_FINI": true, + "DT_FINI_ARRAY": true, + "DT_FINI_ARRAYSZ": true, + "DT_FLAGS": true, + "DT_HASH": true, + "DT_HIOS": true, + "DT_HIPROC": true, + "DT_INIT": true, + "DT_INIT_ARRAY": true, + "DT_INIT_ARRAYSZ": true, + "DT_JMPREL": true, + "DT_LOOS": true, + "DT_LOPROC": true, + "DT_NEEDED": true, + "DT_NULL": true, + "DT_PLTGOT": true, + "DT_PLTREL": true, + "DT_PLTRELSZ": true, + "DT_PREINIT_ARRAY": true, + "DT_PREINIT_ARRAYSZ": true, + "DT_REL": true, + "DT_RELA": true, + "DT_RELAENT": true, + "DT_RELASZ": true, + "DT_RELENT": true, + "DT_RELSZ": true, + "DT_RPATH": true, + "DT_RUNPATH": true, + "DT_SONAME": true, + "DT_STRSZ": true, + "DT_STRTAB": true, + "DT_SYMBOLIC": true, + "DT_SYMENT": true, + "DT_SYMTAB": true, + "DT_TEXTREL": true, + "DT_VERNEED": true, + "DT_VERNEEDNUM": true, + "DT_VERSYM": true, + "Data": true, + "Dyn32": true, + "Dyn64": true, + "DynFlag": true, + "DynTag": true, + "EI_ABIVERSION": true, + "EI_CLASS": true, + "EI_DATA": true, + "EI_NIDENT": true, + "EI_OSABI": true, + "EI_PAD": true, + "EI_VERSION": true, + "ELFCLASS32": true, + "ELFCLASS64": true, + "ELFCLASSNONE": true, + "ELFDATA2LSB": true, + "ELFDATA2MSB": true, + "ELFDATANONE": true, + "ELFMAG": true, + "ELFOSABI_86OPEN": true, + "ELFOSABI_AIX": true, + "ELFOSABI_ARM": true, + "ELFOSABI_AROS": true, + "ELFOSABI_CLOUDABI": true, + "ELFOSABI_FENIXOS": true, + "ELFOSABI_FREEBSD": true, + "ELFOSABI_HPUX": true, + "ELFOSABI_HURD": true, + "ELFOSABI_IRIX": true, + "ELFOSABI_LINUX": true, + "ELFOSABI_MODESTO": true, + "ELFOSABI_NETBSD": true, + "ELFOSABI_NONE": true, + "ELFOSABI_NSK": true, + "ELFOSABI_OPENBSD": true, + "ELFOSABI_OPENVMS": true, + "ELFOSABI_SOLARIS": true, + "ELFOSABI_STANDALONE": true, + "ELFOSABI_TRU64": true, + "EM_386": true, + "EM_486": true, + "EM_56800EX": true, + "EM_68HC05": true, + "EM_68HC08": true, + "EM_68HC11": true, + "EM_68HC12": true, + "EM_68HC16": true, + "EM_68K": true, + "EM_78KOR": true, + "EM_8051": true, + "EM_860": true, + "EM_88K": true, + "EM_960": true, + "EM_AARCH64": true, + "EM_ALPHA": true, + "EM_ALPHA_STD": true, + "EM_ALTERA_NIOS2": true, + "EM_AMDGPU": true, + "EM_ARC": true, + "EM_ARCA": true, + "EM_ARC_COMPACT": true, + "EM_ARC_COMPACT2": true, + "EM_ARM": true, + "EM_AVR": true, + "EM_AVR32": true, + "EM_BA1": true, + "EM_BA2": true, + "EM_BLACKFIN": true, + "EM_BPF": true, + "EM_C166": true, + "EM_CDP": true, + "EM_CE": true, + "EM_CLOUDSHIELD": true, + "EM_COGE": true, + "EM_COLDFIRE": true, + "EM_COOL": true, + "EM_COREA_1ST": true, + "EM_COREA_2ND": true, + "EM_CR": true, + "EM_CR16": true, + "EM_CRAYNV2": true, + "EM_CRIS": true, + "EM_CRX": true, + "EM_CSR_KALIMBA": true, + "EM_CUDA": true, + "EM_CYPRESS_M8C": true, + "EM_D10V": true, + "EM_D30V": true, + "EM_DSP24": true, + "EM_DSPIC30F": true, + "EM_DXP": true, + "EM_ECOG1": true, + "EM_ECOG16": true, + "EM_ECOG1X": true, + "EM_ECOG2": true, + "EM_ETPU": true, + "EM_EXCESS": true, + "EM_F2MC16": true, + "EM_FIREPATH": true, + "EM_FR20": true, + "EM_FR30": true, + "EM_FT32": true, + "EM_FX66": true, + "EM_H8S": true, + "EM_H8_300": true, + "EM_H8_300H": true, + "EM_H8_500": true, + "EM_HUANY": true, + "EM_IA_64": true, + "EM_INTEL205": true, + "EM_INTEL206": true, + "EM_INTEL207": true, + "EM_INTEL208": true, + "EM_INTEL209": true, + "EM_IP2K": true, + "EM_JAVELIN": true, + "EM_K10M": true, + "EM_KM32": true, + "EM_KMX16": true, + "EM_KMX32": true, + "EM_KMX8": true, + "EM_KVARC": true, + "EM_L10M": true, + "EM_LANAI": true, + "EM_LATTICEMICO32": true, + "EM_M16C": true, + "EM_M32": true, + 
"EM_M32C": true, + "EM_M32R": true, + "EM_MANIK": true, + "EM_MAX": true, + "EM_MAXQ30": true, + "EM_MCHP_PIC": true, + "EM_MCST_ELBRUS": true, + "EM_ME16": true, + "EM_METAG": true, + "EM_MICROBLAZE": true, + "EM_MIPS": true, + "EM_MIPS_RS3_LE": true, + "EM_MIPS_RS4_BE": true, + "EM_MIPS_X": true, + "EM_MMA": true, + "EM_MMDSP_PLUS": true, + "EM_MMIX": true, + "EM_MN10200": true, + "EM_MN10300": true, + "EM_MOXIE": true, + "EM_MSP430": true, + "EM_NCPU": true, + "EM_NDR1": true, + "EM_NDS32": true, + "EM_NONE": true, + "EM_NORC": true, + "EM_NS32K": true, + "EM_OPEN8": true, + "EM_OPENRISC": true, + "EM_PARISC": true, + "EM_PCP": true, + "EM_PDP10": true, + "EM_PDP11": true, + "EM_PDSP": true, + "EM_PJ": true, + "EM_PPC": true, + "EM_PPC64": true, + "EM_PRISM": true, + "EM_QDSP6": true, + "EM_R32C": true, + "EM_RCE": true, + "EM_RH32": true, + "EM_RISCV": true, + "EM_RL78": true, + "EM_RS08": true, + "EM_RX": true, + "EM_S370": true, + "EM_S390": true, + "EM_SCORE7": true, + "EM_SEP": true, + "EM_SE_C17": true, + "EM_SE_C33": true, + "EM_SH": true, + "EM_SHARC": true, + "EM_SLE9X": true, + "EM_SNP1K": true, + "EM_SPARC": true, + "EM_SPARC32PLUS": true, + "EM_SPARCV9": true, + "EM_ST100": true, + "EM_ST19": true, + "EM_ST200": true, + "EM_ST7": true, + "EM_ST9PLUS": true, + "EM_STARCORE": true, + "EM_STM8": true, + "EM_STXP7X": true, + "EM_SVX": true, + "EM_TILE64": true, + "EM_TILEGX": true, + "EM_TILEPRO": true, + "EM_TINYJ": true, + "EM_TI_ARP32": true, + "EM_TI_C2000": true, + "EM_TI_C5500": true, + "EM_TI_C6000": true, + "EM_TI_PRU": true, + "EM_TMM_GPP": true, + "EM_TPC": true, + "EM_TRICORE": true, + "EM_TRIMEDIA": true, + "EM_TSK3000": true, + "EM_UNICORE": true, + "EM_V800": true, + "EM_V850": true, + "EM_VAX": true, + "EM_VIDEOCORE": true, + "EM_VIDEOCORE3": true, + "EM_VIDEOCORE5": true, + "EM_VISIUM": true, + "EM_VPP500": true, + "EM_X86_64": true, + "EM_XCORE": true, + "EM_XGATE": true, + "EM_XIMO16": true, + "EM_XTENSA": true, + "EM_Z80": true, + "EM_ZSP": true, + "ET_CORE": true, + "ET_DYN": true, + "ET_EXEC": true, + "ET_HIOS": true, + "ET_HIPROC": true, + "ET_LOOS": true, + "ET_LOPROC": true, + "ET_NONE": true, + "ET_REL": true, + "EV_CURRENT": true, + "EV_NONE": true, + "ErrNoSymbols": true, + "File": true, + "FileHeader": true, + "FormatError": true, + "Header32": true, + "Header64": true, + "ImportedSymbol": true, + "Machine": true, + "NT_FPREGSET": true, + "NT_PRPSINFO": true, + "NT_PRSTATUS": true, + "NType": true, + "NewFile": true, + "OSABI": true, + "Open": true, + "PF_MASKOS": true, + "PF_MASKPROC": true, + "PF_R": true, + "PF_W": true, + "PF_X": true, + "PT_DYNAMIC": true, + "PT_HIOS": true, + "PT_HIPROC": true, + "PT_INTERP": true, + "PT_LOAD": true, + "PT_LOOS": true, + "PT_LOPROC": true, + "PT_NOTE": true, + "PT_NULL": true, + "PT_PHDR": true, + "PT_SHLIB": true, + "PT_TLS": true, + "Prog": true, + "Prog32": true, + "Prog64": true, + "ProgFlag": true, + "ProgHeader": true, + "ProgType": true, + "R_386": true, + "R_386_16": true, + "R_386_32": true, + "R_386_32PLT": true, + "R_386_8": true, + "R_386_COPY": true, + "R_386_GLOB_DAT": true, + "R_386_GOT32": true, + "R_386_GOT32X": true, + "R_386_GOTOFF": true, + "R_386_GOTPC": true, + "R_386_IRELATIVE": true, + "R_386_JMP_SLOT": true, + "R_386_NONE": true, + "R_386_PC16": true, + "R_386_PC32": true, + "R_386_PC8": true, + "R_386_PLT32": true, + "R_386_RELATIVE": true, + "R_386_SIZE32": true, + "R_386_TLS_DESC": true, + "R_386_TLS_DESC_CALL": true, + "R_386_TLS_DTPMOD32": true, + "R_386_TLS_DTPOFF32": true, + 
"R_386_TLS_GD": true, + "R_386_TLS_GD_32": true, + "R_386_TLS_GD_CALL": true, + "R_386_TLS_GD_POP": true, + "R_386_TLS_GD_PUSH": true, + "R_386_TLS_GOTDESC": true, + "R_386_TLS_GOTIE": true, + "R_386_TLS_IE": true, + "R_386_TLS_IE_32": true, + "R_386_TLS_LDM": true, + "R_386_TLS_LDM_32": true, + "R_386_TLS_LDM_CALL": true, + "R_386_TLS_LDM_POP": true, + "R_386_TLS_LDM_PUSH": true, + "R_386_TLS_LDO_32": true, + "R_386_TLS_LE": true, + "R_386_TLS_LE_32": true, + "R_386_TLS_TPOFF": true, + "R_386_TLS_TPOFF32": true, + "R_390": true, + "R_390_12": true, + "R_390_16": true, + "R_390_20": true, + "R_390_32": true, + "R_390_64": true, + "R_390_8": true, + "R_390_COPY": true, + "R_390_GLOB_DAT": true, + "R_390_GOT12": true, + "R_390_GOT16": true, + "R_390_GOT20": true, + "R_390_GOT32": true, + "R_390_GOT64": true, + "R_390_GOTENT": true, + "R_390_GOTOFF": true, + "R_390_GOTOFF16": true, + "R_390_GOTOFF64": true, + "R_390_GOTPC": true, + "R_390_GOTPCDBL": true, + "R_390_GOTPLT12": true, + "R_390_GOTPLT16": true, + "R_390_GOTPLT20": true, + "R_390_GOTPLT32": true, + "R_390_GOTPLT64": true, + "R_390_GOTPLTENT": true, + "R_390_GOTPLTOFF16": true, + "R_390_GOTPLTOFF32": true, + "R_390_GOTPLTOFF64": true, + "R_390_JMP_SLOT": true, + "R_390_NONE": true, + "R_390_PC16": true, + "R_390_PC16DBL": true, + "R_390_PC32": true, + "R_390_PC32DBL": true, + "R_390_PC64": true, + "R_390_PLT16DBL": true, + "R_390_PLT32": true, + "R_390_PLT32DBL": true, + "R_390_PLT64": true, + "R_390_RELATIVE": true, + "R_390_TLS_DTPMOD": true, + "R_390_TLS_DTPOFF": true, + "R_390_TLS_GD32": true, + "R_390_TLS_GD64": true, + "R_390_TLS_GDCALL": true, + "R_390_TLS_GOTIE12": true, + "R_390_TLS_GOTIE20": true, + "R_390_TLS_GOTIE32": true, + "R_390_TLS_GOTIE64": true, + "R_390_TLS_IE32": true, + "R_390_TLS_IE64": true, + "R_390_TLS_IEENT": true, + "R_390_TLS_LDCALL": true, + "R_390_TLS_LDM32": true, + "R_390_TLS_LDM64": true, + "R_390_TLS_LDO32": true, + "R_390_TLS_LDO64": true, + "R_390_TLS_LE32": true, + "R_390_TLS_LE64": true, + "R_390_TLS_LOAD": true, + "R_390_TLS_TPOFF": true, + "R_AARCH64": true, + "R_AARCH64_ABS16": true, + "R_AARCH64_ABS32": true, + "R_AARCH64_ABS64": true, + "R_AARCH64_ADD_ABS_LO12_NC": true, + "R_AARCH64_ADR_GOT_PAGE": true, + "R_AARCH64_ADR_PREL_LO21": true, + "R_AARCH64_ADR_PREL_PG_HI21": true, + "R_AARCH64_ADR_PREL_PG_HI21_NC": true, + "R_AARCH64_CALL26": true, + "R_AARCH64_CONDBR19": true, + "R_AARCH64_COPY": true, + "R_AARCH64_GLOB_DAT": true, + "R_AARCH64_GOT_LD_PREL19": true, + "R_AARCH64_IRELATIVE": true, + "R_AARCH64_JUMP26": true, + "R_AARCH64_JUMP_SLOT": true, + "R_AARCH64_LD64_GOTOFF_LO15": true, + "R_AARCH64_LD64_GOTPAGE_LO15": true, + "R_AARCH64_LD64_GOT_LO12_NC": true, + "R_AARCH64_LDST128_ABS_LO12_NC": true, + "R_AARCH64_LDST16_ABS_LO12_NC": true, + "R_AARCH64_LDST32_ABS_LO12_NC": true, + "R_AARCH64_LDST64_ABS_LO12_NC": true, + "R_AARCH64_LDST8_ABS_LO12_NC": true, + "R_AARCH64_LD_PREL_LO19": true, + "R_AARCH64_MOVW_SABS_G0": true, + "R_AARCH64_MOVW_SABS_G1": true, + "R_AARCH64_MOVW_SABS_G2": true, + "R_AARCH64_MOVW_UABS_G0": true, + "R_AARCH64_MOVW_UABS_G0_NC": true, + "R_AARCH64_MOVW_UABS_G1": true, + "R_AARCH64_MOVW_UABS_G1_NC": true, + "R_AARCH64_MOVW_UABS_G2": true, + "R_AARCH64_MOVW_UABS_G2_NC": true, + "R_AARCH64_MOVW_UABS_G3": true, + "R_AARCH64_NONE": true, + "R_AARCH64_NULL": true, + "R_AARCH64_P32_ABS16": true, + "R_AARCH64_P32_ABS32": true, + "R_AARCH64_P32_ADD_ABS_LO12_NC": true, + "R_AARCH64_P32_ADR_GOT_PAGE": true, + "R_AARCH64_P32_ADR_PREL_LO21": true, + 
"R_AARCH64_P32_ADR_PREL_PG_HI21": true, + "R_AARCH64_P32_CALL26": true, + "R_AARCH64_P32_CONDBR19": true, + "R_AARCH64_P32_COPY": true, + "R_AARCH64_P32_GLOB_DAT": true, + "R_AARCH64_P32_GOT_LD_PREL19": true, + "R_AARCH64_P32_IRELATIVE": true, + "R_AARCH64_P32_JUMP26": true, + "R_AARCH64_P32_JUMP_SLOT": true, + "R_AARCH64_P32_LD32_GOT_LO12_NC": true, + "R_AARCH64_P32_LDST128_ABS_LO12_NC": true, + "R_AARCH64_P32_LDST16_ABS_LO12_NC": true, + "R_AARCH64_P32_LDST32_ABS_LO12_NC": true, + "R_AARCH64_P32_LDST64_ABS_LO12_NC": true, + "R_AARCH64_P32_LDST8_ABS_LO12_NC": true, + "R_AARCH64_P32_LD_PREL_LO19": true, + "R_AARCH64_P32_MOVW_SABS_G0": true, + "R_AARCH64_P32_MOVW_UABS_G0": true, + "R_AARCH64_P32_MOVW_UABS_G0_NC": true, + "R_AARCH64_P32_MOVW_UABS_G1": true, + "R_AARCH64_P32_PREL16": true, + "R_AARCH64_P32_PREL32": true, + "R_AARCH64_P32_RELATIVE": true, + "R_AARCH64_P32_TLSDESC": true, + "R_AARCH64_P32_TLSDESC_ADD_LO12_NC": true, + "R_AARCH64_P32_TLSDESC_ADR_PAGE21": true, + "R_AARCH64_P32_TLSDESC_ADR_PREL21": true, + "R_AARCH64_P32_TLSDESC_CALL": true, + "R_AARCH64_P32_TLSDESC_LD32_LO12_NC": true, + "R_AARCH64_P32_TLSDESC_LD_PREL19": true, + "R_AARCH64_P32_TLSGD_ADD_LO12_NC": true, + "R_AARCH64_P32_TLSGD_ADR_PAGE21": true, + "R_AARCH64_P32_TLSIE_ADR_GOTTPREL_PAGE21": true, + "R_AARCH64_P32_TLSIE_LD32_GOTTPREL_LO12_NC": true, + "R_AARCH64_P32_TLSIE_LD_GOTTPREL_PREL19": true, + "R_AARCH64_P32_TLSLE_ADD_TPREL_HI12": true, + "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12": true, + "R_AARCH64_P32_TLSLE_ADD_TPREL_LO12_NC": true, + "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0": true, + "R_AARCH64_P32_TLSLE_MOVW_TPREL_G0_NC": true, + "R_AARCH64_P32_TLSLE_MOVW_TPREL_G1": true, + "R_AARCH64_P32_TLS_DTPMOD": true, + "R_AARCH64_P32_TLS_DTPREL": true, + "R_AARCH64_P32_TLS_TPREL": true, + "R_AARCH64_P32_TSTBR14": true, + "R_AARCH64_PREL16": true, + "R_AARCH64_PREL32": true, + "R_AARCH64_PREL64": true, + "R_AARCH64_RELATIVE": true, + "R_AARCH64_TLSDESC": true, + "R_AARCH64_TLSDESC_ADD": true, + "R_AARCH64_TLSDESC_ADD_LO12_NC": true, + "R_AARCH64_TLSDESC_ADR_PAGE21": true, + "R_AARCH64_TLSDESC_ADR_PREL21": true, + "R_AARCH64_TLSDESC_CALL": true, + "R_AARCH64_TLSDESC_LD64_LO12_NC": true, + "R_AARCH64_TLSDESC_LDR": true, + "R_AARCH64_TLSDESC_LD_PREL19": true, + "R_AARCH64_TLSDESC_OFF_G0_NC": true, + "R_AARCH64_TLSDESC_OFF_G1": true, + "R_AARCH64_TLSGD_ADD_LO12_NC": true, + "R_AARCH64_TLSGD_ADR_PAGE21": true, + "R_AARCH64_TLSGD_ADR_PREL21": true, + "R_AARCH64_TLSGD_MOVW_G0_NC": true, + "R_AARCH64_TLSGD_MOVW_G1": true, + "R_AARCH64_TLSIE_ADR_GOTTPREL_PAGE21": true, + "R_AARCH64_TLSIE_LD64_GOTTPREL_LO12_NC": true, + "R_AARCH64_TLSIE_LD_GOTTPREL_PREL19": true, + "R_AARCH64_TLSIE_MOVW_GOTTPREL_G0_NC": true, + "R_AARCH64_TLSIE_MOVW_GOTTPREL_G1": true, + "R_AARCH64_TLSLD_ADR_PAGE21": true, + "R_AARCH64_TLSLD_ADR_PREL21": true, + "R_AARCH64_TLSLD_LDST128_DTPREL_LO12": true, + "R_AARCH64_TLSLD_LDST128_DTPREL_LO12_NC": true, + "R_AARCH64_TLSLE_ADD_TPREL_HI12": true, + "R_AARCH64_TLSLE_ADD_TPREL_LO12": true, + "R_AARCH64_TLSLE_ADD_TPREL_LO12_NC": true, + "R_AARCH64_TLSLE_LDST128_TPREL_LO12": true, + "R_AARCH64_TLSLE_LDST128_TPREL_LO12_NC": true, + "R_AARCH64_TLSLE_MOVW_TPREL_G0": true, + "R_AARCH64_TLSLE_MOVW_TPREL_G0_NC": true, + "R_AARCH64_TLSLE_MOVW_TPREL_G1": true, + "R_AARCH64_TLSLE_MOVW_TPREL_G1_NC": true, + "R_AARCH64_TLSLE_MOVW_TPREL_G2": true, + "R_AARCH64_TLS_DTPMOD64": true, + "R_AARCH64_TLS_DTPREL64": true, + "R_AARCH64_TLS_TPREL64": true, + "R_AARCH64_TSTBR14": true, + "R_ALPHA": true, + "R_ALPHA_BRADDR": true, + 
"R_ALPHA_COPY": true, + "R_ALPHA_GLOB_DAT": true, + "R_ALPHA_GPDISP": true, + "R_ALPHA_GPREL32": true, + "R_ALPHA_GPRELHIGH": true, + "R_ALPHA_GPRELLOW": true, + "R_ALPHA_GPVALUE": true, + "R_ALPHA_HINT": true, + "R_ALPHA_IMMED_BR_HI32": true, + "R_ALPHA_IMMED_GP_16": true, + "R_ALPHA_IMMED_GP_HI32": true, + "R_ALPHA_IMMED_LO32": true, + "R_ALPHA_IMMED_SCN_HI32": true, + "R_ALPHA_JMP_SLOT": true, + "R_ALPHA_LITERAL": true, + "R_ALPHA_LITUSE": true, + "R_ALPHA_NONE": true, + "R_ALPHA_OP_PRSHIFT": true, + "R_ALPHA_OP_PSUB": true, + "R_ALPHA_OP_PUSH": true, + "R_ALPHA_OP_STORE": true, + "R_ALPHA_REFLONG": true, + "R_ALPHA_REFQUAD": true, + "R_ALPHA_RELATIVE": true, + "R_ALPHA_SREL16": true, + "R_ALPHA_SREL32": true, + "R_ALPHA_SREL64": true, + "R_ARM": true, + "R_ARM_ABS12": true, + "R_ARM_ABS16": true, + "R_ARM_ABS32": true, + "R_ARM_ABS32_NOI": true, + "R_ARM_ABS8": true, + "R_ARM_ALU_PCREL_15_8": true, + "R_ARM_ALU_PCREL_23_15": true, + "R_ARM_ALU_PCREL_7_0": true, + "R_ARM_ALU_PC_G0": true, + "R_ARM_ALU_PC_G0_NC": true, + "R_ARM_ALU_PC_G1": true, + "R_ARM_ALU_PC_G1_NC": true, + "R_ARM_ALU_PC_G2": true, + "R_ARM_ALU_SBREL_19_12_NC": true, + "R_ARM_ALU_SBREL_27_20_CK": true, + "R_ARM_ALU_SB_G0": true, + "R_ARM_ALU_SB_G0_NC": true, + "R_ARM_ALU_SB_G1": true, + "R_ARM_ALU_SB_G1_NC": true, + "R_ARM_ALU_SB_G2": true, + "R_ARM_AMP_VCALL9": true, + "R_ARM_BASE_ABS": true, + "R_ARM_CALL": true, + "R_ARM_COPY": true, + "R_ARM_GLOB_DAT": true, + "R_ARM_GNU_VTENTRY": true, + "R_ARM_GNU_VTINHERIT": true, + "R_ARM_GOT32": true, + "R_ARM_GOTOFF": true, + "R_ARM_GOTOFF12": true, + "R_ARM_GOTPC": true, + "R_ARM_GOTRELAX": true, + "R_ARM_GOT_ABS": true, + "R_ARM_GOT_BREL12": true, + "R_ARM_GOT_PREL": true, + "R_ARM_IRELATIVE": true, + "R_ARM_JUMP24": true, + "R_ARM_JUMP_SLOT": true, + "R_ARM_LDC_PC_G0": true, + "R_ARM_LDC_PC_G1": true, + "R_ARM_LDC_PC_G2": true, + "R_ARM_LDC_SB_G0": true, + "R_ARM_LDC_SB_G1": true, + "R_ARM_LDC_SB_G2": true, + "R_ARM_LDRS_PC_G0": true, + "R_ARM_LDRS_PC_G1": true, + "R_ARM_LDRS_PC_G2": true, + "R_ARM_LDRS_SB_G0": true, + "R_ARM_LDRS_SB_G1": true, + "R_ARM_LDRS_SB_G2": true, + "R_ARM_LDR_PC_G1": true, + "R_ARM_LDR_PC_G2": true, + "R_ARM_LDR_SBREL_11_10_NC": true, + "R_ARM_LDR_SB_G0": true, + "R_ARM_LDR_SB_G1": true, + "R_ARM_LDR_SB_G2": true, + "R_ARM_ME_TOO": true, + "R_ARM_MOVT_ABS": true, + "R_ARM_MOVT_BREL": true, + "R_ARM_MOVT_PREL": true, + "R_ARM_MOVW_ABS_NC": true, + "R_ARM_MOVW_BREL": true, + "R_ARM_MOVW_BREL_NC": true, + "R_ARM_MOVW_PREL_NC": true, + "R_ARM_NONE": true, + "R_ARM_PC13": true, + "R_ARM_PC24": true, + "R_ARM_PLT32": true, + "R_ARM_PLT32_ABS": true, + "R_ARM_PREL31": true, + "R_ARM_PRIVATE_0": true, + "R_ARM_PRIVATE_1": true, + "R_ARM_PRIVATE_10": true, + "R_ARM_PRIVATE_11": true, + "R_ARM_PRIVATE_12": true, + "R_ARM_PRIVATE_13": true, + "R_ARM_PRIVATE_14": true, + "R_ARM_PRIVATE_15": true, + "R_ARM_PRIVATE_2": true, + "R_ARM_PRIVATE_3": true, + "R_ARM_PRIVATE_4": true, + "R_ARM_PRIVATE_5": true, + "R_ARM_PRIVATE_6": true, + "R_ARM_PRIVATE_7": true, + "R_ARM_PRIVATE_8": true, + "R_ARM_PRIVATE_9": true, + "R_ARM_RABS32": true, + "R_ARM_RBASE": true, + "R_ARM_REL32": true, + "R_ARM_REL32_NOI": true, + "R_ARM_RELATIVE": true, + "R_ARM_RPC24": true, + "R_ARM_RREL32": true, + "R_ARM_RSBREL32": true, + "R_ARM_RXPC25": true, + "R_ARM_SBREL31": true, + "R_ARM_SBREL32": true, + "R_ARM_SWI24": true, + "R_ARM_TARGET1": true, + "R_ARM_TARGET2": true, + "R_ARM_THM_ABS5": true, + "R_ARM_THM_ALU_ABS_G0_NC": true, + "R_ARM_THM_ALU_ABS_G1_NC": true, + 
"R_ARM_THM_ALU_ABS_G2_NC": true, + "R_ARM_THM_ALU_ABS_G3": true, + "R_ARM_THM_ALU_PREL_11_0": true, + "R_ARM_THM_GOT_BREL12": true, + "R_ARM_THM_JUMP11": true, + "R_ARM_THM_JUMP19": true, + "R_ARM_THM_JUMP24": true, + "R_ARM_THM_JUMP6": true, + "R_ARM_THM_JUMP8": true, + "R_ARM_THM_MOVT_ABS": true, + "R_ARM_THM_MOVT_BREL": true, + "R_ARM_THM_MOVT_PREL": true, + "R_ARM_THM_MOVW_ABS_NC": true, + "R_ARM_THM_MOVW_BREL": true, + "R_ARM_THM_MOVW_BREL_NC": true, + "R_ARM_THM_MOVW_PREL_NC": true, + "R_ARM_THM_PC12": true, + "R_ARM_THM_PC22": true, + "R_ARM_THM_PC8": true, + "R_ARM_THM_RPC22": true, + "R_ARM_THM_SWI8": true, + "R_ARM_THM_TLS_CALL": true, + "R_ARM_THM_TLS_DESCSEQ16": true, + "R_ARM_THM_TLS_DESCSEQ32": true, + "R_ARM_THM_XPC22": true, + "R_ARM_TLS_CALL": true, + "R_ARM_TLS_DESCSEQ": true, + "R_ARM_TLS_DTPMOD32": true, + "R_ARM_TLS_DTPOFF32": true, + "R_ARM_TLS_GD32": true, + "R_ARM_TLS_GOTDESC": true, + "R_ARM_TLS_IE12GP": true, + "R_ARM_TLS_IE32": true, + "R_ARM_TLS_LDM32": true, + "R_ARM_TLS_LDO12": true, + "R_ARM_TLS_LDO32": true, + "R_ARM_TLS_LE12": true, + "R_ARM_TLS_LE32": true, + "R_ARM_TLS_TPOFF32": true, + "R_ARM_V4BX": true, + "R_ARM_XPC25": true, + "R_INFO": true, + "R_INFO32": true, + "R_MIPS": true, + "R_MIPS_16": true, + "R_MIPS_26": true, + "R_MIPS_32": true, + "R_MIPS_64": true, + "R_MIPS_ADD_IMMEDIATE": true, + "R_MIPS_CALL16": true, + "R_MIPS_CALL_HI16": true, + "R_MIPS_CALL_LO16": true, + "R_MIPS_DELETE": true, + "R_MIPS_GOT16": true, + "R_MIPS_GOT_DISP": true, + "R_MIPS_GOT_HI16": true, + "R_MIPS_GOT_LO16": true, + "R_MIPS_GOT_OFST": true, + "R_MIPS_GOT_PAGE": true, + "R_MIPS_GPREL16": true, + "R_MIPS_GPREL32": true, + "R_MIPS_HI16": true, + "R_MIPS_HIGHER": true, + "R_MIPS_HIGHEST": true, + "R_MIPS_INSERT_A": true, + "R_MIPS_INSERT_B": true, + "R_MIPS_JALR": true, + "R_MIPS_LITERAL": true, + "R_MIPS_LO16": true, + "R_MIPS_NONE": true, + "R_MIPS_PC16": true, + "R_MIPS_PJUMP": true, + "R_MIPS_REL16": true, + "R_MIPS_REL32": true, + "R_MIPS_RELGOT": true, + "R_MIPS_SCN_DISP": true, + "R_MIPS_SHIFT5": true, + "R_MIPS_SHIFT6": true, + "R_MIPS_SUB": true, + "R_MIPS_TLS_DTPMOD32": true, + "R_MIPS_TLS_DTPMOD64": true, + "R_MIPS_TLS_DTPREL32": true, + "R_MIPS_TLS_DTPREL64": true, + "R_MIPS_TLS_DTPREL_HI16": true, + "R_MIPS_TLS_DTPREL_LO16": true, + "R_MIPS_TLS_GD": true, + "R_MIPS_TLS_GOTTPREL": true, + "R_MIPS_TLS_LDM": true, + "R_MIPS_TLS_TPREL32": true, + "R_MIPS_TLS_TPREL64": true, + "R_MIPS_TLS_TPREL_HI16": true, + "R_MIPS_TLS_TPREL_LO16": true, + "R_PPC": true, + "R_PPC64": true, + "R_PPC64_ADDR14": true, + "R_PPC64_ADDR14_BRNTAKEN": true, + "R_PPC64_ADDR14_BRTAKEN": true, + "R_PPC64_ADDR16": true, + "R_PPC64_ADDR16_DS": true, + "R_PPC64_ADDR16_HA": true, + "R_PPC64_ADDR16_HI": true, + "R_PPC64_ADDR16_HIGH": true, + "R_PPC64_ADDR16_HIGHA": true, + "R_PPC64_ADDR16_HIGHER": true, + "R_PPC64_ADDR16_HIGHERA": true, + "R_PPC64_ADDR16_HIGHEST": true, + "R_PPC64_ADDR16_HIGHESTA": true, + "R_PPC64_ADDR16_LO": true, + "R_PPC64_ADDR16_LO_DS": true, + "R_PPC64_ADDR24": true, + "R_PPC64_ADDR32": true, + "R_PPC64_ADDR64": true, + "R_PPC64_ADDR64_LOCAL": true, + "R_PPC64_DTPMOD64": true, + "R_PPC64_DTPREL16": true, + "R_PPC64_DTPREL16_DS": true, + "R_PPC64_DTPREL16_HA": true, + "R_PPC64_DTPREL16_HI": true, + "R_PPC64_DTPREL16_HIGH": true, + "R_PPC64_DTPREL16_HIGHA": true, + "R_PPC64_DTPREL16_HIGHER": true, + "R_PPC64_DTPREL16_HIGHERA": true, + "R_PPC64_DTPREL16_HIGHEST": true, + "R_PPC64_DTPREL16_HIGHESTA": true, + "R_PPC64_DTPREL16_LO": true, + "R_PPC64_DTPREL16_LO_DS": true, + 
"R_PPC64_DTPREL64": true, + "R_PPC64_ENTRY": true, + "R_PPC64_GOT16": true, + "R_PPC64_GOT16_DS": true, + "R_PPC64_GOT16_HA": true, + "R_PPC64_GOT16_HI": true, + "R_PPC64_GOT16_LO": true, + "R_PPC64_GOT16_LO_DS": true, + "R_PPC64_GOT_DTPREL16_DS": true, + "R_PPC64_GOT_DTPREL16_HA": true, + "R_PPC64_GOT_DTPREL16_HI": true, + "R_PPC64_GOT_DTPREL16_LO_DS": true, + "R_PPC64_GOT_TLSGD16": true, + "R_PPC64_GOT_TLSGD16_HA": true, + "R_PPC64_GOT_TLSGD16_HI": true, + "R_PPC64_GOT_TLSGD16_LO": true, + "R_PPC64_GOT_TLSLD16": true, + "R_PPC64_GOT_TLSLD16_HA": true, + "R_PPC64_GOT_TLSLD16_HI": true, + "R_PPC64_GOT_TLSLD16_LO": true, + "R_PPC64_GOT_TPREL16_DS": true, + "R_PPC64_GOT_TPREL16_HA": true, + "R_PPC64_GOT_TPREL16_HI": true, + "R_PPC64_GOT_TPREL16_LO_DS": true, + "R_PPC64_IRELATIVE": true, + "R_PPC64_JMP_IREL": true, + "R_PPC64_JMP_SLOT": true, + "R_PPC64_NONE": true, + "R_PPC64_PLT16_LO_DS": true, + "R_PPC64_PLTGOT16": true, + "R_PPC64_PLTGOT16_DS": true, + "R_PPC64_PLTGOT16_HA": true, + "R_PPC64_PLTGOT16_HI": true, + "R_PPC64_PLTGOT16_LO": true, + "R_PPC64_PLTGOT_LO_DS": true, + "R_PPC64_REL14": true, + "R_PPC64_REL14_BRNTAKEN": true, + "R_PPC64_REL14_BRTAKEN": true, + "R_PPC64_REL16": true, + "R_PPC64_REL16DX_HA": true, + "R_PPC64_REL16_HA": true, + "R_PPC64_REL16_HI": true, + "R_PPC64_REL16_LO": true, + "R_PPC64_REL24": true, + "R_PPC64_REL24_NOTOC": true, + "R_PPC64_REL32": true, + "R_PPC64_REL64": true, + "R_PPC64_SECTOFF_DS": true, + "R_PPC64_SECTOFF_LO_DS": true, + "R_PPC64_TLS": true, + "R_PPC64_TLSGD": true, + "R_PPC64_TLSLD": true, + "R_PPC64_TOC": true, + "R_PPC64_TOC16": true, + "R_PPC64_TOC16_DS": true, + "R_PPC64_TOC16_HA": true, + "R_PPC64_TOC16_HI": true, + "R_PPC64_TOC16_LO": true, + "R_PPC64_TOC16_LO_DS": true, + "R_PPC64_TOCSAVE": true, + "R_PPC64_TPREL16": true, + "R_PPC64_TPREL16_DS": true, + "R_PPC64_TPREL16_HA": true, + "R_PPC64_TPREL16_HI": true, + "R_PPC64_TPREL16_HIGH": true, + "R_PPC64_TPREL16_HIGHA": true, + "R_PPC64_TPREL16_HIGHER": true, + "R_PPC64_TPREL16_HIGHERA": true, + "R_PPC64_TPREL16_HIGHEST": true, + "R_PPC64_TPREL16_HIGHESTA": true, + "R_PPC64_TPREL16_LO": true, + "R_PPC64_TPREL16_LO_DS": true, + "R_PPC64_TPREL64": true, + "R_PPC_ADDR14": true, + "R_PPC_ADDR14_BRNTAKEN": true, + "R_PPC_ADDR14_BRTAKEN": true, + "R_PPC_ADDR16": true, + "R_PPC_ADDR16_HA": true, + "R_PPC_ADDR16_HI": true, + "R_PPC_ADDR16_LO": true, + "R_PPC_ADDR24": true, + "R_PPC_ADDR32": true, + "R_PPC_COPY": true, + "R_PPC_DTPMOD32": true, + "R_PPC_DTPREL16": true, + "R_PPC_DTPREL16_HA": true, + "R_PPC_DTPREL16_HI": true, + "R_PPC_DTPREL16_LO": true, + "R_PPC_DTPREL32": true, + "R_PPC_EMB_BIT_FLD": true, + "R_PPC_EMB_MRKREF": true, + "R_PPC_EMB_NADDR16": true, + "R_PPC_EMB_NADDR16_HA": true, + "R_PPC_EMB_NADDR16_HI": true, + "R_PPC_EMB_NADDR16_LO": true, + "R_PPC_EMB_NADDR32": true, + "R_PPC_EMB_RELSDA": true, + "R_PPC_EMB_RELSEC16": true, + "R_PPC_EMB_RELST_HA": true, + "R_PPC_EMB_RELST_HI": true, + "R_PPC_EMB_RELST_LO": true, + "R_PPC_EMB_SDA21": true, + "R_PPC_EMB_SDA2I16": true, + "R_PPC_EMB_SDA2REL": true, + "R_PPC_EMB_SDAI16": true, + "R_PPC_GLOB_DAT": true, + "R_PPC_GOT16": true, + "R_PPC_GOT16_HA": true, + "R_PPC_GOT16_HI": true, + "R_PPC_GOT16_LO": true, + "R_PPC_GOT_TLSGD16": true, + "R_PPC_GOT_TLSGD16_HA": true, + "R_PPC_GOT_TLSGD16_HI": true, + "R_PPC_GOT_TLSGD16_LO": true, + "R_PPC_GOT_TLSLD16": true, + "R_PPC_GOT_TLSLD16_HA": true, + "R_PPC_GOT_TLSLD16_HI": true, + "R_PPC_GOT_TLSLD16_LO": true, + "R_PPC_GOT_TPREL16": true, + "R_PPC_GOT_TPREL16_HA": true, + 
"R_PPC_GOT_TPREL16_HI": true, + "R_PPC_GOT_TPREL16_LO": true, + "R_PPC_JMP_SLOT": true, + "R_PPC_LOCAL24PC": true, + "R_PPC_NONE": true, + "R_PPC_PLT16_HA": true, + "R_PPC_PLT16_HI": true, + "R_PPC_PLT16_LO": true, + "R_PPC_PLT32": true, + "R_PPC_PLTREL24": true, + "R_PPC_PLTREL32": true, + "R_PPC_REL14": true, + "R_PPC_REL14_BRNTAKEN": true, + "R_PPC_REL14_BRTAKEN": true, + "R_PPC_REL24": true, + "R_PPC_REL32": true, + "R_PPC_RELATIVE": true, + "R_PPC_SDAREL16": true, + "R_PPC_SECTOFF": true, + "R_PPC_SECTOFF_HA": true, + "R_PPC_SECTOFF_HI": true, + "R_PPC_SECTOFF_LO": true, + "R_PPC_TLS": true, + "R_PPC_TPREL16": true, + "R_PPC_TPREL16_HA": true, + "R_PPC_TPREL16_HI": true, + "R_PPC_TPREL16_LO": true, + "R_PPC_TPREL32": true, + "R_PPC_UADDR16": true, + "R_PPC_UADDR32": true, + "R_RISCV": true, + "R_RISCV_32": true, + "R_RISCV_32_PCREL": true, + "R_RISCV_64": true, + "R_RISCV_ADD16": true, + "R_RISCV_ADD32": true, + "R_RISCV_ADD64": true, + "R_RISCV_ADD8": true, + "R_RISCV_ALIGN": true, + "R_RISCV_BRANCH": true, + "R_RISCV_CALL": true, + "R_RISCV_CALL_PLT": true, + "R_RISCV_COPY": true, + "R_RISCV_GNU_VTENTRY": true, + "R_RISCV_GNU_VTINHERIT": true, + "R_RISCV_GOT_HI20": true, + "R_RISCV_GPREL_I": true, + "R_RISCV_GPREL_S": true, + "R_RISCV_HI20": true, + "R_RISCV_JAL": true, + "R_RISCV_JUMP_SLOT": true, + "R_RISCV_LO12_I": true, + "R_RISCV_LO12_S": true, + "R_RISCV_NONE": true, + "R_RISCV_PCREL_HI20": true, + "R_RISCV_PCREL_LO12_I": true, + "R_RISCV_PCREL_LO12_S": true, + "R_RISCV_RELATIVE": true, + "R_RISCV_RELAX": true, + "R_RISCV_RVC_BRANCH": true, + "R_RISCV_RVC_JUMP": true, + "R_RISCV_RVC_LUI": true, + "R_RISCV_SET16": true, + "R_RISCV_SET32": true, + "R_RISCV_SET6": true, + "R_RISCV_SET8": true, + "R_RISCV_SUB16": true, + "R_RISCV_SUB32": true, + "R_RISCV_SUB6": true, + "R_RISCV_SUB64": true, + "R_RISCV_SUB8": true, + "R_RISCV_TLS_DTPMOD32": true, + "R_RISCV_TLS_DTPMOD64": true, + "R_RISCV_TLS_DTPREL32": true, + "R_RISCV_TLS_DTPREL64": true, + "R_RISCV_TLS_GD_HI20": true, + "R_RISCV_TLS_GOT_HI20": true, + "R_RISCV_TLS_TPREL32": true, + "R_RISCV_TLS_TPREL64": true, + "R_RISCV_TPREL_ADD": true, + "R_RISCV_TPREL_HI20": true, + "R_RISCV_TPREL_I": true, + "R_RISCV_TPREL_LO12_I": true, + "R_RISCV_TPREL_LO12_S": true, + "R_RISCV_TPREL_S": true, + "R_SPARC": true, + "R_SPARC_10": true, + "R_SPARC_11": true, + "R_SPARC_13": true, + "R_SPARC_16": true, + "R_SPARC_22": true, + "R_SPARC_32": true, + "R_SPARC_5": true, + "R_SPARC_6": true, + "R_SPARC_64": true, + "R_SPARC_7": true, + "R_SPARC_8": true, + "R_SPARC_COPY": true, + "R_SPARC_DISP16": true, + "R_SPARC_DISP32": true, + "R_SPARC_DISP64": true, + "R_SPARC_DISP8": true, + "R_SPARC_GLOB_DAT": true, + "R_SPARC_GLOB_JMP": true, + "R_SPARC_GOT10": true, + "R_SPARC_GOT13": true, + "R_SPARC_GOT22": true, + "R_SPARC_H44": true, + "R_SPARC_HH22": true, + "R_SPARC_HI22": true, + "R_SPARC_HIPLT22": true, + "R_SPARC_HIX22": true, + "R_SPARC_HM10": true, + "R_SPARC_JMP_SLOT": true, + "R_SPARC_L44": true, + "R_SPARC_LM22": true, + "R_SPARC_LO10": true, + "R_SPARC_LOPLT10": true, + "R_SPARC_LOX10": true, + "R_SPARC_M44": true, + "R_SPARC_NONE": true, + "R_SPARC_OLO10": true, + "R_SPARC_PC10": true, + "R_SPARC_PC22": true, + "R_SPARC_PCPLT10": true, + "R_SPARC_PCPLT22": true, + "R_SPARC_PCPLT32": true, + "R_SPARC_PC_HH22": true, + "R_SPARC_PC_HM10": true, + "R_SPARC_PC_LM22": true, + "R_SPARC_PLT32": true, + "R_SPARC_PLT64": true, + "R_SPARC_REGISTER": true, + "R_SPARC_RELATIVE": true, + "R_SPARC_UA16": true, + "R_SPARC_UA32": true, + "R_SPARC_UA64": 
true, + "R_SPARC_WDISP16": true, + "R_SPARC_WDISP19": true, + "R_SPARC_WDISP22": true, + "R_SPARC_WDISP30": true, + "R_SPARC_WPLT30": true, + "R_SYM32": true, + "R_SYM64": true, + "R_TYPE32": true, + "R_TYPE64": true, + "R_X86_64": true, + "R_X86_64_16": true, + "R_X86_64_32": true, + "R_X86_64_32S": true, + "R_X86_64_64": true, + "R_X86_64_8": true, + "R_X86_64_COPY": true, + "R_X86_64_DTPMOD64": true, + "R_X86_64_DTPOFF32": true, + "R_X86_64_DTPOFF64": true, + "R_X86_64_GLOB_DAT": true, + "R_X86_64_GOT32": true, + "R_X86_64_GOT64": true, + "R_X86_64_GOTOFF64": true, + "R_X86_64_GOTPC32": true, + "R_X86_64_GOTPC32_TLSDESC": true, + "R_X86_64_GOTPC64": true, + "R_X86_64_GOTPCREL": true, + "R_X86_64_GOTPCREL64": true, + "R_X86_64_GOTPCRELX": true, + "R_X86_64_GOTPLT64": true, + "R_X86_64_GOTTPOFF": true, + "R_X86_64_IRELATIVE": true, + "R_X86_64_JMP_SLOT": true, + "R_X86_64_NONE": true, + "R_X86_64_PC16": true, + "R_X86_64_PC32": true, + "R_X86_64_PC32_BND": true, + "R_X86_64_PC64": true, + "R_X86_64_PC8": true, + "R_X86_64_PLT32": true, + "R_X86_64_PLT32_BND": true, + "R_X86_64_PLTOFF64": true, + "R_X86_64_RELATIVE": true, + "R_X86_64_RELATIVE64": true, + "R_X86_64_REX_GOTPCRELX": true, + "R_X86_64_SIZE32": true, + "R_X86_64_SIZE64": true, + "R_X86_64_TLSDESC": true, + "R_X86_64_TLSDESC_CALL": true, + "R_X86_64_TLSGD": true, + "R_X86_64_TLSLD": true, + "R_X86_64_TPOFF32": true, + "R_X86_64_TPOFF64": true, + "Rel32": true, + "Rel64": true, + "Rela32": true, + "Rela64": true, + "SHF_ALLOC": true, + "SHF_COMPRESSED": true, + "SHF_EXECINSTR": true, + "SHF_GROUP": true, + "SHF_INFO_LINK": true, + "SHF_LINK_ORDER": true, + "SHF_MASKOS": true, + "SHF_MASKPROC": true, + "SHF_MERGE": true, + "SHF_OS_NONCONFORMING": true, + "SHF_STRINGS": true, + "SHF_TLS": true, + "SHF_WRITE": true, + "SHN_ABS": true, + "SHN_COMMON": true, + "SHN_HIOS": true, + "SHN_HIPROC": true, + "SHN_HIRESERVE": true, + "SHN_LOOS": true, + "SHN_LOPROC": true, + "SHN_LORESERVE": true, + "SHN_UNDEF": true, + "SHN_XINDEX": true, + "SHT_DYNAMIC": true, + "SHT_DYNSYM": true, + "SHT_FINI_ARRAY": true, + "SHT_GNU_ATTRIBUTES": true, + "SHT_GNU_HASH": true, + "SHT_GNU_LIBLIST": true, + "SHT_GNU_VERDEF": true, + "SHT_GNU_VERNEED": true, + "SHT_GNU_VERSYM": true, + "SHT_GROUP": true, + "SHT_HASH": true, + "SHT_HIOS": true, + "SHT_HIPROC": true, + "SHT_HIUSER": true, + "SHT_INIT_ARRAY": true, + "SHT_LOOS": true, + "SHT_LOPROC": true, + "SHT_LOUSER": true, + "SHT_NOBITS": true, + "SHT_NOTE": true, + "SHT_NULL": true, + "SHT_PREINIT_ARRAY": true, + "SHT_PROGBITS": true, + "SHT_REL": true, + "SHT_RELA": true, + "SHT_SHLIB": true, + "SHT_STRTAB": true, + "SHT_SYMTAB": true, + "SHT_SYMTAB_SHNDX": true, + "STB_GLOBAL": true, + "STB_HIOS": true, + "STB_HIPROC": true, + "STB_LOCAL": true, + "STB_LOOS": true, + "STB_LOPROC": true, + "STB_WEAK": true, + "STT_COMMON": true, + "STT_FILE": true, + "STT_FUNC": true, + "STT_HIOS": true, + "STT_HIPROC": true, + "STT_LOOS": true, + "STT_LOPROC": true, + "STT_NOTYPE": true, + "STT_OBJECT": true, + "STT_SECTION": true, + "STT_TLS": true, + "STV_DEFAULT": true, + "STV_HIDDEN": true, + "STV_INTERNAL": true, + "STV_PROTECTED": true, + "ST_BIND": true, + "ST_INFO": true, + "ST_TYPE": true, + "ST_VISIBILITY": true, + "Section": true, + "Section32": true, + "Section64": true, + "SectionFlag": true, + "SectionHeader": true, + "SectionIndex": true, + "SectionType": true, + "Sym32": true, + "Sym32Size": true, + "Sym64": true, + "Sym64Size": true, + "SymBind": true, + "SymType": true, + "SymVis": true, + "Symbol": 
true, + "Type": true, + "Version": true, + }, + "debug/gosym": map[string]bool{ + "DecodingError": true, + "Func": true, + "LineTable": true, + "NewLineTable": true, + "NewTable": true, + "Obj": true, + "Sym": true, + "Table": true, + "UnknownFileError": true, + "UnknownLineError": true, + }, + "debug/macho": map[string]bool{ + "ARM64_RELOC_ADDEND": true, + "ARM64_RELOC_BRANCH26": true, + "ARM64_RELOC_GOT_LOAD_PAGE21": true, + "ARM64_RELOC_GOT_LOAD_PAGEOFF12": true, + "ARM64_RELOC_PAGE21": true, + "ARM64_RELOC_PAGEOFF12": true, + "ARM64_RELOC_POINTER_TO_GOT": true, + "ARM64_RELOC_SUBTRACTOR": true, + "ARM64_RELOC_TLVP_LOAD_PAGE21": true, + "ARM64_RELOC_TLVP_LOAD_PAGEOFF12": true, + "ARM64_RELOC_UNSIGNED": true, + "ARM_RELOC_BR24": true, + "ARM_RELOC_HALF": true, + "ARM_RELOC_HALF_SECTDIFF": true, + "ARM_RELOC_LOCAL_SECTDIFF": true, + "ARM_RELOC_PAIR": true, + "ARM_RELOC_PB_LA_PTR": true, + "ARM_RELOC_SECTDIFF": true, + "ARM_RELOC_VANILLA": true, + "ARM_THUMB_32BIT_BRANCH": true, + "ARM_THUMB_RELOC_BR22": true, + "Cpu": true, + "Cpu386": true, + "CpuAmd64": true, + "CpuArm": true, + "CpuArm64": true, + "CpuPpc": true, + "CpuPpc64": true, + "Dylib": true, + "DylibCmd": true, + "Dysymtab": true, + "DysymtabCmd": true, + "ErrNotFat": true, + "FatArch": true, + "FatArchHeader": true, + "FatFile": true, + "File": true, + "FileHeader": true, + "FlagAllModsBound": true, + "FlagAllowStackExecution": true, + "FlagAppExtensionSafe": true, + "FlagBindAtLoad": true, + "FlagBindsToWeak": true, + "FlagCanonical": true, + "FlagDeadStrippableDylib": true, + "FlagDyldLink": true, + "FlagForceFlat": true, + "FlagHasTLVDescriptors": true, + "FlagIncrLink": true, + "FlagLazyInit": true, + "FlagNoFixPrebinding": true, + "FlagNoHeapExecution": true, + "FlagNoMultiDefs": true, + "FlagNoReexportedDylibs": true, + "FlagNoUndefs": true, + "FlagPIE": true, + "FlagPrebindable": true, + "FlagPrebound": true, + "FlagRootSafe": true, + "FlagSetuidSafe": true, + "FlagSplitSegs": true, + "FlagSubsectionsViaSymbols": true, + "FlagTwoLevel": true, + "FlagWeakDefines": true, + "FormatError": true, + "GENERIC_RELOC_LOCAL_SECTDIFF": true, + "GENERIC_RELOC_PAIR": true, + "GENERIC_RELOC_PB_LA_PTR": true, + "GENERIC_RELOC_SECTDIFF": true, + "GENERIC_RELOC_TLV": true, + "GENERIC_RELOC_VANILLA": true, + "Load": true, + "LoadBytes": true, + "LoadCmd": true, + "LoadCmdDylib": true, + "LoadCmdDylinker": true, + "LoadCmdDysymtab": true, + "LoadCmdRpath": true, + "LoadCmdSegment": true, + "LoadCmdSegment64": true, + "LoadCmdSymtab": true, + "LoadCmdThread": true, + "LoadCmdUnixThread": true, + "Magic32": true, + "Magic64": true, + "MagicFat": true, + "NewFatFile": true, + "NewFile": true, + "Nlist32": true, + "Nlist64": true, + "Open": true, + "OpenFat": true, + "Regs386": true, + "RegsAMD64": true, + "Reloc": true, + "RelocTypeARM": true, + "RelocTypeARM64": true, + "RelocTypeGeneric": true, + "RelocTypeX86_64": true, + "Rpath": true, + "RpathCmd": true, + "Section": true, + "Section32": true, + "Section64": true, + "SectionHeader": true, + "Segment": true, + "Segment32": true, + "Segment64": true, + "SegmentHeader": true, + "Symbol": true, + "Symtab": true, + "SymtabCmd": true, + "Thread": true, + "Type": true, + "TypeBundle": true, + "TypeDylib": true, + "TypeExec": true, + "TypeObj": true, + "X86_64_RELOC_BRANCH": true, + "X86_64_RELOC_GOT": true, + "X86_64_RELOC_GOT_LOAD": true, + "X86_64_RELOC_SIGNED": true, + "X86_64_RELOC_SIGNED_1": true, + "X86_64_RELOC_SIGNED_2": true, + "X86_64_RELOC_SIGNED_4": true, + 
"X86_64_RELOC_SUBTRACTOR": true, + "X86_64_RELOC_TLV": true, + "X86_64_RELOC_UNSIGNED": true, + }, + "debug/pe": map[string]bool{ + "COFFSymbol": true, + "COFFSymbolSize": true, + "DataDirectory": true, + "File": true, + "FileHeader": true, + "FormatError": true, + "IMAGE_DIRECTORY_ENTRY_ARCHITECTURE": true, + "IMAGE_DIRECTORY_ENTRY_BASERELOC": true, + "IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT": true, + "IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR": true, + "IMAGE_DIRECTORY_ENTRY_DEBUG": true, + "IMAGE_DIRECTORY_ENTRY_DELAY_IMPORT": true, + "IMAGE_DIRECTORY_ENTRY_EXCEPTION": true, + "IMAGE_DIRECTORY_ENTRY_EXPORT": true, + "IMAGE_DIRECTORY_ENTRY_GLOBALPTR": true, + "IMAGE_DIRECTORY_ENTRY_IAT": true, + "IMAGE_DIRECTORY_ENTRY_IMPORT": true, + "IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG": true, + "IMAGE_DIRECTORY_ENTRY_RESOURCE": true, + "IMAGE_DIRECTORY_ENTRY_SECURITY": true, + "IMAGE_DIRECTORY_ENTRY_TLS": true, + "IMAGE_FILE_MACHINE_AM33": true, + "IMAGE_FILE_MACHINE_AMD64": true, + "IMAGE_FILE_MACHINE_ARM": true, + "IMAGE_FILE_MACHINE_ARM64": true, + "IMAGE_FILE_MACHINE_ARMNT": true, + "IMAGE_FILE_MACHINE_EBC": true, + "IMAGE_FILE_MACHINE_I386": true, + "IMAGE_FILE_MACHINE_IA64": true, + "IMAGE_FILE_MACHINE_M32R": true, + "IMAGE_FILE_MACHINE_MIPS16": true, + "IMAGE_FILE_MACHINE_MIPSFPU": true, + "IMAGE_FILE_MACHINE_MIPSFPU16": true, + "IMAGE_FILE_MACHINE_POWERPC": true, + "IMAGE_FILE_MACHINE_POWERPCFP": true, + "IMAGE_FILE_MACHINE_R4000": true, + "IMAGE_FILE_MACHINE_SH3": true, + "IMAGE_FILE_MACHINE_SH3DSP": true, + "IMAGE_FILE_MACHINE_SH4": true, + "IMAGE_FILE_MACHINE_SH5": true, + "IMAGE_FILE_MACHINE_THUMB": true, + "IMAGE_FILE_MACHINE_UNKNOWN": true, + "IMAGE_FILE_MACHINE_WCEMIPSV2": true, + "ImportDirectory": true, + "NewFile": true, + "Open": true, + "OptionalHeader32": true, + "OptionalHeader64": true, + "Reloc": true, + "Section": true, + "SectionHeader": true, + "SectionHeader32": true, + "StringTable": true, + "Symbol": true, + }, + "debug/plan9obj": map[string]bool{ + "File": true, + "FileHeader": true, + "Magic386": true, + "Magic64": true, + "MagicAMD64": true, + "MagicARM": true, + "NewFile": true, + "Open": true, + "Section": true, + "SectionHeader": true, + "Sym": true, + }, + "encoding": map[string]bool{ + "BinaryMarshaler": true, + "BinaryUnmarshaler": true, + "TextMarshaler": true, + "TextUnmarshaler": true, + }, + "encoding/ascii85": map[string]bool{ + "CorruptInputError": true, + "Decode": true, + "Encode": true, + "MaxEncodedLen": true, + "NewDecoder": true, + "NewEncoder": true, + }, + "encoding/asn1": map[string]bool{ + "BitString": true, + "ClassApplication": true, + "ClassContextSpecific": true, + "ClassPrivate": true, + "ClassUniversal": true, + "Enumerated": true, + "Flag": true, + "Marshal": true, + "MarshalWithParams": true, + "NullBytes": true, + "NullRawValue": true, + "ObjectIdentifier": true, + "RawContent": true, + "RawValue": true, + "StructuralError": true, + "SyntaxError": true, + "TagBitString": true, + "TagBoolean": true, + "TagEnum": true, + "TagGeneralString": true, + "TagGeneralizedTime": true, + "TagIA5String": true, + "TagInteger": true, + "TagNull": true, + "TagNumericString": true, + "TagOID": true, + "TagOctetString": true, + "TagPrintableString": true, + "TagSequence": true, + "TagSet": true, + "TagT61String": true, + "TagUTCTime": true, + "TagUTF8String": true, + "Unmarshal": true, + "UnmarshalWithParams": true, + }, + "encoding/base32": map[string]bool{ + "CorruptInputError": true, + "Encoding": true, + "HexEncoding": true, + "NewDecoder": true, + "NewEncoder": 
true, + "NewEncoding": true, + "NoPadding": true, + "StdEncoding": true, + "StdPadding": true, + }, + "encoding/base64": map[string]bool{ + "CorruptInputError": true, + "Encoding": true, + "NewDecoder": true, + "NewEncoder": true, + "NewEncoding": true, + "NoPadding": true, + "RawStdEncoding": true, + "RawURLEncoding": true, + "StdEncoding": true, + "StdPadding": true, + "URLEncoding": true, + }, + "encoding/binary": map[string]bool{ + "BigEndian": true, + "ByteOrder": true, + "LittleEndian": true, + "MaxVarintLen16": true, + "MaxVarintLen32": true, + "MaxVarintLen64": true, + "PutUvarint": true, + "PutVarint": true, + "Read": true, + "ReadUvarint": true, + "ReadVarint": true, + "Size": true, + "Uvarint": true, + "Varint": true, + "Write": true, + }, + "encoding/csv": map[string]bool{ + "ErrBareQuote": true, + "ErrFieldCount": true, + "ErrQuote": true, + "ErrTrailingComma": true, + "NewReader": true, + "NewWriter": true, + "ParseError": true, + "Reader": true, + "Writer": true, + }, + "encoding/gob": map[string]bool{ + "CommonType": true, + "Decoder": true, + "Encoder": true, + "GobDecoder": true, + "GobEncoder": true, + "NewDecoder": true, + "NewEncoder": true, + "Register": true, + "RegisterName": true, + }, + "encoding/hex": map[string]bool{ + "Decode": true, + "DecodeString": true, + "DecodedLen": true, + "Dump": true, + "Dumper": true, + "Encode": true, + "EncodeToString": true, + "EncodedLen": true, + "ErrLength": true, + "InvalidByteError": true, + "NewDecoder": true, + "NewEncoder": true, + }, + "encoding/json": map[string]bool{ + "Compact": true, + "Decoder": true, + "Delim": true, + "Encoder": true, + "HTMLEscape": true, + "Indent": true, + "InvalidUTF8Error": true, + "InvalidUnmarshalError": true, + "Marshal": true, + "MarshalIndent": true, + "Marshaler": true, + "MarshalerError": true, + "NewDecoder": true, + "NewEncoder": true, + "Number": true, + "RawMessage": true, + "SyntaxError": true, + "Token": true, + "Unmarshal": true, + "UnmarshalFieldError": true, + "UnmarshalTypeError": true, + "Unmarshaler": true, + "UnsupportedTypeError": true, + "UnsupportedValueError": true, + "Valid": true, + }, + "encoding/pem": map[string]bool{ + "Block": true, + "Decode": true, + "Encode": true, + "EncodeToMemory": true, + }, + "encoding/xml": map[string]bool{ + "Attr": true, + "CharData": true, + "Comment": true, + "CopyToken": true, + "Decoder": true, + "Directive": true, + "Encoder": true, + "EndElement": true, + "Escape": true, + "EscapeText": true, + "HTMLAutoClose": true, + "HTMLEntity": true, + "Header": true, + "Marshal": true, + "MarshalIndent": true, + "Marshaler": true, + "MarshalerAttr": true, + "Name": true, + "NewDecoder": true, + "NewEncoder": true, + "NewTokenDecoder": true, + "ProcInst": true, + "StartElement": true, + "SyntaxError": true, + "TagPathError": true, + "Token": true, + "TokenReader": true, + "Unmarshal": true, + "UnmarshalError": true, + "Unmarshaler": true, + "UnmarshalerAttr": true, + "UnsupportedTypeError": true, + }, + "errors": map[string]bool{ + "New": true, + }, + "expvar": map[string]bool{ + "Do": true, + "Float": true, + "Func": true, + "Get": true, + "Handler": true, + "Int": true, + "KeyValue": true, + "Map": true, + "NewFloat": true, + "NewInt": true, + "NewMap": true, + "NewString": true, + "Publish": true, + "String": true, + "Var": true, + }, + "flag": map[string]bool{ + "Arg": true, + "Args": true, + "Bool": true, + "BoolVar": true, + "CommandLine": true, + "ContinueOnError": true, + "Duration": true, + "DurationVar": true, + "ErrHelp": true, + 
"ErrorHandling": true, + "ExitOnError": true, + "Flag": true, + "FlagSet": true, + "Float64": true, + "Float64Var": true, + "Getter": true, + "Int": true, + "Int64": true, + "Int64Var": true, + "IntVar": true, + "Lookup": true, + "NArg": true, + "NFlag": true, + "NewFlagSet": true, + "PanicOnError": true, + "Parse": true, + "Parsed": true, + "PrintDefaults": true, + "Set": true, + "String": true, + "StringVar": true, + "Uint": true, + "Uint64": true, + "Uint64Var": true, + "UintVar": true, + "UnquoteUsage": true, + "Usage": true, + "Value": true, + "Var": true, + "Visit": true, + "VisitAll": true, + }, + "fmt": map[string]bool{ + "Errorf": true, + "Formatter": true, + "Fprint": true, + "Fprintf": true, + "Fprintln": true, + "Fscan": true, + "Fscanf": true, + "Fscanln": true, + "GoStringer": true, + "Print": true, + "Printf": true, + "Println": true, + "Scan": true, + "ScanState": true, + "Scanf": true, + "Scanln": true, + "Scanner": true, + "Sprint": true, + "Sprintf": true, + "Sprintln": true, + "Sscan": true, + "Sscanf": true, + "Sscanln": true, + "State": true, + "Stringer": true, + }, + "go/ast": map[string]bool{ + "ArrayType": true, + "AssignStmt": true, + "Bad": true, + "BadDecl": true, + "BadExpr": true, + "BadStmt": true, + "BasicLit": true, + "BinaryExpr": true, + "BlockStmt": true, + "BranchStmt": true, + "CallExpr": true, + "CaseClause": true, + "ChanDir": true, + "ChanType": true, + "CommClause": true, + "Comment": true, + "CommentGroup": true, + "CommentMap": true, + "CompositeLit": true, + "Con": true, + "DeclStmt": true, + "DeferStmt": true, + "Ellipsis": true, + "EmptyStmt": true, + "ExprStmt": true, + "Field": true, + "FieldFilter": true, + "FieldList": true, + "File": true, + "FileExports": true, + "Filter": true, + "FilterDecl": true, + "FilterFile": true, + "FilterFuncDuplicates": true, + "FilterImportDuplicates": true, + "FilterPackage": true, + "FilterUnassociatedComments": true, + "ForStmt": true, + "Fprint": true, + "Fun": true, + "FuncDecl": true, + "FuncLit": true, + "FuncType": true, + "GenDecl": true, + "GoStmt": true, + "Ident": true, + "IfStmt": true, + "ImportSpec": true, + "Importer": true, + "IncDecStmt": true, + "IndexExpr": true, + "Inspect": true, + "InterfaceType": true, + "IsExported": true, + "KeyValueExpr": true, + "LabeledStmt": true, + "Lbl": true, + "MapType": true, + "MergeMode": true, + "MergePackageFiles": true, + "NewCommentMap": true, + "NewIdent": true, + "NewObj": true, + "NewPackage": true, + "NewScope": true, + "Node": true, + "NotNilFilter": true, + "ObjKind": true, + "Object": true, + "Package": true, + "PackageExports": true, + "ParenExpr": true, + "Pkg": true, + "Print": true, + "RECV": true, + "RangeStmt": true, + "ReturnStmt": true, + "SEND": true, + "Scope": true, + "SelectStmt": true, + "SelectorExpr": true, + "SendStmt": true, + "SliceExpr": true, + "SortImports": true, + "StarExpr": true, + "StructType": true, + "SwitchStmt": true, + "Typ": true, + "TypeAssertExpr": true, + "TypeSpec": true, + "TypeSwitchStmt": true, + "UnaryExpr": true, + "ValueSpec": true, + "Var": true, + "Visitor": true, + "Walk": true, + }, + "go/build": map[string]bool{ + "AllowBinary": true, + "ArchChar": true, + "Context": true, + "Default": true, + "FindOnly": true, + "IgnoreVendor": true, + "Import": true, + "ImportComment": true, + "ImportDir": true, + "ImportMode": true, + "IsLocalImport": true, + "MultiplePackageError": true, + "NoGoError": true, + "Package": true, + "ToolDir": true, + }, + "go/constant": map[string]bool{ + "BinaryOp": true, + 
"BitLen": true, + "Bool": true, + "BoolVal": true, + "Bytes": true, + "Compare": true, + "Complex": true, + "Denom": true, + "Float": true, + "Float32Val": true, + "Float64Val": true, + "Imag": true, + "Int": true, + "Int64Val": true, + "Kind": true, + "MakeBool": true, + "MakeFloat64": true, + "MakeFromBytes": true, + "MakeFromLiteral": true, + "MakeImag": true, + "MakeInt64": true, + "MakeString": true, + "MakeUint64": true, + "MakeUnknown": true, + "Num": true, + "Real": true, + "Shift": true, + "Sign": true, + "String": true, + "StringVal": true, + "ToComplex": true, + "ToFloat": true, + "ToInt": true, + "Uint64Val": true, + "UnaryOp": true, + "Unknown": true, + }, + "go/doc": map[string]bool{ + "AllDecls": true, + "AllMethods": true, + "Example": true, + "Examples": true, + "Filter": true, + "Func": true, + "IllegalPrefixes": true, + "IsPredeclared": true, + "Mode": true, + "New": true, + "Note": true, + "Package": true, + "PreserveAST": true, + "Synopsis": true, + "ToHTML": true, + "ToText": true, + "Type": true, + "Value": true, + }, + "go/format": map[string]bool{ + "Node": true, + "Source": true, + }, + "go/importer": map[string]bool{ + "Default": true, + "For": true, + "ForCompiler": true, + "Lookup": true, + }, + "go/parser": map[string]bool{ + "AllErrors": true, + "DeclarationErrors": true, + "ImportsOnly": true, + "Mode": true, + "PackageClauseOnly": true, + "ParseComments": true, + "ParseDir": true, + "ParseExpr": true, + "ParseExprFrom": true, + "ParseFile": true, + "SpuriousErrors": true, + "Trace": true, + }, + "go/printer": map[string]bool{ + "CommentedNode": true, + "Config": true, + "Fprint": true, + "Mode": true, + "RawFormat": true, + "SourcePos": true, + "TabIndent": true, + "UseSpaces": true, + }, + "go/scanner": map[string]bool{ + "Error": true, + "ErrorHandler": true, + "ErrorList": true, + "Mode": true, + "PrintError": true, + "ScanComments": true, + "Scanner": true, + }, + "go/token": map[string]bool{ + "ADD": true, + "ADD_ASSIGN": true, + "AND": true, + "AND_ASSIGN": true, + "AND_NOT": true, + "AND_NOT_ASSIGN": true, + "ARROW": true, + "ASSIGN": true, + "BREAK": true, + "CASE": true, + "CHAN": true, + "CHAR": true, + "COLON": true, + "COMMA": true, + "COMMENT": true, + "CONST": true, + "CONTINUE": true, + "DEC": true, + "DEFAULT": true, + "DEFER": true, + "DEFINE": true, + "ELLIPSIS": true, + "ELSE": true, + "EOF": true, + "EQL": true, + "FALLTHROUGH": true, + "FLOAT": true, + "FOR": true, + "FUNC": true, + "File": true, + "FileSet": true, + "GEQ": true, + "GO": true, + "GOTO": true, + "GTR": true, + "HighestPrec": true, + "IDENT": true, + "IF": true, + "ILLEGAL": true, + "IMAG": true, + "IMPORT": true, + "INC": true, + "INT": true, + "INTERFACE": true, + "LAND": true, + "LBRACE": true, + "LBRACK": true, + "LEQ": true, + "LOR": true, + "LPAREN": true, + "LSS": true, + "Lookup": true, + "LowestPrec": true, + "MAP": true, + "MUL": true, + "MUL_ASSIGN": true, + "NEQ": true, + "NOT": true, + "NewFileSet": true, + "NoPos": true, + "OR": true, + "OR_ASSIGN": true, + "PACKAGE": true, + "PERIOD": true, + "Pos": true, + "Position": true, + "QUO": true, + "QUO_ASSIGN": true, + "RANGE": true, + "RBRACE": true, + "RBRACK": true, + "REM": true, + "REM_ASSIGN": true, + "RETURN": true, + "RPAREN": true, + "SELECT": true, + "SEMICOLON": true, + "SHL": true, + "SHL_ASSIGN": true, + "SHR": true, + "SHR_ASSIGN": true, + "STRING": true, + "STRUCT": true, + "SUB": true, + "SUB_ASSIGN": true, + "SWITCH": true, + "TYPE": true, + "Token": true, + "UnaryPrec": true, + "VAR": true, + 
"XOR": true, + "XOR_ASSIGN": true, + }, + "go/types": map[string]bool{ + "Array": true, + "AssertableTo": true, + "AssignableTo": true, + "Basic": true, + "BasicInfo": true, + "BasicKind": true, + "Bool": true, + "Builtin": true, + "Byte": true, + "Chan": true, + "ChanDir": true, + "Checker": true, + "Comparable": true, + "Complex128": true, + "Complex64": true, + "Config": true, + "Const": true, + "ConvertibleTo": true, + "DefPredeclaredTestFuncs": true, + "Default": true, + "Error": true, + "Eval": true, + "ExprString": true, + "FieldVal": true, + "Float32": true, + "Float64": true, + "Func": true, + "Id": true, + "Identical": true, + "IdenticalIgnoreTags": true, + "Implements": true, + "ImportMode": true, + "Importer": true, + "ImporterFrom": true, + "Info": true, + "Initializer": true, + "Int": true, + "Int16": true, + "Int32": true, + "Int64": true, + "Int8": true, + "Interface": true, + "Invalid": true, + "IsBoolean": true, + "IsComplex": true, + "IsConstType": true, + "IsFloat": true, + "IsInteger": true, + "IsInterface": true, + "IsNumeric": true, + "IsOrdered": true, + "IsString": true, + "IsUnsigned": true, + "IsUntyped": true, + "Label": true, + "LookupFieldOrMethod": true, + "Map": true, + "MethodExpr": true, + "MethodSet": true, + "MethodVal": true, + "MissingMethod": true, + "Named": true, + "NewArray": true, + "NewChan": true, + "NewChecker": true, + "NewConst": true, + "NewField": true, + "NewFunc": true, + "NewInterface": true, + "NewInterfaceType": true, + "NewLabel": true, + "NewMap": true, + "NewMethodSet": true, + "NewNamed": true, + "NewPackage": true, + "NewParam": true, + "NewPkgName": true, + "NewPointer": true, + "NewScope": true, + "NewSignature": true, + "NewSlice": true, + "NewStruct": true, + "NewTuple": true, + "NewTypeName": true, + "NewVar": true, + "Nil": true, + "ObjectString": true, + "Package": true, + "PkgName": true, + "Pointer": true, + "Qualifier": true, + "RecvOnly": true, + "RelativeTo": true, + "Rune": true, + "Scope": true, + "Selection": true, + "SelectionKind": true, + "SelectionString": true, + "SendOnly": true, + "SendRecv": true, + "Signature": true, + "Sizes": true, + "SizesFor": true, + "Slice": true, + "StdSizes": true, + "String": true, + "Struct": true, + "Tuple": true, + "Typ": true, + "Type": true, + "TypeAndValue": true, + "TypeName": true, + "TypeString": true, + "Uint": true, + "Uint16": true, + "Uint32": true, + "Uint64": true, + "Uint8": true, + "Uintptr": true, + "Universe": true, + "Unsafe": true, + "UnsafePointer": true, + "UntypedBool": true, + "UntypedComplex": true, + "UntypedFloat": true, + "UntypedInt": true, + "UntypedNil": true, + "UntypedRune": true, + "UntypedString": true, + "Var": true, + "WriteExpr": true, + "WriteSignature": true, + "WriteType": true, + }, + "hash": map[string]bool{ + "Hash": true, + "Hash32": true, + "Hash64": true, + }, + "hash/adler32": map[string]bool{ + "Checksum": true, + "New": true, + "Size": true, + }, + "hash/crc32": map[string]bool{ + "Castagnoli": true, + "Checksum": true, + "ChecksumIEEE": true, + "IEEE": true, + "IEEETable": true, + "Koopman": true, + "MakeTable": true, + "New": true, + "NewIEEE": true, + "Size": true, + "Table": true, + "Update": true, + }, + "hash/crc64": map[string]bool{ + "Checksum": true, + "ECMA": true, + "ISO": true, + "MakeTable": true, + "New": true, + "Size": true, + "Table": true, + "Update": true, + }, + "hash/fnv": map[string]bool{ + "New128": true, + "New128a": true, + "New32": true, + "New32a": true, + "New64": true, + "New64a": true, + }, + "html": 
map[string]bool{ + "EscapeString": true, + "UnescapeString": true, + }, + "html/template": map[string]bool{ + "CSS": true, + "ErrAmbigContext": true, + "ErrBadHTML": true, + "ErrBranchEnd": true, + "ErrEndContext": true, + "ErrNoSuchTemplate": true, + "ErrOutputContext": true, + "ErrPartialCharset": true, + "ErrPartialEscape": true, + "ErrPredefinedEscaper": true, + "ErrRangeLoopReentry": true, + "ErrSlashAmbig": true, + "Error": true, + "ErrorCode": true, + "FuncMap": true, + "HTML": true, + "HTMLAttr": true, + "HTMLEscape": true, + "HTMLEscapeString": true, + "HTMLEscaper": true, + "IsTrue": true, + "JS": true, + "JSEscape": true, + "JSEscapeString": true, + "JSEscaper": true, + "JSStr": true, + "Must": true, + "New": true, + "OK": true, + "ParseFiles": true, + "ParseGlob": true, + "Srcset": true, + "Template": true, + "URL": true, + "URLQueryEscaper": true, + }, + "image": map[string]bool{ + "Alpha": true, + "Alpha16": true, + "Black": true, + "CMYK": true, + "Config": true, + "Decode": true, + "DecodeConfig": true, + "ErrFormat": true, + "Gray": true, + "Gray16": true, + "Image": true, + "NRGBA": true, + "NRGBA64": true, + "NYCbCrA": true, + "NewAlpha": true, + "NewAlpha16": true, + "NewCMYK": true, + "NewGray": true, + "NewGray16": true, + "NewNRGBA": true, + "NewNRGBA64": true, + "NewNYCbCrA": true, + "NewPaletted": true, + "NewRGBA": true, + "NewRGBA64": true, + "NewUniform": true, + "NewYCbCr": true, + "Opaque": true, + "Paletted": true, + "PalettedImage": true, + "Point": true, + "Pt": true, + "RGBA": true, + "RGBA64": true, + "Rect": true, + "Rectangle": true, + "RegisterFormat": true, + "Transparent": true, + "Uniform": true, + "White": true, + "YCbCr": true, + "YCbCrSubsampleRatio": true, + "YCbCrSubsampleRatio410": true, + "YCbCrSubsampleRatio411": true, + "YCbCrSubsampleRatio420": true, + "YCbCrSubsampleRatio422": true, + "YCbCrSubsampleRatio440": true, + "YCbCrSubsampleRatio444": true, + "ZP": true, + "ZR": true, + }, + "image/color": map[string]bool{ + "Alpha": true, + "Alpha16": true, + "Alpha16Model": true, + "AlphaModel": true, + "Black": true, + "CMYK": true, + "CMYKModel": true, + "CMYKToRGB": true, + "Color": true, + "Gray": true, + "Gray16": true, + "Gray16Model": true, + "GrayModel": true, + "Model": true, + "ModelFunc": true, + "NRGBA": true, + "NRGBA64": true, + "NRGBA64Model": true, + "NRGBAModel": true, + "NYCbCrA": true, + "NYCbCrAModel": true, + "Opaque": true, + "Palette": true, + "RGBA": true, + "RGBA64": true, + "RGBA64Model": true, + "RGBAModel": true, + "RGBToCMYK": true, + "RGBToYCbCr": true, + "Transparent": true, + "White": true, + "YCbCr": true, + "YCbCrModel": true, + "YCbCrToRGB": true, + }, + "image/color/palette": map[string]bool{ + "Plan9": true, + "WebSafe": true, + }, + "image/draw": map[string]bool{ + "Draw": true, + "DrawMask": true, + "Drawer": true, + "FloydSteinberg": true, + "Image": true, + "Op": true, + "Over": true, + "Quantizer": true, + "Src": true, + }, + "image/gif": map[string]bool{ + "Decode": true, + "DecodeAll": true, + "DecodeConfig": true, + "DisposalBackground": true, + "DisposalNone": true, + "DisposalPrevious": true, + "Encode": true, + "EncodeAll": true, + "GIF": true, + "Options": true, + }, + "image/jpeg": map[string]bool{ + "Decode": true, + "DecodeConfig": true, + "DefaultQuality": true, + "Encode": true, + "FormatError": true, + "Options": true, + "Reader": true, + "UnsupportedError": true, + }, + "image/png": map[string]bool{ + "BestCompression": true, + "BestSpeed": true, + "CompressionLevel": true, + "Decode": 
true, + "DecodeConfig": true, + "DefaultCompression": true, + "Encode": true, + "Encoder": true, + "EncoderBuffer": true, + "EncoderBufferPool": true, + "FormatError": true, + "NoCompression": true, + "UnsupportedError": true, + }, + "index/suffixarray": map[string]bool{ + "Index": true, + "New": true, + }, + "io": map[string]bool{ + "ByteReader": true, + "ByteScanner": true, + "ByteWriter": true, + "Closer": true, + "Copy": true, + "CopyBuffer": true, + "CopyN": true, + "EOF": true, + "ErrClosedPipe": true, + "ErrNoProgress": true, + "ErrShortBuffer": true, + "ErrShortWrite": true, + "ErrUnexpectedEOF": true, + "LimitReader": true, + "LimitedReader": true, + "MultiReader": true, + "MultiWriter": true, + "NewSectionReader": true, + "Pipe": true, + "PipeReader": true, + "PipeWriter": true, + "ReadAtLeast": true, + "ReadCloser": true, + "ReadFull": true, + "ReadSeeker": true, + "ReadWriteCloser": true, + "ReadWriteSeeker": true, + "ReadWriter": true, + "Reader": true, + "ReaderAt": true, + "ReaderFrom": true, + "RuneReader": true, + "RuneScanner": true, + "SectionReader": true, + "SeekCurrent": true, + "SeekEnd": true, + "SeekStart": true, + "Seeker": true, + "StringWriter": true, + "TeeReader": true, + "WriteCloser": true, + "WriteSeeker": true, + "WriteString": true, + "Writer": true, + "WriterAt": true, + "WriterTo": true, + }, + "io/ioutil": map[string]bool{ + "Discard": true, + "NopCloser": true, + "ReadAll": true, + "ReadDir": true, + "ReadFile": true, + "TempDir": true, + "TempFile": true, + "WriteFile": true, + }, + "log": map[string]bool{ + "Fatal": true, + "Fatalf": true, + "Fatalln": true, + "Flags": true, + "LUTC": true, + "Ldate": true, + "Llongfile": true, + "Lmicroseconds": true, + "Logger": true, + "Lshortfile": true, + "LstdFlags": true, + "Ltime": true, + "New": true, + "Output": true, + "Panic": true, + "Panicf": true, + "Panicln": true, + "Prefix": true, + "Print": true, + "Printf": true, + "Println": true, + "SetFlags": true, + "SetOutput": true, + "SetPrefix": true, + }, + "log/syslog": map[string]bool{ + "Dial": true, + "LOG_ALERT": true, + "LOG_AUTH": true, + "LOG_AUTHPRIV": true, + "LOG_CRIT": true, + "LOG_CRON": true, + "LOG_DAEMON": true, + "LOG_DEBUG": true, + "LOG_EMERG": true, + "LOG_ERR": true, + "LOG_FTP": true, + "LOG_INFO": true, + "LOG_KERN": true, + "LOG_LOCAL0": true, + "LOG_LOCAL1": true, + "LOG_LOCAL2": true, + "LOG_LOCAL3": true, + "LOG_LOCAL4": true, + "LOG_LOCAL5": true, + "LOG_LOCAL6": true, + "LOG_LOCAL7": true, + "LOG_LPR": true, + "LOG_MAIL": true, + "LOG_NEWS": true, + "LOG_NOTICE": true, + "LOG_SYSLOG": true, + "LOG_USER": true, + "LOG_UUCP": true, + "LOG_WARNING": true, + "New": true, + "NewLogger": true, + "Priority": true, + "Writer": true, + }, + "math": map[string]bool{ + "Abs": true, + "Acos": true, + "Acosh": true, + "Asin": true, + "Asinh": true, + "Atan": true, + "Atan2": true, + "Atanh": true, + "Cbrt": true, + "Ceil": true, + "Copysign": true, + "Cos": true, + "Cosh": true, + "Dim": true, + "E": true, + "Erf": true, + "Erfc": true, + "Erfcinv": true, + "Erfinv": true, + "Exp": true, + "Exp2": true, + "Expm1": true, + "Float32bits": true, + "Float32frombits": true, + "Float64bits": true, + "Float64frombits": true, + "Floor": true, + "Frexp": true, + "Gamma": true, + "Hypot": true, + "Ilogb": true, + "Inf": true, + "IsInf": true, + "IsNaN": true, + "J0": true, + "J1": true, + "Jn": true, + "Ldexp": true, + "Lgamma": true, + "Ln10": true, + "Ln2": true, + "Log": true, + "Log10": true, + "Log10E": true, + "Log1p": true, + "Log2": true, + 
"Log2E": true, + "Logb": true, + "Max": true, + "MaxFloat32": true, + "MaxFloat64": true, + "MaxInt16": true, + "MaxInt32": true, + "MaxInt64": true, + "MaxInt8": true, + "MaxUint16": true, + "MaxUint32": true, + "MaxUint64": true, + "MaxUint8": true, + "Min": true, + "MinInt16": true, + "MinInt32": true, + "MinInt64": true, + "MinInt8": true, + "Mod": true, + "Modf": true, + "NaN": true, + "Nextafter": true, + "Nextafter32": true, + "Phi": true, + "Pi": true, + "Pow": true, + "Pow10": true, + "Remainder": true, + "Round": true, + "RoundToEven": true, + "Signbit": true, + "Sin": true, + "Sincos": true, + "Sinh": true, + "SmallestNonzeroFloat32": true, + "SmallestNonzeroFloat64": true, + "Sqrt": true, + "Sqrt2": true, + "SqrtE": true, + "SqrtPhi": true, + "SqrtPi": true, + "Tan": true, + "Tanh": true, + "Trunc": true, + "Y0": true, + "Y1": true, + "Yn": true, + }, + "math/big": map[string]bool{ + "Above": true, + "Accuracy": true, + "AwayFromZero": true, + "Below": true, + "ErrNaN": true, + "Exact": true, + "Float": true, + "Int": true, + "Jacobi": true, + "MaxBase": true, + "MaxExp": true, + "MaxPrec": true, + "MinExp": true, + "NewFloat": true, + "NewInt": true, + "NewRat": true, + "ParseFloat": true, + "Rat": true, + "RoundingMode": true, + "ToNearestAway": true, + "ToNearestEven": true, + "ToNegativeInf": true, + "ToPositiveInf": true, + "ToZero": true, + "Word": true, + }, + "math/bits": map[string]bool{ + "Add": true, + "Add32": true, + "Add64": true, + "Div": true, + "Div32": true, + "Div64": true, + "LeadingZeros": true, + "LeadingZeros16": true, + "LeadingZeros32": true, + "LeadingZeros64": true, + "LeadingZeros8": true, + "Len": true, + "Len16": true, + "Len32": true, + "Len64": true, + "Len8": true, + "Mul": true, + "Mul32": true, + "Mul64": true, + "OnesCount": true, + "OnesCount16": true, + "OnesCount32": true, + "OnesCount64": true, + "OnesCount8": true, + "Reverse": true, + "Reverse16": true, + "Reverse32": true, + "Reverse64": true, + "Reverse8": true, + "ReverseBytes": true, + "ReverseBytes16": true, + "ReverseBytes32": true, + "ReverseBytes64": true, + "RotateLeft": true, + "RotateLeft16": true, + "RotateLeft32": true, + "RotateLeft64": true, + "RotateLeft8": true, + "Sub": true, + "Sub32": true, + "Sub64": true, + "TrailingZeros": true, + "TrailingZeros16": true, + "TrailingZeros32": true, + "TrailingZeros64": true, + "TrailingZeros8": true, + "UintSize": true, + }, + "math/cmplx": map[string]bool{ + "Abs": true, + "Acos": true, + "Acosh": true, + "Asin": true, + "Asinh": true, + "Atan": true, + "Atanh": true, + "Conj": true, + "Cos": true, + "Cosh": true, + "Cot": true, + "Exp": true, + "Inf": true, + "IsInf": true, + "IsNaN": true, + "Log": true, + "Log10": true, + "NaN": true, + "Phase": true, + "Polar": true, + "Pow": true, + "Rect": true, + "Sin": true, + "Sinh": true, + "Sqrt": true, + "Tan": true, + "Tanh": true, + }, + "math/rand": map[string]bool{ + "ExpFloat64": true, + "Float32": true, + "Float64": true, + "Int": true, + "Int31": true, + "Int31n": true, + "Int63": true, + "Int63n": true, + "Intn": true, + "New": true, + "NewSource": true, + "NewZipf": true, + "NormFloat64": true, + "Perm": true, + "Rand": true, + "Read": true, + "Seed": true, + "Shuffle": true, + "Source": true, + "Source64": true, + "Uint32": true, + "Uint64": true, + "Zipf": true, + }, + "mime": map[string]bool{ + "AddExtensionType": true, + "BEncoding": true, + "ErrInvalidMediaParameter": true, + "ExtensionsByType": true, + "FormatMediaType": true, + "ParseMediaType": true, + "QEncoding": 
true, + "TypeByExtension": true, + "WordDecoder": true, + "WordEncoder": true, + }, + "mime/multipart": map[string]bool{ + "ErrMessageTooLarge": true, + "File": true, + "FileHeader": true, + "Form": true, + "NewReader": true, + "NewWriter": true, + "Part": true, + "Reader": true, + "Writer": true, + }, + "mime/quotedprintable": map[string]bool{ + "NewReader": true, + "NewWriter": true, + "Reader": true, + "Writer": true, + }, + "net": map[string]bool{ + "Addr": true, + "AddrError": true, + "Buffers": true, + "CIDRMask": true, + "Conn": true, + "DNSConfigError": true, + "DNSError": true, + "DefaultResolver": true, + "Dial": true, + "DialIP": true, + "DialTCP": true, + "DialTimeout": true, + "DialUDP": true, + "DialUnix": true, + "Dialer": true, + "ErrWriteToConnected": true, + "Error": true, + "FileConn": true, + "FileListener": true, + "FilePacketConn": true, + "FlagBroadcast": true, + "FlagLoopback": true, + "FlagMulticast": true, + "FlagPointToPoint": true, + "FlagUp": true, + "Flags": true, + "HardwareAddr": true, + "IP": true, + "IPAddr": true, + "IPConn": true, + "IPMask": true, + "IPNet": true, + "IPv4": true, + "IPv4Mask": true, + "IPv4allrouter": true, + "IPv4allsys": true, + "IPv4bcast": true, + "IPv4len": true, + "IPv4zero": true, + "IPv6interfacelocalallnodes": true, + "IPv6len": true, + "IPv6linklocalallnodes": true, + "IPv6linklocalallrouters": true, + "IPv6loopback": true, + "IPv6unspecified": true, + "IPv6zero": true, + "Interface": true, + "InterfaceAddrs": true, + "InterfaceByIndex": true, + "InterfaceByName": true, + "Interfaces": true, + "InvalidAddrError": true, + "JoinHostPort": true, + "Listen": true, + "ListenConfig": true, + "ListenIP": true, + "ListenMulticastUDP": true, + "ListenPacket": true, + "ListenTCP": true, + "ListenUDP": true, + "ListenUnix": true, + "ListenUnixgram": true, + "Listener": true, + "LookupAddr": true, + "LookupCNAME": true, + "LookupHost": true, + "LookupIP": true, + "LookupMX": true, + "LookupNS": true, + "LookupPort": true, + "LookupSRV": true, + "LookupTXT": true, + "MX": true, + "NS": true, + "OpError": true, + "PacketConn": true, + "ParseCIDR": true, + "ParseError": true, + "ParseIP": true, + "ParseMAC": true, + "Pipe": true, + "ResolveIPAddr": true, + "ResolveTCPAddr": true, + "ResolveUDPAddr": true, + "ResolveUnixAddr": true, + "Resolver": true, + "SRV": true, + "SplitHostPort": true, + "TCPAddr": true, + "TCPConn": true, + "TCPListener": true, + "UDPAddr": true, + "UDPConn": true, + "UnixAddr": true, + "UnixConn": true, + "UnixListener": true, + "UnknownNetworkError": true, + }, + "net/http": map[string]bool{ + "CanonicalHeaderKey": true, + "Client": true, + "CloseNotifier": true, + "ConnState": true, + "Cookie": true, + "CookieJar": true, + "DefaultClient": true, + "DefaultMaxHeaderBytes": true, + "DefaultMaxIdleConnsPerHost": true, + "DefaultServeMux": true, + "DefaultTransport": true, + "DetectContentType": true, + "Dir": true, + "ErrAbortHandler": true, + "ErrBodyNotAllowed": true, + "ErrBodyReadAfterClose": true, + "ErrContentLength": true, + "ErrHandlerTimeout": true, + "ErrHeaderTooLong": true, + "ErrHijacked": true, + "ErrLineTooLong": true, + "ErrMissingBoundary": true, + "ErrMissingContentLength": true, + "ErrMissingFile": true, + "ErrNoCookie": true, + "ErrNoLocation": true, + "ErrNotMultipart": true, + "ErrNotSupported": true, + "ErrServerClosed": true, + "ErrShortBody": true, + "ErrSkipAltProtocol": true, + "ErrUnexpectedTrailer": true, + "ErrUseLastResponse": true, + "ErrWriteAfterFlush": true, + "Error": true, + "File": 
true, + "FileServer": true, + "FileSystem": true, + "Flusher": true, + "Get": true, + "Handle": true, + "HandleFunc": true, + "Handler": true, + "HandlerFunc": true, + "Head": true, + "Header": true, + "Hijacker": true, + "ListenAndServe": true, + "ListenAndServeTLS": true, + "LocalAddrContextKey": true, + "MaxBytesReader": true, + "MethodConnect": true, + "MethodDelete": true, + "MethodGet": true, + "MethodHead": true, + "MethodOptions": true, + "MethodPatch": true, + "MethodPost": true, + "MethodPut": true, + "MethodTrace": true, + "NewFileTransport": true, + "NewRequest": true, + "NewServeMux": true, + "NoBody": true, + "NotFound": true, + "NotFoundHandler": true, + "ParseHTTPVersion": true, + "ParseTime": true, + "Post": true, + "PostForm": true, + "ProtocolError": true, + "ProxyFromEnvironment": true, + "ProxyURL": true, + "PushOptions": true, + "Pusher": true, + "ReadRequest": true, + "ReadResponse": true, + "Redirect": true, + "RedirectHandler": true, + "Request": true, + "Response": true, + "ResponseWriter": true, + "RoundTripper": true, + "SameSite": true, + "SameSiteDefaultMode": true, + "SameSiteLaxMode": true, + "SameSiteStrictMode": true, + "Serve": true, + "ServeContent": true, + "ServeFile": true, + "ServeMux": true, + "ServeTLS": true, + "Server": true, + "ServerContextKey": true, + "SetCookie": true, + "StateActive": true, + "StateClosed": true, + "StateHijacked": true, + "StateIdle": true, + "StateNew": true, + "StatusAccepted": true, + "StatusAlreadyReported": true, + "StatusBadGateway": true, + "StatusBadRequest": true, + "StatusConflict": true, + "StatusContinue": true, + "StatusCreated": true, + "StatusExpectationFailed": true, + "StatusFailedDependency": true, + "StatusForbidden": true, + "StatusFound": true, + "StatusGatewayTimeout": true, + "StatusGone": true, + "StatusHTTPVersionNotSupported": true, + "StatusIMUsed": true, + "StatusInsufficientStorage": true, + "StatusInternalServerError": true, + "StatusLengthRequired": true, + "StatusLocked": true, + "StatusLoopDetected": true, + "StatusMethodNotAllowed": true, + "StatusMisdirectedRequest": true, + "StatusMovedPermanently": true, + "StatusMultiStatus": true, + "StatusMultipleChoices": true, + "StatusNetworkAuthenticationRequired": true, + "StatusNoContent": true, + "StatusNonAuthoritativeInfo": true, + "StatusNotAcceptable": true, + "StatusNotExtended": true, + "StatusNotFound": true, + "StatusNotImplemented": true, + "StatusNotModified": true, + "StatusOK": true, + "StatusPartialContent": true, + "StatusPaymentRequired": true, + "StatusPermanentRedirect": true, + "StatusPreconditionFailed": true, + "StatusPreconditionRequired": true, + "StatusProcessing": true, + "StatusProxyAuthRequired": true, + "StatusRequestEntityTooLarge": true, + "StatusRequestHeaderFieldsTooLarge": true, + "StatusRequestTimeout": true, + "StatusRequestURITooLong": true, + "StatusRequestedRangeNotSatisfiable": true, + "StatusResetContent": true, + "StatusSeeOther": true, + "StatusServiceUnavailable": true, + "StatusSwitchingProtocols": true, + "StatusTeapot": true, + "StatusTemporaryRedirect": true, + "StatusText": true, + "StatusTooEarly": true, + "StatusTooManyRequests": true, + "StatusUnauthorized": true, + "StatusUnavailableForLegalReasons": true, + "StatusUnprocessableEntity": true, + "StatusUnsupportedMediaType": true, + "StatusUpgradeRequired": true, + "StatusUseProxy": true, + "StatusVariantAlsoNegotiates": true, + "StripPrefix": true, + "TimeFormat": true, + "TimeoutHandler": true, + "TrailerPrefix": true, + "Transport": true, + 
}, + "net/http/cgi": map[string]bool{ + "Handler": true, + "Request": true, + "RequestFromMap": true, + "Serve": true, + }, + "net/http/cookiejar": map[string]bool{ + "Jar": true, + "New": true, + "Options": true, + "PublicSuffixList": true, + }, + "net/http/fcgi": map[string]bool{ + "ErrConnClosed": true, + "ErrRequestAborted": true, + "ProcessEnv": true, + "Serve": true, + }, + "net/http/httptest": map[string]bool{ + "DefaultRemoteAddr": true, + "NewRecorder": true, + "NewRequest": true, + "NewServer": true, + "NewTLSServer": true, + "NewUnstartedServer": true, + "ResponseRecorder": true, + "Server": true, + }, + "net/http/httptrace": map[string]bool{ + "ClientTrace": true, + "ContextClientTrace": true, + "DNSDoneInfo": true, + "DNSStartInfo": true, + "GotConnInfo": true, + "WithClientTrace": true, + "WroteRequestInfo": true, + }, + "net/http/httputil": map[string]bool{ + "BufferPool": true, + "ClientConn": true, + "DumpRequest": true, + "DumpRequestOut": true, + "DumpResponse": true, + "ErrClosed": true, + "ErrLineTooLong": true, + "ErrPersistEOF": true, + "ErrPipeline": true, + "NewChunkedReader": true, + "NewChunkedWriter": true, + "NewClientConn": true, + "NewProxyClientConn": true, + "NewServerConn": true, + "NewSingleHostReverseProxy": true, + "ReverseProxy": true, + "ServerConn": true, + }, + "net/http/pprof": map[string]bool{ + "Cmdline": true, + "Handler": true, + "Index": true, + "Profile": true, + "Symbol": true, + "Trace": true, + }, + "net/mail": map[string]bool{ + "Address": true, + "AddressParser": true, + "ErrHeaderNotPresent": true, + "Header": true, + "Message": true, + "ParseAddress": true, + "ParseAddressList": true, + "ParseDate": true, + "ReadMessage": true, + }, + "net/rpc": map[string]bool{ + "Accept": true, + "Call": true, + "Client": true, + "ClientCodec": true, + "DefaultDebugPath": true, + "DefaultRPCPath": true, + "DefaultServer": true, + "Dial": true, + "DialHTTP": true, + "DialHTTPPath": true, + "ErrShutdown": true, + "HandleHTTP": true, + "NewClient": true, + "NewClientWithCodec": true, + "NewServer": true, + "Register": true, + "RegisterName": true, + "Request": true, + "Response": true, + "ServeCodec": true, + "ServeConn": true, + "ServeRequest": true, + "Server": true, + "ServerCodec": true, + "ServerError": true, + }, + "net/rpc/jsonrpc": map[string]bool{ + "Dial": true, + "NewClient": true, + "NewClientCodec": true, + "NewServerCodec": true, + "ServeConn": true, + }, + "net/smtp": map[string]bool{ + "Auth": true, + "CRAMMD5Auth": true, + "Client": true, + "Dial": true, + "NewClient": true, + "PlainAuth": true, + "SendMail": true, + "ServerInfo": true, + }, + "net/textproto": map[string]bool{ + "CanonicalMIMEHeaderKey": true, + "Conn": true, + "Dial": true, + "Error": true, + "MIMEHeader": true, + "NewConn": true, + "NewReader": true, + "NewWriter": true, + "Pipeline": true, + "ProtocolError": true, + "Reader": true, + "TrimBytes": true, + "TrimString": true, + "Writer": true, + }, + "net/url": map[string]bool{ + "Error": true, + "EscapeError": true, + "InvalidHostError": true, + "Parse": true, + "ParseQuery": true, + "ParseRequestURI": true, + "PathEscape": true, + "PathUnescape": true, + "QueryEscape": true, + "QueryUnescape": true, + "URL": true, + "User": true, + "UserPassword": true, + "Userinfo": true, + "Values": true, + }, + "os": map[string]bool{ + "Args": true, + "Chdir": true, + "Chmod": true, + "Chown": true, + "Chtimes": true, + "Clearenv": true, + "Create": true, + "DevNull": true, + "Environ": true, + "ErrClosed": true, + "ErrExist": 
true, + "ErrInvalid": true, + "ErrNoDeadline": true, + "ErrNotExist": true, + "ErrPermission": true, + "Executable": true, + "Exit": true, + "Expand": true, + "ExpandEnv": true, + "File": true, + "FileInfo": true, + "FileMode": true, + "FindProcess": true, + "Getegid": true, + "Getenv": true, + "Geteuid": true, + "Getgid": true, + "Getgroups": true, + "Getpagesize": true, + "Getpid": true, + "Getppid": true, + "Getuid": true, + "Getwd": true, + "Hostname": true, + "Interrupt": true, + "IsExist": true, + "IsNotExist": true, + "IsPathSeparator": true, + "IsPermission": true, + "IsTimeout": true, + "Kill": true, + "Lchown": true, + "Link": true, + "LinkError": true, + "LookupEnv": true, + "Lstat": true, + "Mkdir": true, + "MkdirAll": true, + "ModeAppend": true, + "ModeCharDevice": true, + "ModeDevice": true, + "ModeDir": true, + "ModeExclusive": true, + "ModeIrregular": true, + "ModeNamedPipe": true, + "ModePerm": true, + "ModeSetgid": true, + "ModeSetuid": true, + "ModeSocket": true, + "ModeSticky": true, + "ModeSymlink": true, + "ModeTemporary": true, + "ModeType": true, + "NewFile": true, + "NewSyscallError": true, + "O_APPEND": true, + "O_CREATE": true, + "O_EXCL": true, + "O_RDONLY": true, + "O_RDWR": true, + "O_SYNC": true, + "O_TRUNC": true, + "O_WRONLY": true, + "Open": true, + "OpenFile": true, + "PathError": true, + "PathListSeparator": true, + "PathSeparator": true, + "Pipe": true, + "ProcAttr": true, + "Process": true, + "ProcessState": true, + "Readlink": true, + "Remove": true, + "RemoveAll": true, + "Rename": true, + "SEEK_CUR": true, + "SEEK_END": true, + "SEEK_SET": true, + "SameFile": true, + "Setenv": true, + "Signal": true, + "StartProcess": true, + "Stat": true, + "Stderr": true, + "Stdin": true, + "Stdout": true, + "Symlink": true, + "SyscallError": true, + "TempDir": true, + "Truncate": true, + "Unsetenv": true, + "UserCacheDir": true, + "UserHomeDir": true, + }, + "os/exec": map[string]bool{ + "Cmd": true, + "Command": true, + "CommandContext": true, + "ErrNotFound": true, + "Error": true, + "ExitError": true, + "LookPath": true, + }, + "os/signal": map[string]bool{ + "Ignore": true, + "Ignored": true, + "Notify": true, + "Reset": true, + "Stop": true, + }, + "os/user": map[string]bool{ + "Current": true, + "Group": true, + "Lookup": true, + "LookupGroup": true, + "LookupGroupId": true, + "LookupId": true, + "UnknownGroupError": true, + "UnknownGroupIdError": true, + "UnknownUserError": true, + "UnknownUserIdError": true, + "User": true, + }, + "path": map[string]bool{ + "Base": true, + "Clean": true, + "Dir": true, + "ErrBadPattern": true, + "Ext": true, + "IsAbs": true, + "Join": true, + "Match": true, + "Split": true, + }, + "path/filepath": map[string]bool{ + "Abs": true, + "Base": true, + "Clean": true, + "Dir": true, + "ErrBadPattern": true, + "EvalSymlinks": true, + "Ext": true, + "FromSlash": true, + "Glob": true, + "HasPrefix": true, + "IsAbs": true, + "Join": true, + "ListSeparator": true, + "Match": true, + "Rel": true, + "Separator": true, + "SkipDir": true, + "Split": true, + "SplitList": true, + "ToSlash": true, + "VolumeName": true, + "Walk": true, + "WalkFunc": true, + }, + "plugin": map[string]bool{ + "Open": true, + "Plugin": true, + "Symbol": true, + }, + "reflect": map[string]bool{ + "Append": true, + "AppendSlice": true, + "Array": true, + "ArrayOf": true, + "Bool": true, + "BothDir": true, + "Chan": true, + "ChanDir": true, + "ChanOf": true, + "Complex128": true, + "Complex64": true, + "Copy": true, + "DeepEqual": true, + "Float32": true, + 
"Float64": true, + "Func": true, + "FuncOf": true, + "Indirect": true, + "Int": true, + "Int16": true, + "Int32": true, + "Int64": true, + "Int8": true, + "Interface": true, + "Invalid": true, + "Kind": true, + "MakeChan": true, + "MakeFunc": true, + "MakeMap": true, + "MakeMapWithSize": true, + "MakeSlice": true, + "Map": true, + "MapIter": true, + "MapOf": true, + "Method": true, + "New": true, + "NewAt": true, + "Ptr": true, + "PtrTo": true, + "RecvDir": true, + "Select": true, + "SelectCase": true, + "SelectDefault": true, + "SelectDir": true, + "SelectRecv": true, + "SelectSend": true, + "SendDir": true, + "Slice": true, + "SliceHeader": true, + "SliceOf": true, + "String": true, + "StringHeader": true, + "Struct": true, + "StructField": true, + "StructOf": true, + "StructTag": true, + "Swapper": true, + "TypeOf": true, + "Uint": true, + "Uint16": true, + "Uint32": true, + "Uint64": true, + "Uint8": true, + "Uintptr": true, + "UnsafePointer": true, + "Value": true, + "ValueError": true, + "ValueOf": true, + "Zero": true, + }, + "regexp": map[string]bool{ + "Compile": true, + "CompilePOSIX": true, + "Match": true, + "MatchReader": true, + "MatchString": true, + "MustCompile": true, + "MustCompilePOSIX": true, + "QuoteMeta": true, + "Regexp": true, + }, + "regexp/syntax": map[string]bool{ + "ClassNL": true, + "Compile": true, + "DotNL": true, + "EmptyBeginLine": true, + "EmptyBeginText": true, + "EmptyEndLine": true, + "EmptyEndText": true, + "EmptyNoWordBoundary": true, + "EmptyOp": true, + "EmptyOpContext": true, + "EmptyWordBoundary": true, + "ErrInternalError": true, + "ErrInvalidCharClass": true, + "ErrInvalidCharRange": true, + "ErrInvalidEscape": true, + "ErrInvalidNamedCapture": true, + "ErrInvalidPerlOp": true, + "ErrInvalidRepeatOp": true, + "ErrInvalidRepeatSize": true, + "ErrInvalidUTF8": true, + "ErrMissingBracket": true, + "ErrMissingParen": true, + "ErrMissingRepeatArgument": true, + "ErrTrailingBackslash": true, + "ErrUnexpectedParen": true, + "Error": true, + "ErrorCode": true, + "Flags": true, + "FoldCase": true, + "Inst": true, + "InstAlt": true, + "InstAltMatch": true, + "InstCapture": true, + "InstEmptyWidth": true, + "InstFail": true, + "InstMatch": true, + "InstNop": true, + "InstOp": true, + "InstRune": true, + "InstRune1": true, + "InstRuneAny": true, + "InstRuneAnyNotNL": true, + "IsWordChar": true, + "Literal": true, + "MatchNL": true, + "NonGreedy": true, + "OneLine": true, + "Op": true, + "OpAlternate": true, + "OpAnyChar": true, + "OpAnyCharNotNL": true, + "OpBeginLine": true, + "OpBeginText": true, + "OpCapture": true, + "OpCharClass": true, + "OpConcat": true, + "OpEmptyMatch": true, + "OpEndLine": true, + "OpEndText": true, + "OpLiteral": true, + "OpNoMatch": true, + "OpNoWordBoundary": true, + "OpPlus": true, + "OpQuest": true, + "OpRepeat": true, + "OpStar": true, + "OpWordBoundary": true, + "POSIX": true, + "Parse": true, + "Perl": true, + "PerlX": true, + "Prog": true, + "Regexp": true, + "Simple": true, + "UnicodeGroups": true, + "WasDollar": true, + }, + "runtime": map[string]bool{ + "BlockProfile": true, + "BlockProfileRecord": true, + "Breakpoint": true, + "CPUProfile": true, + "Caller": true, + "Callers": true, + "CallersFrames": true, + "Compiler": true, + "Error": true, + "Frame": true, + "Frames": true, + "Func": true, + "FuncForPC": true, + "GC": true, + "GOARCH": true, + "GOMAXPROCS": true, + "GOOS": true, + "GOROOT": true, + "Goexit": true, + "GoroutineProfile": true, + "Gosched": true, + "KeepAlive": true, + "LockOSThread": true, + 
"MemProfile": true, + "MemProfileRate": true, + "MemProfileRecord": true, + "MemStats": true, + "MutexProfile": true, + "NumCPU": true, + "NumCgoCall": true, + "NumGoroutine": true, + "ReadMemStats": true, + "ReadTrace": true, + "SetBlockProfileRate": true, + "SetCPUProfileRate": true, + "SetCgoTraceback": true, + "SetFinalizer": true, + "SetMutexProfileFraction": true, + "Stack": true, + "StackRecord": true, + "StartTrace": true, + "StopTrace": true, + "ThreadCreateProfile": true, + "TypeAssertionError": true, + "UnlockOSThread": true, + "Version": true, + }, + "runtime/debug": map[string]bool{ + "BuildInfo": true, + "FreeOSMemory": true, + "GCStats": true, + "Module": true, + "PrintStack": true, + "ReadBuildInfo": true, + "ReadGCStats": true, + "SetGCPercent": true, + "SetMaxStack": true, + "SetMaxThreads": true, + "SetPanicOnFault": true, + "SetTraceback": true, + "Stack": true, + "WriteHeapDump": true, + }, + "runtime/pprof": map[string]bool{ + "Do": true, + "ForLabels": true, + "Label": true, + "LabelSet": true, + "Labels": true, + "Lookup": true, + "NewProfile": true, + "Profile": true, + "Profiles": true, + "SetGoroutineLabels": true, + "StartCPUProfile": true, + "StopCPUProfile": true, + "WithLabels": true, + "WriteHeapProfile": true, + }, + "runtime/trace": map[string]bool{ + "IsEnabled": true, + "Log": true, + "Logf": true, + "NewTask": true, + "Region": true, + "Start": true, + "StartRegion": true, + "Stop": true, + "Task": true, + "WithRegion": true, + }, + "sort": map[string]bool{ + "Float64Slice": true, + "Float64s": true, + "Float64sAreSorted": true, + "IntSlice": true, + "Interface": true, + "Ints": true, + "IntsAreSorted": true, + "IsSorted": true, + "Reverse": true, + "Search": true, + "SearchFloat64s": true, + "SearchInts": true, + "SearchStrings": true, + "Slice": true, + "SliceIsSorted": true, + "SliceStable": true, + "Sort": true, + "Stable": true, + "StringSlice": true, + "Strings": true, + "StringsAreSorted": true, + }, + "strconv": map[string]bool{ + "AppendBool": true, + "AppendFloat": true, + "AppendInt": true, + "AppendQuote": true, + "AppendQuoteRune": true, + "AppendQuoteRuneToASCII": true, + "AppendQuoteRuneToGraphic": true, + "AppendQuoteToASCII": true, + "AppendQuoteToGraphic": true, + "AppendUint": true, + "Atoi": true, + "CanBackquote": true, + "ErrRange": true, + "ErrSyntax": true, + "FormatBool": true, + "FormatFloat": true, + "FormatInt": true, + "FormatUint": true, + "IntSize": true, + "IsGraphic": true, + "IsPrint": true, + "Itoa": true, + "NumError": true, + "ParseBool": true, + "ParseFloat": true, + "ParseInt": true, + "ParseUint": true, + "Quote": true, + "QuoteRune": true, + "QuoteRuneToASCII": true, + "QuoteRuneToGraphic": true, + "QuoteToASCII": true, + "QuoteToGraphic": true, + "Unquote": true, + "UnquoteChar": true, + }, + "strings": map[string]bool{ + "Builder": true, + "Compare": true, + "Contains": true, + "ContainsAny": true, + "ContainsRune": true, + "Count": true, + "EqualFold": true, + "Fields": true, + "FieldsFunc": true, + "HasPrefix": true, + "HasSuffix": true, + "Index": true, + "IndexAny": true, + "IndexByte": true, + "IndexFunc": true, + "IndexRune": true, + "Join": true, + "LastIndex": true, + "LastIndexAny": true, + "LastIndexByte": true, + "LastIndexFunc": true, + "Map": true, + "NewReader": true, + "NewReplacer": true, + "Reader": true, + "Repeat": true, + "Replace": true, + "ReplaceAll": true, + "Replacer": true, + "Split": true, + "SplitAfter": true, + "SplitAfterN": true, + "SplitN": true, + "Title": true, + "ToLower": 
true, + "ToLowerSpecial": true, + "ToTitle": true, + "ToTitleSpecial": true, + "ToUpper": true, + "ToUpperSpecial": true, + "Trim": true, + "TrimFunc": true, + "TrimLeft": true, + "TrimLeftFunc": true, + "TrimPrefix": true, + "TrimRight": true, + "TrimRightFunc": true, + "TrimSpace": true, + "TrimSuffix": true, + }, + "sync": map[string]bool{ + "Cond": true, + "Locker": true, + "Map": true, + "Mutex": true, + "NewCond": true, + "Once": true, + "Pool": true, + "RWMutex": true, + "WaitGroup": true, + }, + "sync/atomic": map[string]bool{ + "AddInt32": true, + "AddInt64": true, + "AddUint32": true, + "AddUint64": true, + "AddUintptr": true, + "CompareAndSwapInt32": true, + "CompareAndSwapInt64": true, + "CompareAndSwapPointer": true, + "CompareAndSwapUint32": true, + "CompareAndSwapUint64": true, + "CompareAndSwapUintptr": true, + "LoadInt32": true, + "LoadInt64": true, + "LoadPointer": true, + "LoadUint32": true, + "LoadUint64": true, + "LoadUintptr": true, + "StoreInt32": true, + "StoreInt64": true, + "StorePointer": true, + "StoreUint32": true, + "StoreUint64": true, + "StoreUintptr": true, + "SwapInt32": true, + "SwapInt64": true, + "SwapPointer": true, + "SwapUint32": true, + "SwapUint64": true, + "SwapUintptr": true, + "Value": true, + }, + "syscall": map[string]bool{ + "AF_ALG": true, + "AF_APPLETALK": true, + "AF_ARP": true, + "AF_ASH": true, + "AF_ATM": true, + "AF_ATMPVC": true, + "AF_ATMSVC": true, + "AF_AX25": true, + "AF_BLUETOOTH": true, + "AF_BRIDGE": true, + "AF_CAIF": true, + "AF_CAN": true, + "AF_CCITT": true, + "AF_CHAOS": true, + "AF_CNT": true, + "AF_COIP": true, + "AF_DATAKIT": true, + "AF_DECnet": true, + "AF_DLI": true, + "AF_E164": true, + "AF_ECMA": true, + "AF_ECONET": true, + "AF_ENCAP": true, + "AF_FILE": true, + "AF_HYLINK": true, + "AF_IEEE80211": true, + "AF_IEEE802154": true, + "AF_IMPLINK": true, + "AF_INET": true, + "AF_INET6": true, + "AF_INET6_SDP": true, + "AF_INET_SDP": true, + "AF_IPX": true, + "AF_IRDA": true, + "AF_ISDN": true, + "AF_ISO": true, + "AF_IUCV": true, + "AF_KEY": true, + "AF_LAT": true, + "AF_LINK": true, + "AF_LLC": true, + "AF_LOCAL": true, + "AF_MAX": true, + "AF_MPLS": true, + "AF_NATM": true, + "AF_NDRV": true, + "AF_NETBEUI": true, + "AF_NETBIOS": true, + "AF_NETGRAPH": true, + "AF_NETLINK": true, + "AF_NETROM": true, + "AF_NS": true, + "AF_OROUTE": true, + "AF_OSI": true, + "AF_PACKET": true, + "AF_PHONET": true, + "AF_PPP": true, + "AF_PPPOX": true, + "AF_PUP": true, + "AF_RDS": true, + "AF_RESERVED_36": true, + "AF_ROSE": true, + "AF_ROUTE": true, + "AF_RXRPC": true, + "AF_SCLUSTER": true, + "AF_SECURITY": true, + "AF_SIP": true, + "AF_SLOW": true, + "AF_SNA": true, + "AF_SYSTEM": true, + "AF_TIPC": true, + "AF_UNIX": true, + "AF_UNSPEC": true, + "AF_VENDOR00": true, + "AF_VENDOR01": true, + "AF_VENDOR02": true, + "AF_VENDOR03": true, + "AF_VENDOR04": true, + "AF_VENDOR05": true, + "AF_VENDOR06": true, + "AF_VENDOR07": true, + "AF_VENDOR08": true, + "AF_VENDOR09": true, + "AF_VENDOR10": true, + "AF_VENDOR11": true, + "AF_VENDOR12": true, + "AF_VENDOR13": true, + "AF_VENDOR14": true, + "AF_VENDOR15": true, + "AF_VENDOR16": true, + "AF_VENDOR17": true, + "AF_VENDOR18": true, + "AF_VENDOR19": true, + "AF_VENDOR20": true, + "AF_VENDOR21": true, + "AF_VENDOR22": true, + "AF_VENDOR23": true, + "AF_VENDOR24": true, + "AF_VENDOR25": true, + "AF_VENDOR26": true, + "AF_VENDOR27": true, + "AF_VENDOR28": true, + "AF_VENDOR29": true, + "AF_VENDOR30": true, + "AF_VENDOR31": true, + "AF_VENDOR32": true, + "AF_VENDOR33": true, + "AF_VENDOR34": 
true, + "AF_VENDOR35": true, + "AF_VENDOR36": true, + "AF_VENDOR37": true, + "AF_VENDOR38": true, + "AF_VENDOR39": true, + "AF_VENDOR40": true, + "AF_VENDOR41": true, + "AF_VENDOR42": true, + "AF_VENDOR43": true, + "AF_VENDOR44": true, + "AF_VENDOR45": true, + "AF_VENDOR46": true, + "AF_VENDOR47": true, + "AF_WANPIPE": true, + "AF_X25": true, + "AI_CANONNAME": true, + "AI_NUMERICHOST": true, + "AI_PASSIVE": true, + "APPLICATION_ERROR": true, + "ARPHRD_ADAPT": true, + "ARPHRD_APPLETLK": true, + "ARPHRD_ARCNET": true, + "ARPHRD_ASH": true, + "ARPHRD_ATM": true, + "ARPHRD_AX25": true, + "ARPHRD_BIF": true, + "ARPHRD_CHAOS": true, + "ARPHRD_CISCO": true, + "ARPHRD_CSLIP": true, + "ARPHRD_CSLIP6": true, + "ARPHRD_DDCMP": true, + "ARPHRD_DLCI": true, + "ARPHRD_ECONET": true, + "ARPHRD_EETHER": true, + "ARPHRD_ETHER": true, + "ARPHRD_EUI64": true, + "ARPHRD_FCAL": true, + "ARPHRD_FCFABRIC": true, + "ARPHRD_FCPL": true, + "ARPHRD_FCPP": true, + "ARPHRD_FDDI": true, + "ARPHRD_FRAD": true, + "ARPHRD_FRELAY": true, + "ARPHRD_HDLC": true, + "ARPHRD_HIPPI": true, + "ARPHRD_HWX25": true, + "ARPHRD_IEEE1394": true, + "ARPHRD_IEEE802": true, + "ARPHRD_IEEE80211": true, + "ARPHRD_IEEE80211_PRISM": true, + "ARPHRD_IEEE80211_RADIOTAP": true, + "ARPHRD_IEEE802154": true, + "ARPHRD_IEEE802154_PHY": true, + "ARPHRD_IEEE802_TR": true, + "ARPHRD_INFINIBAND": true, + "ARPHRD_IPDDP": true, + "ARPHRD_IPGRE": true, + "ARPHRD_IRDA": true, + "ARPHRD_LAPB": true, + "ARPHRD_LOCALTLK": true, + "ARPHRD_LOOPBACK": true, + "ARPHRD_METRICOM": true, + "ARPHRD_NETROM": true, + "ARPHRD_NONE": true, + "ARPHRD_PIMREG": true, + "ARPHRD_PPP": true, + "ARPHRD_PRONET": true, + "ARPHRD_RAWHDLC": true, + "ARPHRD_ROSE": true, + "ARPHRD_RSRVD": true, + "ARPHRD_SIT": true, + "ARPHRD_SKIP": true, + "ARPHRD_SLIP": true, + "ARPHRD_SLIP6": true, + "ARPHRD_STRIP": true, + "ARPHRD_TUNNEL": true, + "ARPHRD_TUNNEL6": true, + "ARPHRD_VOID": true, + "ARPHRD_X25": true, + "AUTHTYPE_CLIENT": true, + "AUTHTYPE_SERVER": true, + "Accept": true, + "Accept4": true, + "AcceptEx": true, + "Access": true, + "Acct": true, + "AddrinfoW": true, + "Adjtime": true, + "Adjtimex": true, + "AttachLsf": true, + "B0": true, + "B1000000": true, + "B110": true, + "B115200": true, + "B1152000": true, + "B1200": true, + "B134": true, + "B14400": true, + "B150": true, + "B1500000": true, + "B1800": true, + "B19200": true, + "B200": true, + "B2000000": true, + "B230400": true, + "B2400": true, + "B2500000": true, + "B28800": true, + "B300": true, + "B3000000": true, + "B3500000": true, + "B38400": true, + "B4000000": true, + "B460800": true, + "B4800": true, + "B50": true, + "B500000": true, + "B57600": true, + "B576000": true, + "B600": true, + "B7200": true, + "B75": true, + "B76800": true, + "B921600": true, + "B9600": true, + "BASE_PROTOCOL": true, + "BIOCFEEDBACK": true, + "BIOCFLUSH": true, + "BIOCGBLEN": true, + "BIOCGDIRECTION": true, + "BIOCGDIRFILT": true, + "BIOCGDLT": true, + "BIOCGDLTLIST": true, + "BIOCGETBUFMODE": true, + "BIOCGETIF": true, + "BIOCGETZMAX": true, + "BIOCGFEEDBACK": true, + "BIOCGFILDROP": true, + "BIOCGHDRCMPLT": true, + "BIOCGRSIG": true, + "BIOCGRTIMEOUT": true, + "BIOCGSEESENT": true, + "BIOCGSTATS": true, + "BIOCGSTATSOLD": true, + "BIOCGTSTAMP": true, + "BIOCIMMEDIATE": true, + "BIOCLOCK": true, + "BIOCPROMISC": true, + "BIOCROTZBUF": true, + "BIOCSBLEN": true, + "BIOCSDIRECTION": true, + "BIOCSDIRFILT": true, + "BIOCSDLT": true, + "BIOCSETBUFMODE": true, + "BIOCSETF": true, + "BIOCSETFNR": true, + "BIOCSETIF": true, + "BIOCSETWF": true, 
+ "BIOCSETZBUF": true, + "BIOCSFEEDBACK": true, + "BIOCSFILDROP": true, + "BIOCSHDRCMPLT": true, + "BIOCSRSIG": true, + "BIOCSRTIMEOUT": true, + "BIOCSSEESENT": true, + "BIOCSTCPF": true, + "BIOCSTSTAMP": true, + "BIOCSUDPF": true, + "BIOCVERSION": true, + "BPF_A": true, + "BPF_ABS": true, + "BPF_ADD": true, + "BPF_ALIGNMENT": true, + "BPF_ALIGNMENT32": true, + "BPF_ALU": true, + "BPF_AND": true, + "BPF_B": true, + "BPF_BUFMODE_BUFFER": true, + "BPF_BUFMODE_ZBUF": true, + "BPF_DFLTBUFSIZE": true, + "BPF_DIRECTION_IN": true, + "BPF_DIRECTION_OUT": true, + "BPF_DIV": true, + "BPF_H": true, + "BPF_IMM": true, + "BPF_IND": true, + "BPF_JA": true, + "BPF_JEQ": true, + "BPF_JGE": true, + "BPF_JGT": true, + "BPF_JMP": true, + "BPF_JSET": true, + "BPF_K": true, + "BPF_LD": true, + "BPF_LDX": true, + "BPF_LEN": true, + "BPF_LSH": true, + "BPF_MAJOR_VERSION": true, + "BPF_MAXBUFSIZE": true, + "BPF_MAXINSNS": true, + "BPF_MEM": true, + "BPF_MEMWORDS": true, + "BPF_MINBUFSIZE": true, + "BPF_MINOR_VERSION": true, + "BPF_MISC": true, + "BPF_MSH": true, + "BPF_MUL": true, + "BPF_NEG": true, + "BPF_OR": true, + "BPF_RELEASE": true, + "BPF_RET": true, + "BPF_RSH": true, + "BPF_ST": true, + "BPF_STX": true, + "BPF_SUB": true, + "BPF_TAX": true, + "BPF_TXA": true, + "BPF_T_BINTIME": true, + "BPF_T_BINTIME_FAST": true, + "BPF_T_BINTIME_MONOTONIC": true, + "BPF_T_BINTIME_MONOTONIC_FAST": true, + "BPF_T_FAST": true, + "BPF_T_FLAG_MASK": true, + "BPF_T_FORMAT_MASK": true, + "BPF_T_MICROTIME": true, + "BPF_T_MICROTIME_FAST": true, + "BPF_T_MICROTIME_MONOTONIC": true, + "BPF_T_MICROTIME_MONOTONIC_FAST": true, + "BPF_T_MONOTONIC": true, + "BPF_T_MONOTONIC_FAST": true, + "BPF_T_NANOTIME": true, + "BPF_T_NANOTIME_FAST": true, + "BPF_T_NANOTIME_MONOTONIC": true, + "BPF_T_NANOTIME_MONOTONIC_FAST": true, + "BPF_T_NONE": true, + "BPF_T_NORMAL": true, + "BPF_W": true, + "BPF_X": true, + "BRKINT": true, + "Bind": true, + "BindToDevice": true, + "BpfBuflen": true, + "BpfDatalink": true, + "BpfHdr": true, + "BpfHeadercmpl": true, + "BpfInsn": true, + "BpfInterface": true, + "BpfJump": true, + "BpfProgram": true, + "BpfStat": true, + "BpfStats": true, + "BpfStmt": true, + "BpfTimeout": true, + "BpfTimeval": true, + "BpfVersion": true, + "BpfZbuf": true, + "BpfZbufHeader": true, + "ByHandleFileInformation": true, + "BytePtrFromString": true, + "ByteSliceFromString": true, + "CCR0_FLUSH": true, + "CERT_CHAIN_POLICY_AUTHENTICODE": true, + "CERT_CHAIN_POLICY_AUTHENTICODE_TS": true, + "CERT_CHAIN_POLICY_BASE": true, + "CERT_CHAIN_POLICY_BASIC_CONSTRAINTS": true, + "CERT_CHAIN_POLICY_EV": true, + "CERT_CHAIN_POLICY_MICROSOFT_ROOT": true, + "CERT_CHAIN_POLICY_NT_AUTH": true, + "CERT_CHAIN_POLICY_SSL": true, + "CERT_E_CN_NO_MATCH": true, + "CERT_E_EXPIRED": true, + "CERT_E_PURPOSE": true, + "CERT_E_ROLE": true, + "CERT_E_UNTRUSTEDROOT": true, + "CERT_STORE_ADD_ALWAYS": true, + "CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG": true, + "CERT_STORE_PROV_MEMORY": true, + "CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT": true, + "CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT": true, + "CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT": true, + "CERT_TRUST_HAS_NOT_SUPPORTED_CRITICAL_EXT": true, + "CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT": true, + "CERT_TRUST_INVALID_BASIC_CONSTRAINTS": true, + "CERT_TRUST_INVALID_EXTENSION": true, + "CERT_TRUST_INVALID_NAME_CONSTRAINTS": true, + "CERT_TRUST_INVALID_POLICY_CONSTRAINTS": true, + "CERT_TRUST_IS_CYCLIC": true, + "CERT_TRUST_IS_EXPLICIT_DISTRUST": true, + "CERT_TRUST_IS_NOT_SIGNATURE_VALID": true, + 
"CERT_TRUST_IS_NOT_TIME_VALID": true, + "CERT_TRUST_IS_NOT_VALID_FOR_USAGE": true, + "CERT_TRUST_IS_OFFLINE_REVOCATION": true, + "CERT_TRUST_IS_REVOKED": true, + "CERT_TRUST_IS_UNTRUSTED_ROOT": true, + "CERT_TRUST_NO_ERROR": true, + "CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY": true, + "CERT_TRUST_REVOCATION_STATUS_UNKNOWN": true, + "CFLUSH": true, + "CLOCAL": true, + "CLONE_CHILD_CLEARTID": true, + "CLONE_CHILD_SETTID": true, + "CLONE_CSIGNAL": true, + "CLONE_DETACHED": true, + "CLONE_FILES": true, + "CLONE_FS": true, + "CLONE_IO": true, + "CLONE_NEWIPC": true, + "CLONE_NEWNET": true, + "CLONE_NEWNS": true, + "CLONE_NEWPID": true, + "CLONE_NEWUSER": true, + "CLONE_NEWUTS": true, + "CLONE_PARENT": true, + "CLONE_PARENT_SETTID": true, + "CLONE_PID": true, + "CLONE_PTRACE": true, + "CLONE_SETTLS": true, + "CLONE_SIGHAND": true, + "CLONE_SYSVSEM": true, + "CLONE_THREAD": true, + "CLONE_UNTRACED": true, + "CLONE_VFORK": true, + "CLONE_VM": true, + "CPUID_CFLUSH": true, + "CREAD": true, + "CREATE_ALWAYS": true, + "CREATE_NEW": true, + "CREATE_NEW_PROCESS_GROUP": true, + "CREATE_UNICODE_ENVIRONMENT": true, + "CRYPT_DEFAULT_CONTAINER_OPTIONAL": true, + "CRYPT_DELETEKEYSET": true, + "CRYPT_MACHINE_KEYSET": true, + "CRYPT_NEWKEYSET": true, + "CRYPT_SILENT": true, + "CRYPT_VERIFYCONTEXT": true, + "CS5": true, + "CS6": true, + "CS7": true, + "CS8": true, + "CSIZE": true, + "CSTART": true, + "CSTATUS": true, + "CSTOP": true, + "CSTOPB": true, + "CSUSP": true, + "CTL_MAXNAME": true, + "CTL_NET": true, + "CTL_QUERY": true, + "CTRL_BREAK_EVENT": true, + "CTRL_C_EVENT": true, + "CancelIo": true, + "CancelIoEx": true, + "CertAddCertificateContextToStore": true, + "CertChainContext": true, + "CertChainElement": true, + "CertChainPara": true, + "CertChainPolicyPara": true, + "CertChainPolicyStatus": true, + "CertCloseStore": true, + "CertContext": true, + "CertCreateCertificateContext": true, + "CertEnhKeyUsage": true, + "CertEnumCertificatesInStore": true, + "CertFreeCertificateChain": true, + "CertFreeCertificateContext": true, + "CertGetCertificateChain": true, + "CertInfo": true, + "CertOpenStore": true, + "CertOpenSystemStore": true, + "CertRevocationCrlInfo": true, + "CertRevocationInfo": true, + "CertSimpleChain": true, + "CertTrustListInfo": true, + "CertTrustStatus": true, + "CertUsageMatch": true, + "CertVerifyCertificateChainPolicy": true, + "Chdir": true, + "CheckBpfVersion": true, + "Chflags": true, + "Chmod": true, + "Chown": true, + "Chroot": true, + "Clearenv": true, + "Close": true, + "CloseHandle": true, + "CloseOnExec": true, + "Closesocket": true, + "CmsgLen": true, + "CmsgSpace": true, + "Cmsghdr": true, + "CommandLineToArgv": true, + "ComputerName": true, + "Conn": true, + "Connect": true, + "ConnectEx": true, + "ConvertSidToStringSid": true, + "ConvertStringSidToSid": true, + "CopySid": true, + "Creat": true, + "CreateDirectory": true, + "CreateFile": true, + "CreateFileMapping": true, + "CreateHardLink": true, + "CreateIoCompletionPort": true, + "CreatePipe": true, + "CreateProcess": true, + "CreateProcessAsUser": true, + "CreateSymbolicLink": true, + "CreateToolhelp32Snapshot": true, + "Credential": true, + "CryptAcquireContext": true, + "CryptGenRandom": true, + "CryptReleaseContext": true, + "DIOCBSFLUSH": true, + "DIOCOSFPFLUSH": true, + "DLL": true, + "DLLError": true, + "DLT_A429": true, + "DLT_A653_ICM": true, + "DLT_AIRONET_HEADER": true, + "DLT_AOS": true, + "DLT_APPLE_IP_OVER_IEEE1394": true, + "DLT_ARCNET": true, + "DLT_ARCNET_LINUX": true, + "DLT_ATM_CLIP": true, + 
"DLT_ATM_RFC1483": true, + "DLT_AURORA": true, + "DLT_AX25": true, + "DLT_AX25_KISS": true, + "DLT_BACNET_MS_TP": true, + "DLT_BLUETOOTH_HCI_H4": true, + "DLT_BLUETOOTH_HCI_H4_WITH_PHDR": true, + "DLT_CAN20B": true, + "DLT_CAN_SOCKETCAN": true, + "DLT_CHAOS": true, + "DLT_CHDLC": true, + "DLT_CISCO_IOS": true, + "DLT_C_HDLC": true, + "DLT_C_HDLC_WITH_DIR": true, + "DLT_DBUS": true, + "DLT_DECT": true, + "DLT_DOCSIS": true, + "DLT_DVB_CI": true, + "DLT_ECONET": true, + "DLT_EN10MB": true, + "DLT_EN3MB": true, + "DLT_ENC": true, + "DLT_ERF": true, + "DLT_ERF_ETH": true, + "DLT_ERF_POS": true, + "DLT_FC_2": true, + "DLT_FC_2_WITH_FRAME_DELIMS": true, + "DLT_FDDI": true, + "DLT_FLEXRAY": true, + "DLT_FRELAY": true, + "DLT_FRELAY_WITH_DIR": true, + "DLT_GCOM_SERIAL": true, + "DLT_GCOM_T1E1": true, + "DLT_GPF_F": true, + "DLT_GPF_T": true, + "DLT_GPRS_LLC": true, + "DLT_GSMTAP_ABIS": true, + "DLT_GSMTAP_UM": true, + "DLT_HDLC": true, + "DLT_HHDLC": true, + "DLT_HIPPI": true, + "DLT_IBM_SN": true, + "DLT_IBM_SP": true, + "DLT_IEEE802": true, + "DLT_IEEE802_11": true, + "DLT_IEEE802_11_RADIO": true, + "DLT_IEEE802_11_RADIO_AVS": true, + "DLT_IEEE802_15_4": true, + "DLT_IEEE802_15_4_LINUX": true, + "DLT_IEEE802_15_4_NOFCS": true, + "DLT_IEEE802_15_4_NONASK_PHY": true, + "DLT_IEEE802_16_MAC_CPS": true, + "DLT_IEEE802_16_MAC_CPS_RADIO": true, + "DLT_IPFILTER": true, + "DLT_IPMB": true, + "DLT_IPMB_LINUX": true, + "DLT_IPNET": true, + "DLT_IPOIB": true, + "DLT_IPV4": true, + "DLT_IPV6": true, + "DLT_IP_OVER_FC": true, + "DLT_JUNIPER_ATM1": true, + "DLT_JUNIPER_ATM2": true, + "DLT_JUNIPER_ATM_CEMIC": true, + "DLT_JUNIPER_CHDLC": true, + "DLT_JUNIPER_ES": true, + "DLT_JUNIPER_ETHER": true, + "DLT_JUNIPER_FIBRECHANNEL": true, + "DLT_JUNIPER_FRELAY": true, + "DLT_JUNIPER_GGSN": true, + "DLT_JUNIPER_ISM": true, + "DLT_JUNIPER_MFR": true, + "DLT_JUNIPER_MLFR": true, + "DLT_JUNIPER_MLPPP": true, + "DLT_JUNIPER_MONITOR": true, + "DLT_JUNIPER_PIC_PEER": true, + "DLT_JUNIPER_PPP": true, + "DLT_JUNIPER_PPPOE": true, + "DLT_JUNIPER_PPPOE_ATM": true, + "DLT_JUNIPER_SERVICES": true, + "DLT_JUNIPER_SRX_E2E": true, + "DLT_JUNIPER_ST": true, + "DLT_JUNIPER_VP": true, + "DLT_JUNIPER_VS": true, + "DLT_LAPB_WITH_DIR": true, + "DLT_LAPD": true, + "DLT_LIN": true, + "DLT_LINUX_EVDEV": true, + "DLT_LINUX_IRDA": true, + "DLT_LINUX_LAPD": true, + "DLT_LINUX_PPP_WITHDIRECTION": true, + "DLT_LINUX_SLL": true, + "DLT_LOOP": true, + "DLT_LTALK": true, + "DLT_MATCHING_MAX": true, + "DLT_MATCHING_MIN": true, + "DLT_MFR": true, + "DLT_MOST": true, + "DLT_MPEG_2_TS": true, + "DLT_MPLS": true, + "DLT_MTP2": true, + "DLT_MTP2_WITH_PHDR": true, + "DLT_MTP3": true, + "DLT_MUX27010": true, + "DLT_NETANALYZER": true, + "DLT_NETANALYZER_TRANSPARENT": true, + "DLT_NFC_LLCP": true, + "DLT_NFLOG": true, + "DLT_NG40": true, + "DLT_NULL": true, + "DLT_PCI_EXP": true, + "DLT_PFLOG": true, + "DLT_PFSYNC": true, + "DLT_PPI": true, + "DLT_PPP": true, + "DLT_PPP_BSDOS": true, + "DLT_PPP_ETHER": true, + "DLT_PPP_PPPD": true, + "DLT_PPP_SERIAL": true, + "DLT_PPP_WITH_DIR": true, + "DLT_PPP_WITH_DIRECTION": true, + "DLT_PRISM_HEADER": true, + "DLT_PRONET": true, + "DLT_RAIF1": true, + "DLT_RAW": true, + "DLT_RAWAF_MASK": true, + "DLT_RIO": true, + "DLT_SCCP": true, + "DLT_SITA": true, + "DLT_SLIP": true, + "DLT_SLIP_BSDOS": true, + "DLT_STANAG_5066_D_PDU": true, + "DLT_SUNATM": true, + "DLT_SYMANTEC_FIREWALL": true, + "DLT_TZSP": true, + "DLT_USB": true, + "DLT_USB_LINUX": true, + "DLT_USB_LINUX_MMAPPED": true, + "DLT_USER0": true, + "DLT_USER1": true, 
+ "DLT_USER10": true, + "DLT_USER11": true, + "DLT_USER12": true, + "DLT_USER13": true, + "DLT_USER14": true, + "DLT_USER15": true, + "DLT_USER2": true, + "DLT_USER3": true, + "DLT_USER4": true, + "DLT_USER5": true, + "DLT_USER6": true, + "DLT_USER7": true, + "DLT_USER8": true, + "DLT_USER9": true, + "DLT_WIHART": true, + "DLT_X2E_SERIAL": true, + "DLT_X2E_XORAYA": true, + "DNSMXData": true, + "DNSPTRData": true, + "DNSRecord": true, + "DNSSRVData": true, + "DNSTXTData": true, + "DNS_INFO_NO_RECORDS": true, + "DNS_TYPE_A": true, + "DNS_TYPE_A6": true, + "DNS_TYPE_AAAA": true, + "DNS_TYPE_ADDRS": true, + "DNS_TYPE_AFSDB": true, + "DNS_TYPE_ALL": true, + "DNS_TYPE_ANY": true, + "DNS_TYPE_ATMA": true, + "DNS_TYPE_AXFR": true, + "DNS_TYPE_CERT": true, + "DNS_TYPE_CNAME": true, + "DNS_TYPE_DHCID": true, + "DNS_TYPE_DNAME": true, + "DNS_TYPE_DNSKEY": true, + "DNS_TYPE_DS": true, + "DNS_TYPE_EID": true, + "DNS_TYPE_GID": true, + "DNS_TYPE_GPOS": true, + "DNS_TYPE_HINFO": true, + "DNS_TYPE_ISDN": true, + "DNS_TYPE_IXFR": true, + "DNS_TYPE_KEY": true, + "DNS_TYPE_KX": true, + "DNS_TYPE_LOC": true, + "DNS_TYPE_MAILA": true, + "DNS_TYPE_MAILB": true, + "DNS_TYPE_MB": true, + "DNS_TYPE_MD": true, + "DNS_TYPE_MF": true, + "DNS_TYPE_MG": true, + "DNS_TYPE_MINFO": true, + "DNS_TYPE_MR": true, + "DNS_TYPE_MX": true, + "DNS_TYPE_NAPTR": true, + "DNS_TYPE_NBSTAT": true, + "DNS_TYPE_NIMLOC": true, + "DNS_TYPE_NS": true, + "DNS_TYPE_NSAP": true, + "DNS_TYPE_NSAPPTR": true, + "DNS_TYPE_NSEC": true, + "DNS_TYPE_NULL": true, + "DNS_TYPE_NXT": true, + "DNS_TYPE_OPT": true, + "DNS_TYPE_PTR": true, + "DNS_TYPE_PX": true, + "DNS_TYPE_RP": true, + "DNS_TYPE_RRSIG": true, + "DNS_TYPE_RT": true, + "DNS_TYPE_SIG": true, + "DNS_TYPE_SINK": true, + "DNS_TYPE_SOA": true, + "DNS_TYPE_SRV": true, + "DNS_TYPE_TEXT": true, + "DNS_TYPE_TKEY": true, + "DNS_TYPE_TSIG": true, + "DNS_TYPE_UID": true, + "DNS_TYPE_UINFO": true, + "DNS_TYPE_UNSPEC": true, + "DNS_TYPE_WINS": true, + "DNS_TYPE_WINSR": true, + "DNS_TYPE_WKS": true, + "DNS_TYPE_X25": true, + "DT_BLK": true, + "DT_CHR": true, + "DT_DIR": true, + "DT_FIFO": true, + "DT_LNK": true, + "DT_REG": true, + "DT_SOCK": true, + "DT_UNKNOWN": true, + "DT_WHT": true, + "DUPLICATE_CLOSE_SOURCE": true, + "DUPLICATE_SAME_ACCESS": true, + "DeleteFile": true, + "DetachLsf": true, + "DeviceIoControl": true, + "Dirent": true, + "DnsNameCompare": true, + "DnsQuery": true, + "DnsRecordListFree": true, + "DnsSectionAdditional": true, + "DnsSectionAnswer": true, + "DnsSectionAuthority": true, + "DnsSectionQuestion": true, + "Dup": true, + "Dup2": true, + "Dup3": true, + "DuplicateHandle": true, + "E2BIG": true, + "EACCES": true, + "EADDRINUSE": true, + "EADDRNOTAVAIL": true, + "EADV": true, + "EAFNOSUPPORT": true, + "EAGAIN": true, + "EALREADY": true, + "EAUTH": true, + "EBADARCH": true, + "EBADE": true, + "EBADEXEC": true, + "EBADF": true, + "EBADFD": true, + "EBADMACHO": true, + "EBADMSG": true, + "EBADR": true, + "EBADRPC": true, + "EBADRQC": true, + "EBADSLT": true, + "EBFONT": true, + "EBUSY": true, + "ECANCELED": true, + "ECAPMODE": true, + "ECHILD": true, + "ECHO": true, + "ECHOCTL": true, + "ECHOE": true, + "ECHOK": true, + "ECHOKE": true, + "ECHONL": true, + "ECHOPRT": true, + "ECHRNG": true, + "ECOMM": true, + "ECONNABORTED": true, + "ECONNREFUSED": true, + "ECONNRESET": true, + "EDEADLK": true, + "EDEADLOCK": true, + "EDESTADDRREQ": true, + "EDEVERR": true, + "EDOM": true, + "EDOOFUS": true, + "EDOTDOT": true, + "EDQUOT": true, + "EEXIST": true, + "EFAULT": true, + "EFBIG": true, + 
"EFER_LMA": true, + "EFER_LME": true, + "EFER_NXE": true, + "EFER_SCE": true, + "EFTYPE": true, + "EHOSTDOWN": true, + "EHOSTUNREACH": true, + "EHWPOISON": true, + "EIDRM": true, + "EILSEQ": true, + "EINPROGRESS": true, + "EINTR": true, + "EINVAL": true, + "EIO": true, + "EIPSEC": true, + "EISCONN": true, + "EISDIR": true, + "EISNAM": true, + "EKEYEXPIRED": true, + "EKEYREJECTED": true, + "EKEYREVOKED": true, + "EL2HLT": true, + "EL2NSYNC": true, + "EL3HLT": true, + "EL3RST": true, + "ELAST": true, + "ELF_NGREG": true, + "ELF_PRARGSZ": true, + "ELIBACC": true, + "ELIBBAD": true, + "ELIBEXEC": true, + "ELIBMAX": true, + "ELIBSCN": true, + "ELNRNG": true, + "ELOOP": true, + "EMEDIUMTYPE": true, + "EMFILE": true, + "EMLINK": true, + "EMSGSIZE": true, + "EMT_TAGOVF": true, + "EMULTIHOP": true, + "EMUL_ENABLED": true, + "EMUL_LINUX": true, + "EMUL_LINUX32": true, + "EMUL_MAXID": true, + "EMUL_NATIVE": true, + "ENAMETOOLONG": true, + "ENAVAIL": true, + "ENDRUNDISC": true, + "ENEEDAUTH": true, + "ENETDOWN": true, + "ENETRESET": true, + "ENETUNREACH": true, + "ENFILE": true, + "ENOANO": true, + "ENOATTR": true, + "ENOBUFS": true, + "ENOCSI": true, + "ENODATA": true, + "ENODEV": true, + "ENOENT": true, + "ENOEXEC": true, + "ENOKEY": true, + "ENOLCK": true, + "ENOLINK": true, + "ENOMEDIUM": true, + "ENOMEM": true, + "ENOMSG": true, + "ENONET": true, + "ENOPKG": true, + "ENOPOLICY": true, + "ENOPROTOOPT": true, + "ENOSPC": true, + "ENOSR": true, + "ENOSTR": true, + "ENOSYS": true, + "ENOTBLK": true, + "ENOTCAPABLE": true, + "ENOTCONN": true, + "ENOTDIR": true, + "ENOTEMPTY": true, + "ENOTNAM": true, + "ENOTRECOVERABLE": true, + "ENOTSOCK": true, + "ENOTSUP": true, + "ENOTTY": true, + "ENOTUNIQ": true, + "ENXIO": true, + "EN_SW_CTL_INF": true, + "EN_SW_CTL_PREC": true, + "EN_SW_CTL_ROUND": true, + "EN_SW_DATACHAIN": true, + "EN_SW_DENORM": true, + "EN_SW_INVOP": true, + "EN_SW_OVERFLOW": true, + "EN_SW_PRECLOSS": true, + "EN_SW_UNDERFLOW": true, + "EN_SW_ZERODIV": true, + "EOPNOTSUPP": true, + "EOVERFLOW": true, + "EOWNERDEAD": true, + "EPERM": true, + "EPFNOSUPPORT": true, + "EPIPE": true, + "EPOLLERR": true, + "EPOLLET": true, + "EPOLLHUP": true, + "EPOLLIN": true, + "EPOLLMSG": true, + "EPOLLONESHOT": true, + "EPOLLOUT": true, + "EPOLLPRI": true, + "EPOLLRDBAND": true, + "EPOLLRDHUP": true, + "EPOLLRDNORM": true, + "EPOLLWRBAND": true, + "EPOLLWRNORM": true, + "EPOLL_CLOEXEC": true, + "EPOLL_CTL_ADD": true, + "EPOLL_CTL_DEL": true, + "EPOLL_CTL_MOD": true, + "EPOLL_NONBLOCK": true, + "EPROCLIM": true, + "EPROCUNAVAIL": true, + "EPROGMISMATCH": true, + "EPROGUNAVAIL": true, + "EPROTO": true, + "EPROTONOSUPPORT": true, + "EPROTOTYPE": true, + "EPWROFF": true, + "ERANGE": true, + "EREMCHG": true, + "EREMOTE": true, + "EREMOTEIO": true, + "ERESTART": true, + "ERFKILL": true, + "EROFS": true, + "ERPCMISMATCH": true, + "ERROR_ACCESS_DENIED": true, + "ERROR_ALREADY_EXISTS": true, + "ERROR_BROKEN_PIPE": true, + "ERROR_BUFFER_OVERFLOW": true, + "ERROR_DIR_NOT_EMPTY": true, + "ERROR_ENVVAR_NOT_FOUND": true, + "ERROR_FILE_EXISTS": true, + "ERROR_FILE_NOT_FOUND": true, + "ERROR_HANDLE_EOF": true, + "ERROR_INSUFFICIENT_BUFFER": true, + "ERROR_IO_PENDING": true, + "ERROR_MOD_NOT_FOUND": true, + "ERROR_MORE_DATA": true, + "ERROR_NETNAME_DELETED": true, + "ERROR_NOT_FOUND": true, + "ERROR_NO_MORE_FILES": true, + "ERROR_OPERATION_ABORTED": true, + "ERROR_PATH_NOT_FOUND": true, + "ERROR_PRIVILEGE_NOT_HELD": true, + "ERROR_PROC_NOT_FOUND": true, + "ESHLIBVERS": true, + "ESHUTDOWN": true, + "ESOCKTNOSUPPORT": true, + 
"ESPIPE": true, + "ESRCH": true, + "ESRMNT": true, + "ESTALE": true, + "ESTRPIPE": true, + "ETHERCAP_JUMBO_MTU": true, + "ETHERCAP_VLAN_HWTAGGING": true, + "ETHERCAP_VLAN_MTU": true, + "ETHERMIN": true, + "ETHERMTU": true, + "ETHERMTU_JUMBO": true, + "ETHERTYPE_8023": true, + "ETHERTYPE_AARP": true, + "ETHERTYPE_ACCTON": true, + "ETHERTYPE_AEONIC": true, + "ETHERTYPE_ALPHA": true, + "ETHERTYPE_AMBER": true, + "ETHERTYPE_AMOEBA": true, + "ETHERTYPE_AOE": true, + "ETHERTYPE_APOLLO": true, + "ETHERTYPE_APOLLODOMAIN": true, + "ETHERTYPE_APPLETALK": true, + "ETHERTYPE_APPLITEK": true, + "ETHERTYPE_ARGONAUT": true, + "ETHERTYPE_ARP": true, + "ETHERTYPE_AT": true, + "ETHERTYPE_ATALK": true, + "ETHERTYPE_ATOMIC": true, + "ETHERTYPE_ATT": true, + "ETHERTYPE_ATTSTANFORD": true, + "ETHERTYPE_AUTOPHON": true, + "ETHERTYPE_AXIS": true, + "ETHERTYPE_BCLOOP": true, + "ETHERTYPE_BOFL": true, + "ETHERTYPE_CABLETRON": true, + "ETHERTYPE_CHAOS": true, + "ETHERTYPE_COMDESIGN": true, + "ETHERTYPE_COMPUGRAPHIC": true, + "ETHERTYPE_COUNTERPOINT": true, + "ETHERTYPE_CRONUS": true, + "ETHERTYPE_CRONUSVLN": true, + "ETHERTYPE_DCA": true, + "ETHERTYPE_DDE": true, + "ETHERTYPE_DEBNI": true, + "ETHERTYPE_DECAM": true, + "ETHERTYPE_DECCUST": true, + "ETHERTYPE_DECDIAG": true, + "ETHERTYPE_DECDNS": true, + "ETHERTYPE_DECDTS": true, + "ETHERTYPE_DECEXPER": true, + "ETHERTYPE_DECLAST": true, + "ETHERTYPE_DECLTM": true, + "ETHERTYPE_DECMUMPS": true, + "ETHERTYPE_DECNETBIOS": true, + "ETHERTYPE_DELTACON": true, + "ETHERTYPE_DIDDLE": true, + "ETHERTYPE_DLOG1": true, + "ETHERTYPE_DLOG2": true, + "ETHERTYPE_DN": true, + "ETHERTYPE_DOGFIGHT": true, + "ETHERTYPE_DSMD": true, + "ETHERTYPE_ECMA": true, + "ETHERTYPE_ENCRYPT": true, + "ETHERTYPE_ES": true, + "ETHERTYPE_EXCELAN": true, + "ETHERTYPE_EXPERDATA": true, + "ETHERTYPE_FLIP": true, + "ETHERTYPE_FLOWCONTROL": true, + "ETHERTYPE_FRARP": true, + "ETHERTYPE_GENDYN": true, + "ETHERTYPE_HAYES": true, + "ETHERTYPE_HIPPI_FP": true, + "ETHERTYPE_HITACHI": true, + "ETHERTYPE_HP": true, + "ETHERTYPE_IEEEPUP": true, + "ETHERTYPE_IEEEPUPAT": true, + "ETHERTYPE_IMLBL": true, + "ETHERTYPE_IMLBLDIAG": true, + "ETHERTYPE_IP": true, + "ETHERTYPE_IPAS": true, + "ETHERTYPE_IPV6": true, + "ETHERTYPE_IPX": true, + "ETHERTYPE_IPXNEW": true, + "ETHERTYPE_KALPANA": true, + "ETHERTYPE_LANBRIDGE": true, + "ETHERTYPE_LANPROBE": true, + "ETHERTYPE_LAT": true, + "ETHERTYPE_LBACK": true, + "ETHERTYPE_LITTLE": true, + "ETHERTYPE_LLDP": true, + "ETHERTYPE_LOGICRAFT": true, + "ETHERTYPE_LOOPBACK": true, + "ETHERTYPE_MATRA": true, + "ETHERTYPE_MAX": true, + "ETHERTYPE_MERIT": true, + "ETHERTYPE_MICP": true, + "ETHERTYPE_MOPDL": true, + "ETHERTYPE_MOPRC": true, + "ETHERTYPE_MOTOROLA": true, + "ETHERTYPE_MPLS": true, + "ETHERTYPE_MPLS_MCAST": true, + "ETHERTYPE_MUMPS": true, + "ETHERTYPE_NBPCC": true, + "ETHERTYPE_NBPCLAIM": true, + "ETHERTYPE_NBPCLREQ": true, + "ETHERTYPE_NBPCLRSP": true, + "ETHERTYPE_NBPCREQ": true, + "ETHERTYPE_NBPCRSP": true, + "ETHERTYPE_NBPDG": true, + "ETHERTYPE_NBPDGB": true, + "ETHERTYPE_NBPDLTE": true, + "ETHERTYPE_NBPRAR": true, + "ETHERTYPE_NBPRAS": true, + "ETHERTYPE_NBPRST": true, + "ETHERTYPE_NBPSCD": true, + "ETHERTYPE_NBPVCD": true, + "ETHERTYPE_NBS": true, + "ETHERTYPE_NCD": true, + "ETHERTYPE_NESTAR": true, + "ETHERTYPE_NETBEUI": true, + "ETHERTYPE_NOVELL": true, + "ETHERTYPE_NS": true, + "ETHERTYPE_NSAT": true, + "ETHERTYPE_NSCOMPAT": true, + "ETHERTYPE_NTRAILER": true, + "ETHERTYPE_OS9": true, + "ETHERTYPE_OS9NET": true, + "ETHERTYPE_PACER": true, + "ETHERTYPE_PAE": true, 
+ "ETHERTYPE_PCS": true, + "ETHERTYPE_PLANNING": true, + "ETHERTYPE_PPP": true, + "ETHERTYPE_PPPOE": true, + "ETHERTYPE_PPPOEDISC": true, + "ETHERTYPE_PRIMENTS": true, + "ETHERTYPE_PUP": true, + "ETHERTYPE_PUPAT": true, + "ETHERTYPE_QINQ": true, + "ETHERTYPE_RACAL": true, + "ETHERTYPE_RATIONAL": true, + "ETHERTYPE_RAWFR": true, + "ETHERTYPE_RCL": true, + "ETHERTYPE_RDP": true, + "ETHERTYPE_RETIX": true, + "ETHERTYPE_REVARP": true, + "ETHERTYPE_SCA": true, + "ETHERTYPE_SECTRA": true, + "ETHERTYPE_SECUREDATA": true, + "ETHERTYPE_SGITW": true, + "ETHERTYPE_SG_BOUNCE": true, + "ETHERTYPE_SG_DIAG": true, + "ETHERTYPE_SG_NETGAMES": true, + "ETHERTYPE_SG_RESV": true, + "ETHERTYPE_SIMNET": true, + "ETHERTYPE_SLOW": true, + "ETHERTYPE_SLOWPROTOCOLS": true, + "ETHERTYPE_SNA": true, + "ETHERTYPE_SNMP": true, + "ETHERTYPE_SONIX": true, + "ETHERTYPE_SPIDER": true, + "ETHERTYPE_SPRITE": true, + "ETHERTYPE_STP": true, + "ETHERTYPE_TALARIS": true, + "ETHERTYPE_TALARISMC": true, + "ETHERTYPE_TCPCOMP": true, + "ETHERTYPE_TCPSM": true, + "ETHERTYPE_TEC": true, + "ETHERTYPE_TIGAN": true, + "ETHERTYPE_TRAIL": true, + "ETHERTYPE_TRANSETHER": true, + "ETHERTYPE_TYMSHARE": true, + "ETHERTYPE_UBBST": true, + "ETHERTYPE_UBDEBUG": true, + "ETHERTYPE_UBDIAGLOOP": true, + "ETHERTYPE_UBDL": true, + "ETHERTYPE_UBNIU": true, + "ETHERTYPE_UBNMC": true, + "ETHERTYPE_VALID": true, + "ETHERTYPE_VARIAN": true, + "ETHERTYPE_VAXELN": true, + "ETHERTYPE_VEECO": true, + "ETHERTYPE_VEXP": true, + "ETHERTYPE_VGLAB": true, + "ETHERTYPE_VINES": true, + "ETHERTYPE_VINESECHO": true, + "ETHERTYPE_VINESLOOP": true, + "ETHERTYPE_VITAL": true, + "ETHERTYPE_VLAN": true, + "ETHERTYPE_VLTLMAN": true, + "ETHERTYPE_VPROD": true, + "ETHERTYPE_VURESERVED": true, + "ETHERTYPE_WATERLOO": true, + "ETHERTYPE_WELLFLEET": true, + "ETHERTYPE_X25": true, + "ETHERTYPE_X75": true, + "ETHERTYPE_XNSSM": true, + "ETHERTYPE_XTP": true, + "ETHER_ADDR_LEN": true, + "ETHER_ALIGN": true, + "ETHER_CRC_LEN": true, + "ETHER_CRC_POLY_BE": true, + "ETHER_CRC_POLY_LE": true, + "ETHER_HDR_LEN": true, + "ETHER_MAX_DIX_LEN": true, + "ETHER_MAX_LEN": true, + "ETHER_MAX_LEN_JUMBO": true, + "ETHER_MIN_LEN": true, + "ETHER_PPPOE_ENCAP_LEN": true, + "ETHER_TYPE_LEN": true, + "ETHER_VLAN_ENCAP_LEN": true, + "ETH_P_1588": true, + "ETH_P_8021Q": true, + "ETH_P_802_2": true, + "ETH_P_802_3": true, + "ETH_P_AARP": true, + "ETH_P_ALL": true, + "ETH_P_AOE": true, + "ETH_P_ARCNET": true, + "ETH_P_ARP": true, + "ETH_P_ATALK": true, + "ETH_P_ATMFATE": true, + "ETH_P_ATMMPOA": true, + "ETH_P_AX25": true, + "ETH_P_BPQ": true, + "ETH_P_CAIF": true, + "ETH_P_CAN": true, + "ETH_P_CONTROL": true, + "ETH_P_CUST": true, + "ETH_P_DDCMP": true, + "ETH_P_DEC": true, + "ETH_P_DIAG": true, + "ETH_P_DNA_DL": true, + "ETH_P_DNA_RC": true, + "ETH_P_DNA_RT": true, + "ETH_P_DSA": true, + "ETH_P_ECONET": true, + "ETH_P_EDSA": true, + "ETH_P_FCOE": true, + "ETH_P_FIP": true, + "ETH_P_HDLC": true, + "ETH_P_IEEE802154": true, + "ETH_P_IEEEPUP": true, + "ETH_P_IEEEPUPAT": true, + "ETH_P_IP": true, + "ETH_P_IPV6": true, + "ETH_P_IPX": true, + "ETH_P_IRDA": true, + "ETH_P_LAT": true, + "ETH_P_LINK_CTL": true, + "ETH_P_LOCALTALK": true, + "ETH_P_LOOP": true, + "ETH_P_MOBITEX": true, + "ETH_P_MPLS_MC": true, + "ETH_P_MPLS_UC": true, + "ETH_P_PAE": true, + "ETH_P_PAUSE": true, + "ETH_P_PHONET": true, + "ETH_P_PPPTALK": true, + "ETH_P_PPP_DISC": true, + "ETH_P_PPP_MP": true, + "ETH_P_PPP_SES": true, + "ETH_P_PUP": true, + "ETH_P_PUPAT": true, + "ETH_P_RARP": true, + "ETH_P_SCA": true, + "ETH_P_SLOW": true, + 
"ETH_P_SNAP": true, + "ETH_P_TEB": true, + "ETH_P_TIPC": true, + "ETH_P_TRAILER": true, + "ETH_P_TR_802_2": true, + "ETH_P_WAN_PPP": true, + "ETH_P_WCCP": true, + "ETH_P_X25": true, + "ETIME": true, + "ETIMEDOUT": true, + "ETOOMANYREFS": true, + "ETXTBSY": true, + "EUCLEAN": true, + "EUNATCH": true, + "EUSERS": true, + "EVFILT_AIO": true, + "EVFILT_FS": true, + "EVFILT_LIO": true, + "EVFILT_MACHPORT": true, + "EVFILT_PROC": true, + "EVFILT_READ": true, + "EVFILT_SIGNAL": true, + "EVFILT_SYSCOUNT": true, + "EVFILT_THREADMARKER": true, + "EVFILT_TIMER": true, + "EVFILT_USER": true, + "EVFILT_VM": true, + "EVFILT_VNODE": true, + "EVFILT_WRITE": true, + "EV_ADD": true, + "EV_CLEAR": true, + "EV_DELETE": true, + "EV_DISABLE": true, + "EV_DISPATCH": true, + "EV_DROP": true, + "EV_ENABLE": true, + "EV_EOF": true, + "EV_ERROR": true, + "EV_FLAG0": true, + "EV_FLAG1": true, + "EV_ONESHOT": true, + "EV_OOBAND": true, + "EV_POLL": true, + "EV_RECEIPT": true, + "EV_SYSFLAGS": true, + "EWINDOWS": true, + "EWOULDBLOCK": true, + "EXDEV": true, + "EXFULL": true, + "EXTA": true, + "EXTB": true, + "EXTPROC": true, + "Environ": true, + "EpollCreate": true, + "EpollCreate1": true, + "EpollCtl": true, + "EpollEvent": true, + "EpollWait": true, + "Errno": true, + "EscapeArg": true, + "Exchangedata": true, + "Exec": true, + "Exit": true, + "ExitProcess": true, + "FD_CLOEXEC": true, + "FD_SETSIZE": true, + "FILE_ACTION_ADDED": true, + "FILE_ACTION_MODIFIED": true, + "FILE_ACTION_REMOVED": true, + "FILE_ACTION_RENAMED_NEW_NAME": true, + "FILE_ACTION_RENAMED_OLD_NAME": true, + "FILE_APPEND_DATA": true, + "FILE_ATTRIBUTE_ARCHIVE": true, + "FILE_ATTRIBUTE_DIRECTORY": true, + "FILE_ATTRIBUTE_HIDDEN": true, + "FILE_ATTRIBUTE_NORMAL": true, + "FILE_ATTRIBUTE_READONLY": true, + "FILE_ATTRIBUTE_REPARSE_POINT": true, + "FILE_ATTRIBUTE_SYSTEM": true, + "FILE_BEGIN": true, + "FILE_CURRENT": true, + "FILE_END": true, + "FILE_FLAG_BACKUP_SEMANTICS": true, + "FILE_FLAG_OPEN_REPARSE_POINT": true, + "FILE_FLAG_OVERLAPPED": true, + "FILE_LIST_DIRECTORY": true, + "FILE_MAP_COPY": true, + "FILE_MAP_EXECUTE": true, + "FILE_MAP_READ": true, + "FILE_MAP_WRITE": true, + "FILE_NOTIFY_CHANGE_ATTRIBUTES": true, + "FILE_NOTIFY_CHANGE_CREATION": true, + "FILE_NOTIFY_CHANGE_DIR_NAME": true, + "FILE_NOTIFY_CHANGE_FILE_NAME": true, + "FILE_NOTIFY_CHANGE_LAST_ACCESS": true, + "FILE_NOTIFY_CHANGE_LAST_WRITE": true, + "FILE_NOTIFY_CHANGE_SIZE": true, + "FILE_SHARE_DELETE": true, + "FILE_SHARE_READ": true, + "FILE_SHARE_WRITE": true, + "FILE_SKIP_COMPLETION_PORT_ON_SUCCESS": true, + "FILE_SKIP_SET_EVENT_ON_HANDLE": true, + "FILE_TYPE_CHAR": true, + "FILE_TYPE_DISK": true, + "FILE_TYPE_PIPE": true, + "FILE_TYPE_REMOTE": true, + "FILE_TYPE_UNKNOWN": true, + "FILE_WRITE_ATTRIBUTES": true, + "FLUSHO": true, + "FORMAT_MESSAGE_ALLOCATE_BUFFER": true, + "FORMAT_MESSAGE_ARGUMENT_ARRAY": true, + "FORMAT_MESSAGE_FROM_HMODULE": true, + "FORMAT_MESSAGE_FROM_STRING": true, + "FORMAT_MESSAGE_FROM_SYSTEM": true, + "FORMAT_MESSAGE_IGNORE_INSERTS": true, + "FORMAT_MESSAGE_MAX_WIDTH_MASK": true, + "FSCTL_GET_REPARSE_POINT": true, + "F_ADDFILESIGS": true, + "F_ADDSIGS": true, + "F_ALLOCATEALL": true, + "F_ALLOCATECONTIG": true, + "F_CANCEL": true, + "F_CHKCLEAN": true, + "F_CLOSEM": true, + "F_DUP2FD": true, + "F_DUP2FD_CLOEXEC": true, + "F_DUPFD": true, + "F_DUPFD_CLOEXEC": true, + "F_EXLCK": true, + "F_FLUSH_DATA": true, + "F_FREEZE_FS": true, + "F_FSCTL": true, + "F_FSDIRMASK": true, + "F_FSIN": true, + "F_FSINOUT": true, + "F_FSOUT": true, + "F_FSPRIV": true, + 
"F_FSVOID": true, + "F_FULLFSYNC": true, + "F_GETFD": true, + "F_GETFL": true, + "F_GETLEASE": true, + "F_GETLK": true, + "F_GETLK64": true, + "F_GETLKPID": true, + "F_GETNOSIGPIPE": true, + "F_GETOWN": true, + "F_GETOWN_EX": true, + "F_GETPATH": true, + "F_GETPATH_MTMINFO": true, + "F_GETPIPE_SZ": true, + "F_GETPROTECTIONCLASS": true, + "F_GETSIG": true, + "F_GLOBAL_NOCACHE": true, + "F_LOCK": true, + "F_LOG2PHYS": true, + "F_LOG2PHYS_EXT": true, + "F_MARKDEPENDENCY": true, + "F_MAXFD": true, + "F_NOCACHE": true, + "F_NODIRECT": true, + "F_NOTIFY": true, + "F_OGETLK": true, + "F_OK": true, + "F_OSETLK": true, + "F_OSETLKW": true, + "F_PARAM_MASK": true, + "F_PARAM_MAX": true, + "F_PATHPKG_CHECK": true, + "F_PEOFPOSMODE": true, + "F_PREALLOCATE": true, + "F_RDADVISE": true, + "F_RDAHEAD": true, + "F_RDLCK": true, + "F_READAHEAD": true, + "F_READBOOTSTRAP": true, + "F_SETBACKINGSTORE": true, + "F_SETFD": true, + "F_SETFL": true, + "F_SETLEASE": true, + "F_SETLK": true, + "F_SETLK64": true, + "F_SETLKW": true, + "F_SETLKW64": true, + "F_SETLK_REMOTE": true, + "F_SETNOSIGPIPE": true, + "F_SETOWN": true, + "F_SETOWN_EX": true, + "F_SETPIPE_SZ": true, + "F_SETPROTECTIONCLASS": true, + "F_SETSIG": true, + "F_SETSIZE": true, + "F_SHLCK": true, + "F_TEST": true, + "F_THAW_FS": true, + "F_TLOCK": true, + "F_ULOCK": true, + "F_UNLCK": true, + "F_UNLCKSYS": true, + "F_VOLPOSMODE": true, + "F_WRITEBOOTSTRAP": true, + "F_WRLCK": true, + "Faccessat": true, + "Fallocate": true, + "Fbootstraptransfer_t": true, + "Fchdir": true, + "Fchflags": true, + "Fchmod": true, + "Fchmodat": true, + "Fchown": true, + "Fchownat": true, + "FcntlFlock": true, + "FdSet": true, + "Fdatasync": true, + "FileNotifyInformation": true, + "Filetime": true, + "FindClose": true, + "FindFirstFile": true, + "FindNextFile": true, + "Flock": true, + "Flock_t": true, + "FlushBpf": true, + "FlushFileBuffers": true, + "FlushViewOfFile": true, + "ForkExec": true, + "ForkLock": true, + "FormatMessage": true, + "Fpathconf": true, + "FreeAddrInfoW": true, + "FreeEnvironmentStrings": true, + "FreeLibrary": true, + "Fsid": true, + "Fstat": true, + "Fstatat": true, + "Fstatfs": true, + "Fstore_t": true, + "Fsync": true, + "Ftruncate": true, + "FullPath": true, + "Futimes": true, + "Futimesat": true, + "GENERIC_ALL": true, + "GENERIC_EXECUTE": true, + "GENERIC_READ": true, + "GENERIC_WRITE": true, + "GUID": true, + "GetAcceptExSockaddrs": true, + "GetAdaptersInfo": true, + "GetAddrInfoW": true, + "GetCommandLine": true, + "GetComputerName": true, + "GetConsoleMode": true, + "GetCurrentDirectory": true, + "GetCurrentProcess": true, + "GetEnvironmentStrings": true, + "GetEnvironmentVariable": true, + "GetExitCodeProcess": true, + "GetFileAttributes": true, + "GetFileAttributesEx": true, + "GetFileExInfoStandard": true, + "GetFileExMaxInfoLevel": true, + "GetFileInformationByHandle": true, + "GetFileType": true, + "GetFullPathName": true, + "GetHostByName": true, + "GetIfEntry": true, + "GetLastError": true, + "GetLengthSid": true, + "GetLongPathName": true, + "GetProcAddress": true, + "GetProcessTimes": true, + "GetProtoByName": true, + "GetQueuedCompletionStatus": true, + "GetServByName": true, + "GetShortPathName": true, + "GetStartupInfo": true, + "GetStdHandle": true, + "GetSystemTimeAsFileTime": true, + "GetTempPath": true, + "GetTimeZoneInformation": true, + "GetTokenInformation": true, + "GetUserNameEx": true, + "GetUserProfileDirectory": true, + "GetVersion": true, + "Getcwd": true, + "Getdents": true, + "Getdirentries": true, + 
"Getdtablesize": true, + "Getegid": true, + "Getenv": true, + "Geteuid": true, + "Getfsstat": true, + "Getgid": true, + "Getgroups": true, + "Getpagesize": true, + "Getpeername": true, + "Getpgid": true, + "Getpgrp": true, + "Getpid": true, + "Getppid": true, + "Getpriority": true, + "Getrlimit": true, + "Getrusage": true, + "Getsid": true, + "Getsockname": true, + "Getsockopt": true, + "GetsockoptByte": true, + "GetsockoptICMPv6Filter": true, + "GetsockoptIPMreq": true, + "GetsockoptIPMreqn": true, + "GetsockoptIPv6MTUInfo": true, + "GetsockoptIPv6Mreq": true, + "GetsockoptInet4Addr": true, + "GetsockoptInt": true, + "GetsockoptUcred": true, + "Gettid": true, + "Gettimeofday": true, + "Getuid": true, + "Getwd": true, + "Getxattr": true, + "HANDLE_FLAG_INHERIT": true, + "HKEY_CLASSES_ROOT": true, + "HKEY_CURRENT_CONFIG": true, + "HKEY_CURRENT_USER": true, + "HKEY_DYN_DATA": true, + "HKEY_LOCAL_MACHINE": true, + "HKEY_PERFORMANCE_DATA": true, + "HKEY_USERS": true, + "HUPCL": true, + "Handle": true, + "Hostent": true, + "ICANON": true, + "ICMP6_FILTER": true, + "ICMPV6_FILTER": true, + "ICMPv6Filter": true, + "ICRNL": true, + "IEXTEN": true, + "IFAN_ARRIVAL": true, + "IFAN_DEPARTURE": true, + "IFA_ADDRESS": true, + "IFA_ANYCAST": true, + "IFA_BROADCAST": true, + "IFA_CACHEINFO": true, + "IFA_F_DADFAILED": true, + "IFA_F_DEPRECATED": true, + "IFA_F_HOMEADDRESS": true, + "IFA_F_NODAD": true, + "IFA_F_OPTIMISTIC": true, + "IFA_F_PERMANENT": true, + "IFA_F_SECONDARY": true, + "IFA_F_TEMPORARY": true, + "IFA_F_TENTATIVE": true, + "IFA_LABEL": true, + "IFA_LOCAL": true, + "IFA_MAX": true, + "IFA_MULTICAST": true, + "IFA_ROUTE": true, + "IFA_UNSPEC": true, + "IFF_ALLMULTI": true, + "IFF_ALTPHYS": true, + "IFF_AUTOMEDIA": true, + "IFF_BROADCAST": true, + "IFF_CANTCHANGE": true, + "IFF_CANTCONFIG": true, + "IFF_DEBUG": true, + "IFF_DRV_OACTIVE": true, + "IFF_DRV_RUNNING": true, + "IFF_DYING": true, + "IFF_DYNAMIC": true, + "IFF_LINK0": true, + "IFF_LINK1": true, + "IFF_LINK2": true, + "IFF_LOOPBACK": true, + "IFF_MASTER": true, + "IFF_MONITOR": true, + "IFF_MULTICAST": true, + "IFF_NOARP": true, + "IFF_NOTRAILERS": true, + "IFF_NO_PI": true, + "IFF_OACTIVE": true, + "IFF_ONE_QUEUE": true, + "IFF_POINTOPOINT": true, + "IFF_POINTTOPOINT": true, + "IFF_PORTSEL": true, + "IFF_PPROMISC": true, + "IFF_PROMISC": true, + "IFF_RENAMING": true, + "IFF_RUNNING": true, + "IFF_SIMPLEX": true, + "IFF_SLAVE": true, + "IFF_SMART": true, + "IFF_STATICARP": true, + "IFF_TAP": true, + "IFF_TUN": true, + "IFF_TUN_EXCL": true, + "IFF_UP": true, + "IFF_VNET_HDR": true, + "IFLA_ADDRESS": true, + "IFLA_BROADCAST": true, + "IFLA_COST": true, + "IFLA_IFALIAS": true, + "IFLA_IFNAME": true, + "IFLA_LINK": true, + "IFLA_LINKINFO": true, + "IFLA_LINKMODE": true, + "IFLA_MAP": true, + "IFLA_MASTER": true, + "IFLA_MAX": true, + "IFLA_MTU": true, + "IFLA_NET_NS_PID": true, + "IFLA_OPERSTATE": true, + "IFLA_PRIORITY": true, + "IFLA_PROTINFO": true, + "IFLA_QDISC": true, + "IFLA_STATS": true, + "IFLA_TXQLEN": true, + "IFLA_UNSPEC": true, + "IFLA_WEIGHT": true, + "IFLA_WIRELESS": true, + "IFNAMSIZ": true, + "IFT_1822": true, + "IFT_A12MPPSWITCH": true, + "IFT_AAL2": true, + "IFT_AAL5": true, + "IFT_ADSL": true, + "IFT_AFLANE8023": true, + "IFT_AFLANE8025": true, + "IFT_ARAP": true, + "IFT_ARCNET": true, + "IFT_ARCNETPLUS": true, + "IFT_ASYNC": true, + "IFT_ATM": true, + "IFT_ATMDXI": true, + "IFT_ATMFUNI": true, + "IFT_ATMIMA": true, + "IFT_ATMLOGICAL": true, + "IFT_ATMRADIO": true, + "IFT_ATMSUBINTERFACE": true, + "IFT_ATMVCIENDPT": 
true, + "IFT_ATMVIRTUAL": true, + "IFT_BGPPOLICYACCOUNTING": true, + "IFT_BLUETOOTH": true, + "IFT_BRIDGE": true, + "IFT_BSC": true, + "IFT_CARP": true, + "IFT_CCTEMUL": true, + "IFT_CELLULAR": true, + "IFT_CEPT": true, + "IFT_CES": true, + "IFT_CHANNEL": true, + "IFT_CNR": true, + "IFT_COFFEE": true, + "IFT_COMPOSITELINK": true, + "IFT_DCN": true, + "IFT_DIGITALPOWERLINE": true, + "IFT_DIGITALWRAPPEROVERHEADCHANNEL": true, + "IFT_DLSW": true, + "IFT_DOCSCABLEDOWNSTREAM": true, + "IFT_DOCSCABLEMACLAYER": true, + "IFT_DOCSCABLEUPSTREAM": true, + "IFT_DOCSCABLEUPSTREAMCHANNEL": true, + "IFT_DS0": true, + "IFT_DS0BUNDLE": true, + "IFT_DS1FDL": true, + "IFT_DS3": true, + "IFT_DTM": true, + "IFT_DUMMY": true, + "IFT_DVBASILN": true, + "IFT_DVBASIOUT": true, + "IFT_DVBRCCDOWNSTREAM": true, + "IFT_DVBRCCMACLAYER": true, + "IFT_DVBRCCUPSTREAM": true, + "IFT_ECONET": true, + "IFT_ENC": true, + "IFT_EON": true, + "IFT_EPLRS": true, + "IFT_ESCON": true, + "IFT_ETHER": true, + "IFT_FAITH": true, + "IFT_FAST": true, + "IFT_FASTETHER": true, + "IFT_FASTETHERFX": true, + "IFT_FDDI": true, + "IFT_FIBRECHANNEL": true, + "IFT_FRAMERELAYINTERCONNECT": true, + "IFT_FRAMERELAYMPI": true, + "IFT_FRDLCIENDPT": true, + "IFT_FRELAY": true, + "IFT_FRELAYDCE": true, + "IFT_FRF16MFRBUNDLE": true, + "IFT_FRFORWARD": true, + "IFT_G703AT2MB": true, + "IFT_G703AT64K": true, + "IFT_GIF": true, + "IFT_GIGABITETHERNET": true, + "IFT_GR303IDT": true, + "IFT_GR303RDT": true, + "IFT_H323GATEKEEPER": true, + "IFT_H323PROXY": true, + "IFT_HDH1822": true, + "IFT_HDLC": true, + "IFT_HDSL2": true, + "IFT_HIPERLAN2": true, + "IFT_HIPPI": true, + "IFT_HIPPIINTERFACE": true, + "IFT_HOSTPAD": true, + "IFT_HSSI": true, + "IFT_HY": true, + "IFT_IBM370PARCHAN": true, + "IFT_IDSL": true, + "IFT_IEEE1394": true, + "IFT_IEEE80211": true, + "IFT_IEEE80212": true, + "IFT_IEEE8023ADLAG": true, + "IFT_IFGSN": true, + "IFT_IMT": true, + "IFT_INFINIBAND": true, + "IFT_INTERLEAVE": true, + "IFT_IP": true, + "IFT_IPFORWARD": true, + "IFT_IPOVERATM": true, + "IFT_IPOVERCDLC": true, + "IFT_IPOVERCLAW": true, + "IFT_IPSWITCH": true, + "IFT_IPXIP": true, + "IFT_ISDN": true, + "IFT_ISDNBASIC": true, + "IFT_ISDNPRIMARY": true, + "IFT_ISDNS": true, + "IFT_ISDNU": true, + "IFT_ISO88022LLC": true, + "IFT_ISO88023": true, + "IFT_ISO88024": true, + "IFT_ISO88025": true, + "IFT_ISO88025CRFPINT": true, + "IFT_ISO88025DTR": true, + "IFT_ISO88025FIBER": true, + "IFT_ISO88026": true, + "IFT_ISUP": true, + "IFT_L2VLAN": true, + "IFT_L3IPVLAN": true, + "IFT_L3IPXVLAN": true, + "IFT_LAPB": true, + "IFT_LAPD": true, + "IFT_LAPF": true, + "IFT_LINEGROUP": true, + "IFT_LOCALTALK": true, + "IFT_LOOP": true, + "IFT_MEDIAMAILOVERIP": true, + "IFT_MFSIGLINK": true, + "IFT_MIOX25": true, + "IFT_MODEM": true, + "IFT_MPC": true, + "IFT_MPLS": true, + "IFT_MPLSTUNNEL": true, + "IFT_MSDSL": true, + "IFT_MVL": true, + "IFT_MYRINET": true, + "IFT_NFAS": true, + "IFT_NSIP": true, + "IFT_OPTICALCHANNEL": true, + "IFT_OPTICALTRANSPORT": true, + "IFT_OTHER": true, + "IFT_P10": true, + "IFT_P80": true, + "IFT_PARA": true, + "IFT_PDP": true, + "IFT_PFLOG": true, + "IFT_PFLOW": true, + "IFT_PFSYNC": true, + "IFT_PLC": true, + "IFT_PON155": true, + "IFT_PON622": true, + "IFT_POS": true, + "IFT_PPP": true, + "IFT_PPPMULTILINKBUNDLE": true, + "IFT_PROPATM": true, + "IFT_PROPBWAP2MP": true, + "IFT_PROPCNLS": true, + "IFT_PROPDOCSWIRELESSDOWNSTREAM": true, + "IFT_PROPDOCSWIRELESSMACLAYER": true, + "IFT_PROPDOCSWIRELESSUPSTREAM": true, + "IFT_PROPMUX": true, + "IFT_PROPVIRTUAL": true, + 
"IFT_PROPWIRELESSP2P": true, + "IFT_PTPSERIAL": true, + "IFT_PVC": true, + "IFT_Q2931": true, + "IFT_QLLC": true, + "IFT_RADIOMAC": true, + "IFT_RADSL": true, + "IFT_REACHDSL": true, + "IFT_RFC1483": true, + "IFT_RS232": true, + "IFT_RSRB": true, + "IFT_SDLC": true, + "IFT_SDSL": true, + "IFT_SHDSL": true, + "IFT_SIP": true, + "IFT_SIPSIG": true, + "IFT_SIPTG": true, + "IFT_SLIP": true, + "IFT_SMDSDXI": true, + "IFT_SMDSICIP": true, + "IFT_SONET": true, + "IFT_SONETOVERHEADCHANNEL": true, + "IFT_SONETPATH": true, + "IFT_SONETVT": true, + "IFT_SRP": true, + "IFT_SS7SIGLINK": true, + "IFT_STACKTOSTACK": true, + "IFT_STARLAN": true, + "IFT_STF": true, + "IFT_T1": true, + "IFT_TDLC": true, + "IFT_TELINK": true, + "IFT_TERMPAD": true, + "IFT_TR008": true, + "IFT_TRANSPHDLC": true, + "IFT_TUNNEL": true, + "IFT_ULTRA": true, + "IFT_USB": true, + "IFT_V11": true, + "IFT_V35": true, + "IFT_V36": true, + "IFT_V37": true, + "IFT_VDSL": true, + "IFT_VIRTUALIPADDRESS": true, + "IFT_VIRTUALTG": true, + "IFT_VOICEDID": true, + "IFT_VOICEEM": true, + "IFT_VOICEEMFGD": true, + "IFT_VOICEENCAP": true, + "IFT_VOICEFGDEANA": true, + "IFT_VOICEFXO": true, + "IFT_VOICEFXS": true, + "IFT_VOICEOVERATM": true, + "IFT_VOICEOVERCABLE": true, + "IFT_VOICEOVERFRAMERELAY": true, + "IFT_VOICEOVERIP": true, + "IFT_X213": true, + "IFT_X25": true, + "IFT_X25DDN": true, + "IFT_X25HUNTGROUP": true, + "IFT_X25MLP": true, + "IFT_X25PLE": true, + "IFT_XETHER": true, + "IGNBRK": true, + "IGNCR": true, + "IGNORE": true, + "IGNPAR": true, + "IMAXBEL": true, + "INFINITE": true, + "INLCR": true, + "INPCK": true, + "INVALID_FILE_ATTRIBUTES": true, + "IN_ACCESS": true, + "IN_ALL_EVENTS": true, + "IN_ATTRIB": true, + "IN_CLASSA_HOST": true, + "IN_CLASSA_MAX": true, + "IN_CLASSA_NET": true, + "IN_CLASSA_NSHIFT": true, + "IN_CLASSB_HOST": true, + "IN_CLASSB_MAX": true, + "IN_CLASSB_NET": true, + "IN_CLASSB_NSHIFT": true, + "IN_CLASSC_HOST": true, + "IN_CLASSC_NET": true, + "IN_CLASSC_NSHIFT": true, + "IN_CLASSD_HOST": true, + "IN_CLASSD_NET": true, + "IN_CLASSD_NSHIFT": true, + "IN_CLOEXEC": true, + "IN_CLOSE": true, + "IN_CLOSE_NOWRITE": true, + "IN_CLOSE_WRITE": true, + "IN_CREATE": true, + "IN_DELETE": true, + "IN_DELETE_SELF": true, + "IN_DONT_FOLLOW": true, + "IN_EXCL_UNLINK": true, + "IN_IGNORED": true, + "IN_ISDIR": true, + "IN_LINKLOCALNETNUM": true, + "IN_LOOPBACKNET": true, + "IN_MASK_ADD": true, + "IN_MODIFY": true, + "IN_MOVE": true, + "IN_MOVED_FROM": true, + "IN_MOVED_TO": true, + "IN_MOVE_SELF": true, + "IN_NONBLOCK": true, + "IN_ONESHOT": true, + "IN_ONLYDIR": true, + "IN_OPEN": true, + "IN_Q_OVERFLOW": true, + "IN_RFC3021_HOST": true, + "IN_RFC3021_MASK": true, + "IN_RFC3021_NET": true, + "IN_RFC3021_NSHIFT": true, + "IN_UNMOUNT": true, + "IOC_IN": true, + "IOC_INOUT": true, + "IOC_OUT": true, + "IOC_VENDOR": true, + "IOC_WS2": true, + "IO_REPARSE_TAG_SYMLINK": true, + "IPMreq": true, + "IPMreqn": true, + "IPPROTO_3PC": true, + "IPPROTO_ADFS": true, + "IPPROTO_AH": true, + "IPPROTO_AHIP": true, + "IPPROTO_APES": true, + "IPPROTO_ARGUS": true, + "IPPROTO_AX25": true, + "IPPROTO_BHA": true, + "IPPROTO_BLT": true, + "IPPROTO_BRSATMON": true, + "IPPROTO_CARP": true, + "IPPROTO_CFTP": true, + "IPPROTO_CHAOS": true, + "IPPROTO_CMTP": true, + "IPPROTO_COMP": true, + "IPPROTO_CPHB": true, + "IPPROTO_CPNX": true, + "IPPROTO_DCCP": true, + "IPPROTO_DDP": true, + "IPPROTO_DGP": true, + "IPPROTO_DIVERT": true, + "IPPROTO_DIVERT_INIT": true, + "IPPROTO_DIVERT_RESP": true, + "IPPROTO_DONE": true, + "IPPROTO_DSTOPTS": true, + 
"IPPROTO_EGP": true, + "IPPROTO_EMCON": true, + "IPPROTO_ENCAP": true, + "IPPROTO_EON": true, + "IPPROTO_ESP": true, + "IPPROTO_ETHERIP": true, + "IPPROTO_FRAGMENT": true, + "IPPROTO_GGP": true, + "IPPROTO_GMTP": true, + "IPPROTO_GRE": true, + "IPPROTO_HELLO": true, + "IPPROTO_HMP": true, + "IPPROTO_HOPOPTS": true, + "IPPROTO_ICMP": true, + "IPPROTO_ICMPV6": true, + "IPPROTO_IDP": true, + "IPPROTO_IDPR": true, + "IPPROTO_IDRP": true, + "IPPROTO_IGMP": true, + "IPPROTO_IGP": true, + "IPPROTO_IGRP": true, + "IPPROTO_IL": true, + "IPPROTO_INLSP": true, + "IPPROTO_INP": true, + "IPPROTO_IP": true, + "IPPROTO_IPCOMP": true, + "IPPROTO_IPCV": true, + "IPPROTO_IPEIP": true, + "IPPROTO_IPIP": true, + "IPPROTO_IPPC": true, + "IPPROTO_IPV4": true, + "IPPROTO_IPV6": true, + "IPPROTO_IPV6_ICMP": true, + "IPPROTO_IRTP": true, + "IPPROTO_KRYPTOLAN": true, + "IPPROTO_LARP": true, + "IPPROTO_LEAF1": true, + "IPPROTO_LEAF2": true, + "IPPROTO_MAX": true, + "IPPROTO_MAXID": true, + "IPPROTO_MEAS": true, + "IPPROTO_MH": true, + "IPPROTO_MHRP": true, + "IPPROTO_MICP": true, + "IPPROTO_MOBILE": true, + "IPPROTO_MPLS": true, + "IPPROTO_MTP": true, + "IPPROTO_MUX": true, + "IPPROTO_ND": true, + "IPPROTO_NHRP": true, + "IPPROTO_NONE": true, + "IPPROTO_NSP": true, + "IPPROTO_NVPII": true, + "IPPROTO_OLD_DIVERT": true, + "IPPROTO_OSPFIGP": true, + "IPPROTO_PFSYNC": true, + "IPPROTO_PGM": true, + "IPPROTO_PIGP": true, + "IPPROTO_PIM": true, + "IPPROTO_PRM": true, + "IPPROTO_PUP": true, + "IPPROTO_PVP": true, + "IPPROTO_RAW": true, + "IPPROTO_RCCMON": true, + "IPPROTO_RDP": true, + "IPPROTO_ROUTING": true, + "IPPROTO_RSVP": true, + "IPPROTO_RVD": true, + "IPPROTO_SATEXPAK": true, + "IPPROTO_SATMON": true, + "IPPROTO_SCCSP": true, + "IPPROTO_SCTP": true, + "IPPROTO_SDRP": true, + "IPPROTO_SEND": true, + "IPPROTO_SEP": true, + "IPPROTO_SKIP": true, + "IPPROTO_SPACER": true, + "IPPROTO_SRPC": true, + "IPPROTO_ST": true, + "IPPROTO_SVMTP": true, + "IPPROTO_SWIPE": true, + "IPPROTO_TCF": true, + "IPPROTO_TCP": true, + "IPPROTO_TLSP": true, + "IPPROTO_TP": true, + "IPPROTO_TPXX": true, + "IPPROTO_TRUNK1": true, + "IPPROTO_TRUNK2": true, + "IPPROTO_TTP": true, + "IPPROTO_UDP": true, + "IPPROTO_UDPLITE": true, + "IPPROTO_VINES": true, + "IPPROTO_VISA": true, + "IPPROTO_VMTP": true, + "IPPROTO_VRRP": true, + "IPPROTO_WBEXPAK": true, + "IPPROTO_WBMON": true, + "IPPROTO_WSN": true, + "IPPROTO_XNET": true, + "IPPROTO_XTP": true, + "IPV6_2292DSTOPTS": true, + "IPV6_2292HOPLIMIT": true, + "IPV6_2292HOPOPTS": true, + "IPV6_2292NEXTHOP": true, + "IPV6_2292PKTINFO": true, + "IPV6_2292PKTOPTIONS": true, + "IPV6_2292RTHDR": true, + "IPV6_ADDRFORM": true, + "IPV6_ADD_MEMBERSHIP": true, + "IPV6_AUTHHDR": true, + "IPV6_AUTH_LEVEL": true, + "IPV6_AUTOFLOWLABEL": true, + "IPV6_BINDANY": true, + "IPV6_BINDV6ONLY": true, + "IPV6_BOUND_IF": true, + "IPV6_CHECKSUM": true, + "IPV6_DEFAULT_MULTICAST_HOPS": true, + "IPV6_DEFAULT_MULTICAST_LOOP": true, + "IPV6_DEFHLIM": true, + "IPV6_DONTFRAG": true, + "IPV6_DROP_MEMBERSHIP": true, + "IPV6_DSTOPTS": true, + "IPV6_ESP_NETWORK_LEVEL": true, + "IPV6_ESP_TRANS_LEVEL": true, + "IPV6_FAITH": true, + "IPV6_FLOWINFO_MASK": true, + "IPV6_FLOWLABEL_MASK": true, + "IPV6_FRAGTTL": true, + "IPV6_FW_ADD": true, + "IPV6_FW_DEL": true, + "IPV6_FW_FLUSH": true, + "IPV6_FW_GET": true, + "IPV6_FW_ZERO": true, + "IPV6_HLIMDEC": true, + "IPV6_HOPLIMIT": true, + "IPV6_HOPOPTS": true, + "IPV6_IPCOMP_LEVEL": true, + "IPV6_IPSEC_POLICY": true, + "IPV6_JOIN_ANYCAST": true, + "IPV6_JOIN_GROUP": true, + "IPV6_LEAVE_ANYCAST": 
true, + "IPV6_LEAVE_GROUP": true, + "IPV6_MAXHLIM": true, + "IPV6_MAXOPTHDR": true, + "IPV6_MAXPACKET": true, + "IPV6_MAX_GROUP_SRC_FILTER": true, + "IPV6_MAX_MEMBERSHIPS": true, + "IPV6_MAX_SOCK_SRC_FILTER": true, + "IPV6_MIN_MEMBERSHIPS": true, + "IPV6_MMTU": true, + "IPV6_MSFILTER": true, + "IPV6_MTU": true, + "IPV6_MTU_DISCOVER": true, + "IPV6_MULTICAST_HOPS": true, + "IPV6_MULTICAST_IF": true, + "IPV6_MULTICAST_LOOP": true, + "IPV6_NEXTHOP": true, + "IPV6_OPTIONS": true, + "IPV6_PATHMTU": true, + "IPV6_PIPEX": true, + "IPV6_PKTINFO": true, + "IPV6_PMTUDISC_DO": true, + "IPV6_PMTUDISC_DONT": true, + "IPV6_PMTUDISC_PROBE": true, + "IPV6_PMTUDISC_WANT": true, + "IPV6_PORTRANGE": true, + "IPV6_PORTRANGE_DEFAULT": true, + "IPV6_PORTRANGE_HIGH": true, + "IPV6_PORTRANGE_LOW": true, + "IPV6_PREFER_TEMPADDR": true, + "IPV6_RECVDSTOPTS": true, + "IPV6_RECVDSTPORT": true, + "IPV6_RECVERR": true, + "IPV6_RECVHOPLIMIT": true, + "IPV6_RECVHOPOPTS": true, + "IPV6_RECVPATHMTU": true, + "IPV6_RECVPKTINFO": true, + "IPV6_RECVRTHDR": true, + "IPV6_RECVTCLASS": true, + "IPV6_ROUTER_ALERT": true, + "IPV6_RTABLE": true, + "IPV6_RTHDR": true, + "IPV6_RTHDRDSTOPTS": true, + "IPV6_RTHDR_LOOSE": true, + "IPV6_RTHDR_STRICT": true, + "IPV6_RTHDR_TYPE_0": true, + "IPV6_RXDSTOPTS": true, + "IPV6_RXHOPOPTS": true, + "IPV6_SOCKOPT_RESERVED1": true, + "IPV6_TCLASS": true, + "IPV6_UNICAST_HOPS": true, + "IPV6_USE_MIN_MTU": true, + "IPV6_V6ONLY": true, + "IPV6_VERSION": true, + "IPV6_VERSION_MASK": true, + "IPV6_XFRM_POLICY": true, + "IP_ADD_MEMBERSHIP": true, + "IP_ADD_SOURCE_MEMBERSHIP": true, + "IP_AUTH_LEVEL": true, + "IP_BINDANY": true, + "IP_BLOCK_SOURCE": true, + "IP_BOUND_IF": true, + "IP_DEFAULT_MULTICAST_LOOP": true, + "IP_DEFAULT_MULTICAST_TTL": true, + "IP_DF": true, + "IP_DIVERTFL": true, + "IP_DONTFRAG": true, + "IP_DROP_MEMBERSHIP": true, + "IP_DROP_SOURCE_MEMBERSHIP": true, + "IP_DUMMYNET3": true, + "IP_DUMMYNET_CONFIGURE": true, + "IP_DUMMYNET_DEL": true, + "IP_DUMMYNET_FLUSH": true, + "IP_DUMMYNET_GET": true, + "IP_EF": true, + "IP_ERRORMTU": true, + "IP_ESP_NETWORK_LEVEL": true, + "IP_ESP_TRANS_LEVEL": true, + "IP_FAITH": true, + "IP_FREEBIND": true, + "IP_FW3": true, + "IP_FW_ADD": true, + "IP_FW_DEL": true, + "IP_FW_FLUSH": true, + "IP_FW_GET": true, + "IP_FW_NAT_CFG": true, + "IP_FW_NAT_DEL": true, + "IP_FW_NAT_GET_CONFIG": true, + "IP_FW_NAT_GET_LOG": true, + "IP_FW_RESETLOG": true, + "IP_FW_TABLE_ADD": true, + "IP_FW_TABLE_DEL": true, + "IP_FW_TABLE_FLUSH": true, + "IP_FW_TABLE_GETSIZE": true, + "IP_FW_TABLE_LIST": true, + "IP_FW_ZERO": true, + "IP_HDRINCL": true, + "IP_IPCOMP_LEVEL": true, + "IP_IPSECFLOWINFO": true, + "IP_IPSEC_LOCAL_AUTH": true, + "IP_IPSEC_LOCAL_CRED": true, + "IP_IPSEC_LOCAL_ID": true, + "IP_IPSEC_POLICY": true, + "IP_IPSEC_REMOTE_AUTH": true, + "IP_IPSEC_REMOTE_CRED": true, + "IP_IPSEC_REMOTE_ID": true, + "IP_MAXPACKET": true, + "IP_MAX_GROUP_SRC_FILTER": true, + "IP_MAX_MEMBERSHIPS": true, + "IP_MAX_SOCK_MUTE_FILTER": true, + "IP_MAX_SOCK_SRC_FILTER": true, + "IP_MAX_SOURCE_FILTER": true, + "IP_MF": true, + "IP_MINFRAGSIZE": true, + "IP_MINTTL": true, + "IP_MIN_MEMBERSHIPS": true, + "IP_MSFILTER": true, + "IP_MSS": true, + "IP_MTU": true, + "IP_MTU_DISCOVER": true, + "IP_MULTICAST_IF": true, + "IP_MULTICAST_IFINDEX": true, + "IP_MULTICAST_LOOP": true, + "IP_MULTICAST_TTL": true, + "IP_MULTICAST_VIF": true, + "IP_NAT__XXX": true, + "IP_OFFMASK": true, + "IP_OLD_FW_ADD": true, + "IP_OLD_FW_DEL": true, + "IP_OLD_FW_FLUSH": true, + "IP_OLD_FW_GET": true, + 
"IP_OLD_FW_RESETLOG": true, + "IP_OLD_FW_ZERO": true, + "IP_ONESBCAST": true, + "IP_OPTIONS": true, + "IP_ORIGDSTADDR": true, + "IP_PASSSEC": true, + "IP_PIPEX": true, + "IP_PKTINFO": true, + "IP_PKTOPTIONS": true, + "IP_PMTUDISC": true, + "IP_PMTUDISC_DO": true, + "IP_PMTUDISC_DONT": true, + "IP_PMTUDISC_PROBE": true, + "IP_PMTUDISC_WANT": true, + "IP_PORTRANGE": true, + "IP_PORTRANGE_DEFAULT": true, + "IP_PORTRANGE_HIGH": true, + "IP_PORTRANGE_LOW": true, + "IP_RECVDSTADDR": true, + "IP_RECVDSTPORT": true, + "IP_RECVERR": true, + "IP_RECVIF": true, + "IP_RECVOPTS": true, + "IP_RECVORIGDSTADDR": true, + "IP_RECVPKTINFO": true, + "IP_RECVRETOPTS": true, + "IP_RECVRTABLE": true, + "IP_RECVTOS": true, + "IP_RECVTTL": true, + "IP_RETOPTS": true, + "IP_RF": true, + "IP_ROUTER_ALERT": true, + "IP_RSVP_OFF": true, + "IP_RSVP_ON": true, + "IP_RSVP_VIF_OFF": true, + "IP_RSVP_VIF_ON": true, + "IP_RTABLE": true, + "IP_SENDSRCADDR": true, + "IP_STRIPHDR": true, + "IP_TOS": true, + "IP_TRAFFIC_MGT_BACKGROUND": true, + "IP_TRANSPARENT": true, + "IP_TTL": true, + "IP_UNBLOCK_SOURCE": true, + "IP_XFRM_POLICY": true, + "IPv6MTUInfo": true, + "IPv6Mreq": true, + "ISIG": true, + "ISTRIP": true, + "IUCLC": true, + "IUTF8": true, + "IXANY": true, + "IXOFF": true, + "IXON": true, + "IfAddrmsg": true, + "IfAnnounceMsghdr": true, + "IfData": true, + "IfInfomsg": true, + "IfMsghdr": true, + "IfaMsghdr": true, + "IfmaMsghdr": true, + "IfmaMsghdr2": true, + "ImplementsGetwd": true, + "Inet4Pktinfo": true, + "Inet6Pktinfo": true, + "InotifyAddWatch": true, + "InotifyEvent": true, + "InotifyInit": true, + "InotifyInit1": true, + "InotifyRmWatch": true, + "InterfaceAddrMessage": true, + "InterfaceAnnounceMessage": true, + "InterfaceInfo": true, + "InterfaceMessage": true, + "InterfaceMulticastAddrMessage": true, + "InvalidHandle": true, + "Ioperm": true, + "Iopl": true, + "Iovec": true, + "IpAdapterInfo": true, + "IpAddrString": true, + "IpAddressString": true, + "IpMaskString": true, + "Issetugid": true, + "KEY_ALL_ACCESS": true, + "KEY_CREATE_LINK": true, + "KEY_CREATE_SUB_KEY": true, + "KEY_ENUMERATE_SUB_KEYS": true, + "KEY_EXECUTE": true, + "KEY_NOTIFY": true, + "KEY_QUERY_VALUE": true, + "KEY_READ": true, + "KEY_SET_VALUE": true, + "KEY_WOW64_32KEY": true, + "KEY_WOW64_64KEY": true, + "KEY_WRITE": true, + "Kevent": true, + "Kevent_t": true, + "Kill": true, + "Klogctl": true, + "Kqueue": true, + "LANG_ENGLISH": true, + "LAYERED_PROTOCOL": true, + "LCNT_OVERLOAD_FLUSH": true, + "LINUX_REBOOT_CMD_CAD_OFF": true, + "LINUX_REBOOT_CMD_CAD_ON": true, + "LINUX_REBOOT_CMD_HALT": true, + "LINUX_REBOOT_CMD_KEXEC": true, + "LINUX_REBOOT_CMD_POWER_OFF": true, + "LINUX_REBOOT_CMD_RESTART": true, + "LINUX_REBOOT_CMD_RESTART2": true, + "LINUX_REBOOT_CMD_SW_SUSPEND": true, + "LINUX_REBOOT_MAGIC1": true, + "LINUX_REBOOT_MAGIC2": true, + "LOCK_EX": true, + "LOCK_NB": true, + "LOCK_SH": true, + "LOCK_UN": true, + "LazyDLL": true, + "LazyProc": true, + "Lchown": true, + "Linger": true, + "Link": true, + "Listen": true, + "Listxattr": true, + "LoadCancelIoEx": true, + "LoadConnectEx": true, + "LoadCreateSymbolicLink": true, + "LoadDLL": true, + "LoadGetAddrInfo": true, + "LoadLibrary": true, + "LoadSetFileCompletionNotificationModes": true, + "LocalFree": true, + "Log2phys_t": true, + "LookupAccountName": true, + "LookupAccountSid": true, + "LookupSID": true, + "LsfJump": true, + "LsfSocket": true, + "LsfStmt": true, + "Lstat": true, + "MADV_AUTOSYNC": true, + "MADV_CAN_REUSE": true, + "MADV_CORE": true, + "MADV_DOFORK": true, + 
"MADV_DONTFORK": true, + "MADV_DONTNEED": true, + "MADV_FREE": true, + "MADV_FREE_REUSABLE": true, + "MADV_FREE_REUSE": true, + "MADV_HUGEPAGE": true, + "MADV_HWPOISON": true, + "MADV_MERGEABLE": true, + "MADV_NOCORE": true, + "MADV_NOHUGEPAGE": true, + "MADV_NORMAL": true, + "MADV_NOSYNC": true, + "MADV_PROTECT": true, + "MADV_RANDOM": true, + "MADV_REMOVE": true, + "MADV_SEQUENTIAL": true, + "MADV_SPACEAVAIL": true, + "MADV_UNMERGEABLE": true, + "MADV_WILLNEED": true, + "MADV_ZERO_WIRED_PAGES": true, + "MAP_32BIT": true, + "MAP_ALIGNED_SUPER": true, + "MAP_ALIGNMENT_16MB": true, + "MAP_ALIGNMENT_1TB": true, + "MAP_ALIGNMENT_256TB": true, + "MAP_ALIGNMENT_4GB": true, + "MAP_ALIGNMENT_64KB": true, + "MAP_ALIGNMENT_64PB": true, + "MAP_ALIGNMENT_MASK": true, + "MAP_ALIGNMENT_SHIFT": true, + "MAP_ANON": true, + "MAP_ANONYMOUS": true, + "MAP_COPY": true, + "MAP_DENYWRITE": true, + "MAP_EXECUTABLE": true, + "MAP_FILE": true, + "MAP_FIXED": true, + "MAP_FLAGMASK": true, + "MAP_GROWSDOWN": true, + "MAP_HASSEMAPHORE": true, + "MAP_HUGETLB": true, + "MAP_INHERIT": true, + "MAP_INHERIT_COPY": true, + "MAP_INHERIT_DEFAULT": true, + "MAP_INHERIT_DONATE_COPY": true, + "MAP_INHERIT_NONE": true, + "MAP_INHERIT_SHARE": true, + "MAP_JIT": true, + "MAP_LOCKED": true, + "MAP_NOCACHE": true, + "MAP_NOCORE": true, + "MAP_NOEXTEND": true, + "MAP_NONBLOCK": true, + "MAP_NORESERVE": true, + "MAP_NOSYNC": true, + "MAP_POPULATE": true, + "MAP_PREFAULT_READ": true, + "MAP_PRIVATE": true, + "MAP_RENAME": true, + "MAP_RESERVED0080": true, + "MAP_RESERVED0100": true, + "MAP_SHARED": true, + "MAP_STACK": true, + "MAP_TRYFIXED": true, + "MAP_TYPE": true, + "MAP_WIRED": true, + "MAXIMUM_REPARSE_DATA_BUFFER_SIZE": true, + "MAXLEN_IFDESCR": true, + "MAXLEN_PHYSADDR": true, + "MAX_ADAPTER_ADDRESS_LENGTH": true, + "MAX_ADAPTER_DESCRIPTION_LENGTH": true, + "MAX_ADAPTER_NAME_LENGTH": true, + "MAX_COMPUTERNAME_LENGTH": true, + "MAX_INTERFACE_NAME_LEN": true, + "MAX_LONG_PATH": true, + "MAX_PATH": true, + "MAX_PROTOCOL_CHAIN": true, + "MCL_CURRENT": true, + "MCL_FUTURE": true, + "MNT_DETACH": true, + "MNT_EXPIRE": true, + "MNT_FORCE": true, + "MSG_BCAST": true, + "MSG_CMSG_CLOEXEC": true, + "MSG_COMPAT": true, + "MSG_CONFIRM": true, + "MSG_CONTROLMBUF": true, + "MSG_CTRUNC": true, + "MSG_DONTROUTE": true, + "MSG_DONTWAIT": true, + "MSG_EOF": true, + "MSG_EOR": true, + "MSG_ERRQUEUE": true, + "MSG_FASTOPEN": true, + "MSG_FIN": true, + "MSG_FLUSH": true, + "MSG_HAVEMORE": true, + "MSG_HOLD": true, + "MSG_IOVUSRSPACE": true, + "MSG_LENUSRSPACE": true, + "MSG_MCAST": true, + "MSG_MORE": true, + "MSG_NAMEMBUF": true, + "MSG_NBIO": true, + "MSG_NEEDSA": true, + "MSG_NOSIGNAL": true, + "MSG_NOTIFICATION": true, + "MSG_OOB": true, + "MSG_PEEK": true, + "MSG_PROXY": true, + "MSG_RCVMORE": true, + "MSG_RST": true, + "MSG_SEND": true, + "MSG_SYN": true, + "MSG_TRUNC": true, + "MSG_TRYHARD": true, + "MSG_USERFLAGS": true, + "MSG_WAITALL": true, + "MSG_WAITFORONE": true, + "MSG_WAITSTREAM": true, + "MS_ACTIVE": true, + "MS_ASYNC": true, + "MS_BIND": true, + "MS_DEACTIVATE": true, + "MS_DIRSYNC": true, + "MS_INVALIDATE": true, + "MS_I_VERSION": true, + "MS_KERNMOUNT": true, + "MS_KILLPAGES": true, + "MS_MANDLOCK": true, + "MS_MGC_MSK": true, + "MS_MGC_VAL": true, + "MS_MOVE": true, + "MS_NOATIME": true, + "MS_NODEV": true, + "MS_NODIRATIME": true, + "MS_NOEXEC": true, + "MS_NOSUID": true, + "MS_NOUSER": true, + "MS_POSIXACL": true, + "MS_PRIVATE": true, + "MS_RDONLY": true, + "MS_REC": true, + "MS_RELATIME": true, + "MS_REMOUNT": true, + 
"MS_RMT_MASK": true, + "MS_SHARED": true, + "MS_SILENT": true, + "MS_SLAVE": true, + "MS_STRICTATIME": true, + "MS_SYNC": true, + "MS_SYNCHRONOUS": true, + "MS_UNBINDABLE": true, + "Madvise": true, + "MapViewOfFile": true, + "MaxTokenInfoClass": true, + "Mclpool": true, + "MibIfRow": true, + "Mkdir": true, + "Mkdirat": true, + "Mkfifo": true, + "Mknod": true, + "Mknodat": true, + "Mlock": true, + "Mlockall": true, + "Mmap": true, + "Mount": true, + "MoveFile": true, + "Mprotect": true, + "Msghdr": true, + "Munlock": true, + "Munlockall": true, + "Munmap": true, + "MustLoadDLL": true, + "NAME_MAX": true, + "NETLINK_ADD_MEMBERSHIP": true, + "NETLINK_AUDIT": true, + "NETLINK_BROADCAST_ERROR": true, + "NETLINK_CONNECTOR": true, + "NETLINK_DNRTMSG": true, + "NETLINK_DROP_MEMBERSHIP": true, + "NETLINK_ECRYPTFS": true, + "NETLINK_FIB_LOOKUP": true, + "NETLINK_FIREWALL": true, + "NETLINK_GENERIC": true, + "NETLINK_INET_DIAG": true, + "NETLINK_IP6_FW": true, + "NETLINK_ISCSI": true, + "NETLINK_KOBJECT_UEVENT": true, + "NETLINK_NETFILTER": true, + "NETLINK_NFLOG": true, + "NETLINK_NO_ENOBUFS": true, + "NETLINK_PKTINFO": true, + "NETLINK_RDMA": true, + "NETLINK_ROUTE": true, + "NETLINK_SCSITRANSPORT": true, + "NETLINK_SELINUX": true, + "NETLINK_UNUSED": true, + "NETLINK_USERSOCK": true, + "NETLINK_XFRM": true, + "NET_RT_DUMP": true, + "NET_RT_DUMP2": true, + "NET_RT_FLAGS": true, + "NET_RT_IFLIST": true, + "NET_RT_IFLIST2": true, + "NET_RT_IFLISTL": true, + "NET_RT_IFMALIST": true, + "NET_RT_MAXID": true, + "NET_RT_OIFLIST": true, + "NET_RT_OOIFLIST": true, + "NET_RT_STAT": true, + "NET_RT_STATS": true, + "NET_RT_TABLE": true, + "NET_RT_TRASH": true, + "NLA_ALIGNTO": true, + "NLA_F_NESTED": true, + "NLA_F_NET_BYTEORDER": true, + "NLA_HDRLEN": true, + "NLMSG_ALIGNTO": true, + "NLMSG_DONE": true, + "NLMSG_ERROR": true, + "NLMSG_HDRLEN": true, + "NLMSG_MIN_TYPE": true, + "NLMSG_NOOP": true, + "NLMSG_OVERRUN": true, + "NLM_F_ACK": true, + "NLM_F_APPEND": true, + "NLM_F_ATOMIC": true, + "NLM_F_CREATE": true, + "NLM_F_DUMP": true, + "NLM_F_ECHO": true, + "NLM_F_EXCL": true, + "NLM_F_MATCH": true, + "NLM_F_MULTI": true, + "NLM_F_REPLACE": true, + "NLM_F_REQUEST": true, + "NLM_F_ROOT": true, + "NOFLSH": true, + "NOTE_ABSOLUTE": true, + "NOTE_ATTRIB": true, + "NOTE_CHILD": true, + "NOTE_DELETE": true, + "NOTE_EOF": true, + "NOTE_EXEC": true, + "NOTE_EXIT": true, + "NOTE_EXITSTATUS": true, + "NOTE_EXTEND": true, + "NOTE_FFAND": true, + "NOTE_FFCOPY": true, + "NOTE_FFCTRLMASK": true, + "NOTE_FFLAGSMASK": true, + "NOTE_FFNOP": true, + "NOTE_FFOR": true, + "NOTE_FORK": true, + "NOTE_LINK": true, + "NOTE_LOWAT": true, + "NOTE_NONE": true, + "NOTE_NSECONDS": true, + "NOTE_PCTRLMASK": true, + "NOTE_PDATAMASK": true, + "NOTE_REAP": true, + "NOTE_RENAME": true, + "NOTE_RESOURCEEND": true, + "NOTE_REVOKE": true, + "NOTE_SECONDS": true, + "NOTE_SIGNAL": true, + "NOTE_TRACK": true, + "NOTE_TRACKERR": true, + "NOTE_TRIGGER": true, + "NOTE_TRUNCATE": true, + "NOTE_USECONDS": true, + "NOTE_VM_ERROR": true, + "NOTE_VM_PRESSURE": true, + "NOTE_VM_PRESSURE_SUDDEN_TERMINATE": true, + "NOTE_VM_PRESSURE_TERMINATE": true, + "NOTE_WRITE": true, + "NameCanonical": true, + "NameCanonicalEx": true, + "NameDisplay": true, + "NameDnsDomain": true, + "NameFullyQualifiedDN": true, + "NameSamCompatible": true, + "NameServicePrincipal": true, + "NameUniqueId": true, + "NameUnknown": true, + "NameUserPrincipal": true, + "Nanosleep": true, + "NetApiBufferFree": true, + "NetGetJoinInformation": true, + "NetSetupDomainName": true, + 
"NetSetupUnjoined": true, + "NetSetupUnknownStatus": true, + "NetSetupWorkgroupName": true, + "NetUserGetInfo": true, + "NetlinkMessage": true, + "NetlinkRIB": true, + "NetlinkRouteAttr": true, + "NetlinkRouteRequest": true, + "NewCallback": true, + "NewCallbackCDecl": true, + "NewLazyDLL": true, + "NlAttr": true, + "NlMsgerr": true, + "NlMsghdr": true, + "NsecToFiletime": true, + "NsecToTimespec": true, + "NsecToTimeval": true, + "Ntohs": true, + "OCRNL": true, + "OFDEL": true, + "OFILL": true, + "OFIOGETBMAP": true, + "OID_PKIX_KP_SERVER_AUTH": true, + "OID_SERVER_GATED_CRYPTO": true, + "OID_SGC_NETSCAPE": true, + "OLCUC": true, + "ONLCR": true, + "ONLRET": true, + "ONOCR": true, + "ONOEOT": true, + "OPEN_ALWAYS": true, + "OPEN_EXISTING": true, + "OPOST": true, + "O_ACCMODE": true, + "O_ALERT": true, + "O_ALT_IO": true, + "O_APPEND": true, + "O_ASYNC": true, + "O_CLOEXEC": true, + "O_CREAT": true, + "O_DIRECT": true, + "O_DIRECTORY": true, + "O_DSYNC": true, + "O_EVTONLY": true, + "O_EXCL": true, + "O_EXEC": true, + "O_EXLOCK": true, + "O_FSYNC": true, + "O_LARGEFILE": true, + "O_NDELAY": true, + "O_NOATIME": true, + "O_NOCTTY": true, + "O_NOFOLLOW": true, + "O_NONBLOCK": true, + "O_NOSIGPIPE": true, + "O_POPUP": true, + "O_RDONLY": true, + "O_RDWR": true, + "O_RSYNC": true, + "O_SHLOCK": true, + "O_SYMLINK": true, + "O_SYNC": true, + "O_TRUNC": true, + "O_TTY_INIT": true, + "O_WRONLY": true, + "Open": true, + "OpenCurrentProcessToken": true, + "OpenProcess": true, + "OpenProcessToken": true, + "Openat": true, + "Overlapped": true, + "PACKET_ADD_MEMBERSHIP": true, + "PACKET_BROADCAST": true, + "PACKET_DROP_MEMBERSHIP": true, + "PACKET_FASTROUTE": true, + "PACKET_HOST": true, + "PACKET_LOOPBACK": true, + "PACKET_MR_ALLMULTI": true, + "PACKET_MR_MULTICAST": true, + "PACKET_MR_PROMISC": true, + "PACKET_MULTICAST": true, + "PACKET_OTHERHOST": true, + "PACKET_OUTGOING": true, + "PACKET_RECV_OUTPUT": true, + "PACKET_RX_RING": true, + "PACKET_STATISTICS": true, + "PAGE_EXECUTE_READ": true, + "PAGE_EXECUTE_READWRITE": true, + "PAGE_EXECUTE_WRITECOPY": true, + "PAGE_READONLY": true, + "PAGE_READWRITE": true, + "PAGE_WRITECOPY": true, + "PARENB": true, + "PARMRK": true, + "PARODD": true, + "PENDIN": true, + "PFL_HIDDEN": true, + "PFL_MATCHES_PROTOCOL_ZERO": true, + "PFL_MULTIPLE_PROTO_ENTRIES": true, + "PFL_NETWORKDIRECT_PROVIDER": true, + "PFL_RECOMMENDED_PROTO_ENTRY": true, + "PF_FLUSH": true, + "PKCS_7_ASN_ENCODING": true, + "PMC5_PIPELINE_FLUSH": true, + "PRIO_PGRP": true, + "PRIO_PROCESS": true, + "PRIO_USER": true, + "PRI_IOFLUSH": true, + "PROCESS_QUERY_INFORMATION": true, + "PROCESS_TERMINATE": true, + "PROT_EXEC": true, + "PROT_GROWSDOWN": true, + "PROT_GROWSUP": true, + "PROT_NONE": true, + "PROT_READ": true, + "PROT_WRITE": true, + "PROV_DH_SCHANNEL": true, + "PROV_DSS": true, + "PROV_DSS_DH": true, + "PROV_EC_ECDSA_FULL": true, + "PROV_EC_ECDSA_SIG": true, + "PROV_EC_ECNRA_FULL": true, + "PROV_EC_ECNRA_SIG": true, + "PROV_FORTEZZA": true, + "PROV_INTEL_SEC": true, + "PROV_MS_EXCHANGE": true, + "PROV_REPLACE_OWF": true, + "PROV_RNG": true, + "PROV_RSA_AES": true, + "PROV_RSA_FULL": true, + "PROV_RSA_SCHANNEL": true, + "PROV_RSA_SIG": true, + "PROV_SPYRUS_LYNKS": true, + "PROV_SSL": true, + "PR_CAPBSET_DROP": true, + "PR_CAPBSET_READ": true, + "PR_CLEAR_SECCOMP_FILTER": true, + "PR_ENDIAN_BIG": true, + "PR_ENDIAN_LITTLE": true, + "PR_ENDIAN_PPC_LITTLE": true, + "PR_FPEMU_NOPRINT": true, + "PR_FPEMU_SIGFPE": true, + "PR_FP_EXC_ASYNC": true, + "PR_FP_EXC_DISABLED": true, + "PR_FP_EXC_DIV": 
true, + "PR_FP_EXC_INV": true, + "PR_FP_EXC_NONRECOV": true, + "PR_FP_EXC_OVF": true, + "PR_FP_EXC_PRECISE": true, + "PR_FP_EXC_RES": true, + "PR_FP_EXC_SW_ENABLE": true, + "PR_FP_EXC_UND": true, + "PR_GET_DUMPABLE": true, + "PR_GET_ENDIAN": true, + "PR_GET_FPEMU": true, + "PR_GET_FPEXC": true, + "PR_GET_KEEPCAPS": true, + "PR_GET_NAME": true, + "PR_GET_PDEATHSIG": true, + "PR_GET_SECCOMP": true, + "PR_GET_SECCOMP_FILTER": true, + "PR_GET_SECUREBITS": true, + "PR_GET_TIMERSLACK": true, + "PR_GET_TIMING": true, + "PR_GET_TSC": true, + "PR_GET_UNALIGN": true, + "PR_MCE_KILL": true, + "PR_MCE_KILL_CLEAR": true, + "PR_MCE_KILL_DEFAULT": true, + "PR_MCE_KILL_EARLY": true, + "PR_MCE_KILL_GET": true, + "PR_MCE_KILL_LATE": true, + "PR_MCE_KILL_SET": true, + "PR_SECCOMP_FILTER_EVENT": true, + "PR_SECCOMP_FILTER_SYSCALL": true, + "PR_SET_DUMPABLE": true, + "PR_SET_ENDIAN": true, + "PR_SET_FPEMU": true, + "PR_SET_FPEXC": true, + "PR_SET_KEEPCAPS": true, + "PR_SET_NAME": true, + "PR_SET_PDEATHSIG": true, + "PR_SET_PTRACER": true, + "PR_SET_SECCOMP": true, + "PR_SET_SECCOMP_FILTER": true, + "PR_SET_SECUREBITS": true, + "PR_SET_TIMERSLACK": true, + "PR_SET_TIMING": true, + "PR_SET_TSC": true, + "PR_SET_UNALIGN": true, + "PR_TASK_PERF_EVENTS_DISABLE": true, + "PR_TASK_PERF_EVENTS_ENABLE": true, + "PR_TIMING_STATISTICAL": true, + "PR_TIMING_TIMESTAMP": true, + "PR_TSC_ENABLE": true, + "PR_TSC_SIGSEGV": true, + "PR_UNALIGN_NOPRINT": true, + "PR_UNALIGN_SIGBUS": true, + "PTRACE_ARCH_PRCTL": true, + "PTRACE_ATTACH": true, + "PTRACE_CONT": true, + "PTRACE_DETACH": true, + "PTRACE_EVENT_CLONE": true, + "PTRACE_EVENT_EXEC": true, + "PTRACE_EVENT_EXIT": true, + "PTRACE_EVENT_FORK": true, + "PTRACE_EVENT_VFORK": true, + "PTRACE_EVENT_VFORK_DONE": true, + "PTRACE_GETCRUNCHREGS": true, + "PTRACE_GETEVENTMSG": true, + "PTRACE_GETFPREGS": true, + "PTRACE_GETFPXREGS": true, + "PTRACE_GETHBPREGS": true, + "PTRACE_GETREGS": true, + "PTRACE_GETREGSET": true, + "PTRACE_GETSIGINFO": true, + "PTRACE_GETVFPREGS": true, + "PTRACE_GETWMMXREGS": true, + "PTRACE_GET_THREAD_AREA": true, + "PTRACE_KILL": true, + "PTRACE_OLDSETOPTIONS": true, + "PTRACE_O_MASK": true, + "PTRACE_O_TRACECLONE": true, + "PTRACE_O_TRACEEXEC": true, + "PTRACE_O_TRACEEXIT": true, + "PTRACE_O_TRACEFORK": true, + "PTRACE_O_TRACESYSGOOD": true, + "PTRACE_O_TRACEVFORK": true, + "PTRACE_O_TRACEVFORKDONE": true, + "PTRACE_PEEKDATA": true, + "PTRACE_PEEKTEXT": true, + "PTRACE_PEEKUSR": true, + "PTRACE_POKEDATA": true, + "PTRACE_POKETEXT": true, + "PTRACE_POKEUSR": true, + "PTRACE_SETCRUNCHREGS": true, + "PTRACE_SETFPREGS": true, + "PTRACE_SETFPXREGS": true, + "PTRACE_SETHBPREGS": true, + "PTRACE_SETOPTIONS": true, + "PTRACE_SETREGS": true, + "PTRACE_SETREGSET": true, + "PTRACE_SETSIGINFO": true, + "PTRACE_SETVFPREGS": true, + "PTRACE_SETWMMXREGS": true, + "PTRACE_SET_SYSCALL": true, + "PTRACE_SET_THREAD_AREA": true, + "PTRACE_SINGLEBLOCK": true, + "PTRACE_SINGLESTEP": true, + "PTRACE_SYSCALL": true, + "PTRACE_SYSEMU": true, + "PTRACE_SYSEMU_SINGLESTEP": true, + "PTRACE_TRACEME": true, + "PT_ATTACH": true, + "PT_ATTACHEXC": true, + "PT_CONTINUE": true, + "PT_DATA_ADDR": true, + "PT_DENY_ATTACH": true, + "PT_DETACH": true, + "PT_FIRSTMACH": true, + "PT_FORCEQUOTA": true, + "PT_KILL": true, + "PT_MASK": true, + "PT_READ_D": true, + "PT_READ_I": true, + "PT_READ_U": true, + "PT_SIGEXC": true, + "PT_STEP": true, + "PT_TEXT_ADDR": true, + "PT_TEXT_END_ADDR": true, + "PT_THUPDATE": true, + "PT_TRACE_ME": true, + "PT_WRITE_D": true, + "PT_WRITE_I": true, + "PT_WRITE_U": 
true, + "ParseDirent": true, + "ParseNetlinkMessage": true, + "ParseNetlinkRouteAttr": true, + "ParseRoutingMessage": true, + "ParseRoutingSockaddr": true, + "ParseSocketControlMessage": true, + "ParseUnixCredentials": true, + "ParseUnixRights": true, + "PathMax": true, + "Pathconf": true, + "Pause": true, + "Pipe": true, + "Pipe2": true, + "PivotRoot": true, + "Pointer": true, + "PostQueuedCompletionStatus": true, + "Pread": true, + "Proc": true, + "ProcAttr": true, + "Process32First": true, + "Process32Next": true, + "ProcessEntry32": true, + "ProcessInformation": true, + "Protoent": true, + "PtraceAttach": true, + "PtraceCont": true, + "PtraceDetach": true, + "PtraceGetEventMsg": true, + "PtraceGetRegs": true, + "PtracePeekData": true, + "PtracePeekText": true, + "PtracePokeData": true, + "PtracePokeText": true, + "PtraceRegs": true, + "PtraceSetOptions": true, + "PtraceSetRegs": true, + "PtraceSingleStep": true, + "PtraceSyscall": true, + "Pwrite": true, + "REG_BINARY": true, + "REG_DWORD": true, + "REG_DWORD_BIG_ENDIAN": true, + "REG_DWORD_LITTLE_ENDIAN": true, + "REG_EXPAND_SZ": true, + "REG_FULL_RESOURCE_DESCRIPTOR": true, + "REG_LINK": true, + "REG_MULTI_SZ": true, + "REG_NONE": true, + "REG_QWORD": true, + "REG_QWORD_LITTLE_ENDIAN": true, + "REG_RESOURCE_LIST": true, + "REG_RESOURCE_REQUIREMENTS_LIST": true, + "REG_SZ": true, + "RLIMIT_AS": true, + "RLIMIT_CORE": true, + "RLIMIT_CPU": true, + "RLIMIT_DATA": true, + "RLIMIT_FSIZE": true, + "RLIMIT_NOFILE": true, + "RLIMIT_STACK": true, + "RLIM_INFINITY": true, + "RTAX_ADVMSS": true, + "RTAX_AUTHOR": true, + "RTAX_BRD": true, + "RTAX_CWND": true, + "RTAX_DST": true, + "RTAX_FEATURES": true, + "RTAX_FEATURE_ALLFRAG": true, + "RTAX_FEATURE_ECN": true, + "RTAX_FEATURE_SACK": true, + "RTAX_FEATURE_TIMESTAMP": true, + "RTAX_GATEWAY": true, + "RTAX_GENMASK": true, + "RTAX_HOPLIMIT": true, + "RTAX_IFA": true, + "RTAX_IFP": true, + "RTAX_INITCWND": true, + "RTAX_INITRWND": true, + "RTAX_LABEL": true, + "RTAX_LOCK": true, + "RTAX_MAX": true, + "RTAX_MTU": true, + "RTAX_NETMASK": true, + "RTAX_REORDERING": true, + "RTAX_RTO_MIN": true, + "RTAX_RTT": true, + "RTAX_RTTVAR": true, + "RTAX_SRC": true, + "RTAX_SRCMASK": true, + "RTAX_SSTHRESH": true, + "RTAX_TAG": true, + "RTAX_UNSPEC": true, + "RTAX_WINDOW": true, + "RTA_ALIGNTO": true, + "RTA_AUTHOR": true, + "RTA_BRD": true, + "RTA_CACHEINFO": true, + "RTA_DST": true, + "RTA_FLOW": true, + "RTA_GATEWAY": true, + "RTA_GENMASK": true, + "RTA_IFA": true, + "RTA_IFP": true, + "RTA_IIF": true, + "RTA_LABEL": true, + "RTA_MAX": true, + "RTA_METRICS": true, + "RTA_MULTIPATH": true, + "RTA_NETMASK": true, + "RTA_OIF": true, + "RTA_PREFSRC": true, + "RTA_PRIORITY": true, + "RTA_SRC": true, + "RTA_SRCMASK": true, + "RTA_TABLE": true, + "RTA_TAG": true, + "RTA_UNSPEC": true, + "RTCF_DIRECTSRC": true, + "RTCF_DOREDIRECT": true, + "RTCF_LOG": true, + "RTCF_MASQ": true, + "RTCF_NAT": true, + "RTCF_VALVE": true, + "RTF_ADDRCLASSMASK": true, + "RTF_ADDRCONF": true, + "RTF_ALLONLINK": true, + "RTF_ANNOUNCE": true, + "RTF_BLACKHOLE": true, + "RTF_BROADCAST": true, + "RTF_CACHE": true, + "RTF_CLONED": true, + "RTF_CLONING": true, + "RTF_CONDEMNED": true, + "RTF_DEFAULT": true, + "RTF_DELCLONE": true, + "RTF_DONE": true, + "RTF_DYNAMIC": true, + "RTF_FLOW": true, + "RTF_FMASK": true, + "RTF_GATEWAY": true, + "RTF_GWFLAG_COMPAT": true, + "RTF_HOST": true, + "RTF_IFREF": true, + "RTF_IFSCOPE": true, + "RTF_INTERFACE": true, + "RTF_IRTT": true, + "RTF_LINKRT": true, + "RTF_LLDATA": true, + "RTF_LLINFO": true, + 
"RTF_LOCAL": true, + "RTF_MASK": true, + "RTF_MODIFIED": true, + "RTF_MPATH": true, + "RTF_MPLS": true, + "RTF_MSS": true, + "RTF_MTU": true, + "RTF_MULTICAST": true, + "RTF_NAT": true, + "RTF_NOFORWARD": true, + "RTF_NONEXTHOP": true, + "RTF_NOPMTUDISC": true, + "RTF_PERMANENT_ARP": true, + "RTF_PINNED": true, + "RTF_POLICY": true, + "RTF_PRCLONING": true, + "RTF_PROTO1": true, + "RTF_PROTO2": true, + "RTF_PROTO3": true, + "RTF_REINSTATE": true, + "RTF_REJECT": true, + "RTF_RNH_LOCKED": true, + "RTF_SOURCE": true, + "RTF_SRC": true, + "RTF_STATIC": true, + "RTF_STICKY": true, + "RTF_THROW": true, + "RTF_TUNNEL": true, + "RTF_UP": true, + "RTF_USETRAILERS": true, + "RTF_WASCLONED": true, + "RTF_WINDOW": true, + "RTF_XRESOLVE": true, + "RTM_ADD": true, + "RTM_BASE": true, + "RTM_CHANGE": true, + "RTM_CHGADDR": true, + "RTM_DELACTION": true, + "RTM_DELADDR": true, + "RTM_DELADDRLABEL": true, + "RTM_DELETE": true, + "RTM_DELLINK": true, + "RTM_DELMADDR": true, + "RTM_DELNEIGH": true, + "RTM_DELQDISC": true, + "RTM_DELROUTE": true, + "RTM_DELRULE": true, + "RTM_DELTCLASS": true, + "RTM_DELTFILTER": true, + "RTM_DESYNC": true, + "RTM_F_CLONED": true, + "RTM_F_EQUALIZE": true, + "RTM_F_NOTIFY": true, + "RTM_F_PREFIX": true, + "RTM_GET": true, + "RTM_GET2": true, + "RTM_GETACTION": true, + "RTM_GETADDR": true, + "RTM_GETADDRLABEL": true, + "RTM_GETANYCAST": true, + "RTM_GETDCB": true, + "RTM_GETLINK": true, + "RTM_GETMULTICAST": true, + "RTM_GETNEIGH": true, + "RTM_GETNEIGHTBL": true, + "RTM_GETQDISC": true, + "RTM_GETROUTE": true, + "RTM_GETRULE": true, + "RTM_GETTCLASS": true, + "RTM_GETTFILTER": true, + "RTM_IEEE80211": true, + "RTM_IFANNOUNCE": true, + "RTM_IFINFO": true, + "RTM_IFINFO2": true, + "RTM_LLINFO_UPD": true, + "RTM_LOCK": true, + "RTM_LOSING": true, + "RTM_MAX": true, + "RTM_MAXSIZE": true, + "RTM_MISS": true, + "RTM_NEWACTION": true, + "RTM_NEWADDR": true, + "RTM_NEWADDRLABEL": true, + "RTM_NEWLINK": true, + "RTM_NEWMADDR": true, + "RTM_NEWMADDR2": true, + "RTM_NEWNDUSEROPT": true, + "RTM_NEWNEIGH": true, + "RTM_NEWNEIGHTBL": true, + "RTM_NEWPREFIX": true, + "RTM_NEWQDISC": true, + "RTM_NEWROUTE": true, + "RTM_NEWRULE": true, + "RTM_NEWTCLASS": true, + "RTM_NEWTFILTER": true, + "RTM_NR_FAMILIES": true, + "RTM_NR_MSGTYPES": true, + "RTM_OIFINFO": true, + "RTM_OLDADD": true, + "RTM_OLDDEL": true, + "RTM_OOIFINFO": true, + "RTM_REDIRECT": true, + "RTM_RESOLVE": true, + "RTM_RTTUNIT": true, + "RTM_SETDCB": true, + "RTM_SETGATE": true, + "RTM_SETLINK": true, + "RTM_SETNEIGHTBL": true, + "RTM_VERSION": true, + "RTNH_ALIGNTO": true, + "RTNH_F_DEAD": true, + "RTNH_F_ONLINK": true, + "RTNH_F_PERVASIVE": true, + "RTNLGRP_IPV4_IFADDR": true, + "RTNLGRP_IPV4_MROUTE": true, + "RTNLGRP_IPV4_ROUTE": true, + "RTNLGRP_IPV4_RULE": true, + "RTNLGRP_IPV6_IFADDR": true, + "RTNLGRP_IPV6_IFINFO": true, + "RTNLGRP_IPV6_MROUTE": true, + "RTNLGRP_IPV6_PREFIX": true, + "RTNLGRP_IPV6_ROUTE": true, + "RTNLGRP_IPV6_RULE": true, + "RTNLGRP_LINK": true, + "RTNLGRP_ND_USEROPT": true, + "RTNLGRP_NEIGH": true, + "RTNLGRP_NONE": true, + "RTNLGRP_NOTIFY": true, + "RTNLGRP_TC": true, + "RTN_ANYCAST": true, + "RTN_BLACKHOLE": true, + "RTN_BROADCAST": true, + "RTN_LOCAL": true, + "RTN_MAX": true, + "RTN_MULTICAST": true, + "RTN_NAT": true, + "RTN_PROHIBIT": true, + "RTN_THROW": true, + "RTN_UNICAST": true, + "RTN_UNREACHABLE": true, + "RTN_UNSPEC": true, + "RTN_XRESOLVE": true, + "RTPROT_BIRD": true, + "RTPROT_BOOT": true, + "RTPROT_DHCP": true, + "RTPROT_DNROUTED": true, + "RTPROT_GATED": true, + "RTPROT_KERNEL": 
true, + "RTPROT_MRT": true, + "RTPROT_NTK": true, + "RTPROT_RA": true, + "RTPROT_REDIRECT": true, + "RTPROT_STATIC": true, + "RTPROT_UNSPEC": true, + "RTPROT_XORP": true, + "RTPROT_ZEBRA": true, + "RTV_EXPIRE": true, + "RTV_HOPCOUNT": true, + "RTV_MTU": true, + "RTV_RPIPE": true, + "RTV_RTT": true, + "RTV_RTTVAR": true, + "RTV_SPIPE": true, + "RTV_SSTHRESH": true, + "RTV_WEIGHT": true, + "RT_CACHING_CONTEXT": true, + "RT_CLASS_DEFAULT": true, + "RT_CLASS_LOCAL": true, + "RT_CLASS_MAIN": true, + "RT_CLASS_MAX": true, + "RT_CLASS_UNSPEC": true, + "RT_DEFAULT_FIB": true, + "RT_NORTREF": true, + "RT_SCOPE_HOST": true, + "RT_SCOPE_LINK": true, + "RT_SCOPE_NOWHERE": true, + "RT_SCOPE_SITE": true, + "RT_SCOPE_UNIVERSE": true, + "RT_TABLEID_MAX": true, + "RT_TABLE_COMPAT": true, + "RT_TABLE_DEFAULT": true, + "RT_TABLE_LOCAL": true, + "RT_TABLE_MAIN": true, + "RT_TABLE_MAX": true, + "RT_TABLE_UNSPEC": true, + "RUSAGE_CHILDREN": true, + "RUSAGE_SELF": true, + "RUSAGE_THREAD": true, + "Radvisory_t": true, + "RawConn": true, + "RawSockaddr": true, + "RawSockaddrAny": true, + "RawSockaddrDatalink": true, + "RawSockaddrInet4": true, + "RawSockaddrInet6": true, + "RawSockaddrLinklayer": true, + "RawSockaddrNetlink": true, + "RawSockaddrUnix": true, + "RawSyscall": true, + "RawSyscall6": true, + "Read": true, + "ReadConsole": true, + "ReadDirectoryChanges": true, + "ReadDirent": true, + "ReadFile": true, + "Readlink": true, + "Reboot": true, + "Recvfrom": true, + "Recvmsg": true, + "RegCloseKey": true, + "RegEnumKeyEx": true, + "RegOpenKeyEx": true, + "RegQueryInfoKey": true, + "RegQueryValueEx": true, + "RemoveDirectory": true, + "Removexattr": true, + "Rename": true, + "Renameat": true, + "Revoke": true, + "Rlimit": true, + "Rmdir": true, + "RouteMessage": true, + "RouteRIB": true, + "RtAttr": true, + "RtGenmsg": true, + "RtMetrics": true, + "RtMsg": true, + "RtMsghdr": true, + "RtNexthop": true, + "Rusage": true, + "SCM_BINTIME": true, + "SCM_CREDENTIALS": true, + "SCM_CREDS": true, + "SCM_RIGHTS": true, + "SCM_TIMESTAMP": true, + "SCM_TIMESTAMPING": true, + "SCM_TIMESTAMPNS": true, + "SCM_TIMESTAMP_MONOTONIC": true, + "SHUT_RD": true, + "SHUT_RDWR": true, + "SHUT_WR": true, + "SID": true, + "SIDAndAttributes": true, + "SIGABRT": true, + "SIGALRM": true, + "SIGBUS": true, + "SIGCHLD": true, + "SIGCLD": true, + "SIGCONT": true, + "SIGEMT": true, + "SIGFPE": true, + "SIGHUP": true, + "SIGILL": true, + "SIGINFO": true, + "SIGINT": true, + "SIGIO": true, + "SIGIOT": true, + "SIGKILL": true, + "SIGLIBRT": true, + "SIGLWP": true, + "SIGPIPE": true, + "SIGPOLL": true, + "SIGPROF": true, + "SIGPWR": true, + "SIGQUIT": true, + "SIGSEGV": true, + "SIGSTKFLT": true, + "SIGSTOP": true, + "SIGSYS": true, + "SIGTERM": true, + "SIGTHR": true, + "SIGTRAP": true, + "SIGTSTP": true, + "SIGTTIN": true, + "SIGTTOU": true, + "SIGUNUSED": true, + "SIGURG": true, + "SIGUSR1": true, + "SIGUSR2": true, + "SIGVTALRM": true, + "SIGWINCH": true, + "SIGXCPU": true, + "SIGXFSZ": true, + "SIOCADDDLCI": true, + "SIOCADDMULTI": true, + "SIOCADDRT": true, + "SIOCAIFADDR": true, + "SIOCAIFGROUP": true, + "SIOCALIFADDR": true, + "SIOCARPIPLL": true, + "SIOCATMARK": true, + "SIOCAUTOADDR": true, + "SIOCAUTONETMASK": true, + "SIOCBRDGADD": true, + "SIOCBRDGADDS": true, + "SIOCBRDGARL": true, + "SIOCBRDGDADDR": true, + "SIOCBRDGDEL": true, + "SIOCBRDGDELS": true, + "SIOCBRDGFLUSH": true, + "SIOCBRDGFRL": true, + "SIOCBRDGGCACHE": true, + "SIOCBRDGGFD": true, + "SIOCBRDGGHT": true, + "SIOCBRDGGIFFLGS": true, + "SIOCBRDGGMA": true, + 
"SIOCBRDGGPARAM": true, + "SIOCBRDGGPRI": true, + "SIOCBRDGGRL": true, + "SIOCBRDGGSIFS": true, + "SIOCBRDGGTO": true, + "SIOCBRDGIFS": true, + "SIOCBRDGRTS": true, + "SIOCBRDGSADDR": true, + "SIOCBRDGSCACHE": true, + "SIOCBRDGSFD": true, + "SIOCBRDGSHT": true, + "SIOCBRDGSIFCOST": true, + "SIOCBRDGSIFFLGS": true, + "SIOCBRDGSIFPRIO": true, + "SIOCBRDGSMA": true, + "SIOCBRDGSPRI": true, + "SIOCBRDGSPROTO": true, + "SIOCBRDGSTO": true, + "SIOCBRDGSTXHC": true, + "SIOCDARP": true, + "SIOCDELDLCI": true, + "SIOCDELMULTI": true, + "SIOCDELRT": true, + "SIOCDEVPRIVATE": true, + "SIOCDIFADDR": true, + "SIOCDIFGROUP": true, + "SIOCDIFPHYADDR": true, + "SIOCDLIFADDR": true, + "SIOCDRARP": true, + "SIOCGARP": true, + "SIOCGDRVSPEC": true, + "SIOCGETKALIVE": true, + "SIOCGETLABEL": true, + "SIOCGETPFLOW": true, + "SIOCGETPFSYNC": true, + "SIOCGETSGCNT": true, + "SIOCGETVIFCNT": true, + "SIOCGETVLAN": true, + "SIOCGHIWAT": true, + "SIOCGIFADDR": true, + "SIOCGIFADDRPREF": true, + "SIOCGIFALIAS": true, + "SIOCGIFALTMTU": true, + "SIOCGIFASYNCMAP": true, + "SIOCGIFBOND": true, + "SIOCGIFBR": true, + "SIOCGIFBRDADDR": true, + "SIOCGIFCAP": true, + "SIOCGIFCONF": true, + "SIOCGIFCOUNT": true, + "SIOCGIFDATA": true, + "SIOCGIFDESCR": true, + "SIOCGIFDEVMTU": true, + "SIOCGIFDLT": true, + "SIOCGIFDSTADDR": true, + "SIOCGIFENCAP": true, + "SIOCGIFFIB": true, + "SIOCGIFFLAGS": true, + "SIOCGIFGATTR": true, + "SIOCGIFGENERIC": true, + "SIOCGIFGMEMB": true, + "SIOCGIFGROUP": true, + "SIOCGIFHARDMTU": true, + "SIOCGIFHWADDR": true, + "SIOCGIFINDEX": true, + "SIOCGIFKPI": true, + "SIOCGIFMAC": true, + "SIOCGIFMAP": true, + "SIOCGIFMEDIA": true, + "SIOCGIFMEM": true, + "SIOCGIFMETRIC": true, + "SIOCGIFMTU": true, + "SIOCGIFNAME": true, + "SIOCGIFNETMASK": true, + "SIOCGIFPDSTADDR": true, + "SIOCGIFPFLAGS": true, + "SIOCGIFPHYS": true, + "SIOCGIFPRIORITY": true, + "SIOCGIFPSRCADDR": true, + "SIOCGIFRDOMAIN": true, + "SIOCGIFRTLABEL": true, + "SIOCGIFSLAVE": true, + "SIOCGIFSTATUS": true, + "SIOCGIFTIMESLOT": true, + "SIOCGIFTXQLEN": true, + "SIOCGIFVLAN": true, + "SIOCGIFWAKEFLAGS": true, + "SIOCGIFXFLAGS": true, + "SIOCGLIFADDR": true, + "SIOCGLIFPHYADDR": true, + "SIOCGLIFPHYRTABLE": true, + "SIOCGLIFPHYTTL": true, + "SIOCGLINKSTR": true, + "SIOCGLOWAT": true, + "SIOCGPGRP": true, + "SIOCGPRIVATE_0": true, + "SIOCGPRIVATE_1": true, + "SIOCGRARP": true, + "SIOCGSPPPPARAMS": true, + "SIOCGSTAMP": true, + "SIOCGSTAMPNS": true, + "SIOCGVH": true, + "SIOCGVNETID": true, + "SIOCIFCREATE": true, + "SIOCIFCREATE2": true, + "SIOCIFDESTROY": true, + "SIOCIFGCLONERS": true, + "SIOCINITIFADDR": true, + "SIOCPROTOPRIVATE": true, + "SIOCRSLVMULTI": true, + "SIOCRTMSG": true, + "SIOCSARP": true, + "SIOCSDRVSPEC": true, + "SIOCSETKALIVE": true, + "SIOCSETLABEL": true, + "SIOCSETPFLOW": true, + "SIOCSETPFSYNC": true, + "SIOCSETVLAN": true, + "SIOCSHIWAT": true, + "SIOCSIFADDR": true, + "SIOCSIFADDRPREF": true, + "SIOCSIFALTMTU": true, + "SIOCSIFASYNCMAP": true, + "SIOCSIFBOND": true, + "SIOCSIFBR": true, + "SIOCSIFBRDADDR": true, + "SIOCSIFCAP": true, + "SIOCSIFDESCR": true, + "SIOCSIFDSTADDR": true, + "SIOCSIFENCAP": true, + "SIOCSIFFIB": true, + "SIOCSIFFLAGS": true, + "SIOCSIFGATTR": true, + "SIOCSIFGENERIC": true, + "SIOCSIFHWADDR": true, + "SIOCSIFHWBROADCAST": true, + "SIOCSIFKPI": true, + "SIOCSIFLINK": true, + "SIOCSIFLLADDR": true, + "SIOCSIFMAC": true, + "SIOCSIFMAP": true, + "SIOCSIFMEDIA": true, + "SIOCSIFMEM": true, + "SIOCSIFMETRIC": true, + "SIOCSIFMTU": true, + "SIOCSIFNAME": true, + "SIOCSIFNETMASK": true, + 
"SIOCSIFPFLAGS": true, + "SIOCSIFPHYADDR": true, + "SIOCSIFPHYS": true, + "SIOCSIFPRIORITY": true, + "SIOCSIFRDOMAIN": true, + "SIOCSIFRTLABEL": true, + "SIOCSIFRVNET": true, + "SIOCSIFSLAVE": true, + "SIOCSIFTIMESLOT": true, + "SIOCSIFTXQLEN": true, + "SIOCSIFVLAN": true, + "SIOCSIFVNET": true, + "SIOCSIFXFLAGS": true, + "SIOCSLIFPHYADDR": true, + "SIOCSLIFPHYRTABLE": true, + "SIOCSLIFPHYTTL": true, + "SIOCSLINKSTR": true, + "SIOCSLOWAT": true, + "SIOCSPGRP": true, + "SIOCSRARP": true, + "SIOCSSPPPPARAMS": true, + "SIOCSVH": true, + "SIOCSVNETID": true, + "SIOCZIFDATA": true, + "SIO_GET_EXTENSION_FUNCTION_POINTER": true, + "SIO_GET_INTERFACE_LIST": true, + "SIO_KEEPALIVE_VALS": true, + "SIO_UDP_CONNRESET": true, + "SOCK_CLOEXEC": true, + "SOCK_DCCP": true, + "SOCK_DGRAM": true, + "SOCK_FLAGS_MASK": true, + "SOCK_MAXADDRLEN": true, + "SOCK_NONBLOCK": true, + "SOCK_NOSIGPIPE": true, + "SOCK_PACKET": true, + "SOCK_RAW": true, + "SOCK_RDM": true, + "SOCK_SEQPACKET": true, + "SOCK_STREAM": true, + "SOL_AAL": true, + "SOL_ATM": true, + "SOL_DECNET": true, + "SOL_ICMPV6": true, + "SOL_IP": true, + "SOL_IPV6": true, + "SOL_IRDA": true, + "SOL_PACKET": true, + "SOL_RAW": true, + "SOL_SOCKET": true, + "SOL_TCP": true, + "SOL_X25": true, + "SOMAXCONN": true, + "SO_ACCEPTCONN": true, + "SO_ACCEPTFILTER": true, + "SO_ATTACH_FILTER": true, + "SO_BINDANY": true, + "SO_BINDTODEVICE": true, + "SO_BINTIME": true, + "SO_BROADCAST": true, + "SO_BSDCOMPAT": true, + "SO_DEBUG": true, + "SO_DETACH_FILTER": true, + "SO_DOMAIN": true, + "SO_DONTROUTE": true, + "SO_DONTTRUNC": true, + "SO_ERROR": true, + "SO_KEEPALIVE": true, + "SO_LABEL": true, + "SO_LINGER": true, + "SO_LINGER_SEC": true, + "SO_LISTENINCQLEN": true, + "SO_LISTENQLEN": true, + "SO_LISTENQLIMIT": true, + "SO_MARK": true, + "SO_NETPROC": true, + "SO_NKE": true, + "SO_NOADDRERR": true, + "SO_NOHEADER": true, + "SO_NOSIGPIPE": true, + "SO_NOTIFYCONFLICT": true, + "SO_NO_CHECK": true, + "SO_NO_DDP": true, + "SO_NO_OFFLOAD": true, + "SO_NP_EXTENSIONS": true, + "SO_NREAD": true, + "SO_NWRITE": true, + "SO_OOBINLINE": true, + "SO_OVERFLOWED": true, + "SO_PASSCRED": true, + "SO_PASSSEC": true, + "SO_PEERCRED": true, + "SO_PEERLABEL": true, + "SO_PEERNAME": true, + "SO_PEERSEC": true, + "SO_PRIORITY": true, + "SO_PROTOCOL": true, + "SO_PROTOTYPE": true, + "SO_RANDOMPORT": true, + "SO_RCVBUF": true, + "SO_RCVBUFFORCE": true, + "SO_RCVLOWAT": true, + "SO_RCVTIMEO": true, + "SO_RESTRICTIONS": true, + "SO_RESTRICT_DENYIN": true, + "SO_RESTRICT_DENYOUT": true, + "SO_RESTRICT_DENYSET": true, + "SO_REUSEADDR": true, + "SO_REUSEPORT": true, + "SO_REUSESHAREUID": true, + "SO_RTABLE": true, + "SO_RXQ_OVFL": true, + "SO_SECURITY_AUTHENTICATION": true, + "SO_SECURITY_ENCRYPTION_NETWORK": true, + "SO_SECURITY_ENCRYPTION_TRANSPORT": true, + "SO_SETFIB": true, + "SO_SNDBUF": true, + "SO_SNDBUFFORCE": true, + "SO_SNDLOWAT": true, + "SO_SNDTIMEO": true, + "SO_SPLICE": true, + "SO_TIMESTAMP": true, + "SO_TIMESTAMPING": true, + "SO_TIMESTAMPNS": true, + "SO_TIMESTAMP_MONOTONIC": true, + "SO_TYPE": true, + "SO_UPCALLCLOSEWAIT": true, + "SO_UPDATE_ACCEPT_CONTEXT": true, + "SO_UPDATE_CONNECT_CONTEXT": true, + "SO_USELOOPBACK": true, + "SO_USER_COOKIE": true, + "SO_VENDOR": true, + "SO_WANTMORE": true, + "SO_WANTOOBFLAG": true, + "SSLExtraCertChainPolicyPara": true, + "STANDARD_RIGHTS_ALL": true, + "STANDARD_RIGHTS_EXECUTE": true, + "STANDARD_RIGHTS_READ": true, + "STANDARD_RIGHTS_REQUIRED": true, + "STANDARD_RIGHTS_WRITE": true, + "STARTF_USESHOWWINDOW": true, + 
"STARTF_USESTDHANDLES": true, + "STD_ERROR_HANDLE": true, + "STD_INPUT_HANDLE": true, + "STD_OUTPUT_HANDLE": true, + "SUBLANG_ENGLISH_US": true, + "SW_FORCEMINIMIZE": true, + "SW_HIDE": true, + "SW_MAXIMIZE": true, + "SW_MINIMIZE": true, + "SW_NORMAL": true, + "SW_RESTORE": true, + "SW_SHOW": true, + "SW_SHOWDEFAULT": true, + "SW_SHOWMAXIMIZED": true, + "SW_SHOWMINIMIZED": true, + "SW_SHOWMINNOACTIVE": true, + "SW_SHOWNA": true, + "SW_SHOWNOACTIVATE": true, + "SW_SHOWNORMAL": true, + "SYMBOLIC_LINK_FLAG_DIRECTORY": true, + "SYNCHRONIZE": true, + "SYSCTL_VERSION": true, + "SYSCTL_VERS_0": true, + "SYSCTL_VERS_1": true, + "SYSCTL_VERS_MASK": true, + "SYS_ABORT2": true, + "SYS_ACCEPT": true, + "SYS_ACCEPT4": true, + "SYS_ACCEPT_NOCANCEL": true, + "SYS_ACCESS": true, + "SYS_ACCESS_EXTENDED": true, + "SYS_ACCT": true, + "SYS_ADD_KEY": true, + "SYS_ADD_PROFIL": true, + "SYS_ADJFREQ": true, + "SYS_ADJTIME": true, + "SYS_ADJTIMEX": true, + "SYS_AFS_SYSCALL": true, + "SYS_AIO_CANCEL": true, + "SYS_AIO_ERROR": true, + "SYS_AIO_FSYNC": true, + "SYS_AIO_READ": true, + "SYS_AIO_RETURN": true, + "SYS_AIO_SUSPEND": true, + "SYS_AIO_SUSPEND_NOCANCEL": true, + "SYS_AIO_WRITE": true, + "SYS_ALARM": true, + "SYS_ARCH_PRCTL": true, + "SYS_ARM_FADVISE64_64": true, + "SYS_ARM_SYNC_FILE_RANGE": true, + "SYS_ATGETMSG": true, + "SYS_ATPGETREQ": true, + "SYS_ATPGETRSP": true, + "SYS_ATPSNDREQ": true, + "SYS_ATPSNDRSP": true, + "SYS_ATPUTMSG": true, + "SYS_ATSOCKET": true, + "SYS_AUDIT": true, + "SYS_AUDITCTL": true, + "SYS_AUDITON": true, + "SYS_AUDIT_SESSION_JOIN": true, + "SYS_AUDIT_SESSION_PORT": true, + "SYS_AUDIT_SESSION_SELF": true, + "SYS_BDFLUSH": true, + "SYS_BIND": true, + "SYS_BINDAT": true, + "SYS_BREAK": true, + "SYS_BRK": true, + "SYS_BSDTHREAD_CREATE": true, + "SYS_BSDTHREAD_REGISTER": true, + "SYS_BSDTHREAD_TERMINATE": true, + "SYS_CAPGET": true, + "SYS_CAPSET": true, + "SYS_CAP_ENTER": true, + "SYS_CAP_FCNTLS_GET": true, + "SYS_CAP_FCNTLS_LIMIT": true, + "SYS_CAP_GETMODE": true, + "SYS_CAP_GETRIGHTS": true, + "SYS_CAP_IOCTLS_GET": true, + "SYS_CAP_IOCTLS_LIMIT": true, + "SYS_CAP_NEW": true, + "SYS_CAP_RIGHTS_GET": true, + "SYS_CAP_RIGHTS_LIMIT": true, + "SYS_CHDIR": true, + "SYS_CHFLAGS": true, + "SYS_CHFLAGSAT": true, + "SYS_CHMOD": true, + "SYS_CHMOD_EXTENDED": true, + "SYS_CHOWN": true, + "SYS_CHOWN32": true, + "SYS_CHROOT": true, + "SYS_CHUD": true, + "SYS_CLOCK_ADJTIME": true, + "SYS_CLOCK_GETCPUCLOCKID2": true, + "SYS_CLOCK_GETRES": true, + "SYS_CLOCK_GETTIME": true, + "SYS_CLOCK_NANOSLEEP": true, + "SYS_CLOCK_SETTIME": true, + "SYS_CLONE": true, + "SYS_CLOSE": true, + "SYS_CLOSEFROM": true, + "SYS_CLOSE_NOCANCEL": true, + "SYS_CONNECT": true, + "SYS_CONNECTAT": true, + "SYS_CONNECT_NOCANCEL": true, + "SYS_COPYFILE": true, + "SYS_CPUSET": true, + "SYS_CPUSET_GETAFFINITY": true, + "SYS_CPUSET_GETID": true, + "SYS_CPUSET_SETAFFINITY": true, + "SYS_CPUSET_SETID": true, + "SYS_CREAT": true, + "SYS_CREATE_MODULE": true, + "SYS_CSOPS": true, + "SYS_DELETE": true, + "SYS_DELETE_MODULE": true, + "SYS_DUP": true, + "SYS_DUP2": true, + "SYS_DUP3": true, + "SYS_EACCESS": true, + "SYS_EPOLL_CREATE": true, + "SYS_EPOLL_CREATE1": true, + "SYS_EPOLL_CTL": true, + "SYS_EPOLL_CTL_OLD": true, + "SYS_EPOLL_PWAIT": true, + "SYS_EPOLL_WAIT": true, + "SYS_EPOLL_WAIT_OLD": true, + "SYS_EVENTFD": true, + "SYS_EVENTFD2": true, + "SYS_EXCHANGEDATA": true, + "SYS_EXECVE": true, + "SYS_EXIT": true, + "SYS_EXIT_GROUP": true, + "SYS_EXTATTRCTL": true, + "SYS_EXTATTR_DELETE_FD": true, + "SYS_EXTATTR_DELETE_FILE": true, + 
"SYS_EXTATTR_DELETE_LINK": true, + "SYS_EXTATTR_GET_FD": true, + "SYS_EXTATTR_GET_FILE": true, + "SYS_EXTATTR_GET_LINK": true, + "SYS_EXTATTR_LIST_FD": true, + "SYS_EXTATTR_LIST_FILE": true, + "SYS_EXTATTR_LIST_LINK": true, + "SYS_EXTATTR_SET_FD": true, + "SYS_EXTATTR_SET_FILE": true, + "SYS_EXTATTR_SET_LINK": true, + "SYS_FACCESSAT": true, + "SYS_FADVISE64": true, + "SYS_FADVISE64_64": true, + "SYS_FALLOCATE": true, + "SYS_FANOTIFY_INIT": true, + "SYS_FANOTIFY_MARK": true, + "SYS_FCHDIR": true, + "SYS_FCHFLAGS": true, + "SYS_FCHMOD": true, + "SYS_FCHMODAT": true, + "SYS_FCHMOD_EXTENDED": true, + "SYS_FCHOWN": true, + "SYS_FCHOWN32": true, + "SYS_FCHOWNAT": true, + "SYS_FCHROOT": true, + "SYS_FCNTL": true, + "SYS_FCNTL64": true, + "SYS_FCNTL_NOCANCEL": true, + "SYS_FDATASYNC": true, + "SYS_FEXECVE": true, + "SYS_FFCLOCK_GETCOUNTER": true, + "SYS_FFCLOCK_GETESTIMATE": true, + "SYS_FFCLOCK_SETESTIMATE": true, + "SYS_FFSCTL": true, + "SYS_FGETATTRLIST": true, + "SYS_FGETXATTR": true, + "SYS_FHOPEN": true, + "SYS_FHSTAT": true, + "SYS_FHSTATFS": true, + "SYS_FILEPORT_MAKEFD": true, + "SYS_FILEPORT_MAKEPORT": true, + "SYS_FKTRACE": true, + "SYS_FLISTXATTR": true, + "SYS_FLOCK": true, + "SYS_FORK": true, + "SYS_FPATHCONF": true, + "SYS_FREEBSD6_FTRUNCATE": true, + "SYS_FREEBSD6_LSEEK": true, + "SYS_FREEBSD6_MMAP": true, + "SYS_FREEBSD6_PREAD": true, + "SYS_FREEBSD6_PWRITE": true, + "SYS_FREEBSD6_TRUNCATE": true, + "SYS_FREMOVEXATTR": true, + "SYS_FSCTL": true, + "SYS_FSETATTRLIST": true, + "SYS_FSETXATTR": true, + "SYS_FSGETPATH": true, + "SYS_FSTAT": true, + "SYS_FSTAT64": true, + "SYS_FSTAT64_EXTENDED": true, + "SYS_FSTATAT": true, + "SYS_FSTATAT64": true, + "SYS_FSTATFS": true, + "SYS_FSTATFS64": true, + "SYS_FSTATV": true, + "SYS_FSTATVFS1": true, + "SYS_FSTAT_EXTENDED": true, + "SYS_FSYNC": true, + "SYS_FSYNC_NOCANCEL": true, + "SYS_FSYNC_RANGE": true, + "SYS_FTIME": true, + "SYS_FTRUNCATE": true, + "SYS_FTRUNCATE64": true, + "SYS_FUTEX": true, + "SYS_FUTIMENS": true, + "SYS_FUTIMES": true, + "SYS_FUTIMESAT": true, + "SYS_GETATTRLIST": true, + "SYS_GETAUDIT": true, + "SYS_GETAUDIT_ADDR": true, + "SYS_GETAUID": true, + "SYS_GETCONTEXT": true, + "SYS_GETCPU": true, + "SYS_GETCWD": true, + "SYS_GETDENTS": true, + "SYS_GETDENTS64": true, + "SYS_GETDIRENTRIES": true, + "SYS_GETDIRENTRIES64": true, + "SYS_GETDIRENTRIESATTR": true, + "SYS_GETDTABLECOUNT": true, + "SYS_GETDTABLESIZE": true, + "SYS_GETEGID": true, + "SYS_GETEGID32": true, + "SYS_GETEUID": true, + "SYS_GETEUID32": true, + "SYS_GETFH": true, + "SYS_GETFSSTAT": true, + "SYS_GETFSSTAT64": true, + "SYS_GETGID": true, + "SYS_GETGID32": true, + "SYS_GETGROUPS": true, + "SYS_GETGROUPS32": true, + "SYS_GETHOSTUUID": true, + "SYS_GETITIMER": true, + "SYS_GETLCID": true, + "SYS_GETLOGIN": true, + "SYS_GETLOGINCLASS": true, + "SYS_GETPEERNAME": true, + "SYS_GETPGID": true, + "SYS_GETPGRP": true, + "SYS_GETPID": true, + "SYS_GETPMSG": true, + "SYS_GETPPID": true, + "SYS_GETPRIORITY": true, + "SYS_GETRESGID": true, + "SYS_GETRESGID32": true, + "SYS_GETRESUID": true, + "SYS_GETRESUID32": true, + "SYS_GETRLIMIT": true, + "SYS_GETRTABLE": true, + "SYS_GETRUSAGE": true, + "SYS_GETSGROUPS": true, + "SYS_GETSID": true, + "SYS_GETSOCKNAME": true, + "SYS_GETSOCKOPT": true, + "SYS_GETTHRID": true, + "SYS_GETTID": true, + "SYS_GETTIMEOFDAY": true, + "SYS_GETUID": true, + "SYS_GETUID32": true, + "SYS_GETVFSSTAT": true, + "SYS_GETWGROUPS": true, + "SYS_GETXATTR": true, + "SYS_GET_KERNEL_SYMS": true, + "SYS_GET_MEMPOLICY": true, + "SYS_GET_ROBUST_LIST": true, 
+ "SYS_GET_THREAD_AREA": true, + "SYS_GTTY": true, + "SYS_IDENTITYSVC": true, + "SYS_IDLE": true, + "SYS_INITGROUPS": true, + "SYS_INIT_MODULE": true, + "SYS_INOTIFY_ADD_WATCH": true, + "SYS_INOTIFY_INIT": true, + "SYS_INOTIFY_INIT1": true, + "SYS_INOTIFY_RM_WATCH": true, + "SYS_IOCTL": true, + "SYS_IOPERM": true, + "SYS_IOPL": true, + "SYS_IOPOLICYSYS": true, + "SYS_IOPRIO_GET": true, + "SYS_IOPRIO_SET": true, + "SYS_IO_CANCEL": true, + "SYS_IO_DESTROY": true, + "SYS_IO_GETEVENTS": true, + "SYS_IO_SETUP": true, + "SYS_IO_SUBMIT": true, + "SYS_IPC": true, + "SYS_ISSETUGID": true, + "SYS_JAIL": true, + "SYS_JAIL_ATTACH": true, + "SYS_JAIL_GET": true, + "SYS_JAIL_REMOVE": true, + "SYS_JAIL_SET": true, + "SYS_KDEBUG_TRACE": true, + "SYS_KENV": true, + "SYS_KEVENT": true, + "SYS_KEVENT64": true, + "SYS_KEXEC_LOAD": true, + "SYS_KEYCTL": true, + "SYS_KILL": true, + "SYS_KLDFIND": true, + "SYS_KLDFIRSTMOD": true, + "SYS_KLDLOAD": true, + "SYS_KLDNEXT": true, + "SYS_KLDSTAT": true, + "SYS_KLDSYM": true, + "SYS_KLDUNLOAD": true, + "SYS_KLDUNLOADF": true, + "SYS_KQUEUE": true, + "SYS_KQUEUE1": true, + "SYS_KTIMER_CREATE": true, + "SYS_KTIMER_DELETE": true, + "SYS_KTIMER_GETOVERRUN": true, + "SYS_KTIMER_GETTIME": true, + "SYS_KTIMER_SETTIME": true, + "SYS_KTRACE": true, + "SYS_LCHFLAGS": true, + "SYS_LCHMOD": true, + "SYS_LCHOWN": true, + "SYS_LCHOWN32": true, + "SYS_LGETFH": true, + "SYS_LGETXATTR": true, + "SYS_LINK": true, + "SYS_LINKAT": true, + "SYS_LIO_LISTIO": true, + "SYS_LISTEN": true, + "SYS_LISTXATTR": true, + "SYS_LLISTXATTR": true, + "SYS_LOCK": true, + "SYS_LOOKUP_DCOOKIE": true, + "SYS_LPATHCONF": true, + "SYS_LREMOVEXATTR": true, + "SYS_LSEEK": true, + "SYS_LSETXATTR": true, + "SYS_LSTAT": true, + "SYS_LSTAT64": true, + "SYS_LSTAT64_EXTENDED": true, + "SYS_LSTATV": true, + "SYS_LSTAT_EXTENDED": true, + "SYS_LUTIMES": true, + "SYS_MAC_SYSCALL": true, + "SYS_MADVISE": true, + "SYS_MADVISE1": true, + "SYS_MAXSYSCALL": true, + "SYS_MBIND": true, + "SYS_MIGRATE_PAGES": true, + "SYS_MINCORE": true, + "SYS_MINHERIT": true, + "SYS_MKCOMPLEX": true, + "SYS_MKDIR": true, + "SYS_MKDIRAT": true, + "SYS_MKDIR_EXTENDED": true, + "SYS_MKFIFO": true, + "SYS_MKFIFOAT": true, + "SYS_MKFIFO_EXTENDED": true, + "SYS_MKNOD": true, + "SYS_MKNODAT": true, + "SYS_MLOCK": true, + "SYS_MLOCKALL": true, + "SYS_MMAP": true, + "SYS_MMAP2": true, + "SYS_MODCTL": true, + "SYS_MODFIND": true, + "SYS_MODFNEXT": true, + "SYS_MODIFY_LDT": true, + "SYS_MODNEXT": true, + "SYS_MODSTAT": true, + "SYS_MODWATCH": true, + "SYS_MOUNT": true, + "SYS_MOVE_PAGES": true, + "SYS_MPROTECT": true, + "SYS_MPX": true, + "SYS_MQUERY": true, + "SYS_MQ_GETSETATTR": true, + "SYS_MQ_NOTIFY": true, + "SYS_MQ_OPEN": true, + "SYS_MQ_TIMEDRECEIVE": true, + "SYS_MQ_TIMEDSEND": true, + "SYS_MQ_UNLINK": true, + "SYS_MREMAP": true, + "SYS_MSGCTL": true, + "SYS_MSGGET": true, + "SYS_MSGRCV": true, + "SYS_MSGRCV_NOCANCEL": true, + "SYS_MSGSND": true, + "SYS_MSGSND_NOCANCEL": true, + "SYS_MSGSYS": true, + "SYS_MSYNC": true, + "SYS_MSYNC_NOCANCEL": true, + "SYS_MUNLOCK": true, + "SYS_MUNLOCKALL": true, + "SYS_MUNMAP": true, + "SYS_NAME_TO_HANDLE_AT": true, + "SYS_NANOSLEEP": true, + "SYS_NEWFSTATAT": true, + "SYS_NFSCLNT": true, + "SYS_NFSSERVCTL": true, + "SYS_NFSSVC": true, + "SYS_NFSTAT": true, + "SYS_NICE": true, + "SYS_NLSTAT": true, + "SYS_NMOUNT": true, + "SYS_NSTAT": true, + "SYS_NTP_ADJTIME": true, + "SYS_NTP_GETTIME": true, + "SYS_OABI_SYSCALL_BASE": true, + "SYS_OBREAK": true, + "SYS_OLDFSTAT": true, + "SYS_OLDLSTAT": true, + 
"SYS_OLDOLDUNAME": true, + "SYS_OLDSTAT": true, + "SYS_OLDUNAME": true, + "SYS_OPEN": true, + "SYS_OPENAT": true, + "SYS_OPENBSD_POLL": true, + "SYS_OPEN_BY_HANDLE_AT": true, + "SYS_OPEN_EXTENDED": true, + "SYS_OPEN_NOCANCEL": true, + "SYS_OVADVISE": true, + "SYS_PACCEPT": true, + "SYS_PATHCONF": true, + "SYS_PAUSE": true, + "SYS_PCICONFIG_IOBASE": true, + "SYS_PCICONFIG_READ": true, + "SYS_PCICONFIG_WRITE": true, + "SYS_PDFORK": true, + "SYS_PDGETPID": true, + "SYS_PDKILL": true, + "SYS_PERF_EVENT_OPEN": true, + "SYS_PERSONALITY": true, + "SYS_PID_HIBERNATE": true, + "SYS_PID_RESUME": true, + "SYS_PID_SHUTDOWN_SOCKETS": true, + "SYS_PID_SUSPEND": true, + "SYS_PIPE": true, + "SYS_PIPE2": true, + "SYS_PIVOT_ROOT": true, + "SYS_PMC_CONTROL": true, + "SYS_PMC_GET_INFO": true, + "SYS_POLL": true, + "SYS_POLLTS": true, + "SYS_POLL_NOCANCEL": true, + "SYS_POSIX_FADVISE": true, + "SYS_POSIX_FALLOCATE": true, + "SYS_POSIX_OPENPT": true, + "SYS_POSIX_SPAWN": true, + "SYS_PPOLL": true, + "SYS_PRCTL": true, + "SYS_PREAD": true, + "SYS_PREAD64": true, + "SYS_PREADV": true, + "SYS_PREAD_NOCANCEL": true, + "SYS_PRLIMIT64": true, + "SYS_PROCCTL": true, + "SYS_PROCESS_POLICY": true, + "SYS_PROCESS_VM_READV": true, + "SYS_PROCESS_VM_WRITEV": true, + "SYS_PROC_INFO": true, + "SYS_PROF": true, + "SYS_PROFIL": true, + "SYS_PSELECT": true, + "SYS_PSELECT6": true, + "SYS_PSET_ASSIGN": true, + "SYS_PSET_CREATE": true, + "SYS_PSET_DESTROY": true, + "SYS_PSYNCH_CVBROAD": true, + "SYS_PSYNCH_CVCLRPREPOST": true, + "SYS_PSYNCH_CVSIGNAL": true, + "SYS_PSYNCH_CVWAIT": true, + "SYS_PSYNCH_MUTEXDROP": true, + "SYS_PSYNCH_MUTEXWAIT": true, + "SYS_PSYNCH_RW_DOWNGRADE": true, + "SYS_PSYNCH_RW_LONGRDLOCK": true, + "SYS_PSYNCH_RW_RDLOCK": true, + "SYS_PSYNCH_RW_UNLOCK": true, + "SYS_PSYNCH_RW_UNLOCK2": true, + "SYS_PSYNCH_RW_UPGRADE": true, + "SYS_PSYNCH_RW_WRLOCK": true, + "SYS_PSYNCH_RW_YIELDWRLOCK": true, + "SYS_PTRACE": true, + "SYS_PUTPMSG": true, + "SYS_PWRITE": true, + "SYS_PWRITE64": true, + "SYS_PWRITEV": true, + "SYS_PWRITE_NOCANCEL": true, + "SYS_QUERY_MODULE": true, + "SYS_QUOTACTL": true, + "SYS_RASCTL": true, + "SYS_RCTL_ADD_RULE": true, + "SYS_RCTL_GET_LIMITS": true, + "SYS_RCTL_GET_RACCT": true, + "SYS_RCTL_GET_RULES": true, + "SYS_RCTL_REMOVE_RULE": true, + "SYS_READ": true, + "SYS_READAHEAD": true, + "SYS_READDIR": true, + "SYS_READLINK": true, + "SYS_READLINKAT": true, + "SYS_READV": true, + "SYS_READV_NOCANCEL": true, + "SYS_READ_NOCANCEL": true, + "SYS_REBOOT": true, + "SYS_RECV": true, + "SYS_RECVFROM": true, + "SYS_RECVFROM_NOCANCEL": true, + "SYS_RECVMMSG": true, + "SYS_RECVMSG": true, + "SYS_RECVMSG_NOCANCEL": true, + "SYS_REMAP_FILE_PAGES": true, + "SYS_REMOVEXATTR": true, + "SYS_RENAME": true, + "SYS_RENAMEAT": true, + "SYS_REQUEST_KEY": true, + "SYS_RESTART_SYSCALL": true, + "SYS_REVOKE": true, + "SYS_RFORK": true, + "SYS_RMDIR": true, + "SYS_RTPRIO": true, + "SYS_RTPRIO_THREAD": true, + "SYS_RT_SIGACTION": true, + "SYS_RT_SIGPENDING": true, + "SYS_RT_SIGPROCMASK": true, + "SYS_RT_SIGQUEUEINFO": true, + "SYS_RT_SIGRETURN": true, + "SYS_RT_SIGSUSPEND": true, + "SYS_RT_SIGTIMEDWAIT": true, + "SYS_RT_TGSIGQUEUEINFO": true, + "SYS_SBRK": true, + "SYS_SCHED_GETAFFINITY": true, + "SYS_SCHED_GETPARAM": true, + "SYS_SCHED_GETSCHEDULER": true, + "SYS_SCHED_GET_PRIORITY_MAX": true, + "SYS_SCHED_GET_PRIORITY_MIN": true, + "SYS_SCHED_RR_GET_INTERVAL": true, + "SYS_SCHED_SETAFFINITY": true, + "SYS_SCHED_SETPARAM": true, + "SYS_SCHED_SETSCHEDULER": true, + "SYS_SCHED_YIELD": true, + "SYS_SCTP_GENERIC_RECVMSG": 
true, + "SYS_SCTP_GENERIC_SENDMSG": true, + "SYS_SCTP_GENERIC_SENDMSG_IOV": true, + "SYS_SCTP_PEELOFF": true, + "SYS_SEARCHFS": true, + "SYS_SECURITY": true, + "SYS_SELECT": true, + "SYS_SELECT_NOCANCEL": true, + "SYS_SEMCONFIG": true, + "SYS_SEMCTL": true, + "SYS_SEMGET": true, + "SYS_SEMOP": true, + "SYS_SEMSYS": true, + "SYS_SEMTIMEDOP": true, + "SYS_SEM_CLOSE": true, + "SYS_SEM_DESTROY": true, + "SYS_SEM_GETVALUE": true, + "SYS_SEM_INIT": true, + "SYS_SEM_OPEN": true, + "SYS_SEM_POST": true, + "SYS_SEM_TRYWAIT": true, + "SYS_SEM_UNLINK": true, + "SYS_SEM_WAIT": true, + "SYS_SEM_WAIT_NOCANCEL": true, + "SYS_SEND": true, + "SYS_SENDFILE": true, + "SYS_SENDFILE64": true, + "SYS_SENDMMSG": true, + "SYS_SENDMSG": true, + "SYS_SENDMSG_NOCANCEL": true, + "SYS_SENDTO": true, + "SYS_SENDTO_NOCANCEL": true, + "SYS_SETATTRLIST": true, + "SYS_SETAUDIT": true, + "SYS_SETAUDIT_ADDR": true, + "SYS_SETAUID": true, + "SYS_SETCONTEXT": true, + "SYS_SETDOMAINNAME": true, + "SYS_SETEGID": true, + "SYS_SETEUID": true, + "SYS_SETFIB": true, + "SYS_SETFSGID": true, + "SYS_SETFSGID32": true, + "SYS_SETFSUID": true, + "SYS_SETFSUID32": true, + "SYS_SETGID": true, + "SYS_SETGID32": true, + "SYS_SETGROUPS": true, + "SYS_SETGROUPS32": true, + "SYS_SETHOSTNAME": true, + "SYS_SETITIMER": true, + "SYS_SETLCID": true, + "SYS_SETLOGIN": true, + "SYS_SETLOGINCLASS": true, + "SYS_SETNS": true, + "SYS_SETPGID": true, + "SYS_SETPRIORITY": true, + "SYS_SETPRIVEXEC": true, + "SYS_SETREGID": true, + "SYS_SETREGID32": true, + "SYS_SETRESGID": true, + "SYS_SETRESGID32": true, + "SYS_SETRESUID": true, + "SYS_SETRESUID32": true, + "SYS_SETREUID": true, + "SYS_SETREUID32": true, + "SYS_SETRLIMIT": true, + "SYS_SETRTABLE": true, + "SYS_SETSGROUPS": true, + "SYS_SETSID": true, + "SYS_SETSOCKOPT": true, + "SYS_SETTID": true, + "SYS_SETTID_WITH_PID": true, + "SYS_SETTIMEOFDAY": true, + "SYS_SETUID": true, + "SYS_SETUID32": true, + "SYS_SETWGROUPS": true, + "SYS_SETXATTR": true, + "SYS_SET_MEMPOLICY": true, + "SYS_SET_ROBUST_LIST": true, + "SYS_SET_THREAD_AREA": true, + "SYS_SET_TID_ADDRESS": true, + "SYS_SGETMASK": true, + "SYS_SHARED_REGION_CHECK_NP": true, + "SYS_SHARED_REGION_MAP_AND_SLIDE_NP": true, + "SYS_SHMAT": true, + "SYS_SHMCTL": true, + "SYS_SHMDT": true, + "SYS_SHMGET": true, + "SYS_SHMSYS": true, + "SYS_SHM_OPEN": true, + "SYS_SHM_UNLINK": true, + "SYS_SHUTDOWN": true, + "SYS_SIGACTION": true, + "SYS_SIGALTSTACK": true, + "SYS_SIGNAL": true, + "SYS_SIGNALFD": true, + "SYS_SIGNALFD4": true, + "SYS_SIGPENDING": true, + "SYS_SIGPROCMASK": true, + "SYS_SIGQUEUE": true, + "SYS_SIGQUEUEINFO": true, + "SYS_SIGRETURN": true, + "SYS_SIGSUSPEND": true, + "SYS_SIGSUSPEND_NOCANCEL": true, + "SYS_SIGTIMEDWAIT": true, + "SYS_SIGWAIT": true, + "SYS_SIGWAITINFO": true, + "SYS_SOCKET": true, + "SYS_SOCKETCALL": true, + "SYS_SOCKETPAIR": true, + "SYS_SPLICE": true, + "SYS_SSETMASK": true, + "SYS_SSTK": true, + "SYS_STACK_SNAPSHOT": true, + "SYS_STAT": true, + "SYS_STAT64": true, + "SYS_STAT64_EXTENDED": true, + "SYS_STATFS": true, + "SYS_STATFS64": true, + "SYS_STATV": true, + "SYS_STATVFS1": true, + "SYS_STAT_EXTENDED": true, + "SYS_STIME": true, + "SYS_STTY": true, + "SYS_SWAPCONTEXT": true, + "SYS_SWAPCTL": true, + "SYS_SWAPOFF": true, + "SYS_SWAPON": true, + "SYS_SYMLINK": true, + "SYS_SYMLINKAT": true, + "SYS_SYNC": true, + "SYS_SYNCFS": true, + "SYS_SYNC_FILE_RANGE": true, + "SYS_SYSARCH": true, + "SYS_SYSCALL": true, + "SYS_SYSCALL_BASE": true, + "SYS_SYSFS": true, + "SYS_SYSINFO": true, + "SYS_SYSLOG": true, + "SYS_TEE": true, + 
"SYS_TGKILL": true, + "SYS_THREAD_SELFID": true, + "SYS_THR_CREATE": true, + "SYS_THR_EXIT": true, + "SYS_THR_KILL": true, + "SYS_THR_KILL2": true, + "SYS_THR_NEW": true, + "SYS_THR_SELF": true, + "SYS_THR_SET_NAME": true, + "SYS_THR_SUSPEND": true, + "SYS_THR_WAKE": true, + "SYS_TIME": true, + "SYS_TIMERFD_CREATE": true, + "SYS_TIMERFD_GETTIME": true, + "SYS_TIMERFD_SETTIME": true, + "SYS_TIMER_CREATE": true, + "SYS_TIMER_DELETE": true, + "SYS_TIMER_GETOVERRUN": true, + "SYS_TIMER_GETTIME": true, + "SYS_TIMER_SETTIME": true, + "SYS_TIMES": true, + "SYS_TKILL": true, + "SYS_TRUNCATE": true, + "SYS_TRUNCATE64": true, + "SYS_TUXCALL": true, + "SYS_UGETRLIMIT": true, + "SYS_ULIMIT": true, + "SYS_UMASK": true, + "SYS_UMASK_EXTENDED": true, + "SYS_UMOUNT": true, + "SYS_UMOUNT2": true, + "SYS_UNAME": true, + "SYS_UNDELETE": true, + "SYS_UNLINK": true, + "SYS_UNLINKAT": true, + "SYS_UNMOUNT": true, + "SYS_UNSHARE": true, + "SYS_USELIB": true, + "SYS_USTAT": true, + "SYS_UTIME": true, + "SYS_UTIMENSAT": true, + "SYS_UTIMES": true, + "SYS_UTRACE": true, + "SYS_UUIDGEN": true, + "SYS_VADVISE": true, + "SYS_VFORK": true, + "SYS_VHANGUP": true, + "SYS_VM86": true, + "SYS_VM86OLD": true, + "SYS_VMSPLICE": true, + "SYS_VM_PRESSURE_MONITOR": true, + "SYS_VSERVER": true, + "SYS_WAIT4": true, + "SYS_WAIT4_NOCANCEL": true, + "SYS_WAIT6": true, + "SYS_WAITEVENT": true, + "SYS_WAITID": true, + "SYS_WAITID_NOCANCEL": true, + "SYS_WAITPID": true, + "SYS_WATCHEVENT": true, + "SYS_WORKQ_KERNRETURN": true, + "SYS_WORKQ_OPEN": true, + "SYS_WRITE": true, + "SYS_WRITEV": true, + "SYS_WRITEV_NOCANCEL": true, + "SYS_WRITE_NOCANCEL": true, + "SYS_YIELD": true, + "SYS__LLSEEK": true, + "SYS__LWP_CONTINUE": true, + "SYS__LWP_CREATE": true, + "SYS__LWP_CTL": true, + "SYS__LWP_DETACH": true, + "SYS__LWP_EXIT": true, + "SYS__LWP_GETNAME": true, + "SYS__LWP_GETPRIVATE": true, + "SYS__LWP_KILL": true, + "SYS__LWP_PARK": true, + "SYS__LWP_SELF": true, + "SYS__LWP_SETNAME": true, + "SYS__LWP_SETPRIVATE": true, + "SYS__LWP_SUSPEND": true, + "SYS__LWP_UNPARK": true, + "SYS__LWP_UNPARK_ALL": true, + "SYS__LWP_WAIT": true, + "SYS__LWP_WAKEUP": true, + "SYS__NEWSELECT": true, + "SYS__PSET_BIND": true, + "SYS__SCHED_GETAFFINITY": true, + "SYS__SCHED_GETPARAM": true, + "SYS__SCHED_SETAFFINITY": true, + "SYS__SCHED_SETPARAM": true, + "SYS__SYSCTL": true, + "SYS__UMTX_LOCK": true, + "SYS__UMTX_OP": true, + "SYS__UMTX_UNLOCK": true, + "SYS___ACL_ACLCHECK_FD": true, + "SYS___ACL_ACLCHECK_FILE": true, + "SYS___ACL_ACLCHECK_LINK": true, + "SYS___ACL_DELETE_FD": true, + "SYS___ACL_DELETE_FILE": true, + "SYS___ACL_DELETE_LINK": true, + "SYS___ACL_GET_FD": true, + "SYS___ACL_GET_FILE": true, + "SYS___ACL_GET_LINK": true, + "SYS___ACL_SET_FD": true, + "SYS___ACL_SET_FILE": true, + "SYS___ACL_SET_LINK": true, + "SYS___CLONE": true, + "SYS___DISABLE_THREADSIGNAL": true, + "SYS___GETCWD": true, + "SYS___GETLOGIN": true, + "SYS___GET_TCB": true, + "SYS___MAC_EXECVE": true, + "SYS___MAC_GETFSSTAT": true, + "SYS___MAC_GET_FD": true, + "SYS___MAC_GET_FILE": true, + "SYS___MAC_GET_LCID": true, + "SYS___MAC_GET_LCTX": true, + "SYS___MAC_GET_LINK": true, + "SYS___MAC_GET_MOUNT": true, + "SYS___MAC_GET_PID": true, + "SYS___MAC_GET_PROC": true, + "SYS___MAC_MOUNT": true, + "SYS___MAC_SET_FD": true, + "SYS___MAC_SET_FILE": true, + "SYS___MAC_SET_LCTX": true, + "SYS___MAC_SET_LINK": true, + "SYS___MAC_SET_PROC": true, + "SYS___MAC_SYSCALL": true, + "SYS___OLD_SEMWAIT_SIGNAL": true, + "SYS___OLD_SEMWAIT_SIGNAL_NOCANCEL": true, + "SYS___POSIX_CHOWN": true, + 
"SYS___POSIX_FCHOWN": true, + "SYS___POSIX_LCHOWN": true, + "SYS___POSIX_RENAME": true, + "SYS___PTHREAD_CANCELED": true, + "SYS___PTHREAD_CHDIR": true, + "SYS___PTHREAD_FCHDIR": true, + "SYS___PTHREAD_KILL": true, + "SYS___PTHREAD_MARKCANCEL": true, + "SYS___PTHREAD_SIGMASK": true, + "SYS___QUOTACTL": true, + "SYS___SEMCTL": true, + "SYS___SEMWAIT_SIGNAL": true, + "SYS___SEMWAIT_SIGNAL_NOCANCEL": true, + "SYS___SETLOGIN": true, + "SYS___SETUGID": true, + "SYS___SET_TCB": true, + "SYS___SIGACTION_SIGTRAMP": true, + "SYS___SIGTIMEDWAIT": true, + "SYS___SIGWAIT": true, + "SYS___SIGWAIT_NOCANCEL": true, + "SYS___SYSCTL": true, + "SYS___TFORK": true, + "SYS___THREXIT": true, + "SYS___THRSIGDIVERT": true, + "SYS___THRSLEEP": true, + "SYS___THRWAKEUP": true, + "S_ARCH1": true, + "S_ARCH2": true, + "S_BLKSIZE": true, + "S_IEXEC": true, + "S_IFBLK": true, + "S_IFCHR": true, + "S_IFDIR": true, + "S_IFIFO": true, + "S_IFLNK": true, + "S_IFMT": true, + "S_IFREG": true, + "S_IFSOCK": true, + "S_IFWHT": true, + "S_IREAD": true, + "S_IRGRP": true, + "S_IROTH": true, + "S_IRUSR": true, + "S_IRWXG": true, + "S_IRWXO": true, + "S_IRWXU": true, + "S_ISGID": true, + "S_ISTXT": true, + "S_ISUID": true, + "S_ISVTX": true, + "S_IWGRP": true, + "S_IWOTH": true, + "S_IWRITE": true, + "S_IWUSR": true, + "S_IXGRP": true, + "S_IXOTH": true, + "S_IXUSR": true, + "S_LOGIN_SET": true, + "SecurityAttributes": true, + "Seek": true, + "Select": true, + "Sendfile": true, + "Sendmsg": true, + "SendmsgN": true, + "Sendto": true, + "Servent": true, + "SetBpf": true, + "SetBpfBuflen": true, + "SetBpfDatalink": true, + "SetBpfHeadercmpl": true, + "SetBpfImmediate": true, + "SetBpfInterface": true, + "SetBpfPromisc": true, + "SetBpfTimeout": true, + "SetCurrentDirectory": true, + "SetEndOfFile": true, + "SetEnvironmentVariable": true, + "SetFileAttributes": true, + "SetFileCompletionNotificationModes": true, + "SetFilePointer": true, + "SetFileTime": true, + "SetHandleInformation": true, + "SetKevent": true, + "SetLsfPromisc": true, + "SetNonblock": true, + "Setdomainname": true, + "Setegid": true, + "Setenv": true, + "Seteuid": true, + "Setfsgid": true, + "Setfsuid": true, + "Setgid": true, + "Setgroups": true, + "Sethostname": true, + "Setlogin": true, + "Setpgid": true, + "Setpriority": true, + "Setprivexec": true, + "Setregid": true, + "Setresgid": true, + "Setresuid": true, + "Setreuid": true, + "Setrlimit": true, + "Setsid": true, + "Setsockopt": true, + "SetsockoptByte": true, + "SetsockoptICMPv6Filter": true, + "SetsockoptIPMreq": true, + "SetsockoptIPMreqn": true, + "SetsockoptIPv6Mreq": true, + "SetsockoptInet4Addr": true, + "SetsockoptInt": true, + "SetsockoptLinger": true, + "SetsockoptString": true, + "SetsockoptTimeval": true, + "Settimeofday": true, + "Setuid": true, + "Setxattr": true, + "Shutdown": true, + "SidTypeAlias": true, + "SidTypeComputer": true, + "SidTypeDeletedAccount": true, + "SidTypeDomain": true, + "SidTypeGroup": true, + "SidTypeInvalid": true, + "SidTypeLabel": true, + "SidTypeUnknown": true, + "SidTypeUser": true, + "SidTypeWellKnownGroup": true, + "Signal": true, + "SizeofBpfHdr": true, + "SizeofBpfInsn": true, + "SizeofBpfProgram": true, + "SizeofBpfStat": true, + "SizeofBpfVersion": true, + "SizeofBpfZbuf": true, + "SizeofBpfZbufHeader": true, + "SizeofCmsghdr": true, + "SizeofICMPv6Filter": true, + "SizeofIPMreq": true, + "SizeofIPMreqn": true, + "SizeofIPv6MTUInfo": true, + "SizeofIPv6Mreq": true, + "SizeofIfAddrmsg": true, + "SizeofIfAnnounceMsghdr": true, + "SizeofIfData": true, + 
"SizeofIfInfomsg": true, + "SizeofIfMsghdr": true, + "SizeofIfaMsghdr": true, + "SizeofIfmaMsghdr": true, + "SizeofIfmaMsghdr2": true, + "SizeofInet4Pktinfo": true, + "SizeofInet6Pktinfo": true, + "SizeofInotifyEvent": true, + "SizeofLinger": true, + "SizeofMsghdr": true, + "SizeofNlAttr": true, + "SizeofNlMsgerr": true, + "SizeofNlMsghdr": true, + "SizeofRtAttr": true, + "SizeofRtGenmsg": true, + "SizeofRtMetrics": true, + "SizeofRtMsg": true, + "SizeofRtMsghdr": true, + "SizeofRtNexthop": true, + "SizeofSockFilter": true, + "SizeofSockFprog": true, + "SizeofSockaddrAny": true, + "SizeofSockaddrDatalink": true, + "SizeofSockaddrInet4": true, + "SizeofSockaddrInet6": true, + "SizeofSockaddrLinklayer": true, + "SizeofSockaddrNetlink": true, + "SizeofSockaddrUnix": true, + "SizeofTCPInfo": true, + "SizeofUcred": true, + "SlicePtrFromStrings": true, + "SockFilter": true, + "SockFprog": true, + "SockaddrDatalink": true, + "SockaddrGen": true, + "SockaddrInet4": true, + "SockaddrInet6": true, + "SockaddrLinklayer": true, + "SockaddrNetlink": true, + "SockaddrUnix": true, + "Socket": true, + "SocketControlMessage": true, + "SocketDisableIPv6": true, + "Socketpair": true, + "Splice": true, + "StartProcess": true, + "StartupInfo": true, + "Stat": true, + "Stat_t": true, + "Statfs": true, + "Statfs_t": true, + "Stderr": true, + "Stdin": true, + "Stdout": true, + "StringBytePtr": true, + "StringByteSlice": true, + "StringSlicePtr": true, + "StringToSid": true, + "StringToUTF16": true, + "StringToUTF16Ptr": true, + "Symlink": true, + "Sync": true, + "SyncFileRange": true, + "SysProcAttr": true, + "SysProcIDMap": true, + "Syscall": true, + "Syscall12": true, + "Syscall15": true, + "Syscall18": true, + "Syscall6": true, + "Syscall9": true, + "Sysctl": true, + "SysctlUint32": true, + "Sysctlnode": true, + "Sysinfo": true, + "Sysinfo_t": true, + "Systemtime": true, + "TCGETS": true, + "TCIFLUSH": true, + "TCIOFLUSH": true, + "TCOFLUSH": true, + "TCPInfo": true, + "TCPKeepalive": true, + "TCP_CA_NAME_MAX": true, + "TCP_CONGCTL": true, + "TCP_CONGESTION": true, + "TCP_CONNECTIONTIMEOUT": true, + "TCP_CORK": true, + "TCP_DEFER_ACCEPT": true, + "TCP_INFO": true, + "TCP_KEEPALIVE": true, + "TCP_KEEPCNT": true, + "TCP_KEEPIDLE": true, + "TCP_KEEPINIT": true, + "TCP_KEEPINTVL": true, + "TCP_LINGER2": true, + "TCP_MAXBURST": true, + "TCP_MAXHLEN": true, + "TCP_MAXOLEN": true, + "TCP_MAXSEG": true, + "TCP_MAXWIN": true, + "TCP_MAX_SACK": true, + "TCP_MAX_WINSHIFT": true, + "TCP_MD5SIG": true, + "TCP_MD5SIG_MAXKEYLEN": true, + "TCP_MINMSS": true, + "TCP_MINMSSOVERLOAD": true, + "TCP_MSS": true, + "TCP_NODELAY": true, + "TCP_NOOPT": true, + "TCP_NOPUSH": true, + "TCP_NSTATES": true, + "TCP_QUICKACK": true, + "TCP_RXT_CONNDROPTIME": true, + "TCP_RXT_FINDROP": true, + "TCP_SACK_ENABLE": true, + "TCP_SYNCNT": true, + "TCP_VENDOR": true, + "TCP_WINDOW_CLAMP": true, + "TCSAFLUSH": true, + "TCSETS": true, + "TF_DISCONNECT": true, + "TF_REUSE_SOCKET": true, + "TF_USE_DEFAULT_WORKER": true, + "TF_USE_KERNEL_APC": true, + "TF_USE_SYSTEM_THREAD": true, + "TF_WRITE_BEHIND": true, + "TH32CS_INHERIT": true, + "TH32CS_SNAPALL": true, + "TH32CS_SNAPHEAPLIST": true, + "TH32CS_SNAPMODULE": true, + "TH32CS_SNAPMODULE32": true, + "TH32CS_SNAPPROCESS": true, + "TH32CS_SNAPTHREAD": true, + "TIME_ZONE_ID_DAYLIGHT": true, + "TIME_ZONE_ID_STANDARD": true, + "TIME_ZONE_ID_UNKNOWN": true, + "TIOCCBRK": true, + "TIOCCDTR": true, + "TIOCCONS": true, + "TIOCDCDTIMESTAMP": true, + "TIOCDRAIN": true, + "TIOCDSIMICROCODE": true, + "TIOCEXCL": 
true, + "TIOCEXT": true, + "TIOCFLAG_CDTRCTS": true, + "TIOCFLAG_CLOCAL": true, + "TIOCFLAG_CRTSCTS": true, + "TIOCFLAG_MDMBUF": true, + "TIOCFLAG_PPS": true, + "TIOCFLAG_SOFTCAR": true, + "TIOCFLUSH": true, + "TIOCGDEV": true, + "TIOCGDRAINWAIT": true, + "TIOCGETA": true, + "TIOCGETD": true, + "TIOCGFLAGS": true, + "TIOCGICOUNT": true, + "TIOCGLCKTRMIOS": true, + "TIOCGLINED": true, + "TIOCGPGRP": true, + "TIOCGPTN": true, + "TIOCGQSIZE": true, + "TIOCGRANTPT": true, + "TIOCGRS485": true, + "TIOCGSERIAL": true, + "TIOCGSID": true, + "TIOCGSIZE": true, + "TIOCGSOFTCAR": true, + "TIOCGTSTAMP": true, + "TIOCGWINSZ": true, + "TIOCINQ": true, + "TIOCIXOFF": true, + "TIOCIXON": true, + "TIOCLINUX": true, + "TIOCMBIC": true, + "TIOCMBIS": true, + "TIOCMGDTRWAIT": true, + "TIOCMGET": true, + "TIOCMIWAIT": true, + "TIOCMODG": true, + "TIOCMODS": true, + "TIOCMSDTRWAIT": true, + "TIOCMSET": true, + "TIOCM_CAR": true, + "TIOCM_CD": true, + "TIOCM_CTS": true, + "TIOCM_DCD": true, + "TIOCM_DSR": true, + "TIOCM_DTR": true, + "TIOCM_LE": true, + "TIOCM_RI": true, + "TIOCM_RNG": true, + "TIOCM_RTS": true, + "TIOCM_SR": true, + "TIOCM_ST": true, + "TIOCNOTTY": true, + "TIOCNXCL": true, + "TIOCOUTQ": true, + "TIOCPKT": true, + "TIOCPKT_DATA": true, + "TIOCPKT_DOSTOP": true, + "TIOCPKT_FLUSHREAD": true, + "TIOCPKT_FLUSHWRITE": true, + "TIOCPKT_IOCTL": true, + "TIOCPKT_NOSTOP": true, + "TIOCPKT_START": true, + "TIOCPKT_STOP": true, + "TIOCPTMASTER": true, + "TIOCPTMGET": true, + "TIOCPTSNAME": true, + "TIOCPTYGNAME": true, + "TIOCPTYGRANT": true, + "TIOCPTYUNLK": true, + "TIOCRCVFRAME": true, + "TIOCREMOTE": true, + "TIOCSBRK": true, + "TIOCSCONS": true, + "TIOCSCTTY": true, + "TIOCSDRAINWAIT": true, + "TIOCSDTR": true, + "TIOCSERCONFIG": true, + "TIOCSERGETLSR": true, + "TIOCSERGETMULTI": true, + "TIOCSERGSTRUCT": true, + "TIOCSERGWILD": true, + "TIOCSERSETMULTI": true, + "TIOCSERSWILD": true, + "TIOCSER_TEMT": true, + "TIOCSETA": true, + "TIOCSETAF": true, + "TIOCSETAW": true, + "TIOCSETD": true, + "TIOCSFLAGS": true, + "TIOCSIG": true, + "TIOCSLCKTRMIOS": true, + "TIOCSLINED": true, + "TIOCSPGRP": true, + "TIOCSPTLCK": true, + "TIOCSQSIZE": true, + "TIOCSRS485": true, + "TIOCSSERIAL": true, + "TIOCSSIZE": true, + "TIOCSSOFTCAR": true, + "TIOCSTART": true, + "TIOCSTAT": true, + "TIOCSTI": true, + "TIOCSTOP": true, + "TIOCSTSTAMP": true, + "TIOCSWINSZ": true, + "TIOCTIMESTAMP": true, + "TIOCUCNTL": true, + "TIOCVHANGUP": true, + "TIOCXMTFRAME": true, + "TOKEN_ADJUST_DEFAULT": true, + "TOKEN_ADJUST_GROUPS": true, + "TOKEN_ADJUST_PRIVILEGES": true, + "TOKEN_ADJUST_SESSIONID": true, + "TOKEN_ALL_ACCESS": true, + "TOKEN_ASSIGN_PRIMARY": true, + "TOKEN_DUPLICATE": true, + "TOKEN_EXECUTE": true, + "TOKEN_IMPERSONATE": true, + "TOKEN_QUERY": true, + "TOKEN_QUERY_SOURCE": true, + "TOKEN_READ": true, + "TOKEN_WRITE": true, + "TOSTOP": true, + "TRUNCATE_EXISTING": true, + "TUNATTACHFILTER": true, + "TUNDETACHFILTER": true, + "TUNGETFEATURES": true, + "TUNGETIFF": true, + "TUNGETSNDBUF": true, + "TUNGETVNETHDRSZ": true, + "TUNSETDEBUG": true, + "TUNSETGROUP": true, + "TUNSETIFF": true, + "TUNSETLINK": true, + "TUNSETNOCSUM": true, + "TUNSETOFFLOAD": true, + "TUNSETOWNER": true, + "TUNSETPERSIST": true, + "TUNSETSNDBUF": true, + "TUNSETTXFILTER": true, + "TUNSETVNETHDRSZ": true, + "Tee": true, + "TerminateProcess": true, + "Termios": true, + "Tgkill": true, + "Time": true, + "Time_t": true, + "Times": true, + "Timespec": true, + "TimespecToNsec": true, + "Timeval": true, + "Timeval32": true, + "TimevalToNsec": true, + 
"Timex": true, + "Timezoneinformation": true, + "Tms": true, + "Token": true, + "TokenAccessInformation": true, + "TokenAuditPolicy": true, + "TokenDefaultDacl": true, + "TokenElevation": true, + "TokenElevationType": true, + "TokenGroups": true, + "TokenGroupsAndPrivileges": true, + "TokenHasRestrictions": true, + "TokenImpersonationLevel": true, + "TokenIntegrityLevel": true, + "TokenLinkedToken": true, + "TokenLogonSid": true, + "TokenMandatoryPolicy": true, + "TokenOrigin": true, + "TokenOwner": true, + "TokenPrimaryGroup": true, + "TokenPrivileges": true, + "TokenRestrictedSids": true, + "TokenSandBoxInert": true, + "TokenSessionId": true, + "TokenSessionReference": true, + "TokenSource": true, + "TokenStatistics": true, + "TokenType": true, + "TokenUIAccess": true, + "TokenUser": true, + "TokenVirtualizationAllowed": true, + "TokenVirtualizationEnabled": true, + "Tokenprimarygroup": true, + "Tokenuser": true, + "TranslateAccountName": true, + "TranslateName": true, + "TransmitFile": true, + "TransmitFileBuffers": true, + "Truncate": true, + "UNIX_PATH_MAX": true, + "USAGE_MATCH_TYPE_AND": true, + "USAGE_MATCH_TYPE_OR": true, + "UTF16FromString": true, + "UTF16PtrFromString": true, + "UTF16ToString": true, + "Ucred": true, + "Umask": true, + "Uname": true, + "Undelete": true, + "UnixCredentials": true, + "UnixRights": true, + "Unlink": true, + "Unlinkat": true, + "UnmapViewOfFile": true, + "Unmount": true, + "Unsetenv": true, + "Unshare": true, + "UserInfo10": true, + "Ustat": true, + "Ustat_t": true, + "Utimbuf": true, + "Utime": true, + "Utimes": true, + "UtimesNano": true, + "Utsname": true, + "VDISCARD": true, + "VDSUSP": true, + "VEOF": true, + "VEOL": true, + "VEOL2": true, + "VERASE": true, + "VERASE2": true, + "VINTR": true, + "VKILL": true, + "VLNEXT": true, + "VMIN": true, + "VQUIT": true, + "VREPRINT": true, + "VSTART": true, + "VSTATUS": true, + "VSTOP": true, + "VSUSP": true, + "VSWTC": true, + "VT0": true, + "VT1": true, + "VTDLY": true, + "VTIME": true, + "VWERASE": true, + "VirtualLock": true, + "VirtualUnlock": true, + "WAIT_ABANDONED": true, + "WAIT_FAILED": true, + "WAIT_OBJECT_0": true, + "WAIT_TIMEOUT": true, + "WALL": true, + "WALLSIG": true, + "WALTSIG": true, + "WCLONE": true, + "WCONTINUED": true, + "WCOREFLAG": true, + "WEXITED": true, + "WLINUXCLONE": true, + "WNOHANG": true, + "WNOTHREAD": true, + "WNOWAIT": true, + "WNOZOMBIE": true, + "WOPTSCHECKED": true, + "WORDSIZE": true, + "WSABuf": true, + "WSACleanup": true, + "WSADESCRIPTION_LEN": true, + "WSAData": true, + "WSAEACCES": true, + "WSAECONNABORTED": true, + "WSAECONNRESET": true, + "WSAEnumProtocols": true, + "WSAID_CONNECTEX": true, + "WSAIoctl": true, + "WSAPROTOCOL_LEN": true, + "WSAProtocolChain": true, + "WSAProtocolInfo": true, + "WSARecv": true, + "WSARecvFrom": true, + "WSASYS_STATUS_LEN": true, + "WSASend": true, + "WSASendTo": true, + "WSASendto": true, + "WSAStartup": true, + "WSTOPPED": true, + "WTRAPPED": true, + "WUNTRACED": true, + "Wait4": true, + "WaitForSingleObject": true, + "WaitStatus": true, + "Win32FileAttributeData": true, + "Win32finddata": true, + "Write": true, + "WriteConsole": true, + "WriteFile": true, + "X509_ASN_ENCODING": true, + "XCASE": true, + "XP1_CONNECTIONLESS": true, + "XP1_CONNECT_DATA": true, + "XP1_DISCONNECT_DATA": true, + "XP1_EXPEDITED_DATA": true, + "XP1_GRACEFUL_CLOSE": true, + "XP1_GUARANTEED_DELIVERY": true, + "XP1_GUARANTEED_ORDER": true, + "XP1_IFS_HANDLES": true, + "XP1_MESSAGE_ORIENTED": true, + "XP1_MULTIPOINT_CONTROL_PLANE": true, + 
"XP1_MULTIPOINT_DATA_PLANE": true, + "XP1_PARTIAL_MESSAGE": true, + "XP1_PSEUDO_STREAM": true, + "XP1_QOS_SUPPORTED": true, + "XP1_SAN_SUPPORT_SDP": true, + "XP1_SUPPORT_BROADCAST": true, + "XP1_SUPPORT_MULTIPOINT": true, + "XP1_UNI_RECV": true, + "XP1_UNI_SEND": true, + }, + "syscall/js": map[string]bool{ + "Error": true, + "Func": true, + "FuncOf": true, + "Global": true, + "Null": true, + "Type": true, + "TypeBoolean": true, + "TypeFunction": true, + "TypeNull": true, + "TypeNumber": true, + "TypeObject": true, + "TypeString": true, + "TypeSymbol": true, + "TypeUndefined": true, + "TypedArray": true, + "TypedArrayOf": true, + "Undefined": true, + "Value": true, + "ValueError": true, + "ValueOf": true, + "Wrapper": true, + }, + "testing": map[string]bool{ + "AllocsPerRun": true, + "B": true, + "Benchmark": true, + "BenchmarkResult": true, + "Cover": true, + "CoverBlock": true, + "CoverMode": true, + "Coverage": true, + "InternalBenchmark": true, + "InternalExample": true, + "InternalTest": true, + "M": true, + "Main": true, + "MainStart": true, + "PB": true, + "RegisterCover": true, + "RunBenchmarks": true, + "RunExamples": true, + "RunTests": true, + "Short": true, + "T": true, + "Verbose": true, + }, + "testing/iotest": map[string]bool{ + "DataErrReader": true, + "ErrTimeout": true, + "HalfReader": true, + "NewReadLogger": true, + "NewWriteLogger": true, + "OneByteReader": true, + "TimeoutReader": true, + "TruncateWriter": true, + }, + "testing/quick": map[string]bool{ + "Check": true, + "CheckEqual": true, + "CheckEqualError": true, + "CheckError": true, + "Config": true, + "Generator": true, + "SetupError": true, + "Value": true, + }, + "text/scanner": map[string]bool{ + "Char": true, + "Comment": true, + "EOF": true, + "Float": true, + "GoTokens": true, + "GoWhitespace": true, + "Ident": true, + "Int": true, + "Position": true, + "RawString": true, + "ScanChars": true, + "ScanComments": true, + "ScanFloats": true, + "ScanIdents": true, + "ScanInts": true, + "ScanRawStrings": true, + "ScanStrings": true, + "Scanner": true, + "SkipComments": true, + "String": true, + "TokenString": true, + }, + "text/tabwriter": map[string]bool{ + "AlignRight": true, + "Debug": true, + "DiscardEmptyColumns": true, + "Escape": true, + "FilterHTML": true, + "NewWriter": true, + "StripEscape": true, + "TabIndent": true, + "Writer": true, + }, + "text/template": map[string]bool{ + "ExecError": true, + "FuncMap": true, + "HTMLEscape": true, + "HTMLEscapeString": true, + "HTMLEscaper": true, + "IsTrue": true, + "JSEscape": true, + "JSEscapeString": true, + "JSEscaper": true, + "Must": true, + "New": true, + "ParseFiles": true, + "ParseGlob": true, + "Template": true, + "URLQueryEscaper": true, + }, + "text/template/parse": map[string]bool{ + "ActionNode": true, + "BoolNode": true, + "BranchNode": true, + "ChainNode": true, + "CommandNode": true, + "DotNode": true, + "FieldNode": true, + "IdentifierNode": true, + "IfNode": true, + "IsEmptyTree": true, + "ListNode": true, + "New": true, + "NewIdentifier": true, + "NilNode": true, + "Node": true, + "NodeAction": true, + "NodeBool": true, + "NodeChain": true, + "NodeCommand": true, + "NodeDot": true, + "NodeField": true, + "NodeIdentifier": true, + "NodeIf": true, + "NodeList": true, + "NodeNil": true, + "NodeNumber": true, + "NodePipe": true, + "NodeRange": true, + "NodeString": true, + "NodeTemplate": true, + "NodeText": true, + "NodeType": true, + "NodeVariable": true, + "NodeWith": true, + "NumberNode": true, + "Parse": true, + "PipeNode": true, + "Pos": 
true, + "RangeNode": true, + "StringNode": true, + "TemplateNode": true, + "TextNode": true, + "Tree": true, + "VariableNode": true, + "WithNode": true, + }, + "time": map[string]bool{ + "ANSIC": true, + "After": true, + "AfterFunc": true, + "April": true, + "August": true, + "Date": true, + "December": true, + "Duration": true, + "February": true, + "FixedZone": true, + "Friday": true, + "Hour": true, + "January": true, + "July": true, + "June": true, + "Kitchen": true, + "LoadLocation": true, + "LoadLocationFromTZData": true, + "Local": true, + "Location": true, + "March": true, + "May": true, + "Microsecond": true, + "Millisecond": true, + "Minute": true, + "Monday": true, + "Month": true, + "Nanosecond": true, + "NewTicker": true, + "NewTimer": true, + "November": true, + "Now": true, + "October": true, + "Parse": true, + "ParseDuration": true, + "ParseError": true, + "ParseInLocation": true, + "RFC1123": true, + "RFC1123Z": true, + "RFC3339": true, + "RFC3339Nano": true, + "RFC822": true, + "RFC822Z": true, + "RFC850": true, + "RubyDate": true, + "Saturday": true, + "Second": true, + "September": true, + "Since": true, + "Sleep": true, + "Stamp": true, + "StampMicro": true, + "StampMilli": true, + "StampNano": true, + "Sunday": true, + "Thursday": true, + "Tick": true, + "Ticker": true, + "Time": true, + "Timer": true, + "Tuesday": true, + "UTC": true, + "Unix": true, + "UnixDate": true, + "Until": true, + "Wednesday": true, + "Weekday": true, + }, + "unicode": map[string]bool{ + "ASCII_Hex_Digit": true, + "Adlam": true, + "Ahom": true, + "Anatolian_Hieroglyphs": true, + "Arabic": true, + "Armenian": true, + "Avestan": true, + "AzeriCase": true, + "Balinese": true, + "Bamum": true, + "Bassa_Vah": true, + "Batak": true, + "Bengali": true, + "Bhaiksuki": true, + "Bidi_Control": true, + "Bopomofo": true, + "Brahmi": true, + "Braille": true, + "Buginese": true, + "Buhid": true, + "C": true, + "Canadian_Aboriginal": true, + "Carian": true, + "CaseRange": true, + "CaseRanges": true, + "Categories": true, + "Caucasian_Albanian": true, + "Cc": true, + "Cf": true, + "Chakma": true, + "Cham": true, + "Cherokee": true, + "Co": true, + "Common": true, + "Coptic": true, + "Cs": true, + "Cuneiform": true, + "Cypriot": true, + "Cyrillic": true, + "Dash": true, + "Deprecated": true, + "Deseret": true, + "Devanagari": true, + "Diacritic": true, + "Digit": true, + "Duployan": true, + "Egyptian_Hieroglyphs": true, + "Elbasan": true, + "Ethiopic": true, + "Extender": true, + "FoldCategory": true, + "FoldScript": true, + "Georgian": true, + "Glagolitic": true, + "Gothic": true, + "Grantha": true, + "GraphicRanges": true, + "Greek": true, + "Gujarati": true, + "Gurmukhi": true, + "Han": true, + "Hangul": true, + "Hanunoo": true, + "Hatran": true, + "Hebrew": true, + "Hex_Digit": true, + "Hiragana": true, + "Hyphen": true, + "IDS_Binary_Operator": true, + "IDS_Trinary_Operator": true, + "Ideographic": true, + "Imperial_Aramaic": true, + "In": true, + "Inherited": true, + "Inscriptional_Pahlavi": true, + "Inscriptional_Parthian": true, + "Is": true, + "IsControl": true, + "IsDigit": true, + "IsGraphic": true, + "IsLetter": true, + "IsLower": true, + "IsMark": true, + "IsNumber": true, + "IsOneOf": true, + "IsPrint": true, + "IsPunct": true, + "IsSpace": true, + "IsSymbol": true, + "IsTitle": true, + "IsUpper": true, + "Javanese": true, + "Join_Control": true, + "Kaithi": true, + "Kannada": true, + "Katakana": true, + "Kayah_Li": true, + "Kharoshthi": true, + "Khmer": true, + "Khojki": true, + "Khudawadi": 
true, + "L": true, + "Lao": true, + "Latin": true, + "Lepcha": true, + "Letter": true, + "Limbu": true, + "Linear_A": true, + "Linear_B": true, + "Lisu": true, + "Ll": true, + "Lm": true, + "Lo": true, + "Logical_Order_Exception": true, + "Lower": true, + "LowerCase": true, + "Lt": true, + "Lu": true, + "Lycian": true, + "Lydian": true, + "M": true, + "Mahajani": true, + "Malayalam": true, + "Mandaic": true, + "Manichaean": true, + "Marchen": true, + "Mark": true, + "Masaram_Gondi": true, + "MaxASCII": true, + "MaxCase": true, + "MaxLatin1": true, + "MaxRune": true, + "Mc": true, + "Me": true, + "Meetei_Mayek": true, + "Mende_Kikakui": true, + "Meroitic_Cursive": true, + "Meroitic_Hieroglyphs": true, + "Miao": true, + "Mn": true, + "Modi": true, + "Mongolian": true, + "Mro": true, + "Multani": true, + "Myanmar": true, + "N": true, + "Nabataean": true, + "Nd": true, + "New_Tai_Lue": true, + "Newa": true, + "Nko": true, + "Nl": true, + "No": true, + "Noncharacter_Code_Point": true, + "Number": true, + "Nushu": true, + "Ogham": true, + "Ol_Chiki": true, + "Old_Hungarian": true, + "Old_Italic": true, + "Old_North_Arabian": true, + "Old_Permic": true, + "Old_Persian": true, + "Old_South_Arabian": true, + "Old_Turkic": true, + "Oriya": true, + "Osage": true, + "Osmanya": true, + "Other": true, + "Other_Alphabetic": true, + "Other_Default_Ignorable_Code_Point": true, + "Other_Grapheme_Extend": true, + "Other_ID_Continue": true, + "Other_ID_Start": true, + "Other_Lowercase": true, + "Other_Math": true, + "Other_Uppercase": true, + "P": true, + "Pahawh_Hmong": true, + "Palmyrene": true, + "Pattern_Syntax": true, + "Pattern_White_Space": true, + "Pau_Cin_Hau": true, + "Pc": true, + "Pd": true, + "Pe": true, + "Pf": true, + "Phags_Pa": true, + "Phoenician": true, + "Pi": true, + "Po": true, + "Prepended_Concatenation_Mark": true, + "PrintRanges": true, + "Properties": true, + "Ps": true, + "Psalter_Pahlavi": true, + "Punct": true, + "Quotation_Mark": true, + "Radical": true, + "Range16": true, + "Range32": true, + "RangeTable": true, + "Regional_Indicator": true, + "Rejang": true, + "ReplacementChar": true, + "Runic": true, + "S": true, + "STerm": true, + "Samaritan": true, + "Saurashtra": true, + "Sc": true, + "Scripts": true, + "Sentence_Terminal": true, + "Sharada": true, + "Shavian": true, + "Siddham": true, + "SignWriting": true, + "SimpleFold": true, + "Sinhala": true, + "Sk": true, + "Sm": true, + "So": true, + "Soft_Dotted": true, + "Sora_Sompeng": true, + "Soyombo": true, + "Space": true, + "SpecialCase": true, + "Sundanese": true, + "Syloti_Nagri": true, + "Symbol": true, + "Syriac": true, + "Tagalog": true, + "Tagbanwa": true, + "Tai_Le": true, + "Tai_Tham": true, + "Tai_Viet": true, + "Takri": true, + "Tamil": true, + "Tangut": true, + "Telugu": true, + "Terminal_Punctuation": true, + "Thaana": true, + "Thai": true, + "Tibetan": true, + "Tifinagh": true, + "Tirhuta": true, + "Title": true, + "TitleCase": true, + "To": true, + "ToLower": true, + "ToTitle": true, + "ToUpper": true, + "TurkishCase": true, + "Ugaritic": true, + "Unified_Ideograph": true, + "Upper": true, + "UpperCase": true, + "UpperLower": true, + "Vai": true, + "Variation_Selector": true, + "Version": true, + "Warang_Citi": true, + "White_Space": true, + "Yi": true, + "Z": true, + "Zanabazar_Square": true, + "Zl": true, + "Zp": true, + "Zs": true, + }, + "unicode/utf16": map[string]bool{ + "Decode": true, + "DecodeRune": true, + "Encode": true, + "EncodeRune": true, + "IsSurrogate": true, + }, + "unicode/utf8": 
map[string]bool{ + "DecodeLastRune": true, + "DecodeLastRuneInString": true, + "DecodeRune": true, + "DecodeRuneInString": true, + "EncodeRune": true, + "FullRune": true, + "FullRuneInString": true, + "MaxRune": true, + "RuneCount": true, + "RuneCountInString": true, + "RuneError": true, + "RuneLen": true, + "RuneSelf": true, + "RuneStart": true, + "UTFMax": true, + "Valid": true, + "ValidRune": true, + "ValidString": true, + }, + "unsafe": map[string]bool{ + "Alignof": true, + "ArbitraryType": true, + "Offsetof": true, + "Pointer": true, + "Sizeof": true, + }, +} diff --git a/vendor/golang.org/x/tools/internal/module/module.go b/vendor/golang.org/x/tools/internal/module/module.go new file mode 100644 index 000000000..9a4edb9de --- /dev/null +++ b/vendor/golang.org/x/tools/internal/module/module.go @@ -0,0 +1,540 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package module defines the module.Version type +// along with support code. +package module + +// IMPORTANT NOTE +// +// This file essentially defines the set of valid import paths for the go command. +// There are many subtle considerations, including Unicode ambiguity, +// security, network, and file system representations. +// +// This file also defines the set of valid module path and version combinations, +// another topic with many subtle considerations. +// +// Changes to the semantics in this file require approval from rsc. + +import ( + "fmt" + "sort" + "strings" + "unicode" + "unicode/utf8" + + "golang.org/x/tools/internal/semver" +) + +// A Version is defined by a module path and version pair. +type Version struct { + Path string + + // Version is usually a semantic version in canonical form. + // There are two exceptions to this general rule. + // First, the top-level target of a build has no specific version + // and uses Version = "". + // Second, during MVS calculations the version "none" is used + // to represent the decision to take no version of a given module. + Version string `json:",omitempty"` +} + +// Check checks that a given module path, version pair is valid. +// In addition to the path being a valid module path +// and the version being a valid semantic version, +// the two must correspond. +// For example, the path "yaml/v2" only corresponds to +// semantic versions beginning with "v2.". +func Check(path, version string) error { + if err := CheckPath(path); err != nil { + return err + } + if !semver.IsValid(version) { + return fmt.Errorf("malformed semantic version %v", version) + } + _, pathMajor, _ := SplitPathVersion(path) + if !MatchPathMajor(version, pathMajor) { + if pathMajor == "" { + pathMajor = "v0 or v1" + } + if pathMajor[0] == '.' { // .v1 + pathMajor = pathMajor[1:] + } + return fmt.Errorf("mismatched module path %v and version %v (want %v)", path, version, pathMajor) + } + return nil +} + +// firstPathOK reports whether r can appear in the first element of a module path. +// The first element of the path must be an LDH domain name, at least for now. +// To avoid case ambiguity, the domain name must be entirely lower case. +func firstPathOK(r rune) bool { + return r == '-' || r == '.' || + '0' <= r && r <= '9' || + 'a' <= r && r <= 'z' +} + +// pathOK reports whether r can appear in an import path element. +// Paths can be ASCII letters, ASCII digits, and limited ASCII punctuation: + - . _ and ~. +// This matches what "go get" has historically recognized in import paths. 
+// TODO(rsc): We would like to allow Unicode letters, but that requires additional +// care in the safe encoding (see note below). +func pathOK(r rune) bool { + if r < utf8.RuneSelf { + return r == '+' || r == '-' || r == '.' || r == '_' || r == '~' || + '0' <= r && r <= '9' || + 'A' <= r && r <= 'Z' || + 'a' <= r && r <= 'z' + } + return false +} + +// fileNameOK reports whether r can appear in a file name. +// For now we allow all Unicode letters but otherwise limit to pathOK plus a few more punctuation characters. +// If we expand the set of allowed characters here, we have to +// work harder at detecting potential case-folding and normalization collisions. +// See note about "safe encoding" below. +func fileNameOK(r rune) bool { + if r < utf8.RuneSelf { + // Entire set of ASCII punctuation, from which we remove characters: + // ! " # $ % & ' ( ) * + , - . / : ; < = > ? @ [ \ ] ^ _ ` { | } ~ + // We disallow some shell special characters: " ' * < > ? ` | + // (Note that some of those are disallowed by the Windows file system as well.) + // We also disallow path separators / : and \ (fileNameOK is only called on path element characters). + // We allow spaces (U+0020) in file names. + const allowed = "!#$%&()+,-.=@[]^_{}~ " + if '0' <= r && r <= '9' || 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' { + return true + } + for i := 0; i < len(allowed); i++ { + if rune(allowed[i]) == r { + return true + } + } + return false + } + // It may be OK to add more ASCII punctuation here, but only carefully. + // For example Windows disallows < > \, and macOS disallows :, so we must not allow those. + return unicode.IsLetter(r) +} + +// CheckPath checks that a module path is valid. +func CheckPath(path string) error { + if err := checkPath(path, false); err != nil { + return fmt.Errorf("malformed module path %q: %v", path, err) + } + i := strings.Index(path, "/") + if i < 0 { + i = len(path) + } + if i == 0 { + return fmt.Errorf("malformed module path %q: leading slash", path) + } + if !strings.Contains(path[:i], ".") { + return fmt.Errorf("malformed module path %q: missing dot in first path element", path) + } + if path[0] == '-' { + return fmt.Errorf("malformed module path %q: leading dash in first path element", path) + } + for _, r := range path[:i] { + if !firstPathOK(r) { + return fmt.Errorf("malformed module path %q: invalid char %q in first path element", path, r) + } + } + if _, _, ok := SplitPathVersion(path); !ok { + return fmt.Errorf("malformed module path %q: invalid version", path) + } + return nil +} + +// CheckImportPath checks that an import path is valid. +func CheckImportPath(path string) error { + if err := checkPath(path, false); err != nil { + return fmt.Errorf("malformed import path %q: %v", path, err) + } + return nil +} + +// checkPath checks that a general path is valid. +// It returns an error describing why but not mentioning path. +// Because these checks apply to both module paths and import paths, +// the caller is expected to add the "malformed ___ path %q: " prefix. +// fileName indicates whether the final element of the path is a file name +// (as opposed to a directory name). 
+func checkPath(path string, fileName bool) error { + if !utf8.ValidString(path) { + return fmt.Errorf("invalid UTF-8") + } + if path == "" { + return fmt.Errorf("empty string") + } + if strings.Contains(path, "..") { + return fmt.Errorf("double dot") + } + if strings.Contains(path, "//") { + return fmt.Errorf("double slash") + } + if path[len(path)-1] == '/' { + return fmt.Errorf("trailing slash") + } + elemStart := 0 + for i, r := range path { + if r == '/' { + if err := checkElem(path[elemStart:i], fileName); err != nil { + return err + } + elemStart = i + 1 + } + } + if err := checkElem(path[elemStart:], fileName); err != nil { + return err + } + return nil +} + +// checkElem checks whether an individual path element is valid. +// fileName indicates whether the element is a file name (not a directory name). +func checkElem(elem string, fileName bool) error { + if elem == "" { + return fmt.Errorf("empty path element") + } + if strings.Count(elem, ".") == len(elem) { + return fmt.Errorf("invalid path element %q", elem) + } + if elem[0] == '.' && !fileName { + return fmt.Errorf("leading dot in path element") + } + if elem[len(elem)-1] == '.' { + return fmt.Errorf("trailing dot in path element") + } + charOK := pathOK + if fileName { + charOK = fileNameOK + } + for _, r := range elem { + if !charOK(r) { + return fmt.Errorf("invalid char %q", r) + } + } + + // Windows disallows a bunch of path elements, sadly. + // See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file + short := elem + if i := strings.Index(short, "."); i >= 0 { + short = short[:i] + } + for _, bad := range badWindowsNames { + if strings.EqualFold(bad, short) { + return fmt.Errorf("disallowed path element %q", elem) + } + } + return nil +} + +// CheckFilePath checks whether a slash-separated file path is valid. +func CheckFilePath(path string) error { + if err := checkPath(path, true); err != nil { + return fmt.Errorf("malformed file path %q: %v", path, err) + } + return nil +} + +// badWindowsNames are the reserved file path elements on Windows. +// See https://docs.microsoft.com/en-us/windows/desktop/fileio/naming-a-file +var badWindowsNames = []string{ + "CON", + "PRN", + "AUX", + "NUL", + "COM1", + "COM2", + "COM3", + "COM4", + "COM5", + "COM6", + "COM7", + "COM8", + "COM9", + "LPT1", + "LPT2", + "LPT3", + "LPT4", + "LPT5", + "LPT6", + "LPT7", + "LPT8", + "LPT9", +} + +// SplitPathVersion returns prefix and major version such that prefix+pathMajor == path +// and version is either empty or "/vN" for N >= 2. +// As a special case, gopkg.in paths are recognized directly; +// they require ".vN" instead of "/vN", and for all N, not just N >= 2. +func SplitPathVersion(path string) (prefix, pathMajor string, ok bool) { + if strings.HasPrefix(path, "gopkg.in/") { + return splitGopkgIn(path) + } + + i := len(path) + dot := false + for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9' || path[i-1] == '.') { + if path[i-1] == '.' { + dot = true + } + i-- + } + if i <= 1 || i == len(path) || path[i-1] != 'v' || path[i-2] != '/' { + return path, "", true + } + prefix, pathMajor = path[:i-2], path[i-2:] + if dot || len(pathMajor) <= 2 || pathMajor[2] == '0' || pathMajor == "/v1" { + return path, "", false + } + return prefix, pathMajor, true +} + +// splitGopkgIn is like SplitPathVersion but only for gopkg.in paths. 
+func splitGopkgIn(path string) (prefix, pathMajor string, ok bool) { + if !strings.HasPrefix(path, "gopkg.in/") { + return path, "", false + } + i := len(path) + if strings.HasSuffix(path, "-unstable") { + i -= len("-unstable") + } + for i > 0 && ('0' <= path[i-1] && path[i-1] <= '9') { + i-- + } + if i <= 1 || path[i-1] != 'v' || path[i-2] != '.' { + // All gopkg.in paths must end in vN for some N. + return path, "", false + } + prefix, pathMajor = path[:i-2], path[i-2:] + if len(pathMajor) <= 2 || pathMajor[2] == '0' && pathMajor != ".v0" { + return path, "", false + } + return prefix, pathMajor, true +} + +// MatchPathMajor reports whether the semantic version v +// matches the path major version pathMajor. +func MatchPathMajor(v, pathMajor string) bool { + if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") { + pathMajor = strings.TrimSuffix(pathMajor, "-unstable") + } + if strings.HasPrefix(v, "v0.0.0-") && pathMajor == ".v1" { + // Allow old bug in pseudo-versions that generated v0.0.0- pseudoversion for gopkg .v1. + // For example, gopkg.in/yaml.v2@v2.2.1's go.mod requires gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405. + return true + } + m := semver.Major(v) + if pathMajor == "" { + return m == "v0" || m == "v1" || semver.Build(v) == "+incompatible" + } + return (pathMajor[0] == '/' || pathMajor[0] == '.') && m == pathMajor[1:] +} + +// CanonicalVersion returns the canonical form of the version string v. +// It is the same as semver.Canonical(v) except that it preserves the special build suffix "+incompatible". +func CanonicalVersion(v string) string { + cv := semver.Canonical(v) + if semver.Build(v) == "+incompatible" { + cv += "+incompatible" + } + return cv +} + +// Sort sorts the list by Path, breaking ties by comparing Versions. +func Sort(list []Version) { + sort.Slice(list, func(i, j int) bool { + mi := list[i] + mj := list[j] + if mi.Path != mj.Path { + return mi.Path < mj.Path + } + // To help go.sum formatting, allow version/file. + // Compare semver prefix by semver rules, + // file by string order. + vi := mi.Version + vj := mj.Version + var fi, fj string + if k := strings.Index(vi, "/"); k >= 0 { + vi, fi = vi[:k], vi[k:] + } + if k := strings.Index(vj, "/"); k >= 0 { + vj, fj = vj[:k], vj[k:] + } + if vi != vj { + return semver.Compare(vi, vj) < 0 + } + return fi < fj + }) +} + +// Safe encodings +// +// Module paths appear as substrings of file system paths +// (in the download cache) and of web server URLs in the proxy protocol. +// In general we cannot rely on file systems to be case-sensitive, +// nor can we rely on web servers, since they read from file systems. +// That is, we cannot rely on the file system to keep rsc.io/QUOTE +// and rsc.io/quote separate. Windows and macOS don't. +// Instead, we must never require two different casings of a file path. +// Because we want the download cache to match the proxy protocol, +// and because we want the proxy protocol to be possible to serve +// from a tree of static files (which might be stored on a case-insensitive +// file system), the proxy protocol must never require two different casings +// of a URL path either. +// +// One possibility would be to make the safe encoding be the lowercase +// hexadecimal encoding of the actual path bytes. 
This would avoid ever +// needing different casings of a file path, but it would be fairly illegible +// to most programmers when those paths appeared in the file system +// (including in file paths in compiler errors and stack traces) +// in web server logs, and so on. Instead, we want a safe encoding that +// leaves most paths unaltered. +// +// The safe encoding is this: +// replace every uppercase letter with an exclamation mark +// followed by the letter's lowercase equivalent. +// +// For example, +// github.com/Azure/azure-sdk-for-go -> github.com/!azure/azure-sdk-for-go. +// github.com/GoogleCloudPlatform/cloudsql-proxy -> github.com/!google!cloud!platform/cloudsql-proxy +// github.com/Sirupsen/logrus -> github.com/!sirupsen/logrus. +// +// Import paths that avoid upper-case letters are left unchanged. +// Note that because import paths are ASCII-only and avoid various +// problematic punctuation (like : < and >), the safe encoding is also ASCII-only +// and avoids the same problematic punctuation. +// +// Import paths have never allowed exclamation marks, so there is no +// need to define how to encode a literal !. +// +// Although paths are disallowed from using Unicode (see pathOK above), +// the eventual plan is to allow Unicode letters as well, to assume that +// file systems and URLs are Unicode-safe (storing UTF-8), and apply +// the !-for-uppercase convention. Note however that not all runes that +// are different but case-fold equivalent are an upper/lower pair. +// For example, U+004B ('K'), U+006B ('k'), and U+212A ('K' for Kelvin) +// are considered to case-fold to each other. When we do add Unicode +// letters, we must not assume that upper/lower are the only case-equivalent pairs. +// Perhaps the Kelvin symbol would be disallowed entirely, for example. +// Or perhaps it would encode as "!!k", or perhaps as "(212A)". +// +// Also, it would be nice to allow Unicode marks as well as letters, +// but marks include combining marks, and then we must deal not +// only with case folding but also normalization: both U+00E9 ('é') +// and U+0065 U+0301 ('e' followed by combining acute accent) +// look the same on the page and are treated by some file systems +// as the same path. If we do allow Unicode marks in paths, there +// must be some kind of normalization to allow only one canonical +// encoding of any character used in an import path. + +// EncodePath returns the safe encoding of the given module path. +// It fails if the module path is invalid. +func EncodePath(path string) (encoding string, err error) { + if err := CheckPath(path); err != nil { + return "", err + } + + return encodeString(path) +} + +// EncodeVersion returns the safe encoding of the given module version. +// Versions are allowed to be in non-semver form but must be valid file names +// and not contain exclamation marks. +func EncodeVersion(v string) (encoding string, err error) { + if err := checkElem(v, true); err != nil || strings.Contains(v, "!") { + return "", fmt.Errorf("disallowed version string %q", v) + } + return encodeString(v) +} + +func encodeString(s string) (encoding string, err error) { + haveUpper := false + for _, r := range s { + if r == '!' || r >= utf8.RuneSelf { + // This should be disallowed by CheckPath, but diagnose anyway. + // The correctness of the encoding loop below depends on it. 
+ return "", fmt.Errorf("internal error: inconsistency in EncodePath") + } + if 'A' <= r && r <= 'Z' { + haveUpper = true + } + } + + if !haveUpper { + return s, nil + } + + var buf []byte + for _, r := range s { + if 'A' <= r && r <= 'Z' { + buf = append(buf, '!', byte(r+'a'-'A')) + } else { + buf = append(buf, byte(r)) + } + } + return string(buf), nil +} + +// DecodePath returns the module path of the given safe encoding. +// It fails if the encoding is invalid or encodes an invalid path. +func DecodePath(encoding string) (path string, err error) { + path, ok := decodeString(encoding) + if !ok { + return "", fmt.Errorf("invalid module path encoding %q", encoding) + } + if err := CheckPath(path); err != nil { + return "", fmt.Errorf("invalid module path encoding %q: %v", encoding, err) + } + return path, nil +} + +// DecodeVersion returns the version string for the given safe encoding. +// It fails if the encoding is invalid or encodes an invalid version. +// Versions are allowed to be in non-semver form but must be valid file names +// and not contain exclamation marks. +func DecodeVersion(encoding string) (v string, err error) { + v, ok := decodeString(encoding) + if !ok { + return "", fmt.Errorf("invalid version encoding %q", encoding) + } + if err := checkElem(v, true); err != nil { + return "", fmt.Errorf("disallowed version string %q", v) + } + return v, nil +} + +func decodeString(encoding string) (string, bool) { + var buf []byte + + bang := false + for _, r := range encoding { + if r >= utf8.RuneSelf { + return "", false + } + if bang { + bang = false + if r < 'a' || 'z' < r { + return "", false + } + buf = append(buf, byte(r+'A'-'a')) + continue + } + if r == '!' { + bang = true + continue + } + if 'A' <= r && r <= 'Z' { + return "", false + } + buf = append(buf, byte(r)) + } + if bang { + return "", false + } + return string(buf), true +} diff --git a/vendor/golang.org/x/tools/internal/semver/semver.go b/vendor/golang.org/x/tools/internal/semver/semver.go new file mode 100644 index 000000000..4af7118e5 --- /dev/null +++ b/vendor/golang.org/x/tools/internal/semver/semver.go @@ -0,0 +1,388 @@ +// Copyright 2018 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package semver implements comparison of semantic version strings. +// In this package, semantic version strings must begin with a leading "v", +// as in "v1.0.0". +// +// The general form of a semantic version string accepted by this package is +// +// vMAJOR[.MINOR[.PATCH[-PRERELEASE][+BUILD]]] +// +// where square brackets indicate optional parts of the syntax; +// MAJOR, MINOR, and PATCH are decimal integers without extra leading zeros; +// PRERELEASE and BUILD are each a series of non-empty dot-separated identifiers +// using only alphanumeric characters and hyphens; and +// all-numeric PRERELEASE identifiers must not have leading zeros. +// +// This package follows Semantic Versioning 2.0.0 (see semver.org) +// with two exceptions. First, it requires the "v" prefix. Second, it recognizes +// vMAJOR and vMAJOR.MINOR (with no prerelease or build suffixes) +// as shorthands for vMAJOR.0.0 and vMAJOR.MINOR.0. +package semver + +// parsed returns the parsed form of a semantic version string. +type parsed struct { + major string + minor string + patch string + short string + prerelease string + build string + err string +} + +// IsValid reports whether v is a valid semantic version string. 
+func IsValid(v string) bool { + _, ok := parse(v) + return ok +} + +// Canonical returns the canonical formatting of the semantic version v. +// It fills in any missing .MINOR or .PATCH and discards build metadata. +// Two semantic versions compare equal only if their canonical formattings +// are identical strings. +// The canonical invalid semantic version is the empty string. +func Canonical(v string) string { + p, ok := parse(v) + if !ok { + return "" + } + if p.build != "" { + return v[:len(v)-len(p.build)] + } + if p.short != "" { + return v + p.short + } + return v +} + +// Major returns the major version prefix of the semantic version v. +// For example, Major("v2.1.0") == "v2". +// If v is an invalid semantic version string, Major returns the empty string. +func Major(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return v[:1+len(pv.major)] +} + +// MajorMinor returns the major.minor version prefix of the semantic version v. +// For example, MajorMinor("v2.1.0") == "v2.1". +// If v is an invalid semantic version string, MajorMinor returns the empty string. +func MajorMinor(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + i := 1 + len(pv.major) + if j := i + 1 + len(pv.minor); j <= len(v) && v[i] == '.' && v[i+1:j] == pv.minor { + return v[:j] + } + return v[:i] + "." + pv.minor +} + +// Prerelease returns the prerelease suffix of the semantic version v. +// For example, Prerelease("v2.1.0-pre+meta") == "-pre". +// If v is an invalid semantic version string, Prerelease returns the empty string. +func Prerelease(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return pv.prerelease +} + +// Build returns the build suffix of the semantic version v. +// For example, Build("v2.1.0+meta") == "+meta". +// If v is an invalid semantic version string, Build returns the empty string. +func Build(v string) string { + pv, ok := parse(v) + if !ok { + return "" + } + return pv.build +} + +// Compare returns an integer comparing two versions according to +// according to semantic version precedence. +// The result will be 0 if v == w, -1 if v < w, or +1 if v > w. +// +// An invalid semantic version string is considered less than a valid one. +// All invalid semantic version strings compare equal to each other. +func Compare(v, w string) int { + pv, ok1 := parse(v) + pw, ok2 := parse(w) + if !ok1 && !ok2 { + return 0 + } + if !ok1 { + return -1 + } + if !ok2 { + return +1 + } + if c := compareInt(pv.major, pw.major); c != 0 { + return c + } + if c := compareInt(pv.minor, pw.minor); c != 0 { + return c + } + if c := compareInt(pv.patch, pw.patch); c != 0 { + return c + } + return comparePrerelease(pv.prerelease, pw.prerelease) +} + +// Max canonicalizes its arguments and then returns the version string +// that compares greater. +func Max(v, w string) string { + v = Canonical(v) + w = Canonical(w) + if Compare(v, w) > 0 { + return v + } + return w +} + +func parse(v string) (p parsed, ok bool) { + if v == "" || v[0] != 'v' { + p.err = "missing v prefix" + return + } + p.major, v, ok = parseInt(v[1:]) + if !ok { + p.err = "bad major version" + return + } + if v == "" { + p.minor = "0" + p.patch = "0" + p.short = ".0.0" + return + } + if v[0] != '.' { + p.err = "bad minor prefix" + ok = false + return + } + p.minor, v, ok = parseInt(v[1:]) + if !ok { + p.err = "bad minor version" + return + } + if v == "" { + p.patch = "0" + p.short = ".0" + return + } + if v[0] != '.' 
{ + p.err = "bad patch prefix" + ok = false + return + } + p.patch, v, ok = parseInt(v[1:]) + if !ok { + p.err = "bad patch version" + return + } + if len(v) > 0 && v[0] == '-' { + p.prerelease, v, ok = parsePrerelease(v) + if !ok { + p.err = "bad prerelease" + return + } + } + if len(v) > 0 && v[0] == '+' { + p.build, v, ok = parseBuild(v) + if !ok { + p.err = "bad build" + return + } + } + if v != "" { + p.err = "junk on end" + ok = false + return + } + ok = true + return +} + +func parseInt(v string) (t, rest string, ok bool) { + if v == "" { + return + } + if v[0] < '0' || '9' < v[0] { + return + } + i := 1 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + if v[0] == '0' && i != 1 { + return + } + return v[:i], v[i:], true +} + +func parsePrerelease(v string) (t, rest string, ok bool) { + // "A pre-release version MAY be denoted by appending a hyphen and + // a series of dot separated identifiers immediately following the patch version. + // Identifiers MUST comprise only ASCII alphanumerics and hyphen [0-9A-Za-z-]. + // Identifiers MUST NOT be empty. Numeric identifiers MUST NOT include leading zeroes." + if v == "" || v[0] != '-' { + return + } + i := 1 + start := 1 + for i < len(v) && v[i] != '+' { + if !isIdentChar(v[i]) && v[i] != '.' { + return + } + if v[i] == '.' { + if start == i || isBadNum(v[start:i]) { + return + } + start = i + 1 + } + i++ + } + if start == i || isBadNum(v[start:i]) { + return + } + return v[:i], v[i:], true +} + +func parseBuild(v string) (t, rest string, ok bool) { + if v == "" || v[0] != '+' { + return + } + i := 1 + start := 1 + for i < len(v) { + if !isIdentChar(v[i]) { + return + } + if v[i] == '.' { + if start == i { + return + } + start = i + 1 + } + i++ + } + if start == i { + return + } + return v[:i], v[i:], true +} + +func isIdentChar(c byte) bool { + return 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' || c == '-' +} + +func isBadNum(v string) bool { + i := 0 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + return i == len(v) && i > 1 && v[0] == '0' +} + +func isNum(v string) bool { + i := 0 + for i < len(v) && '0' <= v[i] && v[i] <= '9' { + i++ + } + return i == len(v) +} + +func compareInt(x, y string) int { + if x == y { + return 0 + } + if len(x) < len(y) { + return -1 + } + if len(x) > len(y) { + return +1 + } + if x < y { + return -1 + } else { + return +1 + } +} + +func comparePrerelease(x, y string) int { + // "When major, minor, and patch are equal, a pre-release version has + // lower precedence than a normal version. + // Example: 1.0.0-alpha < 1.0.0. + // Precedence for two pre-release versions with the same major, minor, + // and patch version MUST be determined by comparing each dot separated + // identifier from left to right until a difference is found as follows: + // identifiers consisting of only digits are compared numerically and + // identifiers with letters or hyphens are compared lexically in ASCII + // sort order. Numeric identifiers always have lower precedence than + // non-numeric identifiers. A larger set of pre-release fields has a + // higher precedence than a smaller set, if all of the preceding + // identifiers are equal. + // Example: 1.0.0-alpha < 1.0.0-alpha.1 < 1.0.0-alpha.beta < + // 1.0.0-beta < 1.0.0-beta.2 < 1.0.0-beta.11 < 1.0.0-rc.1 < 1.0.0." + if x == y { + return 0 + } + if x == "" { + return +1 + } + if y == "" { + return -1 + } + for x != "" && y != "" { + x = x[1:] // skip - or . + y = y[1:] // skip - or . 
+		var dx, dy string
+		dx, x = nextIdent(x)
+		dy, y = nextIdent(y)
+		if dx != dy {
+			ix := isNum(dx)
+			iy := isNum(dy)
+			if ix != iy {
+				if ix {
+					return -1
+				} else {
+					return +1
+				}
+			}
+			if ix {
+				if len(dx) < len(dy) {
+					return -1
+				}
+				if len(dx) > len(dy) {
+					return +1
+				}
+			}
+			if dx < dy {
+				return -1
+			} else {
+				return +1
+			}
+		}
+	}
+	if x == "" {
+		return -1
+	} else {
+		return +1
+	}
+}
+
+func nextIdent(x string) (dx, rest string) {
+	i := 0
+	for i < len(x) && x[i] != '.' {
+		i++
+	}
+	return x[:i], x[i:]
+}
diff --git a/vendor/modules.txt b/vendor/modules.txt
index eaddc78df..2e1085a81 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -2,8 +2,10 @@
 github.com/BurntSushi/toml
 # github.com/Microsoft/go-winio v0.4.12
 github.com/Microsoft/go-winio
-# github.com/buildpack/imgutil v0.0.0-20190509214933-76de939dfb34
+# github.com/buildpack/imgutil v0.0.0-20190702194220-aa6eb76474d3
 github.com/buildpack/imgutil
+github.com/buildpack/imgutil/local
+github.com/buildpack/imgutil/remote
 github.com/buildpack/imgutil/fakes
 # github.com/docker/distribution v2.7.1+incompatible
 github.com/docker/distribution/reference
@@ -36,7 +38,6 @@ github.com/docker/go-units
 # github.com/gogo/protobuf v1.2.1
 github.com/gogo/protobuf/proto
 # github.com/golang/mock v1.3.0
-github.com/golang/mock/mockgen/model
 github.com/golang/mock/gomock
 # github.com/google/go-cmp v0.3.0
 github.com/google/go-cmp/cmp
@@ -72,6 +73,19 @@ github.com/sclevine/spec/report
 golang.org/x/net/proxy
 golang.org/x/net/internal/socks
 # golang.org/x/sync v0.0.0-20190423024810-112230192c58
+golang.org/x/sync/singleflight
 golang.org/x/sync/errgroup
 # golang.org/x/sys v0.0.0-20190509141414-a5b02f93d862
 golang.org/x/sys/windows
+# golang.org/x/tools v0.0.0-20190624190245-7f2218787638
+golang.org/x/tools/cmd/goimports
+golang.org/x/tools/internal/imports
+golang.org/x/tools/go/ast/astutil
+golang.org/x/tools/go/packages
+golang.org/x/tools/internal/gopathwalk
+golang.org/x/tools/internal/module
+golang.org/x/tools/go/gcexportdata
+golang.org/x/tools/go/internal/packagesdriver
+golang.org/x/tools/internal/semver
+golang.org/x/tools/internal/fastwalk
+golang.org/x/tools/go/internal/gcimporter
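
The vendored module package above validates module paths and path/version agreement: the first path element must be a lower-case domain name containing a dot, and a major version of v2 or higher must be reflected in the path (gopkg.in paths use ".vN" instead of "/vN"). The package is internal to x/tools and cannot be imported from this repository; the sketch below is illustrative only and assumes its public counterpart, golang.org/x/mod/module, which exposes largely the same checks:

    package main

    import (
        "fmt"

        "golang.org/x/mod/module" // public counterpart of the vendored internal package
    )

    func main() {
        // The first path element must be a dotted, lower-case domain name.
        fmt.Println(module.CheckPath("github.com/buildpack/lifecycle")) // <nil>
        fmt.Println(module.CheckPath("example/thing"))                  // error: missing dot in first path element

        // Path and version must agree on the major version.
        fmt.Println(module.Check("gopkg.in/yaml.v2", "v2.2.2"))     // <nil>
        fmt.Println(module.Check("github.com/some/repo", "v2.0.0")) // non-nil: v2 requires a ".../v2" path suffix
    }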
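
The "Safe encodings" note in module.go explains why uppercase letters in module paths are escaped as "!" followed by the lowercase letter before paths reach case-insensitive file systems or proxy URLs. Below is a minimal self-contained sketch of that scheme, mirroring the encodeString/decodeString logic shown above; encodeBang and decodeBang are names invented here for illustration, not part of the vendored API:

    package main

    import (
        "fmt"
        "strings"
    )

    // encodeBang applies the "!"-for-uppercase rule: every 'A'..'Z'
    // becomes '!' followed by its lowercase equivalent.
    func encodeBang(path string) string {
        var b strings.Builder
        for _, r := range path {
            if 'A' <= r && r <= 'Z' {
                b.WriteByte('!')
                b.WriteByte(byte(r + 'a' - 'A'))
            } else {
                b.WriteRune(r)
            }
        }
        return b.String()
    }

    // decodeBang reverses encodeBang. It is a simplified version of
    // decodeString above: it rejects a dangling or malformed "!" sequence,
    // but skips the extra checks that reject uppercase and non-ASCII input.
    func decodeBang(enc string) (string, bool) {
        var b strings.Builder
        bang := false
        for _, r := range enc {
            if bang {
                if r < 'a' || r > 'z' {
                    return "", false
                }
                b.WriteByte(byte(r + 'A' - 'a'))
                bang = false
                continue
            }
            if r == '!' {
                bang = true
                continue
            }
            b.WriteRune(r)
        }
        if bang {
            return "", false
        }
        return b.String(), true
    }

    func main() {
        enc := encodeBang("github.com/Azure/azure-sdk-for-go")
        fmt.Println(enc) // github.com/!azure/azure-sdk-for-go
        dec, ok := decodeBang(enc)
        fmt.Println(dec, ok) // github.com/Azure/azure-sdk-for-go true
    }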
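
The vendored semver package implements the precedence rules quoted in comparePrerelease: a prerelease version sorts before its release, and numeric prerelease identifiers compare numerically rather than lexically. It is likewise internal to x/tools, but the same comparison functions are published as golang.org/x/mod/semver; assuming that module is available, a brief usage sketch:

    package main

    import (
        "fmt"

        "golang.org/x/mod/semver" // public counterpart of the vendored internal package
    )

    func main() {
        // A prerelease sorts before its release.
        fmt.Println(semver.Compare("v1.0.0-alpha", "v1.0.0")) // -1

        // Numeric prerelease identifiers compare numerically: 2 < 11.
        fmt.Println(semver.Compare("v1.0.0-beta.2", "v1.0.0-beta.11")) // -1

        // vMAJOR and vMAJOR.MINOR are shorthands for the missing ".0" parts.
        fmt.Println(semver.Canonical("v2.1")) // v2.1.0

        // The leading "v" is required; without it the version is invalid.
        fmt.Println(semver.IsValid("1.0.0")) // false
    }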