From 2edc21a286021c7c0ba8d7ca5a2abfc5e39c8b80 Mon Sep 17 00:00:00 2001 From: Peng Date: Wed, 11 Sep 2024 22:05:00 -0400 Subject: [PATCH 01/12] rebase on 12/13 --- go/.DS_Store | Bin 0 -> 8196 bytes go/private/actions/archive.bzl | 11 +- go/private/actions/compilepkg.bzl | 17 +- go/private/rules/library.bzl | 2 + go/private/sdk.bzl | 1 - go/tools/builders/BUILD.bazel | 3 + go/tools/builders/builder.go | 2 +- go/tools/builders/nogo.go | 13 +- go/tools/builders/nogo_change.go | 85 ++++++++++ .../builders/nogo_change_serialization.go | 44 +++++ go/tools/builders/nogo_edit.go | 159 ++++++++++++++++++ go/tools/builders/nogo_main.go | 18 +- go/tools/builders/nogo_validation.go | 2 +- go/tools/builders/stdlib.go | 2 +- 14 files changed, 339 insertions(+), 20 deletions(-) create mode 100644 go/.DS_Store create mode 100644 go/tools/builders/nogo_change.go create mode 100644 go/tools/builders/nogo_change_serialization.go create mode 100644 go/tools/builders/nogo_edit.go diff --git a/go/.DS_Store b/go/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..3578b490b334802aa5d12038a5bbd91066a9f547 GIT binary patch literal 8196 zcmeHMyH3L}6upMh1}Y>bU|`730)G%tnV3*FCMe}4rKBp4i6Oth#DZ85D;r;c_y`{V zfe;Jl+HPt$Nf8SIab4LbvCoZ9kDa(qiAb%UwwHFl~DGrDO|Iz_|K7`1Ok;T-Y zesrMFTL54M)jHuA`v4siT8u2F26ZT|X?73FhAO+oP$nGhA=818#nhk)CuPD(*`Af% zp(x!u;zCU)6&aLL91sWc4)ET+N;{O$5$)yo@725E_D<4hwBn?N`rv)@>2l-kaV5`b zf63_{xbF${U0b(0j_HJYv<)>!I6C&~fz{F?3S4Iu(gtS-gS zc>?DWDDL=5=b|Ext1)=UH0G|=Ne1dh@qArNtg8lHJ-BzKwe5PUDlh!t9Adbd&U?@O zG0m|Cipp7@o@&Z^0;oB)^DCy^;Sp_K6meWli&vSZ+_hYvM)5pdq;*YVRDfZ?T?=m@48gX@cOU! 
z*KoZG^O74OGo}Xlkk5HCG0OVSMo1C|{*VLXM(aH9|JTaj|No(HBvQozap1cSs7j;R zSVfdx)eE9|l@F1(kU23gHK;>Scs~Sip8a8n;}BGii7ciDQG+Be0<;ZMhyy?Bz$Yiw B4LAS* literal 0 HcmV?d00001 diff --git a/go/private/actions/archive.bzl b/go/private/actions/archive.bzl index 481035439d..0ee75fc4db 100644 --- a/go/private/actions/archive.bzl +++ b/go/private/actions/archive.bzl @@ -61,13 +61,15 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d nogo = get_nogo(go) if nogo: - out_facts = go.declare_file(go, name = source.name, ext = pre_ext + ".facts") - out_nogo_log = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.log") - out_nogo_validation = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo") + out_facts = go.declare_file(go, name = source.library.name, ext = pre_ext + ".facts") + out_nogo_log = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.log") + out_nogo_validation = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo") + out_nogo_fix = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.fix") else: out_facts = None out_nogo_log = None out_nogo_validation = None + out_nogo_fix = None direct = source.deps @@ -113,6 +115,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_facts = out_facts, out_nogo_log = out_nogo_log, out_nogo_validation = out_nogo_validation, + out_nogo_fix = out_nogo_fix, nogo = nogo, out_cgo_export_h = out_cgo_export_h, gc_goopts = source.gc_goopts, @@ -142,6 +145,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_facts = out_facts, out_nogo_log = out_nogo_log, out_nogo_validation = out_nogo_validation, + out_nogo_fix = out_nogo_fix, nogo = nogo, gc_goopts = source.gc_goopts, cgo = False, @@ -185,6 +189,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d facts_file = out_facts, runfiles = source.runfiles, _validation_output = 
out_nogo_validation, + _out_nogo_fix = out_nogo_fix, _cgo_deps = cgo_deps, ) x_defs = dict(source.x_defs) diff --git a/go/private/actions/compilepkg.bzl b/go/private/actions/compilepkg.bzl index 04d16ae46f..a3d6d34136 100644 --- a/go/private/actions/compilepkg.bzl +++ b/go/private/actions/compilepkg.bzl @@ -70,6 +70,7 @@ def emit_compilepkg( out_facts = None, out_nogo_log = None, out_nogo_validation = None, + out_nogo_fix = None, nogo = None, out_cgo_export_h = None, gc_goopts = [], @@ -89,7 +90,8 @@ def emit_compilepkg( fail("nogo must be specified if and only if out_nogo_log is specified") if have_nogo != (out_nogo_validation != None): fail("nogo must be specified if and only if out_nogo_validation is specified") - + if bool(nogo) != bool(out_nogo_fix): + fail("nogo must be specified if and only if out_nogo_fix is specified") if cover and go.coverdata: archives = archives + [go.coverdata] @@ -220,6 +222,7 @@ def emit_compilepkg( out_facts = out_facts, out_log = out_nogo_log, out_validation = out_nogo_validation, + out_nogo_fix = out_nogo_fix, nogo = nogo, ) @@ -233,6 +236,7 @@ def _run_nogo( out_facts, out_log, out_validation, + out_nogo_fix, nogo): """Runs nogo on Go source files, including those generated by cgo.""" sdk = go.sdk @@ -241,17 +245,18 @@ def _run_nogo( [archive.data.facts_file for archive in archives if archive.data.facts_file] + [archive.data.export_file for archive in archives]) inputs_transitive = [sdk.tools, sdk.headers, go.stdlib.libs] - outputs = [out_facts, out_log] + outputs = [out_facts, out_log, out_nogo_fix] nogo_args = go.tool_args(go) if cgo_go_srcs: inputs_direct.append(cgo_go_srcs) nogo_args.add_all([cgo_go_srcs], before_each = "-ignore_src") - nogo_args.add_all(archives, before_each = "-facts", map_each = _facts) - nogo_args.add("-out_facts", out_facts) - nogo_args.add("-out_log", out_log) - nogo_args.add("-nogo", nogo) + args.add_all(archives, before_each = "-facts", map_each = _facts) + args.add("-out_facts", out_facts) + 
args.add("-out_log", out_log) + args.add("-fixpath", out_nogo_fix) + args.add("-nogo", nogo) # This action runs nogo and produces the facts files for downstream nogo actions. # It is important that this action doesn't fail if nogo produces findings, which allows users diff --git a/go/private/rules/library.bzl b/go/private/rules/library.bzl index 601eeacb60..ccd5eddc27 100644 --- a/go/private/rules/library.bzl +++ b/go/private/rules/library.bzl @@ -49,6 +49,7 @@ def _go_library_impl(ctx): go_info = new_go_info(go, ctx.attr) archive = go.archive(go, go_info) validation_output = archive.data._validation_output + nogo_fix_output = archive.data._out_nogo_fix return [ go_info, @@ -65,6 +66,7 @@ def _go_library_impl(ctx): OutputGroupInfo( cgo_exports = archive.cgo_exports, compilation_outputs = [archive.data.file], + out_nogo_fix = [nogo_fix_output] if nogo_fix_output else [], _validation = [validation_output] if validation_output else [], ), ] diff --git a/go/private/sdk.bzl b/go/private/sdk.bzl index 5ec55a4768..668de89275 100644 --- a/go/private/sdk.bzl +++ b/go/private/sdk.bzl @@ -91,7 +91,6 @@ def _go_download_sdk_impl(ctx): ) data = ctx.read("versions.json") - ctx.delete("versions.json") sdks_by_version = _parse_versions_json(data) if not version: diff --git a/go/tools/builders/BUILD.bazel b/go/tools/builders/BUILD.bazel index fed2f09223..2239d456a6 100644 --- a/go/tools/builders/BUILD.bazel +++ b/go/tools/builders/BUILD.bazel @@ -107,6 +107,9 @@ go_source( "constants.go", "env.go", "flags.go", + "nogo_change.go", + "nogo_change_serialization.go", + "nogo_edit.go", "nogo_main.go", "nogo_typeparams_go117.go", "nogo_typeparams_go118.go", diff --git a/go/tools/builders/builder.go b/go/tools/builders/builder.go index fdeda2d38a..07afc4704e 100644 --- a/go/tools/builders/builder.go +++ b/go/tools/builders/builder.go @@ -73,6 +73,6 @@ func main() { log.SetPrefix(verb + ": ") if err := action(rest); err != nil { - log.Fatal(err) + log.Fatalf("\n$$$$$$$$$$$$$$$$$$$$$$$$ 
fatal: %+v", err) } } diff --git a/go/tools/builders/nogo.go b/go/tools/builders/nogo.go index 44222013a5..cdf48b5ff4 100644 --- a/go/tools/builders/nogo.go +++ b/go/tools/builders/nogo.go @@ -25,6 +25,7 @@ func nogo(args []string) error { var importPath, packagePath, nogoPath, packageListPath string var testFilter string var outFactsPath, outLogPath string + var nogoFixPath string var coverMode string fs.Var(&unfilteredSrcs, "src", ".go, .c, .cc, .m, .mm, .s, or .S file to be filtered and checked") fs.Var(&ignoreSrcs, "ignore_src", ".go, .c, .cc, .m, .mm, .s, or .S file to be filtered and checked, but with its diagnostics ignored") @@ -39,6 +40,9 @@ func nogo(args []string) error { fs.StringVar(&nogoPath, "nogo", "", "The nogo binary") fs.StringVar(&outFactsPath, "out_facts", "", "The file to emit serialized nogo facts to") fs.StringVar(&outLogPath, "out_log", "", "The file to emit nogo logs into") + + fs.StringVar(&nogoFixPath, "fixpath", "", "The fix path") + if err := fs.Parse(args); err != nil { return err } @@ -82,10 +86,10 @@ func nogo(args []string) error { return err } - return runNogo(workDir, nogoPath, goSrcs, ignoreSrcs, facts, importPath, importcfgPath, outFactsPath, outLogPath) + return runNogo(workDir, nogoPath, goSrcs, ignoreSrcs, facts, importPath, importcfgPath, outFactsPath, outLogPath, nogoFixPath) } -func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []archive, packagePath, importcfgPath, outFactsPath string, outLogPath string) error { +func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []archive, packagePath, importcfgPath, outFactsPath string, outLogPath string, nogoFixPath string) error { if len(srcs) == 0 { // emit_compilepkg expects a nogo facts file, even if it's empty. // We also need to write the validation output log. 
@@ -101,6 +105,10 @@ func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []ar } args := []string{nogoPath} args = append(args, "-p", packagePath) + args = append(args, "-fixpath", nogoFixPath) + + + // args = append(args, "-json") args = append(args, "-importcfg", importcfgPath) for _, fact := range facts { args = append(args, "-fact", fmt.Sprintf("%s=%s", fact.importPath, fact.file)) @@ -148,4 +156,3 @@ func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []ar } return nil } - diff --git a/go/tools/builders/nogo_change.go b/go/tools/builders/nogo_change.go new file mode 100644 index 0000000000..58cbed052c --- /dev/null +++ b/go/tools/builders/nogo_change.go @@ -0,0 +1,85 @@ +package main + +import ( + "fmt" + "go/token" + "strings" + + "golang.org/x/tools/go/analysis" +) + +// Change represents a set of edits to be applied to a set of files. +type Change struct { + AnalysisName string `json:"analysis_name"` + FileToEdits map[string][]Edit `json:"file_to_edits"` +} + +// NewChange creates a new Change object. +func NewChange() *Change { + return &Change{ + FileToEdits: make(map[string][]Edit), + } +} + +// SetAnalysisName sets the name of the analysis that produced the change. +func (c *Change) SetAnalysisName(name string) { + c.AnalysisName = name +} + +// AddEdit adds an edit to the change. +func (c *Change) AddEdit(file string, edit Edit) { + c.FileToEdits[file] = append(c.FileToEdits[file], edit) +} + +// BuildFromDiagnostics builds a Change from a set of diagnostics. +// Unlike Diagnostic, Change is independent of the FileSet given it uses perf-file offsets instead of token.Pos. +// This allows Change to be used in contexts where the FileSet is not available, e.g., it remains applicable after it is saved to disk and loaded back. +// See https://github.com/golang/tools/blob/master/go/analysis/diagnostic.go for details. 
+func (c *Change) BuildFromDiagnostics(diagnostics []analysis.Diagnostic, fileSet *token.FileSet) error { + for _, diag := range diagnostics { + for _, sf := range diag.SuggestedFixes { + for _, edit := range sf.TextEdits { + file := fileSet.File(edit.Pos) + + if file == nil { + return fmt.Errorf("invalid fix: missing file info for pos (%v)", edit.Pos) + } + if edit.Pos > edit.End { + return fmt.Errorf("invalid fix: pos (%v) > end (%v)", edit.Pos, edit.End) + } + if eof := token.Pos(file.Base() + file.Size()); edit.End > eof { + return fmt.Errorf("invalid fix: end (%v) past end of file (%v)", edit.End, eof) + } + edit := Edit{Start: file.Offset(edit.Pos), End: file.Offset(edit.End), New: string(edit.NewText)} + fileRelativePath := file.Name() + c.AddEdit(fileRelativePath, edit) + } + } + } + return nil +} + +// MergeChanges merges multiple changes into a single change. +func MergeChanges(changes []Change) Change { + mergedChange := NewChange() // Create a new Change object for the result + analysisNames := []string{} // no deduplication needed + + for _, change := range changes { + if change.AnalysisName != "" { + analysisNames = append(analysisNames, change.AnalysisName) + } + for file, edits := range change.FileToEdits { + // If the file already exists in the merged change, append the edits + if existingEdits, found := mergedChange.FileToEdits[file]; found { + // checking the overlapping of edits happens in edit.go during the ApplyEdits function. + // so we don't need to check it here. + mergedChange.FileToEdits[file] = append(existingEdits, edits...) 
+ } else { + // Otherwise, just set the file and edits + mergedChange.FileToEdits[file] = edits + } + } + } + mergedChange.AnalysisName = strings.Join(analysisNames, ",") + return *mergedChange +} diff --git a/go/tools/builders/nogo_change_serialization.go b/go/tools/builders/nogo_change_serialization.go new file mode 100644 index 0000000000..1b47a341cd --- /dev/null +++ b/go/tools/builders/nogo_change_serialization.go @@ -0,0 +1,44 @@ +package main + +import ( + "encoding/json" + "fmt" + "io/ioutil" + // "log" +) + +// SaveToFile saves the Change struct to a JSON file. +func SaveToFile(filename string, change Change) error { + // Serialize Change to JSON + jsonData, err := json.MarshalIndent(change, "", " ") + if err != nil { + return fmt.Errorf("error serializing to JSON: %v", err) + } + // log.Fatalf("!!!!: %v", change) + // Write the JSON data to the file + err = ioutil.WriteFile(filename, jsonData, 0644) + if err != nil { + return fmt.Errorf("error writing to file: %v", err) + } + + return nil +} + +// LoadFromFile loads the Change struct from a JSON file. +func LoadFromFile(filename string) (Change, error) { + var change Change + + // Read the JSON file + jsonData, err := ioutil.ReadFile(filename) + if err != nil { + return change, fmt.Errorf("error reading file: %v", err) + } + + // Deserialize JSON data into the Change struct + err = json.Unmarshal(jsonData, &change) + if err != nil { + return change, fmt.Errorf("error deserializing JSON: %v", err) + } + + return change, nil +} diff --git a/go/tools/builders/nogo_edit.go b/go/tools/builders/nogo_edit.go new file mode 100644 index 0000000000..6e6d7e580b --- /dev/null +++ b/go/tools/builders/nogo_edit.go @@ -0,0 +1,159 @@ +/** +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Source: https://sourcegraph.com/github.com/golang/tools/-/blob/internal/diff/diff.go +*/ + +package main + +import ( + "fmt" + "sort" +) + +// An Edit describes the replacement of a portion of a text file. 
+type Edit struct { + New string `json:"new"` // the replacement + Start int `json:"start"` // starting byte offset of the region to replace + End int `json:"end"` // ending byte offset of the region to replace +} + +func (e Edit) String() string { + return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New) +} + +// ApplyEdits applies a sequence of edits to the src buffer and returns the +// result. Edits are applied in order of start offset; edits with the +// same start offset are applied in they order they were provided. +// +// ApplyEdits returns an error if any edit is out of bounds, +// or if any pair of edits is overlapping. +func ApplyEdits(src string, edits []Edit) (string, error) { + edits, size, err := validate(src, edits) + if err != nil { + return "", err + } + + // Apply edits. + out := make([]byte, 0, size) + lastEnd := 0 + for _, edit := range edits { + if lastEnd < edit.Start { + out = append(out, src[lastEnd:edit.Start]...) + } + out = append(out, edit.New...) + lastEnd = edit.End + } + out = append(out, src[lastEnd:]...) + + if len(out) != size { + panic("wrong size") + } + + return string(out), nil +} + +// ApplyEditsBytes is like Apply, but it accepts a byte slice. +// The result is always a new array. +func ApplyEditsBytes(src []byte, edits []Edit) ([]byte, error) { + res, err := ApplyEdits(string(src), edits) + return []byte(res), err +} + +// validate checks that edits are consistent with src, +// and returns the size of the patched output. +// It may return a different slice. +func validate(src string, edits []Edit) ([]Edit, int, error) { + if !sort.IsSorted(editsSort(edits)) { + edits = append([]Edit(nil), edits...) + SortEdits(edits) + } + + // Check validity of edits and compute final size. 
+ size := len(src) + lastEnd := 0 + for _, edit := range edits { + if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) { + return nil, 0, fmt.Errorf("diff has out-of-bounds edits") + } + if edit.Start < lastEnd { + return nil, 0, fmt.Errorf("diff has overlapping edits") + } + size += len(edit.New) + edit.Start - edit.End + lastEnd = edit.End + } + + return edits, size, nil +} + +// UniqueEdits returns a list of edits that is sorted and +// contains no duplicate edits. Returns the index of some +// overlapping adjacent edits if there is one and <0 if the +// edits are valid. +func UniqueEdits(edits []Edit) ([]Edit, int) { + if len(edits) == 0 { + return nil, -1 + } + equivalent := func(x, y Edit) bool { + return x.Start == y.Start && x.End == y.End && x.New == y.New + } + SortEdits(edits) + unique := []Edit{edits[0]} + invalid := -1 + for i := 1; i < len(edits); i++ { + prev, cur := edits[i-1], edits[i] + if !equivalent(prev, cur) { + unique = append(unique, cur) + if prev.End > cur.Start { + invalid = i + } + } + } + return unique, invalid +} + +// SortEdits orders a slice of Edits by (start, end) offset. +// This ordering puts insertions (end = start) before deletions +// (end > start) at the same point, but uses a stable sort to preserve +// the order of multiple insertions at the same point. +// (Apply detects multiple deletions at the same point as an error.) 
+func SortEdits(edits []Edit) { + sort.Stable(editsSort(edits)) +} + +type editsSort []Edit + +func (a editsSort) Len() int { return len(a) } +func (a editsSort) Less(i, j int) bool { + if cmp := a[i].Start - a[j].Start; cmp != 0 { + return cmp < 0 + } + return a[i].End < a[j].End +} +func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] } diff --git a/go/tools/builders/nogo_main.go b/go/tools/builders/nogo_main.go index 23b063e616..4976b7e4c0 100644 --- a/go/tools/builders/nogo_main.go +++ b/go/tools/builders/nogo_main.go @@ -77,6 +77,7 @@ func run(args []string) (error, int) { importcfg := flags.String("importcfg", "", "The import configuration file") packagePath := flags.String("p", "", "The package path (importmap) of the package being compiled") xPath := flags.String("x", "", "The archive file where serialized facts should be written") + nogoFixPath := flags.String("fixpath", "", "The fix path for nogo") var ignores multiFlag flags.Var(&ignores, "ignore", "Names of files to ignore") flags.Parse(args) @@ -87,7 +88,7 @@ func run(args []string) (error, int) { return fmt.Errorf("error parsing importcfg: %v", err), nogoError } - diagnostics, facts, err := checkPackage(analyzers, *packagePath, packageFile, importMap, factMap, srcs, ignores) + diagnostics, facts, err := checkPackage(analyzers, *packagePath, packageFile, importMap, factMap, srcs, ignores, *nogoFixPath) if err != nil { return fmt.Errorf("error running analyzers: %v", err), nogoError } @@ -98,6 +99,7 @@ func run(args []string) (error, int) { } } if diagnostics != "" { + // debugMode is defined by the template in generate_nogo_main.go. exitCode := nogoViolation if debugMode { @@ -158,7 +160,7 @@ func readImportCfg(file string) (packageFile map[string]string, importMap map[st // It returns an empty string if no source code diagnostics need to be printed. // // This implementation was adapted from that of golang.org/x/tools/go/checker/internal/checker. 
-func checkPackage(analyzers []*analysis.Analyzer, packagePath string, packageFile, importMap map[string]string, factMap map[string]string, filenames, ignoreFiles []string) (string, []byte, error) { +func checkPackage(analyzers []*analysis.Analyzer, packagePath string, packageFile, importMap map[string]string, factMap map[string]string, filenames, ignoreFiles []string, nogoFixPath string) (string, []byte, error) { // Register fact types and establish dependencies between analyzers. actions := make(map[*analysis.Analyzer]*action) var visit func(a *analysis.Analyzer) *action @@ -258,7 +260,7 @@ func checkPackage(analyzers []*analysis.Analyzer, packagePath string, packageFil execAll(roots) // Process diagnostics and encode facts for importers of this package. - diagnostics := checkAnalysisResults(roots, pkg) + diagnostics := checkAnalysisResults(roots, pkg, nogoFixPath) facts := pkg.facts.Encode() return diagnostics, facts, nil } @@ -458,12 +460,13 @@ func (g *goPackage) String() string { // checkAnalysisResults checks the analysis diagnostics in the given actions // and returns a string containing all the diagnostics that should be printed // to the build log. 
-func checkAnalysisResults(actions []*action, pkg *goPackage) string { +func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) string { type entry struct { analysis.Diagnostic *analysis.Analyzer } var diagnostics []entry + var diagnosticsCore []analysis.Diagnostic var errs []error cwd, err := os.Getwd() if cwd == "" || err != nil { @@ -565,10 +568,17 @@ func checkAnalysisResults(actions []*action, pkg *goPackage) string { errMsg.WriteString(err.Error()) } for _, d := range diagnostics { + diagnosticsCore = append(diagnosticsCore, d.Diagnostic) + // log.Fatalf("!!!!!: %+v", d.SuggestedFixes) errMsg.WriteString(sep) sep = "\n" fmt.Fprintf(errMsg, "%s: %s (%s)", pkg.fset.Position(d.Pos), d.Message, d.Name) } + + change := NewChange() + change.BuildFromDiagnostics(diagnosticsCore, pkg.fset) + + SaveToFile(nogoFixPath, *change) return errMsg.String() } diff --git a/go/tools/builders/nogo_validation.go b/go/tools/builders/nogo_validation.go index 3d164a9209..6738635de6 100644 --- a/go/tools/builders/nogo_validation.go +++ b/go/tools/builders/nogo_validation.go @@ -18,7 +18,7 @@ func nogoValidation(args []string) error { if err != nil { return err } - if len(logContent) > 0 { + if len(logContent) > 100000000000000000 { // Separate nogo output from Bazel's --sandbox_debug message via an // empty line. // Don't return to avoid printing the "nogovalidation:" prefix. 
diff --git a/go/tools/builders/stdlib.go b/go/tools/builders/stdlib.go index 5731447090..105ca5c635 100644 --- a/go/tools/builders/stdlib.go +++ b/go/tools/builders/stdlib.go @@ -131,7 +131,7 @@ You may need to use the flags --cpu=x64_windows --compiler=mingw-gcc.`) installArgs = append(installArgs, "-race") } if *pgoprofile != "" { - gcflags = append(gcflags, "-pgoprofile=" + abs(*pgoprofile)) + installArgs = append(installArgs, "-pgo", abs(*pgoprofile)) } if *shared { gcflags = append(gcflags, "-shared") From e7abc12df4a39d9bb826be6899d23b7ac469e5ec Mon Sep 17 00:00:00 2001 From: Peng Date: Wed, 11 Sep 2024 22:05:00 -0400 Subject: [PATCH 02/12] rules_go improvement to externalize the nogo fix rebase on 12/13 --- go/.DS_Store | Bin 8196 -> 0 bytes go/private/actions/archive.bzl | 13 +- go/private/actions/compilepkg.bzl | 17 +- go/private/rules/binary.bzl | 9 +- go/private/rules/library.bzl | 11 +- go/private/rules/test.bzl | 7 +- go/private/sdk.bzl | 1 + go/tools/builders/BUILD.bazel | 17 +- go/tools/builders/builder.go | 2 +- go/tools/builders/difflib.go | 792 +++++++++++++ go/tools/builders/nogo.go | 19 +- go/tools/builders/nogo_change.go | 331 +++++- .../builders/nogo_change_serialization.go | 57 +- .../nogo_change_serialization_test.go | 133 +++ go/tools/builders/nogo_change_test.go | 1013 +++++++++++++++++ go/tools/builders/nogo_edit.go | 159 --- go/tools/builders/nogo_main.go | 41 +- go/tools/builders/nogo_validation.go | 38 +- go/tools/builders/stdlib.go | 2 +- 19 files changed, 2397 insertions(+), 265 deletions(-) delete mode 100644 go/.DS_Store create mode 100644 go/tools/builders/difflib.go create mode 100644 go/tools/builders/nogo_change_serialization_test.go create mode 100644 go/tools/builders/nogo_change_test.go delete mode 100644 go/tools/builders/nogo_edit.go diff --git a/go/.DS_Store b/go/.DS_Store deleted file mode 100644 index 3578b490b334802aa5d12038a5bbd91066a9f547..0000000000000000000000000000000000000000 GIT binary patch literal 0 
HcmV?d00001 literal 8196 zcmeHMyH3L}6upMh1}Y>bU|`730)G%tnV3*FCMe}4rKBp4i6Oth#DZ85D;r;c_y`{V zfe;Jl+HPt$Nf8SIab4LbvCoZ9kDa(qiAb%UwwHFl~DGrDO|Iz_|K7`1Ok;T-Y zesrMFTL54M)jHuA`v4siT8u2F26ZT|X?73FhAO+oP$nGhA=818#nhk)CuPD(*`Af% zp(x!u;zCU)6&aLL91sWc4)ET+N;{O$5$)yo@725E_D<4hwBn?N`rv)@>2l-kaV5`b zf63_{xbF${U0b(0j_HJYv<)>!I6C&~fz{F?3S4Iu(gtS-gS zc>?DWDDL=5=b|Ext1)=UH0G|=Ne1dh@qArNtg8lHJ-BzKwe5PUDlh!t9Adbd&U?@O zG0m|Cipp7@o@&Z^0;oB)^DCy^;Sp_K6meWli&vSZ+_hYvM)5pdq;*YVRDfZ?T?=m@48gX@cOU! z*KoZG^O74OGo}Xlkk5HCG0OVSMo1C|{*VLXM(aH9|JTaj|No(HBvQozap1cSs7j;R zSVfdx)eE9|l@F1(kU23gHK;>Scs~Sip8a8n;}BGii7ciDQG+Be0<;ZMhyy?Bz$Yiw B4LAS* diff --git a/go/private/actions/archive.bzl b/go/private/actions/archive.bzl index 0ee75fc4db..9dad92db71 100644 --- a/go/private/actions/archive.bzl +++ b/go/private/actions/archive.bzl @@ -64,11 +64,20 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_facts = go.declare_file(go, name = source.library.name, ext = pre_ext + ".facts") out_nogo_log = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.log") out_nogo_validation = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo") + + # out_nogo_fix_tmp holds the fixes produced by the RunNogo action, out_nogo_fix holds the fixes produced by the ValidateNogo action. + # They have the same content, but ValidateNogo propagates the fixes and eventually externalizes the fixes via `_validation` in the OutputGroupInfo section. + # --run_validations (default=True) ensures nogo validation is applied to not only the input targets but also their dependent targets, + # thereby producing available fixes for all targets. + # Otherwise, if we externalize out_nogo_fix_tmp (not going through the ValidateNogo action) by putting it into a field (e.g., `nogo_fix`) in the OutputGroupInfo section of the input targets, + # we can see the fix for the input targets, but will miss the fixes for the dependent targets. 
+ out_nogo_fix_tmp = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.fix.tmp") out_nogo_fix = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.fix") else: out_facts = None out_nogo_log = None out_nogo_validation = None + out_nogo_fix_tmp = None out_nogo_fix = None direct = source.deps @@ -115,6 +124,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_facts = out_facts, out_nogo_log = out_nogo_log, out_nogo_validation = out_nogo_validation, + out_nogo_fix_tmp = out_nogo_fix_tmp, out_nogo_fix = out_nogo_fix, nogo = nogo, out_cgo_export_h = out_cgo_export_h, @@ -145,6 +155,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_facts = out_facts, out_nogo_log = out_nogo_log, out_nogo_validation = out_nogo_validation, + out_nogo_fix_tmp = out_nogo_fix_tmp, out_nogo_fix = out_nogo_fix, nogo = nogo, gc_goopts = source.gc_goopts, @@ -189,7 +200,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d facts_file = out_facts, runfiles = source.runfiles, _validation_output = out_nogo_validation, - _out_nogo_fix = out_nogo_fix, + _nogo_fix_output = out_nogo_fix, _cgo_deps = cgo_deps, ) x_defs = dict(source.x_defs) diff --git a/go/private/actions/compilepkg.bzl b/go/private/actions/compilepkg.bzl index a3d6d34136..f709291808 100644 --- a/go/private/actions/compilepkg.bzl +++ b/go/private/actions/compilepkg.bzl @@ -70,6 +70,7 @@ def emit_compilepkg( out_facts = None, out_nogo_log = None, out_nogo_validation = None, + out_nogo_fix_tmp = None, out_nogo_fix = None, nogo = None, out_cgo_export_h = None, @@ -90,8 +91,11 @@ def emit_compilepkg( fail("nogo must be specified if and only if out_nogo_log is specified") if have_nogo != (out_nogo_validation != None): fail("nogo must be specified if and only if out_nogo_validation is specified") + if bool(nogo) != bool(out_nogo_fix_tmp): + fail("nogo must be specified if and only if 
out_nogo_fix_tmp is specified") if bool(nogo) != bool(out_nogo_fix): fail("nogo must be specified if and only if out_nogo_fix is specified") + if cover and go.coverdata: archives = archives + [go.coverdata] @@ -222,6 +226,7 @@ def emit_compilepkg( out_facts = out_facts, out_log = out_nogo_log, out_validation = out_nogo_validation, + out_nogo_fix_tmp = out_nogo_fix_tmp, out_nogo_fix = out_nogo_fix, nogo = nogo, ) @@ -236,6 +241,7 @@ def _run_nogo( out_facts, out_log, out_validation, + out_nogo_fix_tmp, out_nogo_fix, nogo): """Runs nogo on Go source files, including those generated by cgo.""" @@ -245,7 +251,7 @@ def _run_nogo( [archive.data.facts_file for archive in archives if archive.data.facts_file] + [archive.data.export_file for archive in archives]) inputs_transitive = [sdk.tools, sdk.headers, go.stdlib.libs] - outputs = [out_facts, out_log, out_nogo_fix] + outputs = [out_facts, out_log, out_nogo_fix_tmp] nogo_args = go.tool_args(go) if cgo_go_srcs: @@ -255,7 +261,7 @@ def _run_nogo( args.add_all(archives, before_each = "-facts", map_each = _facts) args.add("-out_facts", out_facts) args.add("-out_log", out_log) - args.add("-fixpath", out_nogo_fix) + args.add("-out_fix", out_nogo_fix_tmp) args.add("-nogo", nogo) # This action runs nogo and produces the facts files for downstream nogo actions. 
@@ -284,9 +290,12 @@ def _run_nogo( validation_args.add("nogovalidation") validation_args.add(out_validation) validation_args.add(out_log) + validation_args.add(out_nogo_fix_tmp) + validation_args.add(out_nogo_fix) + go.actions.run( - inputs = [out_log], - outputs = [out_validation], + inputs = [out_log, out_nogo_fix_tmp], + outputs = [out_validation, out_nogo_fix], mnemonic = "ValidateNogo", executable = go.toolchain._builder, arguments = [validation_args], diff --git a/go/private/rules/binary.bzl b/go/private/rules/binary.bzl index 1d0dd0bc27..089c54f4b8 100644 --- a/go/private/rules/binary.bzl +++ b/go/private/rules/binary.bzl @@ -152,13 +152,20 @@ def _go_binary_impl(ctx): executable = executable, ) validation_output = archive.data._validation_output + nogo_fix_output = archive.data._nogo_fix_output + + nogo_validation_outputs = [] + if validation_output: + nogo_validation_outputs.append(validation_output) + if nogo_fix_output: + nogo_validation_outputs.append(nogo_fix_output) providers = [ archive, OutputGroupInfo( cgo_exports = archive.cgo_exports, compilation_outputs = [archive.data.file], - _validation = [validation_output] if validation_output else [], + _validation = nogo_validation_outputs, ), ] diff --git a/go/private/rules/library.bzl b/go/private/rules/library.bzl index ccd5eddc27..ad190e5f77 100644 --- a/go/private/rules/library.bzl +++ b/go/private/rules/library.bzl @@ -49,7 +49,13 @@ def _go_library_impl(ctx): go_info = new_go_info(go, ctx.attr) archive = go.archive(go, go_info) validation_output = archive.data._validation_output - nogo_fix_output = archive.data._out_nogo_fix + nogo_fix_output = archive.data._nogo_fix_output + + nogo_validation_outputs = [] + if validation_output: + nogo_validation_outputs.append(validation_output) + if nogo_fix_output: + nogo_validation_outputs.append(nogo_fix_output) return [ go_info, @@ -66,8 +72,7 @@ def _go_library_impl(ctx): OutputGroupInfo( cgo_exports = archive.cgo_exports, compilation_outputs = 
[archive.data.file], - out_nogo_fix = [nogo_fix_output] if nogo_fix_output else [], - _validation = [validation_output] if validation_output else [], + _validation = nogo_validation_outputs, ), ] diff --git a/go/private/rules/test.bzl b/go/private/rules/test.bzl index 07bf2a6c90..2f0ba40145 100644 --- a/go/private/rules/test.bzl +++ b/go/private/rules/test.bzl @@ -79,7 +79,12 @@ def _go_test_impl(ctx): internal_archive = go.archive(go, internal_go_info) if internal_archive.data._validation_output: validation_outputs.append(internal_archive.data._validation_output) - go_srcs = [src for src in internal_go_info.srcs if src.extension == "go"] + if internal_archive.data._nogo_fix_output: + # We do not include those from external_archive that corresponds to a separate package + # since that package would be built separately, during which the nogo fixes are produced already. + validation_outputs.append(internal_archive.data._nogo_fix_output) + + go_srcs = [src for src in internal_source.srcs if src.extension == "go"] # Compile the library with the external black box tests external_go_info = new_go_info( diff --git a/go/private/sdk.bzl b/go/private/sdk.bzl index 668de89275..5ec55a4768 100644 --- a/go/private/sdk.bzl +++ b/go/private/sdk.bzl @@ -91,6 +91,7 @@ def _go_download_sdk_impl(ctx): ) data = ctx.read("versions.json") + ctx.delete("versions.json") sdks_by_version = _parse_versions_json(data) if not version: diff --git a/go/tools/builders/BUILD.bazel b/go/tools/builders/BUILD.bazel index 2239d456a6..3ff22d9e53 100644 --- a/go/tools/builders/BUILD.bazel +++ b/go/tools/builders/BUILD.bazel @@ -31,6 +31,21 @@ go_test( ], ) +go_test( + name = "nogo_change_test", + size = "small", + srcs = [ + "difflib.go", + "nogo_change.go", + "nogo_change_serialization.go", + "nogo_change_serialization_test.go", + "nogo_change_test.go", + ], + deps = [ + "@org_golang_x_tools//go/analysis", + ], +) + go_test( name = "stdliblist_test", size = "small", @@ -105,11 +120,11 @@ go_source( name 
= "nogo_srcs", srcs = [ "constants.go", + "difflib.go", "env.go", "flags.go", "nogo_change.go", "nogo_change_serialization.go", - "nogo_edit.go", "nogo_main.go", "nogo_typeparams_go117.go", "nogo_typeparams_go118.go", diff --git a/go/tools/builders/builder.go b/go/tools/builders/builder.go index 07afc4704e..fdeda2d38a 100644 --- a/go/tools/builders/builder.go +++ b/go/tools/builders/builder.go @@ -73,6 +73,6 @@ func main() { log.SetPrefix(verb + ": ") if err := action(rest); err != nil { - log.Fatalf("\n$$$$$$$$$$$$$$$$$$$$$$$$ fatal: %+v", err) + log.Fatal(err) } } diff --git a/go/tools/builders/difflib.go b/go/tools/builders/difflib.go new file mode 100644 index 0000000000..8fbda0e5cf --- /dev/null +++ b/go/tools/builders/difflib.go @@ -0,0 +1,792 @@ +/* + * Copyright (c) 2013, Patrick Mezard + * All rights reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are + * met: + * + * Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * The names of its contributors may not be used to endorse or promote + * products derived from this software without specific prior written + * permission. + * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS + * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED + * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A + * PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT + * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED + * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR + * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF + * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING + * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS + * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + +// This file is copied from https://github.com/pmezard/go-difflib, under the permission of the above copyright. + +package main + +import ( + "bufio" + "bytes" + "fmt" + "io" + "strings" +) + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} + +func calculateRatio(matches, length int) float64 { + if length > 0 { + return 2.0 * float64(matches) / float64(length) + } + return 1.0 +} + +type Match struct { + A int + B int + Size int +} + +type OpCode struct { + Tag byte + I1 int + I2 int + J1 int + J2 int +} + +// SequenceMatcher compares sequence of strings. The basic +// algorithm predates, and is a little fancier than, an algorithm +// published in the late 1980's by Ratcliff and Obershelp under the +// hyperbolic name "gestalt pattern matching". The basic idea is to find +// the longest contiguous matching subsequence that contains no "junk" +// elements (R-O doesn't address junk). The same idea is then applied +// recursively to the pieces of the sequences to the left and to the right +// of the matching subsequence. This does not yield minimal edit +// sequences, but does tend to yield matches that "look right" to people. +// +// SequenceMatcher tries to compute a "human-friendly diff" between two +// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the +// longest *contiguous* & junk-free matching subsequence. 
That's what +// catches peoples' eyes. The Windows(tm) windiff has another interesting +// notion, pairing up elements that appear uniquely in each sequence. +// That, and the method here, appear to yield more intuitive difference +// reports than does diff. This method appears to be the least vulnerable +// to synching up on blocks of "junk lines", though (like blank lines in +// ordinary text files, or maybe "

" lines in HTML files). That may be +// because this is the only method of the 3 that has a *concept* of +// "junk" . +// +// Timing: Basic R-O is cubic time worst case and quadratic time expected +// case. SequenceMatcher is quadratic time for the worst case and has +// expected-case behavior dependent in a complicated way on how many +// elements the sequences have in common; best case time is linear. +type SequenceMatcher struct { + a []string + b []string + b2j map[string][]int + IsJunk func(string) bool + autoJunk bool + bJunk map[string]struct{} + matchingBlocks []Match + fullBCount map[string]int + bPopular map[string]struct{} + opCodes []OpCode +} + +func NewMatcher(a, b []string) *SequenceMatcher { + m := SequenceMatcher{autoJunk: true} + m.SetSeqs(a, b) + return &m +} + +func NewMatcherWithJunk(a, b []string, autoJunk bool, + isJunk func(string) bool) *SequenceMatcher { + + m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk} + m.SetSeqs(a, b) + return &m +} + +// Set two sequences to be compared. +func (m *SequenceMatcher) SetSeqs(a, b []string) { + m.SetSeq1(a) + m.SetSeq2(b) +} + +// Set the first sequence to be compared. The second sequence to be compared is +// not changed. +// +// SequenceMatcher computes and caches detailed information about the second +// sequence, so if you want to compare one sequence S against many sequences, +// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other +// sequences. +// +// See also SetSeqs() and SetSeq2(). +func (m *SequenceMatcher) SetSeq1(a []string) { + if &a == &m.a { + return + } + m.a = a + m.matchingBlocks = nil + m.opCodes = nil +} + +// Set the second sequence to be compared. The first sequence to be compared is +// not changed. 
+func (m *SequenceMatcher) SetSeq2(b []string) { + if &b == &m.b { + return + } + m.b = b + m.matchingBlocks = nil + m.opCodes = nil + m.fullBCount = nil + m.chainB() +} + +func (m *SequenceMatcher) chainB() { + // Populate line -> index mapping + b2j := map[string][]int{} + for i, s := range m.b { + indices := b2j[s] + indices = append(indices, i) + b2j[s] = indices + } + + // Purge junk elements + m.bJunk = map[string]struct{}{} + if m.IsJunk != nil { + junk := m.bJunk + for s, _ := range b2j { + if m.IsJunk(s) { + junk[s] = struct{}{} + } + } + for s, _ := range junk { + delete(b2j, s) + } + } + + // Purge remaining popular elements + popular := map[string]struct{}{} + n := len(m.b) + if m.autoJunk && n >= 200 { + ntest := n/100 + 1 + for s, indices := range b2j { + if len(indices) > ntest { + popular[s] = struct{}{} + } + } + for s, _ := range popular { + delete(b2j, s) + } + } + m.bPopular = popular + m.b2j = b2j +} + +func (m *SequenceMatcher) isBJunk(s string) bool { + _, ok := m.bJunk[s] + return ok +} + +// Find longest matching block in a[alo:ahi] and b[blo:bhi]. +// +// If IsJunk is not defined: +// +// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where +// +// alo <= i <= i+k <= ahi +// blo <= j <= j+k <= bhi +// +// and for all (i',j',k') meeting those conditions, +// +// k >= k' +// i <= i' +// and if i == i', j <= j' +// +// In other words, of all maximal matching blocks, return one that +// starts earliest in a, and of all those maximal matching blocks that +// start earliest in a, return the one that starts earliest in b. +// +// If IsJunk is defined, first the longest matching block is +// determined as above, but with the additional restriction that no +// junk element appears in the block. Then that block is extended as +// far as possible by matching (only) junk elements on both sides. So +// the resulting block never matches on junk except as identical junk +// happens to be adjacent to an "interesting" match. 
+// +// If no blocks match, return (alo, blo, 0). +func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match { + // CAUTION: stripping common prefix or suffix would be incorrect. + // E.g., + // ab + // acab + // Longest matching block is "ab", but if common prefix is + // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so + // strip, so ends up claiming that ab is changed to acab by + // inserting "ca" in the middle. That's minimal but unintuitive: + // "it's obvious" that someone inserted "ac" at the front. + // Windiff ends up at the same place as diff, but by pairing up + // the unique 'b's and then matching the first two 'a's. + besti, bestj, bestsize := alo, blo, 0 + + // find longest junk-free match + // during an iteration of the loop, j2len[j] = length of longest + // junk-free match ending with a[i-1] and b[j] + j2len := map[int]int{} + for i := alo; i != ahi; i++ { + // look at all instances of a[i] in b; note that because + // b2j has no junk keys, the loop is skipped if a[i] is junk + newj2len := map[int]int{} + for _, j := range m.b2j[m.a[i]] { + // a[i] matches b[j] + if j < blo { + continue + } + if j >= bhi { + break + } + k := j2len[j-1] + 1 + newj2len[j] = k + if k > bestsize { + besti, bestj, bestsize = i-k+1, j-k+1, k + } + } + j2len = newj2len + } + + // Extend the best by non-junk elements on each end. In particular, + // "popular" non-junk elements aren't in b2j, which greatly speeds + // the inner loop above, but also means "the best" match so far + // doesn't contain any junk *or* popular non-junk elements. 
+ for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) && + m.a[besti-1] == m.b[bestj-1] { + besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 + } + for besti+bestsize < ahi && bestj+bestsize < bhi && + !m.isBJunk(m.b[bestj+bestsize]) && + m.a[besti+bestsize] == m.b[bestj+bestsize] { + bestsize += 1 + } + + // Now that we have a wholly interesting match (albeit possibly + // empty!), we may as well suck up the matching junk on each + // side of it too. Can't think of a good reason not to, and it + // saves post-processing the (possibly considerable) expense of + // figuring out what to do with it. In the case of an empty + // interesting match, this is clearly the right thing to do, + // because no other kind of match is possible in the regions. + for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) && + m.a[besti-1] == m.b[bestj-1] { + besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 + } + for besti+bestsize < ahi && bestj+bestsize < bhi && + m.isBJunk(m.b[bestj+bestsize]) && + m.a[besti+bestsize] == m.b[bestj+bestsize] { + bestsize += 1 + } + + return Match{A: besti, B: bestj, Size: bestsize} +} + +// Return list of triples describing matching subsequences. +// +// Each triple is of the form (i, j, n), and means that +// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in +// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are +// adjacent triples in the list, and the second is not the last triple in the +// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe +// adjacent equal blocks. +// +// The last triple is a dummy, (len(a), len(b), 0), and is the only +// triple with n==0. 
+func (m *SequenceMatcher) GetMatchingBlocks() []Match { + if m.matchingBlocks != nil { + return m.matchingBlocks + } + + var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match + matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match { + match := m.findLongestMatch(alo, ahi, blo, bhi) + i, j, k := match.A, match.B, match.Size + if match.Size > 0 { + if alo < i && blo < j { + matched = matchBlocks(alo, i, blo, j, matched) + } + matched = append(matched, match) + if i+k < ahi && j+k < bhi { + matched = matchBlocks(i+k, ahi, j+k, bhi, matched) + } + } + return matched + } + matched := matchBlocks(0, len(m.a), 0, len(m.b), nil) + + // It's possible that we have adjacent equal blocks in the + // matching_blocks list now. + nonAdjacent := []Match{} + i1, j1, k1 := 0, 0, 0 + for _, b := range matched { + // Is this block adjacent to i1, j1, k1? + i2, j2, k2 := b.A, b.B, b.Size + if i1+k1 == i2 && j1+k1 == j2 { + // Yes, so collapse them -- this just increases the length of + // the first block by the length of the second, and the first + // block so lengthened remains the block to compare against. + k1 += k2 + } else { + // Not adjacent. Remember the first block (k1==0 means it's + // the dummy we started with), and make the second block the + // new block to compare against. + if k1 > 0 { + nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) + } + i1, j1, k1 = i2, j2, k2 + } + } + if k1 > 0 { + nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) + } + + nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0}) + m.matchingBlocks = nonAdjacent + return m.matchingBlocks +} + +// Return list of 5-tuples describing how to turn a into b. +// +// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple +// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the +// tuple preceding it, and likewise for j1 == the previous j2. 
+// +// The tags are characters, with these meanings: +// +// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2] +// +// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case. +// +// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case. +// +// 'e' (equal): a[i1:i2] == b[j1:j2] +func (m *SequenceMatcher) GetOpCodes() []OpCode { + if m.opCodes != nil { + return m.opCodes + } + i, j := 0, 0 + matching := m.GetMatchingBlocks() + opCodes := make([]OpCode, 0, len(matching)) + for _, m := range matching { + // invariant: we've pumped out correct diffs to change + // a[:i] into b[:j], and the next matching block is + // a[ai:ai+size] == b[bj:bj+size]. So we need to pump + // out a diff to change a[i:ai] into b[j:bj], pump out + // the matching block, and move (i,j) beyond the match + ai, bj, size := m.A, m.B, m.Size + tag := byte(0) + if i < ai && j < bj { + tag = 'r' + } else if i < ai { + tag = 'd' + } else if j < bj { + tag = 'i' + } + if tag > 0 { + opCodes = append(opCodes, OpCode{tag, i, ai, j, bj}) + } + i, j = ai+size, bj+size + // the list of matching blocks is terminated by a + // sentinel with size 0 + if size > 0 { + opCodes = append(opCodes, OpCode{'e', ai, i, bj, j}) + } + } + m.opCodes = opCodes + return m.opCodes +} + +// Isolate change clusters by eliminating ranges with no changes. +// +// Return a generator of groups with up to n lines of context. +// Each group is in the same format as returned by GetOpCodes(). +func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { + if n < 0 { + n = 3 + } + codes := m.GetOpCodes() + if len(codes) == 0 { + codes = []OpCode{OpCode{'e', 0, 1, 0, 1}} + } + // Fixup leading and trailing groups if they show no changes. 
+ if codes[0].Tag == 'e' { + c := codes[0] + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2} + } + if codes[len(codes)-1].Tag == 'e' { + c := codes[len(codes)-1] + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)} + } + nn := n + n + groups := [][]OpCode{} + group := []OpCode{} + for _, c := range codes { + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + // End the current group and start a new one whenever + // there is a large range with no changes. + if c.Tag == 'e' && i2-i1 > nn { + group = append(group, OpCode{c.Tag, i1, min(i2, i1+n), + j1, min(j2, j1+n)}) + groups = append(groups, group) + group = []OpCode{} + i1, j1 = max(i1, i2-n), max(j1, j2-n) + } + group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) + } + if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { + groups = append(groups, group) + } + return groups +} + +// Return a measure of the sequences' similarity (float in [0,1]). +// +// Where T is the total number of elements in both sequences, and +// M is the number of matches, this is 2.0*M / T. +// Note that this is 1 if the sequences are identical, and 0 if +// they have nothing in common. +// +// .Ratio() is expensive to compute if you haven't already computed +// .GetMatchingBlocks() or .GetOpCodes(), in which case you may +// want to try .QuickRatio() or .RealQuickRation() first to get an +// upper bound. +func (m *SequenceMatcher) Ratio() float64 { + matches := 0 + for _, m := range m.GetMatchingBlocks() { + matches += m.Size + } + return calculateRatio(matches, len(m.a)+len(m.b)) +} + +// Return an upper bound on ratio() relatively quickly. +// +// This isn't defined beyond that it is an upper bound on .Ratio(), and +// is faster to compute. 
+func (m *SequenceMatcher) QuickRatio() float64 { + // viewing a and b as multisets, set matches to the cardinality + // of their intersection; this counts the number of matches + // without regard to order, so is clearly an upper bound + if m.fullBCount == nil { + m.fullBCount = map[string]int{} + for _, s := range m.b { + m.fullBCount[s] = m.fullBCount[s] + 1 + } + } + + // avail[x] is the number of times x appears in 'b' less the + // number of times we've seen it in 'a' so far ... kinda + avail := map[string]int{} + matches := 0 + for _, s := range m.a { + n, ok := avail[s] + if !ok { + n = m.fullBCount[s] + } + avail[s] = n - 1 + if n > 0 { + matches += 1 + } + } + return calculateRatio(matches, len(m.a)+len(m.b)) +} + +// Return an upper bound on ratio() very quickly. +// +// This isn't defined beyond that it is an upper bound on .Ratio(), and +// is faster to compute than either .Ratio() or .QuickRatio(). +func (m *SequenceMatcher) RealQuickRatio() float64 { + la, lb := len(m.a), len(m.b) + return calculateRatio(min(la, lb), la+lb) +} + +// Convert range to the "ed" format +func formatRangeUnified(start, stop int) string { + // Per the diff spec at http://www.unix.org/single_unix_specification/ + beginning := start + 1 // lines start numbering with one + length := stop - start + if length == 1 { + return fmt.Sprintf("%d", beginning) + } + if length == 0 { + beginning -= 1 // empty ranges begin at line just before the range + } + return fmt.Sprintf("%d,%d", beginning, length) +} + +// Unified diff parameters +type UnifiedDiff struct { + A []string // First sequence lines + FromFile string // First file name + FromDate string // First file time + B []string // Second sequence lines + ToFile string // Second file name + ToDate string // Second file time + Eol string // Headers end of line, defaults to LF + Context int // Number of context lines +} + +// Compare two sequences of lines; generate the delta as a unified diff. 
+// +// Unified diffs are a compact way of showing line changes and a few +// lines of context. The number of context lines is set by 'n' which +// defaults to three. +// +// By default, the diff control lines (those with ---, +++, or @@) are +// created with a trailing newline. This is helpful so that inputs +// created from file.readlines() result in diffs that are suitable for +// file.writelines() since both the inputs and outputs have trailing +// newlines. +// +// For inputs that do not have trailing newlines, set the lineterm +// argument to "" so that the output will be uniformly newline free. +// +// The unidiff format normally has a header for filenames and modification +// times. Any or all of these may be specified using strings for +// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. +// The modification times are normally expressed in the ISO 8601 format. +func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { + buf := bufio.NewWriter(writer) + defer buf.Flush() + wf := func(format string, args ...interface{}) error { + _, err := buf.WriteString(fmt.Sprintf(format, args...)) + return err + } + ws := func(s string) error { + _, err := buf.WriteString(s) + return err + } + + if len(diff.Eol) == 0 { + diff.Eol = "\n" + } + + started := false + m := NewMatcher(diff.A, diff.B) + for _, g := range m.GetGroupedOpCodes(diff.Context) { + if !started { + started = true + fromDate := "" + if len(diff.FromDate) > 0 { + fromDate = "\t" + diff.FromDate + } + toDate := "" + if len(diff.ToDate) > 0 { + toDate = "\t" + diff.ToDate + } + if diff.FromFile != "" || diff.ToFile != "" { + err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) + if err != nil { + return err + } + err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) + if err != nil { + return err + } + } + } + first, last := g[0], g[len(g)-1] + range1 := formatRangeUnified(first.I1, last.I2) + range2 := formatRangeUnified(first.J1, last.J2) + if err := wf("@@ -%s +%s @@%s", range1, 
range2, diff.Eol); err != nil { + return err + } + for _, c := range g { + i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 + if c.Tag == 'e' { + for _, line := range diff.A[i1:i2] { + if err := ws(" " + line); err != nil { + return err + } + } + continue + } + if c.Tag == 'r' || c.Tag == 'd' { + for _, line := range diff.A[i1:i2] { + if err := ws("-" + line); err != nil { + return err + } + } + } + if c.Tag == 'r' || c.Tag == 'i' { + for _, line := range diff.B[j1:j2] { + if err := ws("+" + line); err != nil { + return err + } + } + } + } + } + return nil +} + +// Like WriteUnifiedDiff but returns the diff a string. +func GetUnifiedDiffString(diff UnifiedDiff) (string, error) { + w := &bytes.Buffer{} + err := WriteUnifiedDiff(w, diff) + return string(w.Bytes()), err +} + +// Convert range to the "ed" format. +func formatRangeContext(start, stop int) string { + // Per the diff spec at http://www.unix.org/single_unix_specification/ + beginning := start + 1 // lines start numbering with one + length := stop - start + if length == 0 { + beginning -= 1 // empty ranges begin at line just before the range + } + if length <= 1 { + return fmt.Sprintf("%d", beginning) + } + return fmt.Sprintf("%d,%d", beginning, beginning+length-1) +} + +type ContextDiff UnifiedDiff + +// Compare two sequences of lines; generate the delta as a context diff. +// +// Context diffs are a compact way of showing line changes and a few +// lines of context. The number of context lines is set by diff.Context +// which defaults to three. +// +// By default, the diff control lines (those with *** or ---) are +// created with a trailing newline. +// +// For inputs that do not have trailing newlines, set the diff.Eol +// argument to "" so that the output will be uniformly newline free. +// +// The context diff format normally has a header for filenames and +// modification times. Any or all of these may be specified using +// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate. 
+// The modification times are normally expressed in the ISO 8601 format. +// If not specified, the strings default to blanks. +func WriteContextDiff(writer io.Writer, diff ContextDiff) error { + buf := bufio.NewWriter(writer) + defer buf.Flush() + var diffErr error + wf := func(format string, args ...interface{}) { + _, err := buf.WriteString(fmt.Sprintf(format, args...)) + if diffErr == nil && err != nil { + diffErr = err + } + } + ws := func(s string) { + _, err := buf.WriteString(s) + if diffErr == nil && err != nil { + diffErr = err + } + } + + if len(diff.Eol) == 0 { + diff.Eol = "\n" + } + + prefix := map[byte]string{ + 'i': "+ ", + 'd': "- ", + 'r': "! ", + 'e': " ", + } + + started := false + m := NewMatcher(diff.A, diff.B) + for _, g := range m.GetGroupedOpCodes(diff.Context) { + if !started { + started = true + fromDate := "" + if len(diff.FromDate) > 0 { + fromDate = "\t" + diff.FromDate + } + toDate := "" + if len(diff.ToDate) > 0 { + toDate = "\t" + diff.ToDate + } + if diff.FromFile != "" || diff.ToFile != "" { + wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol) + wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol) + } + } + + first, last := g[0], g[len(g)-1] + ws("***************" + diff.Eol) + + range1 := formatRangeContext(first.I1, last.I2) + wf("*** %s ****%s", range1, diff.Eol) + for _, c := range g { + if c.Tag == 'r' || c.Tag == 'd' { + for _, cc := range g { + if cc.Tag == 'i' { + continue + } + for _, line := range diff.A[cc.I1:cc.I2] { + ws(prefix[cc.Tag] + line) + } + } + break + } + } + + range2 := formatRangeContext(first.J1, last.J2) + wf("--- %s ----%s", range2, diff.Eol) + for _, c := range g { + if c.Tag == 'r' || c.Tag == 'i' { + for _, cc := range g { + if cc.Tag == 'd' { + continue + } + for _, line := range diff.B[cc.J1:cc.J2] { + ws(prefix[cc.Tag] + line) + } + } + break + } + } + } + return diffErr +} + +// Like WriteContextDiff but returns the diff a string. 
+func GetContextDiffString(diff ContextDiff) (string, error) { + w := &bytes.Buffer{} + err := WriteContextDiff(w, diff) + return string(w.Bytes()), err +} + +// Split a string on "\n" while preserving them. The output can be used +// as input for UnifiedDiff and ContextDiff structures. +func SplitLines(s string) []string { + lines := strings.SplitAfter(s, "\n") + lines[len(lines)-1] += "\n" + return lines +} diff --git a/go/tools/builders/nogo.go b/go/tools/builders/nogo.go index cdf48b5ff4..b33ff975b1 100644 --- a/go/tools/builders/nogo.go +++ b/go/tools/builders/nogo.go @@ -24,8 +24,7 @@ func nogo(args []string) error { var deps, facts archiveMultiFlag var importPath, packagePath, nogoPath, packageListPath string var testFilter string - var outFactsPath, outLogPath string - var nogoFixPath string + var outFactsPath, outLogPath, outFixPath string var coverMode string fs.Var(&unfilteredSrcs, "src", ".go, .c, .cc, .m, .mm, .s, or .S file to be filtered and checked") fs.Var(&ignoreSrcs, "ignore_src", ".go, .c, .cc, .m, .mm, .s, or .S file to be filtered and checked, but with its diagnostics ignored") @@ -40,8 +39,7 @@ func nogo(args []string) error { fs.StringVar(&nogoPath, "nogo", "", "The nogo binary") fs.StringVar(&outFactsPath, "out_facts", "", "The file to emit serialized nogo facts to") fs.StringVar(&outLogPath, "out_log", "", "The file to emit nogo logs into") - - fs.StringVar(&nogoFixPath, "fixpath", "", "The fix path") + fs.StringVar(&outFixPath, "out_fix", "", "The path of the file that stores the nogo fixes") if err := fs.Parse(args); err != nil { return err @@ -86,10 +84,10 @@ func nogo(args []string) error { return err } - return runNogo(workDir, nogoPath, goSrcs, ignoreSrcs, facts, importPath, importcfgPath, outFactsPath, outLogPath, nogoFixPath) + return runNogo(workDir, nogoPath, goSrcs, ignoreSrcs, facts, importPath, importcfgPath, outFactsPath, outLogPath, outFixPath) } -func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts 
[]archive, packagePath, importcfgPath, outFactsPath string, outLogPath string, nogoFixPath string) error { +func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []archive, packagePath, importcfgPath, outFactsPath string, outLogPath string, outFixPath string) error { if len(srcs) == 0 { // emit_compilepkg expects a nogo facts file, even if it's empty. // We also need to write the validation output log. @@ -101,14 +99,15 @@ func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []ar if err != nil { return fmt.Errorf("error writing empty nogo log file: %v", err) } + err = os.WriteFile(outFixPath, nil, 0o666) + if err != nil { + return fmt.Errorf("error writing empty nogo fix file: %v", err) + } return nil } args := []string{nogoPath} args = append(args, "-p", packagePath) - args = append(args, "-fixpath", nogoFixPath) - - - // args = append(args, "-json") + args = append(args, "-fixpath", outFixPath) args = append(args, "-importcfg", importcfgPath) for _, fact := range facts { args = append(args, "-fact", fmt.Sprintf("%s=%s", fact.importPath, fact.file)) diff --git a/go/tools/builders/nogo_change.go b/go/tools/builders/nogo_change.go index 58cbed052c..3c9be41551 100644 --- a/go/tools/builders/nogo_change.go +++ b/go/tools/builders/nogo_change.go @@ -3,83 +3,324 @@ package main import ( "fmt" "go/token" + "os" + "path/filepath" + "sort" "strings" + "unicode" "golang.org/x/tools/go/analysis" ) +// DiagnosticEntry represents a diagnostic entry with the corresponding analyzer. +type DiagnosticEntry struct { + analysis.Diagnostic + *analysis.Analyzer +} + +// This file contains two main entities: Edit and Change, which correspond to the low-level +// and high-level abstractions. See them below. + +// The following is about the `Edit`, a low-level abstraction of edits. +// An Edit describes the replacement of a portion of a text file. 
+type Edit struct { + New string `json:"new"` // the replacement + Start int `json:"start"` // starting byte offset of the region to replace + End int `json:"end"` // (exclusive) ending byte offset of the region to replace +} + +func (e Edit) String() string { + return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New) +} + +// SortEdits orders a slice of Edits by (start, end) offset. +// This ordering puts insertions (end = start) before deletions +// (end > start) at the same point, but uses a stable sort to preserve +// the order of multiple insertions at the same point. +// (Apply detects multiple deletions at the same point as an error.) +func SortEdits(edits []Edit) { + sort.Stable(editsSort(edits)) +} + +type editsSort []Edit + +func (a editsSort) Len() int { return len(a) } +func (a editsSort) Less(i, j int) bool { + if cmp := a[i].Start - a[j].Start; cmp != 0 { + return cmp < 0 + } + return a[i].End < a[j].End +} +func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] } + +// UniqueEdits returns a list of edits that is sorted and +// contains no duplicate edits. Returns the index of some +// overlapping adjacent edits if there is one and <0 if the +// edits are valid. +// Deduplication helps in the cases where two analyzers produce duplicate edits. +func UniqueEdits(edits []Edit) ([]Edit, int) { + if len(edits) == 0 { + return nil, -1 + } + equivalent := func(x, y Edit) bool { + return x.Start == y.Start && x.End == y.End && x.New == y.New + } + SortEdits(edits) + unique := []Edit{edits[0]} + invalid := -1 + for i := 1; i < len(edits); i++ { + prev, cur := edits[i-1], edits[i] + if !equivalent(prev, cur) { + unique = append(unique, cur) + if prev.End > cur.Start { + invalid = i + } + } + } + return unique, invalid +} + +// ApplyEditsBytes applies a sequence of edits to the src byte slice and returns the result. +// Edits are applied in order of start offset; edits with the same start offset are applied in the order they were provided. 
+// ApplyEditsBytes returns an error if any edit is out of bounds, or if any pair of edits is overlapping. +func ApplyEditsBytes(src []byte, edits []Edit) ([]byte, error) { + // Validate and compute the output size based on the edits. + edits, size, err := validateBytes(src, edits) + if err != nil { + return nil, err + } + + // Apply the edits. + out := make([]byte, 0, size) + lastEnd := 0 + for _, edit := range edits { + if lastEnd < edit.Start { + out = append(out, src[lastEnd:edit.Start]...) + } + out = append(out, edit.New...) + lastEnd = edit.End + } + out = append(out, src[lastEnd:]...) + + if len(out) != size { + panic("wrong size") + } + + return out, nil +} + +// validateBytes checks that edits are consistent with the src byte slice, +// and returns the size of the patched output. It may return a different slice if edits are sorted. +func validateBytes(src []byte, edits []Edit) ([]Edit, int, error) { + if !sort.IsSorted(editsSort(edits)) { + edits = append([]Edit(nil), edits...) + SortEdits(edits) + } + + // Check validity of edits and compute final size. + size := len(src) + lastEnd := 0 + for _, edit := range edits { + if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) { + return nil, 0, fmt.Errorf("diff has out-of-bounds edits") + } + if edit.Start < lastEnd { + return nil, 0, fmt.Errorf("diff has overlapping edits") + } + size += len(edit.New) + edit.Start - edit.End + lastEnd = edit.End + } + + return edits, size, nil +} + +// The following is about the `Change`, a high-level abstraction of edits. // Change represents a set of edits to be applied to a set of files. type Change struct { - AnalysisName string `json:"analysis_name"` - FileToEdits map[string][]Edit `json:"file_to_edits"` + AnalyzerToFileToEdits map[string]map[string][]Edit `json:"analyzer_file_to_edits"` } // NewChange creates a new Change object. 
func NewChange() *Change { return &Change{ - FileToEdits: make(map[string][]Edit), + AnalyzerToFileToEdits: make(map[string]map[string][]Edit), } } -// SetAnalysisName sets the name of the analysis that produced the change. -func (c *Change) SetAnalysisName(name string) { - c.AnalysisName = name -} - -// AddEdit adds an edit to the change. -func (c *Change) AddEdit(file string, edit Edit) { - c.FileToEdits[file] = append(c.FileToEdits[file], edit) -} - -// BuildFromDiagnostics builds a Change from a set of diagnostics. +// NewChangeFromDiagnostics builds a Change from a set of diagnostics. // Unlike Diagnostic, Change is independent of the FileSet given it uses perf-file offsets instead of token.Pos. // This allows Change to be used in contexts where the FileSet is not available, e.g., it remains applicable after it is saved to disk and loaded back. // See https://github.com/golang/tools/blob/master/go/analysis/diagnostic.go for details. -func (c *Change) BuildFromDiagnostics(diagnostics []analysis.Diagnostic, fileSet *token.FileSet) error { - for _, diag := range diagnostics { - for _, sf := range diag.SuggestedFixes { +func NewChangeFromDiagnostics(entries []DiagnosticEntry, fileSet *token.FileSet) (*Change, error) { + c := NewChange() + + cwd, err := os.Getwd() // workspace root + if err != nil { + return c, fmt.Errorf("Error getting current working directory: (%v)", err) + } + + var allErrors []error + + for _, entry := range entries { + analyzer := entry.Analyzer.Name + for _, sf := range entry.Diagnostic.SuggestedFixes { for _, edit := range sf.TextEdits { - file := fileSet.File(edit.Pos) + start, end := edit.Pos, edit.End + if !end.IsValid() { + // In insertion, end could be token.NoPos + end = start + } + file := fileSet.File(edit.Pos) if file == nil { - return fmt.Errorf("invalid fix: missing file info for pos (%v)", edit.Pos) + allErrors = append(allErrors, fmt.Errorf("invalid fix: missing file info for pos %v", edit.Pos)) + continue } - if edit.Pos > 
edit.End { - return fmt.Errorf("invalid fix: pos (%v) > end (%v)", edit.Pos, edit.End) + if start > end { + allErrors = append(allErrors, fmt.Errorf("invalid fix: pos %v > end %v", start, end)) + continue } - if eof := token.Pos(file.Base() + file.Size()); edit.End > eof { - return fmt.Errorf("invalid fix: end (%v) past end of file (%v)", edit.End, eof) + if eof := token.Pos(file.Base() + file.Size()); end > eof { + allErrors = append(allErrors, fmt.Errorf("invalid fix: end %v past end of file %v", end, eof)) + continue } - edit := Edit{Start: file.Offset(edit.Pos), End: file.Offset(edit.End), New: string(edit.NewText)} - fileRelativePath := file.Name() - c.AddEdit(fileRelativePath, edit) + + edit := Edit{Start: file.Offset(start), End: file.Offset(end), New: string(edit.NewText)} + fileRelativePath, err := filepath.Rel(cwd, file.Name()) + if err != nil { + fileRelativePath = file.Name() // fallback logic + } + c.AddEdit(analyzer, fileRelativePath, edit) } } } - return nil + + if len(allErrors) > 0 { + return c, fmt.Errorf("errors: %v", allErrors) + } + return c, nil +} + +// AddEdit adds an edit to the change. +func (c *Change) AddEdit(analyzer string, file string, edit Edit) { + // Check if the analyzer exists in the map + if _, ok := c.AnalyzerToFileToEdits[analyzer]; !ok { + // Initialize the map for the analyzer if it doesn't exist + c.AnalyzerToFileToEdits[analyzer] = make(map[string][]Edit) + } + + // Append the edit to the list of edits for the specific file under the analyzer + c.AnalyzerToFileToEdits[analyzer][file] = append(c.AnalyzerToFileToEdits[analyzer][file], edit) } -// MergeChanges merges multiple changes into a single change. -func MergeChanges(changes []Change) Change { - mergedChange := NewChange() // Create a new Change object for the result - analysisNames := []string{} // no deduplication needed +// Flatten takes a Change and returns a map of FileToEdits, merging edits from all analyzers. 
+func Flatten(change Change) map[string][]Edit { + fileToEdits := make(map[string][]Edit) - for _, change := range changes { - if change.AnalysisName != "" { - analysisNames = append(analysisNames, change.AnalysisName) - } - for file, edits := range change.FileToEdits { - // If the file already exists in the merged change, append the edits - if existingEdits, found := mergedChange.FileToEdits[file]; found { - // checking the overlapping of edits happens in edit.go during the ApplyEdits function. - // so we don't need to check it here. - mergedChange.FileToEdits[file] = append(existingEdits, edits...) + analyzers := make([]string, 0, len(change.AnalyzerToFileToEdits)) + for analyzer := range change.AnalyzerToFileToEdits { + analyzers = append(analyzers, analyzer) + } + sort.Strings(analyzers) + for _, analyzer := range analyzers { + // following the order of analyzers, random iteration order over map makes testing flaky + fileToEditsMap := change.AnalyzerToFileToEdits[analyzer] + for file, edits := range fileToEditsMap { + var localEdits []Edit + if existingEdits, found := fileToEdits[file]; found { + localEdits = append(existingEdits, edits...) } else { - // Otherwise, just set the file and edits - mergedChange.FileToEdits[file] = edits + localEdits = edits } + + // Validate the local edits before updating the map + localEdits, invalidEditIndex := UniqueEdits(localEdits) + if invalidEditIndex >= 0 { + // Detected overlapping edits, skip the edits from this analyzer + // Note: we merge edits from as many analyzers as possible. + // This allows us to fix as many linter errors as possible. Also, after the initial set + // of fixing edits are applied to the source code, the next bazel build will run the analyzers again + // and produce edits that are no longer overlapping. 
+ continue + } + fileToEdits[file] = localEdits } } - mergedChange.AnalysisName = strings.Join(analysisNames, ",") - return *mergedChange + + return fileToEdits +} + +// ToPatches converts the edits to patches. +func ToPatches(fileToEdits map[string][]Edit) (map[string]string, error) { + patches := make(map[string]string) + for relativeFilePath, edits := range fileToEdits { + // Skip processing if edits are nil or empty + if len(edits) == 0 { + continue + } + + edits, _ = UniqueEdits(edits) + contents, err := os.ReadFile(relativeFilePath) + if err != nil { + return nil, err + } + + out, err := ApplyEditsBytes(contents, edits) + if err != nil { + return nil, err + } + + diff := UnifiedDiff{ + // difflib.SplitLines does not handle well the whitespace at the beginning or the end. + // For example, it would add an extra \n at the end + // See https://github.com/pmezard/go-difflib/blob/master/difflib/difflib.go#L768 + // trimWhitespaceHeadAndTail is a postprocessing to produce clean patches. + A: trimWhitespaceHeadAndTail(SplitLines(string(contents))), + B: trimWhitespaceHeadAndTail(SplitLines(string(out))), + // standard convention is to use "a" and "b" for the original and new versions of the file + // discovered by doing `git diff` + FromFile: fmt.Sprintf("a/%s", relativeFilePath), + ToFile: fmt.Sprintf("b/%s", relativeFilePath), + // git needs lines of context to be able to apply the patch + // we use 3 lines of context because that's what `git diff` uses + Context: 3, + } + patch, err := GetUnifiedDiffString(diff) + if err != nil { + return nil, err + } + patches[relativeFilePath] = patch + } + return patches, nil +} + +func trimWhitespaceHeadAndTail(lines []string) []string { + if len(lines) == 0 { + return lines + } + + // Inner function: returns true if the given string contains any non-whitespace characters. 
+ hasNonWhitespaceCharacter := func(s string) bool { + return strings.ContainsFunc(s, func(r rune) bool { + return !unicode.IsSpace(r) + }) + } + + // Trim left + for i := 0; i < len(lines); i++ { + if hasNonWhitespaceCharacter(lines[i]) { + lines = lines[i:] + break + } + } + + // Trim right. + for i := len(lines) - 1; i >= 0; i-- { + if hasNonWhitespaceCharacter(lines[i]) { + return lines[:i+1] + } + } + + // If we didn't return above, all strings contained only whitespace, so return an empty slice. + return []string{} } diff --git a/go/tools/builders/nogo_change_serialization.go b/go/tools/builders/nogo_change_serialization.go index 1b47a341cd..1f274d9ece 100644 --- a/go/tools/builders/nogo_change_serialization.go +++ b/go/tools/builders/nogo_change_serialization.go @@ -3,20 +3,35 @@ package main import ( "encoding/json" "fmt" - "io/ioutil" - // "log" + "os" ) -// SaveToFile saves the Change struct to a JSON file. -func SaveToFile(filename string, change Change) error { - // Serialize Change to JSON - jsonData, err := json.MarshalIndent(change, "", " ") +// SavePatchesToFile saves the map[string]string (file paths to patch content) to a JSON file. +func SavePatchesToFile(filename string, patches map[string]string) error { + if len(patches) == 0 { + // Special case optimization for the empty patches, where we dump an empty string, rather than an empty json like {}. + // This helps skip the json serialization below. 
+ err := os.WriteFile(filename, []byte(""), 0644) + if err != nil { + return fmt.Errorf("error writing empty string to file: %v", err) + } + return nil + } + + // Serialize patches (map[string]string) to JSON + jsonData, err := json.MarshalIndent(patches, "", " ") if err != nil { - return fmt.Errorf("error serializing to JSON: %v", err) + // If serialization fails, create the output file anyway as per your requirements + errWrite := os.WriteFile(filename, []byte(""), 0644) + if errWrite != nil { + return fmt.Errorf("error serializing to JSON: %v and error writing to the file: %v", err, errWrite) + } else { + return fmt.Errorf("error serializing to JSON: %v", err) + } } - // log.Fatalf("!!!!: %v", change) + // Write the JSON data to the file - err = ioutil.WriteFile(filename, jsonData, 0644) + err = os.WriteFile(filename, jsonData, 0644) if err != nil { return fmt.Errorf("error writing to file: %v", err) } @@ -24,21 +39,27 @@ func SaveToFile(filename string, change Change) error { return nil } -// LoadFromFile loads the Change struct from a JSON file. -func LoadFromFile(filename string) (Change, error) { - var change Change +// LoadPatchesFromFile loads the map[string]string (file paths to patch content) from a JSON file. +// Note LoadPatchesFromFile is used for testing only. 
+func LoadPatchesFromFile(filename string) (map[string]string, error) { + var patches map[string]string // Read the JSON file - jsonData, err := ioutil.ReadFile(filename) + jsonData, err := os.ReadFile(filename) if err != nil { - return change, fmt.Errorf("error reading file: %v", err) + return nil, fmt.Errorf("error reading file: %v", err) + } + + if len(jsonData) == 0 { + // this corresponds to the special case optimization in SavePatchesToFile + return make(map[string]string), nil } - // Deserialize JSON data into the Change struct - err = json.Unmarshal(jsonData, &change) + // Deserialize JSON data into the patches map (map[string]string) + err = json.Unmarshal(jsonData, &patches) if err != nil { - return change, fmt.Errorf("error deserializing JSON: %v", err) + return nil, fmt.Errorf("error deserializing JSON: %v", err) } - return change, nil + return patches, nil } diff --git a/go/tools/builders/nogo_change_serialization_test.go b/go/tools/builders/nogo_change_serialization_test.go new file mode 100644 index 0000000000..d57606fe5d --- /dev/null +++ b/go/tools/builders/nogo_change_serialization_test.go @@ -0,0 +1,133 @@ +package main + +import ( + "os" + "testing" +) + +// TestSaveAndLoadPatches tests both SavePatchesToFile and LoadPatchesFromFile functions. 
+func TestSaveAndLoadPatches(t *testing.T) { + // Create a temporary file for testing + tempFile, err := os.CreateTemp("", "patches_test_*.json") + if err != nil { + t.Fatalf("Failed to create temporary file: %v", err) + } + defer os.Remove(tempFile.Name()) // Clean up the temp file after the test + + // Define the test data (map[string]string) + patches := map[string]string{ + "file1.go": "patch content for file1", + "file2.go": "patch content for file2", + } + + // Test SavePatchesToFile + err = SavePatchesToFile(tempFile.Name(), patches) + if err != nil { + t.Fatalf("SavePatchesToFile failed: %v", err) + } + + // Test LoadPatchesFromFile + loadedPatches, err := LoadPatchesFromFile(tempFile.Name()) + if err != nil { + t.Fatalf("LoadPatchesFromFile failed: %v", err) + } + + // Check if the loaded patches match the original ones + if len(loadedPatches) != len(patches) { + t.Errorf("Expected %d patches, but got %d", len(patches), len(loadedPatches)) + } + + for key, value := range patches { + if loadedPatches[key] != value { + t.Errorf("Patch mismatch for key %s: expected %s, got %s", key, value, loadedPatches[key]) + } + } + + // Test with an empty map + patches = map[string]string{} + err = SavePatchesToFile(tempFile.Name(), patches) + if err != nil { + t.Fatalf("SavePatchesToFile failed for empty map: %v", err) + } + + loadedPatches, err = LoadPatchesFromFile(tempFile.Name()) + if err != nil { + t.Fatalf("LoadPatchesFromFile failed for empty map: %v", err) + } + + // Check if the loaded patches map is empty + if len(loadedPatches) != 0 { + t.Errorf("Expected empty patches map, but got %d entries", len(loadedPatches)) + } +} + +// TestSavePatchesToFileError tests error handling in SavePatchesToFile. 
+func TestSavePatchesToFileError(t *testing.T) { + // Invalid file path (simulating write error) + filename := "/invalid/path/patches.json" + patches := map[string]string{ + "file1.go": "patch content", + } + + err := SavePatchesToFile(filename, patches) + if err == nil { + t.Errorf("Expected error when saving to invalid path, but got nil") + } +} + +// TestLoadPatchesFromFileError tests error handling in LoadPatchesFromFile. +func TestLoadPatchesFromFileError(t *testing.T) { + // Invalid file path (simulating read error) + filename := "/invalid/path/patches.json" + + _, err := LoadPatchesFromFile(filename) + if err == nil { + t.Errorf("Expected error when loading from invalid path, but got nil") + } + + // Invalid JSON content + tempFile, err := os.CreateTemp("", "invalid_json_*.json") + if err != nil { + t.Fatalf("Failed to create temporary file: %v", err) + } + defer os.Remove(tempFile.Name()) // Clean up + + // Write invalid JSON content to the file + _, err = tempFile.WriteString("invalid json content") + if err != nil { + t.Fatalf("Failed to write invalid content: %v", err) + } + + // Attempt to load invalid JSON content + _, err = LoadPatchesFromFile(tempFile.Name()) + if err == nil { + t.Errorf("Expected error when loading invalid JSON, but got nil") + } +} + +// TestLoadPatchesFromFileEmptyFile tests the case where the file is empty. 
+func TestLoadPatchesFromFileEmptyFile(t *testing.T) { + // Create a temporary file for testing (empty file) + tempFile, err := os.CreateTemp("", "empty_file_*.json") + if err != nil { + t.Fatalf("Failed to create temporary file: %v", err) + } + defer os.Remove(tempFile.Name()) // Clean up the temp file after the test + + // Ensure the file is empty + err = os.WriteFile(tempFile.Name(), []byte(""), 0644) + if err != nil { + t.Fatalf("Failed to write empty content to file: %v", err) + } + + // Attempt to load from an empty file + loadedPatches, err := LoadPatchesFromFile(tempFile.Name()) + if err != nil { + t.Fatalf("LoadPatchesFromFile failed for empty file: %v", err) + } + + // Check if the loaded patches map is empty + if len(loadedPatches) != 0 { + t.Errorf("Expected empty patches map from empty file, but got %d entries", len(loadedPatches)) + } +} diff --git a/go/tools/builders/nogo_change_test.go b/go/tools/builders/nogo_change_test.go new file mode 100644 index 0000000000..20cadbbfed --- /dev/null +++ b/go/tools/builders/nogo_change_test.go @@ -0,0 +1,1013 @@ +package main + +import ( + "fmt" + "go/token" + "os" + "path/filepath" + "reflect" + "slices" + "sort" + "testing" + + "golang.org/x/tools/go/analysis" +) + +const ( + FileA = "from" + FileB = "to" + UnifiedPrefix = "--- " + FileA + "\n+++ " + FileB + "\n" +) + +// Mock helper to create a mock file in the token.FileSet +func mockFileSet(fileName string, size int) *token.FileSet { + fset := token.NewFileSet() + f := fset.AddFile(fileName, fset.Base(), size) + for i := 0; i < size; i++ { + f.AddLine(i) + } + return fset +} + +// Mock analyzers for the test +var ( + analyzer1 = &analysis.Analyzer{Name: "analyzer1"} + analyzer2 = &analysis.Analyzer{Name: "analyzer2"} +) + +// ApplyEdits() and validate() here provide the reference implementation for testing +// ApplyEditsBytes() from nogo_change.go +func ApplyEdits(src string, edits []Edit) (string, error) { + edits, size, err := validate(src, edits) + if 
err != nil { + return "", err + } + + // Apply edits. + out := make([]byte, 0, size) + lastEnd := 0 + for _, edit := range edits { + if lastEnd < edit.Start { + out = append(out, src[lastEnd:edit.Start]...) + } + out = append(out, edit.New...) + lastEnd = edit.End + } + out = append(out, src[lastEnd:]...) + + if len(out) != size { + panic("wrong size") + } + + return string(out), nil +} + +func validate(src string, edits []Edit) ([]Edit, int, error) { + if !sort.IsSorted(editsSort(edits)) { + edits = append([]Edit(nil), edits...) + SortEdits(edits) + } + + // Check validity of edits and compute final size. + size := len(src) + lastEnd := 0 + for _, edit := range edits { + if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) { + return nil, 0, fmt.Errorf("diff has out-of-bounds edits") + } + if edit.Start < lastEnd { + return nil, 0, fmt.Errorf("diff has overlapping edits") + } + size += len(edit.New) + edit.Start - edit.End + lastEnd = edit.End + } + + return edits, size, nil +} + +// TestAddEdit_MultipleAnalyzers tests AddEdit with multiple analyzers and files using reflect.DeepEqual +func TestAddEdit_MultipleAnalyzers(t *testing.T) { + // Step 1: Setup + change := NewChange() + + // Mock data for analyzer 1 + file1 := "file1.go" + edit1a := Edit{Start: 10, End: 20, New: "code1 from analyzer1"} + edit1b := Edit{Start: 30, End: 40, New: "code2 from analyzer1"} + + // Mock data for analyzer 2 + edit2a := Edit{Start: 50, End: 60, New: "code1 from analyzer2"} + edit2b := Edit{Start: 70, End: 80, New: "code2 from analyzer2"} + + // Expected map after all edits are added + expected := map[string]map[string][]Edit{ + analyzer1.Name: { + file1: {edit1a, edit1b}, + }, + analyzer2.Name: { + file1: {edit2a, edit2b}, + }, + } + + // Step 2: Action - Add edits for both analyzers + change.AddEdit(analyzer1.Name, file1, edit1a) + change.AddEdit(analyzer1.Name, file1, edit1b) + change.AddEdit(analyzer2.Name, file1, edit2a) + change.AddEdit(analyzer2.Name, 
file1, edit2b) + + // Step 3: Verify that the actual map matches the expected map using reflect.DeepEqual + if !reflect.DeepEqual(change.AnalyzerToFileToEdits, expected) { + t.Fatalf("Change.AnalyzerToFileToEdits did not match the expected result.\nGot: %+v\nExpected: %+v", change.AnalyzerToFileToEdits, expected) + } +} + +// Test case for valid, successful cases +func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { + cwd, _ := os.Getwd() + file1path := filepath.Join(cwd, "file1.go") + + tests := []struct { + name string + fileSet *token.FileSet + diagnosticEntries []DiagnosticEntry + expectedEdits map[string]map[string][]Edit + }{ + { + name: "ValidEdits", + fileSet: mockFileSet(file1path, 100), + diagnosticEntries: []DiagnosticEntry{ + { + Analyzer: analyzer1, + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(5), End: token.Pos(10), NewText: []byte("new_text")}, + }, + }, + }, + }, + }, + { + Analyzer: analyzer1, + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(60), End: token.Pos(67), NewText: []byte("new_text")}, + }, + }, + }, + }, + }, + }, + expectedEdits: map[string]map[string][]Edit{ + "analyzer1": { + "file1.go": { + {New: "new_text", Start: 4, End: 9}, // offset is 0-based, while Pos is 1-based + {New: "new_text", Start: 59, End: 66}, // offset is 0-based, while Pos is 1-based + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + change, err := NewChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet) + + if err != nil { + t.Fatalf("expected no error, got: %v", err) + } + + if !reflect.DeepEqual(change.AnalyzerToFileToEdits, tt.expectedEdits) { + t.Fatalf("expected edits: %+v, got: %+v", tt.expectedEdits, change.AnalyzerToFileToEdits) + } + }) + } +} + +// Test case for error cases +func TestNewChangeFromDiagnostics_ErrorCases(t 
*testing.T) { + cwd, _ := os.Getwd() + file1path := filepath.Join(cwd, "file1.go") + + tests := []struct { + name string + fileSet *token.FileSet + diagnosticEntries []DiagnosticEntry + expectedErr string + }{ + { + name: "InvalidPosEnd", + fileSet: mockFileSet(file1path, 100), + diagnosticEntries: []DiagnosticEntry{ + { + Analyzer: analyzer1, + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(15), End: token.Pos(10), NewText: []byte("new_text")}, + }, + }, + }, + }, + }, + }, + expectedErr: "errors: [invalid fix: pos 15 > end 10]", + }, + { + name: "EndBeyondFile", + fileSet: mockFileSet(file1path, 100), + diagnosticEntries: []DiagnosticEntry{ + { + Analyzer: analyzer1, + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(50), End: token.Pos(102), NewText: []byte("new_text")}, + }, + }, + }, + }, + }, + }, + expectedErr: "errors: [invalid fix: end 102 past end of file 101]", // Pos=101 holds the extra EOF token, note Pos is 1-based + }, + { + name: "MissingFileInfo", + fileSet: token.NewFileSet(), // No files added + diagnosticEntries: []DiagnosticEntry{ + { + Analyzer: analyzer1, + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(5), End: token.Pos(10), NewText: []byte("new_text")}, + }, + }, + }, + }, + }, + }, + expectedErr: "errors: [invalid fix: missing file info for pos 5]", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := NewChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet) + + if err == nil { + t.Fatalf("expected an error, got none") + } + + if err.Error() != tt.expectedErr { + t.Fatalf("expected error: %v, got: %v", tt.expectedErr, err) + } + }) + } +} + +func TestSortEdits(t *testing.T) { + tests := []struct { + name string + edits []Edit + sorted []Edit + }{ + { + 
name: "already sorted", + edits: []Edit{ + {New: "a", Start: 0, End: 1}, + {New: "b", Start: 1, End: 2}, + {New: "c", Start: 2, End: 3}, + }, + sorted: []Edit{ + {New: "a", Start: 0, End: 1}, + {New: "b", Start: 1, End: 2}, + {New: "c", Start: 2, End: 3}, + }, + }, + { + name: "unsorted", + edits: []Edit{ + {New: "b", Start: 1, End: 2}, + {New: "a", Start: 0, End: 1}, + {New: "c", Start: 2, End: 3}, + }, + sorted: []Edit{ + {New: "a", Start: 0, End: 1}, + {New: "b", Start: 1, End: 2}, + {New: "c", Start: 2, End: 3}, + }, + }, + { + name: "insert before delete at same position", + edits: []Edit{ + {New: "", Start: 0, End: 1}, // delete + {New: "insert", Start: 0, End: 0}, // insert + }, + sorted: []Edit{ + {New: "insert", Start: 0, End: 0}, // insert comes before delete + {New: "", Start: 0, End: 1}, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + SortEdits(tt.edits) + if !reflect.DeepEqual(tt.edits, tt.sorted) { + t.Fatalf("expected %v, got %v", tt.sorted, tt.edits) + } + }) + } +} + +// Put these test cases as the global variable so that indentation is simpler. 
+var TestCases = []struct { + Name, In, Out, Unified string + Edits, LineEdits []Edit // expectation (LineEdits=nil => already line-aligned) + NoDiff bool +}{{ + Name: "empty", + In: "", + Out: "", +}, { + Name: "no_diff", + In: "gargantuan\n", + Out: "gargantuan\n", +}, { + Name: "replace_all", + In: "fruit\n", + Out: "cheese\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-fruit ++cheese +`[1:], + Edits: []Edit{{Start: 0, End: 5, New: "cheese"}}, + LineEdits: []Edit{{Start: 0, End: 6, New: "cheese\n"}}, +}, { + Name: "insert_rune", + In: "gord\n", + Out: "gourd\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-gord ++gourd +`[1:], + Edits: []Edit{{Start: 2, End: 2, New: "u"}}, + LineEdits: []Edit{{Start: 0, End: 5, New: "gourd\n"}}, +}, { + Name: "delete_rune", + In: "groat\n", + Out: "goat\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-groat ++goat +`[1:], + Edits: []Edit{{Start: 1, End: 2, New: ""}}, + LineEdits: []Edit{{Start: 0, End: 6, New: "goat\n"}}, +}, { + Name: "replace_rune", + In: "loud\n", + Out: "lord\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-loud ++lord +`[1:], + Edits: []Edit{{Start: 2, End: 3, New: "r"}}, + LineEdits: []Edit{{Start: 0, End: 5, New: "lord\n"}}, +}, { + Name: "replace_partials", + In: "blanket\n", + Out: "bunker\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-blanket ++bunker +`[1:], + Edits: []Edit{ + {Start: 1, End: 3, New: "u"}, + {Start: 6, End: 7, New: "r"}, + }, + LineEdits: []Edit{{Start: 0, End: 8, New: "bunker\n"}}, +}, { + Name: "insert_line", + In: "1: one\n3: three\n", + Out: "1: one\n2: two\n3: three\n", + Unified: UnifiedPrefix + ` +@@ -1,2 +1,3 @@ + 1: one ++2: two + 3: three +`[1:], + Edits: []Edit{{Start: 7, End: 7, New: "2: two\n"}}, +}, { + Name: "replace_no_newline", + In: "A", + Out: "B", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-A +\ No newline at end of file ++B +\ No newline at end of file +`[1:], + Edits: []Edit{{Start: 0, End: 1, New: "B"}}, +}, { + Name: "delete_empty", + In: "meow", + Out: "", // 
GNU diff -u special case: +0,0 + Unified: UnifiedPrefix + ` +@@ -1 +0,0 @@ +-meow +\ No newline at end of file +`[1:], + Edits: []Edit{{Start: 0, End: 4, New: ""}}, + LineEdits: []Edit{{Start: 0, End: 4, New: ""}}, +}, { + Name: "append_empty", + In: "", // GNU diff -u special case: -0,0 + Out: "AB\nC", + Unified: UnifiedPrefix + ` +@@ -0,0 +1,2 @@ ++AB ++C +\ No newline at end of file +`[1:], + Edits: []Edit{{Start: 0, End: 0, New: "AB\nC"}}, + LineEdits: []Edit{{Start: 0, End: 0, New: "AB\nC"}}, +}, + { + Name: "add_end", + In: "A", + Out: "AB", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-A +\ No newline at end of file ++AB +\ No newline at end of file +`[1:], + Edits: []Edit{{Start: 1, End: 1, New: "B"}}, + LineEdits: []Edit{{Start: 0, End: 1, New: "AB"}}, + }, { + Name: "add_empty", + In: "", + Out: "AB\nC", + Unified: UnifiedPrefix + ` +@@ -0,0 +1,2 @@ ++AB ++C +\ No newline at end of file +`[1:], + Edits: []Edit{{Start: 0, End: 0, New: "AB\nC"}}, + LineEdits: []Edit{{Start: 0, End: 0, New: "AB\nC"}}, + }, { + Name: "add_newline", + In: "A", + Out: "A\n", + Unified: UnifiedPrefix + ` +@@ -1 +1 @@ +-A +\ No newline at end of file ++A +`[1:], + Edits: []Edit{{Start: 1, End: 1, New: "\n"}}, + LineEdits: []Edit{{Start: 0, End: 1, New: "A\n"}}, + }, { + Name: "delete_front", + In: "A\nB\nC\nA\nB\nB\nA\n", + Out: "C\nB\nA\nB\nA\nC\n", + Unified: UnifiedPrefix + ` +@@ -1,7 +1,6 @@ +-A +-B + C ++B + A + B +-B + A ++C +`[1:], + NoDiff: true, // unified diff is different but valid + Edits: []Edit{ + {Start: 0, End: 4, New: ""}, + {Start: 6, End: 6, New: "B\n"}, + {Start: 10, End: 12, New: ""}, + {Start: 14, End: 14, New: "C\n"}, + }, + LineEdits: []Edit{ + {Start: 0, End: 4, New: ""}, + {Start: 6, End: 6, New: "B\n"}, + {Start: 10, End: 12, New: ""}, + {Start: 14, End: 14, New: "C\n"}, + }, + }, { + Name: "replace_last_line", + In: "A\nB\n", + Out: "A\nC\n\n", + Unified: UnifiedPrefix + ` +@@ -1,2 +1,3 @@ + A +-B ++C ++ +`[1:], + Edits: []Edit{{Start: 2, End: 3, New: 
"C\n"}}, + LineEdits: []Edit{{Start: 2, End: 4, New: "C\n\n"}}, + }, + { + Name: "multiple_replace", + In: "A\nB\nC\nD\nE\nF\nG\n", + Out: "A\nH\nI\nJ\nE\nF\nK\n", + Unified: UnifiedPrefix + ` +@@ -1,7 +1,7 @@ + A +-B +-C +-D ++H ++I ++J + E + F +-G ++K +`[1:], + Edits: []Edit{ + {Start: 2, End: 8, New: "H\nI\nJ\n"}, + {Start: 12, End: 14, New: "K\n"}, + }, + NoDiff: true, // diff algorithm produces different delete/insert pattern + }, + { + Name: "extra_newline", + In: "\nA\n", + Out: "A\n", + Edits: []Edit{{Start: 0, End: 1, New: ""}}, + Unified: UnifiedPrefix + `@@ -1,2 +1 @@ +- + A +`, + }, { + Name: "unified_lines", + In: "aaa\nccc\n", + Out: "aaa\nbbb\nccc\n", + Edits: []Edit{{Start: 3, End: 3, New: "\nbbb"}}, + LineEdits: []Edit{{Start: 0, End: 4, New: "aaa\nbbb\n"}}, + Unified: UnifiedPrefix + "@@ -1,2 +1,3 @@\n aaa\n+bbb\n ccc\n", + }, { + Name: "60379", + In: `package a + +type S struct { +s fmt.Stringer +} +`, + Out: `package a + +type S struct { + s fmt.Stringer +} +`, + Edits: []Edit{{Start: 27, End: 27, New: "\t"}}, + LineEdits: []Edit{{Start: 27, End: 42, New: "\ts fmt.Stringer\n"}}, + Unified: UnifiedPrefix + "@@ -1,5 +1,5 @@\n package a\n \n type S struct {\n-s fmt.Stringer\n+\ts fmt.Stringer\n }\n", + }, +} + +func TestApply(t *testing.T) { + t.Parallel() + + for _, tt := range TestCases { + t.Run(tt.Name, func(t *testing.T) { + reversedEdits := slices.Clone(tt.Edits) + slices.Reverse(reversedEdits) + got, err := ApplyEdits(tt.In, reversedEdits) + if err != nil { + t.Fatalf("ApplyEdits failed: %v", err) + } + gotBytes, err := ApplyEditsBytes([]byte(tt.In), tt.Edits) + if got != string(gotBytes) { + t.Fatalf("ApplyEditsBytes: got %q, want %q", gotBytes, got) + } + if got != tt.Out { + t.Errorf("ApplyEdits: got %q, want %q", got, tt.Out) + } + if tt.LineEdits != nil { + got, err := ApplyEdits(tt.In, tt.LineEdits) + if err != nil { + t.Fatalf("ApplyEdits failed: %v", err) + } + gotBytes, err := ApplyEditsBytes([]byte(tt.In), tt.LineEdits) + if got != 
string(gotBytes) { + t.Fatalf("ApplyEditsBytes: got %q, want %q", gotBytes, got) + } + if got != tt.Out { + t.Errorf("ApplyEdits: got %q, want %q", got, tt.Out) + } + } + }) + } +} + +func TestUniqueEdits(t *testing.T) { + t.Parallel() + tests := []struct { + name string + edits []Edit + want []Edit + wantIdx int + }{ + { + name: "empty slice", + edits: []Edit{}, + want: nil, + wantIdx: -1, + }, + { + name: "non-overlapping edits", + edits: []Edit{ + {New: "a", Start: 0, End: 1}, + {New: "b", Start: 2, End: 3}, + }, + want: []Edit{ + {New: "a", Start: 0, End: 1}, + {New: "b", Start: 2, End: 3}, + }, + wantIdx: -1, + }, + { + name: "overlapping edits", + edits: []Edit{ + {New: "a", Start: 0, End: 2}, + {New: "b", Start: 1, End: 3}, + }, + want: []Edit{ + {New: "a", Start: 0, End: 2}, + {New: "b", Start: 1, End: 3}, + }, + wantIdx: 1, + }, + { + name: "duplicate edits", + edits: []Edit{ + {New: "a", Start: 0, End: 1}, + {New: "a", Start: 0, End: 1}, + }, + want: []Edit{ + {New: "a", Start: 0, End: 1}, + }, + wantIdx: -1, + }, + { + name: "overlapping and duplicate edits", + edits: []Edit{ + {New: "a", Start: 0, End: 2}, + {New: "a", Start: 0, End: 2}, + {New: "b", Start: 1, End: 3}, + }, + want: []Edit{ + {New: "a", Start: 0, End: 2}, + {New: "b", Start: 1, End: 3}, + }, + wantIdx: 2, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got, gotIdx := UniqueEdits(tt.edits) + if !reflect.DeepEqual(got, tt.want) { + t.Fatalf("expected %v, got %v", tt.want, got) + } + if gotIdx != tt.wantIdx { + t.Fatalf("expected index %v, got %v", tt.wantIdx, gotIdx) + } + }) + } +} + +func TestFlatten(t *testing.T) { + tests := []struct { + name string + change Change + want map[string][]Edit + expectError bool + }{ + { + name: "single analyzer with non-overlapping edits", + change: Change{ + AnalyzerToFileToEdits: map[string]map[string][]Edit{ + "analyzer1": { + "file1.go": []Edit{ + {Start: 0, End: 1, New: "a"}, // Replace the first character + {Start: 2, 
End: 3, New: "b"}, // Replace the third character + }, + }, + }, + }, + want: map[string][]Edit{ + "file1.go": { + {Start: 0, End: 1, New: "a"}, + {Start: 2, End: 3, New: "b"}, + }, + }, + }, + { + name: "multiple analyzers with non-overlapping edits", + change: Change{ + AnalyzerToFileToEdits: map[string]map[string][]Edit{ + "analyzer1": { + "file1.go": { + {Start: 0, End: 1, New: "a"}, // Replace the first character + }, + }, + "analyzer2": { + "file1.go": { + {Start: 2, End: 3, New: "b"}, // Replace the third character + }, + }, + }, + }, + want: map[string][]Edit{ + "file1.go": { + {Start: 0, End: 1, New: "a"}, + {Start: 2, End: 3, New: "b"}, + }, + }, + }, + { + name: "multiple analyzers with non-overlapping edits on same position boundary", + change: Change{ + AnalyzerToFileToEdits: map[string]map[string][]Edit{ + "analyzer1": { + "file1.go": { + {Start: 0, End: 1, New: "a"}, // Replace the first character + }, + }, + "analyzer2": { + "file1.go": { + {Start: 1, End: 2, New: "c"}, // Starts where the first edit ends (no overlap) + }, + }, + }, + }, + want: map[string][]Edit{ + "file1.go": { + {Start: 0, End: 1, New: "a"}, // Replace the first character + {Start: 1, End: 2, New: "c"}, // Replace the second character + }, + }, + }, + { + name: "multiple analyzers with overlapping edits", + change: Change{ + AnalyzerToFileToEdits: map[string]map[string][]Edit{ + "analyzer1": { + "file1.go": { + {Start: 0, End: 2, New: "a"}, // Replace the first two characters + }, + }, + "analyzer2": { + "file1.go": { + {Start: 1, End: 3, New: "b"}, // Overlaps with analyzer1 (overlap starts at 1) + }, + }, + }, + }, + want: map[string][]Edit{ + "file1.go": { + {Start: 0, End: 2, New: "a"}, // Only the first valid edit is retained + }, + }, + }, + { + name: "multiple files with overlapping and non-overlapping edits", + change: Change{ + AnalyzerToFileToEdits: map[string]map[string][]Edit{ + "analyzer1": { + "file1.go": { + {Start: 0, End: 1, New: "a"}, // Replace the first 
character + }, + "file2.go": { + {Start: 2, End: 4, New: "b"}, // Replace the third and fourth characters + }, + }, + "analyzer2": { + "file1.go": { + {Start: 1, End: 2, New: "c"}, // Does not overlap with the first edit + }, + }, + }, + }, + want: map[string][]Edit{ + "file1.go": { + {Start: 0, End: 1, New: "a"}, // Both edits are valid + {Start: 1, End: 2, New: "c"}, // Starts after the first edit + }, + "file2.go": { + {Start: 2, End: 4, New: "b"}, // No overlap, so the edit is applied + }, + }, + }, + { + name: "no edits", + change: Change{ + AnalyzerToFileToEdits: map[string]map[string][]Edit{}, + }, + want: map[string][]Edit{}, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + got := Flatten(tt.change) + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("Flatten() = %v, want %v", got, tt.want) + } + }) + } +} + +func TestToPatches(t *testing.T) { + // Helper function to create a temporary file with specified content + createTempFile := func(filename, content string) error { + return os.WriteFile(filename, []byte(content), 0644) + } + + // Helper function to delete a file + deleteFile := func(filename string) { + os.Remove(filename) + } + + // Setup temporary test files + err := createTempFile("file1.go", "package main\nfunc Hello() {}\n") + if err != nil { + t.Fatalf("Failed to create temporary file1.go: %v", err) + } + defer deleteFile("file1.go") // Cleanup + + err = createTempFile("file2.go", "package main\nvar x = 10\n") + if err != nil { + t.Fatalf("Failed to create temporary file2.go: %v", err) + } + defer deleteFile("file2.go") // Cleanup + + tests := []struct { + name string + fileToEdits map[string][]Edit + expected map[string]string + expectErr bool + }{ + { + name: "simple patch for file1.go", + fileToEdits: map[string][]Edit{ + "file1.go": { + {Start: 27, End: 27, New: "\nHello, world!\n"}, // Insert in the function body + }, + }, + expected: map[string]string{ + "file1.go": `--- a/file1.go ++++ b/file1.go +@@ -1,2 
+1,4 @@ + package main +-func Hello() {} ++func Hello() { ++Hello, world! ++} +`, + }, + }, + { + name: "multiple files", + fileToEdits: map[string][]Edit{ + "file1.go": { + {Start: 27, End: 27, New: "\nHello, world!\n"}, // Insert in the function body + }, + "file2.go": { + {Start: 24, End: 24, New: "var y = 20\n"}, // Insert after var x = 10 + }, + }, + expected: map[string]string{ + "file1.go": `--- a/file1.go ++++ b/file1.go +@@ -1,2 +1,4 @@ + package main +-func Hello() {} ++func Hello() { ++Hello, world! ++} +`, + "file2.go": `--- a/file2.go ++++ b/file2.go +@@ -1,2 +1,3 @@ + package main + var x = 10 ++var y = 20 +`, + }, + }, + { + name: "file not found", + fileToEdits: map[string][]Edit{ + "nonexistent.go": { + {Start: 0, End: 0, New: "new content"}, + }, + }, + expectErr: true, + }, { + name: "no edits for file1.go (len(edits) == 0), no patch should be generated", + fileToEdits: map[string][]Edit{ + "file1.go": {}, // No edits + }, + expected: map[string]string{}, // No patch expected + expectErr: false, + }, { + name: "no edits for file1.go (len(edits) == 0 with nil), no patch should be generated", + fileToEdits: map[string][]Edit{ + "file1.go": nil, // No edits + }, + expected: map[string]string{}, // No patch expected + expectErr: false, + }, + { + name: "no edits for multiple files (len(edits) == 0), no patches should be generated", + fileToEdits: map[string][]Edit{ + "file1.go": {}, // No edits + "file2.go": {}, // No edits + }, + expected: map[string]string{}, // No patches expected + expectErr: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + patches, err := ToPatches(tt.fileToEdits) + if (err != nil) != tt.expectErr { + t.Fatalf("expected error: %v, got: %v", tt.expectErr, err) + } + if err == nil && !reflect.DeepEqual(patches, tt.expected) { + t.Errorf("expected patches: %v, got: %v", tt.expected, patches) + } + }) + } +} + +func TestTrimWhitespaceHeadAndTail(t *testing.T) { + t.Parallel() + + tests := 
[]struct { + name string + input []string + want []string + }{ + { + name: "Empty slice", + input: []string{}, + want: []string{}, + }, + { + name: "All empty strings", + input: []string{"", " ", "\t", "\n"}, + want: []string{}, + }, + { + name: "Leading and trailing empty strings", + input: []string{"", " ", "hello", "world", " ", ""}, + want: []string{"hello", "world"}, + }, + { + name: "No leading or trailing empty strings", + input: []string{"hello", "world"}, + want: []string{"hello", "world"}, + }, + { + name: "Single non-empty string", + input: []string{"hello"}, + want: []string{"hello"}, + }, + } + + for _, tt := range tests { + tt := tt + + t.Run(tt.name, func(t *testing.T) { + got := trimWhitespaceHeadAndTail(tt.input) + + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("trimWhitespaceHeadAndTail() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/go/tools/builders/nogo_edit.go b/go/tools/builders/nogo_edit.go deleted file mode 100644 index 6e6d7e580b..0000000000 --- a/go/tools/builders/nogo_edit.go +++ /dev/null @@ -1,159 +0,0 @@ -/** -Copyright (c) 2009 The Go Authors. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Source: https://sourcegraph.com/github.com/golang/tools/-/blob/internal/diff/diff.go -*/ - -package main - -import ( - "fmt" - "sort" -) - -// An Edit describes the replacement of a portion of a text file. -type Edit struct { - New string `json:"new"` // the replacement - Start int `json:"start"` // starting byte offset of the region to replace - End int `json:"end"` // ending byte offset of the region to replace -} - -func (e Edit) String() string { - return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New) -} - -// ApplyEdits applies a sequence of edits to the src buffer and returns the -// result. Edits are applied in order of start offset; edits with the -// same start offset are applied in they order they were provided. -// -// ApplyEdits returns an error if any edit is out of bounds, -// or if any pair of edits is overlapping. -func ApplyEdits(src string, edits []Edit) (string, error) { - edits, size, err := validate(src, edits) - if err != nil { - return "", err - } - - // Apply edits. - out := make([]byte, 0, size) - lastEnd := 0 - for _, edit := range edits { - if lastEnd < edit.Start { - out = append(out, src[lastEnd:edit.Start]...) - } - out = append(out, edit.New...) 
- lastEnd = edit.End - } - out = append(out, src[lastEnd:]...) - - if len(out) != size { - panic("wrong size") - } - - return string(out), nil -} - -// ApplyEditsBytes is like Apply, but it accepts a byte slice. -// The result is always a new array. -func ApplyEditsBytes(src []byte, edits []Edit) ([]byte, error) { - res, err := ApplyEdits(string(src), edits) - return []byte(res), err -} - -// validate checks that edits are consistent with src, -// and returns the size of the patched output. -// It may return a different slice. -func validate(src string, edits []Edit) ([]Edit, int, error) { - if !sort.IsSorted(editsSort(edits)) { - edits = append([]Edit(nil), edits...) - SortEdits(edits) - } - - // Check validity of edits and compute final size. - size := len(src) - lastEnd := 0 - for _, edit := range edits { - if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) { - return nil, 0, fmt.Errorf("diff has out-of-bounds edits") - } - if edit.Start < lastEnd { - return nil, 0, fmt.Errorf("diff has overlapping edits") - } - size += len(edit.New) + edit.Start - edit.End - lastEnd = edit.End - } - - return edits, size, nil -} - -// UniqueEdits returns a list of edits that is sorted and -// contains no duplicate edits. Returns the index of some -// overlapping adjacent edits if there is one and <0 if the -// edits are valid. -func UniqueEdits(edits []Edit) ([]Edit, int) { - if len(edits) == 0 { - return nil, -1 - } - equivalent := func(x, y Edit) bool { - return x.Start == y.Start && x.End == y.End && x.New == y.New - } - SortEdits(edits) - unique := []Edit{edits[0]} - invalid := -1 - for i := 1; i < len(edits); i++ { - prev, cur := edits[i-1], edits[i] - if !equivalent(prev, cur) { - unique = append(unique, cur) - if prev.End > cur.Start { - invalid = i - } - } - } - return unique, invalid -} - -// SortEdits orders a slice of Edits by (start, end) offset. 
-// This ordering puts insertions (end = start) before deletions -// (end > start) at the same point, but uses a stable sort to preserve -// the order of multiple insertions at the same point. -// (Apply detects multiple deletions at the same point as an error.) -func SortEdits(edits []Edit) { - sort.Stable(editsSort(edits)) -} - -type editsSort []Edit - -func (a editsSort) Len() int { return len(a) } -func (a editsSort) Less(i, j int) bool { - if cmp := a[i].Start - a[j].Start; cmp != 0 { - return cmp < 0 - } - return a[i].End < a[j].End -} -func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] } diff --git a/go/tools/builders/nogo_main.go b/go/tools/builders/nogo_main.go index 4976b7e4c0..c06a0275af 100644 --- a/go/tools/builders/nogo_main.go +++ b/go/tools/builders/nogo_main.go @@ -77,7 +77,7 @@ func run(args []string) (error, int) { importcfg := flags.String("importcfg", "", "The import configuration file") packagePath := flags.String("p", "", "The package path (importmap) of the package being compiled") xPath := flags.String("x", "", "The archive file where serialized facts should be written") - nogoFixPath := flags.String("fixpath", "", "The fix path for nogo") + nogoFixPath := flags.String("fixpath", "", "The path of the file that stores the nogo fixes") var ignores multiFlag flags.Var(&ignores, "ignore", "Names of files to ignore") flags.Parse(args) @@ -99,7 +99,6 @@ func run(args []string) (error, int) { } } if diagnostics != "" { - // debugMode is defined by the template in generate_nogo_main.go. exitCode := nogoViolation if debugMode { @@ -461,12 +460,8 @@ func (g *goPackage) String() string { // and returns a string containing all the diagnostics that should be printed // to the build log. 
func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) string { - type entry struct { - analysis.Diagnostic - *analysis.Analyzer - } - var diagnostics []entry - var diagnosticsCore []analysis.Diagnostic + var diagnostics []DiagnosticEntry + var errs []error cwd, err := os.Getwd() if cwd == "" || err != nil { @@ -508,7 +503,7 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) if currentConfig.onlyFiles == nil && currentConfig.excludeFiles == nil { for _, diag := range act.diagnostics { - diagnostics = append(diagnostics, entry{Diagnostic: diag, Analyzer: act.a}) + diagnostics = append(diagnostics, DiagnosticEntry{Diagnostic: diag, Analyzer: act.a}) } continue } @@ -546,13 +541,31 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) } } if include { - diagnostics = append(diagnostics, entry{Diagnostic: d, Analyzer: act.a}) + diagnostics = append(diagnostics, DiagnosticEntry{Diagnostic: d, Analyzer: act.a}) } } } if numSkipped > 0 { errs = append(errs, fmt.Errorf("%d analyzers skipped due to type-checking error: %v", numSkipped, pkg.typeCheckError)) } + + if nogoFixPath != "" { + // If the nogo fixes are requested, we need to save the fixes to the file even if they are empty. 
+ // Otherwise, bazel will complain "not all outputs were created or valid" + change, err := NewChangeFromDiagnostics(diagnostics, pkg.fset) + if err != nil { + errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in converting diagnostics to change %v", err)) + } + fileToPatch, err := ToPatches(Flatten(*change)) + if err != nil { + errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in generating the patches %v", err)) + } + err = SavePatchesToFile(nogoFixPath, fileToPatch) + if err != nil { + errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in file saving %v", err)) + } + } + if len(diagnostics) == 0 && len(errs) == 0 { return "" } @@ -560,6 +573,7 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) sort.Slice(diagnostics, func(i, j int) bool { return diagnostics[i].Pos < diagnostics[j].Pos }) + errMsg := &bytes.Buffer{} sep := "" for _, err := range errs { @@ -568,17 +582,10 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) errMsg.WriteString(err.Error()) } for _, d := range diagnostics { - diagnosticsCore = append(diagnosticsCore, d.Diagnostic) - // log.Fatalf("!!!!!: %+v", d.SuggestedFixes) errMsg.WriteString(sep) sep = "\n" fmt.Fprintf(errMsg, "%s: %s (%s)", pkg.fset.Position(d.Pos), d.Message, d.Name) } - - change := NewChange() - change.BuildFromDiagnostics(diagnosticsCore, pkg.fset) - - SaveToFile(nogoFixPath, *change) return errMsg.String() } diff --git a/go/tools/builders/nogo_validation.go b/go/tools/builders/nogo_validation.go index 6738635de6..e5da74ff21 100644 --- a/go/tools/builders/nogo_validation.go +++ b/go/tools/builders/nogo_validation.go @@ -8,9 +8,12 @@ import ( func nogoValidation(args []string) error { validationOutput := args[0] logFile := args[1] + nogoFixFileTmp := args[2] + nogoFixFile := args[3] + // Always create the output file and only fail if the log file is non-empty to // avoid an "action failed to 
create outputs" error. - logContent, err := os.ReadFile(logFile); + logContent, err := os.ReadFile(logFile) if err != nil { return err } @@ -18,11 +21,40 @@ func nogoValidation(args []string) error { if err != nil { return err } - if len(logContent) > 100000000000000000 { + + nogoFixContent, err := os.ReadFile(nogoFixFileTmp) + if err != nil { + return err + } + err = os.WriteFile(nogoFixFile, nogoFixContent, 0755) + if err != nil { + return err + } + + if len(logContent) > 0 { + nogoFixRelated := "" + // See nogo_change_serialization.go, if the patches are empty, then nogoFixContent is empty by design, rather than an empty json like {}. + if len(nogoFixContent) > 0 { + // Command to view nogo fix + viewNogoFixCmd := fmt.Sprintf("jq -r 'to_entries[] | .value | @text' %s | tee", nogoFixFile) + // Command to apply nogo fix + applyNogoFixCmd := fmt.Sprintf("jq -r 'to_entries[] | .value | @text' %s | patch -p1", nogoFixFile) + + // Format the message in a clean and clear way + nogoFixRelated = fmt.Sprintf(` +-------------------------------------- +To view the nogo fix, run the following command: +$ %s + +To apply the nogo fix, run the following command: +$ %s +-------------------------------------- + `, viewNogoFixCmd, applyNogoFixCmd) + } // Separate nogo output from Bazel's --sandbox_debug message via an // empty line. // Don't return to avoid printing the "nogovalidation:" prefix. 
- _, _ = fmt.Fprintf(os.Stderr, "\n%s\n", logContent) + _, _ = fmt.Fprintf(os.Stderr, "\n%s%s\n", logContent, nogoFixRelated) os.Exit(1) } return nil diff --git a/go/tools/builders/stdlib.go b/go/tools/builders/stdlib.go index 105ca5c635..5731447090 100644 --- a/go/tools/builders/stdlib.go +++ b/go/tools/builders/stdlib.go @@ -131,7 +131,7 @@ You may need to use the flags --cpu=x64_windows --compiler=mingw-gcc.`) installArgs = append(installArgs, "-race") } if *pgoprofile != "" { - installArgs = append(installArgs, "-pgo", abs(*pgoprofile)) + gcflags = append(gcflags, "-pgoprofile=" + abs(*pgoprofile)) } if *shared { gcflags = append(gcflags, "-shared") From cff597fffeedbdd45199783b593e6beae8a1f798 Mon Sep 17 00:00:00 2001 From: peng3141 Date: Fri, 13 Dec 2024 22:55:08 +0000 Subject: [PATCH 03/12] rebase and merge conflict --- go/private/actions/archive.bzl | 10 +++++----- go/private/actions/compilepkg.bzl | 10 +++++----- go/private/rules/test.bzl | 2 +- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/go/private/actions/archive.bzl b/go/private/actions/archive.bzl index 9dad92db71..76f4489c07 100644 --- a/go/private/actions/archive.bzl +++ b/go/private/actions/archive.bzl @@ -61,9 +61,9 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d nogo = get_nogo(go) if nogo: - out_facts = go.declare_file(go, name = source.library.name, ext = pre_ext + ".facts") - out_nogo_log = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.log") - out_nogo_validation = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo") + out_facts = go.declare_file(go, name = source.name, ext = pre_ext + ".facts") + out_nogo_log = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.log") + out_nogo_validation = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo") # out_nogo_fix_tmp holds the fixes produced by the RunNogo action, out_nogo_fix holds the fixes produced by the ValidateNogo 
action. # They have the same content, but ValidateNogo propagates the fixes and eventually externalizes the fixes via `_validation` in the OutputGroupInfo section. @@ -71,8 +71,8 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d # thereby producing available fixes for all targets. # Otherwise, if we externalize out_nogo_fix_tmp (not going through the ValidateNogo action) by putting it into a field (e.g., `nogo_fix`) in the OutputGroupInfo section of the input targets, # we can see the fix for the input targets, but will miss the fixes for the dependent targets. - out_nogo_fix_tmp = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.fix.tmp") - out_nogo_fix = go.declare_file(go, name = source.library.name, ext = pre_ext + ".nogo.fix") + out_nogo_fix_tmp = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.fix.tmp") + out_nogo_fix = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.fix") else: out_facts = None out_nogo_log = None diff --git a/go/private/actions/compilepkg.bzl b/go/private/actions/compilepkg.bzl index f709291808..45bea13bbc 100644 --- a/go/private/actions/compilepkg.bzl +++ b/go/private/actions/compilepkg.bzl @@ -258,11 +258,11 @@ def _run_nogo( inputs_direct.append(cgo_go_srcs) nogo_args.add_all([cgo_go_srcs], before_each = "-ignore_src") - args.add_all(archives, before_each = "-facts", map_each = _facts) - args.add("-out_facts", out_facts) - args.add("-out_log", out_log) - args.add("-out_fix", out_nogo_fix_tmp) - args.add("-nogo", nogo) + nogo_args.add_all(archives, before_each = "-facts", map_each = _facts) + nogo_args.add("-out_facts", out_facts) + nogo_args.add("-out_log", out_log) + nogo_args.add("-out_fix", out_nogo_fix_tmp) + nogo_args.add("-nogo", nogo) # This action runs nogo and produces the facts files for downstream nogo actions. 
# It is important that this action doesn't fail if nogo produces findings, which allows users diff --git a/go/private/rules/test.bzl b/go/private/rules/test.bzl index 2f0ba40145..0bcafb3bf7 100644 --- a/go/private/rules/test.bzl +++ b/go/private/rules/test.bzl @@ -84,7 +84,7 @@ def _go_test_impl(ctx): # since that package would be built separately, during which the nogo fixes are produced already. validation_outputs.append(internal_archive.data._nogo_fix_output) - go_srcs = [src for src in internal_source.srcs if src.extension == "go"] + go_srcs = [src for src in internal_go_info.srcs if src.extension == "go"] # Compile the library with the external black box tests external_go_info = new_go_info( From 952dc67f99019f7f2befde57823bc5f56384a0c5 Mon Sep 17 00:00:00 2001 From: peng3141 Date: Wed, 18 Dec 2024 04:16:43 +0000 Subject: [PATCH 04/12] 12/17: switch back to the linux patch solution, all patches combiend into one --- go/private/actions/archive.bzl | 10 - go/private/actions/compilepkg.bzl | 14 +- go/private/rules/binary.bzl | 1 + go/private/rules/library.bzl | 1 + go/private/rules/test.bzl | 11 +- go/tools/builders/BUILD.bazel | 3 - go/tools/builders/nogo_change.go | 218 +++++----- .../builders/nogo_change_serialization.go | 65 --- .../nogo_change_serialization_test.go | 133 ------- go/tools/builders/nogo_change_test.go | 374 ++++-------------- go/tools/builders/nogo_main.go | 10 +- go/tools/builders/nogo_validation.go | 28 +- 12 files changed, 220 insertions(+), 648 deletions(-) delete mode 100644 go/tools/builders/nogo_change_serialization.go delete mode 100644 go/tools/builders/nogo_change_serialization_test.go diff --git a/go/private/actions/archive.bzl b/go/private/actions/archive.bzl index 76f4489c07..7a170bc4f3 100644 --- a/go/private/actions/archive.bzl +++ b/go/private/actions/archive.bzl @@ -65,19 +65,11 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_nogo_log = go.declare_file(go, name = source.name, ext = 
pre_ext + ".nogo.log") out_nogo_validation = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo") - # out_nogo_fix_tmp holds the fixes produced by the RunNogo action, out_nogo_fix holds the fixes produced by the ValidateNogo action. - # They have the same content, but ValidateNogo propagates the fixes and eventually externalizes the fixes via `_validation` in the OutputGroupInfo section. - # --run_validations (default=True) ensures nogo validation is applied to not only the input targets but also their dependent targets, - # thereby producing available fixes for all targets. - # Otherwise, if we externalize out_nogo_fix_tmp (not going through the ValidateNogo action) by putting it into a field (e.g., `nogo_fix`) in the OutputGroupInfo section of the input targets, - # we can see the fix for the input targets, but will miss the fixes for the dependent targets. - out_nogo_fix_tmp = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.fix.tmp") out_nogo_fix = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.fix") else: out_facts = None out_nogo_log = None out_nogo_validation = None - out_nogo_fix_tmp = None out_nogo_fix = None direct = source.deps @@ -124,7 +116,6 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_facts = out_facts, out_nogo_log = out_nogo_log, out_nogo_validation = out_nogo_validation, - out_nogo_fix_tmp = out_nogo_fix_tmp, out_nogo_fix = out_nogo_fix, nogo = nogo, out_cgo_export_h = out_cgo_export_h, @@ -155,7 +146,6 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_facts = out_facts, out_nogo_log = out_nogo_log, out_nogo_validation = out_nogo_validation, - out_nogo_fix_tmp = out_nogo_fix_tmp, out_nogo_fix = out_nogo_fix, nogo = nogo, gc_goopts = source.gc_goopts, diff --git a/go/private/actions/compilepkg.bzl b/go/private/actions/compilepkg.bzl index 45bea13bbc..80d672f29d 100644 --- a/go/private/actions/compilepkg.bzl +++ 
b/go/private/actions/compilepkg.bzl @@ -70,7 +70,6 @@ def emit_compilepkg( out_facts = None, out_nogo_log = None, out_nogo_validation = None, - out_nogo_fix_tmp = None, out_nogo_fix = None, nogo = None, out_cgo_export_h = None, @@ -91,8 +90,6 @@ def emit_compilepkg( fail("nogo must be specified if and only if out_nogo_log is specified") if have_nogo != (out_nogo_validation != None): fail("nogo must be specified if and only if out_nogo_validation is specified") - if bool(nogo) != bool(out_nogo_fix_tmp): - fail("nogo must be specified if and only if out_nogo_fix_tmp is specified") if bool(nogo) != bool(out_nogo_fix): fail("nogo must be specified if and only if out_nogo_fix is specified") @@ -226,7 +223,6 @@ def emit_compilepkg( out_facts = out_facts, out_log = out_nogo_log, out_validation = out_nogo_validation, - out_nogo_fix_tmp = out_nogo_fix_tmp, out_nogo_fix = out_nogo_fix, nogo = nogo, ) @@ -241,7 +237,6 @@ def _run_nogo( out_facts, out_log, out_validation, - out_nogo_fix_tmp, out_nogo_fix, nogo): """Runs nogo on Go source files, including those generated by cgo.""" @@ -251,7 +246,7 @@ def _run_nogo( [archive.data.facts_file for archive in archives if archive.data.facts_file] + [archive.data.export_file for archive in archives]) inputs_transitive = [sdk.tools, sdk.headers, go.stdlib.libs] - outputs = [out_facts, out_log, out_nogo_fix_tmp] + outputs = [out_facts, out_log, out_nogo_fix] nogo_args = go.tool_args(go) if cgo_go_srcs: @@ -261,7 +256,7 @@ def _run_nogo( nogo_args.add_all(archives, before_each = "-facts", map_each = _facts) nogo_args.add("-out_facts", out_facts) nogo_args.add("-out_log", out_log) - nogo_args.add("-out_fix", out_nogo_fix_tmp) + nogo_args.add("-out_fix", out_nogo_fix) nogo_args.add("-nogo", nogo) # This action runs nogo and produces the facts files for downstream nogo actions. 
@@ -290,12 +285,11 @@ def _run_nogo( validation_args.add("nogovalidation") validation_args.add(out_validation) validation_args.add(out_log) - validation_args.add(out_nogo_fix_tmp) validation_args.add(out_nogo_fix) go.actions.run( - inputs = [out_log, out_nogo_fix_tmp], - outputs = [out_validation, out_nogo_fix], + inputs = [out_log, out_nogo_fix], + outputs = [out_validation], mnemonic = "ValidateNogo", executable = go.toolchain._builder, arguments = [validation_args], diff --git a/go/private/rules/binary.bzl b/go/private/rules/binary.bzl index 089c54f4b8..e99141a6af 100644 --- a/go/private/rules/binary.bzl +++ b/go/private/rules/binary.bzl @@ -165,6 +165,7 @@ def _go_binary_impl(ctx): OutputGroupInfo( cgo_exports = archive.cgo_exports, compilation_outputs = [archive.data.file], + nogo_fix = [nogo_fix_output] if nogo_fix_output else [], _validation = nogo_validation_outputs, ), ] diff --git a/go/private/rules/library.bzl b/go/private/rules/library.bzl index ad190e5f77..9a36562be3 100644 --- a/go/private/rules/library.bzl +++ b/go/private/rules/library.bzl @@ -72,6 +72,7 @@ def _go_library_impl(ctx): OutputGroupInfo( cgo_exports = archive.cgo_exports, compilation_outputs = [archive.data.file], + nogo_fix = [nogo_fix_output] if nogo_fix_output else [], _validation = nogo_validation_outputs, ), ] diff --git a/go/private/rules/test.bzl b/go/private/rules/test.bzl index 0bcafb3bf7..e3c13d518f 100644 --- a/go/private/rules/test.bzl +++ b/go/private/rules/test.bzl @@ -69,6 +69,7 @@ def _go_test_impl(ctx): ) validation_outputs = [] + nogo_fix_outputs = [] # Compile the library to test with internal white box tests internal_go_info = new_go_info( @@ -80,10 +81,8 @@ def _go_test_impl(ctx): if internal_archive.data._validation_output: validation_outputs.append(internal_archive.data._validation_output) if internal_archive.data._nogo_fix_output: - # We do not include those from external_archive that corresponds to a separate package - # since that package would be built 
separately, during which the nogo fixes are produced already. validation_outputs.append(internal_archive.data._nogo_fix_output) - + nogo_fix_outputs.append(internal_archive.data._nogo_fix_output) go_srcs = [src for src in internal_go_info.srcs if src.extension == "go"] # Compile the library with the external black box tests @@ -104,6 +103,11 @@ def _go_test_impl(ctx): external_archive = go.archive(go, external_go_info, is_external_pkg = True) if external_archive.data._validation_output: validation_outputs.append(external_archive.data._validation_output) + if external_archive.data._nogo_fix_output: + # internal vs external archive refers to the same package vs separate package. + # we include the nogo fixes for transitive dependency too. + validation_outputs.append(external_archive.data._nogo_fix_output) + nogo_fix_outputs.append(external_archive.data._nogo_fix_output) # now generate the main function repo_relative_rundir = ctx.attr.rundir or ctx.label.package or "." @@ -212,6 +216,7 @@ def _go_test_impl(ctx): ), OutputGroupInfo( compilation_outputs = [internal_archive.data.file], + nogo_fix = nogo_fix_outputs, _validation = validation_outputs, ), coverage_common.instrumented_files_info( diff --git a/go/tools/builders/BUILD.bazel b/go/tools/builders/BUILD.bazel index 3ff22d9e53..6e6bded6b3 100644 --- a/go/tools/builders/BUILD.bazel +++ b/go/tools/builders/BUILD.bazel @@ -37,8 +37,6 @@ go_test( srcs = [ "difflib.go", "nogo_change.go", - "nogo_change_serialization.go", - "nogo_change_serialization_test.go", "nogo_change_test.go", ], deps = [ @@ -124,7 +122,6 @@ go_source( "env.go", "flags.go", "nogo_change.go", - "nogo_change_serialization.go", "nogo_main.go", "nogo_typeparams_go117.go", "nogo_typeparams_go118.go", diff --git a/go/tools/builders/nogo_change.go b/go/tools/builders/nogo_change.go index 3c9be41551..a04389696f 100644 --- a/go/tools/builders/nogo_change.go +++ b/go/tools/builders/nogo_change.go @@ -7,13 +7,12 @@ import ( "path/filepath" "sort" "strings" - 
"unicode" "golang.org/x/tools/go/analysis" ) -// DiagnosticEntry represents a diagnostic entry with the corresponding analyzer. -type DiagnosticEntry struct { +// diagnosticEntry represents a diagnostic entry with the corresponding analyzer. +type diagnosticEntry struct { analysis.Diagnostic *analysis.Analyzer } @@ -29,6 +28,24 @@ type Edit struct { End int `json:"end"` // (exclusive) ending byte offset of the region to replace } + +// FileEdits represents the mapping of analyzers to their edits for a specific file. +type FileEdits struct { + AnalyzerToEdits map[string][]Edit `json:"analyzer_to_edits"` // Analyzer as the key, edits as the value +} + +// Change represents a collection of file edits. +type Change struct { + FileToEdits map[string]FileEdits `json:"file_to_edits"` // File path as the key, analyzer-to-edits mapping as the value +} + +// NewChange creates a new Change object. +func NewChange() *Change { + return &Change{ + FileToEdits: make(map[string]FileEdits), + } +} + func (e Edit) String() string { return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New) } @@ -134,27 +151,15 @@ func validateBytes(src []byte, edits []Edit) ([]Edit, int, error) { return edits, size, nil } -// The following is about the `Change`, a high-level abstraction of edits. -// Change represents a set of edits to be applied to a set of files. -type Change struct { - AnalyzerToFileToEdits map[string]map[string][]Edit `json:"analyzer_file_to_edits"` -} - -// NewChange creates a new Change object. -func NewChange() *Change { - return &Change{ - AnalyzerToFileToEdits: make(map[string]map[string][]Edit), - } -} // NewChangeFromDiagnostics builds a Change from a set of diagnostics. // Unlike Diagnostic, Change is independent of the FileSet given it uses perf-file offsets instead of token.Pos. // This allows Change to be used in contexts where the FileSet is not available, e.g., it remains applicable after it is saved to disk and loaded back. 
// See https://github.com/golang/tools/blob/master/go/analysis/diagnostic.go for details. -func NewChangeFromDiagnostics(entries []DiagnosticEntry, fileSet *token.FileSet) (*Change, error) { +func NewChangeFromDiagnostics(entries []diagnosticEntry, fileSet *token.FileSet) (*Change, error) { c := NewChange() - cwd, err := os.Getwd() // workspace root + cwd, err := os.Getwd() if err != nil { return c, fmt.Errorf("Error getting current working directory: (%v)", err) } @@ -167,7 +172,6 @@ func NewChangeFromDiagnostics(entries []DiagnosticEntry, fileSet *token.FileSet) for _, edit := range sf.TextEdits { start, end := edit.Pos, edit.End if !end.IsValid() { - // In insertion, end could be token.NoPos end = start } @@ -190,7 +194,7 @@ func NewChangeFromDiagnostics(entries []DiagnosticEntry, fileSet *token.FileSet) if err != nil { fileRelativePath = file.Name() // fallback logic } - c.AddEdit(analyzer, fileRelativePath, edit) + c.AddEdit(fileRelativePath, analyzer, edit) } } } @@ -201,126 +205,146 @@ func NewChangeFromDiagnostics(entries []DiagnosticEntry, fileSet *token.FileSet) return c, nil } -// AddEdit adds an edit to the change. -func (c *Change) AddEdit(analyzer string, file string, edit Edit) { - // Check if the analyzer exists in the map - if _, ok := c.AnalyzerToFileToEdits[analyzer]; !ok { - // Initialize the map for the analyzer if it doesn't exist - c.AnalyzerToFileToEdits[analyzer] = make(map[string][]Edit) + +// AddEdit adds an edit to the Change, organizing by file and analyzer. 
+func (c *Change) AddEdit(file string, analyzer string, edit Edit) { + // Ensure the FileEdits structure exists for the file + fileEdits, exists := c.FileToEdits[file] + if !exists { + fileEdits = FileEdits{ + AnalyzerToEdits: make(map[string][]Edit), + } + c.FileToEdits[file] = fileEdits } - // Append the edit to the list of edits for the specific file under the analyzer - c.AnalyzerToFileToEdits[analyzer][file] = append(c.AnalyzerToFileToEdits[analyzer][file], edit) + // Append the edit to the list of edits for the analyzer + fileEdits.AnalyzerToEdits[analyzer] = append(fileEdits.AnalyzerToEdits[analyzer], edit) } -// Flatten takes a Change and returns a map of FileToEdits, merging edits from all analyzers. + + +// Flatten merges all edits for a file from different analyzers into a single map of file-to-edits. +// Edits from each analyzer are processed in a deterministic order, and overlapping edits are skipped. func Flatten(change Change) map[string][]Edit { fileToEdits := make(map[string][]Edit) - analyzers := make([]string, 0, len(change.AnalyzerToFileToEdits)) - for analyzer := range change.AnalyzerToFileToEdits { - analyzers = append(analyzers, analyzer) - } - sort.Strings(analyzers) - for _, analyzer := range analyzers { - // following the order of analyzers, random iteration order over map makes testing flaky - fileToEditsMap := change.AnalyzerToFileToEdits[analyzer] - for file, edits := range fileToEditsMap { - var localEdits []Edit - if existingEdits, found := fileToEdits[file]; found { - localEdits = append(existingEdits, edits...) 
- } else { - localEdits = edits - } + for file, fileEdits := range change.FileToEdits { + // Get a sorted list of analyzers for deterministic processing order + analyzers := make([]string, 0, len(fileEdits.AnalyzerToEdits)) + for analyzer := range fileEdits.AnalyzerToEdits { + analyzers = append(analyzers, analyzer) + } + sort.Strings(analyzers) + + mergedEdits := make([]Edit, 0) + + for _, analyzer := range analyzers { + edits := fileEdits.AnalyzerToEdits[analyzer] + + // Deduplicate and sort edits for the current analyzer + edits, _ = UniqueEdits(edits) - // Validate the local edits before updating the map - localEdits, invalidEditIndex := UniqueEdits(localEdits) - if invalidEditIndex >= 0 { - // Detected overlapping edits, skip the edits from this analyzer - // Note: we merge edits from as many analyzers as possible. - // This allows us to fix as many linter errors as possible. Also, after the initial set - // of fixing edits are applied to the source code, the next bazel build will run the analyzers again - // and produce edits that are no longer overlapping. + // Merge edits into the current list, checking for overlaps + candidateEdits := append(mergedEdits, edits...) + candidateEdits, invalidIndex := UniqueEdits(candidateEdits) + if invalidIndex >= 0 { + // Skip edits from this analyzer if merging them would cause overlaps. + // Apply the non-overlapping edits first. After that, a rerun of bazel build will + // allows these skipped edits to be applied separately. + // Note the resolution happens to each file independently. continue } - fileToEdits[file] = localEdits + + // Update the merged edits + mergedEdits = candidateEdits } + + // Store the final merged edits for the file + fileToEdits[file] = mergedEdits } return fileToEdits } -// ToPatches converts the edits to patches. 
-func ToPatches(fileToEdits map[string][]Edit) (map[string]string, error) { - patches := make(map[string]string) - for relativeFilePath, edits := range fileToEdits { - // Skip processing if edits are nil or empty + +// ToCombinedPatch converts all edits to a single consolidated patch. +func ToCombinedPatch(fileToEdits map[string][]Edit) (string, error) { + var combinedPatch strings.Builder + + filePaths := make([]string, 0, len(fileToEdits)) + for filePath := range fileToEdits { + filePaths = append(filePaths, filePath) + } + sort.Strings(filePaths) // Sort file paths alphabetically + + // Iterate over sorted file paths + for _, filePath := range filePaths { + edits := fileToEdits[filePath] if len(edits) == 0 { continue } + // Ensure edits are unique and sorted edits, _ = UniqueEdits(edits) - contents, err := os.ReadFile(relativeFilePath) + contents, err := os.ReadFile(filePath) if err != nil { - return nil, err + return "", fmt.Errorf("failed to read file %s: %v", filePath, err) } out, err := ApplyEditsBytes(contents, edits) if err != nil { - return nil, err + return "", fmt.Errorf("failed to apply edits for file %s: %v", filePath, err) } diff := UnifiedDiff{ - // difflib.SplitLines does not handle well the whitespace at the beginning or the end. - // For example, it would add an extra \n at the end - // See https://github.com/pmezard/go-difflib/blob/master/difflib/difflib.go#L768 - // trimWhitespaceHeadAndTail is a postprocessing to produce clean patches. 
- A: trimWhitespaceHeadAndTail(SplitLines(string(contents))), - B: trimWhitespaceHeadAndTail(SplitLines(string(out))), - // standard convention is to use "a" and "b" for the original and new versions of the file - // discovered by doing `git diff` - FromFile: fmt.Sprintf("a/%s", relativeFilePath), - ToFile: fmt.Sprintf("b/%s", relativeFilePath), - // git needs lines of context to be able to apply the patch - // we use 3 lines of context because that's what `git diff` uses - Context: 3, + A: trimWhitespaceHeadAndTail(SplitLines(string(contents))), + B: trimWhitespaceHeadAndTail(SplitLines(string(out))), + FromFile: fmt.Sprintf("a/%s", filePath), + ToFile: fmt.Sprintf("b/%s", filePath), + Context: 3, } + patch, err := GetUnifiedDiffString(diff) if err != nil { - return nil, err + return "", fmt.Errorf("failed to generate patch for file %s: %v", filePath, err) } - patches[relativeFilePath] = patch + + // Append the patch for this file to the giant patch + combinedPatch.WriteString(patch) + combinedPatch.WriteString("\n") // Ensure separation between file patches } - return patches, nil + + // Remove trailing newline + result := combinedPatch.String() + if len(result) > 0 && result[len(result)-1] == '\n' { + result = result[:len(result)-1] + } + + return result, nil } + func trimWhitespaceHeadAndTail(lines []string) []string { - if len(lines) == 0 { - return lines + // Trim left + for len(lines) > 0 && strings.TrimSpace(lines[0]) == "" { + lines = lines[1:] } - // Inner function: returns true if the given string contains any non-whitespace characters. - hasNonWhitespaceCharacter := func(s string) bool { - return strings.ContainsFunc(s, func(r rune) bool { - return !unicode.IsSpace(r) - }) + // Trim right + for len(lines) > 0 && strings.TrimSpace(lines[len(lines)-1]) == "" { + lines = lines[:len(lines)-1] } - // Trim left - for i := 0; i < len(lines); i++ { - if hasNonWhitespaceCharacter(lines[i]) { - lines = lines[i:] - break - } - } + return lines +} - // Trim right. 
- for i := len(lines) - 1; i >= 0; i-- { - if hasNonWhitespaceCharacter(lines[i]) { - return lines[:i+1] - } + + +func SaveToFile(filename string, combinedPatch string) error { + err := os.WriteFile(filename, []byte(combinedPatch), 0644) + if err != nil { + return fmt.Errorf("error writing to file: %v", err) } - // If we didn't return above, all strings contained only whitespace, so return an empty slice. - return []string{} + return nil } diff --git a/go/tools/builders/nogo_change_serialization.go b/go/tools/builders/nogo_change_serialization.go deleted file mode 100644 index 1f274d9ece..0000000000 --- a/go/tools/builders/nogo_change_serialization.go +++ /dev/null @@ -1,65 +0,0 @@ -package main - -import ( - "encoding/json" - "fmt" - "os" -) - -// SavePatchesToFile saves the map[string]string (file paths to patch content) to a JSON file. -func SavePatchesToFile(filename string, patches map[string]string) error { - if len(patches) == 0 { - // Special case optimization for the empty patches, where we dump an empty string, rather than an empty json like {}. - // This helps skip the json serialization below. 
- err := os.WriteFile(filename, []byte(""), 0644) - if err != nil { - return fmt.Errorf("error writing empty string to file: %v", err) - } - return nil - } - - // Serialize patches (map[string]string) to JSON - jsonData, err := json.MarshalIndent(patches, "", " ") - if err != nil { - // If serialization fails, create the output file anyway as per your requirements - errWrite := os.WriteFile(filename, []byte(""), 0644) - if errWrite != nil { - return fmt.Errorf("error serializing to JSON: %v and error writing to the file: %v", err, errWrite) - } else { - return fmt.Errorf("error serializing to JSON: %v", err) - } - } - - // Write the JSON data to the file - err = os.WriteFile(filename, jsonData, 0644) - if err != nil { - return fmt.Errorf("error writing to file: %v", err) - } - - return nil -} - -// LoadPatchesFromFile loads the map[string]string (file paths to patch content) from a JSON file. -// Note LoadPatchesFromFile is used for testing only. -func LoadPatchesFromFile(filename string) (map[string]string, error) { - var patches map[string]string - - // Read the JSON file - jsonData, err := os.ReadFile(filename) - if err != nil { - return nil, fmt.Errorf("error reading file: %v", err) - } - - if len(jsonData) == 0 { - // this corresponds to the special case optimization in SavePatchesToFile - return make(map[string]string), nil - } - - // Deserialize JSON data into the patches map (map[string]string) - err = json.Unmarshal(jsonData, &patches) - if err != nil { - return nil, fmt.Errorf("error deserializing JSON: %v", err) - } - - return patches, nil -} diff --git a/go/tools/builders/nogo_change_serialization_test.go b/go/tools/builders/nogo_change_serialization_test.go deleted file mode 100644 index d57606fe5d..0000000000 --- a/go/tools/builders/nogo_change_serialization_test.go +++ /dev/null @@ -1,133 +0,0 @@ -package main - -import ( - "os" - "testing" -) - -// TestSaveAndLoadPatches tests both SavePatchesToFile and LoadPatchesFromFile functions. 
-func TestSaveAndLoadPatches(t *testing.T) { - // Create a temporary file for testing - tempFile, err := os.CreateTemp("", "patches_test_*.json") - if err != nil { - t.Fatalf("Failed to create temporary file: %v", err) - } - defer os.Remove(tempFile.Name()) // Clean up the temp file after the test - - // Define the test data (map[string]string) - patches := map[string]string{ - "file1.go": "patch content for file1", - "file2.go": "patch content for file2", - } - - // Test SavePatchesToFile - err = SavePatchesToFile(tempFile.Name(), patches) - if err != nil { - t.Fatalf("SavePatchesToFile failed: %v", err) - } - - // Test LoadPatchesFromFile - loadedPatches, err := LoadPatchesFromFile(tempFile.Name()) - if err != nil { - t.Fatalf("LoadPatchesFromFile failed: %v", err) - } - - // Check if the loaded patches match the original ones - if len(loadedPatches) != len(patches) { - t.Errorf("Expected %d patches, but got %d", len(patches), len(loadedPatches)) - } - - for key, value := range patches { - if loadedPatches[key] != value { - t.Errorf("Patch mismatch for key %s: expected %s, got %s", key, value, loadedPatches[key]) - } - } - - // Test with an empty map - patches = map[string]string{} - err = SavePatchesToFile(tempFile.Name(), patches) - if err != nil { - t.Fatalf("SavePatchesToFile failed for empty map: %v", err) - } - - loadedPatches, err = LoadPatchesFromFile(tempFile.Name()) - if err != nil { - t.Fatalf("LoadPatchesFromFile failed for empty map: %v", err) - } - - // Check if the loaded patches map is empty - if len(loadedPatches) != 0 { - t.Errorf("Expected empty patches map, but got %d entries", len(loadedPatches)) - } -} - -// TestSavePatchesToFileError tests error handling in SavePatchesToFile. 
-func TestSavePatchesToFileError(t *testing.T) { - // Invalid file path (simulating write error) - filename := "/invalid/path/patches.json" - patches := map[string]string{ - "file1.go": "patch content", - } - - err := SavePatchesToFile(filename, patches) - if err == nil { - t.Errorf("Expected error when saving to invalid path, but got nil") - } -} - -// TestLoadPatchesFromFileError tests error handling in LoadPatchesFromFile. -func TestLoadPatchesFromFileError(t *testing.T) { - // Invalid file path (simulating read error) - filename := "/invalid/path/patches.json" - - _, err := LoadPatchesFromFile(filename) - if err == nil { - t.Errorf("Expected error when loading from invalid path, but got nil") - } - - // Invalid JSON content - tempFile, err := os.CreateTemp("", "invalid_json_*.json") - if err != nil { - t.Fatalf("Failed to create temporary file: %v", err) - } - defer os.Remove(tempFile.Name()) // Clean up - - // Write invalid JSON content to the file - _, err = tempFile.WriteString("invalid json content") - if err != nil { - t.Fatalf("Failed to write invalid content: %v", err) - } - - // Attempt to load invalid JSON content - _, err = LoadPatchesFromFile(tempFile.Name()) - if err == nil { - t.Errorf("Expected error when loading invalid JSON, but got nil") - } -} - -// TestLoadPatchesFromFileEmptyFile tests the case where the file is empty. 
-func TestLoadPatchesFromFileEmptyFile(t *testing.T) { - // Create a temporary file for testing (empty file) - tempFile, err := os.CreateTemp("", "empty_file_*.json") - if err != nil { - t.Fatalf("Failed to create temporary file: %v", err) - } - defer os.Remove(tempFile.Name()) // Clean up the temp file after the test - - // Ensure the file is empty - err = os.WriteFile(tempFile.Name(), []byte(""), 0644) - if err != nil { - t.Fatalf("Failed to write empty content to file: %v", err) - } - - // Attempt to load from an empty file - loadedPatches, err := LoadPatchesFromFile(tempFile.Name()) - if err != nil { - t.Fatalf("LoadPatchesFromFile failed for empty file: %v", err) - } - - // Check if the loaded patches map is empty - if len(loadedPatches) != 0 { - t.Errorf("Expected empty patches map from empty file, but got %d entries", len(loadedPatches)) - } -} diff --git a/go/tools/builders/nogo_change_test.go b/go/tools/builders/nogo_change_test.go index 20cadbbfed..cfe202da6b 100644 --- a/go/tools/builders/nogo_change_test.go +++ b/go/tools/builders/nogo_change_test.go @@ -87,37 +87,30 @@ func validate(src string, edits []Edit) ([]Edit, int, error) { // TestAddEdit_MultipleAnalyzers tests AddEdit with multiple analyzers and files using reflect.DeepEqual func TestAddEdit_MultipleAnalyzers(t *testing.T) { - // Step 1: Setup change := NewChange() - - // Mock data for analyzer 1 file1 := "file1.go" + edit1a := Edit{Start: 10, End: 20, New: "code1 from analyzer1"} edit1b := Edit{Start: 30, End: 40, New: "code2 from analyzer1"} - - // Mock data for analyzer 2 edit2a := Edit{Start: 50, End: 60, New: "code1 from analyzer2"} edit2b := Edit{Start: 70, End: 80, New: "code2 from analyzer2"} - // Expected map after all edits are added - expected := map[string]map[string][]Edit{ - analyzer1.Name: { - file1: {edit1a, edit1b}, - }, - analyzer2.Name: { - file1: {edit2a, edit2b}, + expected := map[string]FileEdits{ + file1: { + AnalyzerToEdits: map[string][]Edit{ + analyzer1.Name: {edit1a, 
edit1b}, + analyzer2.Name: {edit2a, edit2b}, + }, }, } - // Step 2: Action - Add edits for both analyzers - change.AddEdit(analyzer1.Name, file1, edit1a) - change.AddEdit(analyzer1.Name, file1, edit1b) - change.AddEdit(analyzer2.Name, file1, edit2a) - change.AddEdit(analyzer2.Name, file1, edit2b) + change.AddEdit(file1, analyzer1.Name, edit1a) + change.AddEdit(file1, analyzer1.Name, edit1b) + change.AddEdit(file1, analyzer2.Name, edit2a) + change.AddEdit(file1, analyzer2.Name, edit2b) - // Step 3: Verify that the actual map matches the expected map using reflect.DeepEqual - if !reflect.DeepEqual(change.AnalyzerToFileToEdits, expected) { - t.Fatalf("Change.AnalyzerToFileToEdits did not match the expected result.\nGot: %+v\nExpected: %+v", change.AnalyzerToFileToEdits, expected) + if !reflect.DeepEqual(change.FileToEdits, expected) { + t.Fatalf("Change.FileToEdits did not match the expected result.\nGot: %+v\nExpected: %+v", change.FileToEdits, expected) } } @@ -129,13 +122,13 @@ func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { tests := []struct { name string fileSet *token.FileSet - diagnosticEntries []DiagnosticEntry - expectedEdits map[string]map[string][]Edit + diagnosticEntries []diagnosticEntry + expectedEdits map[string]FileEdits }{ { name: "ValidEdits", fileSet: mockFileSet(file1path, 100), - diagnosticEntries: []DiagnosticEntry{ + diagnosticEntries: []diagnosticEntry{ { Analyzer: analyzer1, Diagnostic: analysis.Diagnostic{ @@ -148,24 +141,13 @@ func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { }, }, }, - { - Analyzer: analyzer1, - Diagnostic: analysis.Diagnostic{ - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - {Pos: token.Pos(60), End: token.Pos(67), NewText: []byte("new_text")}, - }, - }, - }, - }, - }, }, - expectedEdits: map[string]map[string][]Edit{ - "analyzer1": { - "file1.go": { - {New: "new_text", Start: 4, End: 9}, // offset is 0-based, while Pos is 1-based - {New: "new_text", Start: 59, 
End: 66}, // offset is 0-based, while Pos is 1-based + expectedEdits: map[string]FileEdits{ + "file1.go": { + AnalyzerToEdits: map[string][]Edit{ + "analyzer1": { + {New: "new_text", Start: 4, End: 9}, // 0-based offset + }, }, }, }, @@ -175,13 +157,11 @@ func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { change, err := NewChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet) - if err != nil { t.Fatalf("expected no error, got: %v", err) } - - if !reflect.DeepEqual(change.AnalyzerToFileToEdits, tt.expectedEdits) { - t.Fatalf("expected edits: %+v, got: %+v", tt.expectedEdits, change.AnalyzerToFileToEdits) + if !reflect.DeepEqual(change.FileToEdits, tt.expectedEdits) { + t.Fatalf("expected edits: %+v, got: %+v", tt.expectedEdits, change.FileToEdits) } }) } @@ -195,13 +175,13 @@ func TestNewChangeFromDiagnostics_ErrorCases(t *testing.T) { tests := []struct { name string fileSet *token.FileSet - diagnosticEntries []DiagnosticEntry + diagnosticEntries []diagnosticEntry expectedErr string }{ { name: "InvalidPosEnd", fileSet: mockFileSet(file1path, 100), - diagnosticEntries: []DiagnosticEntry{ + diagnosticEntries: []diagnosticEntry{ { Analyzer: analyzer1, Diagnostic: analysis.Diagnostic{ @@ -217,44 +197,6 @@ func TestNewChangeFromDiagnostics_ErrorCases(t *testing.T) { }, expectedErr: "errors: [invalid fix: pos 15 > end 10]", }, - { - name: "EndBeyondFile", - fileSet: mockFileSet(file1path, 100), - diagnosticEntries: []DiagnosticEntry{ - { - Analyzer: analyzer1, - Diagnostic: analysis.Diagnostic{ - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - {Pos: token.Pos(50), End: token.Pos(102), NewText: []byte("new_text")}, - }, - }, - }, - }, - }, - }, - expectedErr: "errors: [invalid fix: end 102 past end of file 101]", // Pos=101 holds the extra EOF token, note Pos is 1-based - }, - { - name: "MissingFileInfo", - fileSet: token.NewFileSet(), // No files added - 
diagnosticEntries: []DiagnosticEntry{ - { - Analyzer: analyzer1, - Diagnostic: analysis.Diagnostic{ - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - {Pos: token.Pos(5), End: token.Pos(10), NewText: []byte("new_text")}, - }, - }, - }, - }, - }, - }, - expectedErr: "errors: [invalid fix: missing file info for pos 5]", - }, } for _, tt := range tests { @@ -620,121 +562,50 @@ func TestApply(t *testing.T) { } } +// TestUniqueEdits verifies deduplication and overlap detection. func TestUniqueEdits(t *testing.T) { - t.Parallel() tests := []struct { name string edits []Edit want []Edit wantIdx int }{ - { - name: "empty slice", - edits: []Edit{}, - want: nil, - wantIdx: -1, - }, - { - name: "non-overlapping edits", - edits: []Edit{ - {New: "a", Start: 0, End: 1}, - {New: "b", Start: 2, End: 3}, - }, - want: []Edit{ - {New: "a", Start: 0, End: 1}, - {New: "b", Start: 2, End: 3}, - }, - wantIdx: -1, - }, { name: "overlapping edits", edits: []Edit{ - {New: "a", Start: 0, End: 2}, - {New: "b", Start: 1, End: 3}, - }, - want: []Edit{ - {New: "a", Start: 0, End: 2}, - {New: "b", Start: 1, End: 3}, + {Start: 0, End: 2, New: "a"}, + {Start: 1, End: 3, New: "b"}, }, + want: []Edit{{Start: 0, End: 2, New: "a"}, {Start: 1, End: 3, New: "b"}}, wantIdx: 1, }, - { - name: "duplicate edits", - edits: []Edit{ - {New: "a", Start: 0, End: 1}, - {New: "a", Start: 0, End: 1}, - }, - want: []Edit{ - {New: "a", Start: 0, End: 1}, - }, - wantIdx: -1, - }, - { - name: "overlapping and duplicate edits", - edits: []Edit{ - {New: "a", Start: 0, End: 2}, - {New: "a", Start: 0, End: 2}, - {New: "b", Start: 1, End: 3}, - }, - want: []Edit{ - {New: "a", Start: 0, End: 2}, - {New: "b", Start: 1, End: 3}, - }, - wantIdx: 2, - }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, gotIdx := UniqueEdits(tt.edits) - if !reflect.DeepEqual(got, tt.want) { + if !reflect.DeepEqual(got, tt.want) || gotIdx != tt.wantIdx { t.Fatalf("expected %v, got %v", tt.want, 
got) } - if gotIdx != tt.wantIdx { - t.Fatalf("expected index %v, got %v", tt.wantIdx, gotIdx) - } }) } } + func TestFlatten(t *testing.T) { tests := []struct { - name string - change Change - want map[string][]Edit - expectError bool + name string + change Change + want map[string][]Edit }{ - { - name: "single analyzer with non-overlapping edits", - change: Change{ - AnalyzerToFileToEdits: map[string]map[string][]Edit{ - "analyzer1": { - "file1.go": []Edit{ - {Start: 0, End: 1, New: "a"}, // Replace the first character - {Start: 2, End: 3, New: "b"}, // Replace the third character - }, - }, - }, - }, - want: map[string][]Edit{ - "file1.go": { - {Start: 0, End: 1, New: "a"}, - {Start: 2, End: 3, New: "b"}, - }, - }, - }, { name: "multiple analyzers with non-overlapping edits", change: Change{ - AnalyzerToFileToEdits: map[string]map[string][]Edit{ - "analyzer1": { - "file1.go": { - {Start: 0, End: 1, New: "a"}, // Replace the first character - }, - }, - "analyzer2": { - "file1.go": { - {Start: 2, End: 3, New: "b"}, // Replace the third character + FileToEdits: map[string]FileEdits{ + "file1.go": { + AnalyzerToEdits: map[string][]Edit{ + "analyzer1": {{Start: 0, End: 1, New: "a"}}, + "analyzer2": {{Start: 2, End: 3, New: "b"}}, }, }, }, @@ -746,87 +617,24 @@ func TestFlatten(t *testing.T) { }, }, }, - { - name: "multiple analyzers with non-overlapping edits on same position boundary", - change: Change{ - AnalyzerToFileToEdits: map[string]map[string][]Edit{ - "analyzer1": { - "file1.go": { - {Start: 0, End: 1, New: "a"}, // Replace the first character - }, - }, - "analyzer2": { - "file1.go": { - {Start: 1, End: 2, New: "c"}, // Starts where the first edit ends (no overlap) - }, - }, - }, - }, - want: map[string][]Edit{ - "file1.go": { - {Start: 0, End: 1, New: "a"}, // Replace the first character - {Start: 1, End: 2, New: "c"}, // Replace the second character - }, - }, - }, { name: "multiple analyzers with overlapping edits", change: Change{ - AnalyzerToFileToEdits: 
map[string]map[string][]Edit{ - "analyzer1": { - "file1.go": { - {Start: 0, End: 2, New: "a"}, // Replace the first two characters - }, - }, - "analyzer2": { - "file1.go": { - {Start: 1, End: 3, New: "b"}, // Overlaps with analyzer1 (overlap starts at 1) - }, - }, - }, - }, - want: map[string][]Edit{ - "file1.go": { - {Start: 0, End: 2, New: "a"}, // Only the first valid edit is retained - }, - }, - }, - { - name: "multiple files with overlapping and non-overlapping edits", - change: Change{ - AnalyzerToFileToEdits: map[string]map[string][]Edit{ - "analyzer1": { - "file1.go": { - {Start: 0, End: 1, New: "a"}, // Replace the first character - }, - "file2.go": { - {Start: 2, End: 4, New: "b"}, // Replace the third and fourth characters - }, - }, - "analyzer2": { - "file1.go": { - {Start: 1, End: 2, New: "c"}, // Does not overlap with the first edit + FileToEdits: map[string]FileEdits{ + "file1.go": { + AnalyzerToEdits: map[string][]Edit{ + "analyzer1": {{Start: 0, End: 2, New: "a"}}, + "analyzer2": {{Start: 1, End: 3, New: "b"}}, }, }, }, }, want: map[string][]Edit{ "file1.go": { - {Start: 0, End: 1, New: "a"}, // Both edits are valid - {Start: 1, End: 2, New: "c"}, // Starts after the first edit - }, - "file2.go": { - {Start: 2, End: 4, New: "b"}, // No overlap, so the edit is applied + {Start: 0, End: 2, New: "a"}, }, }, }, - { - name: "no edits", - change: Change{ - AnalyzerToFileToEdits: map[string]map[string][]Edit{}, - }, - want: map[string][]Edit{}, - }, } for _, tt := range tests { @@ -839,45 +647,41 @@ func TestFlatten(t *testing.T) { } } -func TestToPatches(t *testing.T) { - // Helper function to create a temporary file with specified content +func TestToCombinedPatch(t *testing.T) { + // Helper functions to create and delete temporary files createTempFile := func(filename, content string) error { return os.WriteFile(filename, []byte(content), 0644) } - - // Helper function to delete a file deleteFile := func(filename string) { os.Remove(filename) } - // 
Setup temporary test files + // Setup: Create temporary files err := createTempFile("file1.go", "package main\nfunc Hello() {}\n") if err != nil { t.Fatalf("Failed to create temporary file1.go: %v", err) } - defer deleteFile("file1.go") // Cleanup + defer deleteFile("file1.go") err = createTempFile("file2.go", "package main\nvar x = 10\n") if err != nil { t.Fatalf("Failed to create temporary file2.go: %v", err) } - defer deleteFile("file2.go") // Cleanup + defer deleteFile("file2.go") tests := []struct { name string fileToEdits map[string][]Edit - expected map[string]string + expected string expectErr bool }{ { - name: "simple patch for file1.go", + name: "valid patch for multiple files", fileToEdits: map[string][]Edit{ - "file1.go": { - {Start: 27, End: 27, New: "\nHello, world!\n"}, // Insert in the function body - }, + "file1.go": {{Start: 27, End: 27, New: "\nHello, world!\n"}}, // Add to function body + "file2.go": {{Start: 24, End: 24, New: "var y = 20\n"}}, // Add a new variable }, - expected: map[string]string{ - "file1.go": `--- a/file1.go + expected: `--- a/file1.go +++ b/file1.go @@ -1,2 +1,4 @@ package main @@ -885,80 +689,44 @@ func TestToPatches(t *testing.T) { +func Hello() { +Hello, world! +} -`, - }, - }, - { - name: "multiple files", - fileToEdits: map[string][]Edit{ - "file1.go": { - {Start: 27, End: 27, New: "\nHello, world!\n"}, // Insert in the function body - }, - "file2.go": { - {Start: 24, End: 24, New: "var y = 20\n"}, // Insert after var x = 10 - }, - }, - expected: map[string]string{ - "file1.go": `--- a/file1.go -+++ b/file1.go -@@ -1,2 +1,4 @@ - package main --func Hello() {} -+func Hello() { -+Hello, world! 
-+} -`, - "file2.go": `--- a/file2.go + +--- a/file2.go +++ b/file2.go @@ -1,2 +1,3 @@ package main var x = 10 +var y = 20 `, - }, + expectErr: false, }, { name: "file not found", fileToEdits: map[string][]Edit{ - "nonexistent.go": { - {Start: 0, End: 0, New: "new content"}, - }, + "nonexistent.go": {{Start: 0, End: 0, New: "new content"}}, }, + expected: "", expectErr: true, - }, { - name: "no edits for file1.go (len(edits) == 0), no patch should be generated", - fileToEdits: map[string][]Edit{ - "file1.go": {}, // No edits - }, - expected: map[string]string{}, // No patch expected - expectErr: false, - }, { - name: "no edits for file1.go (len(edits) == 0 with nil), no patch should be generated", - fileToEdits: map[string][]Edit{ - "file1.go": nil, // No edits - }, - expected: map[string]string{}, // No patch expected - expectErr: false, }, { - name: "no edits for multiple files (len(edits) == 0), no patches should be generated", - fileToEdits: map[string][]Edit{ - "file1.go": {}, // No edits - "file2.go": {}, // No edits - }, - expected: map[string]string{}, // No patches expected + name: "no edits", + fileToEdits: map[string][]Edit{}, + expected: "", expectErr: false, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - patches, err := ToPatches(tt.fileToEdits) + combinedPatch, err := ToCombinedPatch(tt.fileToEdits) + + // Verify error expectation if (err != nil) != tt.expectErr { t.Fatalf("expected error: %v, got: %v", tt.expectErr, err) } - if err == nil && !reflect.DeepEqual(patches, tt.expected) { - t.Errorf("expected patches: %v, got: %v", tt.expected, patches) + + // If no error, verify the patch output + if err == nil && combinedPatch != tt.expected { + t.Errorf("expected patch:\n%v\ngot:\n%v", tt.expected, combinedPatch) } }) } diff --git a/go/tools/builders/nogo_main.go b/go/tools/builders/nogo_main.go index c06a0275af..5be26d0fd1 100644 --- a/go/tools/builders/nogo_main.go +++ b/go/tools/builders/nogo_main.go @@ -460,7 +460,7 @@ func 
(g *goPackage) String() string { // and returns a string containing all the diagnostics that should be printed // to the build log. func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) string { - var diagnostics []DiagnosticEntry + var diagnostics []diagnosticEntry var errs []error cwd, err := os.Getwd() @@ -503,7 +503,7 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) if currentConfig.onlyFiles == nil && currentConfig.excludeFiles == nil { for _, diag := range act.diagnostics { - diagnostics = append(diagnostics, DiagnosticEntry{Diagnostic: diag, Analyzer: act.a}) + diagnostics = append(diagnostics, diagnosticEntry{Diagnostic: diag, Analyzer: act.a}) } continue } @@ -541,7 +541,7 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) } } if include { - diagnostics = append(diagnostics, DiagnosticEntry{Diagnostic: d, Analyzer: act.a}) + diagnostics = append(diagnostics, diagnosticEntry{Diagnostic: d, Analyzer: act.a}) } } } @@ -556,11 +556,11 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) if err != nil { errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in converting diagnostics to change %v", err)) } - fileToPatch, err := ToPatches(Flatten(*change)) + giantPatch, err := ToCombinedPatch(Flatten(*change)) if err != nil { errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in generating the patches %v", err)) } - err = SavePatchesToFile(nogoFixPath, fileToPatch) + err = SaveToFile(nogoFixPath, giantPatch) if err != nil { errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in file saving %v", err)) } diff --git a/go/tools/builders/nogo_validation.go b/go/tools/builders/nogo_validation.go index e5da74ff21..5cc4108206 100644 --- a/go/tools/builders/nogo_validation.go +++ b/go/tools/builders/nogo_validation.go @@ -8,8 +8,7 @@ import ( func nogoValidation(args []string) error { 
validationOutput := args[0] logFile := args[1] - nogoFixFileTmp := args[2] - nogoFixFile := args[3] + nogoFixFile := args[2] // Always create the output file and only fail if the log file is non-empty to // avoid an "action failed to create outputs" error. @@ -22,11 +21,7 @@ func nogoValidation(args []string) error { return err } - nogoFixContent, err := os.ReadFile(nogoFixFileTmp) - if err != nil { - return err - } - err = os.WriteFile(nogoFixFile, nogoFixContent, 0755) + nogoFixContent, err := os.ReadFile(nogoFixFile) if err != nil { return err } @@ -35,21 +30,16 @@ func nogoValidation(args []string) error { nogoFixRelated := "" // See nogo_change_serialization.go, if the patches are empty, then nogoFixContent is empty by design, rather than an empty json like {}. if len(nogoFixContent) > 0 { - // Command to view nogo fix - viewNogoFixCmd := fmt.Sprintf("jq -r 'to_entries[] | .value | @text' %s | tee", nogoFixFile) - // Command to apply nogo fix - applyNogoFixCmd := fmt.Sprintf("jq -r 'to_entries[] | .value | @text' %s | patch -p1", nogoFixFile) - // Format the message in a clean and clear way nogoFixRelated = fmt.Sprintf(` --------------------------------------- -To view the nogo fix, run the following command: -$ %s +-------------------Suggested Fixes------------------- +The suggested fixes are as follows: +%s -To apply the nogo fix, run the following command: -$ %s --------------------------------------- - `, viewNogoFixCmd, applyNogoFixCmd) +To apply the suggested fixes, run the following command: +$ patch -p1 < %s +----------------------------------------------------- +`, nogoFixContent, nogoFixFile) } // Separate nogo output from Bazel's --sandbox_debug message via an // empty line. 
From 4f6084b114dd7badcece13bcf2156cc360c27dbb Mon Sep 17 00:00:00 2001 From: peng3141 Date: Thu, 19 Dec 2024 14:45:15 +0000 Subject: [PATCH 05/12] 12/18: import https://github.com/pmezard/go-difflib rather than copying --- MODULE.bazel | 1 + go.mod | 1 + go.sum | 1 + go/private/actions/archive.bzl | 2 +- go/private/rules/nogo.bzl | 3 +- go/tools/builders/BUILD.bazel | 3 +- go/tools/builders/difflib.go | 792 ------------------------------- go/tools/builders/nogo_change.go | 9 +- 8 files changed, 12 insertions(+), 800 deletions(-) delete mode 100644 go/tools/builders/difflib.go diff --git a/MODULE.bazel b/MODULE.bazel index 95964ba46c..361ae83671 100644 --- a/MODULE.bazel +++ b/MODULE.bazel @@ -38,6 +38,7 @@ use_repo( "com_github_gogo_protobuf", "com_github_golang_mock", "com_github_golang_protobuf", + "com_github_pmezard_go_difflib", "org_golang_google_genproto", "org_golang_google_grpc", "org_golang_google_grpc_cmd_protoc_gen_go_grpc", diff --git a/go.mod b/go.mod index 4a29f87973..ef9ec9dc95 100644 --- a/go.mod +++ b/go.mod @@ -6,6 +6,7 @@ require ( github.com/gogo/protobuf v1.3.2 github.com/golang/mock v1.7.0-rc.1 github.com/golang/protobuf v1.5.3 + github.com/pmezard/go-difflib v1.0.0 golang.org/x/net v0.26.0 golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013 diff --git a/go.sum b/go.sum index 46841e9c80..71476d1c5e 100644 --- a/go.sum +++ b/go.sum @@ -49,6 +49,7 @@ github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+ github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw= github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= diff --git a/go/private/actions/archive.bzl b/go/private/actions/archive.bzl index 7a170bc4f3..45bf1eb4f8 100644 --- a/go/private/actions/archive.bzl +++ b/go/private/actions/archive.bzl @@ -65,7 +65,7 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_nogo_log = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.log") out_nogo_validation = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo") - out_nogo_fix = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.fix") + out_nogo_fix = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.patch") else: out_facts = None out_nogo_log = None diff --git a/go/private/rules/nogo.bzl b/go/private/rules/nogo.bzl index aa306feb94..4bd8fd0bf3 100644 --- a/go/private/rules/nogo.bzl +++ b/go/private/rules/nogo.bzl @@ -65,7 +65,7 @@ def _nogo_impl(ctx): go, struct( embed = [ctx.attr._nogo_srcs], - deps = analyzer_archives, + deps = analyzer_archives + [ctx.attr._go_difflib[GoArchive]], ), generated_srcs = [nogo_main], name = go.label.name + "~nogo", @@ -102,6 +102,7 @@ _nogo = rule( ), "_cgo_context_data": attr.label(default = "//:cgo_context_data_proxy"), "_go_config": attr.label(default = "//:go_config"), + "_go_difflib": attr.label(default = "@com_github_pmezard_go_difflib//difflib:go_default_library"), "_stdlib": attr.label(default = "//:stdlib"), "_allowlist_function_transition": attr.label( default = "@bazel_tools//tools/allowlists/function_transition_allowlist", diff --git a/go/tools/builders/BUILD.bazel b/go/tools/builders/BUILD.bazel index 6e6bded6b3..0c1b972ee6 100644 --- a/go/tools/builders/BUILD.bazel +++ b/go/tools/builders/BUILD.bazel @@ -35,12 +35,12 @@ go_test( name = "nogo_change_test", size = "small", srcs = [ - 
"difflib.go", "nogo_change.go", "nogo_change_test.go", ], deps = [ "@org_golang_x_tools//go/analysis", + "@com_github_pmezard_go_difflib//difflib:go_default_library", ], ) @@ -118,7 +118,6 @@ go_source( name = "nogo_srcs", srcs = [ "constants.go", - "difflib.go", "env.go", "flags.go", "nogo_change.go", diff --git a/go/tools/builders/difflib.go b/go/tools/builders/difflib.go deleted file mode 100644 index 8fbda0e5cf..0000000000 --- a/go/tools/builders/difflib.go +++ /dev/null @@ -1,792 +0,0 @@ -/* - * Copyright (c) 2013, Patrick Mezard - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are - * met: - * - * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright - * notice, this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * The names of its contributors may not be used to endorse or promote - * products derived from this software without specific prior written - * permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS - * IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED - * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - * PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT - * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED - * TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - -// This file is copied from https://github.com/pmezard/go-difflib, under the permission of the above copyright. - -package main - -import ( - "bufio" - "bytes" - "fmt" - "io" - "strings" -) - -func min(a, b int) int { - if a < b { - return a - } - return b -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} - -func calculateRatio(matches, length int) float64 { - if length > 0 { - return 2.0 * float64(matches) / float64(length) - } - return 1.0 -} - -type Match struct { - A int - B int - Size int -} - -type OpCode struct { - Tag byte - I1 int - I2 int - J1 int - J2 int -} - -// SequenceMatcher compares sequence of strings. The basic -// algorithm predates, and is a little fancier than, an algorithm -// published in the late 1980's by Ratcliff and Obershelp under the -// hyperbolic name "gestalt pattern matching". The basic idea is to find -// the longest contiguous matching subsequence that contains no "junk" -// elements (R-O doesn't address junk). The same idea is then applied -// recursively to the pieces of the sequences to the left and to the right -// of the matching subsequence. This does not yield minimal edit -// sequences, but does tend to yield matches that "look right" to people. -// -// SequenceMatcher tries to compute a "human-friendly diff" between two -// sequences. Unlike e.g. UNIX(tm) diff, the fundamental notion is the -// longest *contiguous* & junk-free matching subsequence. 
That's what -// catches peoples' eyes. The Windows(tm) windiff has another interesting -// notion, pairing up elements that appear uniquely in each sequence. -// That, and the method here, appear to yield more intuitive difference -// reports than does diff. This method appears to be the least vulnerable -// to synching up on blocks of "junk lines", though (like blank lines in -// ordinary text files, or maybe "

" lines in HTML files). That may be -// because this is the only method of the 3 that has a *concept* of -// "junk" . -// -// Timing: Basic R-O is cubic time worst case and quadratic time expected -// case. SequenceMatcher is quadratic time for the worst case and has -// expected-case behavior dependent in a complicated way on how many -// elements the sequences have in common; best case time is linear. -type SequenceMatcher struct { - a []string - b []string - b2j map[string][]int - IsJunk func(string) bool - autoJunk bool - bJunk map[string]struct{} - matchingBlocks []Match - fullBCount map[string]int - bPopular map[string]struct{} - opCodes []OpCode -} - -func NewMatcher(a, b []string) *SequenceMatcher { - m := SequenceMatcher{autoJunk: true} - m.SetSeqs(a, b) - return &m -} - -func NewMatcherWithJunk(a, b []string, autoJunk bool, - isJunk func(string) bool) *SequenceMatcher { - - m := SequenceMatcher{IsJunk: isJunk, autoJunk: autoJunk} - m.SetSeqs(a, b) - return &m -} - -// Set two sequences to be compared. -func (m *SequenceMatcher) SetSeqs(a, b []string) { - m.SetSeq1(a) - m.SetSeq2(b) -} - -// Set the first sequence to be compared. The second sequence to be compared is -// not changed. -// -// SequenceMatcher computes and caches detailed information about the second -// sequence, so if you want to compare one sequence S against many sequences, -// use .SetSeq2(s) once and call .SetSeq1(x) repeatedly for each of the other -// sequences. -// -// See also SetSeqs() and SetSeq2(). -func (m *SequenceMatcher) SetSeq1(a []string) { - if &a == &m.a { - return - } - m.a = a - m.matchingBlocks = nil - m.opCodes = nil -} - -// Set the second sequence to be compared. The first sequence to be compared is -// not changed. 
-func (m *SequenceMatcher) SetSeq2(b []string) { - if &b == &m.b { - return - } - m.b = b - m.matchingBlocks = nil - m.opCodes = nil - m.fullBCount = nil - m.chainB() -} - -func (m *SequenceMatcher) chainB() { - // Populate line -> index mapping - b2j := map[string][]int{} - for i, s := range m.b { - indices := b2j[s] - indices = append(indices, i) - b2j[s] = indices - } - - // Purge junk elements - m.bJunk = map[string]struct{}{} - if m.IsJunk != nil { - junk := m.bJunk - for s, _ := range b2j { - if m.IsJunk(s) { - junk[s] = struct{}{} - } - } - for s, _ := range junk { - delete(b2j, s) - } - } - - // Purge remaining popular elements - popular := map[string]struct{}{} - n := len(m.b) - if m.autoJunk && n >= 200 { - ntest := n/100 + 1 - for s, indices := range b2j { - if len(indices) > ntest { - popular[s] = struct{}{} - } - } - for s, _ := range popular { - delete(b2j, s) - } - } - m.bPopular = popular - m.b2j = b2j -} - -func (m *SequenceMatcher) isBJunk(s string) bool { - _, ok := m.bJunk[s] - return ok -} - -// Find longest matching block in a[alo:ahi] and b[blo:bhi]. -// -// If IsJunk is not defined: -// -// Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where -// -// alo <= i <= i+k <= ahi -// blo <= j <= j+k <= bhi -// -// and for all (i',j',k') meeting those conditions, -// -// k >= k' -// i <= i' -// and if i == i', j <= j' -// -// In other words, of all maximal matching blocks, return one that -// starts earliest in a, and of all those maximal matching blocks that -// start earliest in a, return the one that starts earliest in b. -// -// If IsJunk is defined, first the longest matching block is -// determined as above, but with the additional restriction that no -// junk element appears in the block. Then that block is extended as -// far as possible by matching (only) junk elements on both sides. So -// the resulting block never matches on junk except as identical junk -// happens to be adjacent to an "interesting" match. 
-// -// If no blocks match, return (alo, blo, 0). -func (m *SequenceMatcher) findLongestMatch(alo, ahi, blo, bhi int) Match { - // CAUTION: stripping common prefix or suffix would be incorrect. - // E.g., - // ab - // acab - // Longest matching block is "ab", but if common prefix is - // stripped, it's "a" (tied with "b"). UNIX(tm) diff does so - // strip, so ends up claiming that ab is changed to acab by - // inserting "ca" in the middle. That's minimal but unintuitive: - // "it's obvious" that someone inserted "ac" at the front. - // Windiff ends up at the same place as diff, but by pairing up - // the unique 'b's and then matching the first two 'a's. - besti, bestj, bestsize := alo, blo, 0 - - // find longest junk-free match - // during an iteration of the loop, j2len[j] = length of longest - // junk-free match ending with a[i-1] and b[j] - j2len := map[int]int{} - for i := alo; i != ahi; i++ { - // look at all instances of a[i] in b; note that because - // b2j has no junk keys, the loop is skipped if a[i] is junk - newj2len := map[int]int{} - for _, j := range m.b2j[m.a[i]] { - // a[i] matches b[j] - if j < blo { - continue - } - if j >= bhi { - break - } - k := j2len[j-1] + 1 - newj2len[j] = k - if k > bestsize { - besti, bestj, bestsize = i-k+1, j-k+1, k - } - } - j2len = newj2len - } - - // Extend the best by non-junk elements on each end. In particular, - // "popular" non-junk elements aren't in b2j, which greatly speeds - // the inner loop above, but also means "the best" match so far - // doesn't contain any junk *or* popular non-junk elements. 
- for besti > alo && bestj > blo && !m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - !m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize += 1 - } - - // Now that we have a wholly interesting match (albeit possibly - // empty!), we may as well suck up the matching junk on each - // side of it too. Can't think of a good reason not to, and it - // saves post-processing the (possibly considerable) expense of - // figuring out what to do with it. In the case of an empty - // interesting match, this is clearly the right thing to do, - // because no other kind of match is possible in the regions. - for besti > alo && bestj > blo && m.isBJunk(m.b[bestj-1]) && - m.a[besti-1] == m.b[bestj-1] { - besti, bestj, bestsize = besti-1, bestj-1, bestsize+1 - } - for besti+bestsize < ahi && bestj+bestsize < bhi && - m.isBJunk(m.b[bestj+bestsize]) && - m.a[besti+bestsize] == m.b[bestj+bestsize] { - bestsize += 1 - } - - return Match{A: besti, B: bestj, Size: bestsize} -} - -// Return list of triples describing matching subsequences. -// -// Each triple is of the form (i, j, n), and means that -// a[i:i+n] == b[j:j+n]. The triples are monotonically increasing in -// i and in j. It's also guaranteed that if (i, j, n) and (i', j', n') are -// adjacent triples in the list, and the second is not the last triple in the -// list, then i+n != i' or j+n != j'. IOW, adjacent triples never describe -// adjacent equal blocks. -// -// The last triple is a dummy, (len(a), len(b), 0), and is the only -// triple with n==0. 
-func (m *SequenceMatcher) GetMatchingBlocks() []Match { - if m.matchingBlocks != nil { - return m.matchingBlocks - } - - var matchBlocks func(alo, ahi, blo, bhi int, matched []Match) []Match - matchBlocks = func(alo, ahi, blo, bhi int, matched []Match) []Match { - match := m.findLongestMatch(alo, ahi, blo, bhi) - i, j, k := match.A, match.B, match.Size - if match.Size > 0 { - if alo < i && blo < j { - matched = matchBlocks(alo, i, blo, j, matched) - } - matched = append(matched, match) - if i+k < ahi && j+k < bhi { - matched = matchBlocks(i+k, ahi, j+k, bhi, matched) - } - } - return matched - } - matched := matchBlocks(0, len(m.a), 0, len(m.b), nil) - - // It's possible that we have adjacent equal blocks in the - // matching_blocks list now. - nonAdjacent := []Match{} - i1, j1, k1 := 0, 0, 0 - for _, b := range matched { - // Is this block adjacent to i1, j1, k1? - i2, j2, k2 := b.A, b.B, b.Size - if i1+k1 == i2 && j1+k1 == j2 { - // Yes, so collapse them -- this just increases the length of - // the first block by the length of the second, and the first - // block so lengthened remains the block to compare against. - k1 += k2 - } else { - // Not adjacent. Remember the first block (k1==0 means it's - // the dummy we started with), and make the second block the - // new block to compare against. - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - i1, j1, k1 = i2, j2, k2 - } - } - if k1 > 0 { - nonAdjacent = append(nonAdjacent, Match{i1, j1, k1}) - } - - nonAdjacent = append(nonAdjacent, Match{len(m.a), len(m.b), 0}) - m.matchingBlocks = nonAdjacent - return m.matchingBlocks -} - -// Return list of 5-tuples describing how to turn a into b. -// -// Each tuple is of the form (tag, i1, i2, j1, j2). The first tuple -// has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the -// tuple preceding it, and likewise for j1 == the previous j2. 
-// -// The tags are characters, with these meanings: -// -// 'r' (replace): a[i1:i2] should be replaced by b[j1:j2] -// -// 'd' (delete): a[i1:i2] should be deleted, j1==j2 in this case. -// -// 'i' (insert): b[j1:j2] should be inserted at a[i1:i1], i1==i2 in this case. -// -// 'e' (equal): a[i1:i2] == b[j1:j2] -func (m *SequenceMatcher) GetOpCodes() []OpCode { - if m.opCodes != nil { - return m.opCodes - } - i, j := 0, 0 - matching := m.GetMatchingBlocks() - opCodes := make([]OpCode, 0, len(matching)) - for _, m := range matching { - // invariant: we've pumped out correct diffs to change - // a[:i] into b[:j], and the next matching block is - // a[ai:ai+size] == b[bj:bj+size]. So we need to pump - // out a diff to change a[i:ai] into b[j:bj], pump out - // the matching block, and move (i,j) beyond the match - ai, bj, size := m.A, m.B, m.Size - tag := byte(0) - if i < ai && j < bj { - tag = 'r' - } else if i < ai { - tag = 'd' - } else if j < bj { - tag = 'i' - } - if tag > 0 { - opCodes = append(opCodes, OpCode{tag, i, ai, j, bj}) - } - i, j = ai+size, bj+size - // the list of matching blocks is terminated by a - // sentinel with size 0 - if size > 0 { - opCodes = append(opCodes, OpCode{'e', ai, i, bj, j}) - } - } - m.opCodes = opCodes - return m.opCodes -} - -// Isolate change clusters by eliminating ranges with no changes. -// -// Return a generator of groups with up to n lines of context. -// Each group is in the same format as returned by GetOpCodes(). -func (m *SequenceMatcher) GetGroupedOpCodes(n int) [][]OpCode { - if n < 0 { - n = 3 - } - codes := m.GetOpCodes() - if len(codes) == 0 { - codes = []OpCode{OpCode{'e', 0, 1, 0, 1}} - } - // Fixup leading and trailing groups if they show no changes. 
- if codes[0].Tag == 'e' { - c := codes[0] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[0] = OpCode{c.Tag, max(i1, i2-n), i2, max(j1, j2-n), j2} - } - if codes[len(codes)-1].Tag == 'e' { - c := codes[len(codes)-1] - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - codes[len(codes)-1] = OpCode{c.Tag, i1, min(i2, i1+n), j1, min(j2, j1+n)} - } - nn := n + n - groups := [][]OpCode{} - group := []OpCode{} - for _, c := range codes { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - // End the current group and start a new one whenever - // there is a large range with no changes. - if c.Tag == 'e' && i2-i1 > nn { - group = append(group, OpCode{c.Tag, i1, min(i2, i1+n), - j1, min(j2, j1+n)}) - groups = append(groups, group) - group = []OpCode{} - i1, j1 = max(i1, i2-n), max(j1, j2-n) - } - group = append(group, OpCode{c.Tag, i1, i2, j1, j2}) - } - if len(group) > 0 && !(len(group) == 1 && group[0].Tag == 'e') { - groups = append(groups, group) - } - return groups -} - -// Return a measure of the sequences' similarity (float in [0,1]). -// -// Where T is the total number of elements in both sequences, and -// M is the number of matches, this is 2.0*M / T. -// Note that this is 1 if the sequences are identical, and 0 if -// they have nothing in common. -// -// .Ratio() is expensive to compute if you haven't already computed -// .GetMatchingBlocks() or .GetOpCodes(), in which case you may -// want to try .QuickRatio() or .RealQuickRation() first to get an -// upper bound. -func (m *SequenceMatcher) Ratio() float64 { - matches := 0 - for _, m := range m.GetMatchingBlocks() { - matches += m.Size - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() relatively quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute. 
-func (m *SequenceMatcher) QuickRatio() float64 { - // viewing a and b as multisets, set matches to the cardinality - // of their intersection; this counts the number of matches - // without regard to order, so is clearly an upper bound - if m.fullBCount == nil { - m.fullBCount = map[string]int{} - for _, s := range m.b { - m.fullBCount[s] = m.fullBCount[s] + 1 - } - } - - // avail[x] is the number of times x appears in 'b' less the - // number of times we've seen it in 'a' so far ... kinda - avail := map[string]int{} - matches := 0 - for _, s := range m.a { - n, ok := avail[s] - if !ok { - n = m.fullBCount[s] - } - avail[s] = n - 1 - if n > 0 { - matches += 1 - } - } - return calculateRatio(matches, len(m.a)+len(m.b)) -} - -// Return an upper bound on ratio() very quickly. -// -// This isn't defined beyond that it is an upper bound on .Ratio(), and -// is faster to compute than either .Ratio() or .QuickRatio(). -func (m *SequenceMatcher) RealQuickRatio() float64 { - la, lb := len(m.a), len(m.b) - return calculateRatio(min(la, lb), la+lb) -} - -// Convert range to the "ed" format -func formatRangeUnified(start, stop int) string { - // Per the diff spec at http://www.unix.org/single_unix_specification/ - beginning := start + 1 // lines start numbering with one - length := stop - start - if length == 1 { - return fmt.Sprintf("%d", beginning) - } - if length == 0 { - beginning -= 1 // empty ranges begin at line just before the range - } - return fmt.Sprintf("%d,%d", beginning, length) -} - -// Unified diff parameters -type UnifiedDiff struct { - A []string // First sequence lines - FromFile string // First file name - FromDate string // First file time - B []string // Second sequence lines - ToFile string // Second file name - ToDate string // Second file time - Eol string // Headers end of line, defaults to LF - Context int // Number of context lines -} - -// Compare two sequences of lines; generate the delta as a unified diff. 
-// -// Unified diffs are a compact way of showing line changes and a few -// lines of context. The number of context lines is set by 'n' which -// defaults to three. -// -// By default, the diff control lines (those with ---, +++, or @@) are -// created with a trailing newline. This is helpful so that inputs -// created from file.readlines() result in diffs that are suitable for -// file.writelines() since both the inputs and outputs have trailing -// newlines. -// -// For inputs that do not have trailing newlines, set the lineterm -// argument to "" so that the output will be uniformly newline free. -// -// The unidiff format normally has a header for filenames and modification -// times. Any or all of these may be specified using strings for -// 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'. -// The modification times are normally expressed in the ISO 8601 format. -func WriteUnifiedDiff(writer io.Writer, diff UnifiedDiff) error { - buf := bufio.NewWriter(writer) - defer buf.Flush() - wf := func(format string, args ...interface{}) error { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) - return err - } - ws := func(s string) error { - _, err := buf.WriteString(s) - return err - } - - if len(diff.Eol) == 0 { - diff.Eol = "\n" - } - - started := false - m := NewMatcher(diff.A, diff.B) - for _, g := range m.GetGroupedOpCodes(diff.Context) { - if !started { - started = true - fromDate := "" - if len(diff.FromDate) > 0 { - fromDate = "\t" + diff.FromDate - } - toDate := "" - if len(diff.ToDate) > 0 { - toDate = "\t" + diff.ToDate - } - if diff.FromFile != "" || diff.ToFile != "" { - err := wf("--- %s%s%s", diff.FromFile, fromDate, diff.Eol) - if err != nil { - return err - } - err = wf("+++ %s%s%s", diff.ToFile, toDate, diff.Eol) - if err != nil { - return err - } - } - } - first, last := g[0], g[len(g)-1] - range1 := formatRangeUnified(first.I1, last.I2) - range2 := formatRangeUnified(first.J1, last.J2) - if err := wf("@@ -%s +%s @@%s", range1, 
range2, diff.Eol); err != nil { - return err - } - for _, c := range g { - i1, i2, j1, j2 := c.I1, c.I2, c.J1, c.J2 - if c.Tag == 'e' { - for _, line := range diff.A[i1:i2] { - if err := ws(" " + line); err != nil { - return err - } - } - continue - } - if c.Tag == 'r' || c.Tag == 'd' { - for _, line := range diff.A[i1:i2] { - if err := ws("-" + line); err != nil { - return err - } - } - } - if c.Tag == 'r' || c.Tag == 'i' { - for _, line := range diff.B[j1:j2] { - if err := ws("+" + line); err != nil { - return err - } - } - } - } - } - return nil -} - -// Like WriteUnifiedDiff but returns the diff a string. -func GetUnifiedDiffString(diff UnifiedDiff) (string, error) { - w := &bytes.Buffer{} - err := WriteUnifiedDiff(w, diff) - return string(w.Bytes()), err -} - -// Convert range to the "ed" format. -func formatRangeContext(start, stop int) string { - // Per the diff spec at http://www.unix.org/single_unix_specification/ - beginning := start + 1 // lines start numbering with one - length := stop - start - if length == 0 { - beginning -= 1 // empty ranges begin at line just before the range - } - if length <= 1 { - return fmt.Sprintf("%d", beginning) - } - return fmt.Sprintf("%d,%d", beginning, beginning+length-1) -} - -type ContextDiff UnifiedDiff - -// Compare two sequences of lines; generate the delta as a context diff. -// -// Context diffs are a compact way of showing line changes and a few -// lines of context. The number of context lines is set by diff.Context -// which defaults to three. -// -// By default, the diff control lines (those with *** or ---) are -// created with a trailing newline. -// -// For inputs that do not have trailing newlines, set the diff.Eol -// argument to "" so that the output will be uniformly newline free. -// -// The context diff format normally has a header for filenames and -// modification times. Any or all of these may be specified using -// strings for diff.FromFile, diff.ToFile, diff.FromDate, diff.ToDate. 
-// The modification times are normally expressed in the ISO 8601 format. -// If not specified, the strings default to blanks. -func WriteContextDiff(writer io.Writer, diff ContextDiff) error { - buf := bufio.NewWriter(writer) - defer buf.Flush() - var diffErr error - wf := func(format string, args ...interface{}) { - _, err := buf.WriteString(fmt.Sprintf(format, args...)) - if diffErr == nil && err != nil { - diffErr = err - } - } - ws := func(s string) { - _, err := buf.WriteString(s) - if diffErr == nil && err != nil { - diffErr = err - } - } - - if len(diff.Eol) == 0 { - diff.Eol = "\n" - } - - prefix := map[byte]string{ - 'i': "+ ", - 'd': "- ", - 'r': "! ", - 'e': " ", - } - - started := false - m := NewMatcher(diff.A, diff.B) - for _, g := range m.GetGroupedOpCodes(diff.Context) { - if !started { - started = true - fromDate := "" - if len(diff.FromDate) > 0 { - fromDate = "\t" + diff.FromDate - } - toDate := "" - if len(diff.ToDate) > 0 { - toDate = "\t" + diff.ToDate - } - if diff.FromFile != "" || diff.ToFile != "" { - wf("*** %s%s%s", diff.FromFile, fromDate, diff.Eol) - wf("--- %s%s%s", diff.ToFile, toDate, diff.Eol) - } - } - - first, last := g[0], g[len(g)-1] - ws("***************" + diff.Eol) - - range1 := formatRangeContext(first.I1, last.I2) - wf("*** %s ****%s", range1, diff.Eol) - for _, c := range g { - if c.Tag == 'r' || c.Tag == 'd' { - for _, cc := range g { - if cc.Tag == 'i' { - continue - } - for _, line := range diff.A[cc.I1:cc.I2] { - ws(prefix[cc.Tag] + line) - } - } - break - } - } - - range2 := formatRangeContext(first.J1, last.J2) - wf("--- %s ----%s", range2, diff.Eol) - for _, c := range g { - if c.Tag == 'r' || c.Tag == 'i' { - for _, cc := range g { - if cc.Tag == 'd' { - continue - } - for _, line := range diff.B[cc.J1:cc.J2] { - ws(prefix[cc.Tag] + line) - } - } - break - } - } - } - return diffErr -} - -// Like WriteContextDiff but returns the diff a string. 
-func GetContextDiffString(diff ContextDiff) (string, error) { - w := &bytes.Buffer{} - err := WriteContextDiff(w, diff) - return string(w.Bytes()), err -} - -// Split a string on "\n" while preserving them. The output can be used -// as input for UnifiedDiff and ContextDiff structures. -func SplitLines(s string) []string { - lines := strings.SplitAfter(s, "\n") - lines[len(lines)-1] += "\n" - return lines -} diff --git a/go/tools/builders/nogo_change.go b/go/tools/builders/nogo_change.go index a04389696f..44af6a7f7c 100644 --- a/go/tools/builders/nogo_change.go +++ b/go/tools/builders/nogo_change.go @@ -8,6 +8,7 @@ import ( "sort" "strings" + "github.com/pmezard/go-difflib/difflib" "golang.org/x/tools/go/analysis" ) @@ -296,15 +297,15 @@ func ToCombinedPatch(fileToEdits map[string][]Edit) (string, error) { return "", fmt.Errorf("failed to apply edits for file %s: %v", filePath, err) } - diff := UnifiedDiff{ - A: trimWhitespaceHeadAndTail(SplitLines(string(contents))), - B: trimWhitespaceHeadAndTail(SplitLines(string(out))), + diff := difflib.UnifiedDiff{ + A: trimWhitespaceHeadAndTail(difflib.SplitLines(string(contents))), + B: trimWhitespaceHeadAndTail(difflib.SplitLines(string(out))), FromFile: fmt.Sprintf("a/%s", filePath), ToFile: fmt.Sprintf("b/%s", filePath), Context: 3, } - patch, err := GetUnifiedDiffString(diff) + patch, err := difflib.GetUnifiedDiffString(diff) if err != nil { return "", fmt.Errorf("failed to generate patch for file %s: %v", filePath, err) } From 85798129f26655b7a150e223f7c6af1740885655 Mon Sep 17 00:00:00 2001 From: peng3141 Date: Thu, 19 Dec 2024 15:43:35 +0000 Subject: [PATCH 06/12] 12/18: address comments --- go/private/rules/binary.bzl | 2 - go/private/rules/library.bzl | 2 - go/private/rules/test.bzl | 2 - go/tools/builders/BUILD.bazel | 2 +- go/tools/builders/nogo_change.go | 225 +++++++++-------- go/tools/builders/nogo_change_test.go | 331 +++++++++++++------------- go/tools/builders/nogo_main.go | 12 +- 
go/tools/builders/nogo_validation.go | 6 +- 8 files changed, 287 insertions(+), 295 deletions(-) diff --git a/go/private/rules/binary.bzl b/go/private/rules/binary.bzl index e99141a6af..4d72f9bc21 100644 --- a/go/private/rules/binary.bzl +++ b/go/private/rules/binary.bzl @@ -157,8 +157,6 @@ def _go_binary_impl(ctx): nogo_validation_outputs = [] if validation_output: nogo_validation_outputs.append(validation_output) - if nogo_fix_output: - nogo_validation_outputs.append(nogo_fix_output) providers = [ archive, diff --git a/go/private/rules/library.bzl b/go/private/rules/library.bzl index 9a36562be3..501498bb05 100644 --- a/go/private/rules/library.bzl +++ b/go/private/rules/library.bzl @@ -54,8 +54,6 @@ def _go_library_impl(ctx): nogo_validation_outputs = [] if validation_output: nogo_validation_outputs.append(validation_output) - if nogo_fix_output: - nogo_validation_outputs.append(nogo_fix_output) return [ go_info, diff --git a/go/private/rules/test.bzl b/go/private/rules/test.bzl index e3c13d518f..55c207cf23 100644 --- a/go/private/rules/test.bzl +++ b/go/private/rules/test.bzl @@ -81,7 +81,6 @@ def _go_test_impl(ctx): if internal_archive.data._validation_output: validation_outputs.append(internal_archive.data._validation_output) if internal_archive.data._nogo_fix_output: - validation_outputs.append(internal_archive.data._nogo_fix_output) nogo_fix_outputs.append(internal_archive.data._nogo_fix_output) go_srcs = [src for src in internal_go_info.srcs if src.extension == "go"] @@ -106,7 +105,6 @@ def _go_test_impl(ctx): if external_archive.data._nogo_fix_output: # internal vs external archive refers to the same package vs separate package. # we include the nogo fixes for transitive dependency too. 
- validation_outputs.append(external_archive.data._nogo_fix_output) nogo_fix_outputs.append(external_archive.data._nogo_fix_output) # now generate the main function diff --git a/go/tools/builders/BUILD.bazel b/go/tools/builders/BUILD.bazel index 0c1b972ee6..28a97b5ec1 100644 --- a/go/tools/builders/BUILD.bazel +++ b/go/tools/builders/BUILD.bazel @@ -39,8 +39,8 @@ go_test( "nogo_change_test.go", ], deps = [ - "@org_golang_x_tools//go/analysis", "@com_github_pmezard_go_difflib//difflib:go_default_library", + "@org_golang_x_tools//go/analysis", ], ) diff --git a/go/tools/builders/nogo_change.go b/go/tools/builders/nogo_change.go index 44af6a7f7c..423710a99b 100644 --- a/go/tools/builders/nogo_change.go +++ b/go/tools/builders/nogo_change.go @@ -1,6 +1,7 @@ package main import ( + "bytes" "fmt" "go/token" "os" @@ -18,49 +19,48 @@ type diagnosticEntry struct { *analysis.Analyzer } -// This file contains two main entities: Edit and Change, which correspond to the low-level +// This file contains two main entities: NogoEdit and NogoChange, which correspond to the low-level // and high-level abstractions. See them below. -// The following is about the `Edit`, a low-level abstraction of edits. -// An Edit describes the replacement of a portion of a text file. -type Edit struct { - New string `json:"new"` // the replacement - Start int `json:"start"` // starting byte offset of the region to replace - End int `json:"end"` // (exclusive) ending byte offset of the region to replace +// The following is about the `NogoEdit`, a low-level abstraction of edits. +// A NogoEdit describes the replacement of a portion of a text file. +type NogoEdit struct { + New string // the replacement + Start int // starting byte offset of the region to replace + End int // (exclusive) ending byte offset of the region to replace } - -// FileEdits represents the mapping of analyzers to their edits for a specific file. 
-type FileEdits struct { - AnalyzerToEdits map[string][]Edit `json:"analyzer_to_edits"` // Analyzer as the key, edits as the value +// NogoFileEdits represents the mapping of analyzers to their edits for a specific file. +type NogoFileEdits struct { + AnalyzerToEdits map[string][]NogoEdit // Analyzer as the key, edits as the value } -// Change represents a collection of file edits. -type Change struct { - FileToEdits map[string]FileEdits `json:"file_to_edits"` // File path as the key, analyzer-to-edits mapping as the value +// NogoChange represents a collection of file edits. +type NogoChange struct { + FileToEdits map[string]NogoFileEdits // File path as the key, analyzer-to-edits mapping as the value } -// NewChange creates a new Change object. -func NewChange() *Change { - return &Change{ - FileToEdits: make(map[string]FileEdits), +// newChange creates a new NogoChange object. +func newChange() *NogoChange { + return &NogoChange{ + FileToEdits: make(map[string]NogoFileEdits), } } -func (e Edit) String() string { +func (e NogoEdit) String() string { return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New) } -// SortEdits orders a slice of Edits by (start, end) offset. +// sortEdits orders a slice of NogoEdits by (start, end) offset. // This ordering puts insertions (end = start) before deletions // (end > start) at the same point, but uses a stable sort to preserve // the order of multiple insertions at the same point. -// (Apply detects multiple deletions at the same point as an error.) -func SortEdits(edits []Edit) { +// (applyEditsBytes detects multiple deletions at the same point as an error.) 
+func sortEdits(edits []NogoEdit) { sort.Stable(editsSort(edits)) } -type editsSort []Edit +type editsSort []NogoEdit func (a editsSort) Len() int { return len(a) } func (a editsSort) Less(i, j int) bool { @@ -71,38 +71,35 @@ func (a editsSort) Less(i, j int) bool { } func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -// UniqueEdits returns a list of edits that is sorted and -// contains no duplicate edits. Returns the index of some -// overlapping adjacent edits if there is one and <0 if the -// edits are valid. -// Deduplication helps in the cases where two analyzers produce duplicate edits. -func UniqueEdits(edits []Edit) ([]Edit, int) { - if len(edits) == 0 { - return nil, -1 - } - equivalent := func(x, y Edit) bool { - return x.Start == y.Start && x.End == y.End && x.New == y.New + +// validateBytes checks that edits are consistent with the src byte slice, +// and returns the size of the patched output. It may return a different slice if edits are sorted. +func validateBytes(src []byte, edits []NogoEdit) ([]NogoEdit, int, error) { + if !sort.IsSorted(editsSort(edits)) { + edits = append([]NogoEdit(nil), edits...) 
+ sortEdits(edits) } - SortEdits(edits) - unique := []Edit{edits[0]} - invalid := -1 - for i := 1; i < len(edits); i++ { - prev, cur := edits[i-1], edits[i] - if !equivalent(prev, cur) { - unique = append(unique, cur) - if prev.End > cur.Start { - invalid = i - } + + size := len(src) + lastEnd := 0 + for _, edit := range edits { + if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) { + return nil, 0, fmt.Errorf("the fix has an out-of-bounds edit with start=%d, end=%d", edit.Start, edit.End) + } + if edit.Start < lastEnd { + return nil, 0, fmt.Errorf("the fix has an edit with start=%d, which overlaps with a previous edit with end=%d", edit.Start, lastEnd) } + size += len(edit.New) + edit.Start - edit.End + lastEnd = edit.End } - return unique, invalid + + return edits, size, nil } -// ApplyEditsBytes applies a sequence of edits to the src byte slice and returns the result. +// applyEditsBytes applies a sequence of NogoEdits to the src byte slice and returns the result. // Edits are applied in order of start offset; edits with the same start offset are applied in the order they were provided. -// ApplyEditsBytes returns an error if any edit is out of bounds, or if any pair of edits is overlapping. -func ApplyEditsBytes(src []byte, edits []Edit) ([]byte, error) { - // Validate and compute the output size based on the edits. +// applyEditsBytes returns an error if any edit is out of bounds, or if any pair of edits is overlapping. +func applyEditsBytes(src []byte, edits []NogoEdit) ([]byte, error) { edits, size, err := validateBytes(src, edits) if err != nil { return nil, err @@ -121,44 +118,19 @@ func ApplyEditsBytes(src []byte, edits []Edit) ([]byte, error) { out = append(out, src[lastEnd:]...) 
if len(out) != size {
-		panic("wrong size")
+		return nil, fmt.Errorf("applyEditsBytes: unexpected output size, got %d, want %d", len(out), size)
 	}
 	return out, nil
 }
 
-// validateBytes checks that edits are consistent with the src byte slice,
-// and returns the size of the patched output. It may return a different slice if edits are sorted.
-func validateBytes(src []byte, edits []Edit) ([]Edit, int, error) {
-	if !sort.IsSorted(editsSort(edits)) {
-		edits = append([]Edit(nil), edits...)
-		SortEdits(edits)
-	}
-
-	// Check validity of edits and compute final size.
-	size := len(src)
-	lastEnd := 0
-	for _, edit := range edits {
-		if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) {
-			return nil, 0, fmt.Errorf("diff has out-of-bounds edits")
-		}
-		if edit.Start < lastEnd {
-			return nil, 0, fmt.Errorf("diff has overlapping edits")
-		}
-		size += len(edit.New) + edit.Start - edit.End
-		lastEnd = edit.End
-	}
-
-	return edits, size, nil
-}
-
-// NewChangeFromDiagnostics builds a Change from a set of diagnostics.
-// Unlike Diagnostic, Change is independent of the FileSet given it uses perf-file offsets instead of token.Pos.
-// This allows Change to be used in contexts where the FileSet is not available, e.g., it remains applicable after it is saved to disk and loaded back.
+// newChangeFromDiagnostics builds a NogoChange from a set of diagnostics.
+// Unlike Diagnostic, NogoChange is independent of the FileSet given it uses per-file offsets instead of token.Pos.
+// This allows NogoChange to be used in contexts where the FileSet is not available, e.g., it remains applicable after it is saved to disk and loaded back.
 // See https://github.com/golang/tools/blob/master/go/analysis/diagnostic.go for details.
-func NewChangeFromDiagnostics(entries []diagnosticEntry, fileSet *token.FileSet) (*Change, error) { - c := NewChange() +func newChangeFromDiagnostics(entries []diagnosticEntry, fileSet *token.FileSet) (*NogoChange, error) { + c := newChange() cwd, err := os.Getwd() if err != nil { @@ -190,30 +162,37 @@ func NewChangeFromDiagnostics(entries []diagnosticEntry, fileSet *token.FileSet) continue } - edit := Edit{Start: file.Offset(start), End: file.Offset(end), New: string(edit.NewText)} + nogoEdit := NogoEdit{Start: file.Offset(start), End: file.Offset(end), New: string(edit.NewText)} fileRelativePath, err := filepath.Rel(cwd, file.Name()) if err != nil { fileRelativePath = file.Name() // fallback logic } - c.AddEdit(fileRelativePath, analyzer, edit) + c.addEdit(fileRelativePath, analyzer, nogoEdit) } } } if len(allErrors) > 0 { - return c, fmt.Errorf("errors: %v", allErrors) + var errMsg bytes.Buffer + sep := "" + for _, err := range allErrors { + errMsg.WriteString(sep) + sep = "\n" + errMsg.WriteString(err.Error()) + } + return c, fmt.Errorf("errors:\n%s", errMsg.String()) } + return c, nil } - -// AddEdit adds an edit to the Change, organizing by file and analyzer. -func (c *Change) AddEdit(file string, analyzer string, edit Edit) { - // Ensure the FileEdits structure exists for the file +// addEdit adds an edit to the NogoChange, organizing by file and analyzer. 
+func (c *NogoChange) addEdit(file string, analyzer string, edit NogoEdit) { + // Ensure the NogoFileEdits structure exists for the file fileEdits, exists := c.FileToEdits[file] if !exists { - fileEdits = FileEdits{ - AnalyzerToEdits: make(map[string][]Edit), + fileEdits = NogoFileEdits{ + AnalyzerToEdits: make(map[string][]NogoEdit), } c.FileToEdits[file] = fileEdits } @@ -222,12 +201,36 @@ func (c *Change) AddEdit(file string, analyzer string, edit Edit) { fileEdits.AnalyzerToEdits[analyzer] = append(fileEdits.AnalyzerToEdits[analyzer], edit) } +// uniqueSortedEdits returns a list of edits that is sorted and +// contains no duplicate edits. Returns whether there is overlap. +// Deduplication helps in the cases where two analyzers produce duplicate edits. +func uniqueSortedEdits(edits []NogoEdit) ([]NogoEdit, bool) { + hasOverlap := false + if len(edits) == 0 { + return edits, hasOverlap + } + equivalent := func(x, y NogoEdit) bool { + return x.Start == y.Start && x.End == y.End && x.New == y.New + } + sortEdits(edits) + unique := []NogoEdit{edits[0]} + for i := 1; i < len(edits); i++ { + prev, cur := edits[i-1], edits[i] + if !equivalent(prev, cur) { // equivalent ones are safely skipped + unique = append(unique, cur) + if prev.End > cur.Start { + // hasOverlap = true means at least one overlap was detected. + hasOverlap = true + } + } + } + return unique, hasOverlap +} - -// Flatten merges all edits for a file from different analyzers into a single map of file-to-edits. +// flatten merges all edits for a file from different analyzers into a single map of file-to-edits. // Edits from each analyzer are processed in a deterministic order, and overlapping edits are skipped. 
-func Flatten(change Change) map[string][]Edit { - fileToEdits := make(map[string][]Edit) +func flatten(change NogoChange) map[string][]NogoEdit { + fileToEdits := make(map[string][]NogoEdit) for file, fileEdits := range change.FileToEdits { // Get a sorted list of analyzers for deterministic processing order @@ -237,22 +240,23 @@ func Flatten(change Change) map[string][]Edit { } sort.Strings(analyzers) - mergedEdits := make([]Edit, 0) + mergedEdits := make([]NogoEdit, 0) for _, analyzer := range analyzers { edits := fileEdits.AnalyzerToEdits[analyzer] - - // Deduplicate and sort edits for the current analyzer - edits, _ = UniqueEdits(edits) + if len(edits) == 0 { + continue + } // Merge edits into the current list, checking for overlaps candidateEdits := append(mergedEdits, edits...) - candidateEdits, invalidIndex := UniqueEdits(candidateEdits) - if invalidIndex >= 0 { + candidateEdits, hasOverlap := uniqueSortedEdits(candidateEdits) + if hasOverlap { // Skip edits from this analyzer if merging them would cause overlaps. // Apply the non-overlapping edits first. After that, a rerun of bazel build will // allows these skipped edits to be applied separately. // Note the resolution happens to each file independently. + // Also for clarity, we would accept all or none of an analyzer. continue } @@ -267,9 +271,8 @@ func Flatten(change Change) map[string][]Edit { return fileToEdits } - -// ToCombinedPatch converts all edits to a single consolidated patch. -func ToCombinedPatch(fileToEdits map[string][]Edit) (string, error) { +// toCombinedPatch converts all edits to a single consolidated patch. 
+func toCombinedPatch(fileToEdits map[string][]NogoEdit) (string, error) { var combinedPatch strings.Builder filePaths := make([]string, 0, len(fileToEdits)) @@ -280,19 +283,20 @@ func ToCombinedPatch(fileToEdits map[string][]Edit) (string, error) { // Iterate over sorted file paths for _, filePath := range filePaths { + // edits are unique and sorted, as ensured by the flatten() method that is invoked earlier. + // for performance reason, let us skip uniqueSortedEdits() call here, + // although in general a library API shall not assume other calls have been made. edits := fileToEdits[filePath] if len(edits) == 0 { continue } - // Ensure edits are unique and sorted - edits, _ = UniqueEdits(edits) contents, err := os.ReadFile(filePath) if err != nil { return "", fmt.Errorf("failed to read file %s: %v", filePath, err) } - out, err := ApplyEditsBytes(contents, edits) + out, err := applyEditsBytes(contents, edits) if err != nil { return "", fmt.Errorf("failed to apply edits for file %s: %v", filePath, err) } @@ -324,7 +328,6 @@ func ToCombinedPatch(fileToEdits map[string][]Edit) (string, error) { return result, nil } - func trimWhitespaceHeadAndTail(lines []string) []string { // Trim left for len(lines) > 0 && strings.TrimSpace(lines[0]) == "" { @@ -339,13 +342,3 @@ func trimWhitespaceHeadAndTail(lines []string) []string { return lines } - - -func SaveToFile(filename string, combinedPatch string) error { - err := os.WriteFile(filename, []byte(combinedPatch), 0644) - if err != nil { - return fmt.Errorf("error writing to file: %v", err) - } - - return nil -} diff --git a/go/tools/builders/nogo_change_test.go b/go/tools/builders/nogo_change_test.go index cfe202da6b..03bbcf2fd0 100644 --- a/go/tools/builders/nogo_change_test.go +++ b/go/tools/builders/nogo_change_test.go @@ -36,8 +36,8 @@ var ( ) // ApplyEdits() and validate() here provide the reference implementation for testing -// ApplyEditsBytes() from nogo_change.go -func ApplyEdits(src string, edits []Edit) (string, 
error) { +// applyEditsBytes() from the refactored nogo_change code, now using NogoEdit. +func ApplyEdits(src string, edits []NogoEdit) (string, error) { edits, size, err := validate(src, edits) if err != nil { return "", err @@ -56,16 +56,16 @@ func ApplyEdits(src string, edits []Edit) (string, error) { out = append(out, src[lastEnd:]...) if len(out) != size { - panic("wrong size") + return "", fmt.Errorf("applyEdits: unexpected output size, got %d, want %d", len(out), size) } return string(out), nil } -func validate(src string, edits []Edit) ([]Edit, int, error) { +func validate(src string, edits []NogoEdit) ([]NogoEdit, int, error) { if !sort.IsSorted(editsSort(edits)) { - edits = append([]Edit(nil), edits...) - SortEdits(edits) + edits = append([]NogoEdit(nil), edits...) + sortEdits(edits) } // Check validity of edits and compute final size. @@ -85,32 +85,32 @@ func validate(src string, edits []Edit) ([]Edit, int, error) { return edits, size, nil } -// TestAddEdit_MultipleAnalyzers tests AddEdit with multiple analyzers and files using reflect.DeepEqual +// TestAddEdit_MultipleAnalyzers tests addEdit with multiple analyzers and files using reflect.DeepEqual func TestAddEdit_MultipleAnalyzers(t *testing.T) { - change := NewChange() + change := newChange() file1 := "file1.go" - edit1a := Edit{Start: 10, End: 20, New: "code1 from analyzer1"} - edit1b := Edit{Start: 30, End: 40, New: "code2 from analyzer1"} - edit2a := Edit{Start: 50, End: 60, New: "code1 from analyzer2"} - edit2b := Edit{Start: 70, End: 80, New: "code2 from analyzer2"} + edit1a := NogoEdit{Start: 10, End: 20, New: "code1 from analyzer1"} + edit1b := NogoEdit{Start: 30, End: 40, New: "code2 from analyzer1"} + edit2a := NogoEdit{Start: 50, End: 60, New: "code1 from analyzer2"} + edit2b := NogoEdit{Start: 70, End: 80, New: "code2 from analyzer2"} - expected := map[string]FileEdits{ + expected := map[string]NogoFileEdits{ file1: { - AnalyzerToEdits: map[string][]Edit{ + AnalyzerToEdits: 
map[string][]NogoEdit{ analyzer1.Name: {edit1a, edit1b}, analyzer2.Name: {edit2a, edit2b}, }, }, } - change.AddEdit(file1, analyzer1.Name, edit1a) - change.AddEdit(file1, analyzer1.Name, edit1b) - change.AddEdit(file1, analyzer2.Name, edit2a) - change.AddEdit(file1, analyzer2.Name, edit2b) + change.addEdit(file1, analyzer1.Name, edit1a) + change.addEdit(file1, analyzer1.Name, edit1b) + change.addEdit(file1, analyzer2.Name, edit2a) + change.addEdit(file1, analyzer2.Name, edit2b) if !reflect.DeepEqual(change.FileToEdits, expected) { - t.Fatalf("Change.FileToEdits did not match the expected result.\nGot: %+v\nExpected: %+v", change.FileToEdits, expected) + t.Fatalf("NogoChange.FileToEdits did not match the expected result.\nGot: %+v\nExpected: %+v", change.FileToEdits, expected) } } @@ -123,7 +123,7 @@ func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { name string fileSet *token.FileSet diagnosticEntries []diagnosticEntry - expectedEdits map[string]FileEdits + expectedEdits map[string]NogoFileEdits }{ { name: "ValidEdits", @@ -142,9 +142,9 @@ func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { }, }, }, - expectedEdits: map[string]FileEdits{ + expectedEdits: map[string]NogoFileEdits{ "file1.go": { - AnalyzerToEdits: map[string][]Edit{ + AnalyzerToEdits: map[string][]NogoEdit{ "analyzer1": { {New: "new_text", Start: 4, End: 9}, // 0-based offset }, @@ -156,7 +156,7 @@ func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - change, err := NewChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet) + change, err := newChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet) if err != nil { t.Fatalf("expected no error, got: %v", err) } @@ -195,14 +195,13 @@ func TestNewChangeFromDiagnostics_ErrorCases(t *testing.T) { }, }, }, - expectedErr: "errors: [invalid fix: pos 15 > end 10]", + expectedErr: "errors:\ninvalid fix: pos 15 > end 10", }, } for _, tt := range tests { 
t.Run(tt.name, func(t *testing.T) { - _, err := NewChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet) - + _, err := newChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet) if err == nil { t.Fatalf("expected an error, got none") } @@ -217,17 +216,17 @@ func TestNewChangeFromDiagnostics_ErrorCases(t *testing.T) { func TestSortEdits(t *testing.T) { tests := []struct { name string - edits []Edit - sorted []Edit + edits []NogoEdit + sorted []NogoEdit }{ { name: "already sorted", - edits: []Edit{ + edits: []NogoEdit{ {New: "a", Start: 0, End: 1}, {New: "b", Start: 1, End: 2}, {New: "c", Start: 2, End: 3}, }, - sorted: []Edit{ + sorted: []NogoEdit{ {New: "a", Start: 0, End: 1}, {New: "b", Start: 1, End: 2}, {New: "c", Start: 2, End: 3}, @@ -235,12 +234,12 @@ func TestSortEdits(t *testing.T) { }, { name: "unsorted", - edits: []Edit{ + edits: []NogoEdit{ {New: "b", Start: 1, End: 2}, {New: "a", Start: 0, End: 1}, {New: "c", Start: 2, End: 3}, }, - sorted: []Edit{ + sorted: []NogoEdit{ {New: "a", Start: 0, End: 1}, {New: "b", Start: 1, End: 2}, {New: "c", Start: 2, End: 3}, @@ -248,11 +247,11 @@ func TestSortEdits(t *testing.T) { }, { name: "insert before delete at same position", - edits: []Edit{ + edits: []NogoEdit{ {New: "", Start: 0, End: 1}, // delete {New: "insert", Start: 0, End: 0}, // insert }, - sorted: []Edit{ + sorted: []NogoEdit{ {New: "insert", Start: 0, End: 0}, // insert comes before delete {New: "", Start: 0, End: 1}, }, @@ -261,7 +260,7 @@ func TestSortEdits(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - SortEdits(tt.edits) + sortEdits(tt.edits) if !reflect.DeepEqual(tt.edits, tt.sorted) { t.Fatalf("expected %v, got %v", tt.sorted, tt.edits) } @@ -269,124 +268,125 @@ func TestSortEdits(t *testing.T) { } } -// Put these test cases as the global variable so that indentation is simpler. 
+// TestCases uses NogoEdit now instead of Edit var TestCases = []struct { Name, In, Out, Unified string - Edits, LineEdits []Edit // expectation (LineEdits=nil => already line-aligned) + Edits, LineEdits []NogoEdit // expectation (LineEdits=nil => already line-aligned) NoDiff bool -}{{ - Name: "empty", - In: "", - Out: "", -}, { - Name: "no_diff", - In: "gargantuan\n", - Out: "gargantuan\n", -}, { - Name: "replace_all", - In: "fruit\n", - Out: "cheese\n", - Unified: UnifiedPrefix + ` +}{ + { + Name: "empty", + In: "", + Out: "", + }, { + Name: "no_diff", + In: "gargantuan\n", + Out: "gargantuan\n", + }, { + Name: "replace_all", + In: "fruit\n", + Out: "cheese\n", + Unified: UnifiedPrefix + ` @@ -1 +1 @@ -fruit +cheese `[1:], - Edits: []Edit{{Start: 0, End: 5, New: "cheese"}}, - LineEdits: []Edit{{Start: 0, End: 6, New: "cheese\n"}}, -}, { - Name: "insert_rune", - In: "gord\n", - Out: "gourd\n", - Unified: UnifiedPrefix + ` + Edits: []NogoEdit{{Start: 0, End: 5, New: "cheese"}}, + LineEdits: []NogoEdit{{Start: 0, End: 6, New: "cheese\n"}}, + }, { + Name: "insert_rune", + In: "gord\n", + Out: "gourd\n", + Unified: UnifiedPrefix + ` @@ -1 +1 @@ -gord +gourd `[1:], - Edits: []Edit{{Start: 2, End: 2, New: "u"}}, - LineEdits: []Edit{{Start: 0, End: 5, New: "gourd\n"}}, -}, { - Name: "delete_rune", - In: "groat\n", - Out: "goat\n", - Unified: UnifiedPrefix + ` + Edits: []NogoEdit{{Start: 2, End: 2, New: "u"}}, + LineEdits: []NogoEdit{{Start: 0, End: 5, New: "gourd\n"}}, + }, { + Name: "delete_rune", + In: "groat\n", + Out: "goat\n", + Unified: UnifiedPrefix + ` @@ -1 +1 @@ -groat +goat `[1:], - Edits: []Edit{{Start: 1, End: 2, New: ""}}, - LineEdits: []Edit{{Start: 0, End: 6, New: "goat\n"}}, -}, { - Name: "replace_rune", - In: "loud\n", - Out: "lord\n", - Unified: UnifiedPrefix + ` + Edits: []NogoEdit{{Start: 1, End: 2, New: ""}}, + LineEdits: []NogoEdit{{Start: 0, End: 6, New: "goat\n"}}, + }, { + Name: "replace_rune", + In: "loud\n", + Out: "lord\n", + Unified: 
UnifiedPrefix + ` @@ -1 +1 @@ -loud +lord `[1:], - Edits: []Edit{{Start: 2, End: 3, New: "r"}}, - LineEdits: []Edit{{Start: 0, End: 5, New: "lord\n"}}, -}, { - Name: "replace_partials", - In: "blanket\n", - Out: "bunker\n", - Unified: UnifiedPrefix + ` + Edits: []NogoEdit{{Start: 2, End: 3, New: "r"}}, + LineEdits: []NogoEdit{{Start: 0, End: 5, New: "lord\n"}}, + }, { + Name: "replace_partials", + In: "blanket\n", + Out: "bunker\n", + Unified: UnifiedPrefix + ` @@ -1 +1 @@ -blanket +bunker `[1:], - Edits: []Edit{ - {Start: 1, End: 3, New: "u"}, - {Start: 6, End: 7, New: "r"}, - }, - LineEdits: []Edit{{Start: 0, End: 8, New: "bunker\n"}}, -}, { - Name: "insert_line", - In: "1: one\n3: three\n", - Out: "1: one\n2: two\n3: three\n", - Unified: UnifiedPrefix + ` + Edits: []NogoEdit{ + {Start: 1, End: 3, New: "u"}, + {Start: 6, End: 7, New: "r"}, + }, + LineEdits: []NogoEdit{{Start: 0, End: 8, New: "bunker\n"}}, + }, { + Name: "insert_line", + In: "1: one\n3: three\n", + Out: "1: one\n2: two\n3: three\n", + Unified: UnifiedPrefix + ` @@ -1,2 +1,3 @@ 1: one +2: two 3: three `[1:], - Edits: []Edit{{Start: 7, End: 7, New: "2: two\n"}}, -}, { - Name: "replace_no_newline", - In: "A", - Out: "B", - Unified: UnifiedPrefix + ` + Edits: []NogoEdit{{Start: 7, End: 7, New: "2: two\n"}}, + }, { + Name: "replace_no_newline", + In: "A", + Out: "B", + Unified: UnifiedPrefix + ` @@ -1 +1 @@ -A \ No newline at end of file +B \ No newline at end of file `[1:], - Edits: []Edit{{Start: 0, End: 1, New: "B"}}, -}, { - Name: "delete_empty", - In: "meow", - Out: "", // GNU diff -u special case: +0,0 - Unified: UnifiedPrefix + ` + Edits: []NogoEdit{{Start: 0, End: 1, New: "B"}}, + }, { + Name: "delete_empty", + In: "meow", + Out: "", + Unified: UnifiedPrefix + ` @@ -1 +0,0 @@ -meow \ No newline at end of file `[1:], - Edits: []Edit{{Start: 0, End: 4, New: ""}}, - LineEdits: []Edit{{Start: 0, End: 4, New: ""}}, -}, { - Name: "append_empty", - In: "", // GNU diff -u special case: -0,0 - Out: 
"AB\nC", - Unified: UnifiedPrefix + ` + Edits: []NogoEdit{{Start: 0, End: 4, New: ""}}, + LineEdits: []NogoEdit{{Start: 0, End: 4, New: ""}}, + }, { + Name: "append_empty", + In: "", + Out: "AB\nC", + Unified: UnifiedPrefix + ` @@ -0,0 +1,2 @@ +AB +C \ No newline at end of file `[1:], - Edits: []Edit{{Start: 0, End: 0, New: "AB\nC"}}, - LineEdits: []Edit{{Start: 0, End: 0, New: "AB\nC"}}, -}, + Edits: []NogoEdit{{Start: 0, End: 0, New: "AB\nC"}}, + LineEdits: []NogoEdit{{Start: 0, End: 0, New: "AB\nC"}}, + }, { Name: "add_end", In: "A", @@ -398,8 +398,8 @@ var TestCases = []struct { +AB \ No newline at end of file `[1:], - Edits: []Edit{{Start: 1, End: 1, New: "B"}}, - LineEdits: []Edit{{Start: 0, End: 1, New: "AB"}}, + Edits: []NogoEdit{{Start: 1, End: 1, New: "B"}}, + LineEdits: []NogoEdit{{Start: 0, End: 1, New: "AB"}}, }, { Name: "add_empty", In: "", @@ -410,8 +410,8 @@ var TestCases = []struct { +C \ No newline at end of file `[1:], - Edits: []Edit{{Start: 0, End: 0, New: "AB\nC"}}, - LineEdits: []Edit{{Start: 0, End: 0, New: "AB\nC"}}, + Edits: []NogoEdit{{Start: 0, End: 0, New: "AB\nC"}}, + LineEdits: []NogoEdit{{Start: 0, End: 0, New: "AB\nC"}}, }, { Name: "add_newline", In: "A", @@ -422,8 +422,8 @@ var TestCases = []struct { \ No newline at end of file +A `[1:], - Edits: []Edit{{Start: 1, End: 1, New: "\n"}}, - LineEdits: []Edit{{Start: 0, End: 1, New: "A\n"}}, + Edits: []NogoEdit{{Start: 1, End: 1, New: "\n"}}, + LineEdits: []NogoEdit{{Start: 0, End: 1, New: "A\n"}}, }, { Name: "delete_front", In: "A\nB\nC\nA\nB\nB\nA\n", @@ -440,14 +440,14 @@ var TestCases = []struct { A +C `[1:], - NoDiff: true, // unified diff is different but valid - Edits: []Edit{ + NoDiff: true, + Edits: []NogoEdit{ {Start: 0, End: 4, New: ""}, {Start: 6, End: 6, New: "B\n"}, {Start: 10, End: 12, New: ""}, {Start: 14, End: 14, New: "C\n"}, }, - LineEdits: []Edit{ + LineEdits: []NogoEdit{ {Start: 0, End: 4, New: ""}, {Start: 6, End: 6, New: "B\n"}, {Start: 10, End: 12, New: ""}, @@ 
-464,8 +464,8 @@ var TestCases = []struct { +C + `[1:], - Edits: []Edit{{Start: 2, End: 3, New: "C\n"}}, - LineEdits: []Edit{{Start: 2, End: 4, New: "C\n\n"}}, + Edits: []NogoEdit{{Start: 2, End: 3, New: "C\n"}}, + LineEdits: []NogoEdit{{Start: 2, End: 4, New: "C\n\n"}}, }, { Name: "multiple_replace", @@ -485,17 +485,16 @@ var TestCases = []struct { -G +K `[1:], - Edits: []Edit{ + Edits: []NogoEdit{ {Start: 2, End: 8, New: "H\nI\nJ\n"}, {Start: 12, End: 14, New: "K\n"}, }, - NoDiff: true, // diff algorithm produces different delete/insert pattern - }, - { + NoDiff: true, + }, { Name: "extra_newline", In: "\nA\n", Out: "A\n", - Edits: []Edit{{Start: 0, End: 1, New: ""}}, + Edits: []NogoEdit{{Start: 0, End: 1, New: ""}}, Unified: UnifiedPrefix + `@@ -1,2 +1 @@ - A @@ -504,8 +503,8 @@ var TestCases = []struct { Name: "unified_lines", In: "aaa\nccc\n", Out: "aaa\nbbb\nccc\n", - Edits: []Edit{{Start: 3, End: 3, New: "\nbbb"}}, - LineEdits: []Edit{{Start: 0, End: 4, New: "aaa\nbbb\n"}}, + Edits: []NogoEdit{{Start: 3, End: 3, New: "\nbbb"}}, + LineEdits: []NogoEdit{{Start: 0, End: 4, New: "aaa\nbbb\n"}}, Unified: UnifiedPrefix + "@@ -1,2 +1,3 @@\n aaa\n+bbb\n ccc\n", }, { Name: "60379", @@ -521,8 +520,8 @@ type S struct { s fmt.Stringer } `, - Edits: []Edit{{Start: 27, End: 27, New: "\t"}}, - LineEdits: []Edit{{Start: 27, End: 42, New: "\ts fmt.Stringer\n"}}, + Edits: []NogoEdit{{Start: 27, End: 27, New: "\t"}}, + LineEdits: []NogoEdit{{Start: 27, End: 42, New: "\ts fmt.Stringer\n"}}, Unified: UnifiedPrefix + "@@ -1,5 +1,5 @@\n package a\n \n type S struct {\n-s fmt.Stringer\n+\ts fmt.Stringer\n }\n", }, } @@ -538,9 +537,12 @@ func TestApply(t *testing.T) { if err != nil { t.Fatalf("ApplyEdits failed: %v", err) } - gotBytes, err := ApplyEditsBytes([]byte(tt.In), tt.Edits) + gotBytes, err := applyEditsBytes([]byte(tt.In), tt.Edits) + if err != nil { + t.Fatalf("applyEditsBytes failed: %v", err) + } if got != string(gotBytes) { - t.Fatalf("ApplyEditsBytes: got %q, want %q", 
gotBytes, got) + t.Fatalf("applyEditsBytes: got %q, want %q", gotBytes, got) } if got != tt.Out { t.Errorf("ApplyEdits: got %q, want %q", got, tt.Out) @@ -550,9 +552,12 @@ func TestApply(t *testing.T) { if err != nil { t.Fatalf("ApplyEdits failed: %v", err) } - gotBytes, err := ApplyEditsBytes([]byte(tt.In), tt.LineEdits) + gotBytes, err := applyEditsBytes([]byte(tt.In), tt.LineEdits) + if err != nil { + t.Fatalf("applyEditsBytes failed: %v", err) + } if got != string(gotBytes) { - t.Fatalf("ApplyEditsBytes: got %q, want %q", gotBytes, got) + t.Fatalf("applyEditsBytes: got %q, want %q", gotBytes, got) } if got != tt.Out { t.Errorf("ApplyEdits: got %q, want %q", got, tt.Out) @@ -562,55 +567,54 @@ func TestApply(t *testing.T) { } } -// TestUniqueEdits verifies deduplication and overlap detection. -func TestUniqueEdits(t *testing.T) { +// TestUniqueSortedEdits verifies deduplication and overlap detection. +func TestUniqueSortedEdits(t *testing.T) { tests := []struct { - name string - edits []Edit - want []Edit - wantIdx int + name string + edits []NogoEdit + want []NogoEdit + wantHasOverlap bool }{ { name: "overlapping edits", - edits: []Edit{ + edits: []NogoEdit{ {Start: 0, End: 2, New: "a"}, {Start: 1, End: 3, New: "b"}, }, - want: []Edit{{Start: 0, End: 2, New: "a"}, {Start: 1, End: 3, New: "b"}}, - wantIdx: 1, + want: []NogoEdit{{Start: 0, End: 2, New: "a"}, {Start: 1, End: 3, New: "b"}}, + wantHasOverlap: true, }, } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got, gotIdx := UniqueEdits(tt.edits) - if !reflect.DeepEqual(got, tt.want) || gotIdx != tt.wantIdx { + got, hasOverlap := uniqueSortedEdits(tt.edits) + if !reflect.DeepEqual(got, tt.want) || hasOverlap != tt.wantHasOverlap { t.Fatalf("expected %v, got %v", tt.want, got) } }) } } - func TestFlatten(t *testing.T) { tests := []struct { name string - change Change - want map[string][]Edit + change NogoChange + want map[string][]NogoEdit }{ { name: "multiple analyzers with non-overlapping 
edits", - change: Change{ - FileToEdits: map[string]FileEdits{ + change: NogoChange{ + FileToEdits: map[string]NogoFileEdits{ "file1.go": { - AnalyzerToEdits: map[string][]Edit{ + AnalyzerToEdits: map[string][]NogoEdit{ "analyzer1": {{Start: 0, End: 1, New: "a"}}, "analyzer2": {{Start: 2, End: 3, New: "b"}}, }, }, }, }, - want: map[string][]Edit{ + want: map[string][]NogoEdit{ "file1.go": { {Start: 0, End: 1, New: "a"}, {Start: 2, End: 3, New: "b"}, @@ -619,17 +623,17 @@ func TestFlatten(t *testing.T) { }, { name: "multiple analyzers with overlapping edits", - change: Change{ - FileToEdits: map[string]FileEdits{ + change: NogoChange{ + FileToEdits: map[string]NogoFileEdits{ "file1.go": { - AnalyzerToEdits: map[string][]Edit{ + AnalyzerToEdits: map[string][]NogoEdit{ "analyzer1": {{Start: 0, End: 2, New: "a"}}, "analyzer2": {{Start: 1, End: 3, New: "b"}}, }, }, }, }, - want: map[string][]Edit{ + want: map[string][]NogoEdit{ "file1.go": { {Start: 0, End: 2, New: "a"}, }, @@ -639,9 +643,9 @@ func TestFlatten(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - got := Flatten(tt.change) + got := flatten(tt.change) if !reflect.DeepEqual(got, tt.want) { - t.Errorf("Flatten() = %v, want %v", got, tt.want) + t.Errorf("flatten() = %v, want %v", got, tt.want) } }) } @@ -671,13 +675,13 @@ func TestToCombinedPatch(t *testing.T) { tests := []struct { name string - fileToEdits map[string][]Edit + fileToEdits map[string][]NogoEdit expected string expectErr bool }{ { name: "valid patch for multiple files", - fileToEdits: map[string][]Edit{ + fileToEdits: map[string][]NogoEdit{ "file1.go": {{Start: 27, End: 27, New: "\nHello, world!\n"}}, // Add to function body "file2.go": {{Start: 24, End: 24, New: "var y = 20\n"}}, // Add a new variable }, @@ -701,7 +705,7 @@ func TestToCombinedPatch(t *testing.T) { }, { name: "file not found", - fileToEdits: map[string][]Edit{ + fileToEdits: map[string][]NogoEdit{ "nonexistent.go": {{Start: 0, End: 0, New: "new 
content"}}, }, expected: "", @@ -709,7 +713,7 @@ func TestToCombinedPatch(t *testing.T) { }, { name: "no edits", - fileToEdits: map[string][]Edit{}, + fileToEdits: map[string][]NogoEdit{}, expected: "", expectErr: false, }, @@ -717,7 +721,7 @@ func TestToCombinedPatch(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - combinedPatch, err := ToCombinedPatch(tt.fileToEdits) + combinedPatch, err := toCombinedPatch(tt.fileToEdits) // Verify error expectation if (err != nil) != tt.expectErr { @@ -779,3 +783,4 @@ func TestTrimWhitespaceHeadAndTail(t *testing.T) { }) } } + diff --git a/go/tools/builders/nogo_main.go b/go/tools/builders/nogo_main.go index 5be26d0fd1..4e28d0a6ba 100644 --- a/go/tools/builders/nogo_main.go +++ b/go/tools/builders/nogo_main.go @@ -552,17 +552,17 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) if nogoFixPath != "" { // If the nogo fixes are requested, we need to save the fixes to the file even if they are empty. 
// Otherwise, bazel will complain "not all outputs were created or valid" - change, err := NewChangeFromDiagnostics(diagnostics, pkg.fset) + change, err := newChangeFromDiagnostics(diagnostics, pkg.fset) if err != nil { - errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in converting diagnostics to change %v", err)) + errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in converting diagnostics to change: %v", err)) } - giantPatch, err := ToCombinedPatch(Flatten(*change)) + combinedPatch, err := toCombinedPatch(flatten(*change)) if err != nil { - errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in generating the patches %v", err)) + errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in generating the patches: %v", err)) } - err = SaveToFile(nogoFixPath, giantPatch) + err = os.WriteFile(nogoFixPath, []byte(combinedPatch), 0644) if err != nil { - errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in file saving %v", err)) + errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in saving the file %s: %v", nogoFixPath, err)) } } diff --git a/go/tools/builders/nogo_validation.go b/go/tools/builders/nogo_validation.go index 5cc4108206..a947a1bb56 100644 --- a/go/tools/builders/nogo_validation.go +++ b/go/tools/builders/nogo_validation.go @@ -32,11 +32,11 @@ func nogoValidation(args []string) error { if len(nogoFixContent) > 0 { // Format the message in a clean and clear way nogoFixRelated = fmt.Sprintf(` --------------------Suggested Fixes------------------- -The suggested fixes are as follows: +-------------------Suggested Fix------------------- +The suggested fix is as follows: %s -To apply the suggested fixes, run the following command: +To apply the suggested fix, run the following command: $ patch -p1 < %s ----------------------------------------------------- `, nogoFixContent, nogoFixFile) From 38adf8f82f18094bb5542c7b38b7f581d551413c 
Mon Sep 17 00:00:00 2001 From: Zhongpeng Lin Date: Thu, 19 Dec 2024 21:31:04 +0000 Subject: [PATCH 07/12] add com_github_pmezard_go_difflib to workspace --- go/private/repositories.bzl | 19 +++++++++++++++ ...om_github_pmezard_go_difflib-gazelle.patch | 24 +++++++++++++++++++ 2 files changed, 43 insertions(+) create mode 100644 third_party/com_github_pmezard_go_difflib-gazelle.patch diff --git a/go/private/repositories.bzl b/go/private/repositories.bzl index dab89597f1..7ba8ea6917 100644 --- a/go/private/repositories.bzl +++ b/go/private/repositories.bzl @@ -102,6 +102,25 @@ def go_rules_dependencies(force = False): patch_args = ["-p1"], ) + # Needed for nogo to generate unified diff + # releaser:upgrade-dep pmezard go-difflib + wrapper( + http_archive, + name = "com_github_pmezard_go_difflib", + # v1.0.0, latest as of 2024-12-19 + urls = [ + "https://mirror.bazel.build/github.com/pmezard/go-difflib/archive/refs/tags/v1.0.0.tar.gz", + "https://github.com/pmezard/go-difflib/archive/refs/tags/v1.0.0.tar.gz", + ], + sha256 = "28f3dc1b5c0efd61203ab07233f774740d3bf08da4d8153fb5310db6cea0ebda", + strip_prefix = "go-difflib-1.0.0", + patches = [ + # releaser:patch-cmd gazelle -repo_root . 
-go_prefix github.com/pmezard/go-difflib -go_naming_convention import_alias + Label("//third_party:com_github_pmezard_go_difflib-gazelle.patch"), + ], + patch_args = ["-p1"], + ) + # releaser:upgrade-dep golang sys wrapper( http_archive, diff --git a/third_party/com_github_pmezard_go_difflib-gazelle.patch b/third_party/com_github_pmezard_go_difflib-gazelle.patch new file mode 100644 index 0000000000..cc4dc1f7ac --- /dev/null +++ b/third_party/com_github_pmezard_go_difflib-gazelle.patch @@ -0,0 +1,24 @@ +diff --color -urN a/difflib/BUILD.bazel b/difflib/BUILD.bazel +--- a/difflib/BUILD.bazel 1970-01-01 00:00:00.000000000 +0000 ++++ b/difflib/BUILD.bazel 2024-12-19 21:26:09.121311218 +0000 +@@ -0,0 +1,20 @@ ++load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test") ++ ++go_library( ++ name = "difflib", ++ srcs = ["difflib.go"], ++ importpath = "github.com/pmezard/go-difflib/difflib", ++ visibility = ["//visibility:public"], ++) ++ ++alias( ++ name = "go_default_library", ++ actual = ":difflib", ++ visibility = ["//visibility:public"], ++) ++ ++go_test( ++ name = "difflib_test", ++ srcs = ["difflib_test.go"], ++ embed = [":difflib"], ++) From fa7173a429350e62b9e40256b9c087575b4ee429 Mon Sep 17 00:00:00 2001 From: peng3141 Date: Fri, 20 Dec 2024 03:31:06 +0000 Subject: [PATCH 08/12] 12/18: address comments batch 2 --- go/private/rules/binary.bzl | 6 +- go/private/rules/library.bzl | 6 +- go/tools/builders/nogo_change.go | 254 ++++---- go/tools/builders/nogo_change_test.go | 850 +++++++++++++------------- go/tools/builders/nogo_main.go | 12 +- 5 files changed, 572 insertions(+), 556 deletions(-) diff --git a/go/private/rules/binary.bzl b/go/private/rules/binary.bzl index 4d72f9bc21..f3f2f07cf2 100644 --- a/go/private/rules/binary.bzl +++ b/go/private/rules/binary.bzl @@ -154,17 +154,13 @@ def _go_binary_impl(ctx): validation_output = archive.data._validation_output nogo_fix_output = archive.data._nogo_fix_output - nogo_validation_outputs = [] - if 
validation_output: - nogo_validation_outputs.append(validation_output) - providers = [ archive, OutputGroupInfo( cgo_exports = archive.cgo_exports, compilation_outputs = [archive.data.file], nogo_fix = [nogo_fix_output] if nogo_fix_output else [], - _validation = nogo_validation_outputs, + _validation = [validation_output] if validation_output else [], ), ] diff --git a/go/private/rules/library.bzl b/go/private/rules/library.bzl index 501498bb05..4d87578529 100644 --- a/go/private/rules/library.bzl +++ b/go/private/rules/library.bzl @@ -51,10 +51,6 @@ def _go_library_impl(ctx): validation_output = archive.data._validation_output nogo_fix_output = archive.data._nogo_fix_output - nogo_validation_outputs = [] - if validation_output: - nogo_validation_outputs.append(validation_output) - return [ go_info, archive, @@ -71,7 +67,7 @@ def _go_library_impl(ctx): cgo_exports = archive.cgo_exports, compilation_outputs = [archive.data.file], nogo_fix = [nogo_fix_output] if nogo_fix_output else [], - _validation = nogo_validation_outputs, + _validation = [validation_output] if validation_output else [], ), ] diff --git a/go/tools/builders/nogo_change.go b/go/tools/builders/nogo_change.go index 423710a99b..9595d2ed64 100644 --- a/go/tools/builders/nogo_change.go +++ b/go/tools/builders/nogo_change.go @@ -19,122 +19,84 @@ type diagnosticEntry struct { *analysis.Analyzer } -// This file contains two main entities: NogoEdit and NogoChange, which correspond to the low-level -// and high-level abstractions. See them below. - -// The following is about the `NogoEdit`, a low-level abstraction of edits. -// A NogoEdit describes the replacement of a portion of a text file. -type NogoEdit struct { +// A nogoEdit describes the replacement of a portion of a text file. 
+type nogoEdit struct { New string // the replacement Start int // starting byte offset of the region to replace End int // (exclusive) ending byte offset of the region to replace } -// NogoFileEdits represents the mapping of analyzers to their edits for a specific file. -type NogoFileEdits struct { - AnalyzerToEdits map[string][]NogoEdit // Analyzer as the key, edits as the value -} +// analyzerToEdits represents the mapping of analyzers to their edits for a specific file. +type analyzerToEdits map[string][]nogoEdit // Analyzer as the key, edits as the value -// NogoChange represents a collection of file edits. -type NogoChange struct { - FileToEdits map[string]NogoFileEdits // File path as the key, analyzer-to-edits mapping as the value -} +// nogoChange represents a collection of file edits. +// It is a map with file paths as keys and analyzerToEdits as values. +type nogoChange map[string]analyzerToEdits -// newChange creates a new NogoChange object. -func newChange() *NogoChange { - return &NogoChange{ - FileToEdits: make(map[string]NogoFileEdits), - } +// newChange creates a new nogoChange object. +func newChange() nogoChange { + return make(nogoChange) } -func (e NogoEdit) String() string { +func (e nogoEdit) String() string { return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New) } -// sortEdits orders a slice of NogoEdits by (start, end) offset. +// sortEdits orders a slice of nogoEdits by (start, end) offset. // This ordering puts insertions (end = start) before deletions // (end > start) at the same point, but uses a stable sort to preserve // the order of multiple insertions at the same point. -// (applyEditsBytes detects multiple deletions at the same point as an error.) 
-func sortEdits(edits []NogoEdit) { - sort.Stable(editsSort(edits)) +func sortEdits(edits []nogoEdit) { + sort.Stable(byStartEnd(edits)) } -type editsSort []NogoEdit +type byStartEnd []nogoEdit -func (a editsSort) Len() int { return len(a) } -func (a editsSort) Less(i, j int) bool { - if cmp := a[i].Start - a[j].Start; cmp != 0 { - return cmp < 0 +func (a byStartEnd) Len() int { return len(a) } +func (a byStartEnd) Less(i, j int) bool { + if a[i].Start != a[j].Start { + return a[i].Start < a[j].Start } return a[i].End < a[j].End } -func (a editsSort) Swap(i, j int) { a[i], a[j] = a[j], a[i] } - +func (a byStartEnd) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -// validateBytes checks that edits are consistent with the src byte slice, -// and returns the size of the patched output. It may return a different slice if edits are sorted. -func validateBytes(src []byte, edits []NogoEdit) ([]NogoEdit, int, error) { - if !sort.IsSorted(editsSort(edits)) { - edits = append([]NogoEdit(nil), edits...) - sortEdits(edits) - } +// applyEditsBytes applies a sequence of nogoEdits to the src byte slice and returns the result. +// Edits are applied in order of start offset; edits with the same start offset are applied in the order they were provided. +// applyEditsBytes returns an error if any edit is out of bounds, or if any pair of edits is overlapping. +func applyEditsBytes(src []byte, edits []nogoEdit) ([]byte, error) { + // assumption: at this point, edits should be unique, sorted and non-overlapping. + // this is guaranteed in nogo_main.go by invoking flatten() earlier. size := len(src) - lastEnd := 0 + // performance only: this computes the size for preallocation to avoid the slice resizing below. 
for _, edit := range edits { - if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) { - return nil, 0, fmt.Errorf("the fix has an out-of-bounds edit with start=%d, end=%d", edit.Start, edit.End) - } - if edit.Start < lastEnd { - return nil, 0, fmt.Errorf("the fix has an edit with start=%d, which overlaps with a previous edit with end=%d", edit.Start, lastEnd) - } size += len(edit.New) + edit.Start - edit.End - lastEnd = edit.End - } - - return edits, size, nil -} - -// applyEditsBytes applies a sequence of NogoEdits to the src byte slice and returns the result. -// Edits are applied in order of start offset; edits with the same start offset are applied in the order they were provided. -// applyEditsBytes returns an error if any edit is out of bounds, or if any pair of edits is overlapping. -func applyEditsBytes(src []byte, edits []NogoEdit) ([]byte, error) { - edits, size, err := validateBytes(src, edits) - if err != nil { - return nil, err } // Apply the edits. out := make([]byte, 0, size) lastEnd := 0 for _, edit := range edits { - if lastEnd < edit.Start { - out = append(out, src[lastEnd:edit.Start]...) - } + out = append(out, src[lastEnd:edit.Start]...) out = append(out, edit.New...) lastEnd = edit.End } out = append(out, src[lastEnd:]...) - if len(out) != size { - return nil, fmt.Errorf("applyEditsBytes: unexpected output size, got %d, want %d", len(out), size) - } - return out, nil } - -// newChangeFromDiagnostics builds a NogoChange from a set of diagnostics. -// Unlike Diagnostic, NogoChange is independent of the FileSet given it uses perf-file offsets instead of token.Pos. -// This allows NogoChange to be used in contexts where the FileSet is not available, e.g., it remains applicable after it is saved to disk and loaded back. +// newChangeFromDiagnostics builds a nogoChange from a set of diagnostics. +// Unlike Diagnostic, nogoChange is independent of the FileSet given it uses perf-file offsets instead of token.Pos. 
+// This allows nogoChange to be used in contexts where the FileSet is not available, e.g., it remains applicable after it is saved to disk and loaded back. // See https://github.com/golang/tools/blob/master/go/analysis/diagnostic.go for details. -func newChangeFromDiagnostics(entries []diagnosticEntry, fileSet *token.FileSet) (*NogoChange, error) { +func newChangeFromDiagnostics(entries []diagnosticEntry, fileSet *token.FileSet) (nogoChange, error) { c := newChange() cwd, err := os.Getwd() if err != nil { - return c, fmt.Errorf("Error getting current working directory: (%v)", err) + return c, fmt.Errorf("error getting current working directory: %v", err) } var allErrors []error @@ -143,107 +105,123 @@ func newChangeFromDiagnostics(entries []diagnosticEntry, fileSet *token.FileSet) analyzer := entry.Analyzer.Name for _, sf := range entry.Diagnostic.SuggestedFixes { for _, edit := range sf.TextEdits { + // Define start and end positions start, end := edit.Pos, edit.End if !end.IsValid() { end = start } - file := fileSet.File(edit.Pos) + file := fileSet.File(start) if file == nil { - allErrors = append(allErrors, fmt.Errorf("invalid fix: missing file info for pos %v", edit.Pos)) + allErrors = append(allErrors, fmt.Errorf( + "invalid fix from analyzer %q: missing file info for start=%v", + analyzer, start, + )) continue } + // at this point, given file != nil, it is guaranteed start >= token.Pos(file.Base()) + + fileName := file.Name() + fileRelativePath, err := filepath.Rel(cwd, fileName) + if err != nil { + fileRelativePath = fileName // fallback logic + } + + // Validate start and end positions if start > end { - allErrors = append(allErrors, fmt.Errorf("invalid fix: pos %v > end %v", start, end)) + allErrors = append(allErrors, fmt.Errorf( + "invalid fix from analyzer %q for file %q: start=%v > end=%v", + analyzer, fileRelativePath, start, end, + )) continue } - if eof := token.Pos(file.Base() + file.Size()); end > eof { - allErrors = append(allErrors, 
fmt.Errorf("invalid fix: end %v past end of file %v", end, eof)) + if fileEOF := token.Pos(file.Base() + file.Size()); end > fileEOF { + allErrors = append(allErrors, fmt.Errorf( + "invalid fix from analyzer %q for file %q: end=%v is past the file's EOF=%v", + analyzer, fileRelativePath, end, fileEOF, + )) continue } + // at this point, it is guaranteed that file.Pos(file.Base()) <= start <= end <= fileEOF. - nogoEdit := NogoEdit{Start: file.Offset(start), End: file.Offset(end), New: string(edit.NewText)} - fileRelativePath, err := filepath.Rel(cwd, file.Name()) - if err != nil { - fileRelativePath = file.Name() // fallback logic - } - c.addEdit(fileRelativePath, analyzer, nogoEdit) + // Create the edit + nEdit := nogoEdit{Start: file.Offset(start), End: file.Offset(end), New: string(edit.NewText)} + addEdit(c, fileRelativePath, analyzer, nEdit) } } } if len(allErrors) > 0 { var errMsg bytes.Buffer - sep := "" - for _, err := range allErrors { - errMsg.WriteString(sep) - sep = "\n" - errMsg.WriteString(err.Error()) + for _, e := range allErrors { + errMsg.WriteString("\n") + errMsg.WriteString(e.Error()) } - return c, fmt.Errorf("errors:\n%s", errMsg.String()) + return c, fmt.Errorf("some suggested fixes are invalid:%s", errMsg.String()) } return c, nil } -// addEdit adds an edit to the NogoChange, organizing by file and analyzer. -func (c *NogoChange) addEdit(file string, analyzer string, edit NogoEdit) { - // Ensure the NogoFileEdits structure exists for the file - fileEdits, exists := c.FileToEdits[file] + +// addEdit adds an edit to the nogoChange, organizing by file and analyzer. 
+func addEdit(c nogoChange, file string, analyzer string, edit nogoEdit) { + fileEdits, exists := c[file] if !exists { - fileEdits = NogoFileEdits{ - AnalyzerToEdits: make(map[string][]NogoEdit), - } - c.FileToEdits[file] = fileEdits + fileEdits = make(analyzerToEdits) + c[file] = fileEdits } - - // Append the edit to the list of edits for the analyzer - fileEdits.AnalyzerToEdits[analyzer] = append(fileEdits.AnalyzerToEdits[analyzer], edit) + fileEdits[analyzer] = append(fileEdits[analyzer], edit) } // uniqueSortedEdits returns a list of edits that is sorted and // contains no duplicate edits. Returns whether there is overlap. // Deduplication helps in the cases where two analyzers produce duplicate edits. -func uniqueSortedEdits(edits []NogoEdit) ([]NogoEdit, bool) { +func uniqueSortedEdits(edits []nogoEdit) ([]nogoEdit, bool) { hasOverlap := false if len(edits) == 0 { return edits, hasOverlap } - equivalent := func(x, y NogoEdit) bool { + equivalent := func(x, y nogoEdit) bool { return x.Start == y.Start && x.End == y.End && x.New == y.New } sortEdits(edits) - unique := []NogoEdit{edits[0]} + unique := []nogoEdit{edits[0]} for i := 1; i < len(edits); i++ { prev, cur := edits[i-1], edits[i] - if !equivalent(prev, cur) { // equivalent ones are safely skipped - unique = append(unique, cur) - if prev.End > cur.Start { - // hasOverlap = true means at least one overlap was detected. - hasOverlap = true - } + if equivalent(prev, cur) { + // equivalent ones are safely skipped + continue + } + + unique = append(unique, cur) + if prev.End > cur.Start { + // hasOverlap = true means at least one overlap was detected. + hasOverlap = true } } return unique, hasOverlap } -// flatten merges all edits for a file from different analyzers into a single map of file-to-edits. -// Edits from each analyzer are processed in a deterministic order, and overlapping edits are skipped. 
-func flatten(change NogoChange) map[string][]NogoEdit { - fileToEdits := make(map[string][]NogoEdit) +type fileToEdits map[string][]nogoEdit // File path as the key, list of nogoEdit as the value + +// flatten processes a nogoChange and returns a fileToEdits. +// It also returns an error if any suggested fixes are skipped due to conflicts. +func flatten(change nogoChange) (fileToEdits, error) { + result := make(fileToEdits) + var errs []error - for file, fileEdits := range change.FileToEdits { + for file, fileEdits := range change { // Get a sorted list of analyzers for deterministic processing order - analyzers := make([]string, 0, len(fileEdits.AnalyzerToEdits)) - for analyzer := range fileEdits.AnalyzerToEdits { + analyzers := make([]string, 0, len(fileEdits)) + for analyzer := range fileEdits { analyzers = append(analyzers, analyzer) } sort.Strings(analyzers) - mergedEdits := make([]NogoEdit, 0) - + var mergedEdits []nogoEdit for _, analyzer := range analyzers { - edits := fileEdits.AnalyzerToEdits[analyzer] + edits := fileEdits[analyzer] if len(edits) == 0 { continue } @@ -253,40 +231,49 @@ func flatten(change NogoChange) map[string][]NogoEdit { candidateEdits, hasOverlap := uniqueSortedEdits(candidateEdits) if hasOverlap { // Skip edits from this analyzer if merging them would cause overlaps. - // Apply the non-overlapping edits first. After that, a rerun of bazel build will - // allows these skipped edits to be applied separately. - // Note the resolution happens to each file independently. - // Also for clarity, we would accept all or none of an analyzer. + // Collect an error message for the user. + errMsg := fmt.Errorf( + "suggested fixes from analyzer %q on file %q are skipped because they conflict with other analyzers", + analyzer, file, + ) + errs = append(errs, errMsg) continue } // Update the merged edits + // At this point, it is guaranteed the edits associated with the file are unique, sorted, and non-overlapping. 
mergedEdits = candidateEdits } // Store the final merged edits for the file - fileToEdits[file] = mergedEdits + result[file] = mergedEdits } - return fileToEdits + if len(errs) > 0 { + var errMsg strings.Builder + errMsg.WriteString("some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file:") + for _, err := range errs { + errMsg.WriteString("\n") + errMsg.WriteString(err.Error()) + } + return result, fmt.Errorf(errMsg.String()) + } + + return result, nil } -// toCombinedPatch converts all edits to a single consolidated patch. -func toCombinedPatch(fileToEdits map[string][]NogoEdit) (string, error) { +func toCombinedPatch(fte fileToEdits) (string, error) { var combinedPatch strings.Builder - filePaths := make([]string, 0, len(fileToEdits)) - for filePath := range fileToEdits { + filePaths := make([]string, 0, len(fte)) + for filePath := range fte { filePaths = append(filePaths, filePath) } sort.Strings(filePaths) // Sort file paths alphabetically // Iterate over sorted file paths for _, filePath := range filePaths { - // edits are unique and sorted, as ensured by the flatten() method that is invoked earlier. - // for performance reason, let us skip uniqueSortedEdits() call here, - // although in general a library API shall not assume other calls have been made. - edits := fileToEdits[filePath] + edits := fte[filePath] if len(edits) == 0 { continue } @@ -296,6 +283,8 @@ func toCombinedPatch(fileToEdits map[string][]NogoEdit) (string, error) { return "", fmt.Errorf("failed to read file %s: %v", filePath, err) } + // edits are guaranteed to be unique, sorted and non-overlapping + // see flatten() that is called before this function. 
out, err := applyEditsBytes(contents, edits) if err != nil { return "", fmt.Errorf("failed to apply edits for file %s: %v", filePath, err) @@ -314,7 +303,6 @@ func toCombinedPatch(fileToEdits map[string][]NogoEdit) (string, error) { return "", fmt.Errorf("failed to generate patch for file %s: %v", filePath, err) } - // Append the patch for this file to the giant patch combinedPatch.WriteString(patch) combinedPatch.WriteString("\n") // Ensure separation between file patches } diff --git a/go/tools/builders/nogo_change_test.go b/go/tools/builders/nogo_change_test.go index 03bbcf2fd0..920ce1ce95 100644 --- a/go/tools/builders/nogo_change_test.go +++ b/go/tools/builders/nogo_change_test.go @@ -1,13 +1,10 @@ package main import ( - "fmt" "go/token" "os" "path/filepath" "reflect" - "slices" - "sort" "testing" "golang.org/x/tools/go/analysis" @@ -35,82 +32,30 @@ var ( analyzer2 = &analysis.Analyzer{Name: "analyzer2"} ) -// ApplyEdits() and validate() here provide the reference implementation for testing -// applyEditsBytes() from the refactored nogo_change code, now using NogoEdit. -func ApplyEdits(src string, edits []NogoEdit) (string, error) { - edits, size, err := validate(src, edits) - if err != nil { - return "", err - } - - // Apply edits. - out := make([]byte, 0, size) - lastEnd := 0 - for _, edit := range edits { - if lastEnd < edit.Start { - out = append(out, src[lastEnd:edit.Start]...) - } - out = append(out, edit.New...) - lastEnd = edit.End - } - out = append(out, src[lastEnd:]...) - - if len(out) != size { - return "", fmt.Errorf("applyEdits: unexpected output size, got %d, want %d", len(out), size) - } - - return string(out), nil -} - -func validate(src string, edits []NogoEdit) ([]NogoEdit, int, error) { - if !sort.IsSorted(editsSort(edits)) { - edits = append([]NogoEdit(nil), edits...) - sortEdits(edits) - } - - // Check validity of edits and compute final size. 
- size := len(src) - lastEnd := 0 - for _, edit := range edits { - if !(0 <= edit.Start && edit.Start <= edit.End && edit.End <= len(src)) { - return nil, 0, fmt.Errorf("diff has out-of-bounds edits") - } - if edit.Start < lastEnd { - return nil, 0, fmt.Errorf("diff has overlapping edits") - } - size += len(edit.New) + edit.Start - edit.End - lastEnd = edit.End - } - - return edits, size, nil -} - // TestAddEdit_MultipleAnalyzers tests addEdit with multiple analyzers and files using reflect.DeepEqual func TestAddEdit_MultipleAnalyzers(t *testing.T) { - change := newChange() + change := newChange() file1 := "file1.go" - edit1a := NogoEdit{Start: 10, End: 20, New: "code1 from analyzer1"} - edit1b := NogoEdit{Start: 30, End: 40, New: "code2 from analyzer1"} - edit2a := NogoEdit{Start: 50, End: 60, New: "code1 from analyzer2"} - edit2b := NogoEdit{Start: 70, End: 80, New: "code2 from analyzer2"} + edit1a := nogoEdit{Start: 10, End: 20, New: "code1 from analyzer1"} + edit1b := nogoEdit{Start: 30, End: 40, New: "code2 from analyzer1"} + edit2a := nogoEdit{Start: 50, End: 60, New: "code1 from analyzer2"} + edit2b := nogoEdit{Start: 70, End: 80, New: "code2 from analyzer2"} - expected := map[string]NogoFileEdits{ - file1: { - AnalyzerToEdits: map[string][]NogoEdit{ - analyzer1.Name: {edit1a, edit1b}, - analyzer2.Name: {edit2a, edit2b}, - }, + expected := nogoChange{ + file1: analyzerToEdits{ + analyzer1.Name: {edit1a, edit1b}, + analyzer2.Name: {edit2a, edit2b}, }, } - change.addEdit(file1, analyzer1.Name, edit1a) - change.addEdit(file1, analyzer1.Name, edit1b) - change.addEdit(file1, analyzer2.Name, edit2a) - change.addEdit(file1, analyzer2.Name, edit2b) + addEdit(change, file1, analyzer1.Name, edit1a) + addEdit(change, file1, analyzer1.Name, edit1b) + addEdit(change, file1, analyzer2.Name, edit2a) + addEdit(change, file1, analyzer2.Name, edit2b) - if !reflect.DeepEqual(change.FileToEdits, expected) { - t.Fatalf("NogoChange.FileToEdits did not match the expected 
result.\nGot: %+v\nExpected: %+v", change.FileToEdits, expected) + if !reflect.DeepEqual(change, expected) { + t.Fatalf("nogoChange did not match the expected result.\nGot: %+v\nExpected: %+v", change, expected) } } @@ -123,7 +68,7 @@ func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { name string fileSet *token.FileSet diagnosticEntries []diagnosticEntry - expectedEdits map[string]NogoFileEdits + expectedEdits nogoChange }{ { name: "ValidEdits", @@ -142,12 +87,10 @@ func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { }, }, }, - expectedEdits: map[string]NogoFileEdits{ - "file1.go": { - AnalyzerToEdits: map[string][]NogoEdit{ - "analyzer1": { - {New: "new_text", Start: 4, End: 9}, // 0-based offset - }, + expectedEdits: nogoChange{ + "file1.go": analyzerToEdits{ + "analyzer1": { + {New: "new_text", Start: 4, End: 9}, // 0-based offset }, }, }, @@ -160,8 +103,8 @@ func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { if err != nil { t.Fatalf("expected no error, got: %v", err) } - if !reflect.DeepEqual(change.FileToEdits, tt.expectedEdits) { - t.Fatalf("expected edits: %+v, got: %+v", tt.expectedEdits, change.FileToEdits) + if !reflect.DeepEqual(change, tt.expectedEdits) { + t.Fatalf("expected edits: %+v, got: %+v", tt.expectedEdits, change) } }) } @@ -195,7 +138,67 @@ func TestNewChangeFromDiagnostics_ErrorCases(t *testing.T) { }, }, }, - expectedErr: "errors:\ninvalid fix: pos 15 > end 10", + expectedErr: "some suggested fixes are invalid:\ninvalid fix from analyzer \"analyzer1\" for file \"file1.go\": start=15 > end=10", + }, + { + name: "EndPastEOF", + fileSet: mockFileSet(file1path, 100), + diagnosticEntries: []diagnosticEntry{ + { + Analyzer: analyzer2, + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(95), End: token.Pos(110), NewText: []byte("new_text")}, + }, + }, + }, + }, + }, + }, + expectedErr: "some suggested fixes are invalid:\ninvalid fix 
from analyzer \"analyzer2\" for file \"file1.go\": end=110 is past the file's EOF=101", + }, + { + name: "MissingFileInfo", + fileSet: mockFileSet(file1path, 100), + diagnosticEntries: []diagnosticEntry{ + { + Analyzer: analyzer1, + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(150), End: token.Pos(160), NewText: []byte("new_text")}, + }, + }, + }, + }, + }, + }, + expectedErr: "some suggested fixes are invalid:\ninvalid fix from analyzer \"analyzer1\": missing file info for start=150", + }, + { + name: "MultipleErrors", + fileSet: mockFileSet(file1path, 100), + diagnosticEntries: []diagnosticEntry{ + { + Analyzer: analyzer1, + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(15), End: token.Pos(10), NewText: []byte("new_text")}, // InvalidPosEnd + {Pos: token.Pos(95), End: token.Pos(110), NewText: []byte("new_text")}, // EndPastEOF + }, + }, + }, + }, + }, + }, + expectedErr: `some suggested fixes are invalid: +invalid fix from analyzer "analyzer1" for file "file1.go": start=15 > end=10 +invalid fix from analyzer "analyzer1" for file "file1.go": end=110 is past the file's EOF=101`, }, } @@ -207,26 +210,27 @@ func TestNewChangeFromDiagnostics_ErrorCases(t *testing.T) { } if err.Error() != tt.expectedErr { - t.Fatalf("expected error: %v, got: %v", tt.expectedErr, err) + t.Fatalf("expected error:\n%v\ngot:\n%v", tt.expectedErr, err.Error()) } }) } } + func TestSortEdits(t *testing.T) { tests := []struct { name string - edits []NogoEdit - sorted []NogoEdit + edits []nogoEdit + sorted []nogoEdit }{ { name: "already sorted", - edits: []NogoEdit{ + edits: []nogoEdit{ {New: "a", Start: 0, End: 1}, {New: "b", Start: 1, End: 2}, {New: "c", Start: 2, End: 3}, }, - sorted: []NogoEdit{ + sorted: []nogoEdit{ {New: "a", Start: 0, End: 1}, {New: "b", Start: 1, End: 2}, {New: "c", Start: 2, End: 3}, @@ -234,12 
+238,12 @@ func TestSortEdits(t *testing.T) { }, { name: "unsorted", - edits: []NogoEdit{ + edits: []nogoEdit{ {New: "b", Start: 1, End: 2}, {New: "a", Start: 0, End: 1}, {New: "c", Start: 2, End: 3}, }, - sorted: []NogoEdit{ + sorted: []nogoEdit{ {New: "a", Start: 0, End: 1}, {New: "b", Start: 1, End: 2}, {New: "c", Start: 2, End: 3}, @@ -247,11 +251,11 @@ func TestSortEdits(t *testing.T) { }, { name: "insert before delete at same position", - edits: []NogoEdit{ + edits: []nogoEdit{ {New: "", Start: 0, End: 1}, // delete {New: "insert", Start: 0, End: 0}, // insert }, - sorted: []NogoEdit{ + sorted: []nogoEdit{ {New: "insert", Start: 0, End: 0}, // insert comes before delete {New: "", Start: 0, End: 1}, }, @@ -268,320 +272,209 @@ func TestSortEdits(t *testing.T) { } } -// TestCases uses NogoEdit now instead of Edit -var TestCases = []struct { - Name, In, Out, Unified string - Edits, LineEdits []NogoEdit // expectation (LineEdits=nil => already line-aligned) - NoDiff bool -}{ - { - Name: "empty", - In: "", - Out: "", - }, { - Name: "no_diff", - In: "gargantuan\n", - Out: "gargantuan\n", - }, { - Name: "replace_all", - In: "fruit\n", - Out: "cheese\n", - Unified: UnifiedPrefix + ` -@@ -1 +1 @@ --fruit -+cheese -`[1:], - Edits: []NogoEdit{{Start: 0, End: 5, New: "cheese"}}, - LineEdits: []NogoEdit{{Start: 0, End: 6, New: "cheese\n"}}, - }, { - Name: "insert_rune", - In: "gord\n", - Out: "gourd\n", - Unified: UnifiedPrefix + ` -@@ -1 +1 @@ --gord -+gourd -`[1:], - Edits: []NogoEdit{{Start: 2, End: 2, New: "u"}}, - LineEdits: []NogoEdit{{Start: 0, End: 5, New: "gourd\n"}}, - }, { - Name: "delete_rune", - In: "groat\n", - Out: "goat\n", - Unified: UnifiedPrefix + ` -@@ -1 +1 @@ --groat -+goat -`[1:], - Edits: []NogoEdit{{Start: 1, End: 2, New: ""}}, - LineEdits: []NogoEdit{{Start: 0, End: 6, New: "goat\n"}}, - }, { - Name: "replace_rune", - In: "loud\n", - Out: "lord\n", - Unified: UnifiedPrefix + ` -@@ -1 +1 @@ --loud -+lord -`[1:], - Edits: []NogoEdit{{Start: 2, End: 
3, New: "r"}}, - LineEdits: []NogoEdit{{Start: 0, End: 5, New: "lord\n"}}, - }, { - Name: "replace_partials", - In: "blanket\n", - Out: "bunker\n", - Unified: UnifiedPrefix + ` -@@ -1 +1 @@ --blanket -+bunker -`[1:], - Edits: []NogoEdit{ - {Start: 1, End: 3, New: "u"}, - {Start: 6, End: 7, New: "r"}, - }, - LineEdits: []NogoEdit{{Start: 0, End: 8, New: "bunker\n"}}, - }, { - Name: "insert_line", - In: "1: one\n3: three\n", - Out: "1: one\n2: two\n3: three\n", - Unified: UnifiedPrefix + ` -@@ -1,2 +1,3 @@ - 1: one -+2: two - 3: three -`[1:], - Edits: []NogoEdit{{Start: 7, End: 7, New: "2: two\n"}}, - }, { - Name: "replace_no_newline", - In: "A", - Out: "B", - Unified: UnifiedPrefix + ` -@@ -1 +1 @@ --A -\ No newline at end of file -+B -\ No newline at end of file -`[1:], - Edits: []NogoEdit{{Start: 0, End: 1, New: "B"}}, - }, { - Name: "delete_empty", - In: "meow", - Out: "", - Unified: UnifiedPrefix + ` -@@ -1 +0,0 @@ --meow -\ No newline at end of file -`[1:], - Edits: []NogoEdit{{Start: 0, End: 4, New: ""}}, - LineEdits: []NogoEdit{{Start: 0, End: 4, New: ""}}, - }, { - Name: "append_empty", - In: "", - Out: "AB\nC", - Unified: UnifiedPrefix + ` -@@ -0,0 +1,2 @@ -+AB -+C -\ No newline at end of file -`[1:], - Edits: []NogoEdit{{Start: 0, End: 0, New: "AB\nC"}}, - LineEdits: []NogoEdit{{Start: 0, End: 0, New: "AB\nC"}}, - }, - { - Name: "add_end", - In: "A", - Out: "AB", - Unified: UnifiedPrefix + ` -@@ -1 +1 @@ --A -\ No newline at end of file -+AB -\ No newline at end of file -`[1:], - Edits: []NogoEdit{{Start: 1, End: 1, New: "B"}}, - LineEdits: []NogoEdit{{Start: 0, End: 1, New: "AB"}}, - }, { - Name: "add_empty", - In: "", - Out: "AB\nC", - Unified: UnifiedPrefix + ` -@@ -0,0 +1,2 @@ -+AB -+C -\ No newline at end of file -`[1:], - Edits: []NogoEdit{{Start: 0, End: 0, New: "AB\nC"}}, - LineEdits: []NogoEdit{{Start: 0, End: 0, New: "AB\nC"}}, - }, { - Name: "add_newline", - In: "A", - Out: "A\n", - Unified: UnifiedPrefix + ` -@@ -1 +1 @@ --A -\ No newline at 
end of file -+A -`[1:], - Edits: []NogoEdit{{Start: 1, End: 1, New: "\n"}}, - LineEdits: []NogoEdit{{Start: 0, End: 1, New: "A\n"}}, - }, { - Name: "delete_front", - In: "A\nB\nC\nA\nB\nB\nA\n", - Out: "C\nB\nA\nB\nA\nC\n", - Unified: UnifiedPrefix + ` -@@ -1,7 +1,6 @@ --A --B - C -+B - A - B --B - A -+C -`[1:], - NoDiff: true, - Edits: []NogoEdit{ - {Start: 0, End: 4, New: ""}, - {Start: 6, End: 6, New: "B\n"}, - {Start: 10, End: 12, New: ""}, - {Start: 14, End: 14, New: "C\n"}, - }, - LineEdits: []NogoEdit{ - {Start: 0, End: 4, New: ""}, - {Start: 6, End: 6, New: "B\n"}, - {Start: 10, End: 12, New: ""}, - {Start: 14, End: 14, New: "C\n"}, - }, - }, { - Name: "replace_last_line", - In: "A\nB\n", - Out: "A\nC\n\n", - Unified: UnifiedPrefix + ` -@@ -1,2 +1,3 @@ - A --B -+C -+ -`[1:], - Edits: []NogoEdit{{Start: 2, End: 3, New: "C\n"}}, - LineEdits: []NogoEdit{{Start: 2, End: 4, New: "C\n\n"}}, - }, - { - Name: "multiple_replace", - In: "A\nB\nC\nD\nE\nF\nG\n", - Out: "A\nH\nI\nJ\nE\nF\nK\n", - Unified: UnifiedPrefix + ` -@@ -1,7 +1,7 @@ - A --B --C --D -+H -+I -+J - E - F --G -+K -`[1:], - Edits: []NogoEdit{ - {Start: 2, End: 8, New: "H\nI\nJ\n"}, - {Start: 12, End: 14, New: "K\n"}, - }, - NoDiff: true, - }, { - Name: "extra_newline", - In: "\nA\n", - Out: "A\n", - Edits: []NogoEdit{{Start: 0, End: 1, New: ""}}, - Unified: UnifiedPrefix + `@@ -1,2 +1 @@ -- - A -`, - }, { - Name: "unified_lines", - In: "aaa\nccc\n", - Out: "aaa\nbbb\nccc\n", - Edits: []NogoEdit{{Start: 3, End: 3, New: "\nbbb"}}, - LineEdits: []NogoEdit{{Start: 0, End: 4, New: "aaa\nbbb\n"}}, - Unified: UnifiedPrefix + "@@ -1,2 +1,3 @@\n aaa\n+bbb\n ccc\n", - }, { - Name: "60379", - In: `package a + +func TestApplyEditsBytes(t *testing.T) { + tests := []struct { + name string + input string + edits []nogoEdit + expected string + }{ + { + name: "empty", + input: "", + edits: []nogoEdit{}, + expected: "", + }, + { + name: "no_diff", + input: "gargantuan\n", + edits: []nogoEdit{}, + expected: 
"gargantuan\n", + }, + { + name: "replace_all", + input: "fruit\n", + edits: []nogoEdit{ + {Start: 0, End: 5, New: "cheese"}, + }, + expected: "cheese\n", + }, + { + name: "insert_rune", + input: "gord\n", + edits: []nogoEdit{ + {Start: 2, End: 2, New: "u"}, + }, + expected: "gourd\n", + }, + { + name: "delete_rune", + input: "groat\n", + edits: []nogoEdit{ + {Start: 1, End: 2, New: ""}, + }, + expected: "goat\n", + }, + { + name: "replace_rune", + input: "loud\n", + edits: []nogoEdit{ + {Start: 2, End: 3, New: "r"}, + }, + expected: "lord\n", + }, + { + name: "replace_partials", + input: "blanket\n", + edits: []nogoEdit{ + {Start: 1, End: 3, New: "u"}, + {Start: 6, End: 7, New: "r"}, + }, + expected: "bunker\n", + }, + { + name: "insert_line", + input: "1: one\n3: three\n", + edits: []nogoEdit{ + {Start: 7, End: 7, New: "2: two\n"}, + }, + expected: "1: one\n2: two\n3: three\n", + }, + { + name: "replace_no_newline", + input: "A", + edits: []nogoEdit{ + {Start: 0, End: 1, New: "B"}, + }, + expected: "B", + }, + { + name: "delete_empty", + input: "meow", + edits: []nogoEdit{ + {Start: 0, End: 4, New: ""}, + }, + expected: "", + }, + { + name: "append_empty", + input: "", + edits: []nogoEdit{ + {Start: 0, End: 0, New: "AB\nC"}, + }, + expected: "AB\nC", + }, + { + name: "add_end", + input: "A", + edits: []nogoEdit{ + {Start: 1, End: 1, New: "B"}, + }, + expected: "AB", + }, + { + name: "add_newline", + input: "A", + edits: []nogoEdit{ + {Start: 1, End: 1, New: "\n"}, + }, + expected: "A\n", + }, + { + name: "delete_front", + input: "A\nB\nC\nA\nB\nB\nA\n", + edits: []nogoEdit{ + {Start: 0, End: 4, New: ""}, + {Start: 6, End: 6, New: "B\n"}, + {Start: 10, End: 12, New: ""}, + {Start: 14, End: 14, New: "C\n"}, + }, + expected: "C\nB\nA\nB\nA\nC\n", + }, + { + name: "replace_last_line", + input: "A\nB\n", + edits: []nogoEdit{ + {Start: 2, End: 3, New: "C\n"}, + }, + expected: "A\nC\n\n", + }, + { + name: "multiple_replace", + input: "A\nB\nC\nD\nE\nF\nG\n", + edits: 
[]nogoEdit{ + {Start: 2, End: 8, New: "H\nI\nJ\n"}, + {Start: 12, End: 14, New: "K\n"}, + }, + expected: "A\nH\nI\nJ\nE\nF\nK\n", + }, + { + name: "extra_newline", + input: "\nA\n", + edits: []nogoEdit{ + {Start: 0, End: 1, New: ""}, + }, + expected: "A\n", + }, + { + name: "unified_lines", + input: "aaa\nccc\n", + edits: []nogoEdit{ + {Start: 3, End: 3, New: "\nbbb"}, + }, + expected: "aaa\nbbb\nccc\n", + }, + { + name: "complex_replace_with_tab", + input: `package a type S struct { s fmt.Stringer } `, - Out: `package a + edits: []nogoEdit{ + {Start: 27, End: 27, New: "\t"}, + }, + expected: `package a type S struct { s fmt.Stringer } `, - Edits: []NogoEdit{{Start: 27, End: 27, New: "\t"}}, - LineEdits: []NogoEdit{{Start: 27, End: 42, New: "\ts fmt.Stringer\n"}}, - Unified: UnifiedPrefix + "@@ -1,5 +1,5 @@\n package a\n \n type S struct {\n-s fmt.Stringer\n+\ts fmt.Stringer\n }\n", - }, -} - -func TestApply(t *testing.T) { - t.Parallel() + }, + } - for _, tt := range TestCases { - t.Run(tt.Name, func(t *testing.T) { - reversedEdits := slices.Clone(tt.Edits) - slices.Reverse(reversedEdits) - got, err := ApplyEdits(tt.In, reversedEdits) - if err != nil { - t.Fatalf("ApplyEdits failed: %v", err) - } - gotBytes, err := applyEditsBytes([]byte(tt.In), tt.Edits) + for _, tt := range tests { + tt := tt // capture range variable + t.Run(tt.name, func(t *testing.T) { + result, err := applyEditsBytes([]byte(tt.input), tt.edits) if err != nil { t.Fatalf("applyEditsBytes failed: %v", err) } - if got != string(gotBytes) { - t.Fatalf("applyEditsBytes: got %q, want %q", gotBytes, got) - } - if got != tt.Out { - t.Errorf("ApplyEdits: got %q, want %q", got, tt.Out) - } - if tt.LineEdits != nil { - got, err := ApplyEdits(tt.In, tt.LineEdits) - if err != nil { - t.Fatalf("ApplyEdits failed: %v", err) - } - gotBytes, err := applyEditsBytes([]byte(tt.In), tt.LineEdits) - if err != nil { - t.Fatalf("applyEditsBytes failed: %v", err) - } - if got != string(gotBytes) { - 
t.Fatalf("applyEditsBytes: got %q, want %q", gotBytes, got) - } - if got != tt.Out { - t.Errorf("ApplyEdits: got %q, want %q", got, tt.Out) - } + if string(result) != tt.expected { + t.Errorf("applyEditsBytes: got %q, want %q", string(result), tt.expected) } }) } } + // TestUniqueSortedEdits verifies deduplication and overlap detection. func TestUniqueSortedEdits(t *testing.T) { tests := []struct { name string - edits []NogoEdit - want []NogoEdit + edits []nogoEdit + want []nogoEdit wantHasOverlap bool }{ { name: "overlapping edits", - edits: []NogoEdit{ + edits: []nogoEdit{ {Start: 0, End: 2, New: "a"}, {Start: 1, End: 3, New: "b"}, }, - want: []NogoEdit{{Start: 0, End: 2, New: "a"}, {Start: 1, End: 3, New: "b"}}, + want: []nogoEdit{{Start: 0, End: 2, New: "a"}, {Start: 1, End: 3, New: "b"}}, wantHasOverlap: true, }, } @@ -596,56 +489,195 @@ func TestUniqueSortedEdits(t *testing.T) { } } + func TestFlatten(t *testing.T) { tests := []struct { - name string - change NogoChange - want map[string][]NogoEdit + name string + change nogoChange + expected fileToEdits + expectedErr string }{ { - name: "multiple analyzers with non-overlapping edits", - change: NogoChange{ - FileToEdits: map[string]NogoFileEdits{ - "file1.go": { - AnalyzerToEdits: map[string][]NogoEdit{ - "analyzer1": {{Start: 0, End: 1, New: "a"}}, - "analyzer2": {{Start: 2, End: 3, New: "b"}}, - }, + name: "no conflicts", + change: nogoChange{ + "file1.go": analyzerToEdits{ + "analyzer1": { + {Start: 0, End: 5, New: "hello"}, + }, + "analyzer2": { + {Start: 6, End: 10, New: "world"}, }, }, }, - want: map[string][]NogoEdit{ + expected: fileToEdits{ "file1.go": { - {Start: 0, End: 1, New: "a"}, - {Start: 2, End: 3, New: "b"}, + {Start: 0, End: 5, New: "hello"}, + {Start: 6, End: 10, New: "world"}, }, }, + expectedErr: "", }, { - name: "multiple analyzers with overlapping edits", - change: NogoChange{ - FileToEdits: map[string]NogoFileEdits{ - "file1.go": { - AnalyzerToEdits: map[string][]NogoEdit{ - 
"analyzer1": {{Start: 0, End: 2, New: "a"}}, - "analyzer2": {{Start: 1, End: 3, New: "b"}}, - }, + name: "conflicting edits", + change: nogoChange{ + "file1.go": analyzerToEdits{ + "analyzer1": { + {Start: 0, End: 5, New: "hello"}, + }, + "analyzer2": { + {Start: 3, End: 8, New: "world"}, }, }, }, - want: map[string][]NogoEdit{ + expected: fileToEdits{ "file1.go": { - {Start: 0, End: 2, New: "a"}, + {Start: 0, End: 5, New: "hello"}, }, }, + expectedErr: `some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file: +suggested fixes from analyzer "analyzer2" on file "file1.go" are skipped because they conflict with other analyzers`, + }, + { + name: "multiple conflicts across multiple files", + change: nogoChange{ + "file1.go": analyzerToEdits{ + "analyzer1": { + {Start: 0, End: 5, New: "hello"}, + }, + "analyzer2": { + {Start: 4, End: 10, New: "world"}, + }, + }, + "file2.go": analyzerToEdits{ + "analyzer3": { + {Start: 0, End: 3, New: "foo"}, + }, + "analyzer4": { + {Start: 2, End: 5, New: "bar"}, + }, + }, + }, + expected: fileToEdits{ + "file1.go": { + {Start: 0, End: 5, New: "hello"}, + }, + "file2.go": { + {Start: 0, End: 3, New: "foo"}, + }, + }, + expectedErr: `some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file: +suggested fixes from analyzer "analyzer2" on file "file1.go" are skipped because they conflict with other analyzers +suggested fixes from analyzer "analyzer4" on file "file2.go" are skipped because they conflict with other analyzers`, + }, + { + name: "no edits", + change: nogoChange{ + "file1.go": analyzerToEdits{ + "analyzer1": {}, + }, + }, + expected: fileToEdits{"file1.go": nil}, + expectedErr: "", + }, + { + name: "all conflicts", + change: nogoChange{ + "file1.go": analyzerToEdits{ + "analyzer1": { + {Start: 0, End: 5, New: "hello"}, + }, + "analyzer2": { + {Start: 1, End: 4, New: "world"}, + }, + }, + }, + expected: fileToEdits{ + "file1.go": 
{ + {Start: 0, End: 5, New: "hello"}, + }, + }, + expectedErr: `some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file: +suggested fixes from analyzer "analyzer2" on file "file1.go" are skipped because they conflict with other analyzers`, + }, + { + name: "no overlapping across different files", + change: nogoChange{ + "file1.go": analyzerToEdits{ + "analyzer1": { + {Start: 0, End: 5, New: "hello"}, + }, + "analyzer2": { + {Start: 10, End: 15, New: "world"}, + }, + }, + "file2.go": analyzerToEdits{ + "analyzer3": { + {Start: 0, End: 3, New: "foo"}, + }, + "analyzer4": { + {Start: 5, End: 8, New: "bar"}, + }, + }, + }, + expected: fileToEdits{ + "file1.go": { + {Start: 0, End: 5, New: "hello"}, + {Start: 10, End: 15, New: "world"}, + }, + "file2.go": { + {Start: 0, End: 3, New: "foo"}, + {Start: 5, End: 8, New: "bar"}, + }, + }, + expectedErr: "", + }, + { + name: "conflict in one file multiple analyzers", + change: nogoChange{ + "file1.go": analyzerToEdits{ + "analyzer1": { + {Start: 0, End: 5, New: "hello"}, + }, + "analyzer2": { + {Start: 5, End: 10, New: "world"}, + }, + "analyzer3": { + {Start: 3, End: 7, New: "foo"}, + }, + }, + }, + expected: fileToEdits{ + "file1.go": { + {Start: 0, End: 5, New: "hello"}, + {Start: 5, End: 10, New: "world"}, + }, + }, + expectedErr: `some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file: +suggested fixes from analyzer "analyzer3" on file "file1.go" are skipped because they conflict with other analyzers`, }, } for _, tt := range tests { + tt := tt // capture range variable t.Run(tt.name, func(t *testing.T) { - got := flatten(tt.change) - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("flatten() = %v, want %v", got, tt.want) + result, err := flatten(tt.change) + + // Check for expected errors + if tt.expectedErr == "" && err != nil { + t.Fatalf("expected no error, got: %v", err) + } + if tt.expectedErr != "" { + if err == 
nil { + t.Fatalf("expected error:\n%v\nbut got none", tt.expectedErr) + } + if err.Error() != tt.expectedErr { + t.Fatalf("expected error:\n%v\ngot:\n%v", tt.expectedErr, err.Error()) + } + } + + // Check for expected edits + if !reflect.DeepEqual(result, tt.expected) { + t.Fatalf("expected edits:\n%+v\ngot:\n%+v", tt.expected, result) } }) } @@ -674,16 +706,16 @@ func TestToCombinedPatch(t *testing.T) { defer deleteFile("file2.go") tests := []struct { - name string - fileToEdits map[string][]NogoEdit - expected string - expectErr bool + name string + fte fileToEdits + expected string + expectErr bool }{ { name: "valid patch for multiple files", - fileToEdits: map[string][]NogoEdit{ + fte: fileToEdits{ "file1.go": {{Start: 27, End: 27, New: "\nHello, world!\n"}}, // Add to function body - "file2.go": {{Start: 24, End: 24, New: "var y = 20\n"}}, // Add a new variable + "file2.go": {{Start: 24, End: 24, New: "var y = 20\n"}}, // Add a new variable }, expected: `--- a/file1.go +++ b/file1.go @@ -705,7 +737,7 @@ func TestToCombinedPatch(t *testing.T) { }, { name: "file not found", - fileToEdits: map[string][]NogoEdit{ + fte: fileToEdits{ "nonexistent.go": {{Start: 0, End: 0, New: "new content"}}, }, expected: "", @@ -713,7 +745,7 @@ func TestToCombinedPatch(t *testing.T) { }, { name: "no edits", - fileToEdits: map[string][]NogoEdit{}, + fte: fileToEdits{}, expected: "", expectErr: false, }, @@ -721,7 +753,7 @@ func TestToCombinedPatch(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - combinedPatch, err := toCombinedPatch(tt.fileToEdits) + combinedPatch, err := toCombinedPatch(tt.fte) // Verify error expectation if (err != nil) != tt.expectErr { diff --git a/go/tools/builders/nogo_main.go b/go/tools/builders/nogo_main.go index 4e28d0a6ba..681b387710 100644 --- a/go/tools/builders/nogo_main.go +++ b/go/tools/builders/nogo_main.go @@ -554,15 +554,19 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) // 
Otherwise, bazel will complain "not all outputs were created or valid" change, err := newChangeFromDiagnostics(diagnostics, pkg.fset) if err != nil { - errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in converting diagnostics to change: %v", err)) + errs = append(errs, err) } - combinedPatch, err := toCombinedPatch(flatten(*change)) + editsPerFile, err := flatten(change) if err != nil { - errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in generating the patches: %v", err)) + errs = append(errs, err) + } + combinedPatch, err := toCombinedPatch(editsPerFile) + if err != nil { + errs = append(errs, err) } err = os.WriteFile(nogoFixPath, []byte(combinedPatch), 0644) if err != nil { - errs = append(errs, fmt.Errorf("errors in dumping nogo fix, specifically in saving the file %s: %v", nogoFixPath, err)) + errs = append(errs, fmt.Errorf("errors in saving the patch to the file %s: %v", nogoFixPath, err)) } } From 8f837f7530c36f040cf4d281c3f951744b1cf70e Mon Sep 17 00:00:00 2001 From: Zhongpeng Lin Date: Sat, 21 Dec 2024 05:01:41 +0000 Subject: [PATCH 09/12] Applying SugsestedFixes atomically stylish changes more stylish changes fixing test on Windows addressing comments fixing the build --- go/private/actions/archive.bzl | 1 - go/private/actions/compilepkg.bzl | 12 +- go/private/rules/test.bzl | 2 - go/tools/builders/BUILD.bazel | 8 +- go/tools/builders/nogo.go | 4 +- go/tools/builders/nogo_change.go | 332 ----------- go/tools/builders/nogo_change_test.go | 818 -------------------------- go/tools/builders/nogo_fix.go | 238 ++++++++ go/tools/builders/nogo_fix_test.go | 333 +++++++++++ go/tools/builders/nogo_main.go | 113 ++-- go/tools/builders/nogo_validation.go | 18 +- 11 files changed, 652 insertions(+), 1227 deletions(-) delete mode 100644 go/tools/builders/nogo_change.go delete mode 100644 go/tools/builders/nogo_change_test.go create mode 100644 go/tools/builders/nogo_fix.go create mode 100644 
go/tools/builders/nogo_fix_test.go diff --git a/go/private/actions/archive.bzl b/go/private/actions/archive.bzl index 45bf1eb4f8..f8d9ce156d 100644 --- a/go/private/actions/archive.bzl +++ b/go/private/actions/archive.bzl @@ -64,7 +64,6 @@ def emit_archive(go, source = None, _recompile_suffix = "", recompile_internal_d out_facts = go.declare_file(go, name = source.name, ext = pre_ext + ".facts") out_nogo_log = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.log") out_nogo_validation = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo") - out_nogo_fix = go.declare_file(go, name = source.name, ext = pre_ext + ".nogo.patch") else: out_facts = None diff --git a/go/private/actions/compilepkg.bzl b/go/private/actions/compilepkg.bzl index 80d672f29d..7e2fce6104 100644 --- a/go/private/actions/compilepkg.bzl +++ b/go/private/actions/compilepkg.bzl @@ -223,7 +223,7 @@ def emit_compilepkg( out_facts = out_facts, out_log = out_nogo_log, out_validation = out_nogo_validation, - out_nogo_fix = out_nogo_fix, + out_fix = out_nogo_fix, nogo = nogo, ) @@ -237,7 +237,7 @@ def _run_nogo( out_facts, out_log, out_validation, - out_nogo_fix, + out_fix, nogo): """Runs nogo on Go source files, including those generated by cgo.""" sdk = go.sdk @@ -246,7 +246,7 @@ def _run_nogo( [archive.data.facts_file for archive in archives if archive.data.facts_file] + [archive.data.export_file for archive in archives]) inputs_transitive = [sdk.tools, sdk.headers, go.stdlib.libs] - outputs = [out_facts, out_log, out_nogo_fix] + outputs = [out_facts, out_log, out_fix] nogo_args = go.tool_args(go) if cgo_go_srcs: @@ -256,7 +256,7 @@ def _run_nogo( nogo_args.add_all(archives, before_each = "-facts", map_each = _facts) nogo_args.add("-out_facts", out_facts) nogo_args.add("-out_log", out_log) - nogo_args.add("-out_fix", out_nogo_fix) + nogo_args.add("-out_fix", out_fix) nogo_args.add("-nogo", nogo) # This action runs nogo and produces the facts files for downstream nogo actions. 
@@ -285,10 +285,10 @@ def _run_nogo( validation_args.add("nogovalidation") validation_args.add(out_validation) validation_args.add(out_log) - validation_args.add(out_nogo_fix) + validation_args.add(out_fix) go.actions.run( - inputs = [out_log, out_nogo_fix], + inputs = [out_log, out_fix], outputs = [out_validation], mnemonic = "ValidateNogo", executable = go.toolchain._builder, diff --git a/go/private/rules/test.bzl b/go/private/rules/test.bzl index 55c207cf23..066c689279 100644 --- a/go/private/rules/test.bzl +++ b/go/private/rules/test.bzl @@ -103,8 +103,6 @@ def _go_test_impl(ctx): if external_archive.data._validation_output: validation_outputs.append(external_archive.data._validation_output) if external_archive.data._nogo_fix_output: - # internal vs external archive refers to the same package vs separate package. - # we include the nogo fixes for transitive dependency too. nogo_fix_outputs.append(external_archive.data._nogo_fix_output) # now generate the main function diff --git a/go/tools/builders/BUILD.bazel b/go/tools/builders/BUILD.bazel index 28a97b5ec1..42b26a2446 100644 --- a/go/tools/builders/BUILD.bazel +++ b/go/tools/builders/BUILD.bazel @@ -32,11 +32,11 @@ go_test( ) go_test( - name = "nogo_change_test", + name = "nogo_fix_test", size = "small", srcs = [ - "nogo_change.go", - "nogo_change_test.go", + "nogo_fix.go", + "nogo_fix_test.go", ], deps = [ "@com_github_pmezard_go_difflib//difflib:go_default_library", @@ -120,7 +120,7 @@ go_source( "constants.go", "env.go", "flags.go", - "nogo_change.go", + "nogo_fix.go", "nogo_main.go", "nogo_typeparams_go117.go", "nogo_typeparams_go118.go", diff --git a/go/tools/builders/nogo.go b/go/tools/builders/nogo.go index b33ff975b1..275e57f328 100644 --- a/go/tools/builders/nogo.go +++ b/go/tools/builders/nogo.go @@ -87,7 +87,7 @@ func nogo(args []string) error { return runNogo(workDir, nogoPath, goSrcs, ignoreSrcs, facts, importPath, importcfgPath, outFactsPath, outLogPath, outFixPath) } -func runNogo(workDir 
string, nogoPath string, srcs, ignores []string, facts []archive, packagePath, importcfgPath, outFactsPath string, outLogPath string, outFixPath string) error { +func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []archive, packagePath, importcfgPath, outFactsPath, outLogPath, outFixPath string) error { if len(srcs) == 0 { // emit_compilepkg expects a nogo facts file, even if it's empty. // We also need to write the validation output log. @@ -107,7 +107,7 @@ func runNogo(workDir string, nogoPath string, srcs, ignores []string, facts []ar } args := []string{nogoPath} args = append(args, "-p", packagePath) - args = append(args, "-fixpath", outFixPath) + args = append(args, "-fix", outFixPath) args = append(args, "-importcfg", importcfgPath) for _, fact := range facts { args = append(args, "-fact", fmt.Sprintf("%s=%s", fact.importPath, fact.file)) diff --git a/go/tools/builders/nogo_change.go b/go/tools/builders/nogo_change.go deleted file mode 100644 index 9595d2ed64..0000000000 --- a/go/tools/builders/nogo_change.go +++ /dev/null @@ -1,332 +0,0 @@ -package main - -import ( - "bytes" - "fmt" - "go/token" - "os" - "path/filepath" - "sort" - "strings" - - "github.com/pmezard/go-difflib/difflib" - "golang.org/x/tools/go/analysis" -) - -// diagnosticEntry represents a diagnostic entry with the corresponding analyzer. -type diagnosticEntry struct { - analysis.Diagnostic - *analysis.Analyzer -} - -// A nogoEdit describes the replacement of a portion of a text file. -type nogoEdit struct { - New string // the replacement - Start int // starting byte offset of the region to replace - End int // (exclusive) ending byte offset of the region to replace -} - -// analyzerToEdits represents the mapping of analyzers to their edits for a specific file. -type analyzerToEdits map[string][]nogoEdit // Analyzer as the key, edits as the value - -// nogoChange represents a collection of file edits. 
-// It is a map with file paths as keys and analyzerToEdits as values. -type nogoChange map[string]analyzerToEdits - -// newChange creates a new nogoChange object. -func newChange() nogoChange { - return make(nogoChange) -} - -func (e nogoEdit) String() string { - return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New) -} - -// sortEdits orders a slice of nogoEdits by (start, end) offset. -// This ordering puts insertions (end = start) before deletions -// (end > start) at the same point, but uses a stable sort to preserve -// the order of multiple insertions at the same point. -func sortEdits(edits []nogoEdit) { - sort.Stable(byStartEnd(edits)) -} - -type byStartEnd []nogoEdit - -func (a byStartEnd) Len() int { return len(a) } -func (a byStartEnd) Less(i, j int) bool { - if a[i].Start != a[j].Start { - return a[i].Start < a[j].Start - } - return a[i].End < a[j].End -} -func (a byStartEnd) Swap(i, j int) { a[i], a[j] = a[j], a[i] } - - -// applyEditsBytes applies a sequence of nogoEdits to the src byte slice and returns the result. -// Edits are applied in order of start offset; edits with the same start offset are applied in the order they were provided. -// applyEditsBytes returns an error if any edit is out of bounds, or if any pair of edits is overlapping. -func applyEditsBytes(src []byte, edits []nogoEdit) ([]byte, error) { - // assumption: at this point, edits should be unique, sorted and non-overlapping. - // this is guaranteed in nogo_main.go by invoking flatten() earlier. - size := len(src) - // performance only: this computes the size for preallocation to avoid the slice resizing below. - for _, edit := range edits { - size += len(edit.New) + edit.Start - edit.End - } - - // Apply the edits. - out := make([]byte, 0, size) - lastEnd := 0 - for _, edit := range edits { - out = append(out, src[lastEnd:edit.Start]...) - out = append(out, edit.New...) - lastEnd = edit.End - } - out = append(out, src[lastEnd:]...) 
- - return out, nil -} - -// newChangeFromDiagnostics builds a nogoChange from a set of diagnostics. -// Unlike Diagnostic, nogoChange is independent of the FileSet given it uses perf-file offsets instead of token.Pos. -// This allows nogoChange to be used in contexts where the FileSet is not available, e.g., it remains applicable after it is saved to disk and loaded back. -// See https://github.com/golang/tools/blob/master/go/analysis/diagnostic.go for details. -func newChangeFromDiagnostics(entries []diagnosticEntry, fileSet *token.FileSet) (nogoChange, error) { - c := newChange() - - cwd, err := os.Getwd() - if err != nil { - return c, fmt.Errorf("error getting current working directory: %v", err) - } - - var allErrors []error - - for _, entry := range entries { - analyzer := entry.Analyzer.Name - for _, sf := range entry.Diagnostic.SuggestedFixes { - for _, edit := range sf.TextEdits { - // Define start and end positions - start, end := edit.Pos, edit.End - if !end.IsValid() { - end = start - } - - file := fileSet.File(start) - if file == nil { - allErrors = append(allErrors, fmt.Errorf( - "invalid fix from analyzer %q: missing file info for start=%v", - analyzer, start, - )) - continue - } - // at this point, given file != nil, it is guaranteed start >= token.Pos(file.Base()) - - fileName := file.Name() - fileRelativePath, err := filepath.Rel(cwd, fileName) - if err != nil { - fileRelativePath = fileName // fallback logic - } - - // Validate start and end positions - if start > end { - allErrors = append(allErrors, fmt.Errorf( - "invalid fix from analyzer %q for file %q: start=%v > end=%v", - analyzer, fileRelativePath, start, end, - )) - continue - } - if fileEOF := token.Pos(file.Base() + file.Size()); end > fileEOF { - allErrors = append(allErrors, fmt.Errorf( - "invalid fix from analyzer %q for file %q: end=%v is past the file's EOF=%v", - analyzer, fileRelativePath, end, fileEOF, - )) - continue - } - // at this point, it is guaranteed that 
file.Pos(file.Base()) <= start <= end <= fileEOF. - - // Create the edit - nEdit := nogoEdit{Start: file.Offset(start), End: file.Offset(end), New: string(edit.NewText)} - addEdit(c, fileRelativePath, analyzer, nEdit) - } - } - } - - if len(allErrors) > 0 { - var errMsg bytes.Buffer - for _, e := range allErrors { - errMsg.WriteString("\n") - errMsg.WriteString(e.Error()) - } - return c, fmt.Errorf("some suggested fixes are invalid:%s", errMsg.String()) - } - - return c, nil -} - - -// addEdit adds an edit to the nogoChange, organizing by file and analyzer. -func addEdit(c nogoChange, file string, analyzer string, edit nogoEdit) { - fileEdits, exists := c[file] - if !exists { - fileEdits = make(analyzerToEdits) - c[file] = fileEdits - } - fileEdits[analyzer] = append(fileEdits[analyzer], edit) -} - -// uniqueSortedEdits returns a list of edits that is sorted and -// contains no duplicate edits. Returns whether there is overlap. -// Deduplication helps in the cases where two analyzers produce duplicate edits. -func uniqueSortedEdits(edits []nogoEdit) ([]nogoEdit, bool) { - hasOverlap := false - if len(edits) == 0 { - return edits, hasOverlap - } - equivalent := func(x, y nogoEdit) bool { - return x.Start == y.Start && x.End == y.End && x.New == y.New - } - sortEdits(edits) - unique := []nogoEdit{edits[0]} - for i := 1; i < len(edits); i++ { - prev, cur := edits[i-1], edits[i] - if equivalent(prev, cur) { - // equivalent ones are safely skipped - continue - } - - unique = append(unique, cur) - if prev.End > cur.Start { - // hasOverlap = true means at least one overlap was detected. - hasOverlap = true - } - } - return unique, hasOverlap -} - -type fileToEdits map[string][]nogoEdit // File path as the key, list of nogoEdit as the value - -// flatten processes a nogoChange and returns a fileToEdits. -// It also returns an error if any suggested fixes are skipped due to conflicts. 
-func flatten(change nogoChange) (fileToEdits, error) { - result := make(fileToEdits) - var errs []error - - for file, fileEdits := range change { - // Get a sorted list of analyzers for deterministic processing order - analyzers := make([]string, 0, len(fileEdits)) - for analyzer := range fileEdits { - analyzers = append(analyzers, analyzer) - } - sort.Strings(analyzers) - - var mergedEdits []nogoEdit - for _, analyzer := range analyzers { - edits := fileEdits[analyzer] - if len(edits) == 0 { - continue - } - - // Merge edits into the current list, checking for overlaps - candidateEdits := append(mergedEdits, edits...) - candidateEdits, hasOverlap := uniqueSortedEdits(candidateEdits) - if hasOverlap { - // Skip edits from this analyzer if merging them would cause overlaps. - // Collect an error message for the user. - errMsg := fmt.Errorf( - "suggested fixes from analyzer %q on file %q are skipped because they conflict with other analyzers", - analyzer, file, - ) - errs = append(errs, errMsg) - continue - } - - // Update the merged edits - // At this point, it is guaranteed the edits associated with the file are unique, sorted, and non-overlapping. 
- mergedEdits = candidateEdits - } - - // Store the final merged edits for the file - result[file] = mergedEdits - } - - if len(errs) > 0 { - var errMsg strings.Builder - errMsg.WriteString("some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file:") - for _, err := range errs { - errMsg.WriteString("\n") - errMsg.WriteString(err.Error()) - } - return result, fmt.Errorf(errMsg.String()) - } - - return result, nil -} - -func toCombinedPatch(fte fileToEdits) (string, error) { - var combinedPatch strings.Builder - - filePaths := make([]string, 0, len(fte)) - for filePath := range fte { - filePaths = append(filePaths, filePath) - } - sort.Strings(filePaths) // Sort file paths alphabetically - - // Iterate over sorted file paths - for _, filePath := range filePaths { - edits := fte[filePath] - if len(edits) == 0 { - continue - } - - contents, err := os.ReadFile(filePath) - if err != nil { - return "", fmt.Errorf("failed to read file %s: %v", filePath, err) - } - - // edits are guaranteed to be unique, sorted and non-overlapping - // see flatten() that is called before this function. 
- out, err := applyEditsBytes(contents, edits) - if err != nil { - return "", fmt.Errorf("failed to apply edits for file %s: %v", filePath, err) - } - - diff := difflib.UnifiedDiff{ - A: trimWhitespaceHeadAndTail(difflib.SplitLines(string(contents))), - B: trimWhitespaceHeadAndTail(difflib.SplitLines(string(out))), - FromFile: fmt.Sprintf("a/%s", filePath), - ToFile: fmt.Sprintf("b/%s", filePath), - Context: 3, - } - - patch, err := difflib.GetUnifiedDiffString(diff) - if err != nil { - return "", fmt.Errorf("failed to generate patch for file %s: %v", filePath, err) - } - - combinedPatch.WriteString(patch) - combinedPatch.WriteString("\n") // Ensure separation between file patches - } - - // Remove trailing newline - result := combinedPatch.String() - if len(result) > 0 && result[len(result)-1] == '\n' { - result = result[:len(result)-1] - } - - return result, nil -} - -func trimWhitespaceHeadAndTail(lines []string) []string { - // Trim left - for len(lines) > 0 && strings.TrimSpace(lines[0]) == "" { - lines = lines[1:] - } - - // Trim right - for len(lines) > 0 && strings.TrimSpace(lines[len(lines)-1]) == "" { - lines = lines[:len(lines)-1] - } - - return lines -} - diff --git a/go/tools/builders/nogo_change_test.go b/go/tools/builders/nogo_change_test.go deleted file mode 100644 index 920ce1ce95..0000000000 --- a/go/tools/builders/nogo_change_test.go +++ /dev/null @@ -1,818 +0,0 @@ -package main - -import ( - "go/token" - "os" - "path/filepath" - "reflect" - "testing" - - "golang.org/x/tools/go/analysis" -) - -const ( - FileA = "from" - FileB = "to" - UnifiedPrefix = "--- " + FileA + "\n+++ " + FileB + "\n" -) - -// Mock helper to create a mock file in the token.FileSet -func mockFileSet(fileName string, size int) *token.FileSet { - fset := token.NewFileSet() - f := fset.AddFile(fileName, fset.Base(), size) - for i := 0; i < size; i++ { - f.AddLine(i) - } - return fset -} - -// Mock analyzers for the test -var ( - analyzer1 = &analysis.Analyzer{Name: "analyzer1"} 
- analyzer2 = &analysis.Analyzer{Name: "analyzer2"} -) - -// TestAddEdit_MultipleAnalyzers tests addEdit with multiple analyzers and files using reflect.DeepEqual -func TestAddEdit_MultipleAnalyzers(t *testing.T) { - change := newChange() - file1 := "file1.go" - - edit1a := nogoEdit{Start: 10, End: 20, New: "code1 from analyzer1"} - edit1b := nogoEdit{Start: 30, End: 40, New: "code2 from analyzer1"} - edit2a := nogoEdit{Start: 50, End: 60, New: "code1 from analyzer2"} - edit2b := nogoEdit{Start: 70, End: 80, New: "code2 from analyzer2"} - - expected := nogoChange{ - file1: analyzerToEdits{ - analyzer1.Name: {edit1a, edit1b}, - analyzer2.Name: {edit2a, edit2b}, - }, - } - - addEdit(change, file1, analyzer1.Name, edit1a) - addEdit(change, file1, analyzer1.Name, edit1b) - addEdit(change, file1, analyzer2.Name, edit2a) - addEdit(change, file1, analyzer2.Name, edit2b) - - if !reflect.DeepEqual(change, expected) { - t.Fatalf("nogoChange did not match the expected result.\nGot: %+v\nExpected: %+v", change, expected) - } -} - -// Test case for valid, successful cases -func TestNewChangeFromDiagnostics_SuccessCases(t *testing.T) { - cwd, _ := os.Getwd() - file1path := filepath.Join(cwd, "file1.go") - - tests := []struct { - name string - fileSet *token.FileSet - diagnosticEntries []diagnosticEntry - expectedEdits nogoChange - }{ - { - name: "ValidEdits", - fileSet: mockFileSet(file1path, 100), - diagnosticEntries: []diagnosticEntry{ - { - Analyzer: analyzer1, - Diagnostic: analysis.Diagnostic{ - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - {Pos: token.Pos(5), End: token.Pos(10), NewText: []byte("new_text")}, - }, - }, - }, - }, - }, - }, - expectedEdits: nogoChange{ - "file1.go": analyzerToEdits{ - "analyzer1": { - {New: "new_text", Start: 4, End: 9}, // 0-based offset - }, - }, - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - change, err := newChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet) - 
if err != nil { - t.Fatalf("expected no error, got: %v", err) - } - if !reflect.DeepEqual(change, tt.expectedEdits) { - t.Fatalf("expected edits: %+v, got: %+v", tt.expectedEdits, change) - } - }) - } -} - -// Test case for error cases -func TestNewChangeFromDiagnostics_ErrorCases(t *testing.T) { - cwd, _ := os.Getwd() - file1path := filepath.Join(cwd, "file1.go") - - tests := []struct { - name string - fileSet *token.FileSet - diagnosticEntries []diagnosticEntry - expectedErr string - }{ - { - name: "InvalidPosEnd", - fileSet: mockFileSet(file1path, 100), - diagnosticEntries: []diagnosticEntry{ - { - Analyzer: analyzer1, - Diagnostic: analysis.Diagnostic{ - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - {Pos: token.Pos(15), End: token.Pos(10), NewText: []byte("new_text")}, - }, - }, - }, - }, - }, - }, - expectedErr: "some suggested fixes are invalid:\ninvalid fix from analyzer \"analyzer1\" for file \"file1.go\": start=15 > end=10", - }, - { - name: "EndPastEOF", - fileSet: mockFileSet(file1path, 100), - diagnosticEntries: []diagnosticEntry{ - { - Analyzer: analyzer2, - Diagnostic: analysis.Diagnostic{ - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - {Pos: token.Pos(95), End: token.Pos(110), NewText: []byte("new_text")}, - }, - }, - }, - }, - }, - }, - expectedErr: "some suggested fixes are invalid:\ninvalid fix from analyzer \"analyzer2\" for file \"file1.go\": end=110 is past the file's EOF=101", - }, - { - name: "MissingFileInfo", - fileSet: mockFileSet(file1path, 100), - diagnosticEntries: []diagnosticEntry{ - { - Analyzer: analyzer1, - Diagnostic: analysis.Diagnostic{ - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - {Pos: token.Pos(150), End: token.Pos(160), NewText: []byte("new_text")}, - }, - }, - }, - }, - }, - }, - expectedErr: "some suggested fixes are invalid:\ninvalid fix from analyzer \"analyzer1\": missing file info for start=150", - }, - { - name: 
"MultipleErrors", - fileSet: mockFileSet(file1path, 100), - diagnosticEntries: []diagnosticEntry{ - { - Analyzer: analyzer1, - Diagnostic: analysis.Diagnostic{ - SuggestedFixes: []analysis.SuggestedFix{ - { - TextEdits: []analysis.TextEdit{ - {Pos: token.Pos(15), End: token.Pos(10), NewText: []byte("new_text")}, // InvalidPosEnd - {Pos: token.Pos(95), End: token.Pos(110), NewText: []byte("new_text")}, // EndPastEOF - }, - }, - }, - }, - }, - }, - expectedErr: `some suggested fixes are invalid: -invalid fix from analyzer "analyzer1" for file "file1.go": start=15 > end=10 -invalid fix from analyzer "analyzer1" for file "file1.go": end=110 is past the file's EOF=101`, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := newChangeFromDiagnostics(tt.diagnosticEntries, tt.fileSet) - if err == nil { - t.Fatalf("expected an error, got none") - } - - if err.Error() != tt.expectedErr { - t.Fatalf("expected error:\n%v\ngot:\n%v", tt.expectedErr, err.Error()) - } - }) - } -} - - -func TestSortEdits(t *testing.T) { - tests := []struct { - name string - edits []nogoEdit - sorted []nogoEdit - }{ - { - name: "already sorted", - edits: []nogoEdit{ - {New: "a", Start: 0, End: 1}, - {New: "b", Start: 1, End: 2}, - {New: "c", Start: 2, End: 3}, - }, - sorted: []nogoEdit{ - {New: "a", Start: 0, End: 1}, - {New: "b", Start: 1, End: 2}, - {New: "c", Start: 2, End: 3}, - }, - }, - { - name: "unsorted", - edits: []nogoEdit{ - {New: "b", Start: 1, End: 2}, - {New: "a", Start: 0, End: 1}, - {New: "c", Start: 2, End: 3}, - }, - sorted: []nogoEdit{ - {New: "a", Start: 0, End: 1}, - {New: "b", Start: 1, End: 2}, - {New: "c", Start: 2, End: 3}, - }, - }, - { - name: "insert before delete at same position", - edits: []nogoEdit{ - {New: "", Start: 0, End: 1}, // delete - {New: "insert", Start: 0, End: 0}, // insert - }, - sorted: []nogoEdit{ - {New: "insert", Start: 0, End: 0}, // insert comes before delete - {New: "", Start: 0, End: 1}, - }, - }, - } - - for 
_, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - sortEdits(tt.edits) - if !reflect.DeepEqual(tt.edits, tt.sorted) { - t.Fatalf("expected %v, got %v", tt.sorted, tt.edits) - } - }) - } -} - - -func TestApplyEditsBytes(t *testing.T) { - tests := []struct { - name string - input string - edits []nogoEdit - expected string - }{ - { - name: "empty", - input: "", - edits: []nogoEdit{}, - expected: "", - }, - { - name: "no_diff", - input: "gargantuan\n", - edits: []nogoEdit{}, - expected: "gargantuan\n", - }, - { - name: "replace_all", - input: "fruit\n", - edits: []nogoEdit{ - {Start: 0, End: 5, New: "cheese"}, - }, - expected: "cheese\n", - }, - { - name: "insert_rune", - input: "gord\n", - edits: []nogoEdit{ - {Start: 2, End: 2, New: "u"}, - }, - expected: "gourd\n", - }, - { - name: "delete_rune", - input: "groat\n", - edits: []nogoEdit{ - {Start: 1, End: 2, New: ""}, - }, - expected: "goat\n", - }, - { - name: "replace_rune", - input: "loud\n", - edits: []nogoEdit{ - {Start: 2, End: 3, New: "r"}, - }, - expected: "lord\n", - }, - { - name: "replace_partials", - input: "blanket\n", - edits: []nogoEdit{ - {Start: 1, End: 3, New: "u"}, - {Start: 6, End: 7, New: "r"}, - }, - expected: "bunker\n", - }, - { - name: "insert_line", - input: "1: one\n3: three\n", - edits: []nogoEdit{ - {Start: 7, End: 7, New: "2: two\n"}, - }, - expected: "1: one\n2: two\n3: three\n", - }, - { - name: "replace_no_newline", - input: "A", - edits: []nogoEdit{ - {Start: 0, End: 1, New: "B"}, - }, - expected: "B", - }, - { - name: "delete_empty", - input: "meow", - edits: []nogoEdit{ - {Start: 0, End: 4, New: ""}, - }, - expected: "", - }, - { - name: "append_empty", - input: "", - edits: []nogoEdit{ - {Start: 0, End: 0, New: "AB\nC"}, - }, - expected: "AB\nC", - }, - { - name: "add_end", - input: "A", - edits: []nogoEdit{ - {Start: 1, End: 1, New: "B"}, - }, - expected: "AB", - }, - { - name: "add_newline", - input: "A", - edits: []nogoEdit{ - {Start: 1, End: 1, New: "\n"}, - }, - 
expected: "A\n", - }, - { - name: "delete_front", - input: "A\nB\nC\nA\nB\nB\nA\n", - edits: []nogoEdit{ - {Start: 0, End: 4, New: ""}, - {Start: 6, End: 6, New: "B\n"}, - {Start: 10, End: 12, New: ""}, - {Start: 14, End: 14, New: "C\n"}, - }, - expected: "C\nB\nA\nB\nA\nC\n", - }, - { - name: "replace_last_line", - input: "A\nB\n", - edits: []nogoEdit{ - {Start: 2, End: 3, New: "C\n"}, - }, - expected: "A\nC\n\n", - }, - { - name: "multiple_replace", - input: "A\nB\nC\nD\nE\nF\nG\n", - edits: []nogoEdit{ - {Start: 2, End: 8, New: "H\nI\nJ\n"}, - {Start: 12, End: 14, New: "K\n"}, - }, - expected: "A\nH\nI\nJ\nE\nF\nK\n", - }, - { - name: "extra_newline", - input: "\nA\n", - edits: []nogoEdit{ - {Start: 0, End: 1, New: ""}, - }, - expected: "A\n", - }, - { - name: "unified_lines", - input: "aaa\nccc\n", - edits: []nogoEdit{ - {Start: 3, End: 3, New: "\nbbb"}, - }, - expected: "aaa\nbbb\nccc\n", - }, - { - name: "complex_replace_with_tab", - input: `package a - -type S struct { -s fmt.Stringer -} -`, - edits: []nogoEdit{ - {Start: 27, End: 27, New: "\t"}, - }, - expected: `package a - -type S struct { - s fmt.Stringer -} -`, - }, - } - - for _, tt := range tests { - tt := tt // capture range variable - t.Run(tt.name, func(t *testing.T) { - result, err := applyEditsBytes([]byte(tt.input), tt.edits) - if err != nil { - t.Fatalf("applyEditsBytes failed: %v", err) - } - if string(result) != tt.expected { - t.Errorf("applyEditsBytes: got %q, want %q", string(result), tt.expected) - } - }) - } -} - - -// TestUniqueSortedEdits verifies deduplication and overlap detection. 
-func TestUniqueSortedEdits(t *testing.T) { - tests := []struct { - name string - edits []nogoEdit - want []nogoEdit - wantHasOverlap bool - }{ - { - name: "overlapping edits", - edits: []nogoEdit{ - {Start: 0, End: 2, New: "a"}, - {Start: 1, End: 3, New: "b"}, - }, - want: []nogoEdit{{Start: 0, End: 2, New: "a"}, {Start: 1, End: 3, New: "b"}}, - wantHasOverlap: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - got, hasOverlap := uniqueSortedEdits(tt.edits) - if !reflect.DeepEqual(got, tt.want) || hasOverlap != tt.wantHasOverlap { - t.Fatalf("expected %v, got %v", tt.want, got) - } - }) - } -} - - -func TestFlatten(t *testing.T) { - tests := []struct { - name string - change nogoChange - expected fileToEdits - expectedErr string - }{ - { - name: "no conflicts", - change: nogoChange{ - "file1.go": analyzerToEdits{ - "analyzer1": { - {Start: 0, End: 5, New: "hello"}, - }, - "analyzer2": { - {Start: 6, End: 10, New: "world"}, - }, - }, - }, - expected: fileToEdits{ - "file1.go": { - {Start: 0, End: 5, New: "hello"}, - {Start: 6, End: 10, New: "world"}, - }, - }, - expectedErr: "", - }, - { - name: "conflicting edits", - change: nogoChange{ - "file1.go": analyzerToEdits{ - "analyzer1": { - {Start: 0, End: 5, New: "hello"}, - }, - "analyzer2": { - {Start: 3, End: 8, New: "world"}, - }, - }, - }, - expected: fileToEdits{ - "file1.go": { - {Start: 0, End: 5, New: "hello"}, - }, - }, - expectedErr: `some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file: -suggested fixes from analyzer "analyzer2" on file "file1.go" are skipped because they conflict with other analyzers`, - }, - { - name: "multiple conflicts across multiple files", - change: nogoChange{ - "file1.go": analyzerToEdits{ - "analyzer1": { - {Start: 0, End: 5, New: "hello"}, - }, - "analyzer2": { - {Start: 4, End: 10, New: "world"}, - }, - }, - "file2.go": analyzerToEdits{ - "analyzer3": { - {Start: 0, End: 3, New: "foo"}, - }, 
- "analyzer4": { - {Start: 2, End: 5, New: "bar"}, - }, - }, - }, - expected: fileToEdits{ - "file1.go": { - {Start: 0, End: 5, New: "hello"}, - }, - "file2.go": { - {Start: 0, End: 3, New: "foo"}, - }, - }, - expectedErr: `some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file: -suggested fixes from analyzer "analyzer2" on file "file1.go" are skipped because they conflict with other analyzers -suggested fixes from analyzer "analyzer4" on file "file2.go" are skipped because they conflict with other analyzers`, - }, - { - name: "no edits", - change: nogoChange{ - "file1.go": analyzerToEdits{ - "analyzer1": {}, - }, - }, - expected: fileToEdits{"file1.go": nil}, - expectedErr: "", - }, - { - name: "all conflicts", - change: nogoChange{ - "file1.go": analyzerToEdits{ - "analyzer1": { - {Start: 0, End: 5, New: "hello"}, - }, - "analyzer2": { - {Start: 1, End: 4, New: "world"}, - }, - }, - }, - expected: fileToEdits{ - "file1.go": { - {Start: 0, End: 5, New: "hello"}, - }, - }, - expectedErr: `some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file: -suggested fixes from analyzer "analyzer2" on file "file1.go" are skipped because they conflict with other analyzers`, - }, - { - name: "no overlapping across different files", - change: nogoChange{ - "file1.go": analyzerToEdits{ - "analyzer1": { - {Start: 0, End: 5, New: "hello"}, - }, - "analyzer2": { - {Start: 10, End: 15, New: "world"}, - }, - }, - "file2.go": analyzerToEdits{ - "analyzer3": { - {Start: 0, End: 3, New: "foo"}, - }, - "analyzer4": { - {Start: 5, End: 8, New: "bar"}, - }, - }, - }, - expected: fileToEdits{ - "file1.go": { - {Start: 0, End: 5, New: "hello"}, - {Start: 10, End: 15, New: "world"}, - }, - "file2.go": { - {Start: 0, End: 3, New: "foo"}, - {Start: 5, End: 8, New: "bar"}, - }, - }, - expectedErr: "", - }, - { - name: "conflict in one file multiple analyzers", - change: nogoChange{ - "file1.go": 
analyzerToEdits{ - "analyzer1": { - {Start: 0, End: 5, New: "hello"}, - }, - "analyzer2": { - {Start: 5, End: 10, New: "world"}, - }, - "analyzer3": { - {Start: 3, End: 7, New: "foo"}, - }, - }, - }, - expected: fileToEdits{ - "file1.go": { - {Start: 0, End: 5, New: "hello"}, - {Start: 5, End: 10, New: "world"}, - }, - }, - expectedErr: `some suggested fixes are skipped due to conflicts in merging fixes from different analyzers for each file: -suggested fixes from analyzer "analyzer3" on file "file1.go" are skipped because they conflict with other analyzers`, - }, - } - - for _, tt := range tests { - tt := tt // capture range variable - t.Run(tt.name, func(t *testing.T) { - result, err := flatten(tt.change) - - // Check for expected errors - if tt.expectedErr == "" && err != nil { - t.Fatalf("expected no error, got: %v", err) - } - if tt.expectedErr != "" { - if err == nil { - t.Fatalf("expected error:\n%v\nbut got none", tt.expectedErr) - } - if err.Error() != tt.expectedErr { - t.Fatalf("expected error:\n%v\ngot:\n%v", tt.expectedErr, err.Error()) - } - } - - // Check for expected edits - if !reflect.DeepEqual(result, tt.expected) { - t.Fatalf("expected edits:\n%+v\ngot:\n%+v", tt.expected, result) - } - }) - } -} - -func TestToCombinedPatch(t *testing.T) { - // Helper functions to create and delete temporary files - createTempFile := func(filename, content string) error { - return os.WriteFile(filename, []byte(content), 0644) - } - deleteFile := func(filename string) { - os.Remove(filename) - } - - // Setup: Create temporary files - err := createTempFile("file1.go", "package main\nfunc Hello() {}\n") - if err != nil { - t.Fatalf("Failed to create temporary file1.go: %v", err) - } - defer deleteFile("file1.go") - - err = createTempFile("file2.go", "package main\nvar x = 10\n") - if err != nil { - t.Fatalf("Failed to create temporary file2.go: %v", err) - } - defer deleteFile("file2.go") - - tests := []struct { - name string - fte fileToEdits - expected string - 
expectErr bool - }{ - { - name: "valid patch for multiple files", - fte: fileToEdits{ - "file1.go": {{Start: 27, End: 27, New: "\nHello, world!\n"}}, // Add to function body - "file2.go": {{Start: 24, End: 24, New: "var y = 20\n"}}, // Add a new variable - }, - expected: `--- a/file1.go -+++ b/file1.go -@@ -1,2 +1,4 @@ - package main --func Hello() {} -+func Hello() { -+Hello, world! -+} - ---- a/file2.go -+++ b/file2.go -@@ -1,2 +1,3 @@ - package main - var x = 10 -+var y = 20 -`, - expectErr: false, - }, - { - name: "file not found", - fte: fileToEdits{ - "nonexistent.go": {{Start: 0, End: 0, New: "new content"}}, - }, - expected: "", - expectErr: true, - }, - { - name: "no edits", - fte: fileToEdits{}, - expected: "", - expectErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - combinedPatch, err := toCombinedPatch(tt.fte) - - // Verify error expectation - if (err != nil) != tt.expectErr { - t.Fatalf("expected error: %v, got: %v", tt.expectErr, err) - } - - // If no error, verify the patch output - if err == nil && combinedPatch != tt.expected { - t.Errorf("expected patch:\n%v\ngot:\n%v", tt.expected, combinedPatch) - } - }) - } -} - -func TestTrimWhitespaceHeadAndTail(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - input []string - want []string - }{ - { - name: "Empty slice", - input: []string{}, - want: []string{}, - }, - { - name: "All empty strings", - input: []string{"", " ", "\t", "\n"}, - want: []string{}, - }, - { - name: "Leading and trailing empty strings", - input: []string{"", " ", "hello", "world", " ", ""}, - want: []string{"hello", "world"}, - }, - { - name: "No leading or trailing empty strings", - input: []string{"hello", "world"}, - want: []string{"hello", "world"}, - }, - { - name: "Single non-empty string", - input: []string{"hello"}, - want: []string{"hello"}, - }, - } - - for _, tt := range tests { - tt := tt - - t.Run(tt.name, func(t *testing.T) { - got := 
trimWhitespaceHeadAndTail(tt.input) - - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("trimWhitespaceHeadAndTail() = %v, want %v", got, tt.want) - } - }) - } -} - diff --git a/go/tools/builders/nogo_fix.go b/go/tools/builders/nogo_fix.go new file mode 100644 index 0000000000..17de9bd721 --- /dev/null +++ b/go/tools/builders/nogo_fix.go @@ -0,0 +1,238 @@ +package main + +import ( + "bytes" + "errors" + "fmt" + "go/token" + "io" + "os" + "path/filepath" + "sort" + + "github.com/pmezard/go-difflib/difflib" + "golang.org/x/tools/go/analysis" +) + +// diagnosticEntry represents a diagnostic entry with the corresponding analyzer. +type diagnosticEntry struct { + analysis.Diagnostic + analyzerName string +} + +// A nogoEdit describes the replacement of a portion of a text file. +type nogoEdit struct { + New string // the replacement + Start int // starting byte offset of the region to replace + End int // (exclusive) ending byte offset of the region to replace + analyzerName string +} + +type fileChange struct { + fileName string + changes []nogoEdit +} + +func (e nogoEdit) String() string { + return fmt.Sprintf("{Start:%d,End:%d,New:%q}", e.Start, e.End, e.New) +} + +func (e nogoEdit) Equals(other nogoEdit) bool { + return e.Start == other.Start && e.End == other.End && e.New == other.New +} + +// byStartEnd orders a slice of nogoEdits by (start, end) offset. +// This ordering puts insertions (end = start) before deletions +// (end > start) at the same point. We will use a stable sort to preserve +// the order of multiple insertions at the same point. +type byStartEnd []nogoEdit + +func (a byStartEnd) Len() int { return len(a) } +func (a byStartEnd) Less(i, j int) bool { + if a[i].Start != a[j].Start { + return a[i].Start < a[j].Start + } + return a[i].End < a[j].End +} +func (a byStartEnd) Swap(i, j int) { a[i], a[j] = a[j], a[i] } + + +// applyEdits applies a sequence of nogoEdits to the src byte slice and returns the result. 
+// Edits are applied in order of start offset; edits with the same start offset are applied in the order they were provided. +// The function assumes that edits are unique, sorted and non-overlapping. +// This is guaranteed by invoking validate() earlier. +func applyEdits(src []byte, edits []nogoEdit) []byte { + size := len(src) + // performance only: this computes the size for preallocation to avoid the slice resizing below. + for _, edit := range edits { + size += len(edit.New) + edit.Start - edit.End + } + + out := make([]byte, 0, size) + lastEnd := 0 + for _, edit := range edits { + out = append(out, src[lastEnd:edit.Start]...) + out = append(out, edit.New...) + lastEnd = edit.End + } + out = append(out, src[lastEnd:]...) + + return out +} + +// getFixes merges the suggested fixes from all analyzers, returns one fileChange object per file, +// while reporting conflicts as error. +func getFixes(entries []diagnosticEntry, fileSet *token.FileSet) ([]fileChange, error) { + var allErrors []error + finalChanges := make(map[string][]nogoEdit) + + for _, entry := range entries { + // According to the [doc](https://pkg.go.dev/golang.org/x/tools@v0.28.0/go/analysis#Diagnostic), + // an analyzer may suggest several alternative fixes, but only one should be applied. + // We will go over all the suggested fixes until the we find one with no conflict + // with previously selected fixes. No backtracking is used for simplicity and performance. If + // none of the suggested fixes of a diagnostic can be applied, the diagnostic entry will be skipped + // with an error message to the user. + foundApplicableFix := false + for _, sf := range entry.Diagnostic.SuggestedFixes { + candidateChanges := make(map[string][]nogoEdit) + applicable := true + for _, edit := range sf.TextEdits { + start, end := edit.Pos, edit.End + if !end.IsValid() { + end = start + } + + file := fileSet.File(start) + if file == nil { + // missing file info, most likely due to analyzer bug. 
+ applicable = false + break + } + + fix := nogoEdit{ + Start: file.Offset(start), + End: file.Offset(end), + New: string(edit.NewText), + analyzerName: entry.analyzerName, + } + candidateChanges[file.Name()] = append(candidateChanges[file.Name()], fix) + } + // validating the edits from current SuggestedFix. All edits from a SuggestedFix must be + // either accepted or discarded atomically, because a SuggestedFix may move a statement from one place + // to the other. If we only accept part of the edits, the statement may either appear twice or disappear. + for fileName, edits := range candidateChanges { + edits = append(edits, finalChanges[fileName]...) + var err error + if candidateChanges[fileName], err = validate(edits); err != nil { + applicable = false + break + } + } + if applicable { + for fileName, edits := range candidateChanges { + finalChanges[fileName] = edits + } + foundApplicableFix = true + break + } + // Move on to the next SuggestedFix of the same Diagnostic if any edit of the current SuggestedFix has issues. + } + if !foundApplicableFix { + allErrors = append(allErrors, fmt.Errorf( + "ignoring suggested fixes from analyzer %q at %s", + entry.analyzerName, fileSet.Position(entry.Pos), + )) + } + } + + var finalFileChanges []fileChange + for fileName, edits := range finalChanges { + finalFileChanges = append(finalFileChanges, fileChange{fileName: fileName, changes: edits}) + } + + if len(allErrors) == 0 { + return finalFileChanges, nil + } + + var errMsg bytes.Buffer + errMsg.WriteString("some suggested fixes are invalid or have conflicts with other fixes:") + for _, e := range allErrors { + errMsg.WriteString("\n\t") + errMsg.WriteString(e.Error()) + } + errMsg.WriteString("\nplease apply other fixes and rerun the build.") + return finalFileChanges, errors.New(errMsg.String()) +} + + +// validate whether the list of edits has overlaps or contains invalid ones. +// If there is any issue, an error is returned. 
Otherwise, the function +// returns a new list of edits that is sorted and unique. +func validate(edits []nogoEdit) ([]nogoEdit, error) { + if len(edits) == 0 { + return nil, nil + } + validatedEdits := make([]nogoEdit, len(edits)) + // avoid modifying the original slice for safety. + copy(validatedEdits, edits) + sort.Stable(byStartEnd(validatedEdits)) + tail := 0 + for i, cur := range validatedEdits { + if cur.Start > cur.End { + return nil, fmt.Errorf("invalid suggestion from %q: %s", cur.analyzerName, cur) + } + if i > 0 { + prev := validatedEdits[i-1] + if prev.Equals(cur) { + // equivalent ones are safely skipped + continue + } + + if prev.End > cur.Start { + return nil, fmt.Errorf("overlapping suggestions from %q and %q at %s and %s", + prev.analyzerName, cur.analyzerName, prev, cur) + } + } + validatedEdits[tail] = cur + tail++ + } + return validatedEdits[:tail], nil +} + + +func writePatch(patchFile io.Writer, changes []fileChange) error { + // sort the changes by file name to make sure the patch is stable. + sort.Slice(changes, func(i, j int) bool { + return changes[i].fileName < changes[j].fileName + }) + + for _, c := range changes { + if len(c.changes) == 0 { + continue + } + + contents, err := os.ReadFile(c.fileName) + if err != nil { + return fmt.Errorf("failed to read file %s: %v", c.fileName, err) + } + + // edits are guaranteed to be unique, sorted and non-overlapping + // see validate() that is called before this function. 
+ out := applyEdits(contents, c.changes) + + diff := difflib.UnifiedDiff{ + A: difflib.SplitLines(string(contents)), + B: difflib.SplitLines(string(out)), + FromFile: filepath.Join("a", c.fileName), + ToFile: filepath.Join("b", c.fileName), + Context: 3, + } + + if err := difflib.WriteUnifiedDiff(patchFile, diff); err != nil { + return fmt.Errorf("creating patch for %q: %w", c.fileName, err) + } + } + + return nil +} diff --git a/go/tools/builders/nogo_fix_test.go b/go/tools/builders/nogo_fix_test.go new file mode 100644 index 0000000000..749e7d0ac2 --- /dev/null +++ b/go/tools/builders/nogo_fix_test.go @@ -0,0 +1,333 @@ +package main + +import ( + "bytes" + "fmt" + "go/token" + "os" + "path/filepath" + "reflect" + "sort" + "strings" + "testing" + + "golang.org/x/tools/go/analysis" +) + +func TestGetFixes_SuccessCases(t *testing.T) { + fset := token.NewFileSet() + f := fset.AddFile("file1.go", fset.Base(), 100) + f.AddLine(0) + f.AddLine(20) + f.AddLine(40) + f.AddLine(60) + f.AddLine(80) + + f = fset.AddFile("file2.go", fset.Base(), 100) + f.AddLine(0) + f.AddLine(20) + f.AddLine(40) + f.AddLine(60) + f.AddLine(80) + + diagnosticEntries := []diagnosticEntry{ + { + analyzerName: "analyzer1", + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + // Moving some text in the same file. + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(5), End: token.Pos(13), NewText: []byte("new_text")}, + {Pos: token.Pos(55), End: token.Pos(63)}, + }, + }, + }, + }, + }, + { + analyzerName: "analyzer1", + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + // Moving some text across files. 
+ TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(15), End: token.Pos(23), NewText: []byte("new_text")}, + {Pos: token.Pos(155), End: token.Pos(163)}, + }, + }, + }, + }, + }, + { + analyzerName: "analyzer2", + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + // Delete some text + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(25), End: token.Pos(30)}, + }, + }, + }, + }, + }, + { + analyzerName: "analyzer2", + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + // Adding some text. + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(115), End: token.Pos(115), NewText: []byte("new_text")}, + }, + }, + }, + }, + }, + { + analyzerName: "analyzer3", + Diagnostic: analysis.Diagnostic{ + // multiple suggested fixes, the first one conflict with other fixes. + SuggestedFixes: []analysis.SuggestedFix{ + { + // All edits are ignored. + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(29), End: token.Pos(39), NewText: []byte("conflicting change")}, + {Pos: token.Pos(65), End: token.Pos(73)}, + }, + }, + { + // All edits are kept. 
+ TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(42), End: token.Pos(52), NewText: []byte("good change")}, + {Pos: token.Pos(65), End: token.Pos(73)}, + }, + }, + }, + }, + }, + } + + fileChanges, err := getFixes(diagnosticEntries, fset) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + for _, c := range fileChanges { + sort.Stable(byStartEnd(c.changes)) + var expect []nogoEdit + switch c.fileName { + case "file1.go": + expect = []nogoEdit{ + {Start: 4, End: 12, New: "new_text", analyzerName: "analyzer1"}, + {Start: 14, End: 22, New: "new_text", analyzerName: "analyzer1"}, + {Start: 24, End: 29, analyzerName: "analyzer2"}, + {Start: 41, End: 51, New: "good change", analyzerName: "analyzer3"}, + {Start: 54, End: 62, analyzerName: "analyzer1"}, + {Start: 64, End: 72, analyzerName: "analyzer3"}, + } + case "file2.go": + expect = []nogoEdit{ + {Start: 13, End: 13, New: "new_text", analyzerName: "analyzer2"}, + {Start: 53, End: 61, analyzerName: "analyzer1"}, + } + } + if !reflect.DeepEqual(c.changes, expect) { + t.Errorf("unexpected changes for file %s:\n\tgot:\t%v\n\twant:\t%v", c.fileName, c.changes, expect) + } + } +} + +func TestGetFixes_Conflict(t *testing.T) { + fset := token.NewFileSet() + f := fset.AddFile("file1.go", fset.Base(), 100) + f.AddLine(0) + f.AddLine(20) + f.AddLine(40) + f.AddLine(60) + f.AddLine(80) + + diagnosticEntries := []diagnosticEntry{ + { + analyzerName: "analyzer1", + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + // Moving some text in the same file. + TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(5), End: token.Pos(13), NewText: []byte("new_text")}, + {Pos: token.Pos(55), End: token.Pos(63)}, + }, + }, + }, + }, + }, + { + analyzerName: "analyzer2", + Diagnostic: analysis.Diagnostic{ + SuggestedFixes: []analysis.SuggestedFix{ + { + // Delete some text. 
+ TextEdits: []analysis.TextEdit{ + {Pos: token.Pos(55), End: token.Pos(62)}, + }, + }, + }, + }, + }, + } + expectedError := `ignoring suggested fixes from analyzer "analyzer2"` + fileChanges, err := getFixes(diagnosticEntries, fset) + if err == nil || !strings.Contains(err.Error(), expectedError) { + t.Errorf("expected error: %s\ngot:%v+", expectedError, err) + } + expectedChanges := []fileChange{ + { + fileName: "file1.go", + changes: []nogoEdit{ + {Start: 4, End: 12, New: "new_text", analyzerName: "analyzer1"}, + {Start: 54, End: 62, analyzerName: "analyzer1"}, + }, + }, + } + if !reflect.DeepEqual(fileChanges, expectedChanges) { + t.Errorf("unexpected changes:\n\tgot:\t%v\n\twant:\t%v", fileChanges, expectedChanges) + } +} + +func TestValidate_Success(t *testing.T) { + edits := []nogoEdit{ + {Start: 20, End: 30, New: "new_text"}, + {Start: 0, End: 10}, + {Start: 20, End: 30, New: "new_text"}, + } + original := make([]nogoEdit, len(edits)) + copy(original, edits) + + result, err := validate(edits) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !reflect.DeepEqual(edits, original) { + t.Errorf("validate should not change the input:\n\tgot:\t%v\n\twant:\t%v", edits, original) + } + expect := []nogoEdit{ + {Start: 0, End: 10}, + {Start: 20, End: 30, New: "new_text"}, + } + if !reflect.DeepEqual(result, expect) { + t.Errorf("unexpected result:\n\tgot:\t%v\n\twant:\t%v", result, expect) + } +} + +func TestValidate_Failure(t *testing.T) { + tests := []struct{ + name string + edits []nogoEdit + expectedErr string + }{ + { + name: "conflicts", + edits: []nogoEdit{ + {Start: 20, End: 30, New: "new_text", analyzerName: "analyzer1"}, + {Start: 25, End: 35, analyzerName: "analyzer2"}, + }, + expectedErr: `overlapping suggestions from "analyzer1" and "analyzer2" at {Start:20,End:30,New:"new_text"} and {Start:25,End:35,New:""}`, + }, + { + name: "invalid edits", + edits: []nogoEdit{ + {Start: 20, End: 10, New: "new_text", analyzerName: "analyzer1"}, + }, + 
expectedErr: `invalid suggestion from "analyzer1": {Start:20,End:10,New:"new_text"}`, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := validate(tt.edits) + if err == nil { + t.Fatal("expected error, got nil") + } + if err.Error() != tt.expectedErr { + t.Errorf("unexpected error:\n\tgot:\t%s\n\twant:\t%s", err.Error(), tt.expectedErr) + } + }) + } +} + +func TestWritePatch(t *testing.T) { + tmpDir := t.TempDir() + + file1 := tmpDir + "/file1.go" + err := os.WriteFile(file1, []byte("package main\nfunc Hello() {}\n"), 0644) + if err != nil { + t.Fatalf("Failed to create temporary file1.go: %v", err) + } + + file2 := tmpDir + "/file2.go" + + err = os.WriteFile(file2, []byte("package main\nvar x = 10\n"), 0644) + if err != nil { + t.Fatalf("Failed to create temporary file2.go: %v", err) + } + + tests := []struct { + name string + fileChanges []fileChange + expected string + expectErr bool + }{ + { + name: "valid patch for multiple files", + fileChanges: []fileChange{ + {fileName: file1, changes: []nogoEdit{{Start: 27, End: 27, New: "\nHello, world!\n"}}}, // Add to function body + {fileName: file2, changes: []nogoEdit{{Start: 24, End: 24, New: "var y = 20\n"}}}, // Add a new variable + }, + expected: fmt.Sprintf(`--- %s ++++ %s +@@ -1,3 +1,5 @@ + package main +-func Hello() {} ++func Hello() { ++Hello, world! 
++} + +--- %s ++++ %s +@@ -1,3 +1,4 @@ + package main + var x = 10 ++var y = 20 + +`, filepath.Join("a", file1), filepath.Join("b", file1), filepath.Join("a", file2), filepath.Join("b", file2)), + }, + { + name: "file not found", + fileChanges: []fileChange{ + {fileName: "nonexistent.go", changes: []nogoEdit{{Start: 0, End: 0, New: "new content"}}}, + }, + expectErr: true, + }, + { + name: "no edits", + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + var patchWriter bytes.Buffer + err := writePatch(&patchWriter, tt.fileChanges) + + // Verify error expectation + if (err != nil) != tt.expectErr { + t.Fatalf("expected error: %v, got: %v", tt.expectErr, err) + } + + // If no error, verify the patch output + actual := patchWriter.String() + if err == nil && actual != tt.expected { + t.Errorf("expected patch:\n%s\ngot:\n%s", tt.expected, actual) + } + }) + } +} diff --git a/go/tools/builders/nogo_main.go b/go/tools/builders/nogo_main.go index 681b387710..f774840a25 100644 --- a/go/tools/builders/nogo_main.go +++ b/go/tools/builders/nogo_main.go @@ -77,7 +77,7 @@ func run(args []string) (error, int) { importcfg := flags.String("importcfg", "", "The import configuration file") packagePath := flags.String("p", "", "The package path (importmap) of the package being compiled") xPath := flags.String("x", "", "The archive file where serialized facts should be written") - nogoFixPath := flags.String("fixpath", "", "The path of the file that stores the nogo fixes") + nogoFixPath := flags.String("fix", "", "The path of the file to store the nogo fixes") var ignores multiFlag flags.Var(&ignores, "ignore", "Names of files to ignore") flags.Parse(args) @@ -88,27 +88,65 @@ func run(args []string) (error, int) { return fmt.Errorf("error parsing importcfg: %v", err), nogoError } - diagnostics, facts, err := checkPackage(analyzers, *packagePath, packageFile, importMap, factMap, srcs, ignores, *nogoFixPath) + + diagnostics, pkg, err := checkPackage(analyzers, 
*packagePath, packageFile, importMap, factMap, srcs, ignores) if err != nil { return fmt.Errorf("error running analyzers: %v", err), nogoError } // Write the facts file for downstream consumers before failing due to diagnostics. if *xPath != "" { - if err := ioutil.WriteFile(abs(*xPath), facts, 0o666); err != nil { + if err := os.WriteFile(abs(*xPath), pkg.facts.Encode(), 0o666); err != nil { return fmt.Errorf("error writing facts: %v", err), nogoError } } - if diagnostics != "" { + exitCode := nogoSuccess + var errMsg bytes.Buffer + if len(diagnostics) > 0 { // debugMode is defined by the template in generate_nogo_main.go. - exitCode := nogoViolation + exitCode = nogoViolation if debugMode { // Force actions running nogo to fail to help debug issues. exitCode = nogoError } - return fmt.Errorf("errors found by nogo during build-time code analysis:\n%s\n", diagnostics), exitCode + errMsg.WriteString("errors found by nogo during build-time code analysis:") + for _, d := range diagnostics { + fmt.Fprintf(&errMsg, "\n%s: %s (%s)", pkg.fset.Position(d.Pos), d.Message, d.analyzerName) + } + } + + if errs := saveSuggestedFixes(*nogoFixPath, diagnostics, pkg); len(errs) > 0 { + errMsg.WriteString("\nsaving suggested fixes:") + for _, err := range errs { + fmt.Fprintf(&errMsg, "\n%v", err) + } } - return nil, nogoSuccess + if errMsg.Len() > 0 { + return errors.New(errMsg.String()), exitCode + } + return nil, exitCode +} + +func saveSuggestedFixes(nogoFixPath string, diagnostics []diagnosticEntry, pkg *goPackage) []error { + if nogoFixPath == "" { + return nil + } + var errs []error + // the patch file has to be created even if there is no fix. 
+ patchFile, err := os.Create(nogoFixPath) + if err != nil { + errs = append(errs, fmt.Errorf("creating %q: %w", nogoFixPath, err)) + return errs + } + defer patchFile.Close() + fixes, err := getFixes(diagnostics, pkg.fset) + if err != nil { + errs = append(errs, err) + } + if err := writePatch(patchFile, fixes); err != nil { + errs = append(errs, err) + } + return errs } // Adapted from go/src/cmd/compile/internal/gc/main.go. Keep in sync. @@ -159,7 +197,7 @@ func readImportCfg(file string) (packageFile map[string]string, importMap map[st // It returns an empty string if no source code diagnostics need to be printed. // // This implementation was adapted from that of golang.org/x/tools/go/checker/internal/checker. -func checkPackage(analyzers []*analysis.Analyzer, packagePath string, packageFile, importMap map[string]string, factMap map[string]string, filenames, ignoreFiles []string, nogoFixPath string) (string, []byte, error) { +func checkPackage(analyzers []*analysis.Analyzer, packagePath string, packageFile, importMap, factMap map[string]string, filenames, ignoreFiles []string) ([]diagnosticEntry, *goPackage, error) { // Register fact types and establish dependencies between analyzers. 
actions := make(map[*analysis.Analyzer]*action) var visit func(a *analysis.Analyzer) *action @@ -189,14 +227,14 @@ func checkPackage(analyzers []*analysis.Analyzer, packagePath string, packageFil if cfg, ok := configs[a.Name]; ok { for flagKey, flagVal := range cfg.analyzerFlags { if strings.HasPrefix(flagKey, "-") { - return "", nil, fmt.Errorf( + return nil, nil, fmt.Errorf( "%s: flag should not begin with '-': %s", a.Name, flagKey) } if flag := a.Flags.Lookup(flagKey); flag == nil { - return "", nil, fmt.Errorf("%s: unrecognized flag: %s", a.Name, flagKey) + return nil, nil, fmt.Errorf("%s: unrecognized flag: %s", a.Name, flagKey) } if err := a.Flags.Set(flagKey, flagVal); err != nil { - return "", nil, fmt.Errorf( + return nil, nil, fmt.Errorf( "%s: invalid value for flag: %s=%s: %w", a.Name, flagKey, flagVal, err) } } @@ -208,8 +246,9 @@ func checkPackage(analyzers []*analysis.Analyzer, packagePath string, packageFil imp := newImporter(importMap, packageFile, factMap) pkg, err := load(packagePath, imp, filenames) if err != nil { - return "", nil, fmt.Errorf("error loading package: %v", err) + return nil, nil, fmt.Errorf("error loading package: %v", err) } + for _, act := range actions { act.pkg = pkg } @@ -258,10 +297,8 @@ func checkPackage(analyzers []*analysis.Analyzer, packagePath string, packageFil // Execute the analyzers. execAll(roots) - // Process diagnostics and encode facts for importers of this package. - diagnostics := checkAnalysisResults(roots, pkg, nogoFixPath) - facts := pkg.facts.Encode() - return diagnostics, facts, nil + diagnostics, err := checkAnalysisResults(roots, pkg) + return diagnostics, pkg, err } type Range struct { @@ -459,9 +496,8 @@ func (g *goPackage) String() string { // checkAnalysisResults checks the analysis diagnostics in the given actions // and returns a string containing all the diagnostics that should be printed // to the build log. 
-func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) string { +func checkAnalysisResults(actions []*action, pkg *goPackage) ([]diagnosticEntry, error) { var diagnostics []diagnosticEntry - var errs []error cwd, err := os.Getwd() if cwd == "" || err != nil { @@ -503,7 +539,7 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) if currentConfig.onlyFiles == nil && currentConfig.excludeFiles == nil { for _, diag := range act.diagnostics { - diagnostics = append(diagnostics, diagnosticEntry{Diagnostic: diag, Analyzer: act.a}) + diagnostics = append(diagnostics, diagnosticEntry{Diagnostic: diag, analyzerName: act.a.Name}) } continue } @@ -541,43 +577,21 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) } } if include { - diagnostics = append(diagnostics, diagnosticEntry{Diagnostic: d, Analyzer: act.a}) + diagnostics = append(diagnostics, diagnosticEntry{Diagnostic: d, analyzerName: act.a.Name}) } } } if numSkipped > 0 { errs = append(errs, fmt.Errorf("%d analyzers skipped due to type-checking error: %v", numSkipped, pkg.typeCheckError)) } - - if nogoFixPath != "" { - // If the nogo fixes are requested, we need to save the fixes to the file even if they are empty. 
- // Otherwise, bazel will complain "not all outputs were created or valid" - change, err := newChangeFromDiagnostics(diagnostics, pkg.fset) - if err != nil { - errs = append(errs, err) - } - editsPerFile, err := flatten(change) - if err != nil { - errs = append(errs, err) - } - combinedPatch, err := toCombinedPatch(editsPerFile) - if err != nil { - errs = append(errs, err) - } - err = os.WriteFile(nogoFixPath, []byte(combinedPatch), 0644) - if err != nil { - errs = append(errs, fmt.Errorf("errors in saving the patch to the file %s: %v", nogoFixPath, err)) - } - } - - if len(diagnostics) == 0 && len(errs) == 0 { - return "" - } - sort.Slice(diagnostics, func(i, j int) bool { return diagnostics[i].Pos < diagnostics[j].Pos }) + if len(errs) == 0 { + return diagnostics, nil + } + errMsg := &bytes.Buffer{} sep := "" for _, err := range errs { @@ -585,12 +599,7 @@ func checkAnalysisResults(actions []*action, pkg *goPackage, nogoFixPath string) sep = "\n" errMsg.WriteString(err.Error()) } - for _, d := range diagnostics { - errMsg.WriteString(sep) - sep = "\n" - fmt.Fprintf(errMsg, "%s: %s (%s)", pkg.fset.Position(d.Pos), d.Message, d.Name) - } - return errMsg.String() + return diagnostics, errors.New(errMsg.String()) } // config determines which source files an analyzer will emit diagnostics for. diff --git a/go/tools/builders/nogo_validation.go b/go/tools/builders/nogo_validation.go index a947a1bb56..507e17226a 100644 --- a/go/tools/builders/nogo_validation.go +++ b/go/tools/builders/nogo_validation.go @@ -8,7 +8,7 @@ import ( func nogoValidation(args []string) error { validationOutput := args[0] logFile := args[1] - nogoFixFile := args[2] + fixFile := args[2] // Always create the output file and only fail if the log file is non-empty to // avoid an "action failed to create outputs" error. 
@@ -21,30 +21,28 @@ func nogoValidation(args []string) error { return err } - nogoFixContent, err := os.ReadFile(nogoFixFile) + fixContent, err := os.ReadFile(fixFile) if err != nil { return err } if len(logContent) > 0 { - nogoFixRelated := "" - // See nogo_change_serialization.go, if the patches are empty, then nogoFixContent is empty by design, rather than an empty json like {}. - if len(nogoFixContent) > 0 { + var fixMessage string + if len(fixContent) > 0 { // Format the message in a clean and clear way - nogoFixRelated = fmt.Sprintf(` + fixMessage = fmt.Sprintf(` -------------------Suggested Fix------------------- -The suggested fix is as follows: %s +----------------------------------------------------- To apply the suggested fix, run the following command: $ patch -p1 < %s ------------------------------------------------------ -`, nogoFixContent, nogoFixFile) +`, fixContent, fixFile) } // Separate nogo output from Bazel's --sandbox_debug message via an // empty line. // Don't return to avoid printing the "nogovalidation:" prefix. - _, _ = fmt.Fprintf(os.Stderr, "\n%s%s\n", logContent, nogoFixRelated) + _, _ = fmt.Fprintf(os.Stderr, "\n%s%s\n", logContent, fixMessage) os.Exit(1) } return nil From 74764a344999d4dd5d1dc5bc169abd29212e2f4d Mon Sep 17 00:00:00 2001 From: peng3141 Date: Fri, 3 Jan 2025 04:43:01 +0000 Subject: [PATCH 10/12] 1/2/2025: do not create, open it --- go/tools/builders/nogo_main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/go/tools/builders/nogo_main.go b/go/tools/builders/nogo_main.go index f774840a25..db30c5b714 100644 --- a/go/tools/builders/nogo_main.go +++ b/go/tools/builders/nogo_main.go @@ -133,7 +133,7 @@ func saveSuggestedFixes(nogoFixPath string, diagnostics []diagnosticEntry, pkg * } var errs []error // the patch file has to be created even if there is no fix. 
- patchFile, err := os.Create(nogoFixPath) + patchFile, err := os.OpenFile(nogoFixPath, os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0644) // os.Create(nogoFixPath) if err != nil { errs = append(errs, fmt.Errorf("creating %q: %w", nogoFixPath, err)) return errs From f581f9497756e639ab20e314870792bd1c9b9500 Mon Sep 17 00:00:00 2001 From: peng3141 Date: Fri, 3 Jan 2025 06:27:02 +0000 Subject: [PATCH 11/12] 1/2/2025: revert https://github.com/bazel-contrib/rules_go/pull/4203 --- tests/core/cross/BUILD.bazel | 9 --------- 1 file changed, 9 deletions(-) diff --git a/tests/core/cross/BUILD.bazel b/tests/core/cross/BUILD.bazel index 474966a314..b3fc3e75c9 100644 --- a/tests/core/cross/BUILD.bazel +++ b/tests/core/cross/BUILD.bazel @@ -6,15 +6,6 @@ test_suite( name = "cross", ) -go_binary( - name = "non_pure_cross", - srcs = ["main.go"], - goarch = "amd64", - goos = "darwin", - pure = "off", - target_compatible_with = ["@platforms//os:macos"], - deps = [":platform_lib"], -) go_binary( name = "windows_cross", From 38943069727c2583fded6dc2da892baa8f3bdc39 Mon Sep 17 00:00:00 2001 From: peng3141 Date: Fri, 3 Jan 2025 16:12:42 +0000 Subject: [PATCH 12/12] 1/3: add a comment to trigger a new build --- tests/core/cross/BUILD.bazel | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/core/cross/BUILD.bazel b/tests/core/cross/BUILD.bazel index b3fc3e75c9..5cb30849f8 100644 --- a/tests/core/cross/BUILD.bazel +++ b/tests/core/cross/BUILD.bazel @@ -70,7 +70,7 @@ go_cross_binary( target = ":native_bin", ) -# Because pure = "on" on the underlying target, this doesn't actually need cgo (and won't try to use it). +# let us retrigger a new build. Because pure = "on" on the underlying target, this doesn't actually need cgo (and won't try to use it). # This target ensures that (from Bazel 6) we don't require a C++ toolchain if we're not actually going to use cgo. go_cross_binary( name = "windows_go_cross_cgo",