diff --git a/Makefile b/Makefile
index d403904..ba5749c 100644
--- a/Makefile
+++ b/Makefile
@@ -24,6 +24,10 @@ test_all:
 cover: cover.profile
 	go tool cover -func=$<
 
+clean:
+	rm -rf .make
+	rm -f cover.profile
+
 cover.profile: $(shell $(DEVCTL) list --go) | bin/ginkgo bin/devctl
 	$(GINKGO) run --coverprofile=cover.profile -r ./
 
diff --git a/ast/ast.go b/ast/ast.go
new file mode 100644
index 0000000..ceaffad
--- /dev/null
+++ b/ast/ast.go
@@ -0,0 +1,160 @@
+package ast
+
+import (
+	"go/ast"
+
+	"github.com/unmango/go-make/token"
+)
+
+type Node = ast.Node
+
+var Walk = ast.Walk
+
+// A File represents text content interpreted as the make syntax.
+// Most commonly this is a Makefile, but could also be any file
+// understood by make, e.g. include-me.mk
+type File struct {
+	FileStart, FileEnd token.Pos
+	Comments           []*CommentGroup
+	Rules              []*Rule
+}
+
+// A CommentGroup represents a sequence of comments with no other tokens and no empty lines between.
+type CommentGroup struct {
+	List []*Comment
+}
+
+// Pos implements Node
+func (c *CommentGroup) Pos() token.Pos {
+	return c.List[0].Pos()
+}
+
+// End implements Node
+func (c *CommentGroup) End() token.Pos {
+	return c.List[len(c.List)-1].End()
+}
+
+// TODO: Handle multi-line comments with '\' escaped newlines
+
+// A Comment represents a single comment starting with '#'.
+type Comment struct {
+	Pound token.Pos // position of '#' starting the comment
+	Text  string    // comment text, excluding '\n'
+}
+
+// Pos implements Node
+func (c *Comment) Pos() token.Pos {
+	return c.Pound
+}
+
+// End implements Node
+func (c *Comment) End() token.Pos {
+	return token.Pos(int(c.Pound) + len(c.Text))
+}
+
+// A Rule represents the Recipes and PreRequisites required to build Targets. [Rule Syntax]
+//
+// [Rule Syntax]: https://www.gnu.org/software/make/manual/html_node/Rule-Syntax.html
+type Rule struct {
+	Colon   token.Pos // position of ':' delimiting targets and prerequisites
+	Pipe    token.Pos // position of '|' delimiting normal and order-only prerequisites
+	Semi    token.Pos // position of ';' delimiting prerequisites and recipes
+	Targets *TargetList
+	PreReqs *PreReqList
+	Recipes []*Recipe
+}
+
+// Pos implements Node
+func (r *Rule) Pos() token.Pos {
+	return r.Targets.Pos()
+}
+
+// End implements Node
+func (r *Rule) End() token.Pos {
+	return r.Recipes[len(r.Recipes)-1].End()
+}
+
+// A TargetList represents a list of Targets in a single Rule.
+type TargetList struct {
+	List []FileName
+}
+
+// Pos implements Node
+func (t *TargetList) Pos() token.Pos {
+	return t.List[0].Pos()
+}
+
+// End implements Node
+func (t *TargetList) End() token.Pos {
+	return t.List[len(t.List)-1].End()
+}
+
+// A PreReqList represents all normal and order-only prerequisites in a single Rule.
+type PreReqList struct {
+	Pipe token.Pos
+	List []FileName
+}
+
+// Pos implements Node
+func (p *PreReqList) Pos() token.Pos {
+	return p.List[0].Pos()
+}
+
+// End implements Node
+func (p *PreReqList) End() token.Pos {
+	return p.List[len(p.List)-1].End()
+}
+
+// A FileName represents any Node that can appear where a file name is expected.
+type FileName interface {
+	Node
+	fileNameNode()
+}
+
+// A LiteralFileName represents a name identifier with no additional syntax.
+type LiteralFileName struct {
+	Name *Ident
+}
+
+func (*LiteralFileName) fileNameNode() {}
+
+func (l *LiteralFileName) Pos() token.Pos {
+	return l.Name.Pos()
+}
+
+func (l *LiteralFileName) End() token.Pos {
+	return l.Name.End()
+}
+
+// A Recipe represents a line of text to be passed to the shell to build a Target.
+type Recipe struct {
+	Tok    token.Token // TAB or SEMI
+	TokPos token.Pos   // position of Tok
+	Text   string      // recipe text excluding '\n'
+}
+
+// Pos implements Node
+func (r *Recipe) Pos() token.Pos {
+	return r.TokPos
+}
+
+// End implements Node
+func (r *Recipe) End() token.Pos {
+	return token.Pos(int(r.TokPos) + len(r.Text))
+}
+
+// An Ident represents an identifier.
+type Ident struct {
+	Name    string
+	NamePos token.Pos
+}
+
+// Pos implements Node
+func (i *Ident) Pos() token.Pos {
+	return i.NamePos
+}
+
+// End implements Node
+func (i *Ident) End() token.Pos {
+	return token.Pos(int(i.NamePos) + len(i.Name))
+}
diff --git a/ast/ast_suite_test.go b/ast/ast_suite_test.go
new file mode 100644
index 0000000..fd0c04d
--- /dev/null
+++ b/ast/ast_suite_test.go
@@ -0,0 +1,13 @@
+package ast_test
+
+import (
+	"testing"
+
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+)
+
+func TestAst(t *testing.T) {
+	RegisterFailHandler(Fail)
+	RunSpecs(t, "Ast Suite")
+}
diff --git a/ast/ast_test.go b/ast/ast_test.go
new file mode 100644
index 0000000..6809026
--- /dev/null
+++ b/ast/ast_test.go
@@ -0,0 +1,175 @@
+package ast_test
+
+import (
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
+
+	"github.com/unmango/go-make/ast"
+	"github.com/unmango/go-make/token"
+)
+
+var _ = Describe("Ast", func() {
+	Describe("CommentGroup", func() {
+		It("should return the position of the first comment", func() {
+			c := &ast.CommentGroup{[]*ast.Comment{{
+				Pound: token.Pos(69),
+			}}}
+
+			Expect(c.Pos()).To(Equal(token.Pos(69)))
+		})
+
+		It("should return the position of the last comment", func() {
+			c := &ast.CommentGroup{[]*ast.Comment{
+				{Pound: token.Pos(69), Text: "foo"},
+				{Pound: token.Pos(420), Text: "Some comment text"},
+			}}
+
+			Expect(c.End()).To(Equal(token.Pos(437)))
+		})
+	})
+
+	Describe("Comment", func() {
+		It("should return the pound position", func() {
+			c := &ast.Comment{Pound: token.Pos(69)}
+
+			Expect(c.Pos()).To(Equal(token.Pos(69)))
+			Expect(c.Pos()).To(Equal(c.Pound))
+		})
+
+		It("should return the position after the comment text", func() {
+			c := &ast.Comment{
+				Pound: token.Pos(420),
+				Text:  "Some comment text",
+			}
+
+			Expect(c.End()).To(Equal(token.Pos(437)))
+		})
+	})
+
+	Describe("Rule", func() {
+		It("should return the position of the first target", func() {
+			c := &ast.Rule{Targets: &ast.TargetList{
+				List: []ast.FileName{&ast.LiteralFileName{
+					Name: &ast.Ident{NamePos: token.Pos(69)},
+				}},
+			}}
+
+			Expect(c.Pos()).To(Equal(token.Pos(69)))
+		})
+
+		It("should return the position after the final recipe", func() {
+			c := &ast.Rule{Recipes: []*ast.Recipe{{
+				TokPos: token.Pos(420),
+			}}}
+
+			// TODO: This is wrong, should be position after text
+			Expect(c.End()).To(Equal(token.Pos(420)))
+		})
+	})
+
+	Describe("TargetList", func() {
+		It("should return the position of the first target", func() {
+			c := &ast.TargetList{
+				List: []ast.FileName{&ast.LiteralFileName{
+					Name: &ast.Ident{NamePos: token.Pos(69)},
+				}},
+			}
+
+			Expect(c.Pos()).To(Equal(token.Pos(69)))
+		})
+
+		It("should return the position of the last target", func() {
+			c := &ast.TargetList{List: []ast.FileName{
+				&ast.LiteralFileName{Name: &ast.Ident{NamePos: token.Pos(69)}},
+				&ast.LiteralFileName{Name: &ast.Ident{
+					NamePos: token.Pos(420),
+					Name:    "foo",
+				}},
+			}}
+
+			Expect(c.End()).To(Equal(token.Pos(423)))
+		})
+	})
+
+	Describe("PreReqList", func() {
+		It("should return the position of the first target", func() {
+			c := &ast.PreReqList{
+				List: []ast.FileName{&ast.LiteralFileName{
+					Name: &ast.Ident{NamePos: token.Pos(69)},
+				}},
+			}
+
+			Expect(c.Pos()).To(Equal(token.Pos(69)))
+		})
+
+		It("should return the position after the last prereq", func() {
+			c := &ast.PreReqList{List: []ast.FileName{
+				&ast.LiteralFileName{Name: &ast.Ident{NamePos: token.Pos(69)}},
+				&ast.LiteralFileName{Name: &ast.Ident{
+					NamePos: token.Pos(420),
+					Name:    "baz",
+				}},
+			}}
+
+			Expect(c.End()).To(Equal(token.Pos(423)))
+		})
+	})
+
+	Describe("LiteralFileName", func() {
+		It("should return the position of the identifier", func() {
+			c := &ast.LiteralFileName{Name: &ast.Ident{
+				NamePos: token.Pos(69),
+			}}
+
+			Expect(c.Pos()).To(Equal(token.Pos(69)))
+		})
+
+		It("should return the position after the identifier", func() {
+			c := &ast.LiteralFileName{Name: &ast.Ident{
+				NamePos: token.Pos(420),
+				Name:    "bar",
+			}}
+
+			Expect(c.End()).To(Equal(token.Pos(423)))
+		})
+	})
+
+	Describe("Recipe", func() {
+		It("should return the position of the tab", func() {
+			c := &ast.Recipe{
+				TokPos: token.Pos(420),
+			}
+
+			Expect(c.Pos()).To(Equal(token.Pos(420)))
+		})
+
+		It("should return the position after the text", func() {
+			c := &ast.Recipe{
+				TokPos: token.Pos(420),
+				Tok:    token.TAB,
+				Text:   "foo",
+			}
+
+			Expect(c.End()).To(Equal(token.Pos(423)))
+		})
+	})
+
+	Describe("Ident", func() {
+		It("should return the position of the name", func() {
+			c := &ast.Ident{
+				NamePos: token.Pos(69),
+			}
+
+			Expect(c.Pos()).To(Equal(token.Pos(69)))
+		})
+
+		It("should return the position after the name", func() {
+			c := &ast.Ident{
+				NamePos: token.Pos(420),
+				Name:    "foo",
+			}
+
+			Expect(c.End()).To(Equal(token.Pos(423)))
+		})
+	})
+})
diff --git a/internal/testing/testing.go b/internal/testing/testing.go
index 22ad8b4..27b8312 100644
--- a/internal/testing/testing.go
+++ b/internal/testing/testing.go
@@ -1,6 +1,10 @@
 package testing
 
-import "errors"
+import (
+	"bytes"
+	"errors"
+	"fmt"
+)
 
 type ErrReader string
 
@@ -13,3 +17,24 @@ type ErrWriter string
 func (e ErrWriter) Write(p []byte) (int, error) {
 	return 0, errors.New(string(e))
 }
+
+type ErrAfterWriter struct {
+	After int
+	Buf   *bytes.Buffer
+	at    int
+}
+
+func NewErrAfterWriter(after int) *ErrAfterWriter {
+	return &ErrAfterWriter{
+		After: after,
+		Buf:   &bytes.Buffer{},
+	}
+}
+
+func (e *ErrAfterWriter) Write(p []byte) (int, error) {
+	if e.at++; e.at >= e.After {
+		return 0, fmt.Errorf("write err: %d", e.at)
+	} else {
+		return e.Buf.Write(p)
+	}
+}
diff --git a/parser.go b/parser.go
new file mode 100644
index 0000000..65574ee
--- /dev/null
+++ b/parser.go
@@ -0,0 +1,135 @@
+package make
+
+import (
+	"go/scanner"
+	"io"
+
+	"github.com/unmango/go-make/ast"
+	"github.com/unmango/go-make/token"
+)
+
+type Parser struct {
+	s      *Scanner
+	file   *token.File
+	errors scanner.ErrorList
+
+	pos token.Pos
+	tok token.Token // one token look-ahead
+	lit string      // token literal
+}
+
+func NewParser(r io.Reader) *Parser {
+	s := NewScanner(r)
+	s.Scan() // TODO: Cleaner priming
+
+	return &Parser{
+		s:    s,
+		file: &token.File{},
+		tok:  s.Token(),
+		lit:  s.Literal(),
+	}
+}
+
+func (p *Parser) ParseFile() (*ast.File, error) {
+	f := p.parseFile()
+	if p.errors.Len() > 0 {
+		p.errors.Sort()
+		return nil, p.errors.Err()
+	} else {
+		return f, nil
+	}
+}
+
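+// A minimal usage sketch (illustrative only, mirroring parser_test.go below;
+// the input string is an assumed example, not a fixture):
+//
+//	p := NewParser(bytes.NewBufferString("target:"))
+//	f, err := p.ParseFile()
+//	// f holds the parsed *ast.File when err is nil
+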
+func (p *Parser) expect(tok token.Token) token.Pos {
+	pos := p.pos
+	if p.tok != tok {
+		p.error(pos, "expected '"+tok.String()+"'")
+	}
+
+	p.next()
+	return pos
+}
+
+func (p *Parser) error(pos token.Pos, msg string) {
+	epos := p.file.Position(pos)
+	p.errors.Add(epos, msg)
+}
+
+func (p *Parser) next() {
+	if p.s.Scan() {
+		// TODO: p.pos
+		p.tok, p.lit = p.s.Token(), p.s.Literal()
+	} else {
+		p.tok = token.EOF
+	}
+}
+
+func (p *Parser) parseFile() *ast.File {
+	if p.errors.Len() > 0 {
+		return nil
+	}
+
+	var rules []*ast.Rule
+	for p.tok != token.EOF {
+		rules = append(rules, p.parseRule())
+	}
+
+	return &ast.File{
+		Comments:  []*ast.CommentGroup{},
+		Rules:     rules,
+		FileStart: token.Pos(p.file.Base()),
+		FileEnd:   token.Pos(p.file.Base() + p.file.Size()),
+	}
+}
+
+func (p *Parser) parseRule() *ast.Rule {
+	if p.tok != token.IDENT {
+		p.expect(token.IDENT)
+		return nil
+	}
+
+	var targets []ast.FileName
+	for p.tok != token.COLON && p.tok != token.EOF {
+		targets = append(targets, p.parseFileName())
+	}
+
+	var colon token.Pos
+	if p.tok == token.COLON {
+		colon = p.pos
+		p.next()
+	} else {
+		p.expect(token.COLON)
+	}
+
+	return &ast.Rule{
+		Targets: &ast.TargetList{
+			List: targets,
+		},
+		Colon:   colon,
+		Pipe:    token.NoPos,
+		Semi:    token.NoPos,
+		PreReqs: &ast.PreReqList{},
+		Recipes: []*ast.Recipe{},
+	}
+}
+
+func (p *Parser) parseFileName() ast.FileName {
+	return &ast.LiteralFileName{
+		Name: p.parseIdent(),
+	}
+}
+
+func (p *Parser) parseIdent() *ast.Ident {
+	pos, name := p.pos, "_"
+	if p.tok == token.IDENT {
+		name = p.lit
+		p.next()
+	} else {
+		p.expect(token.IDENT)
+	}
+
+	return &ast.Ident{
+		NamePos: pos,
+		Name:    name,
+	}
+}
diff --git a/parser_test.go b/parser_test.go
new file mode 100644
index 0000000..bad8af7
--- /dev/null
+++ b/parser_test.go
@@ -0,0 +1,31 @@
+package make_test
+
+import (
+	"bytes"
+
+	. "github.com/onsi/ginkgo/v2"
+	. "github.com/onsi/gomega"
"github.com/onsi/gomega" + + "github.com/unmango/go-make" +) + +var _ = Describe("Parser", func() { + It("should Parse a target", func() { + buf := bytes.NewBufferString("target:") + p := make.NewParser(buf) + + f, err := p.ParseFile() + + Expect(err).NotTo(HaveOccurred()) + Expect(f).NotTo(BeNil()) + }) + + It("should error when starting at a colon", func() { + buf := bytes.NewBufferString(":") + p := make.NewParser(buf) + + _, err := p.ParseFile() + + Expect(err).To(MatchError("expected 'IDENT'")) + }) +}) diff --git a/scanner_test.go b/scanner_test.go index 31898ac..4fa8b8c 100644 --- a/scanner_test.go +++ b/scanner_test.go @@ -111,6 +111,19 @@ var _ = Describe("Scanner", func() { }, ) + DescribeTable("Scan comment tokens", + Entry(nil, "#", token.COMMENT), + func(input string, expected token.Token) { + buf := bytes.NewBufferString(input) + s := make.NewScanner(buf) + + more := s.Scan() + + Expect(s.Token()).To(Equal(expected)) + Expect(more).To(BeTrueBecause("more to scan")) + }, + ) + It("should scan newline followed by token", func() { buf := bytes.NewBufferString("\n ident") s := make.NewScanner(buf) diff --git a/token/position.go b/token/position.go index 22028ac..0400c29 100644 --- a/token/position.go +++ b/token/position.go @@ -8,3 +8,5 @@ type ( File = token.File FileSet = token.FileSet ) + +const NoPos = token.NoPos diff --git a/token/token.go b/token/token.go index 359ff3b..0e8d591 100644 --- a/token/token.go +++ b/token/token.go @@ -279,7 +279,7 @@ func IsIdentifier(name string) bool { return false } switch name { - case "(", ")", "{", "}", "$", ":", ",", "\n", "\t": + case "(", ")", "{", "}", "$", ":", ",", "\n", "\t", "#": fallthrough case "=", ":=", "::=", ":::=", "?=", "!=": return false diff --git a/token/token_test.go b/token/token_test.go index 97a415a..1c298ca 100644 --- a/token/token_test.go +++ b/token/token_test.go @@ -514,6 +514,7 @@ var _ = Describe("Token", func() { Entry(nil, "}"), Entry(nil, ":"), Entry(nil, "$"), + Entry(nil, "#"), Entry(nil, ","), Entry(nil, "="), Entry(nil, ":="), diff --git a/write_test.go b/write_test.go index 2433e3d..b6e3778 100644 --- a/write_test.go +++ b/write_test.go @@ -44,67 +44,139 @@ var _ = Describe("Write", func() { Expect(n).To(Equal(15)) }) - DescribeTable("Rules", - Entry(nil, - make.Rule{Target: []string{"target"}}, - "target:\n", - ), - Entry(nil, - make.Rule{Target: []string{"target", "target2"}}, - "target target2:\n", - ), - Entry(nil, - make.Rule{ - Target: []string{"target"}, - PreReqs: []string{"prereq"}, + Describe("WriteRule", func() { + DescribeTable("Rules", + Entry(nil, + make.Rule{Target: []string{"target"}}, + "target:\n", + ), + Entry(nil, + make.Rule{Target: []string{"target", "target2"}}, + "target target2:\n", + ), + Entry(nil, + make.Rule{ + Target: []string{"target"}, + PreReqs: []string{"prereq"}, + }, + "target: prereq\n", + ), + Entry(nil, + make.Rule{ + Target: []string{"target"}, + PreReqs: []string{"prereq"}, + Recipe: []string{"curl https://example.com"}, + }, + "target: prereq\n\tcurl https://example.com\n", + ), + Entry(nil, + make.Rule{ + Target: []string{"target"}, + Recipe: []string{"curl https://example.com"}, + }, + "target:\n\tcurl https://example.com\n", + ), + func(r make.Rule, expected string) { + buf := &bytes.Buffer{} + w := make.NewWriter(buf) + + n, err := w.WriteRule(r) + + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal(expected)) + Expect(n).To(Equal(len(expected))) }, - "target: prereq\n", - ), - Entry(nil, - make.Rule{ - Target: []string{"target"}, - PreReqs: 
[]string{"prereq"}, - Recipe: []string{"curl https://example.com"}, - }, - "target: prereq\n\tcurl https://example.com\n", - ), - Entry(nil, - make.Rule{ - Target: []string{"target"}, - Recipe: []string{"curl https://example.com"}, - }, - "target:\n\tcurl https://example.com\n", - ), - func(r make.Rule, expected string) { + ) + + It("should write multiple rules", func() { buf := &bytes.Buffer{} w := make.NewWriter(buf) - n, err := w.WriteRule(r) - + _, err := w.WriteRule(make.Rule{Target: []string{"target"}}) Expect(err).NotTo(HaveOccurred()) - Expect(buf.String()).To(Equal(expected)) - Expect(n).To(Equal(len(expected))) - }, - ) + _, err = w.WriteRule(make.Rule{Target: []string{"target2"}}) + Expect(err).NotTo(HaveOccurred()) + Expect(buf.String()).To(Equal("target:\ntarget2:\n")) + }) - It("should error when rule has no targets", func() { - buf := &bytes.Buffer{} - w := make.NewWriter(buf) + DescribeTable("should error when rule has no targets", + Entry("empty rule", make.Rule{}), + Entry("with prereqs", make.Rule{PreReqs: []string{"foo"}}), + Entry("with recipes", make.Rule{Recipe: []string{"foo"}}), + func(rule make.Rule) { + buf := &bytes.Buffer{} + w := make.NewWriter(buf) - _, err := w.WriteRule(make.Rule{}) + _, err := w.WriteRule(rule) - Expect(err).To(MatchError("no targets")) - }) + Expect(err).To(MatchError("no targets")) + }, + ) - It("should write multiple rules", func() { - buf := &bytes.Buffer{} - w := make.NewWriter(buf) + It("should return target errors", func() { + writer := testing.NewErrAfterWriter(1) + w := make.NewWriter(writer) - _, err := w.WriteRule(make.Rule{Target: []string{"target"}}) - Expect(err).NotTo(HaveOccurred()) - _, err = w.WriteRule(make.Rule{Target: []string{"target2"}}) - Expect(err).NotTo(HaveOccurred()) - Expect(buf.String()).To(Equal("target:\ntarget2:\n")) + _, err := w.WriteRule(make.Rule{ + Target: []string{"foo"}, + }) + + Expect(err).To(MatchError("write err: 1")) + }) + + It("should return prereq errors", func() { + writer := testing.NewErrAfterWriter(2) + w := make.NewWriter(writer) + + _, err := w.WriteRule(make.Rule{ + Target: []string{"foo"}, + PreReqs: []string{"bar"}, + }) + + Expect(err).To(MatchError("write err: 2")) + Expect(writer.Buf.String()).To(Equal("foo:")) + }) + + It("should return newline errors", func() { + writer := testing.NewErrAfterWriter(3) + w := make.NewWriter(writer) + + _, err := w.WriteRule(make.Rule{ + Target: []string{"foo"}, + PreReqs: []string{"bar"}, + }) + + Expect(err).To(MatchError("write err: 3")) + Expect(writer.Buf.String()).To(Equal("foo: bar")) + }) + + It("should return recipe errors", func() { + writer := testing.NewErrAfterWriter(4) + w := make.NewWriter(writer) + + _, err := w.WriteRule(make.Rule{ + Target: []string{"foo"}, + PreReqs: []string{"bar"}, + Recipe: []string{"baz"}, + }) + + Expect(err).To(MatchError("write err: 4")) + Expect(writer.Buf.String()).To(Equal("foo: bar\n")) + }) + + It("should return trailing newline errors", func() { + writer := testing.NewErrAfterWriter(5) + w := make.NewWriter(writer) + + _, err := w.WriteRule(make.Rule{ + Target: []string{"foo"}, + PreReqs: []string{"bar"}, + Recipe: []string{"baz"}, + }) + + Expect(err).To(MatchError("write err: 5")) + Expect(writer.Buf.String()).To(Equal("foo: bar\n\tbaz")) + }) }) It("should write a Makefile", func() {