Merge pull request #9291 from hashicorp/f-update-hcl

update HCL vendor
Mitchell Hashimoto 2016-10-08 17:37:07 +08:00 committed by GitHub
commit 9b4235bd72
5 changed files with 90 additions and 32 deletions

vendor/github.com/hashicorp/hcl/hcl/ast/ast.go

@@ -156,7 +156,8 @@ func (o *ObjectKey) Pos() token.Pos {
 type LiteralType struct {
 	Token token.Token
 
-	// associated line comment, only when used in a list
+	// comment types, only used when in a list
+	LeadComment *CommentGroup
 	LineComment *CommentGroup
 }
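For reference, a literal inside a list can now carry both kinds of comment. A minimal sketch (not part of this diff; it assumes the vendored hcl/hcl/parser and hcl/hcl/ast packages plus the parser change further down) showing where each comment ends up:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl/hcl/ast"
	"github.com/hashicorp/hcl/hcl/parser"
)

func main() {
	// A list item with a comment above it (lead) and one after it (line).
	src := []byte(`
foo = [
    # lead comment, attached to the element below
    "bar", # line comment, attached to the element on its left
]
`)

	f, err := parser.Parse(src)
	if err != nil {
		log.Fatal(err)
	}

	// With this PR, both comment groups end up on the *ast.LiteralType for "bar".
	ast.Walk(f.Node, func(nn ast.Node) (ast.Node, bool) {
		if lit, ok := nn.(*ast.LiteralType); ok {
			if lit.LeadComment != nil {
				fmt.Println("lead:", lit.LeadComment.List[0].Text)
			}
			if lit.LineComment != nil {
				fmt.Println("line:", lit.LineComment.List[0].Text)
			}
		}
		return nn, true
	})
}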

vendor/github.com/hashicorp/hcl/hcl/parser/parser.go

@@ -337,6 +337,12 @@ func (p *Parser) listType() (*ast.ListType, error) {
 				return nil, err
 			}
 
+			// If there is a lead comment, apply it
+			if p.leadComment != nil {
+				node.LeadComment = p.leadComment
+				p.leadComment = nil
+			}
+
 			l.Add(node)
 			needComma = true
 		case token.COMMA:

vendor/github.com/hashicorp/hcl/hcl/printer/nodes.go

@@ -62,6 +62,14 @@ func (p *printer) collectComments(node ast.Node) {
 	ast.Walk(node, func(nn ast.Node) (ast.Node, bool) {
 		switch t := nn.(type) {
 		case *ast.LiteralType:
+			if t.LeadComment != nil {
+				for _, comment := range t.LeadComment.List {
+					if _, ok := standaloneComments[comment.Pos()]; ok {
+						delete(standaloneComments, comment.Pos())
+					}
+				}
+			}
+
 			if t.LineComment != nil {
 				for _, comment := range t.LineComment.List {
 					if _, ok := standaloneComments[comment.Pos()]; ok {
@@ -95,7 +103,6 @@ func (p *printer) collectComments(node ast.Node) {
 	}
 
 	sort.Sort(ByPosition(p.standaloneComments))
 }
 
 // output prints creates b printable HCL output and returns it.
@@ -104,11 +111,14 @@ func (p *printer) output(n interface{}) []byte {
 	switch t := n.(type) {
 	case *ast.File:
+		// File doesn't trace so we add the tracing here
+		defer un(trace(p, "File"))
 		return p.output(t.Node)
 	case *ast.ObjectList:
+		defer un(trace(p, "ObjectList"))
 		var index int
 		var nextItem token.Pos
-		var commented bool
 		for {
 			// TODO(arslan): refactor below comment printing, we have the same in objectType
 			for _, c := range p.standaloneComments {
@@ -121,7 +131,10 @@ func (p *printer) output(n interface{}) []byte {
 					if comment.Pos().After(p.prev) && comment.Pos().Before(nextItem) {
 						// if we hit the end add newlines so we can print the comment
-						if index == len(t.Items) {
+						// we don't do this if prev is invalid which means the
+						// beginning of the file since the first comment should
+						// be at the first line.
+						if p.prev.IsValid() && index == len(t.Items) {
 							buf.Write([]byte{newline, newline})
 						}
@@ -140,7 +153,7 @@ func (p *printer) output(n interface{}) []byte {
 			}
 
 			buf.Write(p.output(t.Items[index]))
-			if !commented && index != len(t.Items)-1 {
+			if index != len(t.Items)-1 {
 				buf.Write([]byte{newline, newline})
 			}
 			index++
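The p.prev.IsValid() guard above targets the case the new comment describes: a standalone comment at the very beginning of a file, before any item has been printed, should stay on the first line instead of gaining the two newlines the old code wrote unconditionally. A rough sketch of that path, assuming the vendored hcl/hcl/printer package (hypothetical input, not taken from this PR):

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl/hcl/printer"
)

func main() {
	// Comment-only input: the object list has no items, which is the
	// situation the guard above is meant to catch.
	src := []byte("# top-of-file comment\n")

	out, err := printer.Format(src)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%q\n", out)
}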
@@ -435,11 +448,33 @@ func (p *printer) list(l *ast.ListType) []byte {
 	}
 
 	insertSpaceBeforeItem := false
+	lastHadLeadComment := false
 	for i, item := range l.List {
 		if item.Pos().Line != l.Lbrack.Line {
 			// multiline list, add newline before we add each item
 			buf.WriteByte(newline)
 			insertSpaceBeforeItem = false
+
+			// If we have a lead comment, then we want to write that first
+			leadComment := false
+			if lit, ok := item.(*ast.LiteralType); ok && lit.LeadComment != nil {
+				leadComment = true
+
+				// If this isn't the first item and the previous element
+				// didn't have a lead comment, then we need to add an extra
+				// newline to properly space things out. If it did have a
+				// lead comment previously then this would be done
+				// automatically.
+				if i > 0 && !lastHadLeadComment {
+					buf.WriteByte(newline)
+				}
+
+				for _, comment := range lit.LeadComment.List {
+					buf.Write(p.indent([]byte(comment.Text)))
+					buf.WriteByte(newline)
+				}
+			}
+
 			// also indent each line
 			val := p.output(item)
 			curLen := len(val)
@@ -458,9 +493,16 @@ func (p *printer) list(l *ast.ListType) []byte {
 				}
 			}
 
-			if i == len(l.List)-1 {
+			lastItem := i == len(l.List)-1
+			if lastItem {
 				buf.WriteByte(newline)
 			}
+
+			if leadComment && !lastItem {
+				buf.WriteByte(newline)
+			}
+
+			lastHadLeadComment = leadComment
 		} else {
 			if insertSpaceBeforeItem {
 				buf.WriteByte(blank)

vendor/github.com/hashicorp/hcl/json/parser/parser.go

@@ -5,6 +5,7 @@ import (
 	"fmt"
 
 	"github.com/hashicorp/hcl/hcl/ast"
+	hcltoken "github.com/hashicorp/hcl/hcl/token"
 	"github.com/hashicorp/hcl/json/scanner"
 	"github.com/hashicorp/hcl/json/token"
 )
@@ -103,6 +104,14 @@ func (p *Parser) objectItem() (*ast.ObjectItem, error) {
 	switch p.tok.Type {
 	case token.COLON:
+		pos := p.tok.Pos
+		o.Assign = hcltoken.Pos{
+			Filename: pos.Filename,
+			Offset:   pos.Offset,
+			Line:     pos.Line,
+			Column:   pos.Column,
+		}
+
 		o.Val, err = p.objectValue()
 		if err != nil {
 			return nil, err
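With the hcltoken import and the block above, object items produced by the JSON parser now record the position of the pair's colon in Assign (an hcl/hcl/token.Pos) instead of leaving it at the zero value, matching what the native HCL parser already reports. A small sketch, assuming the vendored hcl/json/parser package:

package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl/hcl/ast"
	jsonparser "github.com/hashicorp/hcl/json/parser"
)

func main() {
	f, err := jsonparser.Parse([]byte(`{"foo": "bar"}`))
	if err != nil {
		log.Fatal(err)
	}

	// Each ObjectItem parsed from JSON now carries an Assign position
	// pointing at the ":" of the pair, per the change above.
	ast.Walk(f.Node, func(nn ast.Node) (ast.Node, bool) {
		if item, ok := nn.(*ast.ObjectItem); ok {
			fmt.Printf("%s assigned at line %d, column %d\n",
				item.Keys[0].Token.Text, item.Assign.Line, item.Assign.Column)
		}
		return nn, true
	})
}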

vendor/vendor.json

@@ -1168,68 +1168,68 @@
 		{
 			"checksumSHA1": "8OPDk+bKyRGJoKcS4QNw9F7dpE8=",
 			"path": "github.com/hashicorp/hcl",
-			"revision": "ef8133da8cda503718a74741312bf50821e6de79",
-			"revisionTime": "2016-09-16T13:01:00Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
-			"checksumSHA1": "67DfevLBglV52Y2eAuhFc/xQni0=",
+			"checksumSHA1": "XQmjDva9JCGGkIecOgwtBEMCJhU=",
 			"path": "github.com/hashicorp/hcl/hcl/ast",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
 			"checksumSHA1": "5HVecyfmcTm6OTffEi6LGayQf5M=",
 			"path": "github.com/hashicorp/hcl/hcl/fmtcmd",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
-			"checksumSHA1": "l2oQxBsZRwn6eZjf+whXr8c9+8c=",
+			"checksumSHA1": "un4pN4yL5bl6LL3CgWacFbIeHVg=",
 			"path": "github.com/hashicorp/hcl/hcl/parser",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
-			"checksumSHA1": "CSmwxPOTz7GSpnWPF9aGkbVeR64=",
+			"checksumSHA1": "QjoxNbg+jBmtewexLaBZ8EJEl24=",
 			"path": "github.com/hashicorp/hcl/hcl/printer",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
 			"checksumSHA1": "lgR7PSAZ0RtvAc9OCtCnNsF/x8g=",
 			"path": "github.com/hashicorp/hcl/hcl/scanner",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
 			"checksumSHA1": "JlZmnzqdmFFyb1+2afLyR3BOE/8=",
 			"path": "github.com/hashicorp/hcl/hcl/strconv",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
 			"checksumSHA1": "c6yprzj06ASwCo18TtbbNNBHljA=",
 			"path": "github.com/hashicorp/hcl/hcl/token",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
-			"checksumSHA1": "jQ45CCc1ed/nlV7bbSnx6z72q1M=",
+			"checksumSHA1": "fpQQdjFUZOoslYuFNKZMSO0N0ik=",
 			"path": "github.com/hashicorp/hcl/json/parser",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
 			"checksumSHA1": "YdvFsNOMSWMLnY6fcliWQa0O5Fw=",
 			"path": "github.com/hashicorp/hcl/json/scanner",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
 			"checksumSHA1": "fNlXQCQEnb+B3k5UDL/r15xtSJY=",
 			"path": "github.com/hashicorp/hcl/json/token",
-			"revision": "99df0eb941dd8ddbc83d3f3605a34f6a686ac85e",
-			"revisionTime": "2016-09-02T16:52:19Z"
+			"revision": "6f5bfed9a0a22222fbe4e731ae3481730ba41e93",
+			"revisionTime": "2016-10-08T07:35:57Z"
 		},
 		{
 			"checksumSHA1": "OrnLOmhc0FcHYs02wtbu1siIsnM=",