Skip to content
Snippets Groups Projects
Commit dbc13deb authored by bryanl's avatar bryanl
Browse files

create util package for jsonnet


Signed-off-by: default avatarbryanl <bryanliles@gmail.com>
parent 727fc768
No related branches found
No related tags found
No related merge requests found
...@@ -610,6 +610,6 @@ ...@@ -610,6 +610,6 @@
[solve-meta] [solve-meta]
analyzer-name = "dep" analyzer-name = "dep"
analyzer-version = 1 analyzer-version = 1
inputs-digest = "87834a6bde3f8fbe065d32e469dc94301fd94d942c4e2ad414bb3d756e71778d" inputs-digest = "c2823dabf259fbe1a025ad57ce12af4869ab264ca610219a88c1b29142c4875a"
solver-name = "gps-cdcl" solver-name = "gps-cdcl"
solver-version = 1 solver-version = 1
/*
Copyright 2017 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package docparser
import (
"fmt"
"github.com/google/go-jsonnet/ast"
"github.com/ksonnet/ksonnet-lib/ksonnet-gen/astext"
)
// topLevelContext is the context string for nodes at the top level of a
// document. (Not referenced in this chunk — presumably used by the parser
// elsewhere in the file; verify before removing.)
var topLevelContext = "$"

// anonymous is the bind name passed to addContext for nodes that are not
// bound to a variable.
const anonymous = "anonymous"
// TODO(sbarzowski) polish children functions and consider moving to AST
// and exporting

// directChildren returns the children of an AST node that are evaluated in
// the same context and environment as the node itself.
//
// They must satisfy the following rules:
//   - (no-delayed-evaluation) they are evaluated when their parent is
//     evaluated, or never
//   - (no-indirect-evaluation) they cannot be evaluated during evaluation of
//     any non-direct child
//   - (same-environment) they must be evaluated in the same environment as
//     their parent
func directChildren(node ast.Node) []ast.Node {
	switch node := node.(type) {
	case *ast.Apply:
		// TODO(sbarzowski) tailstrict call arguments (once we have tailstrict)
		return []ast.Node{node.Target}
	case *ast.ApplyBrace:
		return []ast.Node{node.Left, node.Right}
	case *ast.Assert:
		return []ast.Node{node.Cond, node.Message, node.Rest}
	case *ast.Binary:
		return []ast.Node{node.Left, node.Right}
	case *ast.Conditional:
		return []ast.Node{node.Cond, node.BranchTrue, node.BranchFalse}
	case *ast.Error:
		return []ast.Node{node.Expr}
	case *ast.Index:
		return []ast.Node{node.Target, node.Index}
	case *ast.Slice:
		return []ast.Node{node.Target, node.BeginIndex, node.EndIndex, node.Step}
	case *ast.Local:
		// The bind bodies are thunked, only the local's body is direct.
		return []ast.Node{node.Body}
	case *astext.Object:
		// Field name expressions are evaluated outside the object.
		return objectFieldsDirectChildren(node.Fields)
	case *ast.ArrayComp:
		// Walk the for/if spec chain from innermost to outermost clause.
		result := []ast.Node{}
		for spec := &node.Spec; spec != nil; spec = spec.Outer {
			result = append(result, spec.Expr)
			for _, cond := range spec.Conditions {
				result = append(result, cond.Expr)
			}
		}
		return result
	case *ast.ObjectComp:
		// Wrap the plain ast fields so the shared helper can inspect them.
		var fields astext.ObjectFields
		for _, field := range node.Fields {
			fields = append(fields, astext.ObjectField{ObjectField: field})
		}
		result := objectFieldsDirectChildren(fields)
		for spec := &node.Spec; spec != nil; spec = spec.Outer {
			result = append(result, spec.Expr)
			for _, cond := range spec.Conditions {
				result = append(result, cond.Expr)
			}
		}
		return result
	case *ast.SuperIndex:
		return []ast.Node{node.Index}
	case *ast.InSuper:
		return []ast.Node{node.Index}
	case *ast.Unary:
		return []ast.Node{node.Expr}
	case *ast.Array, *ast.Dollar, *ast.Function, *ast.Import, *ast.ImportStr,
		*ast.LiteralBoolean, *ast.LiteralNull, *ast.LiteralNumber,
		*ast.LiteralString, *ast.Self, *ast.Var:
		// These either have no children, or their children are thunked or
		// special (handled by the sibling walkers).
		return nil
	}
	panic(fmt.Sprintf("directChildren: Unknown node %#v", node))
}
// thunkChildren returns the children of an AST node that are executed in a
// new context and capture the environment from their parent (thunked).
// TODO(sbarzowski) Make sure it works well with boundary cases like tailstrict
// arguments, make it more precise.
//
// Rules:
//   - (same-environment) they must be evaluated in the same environment as
//     their parent
//   - (not-direct) if they can be direct children, they should be (and then
//     cannot be thunked)
func thunkChildren(node ast.Node) []ast.Node {
	switch node := node.(type) {
	case *ast.Apply:
		// Both positional and named call arguments are lazily evaluated.
		var args []ast.Node
		for _, arg := range node.Arguments.Positional {
			args = append(args, arg)
		}
		for _, arg := range node.Arguments.Named {
			args = append(args, arg.Arg)
		}
		return args
	case *ast.Array:
		return node.Elements
	case *ast.ArrayComp:
		return []ast.Node{node.Body}
	case *ast.Local:
		// TODO(sbarzowski) complicated
		return nil
	case *ast.ApplyBrace, *ast.Assert, *ast.Binary, *ast.Conditional,
		*ast.Dollar, *ast.Error, *ast.Function, *ast.Import, *ast.ImportStr,
		*ast.Index, *ast.Slice, *ast.LiteralBoolean, *ast.LiteralNull,
		*ast.LiteralNumber, *ast.LiteralString, *astext.Object,
		*ast.ObjectComp, *ast.Self, *ast.SuperIndex, *ast.InSuper,
		*ast.Unary, *ast.Var:
		// No thunked children for these node kinds.
		return nil
	}
	panic(fmt.Sprintf("thunkChildren: Unknown node %#v", node))
}
// objectFieldsDirectChildren collects the field-name expressions (Expr1) of
// the given fields; these are evaluated outside the object and are therefore
// direct children of it.
func objectFieldsDirectChildren(fields astext.ObjectFields) ast.Nodes {
	result := ast.Nodes{}
	for _, field := range fields {
		if field.Expr1 == nil {
			continue
		}
		result = append(result, field.Expr1)
	}
	return result
}
// inObjectFieldsChildren collects the nodes of the given fields that are
// evaluated inside the object: the method for sugared methods, otherwise the
// field body (Expr2) and the optional assert message/third expression (Expr3).
func inObjectFieldsChildren(fields ast.ObjectFields) ast.Nodes {
	result := ast.Nodes{}
	for _, field := range fields {
		if field.MethodSugar {
			result = append(result, field.Method)
			continue
		}
		if field.Expr2 != nil {
			result = append(result, field.Expr2)
		}
		if field.Expr3 != nil {
			result = append(result, field.Expr3)
		}
	}
	return result
}
// specialChildren returns children that are neither direct nor thunked,
// e.g. object field bodies. They are evaluated in a different environment
// from their parent.
func specialChildren(node ast.Node) []ast.Node {
	switch node := node.(type) {
	case *ast.Function:
		// TODO(sbarzowski) this
		return nil
	case *ast.Object:
		// NOTE(review): the sibling walkers (directChildren/thunkChildren)
		// switch on *astext.Object instead of *ast.Object — confirm which
		// object type this walker actually receives.
		return inObjectFieldsChildren(node.Fields)
	case *ast.ArrayComp:
		return []ast.Node{node.Body}
	case *ast.ObjectComp:
		// BUG FIX: this case previously had an empty body, so an
		// *ast.ObjectComp fell out of the switch and hit the panic below.
		// Field bodies of an object comprehension are evaluated in the
		// comprehension's own environment, mirroring the *ast.Object case.
		return inObjectFieldsChildren(node.Fields)
	case *ast.Apply, *ast.ApplyBrace, *ast.Array, *ast.Assert, *ast.Binary,
		*ast.Conditional, *ast.Dollar, *ast.Error, *ast.Import,
		*ast.ImportStr, *ast.Index, *ast.Slice, *ast.Local,
		*ast.LiteralBoolean, *ast.LiteralNull, *ast.LiteralNumber,
		*ast.LiteralString, *ast.Self, *ast.SuperIndex, *ast.InSuper,
		*ast.Unary, *ast.Var:
		// No special children for these node kinds.
		return nil
	}
	panic(fmt.Sprintf("specialChildren: Unknown node %#v", node))
}
// Children returns all children of an AST node: direct, thunked, and
// special, concatenated in that order.
func Children(node ast.Node) []ast.Node {
	var all []ast.Node
	for _, group := range [][]ast.Node{
		directChildren(node),
		thunkChildren(node),
		specialChildren(node),
	} {
		all = append(all, group...)
	}
	return all
}
// functionContext builds the context string used for nodes inside the body
// of the function named funcName, e.g. "function <main>". It returns a
// pointer because contexts are stored on nodes by reference.
func functionContext(funcName string) *string {
	ctx := fmt.Sprintf("function <%s>", funcName)
	return &ctx
}
// objectContext builds the context string used for nodes inside the object
// bound to objName, e.g. "object <foo>". It returns a pointer because
// contexts are stored on nodes by reference.
func objectContext(objName string) *string {
	ctx := fmt.Sprintf("object <%s>", objName)
	return &ctx
}
// addContext adds context to a node and its whole subtree.
//
// context is the surrounding context of a node (e.g. a function it's in).
//
// bind is a name that the node is bound to, i.e. if node is a local bind body
// then bind is its name. For nodes that are not bound to variables `anonymous`
// should be passed. For example:
//
//	local x = 2 + 2; x
//
// In such case bind for the binary node 2 + 2 is "x" and for every other node,
// including its children, it's anonymous.
func addContext(node ast.Node, context *string, bind string) {
	if node == nil {
		return
	}
	node.SetContext(context)
	switch node := node.(type) {
	case *ast.Function:
		funContext := functionContext(bind)
		addContext(node.Body, funContext, anonymous)
		for i := range node.Parameters.Optional {
			// Default arguments have the same context as the function body.
			addContext(node.Parameters.Optional[i].DefaultArg, funContext, anonymous)
		}
	case *ast.Object:
		// TODO(sbarzowski) include fieldname, maybe even chains
		outOfObject := directChildren(node)
		for _, f := range outOfObject {
			// Field name expressions are evaluated outside of the object,
			// so they keep the surrounding context.
			addContext(f, context, anonymous)
		}
		objContext := objectContext(bind)
		inObject := inObjectFieldsChildren(node.Fields)
		for _, f := range inObject {
			// Field bodies are evaluated inside the object, so they get
			// the object's own context.
			addContext(f, objContext, anonymous)
		}
	case *ast.ObjectComp:
		outOfObject := directChildren(node)
		for _, f := range outOfObject {
			// Field name expressions and comprehension specs are evaluated
			// outside of the object.
			addContext(f, context, anonymous)
		}
		objContext := objectContext(bind)
		inObject := inObjectFieldsChildren(node.Fields)
		for _, f := range inObject {
			// Field bodies are evaluated inside the object.
			addContext(f, objContext, anonymous)
		}
	case *ast.Local:
		// Each bind body gets a context naming the thunk; note the loop
		// variable deliberately shadows the bind parameter.
		for _, bind := range node.Binds {
			namedThunkContext := "thunk <" + string(bind.Variable) + "> from <" + *context + ">"
			if bind.Fun != nil {
				addContext(bind.Fun, &namedThunkContext, string(bind.Variable))
			} else {
				addContext(bind.Body, &namedThunkContext, string(bind.Variable))
			}
		}
		// The local's body keeps the surrounding context and the original
		// bind name (the loop variable is out of scope here).
		addContext(node.Body, context, bind)
	default:
		for _, child := range directChildren(node) {
			addContext(child, context, anonymous)
		}
		// TODO(sbarzowski) avoid "thunk from <thunk from..."
		thunkContext := "thunk from <" + *context + ">"
		for _, child := range thunkChildren(node) {
			addContext(child, &thunkContext, anonymous)
		}
	}
}
This diff is collapsed.
/*
Copyright 2016 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package docparser
import (
"testing"
)
// lexTest describes a single lexer test case.
type lexTest struct {
	name      string // test (and error-position) name passed to Lex
	input     string // source text to lex
	tokens    tokens // expected token stream, excluding the trailing EOF
	errString string // expected error string; empty means lexing must succeed
}

var (
	// tEOF is the end-of-file token appended to every expected stream.
	tEOF = token{kind: tokenEndOfFile}
)
var lexTests = []lexTest{
{"empty", "", tokens{}, ""},
{"whitespace", " \t\n\r\r\n", tokens{}, ""},
{"brace L", "{", tokens{{kind: tokenBraceL, data: "{"}}, ""},
{"brace R", "}", tokens{{kind: tokenBraceR, data: "}"}}, ""},
{"bracket L", "[", tokens{{kind: tokenBracketL, data: "["}}, ""},
{"bracket R", "]", tokens{{kind: tokenBracketR, data: "]"}}, ""},
{"colon", ":", tokens{{kind: tokenOperator, data: ":"}}, ""},
{"colon2", "::", tokens{{kind: tokenOperator, data: "::"}}, ""},
{"colon3", ":::", tokens{{kind: tokenOperator, data: ":::"}}, ""},
{"arrow right", "->", tokens{{kind: tokenOperator, data: "->"}}, ""},
{"less than minus", "<-", tokens{{kind: tokenOperator, data: "<"},
{kind: tokenOperator, data: "-"}}, ""},
{"comma", ",", tokens{{kind: tokenComma, data: ","}}, ""},
{"dollar", "$", tokens{{kind: tokenDollar, data: "$"}}, ""},
{"dot", ".", tokens{{kind: tokenDot, data: "."}}, ""},
{"paren L", "(", tokens{{kind: tokenParenL, data: "("}}, ""},
{"paren R", ")", tokens{{kind: tokenParenR, data: ")"}}, ""},
{"semicolon", ";", tokens{{kind: tokenSemicolon, data: ";"}}, ""},
{"not 1", "!", tokens{{kind: tokenOperator, data: "!"}}, ""},
{"not 2", "! ", tokens{{kind: tokenOperator, data: "!"}}, ""},
{"not equal", "!=", tokens{{kind: tokenOperator, data: "!="}}, ""},
{"tilde", "~", tokens{{kind: tokenOperator, data: "~"}}, ""},
{"plus", "+", tokens{{kind: tokenOperator, data: "+"}}, ""},
{"minus", "-", tokens{{kind: tokenOperator, data: "-"}}, ""},
{"number 0", "0", tokens{{kind: tokenNumber, data: "0"}}, ""},
{"number 1", "1", tokens{{kind: tokenNumber, data: "1"}}, ""},
{"number 1.0", "1.0", tokens{{kind: tokenNumber, data: "1.0"}}, ""},
{"number 0.10", "0.10", tokens{{kind: tokenNumber, data: "0.10"}}, ""},
{"number 0e100", "0e100", tokens{{kind: tokenNumber, data: "0e100"}}, ""},
{"number 1e100", "1e100", tokens{{kind: tokenNumber, data: "1e100"}}, ""},
{"number 1.1e100", "1.1e100", tokens{{kind: tokenNumber, data: "1.1e100"}}, ""},
{"number 1.1e-100", "1.1e-100", tokens{{kind: tokenNumber, data: "1.1e-100"}}, ""},
{"number 1.1e+100", "1.1e+100", tokens{{kind: tokenNumber, data: "1.1e+100"}}, ""},
{"number 0100", "0100", tokens{
{kind: tokenNumber, data: "0"},
{kind: tokenNumber, data: "100"},
}, ""},
{"number 10+10", "10+10", tokens{
{kind: tokenNumber, data: "10"},
{kind: tokenOperator, data: "+"},
{kind: tokenNumber, data: "10"},
}, ""},
{"number 1.+3", "1.+3", tokens{}, "number 1.+3:1:3 Couldn't lex number, junk after decimal point: '+'"},
{"number 1e!", "1e!", tokens{}, "number 1e!:1:3 Couldn't lex number, junk after 'E': '!'"},
{"number 1e+!", "1e+!", tokens{}, "number 1e+!:1:4 Couldn't lex number, junk after exponent sign: '!'"},
{"double string \"hi\"", "\"hi\"", tokens{{kind: tokenStringDouble, data: "hi"}}, ""},
{"double string \"hi nl\"", "\"hi\n\"", tokens{{kind: tokenStringDouble, data: "hi\n"}}, ""},
{"double string \"hi\\\"\"", "\"hi\\\"\"", tokens{{kind: tokenStringDouble, data: "hi\\\""}}, ""},
{"double string \"hi\\nl\"", "\"hi\\\n\"", tokens{{kind: tokenStringDouble, data: "hi\\\n"}}, ""},
{"double string \"hi", "\"hi", tokens{}, "double string \"hi:1:1 Unterminated String"},
{"single string 'hi'", "'hi'", tokens{{kind: tokenStringSingle, data: "hi"}}, ""},
{"single string 'hi nl'", "'hi\n'", tokens{{kind: tokenStringSingle, data: "hi\n"}}, ""},
{"single string 'hi\\''", "'hi\\''", tokens{{kind: tokenStringSingle, data: "hi\\'"}}, ""},
{"single string 'hi\\nl'", "'hi\\\n'", tokens{{kind: tokenStringSingle, data: "hi\\\n"}}, ""},
{"single string 'hi", "'hi", tokens{}, "single string 'hi:1:1 Unterminated String"},
{"assert", "assert", tokens{{kind: tokenAssert, data: "assert"}}, ""},
{"else", "else", tokens{{kind: tokenElse, data: "else"}}, ""},
{"error", "error", tokens{{kind: tokenError, data: "error"}}, ""},
{"false", "false", tokens{{kind: tokenFalse, data: "false"}}, ""},
{"for", "for", tokens{{kind: tokenFor, data: "for"}}, ""},
{"function", "function", tokens{{kind: tokenFunction, data: "function"}}, ""},
{"if", "if", tokens{{kind: tokenIf, data: "if"}}, ""},
{"import", "import", tokens{{kind: tokenImport, data: "import"}}, ""},
{"importstr", "importstr", tokens{{kind: tokenImportStr, data: "importstr"}}, ""},
{"in", "in", tokens{{kind: tokenIn, data: "in"}}, ""},
{"local", "local", tokens{{kind: tokenLocal, data: "local"}}, ""},
{"null", "null", tokens{{kind: tokenNullLit, data: "null"}}, ""},
{"self", "self", tokens{{kind: tokenSelf, data: "self"}}, ""},
{"super", "super", tokens{{kind: tokenSuper, data: "super"}}, ""},
{"tailstrict", "tailstrict", tokens{{kind: tokenTailStrict, data: "tailstrict"}}, ""},
{"then", "then", tokens{{kind: tokenThen, data: "then"}}, ""},
{"true", "true", tokens{{kind: tokenTrue, data: "true"}}, ""},
{"identifier", "foobar123", tokens{{kind: tokenIdentifier, data: "foobar123"}}, ""},
{"identifier", "foo bar123", tokens{{kind: tokenIdentifier, data: "foo"}, {kind: tokenIdentifier, data: "bar123"}}, ""},
{"c++ comment", "// hi", tokens{}, ""}, // This test doesn't look at fodder (yet?)
{"hash comment", "# hi", tokens{}, ""}, // This test doesn't look at fodder (yet?)
{"c comment", "/* hi */", tokens{}, ""}, // This test doesn't look at fodder (yet?)
{"c comment no term", "/* hi", tokens{}, "c comment no term:1:1 Multi-line comment has no terminating */"}, // This test doesn't look at fodder (yet?)
{
"block string spaces",
`|||
test
more
|||
foo
|||`,
tokens{
{
kind: tokenStringBlock,
data: "test\n more\n|||\n foo\n",
stringBlockIndent: " ",
stringBlockTermIndent: "",
},
},
"",
},
{
"block string tabs",
`|||
test
more
|||
foo
|||`,
tokens{
{
kind: tokenStringBlock,
data: "test\n more\n|||\n foo\n",
stringBlockIndent: "\t",
stringBlockTermIndent: "",
},
},
"",
},
{
"block string mixed",
`|||
test
more
|||
foo
|||`,
tokens{
{
kind: tokenStringBlock,
data: "test\n more\n|||\n foo\n",
stringBlockIndent: "\t \t",
stringBlockTermIndent: "",
},
},
"",
},
{
"block string blanks",
`|||
test
more
|||
foo
|||`,
tokens{
{
kind: tokenStringBlock,
data: "\ntest\n\n\n more\n|||\n foo\n",
stringBlockIndent: " ",
stringBlockTermIndent: "",
},
},
"",
},
{
"block string bad indent",
`|||
test
foo
|||`,
tokens{},
"block string bad indent:1:1 Text block not terminated with |||",
},
{
"block string eof",
`|||
test`,
tokens{},
"block string eof:1:1 Unexpected EOF",
},
{
"block string not term",
`|||
test
`,
tokens{},
"block string not term:1:1 Text block not terminated with |||",
},
{
"block string no ws",
`|||
test
|||`,
tokens{},
"block string no ws:1:1 Text block's first line must start with whitespace",
},
{"verbatim_string1", `@""`, tokens{{kind: tokenVerbatimStringDouble, data: ""}}, ""},
{"verbatim_string2", `@''`, tokens{{kind: tokenVerbatimStringSingle, data: ""}}, ""},
{"verbatim_string3", `@""""`, tokens{{kind: tokenVerbatimStringDouble, data: `"`}}, ""},
{"verbatim_string4", `@''''`, tokens{{kind: tokenVerbatimStringSingle, data: "'"}}, ""},
{"verbatim_string5", `@"\n"`, tokens{{kind: tokenVerbatimStringDouble, data: "\\n"}}, ""},
{"verbatim_string6", `@"''"`, tokens{{kind: tokenVerbatimStringDouble, data: "''"}}, ""},
{"verbatim_string_unterminated", `@"blah blah`, tokens{}, "verbatim_string_unterminated:1:1 Unterminated String"},
{"verbatim_string_junk", `@blah blah`, tokens{}, "verbatim_string_junk:1:1 Couldn't lex verbatim string, junk after '@': 98"},
{"op *", "*", tokens{{kind: tokenOperator, data: "*"}}, ""},
{"op /", "/", tokens{{kind: tokenOperator, data: "/"}}, ""},
{"op %", "%", tokens{{kind: tokenOperator, data: "%"}}, ""},
{"op &", "&", tokens{{kind: tokenOperator, data: "&"}}, ""},
{"op |", "|", tokens{{kind: tokenOperator, data: "|"}}, ""},
{"op ^", "^", tokens{{kind: tokenOperator, data: "^"}}, ""},
{"op =", "=", tokens{{kind: tokenOperator, data: "="}}, ""},
{"op <", "<", tokens{{kind: tokenOperator, data: "<"}}, ""},
{"op >", ">", tokens{{kind: tokenOperator, data: ">"}}, ""},
{"op >==|", ">==|", tokens{{kind: tokenOperator, data: ">==|"}}, ""},
{"junk", "💩", tokens{}, "junk:1:1 Could not lex the character '\\U0001f4a9'"},
}
// tokensEqual reports whether two token streams are identical, comparing
// kind, data, and block-string indent metadata for each position
// (fodder and positions are ignored).
func tokensEqual(ts1, ts2 tokens) bool {
	if len(ts1) != len(ts2) {
		return false
	}
	for i, t1 := range ts1 {
		t2 := ts2[i]
		switch {
		case t1.kind != t2.kind:
			return false
		case t1.data != t2.data:
			return false
		case t1.stringBlockIndent != t2.stringBlockIndent:
			return false
		case t1.stringBlockTermIndent != t2.stringBlockTermIndent:
			return false
		}
	}
	return true
}
// TestLex runs every lexTests case, comparing the error string and, on
// success, the produced token stream against expectations.
func TestLex(t *testing.T) {
	for _, test := range lexTests {
		// Expected stream is the case's tokens followed by an EOF token;
		// copy so we never mutate the shared table.
		want := append(tokens(nil), test.tokens...)
		want = append(want, tEOF)

		got, err := Lex(test.name, test.input)
		var gotErr string
		if err != nil {
			gotErr = err.Error()
		}
		if gotErr != test.errString {
			t.Errorf("%s: error result does not match. got\n\t%+v\nexpected\n\t%+v",
				test.name, gotErr, test.errString)
		}
		if err == nil && !tokensEqual(got, want) {
			t.Errorf("%s: got\n\t%+v\nexpected\n\t%+v", test.name, got, want)
		}
	}
}

// TODO: test fodder, test position reporting
// Generated by: main
// TypeWriter: set
// Directive: +gen on literalField
package docparser
// Set is a modification of https://github.com/deckarep/golang-set
// The MIT License (MIT)
// Copyright (c) 2013 Ralph Caraveo (deckarep@gmail.com)

// literalFieldSet is the primary type that represents a set of LiteralField
// values. Membership is tracked with zero-width struct{} map values, so the
// set costs no per-entry storage beyond the keys.
type literalFieldSet map[LiteralField]struct{}
// NewliteralFieldSet creates and returns a reference to an empty set; any
// arguments provided are added as initial members.
func NewliteralFieldSet(a ...LiteralField) literalFieldSet {
	set := literalFieldSet{}
	for _, item := range a {
		set.Add(item)
	}
	return set
}
// ToSlice returns the members of the set as a slice, in unspecified
// (map-iteration) order. An empty set yields a nil slice.
func (set literalFieldSet) ToSlice() []LiteralField {
	var members []LiteralField
	for member := range set {
		members = append(members, member)
	}
	return members
}
// Add inserts i into the set. It reports true if the item was newly added,
// false if it was already present.
func (set literalFieldSet) Add(i LiteralField) bool {
	if _, exists := set[i]; exists {
		return false
	}
	set[i] = struct{}{}
	return true
}
// Contains reports whether i is a member of the set.
func (set literalFieldSet) Contains(i LiteralField) bool {
	_, ok := set[i]
	return ok
}
// ContainsAll reports whether every one of the given items is a member of
// the set. It is vacuously true for zero arguments.
func (set literalFieldSet) ContainsAll(i ...LiteralField) bool {
	for _, item := range i {
		if _, ok := set[item]; !ok {
			return false
		}
	}
	return true
}
// IsSubset reports whether every item in this set is also in the other set
// (i.e. set ⊆ other).
//
// NOTE: the previous comment had the direction reversed ("every item in the
// other set is in this set", which describes IsSuperset); the code has
// always checked set ⊆ other.
func (set literalFieldSet) IsSubset(other literalFieldSet) bool {
	for elem := range set {
		if !other.Contains(elem) {
			return false
		}
	}
	return true
}
// IsSuperset reports whether every item in the other set is also in this set
// (i.e. set ⊇ other).
//
// NOTE: the previous comment had the direction reversed ("every item of this
// set is in the other set", which describes IsSubset); the code has always
// checked other ⊆ set.
func (set literalFieldSet) IsSuperset(other literalFieldSet) bool {
	return other.IsSubset(set)
}
// Union returns a new set containing every member of both sets. Neither
// receiver nor argument is modified.
func (set literalFieldSet) Union(other literalFieldSet) literalFieldSet {
	union := NewliteralFieldSet()
	for _, s := range []literalFieldSet{set, other} {
		for elem := range s {
			union.Add(elem)
		}
	}
	return union
}
// Intersect returns a new set with the items present in both sets. It
// iterates over the smaller set to minimize map lookups.
func (set literalFieldSet) Intersect(other literalFieldSet) literalFieldSet {
	small, large := set, other
	if large.Cardinality() < small.Cardinality() {
		small, large = large, small
	}
	intersection := NewliteralFieldSet()
	for elem := range small {
		if large.Contains(elem) {
			intersection.Add(elem)
		}
	}
	return intersection
}
// Difference returns a new set with the items that are in the current set
// but not in the other set.
func (set literalFieldSet) Difference(other literalFieldSet) literalFieldSet {
	diff := NewliteralFieldSet()
	for elem := range set {
		if other.Contains(elem) {
			continue
		}
		diff.Add(elem)
	}
	return diff
}
// SymmetricDifference returns a new set with the items that are in exactly
// one of the two sets (in either, but not both).
func (set literalFieldSet) SymmetricDifference(other literalFieldSet) literalFieldSet {
	return set.Difference(other).Union(other.Difference(set))
}
// Clear resets the receiver to the empty set. It needs a pointer receiver
// because it reassigns the map itself; other copies of the old map value
// are unaffected.
func (set *literalFieldSet) Clear() {
	*set = make(literalFieldSet)
}
// Remove deletes i from the set; it is a no-op if i is not present.
func (set literalFieldSet) Remove(i LiteralField) {
	delete(set, i)
}
// Cardinality returns the number of items currently in the set.
func (set literalFieldSet) Cardinality() int {
	return len(set)
}
// Iter returns a channel of type LiteralField that you can range over.
//
// NOTE(review): the producing goroutine sends on an unbuffered channel with
// no cancellation mechanism; if the consumer stops receiving before the set
// is exhausted, the goroutine blocks forever (a leak). Callers must drain
// the channel completely.
func (set literalFieldSet) Iter() <-chan LiteralField {
	ch := make(chan LiteralField)
	go func() {
		for elem := range set {
			ch <- elem
		}
		close(ch)
	}()
	return ch
}
// Equal determines if two sets are equal to each other.
// If they both are the same size and have the same items they are considered
// equal. Order of items is not relevant for sets to be equal.
func (set literalFieldSet) Equal(other literalFieldSet) bool {
	if set.Cardinality() != other.Cardinality() {
		return false
	}
	// Same size, so one-directional containment implies equality.
	for elem := range set {
		if !other.Contains(elem) {
			return false
		}
	}
	return true
}
// Clone returns a shallow copy of the set.
// Does NOT clone the underlying elements.
func (set literalFieldSet) Clone() literalFieldSet {
	dup := NewliteralFieldSet()
	for elem := range set {
		dup.Add(elem)
	}
	return dup
}
This diff is collapsed.
/*
Copyright 2016 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package docparser
import (
"fmt"
"testing"
)
var tests = []string{
`true`,
`1`,
`1.2e3`,
`!true`,
`null`,
`$.foo.bar`,
`self.foo.bar`,
`super.foo.bar`,
`super[1]`,
`error "Error!"`,
`"world"`,
`'world'`,
`|||
world
|||`,
`foo(bar)`,
`foo(bar,)`,
`foo(bar) tailstrict`,
`foo(bar=42)`,
`foo(bar=42,)`,
`foo(bar, baz=42)`,
`foo.bar`,
`foo[bar]`,
`true || false`,
`0 && 1 || 0`,
`0 && (1 || 0)`,
`function(x) x`,
`function(x=5) x`,
`function(x, y=5) x`,
`local foo = "bar"; foo`,
`local foo(bar) = bar; foo(1)`,
`{ local foo = "bar", baz: 1}`,
`{ local foo(bar) = bar, baz: foo(1)}`,
`{ foo(bar, baz): bar+baz }`,
`{ ["foo" + "bar"]: 3 }`,
`{ ["field" + x]: x for x in [1, 2, 3] }`,
`{ local y = x, ["field" + x]: x for x in [1, 2, 3] }`,
`{ ["field" + x]: x for x in [1, 2, 3] if x <= 2 }`,
`{ ["field" + x + y]: x + y for x in [1, 2, 3] if x <= 2 for y in [4, 5, 6]}`,
`[]`,
`[a, b, c]`,
`[x for x in [1,2,3] ]`,
`[x for x in [1,2,3] if x <= 2]`,
`[x+y for x in [1,2,3] if x <= 2 for y in [4, 5, 6]]`,
`{}`,
`{ hello: "world" }`,
`{ hello +: "world" }`,
`{
hello: "world",
"name":: joe,
'mood'::: "happy",
|||
key type
|||: "block",
}`,
`assert true: 'woah!'; true`,
`{ assert true: 'woah!', foo: bar }`,
`if n > 1 then 'foos' else 'foo'`,
`local foo = function(x) x + 1; true`,
`local foo = function(x=5) x + 1; true`,
`local foo = function(x=5) x + 1; x(x=3)`,
`import 'foo.jsonnet'`,
`importstr 'foo.text'`,
`{a: b} + {c: d}`,
`{a: b}{c: d}`,
// no colons
`[][0]`,
// one colon
`[][:]`,
`[][1:]`,
`[][:1]`,
`[][1:2]`,
// two colons
`[][::]`,
`[][1::]`,
`[][:1:]`,
`[][::1]`,
`[][1:1:]`,
`[][:1:1]`,
`[][1::1]`,
`[][1:1:1]`,
`a in b`,
`{ x: if "opt" in super then "x" else "y" }`,
}
// TestParser checks that every input in the tests table lexes and parses
// without error.
func TestParser(t *testing.T) {
	for _, src := range tests {
		t.Run(src, func(t *testing.T) {
			// NOTE(review): debug print kept as-is; dropping it would also
			// require removing the fmt import — consider in a follow-up.
			fmt.Println(src)
			toks, err := Lex("test", src)
			if err != nil {
				t.Errorf("Unexpected lex error\n input: %v\n error: %v", src, err)
				return
			}
			if _, err := Parse(toks); err != nil {
				t.Errorf("Unexpected parse error\n input: %v\n error: %v", src, err)
			}
		})
	}
}
// testError pairs a parser input with the exact error string that
// lexing/parsing it is expected to produce.
type testError struct {
	input string // source text fed to Lex/Parse
	err   string // expected error, including "test:line:col" position prefix
}
// errorTests enumerates inputs that must fail to parse, together with the
// exact error message (position included) the parser must report.
var errorTests = []testError{
	{`,`, `test:1:1-2 Unexpected: (",", ",") while parsing terminal`},
	{`function(a, b c)`, `test:1:15-16 Expected a comma before next function parameter, got (IDENTIFIER, "c").`},
	{`function(a, 1)`, `test:1:13-14 Expected simple identifier but got a complex expression.`},
	{`function(,)`, `test:1:10-11 Unexpected: (",", ",") while parsing terminal`},
	{`function(a=)`, `test:1:12-13 Unexpected: (")", ")") while parsing terminal`},
	{`function(a=,)`, `test:1:12-13 Unexpected: (",", ",") while parsing terminal`},
	{`function(a=5, b)`, `test:1:15-16 Positional argument after a named argument is not allowed`},
	{`a b`, `test:1:3-4 Did not expect: (IDENTIFIER, "b")`},
	{`foo(a, bar(a b))`, `test:1:14-15 Expected a comma before next function argument, got (IDENTIFIER, "b").`},
	{`local`, `test:1:6 Expected token IDENTIFIER but got end of file`},
	{`local foo = 1, foo = 2; true`, `test:1:16-19 Duplicate local var: foo`},
	{`local foo(a b) = a; true`, `test:1:13-14 Expected a comma before next function parameter, got (IDENTIFIER, "b").`},
	{`local foo(a): a; true`, `test:1:13-14 Expected operator = but got ":"`},
	{`local foo(a) = bar(a b); true`, `test:1:22-23 Expected a comma before next function argument, got (IDENTIFIER, "b").`},
	{`local foo: 1; true`, `test:1:10-11 Expected operator = but got ":"`},
	{`local foo = bar(a b); true`, `test:1:19-20 Expected a comma before next function argument, got (IDENTIFIER, "b").`},
	{`{a b}`, `test:1:4-5 Expected token OPERATOR but got (IDENTIFIER, "b")`},
	{`{a = b}`, `test:1:4-5 Expected one of :, ::, :::, +:, +::, +:::, got: =`},
	{`{a :::: b}`, `test:1:4-8 Expected one of :, ::, :::, +:, +::, +:::, got: ::::`},
	{`{assert x for x in [1, 2, 3]}`, `test:1:11-14 Object comprehension cannot have asserts.`},
	{`{['foo' + x]: true, [x]: x for x in [1, 2, 3]}`, `test:1:28-31 Object comprehension can only have one field.`},
	{`{foo: x for x in [1, 2, 3]}`, `test:1:9-12 Object comprehensions can only have [e] fields.`},
	{`{[x]:: true for x in [1, 2, 3]}`, `test:1:13-16 Object comprehensions cannot have hidden fields.`},
	{`{[x]: true for 1 in [1, 2, 3]}`, `test:1:16-17 Expected token IDENTIFIER but got (NUMBER, "1")`},
	{`{[x]: true for x at [1, 2, 3]}`, `test:1:18-20 Expected token in but got (IDENTIFIER, "at")`},
	{`{[x]: true for x in [1, 2 3]}`, `test:1:27-28 Expected a comma before next array element.`},
	{`{[x]: true for x in [1, 2, 3] if (a b)}`, `test:1:37-38 Expected token ")" but got (IDENTIFIER, "b")`},
	{`{[x]: true for x in [1, 2, 3] if a b}`, `test:1:36-37 Expected for, if or "}" after for clause, got: (IDENTIFIER, "b")`},
	{`{a: b c:d}`, `test:1:7-8 Expected a comma before next field.`},
	{`{[(x y)]: z}`, `test:1:6-7 Expected token ")" but got (IDENTIFIER, "y")`},
	{`{[x y]: z}`, `test:1:5-6 Expected token "]" but got (IDENTIFIER, "y")`},
	{`{foo(x y): z}`, `test:1:8-9 Expected a comma before next method parameter, got (IDENTIFIER, "y").`},
	{`{foo(x)+: z}`, `test:1:2-5 Cannot use +: syntax sugar in a method: foo`},
	{`{foo: 1, foo: 2}`, `test:1:10-13 Duplicate field: foo`},
	{`{foo: (1 2)}`, `test:1:10-11 Expected token ")" but got (NUMBER, "2")`},
	{`{local 1 = 3, true}`, `test:1:8-9 Expected token IDENTIFIER but got (NUMBER, "1")`},
	{`{local foo = 1, local foo = 2, true}`, `test:1:23-26 Duplicate local var: foo`},
	{`{local foo(a b) = 1, a: true}`, `test:1:14-15 Expected a comma before next function parameter, got (IDENTIFIER, "b").`},
	{`{local foo(a): 1, a: true}`, `test:1:14-15 Expected operator = but got ":"`},
	{`{local foo(a) = (a b), a: true}`, `test:1:20-21 Expected token ")" but got (IDENTIFIER, "b")`},
	{`{assert (a b), a: true}`, `test:1:12-13 Expected token ")" but got (IDENTIFIER, "b")`},
	{`{assert a: (a b), a: true}`, `test:1:15-16 Expected token ")" but got (IDENTIFIER, "b")`},
	{`{function(a, b) a+b: true}`, `test:1:2-10 Unexpected: (function, "function") while parsing field definition`},
	{`[(a b), 2, 3]`, `test:1:5-6 Expected token ")" but got (IDENTIFIER, "b")`},
	{`[1, (a b), 2, 3]`, `test:1:8-9 Expected token ")" but got (IDENTIFIER, "b")`},
	{`[a for b in [1 2 3]]`, `test:1:16-17 Expected a comma before next array element.`},
	{`for`, `test:1:1-4 Unexpected: (for, "for") while parsing terminal`},
	{``, `test:1:1 Unexpected end of file.`},
	{`((a b))`, `test:1:5-6 Expected token ")" but got (IDENTIFIER, "b")`},
	{`a.1`, `test:1:3-4 Expected token IDENTIFIER but got (NUMBER, "1")`},
	{`super.1`, `test:1:7-8 Expected token IDENTIFIER but got (NUMBER, "1")`},
	{`super[(a b)]`, `test:1:10-11 Expected token ")" but got (IDENTIFIER, "b")`},
	{`super[a b]`, `test:1:9-10 Expected token "]" but got (IDENTIFIER, "b")`},
	{`super`, `test:1:1-6 Expected . or [ after super.`},
	{`assert (a b); true`, `test:1:11-12 Expected token ")" but got (IDENTIFIER, "b")`},
	{`assert a: (a b); true`, `test:1:14-15 Expected token ")" but got (IDENTIFIER, "b")`},
	{`assert a: 'foo', true`, `test:1:16-17 Expected token ";" but got (",", ",")`},
	{`assert a: 'foo'; (a b)`, `test:1:21-22 Expected token ")" but got (IDENTIFIER, "b")`},
	{`error (a b)`, `test:1:10-11 Expected token ")" but got (IDENTIFIER, "b")`},
	{`if (a b) then c`, `test:1:7-8 Expected token ")" but got (IDENTIFIER, "b")`},
	{`if a b c`, `test:1:6-7 Expected token then but got (IDENTIFIER, "b")`},
	{`if a then (b c)`, `test:1:14-15 Expected token ")" but got (IDENTIFIER, "c")`},
	{`if a then b else (c d)`, `test:1:21-22 Expected token ")" but got (IDENTIFIER, "d")`},
	{`function(a) (a b)`, `test:1:16-17 Expected token ")" but got (IDENTIFIER, "b")`},
	{`function a a`, `test:1:10-11 Expected ( but got (IDENTIFIER, "a")`},
	{`import (a b)`, `test:1:11-12 Expected token ")" but got (IDENTIFIER, "b")`},
	{`import (a+b)`, `test:1:9-12 Computed imports are not allowed`},
	{`importstr (a b)`, `test:1:14-15 Expected token ")" but got (IDENTIFIER, "b")`},
	{`importstr (a+b)`, `test:1:12-15 Computed imports are not allowed`},
	{`local a = b ()`, `test:1:15 Expected , or ; but got end of file`},
	{`local a = b; (a b)`, `test:1:17-18 Expected token ")" but got (IDENTIFIER, "b")`},
	{`1+ <<`, `test:1:4-6 Not a unary operator: <<`},
	{`-(a b)`, `test:1:5-6 Expected token ")" but got (IDENTIFIER, "b")`},
	{`1~2`, `test:1:2-3 Not a binary operator: ~`},
	{`a[(b c)]`, `test:1:6-7 Expected token ")" but got (IDENTIFIER, "c")`},
	{`a[b c]`, `test:1:5-6 Expected token "]" but got (IDENTIFIER, "c")`},
	{`a[]`, `test:1:3-4 ast.Index requires an expression`},
	{`a[42:42:42:42]`, `test:1:11-12 Invalid slice: too many colons`},
	{`a[42:42::42]`, `test:1:8-10 Invalid slice: too many colons`},
	{`a{b c}`, `test:1:5-6 Expected token OPERATOR but got (IDENTIFIER, "c")`},
}
// TestParserErrors runs every entry of the errorTests table and checks
// that parsing fails with exactly the recorded error message.
func TestParserErrors(t *testing.T) {
	for _, tc := range errorTests {
		tc := tc
		t.Run(tc.input, func(t *testing.T) {
			tokens, lexErr := Lex("test", tc.input)
			if lexErr != nil {
				t.Errorf("Unexpected lex error\n input: %v\n error: %v", tc.input, lexErr)
				return
			}
			_, parseErr := Parse(tokens)
			switch {
			case parseErr == nil:
				t.Errorf("Expected parse error but got success\n input: %v", tc.input)
			case parseErr.Error() != tc.err:
				t.Errorf("Error string not as expected\n input: %v\n expected error: %v\n actual error: %v", tc.input, tc.err, parseErr.Error())
			}
		})
	}
}
/*
Copyright 2016 Google Inc. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package docparser
import (
"fmt"
"github.com/google/go-jsonnet/ast"
)
//////////////////////////////////////////////////////////////////////////////
// StaticError
// StaticError represents an error during parsing/lexing or static analysis.
// TODO(sbarzowski) Make it possible to have multiple static errors and warnings
type StaticError struct {
	// Loc is the source range the error refers to; it may be unset
	// (see MakeStaticErrorMsg), in which case Error() omits it.
	Loc ast.LocationRange
	// Msg is the human-readable description of the problem.
	Msg string
}
// MakeStaticErrorMsg builds a StaticError that carries only a message,
// with no source location attached.
func MakeStaticErrorMsg(msg string) StaticError {
	var err StaticError
	err.Msg = msg
	return err
}
// MakeStaticError builds a StaticError carrying both a message and the
// source range it applies to.
func MakeStaticError(msg string, lr ast.LocationRange) StaticError {
	return StaticError{
		Loc: lr,
		Msg: msg,
	}
}
// Error renders the error as "<location> <message>". When no location was
// recorded the location part is empty (the output then starts with a space).
func (err StaticError) Error() string {
	var loc string
	if err.Loc.IsSet() {
		loc = err.Loc.String()
	}
	return fmt.Sprintf("%v %v", loc, err.Msg)
}
package jsonnet
import (
"github.com/ksonnet/ksonnet-lib/ksonnet-gen/astext"
"github.com/ksonnet/ksonnet/pkg/docparser"
"github.com/pkg/errors"
"github.com/spf13/afero"
)
var (
	// importFs is the filesystem Import falls back to when the caller does
	// not supply one of its own (see ImportFromFs). Tests swap it for an
	// in-memory filesystem.
	importFs = afero.NewOsFs()
)
// Import imports jsonnet from a path, reading it through the package-level
// filesystem (importFs).
func Import(filename string) (*astext.Object, error) {
	fs := importFs
	return ImportFromFs(filename, fs)
}
// ImportFromFs imports jsonnet from a path on an afero filesystem.
// It returns an error for a blank filename, an unreadable file, or
// content that does not parse to a jsonnet object.
func ImportFromFs(filename string, fs afero.Fs) (*astext.Object, error) {
	if len(filename) == 0 {
		return nil, errors.New("filename was blank")
	}

	source, err := afero.ReadFile(fs, filename)
	if err != nil {
		return nil, errors.Wrap(err, "read lib")
	}

	return Parse(filename, string(source))
}
// Parse converts a jsonnet snippet to AST. The snippet must evaluate to an
// object at the root; anything else is an error.
func Parse(filename, src string) (*astext.Object, error) {
	tokens, lexErr := docparser.Lex(filename, src)
	if lexErr != nil {
		return nil, errors.Wrap(lexErr, "lex jsonnet snippet")
	}

	node, parseErr := docparser.Parse(tokens)
	if parseErr != nil {
		return nil, errors.Wrap(parseErr, "parse jsonnet snippet")
	}

	if root, ok := node.(*astext.Object); ok {
		return root, nil
	}
	return nil, errors.New("root was not an object")
}
package jsonnet
import (
"testing"
"github.com/google/go-jsonnet/ast"
"github.com/ksonnet/ksonnet-lib/ksonnet-gen/astext"
"github.com/spf13/afero"
"github.com/stretchr/testify/require"
)
// stageContent writes data to path on the provided filesystem, failing the
// test immediately on error.
//
// Fix: the original wrote to the package-level importFs instead of the fs
// argument, silently ignoring the caller's filesystem. Current callers pass
// importFs, so honoring the parameter is behavior-compatible.
func stageContent(t *testing.T, fs afero.Fs, path string, data []byte) {
	err := afero.WriteFile(fs, path, data, 0644)
	require.NoError(t, err)
}
// TestImport exercises Import against an in-memory filesystem staged with
// fixtures covering the success path and each failure mode (blank name,
// missing file, parse error, non-object root).
func TestImport(t *testing.T) {
	// Swap in a MemMapFs for the package-level filesystem and restore the
	// original when the test completes.
	ogFs := importFs
	defer func(ogFs afero.Fs) {
		importFs = ogFs
	}(ogFs)

	importFs = afero.NewMemMapFs()

	stageContent(t, importFs, "/obj.jsonnet", []byte("{}"))
	stageContent(t, importFs, "/array.jsonnet", []byte(`["a", "b"]`))
	// NOTE(review): the "local️" below carries a stray non-ASCII rune after
	// the keyword, which is presumably what makes this fixture fail to
	// parse — confirm this is intentional.
	stageContent(t, importFs, "/parser.jsonnet", []byte("local️ a = b; []"))

	cases := []struct {
		name  string
		path  string
		isErr bool
	}{
		{
			name: "with an existing jsonnet file",
			path: "/obj.jsonnet",
		},
		{
			// Blank filename is rejected before any filesystem access.
			name:  "no filename",
			isErr: true,
		},
		{
			name:  "invalid file",
			path:  "/invalid",
			isErr: true,
		},
		{
			name:  "parser error",
			path:  "/parser.jsonnet",
			isErr: true,
		},
		{
			// Parses fine, but the root is an array, not an object.
			name:  "not an object",
			path:  "/array.jsonnet",
			isErr: true,
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			obj, err := Import(tc.path)
			if tc.isErr {
				require.Error(t, err)
			} else {
				require.NoError(t, err)

				// Zero the NodeBase so the comparison ignores source
				// location metadata attached during parsing.
				obj.NodeBase = ast.NodeBase{}

				expected := &astext.Object{}
				require.Equal(t, expected, obj)
			}
		})
	}
}
package jsonnet
import (
"fmt"
"github.com/google/go-jsonnet/ast"
"github.com/ksonnet/ksonnet-lib/ksonnet-gen/astext"
"github.com/pkg/errors"
)
// Set sets an object key at path to a value.
//
// Missing fields along path are created as inherited (::) fields; existing
// non-object values along the path are an error, as is replacing an
// existing object value with a non-object.
func Set(object *astext.Object, path []string, value ast.Node) error {
	if len(path) == 0 {
		return errors.New("path was empty")
	}

	curObj := object

	for i, k := range path {
		field, err := findField(curObj, k)
		if err != nil {
			switch err.(type) {
			default:
				return err
			case *unknownField:
				field, err = astext.CreateField(k)
				if err != nil {
					return err
				}
				field.Hide = ast.ObjectFieldInherit
				curObj.Fields = append(curObj.Fields, *field)
				// append stores a copy of *field; repoint field at the
				// element inside the slice so subsequent writes land in the
				// object. (The original kept writing to the discarded local
				// copy, losing intermediate objects created along the path.)
				field = &curObj.Fields[len(curObj.Fields)-1]
			}
		}

		if i == len(path)-1 {
			// Refuse to clobber an existing object with a non-object.
			if canUpdateObject(field.Expr2, value) {
				return errors.New("can't set object to non object")
			}
			field.Expr2 = value
			return nil
		}

		// Descend: attach a fresh object to an empty field, follow an
		// existing object, or fail on any other node type.
		if field.Expr2 == nil {
			curObj = &astext.Object{}
			field.Expr2 = curObj
		} else if obj, ok := field.Expr2.(*astext.Object); ok {
			curObj = obj
		} else {
			return errors.Errorf("child is not an object at %q", k)
		}
	}

	return nil
}
// canUpdateObject reports whether replacing node1 with node2 would clobber
// an existing object with a non-object value (i.e. the update is invalid).
func canUpdateObject(node1, node2 ast.Node) bool {
	if !isNodeObject(node1) {
		return false
	}
	return !isNodeObject(node2)
}
// isNodeObject reports whether node is an *astext.Object.
func isNodeObject(node ast.Node) bool {
	switch node.(type) {
	case *astext.Object:
		return true
	default:
		return false
	}
}
// unknownField is the error returned by findField when an object has no
// field with the requested name. Callers type-switch on it to distinguish
// "missing field" from other failures.
type unknownField struct {
	name string // the field name that could not be found
}

// Error implements the error interface.
func (u *unknownField) Error() string {
	return fmt.Sprintf("unable to find field %q", u.name)
}
// findField returns a pointer to the field of object whose id equals id.
// It returns an *unknownField error when no field matches, or propagates
// the FieldID error for a field whose id cannot be determined.
func findField(object *astext.Object, id string) (*astext.ObjectField, error) {
	for i := range object.Fields {
		current := &object.Fields[i]
		fieldID, err := FieldID(*current)
		if err != nil {
			return nil, err
		}
		if fieldID == id {
			return current, nil
		}
	}
	return nil, &unknownField{name: id}
}
// FindObject finds a path in an object.
//
// It returns the object that directly contains the final path element,
// not the value stored at that element.
func FindObject(object *astext.Object, path []string) (*astext.Object, error) {
	if len(path) == 0 {
		return nil, errors.New("search path was empty")
	}

	head, rest := path[0], path[1:]
	for i := range object.Fields {
		id, err := FieldID(object.Fields[i])
		if err != nil {
			return nil, err
		}
		if id != head {
			continue
		}

		// Last path element: this object is the container we want.
		if len(rest) == 0 {
			return object, nil
		}

		child, ok := object.Fields[i].Expr2.(*astext.Object)
		if !ok {
			return nil, errors.Errorf("child is a %T. expected an object", object.Fields[i].Expr2)
		}
		return FindObject(child, rest)
	}

	return nil, errors.New("path was not found")
}
// FieldID returns the id for an object field.
//
// When Expr1 is set it must be a string literal and supplies the id;
// otherwise the field's Id identifier is used. A field with neither is
// an error.
func FieldID(field astext.ObjectField) (string, error) {
	switch {
	case field.Expr1 != nil:
		lf, ok := field.Expr1.(*ast.LiteralString)
		if !ok {
			return "", errors.New("field Expr1 is not a string")
		}
		return lf.Value, nil
	case field.Id != nil:
		return string(*field.Id), nil
	default:
		return "", errors.New("field does not have an ID")
	}
}
package jsonnet
import (
"bytes"
"io/ioutil"
"testing"
"github.com/google/go-jsonnet/ast"
"github.com/ksonnet/ksonnet-lib/ksonnet-gen/astext"
nm "github.com/ksonnet/ksonnet-lib/ksonnet-gen/nodemaker"
"github.com/ksonnet/ksonnet-lib/ksonnet-gen/printer"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// TestSet exercises Set on a freshly built {a:: {b:: {c:: "value"}}}
// object: updating an existing leaf, inserting a nested map, adding a new
// field, and rejecting the replacement of an object with a non-object.
func TestSet(t *testing.T) {
	// Nested map fixture for the "set map" case.
	labels := map[string]interface{}{
		"metadata": map[string]interface{}{
			"labels": map[string]interface{}{
				"label": "label",
			},
		},
	}

	labelsObject, err := nm.KVFromMap(labels)
	require.NoError(t, err)

	cases := []struct {
		name       string
		updatePath []string
		update     ast.Node
		expected   string
		isErr      bool
	}{
		{
			name:       "update existing field",
			updatePath: []string{"a", "b", "c"},
			update:     nm.NewInt(9).Node(),
			expected:   "{\n a:: {\n b:: {\n c:: 9,\n },\n },\n}",
		},
		{
			// Expected output lives in testdata/set-map.jsonnet.
			name:       "set map",
			updatePath: []string{"a", "d"},
			update:     labelsObject.Node(),
			expected:   string(testdata(t, "set-map.jsonnet")),
		},
		{
			name:       "set new field",
			updatePath: []string{"a", "e"},
			update:     nm.NewInt(9).Node(),
			expected:   "{\n a:: {\n b:: {\n c:: \"value\",\n },\n e: 9,\n },\n}",
		},
		{
			// "a" already holds an object; setting it to an int must fail.
			name:       "set object field to non object",
			updatePath: []string{"a"},
			update:     nm.NewInt(9).Node(),
			isErr:      true,
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			// Rebuild the fixture object per case so mutations from one
			// case cannot leak into the next.
			b := nm.NewObject()
			b.Set(nm.NewKey("c"), nm.NewStringDouble("value"))

			a := nm.NewObject()
			a.Set(nm.NewKey("b"), b)

			object := nm.NewObject()
			object.Set(nm.NewKey("a"), a)

			astObject := object.Node().(*astext.Object)

			err := Set(astObject, tc.updatePath, tc.update)
			if tc.isErr {
				require.Error(t, err)
			} else {
				require.NoError(t, err)

				// Print the mutated AST and compare textually.
				var got bytes.Buffer
				err = printer.Fprint(&got, astObject)
				require.NoError(t, err)

				require.Equal(t, tc.expected, got.String())
			}
		})
	}
}
// TestFindObject verifies that FindObject returns the object that directly
// contains the final path element — for identifier fields and for
// string-literal field names — and errors on unknown paths.
func TestFindObject(t *testing.T) {
	// Fixture: { a: { b: { c: "value" }, "d-1": "string" } }.
	b := nm.NewObject()
	b.Set(nm.NewKey("c"), nm.NewStringDouble("value"))

	a := nm.NewObject()
	a.Set(nm.NewKey("b"), b)
	a.Set(nm.NewKey("d-1", nm.KeyOptCategory(ast.ObjectFieldStr)), nm.NewStringDouble("string"))

	object := nm.NewObject()
	object.Set(nm.NewKey("a"), a)

	astObject := object.Node().(*astext.Object)

	cases := []struct {
		name     string
		path     []string
		expected ast.Node
		isErr    bool
	}{
		{
			// "c" lives in b, so b is the containing object returned.
			name:     "find nested object",
			path:     []string{"a", "b", "c"},
			expected: b.Node(),
		},
		{
			// "d-1" is a string-keyed field of a.
			name:     "find string id object",
			path:     []string{"a", "d-1"},
			expected: a.Node(),
		},
		{
			name:  "invalid path",
			path:  []string{"z"},
			isErr: true,
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			node, err := FindObject(astObject, tc.path)
			if tc.isErr {
				require.Error(t, err)
			} else {
				require.NoError(t, err)
				require.Equal(t, tc.expected, node)
			}
		})
	}
}
// TestFieldID covers the ways FieldID resolves a field name: a string
// literal in Expr1, an Identifier in Id, a non-string Expr1 (error), or
// neither set (error).
func TestFieldID(t *testing.T) {
	// Field named by a string-literal expression.
	expr1Field := astext.ObjectField{
		ObjectField: ast.ObjectField{
			Expr1: nm.NewStringDouble("my-field").Node(),
		},
	}

	// Field whose Expr1 is a non-string node — FieldID must reject it.
	invalidExpr1Field := astext.ObjectField{
		ObjectField: ast.ObjectField{
			Expr1: nm.NewInt(1).Node(),
		},
	}

	// Field named by an identifier.
	id := ast.Identifier("my-field")
	idField := astext.ObjectField{
		ObjectField: ast.ObjectField{
			Id: &id,
		},
	}

	cases := []struct {
		name     string
		field    astext.ObjectField
		expected string
		isErr    bool
	}{
		{
			// Zero-value field: neither Expr1 nor Id is set.
			name:  "no id",
			isErr: true,
		},
		{
			name:     "field with id in Expr1",
			field:    expr1Field,
			expected: "my-field",
		},
		{
			name:  "field with invalid Expr1",
			field: invalidExpr1Field,
			isErr: true,
		},
		{
			name:     "field with id as Identifier",
			field:    idField,
			expected: "my-field",
		},
	}

	for _, tc := range cases {
		t.Run(tc.name, func(t *testing.T) {
			id, err := FieldID(tc.field)
			if tc.isErr {
				require.Error(t, err)
			} else {
				require.NoError(t, err)
				assert.Equal(t, tc.expected, id)
			}
		})
	}
}
// testdata loads a fixture file from the testdata directory, failing the
// test immediately if it cannot be read.
func testdata(t *testing.T, name string) []byte {
	data, err := ioutil.ReadFile("testdata/" + name)
	require.NoError(t, err, "read testdata %s", name)
	return data
}
{
a:: {
b:: {
c:: "value",
},
d: {
metadata: {
labels: {
label: "label",
},
},
},
},
}
\ No newline at end of file
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment