diff --git a/Makefile b/Makefile index b411c734f7c269b8758b2ab18742143f4fe84811..13eeab633e564a1c897704a443c3eac30036830f 100644 --- a/Makefile +++ b/Makefile @@ -26,7 +26,7 @@ KCFG_TEST_FILE = lib/kubecfg_test.jsonnet GUESTBOOK_FILE = examples/guestbook.jsonnet JSONNET_FILES = $(KCFG_TEST_FILE) $(GUESTBOOK_FILE) # TODO: Simplify this once ./... ignores ./vendor -GO_PACKAGES = ./cmd/... ./utils/... ./pkg/... ./metadata/... +GO_PACKAGES = ./cmd/... ./utils/... ./pkg/... ./metadata/... ./prototype/... # Default cluster from this config is used for integration tests KUBECONFIG = $(HOME)/.kube/config diff --git a/cmd/delete.go b/cmd/delete.go index 88e92b260ec17d57658d7eba7dd2810cf4337d0d..f875b9538c07b071cb2aa83e03cc245a78a2b398 100644 --- a/cmd/delete.go +++ b/cmd/delete.go @@ -27,6 +27,8 @@ const ( func init() { RootCmd.AddCommand(deleteCmd) + addJsonnetFlagsToCmd(deleteCmd) + addKubectlFlagsToCmd(deleteCmd) addEnvCmdFlags(deleteCmd) deleteCmd.PersistentFlags().Int64(flagGracePeriod, -1, "Number of seconds given to resources to terminate gracefully. A negative value is ignored") } diff --git a/cmd/diff.go b/cmd/diff.go index dd5bff4155b0c6d476b06ae9c1540f1ee1d94885..b773d0a2b601a85bad8ba4debb6988ab6a84d0d0 100644 --- a/cmd/diff.go +++ b/cmd/diff.go @@ -24,6 +24,8 @@ import ( const flagDiffStrategy = "diff-strategy" func init() { + addJsonnetFlagsToCmd(diffCmd) + addKubectlFlagsToCmd(diffCmd) addEnvCmdFlags(diffCmd) diffCmd.PersistentFlags().String(flagDiffStrategy, "all", "Diff strategy, all or subset.") RootCmd.AddCommand(diffCmd) diff --git a/cmd/prototype.go b/cmd/prototype.go new file mode 100644 index 0000000000000000000000000000000000000000..097281ffe4ec47525300c86353219d059cab12d7 --- /dev/null +++ b/cmd/prototype.go @@ -0,0 +1,291 @@ +// Copyright 2017 The kubecfg authors +// +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package cmd + +import ( + "fmt" + "strings" + + "github.com/ksonnet/kubecfg/prototype" + "github.com/ksonnet/kubecfg/prototype/snippet" + "github.com/spf13/cobra" +) + +func init() { + RootCmd.AddCommand(prototypeCmd) + prototypeCmd.AddCommand(prototypeDescribeCmd) + prototypeCmd.AddCommand(prototypeSearchCmd) + prototypeCmd.AddCommand(prototypeUseCmd) +} + +var prototypeCmd = &cobra.Command{ + Use: "prototype", + Short: `Instantiate, inspect, and get examples for ksonnet prototypes`, + RunE: func(cmd *cobra.Command, args []string) error { + return fmt.Errorf("Command 'prototype' requires a subcommand\n\n%s", cmd.UsageString()) + }, + Long: `Manage, inspect, instantiate, and get examples for ksonnet prototypes. + +Prototypes are Kubernetes app configuration templates with "holes" that can be +filled in by (e.g.) the ksonnet CLI tool or a language server. For example, a +prototype for a 'apps.v1beta1.Deployment' might require a name and image, and +the ksonnet CLI could expand this to a fully-formed 'Deployment' object. + +Commands: + use Instantiate prototype, filling in parameters from flags, and + emitting the generated code to stdout. 
+ describe Display documentation and details about a prototype + search Search for a prototype`, + + Example: ` # Display documentation about prototype + # 'io.ksonnet.pkg.prototype.simple-deployment', including: + # + # (1) a description of what gets generated during instantiation + # (2) a list of parameters that are required to be passed in with CLI flags + # + # NOTE: Many subcommands only require the user to specify enough of the + # identifier to disambiguate it among other known prototypes, which is why + # 'simple-deployment' is given as argument instead of the fully-qualified + # name. + ksonnet prototype describe simple-deployment + + # Instantiate prototype 'io.ksonnet.pkg.prototype.simple-deployment', using + # the 'nginx' image, and port 80 exposed. + # + # SEE ALSO: Note above for a description of why this subcommand can take + # 'simple-deployment' instead of the fully-qualified prototype name. + ksonnet prototype use simple-deployment \ + --name=nginx \ + --image=nginx \ + --port=80 \ + --portName=http + + # Search known prototype metadata for the string 'deployment'. + ksonnet prototype search deployment`, +} + +var prototypeDescribeCmd = &cobra.Command{ + Use: "describe <prototype-name>", + Short: `Describe a ksonnet prototype`, + RunE: func(cmd *cobra.Command, args []string) error { + if len(args) != 1 { + return fmt.Errorf("Command 'prototype describe' requires a prototype name\n\n%s", cmd.UsageString()) + } + + query := args[0] + + proto, err := fundUniquePrototype(query) + if err != nil { + return err + } + + fmt.Println(`PROTOTYPE NAME:`) + fmt.Println(proto.Name) + fmt.Println() + fmt.Println(`DESCRIPTION:`) + fmt.Println(proto.Template.Description) + fmt.Println() + fmt.Println(`REQUIRED PARAMETERS:`) + fmt.Println(proto.RequiredParams().PrettyString(" ")) + fmt.Println() + fmt.Println(`OPTIONAL PARAMETERS:`) + fmt.Println(proto.OptionalParams().PrettyString(" ")) + fmt.Println() + fmt.Println(`TEMPLATE:`) + fmt.Println(strings.Join(proto.Template.Body, "\n")) + + return nil + }, + Long: `Output documentation, examples, and other information for some ksonnet +prototype uniquely identified by some (possibly partial) 'prototype-name'. This +includes: + + (1) a description of what gets generated during instantiation + (2) a list of parameters that are required to be passed in with CLI flags + +'prototype-name' need only contain enough of the suffix of a name to uniquely +disambiguate it among known names. For example, 'deployment' may resolve +ambiguously, in which case 'use' will fail, while 'simple-deployment' might be +unique enough to resolve to 'io.ksonnet.pkg.prototype.simple-deployment'.`, + + Example: ` # Display documentation about prototype, including: + ksonnet prototype describe io.ksonnet.pkg.prototype.simple-deployment + + # Display documentation about prototype using a unique suffix of an + # identifier. That is, this command only requires a long enough suffix to + # uniquely identify a ksonnet prototype. In this example, the suffix + # 'simple-deployment' is enough to uniquely identify + # 'io.ksonnet.pkg.prototype.simple-deployment', but 'deployment' might not + # be, as several names end with that suffix. 
+ ksonnet prototype describe simple-deployment`, +} + +var prototypeSearchCmd = &cobra.Command{ + Use: "search <name-substring>", + Short: `Search for a ksonnet prototype`, + RunE: func(cmd *cobra.Command, args []string) error { + if len(args) != 1 { + return fmt.Errorf("Command 'prototype search' requires a prototype name\n\n%s", cmd.UsageString()) + } + + query := args[0] + + index := prototype.NewIndex([]*prototype.SpecificationSchema{}) + protos, err := index.SearchNames(query, prototype.Substring) + if err != nil { + return err + } else if len(protos) == 0 { + return fmt.Errorf("Failed to find any search results for query '%s'", query) + } + + for _, proto := range protos { + fmt.Println(proto.Name) + } + + return nil + }, + Long: `Search ksonnet for prototypes whose names contain 'name-substring'.`, + Example: ` # Search known prototype metadata for the string 'deployment'. + ksonnet prototype search deployment`, +} + +var prototypeUseCmd = &cobra.Command{ + Use: "use <prototype-name> [parameter-flags]", + Short: `Instantiate prototype, emitting the generated code to stdout.`, + DisableFlagParsing: true, + RunE: func(cmd *cobra.Command, rawArgs []string) error { + if len(rawArgs) < 1 { + return fmt.Errorf("Command 'prototype use' requires a prototype name\n\n%s", cmd.UsageString()) + } + + query := rawArgs[0] + + proto, err := fundUniquePrototype(query) + if err != nil { + return err + } + + for _, param := range proto.RequiredParams() { + cmd.PersistentFlags().String(param.Name, "", param.Description) + } + + for _, param := range proto.OptionalParams() { + cmd.PersistentFlags().String(param.Name, *param.Default, param.Description) + } + + cmd.DisableFlagParsing = false + cmd.ParseFlags(rawArgs) + flags := cmd.Flags() + + missingReqd := prototype.ParamSchemas{} + values := map[string]string{} + for _, param := range proto.RequiredParams() { + val, err := flags.GetString(param.Name) + if err != nil { + return err + } else if val == "" { + missingReqd = append(missingReqd, param) + } else if _, ok := values[param.Name]; ok { + return fmt.Errorf("Prototype '%s' has multiple parameters with name '%s'", proto.Name, param.Name) + } + + values[param.Name] = val + } + + if len(missingReqd) > 0 { + return fmt.Errorf("Failed to instantiate prototype '%s'. The following required parameters are missing:\n%s", proto.Name, missingReqd.PrettyString("")) + } + + for _, param := range proto.OptionalParams() { + val, err := flags.GetString(param.Name) + if err != nil { + return err + } else if _, ok := values[param.Name]; ok { + return fmt.Errorf("Prototype '%s' has multiple parameters with name '%s'", proto.Name, param.Name) + } + + values[param.Name] = val + } + + tm := snippet.Parse(strings.Join(proto.Template.Body, "\n")) + text, err := tm.Evaluate(values) + if err != nil { + return err + } + fmt.Println(text) + return nil + }, + Long: `Instantiate prototype uniquely identified by (possibly partial) +'prototype-name', filling in parameters from flags, and emitting the generated +code to stdout. + +'prototype-name' need only contain enough of the suffix of a name to uniquely +disambiguate it among known names. For example, 'deployment' may resolve +ambiguously, in which case 'use' will fail, while 'simple-deployment' might be +unique enough to resolve to 'io.ksonnet.pkg.prototype.simple-deployment'.`, + + Example: ` # Instantiate prototype 'io.ksonnet.pkg.prototype.simple-deployment', using + # the 'nginx' image, and port 80 exposed. 
+ ksonnet prototype use io.ksonnet.pkg.prototype.simple-deployment \ + --name=nginx \ + --image=nginx \ + --port=80 \ + --portName=http + + # Instantiate prototype using a unique suffix of an identifier. That is, this + # command only requires a long enough suffix to uniquely identify a ksonnet + # prototype. In this example, the suffix 'simple-deployment' is enough to + # uniquely identify 'io.ksonnet.pkg.prototype.simple-deployment', but + # 'deployment' might not be, as several names end with that suffix. + ksonnet prototype describe simple-deployment`, +} + +func fundUniquePrototype(query string) (*prototype.SpecificationSchema, error) { + index := prototype.NewIndex([]*prototype.SpecificationSchema{}) + + suffixProtos, err := index.SearchNames(query, prototype.Suffix) + if err != nil { + return nil, err + } + + if len(suffixProtos) == 1 { + // Success. + return suffixProtos[0], nil + } else if len(suffixProtos) > 1 { + // Ambiguous match. + names := specNames(suffixProtos) + return nil, fmt.Errorf("Ambiguous match for '%s':\n%s", query, strings.Join(names, "\n")) + } else { + // No matches. + substrProtos, err := index.SearchNames(query, prototype.Substring) + if err != nil || len(substrProtos) == 0 { + return nil, fmt.Errorf("No prototype names matched '%s'", query) + } + + partialMatches := specNames(substrProtos) + partials := strings.Join(partialMatches, "\n") + return nil, fmt.Errorf("No prototype names matched '%s'; a list of partial matches:\n%s", query, partials) + } +} + +func specNames(protos []*prototype.SpecificationSchema) []string { + partialMatches := []string{} + for _, proto := range protos { + partialMatches = append(partialMatches, proto.Name) + } + + return partialMatches +} diff --git a/cmd/root.go b/cmd/root.go index afbde568f41dacc3edecdb20893f6c002f97fb79..d2067c1e26af6b0876b5839ee17ce3df50d51033 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -63,27 +63,10 @@ var clientConfig clientcmd.ClientConfig func init() { RootCmd.PersistentFlags().CountP(flagVerbose, "v", "Increase verbosity. May be given multiple times.") - RootCmd.PersistentFlags().StringP(flagJpath, "J", "", "Additional jsonnet library search path") - RootCmd.PersistentFlags().StringSliceP(flagExtVar, "V", nil, "Values of external variables") - RootCmd.PersistentFlags().StringSlice(flagExtVarFile, nil, "Read external variable from a file") - RootCmd.PersistentFlags().StringSliceP(flagTlaVar, "A", nil, "Values of top level arguments") - RootCmd.PersistentFlags().StringSlice(flagTlaVarFile, nil, "Read top level argument from a file") - RootCmd.PersistentFlags().String(flagResolver, "noop", "Change implementation of resolveImage native function. One of: noop, registry") - RootCmd.PersistentFlags().String(flagResolvFail, "warn", "Action when resolveImage fails. One of ignore,warn,error") - - // The "usual" clientcmd/kubectl flags - loadingRules := clientcmd.NewDefaultClientConfigLoadingRules() - loadingRules.DefaultClientConfig = &clientcmd.DefaultClientConfig - overrides := clientcmd.ConfigOverrides{} - kflags := clientcmd.RecommendedConfigOverrideFlags("") - RootCmd.PersistentFlags().StringVar(&loadingRules.ExplicitPath, "kubeconfig", "", "Path to a kube config. 
Only required if out-of-cluster") - clientcmd.BindOverrideFlags(&overrides, RootCmd.PersistentFlags(), kflags) - clientConfig = clientcmd.NewInteractiveDeferredLoadingClientConfig(loadingRules, &overrides, os.Stdin) - - RootCmd.PersistentFlags().Set("logtostderr", "true") } -// RootCmd is the root of cobra subcommand tree +// RootCmd is the root of all commands that expand Jsonnet code or talk to +// the API server var RootCmd = &cobra.Command{ Use: "kubecfg", Short: "Synchronise Kubernetes resources with config files", @@ -108,6 +91,29 @@ var RootCmd = &cobra.Command{ }, } +func addJsonnetFlagsToCmd(cmd *cobra.Command) { + cmd.PersistentFlags().StringSliceP(flagJpath, "J", nil, "Additional jsonnet library search path") + cmd.PersistentFlags().StringSliceP(flagExtVar, "V", nil, "Values of external variables") + cmd.PersistentFlags().StringSlice(flagExtVarFile, nil, "Read external variable from a file") + cmd.PersistentFlags().StringSliceP(flagTlaVar, "A", nil, "Values of top level arguments") + cmd.PersistentFlags().StringSlice(flagTlaVarFile, nil, "Read top level argument from a file") + cmd.PersistentFlags().String(flagResolver, "noop", "Change implementation of resolveImage native function. One of: noop, registry") + cmd.PersistentFlags().String(flagResolvFail, "warn", "Action when resolveImage fails. One of ignore,warn,error") +} + +func addKubectlFlagsToCmd(cmd *cobra.Command) { + // The "usual" clientcmd/kubectl flags + loadingRules := clientcmd.NewDefaultClientConfigLoadingRules() + loadingRules.DefaultClientConfig = &clientcmd.DefaultClientConfig + overrides := clientcmd.ConfigOverrides{} + kflags := clientcmd.RecommendedConfigOverrideFlags("") + cmd.PersistentFlags().StringVar(&loadingRules.ExplicitPath, "kubeconfig", "", "Path to a kube config. Only required if out-of-cluster") + clientcmd.BindOverrideFlags(&overrides, cmd.PersistentFlags(), kflags) + clientConfig = clientcmd.NewInteractiveDeferredLoadingClientConfig(loadingRules, &overrides, os.Stdin) + + cmd.PersistentFlags().Set("logtostderr", "true") +} + func logLevel(verbosity int) log.Level { switch verbosity { case 0: @@ -168,11 +174,10 @@ func newExpander(cmd *cobra.Command) (*template.Expander, error) { spec.EnvJPath = filepath.SplitList(os.Getenv("KUBECFG_JPATH")) - jpath, err := flags.GetString(flagJpath) + spec.FlagJpath, err = flags.GetStringSlice(flagJpath) if err != nil { return nil, err } - spec.FlagJpath = filepath.SplitList(jpath) spec.ExtVars, err = flags.GetStringSlice(flagExtVar) if err != nil { diff --git a/cmd/show.go b/cmd/show.go index 5ea288e549043adc1e299f3ca29f21ae32fa4259..96fbffa69e66798ecbac5f4995f9096122e4cdbf 100644 --- a/cmd/show.go +++ b/cmd/show.go @@ -27,6 +27,8 @@ const ( func init() { RootCmd.AddCommand(showCmd) + addJsonnetFlagsToCmd(showCmd) + addKubectlFlagsToCmd(showCmd) addEnvCmdFlags(showCmd) showCmd.PersistentFlags().StringP(flagFormat, "o", "yaml", "Output format. 
Supported values are: json, yaml") } diff --git a/cmd/update.go b/cmd/update.go index 987df5ae894bb8a75d1d9b51c141681468b7f626..b37d9dbea24f5b388c5ee94fe9a484dceec36c77 100644 --- a/cmd/update.go +++ b/cmd/update.go @@ -49,6 +49,8 @@ const ( func init() { RootCmd.AddCommand(updateCmd) + addJsonnetFlagsToCmd(updateCmd) + addKubectlFlagsToCmd(updateCmd) addEnvCmdFlags(updateCmd) updateCmd.PersistentFlags().Bool(flagCreate, true, "Create missing resources") updateCmd.PersistentFlags().Bool(flagSkipGc, false, "Don't perform garbage collection, even with --"+flagGcTag) @@ -57,9 +59,8 @@ func init() { } var updateCmd = &cobra.Command{ - Use: "update [<env>|-f <file-or-dir>]", - Short: `Update (or optionally create) Kubernetes resources on the cluster using the -local configuration. Accepts JSON, YAML, or Jsonnet.`, + Use: "update [<env>|-f <file-or-dir>]", + Short: `Update (or optionally create) Kubernetes resources on the cluster using the local configuration. Accepts JSON, YAML, or Jsonnet.`, RunE: func(cmd *cobra.Command, args []string) error { flags := cmd.Flags() var err error diff --git a/cmd/validate.go b/cmd/validate.go index 171c1e3d50376b23c887d7c05cb3ca1a0b5cf0b3..d5e6abc88c9d19b5670447b1e09d0fd6fa01c800 100644 --- a/cmd/validate.go +++ b/cmd/validate.go @@ -23,6 +23,8 @@ import ( func init() { RootCmd.AddCommand(validateCmd) + addJsonnetFlagsToCmd(validateCmd) + addKubectlFlagsToCmd(validateCmd) addEnvCmdFlags(validateCmd) } diff --git a/prototype/index.go b/prototype/index.go new file mode 100644 index 0000000000000000000000000000000000000000..385dddd377c1e98a0d1dc98c6db1cac5f478161e --- /dev/null +++ b/prototype/index.go @@ -0,0 +1,41 @@ +package prototype + +import ( + "fmt" + "strings" +) + +const ( + delimiter = "\x00" +) + +type index struct { + prototypes map[string]*SpecificationSchema +} + +func (idx *index) SearchNames(query string, opts SearchOptions) ([]*SpecificationSchema, error) { + // TODO(hausdorff): This is the world's worst search algorithm. Improve it at + // some point. + + prototypes := []*SpecificationSchema{} + + for name, prototype := range idx.prototypes { + isSearchResult := false + switch opts { + case Prefix: + isSearchResult = strings.HasPrefix(name, query) + case Suffix: + isSearchResult = strings.HasSuffix(name, query) + case Substring: + isSearchResult = strings.Contains(name, query) + default: + return nil, fmt.Errorf("Unrecognized search option '%d'", opts) + } + + if isSearchResult { + prototypes = append(prototypes, prototype) + } + } + + return prototypes, nil +} diff --git a/prototype/interface.go b/prototype/interface.go new file mode 100644 index 0000000000000000000000000000000000000000..ad258891efc31a80709e89fd38f8705507db49e8 --- /dev/null +++ b/prototype/interface.go @@ -0,0 +1,52 @@ +package prototype + +import "encoding/json" + +// Unmarshal takes the bytes of a JSON-encoded prototype specification, and +// deserializes them to a `SpecificationSchema`. +func Unmarshal(bytes []byte) (*SpecificationSchema, error) { + var p SpecificationSchema + err := json.Unmarshal(bytes, &p) + if err != nil { + return nil, err + } + + return &p, nil +} + +// SearchOptions represents the type of prototype search to execute on an +// `Index`. +type SearchOptions int + +const ( + // Prefix represents a search over prototype name prefixes. + Prefix SearchOptions = iota + + // Suffix represents a search over prototype name suffices. + Suffix + + // Substring represents a search over substrings of prototype names. 
+ Substring +) + +// Index represents a queryable index of prototype specifications. +type Index interface { + SearchNames(query string, opts SearchOptions) ([]*SpecificationSchema, error) +} + +// NewIndex constructs an index of prototype specifications from a list. +func NewIndex(prototypes []*SpecificationSchema) Index { + idx := map[string]*SpecificationSchema{} + + for _, p := range defaultPrototypes { + idx[p.Name] = p + } + + for _, p := range prototypes { + idx[p.Name] = p + } + + return &index{ + prototypes: idx, + } +} diff --git a/prototype/prototype_test.go b/prototype/prototype_test.go new file mode 100644 index 0000000000000000000000000000000000000000..30c5473e53ae7824fa6e293cf9766c6a4dd660ed --- /dev/null +++ b/prototype/prototype_test.go @@ -0,0 +1,145 @@ +package prototype + +import ( + "sort" + "testing" +) + +const ( + unmarshalErrPattern = "Expected value of %s: '%s', got: '%s'" +) + +var simpleService = `{ + "apiVersion": "0.1", + "name": "io.some-vendor.pkg.simple-service", + "template": { + "description": "Generates a simple service with a port exposed", + "body": [ + "local k = import 'ksonnet.beta.2/k.libsonnet';", + "", + "local service = k.core.v1.service;", + "local servicePort = k.core.v1.service.mixin.spec.portsType;", + "local port = servicePort.new(std.extVar('port'), std.extVar('portName'));", + "", + "local name = std.extVar('name');", + "k.core.v1.service.new('%-service' % name, {app: name}, port)" + ] + } +}` + +var simpleDeployment = `{ + "apiVersion": "0.1", + "name": "io.some-vendor.pkg.simple-deployment", + "template": { + "description": "Generates a simple service with a port exposed", + "body": [ + "local k = import 'ksonnet.beta.2/k.libsonnet';", + "local deployment = k.apps.v1beta1.deployment;", + "local container = deployment.mixin.spec.template.spec.containersType;", + "", + "local appName = std.extVar('name');", + "local appContainer = container.new(appName, std.extVar('image'));", + "deployment.new(appName, std.extVar('replicas'), appContainer, {app: appName})" + ] + } +}` + +func unmarshal(t *testing.T, bytes []byte) *SpecificationSchema { + p, err := Unmarshal(bytes) + if err != nil { + t.Fatalf("Failed to deserialize prototype:\n%v", err) + } + + return p +} + +func assertProp(t *testing.T, name string, expected string, actual string) { + if actual != expected { + t.Errorf(unmarshalErrPattern, name, expected, actual) + } +} + +func TestSimpleUnmarshal(t *testing.T) { + p := unmarshal(t, []byte(simpleService)) + + assertProp(t, "apiVersion", p.APIVersion, "0.1") + assertProp(t, "name", p.Name, "io.some-vendor.pkg.simple-service") + assertProp(t, "description", p.Template.Description, "Generates a simple service with a port exposed") +} + +var testPrototypes = map[string]string{ + "io.ksonnet.pkg.simple-service": simpleService, +} + +func assertSearch(t *testing.T, idx Index, opts SearchOptions, query string, expectedNames []string) { + ps, err := idx.SearchNames(query, opts) + if err != nil { + t.Fatalf("Failed to search index:\n%v", err) + } + + sort.Slice(ps, func(i, j int) bool { + return ps[i].Name < ps[j].Name + }) + + actualNames := []string{} + for _, p := range ps { + actualNames = append(actualNames, p.Name) + } + + sort.Slice(expectedNames, func(i, j int) bool { + return expectedNames[i] < expectedNames[j] + }) + + if len(expectedNames) != len(ps) { + t.Fatalf("Query '%s' returned results:\n%s, but expected:\n%s", query, actualNames, expectedNames) + } + + for i := 0; i < len(expectedNames); i++ { + if actualNames[i] != 
expectedNames[i] { + t.Fatalf("Query '%s' returned results:\n%s, but expected:\n%s", query, actualNames, expectedNames) + } + } +} + +func TestSearch(t *testing.T) { + svc := unmarshal(t, []byte(simpleService)) + depl := unmarshal(t, []byte(simpleDeployment)) + idx := NewIndex([]*SpecificationSchema{svc, depl}) + + // Prefix searches. + assertSearch(t, idx, Prefix, "service", []string{}) + assertSearch(t, idx, Prefix, "simple", []string{}) + assertSearch(t, idx, Prefix, "io.ksonnet", []string{ + "io.ksonnet.pkg.yaml-single-port-service", + "io.ksonnet.pkg.yaml-namespace", + "io.ksonnet.pkg.yaml-empty-configMap", + "io.ksonnet.pkg.yaml-single-port-deployment", + }) + assertSearch(t, idx, Prefix, "foo", []string{}) + + // Suffix searches. + assertSearch(t, idx, Suffix, "service", []string{ + "io.ksonnet.pkg.yaml-single-port-service", + "io.some-vendor.pkg.simple-service", + }) + assertSearch(t, idx, Suffix, "simple", []string{}) + assertSearch(t, idx, Suffix, "io.ksonnet", []string{}) + assertSearch(t, idx, Suffix, "foo", []string{}) + + // Substring searches. + assertSearch(t, idx, Substring, "service", []string{ + "io.ksonnet.pkg.yaml-single-port-service", + "io.some-vendor.pkg.simple-service", + }) + assertSearch(t, idx, Substring, "simple", []string{ + "io.some-vendor.pkg.simple-deployment", + "io.some-vendor.pkg.simple-service", + }) + assertSearch(t, idx, Substring, "io.ksonnet", []string{ + "io.ksonnet.pkg.yaml-single-port-service", + "io.ksonnet.pkg.yaml-single-port-deployment", + "io.ksonnet.pkg.yaml-empty-configMap", + "io.ksonnet.pkg.yaml-namespace", + }) + assertSearch(t, idx, Substring, "foo", []string{}) +} diff --git a/prototype/snippet/interface.go b/prototype/snippet/interface.go new file mode 100644 index 0000000000000000000000000000000000000000..fe51230b82ee5becff7126e10fc2e8a88ebbcc41 --- /dev/null +++ b/prototype/snippet/interface.go @@ -0,0 +1,58 @@ +// Package snippet provides primitives for parsing and evaluating TextMate +// snippets. In general, snippets are text with "placeholders" for the user to +// fill in. For example something like "foo ${bar}" would expect the user to +// provide a value for the `bar` variable. +// +// This code is influenced heavily by the more formal treatment specified by the +// Language Server Protocol, though (since this does not have to serve an +// interactive prompt in an IDE) we do omit some features for simplification +// (e.g., we have limited support for tabstops and builtin variables like +// `TM_SELECTED_TEXT`). +// +// A parsed snippet template is represented as a tree consisting of one of +// several types: +// +// * Text: representing free text, i.e., text that is not a part of (e.g.) a +// variable. +// * Variable: Takes the forms `${varName}` and `${varName:defaultValue}`. +// When a variable isn't set, an empty string is inserted. If the variable +// is undefined, its name is inserted as the default value. +// * Tabstop (currently unused by our tool, but implemented anyway): takes the +// form of the '$' character followed by a number, e.g., `$1` or `$2`. +// Inside an editor, a tabstop represents where to navigate when the user +// presses tab or shift-tab. +// * Placeholder (currently unused by our tool, but implemented anyway): +// representing a tabstop with a default value. These are usually of the +// form `${3:someDefaultValue}`. They can also be nested, as in +// `${1:firstValue${2:secondValue}`, or recursive, as in `${1:foo$1}`. +// +// TextMate does not specify a grammar for this templating language. 
This parser +// implements the following grammar for, which we believe is close enough to the +// intention of TextMate. The characters `$`, `}`, and `\` can be escaped with +// `\`, but for simplicity we omit them from the grammar. +// +// any ::= tabstop | placeholder | choice | variable | text +// tabstop ::= '$' int | '${' int '}' +// placeholder ::= '${' int ':' any '}' +// choice ::= '${' int '|' text (',' text)* '|}' +// variable ::= '$' var | '${' var }' | '${' var ':' any '}' +// var ::= [_a-zA-Z] [_a-zA-Z0-9]* +// int ::= [0-9]+ +// text ::= .* +package snippet + +// Parse takes a TextMate snippet and parses it, producing a `Template`. There +// is no opportunity for a parse error, since the grammar specifies that +// malformed placeholders are simply text. +// +// The grammar of the parse is formalized in part by the Language Server +// Protocol, and detailed in the package comments. +func Parse(template string) Template { + return parse(template, false) +} + +// Template represents a parsed TextMate snippet. The template can be evaluated +// (with respect to some set of variables) using `Evaluate`. +type Template interface { + Evaluate(values map[string]string) (string, error) +} diff --git a/prototype/snippet/lexer.go b/prototype/snippet/lexer.go new file mode 100644 index 0000000000000000000000000000000000000000..acd930f72af7fac757b3f43dd13b9b7c6116b231 --- /dev/null +++ b/prototype/snippet/lexer.go @@ -0,0 +1,137 @@ +package snippet + +type tokenType int + +const ( + dollar tokenType = iota + colon + curlyOpen + curlyClose + backslash + number + variableName + format + eof +) + +func (tt tokenType) String() string { + s, _ := tokenTypeToString[tt] + return s +} + +type token struct { + kind tokenType + pos int + len int +} + +var stringToTokenType = map[rune]tokenType{ + '$': dollar, + ':': colon, + '{': curlyOpen, + '}': curlyClose, + '\\': backslash, +} + +var tokenTypeToString = map[tokenType]string{ + dollar: "Dollar", + colon: "Colon", + curlyOpen: "CurlyOpen", + curlyClose: "CurlyClose", + backslash: "Backslash", + number: "Int", + variableName: "VariableName", + format: "Format", + eof: "EOF", +} + +type lexer struct { + value []rune + pos int +} + +func isDigitCharacter(ch rune) bool { + return ch >= '0' && ch <= '9' +} + +func isVariableCharacter(ch rune) bool { + return ch == '_' || (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z') +} + +func newLexer() *lexer { + s := lexer{} + s.text("") + + return &s +} + +func (s *lexer) text(value string) { + s.value = []rune(value) + s.pos = 0 +} + +func (s *lexer) tokenText(tok *token) string { + return string(s.value[tok.pos : tok.pos+tok.len]) +} + +func (s *lexer) next() *token { + valueLen := len(s.value) + if s.pos >= valueLen { + return &token{kind: eof, pos: s.pos, len: 0} + } + + pos := s.pos + len := 0 + ch := s.value[pos] + + // Known token types. + var t tokenType + if t, ok := stringToTokenType[ch]; ok { + s.pos++ + return &token{kind: t, pos: pos, len: 1} + } + + // Number token. + if isDigitCharacter(ch) { + t = number + for pos+len < valueLen { + ch = s.value[pos+len] + if !isDigitCharacter(ch) { + break + } + len++ + } + + s.pos += len + return &token{t, pos, len} + } + + // Variable. + if isVariableCharacter(ch) { + t = variableName + for pos+len < valueLen { + ch = s.value[pos+len] + if !isVariableCharacter(ch) && !isDigitCharacter(ch) { + break + } + len++ + } + + s.pos += len + return &token{t, pos, len} + } + + // Formatting characters. 
+ t = format + for pos+len < valueLen { + ch = s.value[pos+len] + _, isStaticToken := stringToTokenType[ch] + if isStaticToken || isDigitCharacter(ch) || isVariableCharacter(ch) { + break + } + len++ + } + + s.pos += len + return &token{t, pos, len} +} diff --git a/prototype/snippet/marker.go b/prototype/snippet/marker.go new file mode 100644 index 0000000000000000000000000000000000000000..c038f080abcd93a01cc402793e543692b03a823d --- /dev/null +++ b/prototype/snippet/marker.go @@ -0,0 +1,168 @@ +package snippet + +import "bytes" + +// ---------------------------------------------------------------------------- +// Interfaces. +// ---------------------------------------------------------------------------- + +type index int + +type indices []index + +type marker interface { + children() *markers + parent() marker + setParent(p marker) + String() string + len() int +} + +type markers []marker + +func (ms *markers) append(m ...marker) { + *ms = append(*ms, m...) +} + +func (ms *markers) delete(i int) { + *ms = append((*ms)[:i], (*ms)[i+1:]...) +} + +func (ms *markers) String() string { + var buf bytes.Buffer + + for _, m := range *ms { + buf.WriteString(m.String()) + } + return buf.String() +} + +func (ms *markers) setParents(m marker) { + for _, child := range *ms { + child.setParent(m) + } +} + +// ---------------------------------------------------------------------------- +// Base. +// ---------------------------------------------------------------------------- + +type markerImpl struct { + // _markerBrand: any; + _children *markers + _parent marker +} + +func (mi *markerImpl) children() *markers { + return mi._children +} + +func (mi *markerImpl) parent() marker { + return mi._parent +} + +func (mi *markerImpl) setParent(p marker) { + mi._parent = p +} + +func (mi *markerImpl) String() string { + return "" +} + +func (mi *markerImpl) len() int { + return 0 +} + +// ---------------------------------------------------------------------------- +// Text. +// ---------------------------------------------------------------------------- + +type text struct { + markerImpl + data string +} + +func newText(data string) *text { + return &text{ + markerImpl: markerImpl{ + _children: &markers{}, + }, + data: data, + } +} + +func (t *text) String() string { + return t.data +} + +func (t *text) len() int { + return len(t.data) +} + +// ---------------------------------------------------------------------------- +// Placeholder. +// ---------------------------------------------------------------------------- + +type placeholder struct { + markerImpl + index int +} + +func newPlaceholder(index int, children *markers) *placeholder { + p := &placeholder{ + // markerImpl: *newMarkerImplWithChildren(children), + markerImpl: markerImpl{ + _children: children, + }, + index: index, + } + p._children.setParents(p) + return p +} + +func (p *placeholder) String() string { + return p._children.String() +} + +func (p *placeholder) isFinalTabstop() bool { + return p.index == 0 +} + +// ---------------------------------------------------------------------------- +// Variable. 
+// ---------------------------------------------------------------------------- + +type variable struct { + markerImpl + resolvedValue *string + name string +} + +func newVariable(name string, children *markers) *variable { + v := &variable{ + markerImpl: markerImpl{ + _children: children, + }, + name: name, + } + v._children.setParents(v) + return v +} + +func (v *variable) isDefined() bool { + return v.resolvedValue != nil +} + +func (v *variable) len() int { + if v.isDefined() { + return len(*v.resolvedValue) + } + return v.markerImpl.len() +} + +func (v *variable) String() string { + if v.isDefined() { + return *v.resolvedValue + } + return v._children.String() +} diff --git a/prototype/snippet/parser.go b/prototype/snippet/parser.go new file mode 100644 index 0000000000000000000000000000000000000000..cee058c146c39fc10cefb29ac78e6e399a2dd8bc --- /dev/null +++ b/prototype/snippet/parser.go @@ -0,0 +1,171 @@ +package snippet + +import ( + "regexp" + "strconv" +) + +func parse(template string, enforceFinalTabstop bool) *textmateSnippet { + m := newSnippetParser().parse(template, true, enforceFinalTabstop) + return newTextmateSnippet(m) +} + +type snippetParser struct { + tokenizer lexer + currToken *token + prevToken *token +} + +func newSnippetParser() *snippetParser { + return &snippetParser{ + tokenizer: *newLexer(), + } +} + +func (sp *snippetParser) parse(value string, insertFinalTabstop bool, enforceFinalTabstop bool) *markers { + ms := markers{} + + sp.tokenizer.text(value) + sp.currToken = sp.tokenizer.next() + for sp.parseAny(&ms) || sp.parseText(&ms) { + // Consume these tokens. + } + + placeholderDefaultValues := map[int]*markers{} + walkDefaults(&ms, placeholderDefaultValues) + + _, hasFinalTabstop := placeholderDefaultValues[0] + shouldInsertFinalTabstop := insertFinalTabstop && len(placeholderDefaultValues) > 0 || enforceFinalTabstop + if !hasFinalTabstop && shouldInsertFinalTabstop { + // Insert final tabstop. + // + // By default, when the user finishes filling out a snippet, they expect + // their cursor to be at the end of the snippet. So, here, if the user is + // using snippets but there is no final tabstop defined, we simply insert + // one. + ms.append(newPlaceholder(0, &markers{})) + } + + return &ms +} + +func (sp *snippetParser) text(value string) string { + return sp.parse(value, false, false).String() +} + +func (sp *snippetParser) accept(kind tokenType) bool { + if sp.currToken.kind == kind { + sp.prevToken = sp.currToken + sp.currToken = sp.tokenizer.next() + return true + } + return false +} + +func (sp *snippetParser) acceptAny() bool { + sp.prevToken = sp.currToken + sp.currToken = sp.tokenizer.next() + return true +} + +func (sp *snippetParser) parseAny(ms *markers) bool { + if sp.parseEscaped(ms) { + return true + } else if sp.parseTM(ms) { + return true + } + return false +} + +func (sp *snippetParser) parseText(ms *markers) bool { + if sp.currToken.kind != eof { + ms.append(newText(sp.tokenizer.tokenText(sp.currToken))) + sp.acceptAny() + return true + } + return false +} + +func (sp *snippetParser) parseTM(ms *markers) bool { + if sp.accept(dollar) { + if sp.accept(variableName) || sp.accept(number) { + // Cases like `$FOO` or `$123`. + idOrName := sp.tokenizer.tokenText(sp.prevToken) + if i, ok := parseNumber(idOrName); ok { + // Cases like `$123`. + ms.append(newPlaceholder(i, &markers{})) + } else { + // Cases like `$FOO`. 
+ ms.append(newVariable(idOrName, &markers{})) + } + return true + } else if sp.accept(curlyOpen) { + // Cases like `${name:nginx}`. + name := markers{} + children := &markers{} + target := &name + + for { + if target != children && sp.accept(colon) { + target = children + continue + } + + if sp.accept(curlyClose) { + idOrName := name.String() + if i, ok := parseNumber(idOrName); ok { + ms.append(newPlaceholder(i, children)) + } else { + ms.append(newVariable(idOrName, children)) + } + return true + } + + if sp.parseAny(target) || sp.parseText(target) { + continue + } + + // fallback + if len(*children) > 0 { + ms.append(newText("${" + name.String() + ":")) + ms.append(*children...) + } else { + ms.append(newText("${")) + ms.append(name...) + } + return true + } + } + + ms.append(newText("$")) + return true + } + + return false +} + +func (sp *snippetParser) parseEscaped(ms *markers) bool { + if sp.accept(backslash) { + if sp.accept(dollar) || sp.accept(curlyClose) || sp.accept(backslash) { + // Do nothing. + } + ms.append(newText(sp.tokenizer.tokenText(sp.prevToken))) + return true + } + return false +} + +func parseNumber(id string) (int, bool) { + + if matches, err := regexp.MatchString(`^\d+$`, id); err != nil { + return 0, false + } else if !matches { + return 0, false + } + + i, err := strconv.ParseInt(id, 0, 0) + if err != nil { + return 0, false + } + return int(i), true +} diff --git a/prototype/snippet/parser_test.go b/prototype/snippet/parser_test.go new file mode 100644 index 0000000000000000000000000000000000000000..ee8952d294e6dde08e38ab2f46905a056959d244 --- /dev/null +++ b/prototype/snippet/parser_test.go @@ -0,0 +1,417 @@ +package snippet + +import ( + "fmt" + "reflect" + "testing" +) + +func assertTokensEqual(t *testing.T, actual, expected tokenType) { + if actual != expected { + t.Fatalf("Expected token type '%d' but got '%d'", expected, actual) + } +} + +func TestLexer(t *testing.T) { + lexer := newLexer() + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("a") + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("abc") + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("{{abc}}") + assertTokensEqual(t, lexer.next().kind, curlyOpen) + assertTokensEqual(t, lexer.next().kind, curlyOpen) + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, curlyClose) + assertTokensEqual(t, lexer.next().kind, curlyClose) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("abc() ") + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, format) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("abc 123") + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, format) + assertTokensEqual(t, lexer.next().kind, number) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("$foo") + assertTokensEqual(t, lexer.next().kind, dollar) + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("$foo_bar") + assertTokensEqual(t, lexer.next().kind, dollar) + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("$foo-bar") + assertTokensEqual(t, lexer.next().kind, dollar) + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, format) + 
assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("${foo}") + assertTokensEqual(t, lexer.next().kind, dollar) + assertTokensEqual(t, lexer.next().kind, curlyOpen) + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, curlyClose) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("${1223:foo}") + assertTokensEqual(t, lexer.next().kind, dollar) + assertTokensEqual(t, lexer.next().kind, curlyOpen) + assertTokensEqual(t, lexer.next().kind, number) + assertTokensEqual(t, lexer.next().kind, colon) + assertTokensEqual(t, lexer.next().kind, variableName) + assertTokensEqual(t, lexer.next().kind, curlyClose) + assertTokensEqual(t, lexer.next().kind, eof) + + lexer.text("\\${}") + assertTokensEqual(t, lexer.next().kind, backslash) + assertTokensEqual(t, lexer.next().kind, dollar) + assertTokensEqual(t, lexer.next().kind, curlyOpen) + assertTokensEqual(t, lexer.next().kind, curlyClose) +} + +func assertText(t *testing.T, value, expected string) { + p := newSnippetParser() + actual := p.text(value) + if actual != expected { + t.Errorf("Expected text '%s', got '%s'", expected, actual) + } +} + +func assertMarkerTypes(t *testing.T, actual marker, expected marker) { + actualType, expectedType := reflect.TypeOf(actual), reflect.TypeOf(expected) + if actualType != expectedType { + t.Errorf("Expected type '%v', got type '%v'", expectedType, actualType) + } +} + +func assertEqual(t *testing.T, actual, expected interface{}) { + if actual != expected { + t.Errorf("Expected '%v', got '%v'", expected, actual) + } +} + +func assertMarker(t *testing.T, actual markers, expected ...marker) { + if len(actual) != len(expected) { + t.Errorf("Number of markers and types are not the same") + } + for i := range actual { + actualType := reflect.TypeOf(actual[i]) + expectedType := reflect.TypeOf(expected[i]) + if actualType != expectedType { + t.Errorf("Expected type '%v', got type '%v'", expectedType, actualType) + return + } + } +} + +func assertMarkerValue(t *testing.T, value string, ctors ...marker) { + p := newSnippetParser() + m := p.parse(value, false, false) + assertMarker(t, *m, ctors...) +} + +func assertTextAndMarker(t *testing.T, value, escaped string, ctors ...marker) { + assertText(t, value, escaped) + assertMarkerValue(t, value, ctors...) 
+} + +func TestParserText(t *testing.T) { + assertText(t, `$`, `$`) + assertText(t, `\\$`, `\$`) + assertText(t, "{", "{") + assertText(t, `\}`, `}`) + assertText(t, `\abc`, `\abc`) + assertText(t, `foo${f:\}}bar`, `foo}bar`) + assertText(t, `\{`, `\{`) + assertText(t, "I need \\\\\\$", "I need \\$") + assertText(t, `\`, `\`) + assertText(t, `\{{`, `\{{`) + assertText(t, `{{`, `{{`) + assertText(t, `{{dd`, `{{dd`) + assertText(t, `}}`, `}}`) + assertText(t, `ff}}`, `ff}}`) + + assertText(t, "farboo", "farboo") + assertText(t, "far{{}}boo", "far{{}}boo") + assertText(t, "far{{123}}boo", "far{{123}}boo") + assertText(t, "far\\{{123}}boo", "far\\{{123}}boo") + assertText(t, "far{{id:bern}}boo", "far{{id:bern}}boo") + assertText(t, "far{{id:bern {{basel}}}}boo", "far{{id:bern {{basel}}}}boo") + assertText(t, "far{{id:bern {{id:basel}}}}boo", "far{{id:bern {{id:basel}}}}boo") + assertText(t, "far{{id:bern {{id2:basel}}}}boo", "far{{id:bern {{id2:basel}}}}boo") +} + +func TestParserTMText(t *testing.T) { + assertTextAndMarker(t, "foo${1:bar}}", "foobar}", &text{}, &placeholder{}, &text{}) + assertTextAndMarker(t, "foo${1:bar}${2:foo}}", "foobarfoo}", &text{}, &placeholder{}, &placeholder{}, &text{}) + + assertTextAndMarker(t, "foo${1:bar\\}${2:foo}}", "foobar}foo", &text{}, &placeholder{}) + + parse := *newSnippetParser().parse("foo${1:bar\\}${2:foo}}", false, false) + ph := *parse[1].(*placeholder) + children := *ph._children + + assertEqual(t, ph.index, 1) + assertMarkerTypes(t, children[0], &text{}) + assertEqual(t, children[0].String(), "bar}") + assertMarkerTypes(t, children[1], &placeholder{}) + assertEqual(t, children[1].String(), "foo") +} + +func TestParserPlaceholder(t *testing.T) { + assertTextAndMarker(t, "farboo", "farboo", &text{}) + assertTextAndMarker(t, "far{{}}boo", "far{{}}boo", &text{}) + assertTextAndMarker(t, "far{{123}}boo", "far{{123}}boo", &text{}) + assertTextAndMarker(t, "far\\{{123}}boo", "far\\{{123}}boo", &text{}) +} + +func TestParserLiteral(t *testing.T) { + assertTextAndMarker(t, "far`123`boo", "far`123`boo", &text{}) + assertTextAndMarker(t, "far\\`123\\`boo", "far\\`123\\`boo", &text{}) +} + +func TestParserVariablesTabstop(t *testing.T) { + assertTextAndMarker(t, "$far-boo", "-boo", &variable{}, &text{}) + assertTextAndMarker(t, "\\$far-boo", "$far-boo", &text{}) + assertTextAndMarker(t, "far$farboo", "far", &text{}, &variable{}) + assertTextAndMarker(t, "far${farboo}", "far", &text{}, &variable{}) + assertTextAndMarker(t, "$123", "", &placeholder{}) + assertTextAndMarker(t, "$farboo", "", &variable{}) + assertTextAndMarker(t, "$far12boo", "", &variable{}) +} + +func TestParserVariablesWithDefaults(t *testing.T) { + assertTextAndMarker(t, "${name:value}", "value", &variable{}) + assertTextAndMarker(t, "${1:value}", "value", &placeholder{}) + assertTextAndMarker(t, "${1:bar${2:foo}bar}", "barfoobar", &placeholder{}) + + assertTextAndMarker(t, "${name:value", "${name:value", &text{}) + assertTextAndMarker(t, "${1:bar${2:foobar}", "${1:barfoobar", &text{}, &placeholder{}) +} + +func TestParserTextmate(t *testing.T) { + p := newSnippetParser() + assertMarker(t, *p.parse("far{{}}boo", false, false), &text{}) + assertMarker(t, *p.parse("far{{123}}boo", false, false), &text{}) + assertMarker(t, *p.parse("far\\{{123}}boo", false, false), &text{}) + + assertMarker(t, *p.parse("far$0boo", false, false), &text{}, &placeholder{}, &text{}) + assertMarker(t, *p.parse("far${123}boo", false, false), &text{}, &placeholder{}, &text{}) + assertMarker(t, 
*p.parse("far\\${123}boo", false, false), &text{}) +} + +func TestParserRealWorld(t *testing.T) { + m := newSnippetParser().parse("console.warn(${1: $TM_SELECTED_TEXT })", false, false) + + assertEqual(t, (*m)[0].String(), "console.warn(") + assertMarkerTypes(t, (*m)[1], &placeholder{}) + assertEqual(t, (*m)[2].String(), ")") + + ph := (*m)[1].(*placeholder) + children := *ph.children() + // assertEqual(t, placeholder, "false") + assertEqual(t, ph.index, 1) + assertEqual(t, len(children), 3) + assertMarkerTypes(t, children[0], &text{}) + assertMarkerTypes(t, children[1], &variable{}) + assertMarkerTypes(t, children[2], &text{}) + assertEqual(t, children[0].String(), " ") + assertEqual(t, children[1].String(), "") + assertEqual(t, children[2].String(), " ") + + nestedVariable := children[1].(*variable) + assertEqual(t, nestedVariable.name, "TM_SELECTED_TEXT") + assertEqual(t, len(*nestedVariable.children()), 0) + + m = newSnippetParser().parse("$TM_SELECTED_TEXT", false, false) + assertEqual(t, len(*m), 1) + assertMarkerTypes(t, (*m)[0], &variable{}) +} + +func TestParserDefaultPlaceholderValues(t *testing.T) { + assertMarkerValue(t, "errorContext: `${1:err}`, error: $1", &text{}, &placeholder{}, &text{}, &placeholder{}) + + parsed := newSnippetParser().parse("errorContext: `${1:err}`, error:$1", false, false) + assertMarkerTypes(t, (*parsed)[1], &placeholder{}) + assertMarkerTypes(t, (*parsed)[3], &placeholder{}) + p1, p2 := (*parsed)[1].(*placeholder), (*parsed)[3].(*placeholder) + + assertEqual(t, p1.index, 1) + assertEqual(t, len(*p1.children()), 1) + assertEqual(t, (*p1.children())[0].(*text).String(), "err") + + assertEqual(t, p2.index, 1) + assertEqual(t, len(*p2.children()), 1) + assertEqual(t, (*p2.children())[0].(*text).String(), "err") +} + +func TestBackspace(t *testing.T) { + actual := newSnippetParser().text("Foo \\\\${abc}bar") + assertEqual(t, actual, "Foo \\bar") +} + +func ColonAsVariableValue(t *testing.T) { + actual := newSnippetParser().text("${TM_SELECTED_TEXT:foo:bar}") + assertEqual(t, actual, "foo:bar") + + actual = newSnippetParser().text("${1:foo:bar}") + assertEqual(t, actual, "foo:bar") +} + +func assertLen(t *testing.T, template string, lengths ...int) { + children := parse(template, false).children() + walk(children, func(m marker) bool { + var expected int + expected, lengths = lengths[0], lengths[1:] + assertEqual(t, m.len(), expected) + return true + }) +} + +func TestMarkerLen(t *testing.T) { + assertLen(t, "text$0", 4, 0, 0) + assertLen(t, "$1text$0", 0, 4, 0, 0) + assertLen(t, "te$1xt$0", 2, 0, 2, 0, 0) + assertLen(t, "errorContext: `${1:err}`, error: $0", 15, 0, 3, 10, 0, 0) + assertLen(t, "errorContext: `${1:err}`, error: $1$0", 15, 0, 3, 10, 0, 3, 0, 0) + assertLen(t, "$TM_SELECTED_TEXT$0", 0, 0, 0) + assertLen(t, "${TM_SELECTED_TEXT:def}$0", 0, 3, 0, 0) +} + +func TestParserParent(t *testing.T) { + snippet := parse("This ${1:is ${2:nested}}$0", false) + + assertEqual(t, len(snippet.placeholders()), 3) + first, second := snippet.placeholders()[0], snippet.placeholders()[1] + assertEqual(t, first.index, 1) + assertEqual(t, second.index, 2) + sp := second.parent() + fmt.Println(sp) + assertEqual(t, second.parent(), first) + fp := first.parent() + fmt.Println(fp) + assertEqual(t, first.parent(), snippet) + + snippet = parse("${VAR:default${1:value}}$0", false) + phs := snippet.placeholders() + assertEqual(t, len(phs), 2) + first = phs[0] + assertEqual(t, first.index, 1) + + firstChild := (*snippet.children())[0] + assertMarkerTypes(t, firstChild, 
&variable{}) + assertEqual(t, first.parent(), firstChild) +} + +func TestTextmateSnippetEnclosingPlaceholders(t *testing.T) { + snippet := parse("This ${1:is ${2:nested}}$0", false) + first, second := snippet.placeholders()[0], snippet.placeholders()[1] + + assertEqual(t, len(snippet.enclosingPlaceholders(*first)), 0) + + sndEnclosing := snippet.enclosingPlaceholders(*second) + assertEqual(t, len(sndEnclosing), 1) + assertEqual(t, sndEnclosing[0], first) +} + +func TestTextmateSnippetOffset(t *testing.T) { + snippet := parse("te$1xt", false) + snippetChildren := *snippet.children() + assertEqual(t, snippet.offset(snippetChildren[0]), 0) + assertEqual(t, snippet.offset(snippetChildren[1]), 2) + assertEqual(t, snippet.offset(snippetChildren[2]), 2) + + snippet = parse("${TM_SELECTED_TEXT:def}", false) + snippetChildren = *snippet.children() + assertEqual(t, snippet.offset(snippetChildren[0]), 0) + assertMarkerTypes(t, snippetChildren[0], &variable{}) + assertEqual(t, snippet.offset((*snippetChildren[0].(*variable).children())[0]), 0) + + // forgein marker + assertEqual(t, snippet.offset(newText("foo")), -1) +} + +func TextmateSnippetPlaceholder(t *testing.T) { + snippet := parse("te$1xt$0", false) + placeholders := snippet.placeholders() + assertEqual(t, len(placeholders), 2) + + snippet = parse("te$1xt$1$0", false) + placeholders = snippet.placeholders() + assertEqual(t, len(placeholders), 3) + + snippet = parse("te$1xt$2$0", false) + placeholders = snippet.placeholders() + assertEqual(t, len(placeholders), 3) + + snippet = parse("${1:bar${2:foo}bar}$0", false) + placeholders = snippet.placeholders() + assertEqual(t, len(placeholders), 3) +} + +func TextmateSnippetReplace1(t *testing.T) { + snippet := parse("aaa${1:bbb${2:ccc}}$0", false) + + assertEqual(t, len(snippet.placeholders()), 3) + second := *snippet.placeholders()[1] + assertEqual(t, second.index, 2) + + enclosing := snippet.enclosingPlaceholders(second) + assertEqual(t, len(enclosing), 1) + assertEqual(t, enclosing[0].index, 1) + + nested := parse("ddd$1eee$0", false) + snippet.ReplacePlaceholder(2, nested.children()) + + snippetPlaceholders := snippet.placeholders() + assertEqual(t, snippet.text, "aaabbbdddeee") + assertEqual(t, len(snippetPlaceholders), 4) + assertEqual(t, snippetPlaceholders[0].index, "1") + assertEqual(t, snippetPlaceholders[1].index, "1") + assertEqual(t, snippetPlaceholders[2].index, "0") + assertEqual(t, snippetPlaceholders[3].index, "0") + + newEnclosing := snippet.enclosingPlaceholders(*snippetPlaceholders[1]) + assertEqual(t, newEnclosing[0], snippetPlaceholders[0]) + assertEqual(t, len(newEnclosing), 1) + assertEqual(t, newEnclosing[0].index, "1") +} + +func TextmateSnippetReplace2(t *testing.T) { + snippet := parse("aaa${1:bbb${2:ccc}}$0", false) + + assertEqual(t, len(snippet.placeholders()), 3) + second := snippet.placeholders()[1] + assertEqual(t, second.index, 2) + + nested := parse("dddeee$0", false) + snippet.ReplacePlaceholder(2, nested.children()) + + assertEqual(t, snippet.text, "aaabbbdddeee") + assertEqual(t, len(snippet.placeholders()), 3) +} + +func TestSnippetOrderPlaceholders(t *testing.T) { + _10 := newPlaceholder(10, &markers{}) + _2 := newPlaceholder(2, &markers{}) + + assertEqual(t, compareByIndex(*_10, *_2), 1) +} + +func TestMaxCallStackExceeded(t *testing.T) { + newSnippetParser().parse("${1:${foo:${1}}}", false, false) +} diff --git a/prototype/snippet/template.go b/prototype/snippet/template.go new file mode 100644 index 
0000000000000000000000000000000000000000..7f64f3441c375ae4fa86765cd966888788a40d5b --- /dev/null +++ b/prototype/snippet/template.go @@ -0,0 +1,111 @@ +package snippet + +type textmateSnippet struct { + markerImpl + _placeholders *[]*placeholder +} + +func newTextmateSnippet(children *markers) *textmateSnippet { + tms := &textmateSnippet{ + markerImpl: markerImpl{ + _children: children, + }, + _placeholders: nil, + } + tms._children.setParents(tms) + return tms +} + +func (tms *textmateSnippet) placeholders() []*placeholder { + if tms._placeholders == nil { + // Fill in placeholders if they don't exist. + tms._placeholders = &[]*placeholder{} + walk(tms._children, func(candidate marker) bool { + switch candidate.(type) { + case *placeholder: + { + *tms._placeholders = append(*tms._placeholders, candidate.(*placeholder)) + } + } + return true + }) + } + return *tms._placeholders +} + +func (tms *textmateSnippet) offset(m marker) int { + pos := 0 + found := false + walk(tms._children, func(candidate marker) bool { + if candidate == m { + found = true + return false + } + pos += candidate.len() + return true + }) + + if !found { + return -1 + } + return pos +} + +func (tms *textmateSnippet) fullLen(m marker) int { + ret := 0 + walk(&markers{m}, func(m marker) bool { + ret += m.len() + return true + }) + return ret +} + +func (tms *textmateSnippet) enclosingPlaceholders(ph placeholder) []*placeholder { + ret := []*placeholder{} + parent := ph._parent + for parent != nil { + switch parent.(type) { + case *placeholder: + { + ret = append(ret, parent.(*placeholder)) + } + } + parent = parent.parent() + } + return ret +} + +func (tms *textmateSnippet) text() string { + return tms._children.String() +} + +func (tms *textmateSnippet) Evaluate(values map[string]string) (string, error) { + walk(tms.children(), func(candidate marker) bool { + switch casted := candidate.(type) { + case *variable: + { + if resolved, ok := values[casted.name]; ok { + casted.resolvedValue = &resolved + } + if casted.isDefined() { + // remove default value from resolved variable + casted._children = &markers{} + } + } + } + return true + }) + + // TODO: Explicitly disallow tabstops and empty placeholders. Error out if + // present. + + return tms.text(), nil +} + +func (tms *textmateSnippet) ReplacePlaceholder(idx index, replaceWith *markers) { + newChildren := make(markers, len(*replaceWith)) + copy(newChildren, *replaceWith) + newChildren.delete(int(idx)) + tms._children = &newChildren + tms._placeholders = nil +} diff --git a/prototype/snippet/util.go b/prototype/snippet/util.go new file mode 100644 index 0000000000000000000000000000000000000000..22109fb1299076c81c7da295f4fd3e2122868bca --- /dev/null +++ b/prototype/snippet/util.go @@ -0,0 +1,82 @@ +package snippet + +func compareByIndex(a placeholder, b placeholder) int { + if a.index == b.index { + return 0 + } else if a.isFinalTabstop() { + return 1 + } else if b.isFinalTabstop() { + return -1 + } else if a.index < b.index { + return -1 + } else if a.index > b.index { + return 1 + } + return 0 +} + +func walk(ms *markers, visitor func(m marker) bool) { + stack := make(markers, len(*ms)) + copy(stack, *ms) + + for len(stack) > 0 { + // NOTE: Declare `m` separately so that we can use the `=` operator + // (rather than `:=`) to make it clear that we're not shadowing `stack`. + var m marker + m, stack = stack[0], stack[1:] + recurse := visitor(m) + if !recurse { + break + } + stack = append(*m.children(), stack...) 
+ } +} + +// * fill in defaults for empty placeholders +// * compact sibling text markers +func walkDefaults(ms *markers, placeholderDefaultValues map[int]*markers) { + + for i := 0; i < len(*ms); i++ { + thisMarker := (*ms)[i] + + switch thisMarker.(type) { + case *placeholder: + { + pl := thisMarker.(*placeholder) + // fill in default values for repeated placeholders + // like `${1:foo}and$1` becomes ${1:foo}and${1:foo} + if defaultVal, ok := placeholderDefaultValues[pl.index]; !ok { + placeholderDefaultValues[pl.index] = pl._children + walkDefaults(pl._children, placeholderDefaultValues) + + } else if len(*pl._children) == 0 { + // copy children from first placeholder definition, no need to + // recurse on them because they have been visited already + children := make(markers, len(*defaultVal)) + pl._children = &children + copy(*pl._children, *defaultVal) + } + } + case *variable: + { + walkDefaults(thisMarker.children(), placeholderDefaultValues) + } + case *text: + { + if i <= 0 { + continue + } + + prev := (*ms)[i-1] + switch prev.(type) { + case *text: + { + (*ms)[i-1].(*text).data += (*ms)[i].(*text).data + ms.delete(i) + i-- + } + } + } + } + } +} diff --git a/prototype/specification.go b/prototype/specification.go new file mode 100644 index 0000000000000000000000000000000000000000..d957cda66735ab3f8639006df24cd1d056cf928b --- /dev/null +++ b/prototype/specification.go @@ -0,0 +1,140 @@ +package prototype + +import ( + "fmt" + "strings" +) + +// +// NOTE: These members would ordinarily be private and exposed by interfaces, +// but because Go requires public structs for un/marshalling, it is more +// convenient to simply expose all of them. +// + +// SpecificationSchema is the JSON-serializable representation of a prototype +// specification. +type SpecificationSchema struct { + APIVersion string `json:"apiVersion"` + Kind string `json:"kind"` + + // Unique identifier of the prototype. The most reliable way to make a + // name unique is to embed a domain you own into the name, as is commonly done + // in the Java community. + Name string `json:"name"` + Params ParamSchemas `json:"params"` + Template SnippetSchema `json:"template"` +} + +// RequiredParams retrieves all parameters that are required by a prototype. +func (s *SpecificationSchema) RequiredParams() ParamSchemas { + reqd := ParamSchemas{} + for _, p := range s.Params { + if p.Default == nil { + reqd = append(reqd, p) + } + } + + return reqd +} + +// OptionalParams retrieves all parameters that can optionally be provided to a +// prototype. +func (s *SpecificationSchema) OptionalParams() ParamSchemas { + opt := ParamSchemas{} + for _, p := range s.Params { + if p.Default != nil { + opt = append(opt, p) + } + } + + return opt +} + +// SnippetSchema is the JSON-serializable representation of the TextMate snippet +// specification, as implemented by the Language Server Protocol. +type SnippetSchema struct { + Prefix string `json:"prefix"` + + // Description describes what the prototype does. + Description string `json:"description"` + + // Body of the prototype. Follows the TextMate snippets syntax, with several + // features disallowed. + Body []string `json:"body"` +} + +// ParamSchema is the JSON-serializable representation of a parameter provided to a prototype. +type ParamSchema struct { + Name string `json:"name"` + Alias *string `json:"alias"` // Optional. + Description string `json:"description"` + Default *string `json:"default"` // `nil` if and only if the parameter is required. 
+} + +// RequiredParam constructs a required parameter, i.e., a parameter that is +// meant to be required by some prototype, somewhere. +func RequiredParam(name, alias, description string) *ParamSchema { + return &ParamSchema{ + Name: name, + Alias: &alias, + Description: description, + Default: nil, + } +} + +// OptionalParam constructs an optional parameter, i.e., a parameter that is +// meant to be optionally provided to some prototype, somewhere. +func OptionalParam(name, alias, description, defaultVal string) *ParamSchema { + return &ParamSchema{ + Name: name, + Alias: &alias, + Description: description, + Default: &defaultVal, + } +} + +// ParamSchemas is a slice of `ParamSchema`. +type ParamSchemas []*ParamSchema + +// PrettyString creates a prettified string representing a collection of +// parameters. +func (ps ParamSchemas) PrettyString(prefix string) string { + if len(ps) == 0 { + return " [none]" + } + + flags := []string{} + for _, p := range ps { + alias := p.Name + if p.Alias != nil { + alias = *p.Alias + } + flags = append(flags, fmt.Sprintf("--%s=<%s>", p.Name, alias)) + } + + max := 0 + for _, flag := range flags { + if flagLen := len(flag); max < flagLen { + max = flagLen + } + } + + prettyFlags := []string{} + for i := range flags { + p := ps[i] + flag := flags[i] + + defaultVal := "" + if p.Default != nil { + defaultVal = fmt.Sprintf(" [default: %s]", *p.Default) + } + + // NOTE: If we don't add 1 here, the longest line will look like: + // `--flag=<flag>Description is here.` + space := strings.Repeat(" ", max-len(flag)+1) + pretty := prefix + flag + space + p.Description + defaultVal + prettyFlags = append(prettyFlags, pretty) + } + + return strings.Join(prettyFlags, "\n") +} diff --git a/prototype/systemPrototypes.go b/prototype/systemPrototypes.go new file mode 100644 index 0000000000000000000000000000000000000000..679a43fb4b1014b248aa511108d5a86ccebe559f --- /dev/null +++ b/prototype/systemPrototypes.go @@ -0,0 +1,102 @@ +package prototype + +var defaultPrototypes = []*SpecificationSchema{ + &SpecificationSchema{ + APIVersion: "0.1", + Name: "io.ksonnet.pkg.yaml-namespace", + Params: ParamSchemas{ + RequiredParam("name", "name", "Name to give the namespace."), + }, + Template: SnippetSchema{ + Description: `A simple namespace. 
Labels are automatically populated from the name of the +namespace.`, + Body: []string{ + "kind: Namespace", + "apiVersion: v1", + "metadata:", + " name: ${name}", + " labels:", + " name: ${name}", + }, + }, + }, + &SpecificationSchema{ + APIVersion: "0.1", + Name: "io.ksonnet.pkg.yaml-single-port-service", + Params: ParamSchemas{ + RequiredParam("name", "serviceName", "Name of the service."), + RequiredParam("targetLabelSelector", "selector", "Label for the service to target (e.g., 'app: MyApp')."), + RequiredParam("servicePort", "port", "Port for the service to expose."), + RequiredParam("targetPort", "port", "Port for the service target."), + OptionalParam("protocol", "protocol", "Protocol to use (either TCP or UDP).", "TCP"), + }, + Template: SnippetSchema{ + Description: `A service that exposes 'servicePort', and directs traffic +to 'targetLabelSelector', at 'targetPort'.`, + Body: []string{ + "kind: Service", + "apiVersion: v1", + "metadata:", + " name: ${name}", + "spec:", + " selector:", + " ${targetLabelSelector}", + " ports:", + " - protocol: ${protocol}", + " port: ${servicePort}", + " targetPort: ${targetPort}", + }, + }, + }, + &SpecificationSchema{ + APIVersion: "0.1", + Name: "io.ksonnet.pkg.yaml-empty-configMap", + Params: ParamSchemas{ + RequiredParam("name", "name", "Name to give the configMap."), + }, + Template: SnippetSchema{ + Description: `A simple config map. Contains no data.`, + Body: []string{ + "apiVersion: v1", + "kind: ConfigMap", + "metadata:", + " name: ${name}", + "data:", + " # K/V pairs go here.", + }, + }, + }, + &SpecificationSchema{ + APIVersion: "0.1", + Name: "io.ksonnet.pkg.yaml-single-port-deployment", + Params: ParamSchemas{ + RequiredParam("name", "deploymentName", "Name of the deployment."), + RequiredParam("image", "containerImage", "Container image to deploy."), + OptionalParam("replicas", "replicas", "Number of replicas.", "1"), + OptionalParam("port", "containerPort", "Port to expose.", "80"), + }, + Template: SnippetSchema{ + Description: `A deployment that replicates container 'image' some number of times +(default: 1), and exposes a port (default: 80). Labels are automatically +populated from 'name'.`, + Body: []string{ + "apiVersion: apps/v1beta1", + "kind: Deployment", + "metadata:", + " name: ${name}", + "spec:", + " replicas: ${replicas:1}", + " template:", + " metadata:", + " labels:", + " app: ${name}", + " spec:", + " containers:", + " - name: ${name}", + " image: ${image}", + " ports:", + " - containerPort: ${port:80}", + }, + }, + }, +}
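As a quick illustration of how the exported pieces of prototype/specification.go fit together, here is a minimal sketch, separate from the change itself. It assumes only the import path github.com/ksonnet/kubecfg/prototype; the prototype name and parameters below are invented for illustration and are not among the shipped system prototypes.

package main

import (
	"fmt"

	"github.com/ksonnet/kubecfg/prototype"
)

func main() {
	// Assemble a hypothetical prototype specification using the exported
	// helpers: RequiredParam leaves Default nil, OptionalParam sets it.
	spec := prototype.SpecificationSchema{
		APIVersion: "0.1",
		Name:       "io.ksonnet.pkg.example-service",
		Params: prototype.ParamSchemas{
			prototype.RequiredParam("name", "serviceName", "Name of the service."),
			prototype.OptionalParam("protocol", "protocol", "Protocol to use.", "TCP"),
		},
		Template: prototype.SnippetSchema{
			Description: "An illustrative service prototype.",
			Body: []string{
				"kind: Service",
				"apiVersion: v1",
				"metadata:",
				"  name: ${name}",
			},
		},
	}

	// RequiredParams and OptionalParams split the parameter list on whether a
	// default is present; PrettyString aligns the flags for help output.
	fmt.Println("REQUIRED PARAMETERS:")
	fmt.Println(spec.RequiredParams().PrettyString("  "))
	fmt.Println()
	fmt.Println("OPTIONAL PARAMETERS:")
	fmt.Println(spec.OptionalParams().PrettyString("  "))
}

With the two parameters above, the required section renders roughly as `  --name=<serviceName> Name of the service.` and the optional section as `  --protocol=<protocol> Protocol to use. [default: TCP]`.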