From 434d1f6759c194a6b986d32a0cbfaa5cd92391e8 Mon Sep 17 00:00:00 2001
From: Alex Clemmer <clemmer.alexander@gmail.com>
Date: Sat, 2 Sep 2017 13:00:39 -0700
Subject: [PATCH] Implement parser and evaluator for TextMate snippets

The Language Server Protocol (LSP) specifies TextMate's "snippets"
feature. Originally imagined as a kind of Mad Libs for code, a snippet
gives the user a code template with some number of "placeholder" blanks
to fill out, switching between them with tab. The LSP team standardized
this feature across language servers, so that language authors have a
well-specified interface for providing "snippets" to any editor that
implements the LSP.

Today, the LSP specification of TextMate snippets forms the bedrock of
the ksonnet prototype specification: although users will eventually be
able to use Jsonnet to generate prototypes, those prototypes are
compiled down to TextMate snippets.
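
For instance, a prototype might compile down to a snippet template along
these lines (a hypothetical example, not part of this patch):

    kind: Service
    metadata:
      name: ${name:myService}
    spec:
      ports:
      - port: ${port:80}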

This commit begins this process by introducing an implementation of the
LSP snippets specification, including both a parser and an evaluator.

For more details, see the extensive comment in `interface.go`.
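
As a rough sketch of how the new package is meant to be used (assuming it
is imported as `snippet`; the template string and values below are
illustrative only):

    tmpl := snippet.Parse("metadata: {name: ${name:myService}}")
    result, err := tmpl.Evaluate(map[string]string{"name": "nginx"})
    if err != nil {
        // Handle the evaluation error.
    }
    // result == "metadata: {name: nginx}"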
---
 Makefile                         |   2 +-
 prototype/snippet/interface.go   |  58 +++++
 prototype/snippet/lexer.go       | 137 ++++++++++
 prototype/snippet/marker.go      | 168 +++++++++++++
 prototype/snippet/parser.go      | 171 +++++++++++++
 prototype/snippet/parser_test.go | 417 +++++++++++++++++++++++++++++++
 prototype/snippet/template.go    | 111 ++++++++
 prototype/snippet/util.go        |  82 ++++++
 8 files changed, 1145 insertions(+), 1 deletion(-)
 create mode 100644 prototype/snippet/interface.go
 create mode 100644 prototype/snippet/lexer.go
 create mode 100644 prototype/snippet/marker.go
 create mode 100644 prototype/snippet/parser.go
 create mode 100644 prototype/snippet/parser_test.go
 create mode 100644 prototype/snippet/template.go
 create mode 100644 prototype/snippet/util.go

diff --git a/Makefile b/Makefile
index b411c734..13eeab63 100644
--- a/Makefile
+++ b/Makefile
@@ -26,7 +26,7 @@ KCFG_TEST_FILE = lib/kubecfg_test.jsonnet
 GUESTBOOK_FILE = examples/guestbook.jsonnet
 JSONNET_FILES = $(KCFG_TEST_FILE) $(GUESTBOOK_FILE)
 # TODO: Simplify this once ./... ignores ./vendor
-GO_PACKAGES = ./cmd/... ./utils/... ./pkg/... ./metadata/...
+GO_PACKAGES = ./cmd/... ./utils/... ./pkg/... ./metadata/... ./prototype/...
 
 # Default cluster from this config is used for integration tests
 KUBECONFIG = $(HOME)/.kube/config
diff --git a/prototype/snippet/interface.go b/prototype/snippet/interface.go
new file mode 100644
index 00000000..fe51230b
--- /dev/null
+++ b/prototype/snippet/interface.go
@@ -0,0 +1,58 @@
+// Package snippet provides primitives for parsing and evaluating TextMate
+// snippets. In general, snippets are text with "placeholders" for the user to
+// fill in. For example, a snippet like `foo ${bar}` expects the user to
+// provide a value for the `bar` variable.
+//
+// This code is influenced heavily by the more formal treatment specified by the
+// Language Server Protocol, though (since this code does not have to serve an
+// interactive prompt in an IDE) we omit some features for simplicity (e.g., we
+// have only limited support for tabstops and builtin variables like
+// `TM_SELECTED_TEXT`).
+//
+// A parsed snippet template is represented as a tree consisting of one of
+// several types:
+//
+//   * Text: represents free text, i.e., text that is not part of a variable or
+//     placeholder.
+//   * Variable: takes the forms `$varName`, `${varName}`, and
+//     `${varName:defaultValue}`. When a variable isn't set, an empty string is
+//     inserted; if it is undefined, its name is inserted as the default value.
+//   * Tabstop (currently unused by our tool, but implemented anyway): takes the
+//     form of the '$' character followed by a number, e.g., `$1` or `$2`.
+//     Inside an editor, a tabstop represents where to navigate when the user
+//     presses tab or shift-tab.
+//   * Placeholder (currently unused by our tool, but implemented anyway):
+//     represents a tabstop with a default value, usually of the form
+//     `${3:someDefaultValue}`. Placeholders can also be nested, as in
+//     `${1:firstValue${2:secondValue}}`, or recursive, as in `${1:foo$1}`.
+//
+// TextMate does not specify a grammar for this templating language. This parser
+// implements the following grammar, which we believe is close enough to
+// TextMate's intention. The characters `$`, `}`, and `\` can be escaped with
+// `\`, but for simplicity we omit escaping from the grammar.
+//
+//   any         ::= tabstop | placeholder | choice | variable | text
+//   tabstop     ::= '$' int | '${' int '}'
+//   placeholder ::= '${' int ':' any '}'
+//   choice      ::= '${' int '|' text (',' text)* '|}'
+//   variable    ::= '$' var | '${' var '}' | '${' var ':' any '}'
+//   var         ::= [_a-zA-Z] [_a-zA-Z0-9]*
+//   int         ::= [0-9]+
+//   text        ::= .*
+package snippet
+
+// Parse parses a TextMate snippet, producing a `Template`. There is no
+// opportunity for a parse error, since the grammar specifies that malformed
+// placeholders are simply text.
+//
+// The grammar is formalized in part by the Language Server Protocol, and is
+// detailed in the package comment above.
+func Parse(template string) Template {
+	return parse(template, false)
+}
+
+// Template represents a parsed TextMate snippet. The template can be evaluated
+// (with respect to some set of variables) using `Evaluate`.
+type Template interface {
+	Evaluate(values map[string]string) (string, error)
+}
diff --git a/prototype/snippet/lexer.go b/prototype/snippet/lexer.go
new file mode 100644
index 00000000..acd930f7
--- /dev/null
+++ b/prototype/snippet/lexer.go
@@ -0,0 +1,137 @@
+package snippet
+
+type tokenType int
+
+const (
+	dollar tokenType = iota
+	colon
+	curlyOpen
+	curlyClose
+	backslash
+	number
+	variableName
+	format
+	eof
+)
+
+func (tt tokenType) String() string {
+	s := tokenTypeToString[tt]
+	return s
+}
+
+type token struct {
+	kind tokenType
+	pos  int
+	len  int
+}
+
+var stringToTokenType = map[rune]tokenType{
+	'$':  dollar,
+	':':  colon,
+	'{':  curlyOpen,
+	'}':  curlyClose,
+	'\\': backslash,
+}
+
+var tokenTypeToString = map[tokenType]string{
+	dollar:       "Dollar",
+	colon:        "Colon",
+	curlyOpen:    "CurlyOpen",
+	curlyClose:   "CurlyClose",
+	backslash:    "Backslash",
+	number:       "Int",
+	variableName: "VariableName",
+	format:       "Format",
+	eof:          "EOF",
+}
+
+type lexer struct {
+	value []rune
+	pos   int
+}
+
+func isDigitCharacter(ch rune) bool {
+	return ch >= '0' && ch <= '9'
+}
+
+func isVariableCharacter(ch rune) bool {
+	return ch == '_' || (ch >= 'a' && ch <= 'z') || (ch >= 'A' && ch <= 'Z')
+}
+
+func newLexer() *lexer {
+	s := lexer{}
+	s.text("")
+
+	return &s
+}
+
+func (s *lexer) text(value string) {
+	s.value = []rune(value)
+	s.pos = 0
+}
+
+func (s *lexer) tokenText(tok *token) string {
+	return string(s.value[tok.pos : tok.pos+tok.len])
+}
+
+func (s *lexer) next() *token {
+	valueLen := len(s.value)
+	if s.pos >= valueLen {
+		return &token{kind: eof, pos: s.pos, len: 0}
+	}
+
+	pos := s.pos
+	len := 0
+	ch := s.value[pos]
+
+	// Known token types.
+	var t tokenType
+	if t, ok := stringToTokenType[ch]; ok {
+		s.pos++
+		return &token{kind: t, pos: pos, len: 1}
+	}
+
+	// Number token.
+	if isDigitCharacter(ch) {
+		t = number
+		for pos+len < valueLen {
+			ch = s.value[pos+len]
+			if !isDigitCharacter(ch) {
+				break
+			}
+			len++
+		}
+
+		s.pos += len
+		return &token{t, pos, len}
+	}
+
+	// Variable.
+	if isVariableCharacter(ch) {
+		t = variableName
+		for pos+len < valueLen {
+			ch = s.value[pos+len]
+			if !isVariableCharacter(ch) && !isDigitCharacter(ch) {
+				break
+			}
+			len++
+		}
+
+		s.pos += len
+		return &token{t, pos, len}
+	}
+
+	// Formatting characters.
+	t = format
+	for pos+len < valueLen {
+		ch = s.value[pos+len]
+		_, isStaticToken := stringToTokenType[ch]
+		if isStaticToken || isDigitCharacter(ch) || isVariableCharacter(ch) {
+			break
+		}
+		len++
+	}
+
+	s.pos += len
+	return &token{t, pos, len}
+}
diff --git a/prototype/snippet/marker.go b/prototype/snippet/marker.go
new file mode 100644
index 00000000..c038f080
--- /dev/null
+++ b/prototype/snippet/marker.go
@@ -0,0 +1,168 @@
+package snippet
+
+import "bytes"
+
+// ----------------------------------------------------------------------------
+// Interfaces.
+// ----------------------------------------------------------------------------
+
+type index int
+
+type indices []index
+
+type marker interface {
+	children() *markers
+	parent() marker
+	setParent(p marker)
+	String() string
+	len() int
+}
+
+type markers []marker
+
+func (ms *markers) append(m ...marker) {
+	*ms = append(*ms, m...)
+}
+
+func (ms *markers) delete(i int) {
+	*ms = append((*ms)[:i], (*ms)[i+1:]...)
+}
+
+func (ms *markers) String() string {
+	var buf bytes.Buffer
+
+	for _, m := range *ms {
+		buf.WriteString(m.String())
+	}
+	return buf.String()
+}
+
+func (ms *markers) setParents(m marker) {
+	for _, child := range *ms {
+		child.setParent(m)
+	}
+}
+
+// ----------------------------------------------------------------------------
+// Base.
+// ----------------------------------------------------------------------------
+
+type markerImpl struct {
+	// _markerBrand: any;
+	_children *markers
+	_parent   marker
+}
+
+func (mi *markerImpl) children() *markers {
+	return mi._children
+}
+
+func (mi *markerImpl) parent() marker {
+	return mi._parent
+}
+
+func (mi *markerImpl) setParent(p marker) {
+	mi._parent = p
+}
+
+func (mi *markerImpl) String() string {
+	return ""
+}
+
+func (mi *markerImpl) len() int {
+	return 0
+}
+
+// ----------------------------------------------------------------------------
+// Text.
+// ----------------------------------------------------------------------------
+
+type text struct {
+	markerImpl
+	data string
+}
+
+func newText(data string) *text {
+	return &text{
+		markerImpl: markerImpl{
+			_children: &markers{},
+		},
+		data: data,
+	}
+}
+
+func (t *text) String() string {
+	return t.data
+}
+
+func (t *text) len() int {
+	return len(t.data)
+}
+
+// ----------------------------------------------------------------------------
+// Placeholder.
+// ----------------------------------------------------------------------------
+
+type placeholder struct {
+	markerImpl
+	index int
+}
+
+func newPlaceholder(index int, children *markers) *placeholder {
+	p := &placeholder{
+		// markerImpl: *newMarkerImplWithChildren(children),
+		markerImpl: markerImpl{
+			_children: children,
+		},
+		index: index,
+	}
+	p._children.setParents(p)
+	return p
+}
+
+func (p *placeholder) String() string {
+	return p._children.String()
+}
+
+func (p *placeholder) isFinalTabstop() bool {
+	return p.index == 0
+}
+
+// ----------------------------------------------------------------------------
+// Variable.
+// ----------------------------------------------------------------------------
+
+type variable struct {
+	markerImpl
+	resolvedValue *string
+	name          string
+}
+
+func newVariable(name string, children *markers) *variable {
+	v := &variable{
+		markerImpl: markerImpl{
+			_children: children,
+		},
+		name: name,
+	}
+	v._children.setParents(v)
+	return v
+}
+
+func (v *variable) isDefined() bool {
+	return v.resolvedValue != nil
+}
+
+func (v *variable) len() int {
+	if v.isDefined() {
+		return len(*v.resolvedValue)
+	}
+	return v.markerImpl.len()
+}
+
+func (v *variable) String() string {
+	if v.isDefined() {
+		return *v.resolvedValue
+	}
+	return v._children.String()
+}
diff --git a/prototype/snippet/parser.go b/prototype/snippet/parser.go
new file mode 100644
index 00000000..cee058c1
--- /dev/null
+++ b/prototype/snippet/parser.go
@@ -0,0 +1,171 @@
+package snippet
+
+import (
+	"regexp"
+	"strconv"
+)
+
+func parse(template string, enforceFinalTabstop bool) *textmateSnippet {
+	m := newSnippetParser().parse(template, true, enforceFinalTabstop)
+	return newTextmateSnippet(m)
+}
+
+type snippetParser struct {
+	tokenizer lexer
+	currToken *token
+	prevToken *token
+}
+
+func newSnippetParser() *snippetParser {
+	return &snippetParser{
+		tokenizer: *newLexer(),
+	}
+}
+
+func (sp *snippetParser) parse(value string, insertFinalTabstop bool, enforceFinalTabstop bool) *markers {
+	ms := markers{}
+
+	sp.tokenizer.text(value)
+	sp.currToken = sp.tokenizer.next()
+	for sp.parseAny(&ms) || sp.parseText(&ms) {
+		// Consume tokens until EOF.
+	}
+
+	placeholderDefaultValues := map[int]*markers{}
+	walkDefaults(&ms, placeholderDefaultValues)
+
+	_, hasFinalTabstop := placeholderDefaultValues[0]
+	shouldInsertFinalTabstop := insertFinalTabstop && len(placeholderDefaultValues) > 0 || enforceFinalTabstop
+	if !hasFinalTabstop && shouldInsertFinalTabstop {
+		// Insert final tabstop.
+		//
+		// By default, when the user finishes filling out a snippet, they expect
+		// their cursor to be at the end of the snippet. So, here, if the user is
+		// using snippets but there is no final tabstop defined, we simply insert
+		// one.
+		ms.append(newPlaceholder(0, &markers{}))
+	}
+
+	return &ms
+}
+
+func (sp *snippetParser) text(value string) string {
+	return sp.parse(value, false, false).String()
+}
+
+func (sp *snippetParser) accept(kind tokenType) bool {
+	if sp.currToken.kind == kind {
+		sp.prevToken = sp.currToken
+		sp.currToken = sp.tokenizer.next()
+		return true
+	}
+	return false
+}
+
+func (sp *snippetParser) acceptAny() bool {
+	sp.prevToken = sp.currToken
+	sp.currToken = sp.tokenizer.next()
+	return true
+}
+
+func (sp *snippetParser) parseAny(ms *markers) bool {
+	if sp.parseEscaped(ms) {
+		return true
+	} else if sp.parseTM(ms) {
+		return true
+	}
+	return false
+}
+
+func (sp *snippetParser) parseText(ms *markers) bool {
+	if sp.currToken.kind != eof {
+		ms.append(newText(sp.tokenizer.tokenText(sp.currToken)))
+		sp.acceptAny()
+		return true
+	}
+	return false
+}
+
+func (sp *snippetParser) parseTM(ms *markers) bool {
+	if sp.accept(dollar) {
+		if sp.accept(variableName) || sp.accept(number) {
+			// Cases like `$FOO` or `$123`.
+			idOrName := sp.tokenizer.tokenText(sp.prevToken)
+			if i, ok := parseNumber(idOrName); ok {
+				// Cases like `$123`.
+				ms.append(newPlaceholder(i, &markers{}))
+			} else {
+				// Cases like `$FOO`.
+				ms.append(newVariable(idOrName, &markers{}))
+			}
+			return true
+		} else if sp.accept(curlyOpen) {
+			// Cases like `${name:nginx}`.
+			name := markers{}
+			children := &markers{}
+			target := &name
+
+			for {
+				if target != children && sp.accept(colon) {
+					target = children
+					continue
+				}
+
+				if sp.accept(curlyClose) {
+					idOrName := name.String()
+					if i, ok := parseNumber(idOrName); ok {
+						ms.append(newPlaceholder(i, children))
+					} else {
+						ms.append(newVariable(idOrName, children))
+					}
+					return true
+				}
+
+				if sp.parseAny(target) || sp.parseText(target) {
+					continue
+				}
+
+				// Fallback: no closing '}'; treat what was consumed as text.
+				if len(*children) > 0 {
+					ms.append(newText("${" + name.String() + ":"))
+					ms.append(*children...)
+				} else {
+					ms.append(newText("${"))
+					ms.append(name...)
+				}
+				return true
+			}
+		}
+
+		ms.append(newText("$"))
+		return true
+	}
+
+	return false
+}
+
+func (sp *snippetParser) parseEscaped(ms *markers) bool {
+	if sp.accept(backslash) {
+		if sp.accept(dollar) || sp.accept(curlyClose) || sp.accept(backslash) {
+			// The escaped character is now prevToken; emit it without the backslash.
+		}
+		ms.append(newText(sp.tokenizer.tokenText(sp.prevToken)))
+		return true
+	}
+	return false
+}
+
+func parseNumber(id string) (int, bool) {
+
+	if matches, err := regexp.MatchString(`^\d+$`, id); err != nil {
+		return 0, false
+	} else if !matches {
+		return 0, false
+	}
+
+	i, err := strconv.ParseInt(id, 10, 0)
+	if err != nil {
+		return 0, false
+	}
+	return int(i), true
+}
diff --git a/prototype/snippet/parser_test.go b/prototype/snippet/parser_test.go
new file mode 100644
index 00000000..ee8952d2
--- /dev/null
+++ b/prototype/snippet/parser_test.go
@@ -0,0 +1,417 @@
+package snippet
+
+import (
+	"fmt"
+	"reflect"
+	"testing"
+)
+
+func assertTokensEqual(t *testing.T, actual, expected tokenType) {
+	if actual != expected {
+		t.Fatalf("Expected token type '%v' but got '%v'", expected, actual)
+	}
+}
+
+func TestLexer(t *testing.T) {
+	lexer := newLexer()
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("a")
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("abc")
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("{{abc}}")
+	assertTokensEqual(t, lexer.next().kind, curlyOpen)
+	assertTokensEqual(t, lexer.next().kind, curlyOpen)
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, curlyClose)
+	assertTokensEqual(t, lexer.next().kind, curlyClose)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("abc() ")
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, format)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("abc 123")
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, format)
+	assertTokensEqual(t, lexer.next().kind, number)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("$foo")
+	assertTokensEqual(t, lexer.next().kind, dollar)
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("$foo_bar")
+	assertTokensEqual(t, lexer.next().kind, dollar)
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("$foo-bar")
+	assertTokensEqual(t, lexer.next().kind, dollar)
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, format)
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("${foo}")
+	assertTokensEqual(t, lexer.next().kind, dollar)
+	assertTokensEqual(t, lexer.next().kind, curlyOpen)
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, curlyClose)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("${1223:foo}")
+	assertTokensEqual(t, lexer.next().kind, dollar)
+	assertTokensEqual(t, lexer.next().kind, curlyOpen)
+	assertTokensEqual(t, lexer.next().kind, number)
+	assertTokensEqual(t, lexer.next().kind, colon)
+	assertTokensEqual(t, lexer.next().kind, variableName)
+	assertTokensEqual(t, lexer.next().kind, curlyClose)
+	assertTokensEqual(t, lexer.next().kind, eof)
+
+	lexer.text("\\${}")
+	assertTokensEqual(t, lexer.next().kind, backslash)
+	assertTokensEqual(t, lexer.next().kind, dollar)
+	assertTokensEqual(t, lexer.next().kind, curlyOpen)
+	assertTokensEqual(t, lexer.next().kind, curlyClose)
+}
+
+func assertText(t *testing.T, value, expected string) {
+	p := newSnippetParser()
+	actual := p.text(value)
+	if actual != expected {
+		t.Errorf("Expected text '%s', got '%s'", expected, actual)
+	}
+}
+
+func assertMarkerTypes(t *testing.T, actual marker, expected marker) {
+	actualType, expectedType := reflect.TypeOf(actual), reflect.TypeOf(expected)
+	if actualType != expectedType {
+		t.Errorf("Expected type '%v', got type '%v'", expectedType, actualType)
+	}
+}
+
+func assertEqual(t *testing.T, actual, expected interface{}) {
+	if actual != expected {
+		t.Errorf("Expected '%v', got '%v'", expected, actual)
+	}
+}
+
+func assertMarker(t *testing.T, actual markers, expected ...marker) {
+	if len(actual) != len(expected) {
+		t.Errorf("Expected %d markers, got %d", len(expected), len(actual))
+	}
+	for i := range actual {
+		actualType := reflect.TypeOf(actual[i])
+		expectedType := reflect.TypeOf(expected[i])
+		if actualType != expectedType {
+			t.Errorf("Expected type '%v', got type '%v'", expectedType, actualType)
+			return
+		}
+	}
+}
+
+func assertMarkerValue(t *testing.T, value string, ctors ...marker) {
+	p := newSnippetParser()
+	m := p.parse(value, false, false)
+	assertMarker(t, *m, ctors...)
+}
+
+func assertTextAndMarker(t *testing.T, value, escaped string, ctors ...marker) {
+	assertText(t, value, escaped)
+	assertMarkerValue(t, value, ctors...)
+}
+
+func TestParserText(t *testing.T) {
+	assertText(t, `$`, `$`)
+	assertText(t, `\\$`, `\$`)
+	assertText(t, "{", "{")
+	assertText(t, `\}`, `}`)
+	assertText(t, `\abc`, `\abc`)
+	assertText(t, `foo${f:\}}bar`, `foo}bar`)
+	assertText(t, `\{`, `\{`)
+	assertText(t, "I need \\\\\\$", "I need \\$")
+	assertText(t, `\`, `\`)
+	assertText(t, `\{{`, `\{{`)
+	assertText(t, `{{`, `{{`)
+	assertText(t, `{{dd`, `{{dd`)
+	assertText(t, `}}`, `}}`)
+	assertText(t, `ff}}`, `ff}}`)
+
+	assertText(t, "farboo", "farboo")
+	assertText(t, "far{{}}boo", "far{{}}boo")
+	assertText(t, "far{{123}}boo", "far{{123}}boo")
+	assertText(t, "far\\{{123}}boo", "far\\{{123}}boo")
+	assertText(t, "far{{id:bern}}boo", "far{{id:bern}}boo")
+	assertText(t, "far{{id:bern {{basel}}}}boo", "far{{id:bern {{basel}}}}boo")
+	assertText(t, "far{{id:bern {{id:basel}}}}boo", "far{{id:bern {{id:basel}}}}boo")
+	assertText(t, "far{{id:bern {{id2:basel}}}}boo", "far{{id:bern {{id2:basel}}}}boo")
+}
+
+func TestParserTMText(t *testing.T) {
+	assertTextAndMarker(t, "foo${1:bar}}", "foobar}", &text{}, &placeholder{}, &text{})
+	assertTextAndMarker(t, "foo${1:bar}${2:foo}}", "foobarfoo}", &text{}, &placeholder{}, &placeholder{}, &text{})
+
+	assertTextAndMarker(t, "foo${1:bar\\}${2:foo}}", "foobar}foo", &text{}, &placeholder{})
+
+	parse := *newSnippetParser().parse("foo${1:bar\\}${2:foo}}", false, false)
+	ph := *parse[1].(*placeholder)
+	children := *ph._children
+
+	assertEqual(t, ph.index, 1)
+	assertMarkerTypes(t, children[0], &text{})
+	assertEqual(t, children[0].String(), "bar}")
+	assertMarkerTypes(t, children[1], &placeholder{})
+	assertEqual(t, children[1].String(), "foo")
+}
+
+func TestParserPlaceholder(t *testing.T) {
+	assertTextAndMarker(t, "farboo", "farboo", &text{})
+	assertTextAndMarker(t, "far{{}}boo", "far{{}}boo", &text{})
+	assertTextAndMarker(t, "far{{123}}boo", "far{{123}}boo", &text{})
+	assertTextAndMarker(t, "far\\{{123}}boo", "far\\{{123}}boo", &text{})
+}
+
+func TestParserLiteral(t *testing.T) {
+	assertTextAndMarker(t, "far`123`boo", "far`123`boo", &text{})
+	assertTextAndMarker(t, "far\\`123\\`boo", "far\\`123\\`boo", &text{})
+}
+
+func TestParserVariablesTabstop(t *testing.T) {
+	assertTextAndMarker(t, "$far-boo", "-boo", &variable{}, &text{})
+	assertTextAndMarker(t, "\\$far-boo", "$far-boo", &text{})
+	assertTextAndMarker(t, "far$farboo", "far", &text{}, &variable{})
+	assertTextAndMarker(t, "far${farboo}", "far", &text{}, &variable{})
+	assertTextAndMarker(t, "$123", "", &placeholder{})
+	assertTextAndMarker(t, "$farboo", "", &variable{})
+	assertTextAndMarker(t, "$far12boo", "", &variable{})
+}
+
+func TestParserVariablesWithDefaults(t *testing.T) {
+	assertTextAndMarker(t, "${name:value}", "value", &variable{})
+	assertTextAndMarker(t, "${1:value}", "value", &placeholder{})
+	assertTextAndMarker(t, "${1:bar${2:foo}bar}", "barfoobar", &placeholder{})
+
+	assertTextAndMarker(t, "${name:value", "${name:value", &text{})
+	assertTextAndMarker(t, "${1:bar${2:foobar}", "${1:barfoobar", &text{}, &placeholder{})
+}
+
+func TestParserTextmate(t *testing.T) {
+	p := newSnippetParser()
+	assertMarker(t, *p.parse("far{{}}boo", false, false), &text{})
+	assertMarker(t, *p.parse("far{{123}}boo", false, false), &text{})
+	assertMarker(t, *p.parse("far\\{{123}}boo", false, false), &text{})
+
+	assertMarker(t, *p.parse("far$0boo", false, false), &text{}, &placeholder{}, &text{})
+	assertMarker(t, *p.parse("far${123}boo", false, false), &text{}, &placeholder{}, &text{})
+	assertMarker(t, *p.parse("far\\${123}boo", false, false), &text{})
+}
+
+func TestParserRealWorld(t *testing.T) {
+	m := newSnippetParser().parse("console.warn(${1: $TM_SELECTED_TEXT })", false, false)
+
+	assertEqual(t, (*m)[0].String(), "console.warn(")
+	assertMarkerTypes(t, (*m)[1], &placeholder{})
+	assertEqual(t, (*m)[2].String(), ")")
+
+	ph := (*m)[1].(*placeholder)
+	children := *ph.children()
+	// assertEqual(t, placeholder, "false")
+	assertEqual(t, ph.index, 1)
+	assertEqual(t, len(children), 3)
+	assertMarkerTypes(t, children[0], &text{})
+	assertMarkerTypes(t, children[1], &variable{})
+	assertMarkerTypes(t, children[2], &text{})
+	assertEqual(t, children[0].String(), " ")
+	assertEqual(t, children[1].String(), "")
+	assertEqual(t, children[2].String(), " ")
+
+	nestedVariable := children[1].(*variable)
+	assertEqual(t, nestedVariable.name, "TM_SELECTED_TEXT")
+	assertEqual(t, len(*nestedVariable.children()), 0)
+
+	m = newSnippetParser().parse("$TM_SELECTED_TEXT", false, false)
+	assertEqual(t, len(*m), 1)
+	assertMarkerTypes(t, (*m)[0], &variable{})
+}
+
+func TestParserDefaultPlaceholderValues(t *testing.T) {
+	assertMarkerValue(t, "errorContext: `${1:err}`, error: $1", &text{}, &placeholder{}, &text{}, &placeholder{})
+
+	parsed := newSnippetParser().parse("errorContext: `${1:err}`, error:$1", false, false)
+	assertMarkerTypes(t, (*parsed)[1], &placeholder{})
+	assertMarkerTypes(t, (*parsed)[3], &placeholder{})
+	p1, p2 := (*parsed)[1].(*placeholder), (*parsed)[3].(*placeholder)
+
+	assertEqual(t, p1.index, 1)
+	assertEqual(t, len(*p1.children()), 1)
+	assertEqual(t, (*p1.children())[0].(*text).String(), "err")
+
+	assertEqual(t, p2.index, 1)
+	assertEqual(t, len(*p2.children()), 1)
+	assertEqual(t, (*p2.children())[0].(*text).String(), "err")
+}
+
+func TestBackspace(t *testing.T) {
+	actual := newSnippetParser().text("Foo \\\\${abc}bar")
+	assertEqual(t, actual, "Foo \\bar")
+}
+
+func TestColonAsVariableValue(t *testing.T) {
+	actual := newSnippetParser().text("${TM_SELECTED_TEXT:foo:bar}")
+	assertEqual(t, actual, "foo:bar")
+
+	actual = newSnippetParser().text("${1:foo:bar}")
+	assertEqual(t, actual, "foo:bar")
+}
+
+func assertLen(t *testing.T, template string, lengths ...int) {
+	children := parse(template, false).children()
+	walk(children, func(m marker) bool {
+		var expected int
+		expected, lengths = lengths[0], lengths[1:]
+		assertEqual(t, m.len(), expected)
+		return true
+	})
+}
+
+func TestMarkerLen(t *testing.T) {
+	assertLen(t, "text$0", 4, 0, 0)
+	assertLen(t, "$1text$0", 0, 4, 0, 0)
+	assertLen(t, "te$1xt$0", 2, 0, 2, 0, 0)
+	assertLen(t, "errorContext: `${1:err}`, error: $0", 15, 0, 3, 10, 0, 0)
+	assertLen(t, "errorContext: `${1:err}`, error: $1$0", 15, 0, 3, 10, 0, 3, 0, 0)
+	assertLen(t, "$TM_SELECTED_TEXT$0", 0, 0, 0)
+	assertLen(t, "${TM_SELECTED_TEXT:def}$0", 0, 3, 0, 0)
+}
+
+func TestParserParent(t *testing.T) {
+	snippet := parse("This ${1:is ${2:nested}}$0", false)
+
+	assertEqual(t, len(snippet.placeholders()), 3)
+	first, second := snippet.placeholders()[0], snippet.placeholders()[1]
+	assertEqual(t, first.index, 1)
+	assertEqual(t, second.index, 2)
+	sp := second.parent()
+	fmt.Println(sp)
+	assertEqual(t, second.parent(), first)
+	fp := first.parent()
+	fmt.Println(fp)
+	assertEqual(t, first.parent(), snippet)
+
+	snippet = parse("${VAR:default${1:value}}$0", false)
+	phs := snippet.placeholders()
+	assertEqual(t, len(phs), 2)
+	first = phs[0]
+	assertEqual(t, first.index, 1)
+
+	firstChild := (*snippet.children())[0]
+	assertMarkerTypes(t, firstChild, &variable{})
+	assertEqual(t, first.parent(), firstChild)
+}
+
+func TestTextmateSnippetEnclosingPlaceholders(t *testing.T) {
+	snippet := parse("This ${1:is ${2:nested}}$0", false)
+	first, second := snippet.placeholders()[0], snippet.placeholders()[1]
+
+	assertEqual(t, len(snippet.enclosingPlaceholders(*first)), 0)
+
+	sndEnclosing := snippet.enclosingPlaceholders(*second)
+	assertEqual(t, len(sndEnclosing), 1)
+	assertEqual(t, sndEnclosing[0], first)
+}
+
+func TestTextmateSnippetOffset(t *testing.T) {
+	snippet := parse("te$1xt", false)
+	snippetChildren := *snippet.children()
+	assertEqual(t, snippet.offset(snippetChildren[0]), 0)
+	assertEqual(t, snippet.offset(snippetChildren[1]), 2)
+	assertEqual(t, snippet.offset(snippetChildren[2]), 2)
+
+	snippet = parse("${TM_SELECTED_TEXT:def}", false)
+	snippetChildren = *snippet.children()
+	assertEqual(t, snippet.offset(snippetChildren[0]), 0)
+	assertMarkerTypes(t, snippetChildren[0], &variable{})
+	assertEqual(t, snippet.offset((*snippetChildren[0].(*variable).children())[0]), 0)
+
+	// foreign marker
+	assertEqual(t, snippet.offset(newText("foo")), -1)
+}
+
+func TestTextmateSnippetPlaceholder(t *testing.T) {
+	snippet := parse("te$1xt$0", false)
+	placeholders := snippet.placeholders()
+	assertEqual(t, len(placeholders), 2)
+
+	snippet = parse("te$1xt$1$0", false)
+	placeholders = snippet.placeholders()
+	assertEqual(t, len(placeholders), 3)
+
+	snippet = parse("te$1xt$2$0", false)
+	placeholders = snippet.placeholders()
+	assertEqual(t, len(placeholders), 3)
+
+	snippet = parse("${1:bar${2:foo}bar}$0", false)
+	placeholders = snippet.placeholders()
+	assertEqual(t, len(placeholders), 3)
+}
+
+func TextmateSnippetReplace1(t *testing.T) {
+	snippet := parse("aaa${1:bbb${2:ccc}}$0", false)
+
+	assertEqual(t, len(snippet.placeholders()), 3)
+	second := *snippet.placeholders()[1]
+	assertEqual(t, second.index, 2)
+
+	enclosing := snippet.enclosingPlaceholders(second)
+	assertEqual(t, len(enclosing), 1)
+	assertEqual(t, enclosing[0].index, 1)
+
+	nested := parse("ddd$1eee$0", false)
+	snippet.ReplacePlaceholder(2, nested.children())
+
+	snippetPlaceholders := snippet.placeholders()
+	assertEqual(t, snippet.text, "aaabbbdddeee")
+	assertEqual(t, len(snippetPlaceholders), 4)
+	assertEqual(t, snippetPlaceholders[0].index, "1")
+	assertEqual(t, snippetPlaceholders[1].index, "1")
+	assertEqual(t, snippetPlaceholders[2].index, "0")
+	assertEqual(t, snippetPlaceholders[3].index, "0")
+
+	newEnclosing := snippet.enclosingPlaceholders(*snippetPlaceholders[1])
+	assertEqual(t, newEnclosing[0], snippetPlaceholders[0])
+	assertEqual(t, len(newEnclosing), 1)
+	assertEqual(t, newEnclosing[0].index, "1")
+}
+
+func TextmateSnippetReplace2(t *testing.T) {
+	snippet := parse("aaa${1:bbb${2:ccc}}$0", false)
+
+	assertEqual(t, len(snippet.placeholders()), 3)
+	second := snippet.placeholders()[1]
+	assertEqual(t, second.index, 2)
+
+	nested := parse("dddeee$0", false)
+	snippet.ReplacePlaceholder(2, nested.children())
+
+	assertEqual(t, snippet.text, "aaabbbdddeee")
+	assertEqual(t, len(snippet.placeholders()), 3)
+}
+
+func TestSnippetOrderPlaceholders(t *testing.T) {
+	_10 := newPlaceholder(10, &markers{})
+	_2 := newPlaceholder(2, &markers{})
+
+	assertEqual(t, compareByIndex(*_10, *_2), 1)
+}
+
+func TestMaxCallStackExceeded(t *testing.T) {
+	newSnippetParser().parse("${1:${foo:${1}}}", false, false)
+}
diff --git a/prototype/snippet/template.go b/prototype/snippet/template.go
new file mode 100644
index 00000000..7f64f344
--- /dev/null
+++ b/prototype/snippet/template.go
@@ -0,0 +1,111 @@
+package snippet
+
+type textmateSnippet struct {
+	markerImpl
+	_placeholders *[]*placeholder
+}
+
+func newTextmateSnippet(children *markers) *textmateSnippet {
+	tms := &textmateSnippet{
+		markerImpl: markerImpl{
+			_children: children,
+		},
+		_placeholders: nil,
+	}
+	tms._children.setParents(tms)
+	return tms
+}
+
+func (tms *textmateSnippet) placeholders() []*placeholder {
+	if tms._placeholders == nil {
+		// Compute and cache the placeholder list on first use.
+		tms._placeholders = &[]*placeholder{}
+		walk(tms._children, func(candidate marker) bool {
+			switch candidate.(type) {
+			case *placeholder:
+				{
+					*tms._placeholders = append(*tms._placeholders, candidate.(*placeholder))
+				}
+			}
+			return true
+		})
+	}
+	return *tms._placeholders
+}
+
+func (tms *textmateSnippet) offset(m marker) int {
+	pos := 0
+	found := false
+	walk(tms._children, func(candidate marker) bool {
+		if candidate == m {
+			found = true
+			return false
+		}
+		pos += candidate.len()
+		return true
+	})
+
+	if !found {
+		return -1
+	}
+	return pos
+}
+
+func (tms *textmateSnippet) fullLen(m marker) int {
+	ret := 0
+	walk(&markers{m}, func(m marker) bool {
+		ret += m.len()
+		return true
+	})
+	return ret
+}
+
+func (tms *textmateSnippet) enclosingPlaceholders(ph placeholder) []*placeholder {
+	ret := []*placeholder{}
+	parent := ph._parent
+	for parent != nil {
+		switch parent.(type) {
+		case *placeholder:
+			{
+				ret = append(ret, parent.(*placeholder))
+			}
+		}
+		parent = parent.parent()
+	}
+	return ret
+}
+
+func (tms *textmateSnippet) text() string {
+	return tms._children.String()
+}
+
+func (tms *textmateSnippet) Evaluate(values map[string]string) (string, error) {
+	walk(tms.children(), func(candidate marker) bool {
+		switch casted := candidate.(type) {
+		case *variable:
+			{
+				if resolved, ok := values[casted.name]; ok {
+					casted.resolvedValue = &resolved
+				}
+				if casted.isDefined() {
+					// remove default value from resolved variable
+					casted._children = &markers{}
+				}
+			}
+		}
+		return true
+	})
+
+	// TODO: Explicitly disallow tabstops and empty placeholders. Error out if
+	// present.
+
+	return tms.text(), nil
+}
+
+func (tms *textmateSnippet) ReplacePlaceholder(idx index, replaceWith *markers) {
+	newChildren := make(markers, len(*replaceWith))
+	copy(newChildren, *replaceWith)
+	newChildren.delete(int(idx))
+	tms._children = &newChildren
+	tms._placeholders = nil
+}
diff --git a/prototype/snippet/util.go b/prototype/snippet/util.go
new file mode 100644
index 00000000..22109fb1
--- /dev/null
+++ b/prototype/snippet/util.go
@@ -0,0 +1,82 @@
+package snippet
+
+func compareByIndex(a placeholder, b placeholder) int {
+	if a.index == b.index {
+		return 0
+	} else if a.isFinalTabstop() {
+		return 1
+	} else if b.isFinalTabstop() {
+		return -1
+	} else if a.index < b.index {
+		return -1
+	} else if a.index > b.index {
+		return 1
+	}
+	return 0
+}
+
+func walk(ms *markers, visitor func(m marker) bool) {
+	stack := make(markers, len(*ms))
+	copy(stack, *ms)
+
+	for len(stack) > 0 {
+		// NOTE: Declare `m` separately so that we can use the `=` operator
+		// (rather than `:=`) to make it clear that we're not shadowing `stack`.
+		var m marker
+		m, stack = stack[0], stack[1:]
+		recurse := visitor(m)
+		if !recurse {
+			break
+		}
+		stack = append(*m.children(), stack...)
+	}
+}
+
+// * fill in defaults for empty placeholders
+// * compact sibling text markers
+func walkDefaults(ms *markers, placeholderDefaultValues map[int]*markers) {
+
+	for i := 0; i < len(*ms); i++ {
+		thisMarker := (*ms)[i]
+
+		switch thisMarker.(type) {
+		case *placeholder:
+			{
+				pl := thisMarker.(*placeholder)
+				// fill in default values for repeated placeholders
+				// e.g., `${1:foo}and$1` becomes `${1:foo}and${1:foo}`
+				if defaultVal, ok := placeholderDefaultValues[pl.index]; !ok {
+					placeholderDefaultValues[pl.index] = pl._children
+					walkDefaults(pl._children, placeholderDefaultValues)
+
+				} else if len(*pl._children) == 0 {
+					// copy children from first placeholder definition, no need to
+					// recurse on them because they have been visited already
+					children := make(markers, len(*defaultVal))
+					pl._children = &children
+					copy(*pl._children, *defaultVal)
+				}
+			}
+		case *variable:
+			{
+				walkDefaults(thisMarker.children(), placeholderDefaultValues)
+			}
+		case *text:
+			{
+				if i <= 0 {
+					continue
+				}
+
+				prev := (*ms)[i-1]
+				switch prev.(type) {
+				case *text:
+					{
+						(*ms)[i-1].(*text).data += (*ms)[i].(*text).data
+						ms.delete(i)
+						i--
+					}
+				}
+			}
+		}
+	}
+}
-- 
GitLab