Diffstat (limited to 'caddyconfig/caddyfile/parse.go')
-rw-r--r--   caddyconfig/caddyfile/parse.go   67
1 file changed, 65 insertions(+), 2 deletions(-)
diff --git a/caddyconfig/caddyfile/parse.go b/caddyconfig/caddyfile/parse.go
index 17b0ca8e2..17d824efd 100644
--- a/caddyconfig/caddyfile/parse.go
+++ b/caddyconfig/caddyfile/parse.go
@@ -364,9 +364,45 @@ func (p *parser) doImport(nesting int) error {
 	// set up a replacer for non-variadic args replacement
 	repl := makeArgsReplacer(args)
 
+	// grab all the tokens (if any) from within a block that follows the import
+	var blockTokens []Token
+	for currentNesting := p.Nesting(); p.NextBlock(currentNesting); {
+		blockTokens = append(blockTokens, p.Token())
+	}
+	// initialize with size 1
+	blockMapping := make(map[string][]Token, 1)
+	if len(blockTokens) > 0 {
+		// use these tokens to create a new dispenser, and then use it to parse each block
+		bd := NewDispenser(blockTokens)
+		for bd.Next() {
+			// see if we can grab a key
+			var currentMappingKey string
+			if bd.Val() == "{" {
+				return p.Err("anonymous blocks are not supported")
+			}
+			currentMappingKey = bd.Val()
+			currentMappingTokens := []Token{}
+			// read all args until end of line / {
+			if bd.NextArg() {
+				currentMappingTokens = append(currentMappingTokens, bd.Token())
+				for bd.NextArg() {
+					currentMappingTokens = append(currentMappingTokens, bd.Token())
+				}
+				// TODO(elee1766): we don't enter another mapping here because it's annoying to extract the { and } properly.
+				// maybe someone can do that in the future
+			} else {
+				// attempt to enter a block and add tokens to the currentMappingTokens
+				for mappingNesting := bd.Nesting(); bd.NextBlock(mappingNesting); {
+					currentMappingTokens = append(currentMappingTokens, bd.Token())
+				}
+			}
+			blockMapping[currentMappingKey] = currentMappingTokens
+		}
+	}
+
 	// splice out the import directive and its arguments
 	// (2 tokens, plus the length of args)
-	tokensBefore := p.tokens[:p.cursor-1-len(args)]
+	tokensBefore := p.tokens[:p.cursor-1-len(args)-len(blockTokens)]
 	tokensAfter := p.tokens[p.cursor+1:]
 	var importedTokens []Token
 	var nodes []string
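
For context, the loop in the hunk above gathers the tokens of the block that follows the import directive and keys them by the first token of each line: the remaining tokens on that line, or the line's own nested block, become the value. Below is a minimal sketch of that walk, assuming the package's exported NewTestDispenser helper; the keys path and headers and their values are made up, and plain strings are collected where the parser collects Tokens.

package main

import (
	"fmt"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
)

func main() {
	// Made-up block body; in the parser, these tokens come from the braces
	// that follow an import directive.
	d := caddyfile.NewTestDispenser(`
	path /var/log/access.log
	headers {
		X-Custom-Header value
	}
	`)

	mapping := make(map[string][]string)
	for d.Next() {
		key := d.Val() // first token on the line becomes the mapping key
		var vals []string
		if d.NextArg() {
			// remaining same-line arguments
			vals = append(vals, d.Val())
			for d.NextArg() {
				vals = append(vals, d.Val())
			}
		} else {
			// otherwise descend into the line's nested block, as the parser does
			for nesting := d.Nesting(); d.NextBlock(nesting); {
				vals = append(vals, d.Val())
			}
		}
		mapping[key] = vals
	}
	fmt.Println(mapping)
}

The resulting map has the same shape as blockMapping above: path maps to its single argument, and headers maps to the tokens inside its nested block.
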
@@ -495,6 +531,33 @@ func (p *parser) doImport(nesting int) error {
 				maybeSnippet = false
 			}
 		}
+		// if it is {block}, we substitute with all tokens in the block
+		// if it is {blocks.*}, we substitute with the tokens in the mapping for the *
+		var skip bool
+		var tokensToAdd []Token
+		switch {
+		case token.Text == "{block}":
+			tokensToAdd = blockTokens
+		case strings.HasPrefix(token.Text, "{blocks.") && strings.HasSuffix(token.Text, "}"):
+			// {blocks.foo.bar} will be extracted to key `foo.bar`
+			blockKey := strings.TrimPrefix(strings.TrimSuffix(token.Text, "}"), "{blocks.")
+			val, ok := blockMapping[blockKey]
+			if ok {
+				tokensToAdd = val
+			}
+		default:
+			skip = true
+		}
+		if !skip {
+			if len(tokensToAdd) == 0 {
+				// if there is no content in the snippet block, don't do any replacement
+				// this allows snippets which contained {block}/{blocks.*} before this change to continue functioning as normal
+				tokensCopy = append(tokensCopy, token)
+			} else {
+				tokensCopy = append(tokensCopy, tokensToAdd...)
+			}
+			continue
+		}
 
 		if maybeSnippet {
 			tokensCopy = append(tokensCopy, token)
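
The switch in this hunk recognizes exactly two placeholder spellings and leaves every other token to the pre-existing copy logic; if a recognized placeholder has nothing mapped to it, the placeholder token itself is kept, so older snippets keep working. A small self-contained sketch of the matching, reusing the same strings calls (the sample placeholder texts are made up):

package main

import (
	"fmt"
	"strings"
)

func main() {
	for _, text := range []string{"{block}", "{blocks.foo.bar}", "{args[0]}"} {
		switch {
		case text == "{block}":
			// replaced with every token collected from the import's block
			fmt.Println(text, "-> whole block")
		case strings.HasPrefix(text, "{blocks.") && strings.HasSuffix(text, "}"):
			// {blocks.foo.bar} is looked up in the mapping under the key "foo.bar"
			key := strings.TrimPrefix(strings.TrimSuffix(text, "}"), "{blocks.")
			fmt.Println(text, "-> key", key)
		default:
			// not a block placeholder; handled by the normal token copy path
			fmt.Println(text, "-> unchanged")
		}
	}
}
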
@@ -516,7 +579,7 @@ func (p *parser) doImport(nesting int) error {
 	// splice the imported tokens in the place of the import statement
 	// and rewind cursor so Next() will land on first imported token
 	p.tokens = append(tokensBefore, append(tokensCopy, tokensAfter...)...)
-	p.cursor -= len(args) + 1
+	p.cursor -= len(args) + len(blockTokens) + 1
 
 	return nil
 }
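
Taken together, the change lets an import statement pass a block into a snippet, where {block} expands to the whole block and {blocks.<key>} expands to the tokens of the line keyed by <key>. A rough end-to-end sketch, assuming the package's exported Parse function; the snippet name logging, the log directive, and the path are illustrative only, and Parse only groups tokens into server blocks, so it does not validate directive semantics:

package main

import (
	"fmt"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
)

func main() {
	// Illustrative Caddyfile: the import passes a block whose "path" line
	// fills the {blocks.path} placeholder inside the (logging) snippet.
	input := []byte(`(logging) {
	log {
		output file {blocks.path}
	}
}

example.com {
	import logging {
		path /var/log/access.log
	}
}
`)

	blocks, err := caddyfile.Parse("Caddyfile", input)
	if err != nil {
		panic(err)
	}
	// After import expansion, the example.com block's tokens contain the
	// substituted log path in place of {blocks.path}.
	fmt.Printf("parsed %d server block(s)\n", len(blocks))
}
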