Mirror of https://github.com/caddyserver/caddy.git, synced 2024-11-25 07:47:01 +08:00
caddyfile: Implement variadics for import args placeholders (#5249)
* implement variadic placeholders; imported snippets reflect actual lines in file
* add import directive line number for imported snippets; add tests for parsing
* add realfile field to help debug import cycle detection
* use file field to reflect import chain
* Switch syntax, deprecate old syntax, refactoring
  - Moved the import args handling to a separate file
  - Using {args[0:1]} syntax now
  - Deprecate {args.*} syntax
  - Use a replacer map for better control over the parsing
  - Add plenty of warnings when invalid placeholders are detected
  - Renaming variables, cleanup comments for readability
  - More tests to cover edge cases I could think of
  - Minor cleanup to snippet tracking in tokens; drop a redundant boolean field in tokens

Co-authored-by: Francis Lavoie <lavofr@gmail.com>
This commit is contained in:
parent bf54892a73
commit 8bc05e598d

caddyconfig/caddyfile/importargs.go (new file, 142 lines added)
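Before the diff, an illustrative sketch (not from the commit) of the new placeholder forms, run end to end through the public caddyfile adapter API that the tests below also use; the snippet name, directive, and arguments here are made up:

package main

import (
	"fmt"

	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
	"github.com/caddyserver/caddy/v2/caddyconfig/httpcaddyfile"
)

func main() {
	// Hypothetical Caddyfile: {args[0]} picks a single import arg, while the
	// new variadic form {args[1:]} expands to every remaining arg as its own token.
	input := `(resp) {
	respond {args[0]} {args[1:]}
}

:8080 {
	import resp "hello world" 200
}
`
	adapter := caddyfile.Adapter{ServerType: httpcaddyfile.ServerType{}}
	cfgJSON, warnings, err := adapter.Adapt([]byte(input), nil)
	if err != nil {
		panic(err)
	}
	fmt.Println(len(warnings), string(cfgJSON))
}

With the import args being "hello world" and 200, {args[0]} resolves to the single quoted token and {args[1:]} expands to each remaining arg as its own token, so the snippet body becomes respond "hello world" 200.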
@@ -0,0 +1,142 @@
// Copyright 2015 Matthew Holt and The Caddy Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package caddyfile

import (
	"regexp"
	"strconv"
	"strings"

	"github.com/caddyserver/caddy/v2"
	"go.uber.org/zap"
)

// parseVariadic determines if the token is a variadic placeholder,
// and if so, determines the index range (start/end) of args to use.
// Returns a boolean signaling whether a variadic placeholder was found,
// and the start and end indices.
func parseVariadic(token Token, argCount int) (bool, int, int) {
	if !strings.HasPrefix(token.Text, "{args[") {
		return false, 0, 0
	}
	if !strings.HasSuffix(token.Text, "]}") {
		return false, 0, 0
	}

	argRange := strings.TrimSuffix(strings.TrimPrefix(token.Text, "{args["), "]}")
	if argRange == "" {
		caddy.Log().Named("caddyfile").Warn(
			"Placeholder "+token.Text+" cannot have an empty index",
			zap.String("file", token.File+":"+strconv.Itoa(token.Line)))
		return false, 0, 0
	}

	start, end, found := strings.Cut(argRange, ":")

	// If no ":" delimiter is found, this is not a variadic.
	// The replacer will pick this up.
	if !found {
		return false, 0, 0
	}

	var (
		startIndex = 0
		endIndex   = argCount
		err        error
	)
	if start != "" {
		startIndex, err = strconv.Atoi(start)
		if err != nil {
			caddy.Log().Named("caddyfile").Warn(
				"Variadic placeholder "+token.Text+" has an invalid start index",
				zap.String("file", token.File+":"+strconv.Itoa(token.Line)))
			return false, 0, 0
		}
	}
	if end != "" {
		endIndex, err = strconv.Atoi(end)
		if err != nil {
			caddy.Log().Named("caddyfile").Warn(
				"Variadic placeholder "+token.Text+" has an invalid end index",
				zap.String("file", token.File+":"+strconv.Itoa(token.Line)))
			return false, 0, 0
		}
	}

	// bound check
	if startIndex < 0 || startIndex > endIndex || endIndex > argCount {
		caddy.Log().Named("caddyfile").Warn(
			"Variadic placeholder "+token.Text+" indices are out of bounds, only "+strconv.Itoa(argCount)+" argument(s) exist",
			zap.String("file", token.File+":"+strconv.Itoa(token.Line)))
		return false, 0, 0
	}
	return true, startIndex, endIndex
}
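// ---------------------------------------------------------------------------
// Illustrative aside, not part of this commit: a hypothetical example-test
// file (it would have to live in this package to reach the unexported
// function) showing how the range forms are interpreted. An omitted bound
// defaults to 0 or to the number of available args; a token without ":" is
// not variadic.
// ---------------------------------------------------------------------------

// importargs_example_test.go (hypothetical)
package caddyfile

import "fmt"

func Example_parseVariadic() {
	tok := func(text string) Token { return Token{File: "Caddyfile", Line: 1, Text: text} }

	// With 3 args available, omitted bounds default to 0 and len(args).
	fmt.Println(parseVariadic(tok("{args[:]}"), 3))
	fmt.Println(parseVariadic(tok("{args[1:]}"), 3))
	fmt.Println(parseVariadic(tok("{args[:2]}"), 3))

	// No ":" delimiter: not variadic; the args replacer resolves {args[1]}.
	fmt.Println(parseVariadic(tok("{args[1]}"), 3))

	// Output:
	// true 0 3
	// true 1 3
	// true 0 2
	// false 0 0
}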
// makeArgsReplacer prepares a Replacer which can replace
// non-variadic args placeholders in imported tokens.
func makeArgsReplacer(args []string) *caddy.Replacer {
	repl := caddy.NewEmptyReplacer()
	repl.Map(func(key string) (any, bool) {
		// TODO: Remove the deprecated {args.*} placeholder
		// support at some point in the future
		if matches := argsRegexpIndexDeprecated.FindStringSubmatch(key); len(matches) > 0 {
			value, err := strconv.Atoi(matches[1])
			if err != nil {
				caddy.Log().Named("caddyfile").Warn(
					"Placeholder {args." + matches[1] + "} has an invalid index")
				return nil, false
			}
			if value >= len(args) {
				caddy.Log().Named("caddyfile").Warn(
					"Placeholder {args." + matches[1] + "} index is out of bounds, only " + strconv.Itoa(len(args)) + " argument(s) exist")
				return nil, false
			}
			caddy.Log().Named("caddyfile").Warn(
				"Placeholder {args." + matches[1] + "} deprecated, use {args[" + matches[1] + "]} instead")
			return args[value], true
		}

		// Handle args[*] form
		if matches := argsRegexpIndex.FindStringSubmatch(key); len(matches) > 0 {
			if strings.Contains(matches[1], ":") {
				caddy.Log().Named("caddyfile").Warn(
					"Variadic placeholder {args[" + matches[1] + "]} must be a token on its own")
				return nil, false
			}
			value, err := strconv.Atoi(matches[1])
			if err != nil {
				caddy.Log().Named("caddyfile").Warn(
					"Placeholder {args[" + matches[1] + "]} has an invalid index")
				return nil, false
			}
			if value >= len(args) {
				caddy.Log().Named("caddyfile").Warn(
					"Placeholder {args[" + matches[1] + "]} index is out of bounds, only " + strconv.Itoa(len(args)) + " argument(s) exist")
				return nil, false
			}
			return args[value], true
		}

		// Not an args placeholder, ignore
		return nil, false
	})
	return repl
}

var (
	argsRegexpIndexDeprecated = regexp.MustCompile(`args\.(.+)`)
	argsRegexpIndex           = regexp.MustCompile(`args\[(.+)]`)
)
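A companion sketch (again hypothetical and in-package, not part of the commit) of what the replacer built above resolves; malformed or out-of-bounds placeholders are left untouched by ReplaceKnown and only logged:

// importargs_replacer_example_test.go (hypothetical)
package caddyfile

import "fmt"

func Example_makeArgsReplacer() {
	repl := makeArgsReplacer([]string{"example.com", "8080"})

	fmt.Println(repl.ReplaceKnown("{args[0]}:{args[1]}", "")) // new syntax
	fmt.Println(repl.ReplaceKnown("{args.0}", ""))            // deprecated syntax, logs a warning
	fmt.Println(repl.ReplaceKnown("{args[5]}", ""))           // out of bounds, left as-is

	// Output:
	// example.com:8080
	// example.com
	// {args[5]}
}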
@@ -36,14 +36,31 @@ type (
 	// Token represents a single parsable unit.
 	Token struct {
 		File        string
+		origFile    string
 		Line        int
 		Text        string
 		wasQuoted   rune // enclosing quote character, if any
-		inSnippet   bool
 		snippetName string
 	}
 )
 
+// originalFile gets original filename before import modification.
+func (t Token) originalFile() string {
+	if t.origFile != "" {
+		return t.origFile
+	}
+	return t.File
+}
+
+// updateFile updates the token's source filename for error display
+// and remembers the original filename. Used during "import" processing.
+func (t *Token) updateFile(file string) {
+	if t.origFile == "" {
+		t.origFile = t.File
+	}
+	t.File = file
+}
+
 // load prepares the lexer to scan an input for tokens.
 // It discards any leading byte order mark.
 func (l *lexer) load(input io.Reader) error {
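As an aside, a hypothetical in-package sketch (not part of the commit) of the bookkeeping these two methods provide: updateFile rewrites the display filename on each import but keeps the first value in origFile, which originalFile then reports for cycle detection:

// token_file_example_test.go (hypothetical)
package caddyfile

import "fmt"

func Example_tokenUpdateFile() {
	t := Token{File: "snippets.caddy", Line: 3, Text: "respond"}

	// Simulate two levels of import rewriting the display name.
	t.updateFile("Caddyfile:10 (import common)")
	t.updateFile("Caddyfile:22 (import site)")

	fmt.Println(t.File)
	fmt.Println(t.originalFile())

	// Output:
	// Caddyfile:22 (import site)
	// snippets.caddy
}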
@@ -20,7 +20,6 @@ import (
 	"io"
 	"os"
 	"path/filepath"
-	"strconv"
 	"strings"
 
 	"github.com/caddyserver/caddy/v2"
@@ -173,11 +172,10 @@ func (p *parser) begin() error {
 			return err
 		}
 		// Just as we need to track which file the token comes from, we need to
-		// keep track of which snippets do the tokens come from. This is helpful
-		// in tracking import cycles across files/snippets by namespacing them. Without
-		// this we end up with false-positives in cycle-detection.
+		// keep track of which snippet the token comes from. This is helpful
+		// in tracking import cycles across files/snippets by namespacing them.
+		// Without this, we end up with false-positives in cycle-detection.
 		for k, v := range tokens {
-			v.inSnippet = true
 			v.snippetName = name
 			tokens[k] = v
 		}
@@ -337,11 +335,8 @@ func (p *parser) doImport() error {
 	// grab remaining args as placeholder replacements
 	args := p.RemainingArgs()
 
-	// add args to the replacer
-	repl := caddy.NewEmptyReplacer()
-	for index, arg := range args {
-		repl.Set("args."+strconv.Itoa(index), arg)
-	}
+	// set up a replacer for non-variadic args replacement
+	repl := makeArgsReplacer(args)
 
 	// splice out the import directive and its arguments
 	// (2 tokens, plus the length of args)
@@ -416,8 +411,8 @@ func (p *parser) doImport() error {
 		nodes = matches
 	}
 
-	nodeName := p.File()
-	if p.Token().inSnippet {
+	nodeName := p.Token().originalFile()
+	if p.Token().snippetName != "" {
 		nodeName += fmt.Sprintf(":%s", p.Token().snippetName)
 	}
 	p.importGraph.addNode(nodeName)
@@ -428,13 +423,29 @@ func (p *parser) doImport() error {
 	}
 
 	// copy the tokens so we don't overwrite p.definedSnippets
-	tokensCopy := make([]Token, len(importedTokens))
-	copy(tokensCopy, importedTokens)
+	tokensCopy := make([]Token, 0, len(importedTokens))
 
 	// run the argument replacer on the tokens
-	for index, token := range tokensCopy {
+	// golang for range slice return a copy of value
+	// similarly, append also copy value
+	for _, token := range importedTokens {
+		// set the token's file to refer to import directive line number and snippet name
+		if token.snippetName != "" {
+			token.updateFile(fmt.Sprintf("%s:%d (import %s)", token.File, p.Line(), token.snippetName))
+		} else {
+			token.updateFile(fmt.Sprintf("%s:%d (import)", token.File, p.Line()))
+		}
+
+		foundVariadic, startIndex, endIndex := parseVariadic(token, len(args))
+		if foundVariadic {
+			for _, arg := range args[startIndex:endIndex] {
+				token.Text = arg
+				tokensCopy = append(tokensCopy, token)
+			}
+		} else {
 			token.Text = repl.ReplaceKnown(token.Text, "")
-		tokensCopy[index] = token
+			tokensCopy = append(tokensCopy, token)
+		}
 	}
 
 	// splice the imported tokens in the place of the import statement
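To see the splice rule in isolation, a small standalone sketch (plain strings instead of Tokens, not the parser itself): a variadic placeholder expands to one element per selected arg, while every other token is emitted once (the real loop runs those through the replacer):

package main

import (
	"fmt"
	"strings"
)

func main() {
	args := []string{"hello", "200"}
	imported := []string{"respond", "{args[0:]}"} // tokens of an imported snippet body

	var out []string
	for _, text := range imported {
		if strings.HasPrefix(text, "{args[") && strings.HasSuffix(text, "]}") && strings.Contains(text, ":") {
			// variadic: pretend parseVariadic returned the full range (0, len(args))
			out = append(out, args...)
			continue
		}
		out = append(out, text) // non-variadic tokens would go through the replacer
	}

	fmt.Println(out) // [respond hello 200]
}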
@@ -21,6 +21,88 @@ import (
 	"testing"
 )
 
+func TestParseVariadic(t *testing.T) {
+	var args = make([]string, 10)
+	for i, tc := range []struct {
+		input  string
+		result bool
+	}{
+		{
+			input:  "",
+			result: false,
+		},
+		{
+			input:  "{args[1",
+			result: false,
+		},
+		{
+			input:  "1]}",
+			result: false,
+		},
+		{
+			input:  "{args[:]}aaaaa",
+			result: false,
+		},
+		{
+			input:  "aaaaa{args[:]}",
+			result: false,
+		},
+		{
+			input:  "{args.}",
+			result: false,
+		},
+		{
+			input:  "{args.1}",
+			result: false,
+		},
+		{
+			input:  "{args[]}",
+			result: false,
+		},
+		{
+			input:  "{args[:]}",
+			result: true,
+		},
+		{
+			input:  "{args[:]}",
+			result: true,
+		},
+		{
+			input:  "{args[0:]}",
+			result: true,
+		},
+		{
+			input:  "{args[:0]}",
+			result: true,
+		},
+		{
+			input:  "{args[-1:]}",
+			result: false,
+		},
+		{
+			input:  "{args[:11]}",
+			result: false,
+		},
+		{
+			input:  "{args[10:0]}",
+			result: false,
+		},
+		{
+			input:  "{args[0:10]}",
+			result: true,
+		},
+	} {
+		token := Token{
+			File: "test",
+			Line: 1,
+			Text: tc.input,
+		}
+		if v, _, _ := parseVariadic(token, len(args)); v != tc.result {
+			t.Errorf("Test %d error expectation failed Expected: %t, got %t", i, tc.result, v)
+		}
+	}
+}
+
 func TestAllTokens(t *testing.T) {
 	input := []byte("a b c\nd e")
 	expected := []string{"a", "b", "c", "d", "e"}
@@ -1,6 +1,7 @@
 package httpcaddyfile
 
 import (
+	"strings"
 	"testing"
 
 	"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
@@ -213,3 +214,45 @@ func TestRedirDirectiveSyntax(t *testing.T) {
 		}
 	}
 }
+
+func TestImportErrorLine(t *testing.T) {
+	for i, tc := range []struct {
+		input     string
+		errorFunc func(err error) bool
+	}{
+		{
+			input: `(t1) {
+				abort {args[:]}
+			}
+			:8080 {
+				import t1
+				import t1 true
+			}`,
+			errorFunc: func(err error) bool {
+				return err != nil && strings.Contains(err.Error(), "Caddyfile:6 (import t1):2")
+			},
+		},
+		{
+			input: `(t1) {
+				abort {args[:]}
+			}
+			:8080 {
+				import t1 true
+			}`,
+			errorFunc: func(err error) bool {
+				return err != nil && strings.Contains(err.Error(), "Caddyfile:5 (import t1):2")
+			},
+		},
+	} {
+		adapter := caddyfile.Adapter{
+			ServerType: ServerType{},
+		}
+
+		_, _, err := adapter.Adapt([]byte(tc.input), nil)
+
+		if !tc.errorFunc(err) {
+			t.Errorf("Test %d error expectation failed, got %s", i, err)
+			continue
+		}
+	}
+}