aboutsummaryrefslogtreecommitdiffhomepage
diff options
context:
space:
mode:
authora <[email protected]>2024-06-18 22:48:21 -0500
committerGitHub <[email protected]>2024-06-18 22:48:21 -0500
commit73854014d9fd072282e4599021f2d2505d222fd1 (patch)
treec7e899727422594a95e5cc597737b6f663d29b95
parentc0d9a2383e7c6b3f370605834fc9cf62ba3753eb (diff)
parent99dcdf7e426f0dcbdffe510f241ae8a4fd5a56e6 (diff)
downloadcaddy-73854014d9fd072282e4599021f2d2505d222fd1.tar.gz
caddy-73854014d9fd072282e4599021f2d2505d222fd1.zip
Merge branch 'master' into caddytest-2
-rw-r--r--caddyconfig/caddyfile/parse.go67
-rw-r--r--caddytest/integration/caddyfile_adapt/import_block_snippet.caddyfiletest58
-rw-r--r--caddytest/integration/caddyfile_adapt/import_block_snippet_args.caddyfiletest56
-rw-r--r--caddytest/integration/caddyfile_adapt/import_blocks_snippet.caddyfiletest76
-rw-r--r--caddytest/integration/caddyfile_adapt/import_blocks_snippet_nested.caddyfiletest82
-rw-r--r--modules/caddyhttp/matchers.go20
-rw-r--r--modules/caddyhttp/matchers_test.go9
-rw-r--r--modules/caddyhttp/reverseproxy/selectionpolicies.go25
8 files changed, 385 insertions, 8 deletions
diff --git a/caddyconfig/caddyfile/parse.go b/caddyconfig/caddyfile/parse.go
index 17b0ca8e2..17d824efd 100644
--- a/caddyconfig/caddyfile/parse.go
+++ b/caddyconfig/caddyfile/parse.go
@@ -364,9 +364,45 @@ func (p *parser) doImport(nesting int) error {
// set up a replacer for non-variadic args replacement
repl := makeArgsReplacer(args)
+ // grab all the tokens (if one exists) from within a block that follows the import
+ var blockTokens []Token
+ for currentNesting := p.Nesting(); p.NextBlock(currentNesting); {
+ blockTokens = append(blockTokens, p.Token())
+ }
+ // initialize with size 1
+ blockMapping := make(map[string][]Token, 1)
+ if len(blockTokens) > 0 {
+ // use such tokens to create a new dispenser, and then use it to parse each block
+ bd := NewDispenser(blockTokens)
+ for bd.Next() {
+ // see if we can grab a key
+ var currentMappingKey string
+ if bd.Val() == "{" {
+ return p.Err("anonymous blocks are not supported")
+ }
+ currentMappingKey = bd.Val()
+ currentMappingTokens := []Token{}
+ // read all args until end of line / {
+ if bd.NextArg() {
+ currentMappingTokens = append(currentMappingTokens, bd.Token())
+ for bd.NextArg() {
+ currentMappingTokens = append(currentMappingTokens, bd.Token())
+ }
+ // TODO(elee1766): we don't enter another mapping here because it's annoying to extract the { and } properly.
+ // maybe someone can do that in the future
+ } else {
+ // attempt to enter a block and add tokens to the currentMappingTokens
+ for mappingNesting := bd.Nesting(); bd.NextBlock(mappingNesting); {
+ currentMappingTokens = append(currentMappingTokens, bd.Token())
+ }
+ }
+ blockMapping[currentMappingKey] = currentMappingTokens
+ }
+ }
+
// splice out the import directive and its arguments
// (2 tokens, plus the length of args)
- tokensBefore := p.tokens[:p.cursor-1-len(args)]
+ tokensBefore := p.tokens[:p.cursor-1-len(args)-len(blockTokens)]
tokensAfter := p.tokens[p.cursor+1:]
var importedTokens []Token
var nodes []string
@@ -495,6 +531,33 @@ func (p *parser) doImport(nesting int) error {
maybeSnippet = false
}
}
+ // if it is {block}, we substitute with all tokens in the block
+ // if it is {blocks.*}, we substitute with the tokens in the mapping for the *
+ var skip bool
+ var tokensToAdd []Token
+ switch {
+ case token.Text == "{block}":
+ tokensToAdd = blockTokens
+ case strings.HasPrefix(token.Text, "{blocks.") && strings.HasSuffix(token.Text, "}"):
+ // {blocks.foo.bar} will be extracted to key `foo.bar`
+ blockKey := strings.TrimPrefix(strings.TrimSuffix(token.Text, "}"), "{blocks.")
+ val, ok := blockMapping[blockKey]
+ if ok {
+ tokensToAdd = val
+ }
+ default:
+ skip = true
+ }
+ if !skip {
+ if len(tokensToAdd) == 0 {
+ // if there is no content in the snippet block, don't do any replacement
+ // this allows snippets which contained {block}/{blocks.*} before this change to continue functioning as normal
+ tokensCopy = append(tokensCopy, token)
+ } else {
+ tokensCopy = append(tokensCopy, tokensToAdd...)
+ }
+ continue
+ }
if maybeSnippet {
tokensCopy = append(tokensCopy, token)
@@ -516,7 +579,7 @@ func (p *parser) doImport(nesting int) error {
// splice the imported tokens in the place of the import statement
// and rewind cursor so Next() will land on first imported token
p.tokens = append(tokensBefore, append(tokensCopy, tokensAfter...)...)
- p.cursor -= len(args) + 1
+ p.cursor -= len(args) + len(blockTokens) + 1
return nil
}
diff --git a/caddytest/integration/caddyfile_adapt/import_block_snippet.caddyfiletest b/caddytest/integration/caddyfile_adapt/import_block_snippet.caddyfiletest
new file mode 100644
index 000000000..a60c238ce
--- /dev/null
+++ b/caddytest/integration/caddyfile_adapt/import_block_snippet.caddyfiletest
@@ -0,0 +1,58 @@
+(snippet) {
+ header {
+ {block}
+ }
+}
+
+example.com {
+ import snippet {
+ foo bar
+ }
+}
+----------
+{
+ "apps": {
+ "http": {
+ "servers": {
+ "srv0": {
+ "listen": [
+ ":443"
+ ],
+ "routes": [
+ {
+ "match": [
+ {
+ "host": [
+ "example.com"
+ ]
+ }
+ ],
+ "handle": [
+ {
+ "handler": "subroute",
+ "routes": [
+ {
+ "handle": [
+ {
+ "handler": "headers",
+ "response": {
+ "set": {
+ "Foo": [
+ "bar"
+ ]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "terminal": true
+ }
+ ]
+ }
+ }
+ }
+ }
+}
diff --git a/caddytest/integration/caddyfile_adapt/import_block_snippet_args.caddyfiletest b/caddytest/integration/caddyfile_adapt/import_block_snippet_args.caddyfiletest
new file mode 100644
index 000000000..7f2e68b79
--- /dev/null
+++ b/caddytest/integration/caddyfile_adapt/import_block_snippet_args.caddyfiletest
@@ -0,0 +1,56 @@
+(snippet) {
+ {block}
+}
+
+example.com {
+ import snippet {
+ header foo bar
+ }
+}
+----------
+{
+ "apps": {
+ "http": {
+ "servers": {
+ "srv0": {
+ "listen": [
+ ":443"
+ ],
+ "routes": [
+ {
+ "match": [
+ {
+ "host": [
+ "example.com"
+ ]
+ }
+ ],
+ "handle": [
+ {
+ "handler": "subroute",
+ "routes": [
+ {
+ "handle": [
+ {
+ "handler": "headers",
+ "response": {
+ "set": {
+ "Foo": [
+ "bar"
+ ]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "terminal": true
+ }
+ ]
+ }
+ }
+ }
+ }
+}
diff --git a/caddytest/integration/caddyfile_adapt/import_blocks_snippet.caddyfiletest b/caddytest/integration/caddyfile_adapt/import_blocks_snippet.caddyfiletest
new file mode 100644
index 000000000..4098f90b2
--- /dev/null
+++ b/caddytest/integration/caddyfile_adapt/import_blocks_snippet.caddyfiletest
@@ -0,0 +1,76 @@
+(snippet) {
+ header {
+ {blocks.foo}
+ }
+ header {
+ {blocks.bar}
+ }
+}
+
+example.com {
+ import snippet {
+ foo {
+ foo a
+ }
+ bar {
+ bar b
+ }
+ }
+}
+----------
+{
+ "apps": {
+ "http": {
+ "servers": {
+ "srv0": {
+ "listen": [
+ ":443"
+ ],
+ "routes": [
+ {
+ "match": [
+ {
+ "host": [
+ "example.com"
+ ]
+ }
+ ],
+ "handle": [
+ {
+ "handler": "subroute",
+ "routes": [
+ {
+ "handle": [
+ {
+ "handler": "headers",
+ "response": {
+ "set": {
+ "Foo": [
+ "a"
+ ]
+ }
+ }
+ },
+ {
+ "handler": "headers",
+ "response": {
+ "set": {
+ "Bar": [
+ "b"
+ ]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "terminal": true
+ }
+ ]
+ }
+ }
+ }
+ }
+}
diff --git a/caddytest/integration/caddyfile_adapt/import_blocks_snippet_nested.caddyfiletest b/caddytest/integration/caddyfile_adapt/import_blocks_snippet_nested.caddyfiletest
new file mode 100644
index 000000000..ac1c5226c
--- /dev/null
+++ b/caddytest/integration/caddyfile_adapt/import_blocks_snippet_nested.caddyfiletest
@@ -0,0 +1,82 @@
+(snippet) {
+ header {
+ {blocks.bar}
+ }
+ import sub_snippet {
+ bar {
+ {blocks.foo}
+ }
+ }
+}
+(sub_snippet) {
+ header {
+ {blocks.bar}
+ }
+}
+example.com {
+ import snippet {
+ foo {
+ foo a
+ }
+ bar {
+ bar b
+ }
+ }
+}
+----------
+{
+ "apps": {
+ "http": {
+ "servers": {
+ "srv0": {
+ "listen": [
+ ":443"
+ ],
+ "routes": [
+ {
+ "match": [
+ {
+ "host": [
+ "example.com"
+ ]
+ }
+ ],
+ "handle": [
+ {
+ "handler": "subroute",
+ "routes": [
+ {
+ "handle": [
+ {
+ "handler": "headers",
+ "response": {
+ "set": {
+ "Bar": [
+ "b"
+ ]
+ }
+ }
+ },
+ {
+ "handler": "headers",
+ "response": {
+ "set": {
+ "Foo": [
+ "a"
+ ]
+ }
+ }
+ }
+ ]
+ }
+ ]
+ }
+ ],
+ "terminal": true
+ }
+ ]
+ }
+ }
+ }
+ }
+}
diff --git a/modules/caddyhttp/matchers.go b/modules/caddyhttp/matchers.go
index b1da14686..392312b6c 100644
--- a/modules/caddyhttp/matchers.go
+++ b/modules/caddyhttp/matchers.go
@@ -34,6 +34,7 @@ import (
"github.com/google/cel-go/cel"
"github.com/google/cel-go/common/types"
"github.com/google/cel-go/common/types/ref"
+ "golang.org/x/net/idna"
"github.com/caddyserver/caddy/v2"
"github.com/caddyserver/caddy/v2/caddyconfig/caddyfile"
@@ -239,13 +240,20 @@ func (m *MatchHost) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
func (m MatchHost) Provision(_ caddy.Context) error {
// check for duplicates; they are nonsensical and reduce efficiency
// (we could just remove them, but the user should know their config is erroneous)
- seen := make(map[string]int)
- for i, h := range m {
- h = strings.ToLower(h)
- if firstI, ok := seen[h]; ok {
- return fmt.Errorf("host at index %d is repeated at index %d: %s", firstI, i, h)
+ seen := make(map[string]int, len(m))
+ for i, host := range m {
+ asciiHost, err := idna.ToASCII(host)
+ if err != nil {
+ return fmt.Errorf("converting hostname '%s' to ASCII: %v", host, err)
+ }
+ if asciiHost != host {
+ m[i] = asciiHost
+ }
+ normalizedHost := strings.ToLower(asciiHost)
+ if firstI, ok := seen[normalizedHost]; ok {
+ return fmt.Errorf("host at index %d is repeated at index %d: %s", firstI, i, host)
}
- seen[h] = i
+ seen[normalizedHost] = i
}
if m.large() {
diff --git a/modules/caddyhttp/matchers_test.go b/modules/caddyhttp/matchers_test.go
index 5f76a36b1..05eaade5b 100644
--- a/modules/caddyhttp/matchers_test.go
+++ b/modules/caddyhttp/matchers_test.go
@@ -79,6 +79,11 @@ func TestHostMatcher(t *testing.T) {
expect: false,
},
{
+ match: MatchHost{"éxàmplê.com"},
+ input: "xn--xmpl-0na6cm.com",
+ expect: true,
+ },
+ {
match: MatchHost{"*.example.com"},
input: "example.com",
expect: false,
@@ -149,6 +154,10 @@ func TestHostMatcher(t *testing.T) {
ctx := context.WithValue(req.Context(), caddy.ReplacerCtxKey, repl)
req = req.WithContext(ctx)
+ if err := tc.match.Provision(caddy.Context{}); err != nil {
+ t.Errorf("Test %d %v: provisioning failed: %v", i, tc.match, err)
+ }
+
actual := tc.match.Match(req)
if actual != tc.expect {
t.Errorf("Test %d %v: Expected %t, got %t for '%s'", i, tc.match, tc.expect, actual, tc.input)
diff --git a/modules/caddyhttp/reverseproxy/selectionpolicies.go b/modules/caddyhttp/reverseproxy/selectionpolicies.go
index e61b3e0f4..293ff75e2 100644
--- a/modules/caddyhttp/reverseproxy/selectionpolicies.go
+++ b/modules/caddyhttp/reverseproxy/selectionpolicies.go
@@ -26,6 +26,7 @@ import (
"strconv"
"strings"
"sync/atomic"
+ "time"
"github.com/cespare/xxhash/v2"
@@ -613,6 +614,8 @@ type CookieHashSelection struct {
Name string `json:"name,omitempty"`
// Secret to hash (Hmac256) chosen upstream in cookie
Secret string `json:"secret,omitempty"`
+ // The cookie's Max-Age before it expires. Default is no expiry.
+ MaxAge caddy.Duration `json:"max_age,omitempty"`
// The fallback policy to use if the cookie is not present. Defaults to `random`.
FallbackRaw json.RawMessage `json:"fallback,omitempty" caddy:"namespace=http.reverse_proxy.selection_policies inline_key=policy"`
@@ -671,6 +674,9 @@ func (s CookieHashSelection) Select(pool UpstreamPool, req *http.Request, w http
cookie.Secure = true
cookie.SameSite = http.SameSiteNoneMode
}
+ if s.MaxAge > 0 {
+ cookie.MaxAge = int(time.Duration(s.MaxAge).Seconds())
+ }
http.SetCookie(w, cookie)
return upstream
}
@@ -699,6 +705,7 @@ func (s CookieHashSelection) Select(pool UpstreamPool, req *http.Request, w http
//
// lb_policy cookie [<name> [<secret>]] {
// fallback <policy>
+// max_age <duration>
// }
//
// By default name is `lb`
@@ -728,6 +735,24 @@ func (s *CookieHashSelection) UnmarshalCaddyfile(d *caddyfile.Dispenser) error {
return err
}
s.FallbackRaw = mod
+ case "max_age":
+ if !d.NextArg() {
+ return d.ArgErr()
+ }
+ if s.MaxAge != 0 {
+ return d.Err("cookie max_age already specified")
+ }
+ maxAge, err := caddy.ParseDuration(d.Val())
+ if err != nil {
+ return d.Errf("invalid duration: %s", d.Val())
+ }
+ if maxAge <= 0 {
+ return d.Errf("invalid duration: %s, max_age should be non-zero and positive", d.Val())
+ }
+ if d.NextArg() {
+ return d.ArgErr()
+ }
+ s.MaxAge = caddy.Duration(maxAge)
default:
return d.Errf("unrecognized option '%s'", d.Val())
}