Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 36 additions & 4 deletions caddyconfig/caddyfile/parse.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,14 @@
"bytes"
"fmt"
"io"
"maps"
"os"
"path/filepath"
"slices"

Check failure on line 24 in caddyconfig/caddyfile/parse.go

View workflow job for this annotation

GitHub Actions / lint (mac)

"slices" imported and not used (typecheck)

Check failure on line 24 in caddyconfig/caddyfile/parse.go

View workflow job for this annotation

GitHub Actions / govulncheck

"slices" imported and not used
"strings"

"go.uber.org/zap"

"github.com/caddyserver/caddy/v2"
"go.uber.org/zap"
)

// Parse parses the input just enough to group tokens, in
Expand Down Expand Up @@ -170,11 +171,24 @@

// get all the tokens from the block, including the braces
tokens, err := p.blockTokens(true)
tokens = append([]Token{nameToken}, tokens...)
if err != nil {
return err
}
tokens = append([]Token{nameToken}, tokens...)
p.block.Segments = []Segment{tokens}

// Parse the whole block, to evaluate all imports and
// other potential parse-time constructs at parse time,
// then use result as the block definition
routeParser := p.childParser(tokens)
if err := routeParser.parseOne(); err != nil {
return err
}

// Copy parsed segment tokens back along with any changes
// to the import graph, in case it was modified.
p.block.Segments = []Segment{routeParser.tokens}
p.importGraph = routeParser.importGraph

return nil
}

Expand Down Expand Up @@ -716,6 +730,24 @@
return false, ""
}

// childParser returns a new parser over the given tokens, seeded with
// copies of this parser's context, snippet definitions, and import graph
// so the child can evaluate imports without mutating the parent's maps.
// The caller may copy the child's import graph back if it wants to keep
// any additions the child made. Nesting depth carries over unchanged.
func (p *parser) childParser(tokens []Token) parser {
	return parser{
		Dispenser:       &Dispenser{tokens: tokens, context: maps.Clone(p.context)},
		definedSnippets: maps.Clone(p.definedSnippets),
		importGraph: importGraph{
			nodes: maps.Clone(p.importGraph.nodes),
			edges: maps.Clone(p.importGraph.edges),
		},
		nesting: p.Nesting(),
	}
}

// read and store everything in a block for later replay.
func (p *parser) blockTokens(retainCurlies bool) ([]Token, error) {
// block must have curlies.
Expand Down
163 changes: 163 additions & 0 deletions caddyconfig/caddyfile/parse_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,12 @@ import (
"bytes"
"os"
"path/filepath"
"regexp"
"slices"
"strings"
"testing"

"github.com/stretchr/testify/assert"
)

func TestParseVariadic(t *testing.T) {
Expand Down Expand Up @@ -910,6 +914,129 @@ func TestRejectAnonymousImportBlock(t *testing.T) {
}
}

// TestAcceptImportWithinInvoke verifies that an "import" directive inside a
// named route block (&(named)) is expanded at parse time: the named block
// survives parsing alongside the invoking site, keeps its braces, and its
// tokens mirror the snippet's tokens with placeholders such as {args[:]}
// substituted by the import arguments.
func TestAcceptImportWithinInvoke(t *testing.T) {
	p := testParser(`
(proxy) {
reverse_proxy {args[:]}
}

&(named) {
import proxy 192.168.1.1:80
}

site {
invoke named
}
`)

	blocks, err := p.parseAll()
	if err != nil {
		// Fatal, not Errorf: everything below dereferences the parse result.
		t.Fatalf("Expected error to be nil but got '%v'", err)
	}

	keys := make([]string, 0)
	var namedBlock TestServerBlock

	for _, block := range blocks {
		blockKeys := block.GetKeysText()

		if slices.Contains(blockKeys, "named") {
			namedBlock = testServerBlock(block)
		}

		keys = slices.Concat(keys, blockKeys)
	}

	// testify convention: expected value first, actual second.
	assert.Equal(t, []string{"named", "site"}, keys)
	assert.True(t, namedBlock.HasBraces)

	snippet := p.definedSnippets["proxy"]
	blockTokens := namedBlock.InnerTokens
	assert.Equalf(
		t, len(snippet), len(blockTokens),
		"Token mismatch, snippet has %d tokens while the named route ends up with %d",
		len(snippet), len(blockTokens),
	)

	// Matches any unexpanded placeholder token, e.g. {args[:]}.
	placeholderRegexp := regexp.MustCompile(`\{.+}`)
	for idx, tok := range blockTokens {
		assert.Equal(t, "proxy", tok.snippetName)

		isPlaceholder := placeholderRegexp.MatchString(snippet[idx].Text)
		if !isPlaceholder {
			assert.Equal(t, snippet[idx].Text, tok.Text)
		} else {
			assert.NotRegexpf(
				t, placeholderRegexp, tok.Text,
				"Imported tokens still include a placeholder: %s", tok.Text,
			)
		}
	}
}

// TestComplexImportInvokeConfig exercises nested snippet imports inside a
// named route: a snippet importing another snippet, plus a second import of
// the innermost snippet from a sibling sub-block. After parsing, only the
// named route should remain, with every import fully expanded.
func TestComplexImportInvokeConfig(t *testing.T) {
	p := testParser(`
(nesting_further) {
do something
}

(nesting) {
directive again with more {
interesting = content
import nesting_further
}
}

(proxy) {
reverse_proxy {args[:]}
import nesting
}

&(named) {
import proxy 192.168.1.1:80

handle_error {
import nesting_further
respond 404
}
}
`)

	blocks, err := p.parseAll()
	if err != nil {
		// Fatal, not Errorf: blocks[0] below would panic on a failed parse.
		t.Fatalf("Expected error to be nil but got '%v'", err)
	}

	assert.Len(t, blocks, 1, "Expected only the named route to be in blocks")
	// testify convention: expected value first, actual second.
	assert.Equalf(
		t, []string{"named"}, blocks[0].GetKeysText(),
		"Block in result is not the named route, expected name 'named' got: %s",
		strings.Join(blocks[0].GetKeysText(), ", "),
	)

	namedBlock := testServerBlock(blocks[0])
	blockText := stringifyTokens(namedBlock.InnerTokens)
	deeplyNestedImport := stringifyTokens(p.definedSnippets["nesting_further"])
	nestedImport := slices.Concat(
		[]string{"directive", "again", "with", "more", "{", "interesting", "=", "content"},
		deeplyNestedImport,
		[]string{"}"},
	)
	proxyImport := slices.Concat(
		[]string{"reverse_proxy", "192.168.1.1:80"},
		nestedImport,
	)

	expectedText := slices.Concat(
		proxyImport,
		[]string{"handle_error", "{"},
		deeplyNestedImport,
		[]string{"respond", "404", "}"},
	)

	// NOTE(review): ElementsMatch ignores ordering; if token order must be
	// asserted, switch to assert.Equal once ordering is guaranteed.
	assert.ElementsMatch(t, expectedText, blockText)
}

func TestAcceptSiteImportWithBraces(t *testing.T) {
p := testParser(`
(site) {
Expand All @@ -933,3 +1060,39 @@ func TestAcceptSiteImportWithBraces(t *testing.T) {
// testParser builds a parser over a test dispenser for the given
// Caddyfile input.
func testParser(input string) parser {
	dispenser := NewTestDispenser(input)
	return parser{Dispenser: dispenser}
}

// TestServerBlock wraps a ServerBlock for test assertions, additionally
// exposing the block's body tokens with the keys and braces stripped.
type TestServerBlock struct {
	ServerBlock

	// InnerTokens holds the block's tokens excluding the block keys and
	// the opening/closing braces (see testServerBlock).
	InnerTokens []Token
}

// stringifyTokens returns the Text of each token, in order.
func stringifyTokens(tokens []Token) []string {
	// A presized loop is simpler and cheaper than building a push
	// iterator just to feed slices.Collect.
	texts := make([]string, 0, len(tokens))
	for _, tok := range tokens {
		texts = append(texts, tok.Text)
	}
	return texts
}

// testServerBlock flattens a ServerBlock's segments into one token slice
// and wraps the block in a TestServerBlock whose InnerTokens exclude the
// block's keys and its opening/closing braces.
//
// NOTE(review): assumes the block was parsed with braces retained; a
// braceless block would slice incorrectly here — fine for these tests.
func testServerBlock(block ServerBlock) TestServerBlock {
	// Plain append flattening replaces the hand-written push iterator
	// fed to slices.Collect — same result, simpler.
	var innerTokens []Token
	for _, segment := range block.Segments {
		innerTokens = append(innerTokens, segment...)
	}

	keyLength := len(block.Keys)
	return TestServerBlock{
		ServerBlock: block,
		// Exclude keys, opening brace and closing brace
		InnerTokens: innerTokens[keyLength+1 : len(innerTokens)-1],
	}
}
Loading
Loading