Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Port in expiration parsing into composable schema DSL #2239

Merged
merged 1 commit into from
Feb 8, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 19 additions & 0 deletions pkg/composableschemadsl/dslshape/dslshape.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ const (
NodeTypeError NodeType = iota // error occurred; value is text of error
NodeTypeFile // The file root node
NodeTypeComment // A single or multiline comment
NodeTypeUseFlag // A use flag

NodeTypeDefinition // A definition.
NodeTypeCaveatDefinition // A caveat definition.
Expand All @@ -24,6 +25,7 @@ const (
NodeTypeTypeReference // A type reference
NodeTypeSpecificTypeReference // A reference to a specific type.
NodeTypeCaveatReference // A caveat reference under a type.
NodeTypeTraitReference // A trait reference under a type.

NodeTypeUnionExpression
NodeTypeIntersectExpression
Expand Down Expand Up @@ -75,6 +77,13 @@ const (
// The value of the comment, including its delimiter(s)
NodeCommentPredicateValue = "comment-value"

//
// NodeTypeUseFlag
//

// The name of the use flag.
NodeUseFlagPredicateName = "use-flag-name"

//
// NodeTypeDefinition
//
Expand Down Expand Up @@ -159,13 +168,23 @@ const (
// A caveat under a type reference.
NodeSpecificReferencePredicateCaveat = "caveat"

// A trait under a type reference.
NodeSpecificReferencePredicateTrait = "trait"

//
// NodeTypeCaveatReference
//

// The caveat name under the caveat.
NodeCaveatPredicateCaveat = "caveat-name"

//
// NodeTypeTraitReference
//

// The trait name under the trait.
NodeTraitPredicateTrait = "trait-name"

//
// NodeTypePermission
//
Expand Down
44 changes: 23 additions & 21 deletions pkg/composableschemadsl/dslshape/zz_generated.nodetype_string.go

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

59 changes: 59 additions & 0 deletions pkg/composableschemadsl/lexer/flaggablelexer.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,59 @@
package lexer

// FlaggableLexler wraps a lexer, automatically translating tokens based on flags, if any.
// Flags are enabled by top-of-file `use someflag` directives; once enabled, each flag's
// transformer is applied to every subsequent token produced by the underlying lexer.
type FlaggableLexler struct {
	lex                *Lexer                 // a reference to the lexer used for tokenization
	enabledFlags       map[string]transformer // flags that are enabled, keyed by flag name
	seenDefinition     bool                   // whether a `definition` or `caveat` keyword has been produced yet; `use` is only honored before then
	afterUseIdentifier bool                   // whether the immediately preceding token was the `use` identifier
}

// NewFlaggableLexler returns a new FlaggableLexler for the given lexer.
func NewFlaggableLexler(lex *Lexer) *FlaggableLexler {
	wrapped := &FlaggableLexler{
		lex:          lex,
		enabledFlags: make(map[string]transformer),
	}
	return wrapped
}

// Close stops the lexer from running. It delegates to the wrapped lexer;
// no further tokens should be requested after Close is called.
func (l *FlaggableLexler) Close() {
	l.lex.Close()
}

// NextToken returns the next token found in the lexer, first recording any
// `use somefeature` directive and then applying the transformers of all
// enabled flags to the token.
func (l *FlaggableLexler) NextToken() Lexeme {
	token := l.lex.nextToken()

	// Look for `use somefeature`: a `use` identifier followed by a known flag
	// name. Directives are only honored before the first definition of any kind.
	if token.Kind == TokenTypeIdentifier && !l.seenDefinition {
		if l.afterUseIdentifier {
			if tf, ok := Flags[token.Value]; ok {
				l.enabledFlags[token.Value] = tf
			}
			l.afterUseIdentifier = false
		} else {
			l.afterUseIdentifier = token.Value == "use"
		}
	}

	// Once a definition or caveat begins, `use` directives are no longer honored.
	if token.Kind == TokenTypeKeyword && (token.Value == "definition" || token.Value == "caveat") {
		l.seenDefinition = true
	}

	// Apply enabled-flag transformers; the first one to claim the token wins.
	// NOTE(review): map iteration order is random — fine while transformers
	// are disjoint, but worth confirming if flags ever overlap.
	for _, transform := range l.enabledFlags {
		if updated, ok := transform(token); ok {
			return updated
		}
	}

	return token
}
74 changes: 74 additions & 0 deletions pkg/composableschemadsl/lexer/flaggablelexer_test.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
package lexer

import (
"slices"
"testing"

"github.com/authzed/spicedb/pkg/composableschemadsl/input"
)

var flaggableLexerTests = []lexerTest{
{"use expiration", "use expiration", []Lexeme{
{TokenTypeKeyword, 0, "use", ""},
{TokenTypeWhitespace, 0, " ", ""},
{TokenTypeKeyword, 0, "expiration", ""},
tEOF,
}},
{"use expiration and", "use expiration and", []Lexeme{
{TokenTypeKeyword, 0, "use", ""},
{TokenTypeWhitespace, 0, " ", ""},
{TokenTypeKeyword, 0, "expiration", ""},
{TokenTypeWhitespace, 0, " ", ""},
{TokenTypeKeyword, 0, "and", ""},
tEOF,
}},
{"expiration as non-keyword", "foo expiration", []Lexeme{
{TokenTypeIdentifier, 0, "foo", ""},
{TokenTypeWhitespace, 0, " ", ""},
{TokenTypeKeyword, 0, "expiration", ""},
tEOF,
}},
{"and as non-keyword", "foo and", []Lexeme{
{TokenTypeIdentifier, 0, "foo", ""},
{TokenTypeWhitespace, 0, " ", ""},
{TokenTypeKeyword, 0, "and", ""},
tEOF,
}},
{"invalid use flag", "use foobar", []Lexeme{
{TokenTypeKeyword, 0, "use", ""},
{TokenTypeWhitespace, 0, " ", ""},
{TokenTypeIdentifier, 0, "foobar", ""},
tEOF,
}},
{"use flag after definition", "definition use expiration", []Lexeme{
{TokenTypeKeyword, 0, "definition", ""},
{TokenTypeWhitespace, 0, " ", ""},
{TokenTypeKeyword, 0, "use", ""},
{TokenTypeWhitespace, 0, " ", ""},
{TokenTypeKeyword, 0, "expiration", ""},
Comment on lines +11 to +48
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Is it worth reworking these tests? use and expiration are now keywords, so the semantics of the tests don't completely scan.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, I think so; we should add them in as formal keywords in the lexer tests

tEOF,
}},
}

// TestFlaggableLexer runs both the base lexer tests and the flag-specific
// tests through the FlaggableLexler wrapper.
func TestFlaggableLexer(t *testing.T) {
	allTests := append(slices.Clone(lexerTests), flaggableLexerTests...)
	for _, tc := range allTests {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			actual := performFlaggedLex(&tc)
			if !equal(actual, tc.tokens) {
				t.Errorf("%s: got\n\t%+v\nexpected\n\t%v", tc.name, actual, tc.tokens)
			}
		})
	}
}

// performFlaggedLex tokenizes the test's input through a FlaggableLexler,
// collecting every lexeme up to and including the EOF or error token.
func performFlaggedLex(t *lexerTest) []Lexeme {
	var collected []Lexeme
	lexer := NewFlaggableLexler(Lex(input.Source(t.name), t.input))
	for {
		tok := lexer.NextToken()
		collected = append(collected, tok)
		if tok.Kind == TokenTypeEOF || tok.Kind == TokenTypeError {
			return collected
		}
	}
}
26 changes: 26 additions & 0 deletions pkg/composableschemadsl/lexer/flags.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
package lexer

// FlagExpiration indicates that `expiration` is supported as a first-class
// feature in the schema.
const FlagExpiration = "expiration"

// transformer rewrites a single lexeme when its owning flag is enabled,
// returning the (possibly updated) lexeme and whether a rewrite occurred.
type transformer func(lexeme Lexeme) (Lexeme, bool)

// Flags is a map of flag names to their corresponding transformers.
var Flags = map[string]transformer{
	FlagExpiration: func(lexeme Lexeme) (Lexeme, bool) {
		// Only identifiers are ever promoted; everything else passes through.
		if lexeme.Kind != TokenTypeIdentifier {
			return lexeme, false
		}

		switch lexeme.Value {
		case "expiration", "and":
			// Under the expiration flag, `expiration` and `and` become keywords.
			lexeme.Kind = TokenTypeKeyword
			return lexeme, true
		}

		return lexeme, false
	},
}
12 changes: 12 additions & 0 deletions pkg/composableschemadsl/lexer/lex_def.go
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,18 @@ var keywords = map[string]struct{}{
"all": {},
"any": {},
"partial": {},
"use": {},
"expiration": {},
// Parking lot for future keywords
"and": {},
"or": {},
"not": {},
"under": {},
"static": {},
"if": {},
"where": {},
"private": {},
"public": {},
Comment on lines +87 to +96
Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is one change that wasn't a direct copy.

}

// IsKeyword returns whether the specified input string is a reserved keyword.
Expand Down
12 changes: 12 additions & 0 deletions pkg/composableschemadsl/lexer/lex_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,18 @@ var lexerTests = []lexerTest{
{"keyword", "all", []Lexeme{{TokenTypeKeyword, 0, "all", ""}, tEOF}},
{"keyword", "nil", []Lexeme{{TokenTypeKeyword, 0, "nil", ""}, tEOF}},
{"keyword", "partial", []Lexeme{{TokenTypeKeyword, 0, "partial", ""}, tEOF}},
{"keyword", "use", []Lexeme{{TokenTypeKeyword, 0, "use", ""}, tEOF}},
{"keyword", "expiration", []Lexeme{{TokenTypeKeyword, 0, "expiration", ""}, tEOF}},
{"keyword", "and", []Lexeme{{TokenTypeKeyword, 0, "and", ""}, tEOF}},
{"keyword", "or", []Lexeme{{TokenTypeKeyword, 0, "or", ""}, tEOF}},
{"keyword", "not", []Lexeme{{TokenTypeKeyword, 0, "not", ""}, tEOF}},
{"keyword", "under", []Lexeme{{TokenTypeKeyword, 0, "under", ""}, tEOF}},
{"keyword", "static", []Lexeme{{TokenTypeKeyword, 0, "static", ""}, tEOF}},
{"keyword", "if", []Lexeme{{TokenTypeKeyword, 0, "if", ""}, tEOF}},
{"keyword", "where", []Lexeme{{TokenTypeKeyword, 0, "where", ""}, tEOF}},
{"keyword", "private", []Lexeme{{TokenTypeKeyword, 0, "private", ""}, tEOF}},
{"keyword", "public", []Lexeme{{TokenTypeKeyword, 0, "public", ""}, tEOF}},

{"identifier", "define", []Lexeme{{TokenTypeIdentifier, 0, "define", ""}, tEOF}},
{"typepath", "foo/bar", []Lexeme{
{TokenTypeIdentifier, 0, "foo", ""},
Expand Down
35 changes: 0 additions & 35 deletions pkg/composableschemadsl/lexer/peekable_lex.go

This file was deleted.

Loading
Loading