diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index c053a2a7..5d91a5c6 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -24,6 +24,9 @@ jobs: - name: Lint uses: golangci/golangci-lint-action@v7 + - name: Sum type linter + run: go install github.com/alecthomas/go-check-sumtype/cmd/go-check-sumtype@latest && go-check-sumtype -default-signifies-exhaustive=false ./... + - name: Fuzz run: mkdir -p testdata && go test -fuzz=FuzzParse -fuzztime 60s && go test -fuzz=FuzzTokenize -fuzztime 60s diff --git a/.golangci.yml b/.golangci.yml index 7886c7c6..e3400d64 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -10,7 +10,6 @@ linters: enable: - errcheck - errname - - gochecksumtype - govet - ineffassign - revive diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..5b3ebdfd --- /dev/null +++ b/Makefile @@ -0,0 +1,14 @@ +corpus-tests-json-schemas.tar.gz: corpus-tests.tar.gz + @echo "Generating JSON schemas from Cedar schemas..." + @rm -rf /tmp/corpus-tests /tmp/corpus-tests-json-schemas + @mkdir -p /tmp/corpus-tests-json-schemas + @tar -xzf corpus-tests.tar.gz -C /tmp/ + @for schema in /tmp/corpus-tests/*.cedarschema; do \ + basename=$$(basename $$schema .cedarschema); \ + echo "Converting $$basename.cedarschema..."; \ + cedar translate-schema --direction cedar-to-json --schema "$$schema" > "/tmp/corpus-tests-json-schemas/$$basename.cedarschema.json" 2>&1; \ + done + @cd /tmp && tar -czf corpus-tests-json-schemas.tar.gz corpus-tests-json-schemas/ + @mv /tmp/corpus-tests-json-schemas.tar.gz . + @rm -rf /tmp/corpus-tests /tmp/corpus-tests-json-schemas + @echo "Done! 
Created corpus-tests-json-schemas.tar.gz" diff --git a/README.md b/README.md index 47b6596e..af6e13b6 100644 --- a/README.md +++ b/README.md @@ -36,8 +36,8 @@ The Go implementation includes: - JSON marshalling and unmarshalling - all core and extended types (including [RFC 80](https://github.com/cedar-policy/rfcs/blob/main/text/0080-datetime-extension.md)'s datetime and duration) - integration test suite -- human-readable schema parsing - +- schema parsing and programmatic construction + The Go implementation does not yet include: - CLI applications diff --git a/corpus-tests-json-schemas.tar.gz b/corpus-tests-json-schemas.tar.gz new file mode 100644 index 00000000..621b65c4 Binary files /dev/null and b/corpus-tests-json-schemas.tar.gz differ diff --git a/corpus_test.go b/corpus_test.go index 5502f307..719f02ac 100644 --- a/corpus_test.go +++ b/corpus_test.go @@ -56,6 +56,9 @@ type corpusTest struct { //go:embed corpus-tests.tar.gz var corpusArchive []byte +//go:embed corpus-tests-json-schemas.tar.gz +var corpusJSONSchemasArchive []byte + type tarFileDataPointer struct { Position int64 Size int64 @@ -91,26 +94,24 @@ func (fdm TarFileMap) GetFileData(path string) ([]byte, error) { return content, nil } -//nolint:revive // due to test cognitive complexity -func TestCorpus(t *testing.T) { - t.Parallel() +func loadTarGz(t testing.TB, archive []byte) TarFileMap { + t.Helper() - gzipReader, err := gzip.NewReader(bytes.NewReader(corpusArchive)) + gzipReader, err := gzip.NewReader(bytes.NewReader(archive)) if err != nil { - t.Fatal("error reading corpus compressed archive header", err) + t.Fatal("error reading compressed archive header", err) } defer gzipReader.Close() //nolint:errcheck buf, err := io.ReadAll(gzipReader) if err != nil { - t.Fatal("error reading corpus compressed archive", err) + t.Fatal("error reading compressed archive", err) } bufReader := bytes.NewReader(buf) archiveReader := tar.NewReader(bufReader) fdm := NewTarFileMap(bufReader) - var testFiles 
[]string for file, err := archiveReader.Next(); err == nil; file, err = archiveReader.Next() { if file.Typeflag != tar.TypeReg { continue @@ -118,12 +119,27 @@ func TestCorpus(t *testing.T) { cursor, _ := bufReader.Seek(0, io.SeekCurrent) fdm.AddFileData(file.Name, cursor, file.Size) + } + + return fdm +} - if strings.HasSuffix(file.Name, ".json") && !strings.HasSuffix(file.Name, ".entities.json") { - testFiles = append(testFiles, file.Name) +//nolint:revive // due to test cognitive complexity +func TestCorpus(t *testing.T) { + t.Parallel() + + // Load corpus test files + fdm := loadTarGz(t, corpusArchive) + var testFiles []string + for fileName := range fdm.files { + if strings.HasSuffix(fileName, ".json") && !strings.HasSuffix(fileName, ".entities.json") { + testFiles = append(testFiles, fileName) } } + // Load JSON schemas for validation + jsonSchemasFdm := loadTarGz(t, corpusJSONSchemasArchive) + for _, testFile := range testFiles { testFile := testFile t.Run(testFile, func(t *testing.T) { @@ -152,12 +168,70 @@ func TestCorpus(t *testing.T) { if err != nil { t.Fatal("error reading schema content", err) } + // Rust converted JSON never contains the empty context record + schemaContent = bytes.ReplaceAll(schemaContent, []byte("context: {}\n"), nil) + var s schema.Schema s.SetFilename("test.schema") if err := s.UnmarshalCedar(schemaContent); err != nil { t.Fatal("error parsing schema", err, "\n===\n", string(schemaContent)) } + // Validate schema round-trip + t.Run("schema-round-trip", func(t *testing.T) { + t.Parallel() + + js, err := s.MarshalJSON() + testutil.OK(t, err) + + var s2 schema.Schema + err = s2.UnmarshalJSON(js) + testutil.OK(t, err) + + sb, err := s2.MarshalCedar() + testutil.OK(t, err) + + var s3 schema.Schema + err = s3.UnmarshalCedar(sb) + testutil.OK(t, err) + + j2, err := s3.MarshalJSON() + testutil.OK(t, err) + + testutil.Equals(t, string(j2), string(js)) + }) + + // Validate schema matches Rust Cedar CLI output + t.Run("schema-vs-rust", 
func(t *testing.T) { + t.Parallel() + + // Extract schema filename from path (e.g., "corpus-tests/abc123.cedarschema" -> "abc123") + schemaFilename := strings.TrimSuffix(strings.TrimPrefix(tt.Schema, "corpus-tests/"), ".cedarschema") + jsonSchemaPath := fmt.Sprintf("corpus-tests-json-schemas/%s.cedarschema.json", schemaFilename) + + rustJSON, err := jsonSchemasFdm.GetFileData(jsonSchemaPath) + testutil.OK(t, err) + + // Normalize Rust JSON: appliesTo is optional - match testdata_test.go pattern + // Need to handle trailing comma to avoid creating invalid JSON like {,"other":...} + rustJSON = bytes.ReplaceAll(rustJSON, []byte(`"appliesTo":{"resourceTypes":[],"principalTypes":[]},`), nil) + rustJSON = bytes.ReplaceAll(rustJSON, []byte(`"appliesTo":{"resourceTypes":[],"principalTypes":[]}`), nil) + + // Unmarshal Rust JSON to handle any syntax issues from replacement + var rustSchema schema.Schema + err = rustSchema.UnmarshalJSON(rustJSON) + testutil.OK(t, err) + + // Marshal both schemas to JSON for comparison + goJSON, err := s.MarshalJSON() + testutil.OK(t, err) + rustJSON2, err := rustSchema.MarshalJSON() + testutil.OK(t, err) + + // Normalize and compare + stringEquals(t, string(normalizeJSON(t, goJSON)), string(normalizeJSON(t, rustJSON2))) + }) + policyContent, err := fdm.GetFileData(tt.Policies) if err != nil { t.Fatal("error reading policy content", err) @@ -250,6 +324,21 @@ func TestCorpus(t *testing.T) { } } +func normalizeJSON(t *testing.T, in []byte) []byte { + t.Helper() + var out any + err := json.Unmarshal(in, &out) + testutil.OK(t, err) + b, err := json.MarshalIndent(out, "", " ") + testutil.OK(t, err) + return b +} + +func stringEquals(t *testing.T, got, want string) { + t.Helper() + testutil.Equals(t, strings.TrimSpace(got), strings.TrimSpace(want)) +} + // Specific corpus tests that have been extracted for easy regression testing purposes func TestCorpusRelated(t *testing.T) { t.Parallel() diff --git a/internal/schema/ast/ast.go 
b/internal/schema/ast/ast.go deleted file mode 100644 index caa85705..00000000 --- a/internal/schema/ast/ast.go +++ /dev/null @@ -1,477 +0,0 @@ -package ast - -import ( - "strings" - - "github.com/cedar-policy/cedar-go/internal/schema/token" -) - -// Human readable syntax tree for Cedar schema files instead of JSON. -// The human readable format is defined here: https://docs.cedarpolicy.com/schema/human-readable-schema-grammar.html - -// Schema := {Namespace} -// Namespace := ('namespace' Path '{' {Decl} '}') | Decl -// Decl := Entity | Action | TypeDecl -// Entity := 'entity' Idents ['in' EntOrTyps] [['='] RecType] ['tags' Type] ';' -// Action := 'action' Names ['in' RefOrRefs] [AppliesTo]';' -// TypeDecl := 'type' TYPENAME '=' Type ';' -// Type := Path | SetType | RecType -// EntType := Path -// SetType := 'Set' '<' Type '>' -// RecType := '{' [AttrDecls] '}' -// AttrDecls := Name ['?'] ':' Type [',' | ',' AttrDecls] -// AppliesTo := 'appliesTo' '{' AppDecls '}' -// AppDecls := ('principal' | 'resource') ':' EntOrTyps [',' | ',' AppDecls] -// | 'context' ':' (Path | RecType) [',' | ',' AppDecls] -// Path := IDENT {'::' IDENT} -// Ref := Path '::' STR | Name -// RefOrRefs := Ref | '[' [RefOrRefs] ']' -// EntTypes := Path {',' Path} -// EntOrTyps := EntType | '[' [EntTypes] ']' -// Name := IDENT | STR -// Names := Name {',' Name} -// Idents := IDENT {',' IDENT} - -// IDENT := ['_''a'-'z''A'-'Z']['_''a'-'z''A'-'Z''0'-'9']* -// TYPENAME := IDENT - RESERVED -// STR := Fully-escaped Unicode surrounded by '"'s -// PRIMTYPE := 'Long' | 'String' | 'Bool' -// WHITESPC := Unicode whitespace -// COMMENT := '//' ~NEWLINE* NEWLINE -// RESERVED := 'Bool' | 'Boolean' | 'Entity' | 'Extension' | 'Long' | 'Record' | 'Set' | 'String' - -// The human readable format is not 1-1 convertible with JSON. The JSON format -// is lossy. It loses formatting, such as comments, ordering of fields, etc... 
- -type Node interface { - isNode() - // Pos returns first token of the node - Pos() token.Position - End() token.Position -} - -// no-op statements are included for code coverage instrumentation -func (*Schema) isNode() { _ = 0 } -func (*Namespace) isNode() { _ = 0 } -func (*CommonTypeDecl) isNode() { _ = 0 } -func (*RecordType) isNode() { _ = 0 } -func (*SetType) isNode() { _ = 0 } -func (*Path) isNode() { _ = 0 } -func (*Ident) isNode() { _ = 0 } -func (*Entity) isNode() { _ = 0 } -func (*Action) isNode() { _ = 0 } -func (*AppliesTo) isNode() { _ = 0 } -func (*Ref) isNode() { _ = 0 } -func (*Attribute) isNode() { _ = 0 } -func (*String) isNode() { _ = 0 } -func (CommentBlock) isNode() { _ = 0 } -func (*Comment) isNode() { _ = 0 } -func (*Annotation) isNode() { _ = 0 } - -type NodeComments struct { - Before CommentBlock // comments that precede the node on a separate line - Inline *Comment // inline, e.g. namespace name { - Footer *Comment // all trailing comments after closing brace -} - -type Schema struct { - Decls []Declaration // either namespace or declarations in global namespace - - Remaining CommentBlock // any comments after all the declarations -} - -func (s *Schema) Pos() token.Position { - if len(s.Decls) > 0 { - return s.Decls[0].Pos() - } - return token.Position{} -} - -func (s *Schema) End() token.Position { - if len(s.Remaining) > 0 { - return s.Remaining.End() - } - if len(s.Decls) > 0 { - return s.Decls[len(s.Decls)-1].End() - } - return token.Position{} -} - -type Declaration interface { - Node - isDecl() -} - -// no-op statements are included for code coverage instrumentation -func (*Entity) isDecl() { _ = 0 } -func (*Action) isDecl() { _ = 0 } -func (*Namespace) isDecl() { _ = 0 } -func (*CommonTypeDecl) isDecl() { _ = 0 } -func (*CommentBlock) isDecl() { _ = 0 } - -type Namespace struct { - Annotations []*Annotation - NodeComments - NamespaceTok token.Position - Name *Path - Decls []Declaration - Remaining CommentBlock - CloseBrace 
token.Position -} - -func (n *Namespace) Pos() token.Position { - if len(n.Annotations) > 0 { - return n.Annotations[0].Pos() - } - if len(n.Before) > 0 { - return n.Before.Pos() - } - return n.NamespaceTok -} - -func (n *Namespace) End() token.Position { - if n.Footer != nil { - return n.Footer.End() - } - return n.CloseBrace -} - -type CommonTypeDecl struct { - Annotations []*Annotation - NodeComments - TypeTok token.Position - Name *Ident - Value Type -} - -func (t *CommonTypeDecl) Pos() token.Position { - if len(t.Annotations) > 0 { - return t.Annotations[0].Pos() - } - if len(t.Before) > 0 { - return t.Before.Pos() - } - return t.TypeTok -} - -func (t *CommonTypeDecl) End() token.Position { - if t.Footer != nil { - return t.Footer.End() - } - return t.Value.End() -} - -// TypeValue is either: -// 1. A record type -// 2. A set type (Set) -// 3. A path (Namespace::EntityType or String) -type Type interface { - Node - isType() -} - -// no-op statements are included for code coverage instrumentation -func (*RecordType) isType() { _ = 0 } -func (*SetType) isType() { _ = 0 } -func (*Path) isType() { _ = 0 } - -type RecordType struct { - Inner *Comment // after initial '{' - LeftCurly token.Position - Attributes []*Attribute - RightCurly token.Position - Remaining CommentBlock // any comments after last attribute -} - -func (r *RecordType) Pos() token.Position { - return r.LeftCurly -} - -func (r *RecordType) End() token.Position { - return r.RightCurly -} - -type Attribute struct { - Annotations []*Annotation - NodeComments - Key Name - IsRequired bool // if true, has ? 
after name - Type Type - Comma token.Position -} - -func (a *Attribute) Pos() token.Position { - if a.Annotations != nil { - return a.Annotations[0].Pos() - } - if a.Before != nil { - return a.NodeComments.Before[0].SlashTok - } - return a.Key.Pos() -} - -func (a *Attribute) End() token.Position { - if a.Comma.Line != 0 { - return a.Comma - } - return a.Type.End() -} - -type SetType struct { - SetToken token.Position - Element Type - RightAngle token.Position -} - -func (s *SetType) Pos() token.Position { - return s.SetToken -} - -func (s *SetType) End() token.Position { - return s.RightAngle -} - -type Path struct { - Parts []*Ident -} - -func (p *Path) String() string { - parts := make([]string, len(p.Parts)) - for i, part := range p.Parts { - parts[i] = part.Value - } - return strings.Join(parts, "::") -} - -func (p *Path) Pos() token.Position { - if len(p.Parts) == 0 { - return token.Position{} - } - return p.Parts[0].IdentTok -} - -func (p *Path) End() token.Position { - if len(p.Parts) == 0 { - return token.Position{} - } - return p.Parts[len(p.Parts)-1].End() -} - -type Ident struct { - IdentTok token.Position - Value string -} - -func (i *Ident) Pos() token.Position { - return i.IdentTok -} - -func (i *Ident) End() token.Position { - after := i.IdentTok - after.Column += len(i.Value) - after.Offset += len(i.Value) - return after -} - -type Entity struct { - Annotations []*Annotation - NodeComments - EntityTok token.Position - Names []*Ident // define multiple entities with the same shape - - // Traditional entity definition - In []*Path // optional, if nil none given - EqTok token.Position // valid if = is present before shape - Shape *RecordType // nil if none given - Tags Type - - // Enumerated entity definition - Enum []*String - - Semicolon token.Position -} - -func (e *Entity) Pos() token.Position { - if len(e.Annotations) > 0 { - return e.Annotations[0].Pos() - } - if len(e.Before) > 0 { - return e.Before.Pos() - } - return e.EntityTok -} - -func (e 
*Entity) End() token.Position { - if e.Footer != nil { - return e.Footer.End() - } - return e.Semicolon -} - -type Action struct { - Annotations []*Annotation - NodeComments - ActionTok token.Position - Names []Name - In []*Ref // optional, if nil none given - AppliesTo *AppliesTo // optional, if nil none given - Semicolon token.Position -} - -func (a *Action) Pos() token.Position { - if len(a.Annotations) > 0 { - return a.Annotations[0].Pos() - } - if len(a.Before) > 0 { - return a.Before.Pos() - } - return a.ActionTok -} - -func (a *Action) End() token.Position { - if a.Footer != nil { - return a.Footer.End() - } - return a.Semicolon -} - -type AppliesTo struct { - AppliesToTok token.Position - CloseBrace token.Position - - Principal []*Path // one of required - Resource []*Path - ContextPath *Path // nil if none - ContextRecord *RecordType // nil if none - - Inline *Comment // after { - PrincipalComments NodeComments - ResourceComments NodeComments - ContextComments NodeComments - Remaining CommentBlock // leftovers after all three fields -} - -func (a *AppliesTo) Pos() token.Position { - return a.AppliesToTok -} - -func (a *AppliesTo) End() token.Position { - return a.CloseBrace -} - -// Ref is like a path, but the last element can be a string instead of an ident -type Ref struct { - Namespace []*Ident // nil if no namespace - Name Name -} - -func (r *Ref) Pos() token.Position { - if len(r.Namespace) == 0 { - return r.Name.Pos() - } - return r.Namespace[0].IdentTok -} - -func (r *Ref) End() token.Position { - return r.Name.End() -} - -// Name is an IDENT or STR -// -//sumtype:decl -type Name interface { - Node - isName() - String() string -} - -func (i *Ident) String() string { - return i.Value -} - -func (s *String) String() string { - return s.Value() -} - -type String struct { - Tok token.Position - QuotedVal string -} - -func (s *String) Value() string { - return s.QuotedVal[1 : len(s.QuotedVal)-1] -} - -// no-op statements are included for code coverage 
instrumentation -func (*String) isName() { _ = 0 } -func (*Ident) isName() { _ = 0 } - -func (s *String) Pos() token.Position { - return s.Tok -} - -func (s *String) End() token.Position { - after := s.Tok - after.Offset += len(s.QuotedVal) - after.Column += len(s.QuotedVal) - return after -} - -type CommentBlock []*Comment - -func (c CommentBlock) Pos() token.Position { - if len(c) == 0 { - return token.Position{} - } - return c[0].SlashTok -} - -func (c CommentBlock) End() token.Position { - if len(c) == 0 { - return token.Position{} - } - return c[len(c)-1].End() -} - -type Comment struct { - SlashTok token.Position // position of '//' - Value string // raw string value -} - -func (c *Comment) Pos() token.Position { - return c.SlashTok -} - -func (c *Comment) End() token.Position { - after := c.SlashTok - after.Offset += len(c.Value) - after.Column += len(c.Value) - return after -} - -func (c *Comment) Trim() string { - return strings.TrimLeft(c.Value, " \t\n/") -} - -type Annotation struct { - NodeComments - At token.Position - Key *Ident - LeftParen token.Position - Value *String - RightParen token.Position -} - -func (a *Annotation) Pos() token.Position { - if len(a.Before) > 0 { - return a.Before.Pos() - } - return a.At -} - -func (a *Annotation) End() token.Position { - if a.Value == nil { - return a.Key.End() - } - return a.RightParen -} diff --git a/internal/schema/ast/ast_test.go b/internal/schema/ast/ast_test.go deleted file mode 100644 index 3792dfcd..00000000 --- a/internal/schema/ast/ast_test.go +++ /dev/null @@ -1,189 +0,0 @@ -package ast - -// Tests in this file are for reaching 100% coverage of the ast package. These code paths should normally not be executed, -// but are forcefully exercised here to ensure that they are implemented correctly. 
- -import ( - "bytes" - "strings" - "testing" - - "github.com/cedar-policy/cedar-go/internal/schema/token" -) - -func TestIsNode(*testing.T) { - // Test all isNode() implementations - (&Schema{}).isNode() - (&Namespace{}).isNode() - (&CommonTypeDecl{}).isNode() - (&RecordType{}).isNode() - (&SetType{}).isNode() - (&Path{}).isNode() - (&Ident{}).isNode() - (&Entity{}).isNode() - (&Action{}).isNode() - (&AppliesTo{}).isNode() - (&Ref{}).isNode() - (&Attribute{}).isNode() - (&String{}).isNode() - (CommentBlock{}).isNode() - (&Comment{}).isNode() - (&Annotation{}).isNode() - - // No assertions needed since we just want coverage for these marker methods -} - -func TestIsDecl(*testing.T) { - // Test all isDecl() implementations - (&Entity{}).isDecl() - (&Action{}).isDecl() - (&Namespace{}).isDecl() - (&CommonTypeDecl{}).isDecl() - (&CommentBlock{}).isDecl() - - // No assertions needed since we just want coverage for these marker methods -} - -func TestIsType(*testing.T) { - // Test all isType() implementations - (&RecordType{}).isType() - (&SetType{}).isType() - (&Path{}).isType() - - // No assertions needed since we just want coverage for these marker methods -} - -func TestIsName(*testing.T) { - // Test all isName() implementations - (&String{}).isName() - (&Ident{}).isName() - - // No assertions needed since we just want coverage for these marker methods -} - -func TestPathEmptyParts(t *testing.T) { - p := &Path{Parts: nil} - - // Test Pos() with empty Parts - pos := p.Pos() - if pos != (token.Position{}) { - t.Errorf("Expected empty Position for Pos(), got %v", pos) - } - - // Test End() with empty Parts - end := p.End() - if end != (token.Position{}) { - t.Errorf("Expected empty Position for End(), got %v", end) - } -} - -func TestSchemaEmpty(t *testing.T) { - s := &Schema{} - - // Test Pos() with empty Schema - pos := s.Pos() - if pos != (token.Position{}) { - t.Errorf("Expected empty Position for Pos(), got %v", pos) - } - - // Test End() with empty Schema - end 
:= s.End() - if end != (token.Position{}) { - t.Errorf("Expected empty Position for End(), got %v", end) - } -} - -func Test_formatter_printInd_panic(t *testing.T) { - p := &formatter{ - w: &bytes.Buffer{}, - lastchar: 'x', // Not a newline - } - - defer func() { - r := recover() - if r == nil { - t.Fatal("expected panic, got none") - } - msg, ok := r.(string) - if !ok { - t.Fatalf("expected string, got %T", r) - } - if !strings.Contains(msg, "lastchar must be newline") { - t.Errorf("expected panic message about newline, got %q", msg) - } - }() - - p.printInd("test") -} - -type unknownNode struct { - Node // Embed Node interface to satisfy type checker -} - -func Test_formatter_print_panic(t *testing.T) { - p := &formatter{ - w: &bytes.Buffer{}, - } - - defer func() { - r := recover() - if r == nil { - t.Fatal("expected panic, got none") - } - msg, ok := r.(string) - if !ok { - t.Fatalf("expected string panic, got %T", r) - } - if !strings.Contains(msg, "unhandled node type") { - t.Errorf("expected panic message about unhandled type, got %q", msg) - } - }() - - p.print(unknownNode{}) -} - -func Test_printBracketList_panic(t *testing.T) { - p := &formatter{ - w: &bytes.Buffer{}, - } - - defer func() { - r := recover() - if r == nil { - t.Fatal("expected panic, got none") - } - msg, ok := r.(string) - if !ok { - t.Fatalf("expected string panic, got %T", r) - } - if !strings.Contains(msg, "list must not be empty") { - t.Errorf("expected panic message about empty list, got %q", msg) - } - }() - - var emptyList []Node - printBracketList(p, emptyList, false) -} - -type unknownType struct { - Type // Embed Type interface to satisfy it -} - -func TestConvertType_Panic(t *testing.T) { - defer func() { - r := recover() - if r == nil { - t.Fatal("expected panic, got none") - } - msg, ok := r.(string) - if !ok { - t.Fatalf("expected string panic, got %T", r) - } - expected := "unknownType is not an AST type" - if !strings.Contains(msg, expected) { - t.Errorf("expected panic 
message to contain %q, got %q", expected, msg) - } - }() - - convertType(unknownType{}) -} diff --git a/internal/schema/ast/convert_human.go b/internal/schema/ast/convert_human.go deleted file mode 100644 index 5ade4e83..00000000 --- a/internal/schema/ast/convert_human.go +++ /dev/null @@ -1,164 +0,0 @@ -package ast - -import ( - "fmt" - "strings" -) - -// ConvertHuman2JSON converts an AST schema to a JSON schema. The conversion process is lossy. -// Any information related to ordering, formatting, comments, etc... are lost completely. -// -// TODO: Add errors if the schema is invalid (references names that don't exist) -func ConvertHuman2JSON(n *Schema) JSONSchema { - out := make(JSONSchema) - // In Cedar, all anonymous types (not under a namespace) are put into the "root" namespace, - // which just has a name of "". - anonymousNamespace := &Namespace{} - for _, decl := range n.Decls { - switch decl := decl.(type) { - case *Namespace: - out[decl.Name.String()] = convertNamespace(decl) - default: - anonymousNamespace.Decls = append(anonymousNamespace.Decls, decl) - } - } - if len(anonymousNamespace.Decls) > 0 { - out[""] = convertNamespace(anonymousNamespace) - } - return out -} - -func convertNamespace(n *Namespace) *JSONNamespace { - jsNamespace := new(JSONNamespace) - jsNamespace.Actions = make(map[string]*JSONAction) - jsNamespace.EntityTypes = make(map[string]*JSONEntity) - jsNamespace.CommonTypes = make(map[string]*JSONCommonType) - jsNamespace.Annotations = make(map[string]string) - for _, a := range n.Annotations { - jsNamespace.Annotations[a.Key.String()] = a.Value.String() - } - - for _, astDecl := range n.Decls { - switch astDecl := astDecl.(type) { - case *Action: - for _, astActionName := range astDecl.Names { - jsAction := new(JSONAction) - jsAction.Annotations = make(map[string]string) - for _, a := range astDecl.Annotations { - jsAction.Annotations[a.Key.String()] = a.Value.String() - } - jsNamespace.Actions[astActionName.String()] = jsAction - for 
_, astMember := range astDecl.In { - jsMember := &JSONMember{ - ID: astMember.Name.String(), - } - if len(astMember.Namespace) > 0 { - jsMember.Type = convertIdents(astMember.Namespace) - } - jsAction.MemberOf = append(jsAction.MemberOf, jsMember) - } - - if astDecl.AppliesTo != nil { - jsAction.AppliesTo = &JSONAppliesTo{} - for _, princ := range astDecl.AppliesTo.Principal { - jsAction.AppliesTo.PrincipalTypes = append(jsAction.AppliesTo.PrincipalTypes, princ.String()) - } - for _, res := range astDecl.AppliesTo.Resource { - jsAction.AppliesTo.ResourceTypes = append(jsAction.AppliesTo.ResourceTypes, res.String()) - } - if astDecl.AppliesTo.ContextRecord != nil { - jsAction.AppliesTo.Context = convertType(astDecl.AppliesTo.ContextRecord) - } else if astDecl.AppliesTo.ContextPath != nil { - jsAction.AppliesTo.Context = convertType(astDecl.AppliesTo.ContextPath) - } - } - jsNamespace.Actions[astActionName.String()] = jsAction - } - case *Entity: - for _, name := range astDecl.Names { - entity := new(JSONEntity) - entity.Annotations = make(map[string]string) - for _, a := range astDecl.Annotations { - entity.Annotations[a.Key.String()] = a.Value.String() - } - jsNamespace.EntityTypes[name.String()] = entity - for _, member := range astDecl.In { - entity.MemberOfTypes = append(entity.MemberOfTypes, member.String()) - } - if astDecl.Shape != nil { - entity.Shape = convertType(astDecl.Shape) - } - if astDecl.Tags != nil { - entity.Tags = convertType(astDecl.Tags) - } - for _, value := range astDecl.Enum { - entity.Enum = append(entity.Enum, value.String()) - } - jsNamespace.EntityTypes[name.String()] = entity - } - case *CommonTypeDecl: - commonType := new(JSONCommonType) - commonType.JSONType = convertType(astDecl.Value) - commonType.Annotations = make(map[string]string) - for _, a := range astDecl.Annotations { - commonType.Annotations[a.Key.String()] = a.Value.String() - } - jsNamespace.CommonTypes[astDecl.Name.String()] = commonType - } - } - return jsNamespace -} - 
-func convertType(t Type) *JSONType { - switch t := t.(type) { - case *RecordType: - return convertRecordType(t) - case *SetType: - return &JSONType{Type: "Set", Element: convertType(t.Element)} - case *Path: - if len(t.Parts) == 1 { - if t.Parts[0].Value == "Bool" || t.Parts[0].Value == "Boolean" { - return &JSONType{Type: "Boolean"} - } - if t.Parts[0].Value == "Long" { - return &JSONType{Type: "Long"} - } - if t.Parts[0].Value == "String" { - return &JSONType{Type: "String"} - } - } - return &JSONType{Type: "EntityOrCommon", Name: t.String()} - default: - panic(fmt.Sprintf("%T is not an AST type", t)) - } -} - -func convertRecordType(t *RecordType) *JSONType { - jt := new(JSONType) - jt.Type = "Record" - jt.Attributes = make(map[string]*JSONAttribute) - for _, attr := range t.Attributes { - jsAttr := &JSONAttribute{ - Required: attr.IsRequired, - } - inner := convertType(attr.Type) - jsAttr.Type = inner.Type - jsAttr.Element = inner.Element - jsAttr.Name = inner.Name - jsAttr.Attributes = inner.Attributes - jt.Attributes[attr.Key.String()] = jsAttr - jsAttr.Annotations = make(map[string]string) - for _, a := range attr.Annotations { - jsAttr.Annotations[a.Key.String()] = a.Value.String() - } - } - return jt -} - -func convertIdents(ns []*Ident) string { - var s []string - for _, n := range ns { - s = append(s, n.Value) - } - return strings.Join(s, "::") -} diff --git a/internal/schema/ast/convert_human_test.go b/internal/schema/ast/convert_human_test.go deleted file mode 100644 index 8e90ecc9..00000000 --- a/internal/schema/ast/convert_human_test.go +++ /dev/null @@ -1,49 +0,0 @@ -package ast_test - -import ( - "bytes" - "encoding/json" - "io/fs" - "testing" - - "github.com/google/go-cmp/cmp" - - "github.com/cedar-policy/cedar-go/internal/schema/ast" - "github.com/cedar-policy/cedar-go/internal/schema/parser" - "github.com/cedar-policy/cedar-go/internal/testutil" -) - -func TestConvertHumanToJson(t *testing.T) { - // Generate testdata/test_want.json by running: 
- // cedar translate-schema --direction cedar-to-json -s test.cedarschema | jq . > test_want.json - // Note that as of cedar-policy-cli 4.4.1, the "required: true" attribute is omitted from the JSON output while - // our JSON serialization always includes it. You'll have to add the expected "required: true" fields to - // the emitted output. - exampleHuman, err := fs.ReadFile(ast.Testdata, "testdata/convert/test.cedarschema") - if err != nil { - t.Fatalf("Error reading example schema: %v", err) - } - schema, err := parser.ParseFile("", exampleHuman) - if err != nil { - t.Fatalf("Error parsing example schema: %v", err) - } - - jsonSchema := ast.ConvertHuman2JSON(schema) - var got bytes.Buffer - enc := json.NewEncoder(&got) - enc.SetIndent("", " ") - err = enc.Encode(jsonSchema) - if err != nil { - t.Fatalf("Error dumping JSON: %v", err) - } - - want, err := fs.ReadFile(ast.Testdata, "testdata/convert/test_want.json") - if err != nil { - t.Fatalf("Error reading example JSON schema: %v", err) - } - var gotJ, wantJ interface{} - testutil.OK(t, json.Unmarshal(want, &wantJ)) - testutil.OK(t, json.Unmarshal(got.Bytes(), &gotJ)) - diff := cmp.Diff(gotJ, wantJ) - testutil.FatalIf(t, diff != "", "mismatch -want +got:\n%v", diff) -} diff --git a/internal/schema/ast/convert_json.go b/internal/schema/ast/convert_json.go deleted file mode 100644 index c0f353e8..00000000 --- a/internal/schema/ast/convert_json.go +++ /dev/null @@ -1,249 +0,0 @@ -package ast - -import ( - "fmt" - "strings" -) - -// ConvertJSON2Human converts a JSON schema to a human-readable AST schema. The conversion process is lossy. -// Any information related to ordering, formatting, comments, etc... are lost completely. 
-func ConvertJSON2Human(js JSONSchema) *Schema { - schema := &Schema{} - - // Handle anonymous namespace first (if it exists) - if anon, ok := js[""]; ok { - anonNamespace := convertJSONNamespace("", anon) - // Append anonymous namespace declarations to the schema root - schema.Decls = append(schema.Decls, anonNamespace.Decls...) - } - - // Handle all other namespaces - for name, ns := range js { - if name != "" { - schema.Decls = append(schema.Decls, convertJSONNamespace(name, ns)) - } - } - - return schema -} - -func convertJSONNamespace(name string, js *JSONNamespace) *Namespace { - ns := &Namespace{} - if name != "" { - ns.Name = convertJSONNamespaceName(name) - } - - // Convert annotations - ns.Annotations = convertJSONAnnotations(js.Annotations) - - // Convert common types - ns.Decls = append(ns.Decls, convertJSONCommonTypes(js.CommonTypes)...) - - // Convert entity types - ns.Decls = append(ns.Decls, convertJSONEntityTypes(js.EntityTypes)...) - - // Convert actions - ns.Decls = append(ns.Decls, convertJSONActions(js.Actions)...) 
- - return ns -} - -func convertJSONAnnotations(annotations map[string]string) []*Annotation { - var ans []*Annotation - for k, v := range annotations { - ans = append(ans, &Annotation{Key: &Ident{Value: k}, Value: &String{QuotedVal: fmt.Sprintf("%q", v)}}) - } - return ans -} - -func convertJSONNamespaceName(name string) *Path { - parts := strings.Split(name, "::") - idents := make([]*Ident, len(parts)) - for i, part := range parts { - idents[i] = &Ident{Value: part} - } - return &Path{Parts: idents} -} - -func convertJSONCommonTypes(types map[string]*JSONCommonType) []Declaration { - decls := make([]Declaration, 0, len(types)) - for name, ct := range types { - annotations := convertJSONAnnotations(ct.Annotations) - - decls = append(decls, &CommonTypeDecl{ - Annotations: annotations, - Name: &Ident{Value: name}, - Value: convertJSONType(ct.JSONType), - }) - } - return decls -} - -func convertJSONEntityTypes(types map[string]*JSONEntity) []Declaration { - decls := make([]Declaration, 0, len(types)) - for name, et := range types { - entity := &Entity{ - Names: []*Ident{{Value: name}}, - } - - // Convert annotations - entity.Annotations = convertJSONAnnotations(et.Annotations) - - // Convert memberOfTypes - if len(et.MemberOfTypes) > 0 { - entity.In = convertJSONMemberOfTypes(et.MemberOfTypes) - } - - // Convert shape - if et.Shape != nil { - if shape, ok := convertJSONType(et.Shape).(*RecordType); ok { - entity.Shape = shape - } - } - - // Convert tags - if et.Tags != nil { - entity.Tags = convertJSONType(et.Tags) - } - - // Convert enum - for _, value := range et.Enum { - entity.Enum = append(entity.Enum, &String{QuotedVal: fmt.Sprintf("%q", value)}) - } - - decls = append(decls, entity) - } - return decls -} - -func convertJSONMemberOfTypes(types []string) []*Path { - paths := make([]*Path, len(types)) - for i, t := range types { - parts := strings.Split(t, "::") - idents := make([]*Ident, len(parts)) - for j, part := range parts { - idents[j] = &Ident{Value: 
part} - } - paths[i] = &Path{Parts: idents} - } - return paths -} - -func convertJSONActions(actions map[string]*JSONAction) []Declaration { - decls := make([]Declaration, 0, len(actions)) - for name, act := range actions { - action := &Action{ - Names: []Name{&String{QuotedVal: fmt.Sprintf("%q", name)}}, - } - - // Convert annotations - action.Annotations = convertJSONAnnotations(act.Annotations) - - // Convert memberOf - if len(act.MemberOf) > 0 { - action.In = convertJSONMemberOf(act.MemberOf) - } - - // Convert appliesTo - if act.AppliesTo != nil { - action.AppliesTo = convertJSONAppliesTo(act.AppliesTo) - } - - decls = append(decls, action) - } - return decls -} - -func convertJSONMemberOf(members []*JSONMember) []*Ref { - refs := make([]*Ref, len(members)) - for i, m := range members { - ref := &Ref{ - Name: &String{QuotedVal: fmt.Sprintf("%q", m.ID)}, - } - if m.Type != "" { - parts := strings.Split(m.Type, "::") - ref.Namespace = make([]*Ident, len(parts)) - for j, part := range parts { - ref.Namespace[j] = &Ident{Value: part} - } - } - refs[i] = ref - } - return refs -} - -func convertJSONAppliesTo(appliesTo *JSONAppliesTo) *AppliesTo { - at := &AppliesTo{} - - // Convert principal types - if len(appliesTo.PrincipalTypes) > 0 { - at.Principal = convertJSONMemberOfTypes(appliesTo.PrincipalTypes) - } - - // Convert resource types - if len(appliesTo.ResourceTypes) > 0 { - at.Resource = convertJSONMemberOfTypes(appliesTo.ResourceTypes) - } - - // Convert context - if appliesTo.Context != nil { - switch t := convertJSONType(appliesTo.Context).(type) { - case *RecordType: - at.ContextRecord = t - case *Path: - at.ContextPath = t - } - } - - return at -} - -func convertJSONType(js *JSONType) Type { - switch js.Type { - case "Boolean": - return &Path{Parts: []*Ident{{Value: "Boolean"}}} - case "Long": - return &Path{Parts: []*Ident{{Value: "Long"}}} - case "String": - return &Path{Parts: []*Ident{{Value: "String"}}} - case "Set": - return &SetType{ - Element: 
convertJSONType(js.Element), - } - case "Record": - return convertJSONRecordType(js) - case "EntityOrCommon": - parts := strings.Split(js.Name, "::") - idents := make([]*Ident, len(parts)) - for i, part := range parts { - idents[i] = &Ident{Value: part} - } - return &Path{Parts: idents} - default: - panic(fmt.Sprintf("unknown JSON type: %s", js.Type)) - } -} - -func convertJSONRecordType(js *JSONType) *RecordType { - rt := &RecordType{ - Attributes: make([]*Attribute, 0, len(js.Attributes)), - } - - for name, attr := range js.Attributes { - annotations := convertJSONAnnotations(attr.Annotations) - - rt.Attributes = append(rt.Attributes, &Attribute{ - Annotations: annotations, - Key: &String{QuotedVal: fmt.Sprintf("%q", name)}, - IsRequired: attr.Required, - Type: convertJSONType(&JSONType{ - Type: attr.Type, - Element: attr.Element, - Name: attr.Name, - Attributes: attr.Attributes, - }), - }) - } - - return rt -} diff --git a/internal/schema/ast/convert_json_test.go b/internal/schema/ast/convert_json_test.go deleted file mode 100644 index 3182a0a4..00000000 --- a/internal/schema/ast/convert_json_test.go +++ /dev/null @@ -1,94 +0,0 @@ -package ast_test - -import ( - "bytes" - "encoding/json" - "fmt" - "io/fs" - "strings" - "testing" - - "github.com/google/go-cmp/cmp" - - "github.com/cedar-policy/cedar-go/internal/schema/ast" - "github.com/cedar-policy/cedar-go/internal/testutil" -) - -func TestConvertJsonToHumanRoundtrip(t *testing.T) { - // Read the example JSON schema from embedded filesystem - exampleJSON, err := fs.ReadFile(ast.Testdata, "testdata/convert/test_want.json") - if err != nil { - t.Fatalf("Error reading example JSON schema: %v", err) - } - - // Parse the JSON schema - var jsonSchema ast.JSONSchema - if err := json.Unmarshal(exampleJSON, &jsonSchema); err != nil { - t.Fatalf("Error parsing JSON schema: %v", err) - } - - // Convert to human-readable format and back to JSON - humanSchema := ast.ConvertJSON2Human(jsonSchema) - jsonSchema2 := 
ast.ConvertHuman2JSON(humanSchema) - - // Compare the JSON schemas - json1, err := json.MarshalIndent(jsonSchema, "", " ") - testutil.OK(t, err) - - json2, err := json.MarshalIndent(jsonSchema2, "", " ") - testutil.OK(t, err) - - diff := cmp.Diff(string(json1), string(json2)) - testutil.FatalIf(t, diff != "", "mismatch -want +got:\n%v", diff) -} - -func TestConvertJsonToHumanEmpty(t *testing.T) { - // Test with an empty JSON schema - emptySchema := ast.JSONSchema{} - humanSchema := ast.ConvertJSON2Human(emptySchema) - - // Format the human-readable schema - var got bytes.Buffer - if err := ast.Format(humanSchema, &got); err != nil { - t.Fatalf("Error formatting schema: %v", err) - } - - // Should be empty - if len(got.Bytes()) != 0 { - t.Errorf("Expected empty output, got: %q", got.String()) - } -} - -func TestConvertJsonToHumanInvalidType(t *testing.T) { - // Test with an invalid JSON type - invalidSchema := ast.JSONSchema{ - "": { - EntityTypes: map[string]*ast.JSONEntity{ - "Test": { - Shape: &ast.JSONType{ - Type: "InvalidType", - }, - }, - }, - }, - } - - var panicMsg string - func() { - defer func() { - if r := recover(); r != nil { - panicMsg = fmt.Sprint(r) - } - }() - ast.ConvertJSON2Human(invalidSchema) - }() - - if panicMsg == "" { - t.Fatal("expected panic, got none") - } - - expected := "unknown JSON type: InvalidType" - if !strings.Contains(panicMsg, expected) { - t.Errorf("expected panic message to contain %q, got %q", expected, panicMsg) - } -} diff --git a/internal/schema/ast/format.go b/internal/schema/ast/format.go deleted file mode 100644 index c2a91ba4..00000000 --- a/internal/schema/ast/format.go +++ /dev/null @@ -1,386 +0,0 @@ -package ast - -import ( - "fmt" - "io" -) - -type bailout error - -// Node will pretty-print the AST node to out. -// -// The rules for formatting are fixed and not configurable. 
-func Format(n Node, out io.Writer) (err error) { - defer func() { - if r := recover(); r != nil { - if bail, ok := r.(bailout); ok { - err = bail - } - } - }() - p := &formatter{ - w: out, - lastchar: '\n', - tab: " ", // 2 spaces - } - p.print(n) - return nil -} - -type formatter struct { - indent int // 1 = 1 tab over - w io.Writer - - lastchar byte - tab string -} - -func (p *formatter) printInd(s string) { - if p.lastchar != '\n' { - panic("lastchar must be newline when calling printInd") - } - for range p.indent { - p.write(p.tab) - } - if len(s) > 0 { - p.write(s) - p.lastchar = s[len(s)-1] - } -} - -func (p *formatter) printIndf(format string, args ...any) { - p.printInd(fmt.Sprintf(format, args...)) -} - -func (p *formatter) write(s string) { - _, err := io.WriteString(p.w, s) - if err != nil { - panic(bailout(err)) - } - p.lastchar = s[len(s)-1] -} - -func (p *formatter) writef(format string, args ...any) { - buf := fmt.Sprintf(format, args...) - p.write(buf) -} - -func (p *formatter) print(n Node) { - switch n := n.(type) { - case *Schema: - p.printSchema(n) - case *Namespace: - p.printNamespace(n) - case *CommonTypeDecl: - p.printCommonTypeDecl(n) - case *RecordType: - p.printRecordType(n) - case *SetType: - p.printSetType(n) - case *Path: - p.printPath(n) - case *Attribute: - p.printAttribute(n) - case *Ident: - p.write(n.Value) - case *String: - p.write(n.QuotedVal) - case *Entity: - p.printEntity(n) - case *Action: - p.printAction(n) - case *AppliesTo: - p.printAppliesTo(n) - case *Ref: - p.printRef(n) - case CommentBlock: - p.printCommentBlock(n) - case *Comment: - p.printComment(n) - case *Annotation: - p.printAnnotation(n) - default: - panic(fmt.Sprintf("unhandled node type %T", n)) - } -} - -func (p *formatter) printSchema(n *Schema) { - for _, d := range n.Decls { - p.print(d) - } - p.print(n.Remaining) -} - -func (p *formatter) printNamespace(n *Namespace) { - for _, a := range n.Annotations { - p.print(a) - } - p.print(n.Before) - 
p.printInd("namespace ") - p.print(n.Name) - p.write(" {") - if n.Inline != nil { - p.print(n.Inline) - } - p.write("\n") - for _, d := range n.Decls { - p.indent++ - p.print(d) - p.indent-- - } - if len(n.Remaining) > 0 { - p.indent++ - p.print(n.Remaining) - p.indent-- - } - p.write("}") - if n.Footer != nil { - p.print(n.Footer) - } - p.write("\n") -} - -func (p *formatter) printCommonTypeDecl(n *CommonTypeDecl) { - for _, a := range n.Annotations { - p.print(a) - } - p.print(n.Before) - p.printIndf("type %s = ", n.Name.Value) - p.print(n.Value) - p.write(";") - if n.Footer != nil { - p.print(n.Footer) - } - p.write("\n") -} - -func (p *formatter) printRecordType(n *RecordType) { - p.write("{") - if n.Inner != nil { - p.print(n.Inner) - } - p.write("\n") - for _, a := range n.Attributes { - p.indent++ - p.print(a) - p.indent-- - } - if len(n.Remaining) > 0 { - p.indent++ - p.print(n.Remaining) - p.indent-- - } - p.printInd("}") -} - -func (p *formatter) printSetType(n *SetType) { - p.write("Set<") - p.print(n.Element) - p.write(">") -} - -func (p *formatter) printPath(n *Path) { - for i, part := range n.Parts { - if i > 0 { - p.write("::") - } - p.print(part) - } -} - -func (p *formatter) printAttribute(n *Attribute) { - for _, a := range n.Annotations { - p.print(a) - } - p.print(n.Before) - p.printInd("") // print indent - p.print(n.Key) - if !n.IsRequired { - p.write("?") - } - p.write(": ") - p.print(n.Type) - p.write(",") - if n.Inline != nil { - p.print(n.Inline) - } - p.write("\n") -} - -func (p *formatter) printEntity(n *Entity) { - for _, a := range n.Annotations { - p.print(a) - } - p.print(n.Before) - p.printInd("entity ") - for i, name := range n.Names { - if i > 0 { - p.write(", ") - } - p.print(name) - } - if len(n.Enum) > 0 { - p.write(" enum ") - printBracketList(p, n.Enum, true) - } else { - if n.In != nil { - p.write(" in ") - printBracketList(p, n.In, false) - } - if n.Shape != nil { - if n.EqTok.Line > 0 { - p.write(" = ") - } else { - 
p.write(" ") - } - p.print(n.Shape) - } - if n.Tags != nil { - p.write(" tags ") - p.print(n.Tags) - } - } - p.write(";") - if n.Footer != nil { - p.print(n.Footer) - } - p.write("\n") -} - -func (p *formatter) printAction(n *Action) { - for _, a := range n.Annotations { - p.print(a) - } - p.print(n.Before) - p.printInd("action ") - for i, name := range n.Names { - if i > 0 { - p.write(", ") - } - p.print(name) - } - if len(n.In) > 0 { - p.write(" in ") - printBracketList(p, n.In, false) - } - if n.AppliesTo != nil { - p.write(" appliesTo {") - if n.AppliesTo.Inline != nil { - p.print(n.AppliesTo.Inline) - } - p.write("\n") - p.indent++ - p.print(n.AppliesTo) - p.indent-- - p.printInd("}") - } - p.write(";") - if n.Footer != nil { - p.print(n.Footer) - } - p.write("\n") -} - -func (p *formatter) printAppliesTo(n *AppliesTo) { - if len(n.Principal) > 0 { - p.print(n.PrincipalComments.Before) - p.printInd("principal: ") - printBracketList(p, n.Principal, false) - p.write(",") - if n.PrincipalComments.Inline != nil { - p.print(n.PrincipalComments.Inline) - } - p.write("\n") - } - if len(n.Resource) > 0 { - p.print(n.ResourceComments.Before) - p.printInd("resource: ") - printBracketList(p, n.Resource, false) - p.write(",") - if n.ResourceComments.Inline != nil { - p.print(n.ResourceComments.Inline) - } - p.write("\n") - } - if n.ContextRecord != nil || n.ContextPath != nil { - p.print(n.ContextComments.Before) - p.printInd("context: ") - if n.ContextRecord != nil { - p.print(n.ContextRecord) - } else { - p.print(n.ContextPath) - } - p.write(",") - if n.ContextComments.Inline != nil { - p.print(n.ContextComments.Inline) - } - p.write("\n") - } - p.print(n.Remaining) -} - -func (p *formatter) printRef(n *Ref) { - for i, part := range n.Namespace { - if i > 0 { - p.write("::") - } - p.print(part) - } - if len(n.Namespace) > 0 { - p.write("::") - } - p.print(n.Name) -} - -func (p *formatter) printCommentBlock(n CommentBlock) { - if len(n) == 0 { - return - } - for _, c := 
range n { - // Print each comment line on a separate line indented - p.printInd("") - p.print(c) - p.write("\n") - } -} - -func (p *formatter) printComment(n *Comment) { - if p.lastchar != ' ' && p.lastchar != '\t' && p.lastchar != '\x00' && p.lastchar != '\n' { - p.write(" ") - } - p.writef("// %s", n.Trim()) -} - -func (p *formatter) printAnnotation(n *Annotation) { - p.print(n.Before) - p.printInd("") - p.write("@") - p.print(n.Key) - if n.Value != nil { - p.write("(") - p.print(n.Value) - p.write(")") - } - if n.Inline != nil { - p.print(n.Inline) - } - p.write("\n") -} - -func printBracketList[T Node](p *formatter, list []T, alwaysEmitBrackets bool) { - if len(list) == 0 { - panic("list must not be empty") - } - if len(list) > 1 || alwaysEmitBrackets { - p.write("[") - } - for i, item := range list { - if i > 0 { - p.write(", ") - } - p.print(item) - } - if len(list) > 1 || alwaysEmitBrackets { - p.write("]") - } -} diff --git a/internal/schema/ast/format_test.go b/internal/schema/ast/format_test.go deleted file mode 100644 index ba22c896..00000000 --- a/internal/schema/ast/format_test.go +++ /dev/null @@ -1,92 +0,0 @@ -package ast_test - -import ( - "bytes" - "fmt" - "io/fs" - "strings" - "testing" - - "github.com/google/go-cmp/cmp" - - "github.com/cedar-policy/cedar-go/internal/schema/ast" - "github.com/cedar-policy/cedar-go/internal/schema/parser" - "github.com/cedar-policy/cedar-go/internal/testutil" -) - -// Source will pretty-print src in the returned byte slice. If src is malformed Cedar schema, an error will be returned. 
-func Source(src []byte) ([]byte, error) { - var buf bytes.Buffer - tree, err := parser.ParseFile("", src) - if err != nil { - return nil, err - } - - err = ast.Format(tree, &buf) - if err != nil { - return nil, err - } - return buf.Bytes(), nil -} - -func TestFormatExamples(t *testing.T) { - tests := []struct { - file string - }{ - {file: "testdata/format/nocomments.cedarschema"}, - {file: "testdata/format/test.cedarschema"}, - {file: "testdata/format/emptynamespace.cedarschema"}, - {file: "testdata/walk/emptyfile.cedarschema"}, - } - - for _, tt := range tests { - t.Run(tt.file, func(t *testing.T) { - example, err := fs.ReadFile(ast.Testdata, tt.file) - if err != nil { - t.Fatalf("open testfile %s: %v", tt.file, err) - } - - got, err := Source(example) - if err != nil { - t.Fatalf("formatting error: %v", err) - } - diff := cmp.Diff(string(got), string(example)) - testutil.FatalIf(t, diff != "", "mismatch -want +got:\n%v", diff) - }) - } -} - -func TestFormatEmpty(t *testing.T) { - got, err := Source([]byte("")) - if err != nil { - t.Fatalf("formatting empty file failed: %v", err) - } - if len(got) != 0 { - t.Errorf("expected empty output, got: %q", string(got)) - } -} - -type errorWriter struct{} - -func (w errorWriter) Write([]byte) (int, error) { - return 0, fmt.Errorf("intentional write error") -} - -func TestFormat_WriterError(t *testing.T) { - schema := &ast.Schema{ - Decls: []ast.Declaration{ - &ast.CommonTypeDecl{ - Name: &ast.Ident{Value: "Test"}, - Value: &ast.Path{Parts: []*ast.Ident{{Value: "Test"}}}, - }, - }, - } - - err := ast.Format(schema, errorWriter{}) - if err == nil { - t.Fatal("expected error, got nil") - } - if !strings.Contains(err.Error(), "intentional write error") { - t.Errorf("expected error to contain 'intentional write error', got %v", err) - } -} diff --git a/internal/schema/ast/json.go b/internal/schema/ast/json.go deleted file mode 100644 index cf60ef7b..00000000 --- a/internal/schema/ast/json.go +++ /dev/null @@ -1,69 +0,0 @@ -// 
Package ast defines the structure for a Cedar schema file. -// -// The schema is defined by the JSON format: https://docs.cedarpolicy.com/schema/json-schema.html -// -// The ast can be serialized 1-1 with the JSON format. -package ast - -// JSONSchema represents the top-level Cedar schema structure -type JSONSchema map[string]*JSONNamespace - -// JSONNamespace contains entity types, actions, and optional common types -type JSONNamespace struct { - EntityTypes map[string]*JSONEntity `json:"entityTypes"` - Actions map[string]*JSONAction `json:"actions"` - CommonTypes map[string]*JSONCommonType `json:"commonTypes,omitempty"` - Annotations map[string]string `json:"annotations,omitempty"` -} - -// JSONEntity defines the structure and relationships of an entity -type JSONEntity struct { - MemberOfTypes []string `json:"memberOfTypes,omitempty"` - Shape *JSONType `json:"shape,omitempty"` - Tags *JSONType `json:"tags,omitempty"` - Enum []string `json:"enum,omitempty"` - Annotations map[string]string `json:"annotations,omitempty"` -} - -// JSONAction defines what can perform an action and what it applies to -type JSONAction struct { - MemberOf []*JSONMember `json:"memberOf,omitempty"` - AppliesTo *JSONAppliesTo `json:"appliesTo"` - Annotations map[string]string `json:"annotations,omitempty"` -} - -type JSONMember struct { - ID string `json:"id"` - Type string `json:"type,omitempty"` -} - -// JSONAppliesTo defines what types can perform an action and what it applies to -type JSONAppliesTo struct { - PrincipalTypes []string `json:"principalTypes"` - ResourceTypes []string `json:"resourceTypes"` - Context *JSONType `json:"context,omitempty"` -} - -// JSONType represents the various type definitions possible in Cedar -type JSONType struct { - Type string `json:"type"` - Element *JSONType `json:"element,omitempty"` // For Set types - Name string `json:"name,omitempty"` // For Entity types - Attributes map[string]*JSONAttribute `json:"attributes,omitempty"` // For Record types - 
Annotations map[string]string `json:"annotations,omitempty"` -} - -// JSONAttribute represents a field in a Record type -type JSONAttribute struct { - Type string `json:"type"` - Required bool `json:"required"` - Element *JSONType `json:"element,omitempty"` // For Set types - Name string `json:"name,omitempty"` // For Entity types - Attributes map[string]*JSONAttribute `json:"attributes,omitempty"` // For nested Record types - Annotations map[string]string `json:"annotations,omitempty"` -} - -// JSONCommonType represents a reusable type definition -type JSONCommonType struct { - *JSONType -} diff --git a/internal/schema/ast/json_test.go b/internal/schema/ast/json_test.go deleted file mode 100644 index db36314f..00000000 --- a/internal/schema/ast/json_test.go +++ /dev/null @@ -1,40 +0,0 @@ -package ast - -import ( - "encoding/json" - "os" - "reflect" - "testing" -) - -func TestParsesExampleSchema(t *testing.T) { - exampleSchema, err := os.ReadFile("testdata/example_schema.json") - if err != nil { - t.Fatalf("Error reading example schema: %v", err) - } - - var schema JSONSchema - err = json.Unmarshal([]byte(exampleSchema), &schema) - if err != nil { - t.Fatalf("Error parsing schema: %v", err) - } - - out, err := json.MarshalIndent(&schema, "", " ") - if err != nil { - t.Fatalf("Error marshalling schema: %v", err) - } - if ok, err := jsonEq(exampleSchema, out); err != nil || !ok { - t.Errorf("Schema does not match original:\n%s\n=========================================\n%s", exampleSchema, string(out)) - } -} - -func jsonEq(a, b []byte) (bool, error) { - var j, j2 interface{} - if err := json.Unmarshal(a, &j); err != nil { - return false, err - } - if err := json.Unmarshal(b, &j2); err != nil { - return false, err - } - return reflect.DeepEqual(j2, j), nil -} diff --git a/internal/schema/ast/testdata.go b/internal/schema/ast/testdata.go deleted file mode 100644 index bf259e1a..00000000 --- a/internal/schema/ast/testdata.go +++ /dev/null @@ -1,8 +0,0 @@ -package ast - 
-import ( - "embed" -) - -//go:embed testdata -var Testdata embed.FS diff --git a/internal/schema/ast/testdata/convert/test.cedarschema b/internal/schema/ast/testdata/convert/test.cedarschema deleted file mode 100644 index 543a378a..00000000 --- a/internal/schema/ast/testdata/convert/test.cedarschema +++ /dev/null @@ -1,56 +0,0 @@ -type Anon = String; -action AnonAct; -@annotation("namespace") -namespace PhotoFlash { - @annotation1("type") - @annotation2("type") - type LongAlias = Long; - @annotation("action") - action groupAction1; - action groupAction2; - @annotation("entity") - entity User, User2 in UserGroup = { - @annotation("attrDecl") - "department": String, - "jobLevel": Long, - } tags String; - entity UserGroup; - entity Album in Album = { - "account": Account, - "private": Bool, - }; - entity Account = { - "admins"?: Set, - "owner": User, - }; - entity Photo in Album = { - "account": Account, - "private": Bool, - }; - entity PhotoFormat enum ["jpg", "gif", "png"]; - entity AccountType enum ["user"]; - action "uploadPhoto" in groupAction1 appliesTo { - principal: User, - resource: Album, - context: { - "authenticated": Bool, - "photo": { - "file_size": Long, - "file_type": String, - }, - } - }; - type authenticatedContext = { - "authenticated": Bool, - }; - action "viewPhoto" in [groupAction1, groupAction2, random::nested::name::"actionGroup"] appliesTo { - principal: User, - resource: Photo, - context: authenticatedContext, - }; - action "listAlbums" appliesTo { - principal: User, - resource: Account, - context: authenticatedContext, - }; -} \ No newline at end of file diff --git a/internal/schema/ast/testdata/convert/test_want.json b/internal/schema/ast/testdata/convert/test_want.json deleted file mode 100644 index 9d9fdee3..00000000 --- a/internal/schema/ast/testdata/convert/test_want.json +++ /dev/null @@ -1,248 +0,0 @@ -{ - "": { - "commonTypes": { - "Anon": { - "type": "String" - } - }, - "entityTypes": {}, - "actions": { - "AnonAct": { - 
"appliesTo": null - } - } - }, - "PhotoFlash": { - "commonTypes": { - "LongAlias": { - "type": "Long", - "annotations": { - "annotation1": "type", - "annotation2": "type" - } - }, - "authenticatedContext": { - "type": "Record", - "attributes": { - "authenticated": { - "type": "Boolean", - "required": true - } - } - } - }, - "entityTypes": { - "Account": { - "shape": { - "type": "Record", - "attributes": { - "admins": { - "type": "Set", - "element": { - "type": "EntityOrCommon", - "name": "User" - }, - "required": false - }, - "owner": { - "required": true, - "type": "EntityOrCommon", - "name": "User" - } - } - } - }, - "AccountType": { - "enum": ["user"] - }, - "Album": { - "memberOfTypes": [ - "Album" - ], - "shape": { - "type": "Record", - "attributes": { - "account": { - "required": true, - "type": "EntityOrCommon", - "name": "Account" - }, - "private": { - "required": true, - "type": "Boolean" - } - } - } - }, - "Photo": { - "memberOfTypes": [ - "Album" - ], - "shape": { - "type": "Record", - "attributes": { - "account": { - "required": true, - "type": "EntityOrCommon", - "name": "Account" - }, - "private": { - "required": true, - "type": "Boolean" - } - } - } - }, - "PhotoFormat": { - "enum": ["jpg", "gif", "png"] - }, - "User": { - "memberOfTypes": [ - "UserGroup" - ], - "shape": { - "type": "Record", - "attributes": { - "department": { - "required": true, - "type": "String", - "annotations": { - "annotation": "attrDecl" - } - }, - "jobLevel": { - "required": true, - "type": "Long" - } - } - }, - "tags": { - "type": "String" - }, - "annotations": { - "annotation": "entity" - } - }, - "User2": { - "memberOfTypes": [ - "UserGroup" - ], - "shape": { - "type": "Record", - "attributes": { - "department": { - "required": true, - "type": "String", - "annotations": { - "annotation": "attrDecl" - } - }, - "jobLevel": { - "required": true, - "type": "Long" - } - } - }, - "tags": { - "type": "String" - }, - "annotations": { - "annotation": "entity" - } - }, - 
"UserGroup": {} - }, - "actions": { - "groupAction1": { - "appliesTo": null, - "annotations": { - "annotation": "action" - } - }, - "groupAction2": { - "appliesTo": null - }, - "listAlbums": { - "appliesTo": { - "resourceTypes": [ - "Account" - ], - "principalTypes": [ - "User" - ], - "context": { - "type": "EntityOrCommon", - "name": "authenticatedContext" - } - } - }, - "uploadPhoto": { - "appliesTo": { - "resourceTypes": [ - "Album" - ], - "principalTypes": [ - "User" - ], - "context": { - "type": "Record", - "attributes": { - "authenticated": { - "required": true, - "type": "Boolean" - }, - "photo": { - "required": true, - "type": "Record", - "attributes": { - "file_size": { - "required": true, - "type": "Long" - }, - "file_type": { - "required": true, - "type": "String" - } - } - } - } - } - }, - "memberOf": [ - { - "id": "groupAction1" - } - ] - }, - "viewPhoto": { - "appliesTo": { - "resourceTypes": [ - "Photo" - ], - "principalTypes": [ - "User" - ], - "context": { - "type": "EntityOrCommon", - "name": "authenticatedContext" - } - }, - "memberOf": [ - { - "id": "groupAction1" - }, - { - "id": "groupAction2" - }, - { - "id": "actionGroup", - "type": "random::nested::name" - } - ] - } - }, - "annotations": { - "annotation": "namespace" - } - } -} diff --git a/internal/schema/ast/testdata/example_schema.json b/internal/schema/ast/testdata/example_schema.json deleted file mode 100644 index c825f71d..00000000 --- a/internal/schema/ast/testdata/example_schema.json +++ /dev/null @@ -1,200 +0,0 @@ -{ - "": { - "entityTypes": { - "Manufacturer": {} - }, - "actions": {} - }, - "ExampleCo::Clients": { - "entityTypes": { - "Manufacturer": {} - }, - "actions": {} - }, - "ExampleCo::Furniture": { - "entityTypes": { - "Table": { - "shape": { - "type": "Record", - "attributes": { - "manufacturer": { - "required": false, - "type": "Entity", - "name": "ExampleCo::Clients::Manufacturer" - } - } - } - } - }, - "actions": {} - }, - "PhotoFlash": { - "entityTypes": { - 
"Account": { - "shape": { - "type": "Record", - "attributes": { - "admins": { - "type": "Set", - "required": false, - "element": { - "type": "Entity", - "name": "User" - } - }, - "owner": { - "type": "Entity", - "required": false, - "name": "User" - } - } - } - }, - "Album": { - "memberOfTypes": [ - "Album" - ], - "shape": { - "type": "Record", - "attributes": { - "account": { - "type": "Entity", - "required": false, - "name": "Account" - }, - "private": { - "type": "Boolean", - "required": false - } - } - } - }, - "Photo": { - "memberOfTypes": [ - "Album" - ], - "shape": { - "type": "Record", - "attributes": { - "account": { - "type": "Entity", - "required": false, - "name": "Account" - }, - "private": { - "type": "Boolean", - "required": false - } - } - } - }, - "PhotoFormat": { - "enum": ["jpg", "gif", "png"] - }, - "User": { - "memberOfTypes": [ - "UserGroup" - ], - "shape": { - "type": "Record", - "attributes": { - "department": { - "type": "String", - "required": false - }, - "jobLevel": { - "type": "Long", - "required": false - } - } - } - }, - "UserGroup": {} - }, - "actions": { - "listAlbums": { - "appliesTo": { - "principalTypes": [ - "User" - ], - "resourceTypes": [ - "Account" - ], - "context": { - "type": "Record", - "attributes": { - "authenticated": { - "type": "Boolean", - "required": false - } - } - } - }, - "memberOf": [ - { - "id": "read", - "type": "PhotoFlash::Action" - } - ] - }, - "read": { - "appliesTo": { - "principalTypes": [], - "resourceTypes": [] - } - }, - "uploadPhoto": { - "appliesTo": { - "principalTypes": [ - "User" - ], - "resourceTypes": [ - "Album" - ], - "context": { - "type": "Record", - "attributes": { - "authenticated": { - "type": "Boolean", - "required": false - }, - "photo": { - "type": "Record", - "required": false, - "attributes": { - "file_size": { - "type": "Long", - "required": false - }, - "file_type": { - "type": "String", - "required": false - } - } - } - } - } - } - }, - "viewPhoto": { - "appliesTo": { - 
"principalTypes": [ - "User" - ], - "resourceTypes": [ - "Photo" - ], - "context": { - "type": "Record", - "attributes": { - "authenticated": { - "type": "Boolean", - "required": false - } - } - } - } - } - } - } -} \ No newline at end of file diff --git a/internal/schema/ast/testdata/format/emptynamespace.cedarschema b/internal/schema/ast/testdata/format/emptynamespace.cedarschema deleted file mode 100644 index ce07144a..00000000 --- a/internal/schema/ast/testdata/format/emptynamespace.cedarschema +++ /dev/null @@ -1,2 +0,0 @@ -namespace Test { -} diff --git a/internal/schema/ast/testdata/format/nocomments.cedarschema b/internal/schema/ast/testdata/format/nocomments.cedarschema deleted file mode 100644 index ae2b35d8..00000000 --- a/internal/schema/ast/testdata/format/nocomments.cedarschema +++ /dev/null @@ -1,47 +0,0 @@ -namespace PhotoFlash { - entity User, User2 in UserGroup = { - "department": String, - "jobLevel": Long, - } tags String; - entity UserGroup; - entity Album in Album = { - "account": Account, - "private": Bool, - }; - entity Account = { - "admins"?: Set, - "owner": User, - }; - entity Photo in Album = { - "account": Account, - "private": Bool, - }; - type LongAlias = Long; - action "uploadPhoto" appliesTo { - principal: User, - resource: [Album, Photo], - context: { - "authenticated": Bool, - "photo": { - "file_size": LongAlias, - "file_type": String, - }, - }, - }; - action "read"; - action "all"; - action "viewPhoto", viewPhoto2 in ["read", PhotoFlash::Action::all] appliesTo { - principal: User, - resource: Photo, - context: { - "authenticated": Bool, - }, - }; - action "listAlbums" in "read" appliesTo { - principal: User, - resource: Account, - context: { - "authenticated": Bool, - }, - }; -} diff --git a/internal/schema/ast/testdata/format/test.cedarschema b/internal/schema/ast/testdata/format/test.cedarschema deleted file mode 100644 index 2a9baffa..00000000 --- a/internal/schema/ast/testdata/format/test.cedarschema +++ /dev/null @@ -1,82 
+0,0 @@ -// Test case to make sure that all types of Cedar elements are parseable -// and that the formatter doesn't mess anything up. -@annotation("namespace") // inline annotation comment -// comment between annotation and namespace -namespace PhotoFlash { // inline namespace comment - @annotation("entity") - // Comment explaining entities User and User2 - // it's a two line comment - entity User, User2 in UserGroup = { - "department": String, - "jobLevel": Long, - } tags String; - entity UserGroup; // inline comment entity - entity Album in Album = { // inline comment rec type - "account": Account, - // record attribute comment - "private": Bool, // record inline comment - // footer comment rec type - // multiple lines - }; // footer comment entity - entity Account { - "admins"?: Set, // attribute comment - "owner": User, - }; - entity Empty { - }; - entity Photo in [Album, PhotoFlassh::Other::Album] = { - "account": Account, - "private": Bool, - }; - entity PhotoFormat enum ["jpg", "gif", "png"]; - entity AccountType enum ["user"]; - // annotation header comment - @annotation("type") - @in // keywords are valid identifiers - // type header comment - type LongAlias = Long; // type footer comment - @annotation1("action") - @annotation2 // inline annotation comment - // action header comment - action "uploadPhoto" appliesTo { // inline action comment - // principal comment before - principal: User, // principal comment inline - // resource comment before - // multi line - resource: [Album, Photo], // resource comment inline - // context comment before - context: { // context comment inline - @annotation("attrDecl1") - "authenticated": Bool, - @annotation("attrDecl2") - "photo": { - "file_size": LongAlias, - "file_type": String, - }, - }, // context comment after - // remaining comments - }; // action footer comment - action "read"; // action footer comment 1-line - action "all"; - action "viewPhoto", viewPhoto2 in ["read", PhotoFlash::Action::all] appliesTo { - 
principal: User, - resource: Photo, - context: { // opener comment - // Attribute comment (line 1) - // Attribute comment (line 2) - "authenticated": Bool, // attribute comment inline - }, // context comment - }; - type authenticatedContext = { - "authenticated": Bool, - appliesTo: String, // keywords are valid identifiers - }; - action "listAlbums" in "read" appliesTo { - principal: User, - resource: Account, - context: authenticatedContext, - }; - // Remainder comment block - // should also be kept around -} // Footer comment on namespace -// Tailing comments after namespace diff --git a/internal/schema/ast/testdata/walk/emptyfile.cedarschema b/internal/schema/ast/testdata/walk/emptyfile.cedarschema deleted file mode 100644 index e69de29b..00000000 diff --git a/internal/schema/ast/testdata/walk/example.cedarschema b/internal/schema/ast/testdata/walk/example.cedarschema deleted file mode 100644 index 7f8061a5..00000000 --- a/internal/schema/ast/testdata/walk/example.cedarschema +++ /dev/null @@ -1,20 +0,0 @@ -// Comment -namespace PhotoFlash { - // Comment explaining entities User and User2 - // it's a two line comment - entity User, User2 in UserGroup = { - "department": String, - "jobLevel": Long, - } tags String; - entity UserGroup; - // Type comment - type LongAlias = Long; - // Action comment - action "viewPhoto", viewPhoto2 in ["read", PhotoFlash::Action::all] appliesTo { - principal: User, - resource: Photo, - context: { - "authenticated": Bool - } - }; -} \ No newline at end of file diff --git a/internal/schema/ast/walk_test.go b/internal/schema/ast/walk_test.go deleted file mode 100644 index f107acf1..00000000 --- a/internal/schema/ast/walk_test.go +++ /dev/null @@ -1,209 +0,0 @@ -package ast_test - -import ( - "fmt" - "io/fs" - "strings" - "testing" - - "github.com/cedar-policy/cedar-go/internal/schema/ast" - "github.com/cedar-policy/cedar-go/internal/schema/parser" -) - -func TestAstScope(t *testing.T) { - // For each node, we verify that all of its 
children nodes are entirely within the Start and End of the functions. - // This is a quick sanity test that we didn't implement any of our start or end positions incorrectly (or not set them!) - tests := []string{ - "testdata/format/test.cedarschema", - "testdata/format/emptynamespace.cedarschema", - "testdata/format/nocomments.cedarschema", - "testdata/walk/example.cedarschema", - "testdata/walk/emptyfile.cedarschema", - } - - for _, test := range tests { - t.Run(test, func(t *testing.T) { - src, err := fs.ReadFile(ast.Testdata, test) - if err != nil { - t.Fatalf("Error reading test schema: %v", err) - } - schema, err := parser.ParseFile("", []byte(src)) - if err != nil { - t.Fatalf("Error parsing example schema: %v", err) - } - - chain := []ast.Node{} - Walk(schema, func(node ast.Node) bool { - if node == nil { - panic("node should not be nil") - } - if len(chain) > 0 { - parent := chain[len(chain)-1] - err := assertWithin(node, parent) - if err != nil { - chainstrs := make([]string, 0, len(chain)+1) - for _, n := range chain { - chainstrs = append(chainstrs, nodeid(n)) - } - chainstrs = append(chainstrs, nodeid(node)) - - t.Errorf("%s: %v", strings.Join(chainstrs, " -> "), err) - return false - } - } - chain = append(chain, node) - return true - }, func(ast.Node) bool { - chain = chain[:len(chain)-1] - return true - }) - }) - } -} - -func nodeid(n ast.Node) string { - return fmt.Sprintf("%T(%d/%d:%d-%d/%d:%d)", n, n.Pos().Offset, n.Pos().Line, n.Pos().Column, n.End().Offset, n.End().Line, n.End().Column) -} - -func assertWithin(node ast.Node, parent ast.Node) error { - if node.Pos().Offset == 0 && node.End().Offset == 0 { - return nil - } - - if node.Pos().Offset == node.End().Offset { - return fmt.Errorf("zero length node") - } - - if node.Pos().Line == 0 { - return fmt.Errorf("missing start position") - } - if node.Pos().Offset < parent.Pos().Offset { - return fmt.Errorf("node start < parent start (%d < %d)", node.Pos().Offset, parent.Pos().Offset) - } - - if 
node.End().Line == 0 { - return fmt.Errorf("missing end position") - } - if node.End().Offset > parent.End().Offset { - return fmt.Errorf("node end > parent end (%d > %d)", node.End().Offset, parent.End().Offset) - } - return nil -} - -type visitor struct { - stop bool -} - -func Walk(n ast.Node, open, exit func(ast.Node) bool) { - var v visitor - v.walk(n, open, exit) -} - -func (vis *visitor) walk(n ast.Node, open, exit func(ast.Node) bool) { - if vis.stop { - return - } - if n == nil || !open(n) { - vis.stop = true - return - } - defer func() { - if n != nil { - exit(n) - } - }() - - switch v := n.(type) { - case *ast.Schema: - for _, decl := range v.Decls { - vis.walk(decl, open, exit) - } - vis.walk(v.Remaining, open, exit) - case *ast.Namespace: - for _, a := range v.Annotations { - vis.walk(a, open, exit) - } - vis.walk(v.Before, open, exit) - vis.walk(v.Name, open, exit) - if v.Inline != nil { - vis.walk(v.Inline, open, exit) - } - for _, decl := range v.Decls { - vis.walk(decl, open, exit) - } - vis.walk(v.Remaining, open, exit) - if v.Footer != nil { - vis.walk(v.Footer, open, exit) - } - case ast.CommentBlock: - for _, c := range v { - vis.walk(c, open, exit) - } - case *ast.CommonTypeDecl: - vis.walk(v.Name, open, exit) - vis.walk(v.Value, open, exit) - case *ast.Entity: - for _, a := range v.Annotations { - vis.walk(a, open, exit) - } - for _, name := range v.Names { - vis.walk(name, open, exit) - } - for _, in := range v.In { - vis.walk(in, open, exit) - } - if v.Shape != nil { - vis.walk(v.Shape, open, exit) - } - if v.Tags != nil { - vis.walk(v.Tags, open, exit) - } - case *ast.Action: - for _, a := range v.Annotations { - vis.walk(a, open, exit) - } - for _, name := range v.Names { - vis.walk(name, open, exit) - } - for _, in := range v.In { - vis.walk(in, open, exit) - } - if v.AppliesTo != nil { - vis.walk(v.AppliesTo, open, exit) - } - case *ast.AppliesTo: - for _, p := range v.Principal { - vis.walk(p, open, exit) - } - for _, r := range 
v.Resource { - vis.walk(r, open, exit) - } - if v.ContextRecord != nil { - vis.walk(v.ContextRecord, open, exit) - } - if v.ContextPath != nil { - vis.walk(v.ContextPath, open, exit) - } - case *ast.RecordType: - for _, attr := range v.Attributes { - vis.walk(attr, open, exit) - } - case *ast.SetType: - vis.walk(v.Element, open, exit) - case *ast.Path: - for _, part := range v.Parts { - vis.walk(part, open, exit) - } - case *ast.Attribute: - for _, a := range v.Annotations { - vis.walk(a, open, exit) - } - vis.walk(v.Key, open, exit) - vis.walk(v.Type, open, exit) - case *ast.Ref: - for _, n := range v.Namespace { - vis.walk(n, open, exit) - } - vis.walk(v.Name, open, exit) - } -} diff --git a/internal/schema/parser/cedarschema.go b/internal/schema/parser/cedarschema.go deleted file mode 100644 index 7c504424..00000000 --- a/internal/schema/parser/cedarschema.go +++ /dev/null @@ -1,1134 +0,0 @@ -// Code generated by re2go 4.3 on Mon Jul 7 15:42:21 2025, DO NOT EDIT. -package parser - -import ( - "bytes" - "fmt" - "encoding/hex" - - "github.com/cedar-policy/cedar-go/internal/schema/token" -) - -func (l *Lexer) lex() (pos token.Position, tok token.Type, lit string, err error) { - for { - lit = "" - l.pos.Offset = l.cursor - l.pos.Column = l.cursor - l.lineStart + 1 - l.token = l.cursor - pos = l.pos - - -{ - var yych byte - yych = l.input[l.cursor] - switch (yych) { - case 0x00: - goto yy1 - case '\t': - fallthrough - case ' ': - goto yy4 - case '\n': - goto yy5 - case '\r': - goto yy6 - case '"': - goto yy7 - case '(': - goto yy8 - case ')': - goto yy9 - case ',': - goto yy10 - case '/': - goto yy11 - case ':': - goto yy12 - case ';': - goto yy13 - case '<': - goto yy14 - case '=': - goto yy15 - case '>': - goto yy16 - case '?': - goto yy17 - case '@': - goto yy18 - case 'A','B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z': - fallthrough - case '_': - fallthrough - case 'b': - fallthrough - case 'd': - fallthrough - 
case 'f','g','h': - fallthrough - case 'j','k','l','m': - fallthrough - case 'o': - fallthrough - case 'q': - fallthrough - case 's': - fallthrough - case 'u','v','w','x','y','z': - goto yy19 - case '[': - goto yy22 - case ']': - goto yy23 - case 'a': - goto yy24 - case 'c': - goto yy25 - case 'e': - goto yy26 - case 'i': - goto yy27 - case 'n': - goto yy28 - case 'p': - goto yy29 - case 'r': - goto yy30 - case 't': - goto yy31 - case '{': - goto yy32 - case '}': - goto yy33 - default: - goto yy2 - } -yy1: - l.cursor += 1 - { l.cursor -= 1; tok = token.EOF; return } -yy2: - l.cursor += 1 -yy3: - { err = ErrUnrecognizedToken; return } -yy4: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == '\t') { - goto yy4 - } - if (yych == ' ') { - goto yy4 - } - { - continue - } -yy5: - l.cursor += 1 - { - l.pos.Line += 1 - l.pos.Column = 1 - l.lineStart = l.cursor - continue - } -yy6: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == '\n') { - goto yy5 - } - goto yy3 -yy7: - l.cursor += 1 - { return l.lexString('"') } -yy8: - l.cursor += 1 - { tok = token.LEFTPAREN; lit = "("; return } -yy9: - l.cursor += 1 - { tok = token.RIGHTPAREN; lit = ")"; return } -yy10: - l.cursor += 1 - { tok = token.COMMA; lit = ","; return } -yy11: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == '/') { - goto yy34 - } - goto yy3 -yy12: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == ':') { - goto yy36 - } - { tok = token.COLON; lit = ":"; return } -yy13: - l.cursor += 1 - { tok = token.SEMICOLON; lit = ";"; return } -yy14: - l.cursor += 1 - { tok = token.LEFTANGLE; lit = "<"; return } -yy15: - l.cursor += 1 - { tok = token.EQUALS; lit = "="; return } -yy16: - l.cursor += 1 - { tok = token.RIGHTANGLE; lit = ">"; return } -yy17: - l.cursor += 1 - { tok = token.QUESTION; lit = "?"; return } -yy18: - l.cursor += 1 - { tok = token.AT; lit = "@"; return } -yy19: - l.cursor += 1 - yych = l.input[l.cursor] -yy20: - if (yych <= 'Z') { - if (yych <= '/') { - goto yy21 - } - if 
(yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy21 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy21: - { tok = token.IDENT; lit = l.literal(); return } -yy22: - l.cursor += 1 - { tok = token.LEFTBRACKET; lit = "["; return } -yy23: - l.cursor += 1 - { tok = token.RIGHTBRACKET; lit = "]"; return } -yy24: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'c') { - goto yy37 - } - if (yych == 'p') { - goto yy38 - } - goto yy20 -yy25: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'o') { - goto yy39 - } - goto yy20 -yy26: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'n') { - goto yy40 - } - goto yy20 -yy27: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'n') { - goto yy41 - } - goto yy20 -yy28: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'a') { - goto yy43 - } - goto yy20 -yy29: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'r') { - goto yy44 - } - goto yy20 -yy30: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'e') { - goto yy45 - } - goto yy20 -yy31: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'a') { - goto yy46 - } - if (yych == 'y') { - goto yy47 - } - goto yy20 -yy32: - l.cursor += 1 - { tok = token.LEFTBRACE; lit = "{"; return } -yy33: - l.cursor += 1 - { tok = token.RIGHTBRACE; lit = "}"; return } -yy34: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= '\n') { - if (yych <= 0x00) { - goto yy35 - } - if (yych <= '\t') { - goto yy34 - } - } else { - if (yych != '\r') { - goto yy34 - } - } -yy35: - { tok = token.COMMENT; lit = l.literal(); return } -yy36: - l.cursor += 1 - { tok = token.DOUBLECOLON; lit = "::"; return } -yy37: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 't') { - goto yy48 - } - goto yy20 -yy38: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'p') { - goto yy49 - } - goto yy20 -yy39: - l.cursor += 1 - 
yych = l.input[l.cursor] - if (yych == 'n') { - goto yy50 - } - goto yy20 -yy40: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 's') { - goto yy20 - } - if (yych <= 't') { - goto yy51 - } - if (yych <= 'u') { - goto yy52 - } - goto yy20 -yy41: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy42 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy42 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy42: - { tok = token.IN; lit = "in"; return } -yy43: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'm') { - goto yy53 - } - goto yy20 -yy44: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'i') { - goto yy54 - } - goto yy20 -yy45: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 's') { - goto yy55 - } - goto yy20 -yy46: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'g') { - goto yy56 - } - goto yy20 -yy47: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'p') { - goto yy57 - } - goto yy20 -yy48: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'i') { - goto yy58 - } - goto yy20 -yy49: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'l') { - goto yy59 - } - goto yy20 -yy50: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 't') { - goto yy60 - } - goto yy20 -yy51: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'i') { - goto yy61 - } - goto yy20 -yy52: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'm') { - goto yy62 - } - goto yy20 -yy53: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'e') { - goto yy64 - } - goto yy20 -yy54: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'n') { - goto yy65 - } - goto yy20 -yy55: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'o') { - goto yy66 - } - goto yy20 -yy56: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 's') { - 
goto yy67 - } - goto yy20 -yy57: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'e') { - goto yy69 - } - goto yy20 -yy58: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'o') { - goto yy71 - } - goto yy20 -yy59: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'i') { - goto yy72 - } - goto yy20 -yy60: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'e') { - goto yy73 - } - goto yy20 -yy61: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 't') { - goto yy74 - } - goto yy20 -yy62: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy63 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy63 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy63: - { tok = token.ENUM; lit = "enum"; return } -yy64: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 's') { - goto yy75 - } - goto yy20 -yy65: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'c') { - goto yy76 - } - goto yy20 -yy66: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'u') { - goto yy77 - } - goto yy20 -yy67: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy68 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy68 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy68: - { tok = token.TAGS; lit = "tags"; return } -yy69: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy70 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy70 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy70: - { tok = token.TYPE; lit = "type"; 
return } -yy71: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'n') { - goto yy78 - } - goto yy20 -yy72: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'e') { - goto yy80 - } - goto yy20 -yy73: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'x') { - goto yy81 - } - goto yy20 -yy74: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'y') { - goto yy82 - } - goto yy20 -yy75: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'p') { - goto yy84 - } - goto yy20 -yy76: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'i') { - goto yy85 - } - goto yy20 -yy77: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'r') { - goto yy86 - } - goto yy20 -yy78: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy79 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy79 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy79: - { tok = token.ACTION; lit = "action"; return } -yy80: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 's') { - goto yy87 - } - goto yy20 -yy81: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 't') { - goto yy88 - } - goto yy20 -yy82: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy83 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy83 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy83: - { tok = token.ENTITY; lit = "entity"; return } -yy84: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'a') { - goto yy90 - } - goto yy20 -yy85: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'p') { - goto yy91 - } - goto yy20 -yy86: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'c') { - goto yy92 - } - goto yy20 
-yy87: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'T') { - goto yy93 - } - goto yy20 -yy88: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy89 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy89 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy89: - { tok = token.CONTEXT; lit = "context"; return } -yy90: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'c') { - goto yy94 - } - goto yy20 -yy91: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'a') { - goto yy95 - } - goto yy20 -yy92: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'e') { - goto yy96 - } - goto yy20 -yy93: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'o') { - goto yy98 - } - goto yy20 -yy94: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'e') { - goto yy100 - } - goto yy20 -yy95: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == 'l') { - goto yy102 - } - goto yy20 -yy96: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy97 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy97 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy97: - { tok = token.RESOURCE; lit = "resource"; return } -yy98: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy99 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy99 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy99: - { tok = token.APPLIESTO; lit = "appliesTo"; return } -yy100: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { 
- goto yy101 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy101 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy101: - { tok = token.NAMESPACE; lit = "namespace"; return } -yy102: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych <= 'Z') { - if (yych <= '/') { - goto yy103 - } - if (yych <= '9') { - goto yy19 - } - if (yych >= 'A') { - goto yy19 - } - } else { - if (yych <= '_') { - if (yych >= '_') { - goto yy19 - } - } else { - if (yych <= '`') { - goto yy103 - } - if (yych <= 'z') { - goto yy19 - } - } - } -yy103: - { tok = token.PRINCIPAL; lit = "principal"; return } -} - - } -} - -func (l *Lexer) lexString(quote byte) (pos token.Position, tok token.Type, lit string, err error) { - pos = l.pos - marker := 0 - var buf bytes.Buffer - buf.WriteByte(quote) - for { - var u byte - - -{ - var yych byte - yych = l.input[l.cursor] - if (yych <= '\n') { - if (yych <= 0x00) { - goto yy105 - } - if (yych <= '\t') { - goto yy106 - } - goto yy107 - } else { - if (yych == '\\') { - goto yy109 - } - goto yy106 - } -yy105: - l.cursor += 1 - { - l.cursor -= 1 // make sure we don't overflow next lex call - err = ErrUnterminatedString - tok = token.EOF - pos = l.pos - return - } -yy106: - l.cursor += 1 - { - u = yych - buf.WriteByte(u) - if u == quote { - tok = token.STRING - pos = l.pos - lit = string(buf.Bytes()) - return - } - continue - } -yy107: - l.cursor += 1 -yy108: - { err = ErrInvalidString; return } -yy109: - l.cursor += 1 - marker = l.cursor - yych = l.input[l.cursor] - if (yych <= '\\') { - if (yych <= '\'') { - if (yych == '"') { - goto yy110 - } - if (yych <= '&') { - goto yy108 - } - goto yy111 - } else { - if (yych == '0') { - goto yy112 - } - if (yych <= '[') { - goto yy108 - } - goto yy113 - } - } else { - if (yych <= 'r') { - if (yych == 'n') { - goto yy114 - } - if (yych <= 'q') { - goto yy108 - } - goto yy115 - } 
else { - if (yych <= 's') { - goto yy108 - } - if (yych <= 't') { - goto yy116 - } - if (yych <= 'u') { - goto yy117 - } - goto yy108 - } - } -yy110: - l.cursor += 1 - { buf.WriteByte('"'); continue } -yy111: - l.cursor += 1 - { buf.WriteByte('\''); continue } -yy112: - l.cursor += 1 - { buf.WriteByte(0); continue } -yy113: - l.cursor += 1 - { buf.WriteByte('\\'); continue } -yy114: - l.cursor += 1 - { buf.WriteByte('\n'); continue } -yy115: - l.cursor += 1 - { buf.WriteByte('\r'); continue } -yy116: - l.cursor += 1 - { buf.WriteByte('\t'); continue } -yy117: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == '{') { - goto yy119 - } -yy118: - l.cursor = marker - goto yy108 -yy119: - l.cursor += 1 - yych = l.input[l.cursor] - if (yych == '}') { - goto yy118 - } - goto yy121 -yy120: - l.cursor += 1 - yych = l.input[l.cursor] -yy121: - if (yych <= 'F') { - if (yych <= '/') { - goto yy118 - } - if (yych <= '9') { - goto yy120 - } - if (yych <= '@') { - goto yy118 - } - goto yy120 - } else { - if (yych <= 'f') { - if (yych <= '`') { - goto yy118 - } - goto yy120 - } else { - if (yych != '}') { - goto yy118 - } - } - } - l.cursor += 1 - { - // Handle the hex digits between the braces - hexStr := string(l.input[marker+2:l.cursor-1]) // Strip off \u{ and } - if len(hexStr) % 2 != 0 { - hexStr = "0" + hexStr - } - var val []byte - val, err = hex.DecodeString(hexStr) - if err != nil { - pos = l.pos - lit = string(buf.Bytes()) - err = fmt.Errorf("%w: %s", ErrInvalidString, err) - return - } - buf.Write(val) - continue - } -} - - } -} diff --git a/internal/schema/parser/cedarschema.re b/internal/schema/parser/cedarschema.re deleted file mode 100644 index 3fe7fd72..00000000 --- a/internal/schema/parser/cedarschema.re +++ /dev/null @@ -1,148 +0,0 @@ -package parser - -import ( - "bytes" - "fmt" - "encoding/hex" - - "github.com/cedar-policy/cedar-go/internal/schema/token" -) - -func (l *Lexer) lex() (pos token.Position, tok token.Type, lit string, err error) { - for { - lit 
= "" - l.pos.Offset = l.cursor - l.pos.Column = l.cursor - l.lineStart + 1 - l.token = l.cursor - pos = l.pos - - /*!re2c - re2c:yyfill:enable = 0; - re2c:flags:nested-ifs = 1; - re2c:define:YYCTYPE = byte; - re2c:define:YYPEEK = "l.input[l.cursor]"; - re2c:define:YYSKIP = "l.cursor += 1"; - - end = [\x00]; - end { l.cursor -= 1; tok = token.EOF; return } - * { err = ErrUnrecognizedToken; return } - - // Whitespace and new lines - eol = ("\r\n" | "\n"); - eol { - l.pos.Line += 1 - l.pos.Column = 1 - l.lineStart = l.cursor - continue - } - - // Skip whitespace - [ \t]+ { - continue - } - - // Comments - "//" [^\r\n\x00]* { tok = token.COMMENT; lit = l.literal(); return } - - "namespace" { tok = token.NAMESPACE; lit = "namespace"; return } - "entity" { tok = token.ENTITY; lit = "entity"; return } - "action" { tok = token.ACTION; lit = "action"; return } - "type" { tok = token.TYPE; lit = "type"; return } - "in" { tok = token.IN; lit = "in"; return } - "tags" { tok = token.TAGS; lit = "tags"; return } - "appliesTo" { tok = token.APPLIES_TO; lit = "appliesTo"; return } - "principal" { tok = token.PRINCIPAL; lit = "principal"; return } - "resource" { tok = token.RESOURCE; lit = "resource"; return } - "context" { tok = token.CONTEXT; lit = "context"; return } - "enum" { tok = token.ENUM; lit = "enum"; return } - - // Operators and punctuation - "{" { tok = token.LEFTBRACE; lit = "{"; return } - "}" { tok = token.RIGHTBRACE; lit = "}"; return } - "[" { tok = token.LEFTBRACKET; lit = "["; return } - "]" { tok = token.RIGHTBRACKET; lit = "]"; return } - "<" { tok = token.LEFTANGLE; lit = "<"; return } - ">" { tok = token.RIGHTANGLE; lit = ">"; return } - ":" { tok = token.COLON; lit = ":"; return } - ";" { tok = token.SEMICOLON; lit = ";"; return } - "," { tok = token.COMMA; lit = ","; return } - "=" { tok = token.EQUALS; lit = "="; return } - "?" 
{ tok = token.QUESTION; lit = "?"; return } - "::" { tok = token.DOUBLECOLON; lit = "::"; return } - "@" { tok = token.AT; lit = "@"; return } - "(" { tok = token.LEFTPAREN; lit = "("; return } - ")" { tok = token.RIGHTPAREN; lit = ")"; return } - - // Strings - ["] { return l.lexString('"') } - - // Identifiers - id = [a-zA-Z_][a-zA-Z_0-9]*; - id { tok = token.IDENT; lit = l.literal(); return } - */ - } -} - -func (l *Lexer) lexString(quote byte) (pos token.Position, tok token.Type, lit string, err error) { - pos = l.pos - marker := 0 - var buf bytes.Buffer - buf.WriteByte(quote) - for { - var u byte - - /*!re2c - re2c:yyfill:enable = 0; - re2c:flags:nested-ifs = 1; - re2c:define:YYBACKUP = "marker = l.cursor"; - re2c:define:YYRESTORE = "l.cursor = marker"; - re2c:define:YYPEEK = "l.input[l.cursor]"; - re2c:define:YYSKIP = "l.cursor += 1"; - - * { err = ErrInvalidString; return } - [\x00] { - l.cursor -= 1 // make sure we don't overflow next lex call - err = ErrUnterminatedString - tok = token.EOF - pos = l.pos - return - } - [^\n\\] { - u = yych - buf.WriteByte(u) - if u == quote { - tok = token.STRING - pos = l.pos - lit = string(buf.Bytes()) - return - } - continue - } - // Unicode escape sequences - "\\u{" [0-9A-Fa-f]+ "}" { - // Handle the hex digits between the braces - hexStr := string(l.input[marker+2:l.cursor-1]) // Strip off \u{ and } - if len(hexStr) % 2 != 0 { - hexStr = "0" + hexStr - } - var val []byte - val, err = hex.DecodeString(hexStr) - if err != nil { - pos = l.pos - lit = string(buf.Bytes()) - err = fmt.Errorf("%w: %s", ErrInvalidString, err) - return - } - buf.Write(val) - continue - } - "\\0" { buf.WriteByte(0); continue } - "\\n" { buf.WriteByte('\n'); continue } - "\\r" { buf.WriteByte('\r'); continue } - "\\t" { buf.WriteByte('\t'); continue } - "\\\\" { buf.WriteByte('\\'); continue } - "\\'" { buf.WriteByte('\''); continue } - "\\\"" { buf.WriteByte('"'); continue } - */ - } -} diff --git a/internal/schema/parser/fuzz_test.go 
b/internal/schema/parser/fuzz_test.go deleted file mode 100644 index 9a70674e..00000000 --- a/internal/schema/parser/fuzz_test.go +++ /dev/null @@ -1,33 +0,0 @@ -package parser - -import ( - "io/fs" - "testing" -) - -// Make sure that we aren't panicing or looping forever for any byte sequences -// that are passed to the parser. -// Run with: -// -// go test -fuzz=FuzzParseSchema -fuzztime=60s github.com/cedar-policy/cedar-go/schema/internal/parser -func FuzzParseSchema(f *testing.F) { - f.Add([]byte("namespace Demo {}")) - f.Add([]byte("namespace D A0!00000000000000\"0")) - f.Add([]byte("namespace 0A0 action \"\" appliesTo 0 principal!0//0000")) - f.Add([]byte("namespace Demo { action Test, Test2; entity Test { id: String } }")) - f.Add(read("testdata/cases/example.cedarschema")) - f.Fuzz(func(t *testing.T, data []byte) { - schema, _ := ParseFile("", data) - if schema == nil { - t.Fatalf("Schema should never be nil") - } - }) -} - -func read(file string) []byte { - contents, err := fs.ReadFile(Testdata, file) - if err != nil { - panic(err) - } - return contents -} diff --git a/internal/schema/parser/lex.go b/internal/schema/parser/lex.go deleted file mode 100644 index 32e5b610..00000000 --- a/internal/schema/parser/lex.go +++ /dev/null @@ -1,98 +0,0 @@ -package parser - -// We use re2go to automatically generate a correct lexer. -// You can install re2go here: https://re2c.org/manual/manual_go.html -// Unless you are changing cedarschema.re, you should not need to rerun this re2go -// and regenerate cedarschema.re. 
-//go:generate re2go cedarschema.re -o cedarschema.go -i - -import ( - "errors" - - "github.com/cedar-policy/cedar-go/internal/schema/token" -) - -var ( - ErrUnrecognizedToken = errors.New("unrecognized token") - ErrInvalidString = errors.New("invalid string") - ErrUnterminatedString = errors.New("unterminated string") - ErrUnterminatedComment = errors.New("unterminated multiline comment") -) - -type TokenType int - -type Token struct { - Pos token.Position - Type token.Type - Lit string -} - -func (t Token) String() string { - if t.Lit != "" { - return t.Lit - } else { - return t.Type.String() - } -} - -type Lexer struct { - input []byte - cursor int // internal use by lexer - token int // marks the start of the currently scanned token - prevToken Token - - lineStart int // byte offset from start of last line - pos token.Position // marks position of the scanner - - Errors token.Errors -} - -func (l *Lexer) error(pos token.Position, err error) { - l.Errors = append(l.Errors, token.Error{Pos: pos, Err: err}) -} - -// NewLexer creates a new lexer for the given input. -// -// All tokens returned from NextToken will have the filename set to the given filename. -// If the input byte array is not null-terminated, a NULL character will automatically be added to the end. -// If the input contains null characters, the lexer will stop at the first one. -func NewLexer(filename string, input []byte) *Lexer { - if len(input) == 0 || input[len(input)-1] != '\x00' { - // termination char, faster copying than branching every time in the lexer - input = append(input, '\x00') - } - return &Lexer{input: input, pos: token.Position{Filename: filename, Line: 1}} -} - -// All will scan all tokens from input until it sees the EOF (NULL) token. 
-func (l *Lexer) All() []Token { - var tokens []Token - for { - tok := l.NextToken() - if tok.Type == token.EOF { - break - } - tokens = append(tokens, tok) - } - return tokens -} - -func (l *Lexer) literal() string { return string(l.input[l.token:l.cursor]) } - -// NextToken returns a single token from the input. -// -// If the returned token is EOF, then NextToken will always return EOF on subsequent calls. -func (l *Lexer) NextToken() (tok Token) { - pos, typ, lit, err := l.lex() - if err != nil { - l.error(pos, err) - } - - tok.Pos = pos - tok.Lit = lit - tok.Type = typ - if tok.Type != token.COMMENT { - l.prevToken = tok - } - return -} diff --git a/internal/schema/parser/lex_coverage_test.go b/internal/schema/parser/lex_coverage_test.go deleted file mode 100644 index 0bfe2c25..00000000 --- a/internal/schema/parser/lex_coverage_test.go +++ /dev/null @@ -1 +0,0 @@ -package parser diff --git a/internal/schema/parser/lex_test.go b/internal/schema/parser/lex_test.go deleted file mode 100644 index 096ba44b..00000000 --- a/internal/schema/parser/lex_test.go +++ /dev/null @@ -1,178 +0,0 @@ -package parser - -import ( - "fmt" - "io/fs" - "reflect" - "strings" - "testing" - - "github.com/cedar-policy/cedar-go/internal/schema/token" -) - -func TestLexer(t *testing.T) { - tests := []string{ - "namespace Demo {}", - } - - for _, test := range tests { - lex := NewLexer("test", []byte(test)) - lex.All() - next := lex.NextToken() // make sure we can call this as many times as we want and it will always return EOF - if next.Type != token.EOF { - t.Errorf("Expected EOF, got %v", next) - } - if len(lex.Errors) > 0 { - t.Errorf("Errors: %v", lex.Errors) - } - } -} - -func TestLexerExample(t *testing.T) { - src := `namespace Demo { - entity User { - "name\0\n\r\t\"\'_\u{1}\u{001f}": id, - }; - // Comment - type id = String; -}` - lex := NewLexer("", []byte(src)) - tokens := lex.All() - if len(lex.Errors) > 0 { - t.Errorf("Errors: %v", lex.Errors) - } - - want := []string{ - 
"NAMESPACE :1:1 namespace", - "IDENT :1:11 Demo", - "LEFTBRACE :1:16 {", - "ENTITY :2:3 entity", - "IDENT :2:10 User", - "LEFTBRACE :2:15 {", - "STRING :3:5 \"name\x00\n\r\t\"'_\x01\x00\x1f\"", - "COLON :3:37 :", - "IDENT :3:39 id", - "COMMA :3:41 ,", - "RIGHTBRACE :4:3 }", - "SEMICOLON :4:4 ;", - "COMMENT :5:3 // Comment", - "TYPE :6:3 type", - "IDENT :6:8 id", - "EQUALS :6:11 =", - "IDENT :6:13 String", - "SEMICOLON :6:19 ;", - "RIGHTBRACE :7:1 }", - } - var got []string - for _, tok := range tokens { - got = append(got, fmt.Sprintf("%s %s %s", tok.Type.String(), fmtpos(tok.Pos), tok.String())) - } - - if !reflect.DeepEqual(got, want) { - t.Logf("want: %v", strings.Join(want, "\n")) - t.Logf(" got: %v", strings.Join(got, "\n")) - t.Fail() - } -} - -func fmtpos(pos token.Position) string { - return fmt.Sprintf("%s:%d:%d", pos.Filename, pos.Line, pos.Column) -} - -func TestLexerOk(t *testing.T) { - files, err := fs.ReadDir(Testdata, "testdata/lex") - if err != nil { - t.Fatalf("Failed to read test data: %v", err) - } - for _, file := range files { - t.Run(file.Name(), func(t *testing.T) { - data, err := fs.ReadFile(Testdata, "testdata/lex/"+file.Name()) - if err != nil { - t.Fatalf("Failed to read test data: %v", err) - } - l := NewLexer("", data) - l.All() - if len(l.Errors) > 0 { - t.Errorf("Errors: %v", l.Errors) - } - }) - } -} - -func TestLexerNoPanic(t *testing.T) { - cases := []string{ - // Basic tokens - "{}[]<>?=,;::", - // Keywords - "action context entity type namespace principal resource tags in applies appliesTo", - // Identifiers with various characters - "abc ABC _123 A_b_C", - // Strings with escape sequences - `"hello\"world"`, - `"escape sequences: \\ \' \? 
\a \b \f \n \r \t \v"`, - // Comments - "// this is a comment\n", - "// comment with special chars: !@#$%^&*()\n", - // Whitespace handling - " \t \n\r\n", - // Weird identifiers - "____azxkljcqmoqiwerjqflkjazxklmzlkmdrfoiwqerjlakdsfsazljfdi", - // Edge cases - "\r\n", // Carriage return + newline - "\"unterminated", // Unterminated string - "\\", // Single backslash - "@", // Invalid character - "\"\\z\"", // Invalid escape sequence - "\"\\\"", // Unterminated escaped string - `"simple string"`, - `"string with \"escaped\" quotes"`, - `"string with escape sequences \\ \' \? \a \b \f \n \r \t \v"`, - `"string with -newline"`, // Invalid - should error - `"unterminated`, // Invalid - should error - `"invalid escape \z"`, // Invalid escape sequence - } - - for _, input := range cases { - t.Run(input, func(t *testing.T) { - // Recover from any panics to ensure test continues - defer func() { - if r := recover(); r != nil { - t.Errorf("Lexer panicked on input: %q\nPanic: %v", input, r) - } - }() - - l := NewLexer("", []byte(input)) - for { - _, tok, _, _ := l.lex() - if tok == token.EOF { // EOF - break - } - } - }) - } -} - -func FuzzLexer(f *testing.F) { - // Add some initial seeds - seeds := []string{ - "namespace Demo {}", - "\"hello\\\"world\"", - "// comment\n", - "\r\n", - } - - for _, seed := range seeds { - f.Add([]byte(seed)) - } - - f.Fuzz(func(_ *testing.T, data []byte) { - l := NewLexer("", data) - for { - _, tok, _, _ := l.lex() - if tok == token.EOF { - break - } - } - }) -} diff --git a/internal/schema/parser/parser.go b/internal/schema/parser/parser.go deleted file mode 100644 index 5521aa0c..00000000 --- a/internal/schema/parser/parser.go +++ /dev/null @@ -1,675 +0,0 @@ -// Package schema/parser defines the parser for Cedar human-readable schema files. 
-// -// The grammar is defined here: https://docs.cedarpolicy.com/schema/human-readable-schema-grammar.html -package parser - -import ( - "errors" - "fmt" - "slices" - - "github.com/cedar-policy/cedar-go/internal/schema/ast" - "github.com/cedar-policy/cedar-go/internal/schema/token" -) - -const maxErrors = 10 - -var ( - ErrBailout = errors.New("too many errors") -) - -// ParseFile parses src bytes as human-readable Cedar schema and returns the AST. Parsing the human readable format -// preserves comments, ordering, etc... -// -// Parsing does not validate or ensure the schema is perfectly valid. For example, if the schema refers -// to entities that don't exist, the parsing will still be successful. Filename given will accompany all -// errors returned. -// -// To see the list of errors, you can use errors.Unwrap, for example: -// -// _, err := ParseFile(...) -// var errs []error -// errors.As(err, &errs) -func ParseFile(filename string, src []byte) (schema *ast.Schema, err error) { - lex := NewLexer(filename, src) - p := &Parser{lex: lex} - defer func() { - errs := lex.Errors - errs = append(errs, p.Errors...) 
- if r := recover(); r != nil { - must(r == ErrBailout, r) - } - if len(errs) > 0 { - errs.Sort() - err = errs - } - }() - schema = p.parseSchema() - return -} - -type Parser struct { - lex *Lexer - nextTok *Token // next token to be consumed, or nil if none consumed yet - - Errors token.Errors -} - -func (p *Parser) error(pos token.Position, err error) { - n := len(p.Errors) - if n > 0 && p.Errors[n-1].(token.Error).Pos.Line == pos.Line { - return // discard - likely a spurious error - } - if len(p.Errors) > maxErrors { - panic(ErrBailout) - } - p.Errors = append(p.Errors, token.Error{Pos: pos, Err: err}) -} - -func (p *Parser) errorf(pos token.Position, format string, args ...any) { - p.error(pos, fmt.Errorf(format, args...)) -} - -func (p *Parser) advance(to map[token.Type]bool) (tok Token) { - for p.peek().Type != token.EOF && !to[p.peek().Type] { - tok = p.eat() - } - p.eat() // eat past the token we stopped at - return -} - -func (p *Parser) peek() (tok Token) { - if p.nextTok != nil { - return *p.nextTok - } - tok = p.eat() - p.nextTok = &tok - return tok -} - -func (p *Parser) eat() (tok Token) { - if p.nextTok != nil { - tok = *p.nextTok - p.nextTok = nil - } else { - tok = p.lex.NextToken() - } - return -} - -func (p *Parser) eatOnly(tokenType token.Type, errfmt string, args ...any) (Token, bool) { - tok := p.eat() - if tok.Type != tokenType { - errmsg := fmt.Sprintf(errfmt, args...) 
- p.error(tok.Pos, fmt.Errorf("%s, got %s", errmsg, tok.String())) - return tok, false - } - return tok, true -} - -func (p *Parser) matches(want ...token.Type) bool { - return slices.Contains(want, p.peek().Type) -} - -func (p *Parser) parseSchema() *ast.Schema { - schema := new(ast.Schema) - var annotations []*ast.Annotation - var comments []*ast.Comment - for p.peek().Type != token.EOF { - t := p.peek() - switch t.Type { - case token.NAMESPACE: - namespace := p.parseNamespace() - if len(annotations) > 0 { - namespace.Annotations = annotations - annotations = nil - } - if len(comments) > 0 { - namespace.Before = comments - comments = nil - } - schema.Decls = append(schema.Decls, namespace) - case token.TYPE: - typ := p.parseTypeDecl() - if len(annotations) > 0 { - typ.Annotations = annotations - annotations = nil - } - if len(comments) > 0 { - typ.Before = comments - comments = nil - } - schema.Decls = append(schema.Decls, typ) - case token.ACTION: - action := p.parseAction() - if len(annotations) > 0 { - action.Annotations = annotations - annotations = nil - } - if len(comments) > 0 { - action.Before = comments - comments = nil - } - schema.Decls = append(schema.Decls, action) - case token.ENTITY: - entity := p.parseEntityDecl() - if len(annotations) > 0 { - entity.Annotations = annotations - annotations = nil - } - if len(comments) > 0 { - entity.Before = comments - comments = nil - } - schema.Decls = append(schema.Decls, entity) - case token.COMMENT: - comments = append(comments, p.parseComment()) - case token.AT: - annotation := p.parseAnnotation() - if len(comments) > 0 { - annotation.Before = comments - comments = nil - } - annotations = append(annotations, annotation) - default: - p.error(t.Pos, fmt.Errorf("unexpected token %s", t.String())) - p.advance(map[token.Type]bool{token.SEMICOLON: true}) - } - } - if len(annotations) > 0 { - p.error(p.peek().Pos, errors.New("bare annotation(s); expected namespace, action, entity, or type")) - } - if len(comments) 
> 0 { - schema.Remaining = comments - } - return schema -} - -func (p *Parser) parseAnnotation() *ast.Annotation { - annotation := new(ast.Annotation) - at, _ := p.eatOnly(token.AT, "expected @") - annotation.At = at.Pos - annotation.Key = p.parseIdent() - - lastLine := at.Pos.Line - if p.matches(token.LEFTPAREN) { - annotation.LeftParen = p.eat().Pos - value, _ := p.eatOnly(token.STRING, "expected STR") - annotation.Value = &ast.String{QuotedVal: value.Lit, Tok: value.Pos} - rp, _ := p.eatOnly(token.RIGHTPAREN, "expected ')'") - annotation.RightParen = rp.Pos - lastLine = rp.Pos.Line - } - if p.matches(token.COMMENT) && p.peek().Pos.Line == lastLine { - annotation.Inline = p.parseComment() - } - - return annotation -} - -func (p *Parser) parseNamespace() (namespace *ast.Namespace) { - namespace = new(ast.Namespace) - nptok, _ := p.eatOnly(token.NAMESPACE, "expected namespace keyword") - namespace.NamespaceTok = nptok.Pos - namespace.Name = p.parsePath() - p.eatOnly(token.LEFTBRACE, "expected { after namespace path") - if p.peek().Type == token.COMMENT && p.peek().Pos.Line == nptok.Pos.Line { - namespace.Inline = p.parseComment() - } - var annotations []*ast.Annotation - var comments []*ast.Comment - for !p.matches(token.RIGHTBRACE, token.EOF) { - if p.matches(token.ENTITY) { - entity := p.parseEntityDecl() - if len(annotations) > 0 { - entity.Annotations = annotations - annotations = nil - } - if len(comments) > 0 { - entity.Before = comments - comments = nil - } - namespace.Decls = append(namespace.Decls, entity) - } else if p.matches(token.ACTION) { - action := p.parseAction() - if len(annotations) > 0 { - action.Annotations = annotations - annotations = nil - } - if len(comments) > 0 { - action.Before = comments - comments = nil - } - namespace.Decls = append(namespace.Decls, action) - } else if p.matches(token.TYPE) { - typ := p.parseTypeDecl() - if len(annotations) > 0 { - typ.Annotations = annotations - annotations = nil - } - if len(comments) > 0 { - 
typ.Before = comments - comments = nil - } - namespace.Decls = append(namespace.Decls, typ) - } else if p.matches(token.COMMENT) { - comments = append(comments, p.parseComment()) - } else if p.matches(token.AT) { - annotation := p.parseAnnotation() - if len(comments) > 0 { - annotation.Before = comments - comments = nil - } - annotations = append(annotations, annotation) - } else { - p.errorf(p.peek().Pos, "unexpected token %s, expected action, entity, or type", p.peek().Type) - p.advance(map[token.Type]bool{token.SEMICOLON: true}) - } - } - if len(annotations) > 0 { - p.error(p.peek().Pos, errors.New("bare annotation(s); expected action, entity, or type")) - } - if len(comments) > 0 { - namespace.Remaining = comments - } - - closebrace, _ := p.eatOnly(token.RIGHTBRACE, "expected }") - namespace.CloseBrace = closebrace.Pos - if p.matches(token.COMMENT) && p.peek().Pos.Line == closebrace.Pos.Line { - namespace.Footer = p.parseComment() - } - return namespace -} - -var reserved = map[string]struct{}{ - "Bool": {}, - "Boolean": {}, - "Long": {}, - "String": {}, - "Set": {}, - "Entity": {}, - "Extension": {}, - "Record": {}, -} - -func (p *Parser) parseAction() (action *ast.Action) { - action = new(ast.Action) - actionTok, _ := p.eatOnly(token.ACTION, "expected action keyword") - action.ActionTok = actionTok.Pos - action.Names = append(action.Names, p.parseName()) - for p.matches(token.COMMA) { - p.eat() - action.Names = append(action.Names, p.parseName()) - } - if p.matches(token.IN) { - p.eat() - action.In = p.parseRefOrTypes() - } - if p.matches(token.APPLIESTO) { - appliesToTok := p.eat() - action.AppliesTo = p.parseAppliesTo() - action.AppliesTo.AppliesToTok = appliesToTok.Pos - } - semi, _ := p.eatOnly(token.SEMICOLON, "expected ;") - if p.matches(token.COMMENT) && p.peek().Pos.Line == semi.Pos.Line { - action.Footer = p.parseComment() - } - action.Semicolon = semi.Pos - return action -} - -func (p *Parser) parseAppliesTo() (appliesTo *ast.AppliesTo) { - 
appliesTo = new(ast.AppliesTo) - lbrace, _ := p.eatOnly(token.LEFTBRACE, "expected {") - if p.matches(token.COMMENT) && p.peek().Pos.Line == lbrace.Pos.Line { - appliesTo.Inline = p.parseComment() - } - var comments []*ast.Comment -loop: - for !p.matches(token.RIGHTBRACE, token.EOF) { - n := p.peek() - var nodeComments *ast.NodeComments - var node ast.Node - switch n.Type { - case token.PRINCIPAL: - p.eat() - p.eatOnly(token.COLON, "expected :") - appliesTo.Principal = p.parseEntOrTypes() - nodeComments = &appliesTo.PrincipalComments - node = appliesTo.Principal[len(appliesTo.Principal)-1] - case token.RESOURCE: - p.eat() - p.eatOnly(token.COLON, "expected :") - appliesTo.Resource = p.parseEntOrTypes() - nodeComments = &appliesTo.ResourceComments - node = appliesTo.Resource[len(appliesTo.Resource)-1] - case token.CONTEXT: - p.eat() - p.eatOnly(token.COLON, "expected :") - if p.peek().Type == token.LEFTBRACE { - appliesTo.ContextRecord = p.parseRecType() - node = appliesTo.ContextRecord - } else { - appliesTo.ContextPath = p.parsePath() - node = appliesTo.ContextPath - } - nodeComments = &appliesTo.ContextComments - case token.COMMENT: - comments = append(comments, p.parseComment()) - continue - default: - p.errorf(p.peek().Pos, "expected principal, resource, or context") - p.advance(map[token.Type]bool{token.RIGHTBRACE: true}) - break loop - } - - if nodeComments != nil && len(comments) > 0 { - nodeComments.Before = comments - comments = nil - } - - var comma token.Type - if p.matches(token.COMMA) { - comma = p.eat().Type - } - - if p.matches(token.COMMENT) { - if node != nil { - if p.peek().Pos.Line == node.End().Line { - nodeComments.Inline = p.parseComment() - } - } - } - - for p.matches(token.COMMENT) { // parse the rest if they exist - comments = append(comments, p.parseComment()) - } - - if !p.matches(token.RIGHTBRACE) && comma != token.COMMA { - // We're missing a comma and the appliesTo block isn't closed - p.errorf(p.peek().Pos, "expected , or }") - 
p.advance(map[token.Type]bool{token.ACTION: true, token.ENTITY: true, token.TYPE: true}) - break - } - } - if len(comments) > 0 { - appliesTo.Remaining = comments - } - closer, _ := p.eatOnly(token.RIGHTBRACE, "expected }") - appliesTo.CloseBrace = closer.Pos - return appliesTo -} - -func (p *Parser) parseEntityDecl() (entity *ast.Entity) { - entity = new(ast.Entity) - tok, _ := p.eatOnly(token.ENTITY, "expected entity keyword") - entity.EntityTok = tok.Pos - entity.Names = append(entity.Names, p.parseIdent()) - for p.matches(token.COMMA) { - p.eat() - entity.Names = append(entity.Names, p.parseIdent()) - } - - if p.matches(token.ENUM) { - p.eat() - entity.Enum = p.parseEntEnumValues() - } else { - if p.matches(token.IN) { - p.eat() - entity.In = p.parseEntOrTypes() - } - if p.matches(token.EQUALS) { - entity.EqTok = p.eat().Pos - } - if p.matches(token.LEFTBRACE) { - entity.Shape = p.parseRecType() - } - if p.matches(token.TAGS) { - p.eat() - entity.Tags = p.parseType() - } - } - semi, _ := p.eatOnly(token.SEMICOLON, "expected ;") - entity.Semicolon = semi.Pos - if p.matches(token.COMMENT) && p.peek().Pos.Line == semi.Pos.Line { - entity.Footer = p.parseComment() - } - return entity -} - -func (p *Parser) parseEntEnumValues() (values []*ast.String) { - p.eatOnly(token.LEFTBRACKET, "expected [") - str := p.eat() - values = append(values, &ast.String{QuotedVal: str.Lit, Tok: str.Pos}) - for !p.matches(token.RIGHTBRACKET, token.EOF) { - if p.matches(token.COMMA) { - p.eat() - str = p.eat() - values = append(values, &ast.String{QuotedVal: str.Lit, Tok: str.Pos}) - } else if !p.matches(token.RIGHTBRACKET) { - p.errorf(p.peek().Pos, "expected , or ]") - p.advance(map[token.Type]bool{token.RIGHTBRACKET: true}) - break - } - } - p.eatOnly(token.RIGHTBRACKET, "expected ]") - return values -} - -func (p *Parser) parseEntOrTypes() (types []*ast.Path) { - if p.matches(token.LEFTBRACKET) { - p.eat() - for !p.matches(token.RIGHTBRACKET, token.EOF) { - types = append(types, 
p.parsePath()) - if p.matches(token.COMMA) { - p.eat() - } else if !p.matches(token.RIGHTBRACKET) { - p.errorf(p.peek().Pos, "expected , or ]") - p.advance(map[token.Type]bool{token.RIGHTBRACKET: true}) - break - } - } - p.eatOnly(token.RIGHTBRACKET, "expected ]") - } else { - types = append(types, p.parsePath()) - } - return types -} - -func (p *Parser) parseRefOrTypes() (types []*ast.Ref) { - if p.matches(token.LEFTBRACKET) { - p.eat() - for !p.matches(token.RIGHTBRACKET, token.EOF) { - types = append(types, p.parseRef()) - if p.matches(token.COMMA) { - p.eat() - } else if !p.matches(token.RIGHTBRACKET) { - p.errorf(p.peek().Pos, "expected , or ]") - p.advance(map[token.Type]bool{token.RIGHTBRACKET: true}) - break - } - } - p.eatOnly(token.RIGHTBRACKET, "expected ]") - } else { - types = append(types, p.parseRef()) - } - return types -} - -func (p *Parser) parseTypeDecl() (typ *ast.CommonTypeDecl) { - typ = new(ast.CommonTypeDecl) - typeTok, _ := p.eatOnly(token.TYPE, "expected type keyword") - typ.TypeTok = typeTok.Pos - typ.Name = p.parseIdent() - if _, ok := reserved[typ.Name.Value]; ok { - p.errorf(p.peek().Pos, "reserved typename %s", typ.Name.Value) - p.advance(map[token.Type]bool{token.SEMICOLON: true}) - return typ - } - p.eatOnly(token.EQUALS, "expected = after typename") - typ.Value = p.parseType() - semi, _ := p.eatOnly(token.SEMICOLON, "expected ;") - if p.matches(token.COMMENT) && p.peek().Pos.Line == semi.Pos.Line { - typ.Footer = p.parseComment() - } - return typ -} - -func (p *Parser) parseType() (typ ast.Type) { - if p.matches(token.LEFTBRACE) { - typ = p.parseRecType() - } else if p.matches(validIdents...) 
{ - if p.peek().Lit != "Set" { - typ = p.parsePath() - } else { - setTok := p.eat() - p.eatOnly(token.LEFTANGLE, "expected < after Set") - element := p.parseType() - rangle, _ := p.eatOnly(token.RIGHTANGLE, "expected >") - typ = &ast.SetType{SetToken: setTok.Pos, Element: element, RightAngle: rangle.Pos} - } - } else { - p.errorf(p.peek().Pos, "expected type, got %s", p.peek().String()) - p.advance(map[token.Type]bool{token.SEMICOLON: true}) - } - return typ -} - -func (p *Parser) parseRecType() (typ *ast.RecordType) { - typ = new(ast.RecordType) - lbrace, _ := p.eatOnly(token.LEFTBRACE, "expected {") - typ.LeftCurly = lbrace.Pos - if p.matches(token.COMMENT) && p.peek().Pos.Line == lbrace.Pos.Line { - typ.Inner = p.parseComment() - } - var annotations []*ast.Annotation - var comments []*ast.Comment - for !p.matches(token.RIGHTBRACE, token.EOF) { - if p.matches(token.COMMENT) { - comments = append(comments, p.parseComment()) - continue - } - if p.matches(token.AT) { - annotation := p.parseAnnotation() - if len(comments) > 0 { - annotation.Before = comments - comments = nil - } - annotations = append(annotations, annotation) - continue - } - attr := p.parseAttrDecl() - if len(annotations) > 0 { - attr.Annotations = annotations - annotations = nil - } - if len(comments) > 0 { - attr.Before = comments - comments = nil - } - typ.Attributes = append(typ.Attributes, attr) - if p.matches(token.COMMA) { - attr.Comma = p.eat().Pos - } else if !p.matches(token.RIGHTBRACE) { - p.errorf(p.peek().Pos, "expected , or }") - p.advance(map[token.Type]bool{token.RIGHTBRACE: true}) - break - } - if p.matches(token.COMMENT) && p.peek().Pos.Line == attr.End().Line { - typ.Attributes[len(typ.Attributes)-1].Inline = p.parseComment() - } - } - if len(comments) > 0 { - typ.Remaining = comments - } - rbrace, _ := p.eatOnly(token.RIGHTBRACE, "expected }") - typ.RightCurly = rbrace.Pos - return typ -} - -func (p *Parser) parseAttrDecl() (attr *ast.Attribute) { - attr = new(ast.Attribute) - 
attr.Key = p.parseName() - if p.matches(token.QUESTION) { - p.eat() - attr.IsRequired = false - } else { - attr.IsRequired = true - } - p.eatOnly(token.COLON, "expected :") - attr.Type = p.parseType() - return attr -} - -func (p *Parser) parsePath() *ast.Path { - result := new(ast.Path) - ident := p.parseIdent() - result.Parts = append(result.Parts, ident) - for p.matches(token.DOUBLECOLON) { - p.eat() - ident = p.parseIdent() - result.Parts = append(result.Parts, ident) - } - return result -} - -func (p *Parser) parseRef() *ast.Ref { - result := new(ast.Ref) - first := p.parseName() - if s, ok := first.(*ast.String); ok { - result.Name = s - return result - } else { - result.Namespace = append(result.Namespace, first.(*ast.Ident)) - } - for p.matches(token.DOUBLECOLON) { - p.eat() - next := p.parseName() - if s, ok := next.(*ast.String); ok { - result.Name = s - return result - } - result.Namespace = append(result.Namespace, next.(*ast.Ident)) - } - if len(result.Namespace) > 0 { - result.Name = result.Namespace[len(result.Namespace)-1] - result.Namespace = result.Namespace[:len(result.Namespace)-1] - } - return result -} - -func (p *Parser) parseName() ast.Name { - if p.matches(token.STRING) { - str := p.eat() - return &ast.String{QuotedVal: str.Lit, Tok: str.Pos} - } else if p.matches(validIdents...) 
{ - ident := p.eat() - return &ast.Ident{Value: ident.Lit, IdentTok: ident.Pos} - } else { - got := p.eat() - p.errorf(got.Pos, "expected name (identifier or string)") - return &ast.Ident{Value: got.Lit, IdentTok: got.Pos} - } -} - -// Keywords are valid identifiers -var validIdents = append(token.AllKeywords, token.IDENT) - -func (p *Parser) parseIdent() *ast.Ident { - tok := p.eat() - if !slices.Contains(validIdents, tok.Type) { - p.error(tok.Pos, fmt.Errorf("expected identifier, got %s", tok.String())) - } - return &ast.Ident{Value: tok.Lit, IdentTok: tok.Pos} -} - -func (p *Parser) parseComment() *ast.Comment { - tok, _ := p.eatOnly(token.COMMENT, "expected comment") - return &ast.Comment{SlashTok: tok.Pos, Value: tok.Lit} -} - -func must(b bool, arg any) { - if !b { - panic(arg) - } -} diff --git a/internal/schema/parser/parser_error_test.go b/internal/schema/parser/parser_error_test.go deleted file mode 100644 index 7eccea98..00000000 --- a/internal/schema/parser/parser_error_test.go +++ /dev/null @@ -1,260 +0,0 @@ -package parser - -import ( - "errors" - "strings" - "testing" - - "github.com/cedar-policy/cedar-go/internal/schema/token" - "github.com/cedar-policy/cedar-go/internal/testutil" -) - -func TestParserErrors(t *testing.T) { - tests := []struct { - name string - input string - wantErrs []string // Substrings that should be present in error messages - }{ - { - name: "invalid token at schema level", - input: ` - foo bar; - `, - wantErrs: []string{"unexpected token foo"}, - }, - { - name: "missing comma in entity list", - input: ` - entity Foo Bar Baz; - `, - wantErrs: []string{"expected ;, got Bar"}, - }, - { - name: "missing comma in entity enum list", - input: ` - entity Foo enum ["Bar" "Baz"]; - `, - wantErrs: []string{"expected , or ]"}, - }, - { - name: "reserved type name", - input: ` - type String = { - foo: String - }; - `, - wantErrs: []string{"reserved typename String"}, - }, - { - name: "missing colon in applies to", - input: ` - action 
DoSomething appliesTo { - principal [User] - resource: [Resource]; - }; - `, - wantErrs: []string{"expected :"}, - }, - { - name: "invalid applies to field", - input: ` - action DoSomething appliesTo { - foo: [User]; - }; - `, - wantErrs: []string{"expected principal, resource, or context"}, - }, - { - name: "missing comma in applies to", - input: ` - action DoSomething appliesTo { - principal: [User] - resource: [Resource]; - }; - `, - wantErrs: []string{"expected , or }"}, - }, - { - name: "missing closing brace in record", - input: ` - type Foo = { - bar: String, - baz: Bool - `, - wantErrs: []string{"expected , or }"}, - }, - { - name: "missing comma in record", - input: ` - type Foo = { - bar: String - baz: Bool - }; - `, - wantErrs: []string{"expected , or }"}, - }, - { - name: "invalid Set type", - input: ` - type Foo = Set<>; - `, - wantErrs: []string{"expected type"}, - }, - { - name: "missing closing angle bracket", - input: ` - type Foo = Set"}, - }, - { - name: "missing type after equals", - input: ` - type Foo = ; - `, - wantErrs: []string{"expected type"}, - }, - { - name: "missing semicolon after declaration", - input: ` - type Foo = String - type Bar = Bool; - `, - wantErrs: []string{"expected ;"}, - }, - { - name: "invalid path separator", - input: ` - namespace Foo:Bar { - } - `, - wantErrs: []string{"expected { after namespace path, got :"}, - }, - { - name: "missing comma in type reference list", - input: ` - action DoSomething in [User::Path::To; - `, - wantErrs: []string{"expected , or ]"}, - }, - { - name: "missing closing bracket in principal list", - input: ` - action DoSomething appliesTo { - principal: [User::Path::To; - } - `, - wantErrs: []string{"expected , or ]"}, - }, - { - name: "invalid token after double colon in path", - input: ` - entity User in [User::123::Entity]; - `, - wantErrs: []string{"expected identifier, got INVALID"}, - }, - { - name: "invalid token where name expected", - input: ` - action 123; - `, - wantErrs: 
[]string{"expected name (identifier or string)"}, - }, - { - name: "bare annotation", - input: ` - @key("value") - `, - wantErrs: []string{"bare annotation(s); expected namespace, action, entity, or type"}, - }, - { - name: "bare annotation in namespace", - input: ` - namespace Foo { - @key("value") - }; - `, - wantErrs: []string{"bare annotation(s); expected action, entity, or type"}, - }, - { - name: "too many errors causing bailout", - input: ` - type A ~ Other; - type B ~ Other; - type C ~ Other; - type D ~ Other; - type E ~ Other; - type F ~ Other; - type G ~ Other; - type H ~ Other; - type I ~ Other; - type J ~ Other; - type K ~ Other; - type L ~ Other; - type M ~ Other; - type N ~ Other; - type O ~ Other; - type P ~ Other; - type Q ~ Other; - `, - wantErrs: []string{"expected = after typename"}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - _, err := ParseFile("test.cedar", []byte(tt.input)) - if err == nil { - t.Fatal("expected error, got nil") - } - - var errs token.Errors - if !errors.As(err, &errs) { - t.Fatalf("expected []error, got %T", err) - } - - // Convert errors to strings for easier matching - var gotErrs []string - for _, err := range errs { - if terr, ok := err.(token.Error); ok { - gotErrs = append(gotErrs, terr.Err.Error()) - } else { - gotErrs = append(gotErrs, err.Error()) - } - } - - // Check that each expected error substring is present - for _, want := range tt.wantErrs { - found := false - for _, got := range gotErrs { - if strings.Contains(got, want) { - found = true - break - } - } - if !found { - t.Errorf("expected error containing %q, got errors: %v", want, gotErrs) - } - } - }) - } -} - -func Test_must(t *testing.T) { - t.Parallel() - t.Run("true", func(t *testing.T) { - t.Parallel() - must(true, "nothing should happen") - }) - t.Run("false", func(t *testing.T) { - t.Parallel() - defer func() { - if r := recover(); r != nil { - testutil.Equals(t, r, "nothing should happen") - } else { - 
t.Errorf("expected panic but no panic recovered") - } - }() - must(false, "nothing should happen") - }) -} diff --git a/internal/schema/parser/parser_test.go b/internal/schema/parser/parser_test.go deleted file mode 100644 index b4cfa900..00000000 --- a/internal/schema/parser/parser_test.go +++ /dev/null @@ -1,124 +0,0 @@ -// Needs to be in a dedicated test package to avoid circular dependency with format -package parser_test - -import ( - "bytes" - "io/fs" - "strings" - "testing" - - "github.com/google/go-cmp/cmp" - - "github.com/cedar-policy/cedar-go/internal/schema/ast" - "github.com/cedar-policy/cedar-go/internal/schema/parser" - "github.com/cedar-policy/cedar-go/internal/testutil" -) - -func TestParseSimple(t *testing.T) { - tests := []string{ - // Empty namespace - `namespace Demo { -} -`, - // Simple namespace with single entity - `namespace Demo { - entity User in UserGroup = { - name: Demo::id, - "department": UserGroup, - }; -} -`, - // Anonymous namespace references - `@annotation("entity") -// Entity example -entity User; -@annotation("type") -// Type example -type Id = String; -@annotation("action") -// Action example -action run; -namespace NS { - // empty -} // footer -`, - } - - for _, test := range tests { - schema, err := parser.ParseFile("", []byte(test)) - if err != nil { - t.Fatalf("Error parsing schema: %v", err) - } - var got bytes.Buffer - err = ast.Format(schema, &got) // tab format to match Go - testutil.OK(t, err) - diff := cmp.Diff(got.String(), test) - testutil.FatalIf(t, diff != "", "mismatch -want +got:\n%v", diff) - } -} - -func TestParserHasErrors(t *testing.T) { - tests := []struct { - name string - input string - want string - }{ - { - name: "missing closing bracket", - input: `namespace PhotoFlash {`, - want: `:1:23: expected }, got EOF`, - }, - { - name: "missing entity name", - input: `namespace PhotoFlash { entity { "department": String }; }`, - want: `:1:31: expected identifier, got {`, - }, - } - - for _, test := range tests 
{ - _, err := parser.ParseFile("", []byte(test.input)) - if err == nil { - t.Fatalf("Expected error parsing schema, got none") - } - if err.Error() != test.want { - t.Errorf("Expected error %q, got %q", test.want, err.Error()) - } - } -} - -func TestRealFiles(t *testing.T) { - files, err := fs.ReadDir(parser.Testdata, "testdata/cases") - if err != nil { - t.Fatalf("Error reading testdata: %v", err) - } - - for _, file := range files { - if file.IsDir() { - continue - } - - if !strings.HasSuffix(file.Name(), ".cedarschema") { - continue - } - - t.Run(file.Name(), func(t *testing.T) { - input, err := fs.ReadFile(parser.Testdata, "testdata/cases/"+file.Name()) - if err != nil { - t.Fatalf("Error reading example schema: %v", err) - } - schema, err := parser.ParseFile("", input) - if err != nil { - t.Fatalf("Error parsing schema: %v", err) - } - - var gotBytes bytes.Buffer - err = ast.Format(schema, &gotBytes) - testutil.OK(t, err) - - got := strings.TrimSpace(gotBytes.String()) - testutil.OK(t, err) - diff := cmp.Diff(got, strings.TrimSpace(string(input))) - testutil.FatalIf(t, diff != "", "mismatch -want +got:\n%v", diff) - }) - } -} diff --git a/internal/schema/parser/testdata.go b/internal/schema/parser/testdata.go deleted file mode 100644 index b191ad71..00000000 --- a/internal/schema/parser/testdata.go +++ /dev/null @@ -1,6 +0,0 @@ -package parser - -import "embed" - -//go:embed testdata -var Testdata embed.FS diff --git a/internal/schema/parser/testdata/cases/example.cedarschema b/internal/schema/parser/testdata/cases/example.cedarschema deleted file mode 100644 index 89e4d820..00000000 --- a/internal/schema/parser/testdata/cases/example.cedarschema +++ /dev/null @@ -1,92 +0,0 @@ -// Test case to make sure that all types of Cedar elements are parseable -// and that the formatter doesn't mess anything up. 
-@annotation("namespace") // inline annotation comment -// comment between annotation and namespace -namespace PhotoFlash { // inline namespace comment - @annotation("entity") - // Comment explaining entities User and User2 - // it's a two line comment - entity User, User2 in UserGroup = { - "department": String, - "jobLevel": Long, - } tags String; - entity UserGroup; // inline comment entity - entity Album in Album = { // inline comment rec type - "account": Account, - // record attribute comment - "private": Bool, // record inline comment - // footer comment rec type - // multiple lines - }; // footer comment entity - entity Account { - "admins"?: Set, // attribute comment - "owner": User, - }; - entity Empty { - }; - entity Photo in [Album, PhotoFlassh::Other::Album] = { - "account": Account, - "private": Bool, - }; - entity PhotoFormat enum ["jpg", "gif", "png"]; - entity AccountType enum ["user"]; - // annotation header comment - @annotation("type") - @in // keywords are valid identifiers - // type header comment - type LongAlias = Long; // type footer comment - @annotation1("action") - @annotation2 // inline annotation comment - // action header comment - action "uploadPhoto" appliesTo { // inline action comment - // principal comment before - principal: User, // principal comment inline - // resource comment before - // multi line - resource: [Album, Photo], // resource comment inline - // context comment before - context: { // context comment inline - // comment before annotation - @annotation("attrDecl1") - // comment after annotation - "authenticated": Bool, - @annotation("attrDecl2") - "photo": { - "file_size": LongAlias, - "file_type": String, - }, - }, // context comment after - // remaining comments - }; // action footer comment - action "read"; // action footer comment 1-line - action "all"; - action "viewPhoto", viewPhoto2 in ["read", PhotoFlash::Action::all] appliesTo { - principal: User, - resource: Photo, - context: { // opener comment - // 
Attribute comment (line 1) - // Attribute comment (line 2) - "authenticated": Bool, // attribute comment inline - }, // context comment - }; - action "listAlbums" in "read" appliesTo { - principal: User, - resource: Account, - context: { - "authenticated": Bool, - appliesTo: String, // keywords are valid identifiers - }, - }; - type commonContext = { - "authenticated": Bool, - appliesTo: String, // keywords are valid identifiers - }; - action "commonContext" appliesTo { - principal: User, - resource: Account, - context: commonContext, - }; - // Remainder comment block - // should also be kept around -} // Footer comment on namespace -// Tailing comments after namespace diff --git a/internal/schema/parser/testdata/cases/k8s-authorization.cedarschema b/internal/schema/parser/testdata/cases/k8s-authorization.cedarschema deleted file mode 100644 index ce51c35a..00000000 --- a/internal/schema/parser/testdata/cases/k8s-authorization.cedarschema +++ /dev/null @@ -1,173 +0,0 @@ -// From https://raw.githubusercontent.com/awslabs/cedar-access-control-for-k8s/refs/heads/main/cedarschema/k8s-authorization.cedarschema -// This file was modified in the following ways to be formatted: -// - This comment was added at the start -// - The formatting was changed to match the formatter's rules -namespace k8s { - type ExtraAttribute = { - "key": __cedar::String, - "values": Set<__cedar::String>, - }; - type FieldRequirement = { - "field": __cedar::String, - "operator": __cedar::String, - "value": __cedar::String, - }; - type LabelRequirement = { - "key": __cedar::String, - "operator": __cedar::String, - "values": Set<__cedar::String>, - }; - entity Extra = { - "key": __cedar::String, - "value"?: __cedar::String, - }; - entity Group = { - "name": __cedar::String, - }; - entity Node in Group = { - "extra"?: Set, - "name": __cedar::String, - }; - entity NonResourceURL = { - "path": __cedar::String, - }; - entity PrincipalUID; - entity Resource = { - "apiGroup": __cedar::String, - 
"fieldSelector"?: Set, - "labelSelector"?: Set, - "name"?: __cedar::String, - "namespace"?: __cedar::String, - "resource": __cedar::String, - "subresource"?: __cedar::String, - }; - entity ServiceAccount in Group = { - "extra"?: Set, - "name": __cedar::String, - "namespace": __cedar::String, - }; - entity User in Group = { - "extra"?: Set, - "name": __cedar::String, - }; - action "approve" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "attest" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "bind" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "create" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "delete" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: [NonResourceURL, Resource], - context: { - }, - }; - action "deletecollection" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "escalate" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "get" in Action::"readOnly" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: [NonResourceURL, Resource], - context: { - }, - }; - action "head" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: NonResourceURL, - context: { - }, - }; - action "impersonate" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: [Extra, Group, Node, PrincipalUID, ServiceAccount, User], - context: { - }, - }; - action "list" in Action::"readOnly" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "options" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: 
NonResourceURL, - context: { - }, - }; - action "patch" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: [NonResourceURL, Resource], - context: { - }, - }; - action "post" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: NonResourceURL, - context: { - }, - }; - action "put" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: NonResourceURL, - context: { - }, - }; - action "readOnly" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "sign" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "update" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "use" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "watch" in Action::"readOnly" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; -} diff --git a/internal/schema/parser/testdata/cases/k8s-full.cedarschema b/internal/schema/parser/testdata/cases/k8s-full.cedarschema deleted file mode 100644 index d6da0e01..00000000 --- a/internal/schema/parser/testdata/cases/k8s-full.cedarschema +++ /dev/null @@ -1,3097 +0,0 @@ -// From https://raw.githubusercontent.com/awslabs/cedar-access-control-for-k8s/refs/heads/main/cedarschema/k8s-full.cedarschema -// This file was modified in the following ways to be formatted: -// - This comment was added at the start -// - The formatting was changed to match the formatter's rules -namespace k8s::admission { - action "all" appliesTo { - principal: [k8s::Group, k8s::Node, k8s::ServiceAccount, k8s::User], - resource: [admissionregistration::v1::MutatingWebhookConfiguration, admissionregistration::v1::ValidatingAdmissionPolicy, admissionregistration::v1::ValidatingAdmissionPolicyBinding, 
admissionregistration::v1::ValidatingWebhookConfiguration, apps::v1::ControllerRevision, apps::v1::DaemonSet, apps::v1::Deployment, apps::v1::ReplicaSet, apps::v1::StatefulSet, authentication::v1::SelfSubjectReview, authentication::v1::TokenRequest, authentication::v1::TokenReview, authorization::v1::LocalSubjectAccessReview, authorization::v1::SelfSubjectAccessReview, authorization::v1::SelfSubjectRulesReview, authorization::v1::SubjectAccessReview, autoscaling::v1::HorizontalPodAutoscaler, autoscaling::v1::Scale, autoscaling::v2::HorizontalPodAutoscaler, aws::k8s::cedar::v1alpha1::Policy, batch::v1::CronJob, batch::v1::Job, certificates::v1::CertificateSigningRequest, coordination::v1::Lease, core::v1::Binding, core::v1::ComponentStatus, core::v1::ConfigMap, core::v1::Endpoints, core::v1::Event, core::v1::LimitRange, core::v1::Namespace, core::v1::Node, core::v1::PersistentVolume, core::v1::PersistentVolumeClaim, core::v1::Pod, core::v1::PodTemplate, core::v1::ReplicationController, core::v1::ResourceQuota, core::v1::Secret, core::v1::Service, core::v1::ServiceAccount, discovery::v1::EndpointSlice, events::v1::Event, flowcontrol::v1::FlowSchema, flowcontrol::v1::PriorityLevelConfiguration, flowcontrol::v1beta3::FlowSchema, flowcontrol::v1beta3::PriorityLevelConfiguration, networking::v1::Ingress, networking::v1::IngressClass, networking::v1::NetworkPolicy, node::v1::RuntimeClass, policy::v1::Eviction, policy::v1::PodDisruptionBudget, rbac::v1::ClusterRole, rbac::v1::ClusterRoleBinding, rbac::v1::Role, rbac::v1::RoleBinding, scheduling::v1::PriorityClass, storage::v1::CSIDriver, storage::v1::CSINode, storage::v1::CSIStorageCapacity, storage::v1::StorageClass, storage::v1::VolumeAttachment], - context: { - }, - }; - action "connect" in Action::"all" appliesTo { - principal: [k8s::Group, k8s::Node, k8s::ServiceAccount, k8s::User], - resource: [core::v1::NodeProxyOptions, core::v1::PodAttachOptions, core::v1::PodExecOptions, core::v1::PodPortForwardOptions, 
core::v1::PodProxyOptions, core::v1::ServiceProxyOptions], - context: { - }, - }; - action "create" in Action::"all" appliesTo { - principal: [k8s::Group, k8s::Node, k8s::ServiceAccount, k8s::User], - resource: [admissionregistration::v1::MutatingWebhookConfiguration, admissionregistration::v1::ValidatingAdmissionPolicy, admissionregistration::v1::ValidatingAdmissionPolicyBinding, admissionregistration::v1::ValidatingWebhookConfiguration, apps::v1::ControllerRevision, apps::v1::DaemonSet, apps::v1::Deployment, apps::v1::ReplicaSet, apps::v1::StatefulSet, authentication::v1::SelfSubjectReview, authentication::v1::TokenRequest, authentication::v1::TokenReview, authorization::v1::LocalSubjectAccessReview, authorization::v1::SelfSubjectAccessReview, authorization::v1::SelfSubjectRulesReview, authorization::v1::SubjectAccessReview, autoscaling::v1::HorizontalPodAutoscaler, autoscaling::v2::HorizontalPodAutoscaler, aws::k8s::cedar::v1alpha1::Policy, batch::v1::CronJob, batch::v1::Job, certificates::v1::CertificateSigningRequest, coordination::v1::Lease, core::v1::Binding, core::v1::ConfigMap, core::v1::Endpoints, core::v1::Event, core::v1::LimitRange, core::v1::Namespace, core::v1::Node, core::v1::PersistentVolume, core::v1::PersistentVolumeClaim, core::v1::Pod, core::v1::PodTemplate, core::v1::ReplicationController, core::v1::ResourceQuota, core::v1::Secret, core::v1::Service, core::v1::ServiceAccount, discovery::v1::EndpointSlice, events::v1::Event, flowcontrol::v1::FlowSchema, flowcontrol::v1::PriorityLevelConfiguration, flowcontrol::v1beta3::FlowSchema, flowcontrol::v1beta3::PriorityLevelConfiguration, networking::v1::Ingress, networking::v1::IngressClass, networking::v1::NetworkPolicy, node::v1::RuntimeClass, policy::v1::Eviction, policy::v1::PodDisruptionBudget, rbac::v1::ClusterRole, rbac::v1::ClusterRoleBinding, rbac::v1::Role, rbac::v1::RoleBinding, scheduling::v1::PriorityClass, storage::v1::CSIDriver, storage::v1::CSINode, storage::v1::CSIStorageCapacity, 
storage::v1::StorageClass, storage::v1::VolumeAttachment], - context: { - }, - }; - action "delete" in Action::"all" appliesTo { - principal: [k8s::Group, k8s::Node, k8s::ServiceAccount, k8s::User], - resource: [admissionregistration::v1::MutatingWebhookConfiguration, admissionregistration::v1::ValidatingAdmissionPolicy, admissionregistration::v1::ValidatingAdmissionPolicyBinding, admissionregistration::v1::ValidatingWebhookConfiguration, apps::v1::ControllerRevision, apps::v1::DaemonSet, apps::v1::Deployment, apps::v1::ReplicaSet, apps::v1::StatefulSet, autoscaling::v1::HorizontalPodAutoscaler, autoscaling::v2::HorizontalPodAutoscaler, aws::k8s::cedar::v1alpha1::Policy, batch::v1::CronJob, batch::v1::Job, certificates::v1::CertificateSigningRequest, coordination::v1::Lease, core::v1::ConfigMap, core::v1::Endpoints, core::v1::Event, core::v1::LimitRange, core::v1::Namespace, core::v1::Node, core::v1::PersistentVolume, core::v1::PersistentVolumeClaim, core::v1::Pod, core::v1::PodTemplate, core::v1::ReplicationController, core::v1::ResourceQuota, core::v1::Secret, core::v1::Service, core::v1::ServiceAccount, discovery::v1::EndpointSlice, events::v1::Event, flowcontrol::v1::FlowSchema, flowcontrol::v1::PriorityLevelConfiguration, flowcontrol::v1beta3::FlowSchema, flowcontrol::v1beta3::PriorityLevelConfiguration, networking::v1::Ingress, networking::v1::IngressClass, networking::v1::NetworkPolicy, node::v1::RuntimeClass, policy::v1::PodDisruptionBudget, rbac::v1::ClusterRole, rbac::v1::ClusterRoleBinding, rbac::v1::Role, rbac::v1::RoleBinding, scheduling::v1::PriorityClass, storage::v1::CSIDriver, storage::v1::CSINode, storage::v1::CSIStorageCapacity, storage::v1::StorageClass, storage::v1::VolumeAttachment], - context: { - }, - }; - action "update" in Action::"all" appliesTo { - principal: [k8s::Group, k8s::Node, k8s::ServiceAccount, k8s::User], - resource: [admissionregistration::v1::MutatingWebhookConfiguration, admissionregistration::v1::ValidatingAdmissionPolicy, 
admissionregistration::v1::ValidatingAdmissionPolicyBinding, admissionregistration::v1::ValidatingWebhookConfiguration, apps::v1::ControllerRevision, apps::v1::DaemonSet, apps::v1::Deployment, apps::v1::ReplicaSet, apps::v1::StatefulSet, autoscaling::v1::HorizontalPodAutoscaler, autoscaling::v1::Scale, autoscaling::v2::HorizontalPodAutoscaler, aws::k8s::cedar::v1alpha1::Policy, batch::v1::CronJob, batch::v1::Job, certificates::v1::CertificateSigningRequest, coordination::v1::Lease, core::v1::ConfigMap, core::v1::Endpoints, core::v1::Event, core::v1::LimitRange, core::v1::Namespace, core::v1::Node, core::v1::PersistentVolume, core::v1::PersistentVolumeClaim, core::v1::Pod, core::v1::PodTemplate, core::v1::ReplicationController, core::v1::ResourceQuota, core::v1::Secret, core::v1::Service, core::v1::ServiceAccount, discovery::v1::EndpointSlice, events::v1::Event, flowcontrol::v1::FlowSchema, flowcontrol::v1::PriorityLevelConfiguration, flowcontrol::v1beta3::FlowSchema, flowcontrol::v1beta3::PriorityLevelConfiguration, networking::v1::Ingress, networking::v1::IngressClass, networking::v1::NetworkPolicy, node::v1::RuntimeClass, policy::v1::PodDisruptionBudget, rbac::v1::ClusterRole, rbac::v1::ClusterRoleBinding, rbac::v1::Role, rbac::v1::RoleBinding, scheduling::v1::PriorityClass, storage::v1::CSIDriver, storage::v1::CSINode, storage::v1::CSIStorageCapacity, storage::v1::StorageClass, storage::v1::VolumeAttachment], - context: { - }, - }; -} -namespace k8s { - type ExtraAttribute = { - "key": __cedar::String, - "values": Set<__cedar::String>, - }; - type FieldRequirement = { - "field": __cedar::String, - "operator": __cedar::String, - "value": __cedar::String, - }; - type LabelRequirement = { - "key": __cedar::String, - "operator": __cedar::String, - "values": Set<__cedar::String>, - }; - entity Extra = { - "key": __cedar::String, - "value"?: __cedar::String, - }; - entity Group = { - "name": __cedar::String, - }; - entity Node in Group = { - "extra"?: Set, - "name": 
__cedar::String, - }; - entity NonResourceURL = { - "path": __cedar::String, - }; - entity PrincipalUID; - entity Resource = { - "apiGroup": __cedar::String, - "fieldSelector"?: Set, - "labelSelector"?: Set, - "name"?: __cedar::String, - "namespace"?: __cedar::String, - "resource": __cedar::String, - "subresource"?: __cedar::String, - }; - entity ServiceAccount in Group = { - "extra"?: Set, - "name": __cedar::String, - "namespace": __cedar::String, - }; - entity User in Group = { - "extra"?: Set, - "name": __cedar::String, - }; - action "approve" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "attest" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "bind" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "create" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "delete" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: [NonResourceURL, Resource], - context: { - }, - }; - action "deletecollection" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "escalate" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "get" in Action::"readOnly" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: [NonResourceURL, Resource], - context: { - }, - }; - action "head" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: NonResourceURL, - context: { - }, - }; - action "impersonate" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: [Extra, Group, Node, PrincipalUID, ServiceAccount, User], - context: { - }, - }; - action "list" in Action::"readOnly" appliesTo { - principal: [Group, Node, ServiceAccount, 
User], - resource: Resource, - context: { - }, - }; - action "options" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: NonResourceURL, - context: { - }, - }; - action "patch" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: [NonResourceURL, Resource], - context: { - }, - }; - action "post" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: NonResourceURL, - context: { - }, - }; - action "put" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: NonResourceURL, - context: { - }, - }; - action "readOnly" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "sign" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "update" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "use" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; - action "watch" in Action::"readOnly" appliesTo { - principal: [Group, Node, ServiceAccount, User], - resource: Resource, - context: { - }, - }; -} -namespace admissionregistration::v1 { - type AuditAnnotation = { - "key": __cedar::String, - "valueExpression": __cedar::String, - }; - type ExpressionWarning = { - "fieldRef": __cedar::String, - "warning": __cedar::String, - }; - type MatchCondition = { - "expression": __cedar::String, - "name": __cedar::String, - }; - type MatchResources = { - "excludeResourceRules"?: Set, - "matchPolicy"?: __cedar::String, - "namespaceSelector"?: meta::v1::LabelSelector, - "objectSelector"?: meta::v1::LabelSelector, - "resourceRules"?: Set, - }; - type MutatingWebhook = { - "admissionReviewVersions": Set<__cedar::String>, - "clientConfig": WebhookClientConfig, - "failurePolicy"?: __cedar::String, - "matchConditions"?: Set, - "matchPolicy"?: __cedar::String, - "name": 
__cedar::String, - "namespaceSelector"?: meta::v1::LabelSelector, - "objectSelector"?: meta::v1::LabelSelector, - "reinvocationPolicy"?: __cedar::String, - "rules"?: Set, - "sideEffects": __cedar::String, - "timeoutSeconds"?: __cedar::Long, - }; - type NamedRuleWithOperations = { - "apiGroups"?: Set<__cedar::String>, - "apiVersions"?: Set<__cedar::String>, - "operations"?: Set<__cedar::String>, - "resourceNames"?: Set<__cedar::String>, - "resources"?: Set<__cedar::String>, - "scope"?: __cedar::String, - }; - type ParamKind = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - }; - type ParamRef = { - "name"?: __cedar::String, - "namespace"?: __cedar::String, - "parameterNotFoundAction"?: __cedar::String, - "selector"?: meta::v1::LabelSelector, - }; - type RuleWithOperations = { - "apiGroups"?: Set<__cedar::String>, - "apiVersions"?: Set<__cedar::String>, - "operations"?: Set<__cedar::String>, - "resources"?: Set<__cedar::String>, - "scope"?: __cedar::String, - }; - type ServiceReference = { - "name": __cedar::String, - "namespace": __cedar::String, - "path"?: __cedar::String, - "port"?: __cedar::Long, - }; - type TypeChecking = { - "expressionWarnings"?: Set, - }; - type ValidatingAdmissionPolicyBindingSpec = { - "matchResources"?: MatchResources, - "paramRef"?: ParamRef, - "policyName"?: __cedar::String, - "validationActions"?: Set<__cedar::String>, - }; - type ValidatingAdmissionPolicySpec = { - "auditAnnotations"?: Set, - "failurePolicy"?: __cedar::String, - "matchConditions"?: Set, - "matchConstraints"?: MatchResources, - "paramKind"?: ParamKind, - "validations"?: Set, - "variables"?: Set, - }; - type ValidatingAdmissionPolicyStatus = { - "conditions"?: Set, - "observedGeneration"?: __cedar::Long, - "typeChecking"?: TypeChecking, - }; - type ValidatingWebhook = { - "admissionReviewVersions": Set<__cedar::String>, - "clientConfig": WebhookClientConfig, - "failurePolicy"?: __cedar::String, - "matchConditions"?: Set, - "matchPolicy"?: 
__cedar::String, - "name": __cedar::String, - "namespaceSelector"?: meta::v1::LabelSelector, - "objectSelector"?: meta::v1::LabelSelector, - "rules"?: Set, - "sideEffects": __cedar::String, - "timeoutSeconds"?: __cedar::Long, - }; - type Validation = { - "expression": __cedar::String, - "message"?: __cedar::String, - "messageExpression"?: __cedar::String, - "reason"?: __cedar::String, - }; - type Variable = { - "expression": __cedar::String, - "name": __cedar::String, - }; - type WebhookClientConfig = { - "caBundle"?: __cedar::String, - "service"?: ServiceReference, - "url"?: __cedar::String, - }; - entity MutatingWebhookConfiguration = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: MutatingWebhookConfiguration, - "webhooks"?: Set, - }; - entity ValidatingAdmissionPolicy = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ValidatingAdmissionPolicy, - "spec"?: ValidatingAdmissionPolicySpec, - "status"?: ValidatingAdmissionPolicyStatus, - }; - entity ValidatingAdmissionPolicyBinding = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ValidatingAdmissionPolicyBinding, - "spec"?: ValidatingAdmissionPolicyBindingSpec, - }; - entity ValidatingWebhookConfiguration = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ValidatingWebhookConfiguration, - "webhooks"?: Set, - }; -} -namespace apps::v1 { - type DaemonSetCondition = { - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type DaemonSetSpec = { - "minReadySeconds"?: __cedar::Long, - "revisionHistoryLimit"?: __cedar::Long, - "selector": meta::v1::LabelSelector, - "template": core::v1::PodTemplateSpec, - "updateStrategy"?: 
DaemonSetUpdateStrategy, - }; - type DaemonSetStatus = { - "collisionCount"?: __cedar::Long, - "conditions"?: Set, - "currentNumberScheduled": __cedar::Long, - "desiredNumberScheduled": __cedar::Long, - "numberAvailable"?: __cedar::Long, - "numberMisscheduled": __cedar::Long, - "numberReady": __cedar::Long, - "numberUnavailable"?: __cedar::Long, - "observedGeneration"?: __cedar::Long, - "updatedNumberScheduled"?: __cedar::Long, - }; - type DaemonSetUpdateStrategy = { - "rollingUpdate"?: RollingUpdateDaemonSet, - "type"?: __cedar::String, - }; - type DeploymentCondition = { - "lastTransitionTime"?: __cedar::String, - "lastUpdateTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type DeploymentSpec = { - "minReadySeconds"?: __cedar::Long, - "paused"?: __cedar::Bool, - "progressDeadlineSeconds"?: __cedar::Long, - "replicas"?: __cedar::Long, - "revisionHistoryLimit"?: __cedar::Long, - "selector": meta::v1::LabelSelector, - "strategy"?: DeploymentStrategy, - "template": core::v1::PodTemplateSpec, - }; - type DeploymentStatus = { - "availableReplicas"?: __cedar::Long, - "collisionCount"?: __cedar::Long, - "conditions"?: Set, - "observedGeneration"?: __cedar::Long, - "readyReplicas"?: __cedar::Long, - "replicas"?: __cedar::Long, - "unavailableReplicas"?: __cedar::Long, - "updatedReplicas"?: __cedar::Long, - }; - type DeploymentStrategy = { - "rollingUpdate"?: RollingUpdateDeployment, - "type"?: __cedar::String, - }; - type ReplicaSetCondition = { - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type ReplicaSetSpec = { - "minReadySeconds"?: __cedar::Long, - "replicas"?: __cedar::Long, - "selector": meta::v1::LabelSelector, - "template"?: core::v1::PodTemplateSpec, - }; - type ReplicaSetStatus = { - "availableReplicas"?: __cedar::Long, - "conditions"?: Set, - 
"fullyLabeledReplicas"?: __cedar::Long, - "observedGeneration"?: __cedar::Long, - "readyReplicas"?: __cedar::Long, - "replicas": __cedar::Long, - }; - type RollingUpdateDaemonSet = { - "maxSurge"?: __cedar::String, - "maxUnavailable"?: __cedar::String, - }; - type RollingUpdateDeployment = { - "maxSurge"?: __cedar::String, - "maxUnavailable"?: __cedar::String, - }; - type RollingUpdateStatefulSetStrategy = { - "maxUnavailable"?: __cedar::String, - "partition"?: __cedar::Long, - }; - type StatefulSetCondition = { - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type StatefulSetOrdinals = { - "start"?: __cedar::Long, - }; - type StatefulSetPersistentVolumeClaimRetentionPolicy = { - "whenDeleted"?: __cedar::String, - "whenScaled"?: __cedar::String, - }; - type StatefulSetSpec = { - "minReadySeconds"?: __cedar::Long, - "ordinals"?: StatefulSetOrdinals, - "persistentVolumeClaimRetentionPolicy"?: StatefulSetPersistentVolumeClaimRetentionPolicy, - "podManagementPolicy"?: __cedar::String, - "replicas"?: __cedar::Long, - "revisionHistoryLimit"?: __cedar::Long, - "selector": meta::v1::LabelSelector, - "serviceName": __cedar::String, - "template": core::v1::PodTemplateSpec, - "updateStrategy"?: StatefulSetUpdateStrategy, - "volumeClaimTemplates"?: Set, - }; - type StatefulSetStatus = { - "availableReplicas"?: __cedar::Long, - "collisionCount"?: __cedar::Long, - "conditions"?: Set, - "currentReplicas"?: __cedar::Long, - "currentRevision"?: __cedar::String, - "observedGeneration"?: __cedar::Long, - "readyReplicas"?: __cedar::Long, - "replicas": __cedar::Long, - "updateRevision"?: __cedar::String, - "updatedReplicas"?: __cedar::Long, - }; - type StatefulSetUpdateStrategy = { - "rollingUpdate"?: RollingUpdateStatefulSetStrategy, - "type"?: __cedar::String, - }; - entity ControllerRevision = { - "apiVersion"?: __cedar::String, - "data"?: __cedar::String, - "kind"?: 
__cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ControllerRevision, - "revision": __cedar::Long, - }; - entity DaemonSet = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: DaemonSet, - "spec"?: DaemonSetSpec, - "status"?: DaemonSetStatus, - }; - entity Deployment = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Deployment, - "spec"?: DeploymentSpec, - "status"?: DeploymentStatus, - }; - entity ReplicaSet = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ReplicaSet, - "spec"?: ReplicaSetSpec, - "status"?: ReplicaSetStatus, - }; - entity StatefulSet = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: StatefulSet, - "spec"?: StatefulSetSpec, - "status"?: StatefulSetStatus, - }; -} -namespace authentication::v1 { - type BoundObjectReference = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "name"?: __cedar::String, - "uid"?: __cedar::String, - }; - type SelfSubjectReviewStatus = { - "userInfo"?: UserInfo, - }; - type TokenRequestSpec = { - "audiences": Set<__cedar::String>, - "boundObjectRef"?: BoundObjectReference, - "expirationSeconds"?: __cedar::Long, - }; - type TokenRequestStatus = { - "expirationTimestamp": __cedar::String, - "token": __cedar::String, - }; - type TokenReviewSpec = { - "audiences"?: Set<__cedar::String>, - "token"?: __cedar::String, - }; - type TokenReviewStatus = { - "audiences"?: Set<__cedar::String>, - "authenticated"?: __cedar::Bool, - "error"?: __cedar::String, - "user"?: UserInfo, - }; - type UserInfo = { - "extra"?: Set, - "groups"?: Set<__cedar::String>, - "uid"?: __cedar::String, - "username"?: __cedar::String, - }; - entity SelfSubjectReview = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: 
meta::v1::ObjectMeta, - "status"?: SelfSubjectReviewStatus, - }; - entity TokenRequest = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "spec": TokenRequestSpec, - "status"?: TokenRequestStatus, - }; - entity TokenReview = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "spec": TokenReviewSpec, - "status"?: TokenReviewStatus, - }; -} -namespace authorization::v1 { - type FieldSelectorAttributes = { - "rawSelector"?: __cedar::String, - "requirements"?: Set, - }; - type LabelSelectorAttributes = { - "rawSelector"?: __cedar::String, - "requirements"?: Set, - }; - type NonResourceAttributes = { - "path"?: __cedar::String, - "verb"?: __cedar::String, - }; - type NonResourceRule = { - "nonResourceURLs"?: Set<__cedar::String>, - "verbs": Set<__cedar::String>, - }; - type ResourceAttributes = { - "fieldSelector"?: FieldSelectorAttributes, - "group"?: __cedar::String, - "labelSelector"?: LabelSelectorAttributes, - "name"?: __cedar::String, - "namespace"?: __cedar::String, - "resource"?: __cedar::String, - "subresource"?: __cedar::String, - "verb"?: __cedar::String, - "version"?: __cedar::String, - }; - type ResourceRule = { - "apiGroups"?: Set<__cedar::String>, - "resourceNames"?: Set<__cedar::String>, - "resources"?: Set<__cedar::String>, - "verbs": Set<__cedar::String>, - }; - type SelfSubjectAccessReviewSpec = { - "nonResourceAttributes"?: NonResourceAttributes, - "resourceAttributes"?: ResourceAttributes, - }; - type SelfSubjectRulesReviewSpec = { - "namespace"?: __cedar::String, - }; - type SubjectAccessReviewSpec = { - "extra"?: Set, - "groups"?: Set<__cedar::String>, - "nonResourceAttributes"?: NonResourceAttributes, - "resourceAttributes"?: ResourceAttributes, - "uid"?: __cedar::String, - "user"?: __cedar::String, - }; - type SubjectAccessReviewStatus = { - "allowed": __cedar::Bool, - "denied"?: __cedar::Bool, - "evaluationError"?: __cedar::String, - 
"reason"?: __cedar::String, - }; - type SubjectRulesReviewStatus = { - "evaluationError"?: __cedar::String, - "incomplete": __cedar::Bool, - "nonResourceRules": Set, - "resourceRules": Set, - }; - entity LocalSubjectAccessReview = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "spec": SubjectAccessReviewSpec, - "status"?: SubjectAccessReviewStatus, - }; - entity SelfSubjectAccessReview = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "spec": SelfSubjectAccessReviewSpec, - "status"?: SubjectAccessReviewStatus, - }; - entity SelfSubjectRulesReview = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "spec": SelfSubjectRulesReviewSpec, - "status"?: SubjectRulesReviewStatus, - }; - entity SubjectAccessReview = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "spec": SubjectAccessReviewSpec, - "status"?: SubjectAccessReviewStatus, - }; -} -namespace autoscaling::v1 { - type CrossVersionObjectReference = { - "apiVersion"?: __cedar::String, - "kind": __cedar::String, - "name": __cedar::String, - }; - type HorizontalPodAutoscalerSpec = { - "maxReplicas": __cedar::Long, - "minReplicas"?: __cedar::Long, - "scaleTargetRef": CrossVersionObjectReference, - "targetCPUUtilizationPercentage"?: __cedar::Long, - }; - type HorizontalPodAutoscalerStatus = { - "currentCPUUtilizationPercentage"?: __cedar::Long, - "currentReplicas": __cedar::Long, - "desiredReplicas": __cedar::Long, - "lastScaleTime"?: __cedar::String, - "observedGeneration"?: __cedar::Long, - }; - type ScaleSpec = { - "replicas"?: __cedar::Long, - }; - type ScaleStatus = { - "replicas": __cedar::Long, - "selector"?: __cedar::String, - }; - entity HorizontalPodAutoscaler = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: 
HorizontalPodAutoscaler, - "spec"?: HorizontalPodAutoscalerSpec, - "status"?: HorizontalPodAutoscalerStatus, - }; - entity Scale = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Scale, - "spec"?: ScaleSpec, - "status"?: ScaleStatus, - }; -} -namespace batch::v1 { - type CronJobSpec = { - "concurrencyPolicy"?: __cedar::String, - "failedJobsHistoryLimit"?: __cedar::Long, - "jobTemplate": JobTemplateSpec, - "schedule": __cedar::String, - "startingDeadlineSeconds"?: __cedar::Long, - "successfulJobsHistoryLimit"?: __cedar::Long, - "suspend"?: __cedar::Bool, - "timeZone"?: __cedar::String, - }; - type CronJobStatus = { - "active"?: Set, - "lastScheduleTime"?: __cedar::String, - "lastSuccessfulTime"?: __cedar::String, - }; - type JobCondition = { - "lastProbeTime"?: __cedar::String, - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type JobSpec = { - "activeDeadlineSeconds"?: __cedar::Long, - "backoffLimit"?: __cedar::Long, - "backoffLimitPerIndex"?: __cedar::Long, - "completionMode"?: __cedar::String, - "completions"?: __cedar::Long, - "managedBy"?: __cedar::String, - "manualSelector"?: __cedar::Bool, - "maxFailedIndexes"?: __cedar::Long, - "parallelism"?: __cedar::Long, - "podFailurePolicy"?: PodFailurePolicy, - "podReplacementPolicy"?: __cedar::String, - "selector"?: meta::v1::LabelSelector, - "successPolicy"?: SuccessPolicy, - "suspend"?: __cedar::Bool, - "template": core::v1::PodTemplateSpec, - "ttlSecondsAfterFinished"?: __cedar::Long, - }; - type JobStatus = { - "active"?: __cedar::Long, - "completedIndexes"?: __cedar::String, - "completionTime"?: __cedar::String, - "conditions"?: Set, - "failed"?: __cedar::Long, - "failedIndexes"?: __cedar::String, - "ready"?: __cedar::Long, - "startTime"?: __cedar::String, - "succeeded"?: __cedar::Long, - "terminating"?: __cedar::Long, - 
"uncountedTerminatedPods"?: UncountedTerminatedPods, - }; - type JobTemplateSpec = { - "metadata"?: meta::v1::ObjectMeta, - "spec"?: JobSpec, - }; - type PodFailurePolicy = { - "rules": Set, - }; - type PodFailurePolicyOnExitCodesRequirement = { - "containerName"?: __cedar::String, - "operator": __cedar::String, - "values": Set<__cedar::Long>, - }; - type PodFailurePolicyOnPodConditionsPattern = { - "status": __cedar::String, - "type": __cedar::String, - }; - type PodFailurePolicyRule = { - "action": __cedar::String, - "onExitCodes"?: PodFailurePolicyOnExitCodesRequirement, - "onPodConditions"?: Set, - }; - type SuccessPolicy = { - "rules": Set, - }; - type SuccessPolicyRule = { - "succeededCount"?: __cedar::Long, - "succeededIndexes"?: __cedar::String, - }; - type UncountedTerminatedPods = { - "failed"?: Set<__cedar::String>, - "succeeded"?: Set<__cedar::String>, - }; - entity CronJob = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: CronJob, - "spec"?: CronJobSpec, - "status"?: CronJobStatus, - }; - entity Job = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Job, - "spec"?: JobSpec, - "status"?: JobStatus, - }; -} -namespace certificates::v1 { - type CertificateSigningRequestCondition = { - "lastTransitionTime"?: __cedar::String, - "lastUpdateTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type CertificateSigningRequestSpec = { - "expirationSeconds"?: __cedar::Long, - "extra"?: Set, - "groups"?: Set<__cedar::String>, - "request": __cedar::String, - "signerName": __cedar::String, - "uid"?: __cedar::String, - "usages"?: Set<__cedar::String>, - "username"?: __cedar::String, - }; - type CertificateSigningRequestStatus = { - "certificate"?: __cedar::String, - "conditions"?: Set, - }; - entity CertificateSigningRequest = { - 
"apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: CertificateSigningRequest, - "spec": CertificateSigningRequestSpec, - "status"?: CertificateSigningRequestStatus, - }; -} -namespace coordination::v1 { - type LeaseSpec = { - "acquireTime"?: __cedar::String, - "holderIdentity"?: __cedar::String, - "leaseDurationSeconds"?: __cedar::Long, - "leaseTransitions"?: __cedar::Long, - "preferredHolder"?: __cedar::String, - "renewTime"?: __cedar::String, - "strategy"?: __cedar::String, - }; - entity Lease = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Lease, - "spec"?: LeaseSpec, - }; -} -namespace core::v1 { - type AWSElasticBlockStoreVolumeSource = { - "fsType"?: __cedar::String, - "partition"?: __cedar::Long, - "readOnly"?: __cedar::Bool, - "volumeID": __cedar::String, - }; - type Affinity = { - "nodeAffinity"?: NodeAffinity, - "podAffinity"?: PodAffinity, - "podAntiAffinity"?: PodAntiAffinity, - }; - type AppArmorProfile = { - "localhostProfile"?: __cedar::String, - "type": __cedar::String, - }; - type AttachedVolume = { - "devicePath": __cedar::String, - "name": __cedar::String, - }; - type AzureDiskVolumeSource = { - "cachingMode"?: __cedar::String, - "diskName": __cedar::String, - "diskURI": __cedar::String, - "fsType"?: __cedar::String, - "kind"?: __cedar::String, - "readOnly"?: __cedar::Bool, - }; - type AzureFilePersistentVolumeSource = { - "readOnly"?: __cedar::Bool, - "secretName": __cedar::String, - "secretNamespace"?: __cedar::String, - "shareName": __cedar::String, - }; - type AzureFileVolumeSource = { - "readOnly"?: __cedar::Bool, - "secretName": __cedar::String, - "shareName": __cedar::String, - }; - type CSIPersistentVolumeSource = { - "controllerExpandSecretRef"?: SecretReference, - "controllerPublishSecretRef"?: SecretReference, - "driver": __cedar::String, - "fsType"?: __cedar::String, - "nodeExpandSecretRef"?: 
SecretReference, - "nodePublishSecretRef"?: SecretReference, - "nodeStageSecretRef"?: SecretReference, - "readOnly"?: __cedar::Bool, - "volumeAttributes"?: Set, - "volumeHandle": __cedar::String, - }; - type CSIVolumeSource = { - "driver": __cedar::String, - "fsType"?: __cedar::String, - "nodePublishSecretRef"?: LocalObjectReference, - "readOnly"?: __cedar::Bool, - "volumeAttributes"?: Set, - }; - type Capabilities = { - "add"?: Set<__cedar::String>, - "drop"?: Set<__cedar::String>, - }; - type CephFSPersistentVolumeSource = { - "monitors": Set<__cedar::String>, - "path"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretFile"?: __cedar::String, - "secretRef"?: SecretReference, - "user"?: __cedar::String, - }; - type CephFSVolumeSource = { - "monitors": Set<__cedar::String>, - "path"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretFile"?: __cedar::String, - "secretRef"?: LocalObjectReference, - "user"?: __cedar::String, - }; - type CinderPersistentVolumeSource = { - "fsType"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretRef"?: SecretReference, - "volumeID": __cedar::String, - }; - type CinderVolumeSource = { - "fsType"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretRef"?: LocalObjectReference, - "volumeID": __cedar::String, - }; - type ClientIPConfig = { - "timeoutSeconds"?: __cedar::Long, - }; - type ClusterTrustBundleProjection = { - "labelSelector"?: meta::v1::LabelSelector, - "name"?: __cedar::String, - "optional"?: __cedar::Bool, - "path": __cedar::String, - "signerName"?: __cedar::String, - }; - type ComponentCondition = { - "error"?: __cedar::String, - "message"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type ConfigMapEnvSource = { - "name"?: __cedar::String, - "optional"?: __cedar::Bool, - }; - type ConfigMapKeySelector = { - "key": __cedar::String, - "name"?: __cedar::String, - "optional"?: __cedar::Bool, - }; - type ConfigMapNodeConfigSource = { - "kubeletConfigKey": 
__cedar::String, - "name": __cedar::String, - "namespace": __cedar::String, - "resourceVersion"?: __cedar::String, - "uid"?: __cedar::String, - }; - type ConfigMapProjection = { - "items"?: Set, - "name"?: __cedar::String, - "optional"?: __cedar::Bool, - }; - type ConfigMapVolumeSource = { - "defaultMode"?: __cedar::Long, - "items"?: Set, - "name"?: __cedar::String, - "optional"?: __cedar::Bool, - }; - type Container = { - "args"?: Set<__cedar::String>, - "command"?: Set<__cedar::String>, - "env"?: Set, - "envFrom"?: Set, - "image"?: __cedar::String, - "imagePullPolicy"?: __cedar::String, - "lifecycle"?: Lifecycle, - "livenessProbe"?: Probe, - "name": __cedar::String, - "ports"?: Set, - "readinessProbe"?: Probe, - "resizePolicy"?: Set, - "resources"?: ResourceRequirements, - "restartPolicy"?: __cedar::String, - "securityContext"?: SecurityContext, - "startupProbe"?: Probe, - "stdin"?: __cedar::Bool, - "stdinOnce"?: __cedar::Bool, - "terminationMessagePath"?: __cedar::String, - "terminationMessagePolicy"?: __cedar::String, - "tty"?: __cedar::Bool, - "volumeDevices"?: Set, - "volumeMounts"?: Set, - "workingDir"?: __cedar::String, - }; - type ContainerImage = { - "names"?: Set<__cedar::String>, - "sizeBytes"?: __cedar::Long, - }; - type ContainerPort = { - "containerPort": __cedar::Long, - "hostIP"?: __cedar::String, - "hostPort"?: __cedar::Long, - "name"?: __cedar::String, - "protocol"?: __cedar::String, - }; - type ContainerResizePolicy = { - "resourceName": __cedar::String, - "restartPolicy": __cedar::String, - }; - type ContainerState = { - "running"?: ContainerStateRunning, - "terminated"?: ContainerStateTerminated, - "waiting"?: ContainerStateWaiting, - }; - type ContainerStateRunning = { - "startedAt"?: __cedar::String, - }; - type ContainerStateTerminated = { - "containerID"?: __cedar::String, - "exitCode": __cedar::Long, - "finishedAt"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "signal"?: __cedar::Long, - "startedAt"?: 
__cedar::String, - }; - type ContainerStateWaiting = { - "message"?: __cedar::String, - "reason"?: __cedar::String, - }; - type ContainerStatus = { - "allocatedResources"?: __cedar::String, - "allocatedResourcesStatus"?: Set, - "containerID"?: __cedar::String, - "image": __cedar::String, - "imageID": __cedar::String, - "lastState"?: ContainerState, - "name": __cedar::String, - "ready": __cedar::Bool, - "resources"?: ResourceRequirements, - "restartCount": __cedar::Long, - "started"?: __cedar::Bool, - "state"?: ContainerState, - "user"?: ContainerUser, - "volumeMounts"?: Set, - }; - type ContainerUser = { - "linux"?: LinuxContainerUser, - }; - type DaemonEndpoint = { - "Port": __cedar::Long, - }; - type DownwardAPIProjection = { - "items"?: Set, - }; - type DownwardAPIVolumeFile = { - "fieldRef"?: ObjectFieldSelector, - "mode"?: __cedar::Long, - "path": __cedar::String, - "resourceFieldRef"?: ResourceFieldSelector, - }; - type DownwardAPIVolumeSource = { - "defaultMode"?: __cedar::Long, - "items"?: Set, - }; - type EmptyDirVolumeSource = { - "medium"?: __cedar::String, - "sizeLimit"?: __cedar::String, - }; - type EndpointAddress = { - "hostname"?: __cedar::String, - "ip": __cedar::String, - "nodeName"?: __cedar::String, - "targetRef"?: ObjectReference, - }; - type EndpointPort = { - "appProtocol"?: __cedar::String, - "name"?: __cedar::String, - "port": __cedar::Long, - "protocol"?: __cedar::String, - }; - type EndpointSubset = { - "addresses"?: Set, - "notReadyAddresses"?: Set, - "ports"?: Set, - }; - type EnvFromSource = { - "configMapRef"?: ConfigMapEnvSource, - "prefix"?: __cedar::String, - "secretRef"?: SecretEnvSource, - }; - type EnvVar = { - "name": __cedar::String, - "value"?: __cedar::String, - "valueFrom"?: EnvVarSource, - }; - type EnvVarSource = { - "configMapKeyRef"?: ConfigMapKeySelector, - "fieldRef"?: ObjectFieldSelector, - "resourceFieldRef"?: ResourceFieldSelector, - "secretKeyRef"?: SecretKeySelector, - }; - type EphemeralContainer = { - "args"?: 
Set<__cedar::String>, - "command"?: Set<__cedar::String>, - "env"?: Set, - "envFrom"?: Set, - "image"?: __cedar::String, - "imagePullPolicy"?: __cedar::String, - "lifecycle"?: Lifecycle, - "livenessProbe"?: Probe, - "name": __cedar::String, - "ports"?: Set, - "readinessProbe"?: Probe, - "resizePolicy"?: Set, - "resources"?: ResourceRequirements, - "restartPolicy"?: __cedar::String, - "securityContext"?: SecurityContext, - "startupProbe"?: Probe, - "stdin"?: __cedar::Bool, - "stdinOnce"?: __cedar::Bool, - "targetContainerName"?: __cedar::String, - "terminationMessagePath"?: __cedar::String, - "terminationMessagePolicy"?: __cedar::String, - "tty"?: __cedar::Bool, - "volumeDevices"?: Set, - "volumeMounts"?: Set, - "workingDir"?: __cedar::String, - }; - type EphemeralVolumeSource = { - "volumeClaimTemplate"?: PersistentVolumeClaimTemplate, - }; - type EventSeries = { - "count"?: __cedar::Long, - "lastObservedTime"?: __cedar::String, - }; - type EventSource = { - "component"?: __cedar::String, - "host"?: __cedar::String, - }; - type ExecAction = { - "command"?: Set<__cedar::String>, - }; - type FCVolumeSource = { - "fsType"?: __cedar::String, - "lun"?: __cedar::Long, - "readOnly"?: __cedar::Bool, - "targetWWNs"?: Set<__cedar::String>, - "wwids"?: Set<__cedar::String>, - }; - type FlexPersistentVolumeSource = { - "driver": __cedar::String, - "fsType"?: __cedar::String, - "options"?: Set, - "readOnly"?: __cedar::Bool, - "secretRef"?: SecretReference, - }; - type FlexVolumeSource = { - "driver": __cedar::String, - "fsType"?: __cedar::String, - "options"?: Set, - "readOnly"?: __cedar::Bool, - "secretRef"?: LocalObjectReference, - }; - type FlockerVolumeSource = { - "datasetName"?: __cedar::String, - "datasetUUID"?: __cedar::String, - }; - type GCEPersistentDiskVolumeSource = { - "fsType"?: __cedar::String, - "partition"?: __cedar::Long, - "pdName": __cedar::String, - "readOnly"?: __cedar::Bool, - }; - type GRPCAction = { - "port": __cedar::Long, - "service"?: 
__cedar::String, - }; - type GitRepoVolumeSource = { - "directory"?: __cedar::String, - "repository": __cedar::String, - "revision"?: __cedar::String, - }; - type GlusterfsPersistentVolumeSource = { - "endpoints": __cedar::String, - "endpointsNamespace"?: __cedar::String, - "path": __cedar::String, - "readOnly"?: __cedar::Bool, - }; - type GlusterfsVolumeSource = { - "endpoints": __cedar::String, - "path": __cedar::String, - "readOnly"?: __cedar::Bool, - }; - type HTTPGetAction = { - "host"?: __cedar::String, - "httpHeaders"?: Set, - "path"?: __cedar::String, - "port": __cedar::String, - "scheme"?: __cedar::String, - }; - type HTTPHeader = { - "name": __cedar::String, - "value": __cedar::String, - }; - type HostAlias = { - "hostnames"?: Set<__cedar::String>, - "ip": __cedar::String, - }; - type HostIP = { - "ip": __cedar::String, - }; - type HostPathVolumeSource = { - "path": __cedar::String, - "type"?: __cedar::String, - }; - type ISCSIPersistentVolumeSource = { - "chapAuthDiscovery"?: __cedar::Bool, - "chapAuthSession"?: __cedar::Bool, - "fsType"?: __cedar::String, - "initiatorName"?: __cedar::String, - "iqn": __cedar::String, - "iscsiInterface"?: __cedar::String, - "lun": __cedar::Long, - "portals"?: Set<__cedar::String>, - "readOnly"?: __cedar::Bool, - "secretRef"?: SecretReference, - "targetPortal": __cedar::String, - }; - type ISCSIVolumeSource = { - "chapAuthDiscovery"?: __cedar::Bool, - "chapAuthSession"?: __cedar::Bool, - "fsType"?: __cedar::String, - "initiatorName"?: __cedar::String, - "iqn": __cedar::String, - "iscsiInterface"?: __cedar::String, - "lun": __cedar::Long, - "portals"?: Set<__cedar::String>, - "readOnly"?: __cedar::Bool, - "secretRef"?: LocalObjectReference, - "targetPortal": __cedar::String, - }; - type ImageVolumeSource = { - "pullPolicy"?: __cedar::String, - "reference"?: __cedar::String, - }; - type KeyToPath = { - "key": __cedar::String, - "mode"?: __cedar::Long, - "path": __cedar::String, - }; - type Lifecycle = { - "postStart"?: 
LifecycleHandler, - "preStop"?: LifecycleHandler, - }; - type LifecycleHandler = { - "exec"?: ExecAction, - "httpGet"?: HTTPGetAction, - "sleep"?: SleepAction, - "tcpSocket"?: TCPSocketAction, - }; - type LimitRangeItem = { - "default"?: __cedar::String, - "defaultRequest"?: __cedar::String, - "max"?: __cedar::String, - "maxLimitRequestRatio"?: __cedar::String, - "min"?: __cedar::String, - "type": __cedar::String, - }; - type LimitRangeSpec = { - "limits": Set, - }; - type LinuxContainerUser = { - "gid": __cedar::Long, - "supplementalGroups"?: Set<__cedar::Long>, - "uid": __cedar::Long, - }; - type LoadBalancerIngress = { - "hostname"?: __cedar::String, - "ip"?: __cedar::String, - "ipMode"?: __cedar::String, - "ports"?: Set, - }; - type LoadBalancerStatus = { - "ingress"?: Set, - }; - type LocalObjectReference = { - "name"?: __cedar::String, - }; - type LocalVolumeSource = { - "fsType"?: __cedar::String, - "path": __cedar::String, - }; - type ModifyVolumeStatus = { - "status": __cedar::String, - "targetVolumeAttributesClassName"?: __cedar::String, - }; - type NFSVolumeSource = { - "path": __cedar::String, - "readOnly"?: __cedar::Bool, - "server": __cedar::String, - }; - type NamespaceCondition = { - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type NamespaceSpec = { - "finalizers"?: Set<__cedar::String>, - }; - type NamespaceStatus = { - "conditions"?: Set, - "phase"?: __cedar::String, - }; - type NodeAddress = { - "address": __cedar::String, - "type": __cedar::String, - }; - type NodeAffinity = { - "preferredDuringSchedulingIgnoredDuringExecution"?: Set, - "requiredDuringSchedulingIgnoredDuringExecution"?: NodeSelector, - }; - type NodeCondition = { - "lastHeartbeatTime"?: __cedar::String, - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type NodeConfigSource = { - 
"configMap"?: ConfigMapNodeConfigSource, - }; - type NodeConfigStatus = { - "active"?: NodeConfigSource, - "assigned"?: NodeConfigSource, - "error"?: __cedar::String, - "lastKnownGood"?: NodeConfigSource, - }; - type NodeDaemonEndpoints = { - "kubeletEndpoint"?: DaemonEndpoint, - }; - type NodeFeatures = { - "supplementalGroupsPolicy"?: __cedar::Bool, - }; - type NodeRuntimeHandler = { - "features"?: NodeRuntimeHandlerFeatures, - "name"?: __cedar::String, - }; - type NodeRuntimeHandlerFeatures = { - "recursiveReadOnlyMounts"?: __cedar::Bool, - "userNamespaces"?: __cedar::Bool, - }; - type NodeSelector = { - "nodeSelectorTerms": Set, - }; - type NodeSelectorRequirement = { - "key": __cedar::String, - "operator": __cedar::String, - "values"?: Set<__cedar::String>, - }; - type NodeSelectorTerm = { - "matchExpressions"?: Set, - "matchFields"?: Set, - }; - type NodeSpec = { - "configSource"?: NodeConfigSource, - "externalID"?: __cedar::String, - "podCIDR"?: __cedar::String, - "podCIDRs"?: Set<__cedar::String>, - "providerID"?: __cedar::String, - "taints"?: Set, - "unschedulable"?: __cedar::Bool, - }; - type NodeStatus = { - "addresses"?: Set, - "allocatable"?: __cedar::String, - "capacity"?: __cedar::String, - "conditions"?: Set, - "config"?: NodeConfigStatus, - "daemonEndpoints"?: NodeDaemonEndpoints, - "features"?: NodeFeatures, - "images"?: Set, - "nodeInfo"?: NodeSystemInfo, - "phase"?: __cedar::String, - "runtimeHandlers"?: Set, - "volumesAttached"?: Set, - "volumesInUse"?: Set<__cedar::String>, - }; - type NodeSystemInfo = { - "architecture": __cedar::String, - "bootID": __cedar::String, - "containerRuntimeVersion": __cedar::String, - "kernelVersion": __cedar::String, - "kubeProxyVersion": __cedar::String, - "kubeletVersion": __cedar::String, - "machineID": __cedar::String, - "operatingSystem": __cedar::String, - "osImage": __cedar::String, - "systemUUID": __cedar::String, - }; - type ObjectFieldSelector = { - "apiVersion"?: __cedar::String, - "fieldPath": 
__cedar::String, - }; - type ObjectReference = { - "apiVersion"?: __cedar::String, - "fieldPath"?: __cedar::String, - "kind"?: __cedar::String, - "name"?: __cedar::String, - "namespace"?: __cedar::String, - "resourceVersion"?: __cedar::String, - "uid"?: __cedar::String, - }; - type PersistentVolumeClaimCondition = { - "lastProbeTime"?: __cedar::String, - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type PersistentVolumeClaimSpec = { - "accessModes"?: Set<__cedar::String>, - "dataSource"?: TypedLocalObjectReference, - "dataSourceRef"?: TypedObjectReference, - "resources"?: VolumeResourceRequirements, - "selector"?: meta::v1::LabelSelector, - "storageClassName"?: __cedar::String, - "volumeAttributesClassName"?: __cedar::String, - "volumeMode"?: __cedar::String, - "volumeName"?: __cedar::String, - }; - type PersistentVolumeClaimStatus = { - "accessModes"?: Set<__cedar::String>, - "allocatedResourceStatuses"?: Set, - "allocatedResources"?: __cedar::String, - "capacity"?: __cedar::String, - "conditions"?: Set, - "currentVolumeAttributesClassName"?: __cedar::String, - "modifyVolumeStatus"?: ModifyVolumeStatus, - "phase"?: __cedar::String, - }; - type PersistentVolumeClaimTemplate = { - "metadata"?: meta::v1::ObjectMeta, - "spec": PersistentVolumeClaimSpec, - }; - type PersistentVolumeClaimVolumeSource = { - "claimName": __cedar::String, - "readOnly"?: __cedar::Bool, - }; - type PersistentVolumeSpec = { - "accessModes"?: Set<__cedar::String>, - "awsElasticBlockStore"?: AWSElasticBlockStoreVolumeSource, - "azureDisk"?: AzureDiskVolumeSource, - "azureFile"?: AzureFilePersistentVolumeSource, - "capacity"?: __cedar::String, - "cephfs"?: CephFSPersistentVolumeSource, - "cinder"?: CinderPersistentVolumeSource, - "claimRef"?: ObjectReference, - "csi"?: CSIPersistentVolumeSource, - "fc"?: FCVolumeSource, - "flexVolume"?: FlexPersistentVolumeSource, - "flocker"?: 
FlockerVolumeSource, - "gcePersistentDisk"?: GCEPersistentDiskVolumeSource, - "glusterfs"?: GlusterfsPersistentVolumeSource, - "hostPath"?: HostPathVolumeSource, - "iscsi"?: ISCSIPersistentVolumeSource, - "local"?: LocalVolumeSource, - "mountOptions"?: Set<__cedar::String>, - "nfs"?: NFSVolumeSource, - "nodeAffinity"?: VolumeNodeAffinity, - "persistentVolumeReclaimPolicy"?: __cedar::String, - "photonPersistentDisk"?: PhotonPersistentDiskVolumeSource, - "portworxVolume"?: PortworxVolumeSource, - "quobyte"?: QuobyteVolumeSource, - "rbd"?: RBDPersistentVolumeSource, - "scaleIO"?: ScaleIOPersistentVolumeSource, - "storageClassName"?: __cedar::String, - "storageos"?: StorageOSPersistentVolumeSource, - "volumeAttributesClassName"?: __cedar::String, - "volumeMode"?: __cedar::String, - "vsphereVolume"?: VsphereVirtualDiskVolumeSource, - }; - type PersistentVolumeStatus = { - "lastPhaseTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "phase"?: __cedar::String, - "reason"?: __cedar::String, - }; - type PhotonPersistentDiskVolumeSource = { - "fsType"?: __cedar::String, - "pdID": __cedar::String, - }; - type PodAffinity = { - "preferredDuringSchedulingIgnoredDuringExecution"?: Set, - "requiredDuringSchedulingIgnoredDuringExecution"?: Set, - }; - type PodAffinityTerm = { - "labelSelector"?: meta::v1::LabelSelector, - "matchLabelKeys"?: Set<__cedar::String>, - "mismatchLabelKeys"?: Set<__cedar::String>, - "namespaceSelector"?: meta::v1::LabelSelector, - "namespaces"?: Set<__cedar::String>, - "topologyKey": __cedar::String, - }; - type PodAntiAffinity = { - "preferredDuringSchedulingIgnoredDuringExecution"?: Set, - "requiredDuringSchedulingIgnoredDuringExecution"?: Set, - }; - type PodCondition = { - "lastProbeTime"?: __cedar::String, - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type PodDNSConfig = { - "nameservers"?: Set<__cedar::String>, - 
"options"?: Set, - "searches"?: Set<__cedar::String>, - }; - type PodDNSConfigOption = { - "name"?: __cedar::String, - "value"?: __cedar::String, - }; - type PodIP = { - "ip": __cedar::String, - }; - type PodOS = { - "name": __cedar::String, - }; - type PodReadinessGate = { - "conditionType": __cedar::String, - }; - type PodResourceClaim = { - "name": __cedar::String, - "resourceClaimName"?: __cedar::String, - "resourceClaimTemplateName"?: __cedar::String, - }; - type PodResourceClaimStatus = { - "name": __cedar::String, - "resourceClaimName"?: __cedar::String, - }; - type PodSchedulingGate = { - "name": __cedar::String, - }; - type PodSecurityContext = { - "appArmorProfile"?: AppArmorProfile, - "fsGroup"?: __cedar::Long, - "fsGroupChangePolicy"?: __cedar::String, - "runAsGroup"?: __cedar::Long, - "runAsNonRoot"?: __cedar::Bool, - "runAsUser"?: __cedar::Long, - "seLinuxOptions"?: SELinuxOptions, - "seccompProfile"?: SeccompProfile, - "supplementalGroups"?: Set<__cedar::Long>, - "supplementalGroupsPolicy"?: __cedar::String, - "sysctls"?: Set, - "windowsOptions"?: WindowsSecurityContextOptions, - }; - type PodSpec = { - "activeDeadlineSeconds"?: __cedar::Long, - "affinity"?: Affinity, - "automountServiceAccountToken"?: __cedar::Bool, - "containers": Set, - "dnsConfig"?: PodDNSConfig, - "dnsPolicy"?: __cedar::String, - "enableServiceLinks"?: __cedar::Bool, - "ephemeralContainers"?: Set, - "hostAliases"?: Set, - "hostIPC"?: __cedar::Bool, - "hostNetwork"?: __cedar::Bool, - "hostPID"?: __cedar::Bool, - "hostUsers"?: __cedar::Bool, - "hostname"?: __cedar::String, - "imagePullSecrets"?: Set, - "initContainers"?: Set, - "nodeName"?: __cedar::String, - "nodeSelector"?: Set, - "os"?: PodOS, - "overhead"?: __cedar::String, - "preemptionPolicy"?: __cedar::String, - "priority"?: __cedar::Long, - "priorityClassName"?: __cedar::String, - "readinessGates"?: Set, - "resourceClaims"?: Set, - "restartPolicy"?: __cedar::String, - "runtimeClassName"?: __cedar::String, - 
"schedulerName"?: __cedar::String, - "schedulingGates"?: Set, - "securityContext"?: PodSecurityContext, - "serviceAccount"?: __cedar::String, - "serviceAccountName"?: __cedar::String, - "setHostnameAsFQDN"?: __cedar::Bool, - "shareProcessNamespace"?: __cedar::Bool, - "subdomain"?: __cedar::String, - "terminationGracePeriodSeconds"?: __cedar::Long, - "tolerations"?: Set, - "topologySpreadConstraints"?: Set, - "volumes"?: Set, - }; - type PodStatus = { - "conditions"?: Set, - "containerStatuses"?: Set, - "ephemeralContainerStatuses"?: Set, - "hostIP"?: __cedar::String, - "hostIPs"?: Set, - "initContainerStatuses"?: Set, - "message"?: __cedar::String, - "nominatedNodeName"?: __cedar::String, - "phase"?: __cedar::String, - "podIP"?: __cedar::String, - "podIPs"?: Set, - "qosClass"?: __cedar::String, - "reason"?: __cedar::String, - "resize"?: __cedar::String, - "resourceClaimStatuses"?: Set, - "startTime"?: __cedar::String, - }; - type PodTemplateSpec = { - "metadata"?: meta::v1::ObjectMeta, - "spec"?: PodSpec, - }; - type PortStatus = { - "error"?: __cedar::String, - "port": __cedar::Long, - "protocol": __cedar::String, - }; - type PortworxVolumeSource = { - "fsType"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "volumeID": __cedar::String, - }; - type PreferredSchedulingTerm = { - "preference": NodeSelectorTerm, - "weight": __cedar::Long, - }; - type Probe = { - "exec"?: ExecAction, - "failureThreshold"?: __cedar::Long, - "grpc"?: GRPCAction, - "httpGet"?: HTTPGetAction, - "initialDelaySeconds"?: __cedar::Long, - "periodSeconds"?: __cedar::Long, - "successThreshold"?: __cedar::Long, - "tcpSocket"?: TCPSocketAction, - "terminationGracePeriodSeconds"?: __cedar::Long, - "timeoutSeconds"?: __cedar::Long, - }; - type ProjectedVolumeSource = { - "defaultMode"?: __cedar::Long, - "sources"?: Set, - }; - type QuobyteVolumeSource = { - "group"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "registry": __cedar::String, - "tenant"?: __cedar::String, - "user"?: 
__cedar::String, - "volume": __cedar::String, - }; - type RBDPersistentVolumeSource = { - "fsType"?: __cedar::String, - "image": __cedar::String, - "keyring"?: __cedar::String, - "monitors": Set<__cedar::String>, - "pool"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretRef"?: SecretReference, - "user"?: __cedar::String, - }; - type RBDVolumeSource = { - "fsType"?: __cedar::String, - "image": __cedar::String, - "keyring"?: __cedar::String, - "monitors": Set<__cedar::String>, - "pool"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretRef"?: LocalObjectReference, - "user"?: __cedar::String, - }; - type ReplicationControllerCondition = { - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type ReplicationControllerSpec = { - "minReadySeconds"?: __cedar::Long, - "replicas"?: __cedar::Long, - "selector"?: Set, - "template"?: PodTemplateSpec, - }; - type ReplicationControllerStatus = { - "availableReplicas"?: __cedar::Long, - "conditions"?: Set, - "fullyLabeledReplicas"?: __cedar::Long, - "observedGeneration"?: __cedar::Long, - "readyReplicas"?: __cedar::Long, - "replicas": __cedar::Long, - }; - type ResourceClaim = { - "name": __cedar::String, - "request"?: __cedar::String, - }; - type ResourceFieldSelector = { - "containerName"?: __cedar::String, - "divisor"?: __cedar::String, - "resource": __cedar::String, - }; - type ResourceHealth = { - "health"?: __cedar::String, - "resourceID": __cedar::String, - }; - type ResourceQuotaSpec = { - "hard"?: __cedar::String, - "scopeSelector"?: ScopeSelector, - "scopes"?: Set<__cedar::String>, - }; - type ResourceQuotaStatus = { - "hard"?: __cedar::String, - "used"?: __cedar::String, - }; - type ResourceRequirements = { - "claims"?: Set, - "limits"?: __cedar::String, - "requests"?: __cedar::String, - }; - type ResourceStatus = { - "name": __cedar::String, - "resources"?: Set, - }; - type SELinuxOptions = 
{ - "level"?: __cedar::String, - "role"?: __cedar::String, - "type"?: __cedar::String, - "user"?: __cedar::String, - }; - type ScaleIOPersistentVolumeSource = { - "fsType"?: __cedar::String, - "gateway": __cedar::String, - "protectionDomain"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretRef": SecretReference, - "sslEnabled"?: __cedar::Bool, - "storageMode"?: __cedar::String, - "storagePool"?: __cedar::String, - "system": __cedar::String, - "volumeName"?: __cedar::String, - }; - type ScaleIOVolumeSource = { - "fsType"?: __cedar::String, - "gateway": __cedar::String, - "protectionDomain"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretRef": LocalObjectReference, - "sslEnabled"?: __cedar::Bool, - "storageMode"?: __cedar::String, - "storagePool"?: __cedar::String, - "system": __cedar::String, - "volumeName"?: __cedar::String, - }; - type ScopeSelector = { - "matchExpressions"?: Set, - }; - type ScopedResourceSelectorRequirement = { - "operator": __cedar::String, - "scopeName": __cedar::String, - "values"?: Set<__cedar::String>, - }; - type SeccompProfile = { - "localhostProfile"?: __cedar::String, - "type": __cedar::String, - }; - type SecretEnvSource = { - "name"?: __cedar::String, - "optional"?: __cedar::Bool, - }; - type SecretKeySelector = { - "key": __cedar::String, - "name"?: __cedar::String, - "optional"?: __cedar::Bool, - }; - type SecretProjection = { - "items"?: Set, - "name"?: __cedar::String, - "optional"?: __cedar::Bool, - }; - type SecretReference = { - "name"?: __cedar::String, - "namespace"?: __cedar::String, - }; - type SecretVolumeSource = { - "defaultMode"?: __cedar::Long, - "items"?: Set, - "optional"?: __cedar::Bool, - "secretName"?: __cedar::String, - }; - type SecurityContext = { - "allowPrivilegeEscalation"?: __cedar::Bool, - "appArmorProfile"?: AppArmorProfile, - "capabilities"?: Capabilities, - "privileged"?: __cedar::Bool, - "procMount"?: __cedar::String, - "readOnlyRootFilesystem"?: __cedar::Bool, - "runAsGroup"?: 
__cedar::Long, - "runAsNonRoot"?: __cedar::Bool, - "runAsUser"?: __cedar::Long, - "seLinuxOptions"?: SELinuxOptions, - "seccompProfile"?: SeccompProfile, - "windowsOptions"?: WindowsSecurityContextOptions, - }; - type ServiceAccountTokenProjection = { - "audience"?: __cedar::String, - "expirationSeconds"?: __cedar::Long, - "path": __cedar::String, - }; - type ServicePort = { - "appProtocol"?: __cedar::String, - "name"?: __cedar::String, - "nodePort"?: __cedar::Long, - "port": __cedar::Long, - "protocol"?: __cedar::String, - "targetPort"?: __cedar::String, - }; - type ServiceSpec = { - "allocateLoadBalancerNodePorts"?: __cedar::Bool, - "clusterIP"?: __cedar::String, - "clusterIPs"?: Set<__cedar::String>, - "externalIPs"?: Set<__cedar::String>, - "externalName"?: __cedar::String, - "externalTrafficPolicy"?: __cedar::String, - "healthCheckNodePort"?: __cedar::Long, - "internalTrafficPolicy"?: __cedar::String, - "ipFamilies"?: Set<__cedar::String>, - "ipFamilyPolicy"?: __cedar::String, - "loadBalancerClass"?: __cedar::String, - "loadBalancerIP"?: __cedar::String, - "loadBalancerSourceRanges"?: Set<__cedar::String>, - "ports"?: Set, - "publishNotReadyAddresses"?: __cedar::Bool, - "selector"?: Set, - "sessionAffinity"?: __cedar::String, - "sessionAffinityConfig"?: SessionAffinityConfig, - "trafficDistribution"?: __cedar::String, - "type"?: __cedar::String, - }; - type ServiceStatus = { - "conditions"?: Set, - "loadBalancer"?: LoadBalancerStatus, - }; - type SessionAffinityConfig = { - "clientIP"?: ClientIPConfig, - }; - type SleepAction = { - "seconds": __cedar::Long, - }; - type StorageOSPersistentVolumeSource = { - "fsType"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretRef"?: ObjectReference, - "volumeName"?: __cedar::String, - "volumeNamespace"?: __cedar::String, - }; - type StorageOSVolumeSource = { - "fsType"?: __cedar::String, - "readOnly"?: __cedar::Bool, - "secretRef"?: LocalObjectReference, - "volumeName"?: __cedar::String, - "volumeNamespace"?: 
__cedar::String, - }; - type Sysctl = { - "name": __cedar::String, - "value": __cedar::String, - }; - type TCPSocketAction = { - "host"?: __cedar::String, - "port": __cedar::String, - }; - type Taint = { - "effect": __cedar::String, - "key": __cedar::String, - "timeAdded"?: __cedar::String, - "value"?: __cedar::String, - }; - type Toleration = { - "effect"?: __cedar::String, - "key"?: __cedar::String, - "operator"?: __cedar::String, - "tolerationSeconds"?: __cedar::Long, - "value"?: __cedar::String, - }; - type TopologySelectorLabelRequirement = { - "key": __cedar::String, - "values": Set<__cedar::String>, - }; - type TopologySelectorTerm = { - "matchLabelExpressions"?: Set, - }; - type TopologySpreadConstraint = { - "labelSelector"?: meta::v1::LabelSelector, - "matchLabelKeys"?: Set<__cedar::String>, - "maxSkew": __cedar::Long, - "minDomains"?: __cedar::Long, - "nodeAffinityPolicy"?: __cedar::String, - "nodeTaintsPolicy"?: __cedar::String, - "topologyKey": __cedar::String, - "whenUnsatisfiable": __cedar::String, - }; - type TypedLocalObjectReference = { - "apiGroup"?: __cedar::String, - "kind": __cedar::String, - "name": __cedar::String, - }; - type TypedObjectReference = { - "apiGroup"?: __cedar::String, - "kind": __cedar::String, - "name": __cedar::String, - "namespace"?: __cedar::String, - }; - type Volume = { - "awsElasticBlockStore"?: AWSElasticBlockStoreVolumeSource, - "azureDisk"?: AzureDiskVolumeSource, - "azureFile"?: AzureFileVolumeSource, - "cephfs"?: CephFSVolumeSource, - "cinder"?: CinderVolumeSource, - "configMap"?: ConfigMapVolumeSource, - "csi"?: CSIVolumeSource, - "downwardAPI"?: DownwardAPIVolumeSource, - "emptyDir"?: EmptyDirVolumeSource, - "ephemeral"?: EphemeralVolumeSource, - "fc"?: FCVolumeSource, - "flexVolume"?: FlexVolumeSource, - "flocker"?: FlockerVolumeSource, - "gcePersistentDisk"?: GCEPersistentDiskVolumeSource, - "gitRepo"?: GitRepoVolumeSource, - "glusterfs"?: GlusterfsVolumeSource, - "hostPath"?: HostPathVolumeSource, - "image"?: 
ImageVolumeSource, - "iscsi"?: ISCSIVolumeSource, - "name": __cedar::String, - "nfs"?: NFSVolumeSource, - "persistentVolumeClaim"?: PersistentVolumeClaimVolumeSource, - "photonPersistentDisk"?: PhotonPersistentDiskVolumeSource, - "portworxVolume"?: PortworxVolumeSource, - "projected"?: ProjectedVolumeSource, - "quobyte"?: QuobyteVolumeSource, - "rbd"?: RBDVolumeSource, - "scaleIO"?: ScaleIOVolumeSource, - "secret"?: SecretVolumeSource, - "storageos"?: StorageOSVolumeSource, - "vsphereVolume"?: VsphereVirtualDiskVolumeSource, - }; - type VolumeDevice = { - "devicePath": __cedar::String, - "name": __cedar::String, - }; - type VolumeMount = { - "mountPath": __cedar::String, - "mountPropagation"?: __cedar::String, - "name": __cedar::String, - "readOnly"?: __cedar::Bool, - "recursiveReadOnly"?: __cedar::String, - "subPath"?: __cedar::String, - "subPathExpr"?: __cedar::String, - }; - type VolumeMountStatus = { - "mountPath": __cedar::String, - "name": __cedar::String, - "readOnly"?: __cedar::Bool, - "recursiveReadOnly"?: __cedar::String, - }; - type VolumeNodeAffinity = { - "required"?: NodeSelector, - }; - type VolumeProjection = { - "clusterTrustBundle"?: ClusterTrustBundleProjection, - "configMap"?: ConfigMapProjection, - "downwardAPI"?: DownwardAPIProjection, - "secret"?: SecretProjection, - "serviceAccountToken"?: ServiceAccountTokenProjection, - }; - type VolumeResourceRequirements = { - "limits"?: __cedar::String, - "requests"?: __cedar::String, - }; - type VsphereVirtualDiskVolumeSource = { - "fsType"?: __cedar::String, - "storagePolicyID"?: __cedar::String, - "storagePolicyName"?: __cedar::String, - "volumePath": __cedar::String, - }; - type WeightedPodAffinityTerm = { - "podAffinityTerm": PodAffinityTerm, - "weight": __cedar::Long, - }; - type WindowsSecurityContextOptions = { - "gmsaCredentialSpec"?: __cedar::String, - "gmsaCredentialSpecName"?: __cedar::String, - "hostProcess"?: __cedar::Bool, - "runAsUserName"?: __cedar::String, - }; - entity Binding = { - 
"apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "target": ObjectReference, - }; - entity ComponentStatus = { - "apiVersion"?: __cedar::String, - "conditions"?: Set, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - }; - entity ConfigMap = { - "apiVersion"?: __cedar::String, - "binaryData"?: Set, - "data"?: Set, - "immutable"?: __cedar::Bool, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ConfigMap, - }; - entity Endpoints = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Endpoints, - "subsets"?: Set, - }; - entity Event = { - "action"?: __cedar::String, - "apiVersion"?: __cedar::String, - "count"?: __cedar::Long, - "eventTime"?: __cedar::String, - "firstTimestamp"?: __cedar::String, - "involvedObject": ObjectReference, - "kind"?: __cedar::String, - "lastTimestamp"?: __cedar::String, - "message"?: __cedar::String, - "metadata": meta::v1::ObjectMeta, - "oldObject"?: Event, - "reason"?: __cedar::String, - "related"?: ObjectReference, - "reportingComponent"?: __cedar::String, - "reportingInstance"?: __cedar::String, - "series"?: EventSeries, - "source"?: EventSource, - "type"?: __cedar::String, - }; - entity LimitRange = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: LimitRange, - "spec"?: LimitRangeSpec, - }; - entity Namespace = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Namespace, - "spec"?: NamespaceSpec, - "status"?: NamespaceStatus, - }; - entity Node = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Node, - "spec"?: NodeSpec, - "status"?: NodeStatus, - }; - entity NodeProxyOptions = { - "apiVersion": __cedar::String, - "kind": __cedar::String, - "path": __cedar::String, - }; - entity 
PersistentVolume = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: PersistentVolume, - "spec"?: PersistentVolumeSpec, - "status"?: PersistentVolumeStatus, - }; - entity PersistentVolumeClaim = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: PersistentVolumeClaim, - "spec"?: PersistentVolumeClaimSpec, - "status"?: PersistentVolumeClaimStatus, - }; - entity Pod = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Pod, - "spec"?: PodSpec, - "status"?: PodStatus, - }; - entity PodAttachOptions = { - "apiVersion": __cedar::String, - "command": Set<__cedar::String>, - "container": __cedar::String, - "kind": __cedar::String, - "stderr": __cedar::Bool, - "stdin": __cedar::Bool, - "stdout": __cedar::Bool, - "tty": __cedar::Bool, - }; - entity PodExecOptions = { - "apiVersion": __cedar::String, - "command": Set<__cedar::String>, - "container": __cedar::String, - "kind": __cedar::String, - "stderr": __cedar::Bool, - "stdin": __cedar::Bool, - "stdout": __cedar::Bool, - "tty": __cedar::Bool, - }; - entity PodPortForwardOptions = { - "apiVersion": __cedar::String, - "kind": __cedar::String, - "ports"?: Set<__cedar::String>, - }; - entity PodProxyOptions = { - "apiVersion": __cedar::String, - "kind": __cedar::String, - "path": __cedar::String, - }; - entity PodTemplate = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: PodTemplate, - "template"?: PodTemplateSpec, - }; - entity ReplicationController = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ReplicationController, - "spec"?: ReplicationControllerSpec, - "status"?: ReplicationControllerStatus, - }; - entity ResourceQuota = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - 
"metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ResourceQuota, - "spec"?: ResourceQuotaSpec, - "status"?: ResourceQuotaStatus, - }; - entity Secret = { - "apiVersion"?: __cedar::String, - "data"?: Set, - "immutable"?: __cedar::Bool, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Secret, - "stringData"?: Set, - "type"?: __cedar::String, - }; - entity Service = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Service, - "spec"?: ServiceSpec, - "status"?: ServiceStatus, - }; - entity ServiceAccount = { - "apiVersion"?: __cedar::String, - "automountServiceAccountToken"?: __cedar::Bool, - "imagePullSecrets"?: Set, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ServiceAccount, - "secrets"?: Set, - }; - entity ServiceProxyOptions = { - "apiVersion": __cedar::String, - "kind": __cedar::String, - "path": __cedar::String, - }; -} -namespace discovery::v1 { - type Endpoint = { - "addresses": Set<__cedar::String>, - "conditions"?: EndpointConditions, - "deprecatedTopology"?: Set, - "hints"?: EndpointHints, - "hostname"?: __cedar::String, - "nodeName"?: __cedar::String, - "targetRef"?: core::v1::ObjectReference, - "zone"?: __cedar::String, - }; - type EndpointConditions = { - "ready"?: __cedar::Bool, - "serving"?: __cedar::Bool, - "terminating"?: __cedar::Bool, - }; - type EndpointHints = { - "forZones"?: Set, - }; - type EndpointPort = { - "appProtocol"?: __cedar::String, - "name"?: __cedar::String, - "port"?: __cedar::Long, - "protocol"?: __cedar::String, - }; - type ForZone = { - "name": __cedar::String, - }; - entity EndpointSlice = { - "addressType": __cedar::String, - "apiVersion"?: __cedar::String, - "endpoints": Set, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: EndpointSlice, - "ports"?: Set, - }; -} -namespace events::v1 { - type EventSeries = { - "count": __cedar::Long, - "lastObservedTime": 
__cedar::String, - }; - entity Event = { - "action"?: __cedar::String, - "apiVersion"?: __cedar::String, - "deprecatedCount"?: __cedar::Long, - "deprecatedFirstTimestamp"?: __cedar::String, - "deprecatedLastTimestamp"?: __cedar::String, - "deprecatedSource"?: core::v1::EventSource, - "eventTime": __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "note"?: __cedar::String, - "oldObject"?: Event, - "reason"?: __cedar::String, - "regarding"?: core::v1::ObjectReference, - "related"?: core::v1::ObjectReference, - "reportingController"?: __cedar::String, - "reportingInstance"?: __cedar::String, - "series"?: EventSeries, - "type"?: __cedar::String, - }; -} -namespace flowcontrol::v1 { - type ExemptPriorityLevelConfiguration = { - "lendablePercent"?: __cedar::Long, - "nominalConcurrencyShares"?: __cedar::Long, - }; - type FlowDistinguisherMethod = { - "type": __cedar::String, - }; - type FlowSchemaCondition = { - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status"?: __cedar::String, - "type"?: __cedar::String, - }; - type FlowSchemaSpec = { - "distinguisherMethod"?: FlowDistinguisherMethod, - "matchingPrecedence"?: __cedar::Long, - "priorityLevelConfiguration": PriorityLevelConfigurationReference, - "rules"?: Set, - }; - type FlowSchemaStatus = { - "conditions"?: Set, - }; - type GroupSubject = { - "name": __cedar::String, - }; - type LimitResponse = { - "queuing"?: QueuingConfiguration, - "type": __cedar::String, - }; - type LimitedPriorityLevelConfiguration = { - "borrowingLimitPercent"?: __cedar::Long, - "lendablePercent"?: __cedar::Long, - "limitResponse"?: LimitResponse, - "nominalConcurrencyShares"?: __cedar::Long, - }; - type NonResourcePolicyRule = { - "nonResourceURLs": Set<__cedar::String>, - "verbs": Set<__cedar::String>, - }; - type PolicyRulesWithSubjects = { - "nonResourceRules"?: Set, - "resourceRules"?: Set, - "subjects": Set, - }; - type 
PriorityLevelConfigurationCondition = { - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status"?: __cedar::String, - "type"?: __cedar::String, - }; - type PriorityLevelConfigurationReference = { - "name": __cedar::String, - }; - type PriorityLevelConfigurationSpec = { - "exempt"?: ExemptPriorityLevelConfiguration, - "limited"?: LimitedPriorityLevelConfiguration, - "type": __cedar::String, - }; - type PriorityLevelConfigurationStatus = { - "conditions"?: Set, - }; - type QueuingConfiguration = { - "handSize"?: __cedar::Long, - "queueLengthLimit"?: __cedar::Long, - "queues"?: __cedar::Long, - }; - type ResourcePolicyRule = { - "apiGroups": Set<__cedar::String>, - "clusterScope"?: __cedar::Bool, - "namespaces"?: Set<__cedar::String>, - "resources": Set<__cedar::String>, - "verbs": Set<__cedar::String>, - }; - type ServiceAccountSubject = { - "name": __cedar::String, - "namespace": __cedar::String, - }; - type Subject = { - "group"?: GroupSubject, - "kind": __cedar::String, - "serviceAccount"?: ServiceAccountSubject, - "user"?: UserSubject, - }; - type UserSubject = { - "name": __cedar::String, - }; - entity FlowSchema = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: FlowSchema, - "spec"?: FlowSchemaSpec, - "status"?: FlowSchemaStatus, - }; - entity PriorityLevelConfiguration = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: PriorityLevelConfiguration, - "spec"?: PriorityLevelConfigurationSpec, - "status"?: PriorityLevelConfigurationStatus, - }; -} -namespace meta::v1 { - type APIResource = { - "categories"?: Set<__cedar::String>, - "group"?: __cedar::String, - "kind": __cedar::String, - "name": __cedar::String, - "namespaced": __cedar::Bool, - "shortNames"?: Set<__cedar::String>, - "singularName": __cedar::String, - "storageVersionHash"?: __cedar::String, - "verbs": 
Set<__cedar::String>, - "version"?: __cedar::String, - }; - type APIResourceList = { - "apiVersion"?: __cedar::String, - "groupVersion": __cedar::String, - "kind"?: __cedar::String, - "resources": Set, - }; - type Condition = { - "lastTransitionTime": __cedar::String, - "message": __cedar::String, - "observedGeneration"?: __cedar::Long, - "reason": __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type DeleteOptions = { - "apiVersion"?: __cedar::String, - "dryRun"?: Set<__cedar::String>, - "gracePeriodSeconds"?: __cedar::Long, - "kind"?: __cedar::String, - "orphanDependents"?: __cedar::Bool, - "preconditions"?: Preconditions, - "propagationPolicy"?: __cedar::String, - }; - type FieldSelectorRequirement = { - "key": __cedar::String, - "operator": __cedar::String, - "values"?: Set<__cedar::String>, - }; - type FieldsV1 = { - }; - type KeyValue = { - "key": __cedar::String, - "value": __cedar::String, - }; - type KeyValueStringSlice = { - "key": __cedar::String, - "value": Set<__cedar::String>, - }; - type LabelSelector = { - "matchExpressions"?: Set, - "matchLabels"?: Set, - }; - type LabelSelectorRequirement = { - "key": __cedar::String, - "operator": __cedar::String, - "values"?: Set<__cedar::String>, - }; - type ListMeta = { - "continue"?: __cedar::String, - "remainingItemCount"?: __cedar::Long, - "resourceVersion"?: __cedar::String, - "selfLink"?: __cedar::String, - }; - type ManagedFieldsEntry = { - "apiVersion"?: __cedar::String, - "fieldsType"?: __cedar::String, - "fieldsV1"?: FieldsV1, - "manager"?: __cedar::String, - "operation"?: __cedar::String, - "subresource"?: __cedar::String, - "time"?: __cedar::String, - }; - type ObjectMeta = { - "annotations"?: Set, - "creationTimestamp"?: __cedar::String, - "deletionGracePeriodSeconds"?: __cedar::Long, - "deletionTimestamp"?: __cedar::String, - "finalizers"?: Set<__cedar::String>, - "generateName"?: __cedar::String, - "generation"?: __cedar::Long, - "labels"?: Set, - "managedFields"?: 
Set, - "name"?: __cedar::String, - "namespace"?: __cedar::String, - "ownerReferences"?: Set, - "resourceVersion"?: __cedar::String, - "selfLink"?: __cedar::String, - "uid"?: __cedar::String, - }; - type OwnerReference = { - "apiVersion": __cedar::String, - "blockOwnerDeletion"?: __cedar::Bool, - "controller"?: __cedar::Bool, - "kind": __cedar::String, - "name": __cedar::String, - "uid": __cedar::String, - }; - type Patch = { - }; - type Preconditions = { - "resourceVersion"?: __cedar::String, - "uid"?: __cedar::String, - }; - type Status = { - "apiVersion"?: __cedar::String, - "code"?: __cedar::Long, - "details"?: StatusDetails, - "kind"?: __cedar::String, - "message"?: __cedar::String, - "metadata"?: ListMeta, - "reason"?: __cedar::String, - "status"?: __cedar::String, - }; - type StatusCause = { - "field"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - }; - type StatusDetails = { - "causes"?: Set, - "group"?: __cedar::String, - "kind"?: __cedar::String, - "name"?: __cedar::String, - "retryAfterSeconds"?: __cedar::Long, - "uid"?: __cedar::String, - }; - type WatchEvent = { - "object": __cedar::String, - "type": __cedar::String, - }; -} -namespace networking::v1 { - type HTTPIngressPath = { - "backend": IngressBackend, - "path"?: __cedar::String, - "pathType": __cedar::String, - }; - type HTTPIngressRuleValue = { - "paths": Set, - }; - type IPBlock = { - "cidr": __cedar::String, - "except"?: Set<__cedar::String>, - }; - type IngressBackend = { - "resource"?: core::v1::TypedLocalObjectReference, - "service"?: IngressServiceBackend, - }; - type IngressClassParametersReference = { - "apiGroup"?: __cedar::String, - "kind": __cedar::String, - "name": __cedar::String, - "namespace"?: __cedar::String, - "scope"?: __cedar::String, - }; - type IngressClassSpec = { - "controller"?: __cedar::String, - "parameters"?: IngressClassParametersReference, - }; - type IngressLoadBalancerIngress = { - "hostname"?: __cedar::String, - "ip"?: 
__cedar::String, - "ports"?: Set, - }; - type IngressLoadBalancerStatus = { - "ingress"?: Set, - }; - type IngressPortStatus = { - "error"?: __cedar::String, - "port": __cedar::Long, - "protocol": __cedar::String, - }; - type IngressRule = { - "host"?: __cedar::String, - }; - type IngressServiceBackend = { - "name": __cedar::String, - "port"?: ServiceBackendPort, - }; - type IngressSpec = { - "defaultBackend"?: IngressBackend, - "ingressClassName"?: __cedar::String, - "rules"?: Set, - "tls"?: Set, - }; - type IngressStatus = { - "loadBalancer"?: IngressLoadBalancerStatus, - }; - type IngressTLS = { - "hosts"?: Set<__cedar::String>, - "secretName"?: __cedar::String, - }; - type NetworkPolicyEgressRule = { - "ports"?: Set, - "to"?: Set, - }; - type NetworkPolicyIngressRule = { - "from"?: Set, - "ports"?: Set, - }; - type NetworkPolicyPeer = { - "ipBlock"?: IPBlock, - "namespaceSelector"?: meta::v1::LabelSelector, - "podSelector"?: meta::v1::LabelSelector, - }; - type NetworkPolicyPort = { - "endPort"?: __cedar::Long, - "port"?: __cedar::String, - "protocol"?: __cedar::String, - }; - type NetworkPolicySpec = { - "egress"?: Set, - "ingress"?: Set, - "podSelector": meta::v1::LabelSelector, - "policyTypes"?: Set<__cedar::String>, - }; - type ServiceBackendPort = { - "name"?: __cedar::String, - "number"?: __cedar::Long, - }; - entity Ingress = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Ingress, - "spec"?: IngressSpec, - "status"?: IngressStatus, - }; - entity IngressClass = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: IngressClass, - "spec"?: IngressClassSpec, - }; - entity NetworkPolicy = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: NetworkPolicy, - "spec"?: NetworkPolicySpec, - }; -} -namespace node::v1 { - type Overhead = { - "podFixed"?: __cedar::String, 
- }; - type Scheduling = { - "nodeSelector"?: Set, - "tolerations"?: Set, - }; - entity RuntimeClass = { - "apiVersion"?: __cedar::String, - "handler": __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: RuntimeClass, - "overhead"?: Overhead, - "scheduling"?: Scheduling, - }; -} -namespace policy::v1 { - type PodDisruptionBudgetSpec = { - "maxUnavailable"?: __cedar::String, - "minAvailable"?: __cedar::String, - "selector"?: meta::v1::LabelSelector, - "unhealthyPodEvictionPolicy"?: __cedar::String, - }; - type PodDisruptionBudgetStatus = { - "conditions"?: Set, - "currentHealthy": __cedar::Long, - "desiredHealthy": __cedar::Long, - "disruptedPods"?: __cedar::String, - "disruptionsAllowed": __cedar::Long, - "expectedPods": __cedar::Long, - "observedGeneration"?: __cedar::Long, - }; - entity Eviction = { - "apiVersion"?: __cedar::String, - "deleteOptions"?: meta::v1::DeleteOptions, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - }; - entity PodDisruptionBudget = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: PodDisruptionBudget, - "spec"?: PodDisruptionBudgetSpec, - "status"?: PodDisruptionBudgetStatus, - }; -} -namespace rbac::v1 { - type AggregationRule = { - "clusterRoleSelectors"?: Set, - }; - type PolicyRule = { - "apiGroups"?: Set<__cedar::String>, - "nonResourceURLs"?: Set<__cedar::String>, - "resourceNames"?: Set<__cedar::String>, - "resources"?: Set<__cedar::String>, - "verbs": Set<__cedar::String>, - }; - type RoleRef = { - "apiGroup": __cedar::String, - "kind": __cedar::String, - "name": __cedar::String, - }; - type Subject = { - "apiGroup"?: __cedar::String, - "kind": __cedar::String, - "name": __cedar::String, - "namespace"?: __cedar::String, - }; - entity ClusterRole = { - "aggregationRule"?: AggregationRule, - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: 
ClusterRole, - "rules"?: Set, - }; - entity ClusterRoleBinding = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: ClusterRoleBinding, - "roleRef": RoleRef, - "subjects"?: Set, - }; - entity Role = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Role, - "rules"?: Set, - }; - entity RoleBinding = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: RoleBinding, - "roleRef": RoleRef, - "subjects"?: Set, - }; -} -namespace scheduling::v1 { - entity PriorityClass = { - "apiVersion"?: __cedar::String, - "description"?: __cedar::String, - "globalDefault"?: __cedar::Bool, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: PriorityClass, - "preemptionPolicy"?: __cedar::String, - "value": __cedar::Long, - }; -} -namespace storage::v1 { - type CSIDriverSpec = { - "attachRequired"?: __cedar::Bool, - "fsGroupPolicy"?: __cedar::String, - "podInfoOnMount"?: __cedar::Bool, - "requiresRepublish"?: __cedar::Bool, - "seLinuxMount"?: __cedar::Bool, - "storageCapacity"?: __cedar::Bool, - "tokenRequests"?: Set, - "volumeLifecycleModes"?: Set<__cedar::String>, - }; - type CSINodeDriver = { - "allocatable"?: VolumeNodeResources, - "name": __cedar::String, - "nodeID": __cedar::String, - "topologyKeys"?: Set<__cedar::String>, - }; - type CSINodeSpec = { - "drivers": Set, - }; - type TokenRequest = { - "audience": __cedar::String, - "expirationSeconds"?: __cedar::Long, - }; - type VolumeAttachmentSource = { - "inlineVolumeSpec"?: core::v1::PersistentVolumeSpec, - "persistentVolumeName"?: __cedar::String, - }; - type VolumeAttachmentSpec = { - "attacher": __cedar::String, - "nodeName": __cedar::String, - "source": VolumeAttachmentSource, - }; - type VolumeAttachmentStatus = { - "attachError"?: VolumeError, - "attached": __cedar::Bool, - "attachmentMetadata"?: Set, - 
"detachError"?: VolumeError, - }; - type VolumeError = { - "message"?: __cedar::String, - "time"?: __cedar::String, - }; - type VolumeNodeResources = { - "count"?: __cedar::Long, - }; - entity CSIDriver = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: CSIDriver, - "spec": CSIDriverSpec, - }; - entity CSINode = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: CSINode, - "spec": CSINodeSpec, - }; - entity CSIStorageCapacity = { - "apiVersion"?: __cedar::String, - "capacity"?: __cedar::String, - "kind"?: __cedar::String, - "maximumVolumeSize"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "nodeTopology"?: meta::v1::LabelSelector, - "oldObject"?: CSIStorageCapacity, - "storageClassName": __cedar::String, - }; - entity StorageClass = { - "allowVolumeExpansion"?: __cedar::Bool, - "allowedTopologies"?: Set, - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "mountOptions"?: Set<__cedar::String>, - "oldObject"?: StorageClass, - "parameters"?: Set, - "provisioner": __cedar::String, - "reclaimPolicy"?: __cedar::String, - "volumeBindingMode"?: __cedar::String, - }; - entity VolumeAttachment = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: VolumeAttachment, - "spec": VolumeAttachmentSpec, - "status"?: VolumeAttachmentStatus, - }; -} -namespace aws::k8s::cedar::v1alpha1 { - entity Policy = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: Policy, - "spec": { - "content"?: __cedar::String, - }, - }; -} -namespace flowcontrol::v1beta3 { - type ExemptPriorityLevelConfiguration = { - "lendablePercent"?: __cedar::Long, - "nominalConcurrencyShares"?: __cedar::Long, - }; - type FlowDistinguisherMethod = { - "type": __cedar::String, - }; - type 
FlowSchemaCondition = { - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status"?: __cedar::String, - "type"?: __cedar::String, - }; - type FlowSchemaSpec = { - "distinguisherMethod"?: FlowDistinguisherMethod, - "matchingPrecedence"?: __cedar::Long, - "priorityLevelConfiguration": PriorityLevelConfigurationReference, - "rules"?: Set, - }; - type FlowSchemaStatus = { - "conditions"?: Set, - }; - type GroupSubject = { - "name": __cedar::String, - }; - type LimitResponse = { - "queuing"?: QueuingConfiguration, - "type": __cedar::String, - }; - type LimitedPriorityLevelConfiguration = { - "borrowingLimitPercent"?: __cedar::Long, - "lendablePercent"?: __cedar::Long, - "limitResponse"?: LimitResponse, - "nominalConcurrencyShares"?: __cedar::Long, - }; - type NonResourcePolicyRule = { - "nonResourceURLs": Set<__cedar::String>, - "verbs": Set<__cedar::String>, - }; - type PolicyRulesWithSubjects = { - "nonResourceRules"?: Set, - "resourceRules"?: Set, - "subjects": Set, - }; - type PriorityLevelConfigurationCondition = { - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status"?: __cedar::String, - "type"?: __cedar::String, - }; - type PriorityLevelConfigurationReference = { - "name": __cedar::String, - }; - type PriorityLevelConfigurationSpec = { - "exempt"?: ExemptPriorityLevelConfiguration, - "limited"?: LimitedPriorityLevelConfiguration, - "type": __cedar::String, - }; - type PriorityLevelConfigurationStatus = { - "conditions"?: Set, - }; - type QueuingConfiguration = { - "handSize"?: __cedar::Long, - "queueLengthLimit"?: __cedar::Long, - "queues"?: __cedar::Long, - }; - type ResourcePolicyRule = { - "apiGroups": Set<__cedar::String>, - "clusterScope"?: __cedar::Bool, - "namespaces"?: Set<__cedar::String>, - "resources": Set<__cedar::String>, - "verbs": Set<__cedar::String>, - }; - type ServiceAccountSubject = { - "name": __cedar::String, - "namespace": 
__cedar::String, - }; - type Subject = { - "group"?: GroupSubject, - "kind": __cedar::String, - "serviceAccount"?: ServiceAccountSubject, - "user"?: UserSubject, - }; - type UserSubject = { - "name": __cedar::String, - }; - entity FlowSchema = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: FlowSchema, - "spec"?: FlowSchemaSpec, - "status"?: FlowSchemaStatus, - }; - entity PriorityLevelConfiguration = { - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: PriorityLevelConfiguration, - "spec"?: PriorityLevelConfigurationSpec, - "status"?: PriorityLevelConfigurationStatus, - }; -} -namespace autoscaling::v2 { - type ContainerResourceMetricSource = { - "container": __cedar::String, - "name": __cedar::String, - "target": MetricTarget, - }; - type ContainerResourceMetricStatus = { - "container": __cedar::String, - "current": MetricValueStatus, - "name": __cedar::String, - }; - type CrossVersionObjectReference = { - "apiVersion"?: __cedar::String, - "kind": __cedar::String, - "name": __cedar::String, - }; - type ExternalMetricSource = { - "metric": MetricIdentifier, - "target": MetricTarget, - }; - type ExternalMetricStatus = { - "current": MetricValueStatus, - "metric": MetricIdentifier, - }; - type HPAScalingPolicy = { - "periodSeconds": __cedar::Long, - "type": __cedar::String, - "value": __cedar::Long, - }; - type HPAScalingRules = { - "policies"?: Set, - "selectPolicy"?: __cedar::String, - "stabilizationWindowSeconds"?: __cedar::Long, - }; - type HorizontalPodAutoscalerBehavior = { - "scaleDown"?: HPAScalingRules, - "scaleUp"?: HPAScalingRules, - }; - type HorizontalPodAutoscalerCondition = { - "lastTransitionTime"?: __cedar::String, - "message"?: __cedar::String, - "reason"?: __cedar::String, - "status": __cedar::String, - "type": __cedar::String, - }; - type HorizontalPodAutoscalerSpec = { - "behavior"?: 
HorizontalPodAutoscalerBehavior, - "maxReplicas": __cedar::Long, - "metrics"?: Set, - "minReplicas"?: __cedar::Long, - "scaleTargetRef": CrossVersionObjectReference, - }; - type HorizontalPodAutoscalerStatus = { - "conditions"?: Set, - "currentMetrics"?: Set, - "currentReplicas"?: __cedar::Long, - "desiredReplicas": __cedar::Long, - "lastScaleTime"?: __cedar::String, - "observedGeneration"?: __cedar::Long, - }; - type MetricIdentifier = { - "name": __cedar::String, - "selector"?: meta::v1::LabelSelector, - }; - type MetricSpec = { - "containerResource"?: ContainerResourceMetricSource, - "external"?: ExternalMetricSource, - "object"?: ObjectMetricSource, - "pods"?: PodsMetricSource, - "resource"?: ResourceMetricSource, - "type": __cedar::String, - }; - type MetricStatus = { - "containerResource"?: ContainerResourceMetricStatus, - "external"?: ExternalMetricStatus, - "object"?: ObjectMetricStatus, - "pods"?: PodsMetricStatus, - "resource"?: ResourceMetricStatus, - "type": __cedar::String, - }; - type MetricTarget = { - "averageUtilization"?: __cedar::Long, - "averageValue"?: __cedar::String, - "type": __cedar::String, - "value"?: __cedar::String, - }; - type MetricValueStatus = { - "averageUtilization"?: __cedar::Long, - "averageValue"?: __cedar::String, - "value"?: __cedar::String, - }; - type ObjectMetricSource = { - "describedObject": CrossVersionObjectReference, - "metric": MetricIdentifier, - "target": MetricTarget, - }; - type ObjectMetricStatus = { - "current": MetricValueStatus, - "describedObject": CrossVersionObjectReference, - "metric": MetricIdentifier, - }; - type PodsMetricSource = { - "metric": MetricIdentifier, - "target": MetricTarget, - }; - type PodsMetricStatus = { - "current": MetricValueStatus, - "metric": MetricIdentifier, - }; - type ResourceMetricSource = { - "name": __cedar::String, - "target": MetricTarget, - }; - type ResourceMetricStatus = { - "current": MetricValueStatus, - "name": __cedar::String, - }; - entity HorizontalPodAutoscaler = 
{ - "apiVersion"?: __cedar::String, - "kind"?: __cedar::String, - "metadata"?: meta::v1::ObjectMeta, - "oldObject"?: HorizontalPodAutoscaler, - "spec"?: HorizontalPodAutoscalerSpec, - "status"?: HorizontalPodAutoscalerStatus, - }; -} diff --git a/internal/schema/parser/testdata/fuzz/FuzzParseSchema/3f22c1ed56f3e96f b/internal/schema/parser/testdata/fuzz/FuzzParseSchema/3f22c1ed56f3e96f deleted file mode 100644 index 5eb25a38..00000000 --- a/internal/schema/parser/testdata/fuzz/FuzzParseSchema/3f22c1ed56f3e96f +++ /dev/null @@ -1,2 +0,0 @@ -go test fuzz v1 -[]byte("namespace 0A0 action \"\" appliesTo 0 principal!0//0000") diff --git a/internal/schema/parser/testdata/fuzz/FuzzParseSchema/a28d644254e4aa45 b/internal/schema/parser/testdata/fuzz/FuzzParseSchema/a28d644254e4aa45 deleted file mode 100644 index 97a55bd2..00000000 --- a/internal/schema/parser/testdata/fuzz/FuzzParseSchema/a28d644254e4aa45 +++ /dev/null @@ -1,2 +0,0 @@ -go test fuzz v1 -[]byte("//00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\nnamespace 0A0 type") diff --git a/internal/schema/parser/testdata/lex/1.cedarschema b/internal/schema/parser/testdata/lex/1.cedarschema deleted file mode 100644 index 2533dac3..00000000 --- a/internal/schema/parser/testdata/lex/1.cedarschema +++ /dev/null @@ -1,6 +0,0 @@ -entity A; -action "action" appliesTo { - principal: [A], - resource: [A], - context: {"M": {"\0\0\u{13}\0?": A, "\0\0\u{13}\0?\u{1a}": A, "\0\0m": A, "\u{4}e": A, "\u{1b}\u{7f}___": A, "\'\u{1c}`": A, "8888": A, "U_": A, "c": A, "e": A, "m\u{1c}`ax": __cedar::Bool, "mWW": A, "mm\u{1c}`": A, "\u{7f}\u{1}\u{7}": A}} -}; \ No newline at end of file diff --git a/internal/schema/token/token.go b/internal/schema/token/token.go deleted file mode 100644 index c4a6d5e2..00000000 --- a/internal/schema/token/token.go +++ /dev/null @@ -1,151 +0,0 @@ -package token - -import ( - "errors" - "fmt" - "slices" - "strconv" - - 
"github.com/cedar-policy/cedar-go/x/exp/ast" -) - -type Error struct { - Pos Position - Err error -} - -func (e Error) Error() string { - filename := e.Pos.Filename - if filename == "" { - filename = "" - } - return fmt.Sprintf("%s:%d:%d: %v", filename, e.Pos.Line, e.Pos.Column, e.Err) -} - -type Errors []error - -func (errs Errors) Error() string { - return errors.Join(errs...).Error() -} - -func (errs Errors) Sort() { - errs = slices.DeleteFunc(errs, func(e1 error) bool { return e1 == nil }) - slices.SortFunc(errs, func(e1, e2 error) int { - te1, ok1 := e1.(Error) - te2, ok2 := e2.(Error) - if !ok1 || !ok2 { - return 0 // don't sort these values - } - return te1.Pos.Offset - te2.Pos.Offset - }) -} - -type Position = ast.Position - -type Type int - -const ( - INVALID Type = iota - - // Keywords - keep up to date with AllKeywords below - NAMESPACE - ENTITY - ACTION - TYPE - IN - TAGS - APPLIESTO - PRINCIPAL - RESOURCE - CONTEXT - ENUM - - // Punctuation - LEFTBRACE // { - RIGHTBRACE // } - LEFTBRACKET // [ - RIGHTBRACKET // ] - LEFTANGLE // < - RIGHTANGLE // > - COLON // : - SEMICOLON // ; - COMMA // , - EQUALS // = - QUESTION // ? 
- DOUBLECOLON // :: - AT // @ - LEFTPAREN // ( - RIGHTPAREN // ) - - // Identifiers and literals - IDENT // Regular identifier - STRING // String literal, quoted with "" - - // Comments - COMMENT // // style comment - - // Special - EOF // End of file -) - -var types = [...]string{ - INVALID: "INVALID", - NAMESPACE: "NAMESPACE", - ENTITY: "ENTITY", - ACTION: "ACTION", - TYPE: "TYPE", - IN: "IN", - TAGS: "TAGS", - APPLIESTO: "APPLIESTO", - PRINCIPAL: "PRINCIPAL", - RESOURCE: "RESOURCE", - CONTEXT: "CONTEXT", - ENUM: "ENUM", - - LEFTBRACE: "LEFTBRACE", - RIGHTBRACE: "RIGHTBRACE", - LEFTBRACKET: "LEFTBRACKET", - RIGHTBRACKET: "RIGHTBRACKET", - LEFTANGLE: "LEFTANGLE", - RIGHTANGLE: "RIGHTANGLE", - COLON: "COLON", - SEMICOLON: "SEMICOLON", - COMMA: "COMMA", - EQUALS: "EQUALS", - QUESTION: "QUESTION", - DOUBLECOLON: "DOUBLECOLON", - LEFTPAREN: "LEFTPAREN", - RIGHTPAREN: "RIGHTPAREN", - - IDENT: "IDENT", - STRING: "STRING", - - COMMENT: "COMMENT", - - EOF: "EOF", -} - -func (tok Type) String() string { - s := "" - if 0 <= tok && tok < Type(len(types)) { - s = types[tok] - } - if s == "" { - s = "Token(" + strconv.Itoa(int(tok)) + ")" - } - return s -} - -var AllKeywords = []Type{ - NAMESPACE, - ENTITY, - ACTION, - TYPE, - IN, - TAGS, - APPLIESTO, - PRINCIPAL, - RESOURCE, - CONTEXT, - ENUM, -} diff --git a/internal/schema/token/token_test.go b/internal/schema/token/token_test.go deleted file mode 100644 index 187d59d9..00000000 --- a/internal/schema/token/token_test.go +++ /dev/null @@ -1,73 +0,0 @@ -package token - -import ( - "errors" - "testing" -) - -func TestError_Error(t *testing.T) { - pos := Position{Filename: "testfile", Line: 1, Column: 2} - err := Error{Pos: pos, Err: errors.New("test error")} - expected := "testfile:1:2: test error" - if err.Error() != expected { - t.Errorf("expected %q, got %q", expected, err.Error()) - } -} - -func TestErrList_Error(t *testing.T) { - errs := Errors{ - errors.New("first error"), - errors.New("second error"), - } - expected := 
"first error\nsecond error" - if errs.Error() != expected { - t.Errorf("expected %q, got %q", expected, errs.Error()) - } -} - -func TestErrList_Sort(t *testing.T) { - errs := Errors{ - Error{Pos: Position{Line: 1, Column: 2, Offset: 2}, Err: errors.New("second error")}, - Error{Pos: Position{Line: 1, Column: 1, Offset: 1}, Err: errors.New("first error")}, - } - errs.Sort() - if errs[0].Error() != ":1:1: first error" || errs[1].Error() != ":1:2: second error" { - t.Errorf("errors not sorted correctly: %v", errs) - } -} - -func TestType_String(t *testing.T) { - tests := []struct { - tok Type - expected string - }{ - {INVALID, "INVALID"}, - {NAMESPACE, "NAMESPACE"}, - {ENTITY, "ENTITY"}, - {ACTION, "ACTION"}, - {EOF, "EOF"}, - {Type(999), "Token(999)"}, - } - - for _, test := range tests { - if test.tok.String() != test.expected { - t.Errorf("expected %q, got %q", test.expected, test.tok.String()) - } - } -} - -func TestErrorsSort(t *testing.T) { - // Test case for mixing Error and non-Error types - reg := errors.New("regular error") - reg2 := errors.New("another regular error") - errs := Errors{ - reg, - Error{Pos: Position{Offset: 10}, Err: errors.New("error at pos 10")}, - Error{Pos: Position{Offset: 5}, Err: errors.New("error at pos 5")}, - reg2, - } - errs.Sort() - if len(errs) != 4 { - t.Errorf("Expected length 4, got %d", len(errs)) - } -} diff --git a/policy_set.go b/policy_set.go index efa430a9..dff0be0e 100644 --- a/policy_set.go +++ b/policy_set.go @@ -11,7 +11,7 @@ import ( internaljson "github.com/cedar-policy/cedar-go/internal/json" "github.com/cedar-policy/cedar-go/types" - internalast "github.com/cedar-policy/cedar-go/x/exp/ast" + "github.com/cedar-policy/cedar-go/x/exp/ast" ) //revive:disable-next-line:exported @@ -131,7 +131,7 @@ func (p *PolicySet) UnmarshalJSON(b []byte) error { policies: make(PolicyMap, len(jsonPolicySet.StaticPolicies)), } for k, v := range jsonPolicySet.StaticPolicies { - p.policies[PolicyID(k)] = 
newPolicy((*internalast.Policy)(v)) + p.policies[PolicyID(k)] = newPolicy((*ast.Policy)(v)) } return nil } diff --git a/x/exp/schema/ast/ast.go b/x/exp/schema/ast/ast.go new file mode 100644 index 00000000..fb3303b4 --- /dev/null +++ b/x/exp/schema/ast/ast.go @@ -0,0 +1,101 @@ +// Package ast provides types for constructing Cedar schema ASTs programmatically. +package ast + +import ( + "github.com/cedar-policy/cedar-go/types" +) + +// Annotations maps annotation keys to their string values. +type Annotations map[types.Ident]types.String + +// Entities maps entity type names to their definitions. +type Entities map[types.Ident]Entity + +// Enums maps entity type names to their enum definitions. +type Enums map[types.Ident]Enum + +// Actions maps action names to their definitions. +type Actions map[types.String]Action + +// CommonTypes maps common type names to their definitions. +type CommonTypes map[types.Ident]CommonType + +// Namespaces maps namespace paths to their definitions. +type Namespaces map[types.Path]Namespace + +// Schema is the top-level Cedar schema AST. +// The Entities, Enums, Actions, and CommonTypes are for the top-level namespace. +type Schema struct { + Entities Entities + Enums Enums + Actions Actions + CommonTypes CommonTypes + Namespaces Namespaces +} + +// Namespace groups declarations under a namespace path. +type Namespace struct { + Annotations Annotations + Entities Entities + Enums Enums + Actions Actions + CommonTypes CommonTypes +} + +// CommonType is a named type alias declaration. +type CommonType struct { + Annotations Annotations + Type IsType +} + +// Entity defines the shape and membership of an entity type. +type Entity struct { + Annotations Annotations + ParentTypes []EntityTypeRef + Shape RecordType + Tags IsType +} + +// Enum defines an entity type whose valid values are a fixed set of strings. +type Enum struct { + Annotations Annotations + Values []types.String +} + +// Action defines what principals can do to resources. 
+// If AppliesTo is nil, the action never applies. +type Action struct { + Annotations Annotations + Parents []ParentRef + AppliesTo *AppliesTo +} + +// AppliesTo specifies the principal and resource types an action can apply to. +type AppliesTo struct { + Principals []EntityTypeRef + Resources []EntityTypeRef + Context IsType +} + +// ParentRef identifies an action parent by type and ID. +type ParentRef struct { + Type EntityTypeRef + ID types.String +} + +// ParentRefFromID creates a ParentRef with only an ID. +// Type is inferred as Action and namespaced during resolution. +func ParentRefFromID(id types.String) ParentRef { + return ParentRef{ + ID: id, + } +} + +// NewParentRef creates a ParentRef with type and ID. +// Type will be namespaced during resolution. +func NewParentRef(typ EntityTypeRef, id types.String) ParentRef { + return ParentRef{ + Type: typ, + ID: id, + } +} diff --git a/x/exp/schema/ast/ast_test.go b/x/exp/schema/ast/ast_test.go new file mode 100644 index 00000000..b6ce5eb5 --- /dev/null +++ b/x/exp/schema/ast/ast_test.go @@ -0,0 +1,34 @@ +package ast_test + +import ( + "testing" + + "github.com/cedar-policy/cedar-go/internal/testutil" + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" +) + +func TestConstructors(t *testing.T) { + testutil.Equals(t, ast.String(), ast.StringType{}) + testutil.Equals(t, ast.Long(), ast.LongType{}) + testutil.Equals(t, ast.Bool(), ast.BoolType{}) + testutil.Equals(t, ast.IPAddr(), ast.ExtensionType("ipaddr")) + testutil.Equals(t, ast.Decimal(), ast.ExtensionType("decimal")) + testutil.Equals(t, ast.Datetime(), ast.ExtensionType("datetime")) + testutil.Equals(t, ast.Duration(), ast.ExtensionType("duration")) + testutil.Equals(t, ast.Set(ast.Long()), ast.SetType{Element: ast.LongType{}}) + testutil.Equals(t, ast.EntityType("User"), ast.EntityTypeRef("User")) + testutil.Equals(t, ast.Type("MyType"), ast.TypeRef("MyType")) +} + +func TestParentRefFromID(t *testing.T) { + 
ref := ast.ParentRefFromID("view") + testutil.Equals(t, ref.ID, types.String("view")) + testutil.Equals(t, ref.Type, ast.EntityTypeRef("")) +} + +func TestNewParentRef(t *testing.T) { + ref := ast.NewParentRef("NS::Action", "view") + testutil.Equals(t, ref.ID, types.String("view")) + testutil.Equals(t, ref.Type, ast.EntityTypeRef("NS::Action")) +} diff --git a/x/exp/schema/ast/types.go b/x/exp/schema/ast/types.go new file mode 100644 index 00000000..0d0a96d1 --- /dev/null +++ b/x/exp/schema/ast/types.go @@ -0,0 +1,97 @@ +package ast + +import ( + "github.com/cedar-policy/cedar-go/types" +) + +// IsType is the sealed sum type for all Cedar schema types. +// +//sumtype:decl +type IsType interface { + isType() +} + +// StringType represents the Cedar String type. +type StringType struct{} + +func (StringType) isType() { _ = 0 } + +// String returns a StringType. +func String() StringType { return StringType{} } + +// LongType represents the Cedar Long type. +type LongType struct{} + +func (LongType) isType() { _ = 0 } + +// Long returns a LongType. +func Long() LongType { return LongType{} } + +// BoolType represents the Cedar Bool type. +type BoolType struct{} + +func (BoolType) isType() { _ = 0 } + +// Bool returns a BoolType. +func Bool() BoolType { return BoolType{} } + +// ExtensionType represents a Cedar extension type (e.g. ipaddr, decimal). +type ExtensionType types.Ident + +func (ExtensionType) isType() { _ = 0 } + +// IPAddr returns an ExtensionType for ipaddr. +func IPAddr() ExtensionType { return ExtensionType("ipaddr") } + +// Decimal returns an ExtensionType for decimal. +func Decimal() ExtensionType { return ExtensionType("decimal") } + +// Datetime returns an ExtensionType for datetime. +func Datetime() ExtensionType { return ExtensionType("datetime") } + +// Duration returns an ExtensionType for duration. +func Duration() ExtensionType { return ExtensionType("duration") } + +// SetType represents the Cedar Set type. 
+type SetType struct { + Element IsType +} + +func (SetType) isType() { _ = 0 } + +// Set returns a SetType with the given element type. +func Set(element IsType) SetType { + return SetType{Element: element} +} + +// Attribute describes a single attribute in a record type. +type Attribute struct { + Type IsType + Optional bool + Annotations Annotations +} + +// RecordType maps attribute names to their types and optionality. +type RecordType map[types.String]Attribute + +func (RecordType) isType() { _ = 0 } + +// EntityTypeRef is a reference to an entity type in the schema. +type EntityTypeRef types.EntityType + +func (EntityTypeRef) isType() { _ = 0 } + +// EntityType returns an EntityTypeRef for the given entity type name. +func EntityType(name types.EntityType) EntityTypeRef { + return EntityTypeRef(name) +} + +// TypeRef is a reference to a common type or entity type by path, not yet resolved. +type TypeRef types.Path + +func (TypeRef) isType() { _ = 0 } + +// Type returns a TypeRef for the given path. 
+func Type(name types.Path) TypeRef {
+	return TypeRef(name)
+}
diff --git a/x/exp/schema/ast/types_internal_test.go b/x/exp/schema/ast/types_internal_test.go
new file mode 100644
index 00000000..462fccd3
--- /dev/null
+++ b/x/exp/schema/ast/types_internal_test.go
@@ -0,0 +1,14 @@
+package ast
+
+import "testing"
+
+func TestIsTypeMarkers(t *testing.T) {
+	StringType{}.isType()
+	LongType{}.isType()
+	BoolType{}.isType()
+	ExtensionType("ipaddr").isType()
+	SetType{}.isType()
+	RecordType{}.isType()
+	EntityTypeRef("User").isType()
+	TypeRef("Foo").isType()
+}
diff --git a/x/exp/schema/example_test.go b/x/exp/schema/example_test.go
new file mode 100644
index 00000000..aca9a79b
--- /dev/null
+++ b/x/exp/schema/example_test.go
@@ -0,0 +1,58 @@
+package schema_test
+
+import (
+	"fmt"
+
+	"github.com/cedar-policy/cedar-go/x/exp/schema"
+)
+
+const exampleCedar = `entity User in [Group] {
+	name: String,
+	age?: Long
+};
+
+entity Group;
+
+entity Photo {
+	owner: User,
+	tags: Set<String>
+};
+
+entity Status enum ["active", "inactive"];
+
+action viewPhoto appliesTo {
+	principal: User,
+	resource: Photo,
+	context: {}
+};
+`
+
+func ExampleSchema() {
+	var s schema.Schema
+	if err := s.UnmarshalCedar([]byte(exampleCedar)); err != nil {
+		fmt.Println("schema parse error:", err)
+		return
+	}
+
+	resolved, err := s.Resolve()
+	if err != nil {
+		fmt.Println("schema resolve error:", err)
+		return
+	}
+
+	for entityType := range resolved.Entities {
+		fmt.Println("entity:", entityType)
+	}
+	for _, enum := range resolved.Enums {
+		fmt.Println("enum:", enum.Name)
+	}
+	for actionUID := range resolved.Actions {
+		fmt.Println("action:", actionUID)
+	}
+	// Unordered output:
+	// entity: User
+	// entity: Group
+	// entity: Photo
+	// enum: Status
+	// action: Action::"viewPhoto"
+}
diff --git a/x/exp/schema/internal/json/json.go b/x/exp/schema/internal/json/json.go
new file mode 100644
index 00000000..4c1740dd
--- /dev/null
+++ b/x/exp/schema/internal/json/json.go
@@ -0,0 +1,450 @@
+// Package json provides JSON marshaling and unmarshaling for Cedar schema ASTs. +package json + +import ( + "encoding/json" + "fmt" + "sort" + + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" +) + +// Schema is a type alias of ast.Schema that provides JSON marshaling. +type Schema ast.Schema + +// MarshalJSON encodes the schema as JSON. +func (s *Schema) MarshalJSON() ([]byte, error) { + out := make(map[string]jsonNamespace) + + // Bare declarations go under the empty string key. + if hasBareDecls((*ast.Schema)(s)) { + ns, err := marshalNamespace("", ast.Namespace{ + Entities: s.Entities, + Enums: s.Enums, + Actions: s.Actions, + CommonTypes: s.CommonTypes, + }) + if err != nil { + return nil, err + } + out[""] = ns + } + + for name, ns := range s.Namespaces { + jns, err := marshalNamespace(name, ns) + if err != nil { + return nil, err + } + out[string(name)] = jns + } + return json.Marshal(out) +} + +// UnmarshalJSON parses a JSON schema into the AST. 
+func (s *Schema) UnmarshalJSON(b []byte) error { + var raw map[string]json.RawMessage + if err := json.Unmarshal(b, &raw); err != nil { + return err + } + + result := ast.Schema{} + for name, data := range raw { + var jns jsonNamespace + if err := json.Unmarshal(data, &jns); err != nil { + return fmt.Errorf("namespace %q: %w", name, err) + } + ns, err := unmarshalNamespace(jns) + if err != nil { + return fmt.Errorf("namespace %q: %w", name, err) + } + if name == "" { + result.Entities = ns.Entities + result.Enums = ns.Enums + result.Actions = ns.Actions + result.CommonTypes = ns.CommonTypes + } else { + if result.Namespaces == nil { + result.Namespaces = ast.Namespaces{} + } + result.Namespaces[types.Path(name)] = ns + } + } + *s = Schema(result) + return nil +} + +func hasBareDecls(s *ast.Schema) bool { + return len(s.Entities) > 0 || len(s.Enums) > 0 || len(s.Actions) > 0 || len(s.CommonTypes) > 0 +} + +type jsonNamespace struct { + EntityTypes map[string]jsonEntityType `json:"entityTypes"` + Actions map[string]jsonAction `json:"actions"` + CommonTypes map[string]jsonCommonType `json:"commonTypes,omitempty"` + Annotations map[string]string `json:"annotations,omitempty"` +} + +type jsonEntityType struct { + // Standard entity fields + MemberOfTypes []string `json:"memberOfTypes,omitempty"` + Shape *jsonType `json:"shape,omitempty"` + Tags *jsonType `json:"tags,omitempty"` + Annotations map[string]string `json:"annotations,omitempty"` + + // Enum entity field (mutually exclusive with standard fields) + Enum []string `json:"enum,omitempty"` +} + +type jsonAction struct { + MemberOf []jsonActionParent `json:"memberOf,omitempty"` + AppliesTo *jsonAppliesTo `json:"appliesTo,omitempty"` + Annotations map[string]string `json:"annotations,omitempty"` +} + +type jsonActionParent struct { + ID string `json:"id"` + Type string `json:"type"` +} + +type jsonAppliesTo struct { + PrincipalTypes []string `json:"principalTypes"` + ResourceTypes []string `json:"resourceTypes"` + 
Context *jsonType `json:"context,omitempty"` +} + +type jsonCommonType struct { + jsonType + Annotations map[string]string `json:"annotations,omitempty"` +} + +type jsonType struct { + Type string `json:"type"` + Element *jsonType `json:"element,omitempty"` + Attributes map[string]jsonAttr `json:"attributes,omitempty"` + Name string `json:"name,omitempty"` +} + +type jsonAttr struct { + jsonType + Required *bool `json:"required,omitempty"` + Annotations map[string]string `json:"annotations,omitempty"` +} + +func marshalNamespace(name types.Path, ns ast.Namespace) (jsonNamespace, error) { + jns := jsonNamespace{ + EntityTypes: make(map[string]jsonEntityType), + Actions: make(map[string]jsonAction), + } + + if len(ns.Annotations) > 0 { + jns.Annotations = marshalAnnotations(ns.Annotations) + } + + if len(ns.CommonTypes) > 0 { + jns.CommonTypes = make(map[string]jsonCommonType) + for ctName, ct := range ns.CommonTypes { + jt, err := marshalIsType(ct.Type) + if err != nil { + return jsonNamespace{}, err + } + jct := jsonCommonType{jsonType: *jt} + if len(ct.Annotations) > 0 { + jct.Annotations = marshalAnnotations(ct.Annotations) + } + jns.CommonTypes[string(ctName)] = jct + } + } + + for etName, entity := range ns.Entities { + jet := jsonEntityType{} + if len(entity.Annotations) > 0 { + jet.Annotations = marshalAnnotations(entity.Annotations) + } + if len(entity.ParentTypes) > 0 { + for _, ref := range entity.ParentTypes { + jet.MemberOfTypes = append(jet.MemberOfTypes, string(ref)) + } + sort.Strings(jet.MemberOfTypes) + } + if entity.Shape != nil { + jt, err := marshalRecordType(entity.Shape) + if err != nil { + return jsonNamespace{}, err + } + jet.Shape = jt + } + if entity.Tags != nil { + jt, err := marshalIsType(entity.Tags) + if err != nil { + return jsonNamespace{}, err + } + jet.Tags = jt + } + jns.EntityTypes[string(etName)] = jet + } + + for etName, enum := range ns.Enums { + jet := jsonEntityType{} + if len(enum.Annotations) > 0 { + jet.Annotations = 
marshalAnnotations(enum.Annotations) + } + for _, v := range enum.Values { + jet.Enum = append(jet.Enum, string(v)) + } + jns.EntityTypes[string(etName)] = jet + } + + for actionName, action := range ns.Actions { + ja := jsonAction{} + if len(action.Annotations) > 0 { + ja.Annotations = marshalAnnotations(action.Annotations) + } + for _, ref := range action.Parents { + parent := jsonActionParent{ + ID: string(ref.ID), + } + if types.EntityType(ref.Type) != "" { + parent.Type = string(ref.Type) + } + ja.MemberOf = append(ja.MemberOf, parent) + } + if action.AppliesTo != nil { + jat := &jsonAppliesTo{} + for _, p := range action.AppliesTo.Principals { + jat.PrincipalTypes = append(jat.PrincipalTypes, string(p)) + } + for _, r := range action.AppliesTo.Resources { + jat.ResourceTypes = append(jat.ResourceTypes, string(r)) + } + if action.AppliesTo.Context != nil { + jt, err := marshalIsType(action.AppliesTo.Context) + if err != nil { + return jsonNamespace{}, err + } + jat.Context = jt + } + ja.AppliesTo = jat + } + jns.Actions[string(actionName)] = ja + } + + return jns, nil +} + +func marshalIsType(t ast.IsType) (*jsonType, error) { + switch t := t.(type) { + case ast.StringType: + return &jsonType{Type: "String"}, nil + case ast.LongType: + return &jsonType{Type: "Long"}, nil + case ast.BoolType: + return &jsonType{Type: "Boolean"}, nil + case ast.ExtensionType: + return &jsonType{Type: "Extension", Name: string(t)}, nil + case ast.SetType: + elem, err := marshalIsType(t.Element) + if err != nil { + return nil, err + } + return &jsonType{Type: "Set", Element: elem}, nil + case ast.RecordType: + return marshalRecordType(t) + case ast.EntityTypeRef: + return &jsonType{Type: "Entity", Name: string(t)}, nil + case ast.TypeRef: + return &jsonType{Type: "EntityOrCommon", Name: string(t)}, nil + default: + return nil, fmt.Errorf("unknown type: %T", t) + } +} + +func marshalRecordType(rec ast.RecordType) (*jsonType, error) { + jt := &jsonType{ + Type: "Record", + 
Attributes: make(map[string]jsonAttr), + } + for name, attr := range rec { + attrType, err := marshalIsType(attr.Type) + if err != nil { + return nil, err + } + ja := jsonAttr{jsonType: *attrType} + if attr.Optional { + f := false + ja.Required = &f + } + if len(attr.Annotations) > 0 { + ja.Annotations = marshalAnnotations(attr.Annotations) + } + jt.Attributes[string(name)] = ja + } + return jt, nil +} + +func marshalAnnotations(annotations ast.Annotations) map[string]string { + m := make(map[string]string, len(annotations)) + for k, v := range annotations { + m[string(k)] = string(v) + } + return m +} + +func unmarshalNamespace(jns jsonNamespace) (ast.Namespace, error) { + ns := ast.Namespace{} + + if len(jns.Annotations) > 0 { + ns.Annotations = unmarshalAnnotations(jns.Annotations) + } + + for ctName, jct := range jns.CommonTypes { + t, err := unmarshalType(&jct.jsonType) + if err != nil { + return ast.Namespace{}, fmt.Errorf("common type %q: %w", ctName, err) + } + if ns.CommonTypes == nil { + ns.CommonTypes = ast.CommonTypes{} + } + ct := ast.CommonType{Type: t} + if len(jct.Annotations) > 0 { + ct.Annotations = unmarshalAnnotations(jct.Annotations) + } + ns.CommonTypes[types.Ident(ctName)] = ct + } + + for etName, jet := range jns.EntityTypes { + if len(jet.Enum) > 0 { + enum := ast.Enum{} + if len(jet.Annotations) > 0 { + enum.Annotations = unmarshalAnnotations(jet.Annotations) + } + for _, v := range jet.Enum { + enum.Values = append(enum.Values, types.String(v)) + } + if ns.Enums == nil { + ns.Enums = ast.Enums{} + } + ns.Enums[types.Ident(etName)] = enum + } else { + entity := ast.Entity{} + if len(jet.Annotations) > 0 { + entity.Annotations = unmarshalAnnotations(jet.Annotations) + } + for _, ref := range jet.MemberOfTypes { + entity.ParentTypes = append(entity.ParentTypes, ast.EntityTypeRef(ref)) + } + if jet.Shape != nil { + rec, err := unmarshalRecordType(jet.Shape) + if err != nil { + return ast.Namespace{}, fmt.Errorf("entity %q shape: %w", etName, 
err) + } + entity.Shape = rec + } + if jet.Tags != nil { + t, err := unmarshalType(jet.Tags) + if err != nil { + return ast.Namespace{}, fmt.Errorf("entity %q tags: %w", etName, err) + } + entity.Tags = t + } + if ns.Entities == nil { + ns.Entities = ast.Entities{} + } + ns.Entities[types.Ident(etName)] = entity + } + } + + for actionName, ja := range jns.Actions { + action := ast.Action{} + if len(ja.Annotations) > 0 { + action.Annotations = unmarshalAnnotations(ja.Annotations) + } + for _, parent := range ja.MemberOf { + if parent.Type == "" { + action.Parents = append(action.Parents, ast.ParentRefFromID(types.String(parent.ID))) + } else { + action.Parents = append(action.Parents, ast.NewParentRef(ast.EntityTypeRef(parent.Type), types.String(parent.ID))) + } + } + if ja.AppliesTo != nil { + at := &ast.AppliesTo{} + for _, p := range ja.AppliesTo.PrincipalTypes { + at.Principals = append(at.Principals, ast.EntityTypeRef(p)) + } + for _, r := range ja.AppliesTo.ResourceTypes { + at.Resources = append(at.Resources, ast.EntityTypeRef(r)) + } + if ja.AppliesTo.Context != nil { + t, err := unmarshalType(ja.AppliesTo.Context) + if err != nil { + return ast.Namespace{}, fmt.Errorf("action %q context: %w", actionName, err) + } + at.Context = t + } + action.AppliesTo = at + } + if ns.Actions == nil { + ns.Actions = ast.Actions{} + } + ns.Actions[types.String(actionName)] = action + } + + return ns, nil +} + +func unmarshalType(jt *jsonType) (ast.IsType, error) { + switch jt.Type { + case "String": + return ast.StringType{}, nil + case "Long": + return ast.LongType{}, nil + case "Boolean": + return ast.BoolType{}, nil + case "Extension": + return ast.ExtensionType(jt.Name), nil + case "Set": + if jt.Element == nil { + return nil, fmt.Errorf("set type missing element") + } + elem, err := unmarshalType(jt.Element) + if err != nil { + return nil, err + } + return ast.Set(elem), nil + case "Record": + return unmarshalRecordType(jt) + case "Entity": + return 
ast.EntityTypeRef(jt.Name), nil + case "EntityOrCommon": + return ast.TypeRef(jt.Name), nil + default: + return nil, fmt.Errorf("unknown type %q", jt.Type) + } +} + +func unmarshalRecordType(jt *jsonType) (ast.RecordType, error) { + rec := ast.RecordType{} + for name, ja := range jt.Attributes { + t, err := unmarshalType(&ja.jsonType) + if err != nil { + return nil, fmt.Errorf("attribute %q: %w", name, err) + } + attr := ast.Attribute{ + Type: t, + Optional: ja.Required != nil && !*ja.Required, + } + if len(ja.Annotations) > 0 { + attr.Annotations = unmarshalAnnotations(ja.Annotations) + } + rec[types.String(name)] = attr + } + return rec, nil +} + +func unmarshalAnnotations(m map[string]string) ast.Annotations { + a := make(ast.Annotations, len(m)) + for k, v := range m { + a[types.Ident(k)] = types.String(v) + } + return a +} diff --git a/x/exp/schema/internal/json/json_internal_test.go b/x/exp/schema/internal/json/json_internal_test.go new file mode 100644 index 00000000..6fc6edcb --- /dev/null +++ b/x/exp/schema/internal/json/json_internal_test.go @@ -0,0 +1,184 @@ +package json + +import ( + "testing" + + "github.com/cedar-policy/cedar-go/internal/testutil" + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" +) + +func TestMarshalIsTypeUnknown(t *testing.T) { + _, err := marshalIsType(nil) + testutil.Error(t, err) +} + +func TestMarshalIsTypeSetError(t *testing.T) { + _, err := marshalIsType(ast.SetType{Element: nil}) + testutil.Error(t, err) +} + +func TestMarshalRecordTypeError(t *testing.T) { + _, err := marshalRecordType(ast.RecordType{ + "bad": ast.Attribute{Type: nil}, + }) + testutil.Error(t, err) +} + +func TestMarshalNamespaceCommonTypeError(t *testing.T) { + _, err := marshalNamespace("", ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "Bad": ast.CommonType{Type: nil}, + }, + }) + testutil.Error(t, err) +} + +func TestMarshalNamespaceEntityShapeError(t *testing.T) { + _, err := marshalNamespace("", 
ast.Namespace{ + Entities: ast.Entities{ + "Foo": ast.Entity{ + Shape: ast.RecordType{ + "bad": ast.Attribute{Type: nil}, + }, + }, + }, + }) + testutil.Error(t, err) +} + +func TestMarshalNamespaceEntityTagsError(t *testing.T) { + // Tags is nil, but the code checks `entity.Tags != nil` first + // So we need a non-nil tags that fails. Use SetType{Element: nil}. + _, err := marshalNamespace("", ast.Namespace{ + Entities: ast.Entities{ + "Foo": ast.Entity{Tags: nil}, + }, + }) + testutil.OK(t, err) +} + +func TestMarshalNamespaceEntityTagsError2(t *testing.T) { + _, err := marshalNamespace("", ast.Namespace{ + Entities: ast.Entities{ + "Foo": ast.Entity{Tags: ast.SetType{Element: nil}}, + }, + }) + testutil.Error(t, err) +} + +func TestMarshalNamespaceActionAnnotations(t *testing.T) { + ns, err := marshalNamespace("", ast.Namespace{ + Actions: ast.Actions{ + "view": ast.Action{ + Annotations: ast.Annotations{"doc": "test"}, + }, + }, + }) + testutil.OK(t, err) + testutil.Equals(t, ns.Actions["view"].Annotations["doc"], "test") +} + +func TestMarshalNamespaceContextError(t *testing.T) { + _, err := marshalNamespace("", ast.Namespace{ + Actions: ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Context: ast.SetType{Element: nil}, + }, + }, + }, + }) + testutil.Error(t, err) +} + +func TestMarshalBareNamespaceError(t *testing.T) { + s := &Schema{ + Entities: ast.Entities{ + "Foo": ast.Entity{Tags: ast.SetType{Element: nil}}, + }, + } + _, err := s.MarshalJSON() + testutil.Error(t, err) +} + +func TestMarshalNamespacedError(t *testing.T) { + s := &Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "Foo": ast.Entity{Tags: ast.SetType{Element: nil}}, + }, + }, + }, + } + _, err := s.MarshalJSON() + testutil.Error(t, err) +} + +func TestUnmarshalCommonTypeError(t *testing.T) { + _, err := unmarshalNamespace(jsonNamespace{ + EntityTypes: map[string]jsonEntityType{}, + Actions: map[string]jsonAction{}, + CommonTypes: 
map[string]jsonCommonType{ + "Bad": {jsonType: jsonType{Type: "Unknown"}}, + }, + }) + testutil.Error(t, err) +} + +func TestUnmarshalEntityShapeError(t *testing.T) { + _, err := unmarshalNamespace(jsonNamespace{ + EntityTypes: map[string]jsonEntityType{ + "Foo": {Shape: &jsonType{ + Type: "Record", + Attributes: map[string]jsonAttr{ + "bad": {jsonType: jsonType{Type: "Unknown"}}, + }, + }}, + }, + Actions: map[string]jsonAction{}, + }) + testutil.Error(t, err) +} + +func TestUnmarshalActionAnnotations(t *testing.T) { + ns, err := unmarshalNamespace(jsonNamespace{ + EntityTypes: map[string]jsonEntityType{}, + Actions: map[string]jsonAction{ + "view": {Annotations: map[string]string{"doc": "test"}}, + }, + }) + testutil.OK(t, err) + testutil.Equals(t, ns.Actions["view"].Annotations["doc"], types.String("test")) +} + +func TestUnmarshalContextTypeError(t *testing.T) { + _, err := unmarshalNamespace(jsonNamespace{ + EntityTypes: map[string]jsonEntityType{}, + Actions: map[string]jsonAction{ + "view": {AppliesTo: &jsonAppliesTo{ + Context: &jsonType{Type: "Unknown"}, + }}, + }, + }) + testutil.Error(t, err) +} + +func TestUnmarshalSetElementError(t *testing.T) { + _, err := unmarshalType(&jsonType{ + Type: "Set", + Element: &jsonType{Type: "Unknown"}, + }) + testutil.Error(t, err) +} + +func TestUnmarshalRecordAttrError(t *testing.T) { + _, err := unmarshalRecordType(&jsonType{ + Type: "Record", + Attributes: map[string]jsonAttr{ + "bad": {jsonType: jsonType{Type: "Unknown"}}, + }, + }) + testutil.Error(t, err) +} diff --git a/x/exp/schema/internal/json/json_test.go b/x/exp/schema/internal/json/json_test.go new file mode 100644 index 00000000..e9436f55 --- /dev/null +++ b/x/exp/schema/internal/json/json_test.go @@ -0,0 +1,335 @@ +package json_test + +import ( + "encoding/json" + "testing" + + "github.com/cedar-policy/cedar-go/internal/testutil" + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" + schemajson 
"github.com/cedar-policy/cedar-go/x/exp/schema/internal/json" +) + +func TestRoundTripEmpty(t *testing.T) { + s := ast.Schema{} + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + testutil.Equals(t, (*ast.Schema)(&s2), &ast.Schema{}) +} + +func TestRoundTripEntity(t *testing.T) { + s := ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{ + ParentTypes: []ast.EntityTypeRef{"Group"}, + Shape: ast.RecordType{ + "name": ast.Attribute{Type: ast.StringType{}}, + "age": ast.Attribute{Type: ast.LongType{}, Optional: true}, + }, + Tags: ast.StringType{}, + }, + }, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + user := (*ast.Schema)(&s2).Namespaces["NS"].Entities["User"] + testutil.Equals(t, user.ParentTypes, []ast.EntityTypeRef{"Group"}) + testutil.Equals(t, user.Shape != nil, true) + testutil.Equals(t, user.Tags != nil, true) +} + +func TestRoundTripEnum(t *testing.T) { + s := ast.Schema{ + Enums: ast.Enums{ + "Status": ast.Enum{ + Values: []types.String{"active", "inactive"}, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + status := (*ast.Schema)(&s2).Enums["Status"] + testutil.Equals(t, status.Values, []types.String{"active", "inactive"}) +} + +func TestRoundTripAction(t *testing.T) { + s := ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + Parents: []ast.ParentRef{ + ast.NewParentRef("NS::Action", "readOnly"), + ast.ParentRefFromID("write"), + }, + AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{"User"}, + Resources: []ast.EntityTypeRef{"Photo"}, + Context: ast.RecordType{}, + }, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 
schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + view := (*ast.Schema)(&s2).Actions["view"] + testutil.Equals(t, len(view.Parents), 2) + testutil.Equals(t, view.AppliesTo != nil, true) + testutil.Equals(t, len(view.AppliesTo.Principals), 1) +} + +func TestRoundTripCommonType(t *testing.T) { + s := ast.Schema{ + CommonTypes: ast.CommonTypes{ + "Context": ast.CommonType{ + Annotations: ast.Annotations{"doc": "context type"}, + Type: ast.RecordType{ + "ip": ast.Attribute{Type: ast.ExtensionType("ipaddr")}, + }, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + ct := (*ast.Schema)(&s2).CommonTypes["Context"] + testutil.Equals(t, ct.Annotations["doc"], types.String("context type")) +} + +func TestRoundTripAllTypes(t *testing.T) { + s := ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "s": ast.Attribute{Type: ast.StringType{}}, + "l": ast.Attribute{Type: ast.LongType{}}, + "b": ast.Attribute{Type: ast.BoolType{}}, + "ip": ast.Attribute{Type: ast.ExtensionType("ipaddr")}, + "set": ast.Attribute{Type: ast.Set(ast.LongType{})}, + "rec": ast.Attribute{Type: ast.RecordType{}}, + "ref": ast.Attribute{Type: ast.EntityTypeRef("Other")}, + "tref": ast.Attribute{Type: ast.TypeRef("CommonT")}, + }, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + testutil.Equals(t, len((*ast.Schema)(&s2).Entities["User"].Shape), 8) +} + +func TestRoundTripAnnotations(t *testing.T) { + s := ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Annotations: ast.Annotations{"doc": "user entity"}, + Shape: ast.RecordType{ + "name": ast.Attribute{ + Type: ast.StringType{}, + Annotations: ast.Annotations{"doc": "user name"}, + }, + }, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 
schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + user := (*ast.Schema)(&s2).Entities["User"] + testutil.Equals(t, user.Annotations["doc"], types.String("user entity")) + testutil.Equals(t, user.Shape["name"].Annotations["doc"], types.String("user name")) +} + +func TestRoundTripNamespaceAnnotations(t *testing.T) { + s := ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Annotations: ast.Annotations{"doc": "my ns"}, + Entities: ast.Entities{}, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + ns := (*ast.Schema)(&s2).Namespaces["NS"] + testutil.Equals(t, ns.Annotations["doc"], types.String("my ns")) +} + +func TestRoundTripActionNoAppliesTo(t *testing.T) { + s := ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{}, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + view := (*ast.Schema)(&s2).Actions["view"] + testutil.Equals(t, view.AppliesTo == nil, true) +} + +func TestRoundTripActionEmptyLists(t *testing.T) { + s := ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{}, + Resources: []ast.EntityTypeRef{}, + }, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + view := (*ast.Schema)(&s2).Actions["view"] + testutil.Equals(t, view.AppliesTo != nil, true) + testutil.Equals(t, len(view.AppliesTo.Principals), 0) + testutil.Equals(t, len(view.AppliesTo.Resources), 0) +} + +func TestUnmarshalBadJSON(t *testing.T) { + var s schemajson.Schema + testutil.Error(t, s.UnmarshalJSON([]byte(`not json`))) +} + +func TestUnmarshalBadNamespace(t *testing.T) { + var s schemajson.Schema + testutil.Error(t, s.UnmarshalJSON([]byte(`{"NS": "bad"}`))) +} + +func 
TestUnmarshalBadType(t *testing.T) { + var s schemajson.Schema + testutil.Error(t, s.UnmarshalJSON([]byte(`{"": {"entityTypes": {"Foo": {"tags": {"type": "Unknown"}}}, "actions": {}}}`))) +} + +func TestUnmarshalSetMissingElement(t *testing.T) { + var s schemajson.Schema + testutil.Error(t, s.UnmarshalJSON([]byte(`{"": {"entityTypes": {"Foo": {"tags": {"type": "Set"}}}, "actions": {}}}`))) +} + +func TestMarshalUnmarshalJSON(t *testing.T) { + input := `{ + "NS": { + "entityTypes": { + "User": { + "memberOfTypes": ["Group"], + "shape": { + "type": "Record", + "attributes": { + "name": {"type": "String"}, + "age": {"type": "Long", "required": false} + } + } + }, + "Group": {} + }, + "actions": { + "view": { + "appliesTo": { + "principalTypes": ["User"], + "resourceTypes": ["Photo"], + "context": {"type": "Record", "attributes": {}} + } + } + }, + "commonTypes": { + "Context": {"type": "Record", "attributes": {}} + } + } + }` + + var s schemajson.Schema + testutil.OK(t, json.Unmarshal([]byte(input), &s)) + + ns := (*ast.Schema)(&s).Namespaces["NS"] + testutil.Equals(t, len(ns.Entities), 2) + testutil.Equals(t, len(ns.Actions), 1) + testutil.Equals(t, len(ns.CommonTypes), 1) + + b, err := json.Marshal(&s) + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, json.Unmarshal(b, &s2)) + testutil.Equals(t, len((*ast.Schema)(&s2).Namespaces["NS"].Entities), 2) +} + +func TestRoundTripEnumAnnotations(t *testing.T) { + s := ast.Schema{ + Enums: ast.Enums{ + "Status": ast.Enum{ + Annotations: ast.Annotations{"doc": "status enum"}, + Values: []types.String{"active"}, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + status := (*ast.Schema)(&s2).Enums["Status"] + testutil.Equals(t, status.Annotations["doc"], types.String("status enum")) +} + +func TestRoundTripActionContext(t *testing.T) { + s := ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + 
AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{"User"}, + Resources: []ast.EntityTypeRef{"Photo"}, + Context: ast.RecordType{ + "ip": ast.Attribute{Type: ast.ExtensionType("ipaddr")}, + }, + }, + }, + }, + } + b, err := (*schemajson.Schema)(&s).MarshalJSON() + testutil.OK(t, err) + + var s2 schemajson.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + + view := (*ast.Schema)(&s2).Actions["view"] + testutil.Equals(t, view.AppliesTo.Context != nil, true) +} diff --git a/x/exp/schema/internal/parser/marshal.go b/x/exp/schema/internal/parser/marshal.go new file mode 100644 index 00000000..6c26ec2e --- /dev/null +++ b/x/exp/schema/internal/parser/marshal.go @@ -0,0 +1,352 @@ +package parser + +import ( + "bytes" + "fmt" + "maps" + "slices" + "strings" + + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" +) + +// MarshalSchema formats an AST schema as Cedar text. +func MarshalSchema(schema *ast.Schema) []byte { + var buf bytes.Buffer + m := marshaler{w: &buf} + m.marshalSchema(schema) + return buf.Bytes() +} + +type marshaler struct { + w *bytes.Buffer + indent int +} + +func (m *marshaler) writeIndent() { + for range m.indent { + m.w.WriteByte('\t') + } +} + +func (m *marshaler) marshalSchema(schema *ast.Schema) { + first := true + + // Marshal bare declarations + m.marshalDecls(&first, schema.Entities, schema.Enums, schema.Actions, schema.CommonTypes) + + // Marshal namespaces in sorted order + nsNames := slices.Sorted(maps.Keys(schema.Namespaces)) + for _, name := range nsNames { + ns := schema.Namespaces[name] + if !first { + m.w.WriteByte('\n') + } + first = false + m.marshalAnnotations(ns.Annotations) + m.writeIndent() + fmt.Fprintf(m.w, "namespace %s {\n", name) + m.indent++ + innerFirst := true + m.marshalDecls(&innerFirst, ns.Entities, ns.Enums, ns.Actions, ns.CommonTypes) + m.indent-- + m.writeIndent() + m.w.WriteString("}\n") + } +} + +func (m *marshaler) marshalDecls(first *bool, entities ast.Entities, 
enums ast.Enums, actions ast.Actions, commonTypes ast.CommonTypes) { + // Type declarations + typeNames := slices.Sorted(maps.Keys(commonTypes)) + for _, name := range typeNames { + ct := commonTypes[name] + if !*first { + m.w.WriteByte('\n') + } + *first = false + m.marshalAnnotations(ct.Annotations) + m.writeIndent() + fmt.Fprintf(m.w, "type %s = ", name) + m.marshalType(ct.Type) + m.w.WriteString(";\n") + } + + // Entity declarations + entityNames := slices.Sorted(maps.Keys(entities)) + for _, name := range entityNames { + entity := entities[name] + if !*first { + m.w.WriteByte('\n') + } + *first = false + m.marshalAnnotations(entity.Annotations) + m.writeIndent() + fmt.Fprintf(m.w, "entity %s", name) + if len(entity.ParentTypes) > 0 { + m.w.WriteString(" in ") + m.marshalEntityTypeRefs(entity.ParentTypes) + } + if entity.Shape != nil { + m.w.WriteByte(' ') + m.marshalRecordType(entity.Shape) + } + if entity.Tags != nil { + m.w.WriteString(" tags ") + m.marshalType(entity.Tags) + } + m.w.WriteString(";\n") + } + + // Enum entity declarations + enumNames := slices.Sorted(maps.Keys(enums)) + for _, name := range enumNames { + enum := enums[name] + if !*first { + m.w.WriteByte('\n') + } + *first = false + m.marshalAnnotations(enum.Annotations) + m.writeIndent() + fmt.Fprintf(m.w, "entity %s enum [", name) + for i, v := range enum.Values { + if i > 0 { + m.w.WriteString(", ") + } + m.w.WriteString(quoteCedar(string(v))) + } + m.w.WriteString("];\n") + } + + // Action declarations + actionNames := slices.Sorted(maps.Keys(actions)) + for _, name := range actionNames { + action := actions[name] + if !*first { + m.w.WriteByte('\n') + } + *first = false + m.marshalAnnotations(action.Annotations) + m.writeIndent() + m.w.WriteString("action ") + m.marshalActionName(name) + if len(action.Parents) > 0 { + m.w.WriteString(" in ") + m.marshalParentRefs(action.Parents) + } + if action.AppliesTo != nil { + m.marshalAppliesTo(action.AppliesTo) + } + m.w.WriteString(";\n") + } +} 
+ +func (m *marshaler) marshalAnnotations(annotations ast.Annotations) { + keys := slices.Sorted(maps.Keys(annotations)) + for _, key := range keys { + val := annotations[key] + m.writeIndent() + if val == "" { + fmt.Fprintf(m.w, "@%s\n", key) + } else { + fmt.Fprintf(m.w, "@%s(%s)\n", key, quoteCedar(string(val))) + } + } +} + +func (m *marshaler) marshalType(t ast.IsType) { + switch t := t.(type) { + case ast.StringType: + m.w.WriteString("String") + case ast.LongType: + m.w.WriteString("Long") + case ast.BoolType: + m.w.WriteString("Bool") + case ast.ExtensionType: + m.w.WriteString(string(t)) + case ast.SetType: + m.w.WriteString("Set<") + m.marshalType(t.Element) + m.w.WriteByte('>') + case ast.RecordType: + m.marshalRecordType(t) + case ast.EntityTypeRef: + m.w.WriteString(string(t)) + case ast.TypeRef: + m.w.WriteString(string(t)) + } +} + +func (m *marshaler) marshalRecordType(rec ast.RecordType) { + m.w.WriteByte('{') + keys := slices.Sorted(maps.Keys(rec)) + if len(keys) > 0 { + m.w.WriteByte('\n') + m.indent++ + for i, key := range keys { + attr := rec[key] + m.marshalAnnotations(attr.Annotations) + m.writeIndent() + m.marshalAttrName(key) + if attr.Optional { + m.w.WriteByte('?') + } + m.w.WriteString(": ") + m.marshalType(attr.Type) + if i < len(keys)-1 { + m.w.WriteByte(',') + } + m.w.WriteByte('\n') + } + m.indent-- + m.writeIndent() + } + m.w.WriteByte('}') +} + +func (m *marshaler) marshalEntityTypeRefs(refs []ast.EntityTypeRef) { + if len(refs) == 1 { + m.w.WriteString(string(refs[0])) + return + } + m.w.WriteByte('[') + for i, ref := range refs { + if i > 0 { + m.w.WriteString(", ") + } + m.w.WriteString(string(ref)) + } + m.w.WriteByte(']') +} + +func (m *marshaler) marshalParentRefs(refs []ast.ParentRef) { + if len(refs) == 1 { + m.marshalParentRef(refs[0]) + return + } + m.w.WriteByte('[') + for i, ref := range refs { + if i > 0 { + m.w.WriteString(", ") + } + m.marshalParentRef(ref) + } + m.w.WriteByte(']') +} + +func (m *marshaler) 
marshalParentRef(ref ast.ParentRef) { + if types.EntityType(ref.Type) == "" { + m.marshalActionName(ref.ID) + } else { + fmt.Fprintf(m.w, "%s::%s", ref.Type, quoteCedar(string(ref.ID))) + } +} + +func (m *marshaler) marshalAppliesTo(at *ast.AppliesTo) { + m.w.WriteString(" appliesTo {\n") + m.indent++ + hasPrev := false + if at.Principals != nil { + m.writeIndent() + m.w.WriteString("principal: ") + m.marshalEntityTypeRefs(at.Principals) + hasPrev = true + } + if at.Resources != nil { + if hasPrev { + m.w.WriteString(",\n") + } + m.writeIndent() + m.w.WriteString("resource: ") + m.marshalEntityTypeRefs(at.Resources) + hasPrev = true + } + if at.Context != nil { + if hasPrev { + m.w.WriteString(",\n") + } + m.writeIndent() + m.w.WriteString("context: ") + m.marshalType(at.Context) + hasPrev = true + } + if hasPrev { + m.w.WriteByte('\n') + } + m.indent-- + m.writeIndent() + m.w.WriteByte('}') +} + +func (m *marshaler) marshalActionName(name types.String) { + s := string(name) + if isValidIdent(s) { + m.w.WriteString(s) + } else { + m.w.WriteString(quoteCedar(s)) + } +} + +func (m *marshaler) marshalAttrName(name types.String) { + s := string(name) + if isValidIdent(s) { + m.w.WriteString(s) + } else { + m.w.WriteString(quoteCedar(s)) + } +} + +func isValidIdent(s string) bool { + if len(s) == 0 { + return false + } + for i, r := range s { + if i == 0 { + if !isIdentStart(r) { + return false + } + } else { + if !isIdentContinue(r) { + return false + } + } + } + return true +} + +// quoteCedar produces a double-quoted string literal using only Cedar-valid +// escape sequences: \n, \r, \t, \\, \", \0, and \u{hex} for all other +// non-printable or non-ASCII characters. 
+func quoteCedar(s string) string { + var buf strings.Builder + buf.WriteByte('"') + for _, r := range s { + switch r { + case '"': + buf.WriteString(`\"`) + case '\\': + buf.WriteString(`\\`) + case '\n': + buf.WriteString(`\n`) + case '\r': + buf.WriteString(`\r`) + case '\t': + buf.WriteString(`\t`) + case '\x00': + buf.WriteString(`\0`) + default: + if r >= 0x20 && r < 0x7f { + buf.WriteRune(r) + } else { + fmt.Fprintf(&buf, `\u{%x}`, r) + } + } + } + buf.WriteByte('"') + return buf.String() +} + +func init() { + // Ensure reservedTypeNames is sorted for consistency. + slices.Sort(reservedTypeNames) +} diff --git a/x/exp/schema/internal/parser/parser.go b/x/exp/schema/internal/parser/parser.go new file mode 100644 index 00000000..220021c2 --- /dev/null +++ b/x/exp/schema/internal/parser/parser.go @@ -0,0 +1,811 @@ +// Package parser provides Cedar schema text parsing and formatting. +package parser + +import ( + "fmt" + "slices" + + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" +) + +// reservedTypeNames are identifiers that cannot be used as common type names. +var reservedTypeNames = []string{ + "Bool", "Boolean", "Entity", "Extension", "Long", "Record", "Set", "String", +} + +// ParseSchema parses Cedar schema text into an AST. 
+func ParseSchema(filename string, src []byte) (*ast.Schema, error) { + p := &parser{lex: newLexer(filename, src)} + if err := p.readToken(); err != nil { + return nil, err + } + return p.parseSchema() +} + +type parser struct { + lex *lexer + tok token +} + +func (p *parser) readToken() error { + tok, err := p.lex.next() + if err != nil { + return err + } + p.tok = tok + return nil +} + +func (p *parser) errorf(format string, args ...any) error { + return fmt.Errorf("%s: %s", p.tok.Pos, fmt.Sprintf(format, args...)) +} + +func (p *parser) expect(tt tokenType) error { + if p.tok.Type != tt { + return p.errorf("expected %s, got %s", tokenName(tt), tokenDesc(p.tok)) + } + return p.readToken() +} + +func tokenName(tt tokenType) string { + switch tt { + case tokenEOF: + return "EOF" + case tokenIdent: + return "identifier" + case tokenString: + return "string" + case tokenAt: + return "'@'" + case tokenLBrace: + return "'{'" + case tokenRBrace: + return "'}'" + case tokenLBracket: + return "'['" + case tokenRBracket: + return "']'" + case tokenLAngle: + return "'<'" + case tokenRAngle: + return "'>'" + case tokenLParen: + return "'('" + case tokenRParen: + return "')'" + case tokenComma: + return "','" + case tokenSemicolon: + return "';'" + case tokenColon: + return "':'" + case tokenDoubleColon: + return "'::'" + case tokenQuestion: + return "'?'" + case tokenEquals: + return "'='" + default: + return "unknown" + } +} + +func tokenDesc(tok token) string { + switch tok.Type { + case tokenEOF: + return "EOF" + case tokenIdent: + return fmt.Sprintf("identifier %q", tok.Text) + case tokenString: + return fmt.Sprintf("string %q", tok.Text) + default: + return fmt.Sprintf("%q", tok.Text) + } +} + +func (p *parser) parseSchema() (*ast.Schema, error) { + schema := &ast.Schema{} + for p.tok.Type != tokenEOF { + annotations, err := p.parseAnnotations() + if err != nil { + return nil, err + } + if p.tok.Type == tokenIdent && p.tok.Text == "namespace" { + if err := 
p.readToken(); err != nil { + return nil, err + } + ns, err := p.parseNamespace(annotations) + if err != nil { + return nil, err + } + if schema.Namespaces == nil { + schema.Namespaces = ast.Namespaces{} + } + if _, ok := schema.Namespaces[ns.name]; ok { + return nil, p.errorf("namespace %q is declared twice", ns.name) + } + schema.Namespaces[ns.name] = ns.ns + } else { + if err := p.parseDecl(annotations, schema); err != nil { + return nil, err + } + } + } + return schema, nil +} + +type parsedNamespace struct { + name types.Path + ns ast.Namespace +} + +func (p *parser) parseNamespace(annotations ast.Annotations) (parsedNamespace, error) { + path, err := p.parsePath() + if err != nil { + return parsedNamespace{}, err + } + if err := p.expect(tokenLBrace); err != nil { + return parsedNamespace{}, err + } + ns := ast.Namespace{Annotations: annotations} + var innerSchema ast.Schema + for p.tok.Type != tokenRBrace { + if p.tok.Type == tokenEOF { + return parsedNamespace{}, p.errorf("expected '}' to close namespace, got EOF") + } + innerAnnotations, err := p.parseAnnotations() + if err != nil { + return parsedNamespace{}, err + } + if err := p.parseDecl(innerAnnotations, &innerSchema); err != nil { + return parsedNamespace{}, err + } + } + if err := p.readToken(); err != nil { // consume '}' + return parsedNamespace{}, err + } + ns.Entities = innerSchema.Entities + ns.Enums = innerSchema.Enums + ns.Actions = innerSchema.Actions + ns.CommonTypes = innerSchema.CommonTypes + return parsedNamespace{name: path, ns: ns}, nil +} + +func (p *parser) parseDecl(annotations ast.Annotations, schema *ast.Schema) error { + if p.tok.Type != tokenIdent { + return p.errorf("expected declaration (entity, action, or type), got %s", tokenDesc(p.tok)) + } + switch p.tok.Text { + case "entity": + if err := p.readToken(); err != nil { + return err + } + return p.parseEntity(annotations, schema) + case "action": + if err := p.readToken(); err != nil { + return err + } + return 
p.parseAction(annotations, schema) + case "type": + if err := p.readToken(); err != nil { + return err + } + return p.parseTypeDecl(annotations, schema) + default: + return p.errorf("expected declaration (entity, action, or type), got identifier %q", p.tok.Text) + } +} + +func (p *parser) parseEntity(annotations ast.Annotations, schema *ast.Schema) error { + names, err := p.parseIdents() + if err != nil { + return err + } + + // Check for enum entity + if p.tok.Type == tokenIdent && p.tok.Text == "enum" { + if err := p.readToken(); err != nil { + return err + } + return p.parseEnumEntity(annotations, names, schema) + } + + // Parse optional 'in' clause + var memberOf []ast.EntityTypeRef + if p.tok.Type == tokenIdent && p.tok.Text == "in" { + if err := p.readToken(); err != nil { + return err + } + memberOf, err = p.parseEntityTypes() + if err != nil { + return err + } + } + + // Parse optional shape (record type), with optional '=' + var shape ast.RecordType + switch p.tok.Type { + case tokenEquals: + if err := p.readToken(); err != nil { + return err + } + rec, err := p.parseRecordType() + if err != nil { + return err + } + shape = rec + case tokenLBrace: + rec, err := p.parseRecordType() + if err != nil { + return err + } + shape = rec + } + + // Parse optional tags + var tags ast.IsType + if p.tok.Type == tokenIdent && p.tok.Text == "tags" { + if err := p.readToken(); err != nil { + return err + } + tags, err = p.parseType() + if err != nil { + return err + } + } + + if err := p.expect(tokenSemicolon); err != nil { + return err + } + + if schema.Entities == nil { + schema.Entities = ast.Entities{} + } + for _, name := range names { + if _, ok := schema.Entities[name]; ok { + return p.errorf("entity %q is declared twice", name) + } + if _, ok := schema.Enums[name]; ok { + return p.errorf("entity %q is declared twice", name) + } + schema.Entities[name] = ast.Entity{ + Annotations: annotations, + ParentTypes: memberOf, + Shape: shape, + Tags: tags, + } + } + return 
nil +} + +func (p *parser) parseEnumEntity(annotations ast.Annotations, names []types.Ident, schema *ast.Schema) error { + if err := p.expect(tokenLBracket); err != nil { + return err + } + var values []types.String + for p.tok.Type != tokenRBracket { + if p.tok.Type != tokenString { + return p.errorf("expected string literal in enum, got %s", tokenDesc(p.tok)) + } + values = append(values, types.String(p.tok.Text)) + if err := p.readToken(); err != nil { + return err + } + if p.tok.Type == tokenComma { + if err := p.readToken(); err != nil { + return err + } + } else if p.tok.Type != tokenRBracket { + return p.errorf("expected ',' or ']' in enum, got %s", tokenDesc(p.tok)) + } + } + if err := p.readToken(); err != nil { // consume ']' + return err + } + if err := p.expect(tokenSemicolon); err != nil { + return err + } + + if schema.Enums == nil { + schema.Enums = ast.Enums{} + } + for _, name := range names { + if _, ok := schema.Enums[name]; ok { + return p.errorf("entity %q is declared twice", name) + } + if _, ok := schema.Entities[name]; ok { + return p.errorf("entity %q is declared twice", name) + } + schema.Enums[name] = ast.Enum{ + Annotations: annotations, + Values: values, + } + } + return nil +} + +func (p *parser) parseAction(annotations ast.Annotations, schema *ast.Schema) error { + names, err := p.parseNames() + if err != nil { + return err + } + + // Parse optional 'in' clause + var memberOf []ast.ParentRef + if p.tok.Type == tokenIdent && p.tok.Text == "in" { + if err := p.readToken(); err != nil { + return err + } + memberOf, err = p.parseActionParents() + if err != nil { + return err + } + } + + // Parse optional appliesTo clause + var appliesTo *ast.AppliesTo + if p.tok.Type == tokenIdent && p.tok.Text == "appliesTo" { + if err := p.readToken(); err != nil { + return err + } + at, err := p.parseAppliesTo() + if err != nil { + return err + } + appliesTo = at + } + + // Allow optional 'attributes {}' (Rust compat, deprecated) + if p.tok.Type == 
tokenIdent && p.tok.Text == "attributes" { + if err := p.readToken(); err != nil { + return err + } + if err := p.expect(tokenLBrace); err != nil { + return err + } + if err := p.expect(tokenRBrace); err != nil { + return err + } + } + + if err := p.expect(tokenSemicolon); err != nil { + return err + } + + if schema.Actions == nil { + schema.Actions = ast.Actions{} + } + for _, name := range names { + if _, ok := schema.Actions[name]; ok { + return p.errorf("action %q is declared twice", name) + } + schema.Actions[name] = ast.Action{ + Annotations: annotations, + Parents: memberOf, + AppliesTo: appliesTo, + } + } + return nil +} + +func (p *parser) parseTypeDecl(annotations ast.Annotations, schema *ast.Schema) error { + if p.tok.Type != tokenIdent { + return p.errorf("expected type name, got %s", tokenDesc(p.tok)) + } + name := p.tok.Text + if slices.Contains(reservedTypeNames, name) { + return p.errorf("%q is a reserved type name", name) + } + if err := p.readToken(); err != nil { + return err + } + if err := p.expect(tokenEquals); err != nil { + return err + } + typ, err := p.parseType() + if err != nil { + return err + } + if err := p.expect(tokenSemicolon); err != nil { + return err + } + + if schema.CommonTypes == nil { + schema.CommonTypes = ast.CommonTypes{} + } + ident := types.Ident(name) + if _, ok := schema.CommonTypes[ident]; ok { + return p.errorf("type %q is declared twice", name) + } + schema.CommonTypes[ident] = ast.CommonType{ + Annotations: annotations, + Type: typ, + } + return nil +} + +func (p *parser) parseAnnotations() (ast.Annotations, error) { + var annotations ast.Annotations + for p.tok.Type == tokenAt { + if err := p.readToken(); err != nil { + return nil, err + } + if p.tok.Type != tokenIdent { + return nil, p.errorf("expected annotation name, got %s", tokenDesc(p.tok)) + } + key := types.Ident(p.tok.Text) + if err := p.readToken(); err != nil { + return nil, err + } + var value types.String + hasValue := false + if p.tok.Type == 
tokenLParen { + if err := p.readToken(); err != nil { + return nil, err + } + if p.tok.Type != tokenString { + return nil, p.errorf("expected annotation value string, got %s", tokenDesc(p.tok)) + } + value = types.String(p.tok.Text) + hasValue = true + if err := p.readToken(); err != nil { + return nil, err + } + if err := p.expect(tokenRParen); err != nil { + return nil, err + } + } + if annotations == nil { + annotations = ast.Annotations{} + } + if hasValue { + annotations[key] = value + } else { + annotations[key] = "" + } + } + return annotations, nil +} + +// parsePath parses IDENT { '::' IDENT } +func (p *parser) parsePath() (types.Path, error) { + if p.tok.Type != tokenIdent { + return "", p.errorf("expected identifier, got %s", tokenDesc(p.tok)) + } + path := p.tok.Text + if err := p.readToken(); err != nil { + return "", err + } + for p.tok.Type == tokenDoubleColon { + if err := p.readToken(); err != nil { + return "", err + } + if p.tok.Type != tokenIdent { + return "", p.errorf("expected identifier after '::', got %s", tokenDesc(p.tok)) + } + path += "::" + p.tok.Text + if err := p.readToken(); err != nil { + return "", err + } + } + return types.Path(path), nil +} + +// parsePathForRef parses a path that may include a trailing '::' followed by a string literal +// for action parent references. Returns the path and whether a string was found. 
+func (p *parser) parsePathForRef() (path types.Path, str types.String, qualified bool, err error) { + if p.tok.Type != tokenIdent { + return "", "", false, p.errorf("expected identifier, got %s", tokenDesc(p.tok)) + } + pathStr := p.tok.Text + if err := p.readToken(); err != nil { + return "", "", false, err + } + for p.tok.Type == tokenDoubleColon { + if err := p.readToken(); err != nil { + return "", "", false, err + } + if p.tok.Type == tokenString { + str := types.String(p.tok.Text) + if err := p.readToken(); err != nil { + return "", "", false, err + } + return types.Path(pathStr), str, true, nil + } + if p.tok.Type != tokenIdent { + return "", "", false, p.errorf("expected identifier or string after '::', got %s", tokenDesc(p.tok)) + } + pathStr += "::" + p.tok.Text + if err := p.readToken(); err != nil { + return "", "", false, err + } + } + return types.Path(pathStr), "", false, nil +} + +// parseIdents parses IDENT { ',' IDENT } +func (p *parser) parseIdents() ([]types.Ident, error) { + if p.tok.Type != tokenIdent { + return nil, p.errorf("expected identifier, got %s", tokenDesc(p.tok)) + } + var result []types.Ident + result = append(result, types.Ident(p.tok.Text)) + if err := p.readToken(); err != nil { + return nil, err + } + for p.tok.Type == tokenComma { + if err := p.readToken(); err != nil { + return nil, err + } + if p.tok.Type != tokenIdent { + return nil, p.errorf("expected identifier after ',', got %s", tokenDesc(p.tok)) + } + result = append(result, types.Ident(p.tok.Text)) + if err := p.readToken(); err != nil { + return nil, err + } + } + return result, nil +} + +// parseNames parses Name { ',' Name } where Name = IDENT | STR +func (p *parser) parseNames() ([]types.String, error) { + name, err := p.parseName() + if err != nil { + return nil, err + } + result := []types.String{name} + for p.tok.Type == tokenComma { + if err := p.readToken(); err != nil { + return nil, err + } + name, err = p.parseName() + if err != nil { + return nil, err + 
} + result = append(result, name) + } + return result, nil +} + +func (p *parser) parseName() (types.String, error) { + switch p.tok.Type { + case tokenIdent: + name := types.String(p.tok.Text) + if err := p.readToken(); err != nil { + return "", err + } + return name, nil + case tokenString: + name := types.String(p.tok.Text) + if err := p.readToken(); err != nil { + return "", err + } + return name, nil + default: + return "", p.errorf("expected name (identifier or string), got %s", tokenDesc(p.tok)) + } +} + +// parseEntityTypes parses Path | '[' [ Path { ',' Path } ] ']' +func (p *parser) parseEntityTypes() ([]ast.EntityTypeRef, error) { + if p.tok.Type == tokenLBracket { + if err := p.readToken(); err != nil { + return nil, err + } + var result []ast.EntityTypeRef + for p.tok.Type != tokenRBracket { + path, err := p.parsePath() + if err != nil { + return nil, err + } + result = append(result, ast.EntityTypeRef(types.EntityType(path))) + if p.tok.Type == tokenComma { + if err := p.readToken(); err != nil { + return nil, err + } + } else if p.tok.Type != tokenRBracket { + return nil, p.errorf("expected ',' or ']', got %s", tokenDesc(p.tok)) + } + } + return result, p.readToken() // consume ']' + } + path, err := p.parsePath() + if err != nil { + return nil, err + } + return []ast.EntityTypeRef{ast.EntityTypeRef(types.EntityType(path))}, nil +} + +// parseActionParents parses QualName | '[' QualName { ',' QualName } ']' +func (p *parser) parseActionParents() ([]ast.ParentRef, error) { + if p.tok.Type == tokenLBracket { + if err := p.readToken(); err != nil { + return nil, err + } + var result []ast.ParentRef + for p.tok.Type != tokenRBracket { + ref, err := p.parseQualName() + if err != nil { + return nil, err + } + result = append(result, ref) + if p.tok.Type == tokenComma { + if err := p.readToken(); err != nil { + return nil, err + } + } else if p.tok.Type != tokenRBracket { + return nil, p.errorf("expected ',' or ']', got %s", tokenDesc(p.tok)) + } + } + 
return result, p.readToken() // consume ']' + } + ref, err := p.parseQualName() + if err != nil { + return nil, err + } + return []ast.ParentRef{ref}, nil +} + +// parseQualName parses QualName = Name | Path '::' STR +func (p *parser) parseQualName() (ast.ParentRef, error) { + if p.tok.Type == tokenString { + name := types.String(p.tok.Text) + if err := p.readToken(); err != nil { + return ast.ParentRef{}, err + } + return ast.ParentRefFromID(name), nil + } + path, str, qualified, err := p.parsePathForRef() + if err != nil { + return ast.ParentRef{}, err + } + if qualified { + return ast.NewParentRef(ast.EntityTypeRef(path), str), nil + } + // Bare identifier: treat as an action ID + return ast.ParentRefFromID(types.String(path)), nil +} + +// parseAppliesTo parses '{' AppDecls '}' +func (p *parser) parseAppliesTo() (*ast.AppliesTo, error) { + if err := p.expect(tokenLBrace); err != nil { + return nil, err + } + at := &ast.AppliesTo{} + for p.tok.Type != tokenRBrace { + if p.tok.Type == tokenEOF { + return nil, p.errorf("expected '}' to close appliesTo, got EOF") + } + if p.tok.Type != tokenIdent { + return nil, p.errorf("expected 'principal', 'resource', or 'context', got %s", tokenDesc(p.tok)) + } + switch p.tok.Text { + case "principal": + if err := p.readToken(); err != nil { + return nil, err + } + if err := p.expect(tokenColon); err != nil { + return nil, err + } + refs, err := p.parseEntityTypes() + if err != nil { + return nil, err + } + at.Principals = refs + case "resource": + if err := p.readToken(); err != nil { + return nil, err + } + if err := p.expect(tokenColon); err != nil { + return nil, err + } + refs, err := p.parseEntityTypes() + if err != nil { + return nil, err + } + at.Resources = refs + case "context": + if err := p.readToken(); err != nil { + return nil, err + } + if err := p.expect(tokenColon); err != nil { + return nil, err + } + ctx, err := p.parseType() + if err != nil { + return nil, err + } + at.Context = ctx + default: + return nil, 
p.errorf("expected 'principal', 'resource', or 'context', got %q", p.tok.Text) + } + if p.tok.Type == tokenComma { + if err := p.readToken(); err != nil { + return nil, err + } + } + } + return at, p.readToken() // consume '}' +} + +// parseType parses Path | 'Set' '<' Type '>' | '{' AttrDecls '}' +func (p *parser) parseType() (ast.IsType, error) { + if p.tok.Type == tokenLBrace { + rec, err := p.parseRecordType() + if err != nil { + return nil, err + } + return rec, nil + } + + if p.tok.Type == tokenIdent && p.tok.Text == "Set" { + if err := p.readToken(); err != nil { + return nil, err + } + if err := p.expect(tokenLAngle); err != nil { + return nil, err + } + elem, err := p.parseType() + if err != nil { + return nil, err + } + if err := p.expect(tokenRAngle); err != nil { + return nil, err + } + return ast.Set(elem), nil + } + + path, err := p.parsePath() + if err != nil { + return nil, err + } + return ast.TypeRef(path), nil +} + +// parseRecordType parses '{' [ AttrDecls ] '}' +func (p *parser) parseRecordType() (ast.RecordType, error) { + if err := p.expect(tokenLBrace); err != nil { + return nil, err + } + rec := ast.RecordType{} + for p.tok.Type != tokenRBrace { + if p.tok.Type == tokenEOF { + return nil, p.errorf("expected '}' to close record type, got EOF") + } + attrAnnotations, err := p.parseAnnotations() + if err != nil { + return nil, err + } + name, err := p.parseName() + if err != nil { + return nil, err + } + optional := false + if p.tok.Type == tokenQuestion { + optional = true + if err := p.readToken(); err != nil { + return nil, err + } + } + if err := p.expect(tokenColon); err != nil { + return nil, err + } + typ, err := p.parseType() + if err != nil { + return nil, err + } + rec[name] = ast.Attribute{ + Type: typ, + Optional: optional, + Annotations: attrAnnotations, + } + if p.tok.Type == tokenComma { + if err := p.readToken(); err != nil { + return nil, err + } + } + } + return rec, p.readToken() // consume '}' +} diff --git 
a/x/exp/schema/internal/parser/parser_internal_test.go b/x/exp/schema/internal/parser/parser_internal_test.go new file mode 100644 index 00000000..31eedafb --- /dev/null +++ b/x/exp/schema/internal/parser/parser_internal_test.go @@ -0,0 +1,208 @@ +package parser + +import ( + "testing" + + "github.com/cedar-policy/cedar-go/internal/testutil" +) + +func TestLexerBasicTokens(t *testing.T) { + src := `@{}<>[](),;:?=::` + l := newLexer("", []byte(src)) + expected := []tokenType{ + tokenAt, tokenLBrace, tokenRBrace, + tokenLAngle, tokenRAngle, tokenLBracket, tokenRBracket, + tokenLParen, tokenRParen, tokenComma, tokenSemicolon, + tokenColon, tokenQuestion, tokenEquals, tokenDoubleColon, tokenEOF, + } + for _, tt := range expected { + tok, err := l.next() + testutil.OK(t, err) + testutil.Equals(t, tok.Type, tt) + } +} + +func TestLexerStringEscapes(t *testing.T) { + src := `"hello\nworld"` + l := newLexer("", []byte(src)) + tok, err := l.next() + testutil.OK(t, err) + testutil.Equals(t, tok.Type, tokenString) + testutil.Equals(t, tok.Text, "hello\nworld") +} + +func TestLexerUnterminatedString(t *testing.T) { + src := `"hello` + l := newLexer("", []byte(src)) + _, err := l.next() + testutil.Error(t, err) +} + +func TestLexerUnterminatedStringNewline(t *testing.T) { + src := "\"hello\nworld\"" + l := newLexer("", []byte(src)) + _, err := l.next() + testutil.Error(t, err) +} + +func TestLexerUnterminatedStringBackslash(t *testing.T) { + src := `"hello\` + l := newLexer("", []byte(src)) + _, err := l.next() + testutil.Error(t, err) +} + +func TestLexerUnexpectedChar(t *testing.T) { + src := `$` + l := newLexer("", []byte(src)) + _, err := l.next() + testutil.Error(t, err) +} + +func TestLexerLineComment(t *testing.T) { + src := "// comment\nfoo" + l := newLexer("", []byte(src)) + tok, err := l.next() + testutil.OK(t, err) + testutil.Equals(t, tok.Type, tokenIdent) + testutil.Equals(t, tok.Text, "foo") +} + +func TestLexerBlockComment(t *testing.T) { + src := "/* block 
*/foo" + l := newLexer("", []byte(src)) + tok, err := l.next() + testutil.OK(t, err) + testutil.Equals(t, tok.Type, tokenIdent) + testutil.Equals(t, tok.Text, "foo") +} + +func TestLexerUnterminatedBlockComment(t *testing.T) { + src := "/* unterminated" + l := newLexer("", []byte(src)) + _, err := l.next() + testutil.Error(t, err) +} + +func TestLexerPosition(t *testing.T) { + src := "foo\nbar" + l := newLexer("test.cedar", []byte(src)) + tok, err := l.next() + testutil.OK(t, err) + testutil.Equals(t, tok.Pos.Line, 1) + testutil.Equals(t, tok.Pos.Column, 1) + testutil.Equals(t, tok.Pos.Filename, "test.cedar") + + tok, err = l.next() + testutil.OK(t, err) + testutil.Equals(t, tok.Pos.Line, 2) + testutil.Equals(t, tok.Pos.Column, 1) +} + +func TestLexerEOF(t *testing.T) { + l := newLexer("", []byte("")) + tok, err := l.next() + testutil.OK(t, err) + testutil.Equals(t, tok.Type, tokenEOF) +} + +func TestPositionString(t *testing.T) { + p := position{Line: 1, Column: 5} + testutil.Equals(t, p.String(), ":1:5") + + p.Filename = "test.cedarschema" + testutil.Equals(t, p.String(), "test.cedarschema:1:5") +} + +func TestTokenName(t *testing.T) { + tests := []struct { + tt tokenType + want string + }{ + {tokenEOF, "EOF"}, + {tokenIdent, "identifier"}, + {tokenString, "string"}, + {tokenAt, "'@'"}, + {tokenLBrace, "'{'"}, + {tokenRBrace, "'}'"}, + {tokenLBracket, "'['"}, + {tokenRBracket, "']'"}, + {tokenLAngle, "'<'"}, + {tokenRAngle, "'>'"}, + {tokenLParen, "'('"}, + {tokenRParen, "')'"}, + {tokenComma, "','"}, + {tokenSemicolon, "';'"}, + {tokenColon, "':'"}, + {tokenDoubleColon, "'::'"}, + {tokenQuestion, "'?'"}, + {tokenEquals, "'='"}, + {tokenType(999), "unknown"}, + } + for _, tt := range tests { + testutil.Equals(t, tokenName(tt.tt), tt.want) + } +} + +func TestTokenDesc(t *testing.T) { + testutil.Equals(t, tokenDesc(token{Type: tokenEOF}), "EOF") + testutil.Equals(t, tokenDesc(token{Type: tokenIdent, Text: "foo"}), `identifier "foo"`) + testutil.Equals(t, 
tokenDesc(token{Type: tokenString, Text: "bar"}), `string "bar"`) + testutil.Equals(t, tokenDesc(token{Type: tokenLBrace, Text: "{"}), `"{"`) +} + +func TestIsValidIdent(t *testing.T) { + testutil.Equals(t, isValidIdent("foo"), true) + testutil.Equals(t, isValidIdent("_bar"), true) + testutil.Equals(t, isValidIdent("a1"), true) + testutil.Equals(t, isValidIdent(""), false) + testutil.Equals(t, isValidIdent("1abc"), false) + testutil.Equals(t, isValidIdent("foo bar"), false) +} + +func TestLexerBadStringEscape(t *testing.T) { + src := `"\q"` + l := newLexer("", []byte(src)) + _, err := l.next() + testutil.Error(t, err) +} + +func TestLexerWhitespace(t *testing.T) { + src := " \t\r\n foo" + l := newLexer("", []byte(src)) + tok, err := l.next() + testutil.OK(t, err) + testutil.Equals(t, tok.Type, tokenIdent) + testutil.Equals(t, tok.Text, "foo") +} + +func TestQuoteCedar(t *testing.T) { + tests := []struct { + input string + want string + }{ + {`hello`, `"hello"`}, + {`he"lo`, `"he\"lo"`}, + {`he\lo`, `"he\\lo"`}, + {"he\nlo", `"he\nlo"`}, + {"he\rlo", `"he\rlo"`}, + {"he\tlo", `"he\tlo"`}, + {"he\x00lo", `"he\0lo"`}, + {"he\vlo", `"he\u{b}lo"`}, + {"he\u0080lo", `"he\u{80}lo"`}, + {"he\U0001F600lo", `"he\u{1f600}lo"`}, + } + for _, tt := range tests { + testutil.Equals(t, quoteCedar(tt.input), tt.want) + } +} + +func TestLexerPeekAtEOF(t *testing.T) { + l := newLexer("", []byte("")) + testutil.Equals(t, l.peek(), rune(-1)) +} + +func TestLexerAdvanceAtEOF(t *testing.T) { + l := newLexer("", []byte("")) + testutil.Equals(t, l.advance(), rune(-1)) +} diff --git a/x/exp/schema/internal/parser/parser_test.go b/x/exp/schema/internal/parser/parser_test.go new file mode 100644 index 00000000..f5cf58a4 --- /dev/null +++ b/x/exp/schema/internal/parser/parser_test.go @@ -0,0 +1,970 @@ +package parser_test + +import ( + "os" + "testing" + + "github.com/cedar-policy/cedar-go/internal/testutil" + "github.com/cedar-policy/cedar-go/types" + 
"github.com/cedar-policy/cedar-go/x/exp/schema/ast" + "github.com/cedar-policy/cedar-go/x/exp/schema/internal/parser" +) + +func TestParseEmpty(t *testing.T) { + schema, err := parser.ParseSchema("", []byte("")) + testutil.OK(t, err) + testutil.Equals(t, schema, &ast.Schema{}) +} + +func TestParseBasicFile(t *testing.T) { + src, err := os.ReadFile("testdata/basic.cedarschema") + testutil.OK(t, err) + schema, err := parser.ParseSchema("basic.cedarschema", src) + testutil.OK(t, err) + + ns := schema.Namespaces["PhotoApp"] + testutil.Equals(t, len(ns.Entities), 3) + testutil.Equals(t, len(ns.Actions), 2) + testutil.Equals(t, len(ns.CommonTypes), 1) + + user := ns.Entities["User"] + testutil.Equals(t, user.ParentTypes, []ast.EntityTypeRef{"Group"}) + testutil.Equals(t, user.Shape != nil, true) + testutil.Equals(t, len(user.Shape), 2) + testutil.Equals(t, user.Shape["name"].Type, ast.IsType(ast.TypeRef("String"))) + testutil.Equals(t, user.Shape["name"].Optional, false) + testutil.Equals(t, user.Shape["age"].Type, ast.IsType(ast.TypeRef("Long"))) + testutil.Equals(t, user.Shape["age"].Optional, true) + + group := ns.Entities["Group"] + testutil.Equals(t, group.Shape == nil, true) + testutil.Equals(t, len(group.ParentTypes), 0) + + photo := ns.Entities["Photo"] + testutil.Equals(t, photo.Shape != nil, true) + testutil.Equals(t, photo.Tags, ast.IsType(ast.TypeRef("String"))) + + viewPhoto := ns.Actions["viewPhoto"] + testutil.Equals(t, viewPhoto.AppliesTo != nil, true) + testutil.Equals(t, viewPhoto.AppliesTo.Principals, []ast.EntityTypeRef{"User"}) + testutil.Equals(t, viewPhoto.AppliesTo.Resources, []ast.EntityTypeRef{"Photo"}) + + createPhoto := ns.Actions["createPhoto"] + testutil.Equals(t, len(createPhoto.Parents), 1) + testutil.Equals(t, createPhoto.Parents[0], ast.ParentRefFromID("viewPhoto")) +} + +func TestParseMultiNameEntity(t *testing.T) { + src := `entity A, B, C { name: String };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + 
testutil.Equals(t, len(schema.Entities), 3) + for _, name := range []types.Ident{"A", "B", "C"} { + _, ok := schema.Entities[name] + testutil.Equals(t, ok, true) + } +} + +func TestParseEnumEntity(t *testing.T) { + src := `entity Status enum ["active", "inactive", "pending"];` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Enums), 1) + status := schema.Enums["Status"] + testutil.Equals(t, status.Values, []types.String{"active", "inactive", "pending"}) +} + +func TestParseMultiNameEnum(t *testing.T) { + src := `entity A, B enum ["x", "y"];` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Enums), 2) + testutil.Equals(t, schema.Enums["A"].Values, []types.String{"x", "y"}) + testutil.Equals(t, schema.Enums["B"].Values, []types.String{"x", "y"}) +} + +func TestParseAnnotations(t *testing.T) { + src := ` +@doc("user entity") +entity User; +` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.Annotations["doc"], types.String("user entity")) +} + +func TestParseAnnotationNoValue(t *testing.T) { + src := ` +@deprecated +entity User; +` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + _, ok := user.Annotations["deprecated"] + testutil.Equals(t, ok, true) +} + +func TestParseNamespaceAnnotations(t *testing.T) { + src := ` +@doc("my namespace") +namespace Foo { + entity Bar; +} +` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + ns := schema.Namespaces["Foo"] + testutil.Equals(t, ns.Annotations["doc"], types.String("my namespace")) +} + +func TestParseActionStringName(t *testing.T) { + src := `action "view photo" appliesTo { principal: User, resource: Photo };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + _, ok := schema.Actions["view photo"] + 
testutil.Equals(t, ok, true) +} + +func TestParseActionMultipleNames(t *testing.T) { + src := `action read, write appliesTo { principal: User, resource: Resource };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Actions), 2) + _, ok := schema.Actions["read"] + testutil.Equals(t, ok, true) + _, ok = schema.Actions["write"] + testutil.Equals(t, ok, true) +} + +func TestParseActionQualifiedParent(t *testing.T) { + src := `action view in [MyApp::Action::"readOnly"] appliesTo { principal: User, resource: Photo };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + view := schema.Actions["view"] + testutil.Equals(t, len(view.Parents), 1) + testutil.Equals(t, view.Parents[0], ast.NewParentRef("MyApp::Action", "readOnly")) +} + +func TestParseActionBareParent(t *testing.T) { + src := `action view in readOnly;` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + view := schema.Actions["view"] + testutil.Equals(t, len(view.Parents), 1) + testutil.Equals(t, view.Parents[0], ast.ParentRefFromID("readOnly")) +} + +func TestParseActionNoAppliesTo(t *testing.T) { + src := `action view;` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + view := schema.Actions["view"] + testutil.Equals(t, view.AppliesTo == nil, true) +} + +func TestParseEntityInList(t *testing.T) { + src := `entity User in [Admin, Group];` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.ParentTypes, []ast.EntityTypeRef{"Admin", "Group"}) +} + +func TestParseEntityInSingle(t *testing.T) { + src := `entity User in Admin;` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.ParentTypes, []ast.EntityTypeRef{"Admin"}) +} + +func TestParseEntityWithEquals(t *testing.T) { + src := `entity User = { name: String };` + 
schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.Shape != nil, true) +} + +func TestParseSetOfSet(t *testing.T) { + src := `entity User { tags: Set<Set<Long>> };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.Shape["tags"].Type, ast.IsType(ast.Set(ast.Set(ast.TypeRef("Long"))))) +} + +func TestParseTypeDecl(t *testing.T) { + src := `type Context = { ip: ipaddr, name: String };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + ct := schema.CommonTypes["Context"] + rec, ok := ct.Type.(ast.RecordType) + testutil.Equals(t, ok, true) + testutil.Equals(t, len(rec), 2) +} + +func TestParseReservedTypeName(t *testing.T) { + tests := []string{"Bool", "Boolean", "Entity", "Extension", "Long", "Record", "Set", "String"} + for _, name := range tests { + t.Run(name, func(t *testing.T) { + src := `type ` + name + ` = { x: Long };` + _, err := parser.ParseSchema("", []byte(src)) + testutil.Error(t, err) + }) + } +} + +func TestParseComments(t *testing.T) { + src := ` +// This is a comment +entity User; // trailing comment +/* block + comment */ +entity Group; +` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Entities), 2) +} + +func TestParseOptionalAttribute(t *testing.T) { + src := `entity User { name?: String };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, schema.Entities["User"].Shape["name"].Optional, true) +} + +func TestParseBareDeclarations(t *testing.T) { + src := ` +entity User; +entity Group; +action view; +` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Entities), 2) + testutil.Equals(t, len(schema.Actions), 1) +} + +func TestParseMixedBareAndNamespaced(t *testing.T) { + src := ` +entity Global; +namespace Foo 
{ + entity Bar; +} +` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + _, ok := schema.Entities["Global"] + testutil.Equals(t, ok, true) + ns := schema.Namespaces["Foo"] + _, ok = ns.Entities["Bar"] + testutil.Equals(t, ok, true) +} + +func TestParseNestedNamespacePath(t *testing.T) { + src := `namespace Foo::Bar { entity Baz; }` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + ns := schema.Namespaces["Foo::Bar"] + _, ok := ns.Entities["Baz"] + testutil.Equals(t, ok, true) +} + +func TestParseCedarNamespace(t *testing.T) { + src := `entity User { name: __cedar::String };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.Shape["name"].Type, ast.IsType(ast.TypeRef("__cedar::String"))) +} + +func TestParseEntityTypeQualified(t *testing.T) { + src := `entity User in NS::Group;` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.ParentTypes, []ast.EntityTypeRef{"NS::Group"}) +} + +func TestParseActionAppliesToEmptyPrincipal(t *testing.T) { + src := `action view appliesTo { principal: [], resource: Photo };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + view := schema.Actions["view"] + testutil.Equals(t, len(view.AppliesTo.Principals), 0) + testutil.Equals(t, view.AppliesTo.Resources, []ast.EntityTypeRef{"Photo"}) +} + +func TestParseContextTypeName(t *testing.T) { + src := `action view appliesTo { principal: User, resource: Photo, context: MyContext };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + view := schema.Actions["view"] + testutil.Equals(t, view.AppliesTo.Context, ast.IsType(ast.TypeRef("MyContext"))) +} + +func TestParseAttrAnnotations(t *testing.T) { + src := `entity User { + @doc("the name") + name: String +};` + schema, err := parser.ParseSchema("", []byte(src)) + 
testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.Shape["name"].Annotations["doc"], types.String("the name")) +} + +func TestParseUnicodeString(t *testing.T) { + src := `entity User enum ["\u{1F600}"];` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, schema.Enums["User"].Values, []types.String{"\U0001F600"}) +} + +func TestParseTrailingCommaInRecord(t *testing.T) { + src := `entity User { name: String, age: Long, };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Entities["User"].Shape), 2) +} + +func TestParseTrailingCommaInEntityList(t *testing.T) { + src := `entity User in [Admin, Group,];` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Entities["User"].ParentTypes), 2) +} + +func TestParseTrailingCommaInAppliesTo(t *testing.T) { + src := `action view appliesTo { principal: User, resource: Photo, };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Actions["view"].AppliesTo.Principals), 1) +} + +func TestParseActionParentStringLiteral(t *testing.T) { + src := `action view in ["readOnly"];` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + view := schema.Actions["view"] + testutil.Equals(t, view.Parents[0], ast.ParentRefFromID("readOnly")) +} + +func TestParseEntityEmptyRecord(t *testing.T) { + src := `entity User {};` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.Shape != nil, true) + testutil.Equals(t, len(user.Shape), 0) +} + +func TestParseEntityInlineRecord(t *testing.T) { + src := `entity User { info: { name: String, age: Long } };` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + rec, ok := user.Shape["info"].Type.(ast.RecordType) 
+ testutil.Equals(t, ok, true) + testutil.Equals(t, len(rec), 2) +} + +func TestParseEntityWithTagsAndShape(t *testing.T) { + src := `entity User { name: String } tags Long;` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + user := schema.Entities["User"] + testutil.Equals(t, user.Shape != nil, true) + testutil.Equals(t, user.Tags, ast.IsType(ast.TypeRef("Long"))) +} + +func TestParseEnumTrailingComma(t *testing.T) { + src := `entity Status enum ["a", "b",];` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Enums["Status"].Values), 2) +} + +func TestParseActionAttributesDeprecated(t *testing.T) { + src := `action view appliesTo { principal: User, resource: Photo } attributes {};` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, schema.Actions["view"].AppliesTo != nil, true) +} + +func TestParseMultipleNamespaces(t *testing.T) { + src := ` +namespace A { + entity Foo; +} +namespace B { + entity Bar; +} +` + schema, err := parser.ParseSchema("", []byte(src)) + testutil.OK(t, err) + testutil.Equals(t, len(schema.Namespaces), 2) + _, ok := schema.Namespaces["A"].Entities["Foo"] + testutil.Equals(t, ok, true) + _, ok = schema.Namespaces["B"].Entities["Bar"] + testutil.Equals(t, ok, true) +} + +func TestParseErrorPosition(t *testing.T) { + src := `entity User { + name String +};` + _, err := parser.ParseSchema("test.cedarschema", []byte(src)) + testutil.Error(t, err) + errStr := err.Error() + testutil.Equals(t, true, len(errStr) > 0) +} + +func TestMarshalRoundTrip(t *testing.T) { + src := `namespace PhotoApp { + entity User in [Group] { + name: String, + age?: Long + }; + + entity Group; + + entity Photo { + owner: User, + tags: Set<String> + } tags String; + + action viewPhoto appliesTo { + principal: User, + resource: Photo, + context: {} + }; + + type Context = { + ip: ipaddr + }; +} +` + schema, err := parser.ParseSchema("", []byte(src)) + 
testutil.OK(t, err) + + out := parser.MarshalSchema(schema) + + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + + out2 := parser.MarshalSchema(schema2) + testutil.Equals(t, string(out), string(out2)) +} + +func TestMarshalEmpty(t *testing.T) { + schema := &ast.Schema{} + out := parser.MarshalSchema(schema) + testutil.Equals(t, string(out), "") +} + +func TestMarshalBareEntities(t *testing.T) { + schema := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{}, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + testutil.Equals(t, len(schema2.Entities), 1) +} + +func TestMarshalEnumEntity(t *testing.T) { + schema := &ast.Schema{ + Enums: ast.Enums{ + "Status": ast.Enum{ + Values: []types.String{"active", "inactive"}, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + testutil.Equals(t, schema2.Enums["Status"].Values, []types.String{"active", "inactive"}) +} + +func TestMarshalActionParentRef(t *testing.T) { + schema := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + Parents: []ast.ParentRef{ + ast.NewParentRef("NS::Action", "readOnly"), + }, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + testutil.Equals(t, schema2.Actions["view"].Parents[0], ast.NewParentRef("NS::Action", "readOnly")) +} + +func TestMarshalStringActionName(t *testing.T) { + schema := &ast.Schema{ + Actions: ast.Actions{ + "view photo": ast.Action{}, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + _, ok := schema2.Actions["view photo"] + testutil.Equals(t, ok, true) +} + +func TestMarshalAnnotations(t *testing.T) { + schema := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Annotations: ast.Annotations{ + "doc": "user entity", + }, + }, + }, + } + out := 
parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + testutil.Equals(t, schema2.Entities["User"].Annotations["doc"], types.String("user entity")) +} + +func TestMarshalAllTypes(t *testing.T) { + schema := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "s": ast.Attribute{Type: ast.TypeRef("String")}, + "l": ast.Attribute{Type: ast.TypeRef("Long")}, + "b": ast.Attribute{Type: ast.TypeRef("Bool")}, + "ip": ast.Attribute{Type: ast.TypeRef("ipaddr")}, + "dec": ast.Attribute{Type: ast.TypeRef("decimal")}, + "dt": ast.Attribute{Type: ast.TypeRef("datetime")}, + "dur": ast.Attribute{Type: ast.TypeRef("duration")}, + "set": ast.Attribute{Type: ast.Set(ast.TypeRef("Long"))}, + "rec": ast.Attribute{Type: ast.RecordType{}}, + "ref": ast.Attribute{Type: ast.EntityTypeRef("NS::Foo")}, + }, + }, + }, + } + out := parser.MarshalSchema(schema) + _, err := parser.ParseSchema("", out) + testutil.OK(t, err) +} + +func TestMarshalMultipleEntityTypeRefs(t *testing.T) { + schema := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + ParentTypes: []ast.EntityTypeRef{"Admin", "Group"}, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + testutil.Equals(t, len(schema2.Entities["User"].ParentTypes), 2) +} + +func TestMarshalMultipleActionParents(t *testing.T) { + schema := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + Parents: []ast.ParentRef{ + ast.ParentRefFromID("read"), + ast.ParentRefFromID("write"), + }, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + testutil.Equals(t, len(schema2.Actions["view"].Parents), 2) +} + +func TestMarshalQuotedAttrName(t *testing.T) { + schema := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "has space": ast.Attribute{Type: ast.TypeRef("String")}, + }, + }, + }, + 
} + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + _, ok := schema2.Entities["User"].Shape["has space"] + testutil.Equals(t, ok, true) +} + +func TestMarshalAnnotationNoValue(t *testing.T) { + schema := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Annotations: ast.Annotations{ + "deprecated": "", + }, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + _, ok := schema2.Entities["User"].Annotations["deprecated"] + testutil.Equals(t, ok, true) +} + +func TestMarshalNamespace(t *testing.T) { + schema := &ast.Schema{ + Namespaces: ast.Namespaces{ + "Foo": ast.Namespace{ + Entities: ast.Entities{ + "Bar": ast.Entity{}, + }, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + _, ok := schema2.Namespaces["Foo"].Entities["Bar"] + testutil.Equals(t, ok, true) +} + +func TestMarshalNamespaceWithAnnotations(t *testing.T) { + schema := &ast.Schema{ + Namespaces: ast.Namespaces{ + "Foo": ast.Namespace{ + Annotations: ast.Annotations{ + "doc": "foo ns", + }, + Entities: ast.Entities{ + "Bar": ast.Entity{}, + }, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + testutil.Equals(t, schema2.Namespaces["Foo"].Annotations["doc"], types.String("foo ns")) +} + +func TestMarshalActionBareParent(t *testing.T) { + schema := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + Parents: []ast.ParentRef{ + ast.ParentRefFromID("readOnly"), + }, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + testutil.Equals(t, schema2.Actions["view"].Parents[0], ast.ParentRefFromID("readOnly")) +} + +func TestMarshalPrimitiveTypes(t *testing.T) { + schema := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + 
"s": ast.Attribute{Type: ast.StringType{}}, + "l": ast.Attribute{Type: ast.LongType{}}, + "b": ast.Attribute{Type: ast.BoolType{}}, + "e": ast.Attribute{Type: ast.ExtensionType("ipaddr")}, + }, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + testutil.Equals(t, len(schema2.Entities["User"].Shape), 4) +} + +func TestMarshalEmptyNamespaceMap(t *testing.T) { + schema := &ast.Schema{ + Namespaces: ast.Namespaces{}, + } + out := parser.MarshalSchema(schema) + testutil.Equals(t, string(out), "") +} + +func TestMarshalEmptyCommonTypes(t *testing.T) { + schema := &ast.Schema{ + CommonTypes: ast.CommonTypes{}, + } + out := parser.MarshalSchema(schema) + testutil.Equals(t, string(out), "") +} + +func TestMarshalNamespaceCommonTypes(t *testing.T) { + schema := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "Ctx": ast.CommonType{ + Type: ast.RecordType{}, + }, + }, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + _, ok := schema2.Namespaces["NS"].CommonTypes["Ctx"] + testutil.Equals(t, ok, true) +} + +func TestMarshalBareAndNamespaced(t *testing.T) { + schema := &ast.Schema{ + Entities: ast.Entities{ + "Global": ast.Entity{}, + }, + Namespaces: ast.Namespaces{ + "Foo": ast.Namespace{ + Entities: ast.Entities{ + "Bar": ast.Entity{}, + }, + }, + }, + } + out := parser.MarshalSchema(schema) + schema2, err := parser.ParseSchema("", out) + testutil.OK(t, err) + _, ok := schema2.Entities["Global"] + testutil.Equals(t, ok, true) + _, ok = schema2.Namespaces["Foo"].Entities["Bar"] + testutil.Equals(t, ok, true) +} + +func TestMarshalMultipleDecls(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS1": ast.Namespace{}, + "NS2": ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "A": ast.CommonType{Type: ast.StringType{}}, + "B": ast.CommonType{Type: ast.LongType{}}, + }, + 
Enums: ast.Enums{ + "Color": ast.Enum{Values: []types.String{"red"}}, + "Size": ast.Enum{Values: []types.String{"small"}}, + }, + }, + }, + } + result := parser.MarshalSchema(s) + testutil.Equals(t, len(result) > 0, true) + _, err := parser.ParseSchema("", result) + testutil.OK(t, err) +} + +func TestMarshalNamespaceQualifiedKeyRoundTripBreaks(t *testing.T) { + // Entity keys must be bare Idents, not namespace-qualified. + // A qualified key like "Foo::Bar" in namespace "Foo" marshals as + // "entity Foo::Bar" inside the namespace block. Re-parsing fails + // because "::" is not valid in a bare entity declaration name. + schema := &ast.Schema{ + Namespaces: ast.Namespaces{ + "Foo": ast.Namespace{ + Entities: ast.Entities{ + "Foo::Bar": ast.Entity{}, + }, + }, + }, + } + out := parser.MarshalSchema(schema) + _, err := parser.ParseSchema("", out) + testutil.Error(t, err) +} + +func TestParseSchemaErrors(t *testing.T) { + tests := []struct { + name string + input string + }{ + // Lexer error coverage: '$' is invalid and causes a lexer error + {"readToken after namespace keyword", `namespace $`}, + {"readToken in inner annotations", `namespace Foo { @$`}, + {"readToken after namespace close brace", `namespace Foo { entity Bar; }$`}, + {"readToken after entity keyword", `entity $`}, + {"readToken after action keyword", `action $`}, + {"readToken after type keyword", `type $`}, + {"readToken after enum keyword", `entity Foo enum $`}, + {"readToken after in keyword", `entity Foo in $`}, + {"readToken after equals", `entity Foo = $`}, + {"parseRecordType error after equals", `entity Foo = { $ }`}, + {"readToken after tags keyword", `entity Foo tags $`}, + {"readToken after string in enum", `entity Foo enum ["a"$`}, + {"readToken after comma in enum", `entity Foo enum ["a",$`}, + {"readToken after bracket in enum", `entity Foo enum ["a"]$`}, + {"expect semicolon after enum", `entity Foo enum ["a"]}`}, + {"readToken after action in", `action view in $`}, + {"readToken after 
appliesTo", `action view appliesTo $`}, + {"readToken after attributes keyword", `action view attributes $`}, + {"expect lbrace after attributes", `action view attributes foo`}, + {"expect rbrace after attributes lbrace", `action view attributes { foo`}, + {"expect semicolon after action", `action view}`}, + {"readToken after type name", `type Foo$`}, + {"parseType error in type decl", `type Foo = $`}, + {"expect semicolon after type decl", `type Foo = Long}`}, + {"readToken after at sign", `@$`}, + {"readToken after annotation name", `@doc$`}, + {"readToken after annotation lparen", `@doc($`}, + {"readToken after annotation string value", `@doc("x"$`}, + {"expect rparen after annotation value", `@doc("x"}`}, + {"readToken after path ident", `entity Foo in Bar$`}, + {"readToken after path double colon", `entity Foo in Bar::$`}, + {"readToken after path second ident", `entity Foo in Bar::Baz$`}, + {"readToken after ref ident", `action view in foo$`}, + {"readToken after ref double colon", `action view in [foo::$]`}, + {"readToken after ref string", `action view in [Foo::"bar"$]`}, + {"readToken after ref second ident", `action view in [Foo::Bar$]`}, + {"readToken after first ident in list", `entity Foo$`}, + {"readToken after comma in ident list", `entity Foo,$`}, + {"readToken after second ident in list", `entity Foo, Bar$`}, + {"readToken after comma in names", `action foo,$`}, + {"readToken after ident name", `action foo in [bar$]`}, + {"readToken after string name", `action "foo"$`}, + {"readToken after entity type lbracket", `entity Foo in [$`}, + {"readToken after entity type comma", `entity Foo in [Bar,$`}, + {"readToken after entity type rbracket", `entity Foo in [Bar]$`}, + {"readToken after action parent lbracket", `action view in [$`}, + {"readToken after action parent comma", `action view in [foo,$`}, + {"parseQualName error for single parent", `action view in 42`}, + {"readToken after qual name string", `action view in ["foo"$]`}, + {"EOF inside 
appliesTo", `action view appliesTo { principal: User`}, + {"readToken after principal", `action view appliesTo { principal$`}, + {"expect colon after principal", `action view appliesTo { principal User }`}, + {"parseEntityTypes error after principal", `action view appliesTo { principal: $ }`}, + {"readToken after resource", `action view appliesTo { resource$`}, + {"expect colon after resource", `action view appliesTo { resource User }`}, + {"parseEntityTypes error after resource", `action view appliesTo { resource: $ }`}, + {"readToken after context", `action view appliesTo { context$`}, + {"expect colon after context", `action view appliesTo { context User }`}, + {"parseType error after context", `action view appliesTo { context: $ }`}, + {"readToken after comma in appliesTo", `action view appliesTo { principal: User,$`}, + {"parseRecordType error in type", `entity Foo { x: { $ } };`}, + {"readToken after Set", `entity Foo { x: Set$`}, + {"expect langle after Set", `entity Foo { x: Set(Long) };`}, + {"parseType error inside Set", `entity Foo { x: Set<$> };`}, + {"expect rangle after Set element", `entity Foo { x: Set<Long };`}, + {"EOF inside record", `entity Foo {`}, + {"parseName error for attr non-ident non-string", `entity Foo { ;: Long };`}, + {"expect lbrace after namespace path", `namespace Foo entity Bar;`}, + {"type name not ident", `type "bad" = Long;`}, + + // General parse error coverage + {"unterminated namespace", `namespace Foo { entity Bar;`}, + {"invalid token", `entity User $ {};`}, + {"unterminated string", `entity User enum ["unterminated;`}, + {"unterminated block comment", `/* unterminated`}, + {"missing semicolon", `entity User`}, + {"bad declaration keyword", `foobar;`}, + {"non-decl keyword in namespace", `namespace Foo { bogus; }`}, + {"bad annotation name", `@ "bad" entity User;`}, + {"bad annotation value type", `@doc(42) entity User;`}, + {"entity name not ident", `entity "bad";`}, + {"type decl name not ident", `type 42 = Long;`}, + {"enum 
value not string literal", `entity Foo enum [42];`}, + {"enum bad separator", `entity Foo enum ["a" "b"];`}, + {"appliesTo unknown keyword", `action view appliesTo { foo: User };`}, + {"appliesTo not ident", `action view appliesTo { 42: User };`}, + {"appliesTo EOF", `action view appliesTo {`}, + {"record EOF", `entity User {`}, + {"path bad after double colon", `entity User in Foo::42;`}, + {"path not ident", `entity User in 42;`}, + {"entity type list bad separator", `entity User in [Foo Bar];`}, + {"action parent list bad separator", `action view in [foo bar];`}, + {"action name not ident or string", `action 42;`}, + {"decl not ident", `42;`}, + {"record bad attr name", `entity User { 42: Long };`}, + {"type decl missing equals", `type Foo Long;`}, + {"ref bad after double colon", `action view in [Foo::42];`}, + {"action parent ref not ident", `action view in [42];`}, + {"appliesTo missing brace", `action view appliesTo principal: User;`}, + {"appliesTo missing colon", `action view appliesTo { principal User };`}, + {"type not ident or brace", `entity User { name: 42 };`}, + {"namespace EOF", `namespace Foo {`}, + {"record missing colon", `entity User { name String };`}, + {"idents non-ident after comma", `entity A, 42 {};`}, + {"names non-ident after comma", `action foo, 42;`}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := parser.ParseSchema("", []byte(tt.input)) + testutil.Error(t, err) + }) + } +} + +func TestParseDuplicateDeclarations(t *testing.T) { + tests := []struct { + name string + input string + }{ + {"duplicate entity", `entity User; entity User;`}, + {"duplicate entity in namespace", `namespace Foo { entity User; entity User; }`}, + {"duplicate enum", `entity Status enum ["a"]; entity Status enum ["b"];`}, + {"duplicate action", `action view; action view;`}, + {"duplicate action in namespace", `namespace Foo { action view; action view; }`}, + {"duplicate common type", `type Ctx = { x: Long }; type Ctx = { y: Long 
};`}, + {"duplicate namespace", "namespace Foo { entity A; }\nnamespace Foo { entity B; }"}, + {"entity conflicts with enum", `entity User; entity User enum ["a"];`}, + {"enum conflicts with entity", `entity User enum ["a"]; entity User;`}, + {"duplicate multi-name entity", `entity A, A { name: String };`}, + {"duplicate multi-name action", `action read, read;`}, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + _, err := parser.ParseSchema("", []byte(tt.input)) + testutil.Error(t, err) + }) + } +} diff --git a/x/exp/schema/internal/parser/testdata/basic.cedarschema b/x/exp/schema/internal/parser/testdata/basic.cedarschema new file mode 100644 index 00000000..5edd59d6 --- /dev/null +++ b/x/exp/schema/internal/parser/testdata/basic.cedarschema @@ -0,0 +1,28 @@ +namespace PhotoApp { + entity User in [Group] { + name: String, + age?: Long + }; + + entity Group; + + entity Photo { + owner: User, + tags: Set + } tags String; + + action viewPhoto appliesTo { + principal: User, + resource: Photo, + context: {} + }; + + action createPhoto in viewPhoto appliesTo { + principal: [User, Group], + resource: Photo + }; + + type Context = { + ip: ipaddr + }; +} diff --git a/x/exp/schema/internal/parser/token.go b/x/exp/schema/internal/parser/token.go new file mode 100644 index 00000000..f728c71d --- /dev/null +++ b/x/exp/schema/internal/parser/token.go @@ -0,0 +1,253 @@ +package parser + +import ( + "fmt" + "unicode/utf8" + + "github.com/cedar-policy/cedar-go/internal/rust" +) + +type tokenType int + +const ( + tokenEOF tokenType = iota + tokenIdent + tokenString + tokenAt + tokenLBrace + tokenRBrace + tokenLBracket + tokenRBracket + tokenLAngle + tokenRAngle + tokenLParen + tokenRParen + tokenComma + tokenSemicolon + tokenColon + tokenDoubleColon + tokenQuestion + tokenEquals +) + +type position struct { + Filename string + Line int + Column int + Offset int +} + +func (p position) String() string { + name := p.Filename + if name == "" { + name = "" + } + 
return fmt.Sprintf("%s:%d:%d", name, p.Line, p.Column) +} + +type token struct { + Type tokenType + Pos position + Text string +} + +type lexer struct { + src []byte + pos int + line int + col int + filename string +} + +func newLexer(filename string, src []byte) *lexer { + return &lexer{ + src: src, + line: 1, + col: 1, + filename: filename, + } +} + +func (l *lexer) position() position { + return position{ + Filename: l.filename, + Line: l.line, + Column: l.col, + Offset: l.pos, + } +} + +func (l *lexer) errorf(pos position, format string, args ...any) error { + return fmt.Errorf("%s: %s", pos, fmt.Sprintf(format, args...)) +} + +func (l *lexer) peek() rune { + if l.pos >= len(l.src) { + return -1 + } + r, _ := utf8.DecodeRune(l.src[l.pos:]) + return r +} + +func (l *lexer) advance() rune { + if l.pos >= len(l.src) { + return -1 + } + r, size := utf8.DecodeRune(l.src[l.pos:]) + l.pos += size + if r == '\n' { + l.line++ + l.col = 1 + } else { + l.col++ + } + return r +} + +func (l *lexer) skipWhitespaceAndComments() error { + for l.pos < len(l.src) { + r := l.peek() + if r == ' ' || r == '\t' || r == '\r' || r == '\n' { + l.advance() + continue + } + if r == '/' && l.pos+1 < len(l.src) && l.src[l.pos+1] == '/' { + l.advance() + l.advance() + for l.pos < len(l.src) && l.peek() != '\n' { + l.advance() + } + continue + } + if r == '/' && l.pos+1 < len(l.src) && l.src[l.pos+1] == '*' { + pos := l.position() + l.advance() + l.advance() + for { + if l.pos >= len(l.src) { + return l.errorf(pos, "unterminated block comment") + } + if l.peek() == '*' && l.pos+1 < len(l.src) && l.src[l.pos+1] == '/' { + l.advance() + l.advance() + break + } + l.advance() + } + continue + } + break + } + return nil +} + +func isIdentStart(r rune) bool { + return (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') || r == '_' +} + +func isIdentContinue(r rune) bool { + return isIdentStart(r) || (r >= '0' && r <= '9') +} + +func (l *lexer) scanIdent() string { + start := l.pos + l.advance() + for 
l.pos < len(l.src) && isIdentContinue(l.peek()) { + l.advance() + } + return string(l.src[start:l.pos]) +} + +func (l *lexer) scanString() (string, error) { + pos := l.position() + l.advance() // skip opening quote + start := l.pos + for l.pos < len(l.src) { + r := l.peek() + if r == '"' { + raw := l.src[start:l.pos] + l.advance() // skip closing quote + unescaped, _, err := rust.Unquote(raw, false) + if err != nil { + return "", l.errorf(pos, "invalid string escape: %v", err) + } + return unescaped, nil + } + if r == '\n' { + return "", l.errorf(pos, "unterminated string literal") + } + if r == '\\' { + l.advance() + if l.pos >= len(l.src) { + return "", l.errorf(pos, "unterminated string literal") + } + } + l.advance() + } + return "", l.errorf(pos, "unterminated string literal") +} + +func (l *lexer) next() (token, error) { + if err := l.skipWhitespaceAndComments(); err != nil { + return token{}, err + } + + pos := l.position() + + if l.pos >= len(l.src) { + return token{Type: tokenEOF, Pos: pos}, nil + } + + r := l.peek() + + if isIdentStart(r) { + text := l.scanIdent() + return token{Type: tokenIdent, Pos: pos, Text: text}, nil + } + + if r == '"' { + text, err := l.scanString() + if err != nil { + return token{}, err + } + return token{Type: tokenString, Pos: pos, Text: text}, nil + } + + l.advance() + switch r { + case '@': + return token{Type: tokenAt, Pos: pos, Text: "@"}, nil + case '{': + return token{Type: tokenLBrace, Pos: pos, Text: "{"}, nil + case '}': + return token{Type: tokenRBrace, Pos: pos, Text: "}"}, nil + case '[': + return token{Type: tokenLBracket, Pos: pos, Text: "["}, nil + case ']': + return token{Type: tokenRBracket, Pos: pos, Text: "]"}, nil + case '<': + return token{Type: tokenLAngle, Pos: pos, Text: "<"}, nil + case '>': + return token{Type: tokenRAngle, Pos: pos, Text: ">"}, nil + case '(': + return token{Type: tokenLParen, Pos: pos, Text: "("}, nil + case ')': + return token{Type: tokenRParen, Pos: pos, Text: ")"}, nil + case 
',': + return token{Type: tokenComma, Pos: pos, Text: ","}, nil + case ';': + return token{Type: tokenSemicolon, Pos: pos, Text: ";"}, nil + case '?': + return token{Type: tokenQuestion, Pos: pos, Text: "?"}, nil + case '=': + return token{Type: tokenEquals, Pos: pos, Text: "="}, nil + case ':': + if l.peek() == ':' { + l.advance() + return token{Type: tokenDoubleColon, Pos: pos, Text: "::"}, nil + } + return token{Type: tokenColon, Pos: pos, Text: ":"}, nil + default: + return token{}, l.errorf(pos, "unexpected character %q", r) + } +} diff --git a/x/exp/schema/resolved/resolve.go b/x/exp/schema/resolved/resolve.go new file mode 100644 index 00000000..82e9af2f --- /dev/null +++ b/x/exp/schema/resolved/resolve.go @@ -0,0 +1,630 @@ +// Package resolved transforms an AST schema into a resolved schema +// where all type references are fully qualified and common types are inlined. +package resolved + +import ( + "fmt" + "strings" + + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" +) + +// Schema is a Cedar schema with resolved types and indexed declarations. +type Schema struct { + Namespaces map[types.Path]Namespace + Entities map[types.EntityType]Entity + Enums map[types.EntityType]Enum + Actions map[types.EntityUID]Action +} + +// Namespace represents a resolved namespace. +type Namespace struct { + Name types.Path + Annotations Annotations +} + +// Entity is a resolved entity type definition. +type Entity struct { + Name types.EntityType + Annotations Annotations + ParentTypes []types.EntityType + Shape RecordType + Tags IsType +} + +// Enum is a resolved enum entity type definition. +type Enum struct { + Name types.EntityType + Annotations Annotations + Values []types.EntityUID +} + +// AppliesTo defines the resolved principal, resource, and context types for an action. 
+type AppliesTo struct { + Principals []types.EntityType + Resources []types.EntityType + Context RecordType +} + +// Action is a resolved action definition. +type Action struct { + Name types.String + Annotations Annotations + Parents []types.EntityUID + AppliesTo *AppliesTo +} + +// Resolve transforms an AST schema into a fully resolved schema. +func Resolve(s *ast.Schema) (*Schema, error) { + r := &resolverState{ + entityTypes: make(map[types.EntityType]bool), + enumTypes: make(map[types.EntityType]bool), + commonTypes: make(map[types.Path]ast.IsType), + } + + // Phase 1: Register all declarations + if err := r.registerDecls("", s.Entities, s.Enums, s.CommonTypes); err != nil { + return nil, err + } + for nsName, ns := range s.Namespaces { + if err := r.registerDecls(nsName, ns.Entities, ns.Enums, ns.CommonTypes); err != nil { + return nil, err + } + } + + // Phase 2: Check for illegal shadowing (RFC 70) + if err := checkShadowing(s); err != nil { + return nil, err + } + + // Phase 3: Detect cycles in common types + if err := r.detectCommonTypeCycles(); err != nil { + return nil, err + } + + // Phase 4: Resolve everything + result := &Schema{ + Namespaces: make(map[types.Path]Namespace), + Entities: make(map[types.EntityType]Entity), + Enums: make(map[types.EntityType]Enum), + Actions: make(map[types.EntityUID]Action), + } + + // Resolve bare declarations + if err := r.resolveEntities("", s.Entities, result); err != nil { + return nil, err + } + r.resolveEnums("", s.Enums, result) + if err := r.resolveActions("", s.Actions, result); err != nil { + return nil, err + } + + // Resolve namespaced declarations + for nsName, ns := range s.Namespaces { + result.Namespaces[nsName] = Namespace{ + Name: nsName, + Annotations: Annotations(ns.Annotations), + } + if err := r.resolveEntities(nsName, ns.Entities, result); err != nil { + return nil, err + } + r.resolveEnums(nsName, ns.Enums, result) + if err := r.resolveActions(nsName, ns.Actions, result); err != nil { + return 
nil, err + } + } + + // Phase 5: Validate and resolve action membership + if err := r.validateActionMembership(result); err != nil { + return nil, err + } + + return result, nil +} + +type resolverState struct { + entityTypes map[types.EntityType]bool + enumTypes map[types.EntityType]bool + commonTypes map[types.Path]ast.IsType +} + +func (r *resolverState) registerDecls(nsName types.Path, entities ast.Entities, enums ast.Enums, commonTypes ast.CommonTypes) error { + for name := range entities { + if _, ok := enums[name]; ok { + return fmt.Errorf("%q is declared twice", qualifyEntityType(nsName, name)) + } + r.entityTypes[qualifyEntityType(nsName, name)] = true + } + for name := range enums { + r.enumTypes[qualifyEntityType(nsName, name)] = true + } + for name, ct := range commonTypes { + r.commonTypes[qualifyPath(nsName, name)] = ct.Type + } + return nil +} + +// checkShadowing returns an error if any namespaced entity type, common type, +// or action shadows a declaration with the same basename in the empty namespace. 
+// See https://github.com/cedar-policy/rfcs/blob/main/text/0070-disallow-empty-namespace-shadowing.md +func checkShadowing(s *ast.Schema) error { + // Collect bare (empty namespace) entity and common type basenames + bareTypes := make(map[types.Ident]bool) + for name := range s.Entities { + bareTypes[name] = true + } + for name := range s.Enums { + bareTypes[name] = true + } + for name := range s.CommonTypes { + bareTypes[name] = true + } + + // Check each namespace for conflicts + for nsName, ns := range s.Namespaces { + for name := range ns.Entities { + if bareTypes[name] { + return fmt.Errorf("definition of %q illegally shadows the existing definition of %q", string(nsName)+"::"+string(name), name) + } + } + for name := range ns.Enums { + if bareTypes[name] { + return fmt.Errorf("definition of %q illegally shadows the existing definition of %q", string(nsName)+"::"+string(name), name) + } + } + for name := range ns.CommonTypes { + if bareTypes[name] { + return fmt.Errorf("definition of %q illegally shadows the existing definition of %q", string(nsName)+"::"+string(name), name) + } + } + } + + // Check bare action names against namespaced actions + bareActions := make(map[types.String]bool) + for name := range s.Actions { + bareActions[name] = true + } + for nsName, ns := range s.Namespaces { + for name := range ns.Actions { + if bareActions[name] { + return fmt.Errorf("definition of %q illegally shadows the existing definition of %q", + string(nsName)+"::Action::\""+string(name)+"\"", + "Action::\""+string(name)+"\"") + } + } + } + + return nil +} + +func (r *resolverState) detectCommonTypeCycles() error { + // Build dependency graph + deps := make(map[types.Path][]types.Path) + for name, typ := range r.commonTypes { + ns := extractNamespace(name) + refs := collectTypeRefs(typ) + for _, ref := range refs { + resolved := r.resolveTypeRefPath(ns, ref) + if _, ok := r.commonTypes[resolved]; ok { + deps[name] = append(deps[name], resolved) + } + } + } + + // Kahn's 
algorithm for topological sort / cycle detection + inDegree := make(map[types.Path]int) + for name := range r.commonTypes { + inDegree[name] = 0 + } + for _, neighbors := range deps { + for _, n := range neighbors { + inDegree[n]++ + } + } + + var queue []types.Path + for name, degree := range inDegree { + if degree == 0 { + queue = append(queue, name) + } + } + + visited := 0 + for len(queue) > 0 { + node := queue[0] + queue = queue[1:] + visited++ + for _, neighbor := range deps[node] { + inDegree[neighbor]-- + if inDegree[neighbor] == 0 { + queue = append(queue, neighbor) + } + } + } + + if visited != len(r.commonTypes) { + // Find a cycle for the error message + for name := range inDegree { + if inDegree[name] > 0 { + return fmt.Errorf("cycle detected in common type definitions involving %q", name) + } + } + } + + return nil +} + +func (r *resolverState) resolveEntities(nsName types.Path, entities ast.Entities, result *Schema) error { + for name, entity := range entities { + qualName := qualifyEntityType(nsName, name) + resolved := Entity{ + Name: qualName, + Annotations: Annotations(entity.Annotations), + } + for _, ref := range entity.ParentTypes { + et, err := r.resolveEntityTypeRef(nsName, ref) + if err != nil { + return fmt.Errorf("entity %q: %w", qualName, err) + } + resolved.ParentTypes = append(resolved.ParentTypes, et) + } + if entity.Shape != nil { + rec, err := r.resolveRecordType(nsName, entity.Shape) + if err != nil { + return fmt.Errorf("entity %q shape: %w", qualName, err) + } + resolved.Shape = rec + } + if entity.Tags != nil { + tags, err := r.resolveType(nsName, entity.Tags) + if err != nil { + return fmt.Errorf("entity %q tags: %w", qualName, err) + } + resolved.Tags = tags + } + result.Entities[qualName] = resolved + } + return nil +} + +func (r *resolverState) resolveEnums(nsName types.Path, enums ast.Enums, result *Schema) { + for name, enum := range enums { + qualName := qualifyEntityType(nsName, name) + values := make([]types.EntityUID, 
len(enum.Values)) + for i, v := range enum.Values { + values[i] = types.NewEntityUID(qualName, v) + } + result.Enums[qualName] = Enum{ + Name: qualName, + Annotations: Annotations(enum.Annotations), + Values: values, + } + } +} + +func (r *resolverState) resolveActions(nsName types.Path, actions ast.Actions, result *Schema) error { + for name, action := range actions { + actionTypeName := qualifyActionType(nsName) + uid := types.NewEntityUID(actionTypeName, types.String(name)) + resolved := Action{ + Name: name, + Annotations: Annotations(action.Annotations), + } + for _, ref := range action.Parents { + resolved.Parents = append(resolved.Parents, resolveActionParentRef(nsName, ref)) + } + if action.AppliesTo != nil { + at := &AppliesTo{} + for _, p := range action.AppliesTo.Principals { + et, err := r.resolveEntityTypeRef(nsName, p) + if err != nil { + return fmt.Errorf("action %q principal: %w", name, err) + } + at.Principals = append(at.Principals, et) + } + for _, res := range action.AppliesTo.Resources { + et, err := r.resolveEntityTypeRef(nsName, res) + if err != nil { + return fmt.Errorf("action %q resource: %w", name, err) + } + at.Resources = append(at.Resources, et) + } + if action.AppliesTo.Context != nil { + ctx, err := r.resolveType(nsName, action.AppliesTo.Context) + if err != nil { + return fmt.Errorf("action %q context: %w", name, err) + } + rec, ok := ctx.(RecordType) + if !ok { + return fmt.Errorf("action %q context must resolve to a record type", name) + } + at.Context = rec + } else { + at.Context = RecordType{} + } + resolved.AppliesTo = at + } + result.Actions[uid] = resolved + } + return nil +} + +func (r *resolverState) resolveType(ns types.Path, t ast.IsType) (IsType, error) { + switch t := t.(type) { + case ast.StringType: + return StringType{}, nil + case ast.LongType: + return LongType{}, nil + case ast.BoolType: + return BoolType{}, nil + case ast.ExtensionType: + return ExtensionType(t), nil + case ast.SetType: + elem, err := 
r.resolveType(ns, t.Element) + if err != nil { + return nil, err + } + return SetType{Element: elem}, nil + case ast.RecordType: + return r.resolveRecordType(ns, t) + case ast.EntityTypeRef: + et, err := r.resolveEntityTypeRef(ns, t) + if err != nil { + return nil, err + } + return EntityType(et), nil + case ast.TypeRef: + return r.resolveTypeRef(ns, t) + default: + panic(fmt.Sprintf("unknown AST type: %T", t)) + } +} + +func (r *resolverState) resolveRecordType(ns types.Path, rec ast.RecordType) (RecordType, error) { + result := make(RecordType, len(rec)) + for name, attr := range rec { + t, err := r.resolveType(ns, attr.Type) + if err != nil { + return nil, fmt.Errorf("attribute %q: %w", name, err) + } + result[name] = Attribute{ + Type: t, + Optional: attr.Optional, + Annotations: Annotations(attr.Annotations), + } + } + return result, nil +} + +func (r *resolverState) resolveEntityTypeRef(ns types.Path, ref ast.EntityTypeRef) (types.EntityType, error) { + path := types.Path(ref) + // If it's already a qualified path (contains ::), resolve directly + if strings.Contains(string(path), "::") { + et := types.EntityType(path) + if r.entityTypes[et] || r.enumTypes[et] { + return et, nil + } + return "", fmt.Errorf("undefined entity type %q", path) + } + // Unqualified: try NS::Name first, then bare Name + if ns != "" { + qualified := types.EntityType(string(ns) + "::" + string(path)) + if r.entityTypes[qualified] || r.enumTypes[qualified] { + return qualified, nil + } + } + bare := types.EntityType(path) + if r.entityTypes[bare] || r.enumTypes[bare] { + return bare, nil + } + return "", fmt.Errorf("undefined entity type %q", path) +} + +// resolveTypeRef resolves a type reference (TypeRef) following the Cedar disambiguation rules: +// 1. Check if NS::N is declared as a common type +// 2. Check if NS::N is declared as an entity type +// 3. Check if N (empty namespace) is declared as a common type +// 4. Check if N (empty namespace) is declared as an entity type +// 5. 
Check if N is a built-in type +// 6. Error +func (r *resolverState) resolveTypeRef(ns types.Path, ref ast.TypeRef) (IsType, error) { + // Qualified: resolve directly + if strings.Contains(string(ref), "::") { + return r.resolveQualifiedTypeRef(ref) + } + + // Unqualified: follow disambiguation rules + if ns != "" { + qualifiedPath := types.Path(string(ns) + "::" + string(ref)) + // 1. Check NS::N as common type + if ct, ok := r.commonTypes[qualifiedPath]; ok { + return r.resolveType(ns, ct) + } + // 2. Check NS::N as entity type + qualifiedET := types.EntityType(qualifiedPath) + if r.entityTypes[qualifiedET] || r.enumTypes[qualifiedET] { + return EntityType(qualifiedET), nil + } + } + + // 3. Check N as common type in empty namespace + path := types.Path(ref) + if ct, ok := r.commonTypes[path]; ok { + return r.resolveType("", ct) + } + + // 4. Check N as entity type in empty namespace + bareET := types.EntityType(ref) + if r.entityTypes[bareET] || r.enumTypes[bareET] { + return EntityType(bareET), nil + } + + // 5. 
Check built-in types + if t := lookupBuiltin(path); t != nil { + return t, nil + } + + return nil, fmt.Errorf("undefined type %q", ref) +} + +func (r *resolverState) resolveQualifiedTypeRef(ref ast.TypeRef) (IsType, error) { + // Check for __cedar:: prefix first + if strings.HasPrefix(string(ref), "__cedar::") { + builtinName := ref[len("__cedar::"):] + if t := lookupBuiltin(types.Path(builtinName)); t != nil { + return t, nil + } + return nil, fmt.Errorf("undefined built-in type %q", ref) + } + + // Try as common type first + path := types.Path(ref) + if ct, ok := r.commonTypes[path]; ok { + ns := extractNamespace(path) + return r.resolveType(ns, ct) + } + // Try as entity type + et := types.EntityType(ref) + if r.entityTypes[et] || r.enumTypes[et] { + return EntityType(et), nil + } + return nil, fmt.Errorf("undefined type %q", ref) +} + +func (r *resolverState) resolveTypeRefPath(ns types.Path, ref ast.TypeRef) types.Path { + if strings.Contains(string(ref), "::") { + return types.Path(ref) + } + if ns != "" { + qualifiedPath := types.Path(string(ns) + "::" + string(ref)) + if _, ok := r.commonTypes[qualifiedPath]; ok { + return qualifiedPath + } + } + return types.Path(ref) +} + +func resolveActionParentRef(ns types.Path, ref ast.ParentRef) types.EntityUID { + if types.EntityType(ref.Type) == "" { + // Bare reference: action in same namespace + actionType := qualifyActionType(ns) + return types.NewEntityUID(actionType, ref.ID) + } + return types.NewEntityUID(types.EntityType(ref.Type), ref.ID) +} + +func (r *resolverState) validateActionMembership(result *Schema) error { + // Build action UID set + actionUIDs := make(map[types.EntityUID]bool) + for uid := range result.Actions { + actionUIDs[uid] = true + } + + // Validate references and detect cycles + for uid, action := range result.Actions { + for _, parent := range action.Parents { + if !actionUIDs[parent] { + return fmt.Errorf("action %s: undefined parent action %s", uid, parent) + } + } + } + + // Detect 
cycles using DFS + visited := make(map[types.EntityUID]int) // 0=unvisited, 1=visiting, 2=done + var visit func(types.EntityUID) error + visit = func(uid types.EntityUID) error { + switch visited[uid] { + case 1: + return fmt.Errorf("cycle detected in action hierarchy involving %s", uid) + case 2: + return nil + } + visited[uid] = 1 + action := result.Actions[uid] + for _, parent := range action.Parents { + if err := visit(parent); err != nil { + return err + } + } + visited[uid] = 2 + return nil + } + + for uid := range result.Actions { + if err := visit(uid); err != nil { + return err + } + } + + return nil +} + +func lookupBuiltin(path types.Path) IsType { + switch path { + case "String": + return StringType{} + case "Long": + return LongType{} + case "Bool", "Boolean": + return BoolType{} + case "ipaddr": + return ExtensionType("ipaddr") + case "decimal": + return ExtensionType("decimal") + case "datetime": + return ExtensionType("datetime") + case "duration": + return ExtensionType("duration") + default: + return nil + } +} + +func collectTypeRefs(t ast.IsType) []ast.TypeRef { + switch t := t.(type) { + case ast.TypeRef: + return []ast.TypeRef{t} + case ast.SetType: + return collectTypeRefs(t.Element) + case ast.RecordType: + var refs []ast.TypeRef + for _, attr := range t { + refs = append(refs, collectTypeRefs(attr.Type)...) 
+ } + return refs + case ast.BoolType, ast.EntityTypeRef, ast.ExtensionType, ast.LongType, ast.StringType: + return nil + default: + panic(fmt.Sprintf("unknown AST type: %T", t)) + } +} + +func qualifyEntityType(ns types.Path, name types.Ident) types.EntityType { + if ns != "" { + return types.EntityType(string(ns) + "::" + string(name)) + } + return types.EntityType(name) +} + +func qualifyPath(ns types.Path, name types.Ident) types.Path { + if ns != "" { + return types.Path(string(ns) + "::" + string(name)) + } + return types.Path(name) +} + +func qualifyActionType(ns types.Path) types.EntityType { + if ns != "" { + return types.EntityType(string(ns) + "::Action") + } + return types.EntityType("Action") +} + +func extractNamespace(path types.Path) types.Path { + s := string(path) + if idx := strings.LastIndex(s, "::"); idx >= 0 { + return types.Path(s[:idx]) + } + return "" +} diff --git a/x/exp/schema/resolved/resolve_internal_test.go b/x/exp/schema/resolved/resolve_internal_test.go new file mode 100644 index 00000000..9ab91bbf --- /dev/null +++ b/x/exp/schema/resolved/resolve_internal_test.go @@ -0,0 +1,80 @@ +package resolved + +import ( + "testing" + + "github.com/cedar-policy/cedar-go/internal/testutil" + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" +) + +func TestResolveTypeDefault(t *testing.T) { + // Exercise the default branch of resolveType (unreachable with real AST types). 
+ r := &resolverState{ + entityTypes: make(map[types.EntityType]bool), + enumTypes: make(map[types.EntityType]bool), + commonTypes: make(map[types.Path]ast.IsType), + } + testutil.Panic(t, func() { + _, _ = r.resolveType("", nil) + }) +} + +func TestResolveTypePath(t *testing.T) { + r := &resolverState{ + commonTypes: map[types.Path]ast.IsType{ + "NS::A": ast.StringType{}, + "B": ast.LongType{}, + }, + entityTypes: make(map[types.EntityType]bool), + enumTypes: make(map[types.EntityType]bool), + } + + // __cedar:: prefix returns path unchanged + p := r.resolveTypeRefPath("NS", "__cedar::String") + testutil.Equals(t, p, types.Path("__cedar::String")) + + // Already qualified (contains ::) returns path unchanged + p = r.resolveTypeRefPath("NS", "Other::Foo") + testutil.Equals(t, p, types.Path("Other::Foo")) + + // Unqualified in namespace resolves to NS::A + p = r.resolveTypeRefPath("NS", "A") + testutil.Equals(t, p, types.Path("NS::A")) +} + +func TestResolveActionParentRef(t *testing.T) { + // Exercise both branches of resolveActionParentRef. + + // Bare reference + uid := resolveActionParentRef("NS", ast.ParentRef{ID: "view"}) + testutil.Equals(t, uid, types.NewEntityUID("NS::Action", "view")) + + // Typed reference + uid = resolveActionParentRef("NS", ast.ParentRef{Type: "Other::Action", ID: "edit"}) + testutil.Equals(t, uid, types.NewEntityUID("Other::Action", "edit")) +} + +func TestCollectTypeRefsDefault(t *testing.T) { + // Exercise the non-container type branch + refs := collectTypeRefs(ast.StringType{}) + testutil.Equals(t, len(refs), 0) + + // Exercise the impossible to hit branch + testutil.Panic(t, func() { + collectTypeRefs(nil) + }) +} + +func TestDetectCommonTypeCyclesBuiltinRef(t *testing.T) { + // Verify cycle detection works correctly with __cedar:: refs. 
+ r := &resolverState{ + commonTypes: map[types.Path]ast.IsType{ + "NS::A": ast.TypeRef("__cedar::String"), + }, + entityTypes: make(map[types.EntityType]bool), + enumTypes: make(map[types.EntityType]bool), + } + err := r.detectCommonTypeCycles() + testutil.OK(t, err) +} diff --git a/x/exp/schema/resolved/resolve_test.go b/x/exp/schema/resolved/resolve_test.go new file mode 100644 index 00000000..3cb35e6f --- /dev/null +++ b/x/exp/schema/resolved/resolve_test.go @@ -0,0 +1,1301 @@ +package resolved_test + +import ( + "testing" + + "github.com/cedar-policy/cedar-go/internal/testutil" + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" + "github.com/cedar-policy/cedar-go/x/exp/schema/resolved" +) + +func TestResolveEmpty(t *testing.T) { + s := &ast.Schema{} + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, len(result.Entities), 0) + testutil.Equals(t, len(result.Actions), 0) +} + +func TestResolveBasicEntity(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "name": ast.Attribute{Type: ast.TypeRef("String")}, + "age": ast.Attribute{Type: ast.TypeRef("Long"), Optional: true}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + user := result.Entities["User"] + testutil.Equals(t, user.Shape["name"].Type, resolved.IsType(resolved.StringType{})) + testutil.Equals(t, user.Shape["age"].Type, resolved.IsType(resolved.LongType{})) + testutil.Equals(t, user.Shape["age"].Optional, true) +} + +func TestResolveEntityParents(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ParentTypes: []ast.EntityTypeRef{"Group"}}, + "Group": ast.Entity{}, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, result.Entities["User"].ParentTypes, []types.EntityType{"Group"}) +} + +func TestResolveEntityParentsUndefined(t *testing.T) { + s := &ast.Schema{ + Entities: 
ast.Entities{ + "User": ast.Entity{ParentTypes: []ast.EntityTypeRef{"NonExistent"}}, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveCommonType(t *testing.T) { + s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "Context": ast.CommonType{ + Type: ast.RecordType{ + "ip": ast.Attribute{Type: ast.TypeRef("ipaddr")}, + }, + }, + }, + Actions: ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Context: ast.TypeRef("Context"), + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + uid := types.NewEntityUID("Action", "view") + view := result.Actions[uid] + testutil.Equals(t, view.AppliesTo != nil, true) + _, ok := view.AppliesTo.Context["ip"] + testutil.Equals(t, ok, true) +} + +func TestResolveCommonTypeCycle(t *testing.T) { + s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "A": ast.CommonType{Type: ast.TypeRef("B")}, + "B": ast.CommonType{Type: ast.TypeRef("A")}, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveCommonTypeIndirectCycle(t *testing.T) { + s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "A": ast.CommonType{Type: ast.TypeRef("B")}, + "B": ast.CommonType{Type: ast.TypeRef("C")}, + "C": ast.CommonType{Type: ast.TypeRef("A")}, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveUndefinedType(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.TypeRef("NonExistent")}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveBuiltinTypes(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "s": ast.Attribute{Type: ast.TypeRef("String")}, + "l": ast.Attribute{Type: ast.TypeRef("Long")}, + "b": ast.Attribute{Type: ast.TypeRef("Bool")}, + "b2": ast.Attribute{Type: ast.TypeRef("Boolean")}, + "ip": ast.Attribute{Type: 
ast.TypeRef("ipaddr")}, + "dec": ast.Attribute{Type: ast.TypeRef("decimal")}, + "dt": ast.Attribute{Type: ast.TypeRef("datetime")}, + "dur": ast.Attribute{Type: ast.TypeRef("duration")}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + user := result.Entities["User"] + testutil.Equals(t, user.Shape["s"].Type, resolved.IsType(resolved.StringType{})) + testutil.Equals(t, user.Shape["l"].Type, resolved.IsType(resolved.LongType{})) + testutil.Equals(t, user.Shape["b"].Type, resolved.IsType(resolved.BoolType{})) + testutil.Equals(t, user.Shape["b2"].Type, resolved.IsType(resolved.BoolType{})) + testutil.Equals(t, user.Shape["ip"].Type, resolved.IsType(resolved.ExtensionType("ipaddr"))) + testutil.Equals(t, user.Shape["dec"].Type, resolved.IsType(resolved.ExtensionType("decimal"))) + testutil.Equals(t, user.Shape["dt"].Type, resolved.IsType(resolved.ExtensionType("datetime"))) + testutil.Equals(t, user.Shape["dur"].Type, resolved.IsType(resolved.ExtensionType("duration"))) +} + +func TestResolveCedarNamespace(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "name": ast.Attribute{Type: ast.TypeRef("__cedar::String")}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, result.Entities["User"].Shape["name"].Type, resolved.IsType(resolved.StringType{})) +} + +func TestResolveCedarNamespaceUndefined(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.TypeRef("__cedar::Bogus")}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveTypeDisambiguation(t *testing.T) { + t.Parallel() + + t.Run("common_over_entity", func(t *testing.T) { + t.Parallel() + // Example from the Cedar spec: https://docs.cedarpolicy.com/schema/human-readable-schema.html#schema-typeDisambiguation + // When "name" is declared as both a 
common type and an entity type in the same namespace, + // the common type wins. + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "name": ast.CommonType{ + Type: ast.RecordType{ + "first": ast.Attribute{Type: ast.TypeRef("String")}, + "last": ast.Attribute{Type: ast.TypeRef("String")}, + }, + }, + }, + Entities: ast.Entities{ + "name": ast.Entity{}, + "User": ast.Entity{ + Shape: ast.RecordType{ + "n": ast.Attribute{Type: ast.TypeRef("name")}, + }, + }, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + user := result.Entities["NS::User"] + // "name" should resolve to the common type (a record), not the entity type + rec, ok := user.Shape["n"].Type.(resolved.RecordType) + testutil.Equals(t, ok, true) + testutil.Equals(t, len(rec), 2) + }) + + t.Run("entity_over_builtin", func(t *testing.T) { + t.Parallel() + // An entity type named "Long" shadows the built-in Long primitive. + // A reference to "Long" should resolve to the entity type, not LongType. + // The built-in is still accessible via __cedar::Long. + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "Long": ast.Entity{}, + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.TypeRef("Long")}, + "y": ast.Attribute{Type: ast.TypeRef("__cedar::Long")}, + }, + }, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + user := result.Entities["NS::User"] + // "Long" resolves to entity type NS::Long, not the built-in + testutil.Equals(t, user.Shape["x"].Type, resolved.IsType(resolved.EntityType("NS::Long"))) + // "__cedar::Long" still resolves to the built-in + testutil.Equals(t, user.Shape["y"].Type, resolved.IsType(resolved.LongType{})) + }) + + t.Run("common_over_builtin", func(t *testing.T) { + t.Parallel() + // A common type named "Long" shadows the built-in Long primitive. 
+ // A reference to "Long" should resolve to the common type, not LongType. + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "Long": ast.CommonType{ + Type: ast.RecordType{ + "value": ast.Attribute{Type: ast.TypeRef("__cedar::Long")}, + }, + }, + }, + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.TypeRef("Long")}, + "y": ast.Attribute{Type: ast.TypeRef("__cedar::Long")}, + }, + }, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + user := result.Entities["NS::User"] + // "Long" resolves to the common type (a record), not the built-in + rec, ok := user.Shape["x"].Type.(resolved.RecordType) + testutil.Equals(t, ok, true) + testutil.Equals(t, len(rec), 1) + // "__cedar::Long" still resolves to the built-in + testutil.Equals(t, user.Shape["y"].Type, resolved.IsType(resolved.LongType{})) + }) +} + +func TestResolveNamespaceEntityRef(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{ParentTypes: []ast.EntityTypeRef{"Group"}}, + "Group": ast.Entity{}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, result.Entities["NS::User"].ParentTypes, []types.EntityType{"NS::Group"}) +} + +func TestResolveCrossNamespaceEntityRef(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "A": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{ParentTypes: []ast.EntityTypeRef{"B::Group"}}, + }, + }, + "B": ast.Namespace{ + Entities: ast.Entities{ + "Group": ast.Entity{}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, result.Entities["A::User"].ParentTypes, []types.EntityType{"B::Group"}) +} + +func TestResolveAction(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{}, + "Photo": ast.Entity{}, + }, + Actions: 
ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{"User"}, + Resources: []ast.EntityTypeRef{"Photo"}, + Context: ast.RecordType{}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + uid := types.NewEntityUID("Action", "view") + view := result.Actions[uid] + testutil.Equals(t, view.AppliesTo.Principals, []types.EntityType{"User"}) + testutil.Equals(t, view.AppliesTo.Resources, []types.EntityType{"Photo"}) +} + +func TestResolveActionParents(t *testing.T) { + s := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{Parents: []ast.ParentRef{ast.ParentRefFromID("readOnly")}}, + "readOnly": ast.Action{}, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + uid := types.NewEntityUID("Action", "view") + view := result.Actions[uid] + testutil.Equals(t, view.Parents, []types.EntityUID{types.NewEntityUID("Action", "readOnly")}) +} + +func TestResolveActionCycle(t *testing.T) { + s := &ast.Schema{ + Actions: ast.Actions{ + "a": ast.Action{Parents: []ast.ParentRef{ast.ParentRefFromID("b")}}, + "b": ast.Action{Parents: []ast.ParentRef{ast.ParentRefFromID("a")}}, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveActionUndefinedParent(t *testing.T) { + s := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{Parents: []ast.ParentRef{ast.ParentRefFromID("nonExistent")}}, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveEnum(t *testing.T) { + s := &ast.Schema{ + Enums: ast.Enums{ + "Status": ast.Enum{ + Values: []types.String{"active", "inactive"}, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + status := result.Enums["Status"] + testutil.Equals(t, status.Values, []types.EntityUID{ + types.NewEntityUID("Status", "active"), + types.NewEntityUID("Status", "inactive"), + }) +} + +func TestResolveEnumAsEntityType(t *testing.T) { + s := &ast.Schema{ + Enums: ast.Enums{ + "Status": 
ast.Enum{Values: []types.String{"active"}}, + }, + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "s": ast.Attribute{Type: ast.TypeRef("Status")}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, result.Entities["User"].Shape["s"].Type, resolved.IsType(resolved.EntityType("Status"))) +} + +func TestResolveSetType(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "tags": ast.Attribute{Type: ast.Set(ast.TypeRef("String"))}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + tags := result.Entities["User"].Shape["tags"] + set, ok := tags.Type.(resolved.SetType) + testutil.Equals(t, ok, true) + testutil.Equals(t, set.Element, resolved.IsType(resolved.StringType{})) +} + +func TestResolveEntityWithTags(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Tags: ast.TypeRef("String"), + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, result.Entities["User"].Tags, resolved.IsType(resolved.StringType{})) +} + +func TestResolveNamespacedAction(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Actions: ast.Actions{ + "view": ast.Action{}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + uid := types.NewEntityUID("NS::Action", "view") + _, ok := result.Actions[uid] + testutil.Equals(t, ok, true) +} + +func TestResolveActionQualifiedParent(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Actions: ast.Actions{ + "view": ast.Action{ + Parents: []ast.ParentRef{ + ast.NewParentRef("NS::Action", "readOnly"), + }, + }, + "readOnly": ast.Action{}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + uid := types.NewEntityUID("NS::Action", "view") + view := result.Actions[uid] + testutil.Equals(t, 
view.Parents, []types.EntityUID{types.NewEntityUID("NS::Action", "readOnly")}) +} + +func TestResolveActionContextNull(t *testing.T) { + s := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{}, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + uid := types.NewEntityUID("Action", "view") + view := result.Actions[uid] + testutil.Equals(t, len(view.AppliesTo.Context), 0) +} + +func TestResolveActionContextNonRecord(t *testing.T) { + s := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Context: ast.TypeRef("String"), + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveActionPrincipalUndefined(t *testing.T) { + s := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{"NonExistent"}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveActionResourceUndefined(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{"User": ast.Entity{}}, + Actions: ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{"User"}, + Resources: []ast.EntityTypeRef{"NonExistent"}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveCommonTypeChain(t *testing.T) { + s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "A": ast.CommonType{Type: ast.TypeRef("B")}, + "B": ast.CommonType{Type: ast.RecordType{ + "x": ast.Attribute{Type: ast.TypeRef("Long")}, + }}, + }, + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "a": ast.Attribute{Type: ast.TypeRef("A")}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + a := result.Entities["User"].Shape["a"] + rec, ok := a.Type.(resolved.RecordType) + testutil.Equals(t, ok, true) + testutil.Equals(t, len(rec), 1) +} + +func 
TestResolveQualifiedCommonType(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "Ctx": ast.CommonType{ + Type: ast.RecordType{}, + }, + }, + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "c": ast.Attribute{Type: ast.TypeRef("NS::Ctx")}, + }, + }, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + c := result.Entities["NS::User"].Shape["c"] + _, ok := c.Type.(resolved.RecordType) + testutil.Equals(t, ok, true) +} + +func TestResolveQualifiedUndefined(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.TypeRef("NS::NonExistent")}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveEntityTagsUndefined(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Tags: ast.TypeRef("NonExistent"), + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveEntityShapeAttrUndefined(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.Set(ast.TypeRef("NonExistent"))}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveNamespaceOutput(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Annotations: ast.Annotations{"doc": "test"}, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + ns := result.Namespaces["NS"] + testutil.Equals(t, ns.Name, types.Path("NS")) + testutil.Equals(t, types.String(ns.Annotations["doc"]), types.String("test")) +} + +func TestResolveEntityTypeRef(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "friend": ast.Attribute{Type: ast.EntityTypeRef("User")}, + }, + }, + }, + } + result, 
err := resolved.Resolve(s) + testutil.OK(t, err) + friend := result.Entities["User"].Shape["friend"] + testutil.Equals(t, friend.Type, resolved.IsType(resolved.EntityType("User"))) +} + +func TestResolveEntityTypeRefUndefined(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.EntityTypeRef("NonExistent")}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveEntityTypeRefQualified(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "A": ast.Namespace{ + Entities: ast.Entities{ + "Foo": ast.Entity{}, + }, + }, + }, + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.EntityTypeRef("A::Foo")}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + x := result.Entities["User"].Shape["x"] + testutil.Equals(t, x.Type, resolved.IsType(resolved.EntityType("A::Foo"))) +} + +func TestResolveEntityTypeRefQualifiedUndefined(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.EntityTypeRef("A::NonExistent")}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveDirectPrimitiveTypes(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "s": ast.Attribute{Type: ast.StringType{}}, + "l": ast.Attribute{Type: ast.LongType{}}, + "b": ast.Attribute{Type: ast.BoolType{}}, + "e": ast.Attribute{Type: ast.ExtensionType("ipaddr")}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + user := result.Entities["User"] + testutil.Equals(t, user.Shape["s"].Type, resolved.IsType(resolved.StringType{})) + testutil.Equals(t, user.Shape["l"].Type, resolved.IsType(resolved.LongType{})) + testutil.Equals(t, user.Shape["b"].Type, 
resolved.IsType(resolved.BoolType{})) + testutil.Equals(t, user.Shape["e"].Type, resolved.IsType(resolved.ExtensionType("ipaddr"))) +} + +func TestResolveNamespacedEntitiesError(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "x": ast.Attribute{Type: ast.TypeRef("NonExistent")}, + }, + }, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveNamespacedEnumError(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Enums: ast.Enums{ + "Status": ast.Enum{Values: []types.String{"a"}}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, len(result.Enums), 1) +} + +func TestResolveNamespacedActionsError(t *testing.T) { + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{}, + }, + Actions: ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Context: ast.TypeRef("NonExistent"), + }, + }, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveEntityParentsValidationError(t *testing.T) { + // Entity referencing an undefined parent type errors during resolution. 
+ s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + ParentTypes: []ast.EntityTypeRef{"NonExistent"}, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveQualifiedEntityType(t *testing.T) { + // Test resolving a qualified entity type ref + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{}, + "Admin": ast.Entity{ParentTypes: []ast.EntityTypeRef{"NS::User"}}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, result.Entities["NS::Admin"].ParentTypes, []types.EntityType{"NS::User"}) +} + +func TestResolveNamespaceQualifiedKeyError(t *testing.T) { + // Using a namespace-qualified key in the Entities map is invalid; + // keys must be bare Idents. A qualified key gets double-qualified + // during resolution, causing references to it to fail. + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "NS::User": ast.Entity{}, + "Admin": ast.Entity{ParentTypes: []ast.EntityTypeRef{"NS::User"}}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveNamespaceQualifiedEnumKeyError(t *testing.T) { + // Same as above but for Enums: a namespace-qualified key causes + // double-qualification, so references to it fail. 
+ s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Enums: ast.Enums{ + "NS::Color": ast.Enum{Values: []types.String{"red"}}, + }, + Entities: ast.Entities{ + "Item": ast.Entity{ + Shape: ast.RecordType{ + "color": ast.Attribute{Type: ast.TypeRef("Color")}, + }, + }, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveQualifiedEntityTypeRefAsType(t *testing.T) { + // Test that a qualified entity type resolves when used through TypeRef + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "ref": ast.Attribute{Type: ast.TypeRef("NS::Admin")}, + }, + }, + "Admin": ast.Entity{}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + ref := result.Entities["NS::User"].Shape["ref"] + testutil.Equals(t, ref.Type, resolved.IsType(resolved.EntityType("NS::Admin"))) +} + +func TestResolveEmptyNamespaceCommonType(t *testing.T) { + // Test that empty namespace common types are found from a different namespace + s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "Ctx": ast.CommonType{Type: ast.RecordType{}}, + }, + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "c": ast.Attribute{Type: ast.TypeRef("Ctx")}, + }, + }, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + c := result.Entities["NS::User"].Shape["c"] + _, ok := c.Type.(resolved.RecordType) + testutil.Equals(t, ok, true) +} + +func TestResolveEmptyNamespaceEntityType(t *testing.T) { + // Test that empty namespace entity types are found from a different namespace + s := &ast.Schema{ + Entities: ast.Entities{ + "Global": ast.Entity{}, + }, + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{ + Shape: ast.RecordType{ + "g": ast.Attribute{Type: ast.TypeRef("Global")}, + }, + }, + }, + 
}, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + g := result.Entities["NS::User"].Shape["g"] + testutil.Equals(t, g.Type, resolved.IsType(resolved.EntityType("Global"))) +} + +func TestResolveAnnotationsOnAttributes(t *testing.T) { + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ + Annotations: ast.Annotations{"doc": "user"}, + Shape: ast.RecordType{ + "name": ast.Attribute{ + Type: ast.TypeRef("String"), + Annotations: ast.Annotations{"doc": "the name"}, + }, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + user := result.Entities["User"] + testutil.Equals(t, types.String(user.Annotations["doc"]), types.String("user")) + testutil.Equals(t, types.String(user.Shape["name"].Annotations["doc"]), types.String("the name")) +} + +func TestResolveEnumAnnotations(t *testing.T) { + s := &ast.Schema{ + Enums: ast.Enums{ + "Status": ast.Enum{ + Annotations: ast.Annotations{"doc": "status"}, + Values: []types.String{"a"}, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, types.String(result.Enums["Status"].Annotations["doc"]), types.String("status")) +} + +func TestResolveActionAnnotations(t *testing.T) { + s := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + Annotations: ast.Annotations{"doc": "view action"}, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + uid := types.NewEntityUID("Action", "view") + testutil.Equals(t, types.String(result.Actions[uid].Annotations["doc"]), types.String("view action")) +} + +func TestResolveBareEnumsError(t *testing.T) { + // Test that bare enums with no errors pass through resolveEnums + s := &ast.Schema{ + Enums: ast.Enums{ + "A": ast.Enum{Values: []types.String{"x"}}, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + testutil.Equals(t, result.Enums["A"].Name, types.EntityType("A")) +} + +func TestResolveBareActionsError(t *testing.T) { + // Test bare actions with an 
error in context resolution + s := &ast.Schema{ + Actions: ast.Actions{ + "view": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Context: ast.TypeRef("NonExistent"), + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveCommonTypeCycleInSet(t *testing.T) { + s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "A": ast.CommonType{Type: ast.Set(ast.TypeRef("B"))}, + "B": ast.CommonType{Type: ast.TypeRef("A")}, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveCommonTypeCycleInRecord(t *testing.T) { + s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "A": ast.CommonType{Type: ast.RecordType{ + "x": ast.Attribute{Type: ast.TypeRef("B")}, + }}, + "B": ast.CommonType{Type: ast.TypeRef("A")}, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveNamespacedEntityTypeRef(t *testing.T) { + // Exercise resolveTypeRef line 421-423: unqualified name resolves to + // a namespaced entity type (not common type) via disambiguation rule 2. + // Use a Set attribute where "User" should resolve to NS::User entity type. + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "User": ast.Entity{}, + "Group": ast.Entity{ + Shape: ast.RecordType{ + "members": ast.Attribute{Type: ast.SetType{Element: ast.TypeRef("User")}}, + }, + }, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + members := result.Entities["NS::Group"].Shape["members"] + setType, ok := members.Type.(resolved.SetType) + testutil.Equals(t, ok, true) + testutil.Equals(t, setType.Element, resolved.IsType(resolved.EntityType("NS::User"))) +} + +func TestResolveNamespacedEnumTypeRef(t *testing.T) { + // Exercise resolveTypeRef line 421-423: unqualified name resolves to + // a namespaced enum type via disambiguation rule 2. 
+ s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Enums: ast.Enums{ + "Color": ast.Enum{Values: []types.String{"red", "blue"}}, + }, + Entities: ast.Entities{ + "Item": ast.Entity{ + Shape: ast.RecordType{ + "color": ast.Attribute{Type: ast.TypeRef("Color")}, + }, + }, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + color := result.Entities["NS::Item"].Shape["color"] + testutil.Equals(t, color.Type, resolved.IsType(resolved.EntityType("NS::Color"))) +} + +func TestResolveUndefinedParents(t *testing.T) { + // Resolve returns an error when an entity references an undefined parent type. + s := &ast.Schema{ + Entities: ast.Entities{ + "User": ast.Entity{ParentTypes: []ast.EntityTypeRef{"Nonexistent"}}, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveCedarBuiltinInTypePath(t *testing.T) { + // Exercise resolveTypeRefPath line 462-464: __cedar:: prefix in cycle detection. + s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "A": ast.CommonType{Type: ast.TypeRef("__cedar::String")}, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + _ = result +} + +func TestResolveQualifiedTypePath(t *testing.T) { + // Exercise resolveTypeRefPath line 465-467: qualified path with :: in cycle detection. + s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "A": ast.CommonType{Type: ast.TypeRef("NS::B")}, + "B": ast.CommonType{Type: ast.StringType{}}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + _ = result +} + +func TestResolveNamespacedCommonTypePath(t *testing.T) { + // Exercise resolveTypeRefPath line 470-472: namespaced common type ref in cycle detection. 
+ s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "A": ast.CommonType{Type: ast.TypeRef("B")}, + "B": ast.CommonType{Type: ast.StringType{}}, + }, + }, + }, + } + result, err := resolved.Resolve(s) + testutil.OK(t, err) + _ = result +} + +func TestResolveShadowingEntityEntity(t *testing.T) { + // Entity in namespace shadows entity in empty namespace. + s := &ast.Schema{ + Entities: ast.Entities{ + "T": ast.Entity{}, + }, + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "T": ast.Entity{}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveShadowingCommonEntity(t *testing.T) { + // Common type in namespace shadows entity in empty namespace. + s := &ast.Schema{ + Entities: ast.Entities{ + "T": ast.Entity{}, + }, + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "T": ast.CommonType{Type: ast.StringType{}}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveShadowingEntityCommon(t *testing.T) { + // Entity in namespace shadows common type in empty namespace. + s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "T": ast.CommonType{Type: ast.StringType{}}, + }, + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Entities: ast.Entities{ + "T": ast.Entity{}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveShadowingCommonCommon(t *testing.T) { + // Common type in namespace shadows common type in empty namespace. 
+ s := &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "T": ast.CommonType{Type: ast.StringType{}}, + }, + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + CommonTypes: ast.CommonTypes{ + "T": ast.CommonType{Type: ast.LongType{}}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveShadowingEnumEntity(t *testing.T) { + // Enum in namespace shadows entity in empty namespace. + s := &ast.Schema{ + Entities: ast.Entities{ + "T": ast.Entity{}, + }, + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Enums: ast.Enums{ + "T": ast.Enum{Values: []types.String{"a"}}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveShadowingActionAction(t *testing.T) { + // Action in namespace shadows action in empty namespace. + s := &ast.Schema{ + Actions: ast.Actions{ + "A": ast.Action{}, + }, + Namespaces: ast.Namespaces{ + "NS": ast.Namespace{ + Actions: ast.Actions{ + "A": ast.Action{}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.Error(t, err) +} + +func TestResolveShadowingAllowed(t *testing.T) { + // No shadowing: same name in two non-empty namespaces is fine. 
+ s := &ast.Schema{ + Namespaces: ast.Namespaces{ + "A": ast.Namespace{ + Entities: ast.Entities{ + "T": ast.Entity{}, + }, + }, + "B": ast.Namespace{ + Entities: ast.Entities{ + "T": ast.Entity{}, + }, + }, + }, + } + _, err := resolved.Resolve(s) + testutil.OK(t, err) +} + +func TestResolveDuplicateEntityEnum(t *testing.T) { + _, err := resolved.Resolve(&ast.Schema{ + Entities: ast.Entities{ + "Foo": ast.Entity{}, + }, + Enums: ast.Enums{ + "Foo": ast.Enum{Values: []types.String{"a"}}, + }, + }) + testutil.Error(t, err) + testutil.Equals(t, err.Error(), `"Foo" is declared twice`) +} + +func TestResolveDuplicateEntityEnumInNamespace(t *testing.T) { + _, err := resolved.Resolve(&ast.Schema{ + Namespaces: ast.Namespaces{ + "Baz": ast.Namespace{ + Entities: ast.Entities{ + "Foo": ast.Entity{}, + }, + Enums: ast.Enums{ + "Foo": ast.Enum{Values: []types.String{"Bar"}}, + }, + }, + }, + }) + testutil.Error(t, err) + testutil.Equals(t, err.Error(), `"Baz::Foo" is declared twice`) +} diff --git a/x/exp/schema/resolved/types.go b/x/exp/schema/resolved/types.go new file mode 100644 index 00000000..3bc3909c --- /dev/null +++ b/x/exp/schema/resolved/types.go @@ -0,0 +1,62 @@ +package resolved + +import ( + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" +) + +// Annotations is a resolved annotation map. +type Annotations ast.Annotations + +// IsType is the sealed sum type for resolved Cedar schema types. +// Unlike ast.IsType, there is no TypeRef (common types are inlined) +// and EntityTypeRef is replaced with EntityType. +// +//sumtype:decl +type IsType interface { + isType() +} + +// StringType represents the Cedar String type. +type StringType struct{} + +func (StringType) isType() { _ = 0 } + +// LongType represents the Cedar Long type. +type LongType struct{} + +func (LongType) isType() { _ = 0 } + +// BoolType represents the Cedar Bool type. 
+type BoolType struct{} + +func (BoolType) isType() { _ = 0 } + +// ExtensionType represents a Cedar extension type. +type ExtensionType types.Ident + +func (ExtensionType) isType() { _ = 0 } + +// SetType represents the Cedar Set type. +type SetType struct { + Element IsType +} + +func (SetType) isType() { _ = 0 } + +// Attribute describes a single attribute in a resolved record type. +type Attribute struct { + Type IsType + Optional bool + Annotations Annotations +} + +// RecordType maps attribute names to their resolved types. +type RecordType map[types.String]Attribute + +func (RecordType) isType() { _ = 0 } + +// EntityType represents a reference to an entity type in a resolved schema. +type EntityType types.EntityType + +func (EntityType) isType() { _ = 0 } diff --git a/x/exp/schema/resolved/types_internal_test.go b/x/exp/schema/resolved/types_internal_test.go new file mode 100644 index 00000000..2b06298b --- /dev/null +++ b/x/exp/schema/resolved/types_internal_test.go @@ -0,0 +1,13 @@ +package resolved + +import "testing" + +func TestIsTypeMarkers(t *testing.T) { + StringType{}.isType() + LongType{}.isType() + BoolType{}.isType() + ExtensionType("ipaddr").isType() + SetType{}.isType() + RecordType{}.isType() + EntityType("User").isType() +} diff --git a/x/exp/schema/schema.go b/x/exp/schema/schema.go index af8707c4..9a9a6e41 100644 --- a/x/exp/schema/schema.go +++ b/x/exp/schema/schema.go @@ -1,83 +1,73 @@ +// Package schema provides schema parsing, serialization, and resolution. 
package schema import ( - "bytes" - "encoding/json" - "fmt" - - "github.com/cedar-policy/cedar-go/internal/schema/ast" - "github.com/cedar-policy/cedar-go/internal/schema/parser" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" + "github.com/cedar-policy/cedar-go/x/exp/schema/internal/json" + "github.com/cedar-policy/cedar-go/x/exp/schema/internal/parser" + "github.com/cedar-policy/cedar-go/x/exp/schema/resolved" ) -// Schema is a description of entities and actions that are allowed for a PolicySet. They can be used to validate policies -// and entity definitions and also provide documentation. -// -// Schemas can be represented in either JSON (*JSON functions) or Human-readable formats (*Cedar functions) just like policies. -// Marshalling and unmarshalling between the formats is allowed. +// Schema provides parsing and marshaling for Cedar schemas. type Schema struct { - filename string - jsonSchema ast.JSONSchema - humanSchema *ast.Schema + filename string + schema *ast.Schema } -// UnmarshalCedar parses and stores the human-readable schema from src and returns an error if the schema is invalid. -// -// Any errors returned will have file positions matching filename. -func (old *Schema) UnmarshalCedar(src []byte) (err error) { - var s Schema - s.humanSchema, err = parser.ParseFile(old.filename, src) - if err != nil { +// NewSchemaFromAST creates a Schema from an AST. +func NewSchemaFromAST(in *ast.Schema) *Schema { + return &Schema{schema: in} +} + +// SetFilename sets the filename for error reporting. +func (s *Schema) SetFilename(filename string) { + s.filename = filename +} + +// MarshalJSON encodes the Schema in the JSON format. +func (s *Schema) MarshalJSON() ([]byte, error) { + jsonSchema := (*json.Schema)(s.astOrEmpty()) + return jsonSchema.MarshalJSON() +} + +// UnmarshalJSON parses a Schema in the JSON format. 
+func (s *Schema) UnmarshalJSON(b []byte) error { + var jsonSchema json.Schema + if err := jsonSchema.UnmarshalJSON(b); err != nil { return err } - if old.filename != "" { - s.filename = old.filename - } - *old = s + s.schema = (*ast.Schema)(&jsonSchema) return nil } -// MarshalCedar serializes the schema into the human readable format. +// MarshalCedar encodes the Schema in the human-readable format. func (s *Schema) MarshalCedar() ([]byte, error) { - if s.jsonSchema != nil { - s.humanSchema = ast.ConvertJSON2Human(s.jsonSchema) - } - if s.humanSchema == nil { - return nil, fmt.Errorf("schema is empty") - } - var buf bytes.Buffer - err := ast.Format(s.humanSchema, &buf) - return buf.Bytes(), err + return parser.MarshalSchema(s.astOrEmpty()), nil } -// UnmarshalJSON deserializes the JSON schema from src or returns an error if the JSON is not valid schema JSON. -func (old *Schema) UnmarshalJSON(src []byte) error { - var s Schema - err := json.Unmarshal(src, &s.jsonSchema) +// UnmarshalCedar parses a Schema in the human-readable format. +func (s *Schema) UnmarshalCedar(b []byte) error { + schema, err := parser.ParseSchema(s.filename, b) if err != nil { return err } - s.filename = old.filename - *old = s + s.schema = schema return nil } -// MarshalJSON serializes the schema into the JSON format. -// -// If the schema was loaded from UnmarshalCedar, it will convert the human-readable format into the JSON format. -// An error is returned if the schema is invalid. -func (s *Schema) MarshalJSON() (out []byte, err error) { - if s.humanSchema != nil { - // Error should not be possible since s.humanSchema comes from our parser. - // If it happens, we return empty JSON. - s.jsonSchema = ast.ConvertHuman2JSON(s.humanSchema) - } - if s.jsonSchema == nil { - return nil, nil - } - return json.Marshal(s.jsonSchema) +// AST returns the underlying AST. 
+func (s *Schema) AST() *ast.Schema { + return s.astOrEmpty() } -// SetFilename sets the filename for the schema in the returned error messagers from Unmarshal*. -func (s *Schema) SetFilename(filename string) { - s.filename = filename +// Resolve returns a resolved.Schema with type references resolved and declarations indexed. +func (s *Schema) Resolve() (*resolved.Schema, error) { + return resolved.Resolve(s.astOrEmpty()) +} + +func (s *Schema) astOrEmpty() *ast.Schema { + if s.schema == nil { + return &ast.Schema{} + } + return s.schema } diff --git a/x/exp/schema/schema_test.go b/x/exp/schema/schema_test.go index d7359160..7bfb1b5f 100644 --- a/x/exp/schema/schema_test.go +++ b/x/exp/schema/schema_test.go @@ -1,200 +1,768 @@ -package schema +package schema_test import ( "encoding/json" - "reflect" + "strings" "testing" + + "github.com/cedar-policy/cedar-go/internal/testutil" + "github.com/cedar-policy/cedar-go/types" + "github.com/cedar-policy/cedar-go/x/exp/schema" + "github.com/cedar-policy/cedar-go/x/exp/schema/ast" + "github.com/cedar-policy/cedar-go/x/exp/schema/resolved" ) -func TestSchemaCedarMarshalUnmarshal(t *testing.T) { - tests := []struct { - name string - input string - wantErr bool - }{ - { - name: "valid schema", - input: `namespace foo { - action Bar appliesTo { - principal: String, - resource: String - }; - }`, - wantErr: false, - }, - { - name: "empty schema", - input: "", - wantErr: false, - }, - { - name: "invalid schema", - input: `namespace foo { - action Bar = { - invalid syntax here - }; - }`, - wantErr: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var s Schema - s.SetFilename("test.cedar") - err := s.UnmarshalCedar([]byte(tt.input)) - if (err != nil) != tt.wantErr { - t.Errorf("UnmarshalCedar() error = %v, wantErr %v", err, tt.wantErr) - return - } - - if tt.wantErr { - return - } - - // Test marshaling - out, err := s.MarshalCedar() - if err != nil { - t.Errorf("MarshalCedar() error = %v", err) 
-				return
-			}
-
-			// For valid schemas, unmarshaling and marshaling should preserve content
-			if !tt.wantErr {
-				var s2 Schema
-				s2.SetFilename("test.cedar")
-				err = s2.UnmarshalCedar(out)
-				if err != nil {
-					t.Errorf("UnmarshalCedar() second pass error = %v", err)
-					return
-				}
-
-				out2, err := s2.MarshalCedar()
-				if err != nil {
-					t.Errorf("MarshalCedar() second pass error = %v", err)
-					return
-				}
-
-				if !reflect.DeepEqual(out, out2) {
-					t.Errorf("Marshal/Unmarshal cycle produced different results:\nFirst: %s\nSecond: %s", out, out2)
-				}
-			}
-		})
-	}
-}
+var wantCedar = `
+@doc("Address information")
+@personal_information
+type Address = {
+    @also("town")
+    city: String,
+    country: Country,
+    street: String,
+    zipcode?: String
+};
+
+type decimal = {
+    decimal: Long,
+    whole: Long
+};
+
+entity Admin;
+
+entity Country;
+
+entity System in Admin {
+    version: String
+};
+
+entity Role enum ["superuser", "operator"];
+
+action audit appliesTo {
+    principal: Admin,
+    resource: [MyApp::Document, System]
+};
+
+@doc("Doc manager")
+namespace MyApp {
+    type Metadata = {
+        created: datetime,
+        tags: Set<String>
+    };
+
+    entity Department {
+        budget: decimal
+    };

-func TestSchemaCedarMarshalEmpty(t *testing.T) {
-	var s Schema
-	s.SetFilename("test.cedar")
-	_, err := s.MarshalCedar()
-	if err == nil {
-		t.Errorf("MarshalCedar() should return an error for empty schema")
-		return
-	}
+    entity Document {
+        public: Bool,
+        title: String
+    };
+
+    entity Group in Department {
+        metadata: Metadata,
+        name: String
+    };
+
+    @doc("User entity")
+    entity User in Group {
+        active: Bool,
+        address: Address,
+        email: String,
+        level: Long
+    };
+
+    entity Status enum ["draft", "published", "archived"];
+
+    @doc("View or edit document")
+    action edit appliesTo {
+        principal: User,
+        resource: Document,
+        context: {
+            ip: ipaddr,
+            timestamp: datetime
+        }
+    };
+
+    action manage appliesTo {
+        principal: User,
+        resource: [Document, Group]
+    };
+
+    @doc("View or edit document")
+    action view
appliesTo { + principal: User, + resource: Document, + context: { + ip: ipaddr, + timestamp: datetime + } + }; } +` -func TestSchemaJSONMarshalEmpty(t *testing.T) { - var s Schema - s.SetFilename("test.json") - out, err := s.MarshalJSON() - if err != nil { - t.Errorf("MarshalJSON() error = %v", err) - return - } - if len(out) != 0 { - t.Errorf("MarshalJSON() produced non-empty output for empty schema") - } +var wantJSON = `{ + "": { + "entityTypes": { + "Admin": {}, + "Country": {}, + "Role": { + "enum": ["superuser", "operator"] + }, + "System": { + "memberOfTypes": ["Admin"], + "shape": { + "type": "Record", + "attributes": { + "version": { + "type": "EntityOrCommon", + "name": "String" + } + } + } + } + }, + "actions": { + "audit": { + "appliesTo": { + "principalTypes": ["Admin"], + "resourceTypes": ["MyApp::Document", "System"] + } + } + }, + "commonTypes": { + "Address": { + "type": "Record", + "attributes": { + "city": { + "type": "EntityOrCommon", + "name": "String", + "annotations": { + "also": "town" + } + }, + "country": { + "type": "EntityOrCommon", + "name": "Country" + }, + "street": { + "type": "EntityOrCommon", + "name": "String" + }, + "zipcode": { + "type": "EntityOrCommon", + "name": "String", + "required": false + } + }, + "annotations": { + "doc": "Address information", + "personal_information": "" + } + }, + "decimal": { + "type": "Record", + "attributes": { + "decimal": { + "type": "EntityOrCommon", + "name": "Long" + }, + "whole": { + "type": "EntityOrCommon", + "name": "Long" + } + } + } + } + }, + "MyApp": { + "annotations": { + "doc": "Doc manager" + }, + "entityTypes": { + "Department": { + "shape": { + "type": "Record", + "attributes": { + "budget": { + "type": "EntityOrCommon", + "name": "decimal" + } + } + } + }, + "Document": { + "shape": { + "type": "Record", + "attributes": { + "public": { + "type": "EntityOrCommon", + "name": "Bool" + }, + "title": { + "type": "EntityOrCommon", + "name": "String" + } + } + } + }, + "Group": { + 
"memberOfTypes": ["Department"], + "shape": { + "type": "Record", + "attributes": { + "metadata": { + "type": "EntityOrCommon", + "name": "Metadata" + }, + "name": { + "type": "EntityOrCommon", + "name": "String" + } + } + } + }, + "Status": { + "enum": ["draft", "published", "archived"] + }, + "User": { + "memberOfTypes": ["Group"], + "shape": { + "type": "Record", + "attributes": { + "active": { + "type": "EntityOrCommon", + "name": "Bool" + }, + "address": { + "type": "EntityOrCommon", + "name": "Address" + }, + "email": { + "type": "EntityOrCommon", + "name": "String" + }, + "level": { + "type": "EntityOrCommon", + "name": "Long" + } + } + }, + "annotations": { + "doc": "User entity" + } + } + }, + "actions": { + "edit": { + "appliesTo": { + "principalTypes": ["User"], + "resourceTypes": ["Document"], + "context": { + "type": "Record", + "attributes": { + "ip": { + "type": "EntityOrCommon", + "name": "ipaddr" + }, + "timestamp": { + "type": "EntityOrCommon", + "name": "datetime" + } + } + } + }, + "annotations": { + "doc": "View or edit document" + } + }, + "manage": { + "appliesTo": { + "principalTypes": ["User"], + "resourceTypes": ["Document", "Group"] + } + }, + "view": { + "appliesTo": { + "principalTypes": ["User"], + "resourceTypes": ["Document"], + "context": { + "type": "Record", + "attributes": { + "ip": { + "type": "EntityOrCommon", + "name": "ipaddr" + }, + "timestamp": { + "type": "EntityOrCommon", + "name": "datetime" + } + } + } + }, + "annotations": { + "doc": "View or edit document" + } + } + }, + "commonTypes": { + "Metadata": { + "type": "Record", + "attributes": { + "created": { + "type": "EntityOrCommon", + "name": "datetime" + }, + "tags": { + "type": "Set", + "element": { + "type": "EntityOrCommon", + "name": "String" + } + } + } + } + } + } +}` + +// wantAST is the expected AST structure for the test schema. +// The Cedar parser produces ast.TypeRef for all type names (including +// builtins like String, Long, Bool). 
Resolution happens later. +var wantAST = &ast.Schema{ + CommonTypes: ast.CommonTypes{ + "Address": ast.CommonType{ + Annotations: ast.Annotations{ + "doc": "Address information", + "personal_information": "", + }, + Type: ast.RecordType{ + "city": ast.Attribute{ + Type: ast.TypeRef("String"), + Annotations: ast.Annotations{ + "also": "town", + }, + }, + "country": ast.Attribute{Type: ast.TypeRef("Country")}, + "street": ast.Attribute{Type: ast.TypeRef("String")}, + "zipcode": ast.Attribute{Type: ast.TypeRef("String"), Optional: true}, + }, + }, + "decimal": ast.CommonType{ + Type: ast.RecordType{ + "decimal": ast.Attribute{Type: ast.TypeRef("Long")}, + "whole": ast.Attribute{Type: ast.TypeRef("Long")}, + }, + }, + }, + Entities: ast.Entities{ + "Admin": ast.Entity{}, + "Country": ast.Entity{}, + "System": ast.Entity{ + ParentTypes: []ast.EntityTypeRef{"Admin"}, + Shape: ast.RecordType{ + "version": ast.Attribute{Type: ast.TypeRef("String")}, + }, + }, + }, + Enums: ast.Enums{ + "Role": ast.Enum{ + Values: []types.String{"superuser", "operator"}, + }, + }, + Actions: ast.Actions{ + "audit": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{"Admin"}, + Resources: []ast.EntityTypeRef{"MyApp::Document", "System"}, + }, + }, + }, + Namespaces: ast.Namespaces{ + "MyApp": ast.Namespace{ + Annotations: ast.Annotations{ + "doc": "Doc manager", + }, + CommonTypes: ast.CommonTypes{ + "Metadata": ast.CommonType{ + Type: ast.RecordType{ + "created": ast.Attribute{Type: ast.TypeRef("datetime")}, + "tags": ast.Attribute{Type: ast.SetType{Element: ast.TypeRef("String")}}, + }, + }, + }, + Entities: ast.Entities{ + "Department": ast.Entity{ + Shape: ast.RecordType{ + "budget": ast.Attribute{Type: ast.TypeRef("decimal")}, + }, + }, + "Document": ast.Entity{ + Shape: ast.RecordType{ + "public": ast.Attribute{Type: ast.TypeRef("Bool")}, + "title": ast.Attribute{Type: ast.TypeRef("String")}, + }, + }, + "Group": ast.Entity{ + ParentTypes: 
[]ast.EntityTypeRef{"Department"}, + Shape: ast.RecordType{ + "metadata": ast.Attribute{Type: ast.TypeRef("Metadata")}, + "name": ast.Attribute{Type: ast.TypeRef("String")}, + }, + }, + "User": ast.Entity{ + ParentTypes: []ast.EntityTypeRef{"Group"}, + Annotations: ast.Annotations{ + "doc": "User entity", + }, + Shape: ast.RecordType{ + "active": ast.Attribute{Type: ast.TypeRef("Bool")}, + "address": ast.Attribute{Type: ast.TypeRef("Address")}, + "email": ast.Attribute{Type: ast.TypeRef("String")}, + "level": ast.Attribute{Type: ast.TypeRef("Long")}, + }, + }, + }, + Enums: ast.Enums{ + "Status": ast.Enum{ + Values: []types.String{"draft", "published", "archived"}, + }, + }, + Actions: ast.Actions{ + "edit": ast.Action{ + Annotations: ast.Annotations{ + "doc": "View or edit document", + }, + AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{"User"}, + Resources: []ast.EntityTypeRef{"Document"}, + Context: ast.RecordType{ + "ip": ast.Attribute{Type: ast.TypeRef("ipaddr")}, + "timestamp": ast.Attribute{Type: ast.TypeRef("datetime")}, + }, + }, + }, + "manage": ast.Action{ + AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{"User"}, + Resources: []ast.EntityTypeRef{"Document", "Group"}, + }, + }, + "view": ast.Action{ + Annotations: ast.Annotations{ + "doc": "View or edit document", + }, + AppliesTo: &ast.AppliesTo{ + Principals: []ast.EntityTypeRef{"User"}, + Resources: []ast.EntityTypeRef{"Document"}, + Context: ast.RecordType{ + "ip": ast.Attribute{Type: ast.TypeRef("ipaddr")}, + "timestamp": ast.Attribute{Type: ast.TypeRef("datetime")}, + }, + }, + }, + }, + }, + }, } -func TestSchemaJSONMarshalUnmarshal(t *testing.T) { - tests := []struct { - name string - input string - wantErr bool - }{ - { - name: "valid JSON schema", - input: `{ - "entityTypes": { - "User": { - "shape": { - "type": "Record", - "attributes": { - "name": {"type": "String"} - } - } - } - } - }`, - wantErr: false, - }, - { - name: "empty JSON", - input: "{}", - wantErr: 
false, - }, - { - name: "invalid JSON", - input: "{invalid json", - wantErr: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - var s Schema - s.SetFilename("test.json") - err := s.UnmarshalJSON([]byte(tt.input)) - if (err != nil) != tt.wantErr { - t.Errorf("UnmarshalJSON() error = %v, wantErr %v", err, tt.wantErr) - return - } - - if tt.wantErr { - return - } - - // Test marshaling - out, err := s.MarshalJSON() - if err != nil { - t.Errorf("MarshalJSON() error = %v", err) - return - } - - // Verify JSON validity - var raw interface{} - if err := json.Unmarshal(out, &raw); err != nil { - t.Errorf("MarshalJSON() produced invalid JSON: %v", err) - } - }) - } +// wantResolved is the expected resolved schema structure. +// All type references have been fully qualified and common types inlined. +var wantResolved = &resolved.Schema{ + Namespaces: map[types.Path]resolved.Namespace{ + "MyApp": { + Name: "MyApp", + Annotations: resolved.Annotations{ + "doc": "Doc manager", + }, + }, + }, + Entities: map[types.EntityType]resolved.Entity{ + "Admin": { + Name: "Admin", + }, + "Country": { + Name: "Country", + }, + "System": { + Name: "System", + ParentTypes: []types.EntityType{"Admin"}, + Shape: resolved.RecordType{ + "version": resolved.Attribute{Type: resolved.StringType{}}, + }, + }, + "MyApp::Department": { + Name: "MyApp::Department", + Shape: resolved.RecordType{ + "budget": resolved.Attribute{ + Type: resolved.RecordType{ + "decimal": resolved.Attribute{Type: resolved.LongType{}}, + "whole": resolved.Attribute{Type: resolved.LongType{}}, + }, + }, + }, + }, + "MyApp::Document": { + Name: "MyApp::Document", + Shape: resolved.RecordType{ + "public": resolved.Attribute{Type: resolved.BoolType{}}, + "title": resolved.Attribute{Type: resolved.StringType{}}, + }, + }, + "MyApp::Group": { + Name: "MyApp::Group", + ParentTypes: []types.EntityType{"MyApp::Department"}, + Shape: resolved.RecordType{ + "metadata": resolved.Attribute{ + Type: 
resolved.RecordType{ + "created": resolved.Attribute{Type: resolved.ExtensionType("datetime")}, + "tags": resolved.Attribute{Type: resolved.SetType{Element: resolved.StringType{}}}, + }, + }, + "name": resolved.Attribute{Type: resolved.StringType{}}, + }, + }, + "MyApp::User": { + Name: "MyApp::User", + Annotations: resolved.Annotations{"doc": "User entity"}, + ParentTypes: []types.EntityType{"MyApp::Group"}, + Shape: resolved.RecordType{ + "active": resolved.Attribute{Type: resolved.BoolType{}}, + "address": resolved.Attribute{ + Type: resolved.RecordType{ + "city": resolved.Attribute{ + Type: resolved.StringType{}, + Annotations: resolved.Annotations{"also": "town"}, + }, + "country": resolved.Attribute{Type: resolved.EntityType("Country")}, + "street": resolved.Attribute{Type: resolved.StringType{}}, + "zipcode": resolved.Attribute{Type: resolved.StringType{}, Optional: true}, + }, + }, + "email": resolved.Attribute{Type: resolved.StringType{}}, + "level": resolved.Attribute{Type: resolved.LongType{}}, + }, + }, + }, + Enums: map[types.EntityType]resolved.Enum{ + "Role": { + Name: "Role", + Values: []types.EntityUID{types.NewEntityUID("Role", "superuser"), types.NewEntityUID("Role", "operator")}, + }, + "MyApp::Status": { + Name: "MyApp::Status", + Values: []types.EntityUID{types.NewEntityUID("MyApp::Status", "draft"), types.NewEntityUID("MyApp::Status", "published"), types.NewEntityUID("MyApp::Status", "archived")}, + }, + }, + Actions: map[types.EntityUID]resolved.Action{ + types.NewEntityUID("Action", "audit"): { + Name: "audit", + AppliesTo: &resolved.AppliesTo{ + Principals: []types.EntityType{"Admin"}, + Resources: []types.EntityType{"MyApp::Document", "System"}, + Context: resolved.RecordType{}, + }, + }, + types.NewEntityUID("MyApp::Action", "edit"): { + Name: "edit", + Annotations: resolved.Annotations{"doc": "View or edit document"}, + AppliesTo: &resolved.AppliesTo{ + Principals: []types.EntityType{"MyApp::User"}, + Resources: 
[]types.EntityType{"MyApp::Document"}, + Context: resolved.RecordType{ + "ip": resolved.Attribute{Type: resolved.ExtensionType("ipaddr")}, + "timestamp": resolved.Attribute{Type: resolved.ExtensionType("datetime")}, + }, + }, + }, + types.NewEntityUID("MyApp::Action", "manage"): { + Name: "manage", + AppliesTo: &resolved.AppliesTo{ + Principals: []types.EntityType{"MyApp::User"}, + Resources: []types.EntityType{"MyApp::Document", "MyApp::Group"}, + Context: resolved.RecordType{}, + }, + }, + types.NewEntityUID("MyApp::Action", "view"): { + Name: "view", + Annotations: resolved.Annotations{"doc": "View or edit document"}, + AppliesTo: &resolved.AppliesTo{ + Principals: []types.EntityType{"MyApp::User"}, + Resources: []types.EntityType{"MyApp::Document"}, + Context: resolved.RecordType{ + "ip": resolved.Attribute{Type: resolved.ExtensionType("ipaddr")}, + "timestamp": resolved.Attribute{Type: resolved.ExtensionType("datetime")}, + }, + }, + }, + }, } -func TestSchemaCrossFormatMarshaling(t *testing.T) { - t.Run("JSON to Cedar Marshalling", func(t *testing.T) { - var s Schema - err := s.UnmarshalJSON([]byte(`{}`)) - if err != nil { - t.Fatalf("UnmarshalJSON() error = %v", err) - } +func TestSchema(t *testing.T) { + t.Parallel() - _, err = s.MarshalCedar() - if err != nil { - t.Error("MarshalCedar() should not return error after UnmarshalJSON") - } + t.Run("UnmarshalCedar", func(t *testing.T) { + t.Parallel() + var s schema.Schema + err := s.UnmarshalCedar([]byte(wantCedar)) + testutil.OK(t, err) + testutil.Equals(t, s.AST(), wantAST) }) - t.Run("Cedar to JSON marshaling allowed", func(t *testing.T) { - var s Schema - s.SetFilename("test.cedar") - err := s.UnmarshalCedar([]byte(`namespace test {}`)) - if err != nil { - t.Fatalf("UnmarshalCedar() error = %v", err) - } + t.Run("UnmarshalJSON", func(t *testing.T) { + t.Parallel() + var s schema.Schema + err := s.UnmarshalJSON([]byte(wantJSON)) + testutil.OK(t, err) + testutil.Equals(t, s.AST(), wantAST) + }) - _, err = 
s.MarshalJSON() - if err != nil { - t.Errorf("MarshalJSON() error = %v", err) - } + t.Run("MarshalCedar", func(t *testing.T) { + t.Parallel() + s := schema.NewSchemaFromAST(wantAST) + b, err := s.MarshalCedar() + testutil.OK(t, err) + stringEquals(t, string(b), wantCedar) + }) + + t.Run("MarshalJSON", func(t *testing.T) { + t.Parallel() + s := schema.NewSchemaFromAST(wantAST) + b, err := s.MarshalJSON() + testutil.OK(t, err) + stringEquals(t, string(normalizeJSON(t, b)), string(normalizeJSON(t, []byte(wantJSON)))) + }) + + t.Run("Resolve", func(t *testing.T) { + t.Parallel() + s := schema.NewSchemaFromAST(wantAST) + r, err := s.Resolve() + testutil.OK(t, err) + testutil.Equals(t, r, wantResolved) + }) + + t.Run("CedarRoundTrip", func(t *testing.T) { + t.Parallel() + var s schema.Schema + testutil.OK(t, s.UnmarshalCedar([]byte(wantCedar))) + b, err := s.MarshalCedar() + testutil.OK(t, err) + var s2 schema.Schema + testutil.OK(t, s2.UnmarshalCedar(b)) + testutil.Equals(t, s2.AST(), wantAST) + }) + + t.Run("JSONRoundTrip", func(t *testing.T) { + t.Parallel() + var s schema.Schema + testutil.OK(t, s.UnmarshalJSON([]byte(wantJSON))) + b, err := s.MarshalJSON() + testutil.OK(t, err) + var s2 schema.Schema + testutil.OK(t, s2.UnmarshalJSON(b)) + testutil.Equals(t, s2.AST(), wantAST) + }) + + t.Run("CedarToJSONRoundTrip", func(t *testing.T) { + t.Parallel() + var s schema.Schema + testutil.OK(t, s.UnmarshalCedar([]byte(wantCedar))) + jsonBytes, err := s.MarshalJSON() + testutil.OK(t, err) + var s2 schema.Schema + testutil.OK(t, s2.UnmarshalJSON(jsonBytes)) + testutil.Equals(t, s2.AST(), wantAST) + }) + + t.Run("JSONToCedarRoundTrip", func(t *testing.T) { + t.Parallel() + var s schema.Schema + testutil.OK(t, s.UnmarshalJSON([]byte(wantJSON))) + cedarBytes, err := s.MarshalCedar() + testutil.OK(t, err) + var s2 schema.Schema + testutil.OK(t, s2.UnmarshalCedar(cedarBytes)) + testutil.Equals(t, s2.AST(), wantAST) + }) + + t.Run("JSONMarshalInterface", func(t *testing.T) { + 
t.Parallel() + s := schema.NewSchemaFromAST(wantAST) + b, err := json.Marshal(s) + testutil.OK(t, err) + var s2 schema.Schema + testutil.OK(t, json.Unmarshal(b, &s2)) + testutil.Equals(t, s2.AST(), wantAST) }) + + t.Run("UnmarshalCedarErr", func(t *testing.T) { + t.Parallel() + var s schema.Schema + const filename = "path/to/my-file-name.cedarschema" + s.SetFilename(filename) + err := s.UnmarshalCedar([]byte("LSKJDFN")) + testutil.Error(t, err) + testutil.FatalIf(t, !strings.Contains(err.Error(), filename+":1:1"), "expected %q in error: %v", filename, err) + }) + + t.Run("UnmarshalJSONErr", func(t *testing.T) { + t.Parallel() + var s schema.Schema + err := s.UnmarshalJSON([]byte("LSKJDFN")) + testutil.Error(t, err) + }) + + t.Run("ResolveErr", func(t *testing.T) { + t.Parallel() + var s schema.Schema + testutil.OK(t, s.UnmarshalCedar([]byte(`entity User in [NonExistent];`))) + _, err := s.Resolve() + testutil.Error(t, err) + }) + + t.Run("ZeroValueSchema", func(t *testing.T) { + t.Parallel() + var s schema.Schema + + b, err := s.MarshalCedar() + testutil.OK(t, err) + testutil.Equals(t, string(b), "") + + jb, err := s.MarshalJSON() + testutil.OK(t, err) + testutil.Equals(t, string(jb), "{}") + + r, err := s.Resolve() + testutil.OK(t, err) + testutil.Equals(t, r != nil, true) + + testutil.Equals(t, s.AST() != nil, true) + }) + + t.Run("EmptySchema", func(t *testing.T) { + t.Parallel() + s := schema.NewSchemaFromAST(&ast.Schema{}) + b, err := s.MarshalCedar() + testutil.OK(t, err) + testutil.Equals(t, string(b), "") + + jb, err := s.MarshalJSON() + testutil.OK(t, err) + testutil.Equals(t, string(jb), "{}") + }) +} + +func stringEquals(t *testing.T, got, want string) { + t.Helper() + testutil.Equals(t, strings.TrimSpace(got), strings.TrimSpace(want)) +} + +func normalizeJSON(t *testing.T, in []byte) []byte { + t.Helper() + var out any + err := json.Unmarshal(in, &out) + testutil.OK(t, err) + b, err := json.MarshalIndent(out, "", " ") + testutil.OK(t, err) + return b }