code
stringlengths
26
870k
docstring
stringlengths
1
65.6k
func_name
stringlengths
1
194
language
stringclasses
1 value
repo
stringlengths
8
68
path
stringlengths
5
194
url
stringlengths
46
254
license
stringclasses
4 values
def reduce_CreateOverloadedQualifiedLink(self, *kids):
    """%reduce OVERLOADED PtrQuals LINK PathNodeName OptExtendingSimple
       OptPtrTarget
    """
    # NOTE: the docstring is the grammar production read by the parser
    # generator; keep it in sync with the unpacking below.
    _ovl, quals_node, _link, name_node, bases_node, target_node = kids
    self.val = qlast.CreateConcreteLink(
        name=name_node.val,
        bases=bases_node.val,
        declared_overloaded=True,
        is_required=quals_node.val.required,
        cardinality=quals_node.val.cardinality,
        target=target_node.val,
    )
%reduce OVERLOADED PtrQuals LINK PathNodeName OptExtendingSimple OptPtrTarget
reduce_CreateOverloadedQualifiedLink
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateComputableLink(self, *kids):
    """%reduce LINK PathNodeName ASSIGN Expr"""
    # `link foo := <expr>` — a computed link; the expression becomes
    # the target of the new concrete link.
    self.val = qlast.CreateConcreteLink(
        name=kids[1].val,
        target=kids[3].val,
    )
%reduce LINK PathNodeName ASSIGN Expr
reduce_CreateComputableLink
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateQualifiedComputableLink(self, *kids):
    """%reduce PtrQuals LINK PathNodeName ASSIGN Expr"""
    # Computed link with leading qualifiers (required/cardinality).
    self.val = qlast.CreateConcreteLink(
        name=kids[2].val,
        is_required=kids[0].val.required,
        cardinality=kids[0].val.cardinality,
        target=kids[4].val,
    )
%reduce PtrQuals LINK PathNodeName ASSIGN Expr
reduce_CreateQualifiedComputableLink
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateAccessPolicy(self, *kids):
    """%reduce ACCESS POLICY ShortNodeName OptWhenBlock AccessPolicyAction
       AccessKindList OptUsingBlock CreateAccessPolicySDLCommandsBlock
    """
    _a, _p, name_n, when_n, action_n, kinds_n, using_n, cmds_n = kids
    # AccessKindList produces a list of lists; flatten it.
    kinds = [kind for group in kinds_n.val for kind in group]
    self.val = qlast.CreateAccessPolicy(
        name=name_n.val,
        condition=when_n.val,
        action=action_n.val,
        access_kinds=kinds,
        expr=using_n.val,
        commands=cmds_n.val,
    )
%reduce ACCESS POLICY ShortNodeName OptWhenBlock AccessPolicyAction AccessKindList OptUsingBlock CreateAccessPolicySDLCommandsBlock
reduce_CreateAccessPolicy
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateAccessPolicy(self, *kids):
    """%reduce ACCESS POLICY ShortNodeName OptWhenBlock AccessPolicyAction
       AccessKindList OptUsingBlock
    """
    # Variant without a trailing commands block.
    _a, _p, name_n, when_n, action_n, kinds_n, using_n = kids
    # AccessKindList produces a list of lists; flatten it.
    kinds = [kind for group in kinds_n.val for kind in group]
    self.val = qlast.CreateAccessPolicy(
        name=name_n.val,
        condition=when_n.val,
        action=action_n.val,
        access_kinds=kinds,
        expr=using_n.val,
    )
%reduce ACCESS POLICY ShortNodeName OptWhenBlock AccessPolicyAction AccessKindList OptUsingBlock
reduce_CreateAccessPolicy
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateTrigger(self, *kids):
    """%reduce TRIGGER NodeName TriggerTiming TriggerKindList FOR
       TriggerScope OptWhenBlock DO ParenExpr CreateTriggerSDLCommandsBlock
    """
    # Positions follow the production above; FOR and DO are discarded.
    self.val = qlast.CreateTrigger(
        name=kids[1].val,
        timing=kids[2].val,
        kinds=kids[3].val,
        scope=kids[5].val,
        expr=kids[8].val,
        condition=kids[6].val,
        commands=kids[9].val,
    )
%reduce TRIGGER NodeName TriggerTiming TriggerKindList FOR TriggerScope OptWhenBlock DO ParenExpr CreateTriggerSDLCommandsBlock
reduce_CreateTrigger
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateTrigger(self, *kids):
    """%reduce TRIGGER NodeName TriggerTiming TriggerKindList FOR
       TriggerScope OptWhenBlock DO ParenExpr
    """
    # Variant without a trailing commands block.
    self.val = qlast.CreateTrigger(
        name=kids[1].val,
        timing=kids[2].val,
        kinds=kids[3].val,
        scope=kids[5].val,
        expr=kids[8].val,
        condition=kids[6].val,
    )
%reduce TRIGGER NodeName TriggerTiming TriggerKindList FOR TriggerScope OptWhenBlock DO ParenExpr
reduce_CreateTrigger
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateGlobalQuals(self, *kids):
    """%reduce PtrQuals GLOBAL NodeName OptPtrTarget
       CreateGlobalSDLCommandsBlock
    """
    quals_n, glob_tok, name_n, target_n, cmds_n = kids
    # _extract_target resolves the effective target from the optional
    # inline target and the commands block, returning it together with
    # the remaining commands.
    tgt, commands = self._extract_target(
        target_n.val, cmds_n.val, glob_tok.span
    )
    self.val = qlast.CreateGlobal(
        name=name_n.val,
        is_required=quals_n.val.required,
        cardinality=quals_n.val.cardinality,
        target=tgt,
        commands=commands,
    )
%reduce PtrQuals GLOBAL NodeName OptPtrTarget CreateGlobalSDLCommandsBlock
reduce_CreateGlobalQuals
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateGlobal(self, *kids):
    """%reduce GLOBAL NodeName OptPtrTarget CreateGlobalSDLCommandsBlock"""
    glob_tok, name_n, target_n, cmds_n = kids
    # Resolve the effective target and remaining commands.
    tgt, commands = self._extract_target(
        target_n.val, cmds_n.val, glob_tok.span
    )
    self.val = qlast.CreateGlobal(
        name=name_n.val,
        target=tgt,
        commands=commands,
    )
%reduce GLOBAL NodeName OptPtrTarget CreateGlobalSDLCommandsBlock
reduce_CreateGlobal
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateRegularGlobalShortQuals(self, *kids):
    """%reduce PtrQuals GLOBAL NodeName PtrTarget"""
    # Short form with qualifiers and a mandatory inline target.
    self.val = qlast.CreateGlobal(
        name=kids[2].val,
        is_required=kids[0].val.required,
        cardinality=kids[0].val.cardinality,
        target=kids[3].val,
    )
%reduce PtrQuals GLOBAL NodeName PtrTarget
reduce_CreateRegularGlobalShortQuals
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateRegularGlobalShort(self, *kids):
    """%reduce GLOBAL NodeName PtrTarget"""
    # Short form: just a name and an inline target.
    self.val = qlast.CreateGlobal(
        name=kids[1].val,
        target=kids[2].val,
    )
%reduce GLOBAL NodeName PtrTarget
reduce_CreateRegularGlobalShort
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateComputedGlobalShortQuals(self, *kids):
    """%reduce PtrQuals GLOBAL NodeName ASSIGN Expr"""
    # Computed global with qualifiers; the expression is the target.
    self.val = qlast.CreateGlobal(
        name=kids[2].val,
        is_required=kids[0].val.required,
        cardinality=kids[0].val.cardinality,
        target=kids[4].val,
    )
%reduce PtrQuals GLOBAL NodeName ASSIGN Expr
reduce_CreateComputedGlobalShortQuals
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def reduce_CreateComputedGlobalShort(self, *kids):
    """%reduce GLOBAL NodeName ASSIGN Expr"""
    # Computed global without qualifiers.
    self.val = qlast.CreateGlobal(
        name=kids[1].val,
        target=kids[3].val,
    )
%reduce GLOBAL NodeName ASSIGN Expr
reduce_CreateComputedGlobalShort
python
geldata/gel
edb/edgeql/parser/grammar/sdl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py
Apache-2.0
def tok_name(keyword):
    """Convert a literal keyword into a token name."""
    # Plain keywords are simply uppercased; dunder keywords
    # (e.g. ``__type__``) become ``DUNDERTYPE``.
    if not _dunder_re.match(keyword):
        return keyword.upper()
    return f'DUNDER{keyword[2:-2].upper()}'
Convert a literal keyword into a token name.
tok_name
python
geldata/gel
edb/edgeql/parser/grammar/keywords.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/keywords.py
Apache-2.0
def reduce_DESCRIBE_SCHEMA(self, *kids):
    """%reduce DESCRIBE SCHEMA DescribeFormat"""
    _, _, fmt = kids
    self.val = qlast.DescribeStmt(
        object=qlast.DescribeGlobal.Schema,
        language=fmt.val.language,
        options=fmt.val.options,
    )
%reduce DESCRIBE SCHEMA DescribeFormat
reduce_DESCRIBE_SCHEMA
python
geldata/gel
edb/edgeql/parser/grammar/statements.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/statements.py
Apache-2.0
def reduce_DESCRIBE_CURRENT_DATABASE_CONFIG(self, *kids):
    """%reduce DESCRIBE CURRENT DATABASE CONFIG DescribeFormat"""
    _, _, _, _, fmt = kids
    self.val = qlast.DescribeStmt(
        object=qlast.DescribeGlobal.DatabaseConfig,
        language=fmt.val.language,
        options=fmt.val.options,
    )
%reduce DESCRIBE CURRENT DATABASE CONFIG DescribeFormat
reduce_DESCRIBE_CURRENT_DATABASE_CONFIG
python
geldata/gel
edb/edgeql/parser/grammar/statements.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/statements.py
Apache-2.0
def reduce_DESCRIBE_CURRENT_BRANCH_CONFIG(self, *kids):
    """%reduce DESCRIBE CURRENT BRANCH CONFIG DescribeFormat"""
    # BRANCH CONFIG maps to the same DescribeGlobal.DatabaseConfig
    # object as the DATABASE CONFIG spelling.
    _, _, _, _, fmt = kids
    self.val = qlast.DescribeStmt(
        object=qlast.DescribeGlobal.DatabaseConfig,
        language=fmt.val.language,
        options=fmt.val.options,
    )
%reduce DESCRIBE CURRENT BRANCH CONFIG DescribeFormat
reduce_DESCRIBE_CURRENT_BRANCH_CONFIG
python
geldata/gel
edb/edgeql/parser/grammar/statements.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/statements.py
Apache-2.0
def reduce_DESCRIBE_INSTANCE_CONFIG(self, *kids):
    """%reduce DESCRIBE INSTANCE CONFIG DescribeFormat"""
    _, _, _, fmt = kids
    self.val = qlast.DescribeStmt(
        object=qlast.DescribeGlobal.InstanceConfig,
        language=fmt.val.language,
        options=fmt.val.options,
    )
%reduce DESCRIBE INSTANCE CONFIG DescribeFormat
reduce_DESCRIBE_INSTANCE_CONFIG
python
geldata/gel
edb/edgeql/parser/grammar/statements.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/statements.py
Apache-2.0
def reduce_DESCRIBE_SYSTEM_CONFIG(self, *kids):
    """%reduce DESCRIBE SYSTEM CONFIG DescribeFormat"""
    # SYSTEM CONFIG is an alternate spelling — delegate to the
    # INSTANCE CONFIG handler, which builds the DescribeStmt.
    return self.reduce_DESCRIBE_INSTANCE_CONFIG(*kids)
%reduce DESCRIBE SYSTEM CONFIG DescribeFormat
reduce_DESCRIBE_SYSTEM_CONFIG
python
geldata/gel
edb/edgeql/parser/grammar/statements.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/statements.py
Apache-2.0
def reduce_DESCRIBE_ROLES(self, *kids):
    """%reduce DESCRIBE ROLES DescribeFormat"""
    _, _, fmt = kids
    self.val = qlast.DescribeStmt(
        object=qlast.DescribeGlobal.Roles,
        language=fmt.val.language,
        options=fmt.val.options,
    )
%reduce DESCRIBE ROLES DescribeFormat
reduce_DESCRIBE_ROLES
python
geldata/gel
edb/edgeql/parser/grammar/statements.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/statements.py
Apache-2.0
def reduce_DESCRIBE_SchemaItem(self, *kids):
    """%reduce DESCRIBE SchemaItem DescribeFormat"""
    _, item, fmt = kids
    self.val = qlast.DescribeStmt(
        object=item.val,
        language=fmt.val.language,
        options=fmt.val.options,
    )
%reduce DESCRIBE SchemaItem DescribeFormat
reduce_DESCRIBE_SchemaItem
python
geldata/gel
edb/edgeql/parser/grammar/statements.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/statements.py
Apache-2.0
def reduce_DESCRIBE_OBJECT(self, *kids):
    """%reduce DESCRIBE OBJECT NodeName DescribeFormat"""
    _, _, name, fmt = kids
    self.val = qlast.DescribeStmt(
        object=name.val,
        language=fmt.val.language,
        options=fmt.val.options,
    )
%reduce DESCRIBE OBJECT NodeName DescribeFormat
reduce_DESCRIBE_OBJECT
python
geldata/gel
edb/edgeql/parser/grammar/statements.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/statements.py
Apache-2.0
def reduce_DESCRIBE_CURRENT_MIGRATION(self, *kids):
    """%reduce DESCRIBE CURRENT MIGRATION DescribeFormat"""
    # Only the DDL and JSON output languages are meaningful for a
    # migration description; reject anything else up front.
    lang = kids[3].val.language
    if (
        lang is not qltypes.DescribeLanguage.DDL
        and lang is not qltypes.DescribeLanguage.JSON
    ):
        raise errors.InvalidSyntaxError(
            f'unexpected DESCRIBE format: {lang!r}',
            span=kids[3].span,
        )

    # DescribeFormat options are not applicable to this statement.
    # (Fixed: the message had a pointless `f` prefix with no
    # placeholders.)
    if kids[3].val.options:
        raise errors.InvalidSyntaxError(
            'DESCRIBE CURRENT MIGRATION does not support options',
            span=kids[3].span,
        )

    self.val = qlast.DescribeCurrentMigration(
        language=lang,
    )
%reduce DESCRIBE CURRENT MIGRATION DescribeFormat
reduce_DESCRIBE_CURRENT_MIGRATION
python
geldata/gel
edb/edgeql/parser/grammar/statements.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/statements.py
Apache-2.0
def reduce_CREATE_DATABASE_regular(self, *kids):
    """%reduce CREATE DATABASE DatabaseName
       OptCreateDatabaseCommandsBlock
    """
    _, _, name, commands = kids
    # A plain CREATE DATABASE starts from an empty branch.
    self.val = qlast.CreateDatabase(
        name=name.val,
        commands=commands.val,
        branch_type=qlast.BranchType.EMPTY,
        flavor='DATABASE',
    )
%reduce CREATE DATABASE DatabaseName OptCreateDatabaseCommandsBlock
reduce_CREATE_DATABASE_regular
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CREATE_DATABASE_from_template(self, *kids):
    """%reduce CREATE DATABASE DatabaseName FROM AnyNodeName
       OptCreateDatabaseCommandsBlock
    """
    # Unpack once and use the bound names directly (the previous code
    # unpacked into throwaway `_name`/`_template`/`_commands` and then
    # re-read the same values via `kids[...]`).
    _, _, name, _, template, commands = kids
    self.val = qlast.CreateDatabase(
        name=name.val,
        commands=commands.val,
        # Copying from a template implies a data branch.
        branch_type=qlast.BranchType.DATA,
        template=template.val,
        flavor='DATABASE',
    )
%reduce CREATE DATABASE DatabaseName FROM AnyNodeName OptCreateDatabaseCommandsBlock
reduce_CREATE_DATABASE_from_template
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_create_schema_branch(self, *kids):
    """%reduce CREATE SCHEMA BRANCH DatabaseName FROM DatabaseName"""
    _, _, _, name, _, template = kids
    self.val = qlast.CreateDatabase(
        name=name.val,
        template=template.val,
        branch_type=qlast.BranchType.SCHEMA,
    )
%reduce CREATE SCHEMA BRANCH DatabaseName FROM DatabaseName
reduce_create_schema_branch
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_create_data_branch(self, *kids):
    """%reduce CREATE DATA BRANCH DatabaseName FROM DatabaseName"""
    _, _, _, name, _, template = kids
    self.val = qlast.CreateDatabase(
        name=name.val,
        template=template.val,
        branch_type=qlast.BranchType.DATA,
    )
%reduce CREATE DATA BRANCH DatabaseName FROM DatabaseName
reduce_create_data_branch
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_create_template_branch(self, *kids):
    """%reduce CREATE TEMPLATE BRANCH DatabaseName FROM DatabaseName"""
    _, _, _, name, _, template = kids
    self.val = qlast.CreateDatabase(
        name=name.val,
        template=template.val,
        branch_type=qlast.BranchType.TEMPLATE,
    )
%reduce CREATE TEMPLATE BRANCH DatabaseName FROM DatabaseName
reduce_create_template_branch
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_alter_branch(self, *kids):
    """%reduce ALTER BRANCH DatabaseName BranchOptions
       AlterBranchCommandsBlock
    """
    self.val = qlast.AlterDatabase(
        name=kids[2].val,
        commands=kids[4].val,
        force=kids[3].val.force,
    )
%reduce ALTER BRANCH DatabaseName BranchOptions AlterBranchCommandsBlock
reduce_alter_branch
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateRewrite(self, *kids):
    """%reduce CREATE REWRITE RewriteKindList USING ParenExpr
       OptCreateRewriteCommandsBlock
    """
    self.val = qlast.CreateRewrite(
        kinds=kids[2].val,
        expr=kids[4].val,
        commands=kids[5].val,
    )
%reduce CREATE REWRITE RewriteKindList USING ParenExpr OptCreateRewriteCommandsBlock
reduce_CreateRewrite
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateRegularProperty(self, *kids):
    """%reduce CREATE OptPtrQuals PROPERTY UnqualifiedPointerName
       OptExtendingSimple ARROW FullTypeExpr
       OptCreateConcretePropertyCommandsBlock
    """
    _, quals, _, name, bases, _, target, commands = kids
    # extract_bases post-processes the inline EXTENDING clause together
    # with the commands block.
    vbases, vcommands = commondl.extract_bases(bases.val, commands.val)
    self.val = qlast.CreateConcreteProperty(
        name=name.val,
        bases=vbases,
        is_required=quals.val.required,
        cardinality=quals.val.cardinality,
        target=target.val,
        commands=vcommands,
    )
%reduce CREATE OptPtrQuals PROPERTY UnqualifiedPointerName OptExtendingSimple ARROW FullTypeExpr OptCreateConcretePropertyCommandsBlock
reduce_CreateRegularProperty
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateRegularPropertyNew(self, *kids):
    """%reduce CREATE OptPtrQuals PROPERTY UnqualifiedPointerName
       OptExtendingSimple COLON FullTypeExpr
       OptCreateConcretePropertyCommandsBlock
    """
    # Same as the ARROW form, but with the newer COLON syntax.
    _, quals, _, name, bases, _, target, commands = kids
    vbases, vcommands = commondl.extract_bases(bases.val, commands.val)
    self.val = qlast.CreateConcreteProperty(
        name=name.val,
        bases=vbases,
        is_required=quals.val.required,
        cardinality=quals.val.cardinality,
        target=target.val,
        commands=vcommands,
    )
%reduce CREATE OptPtrQuals PROPERTY UnqualifiedPointerName OptExtendingSimple COLON FullTypeExpr OptCreateConcretePropertyCommandsBlock
reduce_CreateRegularPropertyNew
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateComputableProperty(self, *kids):
    """%reduce CREATE OptPtrQuals PROPERTY UnqualifiedPointerName
       ASSIGN Expr
    """
    _, quals, _, name, _, expr = kids
    self.val = qlast.CreateConcreteProperty(
        name=name.val,
        is_required=quals.val.required,
        cardinality=quals.val.cardinality,
        target=expr.val,
    )
%reduce CREATE OptPtrQuals PROPERTY UnqualifiedPointerName ASSIGN Expr
reduce_CreateComputableProperty
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateComputablePropertyWithUsing(self, *kids):
    """%reduce CREATE OptPtrQuals PROPERTY UnqualifiedPointerName
       OptCreateConcretePropertyCommandsBlock
    """
    # The computed expression arrives as a `SET expr := ...` command in
    # the block; exactly one must be present, and EXTENDING is not
    # allowed on a computed property.
    # (Fixed: error messages had pointless `f` prefixes with no
    # placeholders.)
    cmds = kids[4].val
    target = None

    for cmd in cmds:
        if isinstance(cmd, qlast.SetField) and cmd.name == 'expr':
            if target is not None:
                raise EdgeQLSyntaxError(
                    'computed property with more than one expression',
                    span=kids[3].span)
            target = cmd.value
        elif isinstance(cmd, qlast.AlterAddInherit):
            raise EdgeQLSyntaxError(
                'computed property cannot specify EXTENDING',
                span=kids[3].span)

    if target is None:
        raise EdgeQLSyntaxError(
            'computed property without expression',
            span=kids[3].span)

    self.val = qlast.CreateConcreteProperty(
        name=kids[3].val,
        is_required=kids[1].val.required,
        cardinality=kids[1].val.cardinality,
        target=target,
        commands=cmds,
    )
%reduce CREATE OptPtrQuals PROPERTY UnqualifiedPointerName OptCreateConcretePropertyCommandsBlock
reduce_CreateComputablePropertyWithUsing
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateRegularLink(self, *kids):
    """%reduce CREATE OptPtrQuals LINK UnqualifiedPointerName
       OptExtendingSimple ARROW FullTypeExpr
       OptCreateConcreteLinkCommandsBlock
    """
    _, quals, _, name, bases, _, target, commands = kids
    vbases, vcommands = commondl.extract_bases(bases.val, commands.val)
    self.val = qlast.CreateConcreteLink(
        name=name.val,
        bases=vbases,
        is_required=quals.val.required,
        cardinality=quals.val.cardinality,
        target=target.val,
        commands=vcommands,
    )
%reduce CREATE OptPtrQuals LINK UnqualifiedPointerName OptExtendingSimple ARROW FullTypeExpr OptCreateConcreteLinkCommandsBlock
reduce_CreateRegularLink
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateRegularLinkNew(self, *kids):
    """%reduce CREATE OptPtrQuals LINK UnqualifiedPointerName
       OptExtendingSimple COLON FullTypeExpr
       OptCreateConcreteLinkCommandsBlock
    """
    # Same as the ARROW form, but with the newer COLON syntax.
    _, quals, _, name, bases, _, target, commands = kids
    vbases, vcommands = commondl.extract_bases(bases.val, commands.val)
    self.val = qlast.CreateConcreteLink(
        name=name.val,
        bases=vbases,
        is_required=quals.val.required,
        cardinality=quals.val.cardinality,
        target=target.val,
        commands=vcommands,
    )
%reduce CREATE OptPtrQuals LINK UnqualifiedPointerName OptExtendingSimple COLON FullTypeExpr OptCreateConcreteLinkCommandsBlock
reduce_CreateRegularLinkNew
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateComputableLink(self, *kids):
    """%reduce CREATE OptPtrQuals LINK UnqualifiedPointerName ASSIGN Expr"""
    _, quals, _, name, _, expr = kids
    self.val = qlast.CreateConcreteLink(
        name=name.val,
        is_required=quals.val.required,
        cardinality=quals.val.cardinality,
        target=expr.val,
    )
%reduce CREATE OptPtrQuals LINK UnqualifiedPointerName ASSIGN Expr
reduce_CreateComputableLink
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateComputableLinkWithUsing(self, *kids):
    """%reduce CREATE OptPtrQuals LINK UnqualifiedPointerName
       OptCreateConcreteLinkCommandsBlock
    """
    # The computed expression arrives as a `SET expr := ...` command in
    # the block; exactly one must be present, and EXTENDING is not
    # allowed on a computed link.
    # (Fixed: error messages had pointless `f` prefixes with no
    # placeholders.)
    cmds = kids[4].val
    target = None

    for cmd in cmds:
        if isinstance(cmd, qlast.SetField) and cmd.name == 'expr':
            if target is not None:
                raise EdgeQLSyntaxError(
                    'computed link with more than one expression',
                    span=kids[3].span)
            target = cmd.value
        elif isinstance(cmd, qlast.AlterAddInherit):
            raise EdgeQLSyntaxError(
                'computed link cannot specify EXTENDING',
                span=kids[3].span)

    if target is None:
        raise EdgeQLSyntaxError(
            'computed link without expression',
            span=kids[3].span)

    self.val = qlast.CreateConcreteLink(
        name=kids[3].val,
        is_required=kids[1].val.required,
        cardinality=kids[1].val.cardinality,
        target=target,
        commands=cmds,
    )
%reduce CREATE OptPtrQuals LINK UnqualifiedPointerName OptCreateConcreteLinkCommandsBlock
reduce_CreateComputableLinkWithUsing
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateAccessPolicy(self, *kids):
    """%reduce CREATE ACCESS POLICY UnqualifiedPointerName OptWhenBlock
       AccessPolicyAction AccessKindList OptUsingBlock
       OptCreateAccessPolicyCommandsBlock
    """
    _, _, _, name, when, action, kinds, using, commands = kids
    self.val = qlast.CreateAccessPolicy(
        name=name.val,
        condition=when.val,
        action=action.val,
        # AccessKindList produces a list of lists; flatten it.
        access_kinds=[kind for group in kinds.val for kind in group],
        expr=using.val,
        commands=commands.val,
    )
%reduce CREATE ACCESS POLICY UnqualifiedPointerName OptWhenBlock AccessPolicyAction AccessKindList OptUsingBlock OptCreateAccessPolicyCommandsBlock
reduce_CreateAccessPolicy
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateTrigger(self, *kids):
    """%reduce CREATE TRIGGER UnqualifiedPointerName TriggerTiming
       TriggerKindList FOR TriggerScope OptWhenBlock DO ParenExpr
       OptCreateTriggerCommandsBlock
    """
    # Positions follow the production above; FOR and DO are discarded.
    self.val = qlast.CreateTrigger(
        name=kids[2].val,
        timing=kids[3].val,
        kinds=kids[4].val,
        scope=kids[6].val,
        expr=kids[9].val,
        condition=kids[7].val,
        commands=kids[10].val,
    )
%reduce CREATE TRIGGER UnqualifiedPointerName TriggerTiming TriggerKindList FOR TriggerScope OptWhenBlock DO ParenExpr OptCreateTriggerCommandsBlock
reduce_CreateTrigger
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_AlterFunctionStmt(self, *kids):
    """%reduce ALTER FUNCTION NodeName CreateFunctionArgs
       AlterFunctionCommandsBlock
    """
    _, _, name, params, commands = kids
    # _process_function_body consumes the commands node itself (not
    # .val) and returns extra keyword arguments for the AST node.
    self.val = qlast.AlterFunction(
        name=name.val,
        params=params.val,
        **self._process_function_body(commands, optional_using=True),
    )
%reduce ALTER FUNCTION NodeName CreateFunctionArgs AlterFunctionCommandsBlock
reduce_AlterFunctionStmt
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_AlterOperatorStmt(self, *kids):
    """%reduce ALTER OperatorKind OPERATOR NodeName CreateFunctionArgs
       AlterOperatorCommandsBlock
    """
    _, kind, _, name, params, commands = kids
    self.val = qlast.AlterOperator(
        kind=kind.val,
        name=name.val,
        params=params.val,
        commands=commands.val,
    )
%reduce ALTER OperatorKind OPERATOR NodeName CreateFunctionArgs AlterOperatorCommandsBlock
reduce_AlterOperatorStmt
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_DropOperator(self, *kids):
    """%reduce DROP OperatorKind OPERATOR NodeName CreateFunctionArgs"""
    _, kind, _, name, params = kids
    self.val = qlast.DropOperator(
        kind=kind.val,
        name=name.val,
        params=params.val,
    )
%reduce DROP OperatorKind OPERATOR NodeName CreateFunctionArgs
reduce_DropOperator
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_AlterCastStmt(self, *kids):
    """%reduce ALTER CAST FROM TypeName TO TypeName
       AlterCastCommandsBlock
    """
    _, _, _, from_t, _, to_t, commands = kids
    self.val = qlast.AlterCast(
        from_type=from_t.val,
        to_type=to_t.val,
        commands=commands.val,
    )
%reduce ALTER CAST FROM TypeName TO TypeName AlterCastCommandsBlock
reduce_AlterCastStmt
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_DropCastStmt(self, *kids):
    """%reduce DROP CAST FROM TypeName TO TypeName"""
    _, _, _, from_t, _, to_t = kids
    self.val = qlast.DropCast(
        from_type=from_t.val,
        to_type=to_t.val,
    )
%reduce DROP CAST FROM TypeName TO TypeName
reduce_DropCastStmt
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateRegularGlobal(self, *kids):
    """%reduce CREATE OptPtrQuals GLOBAL NodeName ARROW FullTypeExpr
       OptCreateGlobalCommandsBlock
    """
    _, quals, _, name, _, target, commands = kids
    self.val = qlast.CreateGlobal(
        name=name.val,
        is_required=quals.val.required,
        cardinality=quals.val.cardinality,
        target=target.val,
        commands=commands.val,
    )
%reduce CREATE OptPtrQuals GLOBAL NodeName ARROW FullTypeExpr OptCreateGlobalCommandsBlock
reduce_CreateRegularGlobal
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateRegularGlobalNew(self, *kids):
    """%reduce CREATE OptPtrQuals GLOBAL NodeName COLON FullTypeExpr
       OptCreateGlobalCommandsBlock
    """
    # Same as the ARROW form, but with the newer COLON syntax.
    _, quals, _, name, _, target, commands = kids
    self.val = qlast.CreateGlobal(
        name=name.val,
        is_required=quals.val.required,
        cardinality=quals.val.cardinality,
        target=target.val,
        commands=commands.val,
    )
%reduce CREATE OptPtrQuals GLOBAL NodeName COLON FullTypeExpr OptCreateGlobalCommandsBlock
reduce_CreateRegularGlobalNew
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateComputableGlobal(self, *kids):
    """%reduce CREATE OptPtrQuals GLOBAL NodeName ASSIGN Expr"""
    _, quals, _, name, _, expr = kids
    self.val = qlast.CreateGlobal(
        name=name.val,
        is_required=quals.val.required,
        cardinality=quals.val.cardinality,
        target=expr.val,
    )
%reduce CREATE OptPtrQuals GLOBAL NodeName ASSIGN Expr
reduce_CreateComputableGlobal
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def reduce_CreateComputableGlobalWithUsing(self, *kids):
    """%reduce CREATE OptPtrQuals GLOBAL NodeName
       OptCreateConcretePropertyCommandsBlock
    """
    # The computed expression arrives as a `SET expr := ...` command
    # inside the block; exactly one must be present.
    # (Fixed: error messages had pointless `f` prefixes with no
    # placeholders.)
    cmds = kids[4].val
    target = None

    for cmd in cmds:
        if isinstance(cmd, qlast.SetField) and cmd.name == 'expr':
            if target is not None:
                raise EdgeQLSyntaxError(
                    'computed global with more than one expression',
                    span=kids[3].span)
            target = cmd.value

    if target is None:
        raise EdgeQLSyntaxError(
            'computed global without expression',
            span=kids[3].span)

    self.val = qlast.CreateGlobal(
        name=kids[3].val,
        is_required=kids[1].val.required,
        cardinality=kids[1].val.cardinality,
        target=target,
        commands=cmds,
    )
%reduce CREATE OptPtrQuals GLOBAL NodeName OptCreateConcretePropertyCommandsBlock
reduce_CreateComputableGlobalWithUsing
python
geldata/gel
edb/edgeql/parser/grammar/ddl.py
https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/ddl.py
Apache-2.0
def translate_type(
    typeref: irast.TypeRef,
    *,
    schema: s_schema.Schema,
) -> tuple[irast.ParamTransType, tuple[irast.TypeRef, ...]]:
    """Translate the type of a tuple-containing param to multiple params.

    This computes a list of parameter types, as well as a ParamTransType
    that clones the type information but augments each node in the type
    with indexes that correspond to which parameter data is drawn from.

    This is used to drive the encoder and the decoder generator.
    """
    # Flat list of actual parameter types produced so far; each node in
    # the returned ParamTransType records its starting index into this
    # list (ParamScalar appends one entry, arrays-in-arrays add an extra
    # int32[] entry, tuples add none themselves).
    typs: list[irast.TypeRef] = []

    def trans(
        typ: irast.TypeRef, in_array: bool, depth: int
    ) -> irast.ParamTransType:
        # Refuse pathologically nested parameter types.
        if depth > MAX_NESTING:
            raise errors.QueryError(
                f'type of parameter is too deeply nested')
        # Index this node's parameters will start at.
        start = len(typs)
        if irtypeutils.is_array(typ):
            # If our array is appearing already inside another array,
            # we need to add an extra parameter
            if in_array:
                int_typeref = schema.get(
                    sn.QualName('std', 'int32'), type=s_types.Type)
                nschema, array_styp = s_types.Array.from_subtypes(
                    schema, [int_typeref])
                typs.append(irtypeutils.type_to_typeref(
                    nschema, array_styp, cache=None))
            return irast.ParamArray(
                typeref=typ,
                idx=start,
                # Element type is translated with in_array=True so any
                # nested scalars become array-typed parameters.
                typ=trans(typ.subtypes[0], in_array=True, depth=depth + 1),
            )
        elif irtypeutils.is_tuple(typ):
            # Tuples contribute no parameter of their own; each element
            # is translated in place, keeping its element_name.
            return irast.ParamTuple(
                typeref=typ,
                idx=start,
                typs=tuple(
                    (
                        t.element_name,
                        trans(t, in_array=in_array, depth=depth + 1),
                    )
                    for t in typ.subtypes
                ),
            )
        else:
            nt = typ
            # If this appears in an array, the param needs to be an array
            if in_array:
                nschema, styp = irtypeutils.ir_typeref_to_type(schema, typ)
                nschema, styp = s_types.Array.from_subtypes(nschema, [styp])
                nt = irtypeutils.type_to_typeref(nschema, styp, cache=None)
            typs.append(nt)
            return irast.ParamScalar(typeref=typ, idx=start)

    t = trans(typeref, in_array=False, depth=0)
    return t, tuple(typs)
Translate the type of a tuple-containing param to multiple params. This computes a list of parameter types, as well as a ParamTransType that clones the type information but augments each node in the type with indexes that correspond to which parameter data is drawn from. This is used to drive the encoder and the decoder generator.
translate_type
python
geldata/gel
edb/edgeql/compiler/tuple_args.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/tuple_args.py
Apache-2.0
def make_decoder(
    ptyp: irast.ParamTransType,
    qparams: tuple[irast.Param, ...],
    *,
    ctx: context.ContextLevel,
) -> qlast.Expr:
    """Generate a decoder for tuple parameters.

    More details in the module docstring.
    """
    # One typed (and possibly OPTIONAL) cast expression per flattened
    # sub-parameter; `mk` below indexes into this list by ParamTransType.idx.
    params: list[qlast.Expr] = [
        qlast.TypeCast(
            expr=qlast.Parameter(name=param.name),
            type=_ref_to_ast(param.ir_type, ctx=ctx),
            cardinality_mod=(
                qlast.CardinalityModifier.Optional
                if not param.required else None
            ),
        )
        for param in qparams
    ]

    # Build the decoder expression for `typ`.  `idx` is None at the top
    # level; inside an array it is the expression for the current element
    # index into the flattened parameter arrays.
    def mk(typ: irast.ParamTransType, idx: Optional[qlast.Expr]) -> qlast.Expr:
        if isinstance(typ, irast.ParamScalar):
            expr = params[typ.idx]
            if idx is not None:
                expr = _index(expr, idx)
            return expr
        elif isinstance(typ, irast.ParamTuple):
            return _make_tuple([(f, mk(t, idx=idx)) for f, t in typ.typs])
        elif isinstance(typ, irast.ParamArray):
            inner_idx_alias, inner_idx = _get_alias('idx', ctx=ctx)

            # lo/hi bound the slice of the flattened arrays that belongs
            # to this array value.
            lo: qlast.Expr
            hi: qlast.Expr
            if idx is None:
                lo = qlast.Constant.integer(0)
                hi = qlast.FunctionCall(
                    func=('__std__', 'len'), args=[params[typ.idx]])
                # If the leftmost element inside a toplevel array is
                # itself an array, subtract 1 from the length (since
                # array params have an extra element). We also need to
                # call `max` to prevent generating an invalid range.
                if _lmost_is_array(typ.typ):
                    hi = qlast.FunctionCall(
                        func=('__std__', 'max'),
                        args=[
                            qlast.Set(elements=[lo, _plus_const(hi, -1)])])
            else:
                lo = _index(params[typ.idx], idx)
                hi = _index(params[typ.idx], _plus_const(idx, 1))

            # If the contents is just a scalar, then we can take
            # values directly from the scalar array parameter, without
            # needing to iterate over the array directly.
            # This is an optimization, and not necessary for correctness.
            if isinstance(typ.typ, irast.ParamScalar):
                sub = params[typ.typ.idx]
                # If we are in an array, do a slice
                if idx is not None:
                    sub = qlast.Indirection(
                        arg=sub,
                        indirection=[qlast.Slice(start=lo, stop=hi)],
                    )
                return sub

            sub_expr = mk(typ.typ, idx=inner_idx)

            # For some reason, this is much faster if force the range to
            # be over int64 instead of int32.
            lo = qlast.TypeCast(
                expr=lo,
                type=qlast.TypeName(
                    maintype=qlast.ObjectRef(module='__std__', name='int64')
                ),
            )

            # FOR idx IN range_unpack(range(lo, hi)) UNION <element decoder>
            loop = qlast.ForQuery(
                iterator_alias=inner_idx_alias,
                # TODO: Using _gen_series would be marginally faster,
                # but it isn't actually available in distributions
                # iterator=qlast.FunctionCall(
                #     func=('__std__', '_gen_series'),
                #     args=[lo, _plus_const(hi, -1)],
                # ),
                iterator=qlast.FunctionCall(
                    func=('__std__', 'range_unpack'),
                    args=[
                        qlast.FunctionCall(
                            func=('__std__', 'range'),
                            args=[lo, hi],
                        )
                    ]
                ),
                result=sub_expr,
            )
            res: qlast.Expr = qlast.FunctionCall(
                func=('__std__', 'array_agg'),
                args=[loop],
            )

            # If the param is optional, and we are still at the
            # top-level, insert a filter so that our aggregate doesn't
            # create something from nothing.
            if not qparams[typ.idx].required and idx is None:
                res = qlast.SelectQuery(
                    result=res,
                    where=qlast.UnaryOp(op='EXISTS', operand=params[typ.idx]),
                )

            return res
        else:
            raise AssertionError(f'bogus type {typ}')

    decoder = mk(ptyp, idx=None)

    return decoder
Generate a decoder for tuple parameters. More details in the module docstring.
make_decoder
python
geldata/gel
edb/edgeql/compiler/tuple_args.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/tuple_args.py
Apache-2.0
def create_sub_params(
    name: str,
    required: bool,
    typeref: irast.TypeRef,
    pt: s_types.Type,
    *,
    ctx: context.ContextLevel,
) -> Optional[irast.SubParams]:
    """Create sub parameters for a new param, if needed.

    Sub-params are only needed when the parameter type contains a tuple
    (directly, as anytuple, or inside an array), and only in compilation
    modes where tuple params must be decomposed (i.e. not for function
    params and not for JSON parameters).
    """
    schema = ctx.env.schema

    contains_tuple = (
        pt.is_tuple(schema)
        or pt.is_anytuple(schema)
        or pt.contains_array_of_tuples(schema)
    )
    if not contains_tuple:
        return None
    if ctx.env.options.func_params or ctx.env.options.json_parameters:
        return None

    trans_type, sub_typerefs = translate_type(typeref, schema=schema)

    # One synthetic parameter per flattened component type.
    sub_param_list = []
    for i, arg_typeref in enumerate(sub_typerefs):
        sub_param_list.append(
            irast.Param(
                name=f'__edb_decoded_{name}_{i}__',
                required=required,
                ir_type=arg_typeref,
                schema_type=typegen.type_from_typeref(arg_typeref, env=ctx.env),
            )
        )
    sub_param_tuple = tuple(sub_param_list)

    decoder = make_decoder(trans_type, sub_param_tuple, ctx=ctx)

    return irast.SubParams(
        trans_type=trans_type,
        decoder_edgeql=decoder,
        params=sub_param_tuple,
    )
Create sub parameters for a new param, if needed. We need to do this if there is a tuple in the type.
create_sub_params
python
geldata/gel
edb/edgeql/compiler/tuple_args.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/tuple_args.py
Apache-2.0
def finish_sub_params(
    subps: irast.SubParams,
    *,
    ctx: context.ContextLevel,
) -> Optional[irast.SubParams]:
    """Finalize sub-params by compiling the decoder EdgeQL into IR.

    The compilation must happen here, in the proper (fenced) scope,
    rather than in create_sub_params: that function runs during
    preprocessing and its result is shared between queries.
    """
    with ctx.newscope(fenced=True) as scopectx:
        compiled_decoder = dispatch.compile(
            subps.decoder_edgeql, ctx=scopectx)
        return dataclasses.replace(subps, decoder_ir=compiled_decoder)
Finalize the subparams by compiling the IR in the proper context. We can't just compile it when doing create_sub_params, since that is called from preprocessing and so is shared between queries.
finish_sub_params
python
geldata/gel
edb/edgeql/compiler/tuple_args.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/tuple_args.py
Apache-2.0
def find_callable_typemods(
    candidates: Sequence[s_func.CallableLike], *,
    num_args: int, kwargs_names: AbstractSet[str],
    ctx: context.ContextLevel,
) -> Dict[Union[int, str], ft.TypeModifier]:
    """Find the type modifiers for a callable.

    We do this early, before we've compiled/checked the arguments,
    so that we can compile the arguments with the proper fences.

    Returns a mapping from argument position (for positional args) or
    name (for keyword args) to the resolved type modifier.
    """
    # Do a dry-run call resolution against dummy `anytype` arguments.
    typ = s_pseudo.PseudoType.get(ctx.env.schema, 'anytype')
    dummy = irast.DUMMY_SET
    args = [(typ, dummy)] * num_args
    kwargs = {k: (typ, dummy) for k in kwargs_names}
    options = find_callable(
        candidates, basic_matching_only=True, args=args, kwargs=kwargs,
        ctx=ctx
    )

    # No options means an error is going to happen later, but for now,
    # just return some placeholders so that we can make it to the
    # error later.
    if not options:
        return {k: _SINGLETON for k in set(range(num_args)) | kwargs_names}

    fts: Dict[Union[int, str], ft.TypeModifier] = {}
    for choice in options:
        for barg in choice.args:
            if not barg.param or barg.arg_id is None:
                continue
            # NOTE: renamed from `ft`, which shadowed the `ft` module
            # alias used in this function's return annotation.
            typemod = barg.param.get_typemod(ctx.env.schema)
            if barg.arg_id in fts and fts[barg.arg_id] != typemod:
                if typemod == _SET_OF or fts[barg.arg_id] == _SET_OF:
                    raise errors.QueryError(
                        f'argument could be SET OF or not in call to '
                        f'{candidates[0].get_verbosename(ctx.env.schema)}: '
                        f'seems like a stdlib bug!')
                else:
                    # If there is a mix between OPTIONAL and SINGLETON
                    # arguments in possible call sites, we just call it
                    # optional. Generated code quality will be a little
                    # worse but still correct.
                    fts[barg.arg_id] = _OPTIONAL
            else:
                fts[barg.arg_id] = typemod

    return fts
Find the type modifiers for a callable. We do this early, before we've compiled/checked the arguments, so that we can compile the arguments with the proper fences.
find_callable_typemods
python
geldata/gel
edb/edgeql/compiler/polyres.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/polyres.py
Apache-2.0
def extend_path_id(
    path_id: irast.PathId,
    *,
    ptrcls: s_pointers.PointerLike,
    direction: s_pointers.PointerDirection = (
        s_pointers.PointerDirection.Outbound),
    ns: AbstractSet[str] = frozenset(),
    ctx: context.ContextLevel,
) -> irast.PathId:
    """A wrapper over :meth:`ir.pathid.PathId.extend` that also ensures
       the cardinality of *ptrcls* is known at the end of compilation.
    """
    return path_id.extend(
        ptrref=irtyputils.ptrref_from_ptrcls(
            schema=ctx.env.schema,
            ptrcls=ptrcls,
            cache=ctx.env.ptr_ref_cache,
            typeref_cache=ctx.env.type_ref_cache,
        ),
        direction=direction,
        ns=ns,
    )
A wrapper over :meth:`ir.pathid.PathId.extend` that also ensures the cardinality of *ptrcls* is known at the end of compilation.
extend_path_id
python
geldata/gel
edb/edgeql/compiler/pathctx.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/pathctx.py
Apache-2.0
def _rewrite_weak_namespaces(
    irs: Sequence[irast.Base], ctx: context.ContextLevel
) -> None:
    """Rewrite weak namespaces in path ids to be usable by the backend.

    Weak namespaces in path ids in the frontend are "relative", and their
    interpretation depends on the current scope tree node and the
    namespaces on the parent nodes.

    The IR->pgsql compiler does not do this sort of interpretation, and
    needs path IDs that are "absolute".

    To accomplish this, we go through all the path ids and rewrite them:
    using the scope tree, we try to find the binding of the path ID (using
    a prefix if necessary) and drop all namespace parts that don't appear
    in the binding.
    """
    tree = ctx.path_scope

    # First fix up the path ids recorded on the scope tree nodes
    # themselves.
    for node in tree.strict_descendants:
        if node.path_id:
            node.path_id = _try_namespace_fix(node, node.path_id)

    scopes = irutils.find_path_scopes(irs)

    # Then fix up every set's path id, resolved against the scope tree
    # node it was compiled under.
    for ir_set in ctx.env.set_types:
        path_scope_id: Optional[int] = scopes.get(ir_set)
        if path_scope_id is not None:
            # Some entries in set_types are from compiling views
            # in temporary scopes, so we need to just skip those.
            if scope := ctx.env.scope_tree_nodes.get(path_scope_id):
                ir_set.path_id = _try_namespace_fix(scope, ir_set.path_id)
Rewrite weak namespaces in path ids to be usable by the backend. Weak namespaces in path ids in the frontend are "relative", and their interpretation depends on the current scope tree node and the namespaces on the parent nodes. The IR->pgsql compiler does not do this sort of interpretation, and needs path IDs that are "absolute". To accomplish this, we go through all the path ids and rewrite them: using the scope tree, we try to find the binding of the path ID (using a prefix if necessary) and drop all namespace parts that don't appear in the binding.
_rewrite_weak_namespaces
python
geldata/gel
edb/edgeql/compiler/stmtctx.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/stmtctx.py
Apache-2.0
def _fixup_schema_view(*, ctx: context.ContextLevel) -> None:
    """Finalize schema view types for inclusion in the real schema.

    This includes setting from_alias flags and collapsing opaque
    unions to BaseObject.
    """
    for view in ctx.view_nodes.values():
        if view.is_collection():
            continue

        assert isinstance(view, s_types.InheritingType)
        _elide_derived_ancestors(view, ctx=ctx)

        if not isinstance(view, s_sources.Source):
            continue

        # Fix up every pointer of the view type.
        view_own_pointers = view.get_pointers(ctx.env.schema)
        for vptr in view_own_pointers.objects(ctx.env.schema):
            _elide_derived_ancestors(vptr, ctx=ctx)
            ctx.env.schema = vptr.set_field_value(
                ctx.env.schema,
                'from_alias',
                True,
            )

            tgt = vptr.get_target(ctx.env.schema)
            assert tgt is not None

            if (tgt.is_union_type(ctx.env.schema)
                    and tgt.get_is_opaque_union(ctx.env.schema)):
                # Opaque unions should manifest as std::BaseObject
                # in schema views.
                ctx.env.schema = vptr.set_target(
                    ctx.env.schema,
                    ctx.env.schema.get(
                        'std::BaseObject', type=s_types.Type),
                )

            if not isinstance(vptr, s_sources.Source):
                continue

            # Link properties need the same treatment as the links
            # themselves.
            vptr_own_pointers = vptr.get_pointers(ctx.env.schema)
            for vlprop in vptr_own_pointers.objects(ctx.env.schema):
                _elide_derived_ancestors(vlprop, ctx=ctx)
                ctx.env.schema = vlprop.set_field_value(
                    ctx.env.schema,
                    'from_alias',
                    True,
                )
Finalize schema view types for inclusion in the real schema. This includes setting from_alias flags and collapsing opaque unions to BaseObject.
_fixup_schema_view
python
geldata/gel
edb/edgeql/compiler/stmtctx.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/stmtctx.py
Apache-2.0
def _get_nearest_non_source_derived_parent(
    obj: s_obj.DerivableInheritingObjectT, ctx: context.ContextLevel
) -> s_obj.DerivableInheritingObjectT:
    """Find the nearest ancestor of obj whose "root source" is not derived"""
    schema = ctx.env.schema

    # Climb the base chain until the root source stops being a derived
    # object (or there is no root source at all).
    while True:
        root_src = s_pointers.get_root_source(obj, schema)
        if not root_src:
            break
        if not isinstance(root_src, s_obj.DerivableInheritingObject):
            break
        if not root_src.get_is_derived(schema):
            break
        obj = obj.get_bases(schema).first(schema)

    return obj
Find the nearest ancestor of obj whose "root source" is not derived
_get_nearest_non_source_derived_parent
python
geldata/gel
edb/edgeql/compiler/stmtctx.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/stmtctx.py
Apache-2.0
def _elide_derived_ancestors(
    obj: Union[s_types.InheritingType, s_pointers.Pointer],
    *,
    ctx: context.ContextLevel,
) -> None:
    """Collapse references to derived objects in bases.

    When compiling a schema view expression, make sure we don't
    expose any ephemeral derived objects, as these would not
    exist in the schema outside of the compilation context.
    """
    current_base = obj.get_bases(ctx.env.schema).first(ctx.env.schema)
    stable_base = _get_nearest_non_source_derived_parent(current_base, ctx)

    if current_base == stable_base:
        # Nothing ephemeral to hide.
        return

    ctx.env.schema = obj.set_field_value(
        ctx.env.schema,
        'bases',
        s_obj.ObjectList.create(ctx.env.schema, [stable_base]),
    )

    # Bases changed, so the cached ancestor list must be recomputed.
    ctx.env.schema = obj.set_field_value(
        ctx.env.schema,
        'ancestors',
        s_obj.compute_ancestors(ctx.env.schema, obj)
    )
Collapse references to derived objects in bases. When compiling a schema view expression, make sure we don't expose any ephemeral derived objects, as these wouldn't be present in the schema outside of the compilation context.
_elide_derived_ancestors
python
geldata/gel
edb/edgeql/compiler/stmtctx.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/stmtctx.py
Apache-2.0
def preprocess_script(
    stmts: List[qlast.Base],
    *,
    ctx: context.ContextLevel
) -> irast.ScriptInfo:
    """Extract parameters from all statements in a script.

    Doing this in advance makes it easy to check that they have
    consistent types.
    """
    params_lists = [
        astutils.find_parameters(stmt, ctx.modaliases)
        for stmt in stmts
    ]
    # Parameters used without a type cast anywhere are an error.
    if loose_params := [
        loose for params in params_lists for loose in params.loose_params
    ]:
        throw_on_loose_param(loose_params[0], ctx)
    if shaped_params := [
        shaped for params in params_lists for shaped in params.shaped_params
    ]:
        throw_on_shaped_param(shaped_params[0][0], shaped_params[0][1], ctx)

    casts = [
        cast for params in params_lists for cast in params.cast_params
    ]
    params = {}
    for cast, modaliases in casts:
        assert isinstance(cast.expr, qlast.Parameter)
        name = cast.expr.name
        # First cast wins; duplicates were consistency-checked elsewhere.
        if name in params:
            continue
        with ctx.new() as mctx:
            mctx.modaliases = modaliases
            target_stype = typegen.ql_typeexpr_to_type(cast.type, ctx=mctx)

        # for ObjectType parameters, we inject intermediate cast to uuid,
        # so parameter is uuid and then cast to ObjectType
        if target_stype.is_object_type():
            uuid_cast = qlast.TypeCast(
                type=qlast.TypeName(maintype=qlast.ObjectRef(name='uuid')),
                expr=cast.expr,
                cardinality_mod=cast.cardinality_mod,
            )
            cast.expr = uuid_cast
            cast = cast.expr

            # Re-resolve the target type for the injected inner cast.
            with ctx.new() as mctx:
                mctx.modaliases = modaliases
                target_stype = typegen.ql_typeexpr_to_type(cast.type, ctx=mctx)

        target_typeref = typegen.type_to_typeref(target_stype, env=ctx.env)

        required = cast.cardinality_mod != qlast.CardinalityModifier.Optional
        sub_params = tuple_args.create_sub_params(
            name, required, typeref=target_typeref, pt=target_stype, ctx=ctx)
        params[name] = irast.Param(
            name=name,
            required=required,
            schema_type=target_stype,
            ir_type=target_typeref,
            sub_params=sub_params,
        )

    if params:
        check_params(params)

        def _arg_key(k: tuple[str, object]) -> int:
            name = k[0]
            arg_prefix = '__edb_arg_'
            # Positional arguments should just be sorted numerically,
            # while for named arguments, injected args should be sorted and
            # need to come after normal ones. Normal named arguments can have
            # any order.
            if name.isdecimal():
                return int(name)
            elif name.startswith(arg_prefix):
                return int(k[0][len(arg_prefix):])
            else:
                return -1

        params = dict(sorted(params.items(), key=_arg_key))

    return irast.ScriptInfo(params=params, schema=ctx.env.schema)
Extract parameters from all statements in a script. Doing this in advance makes it easy to check that they have consistent types.
preprocess_script
python
geldata/gel
edb/edgeql/compiler/stmtctx.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/stmtctx.py
Apache-2.0
def needs_eta_expansion_expr(
    ir: irast.Expr,
    stype: s_types.Type,
    *,
    ctx: context.ContextLevel,
) -> bool:
    """Determine if an expr is in need of η-expansion

    In general, any expression of an object-containing tuple or array
    type needs expansion unless it is:
     * A tuple literal
     * An empty array literal
     * A one-element array literal
     * A call to array_agg
    in which none of the arguments are sets that need expansion.
    """

    # Look through a SELECT wrapper; clauses on it matter for the
    # contained set.
    if isinstance(ir, irast.SelectStmt):
        return needs_eta_expansion(
            ir.result, has_clauses=bool(ir.where or ir.orderby), ctx=ctx)

    if isinstance(stype, s_types.Array):
        if isinstance(ir, irast.Array):
            # Empty literal: no; one-element literal: recurse into the
            # element; anything longer: yes.
            return bool(ir.elements) and (
                len(ir.elements) != 1
                or needs_eta_expansion(ir.elements[0], ctx=ctx)
            )
        elif (
            isinstance(ir, irast.FunctionCall)
            and ir.func_shortname == sn.QualName('std', 'array_agg')
        ):
            return needs_eta_expansion(ir.args[0].expr, ctx=ctx)
        else:
            return True
    elif isinstance(stype, s_types.Tuple):
        if isinstance(ir, irast.Tuple):
            return any(
                needs_eta_expansion(el.val, ctx=ctx) for el in ir.elements
            )
        else:
            return True
    else:
        return False
Determine if an expr is in need of η-expansion In general, any expression of an object-containing tuple or array type needs expansion unless it is: * A tuple literal * An empty array literal * A one-element array literal * A call to array_agg in which none of the arguments are sets that need expansion.
needs_eta_expansion_expr
python
geldata/gel
edb/edgeql/compiler/eta_expand.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/eta_expand.py
Apache-2.0
def needs_eta_expansion(
    ir: irast.Set,
    *,
    has_clauses: bool = False,
    ctx: context.ContextLevel,
) -> bool:
    """Determine if a set is in need of η-expansion"""
    stype = setgen.get_set_type(ir, ctx=ctx)

    # Only object-containing arrays/tuples ever need expansion.
    if not (
        isinstance(stype, (s_types.Array, s_types.Tuple))
        and stype.contains_object(ctx.env.schema)
    ):
        return False

    if ALWAYS_EXPAND:
        return True

    # Object containing arrays always need to be eta expanded if they
    # might be processed by a clause. This is because the pgsql side
    # will produce *either* a value or serialized for array_agg/array
    # literals.
    if has_clauses and (
        (subarray := stype.find_array(ctx.env.schema))
        and subarray.contains_object(ctx.env.schema)
    ):
        return True

    # If we are directly projecting an element out of a tuple, we can just
    # look through to the relevant tuple element. This is probably not
    # an important optimization to support, but our expansion can generate
    # this idiom, so on principle I wanted to support it.
    if (
        isinstance(ir.expr, irast.TupleIndirectionPointer)
        and isinstance(ir.expr.source.expr, irast.Tuple)
    ):
        name = ir.expr.ptrref.shortname.name
        els = [x for x in ir.expr.source.expr.elements if x.name == name]
        if len(els) == 1:
            return needs_eta_expansion(els[0].val, ctx=ctx)

    # Bare references and non-SELECT bindings are opaque; assume they
    # need expansion.
    if not ir.expr or (
        ir.is_binding and ir.is_binding != irast.BindingKind.Select
    ):
        return True

    return needs_eta_expansion_expr(ir.expr, stype, ctx=ctx)
Determine if a set is in need of η-expansion
needs_eta_expansion
python
geldata/gel
edb/edgeql/compiler/eta_expand.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/eta_expand.py
Apache-2.0
def eta_expand_ir(
    ir: irast.Set,
    *,
    toplevel: bool=False,
    ctx: context.ContextLevel,
) -> irast.Set:
    """η-expansion of an IR set.

    Our core implementation of η-expansion operates on an AST, so this
    mostly just checks that we really want to expand and then sets up
    an anchor for the AST based implementation to run on.
    """
    # Compilation modes in which expansion is not wanted.
    if (
        ctx.env.options.schema_object_context
        or ctx.env.options.func_params
        or ctx.env.options.schema_view_mode
    ):
        return ir

    if not needs_eta_expansion(ir, ctx=ctx):
        return ir

    with ctx.new() as subctx:
        subctx.allow_factoring()

        # Anchor the already-compiled IR so the AST-level expander can
        # refer to it.
        subctx.anchors = subctx.anchors.copy()
        source_ref = subctx.create_anchor(ir)

        alias, path = _get_alias('eta', ctx=subctx)
        qry = qlast.SelectQuery(
            result=eta_expand_ordered(
                path, setgen.get_set_type(ir, ctx=subctx), ctx=subctx
            ),
            aliases=[
                qlast.AliasedExpr(alias=alias, expr=source_ref)
            ],
        )
        if toplevel:
            subctx.toplevel_stmt = None
        return dispatch.compile(qry, ctx=subctx)
η-expansion of an IR set. Our core implementation of η-expansion operates on an AST, so this mostly just checks that we really want to expand and then sets up an anchor for the AST based implementation to run on.
eta_expand_ir
python
geldata/gel
edb/edgeql/compiler/eta_expand.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/eta_expand.py
Apache-2.0
def eta_expand_ordered(
    expr: qlast.Expr,
    stype: s_types.Type,
    *,
    ctx: context.ContextLevel,
) -> qlast.Expr:
    """Do an order-preserving η-expansion

    Unlike in the lambda calculus, edgeql is a set-based language
    with a notion of ordering, which we need to preserve.
    We do this by using enumerate and ORDER BY on it:
        EXPAND_ORDERED(t, e) =
            WITH enum := enumerate(e)
            SELECT EXPAND(t, enum.1) ORDER BY enum.0
    """
    enum_call = qlast.FunctionCall(
        func=('__std__', 'enumerate'), args=[expr]
    )
    enum_alias, enum_path = _get_alias('enum', ctx=ctx)

    # Expand the payload (element 1) and sort by the index (element 0).
    payload = eta_expand(
        astutils.extend_path(enum_path, '1'), stype, ctx=ctx)
    sort_key = astutils.extend_path(enum_path, '0')

    return qlast.SelectQuery(
        result=payload,
        orderby=[qlast.SortExpr(path=sort_key)],
        aliases=[qlast.AliasedExpr(alias=enum_alias, expr=enum_call)],
    )
Do an order-preserving η-expansion Unlike in the lambda calculus, edgeql is a set-based language with a notion of ordering, which we need to preserve. We do this by using enumerate and ORDER BY on it: EXPAND_ORDERED(t, e) = WITH enum := enumerate(e) SELECT EXPAND(t, enum.1) ORDER BY enum.0
eta_expand_ordered
python
geldata/gel
edb/edgeql/compiler/eta_expand.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/eta_expand.py
Apache-2.0
def eta_expand(
    path: qlast.Path,
    stype: s_types.Type,
    *,
    ctx: context.ContextLevel,
) -> qlast.Expr:
    """η-expansion of an AST path"""
    if not ALWAYS_EXPAND and not stype.contains_object(ctx.env.schema):
        # This isn't strictly right from a "fully η expanding"
        # perspective, but for our uses we only need to make sure that
        # objects are exposed to the output, so anything that contains
        # no object can pass through untouched.
        return path

    if isinstance(stype, s_types.Tuple):
        return eta_expand_tuple(path, stype, ctx=ctx)

    if isinstance(stype, s_types.Array):
        return eta_expand_array(path, stype, ctx=ctx)

    return path
η-expansion of an AST path
eta_expand
python
geldata/gel
edb/edgeql/compiler/eta_expand.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/eta_expand.py
Apache-2.0
def eta_expand_tuple(
    path: qlast.Path,
    stype: s_types.Tuple,
    *,
    ctx: context.ContextLevel,
) -> qlast.Expr:
    """η-expansion of tuples

    η-expansion of tuple types is straightforward and traditional:
        EXPAND(tuple<t, s>, p) = (EXPAND(t, p.0), EXPAND(s, p.1))
    is the case for pairs. n-ary and named cases are generalized in the
    obvious way.
    The one exception is that the expansion of the empty tuple type is
    `p` and not `()`, to ensure that the path appears in the output.
    """
    if not stype.get_subtypes(ctx.env.schema):
        # Empty tuple: yield the path itself so it still shows up
        # in the output.
        return path

    expanded_elements = []
    for field, field_type in stype.iter_subtypes(ctx.env.schema):
        field_path = astutils.extend_path(path, field)
        expanded_elements.append(
            qlast.TupleElement(
                name=qlast.Ptr(name=field),
                val=eta_expand(field_path, field_type, ctx=ctx),
            )
        )

    if stype.is_named(ctx.env.schema):
        return qlast.NamedTuple(elements=expanded_elements)
    else:
        return qlast.Tuple(
            elements=[element.val for element in expanded_elements])
η-expansion of tuples η-expansion of tuple types is straightforward and traditional: EXPAND(tuple<t, s>, p) = (EXPAND(t, p.0), EXPAND(s, p.1)) is the case for pairs. n-ary and named cases are generalized in the obvious way. The one exception is that the expansion of the empty tuple type is `p` and not `()`, to ensure that the path appears in the output.
eta_expand_tuple
python
geldata/gel
edb/edgeql/compiler/eta_expand.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/eta_expand.py
Apache-2.0
def eta_expand_array(
    path: qlast.Path,
    stype: s_types.Array,
    *,
    ctx: context.ContextLevel,
) -> qlast.Expr:
    """η-expansion of arrays

    η-expansion of array types is is a little peculiar to edgeql and less
    grounded in typed lambda calculi:
        EXPAND(array<t>, p) =
            (p, array_agg(EXPAND_ORDERED(t, array_unpack(p)))).1

    We use a similar approach for compiling casts.

    The tuple projection trick serves to make sure that we iterate over
    `p` *outside* of the array_agg (or else all the arrays would get
    aggregated together) as well as ensuring that `p` appears in the
    expansion in a non-fenced position (or else sorting it from outside
    wouldn't work). (If it wasn't for the latter requirement, we could
    just use a FOR. I find it a little unsatisfying that our η-expansion
    needs to use this trick, and the pgsql compiler needed to be hacked
    to make it work.)
    """
    element_type = stype.get_element_type(ctx.env.schema)

    unpack_call = qlast.FunctionCall(
        func=('__std__', 'array_unpack'), args=[path]
    )
    expanded_elements = eta_expand_ordered(
        unpack_call, element_type, ctx=ctx)
    reassembled = qlast.FunctionCall(
        func=('__std__', 'array_agg'), args=[expanded_elements]
    )

    # Pair the original path with the rebuilt array and project out
    # the rebuilt half.
    pair = qlast.Tuple(elements=[path, reassembled])
    return astutils.extend_path(pair, '1')
η-expansion of arrays η-expansion of array types is is a little peculiar to edgeql and less grounded in typed lambda calculi: EXPAND(array<t>, p) = (p, array_agg(EXPAND_ORDERED(t, array_unpack(p)))).1 We use a similar approach for compiling casts. The tuple projection trick serves to make sure that we iterate over `p` *outside* of the array_agg (or else all the arrays would get aggregated together) as well as ensuring that `p` appears in the expansion in a non-fenced position (or else sorting it from outside wouldn't work). (If it wasn't for the latter requirement, we could just use a FOR. I find it a little unsatisfying that our η-expansion needs to use this trick, and the pgsql compiler needed to be hacked to make it work.)
eta_expand_array
python
geldata/gel
edb/edgeql/compiler/eta_expand.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/eta_expand.py
Apache-2.0
def _describe_config(
    schema: s_schema.Schema,
    scope: qltypes.ConfigScope,
    testmode: bool,
) -> str:
    """Generate an EdgeQL query to render config as DDL."""

    if scope is qltypes.ConfigScope.INSTANCE:
        source = 'system override'
        config_object_name = 'cfg::InstanceConfig'
    elif scope is qltypes.ConfigScope.DATABASE:
        source = 'database'
        config_object_name = 'cfg::DatabaseConfig'
    else:
        raise AssertionError(f'unexpected configuration source: {scope!r}')

    cfg = schema.get(config_object_name, type=s_objtypes.ObjectType)

    # Render the base config object first, then every extension config.
    items = []
    items.extend(_describe_config_inner(
        schema, scope, config_object_name, cfg, testmode
    ))
    ext = schema.get('cfg::ExtensionConfig', type=s_objtypes.ObjectType)
    for ext_cfg in sorted(
        ext.descendants(schema),
        key=lambda x: x.get_name(schema)
    ):
        items.extend(_describe_config_inner(
            schema, scope, config_object_name, ext_cfg, testmode
        ))

    # Expression that reads the __internal_testmode flag at query time.
    testmode_check = (
        "<bool>json_get(cfg::get_config_json(),'__internal_testmode','value')"
        " ?? false"
    )
    query = (
        "assert_exists(assert_single(("
        + f"FOR conf IN {{cfg::get_config_json(sources := [{ql(source)}])}} "
        + "UNION (\n"
        + (f"FOR testmode IN {{{testmode_check}}} UNION (\n"
           if testmode else "")
        + "SELECT array_join([" + ', '.join(items) + "], '')"
        + (")" if testmode else "")
        + ")"
        + ")))"
    )
    return query
Generate an EdgeQL query to render config as DDL.
_describe_config
python
geldata/gel
edb/edgeql/compiler/config_desc.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/config_desc.py
Apache-2.0
def _describe_config_inner(
    schema: s_schema.Schema,
    scope: qltypes.ConfigScope,
    config_object_name: str,
    cfg: s_objtypes.ObjectType,
    testmode: bool,
) -> list[str]:
    """Generate an EdgeQL query to render config as DDL."""
    actual_name = str(cfg.get_name(schema))
    # For extension configs, access goes through .extensions[is ...].
    cast = (
        f'.extensions[is {actual_name}]'
        if actual_name != config_object_name
        else ''
    )

    items = []
    for ptr_name, p in sorted(
        cfg.get_pointers(schema).items(schema),
        key=lambda x: x[0],
    ):
        pn = str(ptr_name)
        if (
            pn == 'id'
            or p.get_computable(schema)
            or p.get_protected(schema)
        ):
            continue

        is_internal = (
            p.get_annotation(
                schema,
                s_name.QualName('cfg', 'internal')
            ) == 'true'
        )
        # Internal settings are only rendered in testmode.
        if is_internal and not testmode:
            continue

        ptype = p.get_target(schema)
        assert ptype is not None

        # Skip backlinks to the base object. The will get plenty of
        # special treatment.
        if str(ptype.get_name(schema)) == 'cfg::AbstractConfig':
            continue

        ptr_card = p.get_cardinality(schema)
        mult = ptr_card.is_multi()
        psource = f'{config_object_name}{cast}.{qlquote.quote_ident(pn)}'

        if isinstance(ptype, s_objtypes.ObjectType):
            item = textwrap.indent(
                _render_config_object(
                    schema=schema,
                    valtype=ptype,
                    value_expr=psource,
                    scope=scope,
                    join_term='',
                    level=1,
                ),
                ' ' * 4,
            )
        else:
            fn = (
                pn
                if actual_name == config_object_name
                else f'{actual_name}::{pn}'
            )
            # Secret settings render redacted; multi settings as a set.
            renderer = (
                _render_config_redacted if p.get_secret(schema)
                else _render_config_set if mult
                else _render_config_scalar
            )
            item = textwrap.indent(
                renderer(
                    schema=schema,
                    valtype=ptype,
                    value_expr=psource,
                    name=fn,
                    scope=scope,
                    level=1,
                ),
                ' ' * 4,
            )

        fpn = f'{actual_name}::{pn}' if cast else pn

        condition = f'EXISTS json_get(conf, {ql(fpn)})'
        if is_internal:
            condition = f'({condition}) AND testmode'

        # For INSTANCE, filter out configs that are set to the default.
        # This is because we currently implement the defaults by
        # setting them with CONFIGURE INSTANCE, so we can't detect
        # defaults by seeing what is unset.
        if (
            scope == qltypes.ConfigScope.INSTANCE
            and (default := p.get_default(schema))
        ):
            condition = f'({condition}) AND {psource} ?!= ({default.text})'

        items.append(f"(\n{item}\n IF {condition} ELSE ''\n )")

    return items
Generate an EdgeQL query to render config as DDL.
_describe_config_inner
python
geldata/gel
edb/edgeql/compiler/config_desc.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/config_desc.py
Apache-2.0
def _get_concrete_scalar_base(
    stype: s_types.Type, ctx: context.ContextLevel
) -> Optional[s_types.Type]:
    """Returns None if stype is not scalar or if it is already topmost"""
    schema = ctx.env.schema

    # Enums are based on std::str for this purpose.
    if stype.is_enum(schema):
        return ctx.env.get_schema_type_and_track(sn.QualName('std', 'str'))

    if not isinstance(stype, s_scalars.ScalarType):
        return None

    topmost = stype.maybe_get_topmost_concrete_base(schema)
    if topmost and topmost != stype:
        return topmost

    return None
Returns None if stype is not scalar or if it is already topmost
_get_concrete_scalar_base
python
geldata/gel
edb/edgeql/compiler/casts.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/casts.py
Apache-2.0
def _cast_enum_str_immutable(
    ir_expr: Union[irast.Set, irast.Expr],
    orig_stype: s_types.Type,
    new_stype: s_types.Type,
    *,
    ctx: context.ContextLevel,
) -> irast.Set:
    """
    Compiles cast between an enum and std::str under the assumption that
    this expression must be immutable.
    """

    # Work out which side is the enum and the matching suffix of the
    # synthetic cast function.
    if new_stype.is_enum(ctx.env.schema):
        enum_stype, suffix = new_stype, "_from_str"
    else:
        enum_stype, suffix = orig_stype, "_into_str"

    enum_name = cast(
        s_name.QualName, enum_stype.get_name(ctx.env.schema))
    cast_name = s_name.QualName(
        module=enum_name.module,
        name=str(enum_stype.id) + suffix,
    )

    cast_ir = irast.TypeCast(
        expr=setgen.ensure_set(ir_expr, ctx=ctx),
        from_type=typegen.type_to_typeref(orig_stype, env=ctx.env),
        to_type=typegen.type_to_typeref(new_stype, env=ctx.env),
        cardinality_mod=None,
        cast_name=cast_name,
        sql_function=None,
        sql_cast=False,
        sql_expr=True,
        error_message_context=cast_message_context(ctx),
    )
    return setgen.ensure_set(cast_ir, ctx=ctx)
Compiles cast between an enum and std::str under the assumption that this expression must be immutable.
_cast_enum_str_immutable
python
geldata/gel
edb/edgeql/compiler/casts.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/casts.py
Apache-2.0
def find_parameters(
    ql: qlast.Base, modaliases: Dict[Optional[str], str]
) -> Params:
    """Get all query parameters"""
    # Walk the whole AST, collecting parameter references as we go.
    visitor = FindParams(modaliases)
    visitor.visit(ql)
    return visitor.params
Get all query parameters
find_parameters
python
geldata/gel
edb/edgeql/compiler/astutils.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/astutils.py
Apache-2.0
def contains_dml( ql_expr: qlast.Base, *, ctx: context.ContextLevel ) -> bool: """Check whether a expression contains any DML in a subtree.""" # If this ends up being a perf problem, we can use a visitor # directly and cache. dml_types = (qlast.InsertQuery, qlast.UpdateQuery, qlast.DeleteQuery) if isinstance(ql_expr, dml_types): return True res = ast.find_children( ql_expr, qlast.Base, lambda x: ( isinstance(x, dml_types) or (isinstance(x, qlast.IRAnchor) and x.has_dml) or ( isinstance(x, qlast.FunctionCall) and any( ( func.get_volatility(ctx.env.schema) == qltypes.Volatility.Modifying ) for func in _get_functions_from_call(x, ctx=ctx) ) ) ), terminate_early=True, ) return bool(res)
Check whether a expression contains any DML in a subtree.
contains_dml
python
geldata/gel
edb/edgeql/compiler/astutils.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/astutils.py
Apache-2.0
def renormalize_compat( norm_qltree: qlast.Base_T, orig_text: str, *, schema: s_schema.Schema, localnames: AbstractSet[str] = frozenset(), ) -> qlast.Base_T: """Renormalize an expression normalized with imprint_expr_context(). This helper takes the original, unmangled expression, an EdgeQL AST tree of the same expression mangled with `imprint_expr_context()` (which injects extra WITH MODULE clauses), and produces a normalized expression with explicitly qualified identifiers instead. Old dumps are the main user of this facility. """ orig_qltree = qlparser.parse_fragment(orig_text) norm_aliases: Dict[Optional[str], str] = {} assert isinstance(norm_qltree, ( qlast.Query, qlast.Command, qlast.DDLCommand )) for alias in (norm_qltree.aliases or ()): if isinstance(alias, qlast.ModuleAliasDecl): norm_aliases[alias.alias] = alias.module if isinstance(orig_qltree, ( qlast.Query, qlast.Command, qlast.DDLCommand )): orig_aliases: Dict[Optional[str], str] = {} for alias in (orig_qltree.aliases or ()): if isinstance(alias, qlast.ModuleAliasDecl): orig_aliases[alias.alias] = alias.module modaliases = { k: v for k, v in norm_aliases.items() if k not in orig_aliases } else: modaliases = norm_aliases normalize( orig_qltree, schema=schema, modaliases=modaliases, localnames=localnames, ) assert isinstance(orig_qltree, type(norm_qltree)) return cast(qlast.Base_T, orig_qltree)
Renormalize an expression normalized with imprint_expr_context(). This helper takes the original, unmangled expression, an EdgeQL AST tree of the same expression mangled with `imprint_expr_context()` (which injects extra WITH MODULE clauses), and produces a normalized expression with explicitly qualified identifiers instead. Old dumps are the main user of this facility.
renormalize_compat
python
geldata/gel
edb/edgeql/compiler/normalization.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/normalization.py
Apache-2.0
def _move_fenced_anchor(ir: irast.Set, *, ctx: context.ContextLevel) -> None: """Move the scope tree of a fenced anchor to its use site. For technical reasons, in _compile_dml_coalesce and _compile_dml_ifelse, we compile the expressions normally and then extract the subcomponents and use them as anchors inside a desugared expression. Because of this, the resultant scope tree does not have the right shape, since the scope trees of the fenced subexpressions aren't nested inside the trees of the FOR loops. This results in subtle problems in backend compilation, since the loop iterators are not visible to the loop bodies. Fix the trees by finding the scope tree associated with a set used as an anchor, finding where that anchor was used, and moving the scope tree there. """ match ir.expr: case irast.SelectStmt(result=irast.SetE(path_scope_id=int(id))): node = next(iter( x for x in ctx.path_scope.root.descendants if x.unique_id == id )) target = ctx.path_scope.find_descendant(ir.path_id) assert target and target.parent node.remove() target.parent.attach_child(node)
Move the scope tree of a fenced anchor to its use site. For technical reasons, in _compile_dml_coalesce and _compile_dml_ifelse, we compile the expressions normally and then extract the subcomponents and use them as anchors inside a desugared expression. Because of this, the resultant scope tree does not have the right shape, since the scope trees of the fenced subexpressions aren't nested inside the trees of the FOR loops. This results in subtle problems in backend compilation, since the loop iterators are not visible to the loop bodies. Fix the trees by finding the scope tree associated with a set used as an anchor, finding where that anchor was used, and moving the scope tree there.
_move_fenced_anchor
python
geldata/gel
edb/edgeql/compiler/expr.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/expr.py
Apache-2.0
def _compile_dml_coalesce( expr: qlast.BinOp, *, ctx: context.ContextLevel ) -> irast.Set: """Transform a coalesce that contains DML into FOR loops The basic approach is to extract the pieces from the ?? and rewrite them into: for optional x in (LHS) union ( { x, (for _ in (select () filter not exists x) union (RHS)), } ) Optional for is needed because the LHS needs to be bound in a for in order to get put in a CTE and only executed once, but the RHS needs to be dependent on the LHS being empty. """ with ctx.newscope(fenced=False) as subctx: # We have to compile it under a factoring fence to prevent # correlation with outside things. We can't just rely on the # factoring fences inserted when compiling the FORs, since we # are going to need to explicitly exempt the iterator # expression from that. subctx.path_scope.factoring_fence = True subctx.path_scope.factoring_allowlist.update(ctx.iterator_path_ids) ir = func.compile_operator( expr, op_name=expr.op, qlargs=[expr.left, expr.right], ctx=subctx) # Extract the IR parts from the ?? # Note that lhs_ir will be unfenced while rhs_ir # will have been compiled under fences. 
match ir.expr: case irast.OperatorCall(args={ 0: irast.CallArg(expr=lhs_ir), 1: irast.CallArg(expr=rhs_ir), }): pass case _: raise AssertionError('malformed DML ??') subctx.anchors = subctx.anchors.copy() alias = ctx.aliases.get('_coalesce_x') cond_path = qlast.Path( steps=[qlast.ObjectRef(name=alias)], ) rhs_b = qlast.ForQuery( iterator_alias=ctx.aliases.get('_coalesce_dummy'), iterator=qlast.SelectQuery( result=qlast.Tuple(elements=[]), where=qlast.UnaryOp( op='NOT', operand=qlast.UnaryOp(op='EXISTS', operand=cond_path), ), ), result=subctx.create_anchor(rhs_ir, check_dml=True), ) full = qlast.ForQuery( iterator_alias=alias, iterator=subctx.create_anchor(lhs_ir, 'b'), result=qlast.Set(elements=[cond_path, rhs_b]), optional=True, from_desugaring=True, ) subctx.iterator_path_ids |= {lhs_ir.path_id} res = dispatch.compile(full, ctx=subctx) # Indicate that the original ?? code should determine the # cardinality/multiplicity. assert isinstance(res.expr, irast.SelectStmt) res.expr.card_inference_override = ir _move_fenced_anchor(rhs_ir, ctx=subctx) return res
Transform a coalesce that contains DML into FOR loops The basic approach is to extract the pieces from the ?? and rewrite them into: for optional x in (LHS) union ( { x, (for _ in (select () filter not exists x) union (RHS)), } ) Optional for is needed because the LHS needs to be bound in a for in order to get put in a CTE and only executed once, but the RHS needs to be dependent on the LHS being empty.
_compile_dml_coalesce
python
geldata/gel
edb/edgeql/compiler/expr.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/expr.py
Apache-2.0
def _compile_dml_ifelse( expr: qlast.IfElse, *, ctx: context.ContextLevel ) -> irast.Set: """Transform an IF/ELSE that contains DML into FOR loops The basic approach is to extract the pieces from the if/then/else and rewrite them into: for b in COND union ( { (for _ in (select () filter b) union (IF_BRANCH)), (for _ in (select () filter not b) union (ELSE_BRANCH)), } ) """ with ctx.newscope(fenced=False) as subctx: # We have to compile it under a factoring fence to prevent # correlation with outside things. We can't just rely on the # factoring fences inserted when compiling the FORs, since we # are going to need to explicitly exempt the iterator # expression from that. subctx.path_scope.factoring_fence = True subctx.path_scope.factoring_allowlist.update(ctx.iterator_path_ids) ir = func.compile_operator( expr, op_name='std::IF', qlargs=[expr.if_expr, expr.condition, expr.else_expr], ctx=subctx) # Extract the IR parts from the IF/THEN/ELSE # Note that cond_ir will be unfenced while if_ir and else_ir # will have been compiled under fences. 
match ir.expr: case irast.OperatorCall(args={ 0: irast.CallArg(expr=if_ir), 1: irast.CallArg(expr=cond_ir), 2: irast.CallArg(expr=else_ir), }): pass case _: raise AssertionError('malformed DML IF/ELSE') subctx.anchors = subctx.anchors.copy() alias = ctx.aliases.get('_ifelse_b') cond_path = qlast.Path( steps=[qlast.ObjectRef(name=alias)], ) els: list[qlast.Expr] = [] if not isinstance(irutils.unwrap_set(if_ir).expr, irast.EmptySet): if_b = qlast.ForQuery( iterator_alias=ctx.aliases.get('_ifelse_true_dummy'), iterator=qlast.SelectQuery( result=qlast.Tuple(elements=[]), where=cond_path, ), result=subctx.create_anchor(if_ir, check_dml=True), ) els.append(if_b) if not isinstance(irutils.unwrap_set(else_ir).expr, irast.EmptySet): else_b = qlast.ForQuery( iterator_alias=ctx.aliases.get('_ifelse_false_dummy'), iterator=qlast.SelectQuery( result=qlast.Tuple(elements=[]), where=qlast.UnaryOp(op='NOT', operand=cond_path), ), result=subctx.create_anchor(else_ir, check_dml=True), ) els.append(else_b) # If we are warning on factoring, double wrap it. if ctx.warn_factoring: cond_ir = setgen.ensure_set( setgen.ensure_stmt(cond_ir, ctx=ctx), ctx=ctx) full = qlast.ForQuery( iterator_alias=alias, iterator=subctx.create_anchor(cond_ir, 'b'), result=qlast.Set(elements=els) if len(els) != 1 else els[0], ) subctx.iterator_path_ids |= {cond_ir.path_id} res = dispatch.compile(full, ctx=subctx) # Indicate that the original IF/ELSE code should determine the # cardinality/multiplicity. assert isinstance(res.expr, irast.SelectStmt) res.expr.card_inference_override = ir _move_fenced_anchor(if_ir, ctx=subctx) _move_fenced_anchor(else_ir, ctx=subctx) return res
Transform an IF/ELSE that contains DML into FOR loops The basic approach is to extract the pieces from the if/then/else and rewrite them into: for b in COND union ( { (for _ in (select () filter b) union (IF_BRANCH)), (for _ in (select () filter not b) union (ELSE_BRANCH)), } )
_compile_dml_ifelse
python
geldata/gel
edb/edgeql/compiler/expr.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/expr.py
Apache-2.0
def _compile_conflict_select_for_obj_type( stmt: irast.MutatingStmt, subject_typ: s_objtypes.ObjectType, *, for_inheritance: bool, fake_dml_set: Optional[irast.Set], obj_constrs: Sequence[s_constr.Constraint], constrs: Dict[str, Tuple[s_pointers.Pointer, List[s_constr.Constraint]]], span: Optional[irast.Span], ctx: context.ContextLevel, ) -> tuple[Optional[qlast.Expr], bool]: """Synthesize a select of conflicting objects ... for a single object type. This gets called once for each ancestor type that provides constraints to the type being inserted. `cnstrs` contains the constraints to consider. """ # Find which pointers we need to grab needed_ptrs, ptr_anchors = _get_needed_ptrs( subject_typ, obj_constrs, constrs.keys(), ctx=ctx ) # Check that no pointers in constraints are rewritten for p in needed_ptrs: ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(p)) rewrite_kind = ( qltypes.RewriteKind.Insert if isinstance(stmt, irast.InsertStmt) else qltypes.RewriteKind.Update if isinstance(stmt, irast.UpdateStmt) else None ) if rewrite_kind: rewrite = ptr.get_rewrite(ctx.env.schema, rewrite_kind) if rewrite: raise errors.UnsupportedFeatureError( "INSERT UNLESS CONFLICT cannot be used on properties or " "links that have a rewrite rule specified", span=span, ) ctx.anchors = ctx.anchors.copy() # If we are given a fake_dml_set to directly represent the result # of our DML, use that instead of populating the result. 
if fake_dml_set: for p in needed_ptrs | {'id'}: ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(p)) val = setgen.extend_path(fake_dml_set, ptr, ctx=ctx) ptr_anchors[p] = ctx.create_anchor(val, p) # Find the IR corresponding to the fields we care about and # produce anchors for them ptrs_in_shape = set() for elem, _ in stmt.subject.shape: rptr = elem.expr name = rptr.ptrref.shortname.name ptrs_in_shape.add(name) if name in needed_ptrs and name not in ptr_anchors: assert rptr.expr # We don't properly support hoisting volatile properties out of # UNLESS CONFLICT, so disallow it. We *do* support handling DML # there, since that gets hoisted into CTEs via its own mechanism. # See issue #1699. if inference.infer_volatility( rptr.expr, ctx.env, exclude_dml=True ).is_volatile(): if for_inheritance: error = ( 'INSERT does not support volatile properties with ' 'exclusive constraints when another statement in ' 'the same query modifies a related type' ) else: error = ( 'INSERT UNLESS CONFLICT ON does not support volatile ' 'properties' ) raise errors.UnsupportedFeatureError( error, span=span ) # We want to use the same path_scope_id as the original elem_set = setgen.ensure_set(rptr.expr, ctx=ctx) elem_set.path_scope_id = elem.path_scope_id # FIXME: The wrong thing will definitely happen if there are # volatile entries here ptr_anchors[name] = ctx.create_anchor(elem_set, name) if for_inheritance and not ptrs_in_shape: return None, False # Fill in empty sets for pointers that are needed but not present present_ptrs = set(ptr_anchors) for p in (needed_ptrs - present_ptrs): ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(p)) typ = ptr.get_target(ctx.env.schema) assert typ ptr_anchors[p] = qlast.TypeCast( expr=qlast.Set(elements=[]), type=typegen.type_to_ql_typeref(typ, ctx=ctx)) if not ptr_anchors: raise errors.QueryError( 'INSERT UNLESS CONFLICT property requires matching shape', span=span, ) conds: List[qlast.Expr] = [] for ptrname, (ptr, ptr_cnstrs) in 
constrs.items(): if ptrname not in present_ptrs: continue anchor = qlutils.subject_paths_substitute( ptr_anchors[ptrname], ptr_anchors) ptr_val = qlast.Path(partial=True, steps=[ qlast.Ptr(name=ptrname) ]) ptr, ptr_cnstrs = constrs[ptrname] ptr_card = ptr.get_cardinality(ctx.env.schema) for cnstr in ptr_cnstrs: lhs: qlast.Expr = anchor rhs: qlast.Expr = ptr_val # If there is a subjectexpr, substitute our lhs and rhs in # for __subject__ in the subjectexpr and compare *that* if (subjectexpr := cnstr.get_subjectexpr(ctx.env.schema)): assert isinstance(subjectexpr, s_expr.Expression) assert isinstance(subjectexpr.parse(), qlast.Expr) lhs = qlutils.subject_substitute(subjectexpr.parse(), lhs) rhs = qlutils.subject_substitute(subjectexpr.parse(), rhs) conds.append(qlast.BinOp( op='=' if ptr_card.is_single() else 'IN', left=lhs, right=rhs, )) # If the type we are looking at is BaseObject, then this must a # conflict check we are synthesizing for an explicit .id. We need # to ignore access policies in that case, since there is no # trigger to back us up. # (We can't insert directly into the abstract BaseObject, so this # is a safe assumption.) 
ignore_rewrites = ( str(subject_typ.get_name(ctx.env.schema)) == 'std::BaseObject') if ignore_rewrites: assert not obj_constrs assert len(constrs) == 1 and len(constrs['id'][1]) == 1 insert_subject = ctx.create_anchor(setgen.class_set( subject_typ, ignore_rewrites=ignore_rewrites, ctx=ctx )) for constr in obj_constrs: subject_expr: Optional[s_expr.Expression] = ( constr.get_subjectexpr(ctx.env.schema) ) assert subject_expr and isinstance(subject_expr.parse(), qlast.Expr) lhs = qlutils.subject_paths_substitute( subject_expr.parse(), ptr_anchors ) rhs = qlutils.subject_substitute( subject_expr.parse(), insert_subject ) op = qlast.BinOp(op='=', left=lhs, right=rhs) # If there is an except expr, we need to add in those checks also if except_expr := constr.get_except_expr(ctx.env.schema): assert isinstance(except_expr, s_expr.Expression) e_lhs = qlutils.subject_paths_substitute( except_expr.parse(), ptr_anchors) e_rhs = qlutils.subject_substitute( except_expr.parse(), insert_subject) true_ast = qlast.Constant.boolean(True) on = qlast.BinOp( op='AND', left=qlast.BinOp(op='?!=', left=e_lhs, right=true_ast), right=qlast.BinOp(op='?!=', left=e_rhs, right=true_ast), ) op = qlast.BinOp(op='AND', left=op, right=on) conds.append(op) if not conds: return None, False # We use `any` to compute the disjunction here because some might # be empty. 
if len(conds) == 1: cond = conds[0] else: cond = qlast.FunctionCall( func='any', args=[qlast.Set(elements=conds)], ) # For the result filtering we need to *ignore* the same object if fake_dml_set: anchor = qlutils.subject_paths_substitute( ptr_anchors['id'], ptr_anchors) ptr_val = qlast.Path(partial=True, steps=[qlast.Ptr(name='id')]) cond = qlast.BinOp( op='AND', left=cond, right=qlast.BinOp(op='!=', left=anchor, right=ptr_val), ) # Produce a query that finds the conflicting objects select_ast = qlast.DetachedExpr( expr=qlast.SelectQuery(result=insert_subject, where=cond) ) # If one of the pointers we care about is multi, then we have to always # use a conflict CTE check instead of trying to use a constraint. has_multi = False for ptrname in needed_ptrs: ptr = subject_typ.getptr(ctx.env.schema, s_name.UnqualName(ptrname)) if not ptr.get_cardinality(ctx.env.schema).is_single(): has_multi = True return select_ast, has_multi
Synthesize a select of conflicting objects ... for a single object type. This gets called once for each ancestor type that provides constraints to the type being inserted. `cnstrs` contains the constraints to consider.
_compile_conflict_select_for_obj_type
python
geldata/gel
edb/edgeql/compiler/conflicts.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/conflicts.py
Apache-2.0
def _compile_conflict_select( stmt: irast.MutatingStmt, subject_typ: s_objtypes.ObjectType, *, for_inheritance: bool=False, fake_dml_set: Optional[irast.Set]=None, obj_constrs: Sequence[s_constr.Constraint], constrs: PointerConstraintMap, span: Optional[irast.Span], ctx: context.ContextLevel, ) -> Tuple[irast.Set, bool, bool]: """Synthesize a select of conflicting objects This teases apart the constraints we care about based on which type they originate from, generates a SELECT for each type, and unions them together. `cnstrs` contains the constraints to consider. """ schema = ctx.env.schema if for_inheritance: type_maps = {subject_typ: (constrs, list(obj_constrs))} else: type_maps = _split_constraints(obj_constrs, constrs, ctx=ctx) always_check = False # Generate a separate query for each type from_parent = False frags = [] for a_obj, (a_constrs, a_obj_constrs) in type_maps.items(): frag, frag_always_check = _compile_conflict_select_for_obj_type( stmt, a_obj, obj_constrs=a_obj_constrs, constrs=a_constrs, for_inheritance=for_inheritance, fake_dml_set=fake_dml_set, span=span, ctx=ctx, ) always_check |= frag_always_check if frag: if a_obj != subject_typ: from_parent = True frags.append(frag) always_check |= from_parent or any( not child.is_view(schema) for child in subject_typ.children(schema) ) # Union them all together select_ast = qlast.Set(elements=frags) with ctx.new() as ectx: ectx.allow_factoring() ectx.implicit_limit = 0 ectx.allow_endpoint_linkprops = True select_ir = dispatch.compile(select_ast, ctx=ectx) select_ir = setgen.scoped_set( select_ir, force_reassign=True, ctx=ectx) assert isinstance(select_ir, irast.Set) # If we have an empty set, remake it with the right type if isinstance(select_ir.expr, irast.EmptySet): select_ir = setgen.new_empty_set(stype=subject_typ, ctx=ctx) return select_ir, always_check, from_parent
Synthesize a select of conflicting objects This teases apart the constraints we care about based on which type they originate from, generates a SELECT for each type, and unions them together. `cnstrs` contains the constraints to consider.
_compile_conflict_select
python
geldata/gel
edb/edgeql/compiler/conflicts.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/conflicts.py
Apache-2.0
def compile_insert_unless_conflict( stmt: irast.InsertStmt, typ: s_objtypes.ObjectType, *, ctx: context.ContextLevel, ) -> irast.OnConflictClause: """Compile an UNLESS CONFLICT clause with no ON This requires synthesizing a conditional based on all the exclusive constraints on the object. """ has_id_write = _has_explicit_id_write(stmt) pointers = _get_exclusive_ptr_constraints( typ, include_id=has_id_write, ctx=ctx) obj_constrs = typ.get_constraints(ctx.env.schema).objects(ctx.env.schema) select_ir, always_check, _ = _compile_conflict_select( stmt, typ, constrs=pointers, obj_constrs=obj_constrs, span=stmt.span, ctx=ctx) return irast.OnConflictClause( constraint=None, select_ir=select_ir, always_check=always_check, else_ir=None)
Compile an UNLESS CONFLICT clause with no ON This requires synthesizing a conditional based on all the exclusive constraints on the object.
compile_insert_unless_conflict
python
geldata/gel
edb/edgeql/compiler/conflicts.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/conflicts.py
Apache-2.0
def _compile_inheritance_conflict_selects( stmt: irast.MutatingStmt, conflict: irast.MutatingStmt, typ: s_objtypes.ObjectType, subject_type: s_objtypes.ObjectType, *, ctx: context.ContextLevel, ) -> List[irast.OnConflictClause]: """Compile the selects needed to resolve multiple DML to related types Generate a SELECT that finds all objects of type `typ` that conflict with the insert `stmt`. The backend will use this to explicitly check that no conflicts exist, and raise an error if they do. This is needed because we mostly use triggers to enforce these cross-type exclusive constraints, and they use a snapshot beginning at the start of the statement. """ _disallow_exclusive_linkprops(stmt, typ, ctx=ctx) has_id_write = _has_explicit_id_write(stmt) pointers = _get_exclusive_ptr_constraints( typ, include_id=has_id_write, ctx=ctx) exclusive = ctx.env.schema.get('std::exclusive', type=s_constr.Constraint) obj_constrs = [ constr for constr in typ.get_constraints(ctx.env.schema).objects(ctx.env.schema) if constr.issubclass(ctx.env.schema, exclusive) ] shape_ptrs = set() for elem, op in stmt.subject.shape: if op != qlast.ShapeOp.MATERIALIZE: shape_ptrs.add(elem.expr.ptrref.shortname.name) # This is a little silly, but for *this* we need to do one per # constraint (so that we can properly identify which constraint # failed in the error messages) entries: List[Tuple[s_constr.Constraint, ConstraintPair]] = [] for name, (ptr, ptr_constrs) in pointers.items(): for ptr_constr in ptr_constrs: # For updates, we only need to emit the check if we actually # modify a pointer used by the constraint. For inserts, though # everything must be in play, since constraints can depend on # nonexistence also. 
if ( _constr_matters(ptr_constr, ctx=ctx) and ( isinstance(stmt, irast.InsertStmt) or (_get_needed_ptrs(typ, (), [name], ctx)[0] & shape_ptrs) ) ): entries.append((ptr_constr, ({name: (ptr, [ptr_constr])}, []))) for obj_constr in obj_constrs: # See note above about needed ptrs check if ( _constr_matters(obj_constr, ctx=ctx) and ( isinstance(stmt, irast.InsertStmt) or (_get_needed_ptrs( typ, [obj_constr], (), ctx)[0] & shape_ptrs) ) ): entries.append((obj_constr, ({}, [obj_constr]))) # For updates, we need to pull from the actual result overlay, # since the final row can depend on things not in the query. fake_dml_set = None if isinstance(stmt, irast.UpdateStmt): fake_subject = qlast.DetachedExpr(expr=qlast.Path(steps=[ s_utils.name_to_ast_ref(subject_type.get_name(ctx.env.schema))])) fake_dml_set = dispatch.compile(fake_subject, ctx=ctx) clauses = [] for cnstr, (p, o) in entries: select_ir, _, _ = _compile_conflict_select( stmt, typ, for_inheritance=True, fake_dml_set=fake_dml_set, constrs=p, obj_constrs=o, span=stmt.span, ctx=ctx) if isinstance(select_ir.expr, irast.EmptySet): continue cnstr_ref = irast.ConstraintRef(id=cnstr.id) clauses.append( irast.OnConflictClause( constraint=cnstr_ref, select_ir=select_ir, always_check=False, else_ir=None, else_fail=conflict, update_query_set=fake_dml_set) ) return clauses
Compile the selects needed to resolve multiple DML to related types Generate a SELECT that finds all objects of type `typ` that conflict with the insert `stmt`. The backend will use this to explicitly check that no conflicts exist, and raise an error if they do. This is needed because we mostly use triggers to enforce these cross-type exclusive constraints, and they use a snapshot beginning at the start of the statement.
_compile_inheritance_conflict_selects
python
geldata/gel
edb/edgeql/compiler/conflicts.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/conflicts.py
Apache-2.0
def compile_pol( pol: s_policies.AccessPolicy, *, ctx: context.ContextLevel, ) -> irast.Set: """Compile the condition from an individual policy. A policy is evaluated in a context where it is allowed to access the *original subject type of the policy* and *all of its descendants*. Because it is based on the original source of the policy, we need to compile each policy separately. """ schema = ctx.env.schema expr_field: Optional[s_expr.Expression] = pol.get_expr(schema) if expr_field: expr = expr_field.parse() else: expr = qlast.Constant.boolean(True) if condition := pol.get_condition(schema): assert isinstance(condition, s_expr.Expression) expr = qlast.BinOp(op='AND', left=condition.parse(), right=expr) # Find all descendants of the original subject of the rule subject = pol.get_original_subject(schema) descs = {subject} | { desc for desc in subject.descendants(schema) if desc.is_material_object_type(schema) } # Compile it with all of the with ctx.detached() as dctx: dctx.schema_factoring() dctx.partial_path_prefix = ctx.partial_path_prefix dctx.expr_exposed = context.Exposure.UNEXPOSED dctx.suppress_rewrites = frozenset(descs) return dispatch.compile(expr, ctx=dctx)
Compile the condition from an individual policy. A policy is evaluated in a context where it is allowed to access the *original subject type of the policy* and *all of its descendants*. Because it is based on the original source of the policy, we need to compile each policy separately.
compile_pol
python
geldata/gel
edb/edgeql/compiler/policies.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/policies.py
Apache-2.0
def compile_dml_write_policies( stype: s_objtypes.ObjectType, result: irast.Set, mode: qltypes.AccessKind, *, ctx: context.ContextLevel, ) -> Optional[irast.WritePolicies]: """Compile policy filters and wrap them into irast.WritePolicies""" pols = get_access_policies(stype, ctx=ctx) if not pols: return None with ctx.detached() as _, _.newscope(fenced=True) as subctx: # TODO: can we make sure to always avoid generating needless # select filters _prepare_dml_policy_context(stype, result, ctx=subctx) schema = subctx.env.schema subctx.anchors = subctx.anchors.copy() policies = [] for pol in pols: if mode not in pol.get_access_kinds(schema): continue ir_set = compile_pol(pol, ctx=subctx) action = pol.get_action(schema) name = str(pol.get_shortname(schema)) policies.append( irast.WritePolicy( expr=ir_set, action=action, name=name, error_msg=pol.get_errmessage(schema), ) ) return irast.WritePolicies(policies=policies)
Compile policy filters and wrap them into irast.WritePolicies
compile_dml_write_policies
python
geldata/gel
edb/edgeql/compiler/policies.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/policies.py
Apache-2.0
def compile_dml_read_policies( stype: s_objtypes.ObjectType, result: irast.Set, mode: qltypes.AccessKind, *, ctx: context.ContextLevel, ) -> Optional[irast.ReadPolicyExpr]: """Compile a policy filter for a DML statement at a particular type""" if not get_access_policies(stype, ctx=ctx): return None with ctx.detached() as _, _.newscope(fenced=True) as subctx: # TODO: can we make sure to always avoid generating needless # select filters _prepare_dml_policy_context(stype, result, ctx=subctx) condition = get_rewrite_filter(stype, mode=mode, ctx=subctx) if not condition: return None return irast.ReadPolicyExpr( expr=setgen.scoped_set( dispatch.compile(condition, ctx=subctx), ctx=subctx ), )
Compile a policy filter for a DML statement at a particular type
compile_dml_read_policies
python
geldata/gel
edb/edgeql/compiler/policies.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/policies.py
Apache-2.0
def new_set( *, stype: s_types.Type, expr: irast.Expr, ctx: context.ContextLevel, ircls: Type[irast.Set] = irast.Set, **kwargs: Any, ) -> irast.Set: """Create a new ir.Set instance with given attributes. Absolutely all ir.Set instances must be created using this constructor. """ ignore_rewrites: bool = kwargs.get('ignore_rewrites', False) skip_subtypes = False if isinstance(expr, irast.TypeRoot): skip_subtypes = expr.skip_subtypes rw_key = (stype, skip_subtypes) if not ignore_rewrites and ctx.suppress_rewrites: from . import policies ignore_rewrites = kwargs['ignore_rewrites'] = ( policies.should_ignore_rewrite(stype, ctx=ctx)) if ( not ignore_rewrites and rw_key not in ctx.env.type_rewrites and isinstance(stype, s_objtypes.ObjectType) and ctx.env.options.apply_query_rewrites ): from . import policies policies.try_type_rewrite(stype, skip_subtypes=skip_subtypes, ctx=ctx) typeref = typegen.type_to_typeref(stype, env=ctx.env) ir_set = ircls(typeref=typeref, expr=expr, **kwargs) ctx.env.set_types[ir_set] = stype return ir_set
Create a new ir.Set instance with given attributes. Absolutely all ir.Set instances must be created using this constructor.
new_set
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def new_set_from_set( ir_set: irast.Set, *, merge_current_ns: bool=False, path_scope_id: Optional[int | KeepCurrentT]=KeepCurrent, path_id: Optional[irast.PathId]=None, stype: Optional[s_types.Type]=None, expr: irast.Expr | KeepCurrentT=KeepCurrent, span: Optional[qlast.Span]=None, is_binding: Optional[irast.BindingKind]=None, is_schema_alias: Optional[bool]=None, is_materialized_ref: Optional[bool]=None, is_visible_binding_ref: Optional[bool]=None, ignore_rewrites: Optional[bool]=None, ctx: context.ContextLevel) -> irast.Set: """Create a new ir.Set from another ir.Set. The new Set inherits source everything from the old set that is not overriden. If *merge_current_ns* is set, the new Set's path_id will be namespaced with the currently active scope namespace. """ if path_id is None: path_id = ir_set.path_id if merge_current_ns: path_id = path_id.merge_namespace(ctx.path_id_namespace) if stype is None: stype = get_set_type(ir_set, ctx=ctx) if path_scope_id == KeepCurrent: path_scope_id = ir_set.path_scope_id if expr == KeepCurrent: expr = ir_set.expr if span is None: span = ir_set.span if is_binding is None: is_binding = ir_set.is_binding if is_schema_alias is None: is_schema_alias = ir_set.is_schema_alias if is_materialized_ref is None: is_materialized_ref = ir_set.is_materialized_ref if is_visible_binding_ref is None: is_visible_binding_ref = ir_set.is_visible_binding_ref if ignore_rewrites is None: ignore_rewrites = ir_set.ignore_rewrites return new_set( path_id=path_id, path_scope_id=path_scope_id, stype=stype, expr=expr, span=span, is_binding=is_binding, is_schema_alias=is_schema_alias, is_materialized_ref=is_materialized_ref, is_visible_binding_ref=is_visible_binding_ref, ignore_rewrites=ignore_rewrites, ircls=type(ir_set), ctx=ctx, )
Create a new ir.Set from another ir.Set. The new Set inherits source everything from the old set that is not overriden. If *merge_current_ns* is set, the new Set's path_id will be namespaced with the currently active scope namespace.
new_set_from_set
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def compile_path(expr: qlast.Path, *, ctx: context.ContextLevel) -> irast.Set: """Create an ir.Set representing the given EdgeQL path expression.""" anchors = ctx.anchors if expr.partial: if ctx.partial_path_prefix is not None: path_tip = ctx.partial_path_prefix else: hint = None # If there are anchors, suggest one if anchors: anchor_names: list[str] = [ key if isinstance(key, str) else key.name for key in anchors ] import edb.edgeql.codegen suggestion = ( f'{anchor_names[0]}' f'{edb.edgeql.codegen.generate_source(expr)}' ) if len(anchor_names) == 1: hint = ( f'Did you mean {suggestion}?' ) else: hint = ( f'Did you mean to use one of: {anchor_names}? ' f'eg. {suggestion}' ) raise errors.QueryError( 'could not resolve partial path ', span=expr.span, hint=hint ) computables: list[irast.Set] = [] path_sets: list[irast.Set] = [] for i, step in enumerate(expr.steps): is_computable = False if isinstance(step, qlast.SpecialAnchor): path_tip = resolve_special_anchor(step, ctx=ctx) elif isinstance(step, qlast.IRAnchor): # Check if the starting path label is a known anchor refnode = anchors.get(step.name) if not refnode: raise AssertionError(f'anchor {step.name} is missing') path_tip = new_set_from_set(refnode, ctx=ctx) elif isinstance(step, qlast.ObjectRef): if i > 0: # pragma: no cover raise RuntimeError( 'unexpected ObjectRef as a non-first path item') refnode = None if ( not step.module and s_name.UnqualName(step.name) not in ctx.aliased_views ): # Check if the starting path label is a known anchor refnode = anchors.get(step.name) if refnode is not None: path_tip = new_set_from_set(refnode, ctx=ctx) else: (view_set, stype) = resolve_name(step, ctx=ctx) if (stype.is_enum(ctx.env.schema) and not stype.is_view(ctx.env.schema)): return compile_enum_path(expr, source=stype, ctx=ctx) if (stype.get_expr_type(ctx.env.schema) is not None and stype.get_name(ctx.env.schema) not in ctx.view_nodes): if not stype.get_expr(ctx.env.schema): raise errors.InvalidReferenceError( f"cannot 
refer to alias link helper type " f"'{stype.get_name(ctx.env.schema)}'", span=step.span, ) # This is a schema-level view, as opposed to # a WITH-block or inline alias view. stype = stmtctx.declare_view_from_schema(stype, ctx=ctx) if not view_set: view_set = ctx.view_sets.get(stype) if view_set is not None: view_scope_info = ctx.env.path_scope_map[view_set] path_tip = new_set_from_set( view_set, merge_current_ns=( view_scope_info.pinned_path_id_ns is None ), is_binding=view_scope_info.binding_kind, span=step.span, ctx=ctx, ) maybe_materialize(stype, path_tip, ctx=ctx) else: path_tip = class_set(stype, ctx=ctx) view_scls = ctx.class_view_overrides.get(stype.id) if (view_scls is not None and view_scls != get_set_type(path_tip, ctx=ctx)): path_tip = ensure_set( path_tip, type_override=view_scls, ctx=ctx) elif isinstance(step, qlast.Ptr): # Pointer traversal step ptr_expr = step if ptr_expr.direction is not None: direction = s_pointers.PointerDirection(ptr_expr.direction) else: direction = s_pointers.PointerDirection.Outbound ptr_name = ptr_expr.name source: s_obj.Object ptr: s_pointers.PointerLike if ptr_expr.type == 'property': # Link property reference; the source is the # link immediately preceding this step in the path. if isinstance(path_tip.expr, irast.Pointer): ptrref = path_tip.expr.ptrref fake_tip = path_tip elif ( path_tip.is_binding == irast.BindingKind.For and (new := irutils.unwrap_set(path_tip)) and isinstance(new.expr, irast.Pointer) ): # When accessing variables bound with FOR, allow # looking through to the underlying link. N.B: # This relies on the FOR bindings still having an # expr that lets us look at their # definition. Eventually I'd like to stop doing # that, and then we'll need to store it as part of # the binding/type metadata. 
ptrref = new.expr.ptrref fake_tip = new ind_prefix, _ = typegen.collapse_type_intersection_rptr( fake_tip, ctx=ctx, ) # Don't allow using the iterator to access # linkprops if the source of the link isn't # visible, because then there will be a semi-join # that prevents access to the props. (This is # pretty similar to how "changes the # interpretation" errors). assert isinstance(ind_prefix.expr, irast.Pointer) if not ctx.path_scope.is_visible( ind_prefix.expr.source.path_id ): # Better message raise errors.QueryError( 'improper reference to link property on ' 'a non-link object', span=step.span, ) else: raise errors.EdgeQLSyntaxError( f"unexpected reference to link property {ptr_name!r} " "outside of a path expression", span=ptr_expr.span, ) # The backend can't really handle @source/@target # outside of the singleton mode compiler, and they # aren't really particularly useful outside that # anyway, so disallow them. if ( ptr_expr.name in ('source', 'target') and not ctx.allow_endpoint_linkprops and ( ctx.env.options.schema_object_context not in (s_constr.Constraint, s_indexes.Index) ) ): raise errors.QueryError( f'@{ptr_expr.name} may only be used in index and ' 'constraint definitions', span=step.span) if isinstance( ptrref, irast.TypeIntersectionPointerRef ): ind_prefix, ptrs = typegen.collapse_type_intersection_rptr( fake_tip, ctx=ctx, ) assert isinstance(ind_prefix.expr, irast.Pointer) prefix_type = get_set_type(ind_prefix.expr.source, ctx=ctx) assert isinstance(prefix_type, s_objtypes.ObjectType) if not ptrs: tip_type = get_set_type(path_tip, ctx=ctx) s_vn = prefix_type.get_verbosename(ctx.env.schema) t_vn = tip_type.get_verbosename(ctx.env.schema) pn = ind_prefix.expr.ptrref.shortname.name if direction is s_pointers.PointerDirection.Inbound: s_vn, t_vn = t_vn, s_vn raise errors.InvalidReferenceError( f"property '{ptr_name}' does not exist because" f" there are no '{pn}' links between" f" {s_vn} and {t_vn}", span=ptr_expr.span, ) prefix_ptr_name = ( 
next(iter(ptrs)).get_local_name(ctx.env.schema)) ptr = schemactx.get_union_pointer( ptrname=prefix_ptr_name, source=prefix_type, direction=ind_prefix.expr.direction, components=ptrs, ctx=ctx, ) else: ptr = typegen.ptrcls_from_ptrref( ptrref, ctx=ctx) if isinstance(ptr, s_links.Link): source = ptr else: raise errors.QueryError( 'improper reference to link property on ' 'a non-link object', span=step.span, ) else: source = get_set_type(path_tip, ctx=ctx) # If this is followed by type intersections, collect # them up, since we need them in ptr_step_set. upcoming_intersections = [] for j in range(i + 1, len(expr.steps)): nstep = expr.steps[j] if (isinstance(nstep, qlast.TypeIntersection) and isinstance(nstep.type, qlast.TypeName)): upcoming_intersections.append( schemactx.get_schema_type( nstep.type.maintype, ctx=ctx)) else: break if isinstance(source, s_types.Tuple): path_tip = tuple_indirection_set( path_tip, source=source, ptr_name=ptr_name, span=step.span, ctx=ctx) else: path_tip = ptr_step_set( path_tip, expr=step, source=source, ptr_name=ptr_name, direction=direction, upcoming_intersections=upcoming_intersections, ignore_computable=True, span=step.span, ctx=ctx) assert isinstance(path_tip.expr, irast.Pointer) ptrcls = typegen.ptrcls_from_ptrref( path_tip.expr.ptrref, ctx=ctx) if _is_computable_ptr(ptrcls, direction, ctx=ctx): is_computable = True elif isinstance(step, qlast.TypeIntersection): arg_type = get_set_type(path_tip, ctx=ctx) if not isinstance(arg_type, s_objtypes.ObjectType): raise errors.QueryError( f'cannot apply type intersection operator to ' f'{arg_type.get_verbosename(ctx.env.schema)}: ' f'it is not an object type', span=step.span) typ: s_types.Type = typegen.ql_typeexpr_to_type(step.type, ctx=ctx) try: path_tip = type_intersection_set( path_tip, typ, optional=False, span=step.span, ctx=ctx) except errors.SchemaError as e: e.set_span(step.type.span) raise else: # Arbitrary expression if i > 0: # pragma: no cover raise RuntimeError( 'unexpected 
expression as a non-first path item') # We need to fence this if the head is a mutating # statement, to make sure that the factoring allowlist # works right. is_subquery = isinstance(step, qlast.Query) with ctx.newscope(fenced=is_subquery) as subctx: subctx.view_rptr = None path_tip = dispatch.compile(step, ctx=subctx) # If the head of the path is a direct object # reference, wrap it in an expression set to give it a # new path id. This prevents the object path from being # spuriously visible to computable paths defined in a shape # at the root of a path. (See test_edgeql_select_tvariant_04 # for an example). if ( path_tip.path_id.is_objtype_path() and not path_tip.path_id.is_view_path() and path_tip.path_id.src_path() is None ): path_tip = expression_set( ensure_stmt(path_tip, ctx=subctx), ctx=subctx) if path_tip.path_id.is_type_intersection_path(): assert isinstance(path_tip.expr, irast.Pointer) scope_set = path_tip.expr.source else: scope_set = path_tip scope_set = scoped_set(scope_set, ctx=subctx) # We compile computables under namespaces, but we need to have # the source of the computable *not* under that namespace, # so we need to do some remapping. if mapped := get_view_map_remapping(path_tip.path_id, ctx): path_tip = new_set_from_set( path_tip, path_id=mapped.path_id, ctx=ctx) # If we are remapping a source path, then we know that # the path is visible, so we shouldn't recompile it # if it is a computable path. 
is_computable = False if is_computable: computables.append(path_tip) if pathctx.path_is_inserting(path_tip.path_id, ctx=ctx): stype = ctx.env.schema.get_by_id( path_tip.typeref.id, type=s_types.Type ) assert stype raise_self_insert_error(stype, step.span, ctx=ctx) # Don't track this step of the path if it didn't change the set # (probably because of do-nothing intersection) if not path_sets or path_sets[-1] != path_tip: path_sets.append(path_tip) if expr.span: path_tip.span = expr.span pathctx.register_set_in_scope(path_tip, ctx=ctx) for ir_set in computables: # Compile the computables in sibling scopes to the subpaths # they are computing. Note that the path head will be visible # from inside the computable scope. That's fine. scope = ctx.path_scope.find_descendant(ir_set.path_id) if scope is None: scope = ctx.path_scope.find_visible(ir_set.path_id) # We skip recompiling if we can't find a scope for it. # This whole mechanism seems a little sketchy, unfortunately. if scope is None: continue with ctx.new() as subctx: subctx.path_scope = scope assert isinstance(ir_set.expr, irast.Pointer) comp_ir_set = computable_ptr_set( ir_set.expr, ir_set.path_id, span=ir_set.span, ctx=subctx ) i = path_sets.index(ir_set) if i != len(path_sets) - 1: prptr = path_sets[i + 1].expr assert isinstance(prptr, irast.Pointer) prptr.source = comp_ir_set else: path_tip = comp_ir_set path_sets[i] = comp_ir_set return path_tip
Create an ir.Set representing the given EdgeQL path expression.
compile_path
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def _add_target_schema_refs( stype: Optional[s_obj.Object], ctx: context.ContextLevel, ) -> None: """Add the appropriate schema dependencies for a pointer target. The only annoying bit is we need to handle unions/intersections also.""" if not isinstance(stype, s_objtypes.ObjectType): return ctx.env.add_schema_ref(stype, None) schema = ctx.env.schema for obj in ( stype.get_union_of(schema).objects(schema) + stype.get_intersection_of(schema).objects(schema) ): ctx.env.add_schema_ref(obj, None)
Add the appropriate schema dependencies for a pointer target. The only annoying bit is we need to handle unions/intersections also.
_add_target_schema_refs
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def resolve_ptr_with_intersections( near_endpoint: s_obj.Object, pointer_name: str, *, upcoming_intersections: Sequence[s_types.Type] = (), far_endpoints: Iterable[s_obj.Object] = (), direction: s_pointers.PointerDirection = ( s_pointers.PointerDirection.Outbound ), span: Optional[qlast.Span] = None, track_ref: Optional[Union[qlast.Base, Literal[False]]], ctx: context.ContextLevel, ) -> tuple[s_pointers.Pointer, s_pointers.Pointer]: """Resolve a pointer, taking into account upcoming intersections. The key trickiness here is that *two* pointers are returned: * one that (for backlinks) includes just the pointers that actually may be used * one for use in path ids, that does not do that filtering, so that path factoring works properly. """ if not isinstance(near_endpoint, s_sources.Source): # Reference to a property on non-object msg = 'invalid property reference on a primitive type expression' raise errors.InvalidReferenceError(msg, span=span) ptr: Optional[s_pointers.Pointer] = None if direction is s_pointers.PointerDirection.Outbound: path_id_ptr = ptr = near_endpoint.maybe_get_ptr( ctx.env.schema, s_name.UnqualName(pointer_name), ) # If we couldn't anything, but the source is a computed link # that aliases some other link, look for a link property on # it. This allows us to access link properties in both # directions on links, including when the backlink has been # stuck in a computed. if ( ptr is None and isinstance(near_endpoint, s_links.Link) and (back := near_endpoint.get_computed_link_alias(ctx.env.schema)) and isinstance(back, s_links.Link) and (nptr := back.maybe_get_ptr( ctx.env.schema, s_name.UnqualName(pointer_name), )) # We can't handle computeds yet, since we would need to switch # around a bunch of stuff inside them. 
and not nptr.is_pure_computable(ctx.env.schema) ): src_type = downcast( s_types.Type, near_endpoint.get_source(ctx.env.schema) ) if not src_type.is_view(ctx.env.schema): # HACK: If the source is in the standard library, and # not a view, we can't add a derived pointer. For # consistency, just always require it be a view. new_source = downcast( s_objtypes.ObjectType, schemactx.derive_view(src_type, ctx=ctx), ) new_endpoint = downcast(s_links.Link, schemactx.derive_ptr( near_endpoint, new_source, ctx=ctx)) else: new_endpoint = near_endpoint ptr = schemactx.derive_ptr(nptr, new_endpoint, ctx=ctx) path_id_ptr = nptr if ptr is not None: ref = ptr.get_nearest_non_derived_parent(ctx.env.schema) if track_ref is not False: ctx.env.add_schema_ref(ref, track_ref) _add_target_schema_refs( ref.get_target(ctx.env.schema), ctx=ctx) else: assert isinstance(near_endpoint, s_types.Type) concrete_near_endpoint = schemactx.concretify(near_endpoint, ctx=ctx) ptrs = concrete_near_endpoint.getrptrs( ctx.env.schema, pointer_name, sources=far_endpoints) if ptrs: # If this reverse pointer access is followed by # intersections, we filter out any pointers that # couldn't be picked up by the intersections. # If a pointer doesn't get picked up, we look to see # if any of its children might. # # This both allows us to avoid creating spurious # dependencies when reverse links are used in schemas # and to generate a precise set of possible pointers. 
dep_ptrs = set() wl = list(ptrs) while wl: ptr = wl.pop() if (src := ptr.get_source(ctx.env.schema)): if all( src.issubclass(ctx.env.schema, typ) for typ in upcoming_intersections ): dep_ptrs.add(ptr) else: wl.extend(ptr.children(ctx.env.schema)) if track_ref is not False: for p in dep_ptrs: p = p.get_nearest_non_derived_parent(ctx.env.schema) ctx.env.add_schema_ref(p, track_ref) _add_target_schema_refs( p.get_source(ctx.env.schema), ctx=ctx) # We can only compute backlinks for non-computed pointers, # but we need to make sure that a computed pointer doesn't # break properly-filtered backlinks. concrete_ptrs = [ ptr for ptr in ptrs if not ptr.is_pure_computable(ctx.env.schema)] for ptr in ptrs: if ( ptr.is_pure_computable(ctx.env.schema) and (ptr in dep_ptrs or not concrete_ptrs) ): vname = ptr.get_verbosename(ctx.env.schema, with_parent=True) raise errors.InvalidReferenceError( f'cannot follow backlink {pointer_name!r} because ' f'{vname} is computed', span=span ) opaque = not far_endpoints concrete_ptr = schemactx.get_union_pointer( ptrname=s_name.UnqualName(pointer_name), source=near_endpoint, direction=direction, components=concrete_ptrs, opaque=opaque, modname=ctx.derived_target_module, ctx=ctx, ) path_id_ptr = ptr = concrete_ptr # If we have an upcoming intersection that has actual # pointer targets, we want to put the filtered down # version into the AST, so that we can more easily use # that information in compilation. But we still need the # *full* union in the path_ids, for factoring. 
if dep_ptrs and upcoming_intersections: ptr = schemactx.get_union_pointer( ptrname=s_name.UnqualName(pointer_name), source=near_endpoint, direction=direction, components=dep_ptrs, opaque=opaque, modname=ctx.derived_target_module, ctx=ctx, ) if ptr and path_id_ptr: return ptr, path_id_ptr if isinstance(near_endpoint, s_links.Link): vname = near_endpoint.get_verbosename(ctx.env.schema, with_parent=True) msg = f'{vname} has no property {pointer_name!r}' elif direction == s_pointers.PointerDirection.Outbound: msg = (f'{near_endpoint.get_verbosename(ctx.env.schema)} ' f'has no link or property {pointer_name!r}') else: nep_name = near_endpoint.get_displayname(ctx.env.schema) path = f'{nep_name}.{direction}{pointer_name}' msg = f'{path!r} does not resolve to any known path' err = errors.InvalidReferenceError(msg, span=span) if ( direction is s_pointers.PointerDirection.Outbound # In some call sites, we call resolve_ptr "experimentally", # not tracking references and swallowing failures. Don't do an # expensive (30% of compilation time in some benchmarks!) # error enrichment for cases that won't really error. and track_ref is not False ): s_utils.enrich_schema_lookup_error( err, s_name.UnqualName(pointer_name), modaliases=ctx.modaliases, item_type=s_pointers.Pointer, pointer_parent=near_endpoint, schema=ctx.env.schema, ) raise err
Resolve a pointer, taking into account upcoming intersections. The key trickiness here is that *two* pointers are returned: * one that (for backlinks) includes just the pointers that actually may be used * one for use in path ids, that does not do that filtering, so that path factoring works properly.
resolve_ptr_with_intersections
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def extend_path( source_set: irast.Set, ptrcls: s_pointers.Pointer, direction: PtrDir = PtrDir.Outbound, *, path_id_ptrcls: Optional[s_pointers.Pointer] = None, ignore_computable: bool = False, same_computable_scope: bool = False, span: Optional[qlast.Span]=None, ctx: context.ContextLevel, ) -> irast.SetE[irast.Pointer]: """Return a Set node representing the new path tip.""" if ptrcls.is_link_property(ctx.env.schema): src_path_id = source_set.path_id.ptr_path() else: if direction is not s_pointers.PointerDirection.Inbound: source = ptrcls.get_near_endpoint(ctx.env.schema, direction) assert isinstance(source, s_types.Type) stype = get_set_type(source_set, ctx=ctx) if not stype.issubclass(ctx.env.schema, source): # Polymorphic link reference source_set = type_intersection_set( source_set, source, optional=True, span=span, ctx=ctx) src_path_id = source_set.path_id orig_ptrcls = ptrcls # If there is a particular specified ptrcls for the pathid, use # it, otherwise use the actual ptrcls. This comes up with # intersections on backlinks, where we want to use a precise ptr # in the IR for compilation reasons but need a path_id that is # independent of intersections. path_id_ptrcls = path_id_ptrcls or ptrcls # Find the pointer definition site. # This makes it so that views don't change path ids unless they are # introducing some computation. 
ptrcls = ptrcls.get_nearest_defined(ctx.env.schema) path_id_ptrcls = path_id_ptrcls.get_nearest_defined(ctx.env.schema) path_id = pathctx.extend_path_id( src_path_id, ptrcls=path_id_ptrcls, direction=direction, ns=ctx.path_id_namespace, ctx=ctx, ) if ptrcls.get_secret(ctx.env.schema): _check_secret_ptr(ptrcls, span=span, ctx=ctx) target = orig_ptrcls.get_far_endpoint(ctx.env.schema, direction) assert isinstance(target, s_types.Type) ptr = irast.Pointer( source=source_set, direction=direction, ptrref=typegen.ptr_to_ptrref(ptrcls, ctx=ctx), is_definition=False, ) target_set = new_set( stype=target, path_id=path_id, span=span, expr=ptr, ctx=ctx) is_computable = _is_computable_ptr(ptrcls, direction, ctx=ctx) if not ignore_computable and is_computable: target_set = computable_ptr_set( ptr, path_id, same_computable_scope=same_computable_scope, span=span, ctx=ctx, ) assert irutils.is_set_instance(target_set, irast.Pointer) return target_set
Return a Set node representing the new path tip.
extend_path
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def needs_rewrite_existence_assertion( ptrcls: s_pointers.PointerLike, direction: PtrDir, *, ctx: context.ContextLevel, ) -> bool: """Determines if we need to inject an assert_exists for a pointer Required pointers to types with access policies need to have an assert_exists added """ return bool( not ctx.suppress_rewrites and ptrcls.get_required(ctx.env.schema) and direction == PtrDir.Outbound and (target := ptrcls.get_target(ctx.env.schema)) and ctx.env.type_rewrites.get((target, False)) and ptrcls.get_shortname(ctx.env.schema).name != '__type__' )
Determines if we need to inject an assert_exists for a pointer Required pointers to types with access policies need to have an assert_exists added
needs_rewrite_existence_assertion
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def type_intersection_set( source_set: irast.Set, stype: s_types.Type, *, optional: bool, span: Optional[qlast.Span] = None, ctx: context.ContextLevel, ) -> irast.Set: """Return an interesection of *source_set* with type *stype*.""" arg_type = get_set_type(source_set, ctx=ctx) result = schemactx.apply_intersection(arg_type, stype, ctx=ctx) if result.stype == arg_type: return source_set rptr_specialization = [] if ( isinstance(source_set.expr, irast.Pointer) and source_set.expr.ptrref.union_components ): rptr = source_set.expr # This is a type intersection of a union pointer, most likely # a reverse link path specification. If so, test the union # components against the type expression and record which # components match. This information will be used later # when evaluating the path cardinality, as well as to # route link property references accordingly. for component in source_set.expr.ptrref.union_components: component_endpoint_ref = component.dir_target(rptr.direction) ctx.env.schema, component_endpoint = irtyputils.ir_typeref_to_type( ctx.env.schema, component_endpoint_ref) if component_endpoint.issubclass(ctx.env.schema, stype): assert isinstance(component, irast.PointerRef) rptr_specialization.append(component) elif stype.issubclass(ctx.env.schema, component_endpoint): assert isinstance(stype, s_objtypes.ObjectType) if rptr.direction is s_pointers.PointerDirection.Inbound: narrow_ptr = stype.getptr( ctx.env.schema, component.shortname.get_local_name(), ) rptr_specialization.append( irtyputils.ptrref_from_ptrcls( schema=ctx.env.schema, ptrcls=narrow_ptr, cache=ctx.env.ptr_ref_cache, typeref_cache=ctx.env.type_ref_cache, ), ) else: assert isinstance(component, irast.PointerRef) rptr_specialization.append(component) ptrcls = irast.TypeIntersectionLink( arg_type, result.stype, optional=optional, is_empty=result.is_empty, is_subtype=result.is_subtype, rptr_specialization=rptr_specialization, # The type intersection cannot increase the cardinality # of the input 
set, so semantically, the cardinality # of the type intersection "link" is, at most, ONE. cardinality=qltypes.SchemaCardinality.One, ) ptrref = irtyputils.ptrref_from_ptrcls( schema=ctx.env.schema, ptrcls=ptrcls, cache=ctx.env.ptr_ref_cache, typeref_cache=ctx.env.type_ref_cache, ) poly_set = new_set( stype=result.stype, path_id=source_set.path_id.extend(ptrref=ptrref), expr=irast.TypeIntersectionPointer( source=source_set, ptrref=downcast(irast.TypeIntersectionPointerRef, ptrref), direction=s_pointers.PointerDirection.Outbound, optional=optional, ), span=span, ctx=ctx, ) return poly_set
Return an interesection of *source_set* with type *stype*.
type_intersection_set
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def class_set( stype: s_types.Type, *, path_id: Optional[irast.PathId] = None, skip_subtypes: bool = False, ignore_rewrites: bool = False, ctx: context.ContextLevel, ) -> irast.Set: """Nominally, create a set representing selecting some type. That is, create a set with a TypeRoot expr. TODO(ir): In practice, a lot of call sites really want some kind of handle to something that will be bound elsewhere, and we should clean those up to use a different node. """ if path_id is None: path_id = pathctx.get_path_id(stype, ctx=ctx) return new_set( path_id=path_id, stype=stype, ignore_rewrites=ignore_rewrites, expr=irast.TypeRoot( typeref=typegen.type_to_typeref(stype, env=ctx.env), skip_subtypes=skip_subtypes, ), ctx=ctx, )
Nominally, create a set representing selecting some type. That is, create a set with a TypeRoot expr. TODO(ir): In practice, a lot of call sites really want some kind of handle to something that will be bound elsewhere, and we should clean those up to use a different node.
class_set
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def computable_ptr_set( rptr: irast.Pointer, path_id: irast.PathId, *, same_computable_scope: bool=False, span: Optional[qlast.Span]=None, ctx: context.ContextLevel, ) -> irast.Set: """Return ir.Set for a pointer defined as a computable.""" ptrcls = typegen.ptrcls_from_ptrref(rptr.ptrref, ctx=ctx) source_scls = get_set_type(rptr.source, ctx=ctx) source_set = fixup_computable_source_set(rptr.source, ctx=ctx) ptrcls_to_shadow = None qlctx: Optional[context.ContextLevel] try: comp_info = ctx.env.source_map[ptrcls] qlexpr = comp_info.qlexpr assert isinstance(comp_info.context, context.ContextLevel) qlctx = comp_info.context inner_source_path_id = comp_info.path_id path_id_ns = comp_info.path_id_ns except KeyError: comp_expr: Optional[s_expr.Expression] = ptrcls.get_expr(ctx.env.schema) schema_qlexpr: Optional[qlast.Expr] = None if comp_expr is None and ctx.env.options.apply_query_rewrites: assert isinstance(ptrcls, s_pointers.Pointer) ptrcls_n = ptrcls.get_shortname(ctx.env.schema).name path = qlast.Path( steps=[ qlast.SpecialAnchor(name='__source__'), qlast.Ptr( name=ptrcls_n, direction=s_pointers.PointerDirection.Outbound, type=( 'property' if ptrcls.is_link_property(ctx.env.schema) else None ) ) ], ) schema_deflt = ptrcls.get_schema_reflection_default(ctx.env.schema) if schema_deflt is not None: schema_qlexpr = qlast.BinOp( left=path, right=qlparser.parse_fragment(schema_deflt), op='??', ) if needs_rewrite_existence_assertion( ptrcls, PtrDir.Outbound, ctx=ctx): # Wrap it in a dummy select so that we can't optimize away # the assert_exists. 
# TODO: do something less bad arg = qlast.SelectQuery( result=path, where=qlast.Constant.boolean(True)) vname = ptrcls.get_verbosename( ctx.env.schema, with_parent=True) msg = f'required {vname} is hidden by access policy' if ctx.active_computeds: cur = next(reversed(ctx.active_computeds)) vname = cur.get_verbosename( ctx.env.schema, with_parent=True) msg += f' (while evaluating computed {vname})' schema_qlexpr = qlast.FunctionCall( func=('__std__', 'assert_exists'), args=[arg], kwargs={'message': qlast.Constant.string(value=msg)}, ) # Is this is a view, we want to shadow the underlying # ptrcls, since otherwise we will generate this default # code *twice*. if rptr.ptrref.base_ptr: ptrcls_to_shadow = typegen.ptrcls_from_ptrref( rptr.ptrref.base_ptr, ctx=ctx) if schema_qlexpr is None: if comp_expr is None: ptrcls_sn = ptrcls.get_shortname(ctx.env.schema) raise errors.InternalServerError( f'{ptrcls_sn!r} is not a computed pointer') comp_qlexpr = comp_expr.parse() assert isinstance(comp_qlexpr, qlast.Expr), 'expected qlast.Expr' schema_qlexpr = comp_qlexpr # NOTE: Validation of the expression type is not the concern # of this function. For any non-object pointer target type, # the default expression must be assignment-cast into that # type. target_scls = ptrcls.get_target(ctx.env.schema) assert target_scls is not None if not target_scls.is_object_type(): schema_qlexpr = qlast.TypeCast( type=typegen.type_to_ql_typeref( target_scls, ctx=ctx), expr=schema_qlexpr, ) qlexpr = astutils.ensure_ql_query(schema_qlexpr) qlctx = None path_id_ns = None newctx: Callable[[], ContextManager[context.ContextLevel]] if qlctx is None: # Schema-level computed link or property, the context should # still have a source. 
newctx = _get_schema_computed_ctx( rptr=rptr, source=source_set, ctx=ctx) else: newctx = _get_computable_ctx( rptr=rptr, source=source_set, source_scls=source_scls, inner_source_path_id=inner_source_path_id, path_id_ns=path_id_ns, same_scope=same_computable_scope, qlctx=qlctx, ctx=ctx) result_stype = ptrcls.get_target(ctx.env.schema) base_object = ctx.env.schema.get('std::BaseObject', type=s_types.Type) with newctx() as subctx: assert isinstance(source_scls, s_sources.Source) assert isinstance(ptrcls, s_pointers.Pointer) subctx.active_computeds = subctx.active_computeds.copy() if ptrcls_to_shadow: assert isinstance(ptrcls_to_shadow, s_pointers.Pointer) subctx.active_computeds.add(ptrcls_to_shadow) subctx.active_computeds.add(ptrcls) if result_stype != base_object: subctx.view_scls = result_stype subctx.view_rptr = context.ViewRPtr( source=source_scls, ptrcls=ptrcls) subctx.anchors['__source__'] = source_set subctx.empty_result_type_hint = ptrcls.get_target(ctx.env.schema) subctx.partial_path_prefix = source_set # On a mutation, make the expr_exposed. This corresponds with # a similar check on is_mutation in _normalize_view_ptr_expr. if (source_scls.get_expr_type(ctx.env.schema) != s_types.ExprType.Select): subctx.expr_exposed = context.Exposure.EXPOSED comp_ir_set = dispatch.compile(qlexpr, ctx=subctx) # XXX: or should we update rptr in place?? rptr = rptr.replace(expr=comp_ir_set.expr) comp_ir_set = new_set_from_set( comp_ir_set, path_id=path_id, expr=rptr, span=span, merge_current_ns=True, ctx=ctx) maybe_materialize(ptrcls, comp_ir_set, ctx=ctx) return comp_ir_set
Return ir.Set for a pointer defined as a computable.
computable_ptr_set
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def get_view_map_remapping( path_id: irast.PathId, ctx: context.ContextLevel ) -> Optional[irast.Set]: """Perform path_id remapping based on outer views This is a little fiddly, since we may have picked up *additional* namespaces. """ key = path_id.strip_namespace(path_id.namespace) entries = ctx.view_map.get(key, ()) fixed_path_id = path_id.merge_namespace(ctx.path_id_namespace, deep=True) for inner_path_id, mapped in entries: fixed_inner = inner_path_id.merge_namespace( ctx.path_id_namespace, deep=True) if fixed_inner == fixed_path_id: return mapped return None
Perform path_id remapping based on outer views This is a little fiddly, since we may have picked up *additional* namespaces.
get_view_map_remapping
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def remap_path_id(
    path_id: irast.PathId, ctx: context.ContextLevel
) -> irast.PathId:
    """Remap a path_id based on the view_map, one step at a time.

    This is intended to mirror what happens to paths in compile_path.
    """
    remapped = None
    was_remapped = False
    for prefix in path_id.iter_prefixes():
        if not remapped:
            # First prefix: start from it directly.
            remapped = prefix
        else:
            # Re-extend the (possibly remapped) id by the next step.
            step_ptr = prefix.rptr()
            step_dir = prefix.rptr_dir()
            assert step_ptr and step_dir
            remapped = remapped.extend(
                ptrref=step_ptr, direction=step_dir, ns=prefix.namespace)
        # After each step, check whether an outer view remaps this prefix.
        mapped = get_view_map_remapping(remapped, ctx)
        if mapped:
            was_remapped = True
            remapped = mapped.path_id
    # If nothing was remapped, we must have reconstructed the input exactly.
    assert remapped and (remapped == path_id or was_remapped)
    return remapped
Remap a path_id based on the view_map, one step at a time. This is intended to mirror what happens to paths in compile_path.
remap_path_id
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def get_globals_as_json(
    globs: Sequence[s_globals.Global],
    *,
    ctx: context.ContextLevel,
    span: Optional[qlast.Span],
) -> irast.Set:
    """Build a json object that contains the values of `globs`

    The format of the object is simply
    {"<glob name 1>": <json>glob_val_1, ...},
    where values that are unset or set to {} are represented as null,
    with one catch:
    for globals that need "present" arguments (that is, optional globals
    with default values), we need to distinguish between the global
    being unset and being set to {}.
    In that case, we represent being set to {} with null and
    being unset by omitting it from the object.
    """
    # TODO: arrange to compute this once per query, in a CTE or some such?

    # If globals are empty, arrange to still pass in the argument but
    # don't put anything in it.
    if ctx.env.options.make_globals_empty:
        globs = ()

    # Globals cannot be referenced from constraints or indexes, since
    # those are evaluated in contexts where no session state is available.
    objctx = ctx.env.options.schema_object_context
    is_constraint_like = objctx in (s_constr.Constraint, s_indexes.Index)
    if globs and is_constraint_like:
        assert objctx
        typname = objctx.get_schema_class_displayname()
        # XXX: or should we pass in empty globals, in this situation?
        raise errors.SchemaDefinitionError(
            f'functions that reference global variables cannot be called '
            f'from {typname}',
            span=span)

    # Shared AST for a json `null`, used both for unset globals and for
    # the no-globals case.
    null_expr = qlast.FunctionCall(
        func=('__std__', 'to_json'),
        args=[qlast.Constant.string(value="null")],
    )

    with ctx.new() as subctx:
        subctx.anchors = subctx.anchors.copy()
        # Elements for globals without a "present" arg; built into one tuple.
        normal_els = []
        # Standalone one-entry json objects for globals *with* a "present"
        # arg; concatenated onto the tuple-derived object with `++`.
        full_objs: list[qlast.Expr] = []
        json_type = qlast.TypeName(maintype=qlast.ObjectRef(
            module='__std__', name='json'))
        for glob in globs:
            param, present = get_global_param_sets(
                glob, is_implicit_global=True, ctx=ctx)
            # The name of the global isn't syntactically a valid identifier
            # for a namedtuple element but nobody can stop us!
            name = str(glob.get_name(ctx.env.schema))
            main_param = subctx.create_anchor(param, 'a')
            # <json>value ?? null: unset/empty globals become json null.
            tuple_el = qlast.TupleElement(
                name=qlast.Ptr(name=name),
                val=qlast.BinOp(
                    op='??',
                    left=qlast.TypeCast(expr=main_param, type=json_type),
                    right=null_expr,
                )
            )
            if not present:
                # For normal globals, just stick the element in the tuple.
                normal_els.append(tuple_el)
            else:
                # For globals with a present arg, we conditionally
                # construct a one-element object if it is present
                # and an empty object if it is not. These are
                # then combined using ++.
                present_param = subctx.create_anchor(present, 'a')
                tup = qlast.TypeCast(
                    expr=qlast.NamedTuple(elements=[tuple_el]),
                    type=json_type,
                )
                full_objs.append(qlast.IfElse(
                    condition=present_param,
                    if_expr=tup,
                    else_expr=qlast.FunctionCall(
                        func=('__std__', 'to_json'),
                        args=[qlast.Constant.string(value="{}")],
                    )
                ))

        # If access policies are disabled, stick a value in the blob
        # to indicate that. We do this using a full object so it
        # works in constraints and the like, where the tuple->json cast
        # isn't supported yet.
        if (
            not ctx.env.options.apply_user_access_policies
            or not ctx.env.options.apply_query_rewrites
        ) and not is_constraint_like:
            full_objs.append(qlast.FunctionCall(
                func=('__std__', 'to_json'),
                args=[qlast.Constant.string(
                    value='{"__disable_access_policies": true}'
                )],
            ))

        full_expr: qlast.Expr
        if not normal_els and not full_objs:
            # No globals at all: the whole blob is just json null.
            full_expr = null_expr
        else:
            simple_obj = None
            if normal_els or not full_objs:
                # Object holding all the "normal" globals (possibly empty).
                simple_obj = qlast.TypeCast(
                    expr=qlast.NamedTuple(elements=normal_els),
                    type=json_type,
                )

            full_expr = astutils.extend_binop(simple_obj, *full_objs, op='++')

        return dispatch.compile(full_expr, ctx=subctx)
Build a json object that contains the values of `globs` The format of the object is simply {"<glob name 1>": <json>glob_val_1, ...}, where values that are unset or set to {} are represented as null, with one catch: for globals that need "present" arguments (that is, optional globals with default values), we need to distinguish between the global being unset and being set to {}. In that case, we represent being set to {} with null and being unset by omitting it from the object.
get_globals_as_json
python
geldata/gel
edb/edgeql/compiler/setgen.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/setgen.py
Apache-2.0
def _make_group_binding(
    stype: s_types.Type,
    alias: str,
    *,
    ctx: context.ContextLevel,
) -> irast.Set:
    """Make a binding for one of the "dummy" bindings used in group"""
    # Derive a fresh view type named after the alias so the binding has
    # its own identity.
    derived = schemactx.derive_view(
        stype,
        derived_name=s_name.QualName('__derived__', alias),
        preserve_shape=True,
        ctx=ctx,
    )

    binding = setgen.class_set(derived, ctx=ctx)
    binding.is_visible_binding_ref = True

    # Register the binding so later references to the alias resolve to it.
    ctx.aliased_views[s_name.UnqualName(alias)] = binding
    ctx.view_sets[derived] = binding
    ctx.env.path_scope_map[binding] = context.ScopeInfo(
        path_scope=ctx.path_scope,
        binding_kind=irast.BindingKind.For,
        pinned_path_id_ns=ctx.path_id_namespace,
    )

    return binding
Make a binding for one of the "dummy" bindings used in group
_make_group_binding
python
geldata/gel
edb/edgeql/compiler/stmt.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/stmt.py
Apache-2.0
def maybe_add_view(ir: irast.Set, *, ctx: context.ContextLevel) -> irast.Set:
    """Possibly wrap ir in a new view, if needed for tid/tname injection

    This should be called by every ast leaf compilation that can originate
    an object type.
    """
    # We call compile_query_subject in order to create a new view for
    # injecting properties if needed. This will only happen if
    # expr_exposed, so stmt code paths that don't want a new view
    # created (because there is a shape already specified or because
    # it wants to create its own new view in its compile_query_subject call)
    # should make sure expr_exposed is false.
    #
    # The checks here are microoptimizations.
    if (
        ctx.expr_exposed < context.Exposure.BINDING
        or not ir.path_id.is_objtype_path()
    ):
        # Not exposed enough, or not an object path: nothing to inject.
        return ir

    return compile_query_subject(
        ir,
        allow_select_shape_inject=True,
        compile_views=False,
        ctx=ctx,
        span=ir.span,
    )
Possibly wrap ir in a new view, if needed for tid/tname injection This should be called by every ast leaf compilation that can originate an object type.
maybe_add_view
python
geldata/gel
edb/edgeql/compiler/stmt.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/stmt.py
Apache-2.0
def compile_ast_to_ir(
    tree: qlast.Base,
    schema: s_schema.Schema,
    *,
    script_info: Optional[irast.ScriptInfo] = None,
    options: Optional[CompilerOptions] = None,
) -> irast.Statement | irast.ConfigCommand:
    """Compile given EdgeQL AST into Gel IR.

    This is the normal compiler entry point.  It assumes that *tree*
    represents a complete statement.

    Args:
        tree:
            EdgeQL AST.

        schema:
            Schema instance.  Must contain definitions for objects
            referenced by the AST *tree*.

        options:
            An optional :class:`edgeql.compiler.options.CompilerOptions`
            instance specifying compilation options.

        allow_writing_protected_ptrs:
            If ``True``, allows protected object properties or links to
            be overwritten in `INSERT` shapes.

    Returns:
        An instance of :class:`ir.ast.Command`.  Most frequently, this
        would be an instance of :class:`ir.ast.Statement`.
    """
    if options is None:
        options = CompilerOptions()

    # Optional debug dumps of the input, gated on debug flags.
    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_edgeql_text:
        debug.header('EdgeQL Text')
        debug.dump_code(qlcodegen.generate_source(tree, pretty=True))

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_edgeql_ast:
        debug.header('Compiler Options')
        debug.dump(options.__dict__)
        debug.header('EdgeQL AST')
        debug.dump(tree, schema=schema)

    ctx = stmtctx_mod.init_context(schema=schema, options=options)

    # Apply an implicit LIMIT by wrapping a bare expression in a SELECT.
    if isinstance(tree, qlast.Expr) and ctx.implicit_limit:
        tree = qlast.SelectQuery(result=tree, implicit=True)
        tree.limit = qlast.Constant.integer(ctx.implicit_limit)

    if not script_info:
        script_info = stmtctx_mod.preprocess_script([tree], ctx=ctx)

    ctx.env.script_params = script_info.params

    # Main compilation: AST -> IR set, then finalization into a statement.
    ir_set = dispatch_mod.compile(tree, ctx=ctx)
    ir_expr = stmtctx_mod.fini_expression(ir_set, ctx=ctx)

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_scope:
        debug.header('Scope Tree')
        print(ctx.path_scope.pdebugformat())

        # Also build and dump a mapping from scope ids to
        # paths that appear directly at them.
        scopes: Dict[int, Set[irast.PathId]] = {
            k: set() for k in
            sorted(node.unique_id
                   for node in ctx.path_scope.descendants
                   if node.unique_id)
        }
        for ir_set in ctx.env.set_types:
            if ir_set.path_scope_id and ir_set.path_scope_id in scopes:
                scopes[ir_set.path_scope_id].add(ir_set.path_id)
        debug.dump(scopes)

    if debug.flags.edgeql_compile or debug.flags.edgeql_compile_ir:
        debug.header('Gel IR')
        debug.dump(ir_expr, schema=getattr(ir_expr, 'schema', None))

    return ir_expr
Compile given EdgeQL AST into Gel IR. This is the normal compiler entry point. It assumes that *tree* represents a complete statement. Args: tree: EdgeQL AST. schema: Schema instance. Must contain definitions for objects referenced by the AST *tree*. options: An optional :class:`edgeql.compiler.options.CompilerOptions` instance specifying compilation options. allow_writing_protected_ptrs: If ``True``, allows protected object properties or links to be overwritten in `INSERT` shapes. Returns: An instance of :class:`ir.ast.Command`. Most frequently, this would be an instance of :class:`ir.ast.Statement`.
compile_ast_to_ir
python
geldata/gel
edb/edgeql/compiler/__init__.py
https://github.com/geldata/gel/blob/master/edb/edgeql/compiler/__init__.py
Apache-2.0