code | docstring | func_name | language | repo | path | url | license |
---|---|---|---|---|---|---|---|
def lookup_computed(
obj: Obj, name: str, ctx: EvalContext
) -> Optional[Tuple[qlast.Expr, str, Obj]]:
"""Lookup a schema-computed property
Return (code, source type name, source object).
"""
if not (typ := obj.get('__type__', ctx.db)):
return None
typ_computed = ctx.db.schema_computables.get(typ)
if name[0] != '@' and typ_computed and name in typ_computed:
return typ_computed[name], typ, obj
elif (
name[0] == '@'
and (src := obj.get('@source', ctx.db))
and (src_ptr := obj.get('@__source_link', ctx.db))
and (src_type := src.get('__type__', ctx.db))
and (src_computed := ctx.db.schema_computables.get(src_type))
and f'{src_ptr}{name}' in src_computed
):
return src_computed[f'{src_ptr}{name}'], src_type, src
else:
return None | Lookup a schema-computed property
Return (code, source type name, source object). | lookup_computed | python | geldata/gel | edb/tools/toy_eval_model.py | https://github.com/geldata/gel/blob/master/edb/tools/toy_eval_model.py | Apache-2.0 |
def gen_types():
"""Print available debug flags."""
for flag in debug.flags:
print(f'env EDGEDB_DEBUG_{flag.name.upper()}=1')
print(f' {flag.doc}\n') | Print available debug flags. | gen_types | python | geldata/gel | edb/tools/dflags.py | https://github.com/geldata/gel/blob/master/edb/tools/dflags.py | Apache-2.0 |
async def wipe_tenant(
cluster: pgcluster.BaseCluster,
pgconn: pgcon.PGConnection,
tenant: str,
dry_run: bool,
) -> None:
from edb.server import pgcon
tpl_db = get_database_backend_name(
edbdef.EDGEDB_TEMPLATE_DB,
tenant_id=tenant,
)
sup_role = get_role_backend_name(
edbdef.EDGEDB_SUPERUSER,
tenant_id=tenant,
)
try:
tpl_conn = await cluster.connect(
database=tpl_db,
source_description="wipe_tenant",
)
except pgcon.BackendCatalogNameError:
click.secho(
f'Instance tenant {tenant!r} does not have the '
f'{edbdef.EDGEDB_TEMPLATE_DB!r} database. Is it already clean?'
)
return
try:
databases, roles = await _get_dbs_and_roles(tpl_conn)
finally:
tpl_conn.terminate()
stmts = [
f'SET ROLE {qi(sup_role)}',
]
for db in databases:
pg_db = get_database_backend_name(db, tenant_id=tenant)
owner = await pgconn.sql_fetch_val(
b"""
SELECT
rolname
FROM
pg_database d
INNER JOIN pg_roles r
ON (d.datdba = r.oid)
WHERE
d.datname = $1
""",
args=[pg_db.encode("utf-8")],
)
if owner:
stmts.append(f'SET ROLE {qi(owner.decode("utf-8"))}')
if pg_db == tpl_db:
stmts.append(f'ALTER DATABASE {qi(pg_db)} IS_TEMPLATE = false')
stmts.append(f'DROP DATABASE {qi(pg_db)}')
stmts.append('RESET ROLE;')
for role in roles:
pg_role = get_role_backend_name(role, tenant_id=tenant)
members = json.loads(await pgconn.sql_fetch_val(
b"""
SELECT
json_agg(member::regrole::text)
FROM
pg_auth_members
WHERE
roleid = (SELECT oid FROM pg_roles WHERE rolname = $1)
""",
args=[pg_role.encode("utf-8")],
))
for member in members:
stmts.append(f'REVOKE {qi(pg_role)} FROM {qi(member)}')
stmts.append(f'DROP ROLE {qi(pg_role)}')
super_group = get_role_backend_name(
edbdef.EDGEDB_SUPERGROUP, tenant_id=tenant)
stmts.append(f'DROP ROLE {qi(super_group)}')
for stmt in stmts:
click.echo(stmt + (';' if not stmt.endswith(';') else ''))
if not dry_run:
await pgconn.sql_execute(stmt.encode("utf-8")) | | wipe_tenant | python | geldata/gel | edb/tools/wipe.py | https://github.com/geldata/gel/blob/master/edb/tools/wipe.py | Apache-2.0 |
def rm_data_dir():
"""Remove the local development data directory if present"""
data_dir = devmode.get_dev_mode_data_dir()
if data_dir.exists():
shutil.rmtree(data_dir)
print("Removed the following local dev data directory.")
print(data_dir)
else:
print("The local dev data directory does not exist.") | Remove the local development data directory if present | rm_data_dir | python | geldata/gel | edb/tools/rm_data_dir.py | https://github.com/geldata/gel/blob/master/edb/tools/rm_data_dir.py | Apache-2.0 |
def gen_meta_grammars(names):
"""Generate keywords, builtins, operators, etc. which can be used
for EdgeQL and SDL grammars.
NAME - at the moment there's only one option 'edgeql'
"""
if names:
for name in names:
if name not in NAMES:
die(f'{name} is not a valid NAME')
if len(names) > 2:
die(f'too many NAMES')
try:
res = subprocess.run([
'edb',
'cli',
'query',
'-Fjson',
r"""
WITH
MODULE schema,
T := (SELECT Type
FILTER Type IS (PseudoType | ScalarType | ObjectType)),
t_names := (
SELECT re_match(r'(?:.*::)?(.+)', T.name)[0]
FILTER re_test(r"^(?:std|math|cal|fts|pg)::", T.name)
),
c_names := re_match(
r"(?:std|sys|math)::([a-zA-Z]\w+$)",
DISTINCT `Constraint`.name
)[0],
f_names := re_match(
r"(?:std|sys|math|cal|fts|pg)::([a-zA-Z]\w+$)",
DISTINCT `Function`.name
),
o_names := (
SELECT _ := DISTINCT Operator.name[5:]
FILTER not re_test(r"^[a-zA-Z ]+$", _)
ORDER BY _
),
i_names := re_match(
r"(?:std|sys|math|cal|fts|pg)::([a-zA-Z]\w+$)",
DISTINCT `Index`.name
)[0],
SELECT {
t_names := t_names,
c_names := c_names,
f_names := f_names[0] if len(f_names) = 1 else '',
o_names := o_names,
i_names := i_names,
}
""",
], capture_output=True)
if res.returncode != 0:
die('Could not connect to the dev Gel instance')
main(names, (json.loads(res.stdout))[0])
except Exception as ex:
die(str(ex)) | Generate keywords, builtins, operators, etc. which can be used
for EdgeQL and SDL grammars.
NAME - at the moment there's only one option 'edgeql' | gen_meta_grammars | python | geldata/gel | edb/tools/gen_meta_grammars.py | https://github.com/geldata/gel/blob/master/edb/tools/gen_meta_grammars.py | Apache-2.0 |
def gen_types(*, stdout):
"""Generate edb/schema/_types.py from edb/api/types.txt"""
try:
main(stdout=stdout)
except Exception as ex:
die(str(ex)) | Generate edb/schema/_types.py from edb/api/types.txt | gen_types | python | geldata/gel | edb/tools/gen_types.py | https://github.com/geldata/gel/blob/master/edb/tools/gen_types.py | Apache-2.0 |
def gen_cast_table():
"""Generate a table of scalar casts to be used in the documentation.
NAME - at the moment there's only one option 'edgeql'
"""
try:
res = subprocess.run([
'edb',
'cli',
'query',
'-Fjson',
r"""
WITH MODULE schema
SELECT Cast {
source := .from_type.name,
target := .to_type.name,
allow_assignment,
allow_implicit,
}
FILTER all({.from_type, .to_type} IS ScalarType | ObjectType)
""",
], capture_output=True)
if res.returncode != 0:
die('Could not connect to the dev Gel instance')
main(json.loads(res.stdout))
except Exception as ex:
die(str(ex)) | Generate a table of scalar casts to be used in the documentation.
NAME - at the moment there's only one option 'edgeql' | gen_cast_table | python | geldata/gel | edb/tools/gen_cast_table.py | https://github.com/geldata/gel/blob/master/edb/tools/gen_cast_table.py | Apache-2.0 |
def test_extension(
script_path: pathlib.Path,
localdev: bool
) -> None:
'''Installs an extension package into a dev environment and creates it.
Removes the extension and package first if they already exist.'''
with open(script_path) as f:
script = f.read()
statements = edgeql.parse_block(script)
if not statements or not isinstance(
statements[0], qlast.CreateExtensionPackage
):
print("Script does not begin with CREATE EXTENSION PACKAGE")
sys.exit(1)
extension_name = statements[0].name.name
conn_params: dict[str, typing.Any] = {}
if localdev:
conn_params = dict(
dsn='_localdev',
tls_security='insecure',
)
db = edgedb.create_client(**conn_params)
db.execute(f'''
configure current database set __internal_testmode := true;
''')
# Delete the extension and the package if it already exists
ext = db.query('''
select schema::Extension filter .name = <str>$0;
''', extension_name)
if ext:
print(f"Dropping existing extension {extension_name}")
db.execute(f'''
drop extension {extension_name};
''')
ext_package = db.query('''
select sys::ExtensionPackage {version} filter .name = <str>$0;
''', extension_name)
if ext_package:
v = ext_package[0].version
version = f'{v.major}.{v.minor}.{v.stage_no}'
print(
f"Dropping existing extension package {extension_name} "
f"version {version}"
)
db.execute(f'''
drop extension package {extension_name} VERSION '{version}';
''')
# Run the script; should create the package
print(f"Creating extension package {extension_name}")
db.execute(script)
# Create the extension
print(f"Creating extension {extension_name}")
db.execute(f'''
create extension {extension_name};
''') | Installs an extension package into a dev environment and creates it.
Removes the extension and package first if they already exist. | test_extension | python | geldata/gel | edb/tools/test_extension.py | https://github.com/geldata/gel/blob/master/edb/tools/test_extension.py | Apache-2.0 |
def cli(args: tuple[str, ...]):
"""Run edgedb CLI with `-H localhost`."""
args_list = _ensure_linked(args)
if (
'--wait-until-available' not in args_list and
not any('--wait-until-available=' in a for a in args_list)
):
args_list += ['--wait-until-available', '60s']
sys.exit(rustcli.rustcli(args=[sys.argv[0], *args_list])) | Run edgedb CLI with `-H localhost`. | cli | python | geldata/gel | edb/tools/cli.py | https://github.com/geldata/gel/blob/master/edb/tools/cli.py | Apache-2.0 |
def ui(args: tuple[str, ...]):
"""Run edgedb GUI with `-H localhost`."""
_ensure_linked(args)
subprocess.check_call(
[
sys.executable,
"-m",
"edb.cli",
"ui",
"--instance=_localdev",
],
) | Run edgedb GUI with `-H localhost`. | ui | python | geldata/gel | edb/tools/cli.py | https://github.com/geldata/gel/blob/master/edb/tools/cli.py | Apache-2.0 |
def run():
"""
Generates DDL to recreate metaschema for sql introspection.
Can be used to apply changes to metaschema to an existing database.
edb redo-metaschema-sql | ./build/postgres/install/bin/psql \
"postgresql://postgres@/E_main?host=$(pwd)/tmp/devdatadir&port=5432" \
-v ON_ERROR_STOP=ON
"""
from edb.common import devmode
devmode.enable_dev_mode()
from edb.pgsql import dbops, metaschema
from edb import buildmeta
version = buildmeta.get_pg_version()
commands = metaschema._generate_sql_information_schema(version)
for command in commands:
block = dbops.PLTopBlock()
if isinstance(command, dbops.CreateFunction):
command.or_replace = True
if isinstance(command, dbops.CreateView):
command.or_replace = True
command.generate(block)
print(block.to_string()) | Generates DDL to recreate metaschema for sql introspection.
Can be used to apply changes to metaschema to an existing database.
edb redo-metaschema-sql | ./build/postgres/install/bin/psql \
"postgresql://postgres@/E_main?host=$(pwd)/tmp/devdatadir&port=5432" \
-v ON_ERROR_STOP=ON | run | python | geldata/gel | edb/tools/redo_metaschema.py | https://github.com/geldata/gel/blob/master/edb/tools/redo_metaschema.py | Apache-2.0 |
def gen_errors(*, base_class, message_base_class, base_import,
stdout, extra_all, client):
"""Generate edb/errors.py from edb/api/errors.txt"""
try:
main(base_class=base_class,
message_base_class=message_base_class,
base_import=base_import,
stdout=stdout,
extra_all=extra_all,
client=client,
language='python')
except Exception as ex:
die(str(ex)) | Generate edb/errors.py from edb/api/errors.txt | gen_errors | python | geldata/gel | edb/tools/gen_errors.py | https://github.com/geldata/gel/blob/master/edb/tools/gen_errors.py | Apache-2.0 |
def gen_errors_json(*, client):
"""Generate JSON from edb/api/errors.txt"""
for p in edb.__path__:
ep = pathlib.Path(p) / 'api' / 'errors.txt'
if ep.exists():
break
else:
die('Unable to find the "edb/api/errors.txt" file')
try:
tree = ErrorsTree()
tree.load(ep)
clss = tree.generate_classes(
message_base_class=None, base_class=None, client=client)
print(json.dumps(clss))
except Exception as ex:
die(str(ex)) | Generate JSON from edb/api/errors.txt | gen_errors_json | python | geldata/gel | edb/tools/gen_errors.py | https://github.com/geldata/gel/blob/master/edb/tools/gen_errors.py | Apache-2.0 |
def dispatch(cls):
"""generic_func.dispatch(cls) -> <function implementation>
Runs the dispatch algorithm to return the best available implementation
for the given *cls* registered on *generic_func*.
"""
nonlocal cache_token
if cache_token is not None:
current_token = get_cache_token()
if cache_token != current_token:
dispatch_cache.clear()
cache_token = current_token
try:
impl = dispatch_cache[cls]
except KeyError:
try:
impl = registry[cls]
except KeyError:
impl = _find_impl(cls, registry)
dispatch_cache[cls] = impl
return impl | generic_func.dispatch(cls) -> <function implementation>
Runs the dispatch algorithm to return the best available implementation
for the given *cls* registered on *generic_func*. | tracing_singledispatch.dispatch | python | geldata/gel | edb/tools/profiling/tracing_singledispatch.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/tracing_singledispatch.py | Apache-2.0 |
def register(cls, func=None):
"""generic_func.register(cls, func) -> func
Registers a new implementation for the given *cls* on a *generic_func*.
"""
nonlocal cache_token
if func is None:
if isinstance(cls, type):
return lambda f: register(cls, f)
ann = getattr(cls, '__annotations__', {})
if not ann:
raise TypeError(
f"Invalid first argument to `register()`: {cls!r}. "
f"Use either `@register(some_class)` or plain `@register` "
f"on an annotated function."
)
func = cls
# only import typing if annotation parsing is necessary
from typing import get_type_hints
argname, cls = next(iter(get_type_hints(func).items()))
if not isinstance(cls, type):
raise TypeError(
f"Invalid annotation for {argname!r}. "
f"{cls!r} is not a class."
)
registry[cls] = func
if cache_token is None and hasattr(cls, '__abstractmethods__'):
cache_token = get_cache_token()
dispatch_cache.clear()
return func | generic_func.register(cls, func) -> func
Registers a new implementation for the given *cls* on a *generic_func*. | tracing_singledispatch.register | python | geldata/gel | edb/tools/profiling/tracing_singledispatch.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/tracing_singledispatch.py | Apache-2.0 |
def tracing_singledispatch(func: T) -> T:
"""Single-dispatch generic function decorator.
Transforms a function into a generic function, which can have different
behaviours depending upon the type of its first argument. The decorated
function acts as the default implementation, and additional
implementations can be registered using the register() attribute of the
generic function.
"""
# There are many programs that use functools without singledispatch, so we
# trade-off making singledispatch marginally slower for the benefit of
# making start-up of such applications slightly faster.
import types
import weakref
registry = {}
dispatch_cache = weakref.WeakKeyDictionary()
cache_token = None
def dispatch(cls):
"""generic_func.dispatch(cls) -> <function implementation>
Runs the dispatch algorithm to return the best available implementation
for the given *cls* registered on *generic_func*.
"""
nonlocal cache_token
if cache_token is not None:
current_token = get_cache_token()
if cache_token != current_token:
dispatch_cache.clear()
cache_token = current_token
try:
impl = dispatch_cache[cls]
except KeyError:
try:
impl = registry[cls]
except KeyError:
impl = _find_impl(cls, registry)
dispatch_cache[cls] = impl
return impl
def register(cls, func=None):
"""generic_func.register(cls, func) -> func
Registers a new implementation for the given *cls* on a *generic_func*.
"""
nonlocal cache_token
if func is None:
if isinstance(cls, type):
return lambda f: register(cls, f)
ann = getattr(cls, '__annotations__', {})
if not ann:
raise TypeError(
f"Invalid first argument to `register()`: {cls!r}. "
f"Use either `@register(some_class)` or plain `@register` "
f"on an annotated function."
)
func = cls
# only import typing if annotation parsing is necessary
from typing import get_type_hints
argname, cls = next(iter(get_type_hints(func).items()))
if not isinstance(cls, type):
raise TypeError(
f"Invalid annotation for {argname!r}. "
f"{cls!r} is not a class."
)
registry[cls] = func
if cache_token is None and hasattr(cls, '__abstractmethods__'):
cache_token = get_cache_token()
dispatch_cache.clear()
return func
def sd_wrapper(*args, **kw):
if not args:
raise TypeError(f'{funcname} requires at least '
'1 positional argument')
impl = dispatch(args[0].__class__)
if profiling_in_progress.is_set():
caller = sys._getframe().f_back.f_code
caller_id = (
caller.co_filename,
caller.co_firstlineno,
caller.co_name,
)
impl_id = (
impl.__code__.co_filename,
impl.__code__.co_firstlineno,
impl.__code__.co_name,
)
our_dispatches = done_dispatches.setdefault(func_id, {})
caller_dispatches = our_dispatches.setdefault(caller_id, {})
caller_dispatches[impl_id] = caller_dispatches.get(impl_id, 0) + 1
return impl(*args, **kw)
funcname = getattr(func, '__name__', 'singledispatch function')
registry[object] = func
_fcode = func.__code__
func_id = (_fcode.co_filename, _fcode.co_firstlineno, _fcode.co_name)
wrapper = sd_wrapper
wrapper.register = register
wrapper.dispatch = dispatch
wrapper.registry = types.MappingProxyType(registry)
wrapper._clear_cache = dispatch_cache.clear
update_wrapper(wrapper, func)
return wrapper | Single-dispatch generic function decorator.
Transforms a function into a generic function, which can have different
behaviours depending upon the type of its first argument. The decorated
function acts as the default implementation, and additional
implementations can be registered using the register() attribute of the
generic function. | tracing_singledispatch | python | geldata/gel | edb/tools/profiling/tracing_singledispatch.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/tracing_singledispatch.py | Apache-2.0 |
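The decorator in the row above mirrors the standard-library `functools.singledispatch`, adding call-site bookkeeping when profiling is active. A minimal sketch of the registration/dispatch pattern it wraps, using the stdlib version (the `describe` function is a made-up example, not from the repo):

```python
# Registration/dispatch pattern that tracing_singledispatch builds on,
# shown with the stdlib functools.singledispatch for illustration.
import functools

@functools.singledispatch
def describe(obj) -> str:
    # Fallback used when no registered implementation matches the type.
    return f"object: {obj!r}"

@describe.register
def _(obj: int) -> str:
    # Selected via the first argument's annotation.
    return f"int: {obj}"

@describe.register(list)
def _(obj) -> str:
    # Explicit-class form, equivalent to the annotated form above.
    return f"list of {len(obj)} items"

print(describe(42))       # int: 42
print(describe([1, 2]))   # list of 2 items
print(describe(3.5))      # object: 3.5
print(describe.dispatch(int).__name__)  # implementation chosen for int
```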
def __init__(
self,
*,
prefix: str = PREFIX,
suffix: str = PROF_SUFFIX,
dir: Optional[str] = None,
save_every_n_calls: int = 1,
):
"""Create the decorator.
If `save_every_n_calls` is greater than 1, the profiler will not
dump data to files on every call to the profiled function. This speeds
up the running program but risks incomplete data if the process is
terminated non-gracefully.
`dir`, `prefix`, and `suffix` after `tempfile.mkstemp`.
"""
self.prefix = prefix
self.suffix = suffix
self.save_every_n_calls = save_every_n_calls
self.n_calls = 0
self._dir: Union[str, pathlib.Path, None] = dir
self._profiler: Optional[cProfile.Profile] = None
self._dump_file_path: Optional[str] = None
self._profiler_enabled = False | Create the decorator.
If `save_every_n_calls` is greater than 1, the profiler will not
dump data to files on every call to the profiled function. This speeds
up the running program but risks incomplete data if the process is
terminated non-gracefully.
`dir`, `prefix`, and `suffix` after `tempfile.mkstemp`. | __init__ | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
def __call__(self, func: T) -> T:
"""Apply decorator to a function."""
@functools.wraps(func)
def wrapper(*args, **kwargs):
tracing_singledispatch.profiling_in_progress.set()
self.n_calls += 1
profiler_was_enabled_here = False
if not self._profiler_enabled:
self.profiler.enable()
self._profiler_enabled = True
profiler_was_enabled_here = True
try:
return func(*args, **kwargs)
finally:
if profiler_was_enabled_here:
self.profiler.disable()
if self.n_calls % self.save_every_n_calls == 0:
self.dump_stats()
tracing_singledispatch.profiling_in_progress.clear()
return cast(T, wrapper) | Apply decorator to a function. | __call__ | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
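A hedged usage sketch for the decorator whose `__init__` and `__call__` appear in the two rows above; the `edb.tools.profiling.profiler.profile` spelling is inferred from the file paths and from `perfviz` further down, so treat it as an assumption:

```python
# Hypothetical use of the profiling decorator defined above; the import
# path and class name are assumptions inferred from the repo paths.
from edb.tools.profiling import profiler

# Dump aggregated cProfile data only every 100th call to cut overhead;
# dumps go to a temp file with the configured prefix/suffix.
@profiler.profile(save_every_n_calls=100)
def handle_request(payload: dict) -> dict:
    return {"echo": payload}

for i in range(250):
    handle_request({"n": i})
```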
def dump_file(self) -> str:
"""Return a path to a new, empty, existing named temporary file."""
if self._dump_file_path is None:
file = tempfile.NamedTemporaryFile(
dir=self.dir,
prefix=self.prefix,
suffix=self.suffix,
delete=False,
)
file.close()
self._dump_file_path = file.name
return self._dump_file_path | Return a path to a new, empty, existing named temporary file. | dump_file | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
def aggregate(
self,
out_path: pathlib.Path,
*,
sort_by: str = "",
width: int = 1920,
threshold: float = 0.0001, # 1.0 is 100%
quiet: bool = False,
) -> Tuple[int, int]:
"""Read all pstats in `self.dir` and write a summary to `out_path`.
`sort_by` after `pstats.sort_stats()`. Files identified by `self.dir`,
`self.prefix`, and `self.suffix`.
`width` selects the width of the generated SVG.
Functions whose runtime is below `threshold` percentage are not
included.
Returns a tuple with the number of successfully and unsuccessfully
aggregated files.
"""
print = builtins.print
if quiet:
print = lambda *args, **kwargs: None
if out_path.is_dir():
out_path = out_path / "profile_analysis"
prof_path = out_path.with_suffix(PROF_SUFFIX)
pstats_path = out_path.with_suffix(STAT_SUFFIX)
call_svg_path = out_path.with_suffix(".call_stack" + SVG_SUFFIX)
usage_svg_path = out_path.with_suffix(".usage" + SVG_SUFFIX)
files = list(
str(f) for f in self.dir.glob(self.prefix + "*" + self.suffix)
)
if not files:
print(f"warning: no files to process", file=sys.stderr)
return 0, 0
success = 0
failure = 0
with open(pstats_path, "w") as out:
ps = pstats.Stats(stream=out)
for file in files:
try:
ps.add(file)
except TypeError as te:
# Probably the profile file is empty.
print(te, file=sys.stderr)
failure += 1
else:
success += 1
ps.dump_stats(str(prof_path))
if sort_by:
ps.sort_stats(sort_by)
ps.print_stats()
singledispatch_traces = self.accumulate_singledispatch_traces()
if singledispatch_traces:
singledispatch_path = out_path.with_suffix(SINGLEDISPATCH_SUFFIX)
with singledispatch_path.open("wb") as sd_file:
pickle.dump(
singledispatch_traces, sd_file, pickle.HIGHEST_PROTOCOL
)
# Mypy is wrong below, `stats` is there on all pstats.Stats objects
stats = ps.stats # type: ignore
filter_singledispatch_in_place(stats, singledispatch_traces)
try:
render_svg(
stats,
call_svg_path,
usage_svg_path,
width=width,
threshold=threshold,
)
except ValueError as ve:
print(f"Cannot display flame graph: {ve}", file=sys.stderr)
print(
f"Processed {success + failure} files, {failure} failed.",
file=sys.stderr,
)
return success, failure | Read all pstats in `self.dir` and write a summary to `out_path`.
`sort_by` after `pstats.sort_stats()`. Files identified by `self.dir`,
`self.prefix`, and `self.suffix`.
`width` selects the width of the generated SVG.
Functions whose runtime is below `threshold` percentage are not
included.
Returns a tuple with the number of successfully and unsuccessfully
aggregated files. | aggregate | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
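The `aggregate` method above is, at its core, the standard `pstats` merge-and-report flow; a minimal standalone sketch of that flow (file locations are hypothetical):

```python
# Minimal pstats merge mirroring what aggregate() does before rendering:
# combine raw cProfile dumps, write a text report, and re-dump a merged .prof.
import glob
import pstats

files = glob.glob("/tmp/edgedb_*.prof")          # hypothetical dump location
with open("profile_analysis.pstats", "w") as out:
    ps = pstats.Stats(stream=out)
    for f in files:
        try:
            ps.add(f)                             # merge one raw profile
        except TypeError:
            pass                                  # skip empty/corrupt dumps
    ps.dump_stats("profile_analysis.prof")
    ps.sort_stats("cumulative")
    ps.print_stats()
```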
def profile_memory(func: Callable[[], Any]) -> MemoryFrame:
"""Profile memory and return a tree of statistics.
Feed those to `render_memory_svg()` to write an SVG.
"""
import tracemalloc
tracemalloc.start(1024)
try:
func()
finally:
snap = tracemalloc.take_snapshot()
tracemalloc.stop()
stats = snap.statistics("traceback")
root = MemoryFrame(blocks=0, size=0)
for stat in stats:
blocks = stat.count
size = stat.size
callee = root
callee.blocks += blocks
callee.size += size
for frame in stat.traceback:
lineid = (frame.filename, frame.lineno)
callee = callee.callers.setdefault(
lineid, MemoryFrame(blocks=0, size=0)
)
callee.blocks += blocks
callee.size += size
while len(root.callers) == 1:
root = next(iter(root.callers.values()))
return root | Profile memory and return a tree of statistics.
Feed those to `render_memory_svg()` to write an SVG. | profile_memory | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
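A quick usage sketch for `profile_memory` above; `allocate_a_lot` is a made-up workload and the tree walk relies on the `MemoryFrame` fields visible in the code (`blocks`, `size`, `callers`):

```python
# Hypothetical workload run under profile_memory (defined in the row above);
# the returned tree is walked one level deep using the fields shown there.
def allocate_a_lot() -> int:
    keep = [bytearray(10_000) for _ in range(1_000)]
    return len(keep)

root = profile_memory(allocate_a_lot)
print(root.blocks, root.size)                  # totals across all tracebacks
for (filename, lineno), frame in root.callers.items():
    print(f"{filename}:{lineno}: {frame.size} bytes in {frame.blocks} blocks")
```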
def gen_colors(s: RGB, e: RGB, size: int) -> Iterator[RGB]:
"""Generate a gradient of `size` colors between `s` and `e`."""
for i in range(size):
yield RGB(
s.r + (e.r - s.r) * i // size,
s.g + (e.g - s.g) * i // size,
s.b + (e.b - s.b) * i // size,
) | Generate a gradient of `size` colors between `s` and `e`. | gen_colors | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
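A small check of the gradient helper above, assuming `RGB` is a plain `(r, g, b)` named tuple as its field access suggests:

```python
# Illustrative run of gen_colors with a stand-in RGB named tuple.
from collections import namedtuple

RGB = namedtuple("RGB", "r g b")

def gen_colors(s, e, size):
    for i in range(size):
        yield RGB(
            s.r + (e.r - s.r) * i // size,
            s.g + (e.g - s.g) * i // size,
            s.b + (e.b - s.b) * i // size,
        )

# Four evenly spaced steps from black toward red; the end color is excluded.
print(list(gen_colors(RGB(0, 0, 0), RGB(255, 0, 0), 4)))
# [RGB(r=0, g=0, b=0), RGB(r=63, g=0, b=0), RGB(r=127, g=0, b=0), RGB(r=191, g=0, b=0)]
```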
def calc_callers(
stats: Stats,
threshold: float,
) -> Tuple[Dict[FunctionID, Function], Dict[Call, Stat]]:
"""Calculate flattened stats of calls between functions."""
roots: List[FunctionID] = []
funcs: Dict[FunctionID, Function] = {}
calls: Dict[Call, Stat] = {}
for func, (cc, nc, tt, ct, callers) in stats.items():
funcs[func] = Function(
id=func, calls=[], calledby=[], stat=(cc, nc, tt, ct)
)
if not callers:
roots.append(func)
calls[ROOT_ID, func] = funcs[func].stat
for func, (_, _, _, _, callers) in stats.items():
for caller, t in callers.items():
assert (caller, func) not in calls
funcs[caller].calls.append(func)
funcs[func].calledby.append(caller)
calls[caller, func] = t
total = sum(funcs[r].stat[3] for r in roots)
ttotal = sum(funcs[r].stat[2] for r in funcs)
# Try to find suitable root
newroot = max(
(r for r in funcs if r not in roots), key=lambda r: funcs[r].stat[3]
)
nstat = funcs[newroot].stat
ntotal = total + nstat[3]
if 0.8 < ntotal / ttotal < 1.2:
roots.append(newroot)
calls[ROOT_ID, newroot] = nstat
total = ntotal
else:
total = ttotal
funcs[ROOT_ID] = Function(
id=ROOT_ID, calls=roots, calledby=[], stat=(1, 1, 0, total),
)
return funcs, calls | Calculate flattened stats of calls between functions. | calc_callers | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
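The `stats` mapping consumed by `calc_callers` above follows the internal `pstats.Stats.stats` layout; a hand-built example of that shape (paths and timings are invented):

```python
# pstats-style stats mapping: function id -> (primitive calls, total calls,
# tottime, cumtime, callers), where callers maps a caller id to the same
# 4-tuple of counts and times for calls made from that caller.
main_id = ("app.py", 10, "main")
work_id = ("app.py", 42, "work")

stats = {
    # 'main' has no callers, so calc_callers would treat it as a root.
    main_id: (1, 1, 0.01, 0.30, {}),
    # 'work' was called 5 times, always from 'main'.
    work_id: (5, 5, 0.25, 0.29, {main_id: (5, 5, 0.25, 0.29)}),
}
```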
def find_singledispatch_wrapper(
stats: Stats, *, regular_location: bool = False
) -> FunctionID:
"""Returns the singledispatch wrapper function ID tuple.
Raises LookupError if not found.
"""
if regular_location:
functools_path = re.compile(r"python3.\d+/functools.py$")
dispatch_name = "dispatch"
wrapper_name = "wrapper"
else:
functools_path = re.compile(r"profiling/tracing_singledispatch.py$")
dispatch_name = "dispatch"
wrapper_name = "sd_wrapper"
for (modpath, _lineno, funcname), (_, _, _, _, callers) in stats.items():
if funcname != dispatch_name:
continue
m = functools_path.search(modpath)
if not m:
continue
# Using this opportunity, we're figuring out which `wrapper` from
# functools in the trace is the singledispatch `wrapper` (there
# are three more others in functools.py).
for caller_modpath, caller_lineno, caller_funcname in callers:
if caller_funcname == wrapper_name:
m = functools_path.search(modpath)
if not m:
continue
return (caller_modpath, caller_lineno, caller_funcname)
raise LookupError("singledispatch.dispatch without wrapper?")
raise LookupError("No singledispatch use in provided stats") | Returns the singledispatch wrapper function ID tuple.
Raises LookupError if not found. | find_singledispatch_wrapper | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
def filter_singledispatch_in_place(
stats: Stats,
dispatches: Dict[FunctionID, CallCounts],
regular_location: bool = False,
) -> None:
"""Removes singledispatch `wrapper` from the `stats.`
Given that:
- W is a wrapper function hiding original function O;
- D is the internal dispatching function of singledispatch;
- W calls D first to select which function to call;
- then, W calls the concrete registered implementations F1, F2, F3, and
rather rarely, O.
This filter changes this ( -> means "calls"):
A -> W -> F1
A -> W -> D
into this:
A -> F1
A -> D
"""
try:
wrapper = find_singledispatch_wrapper(
stats, regular_location=regular_location
)
except LookupError:
return
# Delete the function from stats
del stats[wrapper]
# Fix up all "callers" stats
singledispatch_functions = {d: (0, 0, 0, 0) for d in dispatches}
for funcid, (_, _, _, _, callers) in stats.items():
if wrapper not in callers:
continue
new_direct_calls = {}
for call_counts in dispatches.values():
for caller, calls in call_counts.items():
if funcid not in calls:
continue
new_direct_calls[caller] = calls[funcid]
pcc, cc, tottime, cumtime = callers.pop(wrapper)
all_calls = sum(new_direct_calls.values())
if all_calls == 0:
count = len(singledispatch_functions)
for sdfid, old_stats in singledispatch_functions.items():
cur_stats = (
round(pcc / count),
round(cc / count),
tottime / count,
cumtime / count,
)
callers[sdfid] = cur_stats
new_stats = tuple(
old_stats[i] + cur_stats[i] for i in range(4)
)
singledispatch_functions[sdfid] = new_stats # type: ignore
continue
factor = all_calls / cc
pcc_fl = pcc * factor
cc_fl = cc * factor
tottime *= factor
cumtime *= factor
for caller, count in new_direct_calls.items():
factor = count / cc_fl
callers[caller] = (
round(pcc_fl * factor),
count,
tottime * factor,
cumtime * factor,
)
# Insert original single dispatch generic functions back
for sdfid, sd_stats in singledispatch_functions.items():
o_pcc, o_cc, o_tottime, o_cumtime, callers = stats.get(
sdfid, (0, 0, 0, 0, {})
)
stats[sdfid] = (
sd_stats[0] + o_pcc,
sd_stats[1] + o_cc,
sd_stats[2] + o_tottime,
sd_stats[3] + o_cumtime,
callers,
) | Removes singledispatch `wrapper` from the `stats`.
Given that:
- W is a wrapper function hiding original function O;
- D is the internal dispatching function of singledispatch;
- W calls D first to select which function to call;
- then, W calls the concrete registered implementations F1, F2, F3, and
rather rarely, O.
This filter changes this ( -> means "calls"):
A -> W -> F1
A -> W -> D
into this:
A -> F1
A -> D | filter_singledispatch_in_place | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
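A standalone toy version of the edge rewrite the docstring above describes (A -> W -> F1 becomes A -> F1); it only demonstrates the shape of the transformation and is not the repo's implementation:

```python
# Drop a wrapper node W from a call-count graph and connect its callers
# directly to whatever W called. Counts here are purely illustrative.
def drop_wrapper(edges: dict[tuple[str, str], int], wrapper: str) -> dict:
    incoming = {k: n for k, n in edges.items() if k[1] == wrapper}
    outgoing = {k: n for k, n in edges.items() if k[0] == wrapper}
    kept = {k: n for k, n in edges.items() if wrapper not in k}
    for (caller, _w) in incoming:
        for (_w2, callee), count in outgoing.items():
            kept[(caller, callee)] = kept.get((caller, callee), 0) + count
    return kept

edges = {("A", "W"): 3, ("W", "F1"): 2, ("W", "D"): 1}
print(drop_wrapper(edges, "W"))   # {('A', 'F1'): 2, ('A', 'D'): 1}
```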
def render_svg(
stats: Stats,
call_out: Union[pathlib.Path, str],
usage_out: Union[pathlib.Path, str],
*,
threshold: float = 0.00001, # 1.0 is 100%
width: int = 1920, # in pixels
block_height: int = 24, # in pixels
font_size: int = 12,
raw: bool = False,
) -> None:
"""Render an SVG file to `call_out` and `usage_out`.
Raises ValueError if rendering cannot be done with the given `stats`.
Functions whose runtime is below `threshold` percentage are not included.
Unless `raw` is True, functions are filtered to exclude common wrappers
that make the resulting SVG too busy but are themselves harmless.
"""
funcs, calls = calc_callers(stats, threshold)
call_blocks, usage_blocks, maxw = build_svg_blocks(
funcs, calls, threshold=threshold
)
with PROFILING_JS.open() as js_file:
javascript = js_file.read()
if call_blocks:
call_svg = render_svg_section(
call_blocks,
maxw,
[COLORS, CCOLORS],
block_height=block_height,
font_size=font_size,
width=width,
javascript=javascript,
)
with open(call_out, "w") as outf:
outf.write(call_svg)
if usage_blocks:
usage_svg = render_svg_section(
usage_blocks,
maxw,
[COLORS, ECOLORS, DCOLORS],
block_height=block_height,
font_size=font_size,
width=width,
javascript=javascript,
)
with open(usage_out, "w") as outf:
outf.write(usage_svg) | Render an SVG file to `call_out` and `usage_out`.
Raises ValueError if rendering cannot be done with the given `stats`.
Functions whose runtime is below `threshold` percentage are not included.
Unless `raw` is True, functions are filtered to exclude common wrappers
that make the resulting SVG too busy but are themselves harmless. | render_svg | python | geldata/gel | edb/tools/profiling/profiler.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/profiler.py | Apache-2.0 |
def perfviz(
dirs: List[str],
prefix: str,
suffix: str,
sort_by: str,
out: Union[pathlib.Path, str],
width: int,
threshold: float,
) -> None:
"""Aggregate raw profiling traces into textual and graphical formats.
Generates aggregate .prof and .singledispatch files, an aggregate textual
.pstats file, as well as two SVG flame graphs.
For more comprehensive documentation read edb/tools/profiling/README.md.
"""
if len(dirs) > 1:
raise click.UsageError("Specify at most one directory")
dir: Optional[str] = dirs[0] if dirs else None
prof = profiler.profile(
dir=dir, prefix=prefix, suffix=suffix, save_every_n_calls=1
)
prof.aggregate(
pathlib.Path(out), sort_by=sort_by, width=width, threshold=threshold
) | Aggregate raw profiling traces into textual and graphical formats.
Generates aggregate .prof and .singledispatch files, an aggregate textual
.pstats file, as well as two SVG flame graphs.
For more comprehensive documentation read edb/tools/profiling/README.md. | perfviz | python | geldata/gel | edb/tools/profiling/cli.py | https://github.com/geldata/gel/blob/master/edb/tools/profiling/cli.py | Apache-2.0 |
def _collect_fields(self) -> List[Field]:
"""Collect all fields declared in a class and its ancestors."""
cls = self._ctx.cls
fields: List[Field] = []
known_fields: Set[str] = set()
for stmt in cls.defs.body:
if not isinstance(stmt, nodes.AssignmentStmt):
continue
lhs = stmt.lvalues[0]
if not isinstance(lhs, nodes.NameExpr):
continue
sym = cls.info.names.get(lhs.name)
if sym is None or isinstance(sym.node, nodes.PlaceholderNode):
# Not resolved yet?
continue
node = sym.node
assert isinstance(node, nodes.Var)
if node.is_classvar:
# Disregard ClassVar stuff
continue
field = self._field_from_field_def(stmt, lhs, sym)
if field is not None:
fields.append(field)
known_fields.add(field.name)
return self._get_inherited_fields(known_fields) + fields | Collect all fields declared in a class and its ancestors. | _collect_fields | python | geldata/gel | edb/tools/mypy/plugin.py | https://github.com/geldata/gel/blob/master/edb/tools/mypy/plugin.py | Apache-2.0 |
def _exc_info_to_string(
result: runner.ParallelTextTestResult,
err: typing.Any,
test: unittest.TestCase,
):
"""Converts a sys.exc_info()-style tuple of values into a string."""
from edb.common import traceback as edb_traceback
# Copied from unittest.TestResult._exc_info_to_string
exctype, value, tb = err
tb = result._clean_tracebacks(exctype, value, tb, test) # type: ignore
tb_e = traceback.TracebackException(
exctype, value, tb, capture_locals=result.tb_locals, compact=True
)
tb_e.stack = edb_traceback.StandardStackSummary(tb_e.stack)
msgLines = list(tb_e.format())
if result.buffer:
output = sys.stdout.getvalue() # type: ignore
error = sys.stderr.getvalue() # type: ignore
if output:
if not output.endswith('\n'):
output += '\n'
msgLines.append(STDOUT_LINE % output)
if error:
if not error.endswith('\n'):
error += '\n'
msgLines.append(STDERR_LINE % error)
return ''.join(msgLines) | Converts a sys.exc_info()-style tuple of values into a string. | _exc_info_to_string | python | geldata/gel | edb/tools/test/results.py | https://github.com/geldata/gel/blob/master/edb/tools/test/results.py | Apache-2.0 |
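The helper above is built around the standard `traceback.TracebackException`; a minimal sketch of that stdlib pattern outside the test-runner machinery:

```python
# Core stdlib pattern used by _exc_info_to_string: turn sys.exc_info()
# into a single formatted traceback string.
import sys
import traceback

def format_current_exception() -> str:
    exctype, value, tb = sys.exc_info()
    tb_e = traceback.TracebackException(exctype, value, tb, compact=True)
    return "".join(tb_e.format())

try:
    1 / 0
except ZeroDivisionError:
    print(format_current_exception())
```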
def test(
*,
files: typing.Sequence[str],
jobs: int,
shard: str,
include: typing.Sequence[str],
exclude: typing.Sequence[str],
verbose: bool,
quiet: bool,
debug: bool,
output_format: runner.OutputFormat,
warnings: bool,
failfast: bool,
shuffle: bool,
cov: typing.Sequence[str],
repeat: int,
running_times_log_file: typing.Optional[typing.TextIO],
list_tests: bool,
backend_dsn: typing.Optional[str],
use_db_cache: bool,
data_dir: typing.Optional[str],
use_data_dir_dbs: bool,
result_log: str,
include_unsuccessful: bool,
):
"""Run Gel test suite.
Discovers and runs tests in the specified files or directories.
If no files or directories are specified, current directory is assumed.
"""
if quiet:
if verbose:
click.secho(
'Warning: both --quiet and --verbose are '
'specified, assuming --quiet.', fg='yellow')
verbosity = 0
elif verbose:
verbosity = 2
else:
verbosity = 1
if jobs == 0:
jobs = psutil.cpu_count(logical=False)
mproc_fixes.patch_multiprocessing(debug=debug)
if verbosity > 1 and output_format is runner.OutputFormat.stacked:
click.secho(
'Error: cannot use stacked output format in verbose mode.',
fg='red')
sys.exit(1)
if repeat < 1:
click.secho(
'Error: --repeat must be a positive non-zero number.', fg='red')
sys.exit(1)
if not files:
cwd = os.path.abspath(os.getcwd())
if os.path.exists(os.path.join(cwd, 'tests')):
files = ('tests',)
else:
click.secho(
'Error: no test path specified and no "tests" directory found',
fg='red')
sys.exit(1)
for file in files:
if not os.path.exists(file):
click.secho(
f'Error: test path {file!r} does not exist', fg='red')
sys.exit(1)
try:
selected_shard, total_shards = map(int, shard.split('/'))
except Exception:
click.secho(f'Error: --shard {shard} must match format e.g. 2/5')
sys.exit(1)
if selected_shard < 1 or selected_shard > total_shards:
click.secho(f'Error: --shard {shard} is out of bound')
sys.exit(1)
run = lambda: _run(
include=include,
exclude=exclude,
verbosity=verbosity,
files=files,
jobs=jobs,
output_format=output_format,
warnings=warnings,
failfast=failfast,
shuffle=shuffle,
repeat=repeat,
selected_shard=selected_shard,
total_shards=total_shards,
running_times_log_file=running_times_log_file,
list_tests=list_tests,
backend_dsn=backend_dsn,
try_cached_db=use_db_cache,
data_dir=data_dir,
use_data_dir_dbs=use_data_dir_dbs,
result_log=result_log,
include_unsuccessful=include_unsuccessful,
)
if cov:
for pkg in cov:
if '\\' in pkg or '/' in pkg or pkg.endswith('.py'):
click.secho(
f'Error: --cov argument {pkg!r} looks like a path, '
f'expected a Python package name', fg='red')
sys.exit(1)
with _coverage_wrapper(cov):
result = run()
else:
result = run()
sys.exit(result) | Run Gel test suite.
Discovers and runs tests in the specified files or directories.
If no files or directories are specified, current directory is assumed. | test | python | geldata/gel | edb/tools/test/__init__.py | https://github.com/geldata/gel/blob/master/edb/tools/test/__init__.py | Apache-2.0 |
def elab_Shape(elements: Sequence[qlast.ShapeElement]) -> ShapeExpr:
"""Convert a concrete syntax shape to object expressions"""
result: Dict[e.Label, e.BindingExpr] = {}
for se in elements:
if path_contains_splat(se.expr):
i_logging.print_warning("Splat is not implemented")
continue
match elab_ShapeElement(se):
case (name, elem):
if name not in result.keys():
result = {**result, name: elem}
else:
(elab_error("Duplicate Value in Shapes", se.span))
return ShapeExpr(result) | Convert a concrete syntax shape to object expressions | elab_Shape | python | geldata/gel | edb/tools/experimental_interpreter/elaboration.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/elaboration.py | Apache-2.0 |
def elab_label(p: qlast.Path) -> Label:
"""Elaborates a single name e.g. in the left hand side of a shape"""
steps = [*p.steps]
while steps[0] is not None and isinstance(
steps[0], qlast.TypeIntersection
):
steps = steps[1:]
match steps[0]:
case qlast.Ptr(
name=pname, direction=s_pointers.PointerDirection.Outbound
):
return StrLabel(pname)
case qlast.Ptr(name=pname, type='property'):
return LinkPropLabel(pname)
case _:
return elab_not_implemented(p, "label") | Elaborates a single name e.g. in the left hand side of a shape | elab_label | python | geldata/gel | edb/tools/experimental_interpreter/elaboration.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/elaboration.py | Apache-2.0 |
def elab_single_type_expr(typedef: qlast.TypeExpr) -> Tp:
"""elaborates the target type of a
concrete unknown pointer, i.e. links or properties"""
if isinstance(typedef, qlast.TypeName):
return elab_TypeName(typedef)
else:
match typedef:
case qlast.TypeOp(left=left_type, op=op_name, right=right_type):
if op_name == "|":
return UnionTp(
left=elab_single_type_expr(left_type),
right=elab_single_type_expr(right_type),
)
else:
raise ValueError("Unknown Type Op")
raise ValueError("MATCH") | elaborates the target type of a
concrete unknown pointer, i.e. links or properties | elab_single_type_expr | python | geldata/gel | edb/tools/experimental_interpreter/elaboration.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/elaboration.py | Apache-2.0 |
def do_write_logs(logs: List[Any], filename: str):
def format_entry(entry, index):
entry_id = '_'.join(map(str, index))
result = """<a href='#/' onclick='toggle("{}")'>
Input/Output {}</a>\n""".format(
entry_id, entry_id
)
result += """<button onclick='foldEntry(\"{}\")'>Fold</button>
<button onclick='unfoldEntry(\"{}\")'>
Unfold</button>""".format(
entry_id, entry_id
)
result += "<div class='entry' id='entry_{}'>".format(entry_id)
result += """<div class='input'><span style='color:blue;'>Input:
</span> {}</div>""".format(
to_html_str(pp.show(entry[0]))
)
result += """<div class='input'><span style='color:green;'>
Human-friendly Input:</span> {}</div>""".format(
codegen.generate_source(reverse_elab(entry[0]))
)
if len(entry) > 1:
result += """<div class='output'><span style='color:red;'>
Output:</span> {}</div>""".format(
to_html_str(pp.show(entry[1]))
)
try:
json_text = json.dumps(
multi_set_val_to_json_like(entry[1]), indent=4
)
except Exception as e:
json_text = "EXCEPTION OCCURRED" + str(e)
result += """<div class='output'><span style='color:green;'>
Human-friendly Output:</span> {}</div>""".format(
json_text
)
result += "</div>\n"
return result
def format_log(log, index):
result = "<ul {} id='entry_{}'>\n".format(
"class='entry'" if len(index) > 0 else '',
'_'.join(map(str, index)),
)
for i, entry in enumerate(log):
result += "<li>\n"
if isinstance(entry, list):
sub_index = index + (i,)
result += """<a href='#/' onclick='toggle("{}")'>
Log {}</a>\n""".format(
'_'.join(map(str, sub_index)),
'_'.join(map(str, sub_index)),
)
result += format_log(entry, sub_index)
else:
sub_index = index + (i,)
result += format_entry(entry, sub_index)
result += "</li>\n"
result += "</ul>\n"
return result
with open(filename, "w") as f:
f.write("<html>\n")
f.write("<head>\n")
f.write("<title>Log</title>\n")
f.write("""<meta charset="UTF-8">\n""")
f.write("<style>\n")
f.write(".entry { margin-left: 20px; }\n")
f.write("</style>\n")
f.write("</head>\n")
f.write("<body>\n")
f.write("<h1>Log</h1>\n")
f.write("<button onclick='foldAll()'>Fold all</button>\n")
f.write("<button onclick='unfoldAll()'>Unfold all</button>\n")
f.write(format_log(logs, ()))
f.write("<script>\n")
f.write("function toggle(index) {\n")
f.write(" var entry = document.getElementById('entry_' + index);\n")
f.write(
" entry.style.display = entry.style.display === 'none' ?"
" 'block' : 'none';\n"
""
)
f.write("return False;}\n")
f.write("function foldAll() {\n")
f.write(" var entries = document.getElementsByClassName('entry');\n")
f.write(" for (var i = 0; i < entries.length; i++) {\n")
f.write(" entries[i].style.display = 'none';\n")
f.write(" }\n")
f.write("}\n")
f.write("function unfoldAll() {\n")
f.write(" var entries = document.getElementsByClassName('entry');\n")
f.write(" for (var i = 0; i < entries.length; i++) {\n")
f.write(" entries[i].style.display = 'block';\n")
f.write(" }\n")
f.write("}\n")
f.write(
"""
function foldEntry(id) {
var entry = document.getElementById('entry_' + id);
entry.style.display = 'none';
var entries = entry.querySelectorAll('.entry');
for (var i = 0; i < entries.length; i++) {
entries[i].style.display = 'none';
}
}
function unfoldEntry(id) {
var entry = document.getElementById('entry_' + id);
entry.style.display = 'block';
var entries = entry.querySelectorAll('.entry');
for (var i = 0; i < entries.length; i++) {
entries[i].style.display = 'block';
}
}
"""
)
f.write("</script>\n")
f.write("</body>\n")
f.write("</html>\n") | function foldEntry(id) {
var entry = document.getElementById('entry_' + id);
entry.style.display = 'none';
var entries = entry.querySelectorAll('.entry');
for (var i = 0; i < entries.length; i++) {
entries[i].style.display = 'none';
}
}
function unfoldEntry(id) {
var entry = document.getElementById('entry_' + id);
entry.style.display = 'block';
var entries = entry.querySelectorAll('.entry');
for (var i = 0; i < entries.length; i++) {
entries[i].style.display = 'block';
}
} | do_write_logs | python | geldata/gel | edb/tools/experimental_interpreter/logs.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/logs.py | Apache-2.0 |
def interperter_entry(
*,
init_sdl_file=None,
next_ql_file=None,
init_ql_file=None,
verbose=False,
trace_to_file=None,
sqlite_file=None,
library_ddl_files=None,
test=None,
no_setup=False,
skip_test_confirm=False,
) -> None:
if test:
schemas_dir = os.path.join(
os.path.dirname(__file__), '..', '..', '..', 'tests', 'schemas'
)
print("Schemas are in ", schemas_dir)
# search root of test schemas for esdl file
candidate_files = []
failed_files = []
for root, _, files in os.walk(schemas_dir):
for file in files:
if file.endswith('.esdl'):
if test.lower() in file.lower():
candidate_files.append(os.path.join(root, file))
else:
failed_files.append(os.path.join(root, file))
if len(candidate_files) == 0:
print(f'Could not find any esdl files containing {test}')
print('Found these files:', failed_files)
return
if len(candidate_files) > 1:
print(f'Found multiple esdl files containing {test}:')
for file in candidate_files:
print(file)
return
if init_sdl_file is None:
init_sdl_file = candidate_files[0]
ql_file = candidate_files[0].replace('.esdl', '_setup.edgeql')
if init_ql_file is None:
init_ql_file = ql_file
if next_ql_file is None and os.path.exists(
"temp_current_testing.edgeql"
):
next_ql_file = "temp_current_testing.edgeql"
if no_setup:
init_ql_file = None
next_ql_file = None
if trace_to_file is None:
trace_to_file = "temp_debug.html"
if verbose is False:
verbose = True
# print all options
print(f'Running test {test} with options:')
print(
f'init_sdl_file: '
+ (
init_sdl_file
if not init_sdl_file.startswith(schemas_dir)
else "<s_dir>" + init_sdl_file[len(schemas_dir) :]
)
)
if init_ql_file:
print(
'init_ql_file: '
+ (
init_ql_file
if not init_ql_file.startswith(schemas_dir)
else "<s_dir>" + init_ql_file[len(schemas_dir) :]
)
)
else:
print(f'init_ql_file: None')
print(f'next_ql_file: {next_ql_file}')
print(f'trace_to_file: {trace_to_file}')
print(f'verbose: {verbose}')
if not skip_test_confirm:
if input('Continue? (Y/n)') == 'n':
return
""" Run the experimental interpreter for EdgeQL """
repl(
init_sdl_file=init_sdl_file,
init_ql_file=init_ql_file,
next_ql_file=next_ql_file,
library_ddl_files=library_ddl_files,
debug_print=verbose,
trace_to_file_path=trace_to_file,
sqlite_file=sqlite_file,
) | Run the experimental interpreter for EdgeQL | interperter_entry | python | geldata/gel | edb/tools/experimental_interpreter/edb_entry.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/edb_entry.py | Apache-2.0 |
def check_module_validity(
dbschema: e.DBSchema, module_name: Tuple[str, ...]
) -> e.DBSchema:
"""
Checks the validity of an unchecked module in dbschema.
Modifies the db schema after checking
"""
name_res.module_name_resolve(dbschema, module_name)
inheritance_populate.module_subtyping_resolve(dbschema)
inheritance_populate.module_inheritance_populate(dbschema, module_name)
mck.unchecked_module_map(
dbschema,
module_name,
check_object_tp_comp_validity,
check_fun_def_validity,
)
dbschema.modules[module_name] = dbschema.unchecked_modules[module_name]
del dbschema.unchecked_modules[module_name]
return dbschema | Checks the validity of an unchecked module in dbschema.
Modifies the db schema after checking | check_module_validity | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/schema_checking.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/schema_checking.py | Apache-2.0 |
def re_populate_module_inheritance(
dbschema: e.DBSchema, module_name: Tuple[str, ...]
) -> None:
"""
Checks the validity of an unchecked module in dbschema.
Modifies the db schema after checking
"""
dbschema.unchecked_modules[module_name] = dbschema.modules[module_name]
del dbschema.modules[module_name]
inheritance_populate.module_subtyping_resolve(dbschema)
inheritance_populate.module_inheritance_populate(dbschema, module_name)
dbschema.modules[module_name] = dbschema.unchecked_modules[module_name]
del dbschema.unchecked_modules[module_name] | Checks the validity of an unchecked module in dbschema.
Modifies the db schema after checking | re_populate_module_inheritance | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/schema_checking.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/schema_checking.py | Apache-2.0 |
def unchecked_module_map(
dbschema: e.DBSchema,
module_name: Tuple[str, ...],
f: Callable[[e.TcCtx, e.Tp, e.Tp, e.CMMode], e.Tp],
g: Callable[[e.TcCtx, e.FuncDef], e.FuncDef],
) -> None:
"""
Modifies the db schema after checking
"""
root_ctx = eops.emtpy_tcctx_from_dbschema(dbschema, module_name)
def unchecked_object_tp_map(
subject_tp: e.Tp, obj_tp: e.ObjectTp
) -> e.ObjectTp:
result_vals: Dict[str, e.ResultTp] = {}
for lbl, (t_comp_tp, t_comp_card) in obj_tp.val.items():
result_vals[lbl] = e.ResultTp(
f(root_ctx, subject_tp, t_comp_tp, t_comp_card), t_comp_card
)
return e.ObjectTp(result_vals)
result_vals: Dict[str, e.ModuleEntity] = {}
dbmodule = dbschema.unchecked_modules[module_name]
for t_name, t_me in dbmodule.defs.items():
match t_me:
case e.ModuleEntityTypeDef(
typedef=typedef,
is_abstract=is_abstract,
constraints=constraints,
indexes=indexes,
):
if isinstance(typedef, e.ObjectTp):
result_vals = {
**result_vals,
t_name: e.ModuleEntityTypeDef(
typedef=unchecked_object_tp_map(
e.NamedNominalLinkTp(
name=e.QualifiedName(
[*module_name, t_name]
),
linkprop=e.ObjectTp({}),
),
typedef,
),
is_abstract=is_abstract,
constraints=constraints,
indexes=indexes,
),
}
else:
assert isinstance(typedef, e.ScalarTp)
result_vals = {**result_vals, t_name: t_me}
case e.ModuleEntityFuncDef(funcdefs=funcdefs):
result_vals = {
**result_vals,
t_name: e.ModuleEntityFuncDef(
funcdefs=[g(root_ctx, funcdef) for funcdef in funcdefs]
),
}
case _:
raise ValueError("Unimplemented", t_me)
dbschema.unchecked_modules[module_name] = e.DBModule(result_vals) | Modifies the db schema after checking | unchecked_module_map | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/module_check_tools.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/module_check_tools.py | Apache-2.0 |
def try_match_and_get_arg_mods(
expr: e.FunAppExpr, fun_def: e.FuncDef
) -> Optional[Sequence[e.ParamModifier]]:
"""
Returns None if the expr does not match the fun_def.
"""
match expr:
case e.FunAppExpr(
fun=_, args=args, overloading_index=_, kwargs=kwargs
):
# positional
if len(args) == len(fun_def.tp.args_mod):
return fun_def.tp.args_mod
elif len(args) < len(fun_def.tp.args_mod):
part_one = fun_def.tp.args_mod[0 : len(args)]
assert all(
[
fun_def.tp.args_label.index(l) >= len(args)
for l in kwargs.keys()
]
)
part_two = [
fun_def.tp.args_mod[fun_def.tp.args_label.index(l)]
for l in kwargs.keys()
]
return [*part_one, *part_two]
elif len(args) > len(fun_def.tp.args_mod):
return None
else:
raise ValueError("impossible", expr)
case _:
raise ValueError("impossible", expr) | Returns None if the expr does not match the fun_def. | try_match_and_get_arg_mods | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/function_checking.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/function_checking.py | Apache-2.0 |
def check_args_ret_type_match(
ctx: e.TcCtx, tps_syn: List[e.Tp], tps_ck: e.FunArgRetType
) -> Optional[e.Tp]: # Returns the result Tp if matches
"""
If matches, return the result type.
Need to return the result type because we have parametric polymorphism.
"""
some_tp_mapping_candidates: Dict[int, List[e.Tp]] = {}
args_ck_tps = tps_ck.args_tp
ret_tp = tps_ck.ret_tp.tp
if len(args_ck_tps) != len(tps_syn):
return None
for _, (syn_tp, ck_tp) in enumerate(zip(tps_syn, args_ck_tps)):
tops.collect_is_subtype_with_instantiation(
ctx, syn_tp, ck_tp, some_tp_mapping_candidates
)
some_tp_mapping: Dict[int, e.Tp] = {}
for i, candidate_tps in some_tp_mapping_candidates.items():
if len(candidate_tps) == 1:
some_tp_mapping[i] = candidate_tps[0]
continue
else:
for candidate_tp in candidate_tps:
if all(
tops.check_is_subtype(ctx, tp, candidate_tp)
for tp in candidate_tps
):
some_tp_mapping[i] = candidate_tp
break
else:
# the refined is used with the test case
# test_edgeql_select_subqueries_16
# where for std::IN, one argument has a link prop
# and the other do not
# I choose to remove the link prop uniformly in this case
refined_candidate_tps = [
refine_candidate_tp(tp) for tp in candidate_tps
]
for candidate_tp in refined_candidate_tps:
if all(
tops.check_is_subtype(ctx, tp, candidate_tp)
for tp in refined_candidate_tps
):
some_tp_mapping[i] = candidate_tp
break
else:
# cannot find a unique assignment for a candidate type
return None
for _, (syn_tp, ck_tp) in enumerate(zip(tps_syn, args_ck_tps)):
if tops.check_is_subtype_with_instantiation(
ctx, syn_tp, ck_tp, some_tp_mapping
):
continue
else:
syn_tp = refine_candidate_tp(
syn_tp
) # use refinement if it fails on the first run
if tops.check_is_subtype_with_instantiation(
ctx, syn_tp, ck_tp, some_tp_mapping
):
continue
else:
return None
final_ret_tp = tops.recursive_instantiate_tp(ret_tp, some_tp_mapping)
return final_ret_tp | If matches, return the result type.
Need to return the result type because we have parametric polymorphism. | check_args_ret_type_match | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/function_checking.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/function_checking.py | Apache-2.0 |
def module_name_resolve(
dbschema: e.DBSchema, module_name: Tuple[str, ...]
) -> None:
"""
Modifies the db schema after checking
"""
def f(
root_ctx: e.TcCtx,
subject_tp: e.Tp,
tp_comp: e.Tp,
tp_comp_card: e.CMMode,
) -> e.Tp:
return object_tp_comp_name_resolve(root_ctx, tp_comp)
mck.unchecked_module_map(dbschema, module_name, f, func_def_name_resolve) | Modifies the db schema after checking | module_name_resolve | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/name_resolution.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/name_resolution.py | Apache-2.0 |
def checked_module_name_resolve(
dbschema: e.DBSchema, module_name: Tuple[str, ...]
) -> None:
"""
Modifies the db schema after checking
"""
assert module_name not in dbschema.unchecked_modules
dbschema.unchecked_modules[module_name] = dbschema.modules[module_name]
del dbschema.modules[module_name]
module_name_resolve(dbschema, module_name)
assert module_name not in dbschema.modules
dbschema.modules[module_name] = dbschema.unchecked_modules[module_name]
del dbschema.unchecked_modules[module_name] | Modifies the db schema after checking | checked_module_name_resolve | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/name_resolution.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/name_resolution.py | Apache-2.0 |
def module_inheritance_populate(
dbschema: e.DBSchema, module_name: Tuple[str, ...]
) -> None:
"""
Modifies the db schema after checking
"""
result_vals: Dict[str, e.ModuleEntity] = {}
dbmodule = dbschema.unchecked_modules[module_name]
for t_name, t_me in dbmodule.defs.items():
root_ctx = eops.emtpy_tcctx_from_dbschema(dbschema, module_name)
match t_me:
case e.ModuleEntityTypeDef(
typedef=typedef,
is_abstract=is_abstract,
constraints=constraints,
indexes=indexes,
):
if isinstance(typedef, e.ObjectTp):
if (
e.QualifiedName([*module_name, t_name])
in dbschema.subtyping_relations
):
new_typedef, new_constraints, new_indexes = (
copy_construct_inheritance(
root_ctx,
typedef,
dbschema.subtyping_relations[
e.QualifiedName([*module_name, t_name])
],
constraints,
indexes,
)
)
result_vals = {
**result_vals,
t_name: e.ModuleEntityTypeDef(
typedef=new_typedef,
is_abstract=is_abstract,
constraints=new_constraints,
indexes=new_indexes,
),
}
else:
result_vals = {**result_vals, t_name: t_me}
elif isinstance(typedef, e.ScalarTp):
# insert assignment casts
assert isinstance(
typedef.name, e.QualifiedName
), "Name resolution should have been done"
assert typedef.name == e.QualifiedName(
[*module_name, t_name]
)
assert (
typedef.name
not in dbschema.unchecked_subtyping_relations
)
for parent_name in dbschema.subtyping_relations[
typedef.name
]:
def default_cast_fun(v):
return v
cast_key = (
e.ScalarTp(parent_name),
e.ScalarTp(typedef.name),
)
assert cast_key not in dbschema.casts
dbschema.casts[cast_key] = e.TpCast(
e.TpCastKind.Assignment, default_cast_fun
)
result_vals = {**result_vals, t_name: t_me}
else:
raise ValueError("Not Implemented", typedef)
case e.ModuleEntityFuncDef(funcdefs=funcdefs):
result_vals = {
**result_vals,
t_name: e.ModuleEntityFuncDef(funcdefs=funcdefs),
}
case _:
raise ValueError("Unimplemented", t_me)
dbschema.unchecked_modules[module_name] = e.DBModule(result_vals) | Modifies the db schema after checking | module_inheritance_populate | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/inheritance_populate.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/inheritance_populate.py | Apache-2.0 |
def insert_link_prop_checking(
ctx: e.TcCtx,
expr_ck: e.Expr,
synth_lp: Dict[str, e.ResultTp],
ck_lp: Dict[str, e.ResultTp],
) -> e.Expr:
"""
Check link properties, return additional link props
that need to be inserted (default fields)
"""
    # we do not support heterogeneous link targets
for k in synth_lp.keys():
        # check membership before indexing into ck_lp
        if k not in ck_lp.keys():
            raise ValueError("Link prop not in type", k, ck_lp.keys())
        this_tp = ck_lp[k].tp
        if isinstance(synth_lp[k].tp, e.ComputableTp):
raise ValueError("Cannot define computable tp")
elif isinstance(this_tp, e.DefaultTp):
tops.assert_real_subtype(ctx, synth_lp[k].tp, this_tp.tp)
else:
tops.assert_real_subtype(ctx, synth_lp[k].tp, ck_lp[k].tp)
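    # Illustrative example (hypothetical link properties): if the checked
    # type declares @weight: int64 and @note: str with a default, an insert
    # supplying only @weight passes the checks above, and the loop below
    # synthesizes @note from its default expression.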
additional_lps: Dict[str, e.Expr] = {}
for k in ck_lp.keys():
this_tp = ck_lp[k].tp
if k in synth_lp.keys():
continue
elif isinstance(ck_lp[k].tp, e.ComputableTp):
continue
elif isinstance(this_tp, e.DefaultTp):
default_expr = this_tp.expr
if eops.binding_is_unnamed(default_expr):
additional_lps[k] = type_elaborate_default_tp(
ctx,
eops.instantiate_expr(
e.FreeVarExpr("LP_DEFAULT_SHOULD_NOT_OCCUR"),
default_expr,
),
)
else:
raise ValueError("Default Tp is not unnamed", this_tp.expr)
elif tops.mode_is_optional(ck_lp[k].mode):
additional_lps[k] = e.MultiSetExpr(expr=[])
else:
raise ValueError(
"Missing link prop",
k,
"synthesized keys",
synth_lp.keys(),
"required keys",
ck_lp.keys(),
)
if len(additional_lps.keys()) == 0:
return expr_ck
else:
return e.ShapedExprExpr(
expr_ck,
e.ShapeExpr(
shape={
e.LinkPropLabel(k): eops.abstract_over_expr(v)
for (k, v) in additional_lps.items()
}
),
) | Check link properties, return additional link props
that need to be inserted (default fields) | insert_link_prop_checking | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/dml_checking.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/dml_checking.py | Apache-2.0 |
def check_type_valid(ctx: e.TcCtx | e.DBSchema, tp: e.Tp) -> e.Tp:
"""
Check that a raw schema type is a valid type.
Returns the checked valid type.
"""
match tp:
case e.UncheckedTypeName(name=name):
resolved_name, resolved_tp = mops.resolve_raw_name_and_type_def(
ctx, name
)
match resolved_tp:
case e.ScalarTp(_):
return resolved_tp
case e.ObjectTp(_):
return e.NamedNominalLinkTp(
name=resolved_name, linkprop=e.ObjectTp({})
)
case _:
raise ValueError("Not Implemented", resolved_tp)
case e.AnyTp(_):
return tp
case e.CompositeTp(kind=kind, tps=tps, labels=labels):
return e.CompositeTp(
kind=kind,
tps=[check_type_valid(ctx, t) for t in tps],
labels=labels,
)
case e.QualifiedName(_):
return tp
case _:
raise ValueError("Not Implemented", tp) | Check that a raw schema type is a valid type.
Returns the checked valid type. | check_type_valid | python | geldata/gel | edb/tools/experimental_interpreter/type_checking_tools/typechecking.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/type_checking_tools/typechecking.py | Apache-2.0 |
def process_edgeql_file(schema: e.DBSchema, path: str) -> None:
"""
Process an edgeql file as ddl.
"""
with open(path) as f:
content = f.read()
ddls = parse_ddl(content)
process_ddls(schema, ddls) | Process an edgeql file as ddl. | process_edgeql_file | python | geldata/gel | edb/tools/experimental_interpreter/schema/library_discovery.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/schema/library_discovery.py | Apache-2.0 |
def add_ddl_library(schema: e.DBSchema, libpaths: List[str]) -> None:
"""
Add a library to the schema.
    Given a list of library paths:
    if a library is an edgeql file, process it as ddl;
    if a library is a directory, process all edgeql files at its
    top level in lexicographical order.
"""
for libpath in libpaths:
if os.path.isdir(libpath):
for filename in sorted(os.listdir(libpath)):
if filename.startswith("_"):
continue
if filename.endswith(".edgeql"):
process_edgeql_file(
schema, os.path.join(libpath, filename)
)
elif libpath.endswith(".edgeql"):
process_edgeql_file(schema, libpath)
else:
raise ValueError(f"Invalid library path {libpath}") | Add a library to the schema.
Given a list of library paths:
if a library is an edgeql file, process it as ddl;
if a library is a directory, process all edgeql files at its
top level in lexicographical order. | add_ddl_library | python | geldata/gel | edb/tools/experimental_interpreter/schema/library_discovery.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/schema/library_discovery.py | Apache-2.0 |
def process_ddl(schema: e.DBSchema, ddl: qlast.DDLOperation) -> None:
"""
Process a single DDL operation.
"""
# debug.dump_edgeql(ddl)
match ddl:
case qlast.CreateModule(
name=qlast.ObjectRef(name=module_name), commands=[]
):
schema.modules[(module_name,)] = e.DBModule({})
case (
qlast.CreatePseudoType()
| qlast.CreateAnnotation()
| qlast.AlterAnnotation()
):
print_warning("WARNING: not supported yet", ddl)
case qlast.CreateScalarType(
name=qlast.ObjectRef(module=module_name, name=type_name),
commands=[],
bases=bases,
abstract=is_abstract,
):
assert (
module_name is not None
), "Scalar types cannot be created in top level"
schema.modules[(module_name,)].defs[type_name] = (
e.ModuleEntityTypeDef(
e.ScalarTp(e.QualifiedName([module_name, type_name])),
constraints=[],
is_abstract=is_abstract,
indexes=[],
)
)
# We require DDL to contain fully qualified names
schema.subtyping_relations[
e.QualifiedName([module_name, type_name])
] = []
for base_tp in bases:
base_elabed = elab.elab_TypeName(base_tp)
match base_elabed:
# for bare ddl, we assume qualified type name
# is actually checked
case e.UncheckedTypeName(name=e.QualifiedName(_)):
assert isinstance(base_elabed.name, e.QualifiedName)
schema.subtyping_relations[
e.QualifiedName([module_name, type_name])
].append(base_elabed.name)
case e.AnyTp(spec):
# choice: make anytype live in std
schema.subtyping_relations[
e.QualifiedName([module_name, type_name])
].append(
e.QualifiedName(["std", "any" + (spec or "")])
)
case e.CompositeTp(kind=e.CompositeTpKind.Enum, tps=_):
print_warning(
"WARNING: behavior of extending"
" enum types undefined",
base_elabed,
)
case _:
raise ValueError(
"Must inherit from single name", base_elabed
)
case qlast.CreateOperator(
kind=_,
params=params,
name=name,
returning=ret_tp,
returning_typemod=ret_typemod,
):
process_builtin_fun_def(schema, name, params, ret_tp, ret_typemod)
case qlast.CreateFunction(
commands=commands,
params=params,
name=name,
returning=ret_tp,
returning_typemod=ret_typemod,
):
process_builtin_fun_def(schema, name, params, ret_tp, ret_typemod)
case qlast.CreateCast(
from_type=from_type,
to_type=to_type,
commands=commands,
allow_implicit=allow_implicit,
allow_assignment=allow_assignment,
code=cast_code,
):
from_tp = elab.elab_TypeName(from_type)
to_tp = elab.elab_TypeName(to_type)
from_tp_ck = tck.check_type_valid(schema, from_tp)
to_tp_ck = tck.check_type_valid(schema, to_tp)
assert (
from_tp_ck,
to_tp_ck,
) not in schema.casts, "duplicate casts"
match cast_code:
case qlast.CastCode(from_expr=from_expr, from_cast=from_cast):
match from_expr, from_cast:
case False, True:
cast_impl = get_default_func_impl_for_cast(
from_tp_ck, to_tp_ck
)
case True, False:
cast_impl = get_default_func_impl_for_cast(
from_tp_ck, to_tp_ck
)
case False, False:
cast_impl = get_default_func_impl_for_cast(
from_tp_ck, to_tp_ck
)
case _:
raise ValueError(
"TODO", cast_code, from_tp_ck, to_tp_ck
)
case _:
raise ValueError("TODO", cast_code)
schema.casts[(from_tp_ck, to_tp_ck)] = e.TpCast(
(
e.TpCastKind.Implicit
if allow_implicit
else (
e.TpCastKind.Assignment
if allow_assignment
else e.TpCastKind.Explicit
)
),
cast_impl,
) # TODO implicit cast
case qlast.CreateConstraint():
print_warning("WARNING: not supported yet", ddl)
case qlast.CreateProperty():
print_warning("WARNING: not supported yet", ddl)
case qlast.CreateLink():
print_warning("WARNING: not supported yet", ddl)
case qlast.CreateObjectType(
bases=bases,
commands=commands,
name=qlast.ObjectRef(name=name, module=module_name),
abstract=abstract,
):
assert (
module_name is not None
), "Object types cannot be created in top level"
obj_tp, constraints, indexes = elab_schema.elab_create_object_tp(
commands
)
elab_schema.add_bases_for_name(schema, (module_name,), name, bases)
schema.modules[(module_name,)].defs[name] = e.ModuleEntityTypeDef(
obj_tp,
is_abstract=abstract,
constraints=constraints,
indexes=indexes,
)
case qlast.AlterObjectType():
print_warning("WARNING: not supported yet", ddl)
case _:
debug.dump(ddl)
raise ValueError("DDL not yet supported", ddl) | Process a single DDL operation. | process_ddl | python | geldata/gel | edb/tools/experimental_interpreter/schema/ddl_processing.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/schema/ddl_processing.py | Apache-2.0 |
def fun_arg_type_polymorphism_post_processing(tp: e.Tp) -> e.Tp:
"""
    Replace `anytype` with Some(0).
    TODO: handle anyelem.
    This reflects the current status quo for polymorphism.
"""
def replace_any(tp: e.Tp) -> Optional[e.Tp]:
match tp:
case e.AnyTp(spec):
if spec == "type":
return e.SomeTp(0)
else:
return None
case _:
return None
    return eops.map_tp(replace_any, tp) | Replace `anytype` with Some(0).
TODO: handle anyelem.
This reflects the current status quo for polymorphism. | fun_arg_type_polymorphism_post_processing | python | geldata/gel | edb/tools/experimental_interpreter/schema/function_elaboration.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/schema/function_elaboration.py | Apache-2.0 |
def type_subtyping_walk(
recurse: Callable[[e.TcCtx, e.Tp, e.Tp], bool],
ctx: e.TcCtx,
tp1: e.Tp,
tp2: e.Tp,
) -> bool:
"""
    Walks the structure of the two types to decide subtyping.
    Subterm pairs are compared using the recurse function,
    to account for possible unifications.
"""
if tp_is_primitive(tp1) and tp_is_primitive(tp2):
if isinstance(tp1, e.ScalarTp) and isinstance(tp2, e.ScalarTp):
return is_nominal_subtype_in_schema(ctx, tp1.name, tp2.name)
else:
match tp1, tp2:
case _, e.DefaultTp(tp=tp2_inner, expr=_):
return recurse(ctx, tp1, tp2_inner)
case e.DefaultTp(tp=tp1_inner, expr=_), _:
return recurse(ctx, tp1_inner, tp2)
case _, e.ComputableTp(tp=tp2_inner, expr=_):
return recurse(ctx, tp1, tp2_inner)
case e.ComputableTp(tp=tp1_inner, expr=_), _:
return recurse(ctx, tp1_inner, tp2)
case _:
raise ValueError("TODO")
else:
match tp1, tp2:
case _, e.AnyTp():
return True
# Union and intersections before expansion of names
case (e.UnionTp(left=tp1_left, right=tp1_right), _):
return recurse(ctx, tp1_left, tp2) and recurse(
ctx, tp1_right, tp2
)
case (_, e.UnionTp(left=tp2_left, right=tp2_right)):
return recurse(ctx, tp1, tp2_left) or recurse(
ctx, tp1, tp2_right
)
case (_, e.IntersectTp(left=tp2_left, right=tp2_right)):
return recurse(ctx, tp1, tp2_left) and recurse(
ctx, tp1, tp2_right
)
case (e.IntersectTp(left=tp1_left, right=tp1_right), _):
return recurse(ctx, tp1_left, tp2) or recurse(
ctx, tp1_right, tp2
)
case e.ObjectTp(val=tp1_val), e.ObjectTp(val=tp2_val):
if set(tp1_val.keys()) != set(tp2_val.keys()):
return False
for k in tp1_val.keys():
if not recurse(ctx, tp1_val[k].tp, tp2_val[k].tp):
return False
if not is_cardinal_subtype(
tp1_val[k].mode, tp2_val[k].mode
):
return False
return True
case (
e.NominalLinkTp(name=n_1, subject=s_1, linkprop=lp_1),
e.NominalLinkTp(name=n_2, subject=s_2, linkprop=lp_2),
):
if is_nominal_subtype_in_schema(ctx, n_1, n_2):
return recurse(ctx, s_1, s_2) and recurse(ctx, lp_1, lp_2)
else:
return False
case (
e.NamedNominalLinkTp(name=n_1, linkprop=lp_1),
e.NamedNominalLinkTp(name=n_2, linkprop=lp_2),
):
assert isinstance(n_1, e.QualifiedName)
assert isinstance(n_2, e.QualifiedName)
if is_nominal_subtype_in_schema(ctx, n_1, n_2):
return recurse(ctx, lp_1, lp_2)
else:
return False
case (
_,
e.NamedNominalLinkTp(name=e.QualifiedName(n_2), linkprop=lp_2),
):
assert isinstance(tp2, e.NamedNominalLinkTp)
return recurse(
ctx, tp1, resolve_named_nominal_link_tp(ctx, tp2)
)
case (
e.NamedNominalLinkTp(name=e.QualifiedName(n_1), linkprop=lp_1),
_,
):
assert isinstance(tp1, e.NamedNominalLinkTp)
return recurse(
ctx, resolve_named_nominal_link_tp(ctx, tp1), tp2
)
# Other structural typing
case (
e.CompositeTp(kind=kind1, tps=tps1),
e.CompositeTp(kind=kind2, tps=tps2),
):
if kind1 != kind2 or len(tps1) != len(tps2):
return False
else:
return all(
recurse(ctx, tp1, tp2) for tp1, tp2 in zip(tps1, tps2)
)
case _:
return False
raise ValueError(
"should not be reachable, check if returns are missing?", tp1, tp2
    ) | Walks the structure of the two types to decide subtyping.
Subterm pairs are compared using the recurse function,
to account for possible unifications. | type_subtyping_walk | python | geldata/gel | edb/tools/experimental_interpreter/data/type_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/type_ops.py | Apache-2.0 |
def collect_is_subtype_with_instantiation(
ctx: e.TcCtx,
syn_tp: e.Tp,
ck_tp: e.Tp,
some_tp_mapping: Dict[int, List[e.Tp]],
) -> bool:
"""
    Here, ck_tp may be a Some type
    (parametric polymorphism) that needs to be instantiated.
"""
if isinstance(ck_tp, e.SomeTp):
if ck_tp.index in some_tp_mapping:
some_tp_mapping[ck_tp.index] = [
syn_tp,
*some_tp_mapping[ck_tp.index],
]
else:
some_tp_mapping[ck_tp.index] = [syn_tp]
return True
else:
def recurse(ctx: e.TcCtx, tp1: e.Tp, tp2: e.Tp) -> bool:
return collect_is_subtype_with_instantiation(
ctx, tp1, tp2, some_tp_mapping
)
        return type_subtyping_walk(recurse, ctx, syn_tp, ck_tp) | Here, ck_tp may be a Some type
(parametric polymorphism) that needs to be instantiated. | collect_is_subtype_with_instantiation | python | geldata/gel | edb/tools/experimental_interpreter/data/type_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/type_ops.py | Apache-2.0 |
def recursive_instantiate_tp(
tp: e.Tp, some_tp_mapping: Dict[int, e.Tp]
) -> e.Tp:
"""
    Instantiate all Some(i) subterms in a type.
    Used to compute a parametric function's return type.
"""
def inst_func(tp: e.Tp) -> Optional[e.Tp]:
if isinstance(tp, e.SomeTp):
if tp.index in some_tp_mapping:
return some_tp_mapping[tp.index]
else:
raise ValueError("some tp not found")
else:
return None
    return eops.map_tp(inst_func, tp) | Instantiate all Some(i) subterms in a type.
Used to compute a parametric function's return type. | recursive_instantiate_tp | python | geldata/gel | edb/tools/experimental_interpreter/data/type_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/type_ops.py | Apache-2.0 |
def get_runtime_tp(tp: e.Tp) -> e.Tp:
"""Drops defaults and computed"""
def map_func(candidate: e.Tp) -> Optional[e.Tp]:
match candidate:
case e.ComputableTp(expr=_, tp=c_tp):
return get_runtime_tp(c_tp)
case e.DefaultTp(expr=_, tp=c_tp):
return get_runtime_tp(c_tp)
case _:
return None
return eops.map_tp(map_func, tp) | Drops defaults and computed | get_runtime_tp | python | geldata/gel | edb/tools/experimental_interpreter/data/type_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/type_ops.py | Apache-2.0 |
def get_storage_tp(fmt: e.ObjectTp) -> e.ObjectTp:
"""
Returns the storage type of a given type.
In particular, computable attributes should be removed.
"""
def drop_computable(t: e.ObjectTp):
return e.ObjectTp(
{
k: tp
for (k, tp) in t.val.items()
if not isinstance(tp.tp, e.ComputableTp)
}
)
def get_lp_storage(t: e.ResultTp) -> e.ResultTp:
match t.tp:
case e.NamedNominalLinkTp(name=n, linkprop=lp):
return e.ResultTp(
e.NamedNominalLinkTp(name=n, linkprop=drop_computable(lp)),
t.mode,
)
case e.DefaultTp(tp=tp, expr=_):
return get_lp_storage(e.ResultTp(tp, t.mode))
case e.UnionTp(left=left_tp, right=right_tp):
return e.ResultTp(
e.UnionTp(
left=get_lp_storage(e.ResultTp(left_tp, t.mode)).tp,
right=get_lp_storage(e.ResultTp(right_tp, t.mode)).tp,
),
t.mode,
)
case e.CompositeTp(kind=kind, tps=tps, labels=labels):
return e.ResultTp(
e.CompositeTp(
kind=kind,
tps=[
get_lp_storage(e.ResultTp(tp, t.mode)).tp
for tp in tps
],
labels=labels,
),
t.mode,
)
case _:
if tp_is_primitive(t.tp):
return t
else:
raise ValueError("Cannot get lp storage", t.tp)
return e.ObjectTp(
{
k: get_lp_storage(tp)
for (k, tp) in fmt.val.items()
if not isinstance(tp.tp, e.ComputableTp)
}
) | Returns the storage type of a given type.
In particular, computable attributes should be removed. | get_storage_tp | python | geldata/gel | edb/tools/experimental_interpreter/data/type_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/type_ops.py | Apache-2.0 |
def typed_multi_set_val_to_json_like(
tp: e.ResultTp, m: MultiSetVal, dbschema: e.DBSchema, top_level=False
) -> json_like:
"""
Convert a MultiSetVal to a JSON-like value.
param top_level: If True, the result is a list of values, even if
the result's type is a singleton.
"""
if tp.mode.upper == e.CardNumOne:
if len(m.getVals()) > 1:
raise ValueError("Single Multiset must have cardinality at most 1")
if len(m.getVals()) == 1:
result = typed_val_to_json_like(m.getVals()[0], tp.tp, dbschema)
if top_level:
result = [result]
else:
if top_level:
result = []
else:
result = []
else:
# do not dedup when converting to json (see test_edgeql_shape_for_01)
result = [
typed_val_to_json_like(v, tp.tp, dbschema) for v in m.getRawVals()
]
return result | Convert a MultiSetVal to a JSON-like value.
param top_level: If True, the result is a list of values, even if
the result's type is a singleton. | typed_multi_set_val_to_json_like | python | geldata/gel | edb/tools/experimental_interpreter/data/val_to_json.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/val_to_json.py | Apache-2.0 |
def map_tp(f: Callable[[Tp], Optional[Tp]], tp: Tp) -> Tp:
"""maps a function over free variables and bound variables,
and does not modify other nodes
f : called with current expression and the current level, which refers to
the first binder outside of the entire expression expr
level : this value refers to the first binder OUTSIDE of the expression
being mapped, it should be called with initially = 1.
Increases as we encounter abstractions
"""
tentative = f(tp)
if tentative is not None:
return tentative
else:
def recur(expr):
return map_tp(f, expr)
match tp:
case e.ScalarTp() | e.AnyTp():
return tp
case e.ObjectTp(val=val):
return e.ObjectTp(
val={
k: e.ResultTp(recur(v), card)
for k, (v, card) in val.items()
}
)
case e.CompositeTp(kind=k, tps=tps, labels=labels):
return e.CompositeTp(
kind=k, tps=[recur(v) for v in tps], labels=labels
)
case e.NamedNominalLinkTp(name=name, linkprop=linkprop):
return e.NamedNominalLinkTp(
name=name, linkprop=recur(linkprop)
)
# case e.UncheckedNamedNominalLinkTp(name=name, linkprop=linkprop):
# return e.UncheckedNamedNominalLinkTp(name=name,
# linkprop=recur(linkprop))
case e.NominalLinkTp(
name=name, subject=subject, linkprop=linkprop
):
return e.NominalLinkTp(
name=name, subject=recur(subject), linkprop=recur(linkprop)
)
case e.UncheckedComputableTp(_):
return tp
case e.ComputableTp(expr=expr, tp=tp):
return e.ComputableTp(expr=expr, tp=recur(tp))
case e.DefaultTp(expr=expr, tp=tp):
return e.DefaultTp(expr=expr, tp=recur(tp))
case e.UncheckedTypeName(name=name):
return tp
case e.UnionTp(l, r):
return e.UnionTp(recur(l), recur(r))
case _:
raise ValueError("Not Implemented", tp) | maps a function over free variables and bound variables,
and does not modify other nodes
f : called with current expression and the current level, which refers to
the first binder outside of the entire expression expr
level : this value refers to the first binder OUTSIDE of the expression
being mapped, it should be called with initially = 1.
Increases as we encounter abstractions | map_tp | python | geldata/gel | edb/tools/experimental_interpreter/data/expr_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/expr_ops.py | Apache-2.0 |
def map_expr(f: Callable[[Expr], Optional[Expr]], expr: Expr) -> Expr:
"""maps a function over free variables and bound variables,
and does not modify other nodes
f : called with current expression and the current level, which refers to
the first binder outside of the entire expression expr
level : this value refers to the first binder OUTSIDE of the expression
being mapped, it should be called with initially = 1.
Increases as we encounter abstractions
"""
tentative = f(expr)
if tentative is not None:
return tentative
else:
def recur_tp(expr):
def f_tp(tp: Tp) -> Optional[Tp]:
result = f(tp)
# if result is not None:
# assert isinstance(result, Tp)
return cast(Optional[Tp], result)
return map_tp(f_tp, expr)
def recur(expr):
return map_expr(f, expr)
match expr:
case (
FreeVarExpr(_)
| BoundVarExpr(_)
| e.ScalarVal(_)
| RefVal(_)
| ArrVal(_)
| UnnamedTupleVal(_)
| e.QualifiedName(_)
| e.UnqualifiedName(_)
| e.QualifiedNameWithFilter(_, _)
):
return expr
case BindingExpr(var=var, body=body):
return BindingExpr(var=var, body=map_expr(f, body))
case UnnamedTupleExpr(val=val):
return UnnamedTupleExpr(val=[recur(e) for e in val])
case NamedTupleExpr(val=val):
return NamedTupleExpr(
val={k: recur(e) for (k, e) in val.items()}
)
case ObjectProjExpr(subject=subject, label=label):
return ObjectProjExpr(subject=recur(subject), label=label)
case e.TupleProjExpr(subject=subject, label=label):
return e.TupleProjExpr(subject=recur(subject), label=label)
case BackLinkExpr(subject=subject, label=label):
return BackLinkExpr(subject=recur(subject), label=label)
case e.IsTpExpr(subject=subject, tp=tp):
return e.IsTpExpr(subject=recur(subject), tp=tp)
case TpIntersectExpr(subject=subject, tp=tp_name):
return TpIntersectExpr(subject=recur(subject), tp=tp_name)
case LinkPropProjExpr(subject=subject, linkprop=label):
return LinkPropProjExpr(subject=recur(subject), linkprop=label)
case FunAppExpr(
fun=fname, args=args, overloading_index=idx, kwargs=kwargs
):
return FunAppExpr(
fun=fname,
args=[recur(arg) for arg in args],
overloading_index=idx,
kwargs={k: recur(v) for (k, v) in kwargs.items()},
)
case FilterOrderExpr(subject=subject, filter=filter, order=order):
return FilterOrderExpr(
subject=recur(subject),
filter=recur(filter),
order={l: recur(o) for (l, o) in order.items()},
)
case ShapedExprExpr(expr=expr, shape=shape):
return ShapedExprExpr(expr=recur(expr), shape=recur(shape))
case ShapeExpr(shape=shape):
return ShapeExpr(
shape={k: recur(e_1) for (k, e_1) in shape.items()}
)
case TypeCastExpr(tp=tp, arg=arg):
return TypeCastExpr(tp=recur_tp(tp), arg=recur(arg))
case UnionExpr(left=left, right=right):
return UnionExpr(left=recur(left), right=recur(right))
case ArrExpr(elems=arr):
return ArrExpr(elems=[recur(e) for e in arr])
case MultiSetExpr(expr=arr):
return MultiSetExpr(expr=[recur(e) for e in arr])
case OffsetLimitExpr(subject=subject, offset=offset, limit=limit):
return OffsetLimitExpr(
subject=recur(subject),
offset=recur(offset),
limit=recur(limit),
)
case WithExpr(bound=bound, next=next):
return WithExpr(bound=recur(bound), next=recur(next))
case InsertExpr(name=name, new=new):
return InsertExpr(
name=name, new={k: recur(v) for (k, v) in new.items()}
)
case e.FreeObjectExpr():
return e.FreeObjectExpr()
case e.ConditionalDedupExpr(expr=sub):
return e.ConditionalDedupExpr(recur(sub))
# case ObjectExpr(val=val):
# return ObjectExpr(
# val={label: recur(item)
# for (label, item) in val.items()})
case DetachedExpr(expr=expr):
return DetachedExpr(expr=recur(expr))
case SubqueryExpr(expr=expr):
return SubqueryExpr(expr=recur(expr))
case UpdateExpr(subject=subject, shape=shape):
return UpdateExpr(subject=recur(subject), shape=recur(shape))
case e.DeleteExpr(subject=subject):
return e.DeleteExpr(subject=recur(subject))
case ForExpr(bound=bound, next=next):
return ForExpr(bound=recur(bound), next=recur(next))
case OptionalForExpr(bound=bound, next=next):
return OptionalForExpr(bound=recur(bound), next=recur(next))
case e.IfElseExpr(
then_branch=then_branch,
condition=condition,
else_branch=else_branch,
):
return e.IfElseExpr(
then_branch=recur(then_branch),
condition=recur(condition),
else_branch=recur(else_branch),
)
case e.CheckedTypeCastExpr(
cast_tp=cast_tp,
cast_spec=cast_spec,
arg=arg,
):
return e.CheckedTypeCastExpr(
cast_tp=cast_tp, cast_spec=cast_spec, arg=recur(arg)
)
case e.ParameterExpr(name=name, tp=tp, is_required=is_required):
return e.ParameterExpr(
name=name, tp=recur_tp(tp), is_required=is_required
)
case _:
return map_tp(f, expr) # type: ignore[arg-type]
raise ValueError("Not Implemented: map_expr ", expr) | maps a function over free variables and bound variables,
and does not modify other nodes
f : called with current expression and the current level, which refers to
the first binder outside of the entire expression expr
level : this value refers to the first binder OUTSIDE of the expression
being mapped, it should be called with initially = 1.
Increases as we encounter abstractions | map_expr | python | geldata/gel | edb/tools/experimental_interpreter/data/expr_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/expr_ops.py | Apache-2.0 |
def map_var(f: Callable[[VarExpr], Optional[Expr]], expr: Expr) -> Expr:
"""maps a function over free variables and bound variables,
and does not modify other nodes
f : if not None, replace with the result
"""
def map_func(e: Expr) -> Optional[Expr]:
match e:
case FreeVarExpr(var=_):
return f(e)
case BoundVarExpr(var=_):
return f(e)
return None
return map_expr(map_func, expr) | maps a function over free variables and bound variables,
and does not modify other nodes
f : if not None, replace with the result | map_var | python | geldata/gel | edb/tools/experimental_interpreter/data/expr_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/expr_ops.py | Apache-2.0 |
def abstract_over_expr(expr: Expr, var: Optional[str] = None) -> BindingExpr:
"""Construct a BindingExpr that binds var"""
if var is None:
var = next_name()
new_body = subst_expr_for_expr(BoundVarExpr(var), FreeVarExpr(var), expr)
return BindingExpr(var=var, body=new_body) | Construct a BindingExpr that binds var | abstract_over_expr | python | geldata/gel | edb/tools/experimental_interpreter/data/expr_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/expr_ops.py | Apache-2.0 |
def iterative_subst_expr_for_expr(
expr2: Sequence[Expr], replace: Sequence[Expr], subject: Expr
):
"""Iteratively perform substitution from right to left,
comptues: [expr2[0]/replace[0]]...[expr[n-1]/replace[n-1]]subject"""
assert len(expr2) == len(replace)
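    # Illustrative example: with expr2 = [a, b] and replace = [x, y], the
    # loop below first computes [b/y]subject and then applies [a/x] to that,
    # matching the right-to-left order described in the docstring.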
result = subject
for i in reversed(list(range(len(replace)))):
result = subst_expr_for_expr(expr2[i], replace[i], result)
    return result | Iteratively perform substitution from right to left;
computes: [expr2[0]/replace[0]]...[expr2[n-1]/replace[n-1]]subject | iterative_subst_expr_for_expr | python | geldata/gel | edb/tools/experimental_interpreter/data/expr_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/expr_ops.py | Apache-2.0 |
def try_resolve_module_entity(
ctx: e.TcCtx | e.DBSchema, name: e.QualifiedName
) -> Optional[e.ModuleEntity]:
"""
Resolve a module entity using the ABS method.
https://github.com/edgedb/edgedb/discussions/4883
"""
assert len(name.names) >= 2
if name.names[0] == "module":
assert isinstance(ctx, e.TcCtx), (
"qualified names beginning with"
" module cannot be resolved in a schema"
)
name = e.QualifiedName([*ctx.current_module, *name.names[1:]])
module: e.DBModule
if isinstance(ctx, e.TcCtx):
module = resolve_module_in_schema(ctx.schema, tuple(name.names[:-1]))
else:
module = resolve_module_in_schema(ctx, tuple(name.names[:-1]))
if name.names[-1] in module.defs:
return module.defs[name.names[-1]]
else:
return None | Resolve a module entity using the ABS method.
https://github.com/edgedb/edgedb/discussions/4883 | try_resolve_module_entity | python | geldata/gel | edb/tools/experimental_interpreter/data/module_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/module_ops.py | Apache-2.0 |
def try_resolve_simple_name(
ctx: e.TcCtx | e.DBSchema, unq_name: e.UnqualifiedName
) -> Optional[e.QualifiedName]:
"""
Resolve the name (may refer to a type or a function) in this order:
1. Current module
2. The default `std` module
"""
name = unq_name.name
if isinstance(ctx, e.TcCtx):
current_module = resolve_module_in_schema(
ctx.schema, ctx.current_module
)
if name in current_module.defs:
return e.QualifiedName([*ctx.current_module, name])
if isinstance(ctx, e.TcCtx):
schema = ctx.schema
else:
schema = ctx
for default_scope in default_open_scopes:
std_module = resolve_module_in_schema(schema, default_scope)
if name in std_module.defs:
return e.QualifiedName([*default_scope, name])
return None | Resolve the name (may refer to a type or a function) in this order:
1. Current module
2. The default `std` module | try_resolve_simple_name | python | geldata/gel | edb/tools/experimental_interpreter/data/module_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/module_ops.py | Apache-2.0 |
def enumerate_all_object_type_defs(
ctx: e.TcCtx,
) -> List[Tuple[e.QualifiedName, e.ObjectTp]]:
"""
    Enumerate all object type definitions across all modules
    in the schema (both checked and unchecked).
"""
result: List[Tuple[e.QualifiedName, e.ObjectTp]] = []
for module_name, module_def in [
*ctx.schema.modules.items(),
*ctx.schema.unchecked_modules.items(),
]:
for tp_name, me in module_def.defs.items():
if isinstance(me, e.ModuleEntityTypeDef) and isinstance(
me.typedef, e.ObjectTp
):
result.append(
(e.QualifiedName([*module_name, tp_name]), me.typedef)
)
    return result | Enumerate all object type definitions across all modules
in the schema (both checked and unchecked). | enumerate_all_object_type_defs | python | geldata/gel | edb/tools/experimental_interpreter/data/module_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/module_ops.py | Apache-2.0 |
def map_query(
f: Callable[[Expr, QueryLevel], Optional[Expr]],
expr: Expr,
schema: e.TcCtx,
level: QueryLevel = QueryLevel.TOP_LEVEL,
) -> Expr:
"""maps a function over free variables and bound variables,
and does not modify other nodes
f : called with current expression and the current level,
return a non-null value to cut off further exploration
level : Indicates the current level, initially should be
"""
tentative = f(expr, level)
if tentative is not None:
return tentative
else:
def recur(expr):
return map_query(f, expr, schema, level)
def semisub_recur(expr):
return map_query(f, expr, schema, enter_semi_subquery(level))
def sub_recur(expr):
return map_query(f, expr, schema, enter_sub_query(level))
match expr:
case FreeVarExpr(_):
return expr
case e.QualifiedName(_):
return expr
case BoundVarExpr(_):
return expr
case e.ScalarVal(_):
return expr
case BindingExpr(var=var, body=body):
# type: ignore[has-type]
return BindingExpr(var=var, body=recur(body))
case UnnamedTupleExpr(val=val):
return UnnamedTupleExpr(val=[recur(e) for e in val])
case NamedTupleExpr(val=val):
return NamedTupleExpr(
val={k: recur(e) for (k, e) in val.items()}
)
case ObjectProjExpr(subject=subject, label=label):
if eops.is_path(subject):
return ObjectProjExpr(subject=recur(subject), label=label)
else:
return ObjectProjExpr(
subject=semisub_recur(subject), label=label
)
case LinkPropProjExpr(subject=subject, linkprop=linkprop):
return LinkPropProjExpr(
subject=recur(subject), linkprop=linkprop
)
case BackLinkExpr(subject=subject, label=label):
return BackLinkExpr(subject=recur(subject), label=label)
case e.IsTpExpr(subject=subject, tp=tp):
return e.IsTpExpr(subject=recur(subject), tp=tp)
case TpIntersectExpr(subject=subject, tp=tp_name):
return TpIntersectExpr(subject=recur(subject), tp=tp_name)
case FunAppExpr(
fun=fname, args=args, overloading_index=idx, kwargs=kwargs
):
mapped_args: Sequence[Expr] = []
_, resolved_fun_defs = mops.resolve_raw_name_and_func_def(
schema, fname
)
from ..type_checking_tools import function_checking as fun_ck
args_mods = [
mods
for mods in [
fun_ck.try_match_and_get_arg_mods(
expr, resolved_fun_defs[i]
)
for i in range(len(resolved_fun_defs))
]
if mods is not None
]
if len(args_mods) == 0:
                    # This can happen with named-only arguments;
                    # they still need to be sorted out properly
raise ValueError("Expecting fun_defs [TODO named args]")
if not all(
[args_mods[0] == args_mod for args_mod in args_mods]
):
print_warning(
"Function call with different argument mods",
"Choosing the first one",
)
params = resolved_fun_defs[0].tp.args_mod
for i in range(len(args)):
match params[i]:
case ParamSingleton():
mapped_args = [*mapped_args, recur(args[i])]
case ParamOptional():
mapped_args = [
*mapped_args,
semisub_recur(args[i]),
]
case ParamSetOf():
mapped_args = [*mapped_args, sub_recur(args[i])]
case _:
raise ValueError
mapped_kwargs: Dict[str, Expr] = {}
for i, (k, v) in enumerate(kwargs.items()):
match params[i + len(args)]:
case ParamSingleton():
mapped_kwargs[k] = recur(v)
case ParamOptional():
mapped_kwargs[k] = semisub_recur(v)
case ParamSetOf():
mapped_kwargs[k] = sub_recur(v)
return FunAppExpr(
fun=fname,
args=mapped_args,
overloading_index=idx,
kwargs=mapped_kwargs,
)
case FilterOrderExpr(subject=subject, filter=filter, order=order):
return FilterOrderExpr(
subject=recur(subject),
filter=sub_recur(filter),
order={l: sub_recur(o) for (l, o) in order.items()},
)
case ShapedExprExpr(expr=expr, shape=shape):
return ShapedExprExpr(expr=recur(expr), shape=sub_recur(shape))
case ShapeExpr(shape=shape):
return ShapeExpr(
shape={k: sub_recur(e_1) for (k, e_1) in shape.items()}
)
case TypeCastExpr(tp=tp, arg=arg):
return TypeCastExpr(tp=tp, arg=recur(arg))
case UnionExpr(left=left, right=right):
return UnionExpr(left=sub_recur(left), right=sub_recur(right))
case ArrExpr(elems=arr):
return ArrExpr(elems=[recur(e) for e in arr])
case MultiSetExpr(expr=arr):
return MultiSetExpr(expr=[sub_recur(e) for e in arr])
case OffsetLimitExpr(subject=subject, offset=offset, limit=limit):
return OffsetLimitExpr(
subject=sub_recur(subject),
offset=sub_recur(offset),
limit=sub_recur(limit),
)
case WithExpr(bound=bound, next=next):
return WithExpr(bound=sub_recur(bound), next=sub_recur(next))
case ForExpr(bound=bound, next=next):
return ForExpr(bound=sub_recur(bound), next=sub_recur(next))
case OptionalForExpr(bound=bound, next=next):
return OptionalForExpr(
bound=sub_recur(bound), next=sub_recur(next)
)
case e.IfElseExpr(
then_branch=then_branch,
condition=condition,
else_branch=else_branch,
):
return e.IfElseExpr(
then_branch=sub_recur(then_branch),
condition=recur(condition),
else_branch=sub_recur(else_branch),
)
case InsertExpr(name=name, new=new):
return InsertExpr(
name=name, new={k: sub_recur(e) for (k, e) in new.items()}
)
case DetachedExpr(expr=expr):
return DetachedExpr(expr=sub_recur(expr))
case SubqueryExpr(expr=expr):
return SubqueryExpr(expr=sub_recur(expr))
case UpdateExpr(subject=subject, shape=shape):
return UpdateExpr(
subject=recur(subject), shape=sub_recur(shape)
)
case e.DeleteExpr(subject=subject):
return e.DeleteExpr(subject=recur(subject))
case e.FreeObjectExpr():
return e.FreeObjectExpr()
case e.ParameterExpr(_):
return expr
raise ValueError("Not Implemented: map_query ", expr) | maps a function over free variables and bound variables,
and does not modify other nodes
f : called with current expression and the current level,
return a non-null value to cut off further exploration
level : Indicates the current level, initially should be | map_query | python | geldata/gel | edb/tools/experimental_interpreter/data/query_ops.py | https://github.com/geldata/gel/blob/master/edb/tools/experimental_interpreter/data/query_ops.py | Apache-2.0 |
def try_group_rewrite(
node: qlast.Query,
aliases: AliasGenerator,
) -> Optional[qlast.Query]:
"""
Try to apply some syntactic rewrites of GROUP expressions so we
can generate better code.
The two key desugarings are:
* Sink a shape into the internal group result
SELECT (GROUP ...) <shape>
[filter-clause] [order-clause] [other clauses]
=>
SELECT (
FOR GROUP ...
UNION <igroup-body> <shape>
[filter-clause]
[order-clause]
) [other clauses]
* Convert a FOR over a group into just an internal group (and
a trivial FOR)
FOR g in (GROUP ...) UNION <body>
=>
FOR GROUP ...
UNION (
FOR g IN (<group-body>)
UNION <body>
)
"""
# Inline trivial uses of aliases bound to a group and then
# immediately used, so that we can apply the other optimizations.
match node:
case qlast.SelectQuery(
aliases=[
*_,
qlast.AliasedExpr(alias=alias, expr=qlast.GroupQuery() as grp)
] as qaliases,
result=qlast.Shape(
expr=astutils.alias_view((alias2, [])),
elements=elements,
) as result,
) if alias == alias2 and _count_alias_uses(result, alias) == 1:
node = node.replace(
aliases=qaliases[:-1],
result=qlast.Shape(expr=grp, elements=elements),
)
case qlast.ForQuery(
aliases=[
*_,
qlast.AliasedExpr(alias=alias, expr=qlast.GroupQuery() as grp)
] as qaliases,
iterator=astutils.alias_view((alias2, [])),
result=result,
) if alias == alias2 and _count_alias_uses(result, alias) == 0:
node = node.replace(
aliases=qaliases[:-1],
iterator=grp,
)
# Sink shapes into the GROUP
if (
isinstance(node, qlast.SelectQuery)
and isinstance(node.result, qlast.Shape)
and isinstance(node.result.expr, qlast.GroupQuery)
):
igroup = desugar_group(node.result.expr, aliases)
igroup = igroup.replace(result=qlast.Shape(
expr=igroup.result, elements=node.result.elements))
# FILTER gets sunk into the body of the FOR GROUP
if node.where or node.orderby:
igroup = igroup.replace(
# We need to move the result_alias in case
# the FILTER depends on it.
result_alias=node.result_alias,
where=node.where,
orderby=node.orderby,
)
return node.replace(
result=igroup, result_alias=None, where=None, orderby=None)
# Eliminate FORs over GROUPs
if (
isinstance(node, qlast.ForQuery)
and isinstance(node.iterator, qlast.GroupQuery)
):
igroup = desugar_group(node.iterator, aliases)
new_result = qlast.ForQuery(
iterator_alias=node.iterator_alias,
iterator=igroup.result,
result=node.result,
)
return igroup.replace(result=new_result, aliases=node.aliases)
return None | Try to apply some syntactic rewrites of GROUP expressions so we
can generate better code.
The two key desugarings are:
* Sink a shape into the internal group result
SELECT (GROUP ...) <shape>
[filter-clause] [order-clause] [other clauses]
=>
SELECT (
FOR GROUP ...
UNION <igroup-body> <shape>
[filter-clause]
[order-clause]
) [other clauses]
* Convert a FOR over a group into just an internal group (and
a trivial FOR)
FOR g in (GROUP ...) UNION <body>
=>
FOR GROUP ...
UNION (
FOR g IN (<group-body>)
UNION <body>
) | try_group_rewrite | python | geldata/gel | edb/edgeql/desugar_group.py | https://github.com/geldata/gel/blob/master/edb/edgeql/desugar_group.py | Apache-2.0 |
def trace_refs(
qltree: qlast.Base,
*,
schema: s_schema.Schema,
anchors: Optional[Mapping[str, sn.QualName]] = None,
path_prefix: Optional[sn.QualName] = None,
module: str,
objects: Dict[sn.QualName, Optional[ObjectLike]],
pointers: Mapping[sn.UnqualName, Set[sn.QualName]],
params: Mapping[str, sn.QualName],
local_modules: AbstractSet[str]
) -> Tuple[FrozenSet[sn.QualName], FrozenSet[sn.QualName]]:
"""Return a list of schema item names used in an expression.
First set is strong deps, second is weak.
"""
ctx = TracerContext(
schema=schema,
module=module,
objects=objects,
pointers=pointers,
anchors=anchors or {},
path_prefix=path_prefix,
modaliases={},
params=params,
visited=set(),
local_modules=local_modules,
)
trace(qltree, ctx=ctx)
    return frozenset(ctx.refs), frozenset(ctx.weak_refs) | Return the sets of schema item names used in an expression.
The first set is strong deps, the second is weak. | trace_refs | python | geldata/gel | edb/edgeql/tracer.py | https://github.com/geldata/gel/blob/master/edb/edgeql/tracer.py | Apache-2.0 |
def resolve_name(
ref: qlast.ObjectRef,
*,
current_module: str,
schema: s_schema.Schema,
objects: Dict[sn.QualName, Optional[ObjectLike]],
modaliases: Optional[Dict[Optional[str], str]],
local_modules: AbstractSet[str],
declaration: bool=False,
) -> sn.QualName:
"""Resolve a name into a fully-qualified one.
This takes into account the current module and modaliases.
This function mostly mirrors schema.FlatSchema._search_with_getter
except:
- If no module and no default module was set, try the current module
- When searching in std, ensure module is not a local module
- If no result found, return a name with the best modname available
"""
def exists(name: sn.QualName) -> bool:
return (
objects.get(name) is not None
or schema.get(name, default=None, type=so.Object) is not None
)
module = ref.module
orig_module = module
# Apply module aliases
is_current, module = s_schema.apply_module_aliases(
module, modaliases, current_module,
)
no_std = declaration or is_current
# Check if something matches the name
if module is not None:
fqname = sn.QualName(module=module, name=ref.name)
if exists(fqname):
return fqname
elif orig_module is None:
# Look for name in current module
fqname = sn.QualName(module=current_module, name=ref.name)
if exists(fqname):
return fqname
# Try something in std if __current__ was not specified
if not no_std:
# If module == None, look in std
if orig_module is None:
mod_name = 'std'
fqname = sn.QualName(mod_name, ref.name)
if exists(fqname):
return fqname
# Ensure module is not a local module.
# Then try the module as part of std.
if module and module not in local_modules:
mod_name = f'std::{module}'
fqname = sn.QualName(mod_name, ref.name)
if exists(fqname):
return fqname
# Just pick the best module name available
return sn.QualName(
module=module or orig_module or current_module,
name=ref.name,
) | Resolve a name into a fully-qualified one.
This takes into account the current module and modaliases.
This function mostly mirrors schema.FlatSchema._search_with_getter
except:
- If no module and no default module was set, try the current module
- When searching in std, ensure module is not a local module
- If no result found, return a name with the best modname available | resolve_name | python | geldata/gel | edb/edgeql/tracer.py | https://github.com/geldata/gel/blob/master/edb/edgeql/tracer.py | Apache-2.0 |
def get_ancestors(
fq_name: s_name.QualName,
ancestors: Dict[s_name.QualName, Set[s_name.QualName]],
parents: Mapping[s_name.QualName, AbstractSet[s_name.QualName]],
) -> Set[s_name.QualName]:
"""Recursively compute ancestors (in place) from the parents graph."""
# value already computed
result = ancestors.get(fq_name, set())
if result is RECURSION_GUARD:
raise errors.InvalidDefinitionError(
f'{str(fq_name)!r} is defined recursively')
elif result:
return result
ancestors[fq_name] = RECURSION_GUARD
parent_set = parents.get(fq_name, set())
# base case: include the parents
result = set(parent_set)
for fq_parent in parent_set:
# recursive step: include parents' ancestors
result |= get_ancestors(fq_parent, ancestors, parents)
ancestors[fq_name] = result
return result | Recursively compute ancestors (in place) from the parents graph. | get_ancestors | python | geldata/gel | edb/edgeql/declarative.py | https://github.com/geldata/gel/blob/master/edb/edgeql/declarative.py | Apache-2.0 |
def _get_bases(
decl: qlast.CreateObject, *, ctx: LayoutTraceContext
) -> List[s_name.QualName]:
"""Resolve object bases from the "extends" declaration."""
if not isinstance(decl, qlast.BasedOnTuple):
return []
bases = []
if decl.bases:
# Explicit inheritance
has_enums = any(qlutils.is_enum(br) for br in decl.bases)
if has_enums:
if len(decl.bases) > 1:
raise errors.SchemaError(
f"invalid scalar type definition, enumeration must "
f"be the only supertype specified",
span=decl.bases[0].span,
)
bases = [s_name.QualName("std", "anyenum")]
else:
for base_ref in decl.bases:
# Validate that the base actually exists.
tracer_type = _get_tracer_type(decl)
assert tracer_type is not None
obj = _resolve_type_name(
base_ref.maintype,
tracer_type=tracer_type,
ctx=ctx
)
name = obj.get_name(ctx.schema)
if not isinstance(name, s_name.QualName):
qname = s_name.QualName.from_string(name.name)
else:
qname = name
bases.append(qname)
return bases | Resolve object bases from the "extends" declaration. | _get_bases | python | geldata/gel | edb/edgeql/declarative.py | https://github.com/geldata/gel/blob/master/edb/edgeql/declarative.py | Apache-2.0 |
def _block_ws(self, change: int, newlines: bool = True) -> None:
"""Block whitespace"""
if newlines:
self.indentation += change
self.new_lines = 1
else:
self.write(' ') | Block whitespace | _block_ws | python | geldata/gel | edb/edgeql/codegen.py | https://github.com/geldata/gel/blob/master/edb/edgeql/codegen.py | Apache-2.0 |
def parse_query(
source: Union[qltokenizer.Source, str],
module_aliases: Optional[Mapping[Optional[str], str]] = None,
) -> qlast.Query:
"""Parse some EdgeQL potentially adding some module aliases.
This will parse EdgeQL queries and expressions. If the source is an
expression, the result will be wrapped into a SelectQuery.
"""
tree = parse_fragment(source)
if not isinstance(tree, qlast.Query):
tree = qlast.SelectQuery(result=tree)
if module_aliases:
append_module_aliases(tree, module_aliases)
return tree | Parse some EdgeQL potentially adding some module aliases.
This will parse EdgeQL queries and expressions. If the source is an
expression, the result will be wrapped into a SelectQuery. | parse_query | python | geldata/gel | edb/edgeql/parser/__init__.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/__init__.py | Apache-2.0 |
def reduce_CreateRewrite(self, _r, kinds, _u, expr, commands):
"""%reduce
REWRITE RewriteKindList
USING ParenExpr
CreateRewriteSDLCommandsBlock
"""
# The name isn't important (it gets replaced) but we need to
# have one.
name = '/'.join(str(kind) for kind in kinds.val)
self.val = qlast.CreateRewrite(
name=qlast.ObjectRef(name=name),
kinds=kinds.val,
expr=expr.val,
commands=commands.val,
) | %reduce
REWRITE RewriteKindList
USING ParenExpr
CreateRewriteSDLCommandsBlock | reduce_CreateRewrite | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRewrite(self, _r, kinds, _u, expr):
"""%reduce
REWRITE RewriteKindList
USING ParenExpr
"""
# The name isn't important (it gets replaced) but we need to
# have one.
name = '/'.join(str(kind) for kind in kinds.val)
self.val = qlast.CreateRewrite(
name=qlast.ObjectRef(name=name),
kinds=kinds.val,
expr=expr.val,
) | %reduce
REWRITE RewriteKindList
USING ParenExpr | reduce_CreateRewrite | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularPointer(self, *kids):
"""%reduce
PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock
"""
name, opt_bases, opt_target, block = kids
target, cmds = self._extract_target(
opt_target.val, block.val, name.span)
vbases, vcmds = commondl.extract_bases(opt_bases.val, cmds)
self.val = qlast.CreateConcreteUnknownPointer(
name=name.val,
bases=vbases,
target=target,
commands=vcmds,
)
self._validate() | %reduce
PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock | reduce_CreateRegularPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularQualifiedPointer(self, *kids):
"""%reduce
PtrQuals PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock
"""
quals, name, opt_bases, opt_target, block = kids
target, cmds = self._extract_target(
opt_target.val, block.val, name.span)
vbases, vcmds = commondl.extract_bases(opt_bases.val, cmds)
self.val = qlast.CreateConcreteUnknownPointer(
is_required=quals.val.required,
cardinality=quals.val.cardinality,
name=name.val,
bases=vbases,
target=target,
commands=vcmds,
)
self._validate() | %reduce
PtrQuals PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock | reduce_CreateRegularQualifiedPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedPointer(self, *kids):
"""%reduce
OVERLOADED PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock
"""
_, name, opt_bases, opt_target, block = kids
target, cmds = self._extract_target(
opt_target.val, block.val, name.span, overloaded=True)
vbases, vcmds = commondl.extract_bases(opt_bases.val, cmds)
self.val = qlast.CreateConcreteUnknownPointer(
name=name.val,
bases=vbases,
declared_overloaded=True,
is_required=None,
cardinality=None,
target=target,
commands=vcmds,
)
self._validate() | %reduce
OVERLOADED PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock | reduce_CreateOverloadedPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedQualifiedPointer(self, *kids):
"""%reduce
OVERLOADED PtrQuals PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock
"""
_, quals, name, opt_bases, opt_target, block = kids
target, cmds = self._extract_target(
opt_target.val, block.val, name.span, overloaded=True)
vbases, vcmds = commondl.extract_bases(opt_bases.val, cmds)
self.val = qlast.CreateConcreteUnknownPointer(
name=name.val,
bases=vbases,
declared_overloaded=True,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
target=target,
commands=vcmds,
)
self._validate() | %reduce
OVERLOADED PtrQuals PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock | reduce_CreateOverloadedQualifiedPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularPointer(self, *kids):
"""%reduce
PathNodeName OptExtendingSimple
PtrTarget
"""
name, opt_bases, target = kids
self.val = qlast.CreateConcreteUnknownPointer(
name=name.val,
bases=opt_bases.val,
target=target.val,
) | %reduce
PathNodeName OptExtendingSimple
PtrTarget | reduce_CreateRegularPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularQualifiedPointer(self, *kids):
"""%reduce
PtrQuals PathNodeName OptExtendingSimple
PtrTarget
"""
quals, name, opt_bases, target = kids
self.val = qlast.CreateConcreteUnknownPointer(
name=name.val,
bases=opt_bases.val,
target=target.val,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
) | %reduce
PtrQuals PathNodeName OptExtendingSimple
PtrTarget | reduce_CreateRegularQualifiedPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedPointer(self, *kids):
"""%reduce
OVERLOADED PathNodeName OptExtendingSimple
OptPtrTarget
"""
_, name, opt_bases, opt_target = kids
self.val = qlast.CreateConcreteUnknownPointer(
name=name.val,
bases=opt_bases.val,
declared_overloaded=True,
is_required=None,
cardinality=None,
target=opt_target.val,
) | %reduce
OVERLOADED PathNodeName OptExtendingSimple
OptPtrTarget | reduce_CreateOverloadedPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedQualifiedPointer(self, *kids):
"""%reduce
OVERLOADED PtrQuals PathNodeName OptExtendingSimple
OptPtrTarget
"""
_, quals, name, opt_bases, opt_target = kids
self.val = qlast.CreateConcreteUnknownPointer(
name=name.val,
bases=opt_bases.val,
declared_overloaded=True,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
target=opt_target.val,
) | %reduce
OVERLOADED PtrQuals PathNodeName OptExtendingSimple
OptPtrTarget | reduce_CreateOverloadedQualifiedPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateComputableUnknownPointer(self, *kids):
"""%reduce
PathNodeName ASSIGN Expr
"""
name, _, expr = kids
self.val = qlast.CreateConcreteUnknownPointer(
name=name.val,
target=expr.val,
) | %reduce
PathNodeName ASSIGN Expr | reduce_CreateComputableUnknownPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateQualifiedComputableUnknownPointer(self, *kids):
"""%reduce
PtrQuals PathNodeName ASSIGN Expr
"""
quals, name, _, expr = kids
self.val = qlast.CreateConcreteUnknownPointer(
name=name.val,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
target=expr.val,
) | %reduce
PtrQuals PathNodeName ASSIGN Expr | reduce_CreateQualifiedComputableUnknownPointer | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularProperty(self, *kids):
"""%reduce
PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget CreateConcretePropertySDLCommandsBlock
"""
_, name, extending, target, commands_block = kids
target, cmds = self._extract_target(
target.val, commands_block.val, name.span
)
vbases, vcmds = commondl.extract_bases(extending.val, cmds)
self.val = qlast.CreateConcreteProperty(
name=name.val,
bases=vbases,
target=target,
commands=vcmds,
) | %reduce
PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget CreateConcretePropertySDLCommandsBlock | reduce_CreateRegularProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularQualifiedProperty(self, *kids):
"""%reduce
PtrQuals PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget CreateConcretePropertySDLCommandsBlock
"""
(quals, property, name, extending, target, commands) = kids
target, cmds = self._extract_target(
target.val, commands.val, property.span
)
vbases, vcmds = commondl.extract_bases(extending.val, cmds)
self.val = qlast.CreateConcreteProperty(
name=name.val,
bases=vbases,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
target=target,
commands=vcmds,
) | %reduce
PtrQuals PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget CreateConcretePropertySDLCommandsBlock | reduce_CreateRegularQualifiedProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedProperty(self, *kids):
"""%reduce
OVERLOADED PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget CreateConcretePropertySDLCommandsBlock
"""
_, _, name, opt_bases, opt_target, block = kids
target, cmds = self._extract_target(
opt_target.val, block.val, name.span, overloaded=True)
vbases, vcmds = commondl.extract_bases(opt_bases.val, cmds)
self.val = qlast.CreateConcreteProperty(
name=name.val,
bases=vbases,
declared_overloaded=True,
is_required=None,
cardinality=None,
target=target,
commands=vcmds,
) | %reduce
OVERLOADED PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget CreateConcretePropertySDLCommandsBlock | reduce_CreateOverloadedProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedQualifiedProperty(self, *kids):
"""%reduce
OVERLOADED PtrQuals PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget CreateConcretePropertySDLCommandsBlock
"""
_, quals, _, name, opt_bases, opt_target, block = kids
target, cmds = self._extract_target(
opt_target.val, block.val, name.span, overloaded=True)
vbases, vcmds = commondl.extract_bases(opt_bases.val, cmds)
self.val = qlast.CreateConcreteProperty(
name=name.val,
bases=vbases,
declared_overloaded=True,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
target=target,
commands=vcmds,
) | %reduce
OVERLOADED PtrQuals PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget CreateConcretePropertySDLCommandsBlock | reduce_CreateOverloadedQualifiedProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularProperty(self, *kids):
"""%reduce
PROPERTY PathNodeName OptExtendingSimple PtrTarget
"""
_, name, extending, target = kids
self.val = qlast.CreateConcreteProperty(
name=name.val,
bases=extending.val,
target=target.val,
) | %reduce
PROPERTY PathNodeName OptExtendingSimple PtrTarget | reduce_CreateRegularProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularQualifiedProperty(self, *kids):
"""%reduce
PtrQuals PROPERTY PathNodeName OptExtendingSimple PtrTarget
"""
quals, _, name, extending, target = kids
self.val = qlast.CreateConcreteProperty(
name=name.val,
bases=extending.val,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
target=target.val,
) | %reduce
PtrQuals PROPERTY PathNodeName OptExtendingSimple PtrTarget | reduce_CreateRegularQualifiedProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedProperty(self, *kids):
"""%reduce
OVERLOADED PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget
"""
_, _, name, opt_bases, opt_target = kids
self.val = qlast.CreateConcreteProperty(
name=name.val,
bases=opt_bases.val,
declared_overloaded=True,
is_required=None,
cardinality=None,
target=opt_target.val,
) | %reduce
OVERLOADED PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget | reduce_CreateOverloadedProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedQualifiedProperty(self, *kids):
"""%reduce
OVERLOADED PtrQuals PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget
"""
_, quals, _, name, opt_bases, opt_target = kids
self.val = qlast.CreateConcreteProperty(
name=name.val,
bases=opt_bases.val,
declared_overloaded=True,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
target=opt_target.val,
) | %reduce
OVERLOADED PtrQuals PROPERTY PathNodeName OptExtendingSimple
OptPtrTarget | reduce_CreateOverloadedQualifiedProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateComputableProperty(self, *kids):
"""%reduce
PROPERTY PathNodeName ASSIGN Expr
"""
_, name, _, expr = kids
self.val = qlast.CreateConcreteProperty(
name=name.val,
target=expr.val,
) | %reduce
PROPERTY PathNodeName ASSIGN Expr | reduce_CreateComputableProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateQualifiedComputableProperty(self, *kids):
"""%reduce
PtrQuals PROPERTY PathNodeName ASSIGN Expr
"""
quals, _, name, _, expr = kids
self.val = qlast.CreateConcreteProperty(
name=name.val,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
target=expr.val,
) | %reduce
PtrQuals PROPERTY PathNodeName ASSIGN Expr | reduce_CreateQualifiedComputableProperty | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularLink(self, *kids):
"""%reduce
LINK PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock
"""
_, name, extending, target, commands = kids
target, cmds = self._extract_target(
target.val, commands.val, name.span
)
vbases, vcmds = commondl.extract_bases(extending.val, cmds)
self.val = qlast.CreateConcreteLink(
name=name.val,
bases=vbases,
target=target,
commands=vcmds,
)
self._validate() | %reduce
LINK PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock | reduce_CreateRegularLink | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularQualifiedLink(self, *kids):
"""%reduce
PtrQuals LINK PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock
"""
quals, _, name, extending, target, commands = kids
target, cmds = self._extract_target(
target.val, commands.val, name.span
)
vbases, vcmds = commondl.extract_bases(extending.val, cmds)
self.val = qlast.CreateConcreteLink(
is_required=quals.val.required,
cardinality=quals.val.cardinality,
name=name.val,
bases=vbases,
target=target,
commands=vcmds,
)
self._validate() | %reduce
PtrQuals LINK PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock | reduce_CreateRegularQualifiedLink | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedLink(self, *kids):
"""%reduce
OVERLOADED LINK PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock
"""
_, _, name, opt_bases, opt_target, block = kids
target, cmds = self._extract_target(
opt_target.val, block.val, name.span, overloaded=True)
vbases, vcmds = commondl.extract_bases(opt_bases.val, cmds)
self.val = qlast.CreateConcreteLink(
name=name.val,
bases=vbases,
declared_overloaded=True,
is_required=None,
cardinality=None,
target=target,
commands=vcmds,
)
self._validate() | %reduce
OVERLOADED LINK PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock | reduce_CreateOverloadedLink | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedQualifiedLink(self, *kids):
"""%reduce
OVERLOADED PtrQuals LINK PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock
"""
_, quals, _, name, opt_bases, opt_target, block = kids
target, cmds = self._extract_target(
opt_target.val, block.val, name.span, overloaded=True)
vbases, vcmds = commondl.extract_bases(opt_bases.val, cmds)
self.val = qlast.CreateConcreteLink(
name=name.val,
bases=vbases,
declared_overloaded=True,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
target=target,
commands=vcmds,
)
self._validate() | %reduce
OVERLOADED PtrQuals LINK PathNodeName OptExtendingSimple
OptPtrTarget CreateConcreteLinkSDLCommandsBlock | reduce_CreateOverloadedQualifiedLink | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularLink(self, *kids):
"""%reduce
LINK PathNodeName OptExtendingSimple
PtrTarget
"""
_, name, opt_bases, target = kids
self.val = qlast.CreateConcreteLink(
name=name.val,
bases=opt_bases.val,
target=target.val,
) | %reduce
LINK PathNodeName OptExtendingSimple
PtrTarget | reduce_CreateRegularLink | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateRegularQualifiedLink(self, *kids):
"""%reduce
PtrQuals LINK PathNodeName OptExtendingSimple
PtrTarget
"""
quals, _, name, opt_bases, target = kids
self.val = qlast.CreateConcreteLink(
name=name.val,
bases=opt_bases.val,
target=target.val,
is_required=quals.val.required,
cardinality=quals.val.cardinality,
) | %reduce
PtrQuals LINK PathNodeName OptExtendingSimple
PtrTarget | reduce_CreateRegularQualifiedLink | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |
def reduce_CreateOverloadedLink(self, *kids):
"""%reduce
OVERLOADED LINK PathNodeName OptExtendingSimple
OptPtrTarget
"""
_, _, name, opt_bases, opt_target = kids
self.val = qlast.CreateConcreteLink(
name=name.val,
bases=opt_bases.val,
declared_overloaded=True,
is_required=None,
cardinality=None,
target=opt_target.val,
) | %reduce
OVERLOADED LINK PathNodeName OptExtendingSimple
OptPtrTarget | reduce_CreateOverloadedLink | python | geldata/gel | edb/edgeql/parser/grammar/sdl.py | https://github.com/geldata/gel/blob/master/edb/edgeql/parser/grammar/sdl.py | Apache-2.0 |