code (stringlengths 26-870k) | docstring (stringlengths 1-65.6k) | func_name (stringlengths 1-194) | language (stringclasses 1 value) | repo (stringlengths 8-68) | path (stringlengths 5-194) | url (stringlengths 46-254) | license (stringclasses 4 values)
---|---|---|---|---|---|---|---|
def update_remaining(self, latest: Limits) -> Limits:
"""Update remaining based on the latest information.
    The remaining amount is assumed to decrease during a call to
execute_no_sleep.
"""
if self.remaining is None:
self.remaining = latest.remaining
elif latest.remaining is not None:
self.remaining = min(self.remaining, latest.remaining)
if self.total == 'unlimited' and self.remaining:
# If there is a remaining value, the total is not actually
# unlimited.
self.total = None
return self | Update remaining based on the latest information.
The remaining amount is assumed to decrease during a call to
execute_no_sleep. | update_remaining | python | geldata/gel | edb/server/protocol/request_scheduler.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/request_scheduler.py | Apache-2.0 |
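
A small sketch of the bookkeeping described above: the tracked remaining value only ratchets down, and a total of `'unlimited'` is cleared once a concrete remaining value is seen. The keyword arguments used to construct `Limits` here are an assumption; only `Limits(total='unlimited')` appears in the snippets in this section.

```python
# Hypothetical illustration; assumes Limits accepts `total` and `remaining`
# as keyword arguments (only Limits(total='unlimited') is shown above).
from edb.server.protocol.request_scheduler import Limits

tracked = Limits(total='unlimited')
tracked.update_remaining(Limits(total=None, remaining=120))
tracked.update_remaining(Limits(total=None, remaining=80))

assert tracked.remaining == 80   # never increases, only ratchets down
assert tracked.total is None     # 'unlimited' cleared: a remaining value exists
```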
async def run(self) -> Optional[Result[_T]]:
"""Run the request and return a result."""
raise NotImplementedError | Run the request and return a result. | run | python | geldata/gel | edb/server/protocol/request_scheduler.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/request_scheduler.py | Apache-2.0 |
async def wait_result(self) -> None:
"""Wait for the request to complete."""
await self._inner | Wait for the request to complete. | wait_result | python | geldata/gel | edb/server/protocol/request_scheduler.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/request_scheduler.py | Apache-2.0 |
def get_result(self) -> Optional[Result[_T]]:
"""Get the result of the request."""
result = self._inner.result()
return result | Get the result of the request. | get_result | python | geldata/gel | edb/server/protocol/request_scheduler.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/request_scheduler.py | Apache-2.0 |
def costs(self) -> dict[str, int]:
"""Expected cost to execute the request.
Keys must match service rate limits."""
raise NotImplementedError | Expected cost to execute the request.
Keys must match service rate limits. | costs | python | geldata/gel | edb/server/protocol/request_scheduler.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/request_scheduler.py | Apache-2.0 |
def create_request(self) -> Request[_T]:
"""Create a request with the parameters."""
raise NotImplementedError | Create a request with the parameters. | create_request | python | geldata/gel | edb/server/protocol/request_scheduler.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/request_scheduler.py | Apache-2.0 |
async def finalize(self) -> None:
"""An optional finalize to be run sequentially."""
pass | An optional finalize to be run sequentially. | finalize | python | geldata/gel | edb/server/protocol/request_scheduler.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/request_scheduler.py | Apache-2.0 |
async def execute_no_sleep(
params: Sequence[Params[_T]],
*,
service: Service,
) -> ExecutionReport:
"""Attempt to execute as many requests as possible without sleeping."""
report = ExecutionReport()
# Set up limits
execute_limits: dict[str, Limits] = {
limit_name: (
# If no other information is available, for the first attempt assume
# there is no limit.
Limits(total='unlimited')
if service_limit is None else
copy.copy(service_limit)
)
for limit_name, service_limit in service.limits.items()
}
# If any requests fail and can be retried, retry them up to a maximum number
# of times.
retry_count: int = 0
# If the costs are larger than a total limit, set aside the excess to be
# processed later.
# This prevents wasting resources, and allows the delays to increase
# specifically when an unexpected deferral happens.
pending_request_indexes: list[int]
excess_request_indexes: list[int]
initial_pending_cost = {
limit_name: 0
for limit_name in service.limits.keys()
}
for request_index in range(len(params)):
for limit_name, cost in params[request_index].costs().items():
initial_pending_cost[limit_name] += cost
if (
# If the pending cost exceeds a known limit, set aside some
# requests.
any(
limit.total < initial_pending_cost[limit_name]
for limit_name, limit in service.limits.items()
if limit is not None and isinstance(limit.total, int)
)
# Always include at least 1 request
and request_index != 0
):
pending_request_indexes = (
list(range(request_index))
)
excess_request_indexes = (
list(range(request_index, len(params)))
)
break
else:
# All inputs can be processed
pending_request_indexes = list(range(len(params)))
excess_request_indexes = []
while pending_request_indexes and retry_count < service.max_retry_count:
# Find the highest delay required by any of the service's limits
limit_base_delays = _get_limit_base_delays(
params, execute_limits, pending_request_indexes, service.guess_delay
)
base_delay = _get_maximum_delay(limit_base_delays)
active_request_indexes: list[int]
inactive_request_indexes: list[int]
if base_delay is None:
# Try to execute all requests.
active_request_indexes = pending_request_indexes
inactive_request_indexes = []
elif retry_count == 0:
# If there is any delay, only execute one request.
# This may update the remaining limit, allowing the remaining
# requests to run.
active_request_indexes = pending_request_indexes[:1]
inactive_request_indexes = pending_request_indexes[1:]
else:
break
results = await _execute_specified(
params, active_request_indexes,
)
# Check results
retry_request_indexes: list[int] = []
for request_index in active_request_indexes:
if request_index not in results:
report.unknown_error_count += 1
continue
result = results[request_index]
if isinstance(result.data, Error):
if result.data.retry:
# requests can be retried
retry_request_indexes.append(request_index)
else:
# error with message
report.known_error_messages.append(result.data.message)
else:
report.success_count += 1
await result.finalize()
if result.limits is not None:
for limit_name, execute_limit in execute_limits.items():
if limit_name not in result.limits:
continue
result_limit = result.limits[limit_name]
execute_limit.update_total(result_limit)
execute_limit.update_remaining(result_limit)
retry_count += 1
pending_request_indexes = (
retry_request_indexes + inactive_request_indexes
)
# Determine which limits cause unexpected deferrals and require additional
# delays.
limit_base_delays = _get_limit_base_delays(
params, execute_limits, pending_request_indexes, service.guess_delay
)
expected_pending_cost = {
limit_name
for limit_name in service.limits.keys()
if limit_base_delays[limit_name] is not None
}
if len(expected_pending_cost) == 0:
# If requests were deferred, but no limit appears to be the cause, delay
# them all just in case.
expected_pending_cost = set(service.limits.keys())
# Update deferred costs and any resulting limits.
report.deferred_costs = {
limit_name: 0
for limit_name in service.limits
}
for limit_name in service.limits.keys():
unexpected_deferred_costs = sum(
params[i].costs()[limit_name]
for i in pending_request_indexes
)
excess_deferred_costs = sum(
params[i].costs()[limit_name]
for i in excess_request_indexes
)
report.deferred_costs[limit_name] = (
unexpected_deferred_costs + excess_deferred_costs
)
if (
unexpected_deferred_costs != 0
# If the limit was not a cause of delays, don't increase the delay
# factor.
and limit_name in expected_pending_cost
):
# If there are deferred requests, gradually increase the delay
# factor
execute_limits[limit_name].delay_factor *= (
1 + random.random() if service.jitter else 2
)
elif (
len(report.known_error_messages) == 0
and report.unknown_error_count == 0
and excess_deferred_costs == 0
):
# If there are no errors, gradually decrease the delay factor over
# time.
execute_limits[limit_name].delay_factor = max(
0.95 * execute_limits[limit_name].delay_factor,
1,
)
# We don't know when the service will be called again, so just clear the
# remaining values
for execute_limit in execute_limits.values():
execute_limit.remaining = None
# Return the updated request limits
report.updated_limits = execute_limits
return report | Attempt to execute as many requests as possible without sleeping. | execute_no_sleep | python | geldata/gel | edb/server/protocol/request_scheduler.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/request_scheduler.py | Apache-2.0 |
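
The backoff behaviour above is easier to see with numbers: a limit's `delay_factor` is multiplied whenever its requests are unexpectedly deferred (doubled, or scaled by `1 + random()` when jitter is enabled) and decays by 5% per clean run, never dropping below 1. A standalone illustration of just that arithmetic, not of the scheduler itself:

```python
# Standalone arithmetic from the rules above: grow on deferral, decay when clean.
delay_factor = 1.0
for deferred in (True, True, False, False, False):
    if deferred:
        delay_factor *= 2                      # jitter disabled for determinism
    else:
        delay_factor = max(0.95 * delay_factor, 1)
    print(f"{delay_factor:.3f}")
# -> 2.000, 4.000, 3.800, 3.610, 3.429
```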
def encode(self, text: str) -> list[int]:
"""Encode text into tokens."""
raise NotImplementedError | Encode text into tokens. | encode | python | geldata/gel | edb/server/protocol/ai_ext.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/ai_ext.py | Apache-2.0 |
def encode_padding(self) -> int:
"""How many special characters are added to encodings?"""
raise NotImplementedError | How many special characters are added to encodings? | encode_padding | python | geldata/gel | edb/server/protocol/ai_ext.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/ai_ext.py | Apache-2.0 |
def decode(self, tokens: list[int]) -> str:
"""Decode tokens into text."""
raise NotImplementedError | Decode tokens into text. | decode | python | geldata/gel | edb/server/protocol/ai_ext.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/ai_ext.py | Apache-2.0 |
def shorten_to_token_length(self, text: str, token_length: int) -> str:
"""Truncate text to a maximum token length."""
encoded = self.encode(text)
if len(encoded) > token_length:
encoded = encoded[:token_length]
return self.decode(encoded) | Truncate text to a maximum token length. | shorten_to_token_length | python | geldata/gel | edb/server/protocol/ai_ext.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/ai_ext.py | Apache-2.0 |
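
The `Tokenizer` interface above is abstract; a minimal sketch of a conforming implementation, using a toy whitespace vocabulary purely for illustration (real providers plug in their own tokenizers):

```python
# Toy tokenizer mirroring the Tokenizer interface above; illustration only.
class ToyTokenizer:
    def __init__(self) -> None:
        self._vocab: dict[str, int] = {}
        self._words: list[str] = []

    def encode(self, text: str) -> list[int]:
        # Assign each distinct whitespace-separated word an integer id.
        tokens = []
        for word in text.split():
            if word not in self._vocab:
                self._vocab[word] = len(self._words)
                self._words.append(word)
            tokens.append(self._vocab[word])
        return tokens

    def encode_padding(self) -> int:
        # This toy scheme adds no special tokens.
        return 0

    def decode(self, tokens: list[int]) -> str:
        return " ".join(self._words[t] for t in tokens)

    def shorten_to_token_length(self, text: str, token_length: int) -> str:
        # Same truncation logic as shorten_to_token_length above.
        encoded = self.encode(text)
        if len(encoded) > token_length:
            encoded = encoded[:token_length]
        return self.decode(encoded)


tok = ToyTokenizer()
print(tok.shorten_to_token_length("embeddings inputs can be very long indeed", 3))
# -> "embeddings inputs can"
```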
async def _get_pending_embeddings(
pgconn: pgcon.PGConnection,
model_name: str,
model_excluded_ids: dict[str, list[str]],
) -> list[PendingEmbedding]:
task_name = _task_name.get()
where_clause = ""
if (
model_name in model_excluded_ids
and (excluded_ids := model_excluded_ids[model_name])
):
# Only exclude long text if it won't be auto-truncated.
logger.debug(
f"{task_name} skipping {len(excluded_ids)} indexes "
f"for {model_name!r}"
)
where_clause = (f"""
WHERE
q."id" not in ({','.join(
"'" + excluded_id + "'"
for excluded_id in excluded_ids
)})
OR q."truncate_to_max"
""")
entries = await pgconn.sql_fetch(
f"""
SELECT
*
FROM
(
SELECT
"id",
"text",
"target_rel",
"target_attr",
"target_dims_shortening",
"truncate_to_max"
FROM
edgedbext."ai_pending_embeddings_{model_name}"
LIMIT
500
) AS q
{where_clause}
ORDER BY
q."target_dims_shortening"
""".encode(),
tx_isolation=edbdef.TxIsolationLevel.RepeatableRead,
)
if not entries:
return []
result = []
for entry in entries:
result.append(PendingEmbedding(
id=uuidgen.from_bytes(entry[0]),
text=entry[1].decode("utf-8"),
target_rel=entry[2].decode(),
target_attr=entry[3].decode(),
target_dims_shortening=(
int.from_bytes(
entry[4],
byteorder="big",
signed=False,
)
if entry[4] is not None else
None
),
truncate_to_max=bool.from_bytes(entry[5]),
))
return result | Fetch up to 500 pending embedding rows for the given model, skipping over-long texts that will not be auto-truncated. | _get_pending_embeddings | python | geldata/gel | edb/server/protocol/ai_ext.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/ai_ext.py | Apache-2.0 |
def _batch_embeddings_inputs(
tokenizer: Tokenizer,
inputs: list[str],
max_batch_tokens: int,
) -> list[tuple[list[int], int]]:
"""Create batches of embeddings inputs.
    Each returned batch is a tuple of:
    - Indexes of input strings, grouped to avoid exceeding max_batch_tokens
    - The batch's total token count
"""
# Get token counts
input_token_counts = [
len(tokenizer.encode(input))
for input in inputs
]
# Get indexes of inputs, sorted from shortest to longest by token count
unbatched_input_indexes = list(range(len(inputs)))
unbatched_input_indexes.sort(
key=lambda index: input_token_counts[index],
reverse=False,
)
def unbatched_token_count(unbatched_index: int) -> int:
return input_token_counts[unbatched_input_indexes[unbatched_index]]
# Remove any inputs that are larger than the maximum
while (
unbatched_input_indexes
and unbatched_token_count(-1) > max_batch_tokens
):
unbatched_input_indexes.pop()
batches: list[tuple[list[int], int]] = []
while unbatched_input_indexes:
# Start with the largest available input
batch_input_indexes = [unbatched_input_indexes[-1]]
batch_token_count = unbatched_token_count(-1)
unbatched_input_indexes.pop()
if batch_token_count < max_batch_tokens:
            # Then add the smallest available inputs as long as the
            # max batch token count isn't exceeded
unbatched_index = 0
while unbatched_index < len(unbatched_input_indexes):
if (
batch_token_count + unbatched_token_count(unbatched_index)
<= max_batch_tokens
):
batch_input_indexes.append(
unbatched_input_indexes[unbatched_index]
)
batch_token_count += unbatched_token_count(unbatched_index)
unbatched_input_indexes.pop(unbatched_index)
else:
unbatched_index += 1
batches.append((batch_input_indexes, batch_token_count))
return batches | Create batches of embeddings inputs.
Each returned batch is a tuple of:
- Indexes of input strings, grouped to avoid exceeding max_batch_tokens
- The batch's total token count | _batch_embeddings_inputs | python | geldata/gel | edb/server/protocol/ai_ext.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/ai_ext.py | Apache-2.0 |
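
A sketch of how the batching above groups inputs, assuming the private helper can be imported from `ai_ext.py`; the duck-typed word-count tokenizer and the 6-token budget are illustrative only:

```python
# Illustrative only; only len(tokenizer.encode(text)) is used by the helper.
from edb.server.protocol.ai_ext import _batch_embeddings_inputs

class WordCountTokenizer:
    def encode(self, text: str) -> list[int]:
        return list(range(len(text.split())))

inputs = ["a b c d e", "a b c", "a b", "a"]   # 5, 3, 2 and 1 "tokens"
batches = _batch_embeddings_inputs(WordCountTokenizer(), inputs, 6)
print(batches)
# Largest input first, then topped up with the smallest inputs that still fit:
# [([0, 3], 6), ([1, 2], 5)]   (indexes into `inputs`, batch token count)
```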
async def _handle_rag_request(
protocol: protocol.HttpProtocol,
request: protocol.HttpRequest,
response: protocol.HttpResponse,
db: dbview.Database,
tenant: srv_tenant.Tenant,
) -> None:
try:
http_client = tenant.get_http_client(originator="ai/rag")
body = json.loads(request.body)
if not isinstance(body, dict):
raise TypeError(
'the body of the request must be a JSON object')
context = body.get('context')
if context is None:
raise TypeError(
'missing required "context" object in request')
if not isinstance(context, dict):
raise TypeError(
'"context" value in request is not a valid JSON object')
ctx_query = context.get("query")
ctx_variables = context.get("variables")
ctx_globals = context.get("globals")
ctx_max_obj_count = context.get("max_object_count")
if not ctx_query:
raise TypeError(
'missing required "query" in request "context" object')
if ctx_variables is not None and not isinstance(ctx_variables, dict):
raise TypeError('"variables" must be a JSON object')
if ctx_globals is not None and not isinstance(ctx_globals, dict):
raise TypeError('"globals" must be a JSON object')
model = body.get('model')
if not model:
raise TypeError(
'missing required "model" in request')
query = body.get('query')
if not query:
raise TypeError(
'missing required "query" in request')
stream = body.get('stream')
if stream is None:
stream = False
elif not isinstance(stream, bool):
raise TypeError('"stream" must be a boolean')
if ctx_max_obj_count is None:
ctx_max_obj_count = 5
elif not isinstance(ctx_max_obj_count, int) or ctx_max_obj_count <= 0:
raise TypeError(
'"context.max_object_count" must be a positive integer')
prompt_id = None
prompt_name = None
custom_prompt = None
custom_prompt_messages: list[dict[str, Any]] = []
prompt = body.get("prompt")
if prompt is None:
prompt_name = "builtin::rag-default"
else:
if not isinstance(prompt, dict):
raise TypeError(
'"prompt" value in request must be a JSON object')
prompt_name = prompt.get("name")
prompt_id = prompt.get("id")
custom_prompt = prompt.get("custom")
if prompt_name and prompt_id:
raise TypeError(
"prompt.id and prompt.name are mutually exclusive"
)
if custom_prompt:
if not isinstance(custom_prompt, list):
raise TypeError(
(
"prompt.custom must be a list, where each element "
"is one of the following types:\n"
"{ role: 'system', content: str },\n"
"{ role: 'user', content: [{ type: 'text', "
"text: str }] },\n"
"{ role: 'assistant', content: str, "
"optional tool_calls: [{id: str, type: 'function',"
" function: { name: str, arguments: str }}] },\n"
"{ role: 'tool', content: str, tool_call_id: str }"
)
)
for entry in custom_prompt:
if not isinstance(entry, dict) or not entry.get("role"):
raise TypeError(
(
"each prompt.custom entry must be a "
"dictionary of one of the following types:\n"
"{ role: 'system', content: str },\n"
"{ role: 'user', content: [{ type: 'text', "
"text: str }] },\n"
"{ role: 'assistant', content: str, "
"optional tool_calls: [{id: str, "
"type: 'function', function: { "
"name: str, arguments: str }}] },\n"
"{ role: 'tool', content: str, "
"tool_call_id: str }"
)
)
entry_role = entry.get('role')
if entry_role == 'system':
if not isinstance(entry.get("content"), str):
raise TypeError(
"System message content has to be string."
)
elif entry_role == 'user':
if not isinstance(entry.get("content"), list):
raise TypeError(
(
"User message content has to be a list of "
"{ type: 'text', text: str }"
)
)
for content_entry in entry["content"]:
if content_entry.get(
"type"
) != "text" or not isinstance(
content_entry.get("text"), str
):
raise TypeError(
(
"Element of user message content has to"
"be of type { type: 'text', text: str }"
)
)
elif entry_role == 'assistant':
if not isinstance(entry.get("content"), str):
raise TypeError(
"Assistant message content has to be string"
)
tool_calls = entry.get("tool_calls")
if tool_calls:
if not isinstance(tool_calls, list):
raise TypeError(
(
"Assistant tool calls must be"
"a list of:\n"
"{id: str, type: 'function', function:"
" {name: str, arguments: str }}"
)
)
for call in tool_calls:
if (
not isinstance(call, dict)
or not isinstance(call.get("id"), str)
or call.get("type") != "function"
or not isinstance(
call.get("function"), dict
)
or not isinstance(
call["function"].get("name"), str
)
or not isinstance(
call["function"].get("arguments"),
str,
)
):
raise TypeError(
(
"A tool call must be of type:\n"
"{id: str, type: 'function', "
"function: { name: str, "
"arguments: str }}"
)
)
elif entry_role == 'tool':
if not isinstance(entry.get("content"), str):
raise TypeError(
"Tool message content has to be string."
)
if not isinstance(entry.get("tool_call_id"), str):
raise TypeError(
"Tool message tool_call_id has to be string."
)
else:
raise TypeError(
(
"Message role must match one of these: "
"system, user, assistant, tool."
)
)
custom_prompt_messages.append(entry)
except Exception as ex:
raise BadRequestError(ex.args[0])
provider_name = await _get_model_provider(
db,
base_model_type="ext::ai::TextGenerationModel",
model_name=model,
)
provider = _get_provider_config(db, provider_name)
vector_query = await _generate_embeddings_for_type(
db,
http_client,
ctx_query,
content=query,
)
ctx_query = f"""
WITH
__query := <array<float32>>(
to_json(<str>$input)["data"][0]["embedding"]
),
search := ext::ai::search(({ctx_query}), __query),
SELECT
ext::ai::to_context(search.object)
ORDER BY
search.distance ASC EMPTY LAST
LIMIT
<int64>$limit
"""
if ctx_variables is None:
ctx_variables = {}
ctx_variables["input"] = vector_query.decode("utf-8")
ctx_variables["limit"] = ctx_max_obj_count
context = await _edgeql_query_json(
db=db,
query=ctx_query,
variables=ctx_variables,
globals_=ctx_globals,
)
if len(context) == 0:
raise BadRequestError(
'query did not match any data in specified context',
)
prompt_query = """
SELECT
ext::ai::ChatPrompt {
messages: {
participant_role,
content,
},
}
FILTER
"""
if prompt_id or prompt_name:
prompt_variables = {}
if prompt_name:
prompt_query += ".name = <str>$prompt_name"
prompt_variables["prompt_name"] = prompt_name
elif prompt_id:
prompt_query += ".id = <uuid><str>$prompt_id"
prompt_variables["prompt_id"] = prompt_id
prompts = await _edgeql_query_json(
db=db,
query=prompt_query,
variables=prompt_variables,
)
if len(prompts) == 0:
raise BadRequestError("could not find the specified chat prompt")
prompt = prompts[0]
else:
prompt = {
"messages": [],
}
prompt_messages: list[dict[str, Any]] = []
for message in prompt["messages"]:
if message["participant_role"] == "User":
content = message["content"].format(
context="\n".join(context),
query=query,
)
elif message["participant_role"] == "System":
content = message["content"].format(
context="\n".join(context),
)
else:
content = message["content"]
role = message["participant_role"].lower()
prompt_messages.append(dict(role=role, content=content))
    # Don't append the user query message here at the end, because Mistral and
    # Anthropic don't work if the user message appears after the tools
messages = prompt_messages + custom_prompt_messages
await _start_chat(
protocol=protocol,
request=request,
response=response,
provider=provider,
http_client=http_client,
model_name=model,
messages=messages,
stream=stream,
temperature=body.get("temperature"),
top_p=body.get("top_p"),
max_tokens=body.get("max_tokens"),
seed=body.get("seed"),
safe_prompt=body.get("safe_prompt"),
top_k=body.get("top_k"),
logit_bias=body.get("logit_bias"),
logprobs=body.get("logprobs"),
user=body.get("user"),
tools=body.get("tools"),
    ) | Handle a RAG request: validate the JSON body, embed the query, run the context search, assemble the prompt messages, and start the chat with the configured provider. | _handle_rag_request | python | geldata/gel | edb/server/protocol/ai_ext.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/ai_ext.py | Apache-2.0 |
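
The expected request payload is implied by the validation at the top of the handler above. A body that passes those checks (field names come from the handler's checks; the concrete values are invented):

```python
# Field names mirror the validation in _handle_rag_request; values are made up.
rag_request_body = {
    "model": "text-generation-model-name",       # required, resolved to a provider
    "query": "What is our refund policy?",       # required user question
    "context": {                                 # required JSON object
        "query": "select Document",              # EdgeQL expression to search over
        "max_object_count": 5,                   # optional positive int (default 5)
        # "variables": {...},                    # optional JSON object
        # "globals": {...},                      # optional JSON object
    },
    "stream": False,                             # optional bool
    "prompt": {"name": "builtin::rag-default"},  # or {"id": ...} / {"custom": [...]}
}
```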
def derive_key_raw(key: str, info: str) -> bytes:
"""Derive a new key from the given symmetric key using HKDF."""
input_key_material = key.encode()
backend = default_backend()
hkdf = HKDFExpand(
algorithm=hashes.SHA256(),
length=32,
info=info.encode("utf-8"),
backend=backend,
)
new_key_bytes = hkdf.derive(input_key_material)
return new_key_bytes | Derive a new key from the given symmetric key using HKDF. | derive_key_raw | python | geldata/gel | edb/server/protocol/auth_ext/jwt.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/jwt.py | Apache-2.0 |
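
Usage sketch for the HKDF helper above; the signing key and the info labels are placeholders, not values the auth extension actually uses:

```python
# Placeholder values; derive_key_raw is the function shown above.
from edb.server.protocol.auth_ext.jwt import derive_key_raw

signing_key = "configured-auth-signing-key"
verification_key = derive_key_raw(signing_key, "email-verification")
reset_key = derive_key_raw(signing_key, "password-reset")

assert len(verification_key) == len(reset_key) == 32   # HKDFExpand length=32
assert verification_key != reset_key                    # distinct key per purpose
```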
async def create_authentication_options_for_email(
self,
*,
webauthn_provider: config.WebAuthnProvider,
email: str,
) -> Tuple[str, bytes]:
# Find credential IDs by email
result = await execute.parse_execute_json(
self.db,
"""
select ext::auth::WebAuthnFactor {
user_handle,
credential_id,
}
filter .email = <str>$email;""",
variables={
"email": email,
},
cached_globally=True,
query_tag='gel/auth',
)
result_json = json.loads(result.decode())
if len(result_json) == 0:
raise errors.WebAuthnAuthenticationFailed(
"No WebAuthn credentials found for this email."
)
user_handles: set[str] = {x["user_handle"] for x in result_json}
assert (
len(user_handles) == 1
), "Found WebAuthn multiple user handles for the same email."
user_handle = base64.b64decode(result_json[0]["user_handle"])
credential_ids = [
webauthn_structs.PublicKeyCredentialDescriptor(
base64.b64decode(x["credential_id"])
)
for x in result_json
]
registration_options = webauthn.generate_authentication_options(
rp_id=webauthn_provider.relying_party_id,
allow_credentials=credential_ids,
)
await execute.parse_execute_json(
self.db,
"""
with
challenge := <bytes>$challenge,
user_handle := <bytes>$user_handle,
email := <str>$email,
factors := (
assert_exists((
select ext::auth::WebAuthnFactor
filter .user_handle = user_handle
and .email = email
))
)
insert ext::auth::WebAuthnAuthenticationChallenge {
challenge := challenge,
factors := factors,
}
unless conflict on .factors
else (
update ext::auth::WebAuthnAuthenticationChallenge
set {
challenge := challenge
}
);""",
variables={
"challenge": registration_options.challenge,
"user_handle": user_handle,
"email": email,
},
query_tag='gel/auth',
)
return (
base64.urlsafe_b64encode(user_handle).decode(),
webauthn.options_to_json(registration_options).encode(),
    ) | Build WebAuthn authentication options for the credentials registered under the given email and store the challenge for later verification. | create_authentication_options_for_email | python | geldata/gel | edb/server/protocol/auth_ext/webauthn.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/webauthn.py | Apache-2.0 |
async def _handle_identity(
self, user_info: data.UserInfo
) -> tuple[data.Identity, bool]:
"""Update or create an identity"""
r = await execute.parse_execute_json(
db=self.db,
query="""\
with
iss := <str>$issuer_url,
sub := <str>$subject,
identity := (
insert ext::auth::Identity {
issuer := iss,
subject := sub,
} unless conflict on ((.issuer, .subject))
else ext::auth::Identity
)
select {
identity := (select identity {*}),
new := (identity not in ext::auth::Identity)
};""",
variables={
"issuer_url": self.provider.issuer_url,
"subject": user_info.sub,
},
cached_globally=True,
query_tag='gel/auth',
)
result_json = json.loads(r.decode())
assert len(result_json) == 1
return (
data.Identity(**result_json[0]['identity']),
result_json[0]['new'],
) | Update or create an identity | _handle_identity | python | geldata/gel | edb/server/protocol/auth_ext/oauth.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/oauth.py | Apache-2.0 |
def _get_url_munger(
self, request: protocol.HttpRequest
) -> Callable[[str], str] | None:
"""
Returns a callable that can be used to modify the base URL
when making requests to the OAuth provider.
This is used to redirect requests to the test OAuth provider
when running in test mode.
"""
if not self.test_mode:
return None
test_url = (
request.params[b'oauth-test-server'].decode()
if (request.params and b'oauth-test-server' in request.params)
else None
)
if test_url:
return lambda path: f"{test_url}{urllib.parse.quote(path)}"
return None | Returns a callable that can be used to modify the base URL
when making requests to the OAuth provider.
This is used to redirect requests to the test OAuth provider
when running in test mode. | _get_url_munger | python | geldata/gel | edb/server/protocol/auth_ext/http.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/http.py | Apache-2.0 |
async def handle_ui_magic_link_sent(
self,
request: protocol.HttpRequest,
response: protocol.HttpResponse,
) -> None:
"""
Success page for when a magic link is sent
"""
app_details = self._get_app_details_config()
response.status = http.HTTPStatus.OK
response.content_type = b"text/html"
response.body = ui.render_magic_link_sent_page(
app_name=app_details.app_name,
logo_url=app_details.logo_url,
dark_logo_url=app_details.dark_logo_url,
brand_color=app_details.brand_color,
) | Success page for when a magic link is sent | handle_ui_magic_link_sent | python | geldata/gel | edb/server/protocol/auth_ext/http.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/http.py | Apache-2.0 |
async def get_remote_jwtset(
url: str,
fetch_lambda: Callable[[str], Awaitable[jwt_auth.JWKSet]],
) -> jwt_auth.JWKSet:
"""
Get a JWKSet from the cache, or fetch it from the given URL if it's not in
the cache.
"""
is_fresh, jwtset = jwtset_cache.get(url)
match (is_fresh, jwtset):
case (_, None):
jwtset = await fetch_lambda(url)
jwtset_cache.set(url, jwtset)
case (True, jwtset):
pass
case _:
# Run fetch in background to refresh cache
async def refresh_cache(url: str) -> None:
try:
new_jwtset = await fetch_lambda(url)
jwtset_cache.set(url, new_jwtset)
except Exception:
logger.exception(
f"Failed to refresh JWKSet cache for {url}"
)
asyncio.create_task(refresh_cache(url))
assert jwtset is not None
return jwtset | Get a JWKSet from the cache, or fetch it from the given URL if it's not in
the cache. | get_remote_jwtset | python | geldata/gel | edb/server/protocol/auth_ext/util.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/util.py | Apache-2.0 |
def tabs_content(sections: list[str], selected_tab: int) -> str:
content = ''
for i, section in enumerate(sections):
content += f'''
<div class="slider-section{' active' if selected_tab == i else ''}">
{section}
</div>
'''
style = (
f'style="transform: translateX({-100 * selected_tab}%)"'
if selected_tab > 0 else ''
)
return f'''
<div id="slider-container" class="slider-container" {style}>
{content}
</div>
    ''' | Render the sliding tab content sections, with the selected tab active. | tabs_content | python | geldata/gel | edb/server/protocol/auth_ext/ui/components.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/components.py | Apache-2.0 |
def tabs_buttons(labels: list[str], selected_tab: int) -> str:
content = ''
for i, label in enumerate(labels):
content += f'''
<div class="tab{' active' if selected_tab == i else ''}">
{label}
{_tab_underline}
</div>
'''
return f'''
<div id="email-provider-tabs" class="tabs">
{content}
</div>
    ''' | Render the tab buttons, marking the selected tab as active. | tabs_buttons | python | geldata/gel | edb/server/protocol/auth_ext/ui/components.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/components.py | Apache-2.0 |
def base_default_email(
*,
content: str,
app_name: Optional[str],
logo_url: Optional[str],
) -> str:
logo_html = f"""
<!--[if mso | IE]><table align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600" ><tr><td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;"><![endif]-->
<div style="margin: 0px auto; max-width: 600px">
<table
align="center"
border="0"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 100%"
>
<tbody>
<tr>
<td
style="
direction: ltr;
font-size: 0px;
padding: 20px 0;
padding-bottom: 0px;
padding-top: 20px;
text-align: center;
"
>
<!--[if mso | IE]><table role="presentation" border="0" cellpadding="0" cellspacing="0"><tr><td class="" style="vertical-align:top;width:600px;" ><![endif]-->
<div
class="mj-column-per-100 mj-outlook-group-fix"
style="
font-size: 0px;
text-align: left;
direction: ltr;
display: inline-block;
vertical-align: top;
width: 100%;
"
>
<table
border="0"
cellpadding="0"
cellspacing="0"
role="presentation"
style="vertical-align: top"
width="100%"
>
<tbody>
<tr>
<td
align="center"
style="
font-size: 0px;
padding: 10px 25px;
padding-top: 0;
padding-right: 0px;
padding-bottom: 0px;
padding-left: 0px;
word-break: break-word;
"
>
<table
border="0"
cellpadding="0"
cellspacing="0"
role="presentation"
style="border-collapse: collapse; border-spacing: 0px"
>
<tbody>
<tr>
<td style="width: 150px">
<img
alt="
{f'{app_name} logo' if app_name else ''}
"
height="150"
src="{logo_url}"
style="
border: none;
display: block;
outline: none;
text-decoration: none;
height: 150px;
width: 100%;
font-size: 13px;
"
width="150"
/>
</td>
</tr>
</tbody>
</table>
</td>
</tr>
</tbody>
</table>
</div>
<!--[if mso | IE]></td></tr></table><![endif]-->
</td>
</tr>
</tbody>
</table>
</div>
<!--[if mso | IE]></td></tr></table><table align="center" border="0" cellpadding="0" cellspacing="0" class="" style="width:600px;" width="600" ><tr><td style="line-height:0px;font-size:0px;mso-line-height-rule:exactly;"><![endif]-->
""" if logo_url else "" # noqa: E501
return f"""
<!doctype html>
<html
xmlns="http://www.w3.org/1999/xhtml"
xmlns:v="urn:schemas-microsoft-com:vml"
xmlns:o="urn:schemas-microsoft-com:office:office"
>
<head>
<title>
</title>
<!--[if !mso]><!-->
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<!--<![endif]-->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<style type="text/css">
#outlook a {{
padding: 0;
}}
body {{
margin: 0;
padding: 0;
-webkit-text-size-adjust: 100%;
-ms-text-size-adjust: 100%;
}}
table,
td {{
border-collapse: collapse;
mso-table-lspace: 0pt;
mso-table-rspace: 0pt;
}}
img {{
border: 0;
height: auto;
line-height: 100%;
outline: none;
text-decoration: none;
-ms-interpolation-mode: bicubic;
}}
p {{
display: block;
margin: 13px 0;
}}
</style>
<!--[if mso]>
<noscript>
<xml>
<o:OfficeDocumentSettings>
<o:AllowPNG/>
<o:PixelsPerInch>96</o:PixelsPerInch>
</o:OfficeDocumentSettings>
</xml>
</noscript>
<![endif]-->
<!--[if lte mso 11]>
<style type="text/css">
.mj-outlook-group-fix {{ width:100% !important; }}
</style>
<![endif]-->
<!--[if !mso]><!-->
<link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,500,700" rel="stylesheet" type="text/css">
<style type="text/css">
@import url(https://fonts.googleapis.com/css?family=Open+Sans:300,400,500,700);
</style>
<!--<![endif]-->
<style type="text/css">
@media only screen and (min-width:480px) {{
.mj-column-per-100 {{
width: 100% !important;
max-width: 100%;
}}
}}
</style>
<style media="screen and (min-width:480px)">
.moz-text-html .mj-column-per-100 {{
width: 100% !important;
max-width: 100%;
}}
</style>
<style type="text/css">
@media only screen and (max-width:480px) {{
table.mj-full-width-mobile {{
width: 100% !important;
}}
td.mj-full-width-mobile {{
width: auto !important;
}}
}}
</style>
</head>
<body style="word-spacing: normal; background-color: #ffffff">
<div style="background-color: #ffffff">
{logo_html}
<div style="margin: 0px auto; max-width: 600px">
<table
align="center"
border="0"
cellpadding="0"
cellspacing="0"
role="presentation"
style="width: 100%"
>
<tbody>
<tr>
<td
style="
direction: ltr;
font-size: 0px;
padding: 20px 0;
padding-bottom: 20px;
padding-top: 20px;
text-align: center;
"
>
<!--[if mso | IE]><table role="presentation" border="0" cellpadding="0" cellspacing="0"><tr><td class="" style="vertical-align:middle;width:600px;" ><![endif]-->
<div
class="mj-column-per-100 mj-outlook-group-fix"
style="
font-size: 0px;
text-align: left;
direction: ltr;
display: inline-block;
vertical-align: middle;
width: 100%;
"
>
<table
border="0"
cellpadding="0"
cellspacing="0"
role="presentation"
style="vertical-align: middle"
width="100%"
>
<tbody>
{content}
</tbody>
</table>
</div>
<!--[if mso | IE]></td></tr></table><![endif]-->
</td>
</tr>
</tbody>
</table>
</div>
<!--[if mso | IE]></td></tr></table><![endif]-->
</div>
</body>
</html>
""" # noqa: E501 | if logo_url else "" # noqa: E501
return f | base_default_email | python | geldata/gel | edb/server/protocol/auth_ext/ui/components.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/components.py | Apache-2.0 |
def render_signin_page(
*,
base_path: str,
providers: frozenset[auth_config.ProviderConfig],
error_message: Optional[str] = None,
email: Optional[str] = None,
challenge: str,
selected_tab: Optional[str] = None,
# config
redirect_to: str,
redirect_to_on_signup: Optional[str] = None,
app_name: Optional[str] = None,
logo_url: Optional[str] = None,
dark_logo_url: Optional[str] = None,
brand_color: Optional[str] = None,
) -> bytes:
password_provider = None
webauthn_provider = None
magic_link_provider = None
oauth_providers = []
for p in providers:
if p.name == 'builtin::local_emailpassword':
password_provider = p
elif p.name == 'builtin::local_webauthn':
webauthn_provider = p
elif p.name == 'builtin::local_magic_link':
magic_link_provider = p
elif p.name.startswith('builtin::oauth_') or hasattr(p, "issuer_url"):
oauth_providers.append(cast(auth_config.OAuthProviderConfig, p))
base_email_factor_form = f"""
<input type="hidden" name="challenge" value="{challenge}" />
<label for="email">Email</label>
<input id="email" name="email" type="email" value="{email or ''}" />
"""
password_input = (
f"""
<div class="field-header">
<label for="password">Password</label>
<a
id="forgot-password-link"
class="field-note"
href="forgot-password?challenge={challenge}"
tabindex="-1">
Forgot password?
</a>
</div>
<input id="password" name="password" type="password" />
"""
if password_provider
else ''
)
email_factor_form = render_email_factor_form(
base_email_factor_form=base_email_factor_form,
password_input=password_input,
selected_tab=selected_tab,
single_form_fields=f'''
{render.hidden_input(
name='redirect_to',
value=(
redirect_to if webauthn_provider
else (base_path + '/ui/magic-link-sent')
),
secondary_value=redirect_to
)}
{render.hidden_input(
name='redirect_on_failure',
value=f'{base_path}/ui/signin',
secondary_value=f'{base_path}/ui/signin?selected_tab=password'
)}
{render.hidden_input(
name='provider',
value=magic_link_provider.name if magic_link_provider else '',
secondary_value=(
password_provider.name if password_provider else '')
)}
{render.hidden_input(
name='callback_url', value=redirect_to
) if magic_link_provider else ''}
''',
password_form=(
f"""
<form
method="post"
action="../authenticate"
novalidate
>
<input type="hidden" name="redirect_to" value="{
redirect_to}" />
<input type="hidden" name="redirect_on_failure" value="{
base_path}/ui/signin?selected_tab=password" />
<input type="hidden" name="provider" value="{
password_provider.name}" />
{base_email_factor_form}
{password_input}
{render.button("Sign In", id="password-signin")}
</form>
"""
if password_provider
else None
),
webauthn_form=(
f"""
<form
id="email-factor"
novalidate
>
<input type="hidden" name="redirect_to" value="{
redirect_to}" />
<input type="hidden" name="redirect_on_failure" value="{
base_path}/ui/signin?selected_tab=webauthn" />
{base_email_factor_form}
{render.button("Sign In", id="webauthn-signin")}
</form>
"""
if webauthn_provider
else None
),
magic_link_form=(
f"""
<form
method="post"
action="../magic-link/email"
novalidate
>
<input type="hidden" name="redirect_to" value="{
base_path}/ui/magic-link-sent" />
<input type="hidden" name="redirect_on_failure" value="{
base_path}/ui/signin?selected_tab=magic_link" />
<input type="hidden" name="provider"
value="{magic_link_provider.name}" />
<input type="hidden" name="callback_url" value="{
redirect_to}" />
{base_email_factor_form}
{render.button("Email sign in link", id="magic-link-signin")}
</form>
"""
if magic_link_provider
else None
),
)
if email_factor_form:
email_factor_form += render.bottom_note(
"Don't have an account?", link='Sign up', href='signup'
)
oauth_buttons = render.oauth_buttons(
oauth_providers=oauth_providers,
label_prefix=('Sign in with' if email_factor_form else 'Continue with'),
challenge=challenge,
redirect_to=redirect_to,
redirect_to_on_signup=redirect_to_on_signup,
collapsed=email_factor_form is not None and len(oauth_providers) >= 3,
)
return render.base_page(
title=f'Sign in{f" to {app_name}" if app_name else ""}',
logo_url=logo_url,
dark_logo_url=dark_logo_url,
brand_color=brand_color,
cleanup_search_params=['error', 'email', 'selected_tab'],
content=f'''
{render.title('Sign in', app_name=app_name)}
{render.error_message(error_message)}
{oauth_buttons}
{render.divider
if email_factor_form and len(oauth_providers) > 0
else ''}
{email_factor_form or ''}
{render.script('webauthn-authenticate') if webauthn_provider else ''}
''',
    ) | Render the built-in sign-in page for the configured auth providers. | render_signin_page | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
def render_email_factor_form(
*,
base_email_factor_form: Optional[str] = None,
password_input: str = '',
selected_tab: Optional[str] = None,
single_form_fields: str = '',
password_form: Optional[str],
webauthn_form: Optional[str],
magic_link_form: Optional[str],
) -> Optional[str]:
if (
password_form is None
and webauthn_form is None
and magic_link_form is None
):
return None
match (password_form, webauthn_form, magic_link_form):
case (_, None, None):
return password_form
case (None, _, None):
return webauthn_form
case (None, None, _):
return magic_link_form
if base_email_factor_form is None or (
webauthn_form is not None and magic_link_form is not None
):
tabs = [
(
('Passkey', webauthn_form, selected_tab == 'webauthn')
if webauthn_form
else None
),
(
('Password', password_form, selected_tab == 'password')
if password_form
else None
),
(
('Email Link', magic_link_form, selected_tab == 'magic_link')
if magic_link_form
else None
),
]
selected_tabs = [t[2] for t in tabs if t is not None]
selected_index = (
selected_tabs.index(True) if True in selected_tabs else 0
)
return render.tabs_buttons(
[t[0] for t in tabs if t is not None], selected_index
) + render.tabs_content(
[t[1] for t in tabs if t is not None], selected_index
)
slider_content = [
f'''
{render.button("Sign In", id="webauthn-signin") if webauthn_form
else render.button("Email sign in link", id="magic-link-signin")
}
{render.button("Sign in with password", id="show-password-form",
secondary=True, type="button")}
''',
f'''
{password_input}
<div class="button-group">
{render.button(None, id="hide-password-form",
secondary=True, type="button")}
{render.button("Sign in with password", id="password-signin")}
</div>
''',
]
return f"""
<form id="email-factor" method="post" {
'action="../magic-link/email"'
if magic_link_form else ''
} data-secondary-action="../authenticate" novalidate>
{single_form_fields}
{base_email_factor_form}
{render.tabs_content(
slider_content,
selected_tab=(1 if selected_tab == 'password' else 0)
)}
</form>
""" | ,
f | render_email_factor_form | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
def render_signup_page(
*,
base_path: str,
providers: frozenset[auth_config.ProviderConfig],
error_message: Optional[str] = None,
email: Optional[str] = None,
challenge: str,
selected_tab: Optional[str] = None,
# config
redirect_to: str,
redirect_to_on_signup: Optional[str] = None,
app_name: Optional[str] = None,
logo_url: Optional[str] = None,
dark_logo_url: Optional[str] = None,
brand_color: Optional[str] = None,
) -> bytes:
password_provider = None
webauthn_provider = None
magic_link_provider = None
oauth_providers = []
for p in providers:
if p.name == 'builtin::local_emailpassword':
password_provider = p
elif p.name == 'builtin::local_webauthn':
webauthn_provider = p
elif p.name == 'builtin::local_magic_link':
magic_link_provider = p
elif p.name.startswith('builtin::oauth_') or hasattr(p, "issuer_url"):
oauth_providers.append(cast(auth_config.OAuthProviderConfig, p))
base_email_factor_form = f"""
<input type="hidden" name="challenge" value="{challenge}" />
<label for="email">Email</label>
<input id="email" name="email" type="email" value="{email or ''}" />
"""
email_factor_form = render_email_factor_form(
selected_tab=selected_tab,
password_form=(
f"""
<form
method="post"
action="../register"
novalidate
>
<input type="hidden" name="redirect_to" value="{
redirect_to_on_signup or redirect_to}" />
<input type="hidden" name="redirect_on_failure" value="{
base_path}/ui/signup?selected_tab=password" />
<input type="hidden" name="provider" value="{
password_provider.name}" />
<input type="hidden" name="verify_url" value="{
base_path}/ui/verify" />
{base_email_factor_form}
<label for="password">Password</label>
<input id="password" name="password" type="password" />
{render.button("Sign Up", id="password-signup")}
</form>
"""
if password_provider
else None
),
webauthn_form=(
f"""
<form
id="email-factor"
novalidate
>
<input type="hidden" name="redirect_to" value="{
redirect_to_on_signup or redirect_to}" />
<input type="hidden" name="redirect_on_failure" value="{
base_path}/ui/signup?selected_tab=webauthn" />
<input type="hidden" name="verify_url" value="{
base_path}/ui/verify" />
{base_email_factor_form}
{render.button("Sign Up", id="webauthn-signup")}
</form>
"""
if webauthn_provider
else None
),
magic_link_form=(
f"""
<form
method="post"
action="../magic-link/register"
novalidate
>
<input type="hidden" name="redirect_to" value="{
base_path}/ui/magic-link-sent" />
<input type="hidden" name="redirect_on_failure" value="{
base_path}/ui/signup?selected_tab=magic_link" />
<input type="hidden" name="provider" value="{
magic_link_provider.name}" />
<input type="hidden" name="callback_url" value="{
redirect_to_on_signup or redirect_to}" />
{base_email_factor_form}
{render.button("Sign Up with Email Link",
id="magic-link-signup")}
</form>
"""
if magic_link_provider
else None
),
)
if email_factor_form:
email_factor_form += render.bottom_note(
'Already have an account?', link='Sign in', href='signin'
)
oauth_buttons = render.oauth_buttons(
oauth_providers=oauth_providers,
label_prefix=('Sign up with' if email_factor_form else 'Continue with'),
challenge=challenge,
redirect_to=redirect_to,
redirect_to_on_signup=redirect_to_on_signup,
collapsed=email_factor_form is not None and len(oauth_providers) >= 3,
)
return render.base_page(
title=f'Sign up{f" to {app_name}" if app_name else ""}',
logo_url=logo_url,
dark_logo_url=dark_logo_url,
brand_color=brand_color,
cleanup_search_params=['error', 'email', 'selected_tab'],
content=f'''
{render.title('Sign up', app_name=app_name)}
{render.error_message(error_message)}
{oauth_buttons}
{render.divider
if email_factor_form and len(oauth_providers) > 0
else ''}
{email_factor_form or ''}
{render.script('webauthn-register') if webauthn_provider else ''}
''',
    ) | Render the built-in sign-up page for the configured auth providers. | render_signup_page | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
def render_forgot_password_page(
*,
base_path: str,
provider_name: str,
challenge: str,
error_message: Optional[str] = None,
email: Optional[str] = None,
email_sent: Optional[str] = None,
# config
app_name: Optional[str] = None,
logo_url: Optional[str] = None,
dark_logo_url: Optional[str] = None,
brand_color: Optional[str] = None,
) -> bytes:
if email_sent is not None:
content = render.success_message(
f'Password reset email has been sent to <b>{email_sent}</b>'
)
else:
content = f'''
{render.error_message(error_message)}
<form method="POST" action="../send-reset-email">
<input type="hidden" name="provider" value="{provider_name}" />
<input type="hidden" name="challenge" value="{challenge}" />
<input type="hidden" name="redirect_on_failure" value="{
base_path}/ui/forgot-password?challenge={challenge}" />
<input type="hidden" name="redirect_to" value="{
base_path}/ui/forgot-password?challenge={challenge}" />
<input type="hidden" name="reset_url" value="{
base_path}/ui/reset-password" />
<label for="email">Email</label>
<input id="email" name="email" type="email" value="{email or ''}" />
{render.button('Send Reset Email')}
</form>
'''
return render.base_page(
title=f'Reset password{f" for {app_name}" if app_name else ""}',
logo_url=logo_url,
dark_logo_url=dark_logo_url,
brand_color=brand_color,
cleanup_search_params=['error', 'email', 'email_sent'],
content=f'''
{render.title('Reset password', join='for', app_name=app_name)}
{content}
{render.bottom_note("Back to", link="Sign In", href="signin")}
''',
    ) | Render the forgot-password page, or a confirmation once the reset email has been sent. | render_forgot_password_page | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
def render_email_verification_page(
*,
is_valid: bool,
error_messages: list[str],
verification_token: Optional[str] = None,
# config
app_name: Optional[str] = None,
logo_url: Optional[str] = None,
dark_logo_url: Optional[str] = None,
brand_color: Optional[str] = None,
) -> bytes:
resend_url = None
if verification_token:
verification_token = html.escape(verification_token)
resend_url = (
f"resend-verification?verification_token={verification_token}"
)
if not is_valid:
messages = ''.join(
[render.error_message(error) for error in error_messages]
)
content = f'''
{messages}
{(f'<a href="{resend_url}">Try sending another verification'
'email</a>')
if resend_url else ''}
'''
else:
content = '''
Email has been successfully verified. You may now
<a href="signin">sign in</a>
'''
return render.base_page(
title=f'Verify email{f" for {app_name}" if app_name else ""}',
logo_url=logo_url,
dark_logo_url=dark_logo_url,
brand_color=brand_color,
cleanup_search_params=['error'],
content=f'''
{render.title('Verify email', join='for', app_name=app_name)}
{content}
''',
    ) | Render the email verification result page, showing either success or the verification errors. | render_email_verification_page | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
def render_email_verification_expired_page(
verification_token: str,
# config
app_name: Optional[str] = None,
logo_url: Optional[str] = None,
dark_logo_url: Optional[str] = None,
brand_color: Optional[str] = None,
) -> bytes:
verification_token = html.escape(verification_token)
content = render.error_message(
f'''
Your verification token has expired.
<a href="resend-verification?verification_token={verification_token}">
Click here to resend the verification email
</a>
''',
False,
)
return render.base_page(
title=f'Verification expired{f" for {app_name}" if app_name else ""}',
logo_url=logo_url,
dark_logo_url=dark_logo_url,
brand_color=brand_color,
cleanup_search_params=['error'],
content=f'''
{render.title('Verification expired', join='for',
app_name=app_name)}
{content}
''',
) | ,
False,
)
return render.base_page(
title=f'Verification expired{f" for {app_name}" if app_name else ""}',
logo_url=logo_url,
dark_logo_url=dark_logo_url,
brand_color=brand_color,
cleanup_search_params=['error'],
content=f | render_email_verification_expired_page | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
def render_resend_verification_done_page(
*,
is_valid: bool,
verification_token: Optional[str] = None,
# config
app_name: Optional[str] = None,
logo_url: Optional[str] = None,
dark_logo_url: Optional[str] = None,
brand_color: Optional[str] = None,
) -> bytes:
if verification_token is None:
content = render.error_message(
f"""
Missing verification token, please follow the link provided in the
original email, or on the signin page.
""",
False,
)
else:
verification_token = html.escape(verification_token)
if is_valid:
content = f'''
Your verification email has been resent. Please check your email.
'''
else:
content = f'''
Unable to resend verification email. Please try again.
'''
return render.base_page(
title=(
f'Email verification resent{f" for {app_name}" if app_name else ""}'
),
logo_url=logo_url,
dark_logo_url=dark_logo_url,
brand_color=brand_color,
cleanup_search_params=['error'],
content=f'''
{render.title('Email verification resent', join='for',
app_name=app_name)}
{content}
''',
) | ,
False,
)
else:
verification_token = html.escape(verification_token)
if is_valid:
content = f | render_resend_verification_done_page | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
def render_password_reset_email(
*,
to_addr: str,
reset_url: str,
app_name: Optional[str] = None,
logo_url: Optional[str] = None,
dark_logo_url: Optional[str] = None,
brand_color: Optional[str] = render.DEFAULT_BRAND_COLOR,
) -> email.message.EmailMessage:
brand_color = brand_color or render.DEFAULT_BRAND_COLOR
msg = email.message.EmailMessage()
msg["To"] = to_addr
msg["Subject"] = "Reset password"
plain_text_content = f"""
Somebody requested a new password for the {app_name or ''} account associated
with {to_addr}.
Please paste the following URL into your browser address bar to reset your
password:
{reset_url}
"""
html_content = f"""
<tr>
<td
style="
direction: ltr;
font-size: 0px;
padding: 20px 0;
padding-bottom: 20px;
padding-top: 20px;
text-align: center;
"
>
<!--[if mso | IE]><table role="presentation" border="0" cellpadding="0" cellspacing="0"><tr><td class="" style="vertical-align:middle;width:600px;" ><![endif]-->
<div
class="mj-column-per-100 mj-outlook-group-fix"
style="
font-size: 0px;
text-align: left;
direction: ltr;
display: inline-block;
vertical-align: middle;
width: 100%;
"
>
<table
border="0"
cellpadding="0"
cellspacing="0"
role="presentation"
style="vertical-align: middle"
width="100%"
>
<tbody>
<tr>
<td
align="left"
style="
font-size: 0px;
padding: 10px 25px;
padding-top: 50px;
word-break: break-word;
"
>
<div
style="
font-family: open Sans Helvetica, Arial, sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
Somebody requested a new password for the {app_name or ''}
account associated with {to_addr}.
</div>
</td>
</tr>
<tr>
<td
align="left"
style="
font-size: 0px;
padding: 10px 25px;
word-break: break-word;
"
>
<div
style="
font-family: open Sans Helvetica, Arial, sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
No changes have been made to your account yet.
</div>
</td>
</tr>
<tr>
<td
align="left"
style="
font-size: 0px;
padding: 10px 25px;
word-break: break-word;
"
>
<div
style="
font-family: open Sans Helvetica, Arial, sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
You can reset your password by clicking the button below:
</div>
</td>
</tr>
<tr>
<td
align="center"
vertical-align="middle"
style="
font-size: 0px;
padding: 10px 25px;
word-break: break-word;
"
>
<table
border="0"
cellpadding="0"
cellspacing="0"
role="presentation"
style="border-collapse: separate; line-height: 100%"
>
<tr>
<td
align="center"
bgcolor="#{brand_color}"
role="presentation"
style="
border: none;
border-radius: 4px;
cursor: auto;
mso-padding-alt: 10px 25px;
background: #{brand_color};
"
valign="middle"
>
<a
href="{reset_url}"
style="
display: inline-block;
background: #{brand_color};
color: #ffffff;
font-family: open Sans Helvetica, Arial, sans-serif;
font-size: 18px;
font-weight: bold;
line-height: 120%;
margin: 0;
text-decoration: none;
text-transform: none;
padding: 10px 25px;
mso-padding-alt: 0px;
border-radius: 4px;
"
target="_blank"
>
Reset your password
</a>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td
align="left"
style="
font-size: 0px;
padding: 10px 25px;
word-break: break-word;
"
>
<div
style="
font-family: open Sans Helvetica, Arial, sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
In case the button didn't work, please paste the following URL
into your browser address bar:
<p style="word-break: break-all">{reset_url}</p>
</div>
</td>
</tr>
<tr>
<td
align="left"
style="
font-size: 0px;
padding: 10px 25px;
word-break: break-word;
"
>
<div
style="
font-family: open Sans Helvetica, Arial, sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
If you did not request a new password, please let us know
immediately by replying to this email.
</div>
</td>
</tr>
</tbody>
</table>
</div>
</td>
</tr>
""" # noqa: E501
msg.set_content(plain_text_content, subtype="plain")
msg.add_alternative(
render.base_default_email(
content=html_content,
app_name=app_name,
logo_url=logo_url,
),
subtype="html",
)
    return msg | Build the password reset email message with plain-text and HTML alternatives. | render_password_reset_email | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
def render_verification_email(
*,
to_addr: str,
verify_url: str,
app_name: Optional[str] = None,
logo_url: Optional[str] = None,
dark_logo_url: Optional[str] = None,
brand_color: Optional[str] = render.DEFAULT_BRAND_COLOR,
) -> email.message.EmailMessage:
brand_color = brand_color or render.DEFAULT_BRAND_COLOR
msg = email.message.EmailMessage()
msg["To"] = to_addr
msg["Subject"] = (
f"Verify your email{f' for {app_name}' if app_name else ''}"
)
plain_text_content = f"""
Congratulations, you're registered{f' at {app_name}' if app_name else ''}!
Please paste the following URL into your browser address bar to verify your
email address:
{verify_url}
"""
html_content = f"""
<tr>
<td
align="left"
style="
font-size: 0px;
padding: 10px 25px;
padding-top: 50px;
word-break: break-word;
"
>
<div
style="
font-family:
open Sans Helvetica,
Arial,
sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
Congratulations, you're registered
{f'at {app_name}' if app_name else ''}!
</div>
</td>
</tr>
<tr>
<td
align="left"
style="font-size: 0px; padding: 10px 25px; word-break: break-word"
>
<div
style="
font-family:
open Sans Helvetica,
Arial,
sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
Please press the button below to verify your email address:
</div>
</td>
</tr>
<tr>
<td
align="center"
vertical-align="middle"
style="font-size: 0px; padding: 10px 25px; word-break: break-word"
>
<table
border="0"
cellpadding="0"
cellspacing="0"
role="presentation"
style="border-collapse: separate; line-height: 100%"
>
<tr>
<td
align="center"
bgcolor="#{brand_color}"
role="presentation"
style="
border: none;
border-radius: 4px;
cursor: auto;
mso-padding-alt: 10px 25px;
background: #{brand_color};
"
valign="middle"
>
<a
href="{verify_url}"
style="
display: inline-block;
background: #{brand_color};
color: #ffffff;
font-family:
open Sans Helvetica,
Arial,
sans-serif;
font-size: 18px;
font-weight: bold;
line-height: 120%;
margin: 0;
text-decoration: none;
text-transform: none;
padding: 10px 25px;
mso-padding-alt: 0px;
border-radius: 4px;
"
target="_blank"
>
Verify email address
</a>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td
align="left"
style="font-size: 0px; padding: 10px 25px; word-break: break-word"
>
<div
style="
font-family:
open Sans Helvetica,
Arial,
sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
In case the button didn't work, please paste the following URL into
your browser address bar:
<p style="word-break: break-all">{verify_url}</p>
</div>
</td>
</tr>
"""
msg.set_content(plain_text_content, subtype="plain")
    msg.add_alternative(
render.base_default_email(
content=html_content,
app_name=app_name,
logo_url=logo_url,
),
subtype="html",
)
return msg | html_content = f | render_verification_email | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
def render_magic_link_email(
*,
to_addr: str,
link: str,
app_name: Optional[str] = None,
logo_url: Optional[str] = None,
dark_logo_url: Optional[str] = None,
brand_color: Optional[str] = render.DEFAULT_BRAND_COLOR,
) -> email.message.EmailMessage:
brand_color = brand_color or render.DEFAULT_BRAND_COLOR
msg = email.message.EmailMessage()
msg["To"] = to_addr
msg["Subject"] = "Sign in link"
plain_text_content = f"""
Please paste the following URL into your browser address bar to be signed into
your account:
{link}
"""
html_content = f"""
<tr>
<td
align="left"
style="font-size: 0px; padding: 10px 25px; word-break: break-word"
>
<div
style="
font-family: open Sans Helvetica, Arial, sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
Sign into your {app_name or ""} account by clicking the button below:
</div>
</td>
</tr>
<tr>
<td
align="center"
vertical-align="middle"
style="font-size: 0px; padding: 10px 25px; word-break: break-word"
>
<table
border="0"
cellpadding="0"
cellspacing="0"
role="presentation"
style="border-collapse: separate; line-height: 100%"
>
<tr>
<td
align="center"
bgcolor="#{brand_color}"
role="presentation"
style="
border: none;
border-radius: 4px;
cursor: auto;
mso-padding-alt: 10px 25px;
background: #{brand_color};
"
valign="middle"
>
<a
href="{link}"
style="
display: inline-block;
background: #{brand_color};
color: #ffffff;
font-family: open Sans Helvetica, Arial, sans-serif;
font-size: 18px;
font-weight: bold;
line-height: 120%;
margin: 0;
text-decoration: none;
text-transform: none;
padding: 10px 25px;
mso-padding-alt: 0px;
border-radius: 4px;
"
target="_blank"
>
Sign in
</a>
</td>
</tr>
</table>
</td>
</tr>
<tr>
<td
align="left"
style="font-size: 0px; padding: 10px 25px; word-break: break-word"
>
<div
style="
font-family: open Sans Helvetica, Arial, sans-serif;
font-size: 16px;
line-height: 1;
text-align: left;
color: #000000;
"
>
In case the button didn't work, please paste the following URL into your
browser address bar:
<p style="word-break: break-all">{link}</p>
</div>
</td>
</tr>
"""
msg.set_content(plain_text_content, subtype="plain")
    msg.add_alternative(
render.base_default_email(
content=html_content,
app_name=app_name,
logo_url=logo_url,
),
subtype="html",
)
return msg | html_content = f | render_magic_link_email | python | geldata/gel | edb/server/protocol/auth_ext/ui/__init__.py | https://github.com/geldata/gel/blob/master/edb/server/protocol/auth_ext/ui/__init__.py | Apache-2.0 |
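A minimal usage sketch for the email renderers above, assuming the module and its render helpers are importable; the address, link, and app name are placeholders:

# Hypothetical call site for render_magic_link_email (shown above).
msg = render_magic_link_email(
    to_addr="user@example.com",
    link="https://example.com/auth/magic?token=abc123",
    app_name="Example App",
)
print(msg["Subject"])          # Sign in link
print(msg.get_content_type())  # multipart/alternative (plain text + HTML parts)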
async def create_postgres_connection(
dsn: str | ConnectionParams,
protocol_factory: Callable[[], P],
*,
source_description: Optional[str] = None,
) -> Tuple[PGRawConn, P]:
"""
Open a PostgreSQL connection to the address specified by the DSN or
ConnectionParams, creating the user protocol from the protocol_factory.
This method establishes the connection asynchronously. When successful, it
returns a (PGRawConn, protocol) pair.
"""
if isinstance(dsn, str):
dsn = ConnectionParams(dsn=dsn)
connect_timeout = dsn.connect_timeout
try:
state = pgrust.PyConnectionState(
dsn._params, "postgres", str(get_pg_home_directory())
)
except Exception as e:
raise ValueError(e)
pg_state = PGState(
parameters={},
cancellation_key=None,
auth=None,
server_error=None,
ssl=False,
)
# The PGConnectionProtocol will drive the PyConnectionState from network
# bytes it receives, as well as driving the connection from the messages
# from PyConnectionState.
connect_protocol_factory = (
lambda hostname, host, port: PGConnectionProtocol(
hostname,
state,
pg_state,
complete_connection_callback(
host,
port,
source_description,
state,
protocol_factory,
pg_state,
),
)
)
# Create a transport to the backend based off the host candidates.
host_candidates = await asyncio.get_running_loop().run_in_executor(
executor=None, func=lambda: state.config.host_candidates
)
_, protocol = await _create_connection(
connect_protocol_factory,
connect_timeout,
host_candidates,
)
conn, user_protocol = await protocol.ready_future
return conn, user_protocol | Open a PostgreSQL connection to the address specified by the DSN or
ConnectionParams, creating the user protocol from the protocol_factory.
This method establishes the connection asynchronously. When successful, it
returns a (PGRawConn, protocol) pair. | create_postgres_connection | python | geldata/gel | edb/server/pgcon/rust_transport.py | https://github.com/geldata/gel/blob/master/edb/server/pgcon/rust_transport.py | Apache-2.0 |
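A hedged sketch of driving create_postgres_connection; MyProtocol and the DSN below are illustrative assumptions, not part of this module:

import asyncio

class MyProtocol(asyncio.Protocol):
    # Hypothetical user protocol handed in via protocol_factory.
    def data_received(self, data: bytes) -> None:
        print(f"received {len(data)} bytes from the backend")

async def main() -> None:
    conn, proto = await create_postgres_connection(
        "postgres://postgres@localhost:5432/postgres",
        MyProtocol,
        source_description="example connection",
    )
    # conn is the PGRawConn transport wrapper; proto is the MyProtocol instance.

asyncio.run(main())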
def _build_init_con_script(*, check_pg_is_in_recovery: bool) -> bytes:
if check_pg_is_in_recovery:
pg_is_in_recovery = ('''
SELECT CASE WHEN pg_is_in_recovery() THEN
edgedb.raise(
NULL::bigint,
'read_only_sql_transaction',
msg => 'cannot use a hot standby'
)
END;
''').strip()
else:
pg_is_in_recovery = ''
return textwrap.dedent(f'''
{pg_is_in_recovery}
{SETUP_TEMP_TABLE_SCRIPT}
{SETUP_CONFIG_CACHE_SCRIPT}
{SETUP_DML_DUMMY_TABLE_SCRIPT}
PREPARE _clear_state AS
WITH x1 AS (
DELETE FROM _config_cache
)
DELETE FROM _edgecon_state WHERE type = 'C' OR type = 'B';
PREPARE _apply_state(jsonb) AS
INSERT INTO
_edgecon_state(name, value, type)
SELECT
(CASE
WHEN e->'type' = '"B"'::jsonb
THEN edgedb._apply_session_config(e->>'name', e->'value')
ELSE e->>'name'
END) AS name,
e->'value' AS value,
e->>'type' AS type
FROM
jsonb_array_elements($1::jsonb) AS e;
PREPARE _reset_session_config AS
SELECT edgedb._reset_session_config();
PREPARE _apply_sql_state(jsonb) AS
SELECT
e.key AS name,
pg_catalog.set_config(e.key, e.value, false) AS value
FROM
jsonb_each_text($1::jsonb) AS e;
''').strip().encode('utf-8') | ).strip()
else:
pg_is_in_recovery = ''
return textwrap.dedent(f | _build_init_con_script | python | geldata/gel | edb/server/pgcon/connect.py | https://github.com/geldata/gel/blob/master/edb/server/pgcon/connect.py | Apache-2.0 |
def _build_constant_extraction_map(
src: pgast.Base,
out: pgast.Base,
) -> pg_codegen.BaseSourceMap:
"""Traverse two ASTs in parallel and build a source map between them.
The ASTs should *mostly* line up. When they don't, that is
considered a leaf.
This is used to translate SQL spans reported on a normalized query
to ones that make sense on the pre-normalization version.
Note that we only use this map for errors reported during the
"parse" phase, so we don't need to worry about it being reused
with different constants.
"""
tdata = pg_codegen.BaseSourceMap(
source_start=src.span.start if src.span else 0,
# HACK: I don't know why, but this - 1 helps a lot.
output_start=out.span.start - 1 if out.span else 0,
)
    if type(src) is not type(out):
return tdata
children = tdata.children
for (k1, v1), (k2, v2) in zip(ast.iter_fields(src), ast.iter_fields(out)):
assert k1 == k2
if isinstance(v1, pgast.Base) and isinstance(v2, pgast.Base):
children.append(_build_constant_extraction_map(v1, v2))
elif (
isinstance(v1, (tuple, list)) and isinstance(v2, (tuple, list))
):
for v1e, v2e in zip(v1, v2):
if isinstance(v1e, pgast.Base) and isinstance(v2e, pgast.Base):
children.append(_build_constant_extraction_map(v1e, v2e))
elif (
isinstance(v1, dict) and isinstance(v2, dict)
):
for k, v1e in v1.items():
v2e = v2.get(k)
if isinstance(v1e, pgast.Base) and isinstance(v2e, pgast.Base):
children.append(_build_constant_extraction_map(v1e, v2e))
children.sort(key=lambda k: k.output_start)
return tdata | Traverse two ASTs in parallel and build a source map between them.
The ASTs should *mostly* line up. When they don't, that is
considered a leaf.
This is used to translate SQL spans reported on a normalized query
to ones that make sense on the pre-normalization version.
Note that we only use this map for errors reported during the
"parse" phase, so we don't need to worry about it being reused
with different constants. | _build_constant_extraction_map | python | geldata/gel | edb/server/compiler/sql.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/sql.py | Apache-2.0 |
def _type_ref_packer(t: s_types.Type, *, ctx: Context) -> bytes:
"""Return typedesc representation of a type reference."""
return _type_ref_id_packer(_describe_type(t, ctx=ctx), ctx=ctx) | Return typedesc representation of a type reference. | _type_ref_packer | python | geldata/gel | edb/server/compiler/sertypes.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/sertypes.py | Apache-2.0 |
def _type_ref_id_packer(type_id: uuid.UUID, *, ctx: Context) -> bytes:
"""Return typedesc representation of a type reference by type id."""
return _uint16_packer(ctx.uuid_to_pos[type_id]) | Return typedesc representation of a type reference by type id. | _type_ref_id_packer | python | geldata/gel | edb/server/compiler/sertypes.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/sertypes.py | Apache-2.0 |
def _type_ref_seq_packer(ts: Sequence[s_types.Type], *, ctx: Context) -> bytes:
"""Return typedesc representation of a sequence of type references."""
result = _uint16_packer(len(ts))
for t in ts:
result += _type_ref_packer(t, ctx=ctx)
return result | Return typedesc representation of a sequence of type references. | _type_ref_seq_packer | python | geldata/gel | edb/server/compiler/sertypes.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/sertypes.py | Apache-2.0 |
def _type_ref_id_seq_packer(ts: Sequence[uuid.UUID], *, ctx: Context) -> bytes:
"""Return typedesc representation of a sequence of type id references."""
result = _uint16_packer(len(ts))
for t in ts:
result += _type_ref_id_packer(t, ctx=ctx)
return result | Return typedesc representation of a sequence of type id references. | _type_ref_id_seq_packer | python | geldata/gel | edb/server/compiler/sertypes.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/sertypes.py | Apache-2.0 |
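The sequence packers above emit a 16-bit count followed by one 16-bit position per referenced type descriptor. A self-contained sketch of that layout, assuming _uint16_packer is unsigned big-endian two-byte packing (an assumption, not taken from this module):

import struct

def uint16(value: int) -> bytes:
    # Assumed stand-in for _uint16_packer: unsigned 16-bit, network byte order.
    return struct.pack("!H", value)

def pack_position_sequence(positions: list[int]) -> bytes:
    # Length prefix, then one position index per referenced descriptor.
    out = uint16(len(positions))
    for pos in positions:
        out += uint16(pos)
    return out

assert pack_position_sequence([0, 2, 5]) == b"\x00\x03\x00\x00\x00\x02\x00\x05"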
def parse(
typedesc: bytes,
protocol_version: edbdef.ProtocolVersion,
) -> TypeDesc:
"""Unmarshal a byte stream with one or more type descriptors."""
ctx = ParseContext(protocol_version)
buf = io.BytesIO(typedesc)
wrapped = binwrapper.BinWrapper(buf)
while buf.tell() < len(typedesc):
_parse(wrapped, ctx=ctx)
if not ctx.codecs_list:
raise errors.InternalServerError('could not parse type descriptor')
return ctx.codecs_list[-1] | Unmarshal a byte stream with one or more type descriptors. | parse | python | geldata/gel | edb/server/compiler/sertypes.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/sertypes.py | Apache-2.0 |
def _parse(desc: binwrapper.BinWrapper, ctx: ParseContext) -> None:
"""Unmarshal the next type descriptor from the byte stream."""
if ctx.protocol_version >= (2, 0):
# .length
desc.read_bytes(4)
t = desc.read_bytes(1)
try:
tag = DescriptorTag(t)
except ValueError:
if (t[0] >= 0x80 and t[0] <= 0xff):
# Ignore all type annotations.
_parse_string(desc)
return
else:
raise NotImplementedError(
f'no codec implementation for Gel data kind {hex(t[0])}')
else:
ctx.codecs_list.append(_parse_descriptor(tag, desc, ctx=ctx)) | Unmarshal the next type descriptor from the byte stream. | _parse | python | geldata/gel | edb/server/compiler/sertypes.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/sertypes.py | Apache-2.0 |
def __init__(self, std_schema: s_schema.Schema, config_spec: config.Spec):
"""
{
module := 'default',
aliases := [ ('alias', 'module::target'), ... ],
config := cfg::Config {
session_idle_transaction_timeout: <duration>'0:05:00',
query_execution_timeout: <duration>'0:00:00',
allow_bare_ddl: AlwaysAllow,
apply_access_policies: true,
},
globals := { key := value, ... },
}
"""
schema = std_schema
str_type = schema.get('std::str', type=s_scalars.ScalarType)
free_obj = schema.get('std::FreeObject', type=s_objtypes.ObjectType)
schema, self._state_type = derive_alias(schema, free_obj, 'state_type')
# aliases := { ('alias1', 'mod::type'), ... }
schema, alias_tuple = s_types.Tuple.from_subtypes(
schema, [str_type, str_type])
schema, aliases_array = s_types.Array.from_subtypes(
schema, [alias_tuple])
schema, self.globals_type = derive_alias(
schema, free_obj, 'state_globals')
# config := cfg::Config { session_cfg1, session_cfg2, ... }
schema, config_type = derive_alias(
schema, free_obj, 'state_config'
)
config_shape = self._make_config_shape(config_spec, schema)
# Build type descriptors and codecs for compiler RPC
# comp_config := cfg::Config { comp_cfg1, comp_cfg2, ... }
schema, self._comp_config_type = derive_alias(
schema, free_obj, 'comp_config'
)
self._comp_config_shape: tuple[InputShapeElement, ...] = (
self._make_config_shape(
config_spec,
schema,
lambda setting: setting.affects_compilation,
)
)
self._input_shapes: immutables.Map[
s_types.Type,
tuple[InputShapeElement, ...],
] = immutables.Map([
(config_type, config_shape),
(self._state_type, (
("module", str_type, enums.Cardinality.AT_MOST_ONE),
("aliases", aliases_array, enums.Cardinality.AT_MOST_ONE),
("config", config_type, enums.Cardinality.AT_MOST_ONE),
))
])
self.config_type = config_type
self._schema = schema
self._contexts: dict[edbdef.ProtocolVersion, Context] = {} | {
module := 'default',
aliases := [ ('alias', 'module::target'), ... ],
config := cfg::Config {
session_idle_transaction_timeout: <duration>'0:05:00',
query_execution_timeout: <duration>'0:00:00',
allow_bare_ddl: AlwaysAllow,
apply_access_policies: true,
},
globals := { key := value, ... },
} | __init__ | python | geldata/gel | edb/server/compiler/sertypes.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/sertypes.py | Apache-2.0 |
def gql_translate_pgtype_inner(schema, msg):
"""Try to replace any internal pg type name with a GraphQL type name"""
# Mapping base types
def base_type_map(name: str) -> str:
result = gql_types.EDB_TO_GQL_SCALARS_MAP.get(
str(types.base_type_name_map_r.get(name))
)
if result is None:
return name
else:
return result.name
translated = pgtype_re.sub(
lambda r: base_type_map(r.group(0)),
msg,
)
if translated != msg:
return translated
    def replace(r):
        type_id = uuidgen.UUID(r.group('id'))
        stype = schema.get_by_id(type_id, None)
        if stype:
            # Only dereference stype once we know the lookup succeeded.
            gql_name = gql_types.GQLCoreSchema.get_gql_name(
                stype.get_name(schema))
            return f'{r.group("p")} {gql_name!r}'
        else:
            return f'{r.group("p")} {r.group("v")}'
translated = enum_re.sub(replace, msg)
return translated | Try to replace any internal pg type name with a GraphQL type name | gql_translate_pgtype_inner | python | geldata/gel | edb/server/compiler/errormech.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/errormech.py | Apache-2.0 |
def eql_translate_pgtype_inner(schema, msg):
"""Try to replace any internal pg type name with an edgedb type name"""
translated = pgtype_re.sub(
lambda r: str(types.base_type_name_map_r.get(r.group(0), r.group(0))),
msg,
)
if translated != msg:
return translated
def replace(r):
type_id = uuidgen.UUID(r.group('id'))
stype = schema.get_by_id(type_id, None)
if stype:
return f'{r.group("p")} {stype.get_displayname(schema)!r}'
else:
return f'{r.group("p")} {r.group("v")}'
translated = enum_re.sub(replace, msg)
return translated | Try to replace any internal pg type name with an edgedb type name | eql_translate_pgtype_inner | python | geldata/gel | edb/server/compiler/errormech.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/errormech.py | Apache-2.0 |
def translate_pgtype(schema, msg, from_graphql=False):
"""Try to translate a message that might refer to internal pg types.
We *want* to replace internal pg type names with edgedb names, but only
when they actually refer to types.
The messages aren't really structured well enough to support this properly,
so we approximate it by only doing the replacement *before* the first colon
in the message, so if a user does `<int64>"bigint"`, and we get the message
'invalid input syntax for type bigint: "bigint"', we do the right thing.
"""
leading, *rest = msg.split(':')
if from_graphql:
leading_translated = gql_translate_pgtype_inner(schema, leading)
else:
leading_translated = eql_translate_pgtype_inner(schema, leading)
return ':'.join([leading_translated, *rest]) | Try to translate a message that might refer to internal pg types.
We *want* to replace internal pg type names with edgedb names, but only
when they actually refer to types.
The messages aren't really structured well enough to support this properly,
so we approximate it by only doing the replacement *before* the first colon
in the message, so if a user does `<int64>"bigint"`, and we get the message
'invalid input syntax for type bigint: "bigint"', we do the right thing. | translate_pgtype | python | geldata/gel | edb/server/compiler/errormech.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/errormech.py | Apache-2.0 |
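A small standalone illustration of the split-before-the-first-colon strategy described above; the type mapping is an illustrative stand-in, not the module's real base_type_name_map_r:

import re

def demo_translate(msg: str) -> str:
    type_map = {"bigint": "std::int64"}  # illustrative subset of the mapping
    # Only the text before the first colon is treated as candidate type names.
    leading, *rest = msg.split(':')
    leading = re.sub(r"\bbigint\b", lambda m: type_map[m.group(0)], leading)
    return ':'.join([leading, *rest])

print(demo_translate('invalid input syntax for type bigint: "bigint"'))
# invalid input syntax for type std::int64: "bigint"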
def _get_delta_context_args(ctx: compiler.CompileContext) -> dict[str, Any]:
"""Get the args needed for delta_and_schema_from_ddl"""
return dict(
stdmode=ctx.bootstrap_mode,
testmode=ctx.is_testmode(),
store_migration_sdl=(
compiler._get_config_val(ctx, 'store_migration_sdl')
) == 'AlwaysStore',
schema_object_ids=ctx.schema_object_ids,
compat_ver=ctx.compat_ver,
) | Get the args needed for delta_and_schema_from_ddl | _get_delta_context_args | python | geldata/gel | edb/server/compiler/ddl.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/ddl.py | Apache-2.0 |
def _process_delta(
ctx: compiler.CompileContext, delta: s_delta.DeltaRoot
) -> tuple[pg_dbops.SQLBlock, FrozenSet[str], Any]:
"""Adapt and process the delta command."""
current_tx = ctx.state.current_tx()
schema = current_tx.get_schema(ctx.compiler_state.std_schema)
pgdelta = pg_delta.CommandMeta.adapt(delta)
assert isinstance(pgdelta, pg_delta.DeltaRoot)
context = _new_delta_context(ctx)
schema = pgdelta.apply(schema, context)
current_tx.update_schema(schema)
if debug.flags.delta_pgsql_plan:
debug.header('PgSQL Delta Plan')
debug.dump(pgdelta, schema=schema)
db_cmd = any(
isinstance(c, s_db.BranchCommand) for c in pgdelta.get_subcommands()
)
if db_cmd:
block = pg_dbops.SQLBlock()
new_types: FrozenSet[str] = frozenset()
else:
block = pg_dbops.PLTopBlock()
new_types = frozenset(str(tid) for tid in pgdelta.new_types)
# Generate SQL DDL for the delta.
pgdelta.generate(block) # type: ignore
# XXX: We would prefer for there to not be trampolines ever after bootstrap
pgdelta.create_trampolines.generate(block) # type: ignore
# Generate schema storage SQL (DML into schema storage tables).
subblock = block.add_block()
compiler.compile_schema_storage_in_delta(
ctx, pgdelta, subblock, context=context
)
# Performance hack; we really want trivial migration commands
# (that only mutate the migration log) to not trigger a pg_catalog
# view refresh, since many get issued as part of MIGRATION
# REWRITEs.
all_migration_tweaks = all(
isinstance(
cmd, (s_ver.AlterSchemaVersion, s_migrations.MigrationCommand)
)
and not cmd.get_subcommands(type=s_delta.ObjectCommand)
for cmd in delta.get_subcommands()
)
if not ctx.bootstrap_mode and not all_migration_tweaks:
from edb.pgsql import metaschema
refresh = metaschema.generate_sql_information_schema_refresh(
ctx.compiler_state.backend_runtime_params.instance_params.version
)
refresh.generate(subblock)
return block, new_types, pgdelta.config_ops | Adapt and process the delta command. | _process_delta | python | geldata/gel | edb/server/compiler/ddl.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/ddl.py | Apache-2.0 |
def repair_schema(
ctx: compiler.CompileContext,
) -> Optional[tuple[bytes, s_schema.Schema, Any]]:
"""Repair inconsistencies in the schema caused by bug fixes
Works by comparing the actual current schema to the schema we get
from reloading the DDL description of the schema and then directly
applying the diff.
"""
current_tx = ctx.state.current_tx()
schema = current_tx.get_schema(ctx.compiler_state.std_schema)
empty_schema = s_schema.ChainedSchema(
ctx.compiler_state.std_schema,
s_schema.EMPTY_SCHEMA,
current_tx.get_global_schema(),
)
context_args = _get_delta_context_args(ctx)
context_args.update(dict(
testmode=True,
))
text = s_ddl.ddl_text_from_schema(schema)
reloaded_schema, _ = s_ddl.apply_ddl_script_ex(
text,
schema=empty_schema,
**context_args,
)
delta = s_ddl.delta_schemas(
schema,
reloaded_schema,
)
mismatch = bool(delta.get_subcommands())
if not mismatch:
return None
if debug.flags.delta_plan:
debug.header('Repair Delta')
debug.dump(delta)
if not delta.is_data_safe():
raise AssertionError(
'Repair script for version upgrade is not data safe'
)
# Update the schema version also
context = _new_delta_context(ctx, context_args)
ver = schema.get_global(
s_ver.SchemaVersion, '__schema_version__')
reloaded_schema = ver.set_field_value(
reloaded_schema, 'version', ver.get_version(schema))
ver_cmd = ver.init_delta_command(schema, s_delta.AlterObject)
ver_cmd.set_attribute_value('version', uuidgen.uuid1mc())
reloaded_schema = ver_cmd.apply(reloaded_schema, context)
delta.add(ver_cmd)
# Apply and adapt delta, build native delta plan, which
# will also update the schema.
block, new_types, config_ops = _process_delta(ctx, delta)
is_transactional = block.is_transactional()
assert not new_types
assert is_transactional
sql = block.to_string().encode('utf-8')
if debug.flags.delta_execute:
debug.header('Repair Delta Script')
debug.dump_code(sql, lexer='sql')
return sql, reloaded_schema, config_ops | Repair inconsistencies in the schema caused by bug fixes
Works by comparing the actual current schema to the schema we get
from reloading the DDL description of the schema and then directly
applying the diff. | repair_schema | python | geldata/gel | edb/server/compiler/ddl.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/ddl.py | Apache-2.0 |
def sync_to_savepoint(self, spid: int) -> None:
"""Synchronize the compiler state with the current DB state."""
if not self.can_sync_to_savepoint(spid):
raise RuntimeError(f"failed to lookup savepoint with id={spid}")
sp = self._savepoints_log[spid]
self._current_tx = sp.tx
self._current_tx._current = sp
self._current_tx._id = spid
# Cleanup all savepoints declared after the one we rolled back to
# in the transaction we have now set as current.
for id in tuple(self._current_tx._savepoints):
if id > spid:
self._current_tx._savepoints.pop(id)
# Cleanup all savepoints declared after the one we rolled back to
# in the global savepoints log.
for id in tuple(self._savepoints_log):
if id > spid:
self._savepoints_log.pop(id) | Synchronize the compiler state with the current DB state. | sync_to_savepoint | python | geldata/gel | edb/server/compiler/dbstate.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/dbstate.py | Apache-2.0 |
def _assert_not_in_migration_block(self, ql: qlast.Base) -> None:
"""Check that a START MIGRATION block is *not* active."""
current_tx = self.state.current_tx()
mstate = current_tx.get_migration_state()
if mstate is not None:
stmt = status.get_status(ql).decode()
raise errors.QueryError(
f'cannot execute {stmt} in a migration block',
span=ql.span,
) | Check that a START MIGRATION block is *not* active. | _assert_not_in_migration_block | python | geldata/gel | edb/server/compiler/compiler.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/compiler.py | Apache-2.0 |
def _assert_in_migration_block(
self, ql: qlast.Base
) -> dbstate.MigrationState:
"""Check that a START MIGRATION block *is* active."""
current_tx = self.state.current_tx()
mstate = current_tx.get_migration_state()
if mstate is None:
stmt = status.get_status(ql).decode()
raise errors.QueryError(
f'cannot execute {stmt} outside of a migration block',
span=ql.span,
)
return mstate | Check that a START MIGRATION block *is* active. | _assert_in_migration_block | python | geldata/gel | edb/server/compiler/compiler.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/compiler.py | Apache-2.0 |
def _assert_not_in_migration_rewrite_block(self, ql: qlast.Base) -> None:
"""Check that a START MIGRATION REWRITE block is *not* active."""
current_tx = self.state.current_tx()
mstate = current_tx.get_migration_rewrite_state()
if mstate is not None:
stmt = status.get_status(ql).decode()
raise errors.QueryError(
f'cannot execute {stmt} in a migration rewrite block',
span=ql.span,
) | Check that a START MIGRATION REWRITE block is *not* active. | _assert_not_in_migration_rewrite_block | python | geldata/gel | edb/server/compiler/compiler.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/compiler.py | Apache-2.0 |
def _assert_in_migration_rewrite_block(
self, ql: qlast.Base
) -> dbstate.MigrationRewriteState:
"""Check that a START MIGRATION REWRITE block *is* active."""
current_tx = self.state.current_tx()
mstate = current_tx.get_migration_rewrite_state()
if mstate is None:
stmt = status.get_status(ql).decode()
raise errors.QueryError(
f'cannot execute {stmt} outside of a migration rewrite block',
span=ql.span,
)
return mstate | Check that a START MIGRATION REWRITE block *is* active. | _assert_in_migration_rewrite_block | python | geldata/gel | edb/server/compiler/compiler.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/compiler.py | Apache-2.0 |
def new_compiler(
std_schema: s_schema.Schema,
reflection_schema: s_schema.Schema,
schema_class_layout: s_refl.SchemaClassLayout,
*,
backend_runtime_params: Optional[pg_params.BackendRuntimeParams] = None,
local_intro_query: Optional[str] = None,
global_intro_query: Optional[str] = None,
config_spec: Optional[config.Spec] = None,
) -> Compiler:
"""Create and return a compiler instance."""
# XXX: THIS IS NOT GREAT
assert isinstance(std_schema, s_schema.FlatSchema)
assert isinstance(reflection_schema, s_schema.FlatSchema)
if not backend_runtime_params:
backend_runtime_params = pg_params.get_default_runtime_params()
if not config_spec:
config_spec = config.load_spec_from_schema(std_schema)
return Compiler(CompilerState(
std_schema=std_schema,
refl_schema=reflection_schema,
schema_class_layout=schema_class_layout,
backend_runtime_params=backend_runtime_params,
config_spec=config_spec,
local_intro_query=local_intro_query,
global_intro_query=global_intro_query,
)) | Create and return a compiler instance. | new_compiler | python | geldata/gel | edb/server/compiler/compiler.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/compiler.py | Apache-2.0 |
def new_compiler_context(
*,
compiler_state: CompilerState,
user_schema: s_schema.Schema,
global_schema: s_schema.Schema=s_schema.EMPTY_SCHEMA,
modaliases: Optional[Mapping[Optional[str], str]] = None,
expected_cardinality_one: bool = False,
json_parameters: bool = False,
schema_reflection_mode: bool = False,
output_format: enums.OutputFormat = enums.OutputFormat.BINARY,
bootstrap_mode: bool = False,
internal_schema_mode: bool = False,
force_testmode: bool = False,
protocol_version: defines.ProtocolVersion = defines.CURRENT_PROTOCOL,
backend_runtime_params: Optional[pg_params.BackendRuntimeParams] = None,
log_ddl_as_migrations: bool = True,
) -> CompileContext:
"""Create and return an ad-hoc compiler context."""
state = dbstate.CompilerConnectionState(
user_schema=user_schema,
global_schema=global_schema,
modaliases=immutables.Map(modaliases) if modaliases else EMPTY_MAP,
session_config=EMPTY_MAP,
database_config=EMPTY_MAP,
system_config=EMPTY_MAP,
cached_reflection=EMPTY_MAP,
)
ctx = CompileContext(
compiler_state=compiler_state,
state=state,
output_format=output_format,
expected_cardinality_one=expected_cardinality_one,
json_parameters=json_parameters,
schema_reflection_mode=schema_reflection_mode,
bootstrap_mode=bootstrap_mode,
internal_schema_mode=internal_schema_mode,
force_testmode=force_testmode,
protocol_version=protocol_version,
backend_runtime_params=(
backend_runtime_params or pg_params.get_default_runtime_params()
),
log_ddl_as_migrations=log_ddl_as_migrations,
)
return ctx | Create and return an ad-hoc compiler context. | new_compiler_context | python | geldata/gel | edb/server/compiler/compiler.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/compiler.py | Apache-2.0 |
async def get_patch_count(backend_conn: metaschema.PGConnection) -> int:
"""Get the number of applied patches."""
num_patches = await instdata.get_instdata(
backend_conn, 'num_patches', 'json')
res: int = json.loads(num_patches) if num_patches else 0
return res | Get the number of applied patches. | get_patch_count | python | geldata/gel | edb/server/compiler/compiler.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/compiler.py | Apache-2.0 |
def _reprocess_restore_config(
self,
stmts: list[qlast.Base],
) -> list[qlast.Base]:
'''Do any rewrites to the restore script needed.
This is intended to patch over certain backwards incompatible
changes to config. We try not to do that too much, but when we
do, dumps still need to work.
'''
new_stmts = []
smtp_config = {}
for stmt in stmts:
# ext::auth::SMTPConfig got removed and moved into a cfg
# object, so intercept those and rewrite them.
if (
isinstance(stmt, qlast.ConfigSet)
and stmt.name.module == 'ext::auth::SMTPConfig'
):
smtp_config[stmt.name.name] = stmt.expr
else:
new_stmts.append(stmt)
if smtp_config:
# Do the rewrite of SMTPConfig
smtp_config['name'] = qlast.Constant.string('_default')
new_stmts.append(
qlast.ConfigInsert(
scope=qltypes.ConfigScope.DATABASE,
name=qlast.ObjectRef(
module='cfg', name='SMTPProviderConfig'
),
shape=[
qlast.ShapeElement(
expr=qlast.Path(steps=[qlast.Ptr(name=name)]),
compexpr=expr,
)
for name, expr in smtp_config.items()
],
)
)
new_stmts.append(
qlast.ConfigSet(
scope=qltypes.ConfigScope.DATABASE,
name=qlast.ObjectRef(
name='current_email_provider_name'
),
expr=qlast.Constant.string('_default'),
)
)
return new_stmts | Do any rewrites to the restore script needed.
This is intended to patch over certain backwards incompatible
changes to config. We try not to do that too much, but when we
do, dumps still need to work. | _reprocess_restore_config | python | geldata/gel | edb/server/compiler/compiler.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/compiler.py | Apache-2.0 |
def _inject_config_cache_clear(sql_ast: pgast.Base) -> pgast.Base:
"""Inject a call to clear the config cache into a config op.
The trickiness here is that we can't just do the delete in a
statement before the config op, since RESET config ops query the
views and so might populate the cache, and we can't do it in a
statement directly after (unless we rework the server), since then
the query won't return anything.
So we instead fiddle around with the query to inject a call.
"""
assert isinstance(sql_ast, pgast.Query)
ctes = sql_ast.ctes or []
sql_ast.ctes = None
ctes.append(pgast.CommonTableExpr(
name="_conv_rel",
query=sql_ast,
))
clear_qry = pgast.SelectStmt(
target_list=[
pgast.ResTarget(
name="_dummy",
val=pgast.FuncCall(
name=('edgedb', '_clear_sys_config_cache'),
args=[],
),
),
],
)
ctes.append(pgast.CommonTableExpr(
name="_clear_cache",
query=clear_qry,
materialized=True,
))
force_qry = pgast.UpdateStmt(
targets=[pgast.UpdateTarget(
name='flag', val=pgast.BooleanConstant(val=True)
)],
relation=pgast.RelRangeVar(relation=pgast.Relation(
name='_dml_dummy')),
where_clause=pgast.Expr(
name="=",
lexpr=pgast.ColumnRef(name=["id"]),
rexpr=pgast.SelectStmt(
from_clause=[pgast.RelRangeVar(relation=ctes[-1])],
target_list=[
pgast.ResTarget(
val=pgast.FuncCall(
name=('count',), args=[pgast.Star()]),
)
],
),
)
)
if (
not isinstance(sql_ast, pgast.DMLQuery)
or sql_ast.returning_list
):
ctes.append(pgast.CommonTableExpr(
name="_force_clear",
query=force_qry,
materialized=True,
))
sql_ast = pgast.SelectStmt(
target_list=[
pgast.ResTarget(val=pgast.ColumnRef(
name=["_conv_rel", pgast.Star()])),
],
ctes=ctes,
from_clause=[
pgast.RelRangeVar(relation=ctes[-3]),
],
)
else:
sql_ast = force_qry
force_qry.ctes = ctes
return sql_ast | Inject a call to clear the config cache into a config op.
The trickiness here is that we can't just do the delete in a
statement before the config op, since RESET config ops query the
views and so might populate the cache, and we can't do it in a
statement directly after (unless we rework the server), since then
the query won't return anything.
So we instead fiddle around with the query to inject a call. | _inject_config_cache_clear | python | geldata/gel | edb/server/compiler/compiler.py | https://github.com/geldata/gel/blob/master/edb/server/compiler/compiler.py | Apache-2.0 |
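Roughly, the rewrite above produces a statement of the following shape (a hand-written illustrative sketch of the resulting SQL, not the exact output of the pgast builder; <original config query> is a placeholder):

illustrative_rewritten_sql = """
WITH
  _conv_rel AS (<original config query>),
  _clear_cache AS MATERIALIZED (
      SELECT edgedb._clear_sys_config_cache() AS _dummy
  ),
  _force_clear AS MATERIALIZED (
      UPDATE _dml_dummy SET flag = true
      WHERE id = (SELECT count(*) FROM _clear_cache)
  )
SELECT _conv_rel.* FROM _conv_rel
"""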
def get_constraint_origins(
self, schema: s_schema.Schema
) -> List[Constraint]:
"""
Origins of a constraint are the constraints that should actually perform
validation on their subjects.
Example:
If we have `Baz <: Bar <: Foo` and `Foo` declares some exclusive
constraint, this constraint will be inherited by `Bar` and then `Baz`.
But this inherited constraint on `Baz` should not validate exclusivity
of the property within just `Baz`, but within all `Foo` objects.
        That's why the origin of the constraint on `Baz` and on `Bar` is the
constraint on `Foo`.
        Determining which constraint is the origin is non-trivial because of:
- multiple inheritance
(constraint might originate from multiple unrelated ancestors)
- delegated exclusive constraints, which are defined on a parent, but
should be exclusive within each of the children.
We validate constraints using triggers, and this function helps drive
their generation.
"""
# collect origins from all ancestors
origins: Set[Constraint] = set()
for base in self.get_bases(schema).objects(schema):
# abstract bases are not an origin
if base.is_non_concrete(schema):
continue
# delegated bases are not an origin
if base.get_delegated(schema):
continue
# recurse
origins.update(base.get_constraint_origins(schema))
# if no ancestors have an origin, I am the origin
return [self] if not origins else list(origins) | Origins of a constraint are the constraints that should actually perform
validation on their subjects.
Example:
If we have `Baz <: Bar <: Foo` and `Foo` declares some exclusive
constraint, this constraint will be inherited by `Bar` and then `Baz`.
But this inherited constraint on `Baz` should not validate exclusivity
of the property within just `Baz`, but within all `Foo` objects.
That's why the origin of the constraint on `Baz` and on `Bar` is the
constraint on `Foo`.
Determining which constraint is the origin is non-trivial because of:
- multiple inheritance
(constraint might originate from multiple unrelated ancestors)
- delegated exclusive constraints, which are defined on a parent, but
should be exclusive within each of the children.
We validate constraints using triggers, and this function helps drive
their generation. | get_constraint_origins | python | geldata/gel | edb/schema/constraints.py | https://github.com/geldata/gel/blob/master/edb/schema/constraints.py | Apache-2.0 |
def interpolate_error_text(text: str, args: Dict[str, str]) -> str:
"""
Converts message template "hello {world}! {nope}{{world}}" and
arguments {"world": "Alice", "hell": "Eve"}
into "hello Alice! {world}".
"""
regex = r"\{\{.*\}\}|\{([A-Za-z_0-9]+)\}"
formatted = ""
last_start = 0
for match in re.finditer(regex, text, flags=0):
formatted += text[last_start : match.start()]
last_start = match.end()
if match[1] is None:
# escape double curly braces
formatted += match[0][1:-1]
elif match[1] in args:
# lookup an arg
formatted += args[match[1]]
else:
# arg not found
formatted += match[0]
formatted += text[last_start:]
return formatted | Converts message template "hello {world}! {nope}{{world}}" and
arguments {"world": "Alice", "hell": "Eve"}
into "hello Alice! {world}". | interpolate_error_text | python | geldata/gel | edb/schema/constraints.py | https://github.com/geldata/gel/blob/master/edb/schema/constraints.py | Apache-2.0 |
def sort_by_cross_refs_key(
schema: s_schema.Schema,
objs: Iterable[T], *,
key: Callable[[T], so.Object],
) -> Tuple[T, ...]:
"""Sort an iterable of objects according to cross-references between them.
    Return a topological ordering of a graph of objects joined by references.
It is assumed that the graph has no cycles.
"""
graph = {}
# We want to report longer cycles before trivial self references,
# since cycles with (for example) computed properties will *also*
# lead to self references (because the computed property gets
# inlined, essentially).
self_ref = None
for entry in objs:
x = key(entry)
referrers = schema.get_referrers(x)
if x in referrers:
self_ref = x
graph[x] = topological.DepGraphEntry(
item=entry,
deps={ref for ref in referrers
if not x.is_parent_ref(schema, ref) and x != ref},
extra=False,
)
res = topological.sort(graph, allow_unresolved=True)
if self_ref:
raise topological.CycleError(
f"{self_ref!r} refers to itself", item=self_ref)
return res | Sort an iterable of objects according to cross-references between them.
Return a topological ordering of a graph of objects joined by references.
It is assumed that the graph has no cycles. | sort_by_cross_refs_key | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_verb(self) -> str:
"""Return a verb representing this command in infinitive form."""
raise NotImplementedError | Return a verb representing this command in infinitive form. | get_verb | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_friendly_description(
self,
*,
parent_op: Optional[Command] = None,
schema: Optional[s_schema.Schema] = None,
object: Any = None,
object_desc: Optional[str] = None,
) -> str:
"""Return a friendly description of this command in imperative mood.
The result is used in error messages and other user-facing renderings
of the command.
"""
raise NotImplementedError | Return a friendly description of this command in imperative mood.
The result is used in error messages and other user-facing renderings
of the command. | get_friendly_description | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def record_diff_annotations(
self,
*,
schema: s_schema.Schema,
orig_schema: Optional[s_schema.Schema],
context: so.ComparisonContext,
orig_object: Optional[so.Object],
object: Optional[so.Object],
) -> None:
"""Record extra information on a delta obtained by diffing schemas.
        This provides an opportunity for a delta command to annotate itself
        in schema diff scenarios (i.e. migrations).
Args:
schema:
Final schema of a migration.
orig_schema:
Original schema of a migration.
context:
Schema comparison context.
"""
pass | Record extra information on a delta obtained by diffing schemas.
This provides an opportunity for a delta command to annotate itself
in schema diff scenarios (i.e. migrations).
Args:
schema:
Final schema of a migration.
orig_schema:
Original schema of a migration.
context:
Schema comparison context. | record_diff_annotations | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_local_attribute_value(
self,
attr_name: str,
) -> Any:
"""Return the new value of field, if not inherited."""
op = self._get_attribute_set_cmd(attr_name)
if op is not None and not op.new_inherited:
return op.new_value
else:
return None | Return the new value of field, if not inherited. | get_local_attribute_value | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def formatfields(
self,
formatter: str = 'str',
) -> Iterator[Tuple[str, str]]:
"""Return an iterator over fields formatted using `formatter`."""
for name, field in self.__class__._fields.items():
value = getattr(self, name)
default = field.default
formatter_obj = field.formatters.get(formatter)
if formatter_obj and value != default:
yield (name, formatter_obj(value)) | Return an iterator over fields formatted using `formatter`. | formatfields | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def in_deletion(self, offset: int = 0) -> bool:
"""Return True if any object is being deleted in this context.
:param offset:
The offset in the context stack to start looking at.
:returns:
True if any object is being deleted in this context starting
from *offset* in the stack.
"""
return any(isinstance(ctx.op, DeleteObject)
for ctx in self.stack[:-offset if offset else None]) | Return True if any object is being deleted in this context.
:param offset:
The offset in the context stack to start looking at.
:returns:
True if any object is being deleted in this context starting
from *offset* in the stack. | in_deletion | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def is_deleting(self, obj: so.Object) -> bool:
"""Return True if *obj* is being deleted in this context.
:param obj:
The object in question.
:returns:
True if *obj* is being deleted in this context.
"""
return any(isinstance(ctx.op, DeleteObject)
and ctx.op.scls == obj for ctx in self.stack) | Return True if *obj* is being deleted in this context.
:param obj:
The object in question.
:returns:
True if *obj* is being deleted in this context. | is_deleting | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def is_creating(self, obj: so.Object) -> bool:
"""Return True if *obj* is being created in this context.
:param obj:
The object in question.
:returns:
True if *obj* is being created in this context.
"""
return any(isinstance(ctx.op, CreateObject)
and getattr(ctx.op, 'scls', None) == obj
for ctx in self.stack) | Return True if *obj* is being created in this context.
:param obj:
The object in question.
:returns:
True if *obj* is being created in this context. | is_creating | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def is_altering(self, obj: so.Object) -> bool:
"""Return True if *obj* is being altered in this context.
:param obj:
The object in question.
:returns:
True if *obj* is being altered in this context.
"""
return any(isinstance(ctx.op, AlterObject)
and getattr(ctx.op, 'scls', None) == obj
for ctx in self.stack) | Return True if *obj* is being altered in this context.
:param obj:
The object in question.
:returns:
True if *obj* is being altered in this context. | is_altering | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_friendly_description(
self,
*,
parent_op: Optional[Command] = None,
schema: Optional[s_schema.Schema] = None,
object: Any = None,
object_desc: Optional[str] = None,
) -> str:
"""Return a friendly description of this command in imperative mood.
The result is used in error messages and other user-facing renderings
of the command.
"""
object_desc = self.get_friendly_object_name_for_description(
parent_op=parent_op,
schema=schema,
object=object,
object_desc=object_desc,
)
return f'{self.get_verb()} {object_desc}' | Return a friendly description of this command in imperative mood.
The result is used in error messages and other user-facing renderings
of the command. | get_friendly_description | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_user_prompt(
self,
*,
parent_op: Optional[Command] = None,
) -> Tuple[CommandKey, str]:
"""Return a human-friendly prompt describing this operation."""
# The prompt is determined by the *innermost* subcommand as
# long as all its parents have exactly one child. The tree
# traversal stops on fragments and CreateObject commands,
# since there is no point to prompt about the creation of
# object innards.
if (
not isinstance(self, AlterObjectFragment)
and (
not isinstance(self, CreateObject)
and (
self.orig_cmd_type is None
or not issubclass(
self.orig_cmd_type, CreateObject
)
)
)
):
from . import referencing as s_referencing
subcommands = self.get_subcommands(
type=ObjectCommand,
exclude=(AlterObjectProperty, s_referencing.AlterOwned),
)
if len(subcommands) == 1:
subcommand = subcommands[0]
if isinstance(subcommand, AlterObjectFragment):
return subcommand.get_user_prompt(parent_op=parent_op)
else:
return subcommand.get_user_prompt(parent_op=self)
desc = self.get_friendly_description(parent_op=parent_op)
prompt_text = f'did you {desc}?'
prompt_id = get_object_command_key(self)
assert prompt_id is not None
return prompt_id, prompt_text | Return a human-friendly prompt describing this operation. | get_user_prompt | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def canonicalize_attributes(
self,
schema: s_schema.Schema,
context: CommandContext,
) -> s_schema.Schema:
"""Resolve, canonicalize and amend field mutations in this command.
This is called just before the object described by this command
is created or updated but after all prerequisite commands have
been applied, so it is safe to resolve object shells and do
other schema inquiries here.
"""
return schema | Resolve, canonicalize and amend field mutations in this command.
This is called just before the object described by this command
is created or updated but after all prerequisite commands have
been applied, so it is safe to resolve object shells and do
other schema inquiries here. | canonicalize_attributes | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_specified_attribute_value(
self,
field: str,
schema: s_schema.Schema,
context: CommandContext,
) -> Optional[Any]:
"""Fetch the specified (not computed) value of a field.
If the command is an alter, it will fall back to the value in
the schema.
Return None if there is no specified value or if the specified
value is being reset.
"""
spec = self.get_attribute_value(field)
is_alter = (
isinstance(self, AlterObject)
or (
isinstance(self, AlterObjectFragment)
and isinstance(self.get_parent_op(context), AlterObject)
)
)
if (
is_alter
and spec is None
and not self.has_attribute_value(field)
and field not in self.scls.get_computed_fields(schema)
):
spec = self.scls.get_explicit_field_value(
schema, field, default=None)
return spec | Fetch the specified (not computed) value of a field.
If the command is an alter, it will fall back to the value in
the schema.
Return None if there is no specified value or if the specified
value is being reset. | get_specified_attribute_value | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_dummy_expr_field_value(
self,
schema: s_schema.Schema,
context: CommandContext,
field: so.Field[Any],
value: Any,
) -> Optional[s_expr.Expression]:
"""Return a dummy value for an expression stored in *field*.
Schema class command implementations should overload this
to specify a dummy value for an expression field, which is necessary
when doing dependency type and name propagation switcheroo in
_propagate_if_expr_refs() / _finalize_affected_refs().
"""
raise NotImplementedError | Return a dummy value for an expression stored in *field*.
Schema class command implementations should overload this
to specify a dummy value for an expression field, which is necessary
when doing dependency type and name propagation switcheroo in
_propagate_if_expr_refs() / _finalize_affected_refs(). | get_dummy_expr_field_value | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def ast_ignore_ownership(self) -> bool:
"""Whether to force generating an AST even though it isn't owned"""
return False | Whether to force generating an AST even though it isn't owned | ast_ignore_ownership | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def ast_ignore_field_ownership(self, field: str) -> bool:
"""Whether to force generating an AST even though it isn't owned"""
return False | Whether to force generating an AST even though it isn't owned | ast_ignore_field_ownership | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def canonicalize_alter_from_external_ref(
self,
schema: s_schema.Schema,
context: CommandContext,
) -> None:
"""Canonicalize an ALTER command triggered by a modification of a
an object referred to by an expression in this object."""
        pass | Canonicalize an ALTER command triggered by a modification of
an object referred to by an expression in this object. | canonicalize_alter_from_external_ref | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_special_field_alter_handler(
field: str,
schema_cls: Type[so.Object],
) -> Optional[Type[AlterSpecialObjectField[so.Object]]]:
"""Return a custom handler for the field value transition, if any.
Returns a subclass of AlterSpecialObjectField, when in the context
of an AlterObject operation, and a special handler has been declared.
"""
field_handlers = special_field_alter_handlers.get(field)
if field_handlers is None:
return None
return field_handlers.get(schema_cls) | Return a custom handler for the field value transition, if any.
Returns a subclass of AlterSpecialObjectField, when in the context
of an AlterObject operation, and a special handler has been declared. | get_special_field_alter_handler | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_special_field_create_handler(
field: str,
schema_cls: Type[so.Object],
) -> Optional[Type[AlterSpecialObjectField[so.Object]]]:
"""Return a custom handler for the field value transition, if any.
Returns a subclass of AlterSpecialObjectField, when in the context
of an CreateObject operation, and a special handler has been declared.
For now this is just a hacky special case:
the 'required' field of Pointers. If that changes, we should generalize
the mechanism.
"""
if field != 'required':
return None
return get_special_field_alter_handler(field, schema_cls) | Return a custom handler for the field value transition, if any.
Returns a subclass of AlterSpecialObjectField, when in the context
of an CreateObject operation, and a special handler has been declared.
For now this is just a hacky special case:
the 'required' field of Pointers. If that changes, we should generalize
the mechanism. | get_special_field_create_handler | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def get_special_field_alter_handler_for_context(
field: str,
context: CommandContext,
) -> Optional[Type[AlterSpecialObjectField[so.Object]]]:
"""Return a custom handler for the field value transition, if any.
Returns a subclass of AlterSpecialObjectField, when in the context
of an AlterObject operation, and a special handler has been declared.
"""
this_op = context.current().op
if (
isinstance(this_op, AlterObjectOrFragment)
and not isinstance(this_op, AlterSpecialObjectField)
):
mcls = this_op.get_schema_metaclass()
return get_special_field_alter_handler(field, mcls)
elif isinstance(this_op, CreateObject):
mcls = this_op.get_schema_metaclass()
return get_special_field_create_handler(field, mcls)
else:
return None | Return a custom handler for the field value transition, if any.
Returns a subclass of AlterSpecialObjectField, when in the context
of an AlterObject operation, and a special handler has been declared. | get_special_field_alter_handler_for_context | python | geldata/gel | edb/schema/delta.py | https://github.com/geldata/gel/blob/master/edb/schema/delta.py | Apache-2.0 |
def is_base_type(
self,
schema: s_schema.Schema,
) -> bool:
"""Returns true of the type has only abstract bases"""
bases: Sequence[s_types.Type] = self.get_bases(schema).objects(schema)
        return all(b.get_abstract(schema) for b in bases) | Returns true if the type has only abstract bases | is_base_type | python | geldata/gel | edb/schema/scalars.py | https://github.com/geldata/gel/blob/master/edb/schema/scalars.py | Apache-2.0 |
def castable_to(
self,
other: s_types.Type,
schema: s_schema.Schema,
) -> bool:
"""Determine if any cast exists between self and *other*."""
if not isinstance(other, ScalarType):
return False
if self.is_polymorphic(schema) or other.is_polymorphic(schema):
return False
left = self.get_topmost_concrete_base(schema)
right = other.get_topmost_concrete_base(schema)
assert isinstance(left, s_types.Type)
assert isinstance(right, s_types.Type)
return s_casts.is_castable(schema, left, right) | Determine if any cast exists between self and *other*. | castable_to | python | geldata/gel | edb/schema/scalars.py | https://github.com/geldata/gel/blob/master/edb/schema/scalars.py | Apache-2.0 |
def default_field_merge(
target: InheritingObject,
sources: Iterable[Object],
field_name: str,
*,
ignore_local: bool = False,
schema: s_schema.Schema,
) -> Any:
"""The default `MergeFunction`."""
if not ignore_local:
ours = target.get_explicit_local_field_value(schema, field_name, None)
if ours is not None:
return ours
for source in sources:
theirs = source.get_explicit_field_value(schema, field_name, None)
if theirs is not None:
return theirs
return None | The default `MergeFunction`. | default_field_merge | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
def __init__(
self,
type_: Type[T],
*,
type_is_generic_self: bool = False,
coerce: bool = False,
compcoef: Optional[float] = None,
inheritable: bool = True,
simpledelta: bool = True,
merge_fn: MergeFunction = default_field_merge,
ephemeral: bool = False,
weak_ref: bool = False,
allow_ddl_set: bool = False,
describe_visibility: DescribeVisibilityPolicy = (
DescribeVisibilityPolicy.SHOW_IF_EXPLICIT),
ddl_identity: bool = False,
aux_cmd_data: bool = False,
special_ddl_syntax: bool = False,
reflection_method: ReflectionMethod = ReflectionMethod.REGULAR,
reflection_proxy: Optional[Tuple[str, str]] = None,
name: Optional[str] = None,
reflection_name: Optional[str] = None,
patch_level: int = -1,
**kwargs: Any,
) -> None:
"""Schema item core attribute definition.
"""
if not isinstance(type_, type):
raise ValueError(f'{type_!r} is not a type')
self.type = type_
self.type_is_generic_self = type_is_generic_self
self.coerce = coerce
self.allow_ddl_set = allow_ddl_set
self.ddl_identity = ddl_identity
self.aux_cmd_data = aux_cmd_data
self.special_ddl_syntax = special_ddl_syntax
self.describe_visibility = describe_visibility
self.compcoef = compcoef
self.inheritable = inheritable
self.simpledelta = simpledelta
self.weak_ref = weak_ref
self.reflection_method = reflection_method
self.reflection_proxy = reflection_proxy
self.is_reducible = issubclass(type_, s_abc.Reducible)
self.patch_level = patch_level
if name is not None:
self.name = name
if reflection_name is not None:
self.sname = reflection_name
if (
merge_fn is default_field_merge
and callable(
type_merge_fn := getattr(self.type, 'merge_values', None)
)
):
self.merge_fn = type_merge_fn
else:
self.merge_fn = merge_fn
self.ephemeral = ephemeral | Schema item core attribute definition. | __init__ | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
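One detail of the constructor worth isolating: when no explicit merge_fn is passed, it checks whether the field's value type exposes a callable merge_values and prefers that over the default. A reduced, hypothetical sketch of that dispatch follows; MiniField, MergeAwareSet, and sentinel_default_merge are invented names, not the real classes.

# Sketch of the "prefer the type's own merge_values" dispatch.
from typing import Any, Callable, Iterable


def sentinel_default_merge(values: Iterable[Any]) -> Any:
    # Stand-in for default_field_merge: the first non-None value wins.
    return next((v for v in values if v is not None), None)


class MergeAwareSet(frozenset):
    @classmethod
    def merge_values(cls, values: Iterable[Any]) -> frozenset:
        # A value type that knows how to merge: union all inherited sets.
        return frozenset().union(*(v for v in values if v is not None))


class MiniField:
    def __init__(
        self,
        type_: type,
        merge_fn: Callable[..., Any] = sentinel_default_merge,
    ) -> None:
        self.type = type_
        type_merge_fn = getattr(type_, 'merge_values', None)
        if merge_fn is sentinel_default_merge and callable(type_merge_fn):
            # The value type opts in to custom merging.
            self.merge_fn = type_merge_fn
        else:
            self.merge_fn = merge_fn


plain = MiniField(str)
aware = MiniField(MergeAwareSet)
assert plain.merge_fn is sentinel_default_merge
assert aware.merge_fn == MergeAwareSet.merge_values
assert aware.merge_fn([frozenset({1}), None, frozenset({2})]) == {1, 2}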
def is_abstract(cls) -> bool:
"""Return True if this type does NOT represent a concrete schema class.
"""
return cls.get_ql_class() is None | Return True if this type does NOT represent a concrete schema class. | is_abstract | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
def is_parent_ref(
self,
schema: s_schema.Schema,
reference: Object,
) -> bool:
"""Return True if *reference* is a structural ancestor of self."""
return False | Return True if *reference* is a structural ancestor of self. | is_parent_ref | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
def compare_values(
cls: Type[Object_T],
ours: Optional[Object_T],
theirs: Optional[Object_T],
*,
our_schema: s_schema.Schema,
their_schema: s_schema.Schema,
context: ComparisonContext,
compcoef: float,
) -> float:
"""Compare two values and return a coefficient of similarity.
This is a common callback that is used when we do schema comparisons.
*ours* and *theirs* are instances of this class, and *our_schema* and
*their_schema* are the corresponding schemas in which the values are
defined. *compcoef* is whatever was specified for the field. The
method returns a coefficient of similarity of the values, from ``0``
to ``1``.
"""
similarity = 1.0
if ours is not None and theirs is not None:
if type(ours) is not type(theirs):
similarity /= 1.4
else:
our_name = context.get_obj_name(our_schema, ours)
their_name = theirs.get_name(their_schema)
if our_name != their_name:
similarity /= 1.2
else:
# If the new and old versions share a reference to
# an object that is being deleted, then we must
# delete this object as well.
if (type(ours), our_name) in context.deletions:
return 0.0
elif ours is not None or theirs is not None:
# one is None but not both
similarity /= 1.2
if similarity < 1.0:
return compcoef
else:
return 1.0 | Compare two values and return a coefficient of similarity.
This is a common callback that is used when we do schema comparisons.
*ours* and *theirs* are instances of this class, and *our_schema* and
*their_schema* are the corresponding schemas in which the values are
defined. *compcoef* is whatever was specified for the field. The
method returns a coefficient of similarity of the values, from ``0``
to ``1``. | compare_values | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
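With the schema plumbing stripped away, the coefficient computation has a simple structure: similarity starts at 1.0, is damped for every observed difference, and any damping at all collapses the result to the field's compcoef. The toy restatement below is illustrative only; it substitutes plain equality for the name/deletion checks of the real method.

# Toy restatement of the compare_values decision structure.
from typing import Any, Optional


def toy_compare_values(
    ours: Optional[Any],
    theirs: Optional[Any],
    *,
    compcoef: float,
) -> float:
    similarity = 1.0
    if ours is not None and theirs is not None:
        if type(ours) is not type(theirs):
            similarity /= 1.4          # different classes: larger penalty
        elif ours != theirs:
            similarity /= 1.2          # same class, different value/name
    elif ours is not None or theirs is not None:
        similarity /= 1.2              # present on one side only
    # Any difference at all degrades the score to the field's compcoef.
    return 1.0 if similarity == 1.0 else compcoef


assert toy_compare_values('a', 'a', compcoef=0.9) == 1.0
assert toy_compare_values('a', 'b', compcoef=0.9) == 0.9
assert toy_compare_values('a', None, compcoef=0.9) == 0.9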
def init_parent_delta_branch(
self: Object_T,
schema: s_schema.Schema,
context: sd.CommandContext,
*,
referrer: Optional[Object] = None,
) -> Tuple[sd.CommandGroup, sd.Command, sd.ContextStack]:
"""Prepare a parent portion of a command tree for this object.
This returns a tuple containing:
- the root (as a ``CommandGroup``) of a nested ``AlterObject`` tree
with nodes for each enclosing referrer object;
- direct reference to the innermost command in the above tree
(may be root if there are no referring objects);
- a ``ContextStack`` instance representing the nested CommandContext
corresponding to the returned command tree.
"""
from . import delta as sd
root = sd.CommandGroup()
return root, root, sd.ContextStack(()) | Prepare a parent portion of a command tree for this object.
This returns a tuple containing:
- the root (as a ``CommandGroup``) of a nested ``AlterObject`` tree
with nodes for each enclosing referrer object;
- direct reference to the innermost command in the above tree
(may be root if there are no referring objects);
- a ``ContextStack`` instance representing the nested CommandContext
corresponding to the returned command tree. | init_parent_delta_branch | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
def init_delta_branch(
self: Object_T,
schema: s_schema.Schema,
context: sd.CommandContext,
cmdtype: Type[sd.ObjectCommand_T],
*,
classname: Optional[sn.Name] = None,
referrer: Optional[Object] = None,
possible_parent: Optional[sd.ObjectCommand[Object]] = None,
**kwargs: Any,
) -> Tuple[sd.Command, sd.ObjectCommand_T, sd.ContextStack]:
"""Make a command subtree for this object.
This returns a tuple containing:
- the root (as a ``CommandGroup``) of a nested ``AlterObject`` tree
with nodes for each enclosing referrer object and an instance of
*cmdtype* as the innermost command;
- direct reference to the innermost command in the above tree;
- a ``ContextStack`` instance representing the nested CommandContext
corresponding to the returned command tree.
"""
root_cmd: sd.Command
root_cmd, parent_cmd, ctx_stack = self.init_parent_delta_branch(
schema=schema,
context=context,
referrer=referrer,
)
self_cmd = self.init_delta_command(
schema,
cmdtype=cmdtype,
classname=classname,
**kwargs,
)
from . import delta as sd
# possible_parent allows the caller to tell us what *they* are,
# so we can reuse that Alter if we can. The big advantage here is
# that it saves needing to do validate_object on the intermediate
# objects.
if (
isinstance(possible_parent, sd.AlterObject)
and isinstance(parent_cmd, sd.ObjectCommand)
and possible_parent.classname == parent_cmd.classname
):
root_cmd = parent_cmd = self_cmd
else:
parent_cmd.add(self_cmd)
ctx_stack.push(self_cmd.new_context(schema, context, self))
return root_cmd, self_cmd, ctx_stack | Make a command subtree for this object.
This returns a tuple containing:
- the root (as a ``CommandGroup``) of a nested ``AlterObject`` tree
with nodes for each enclosing referrer object and an instance of
*cmdtype* as the innermost command;
- direct reference to the innermost command in the above tree;
- a ``ContextStack`` instance representing the nested CommandContext
corresponding to the returned command tree. | init_delta_branch | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
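What the two init_*_delta_branch helpers hand back is a usage pattern more than a single object: a root to attach to an enclosing delta, the innermost command to populate, and a stack of contexts to enter while populating it. The caller-side sketch below uses invented stand-ins (ToyCommand, ToyContextStack); it is not the real sd.Command / sd.ContextStack API.

# Invented stand-ins illustrating how a (root, innermost, ctx_stack) triple
# is typically consumed: populate the innermost command inside the pushed
# contexts, then attach the root to the enclosing delta.
import contextlib
from dataclasses import dataclass, field


@dataclass
class ToyCommand:
    name: str
    subcommands: list["ToyCommand"] = field(default_factory=list)

    def add(self, cmd: "ToyCommand") -> None:
        self.subcommands.append(cmd)


class ToyContextStack(contextlib.ExitStack):
    """Collects nested per-command contexts so they unwind together."""


def toy_init_delta_branch(referrers: list[str], leaf: str):
    # Build Alter nodes for each enclosing referrer, then the leaf command.
    root = innermost = ToyCommand(f'Alter({referrers[0]})')
    for name in referrers[1:]:
        nxt = ToyCommand(f'Alter({name})')
        innermost.add(nxt)
        innermost = nxt
    leaf_cmd = ToyCommand(f'Alter({leaf})')
    innermost.add(leaf_cmd)
    return root, leaf_cmd, ToyContextStack()


root, cmd, ctx_stack = toy_init_delta_branch(['ObjectType:User'], 'Pointer:name')
with ctx_stack:
    cmd.add(ToyCommand('SetField(required)'))   # work happens "inside" the contexts
outer_delta = ToyCommand('Delta')
outer_delta.add(root)                            # attach the whole branch
assert (outer_delta.subcommands[0].subcommands[0].subcommands[0].name
        == 'SetField(required)')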
def is_abstract(cls) -> bool:
"""Return True if this type does NOT represent a concrete schema class.
"""
return cls is InternalObject | Return True if this type does NOT represent a concrete schema class. | is_abstract | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
def add(
self: OIBT, schema: s_schema.Schema, item: Object
) -> Tuple[s_schema.Schema, OIBT]:
"""Return a copy of this collection containing the given item.
If the item is already present in the collection, an
    ``ObjectCollectionDuplicateNameError`` is raised.
"""
key = type(self)._key(schema, item)
if self.has(schema, key):
raise ObjectCollectionDuplicateNameError(
f'object index already contains the {key!r} key')
return self.update(schema, [item]) | Return a copy of this collection containing the given item.
If the item is already present in the collection, an
``ObjectCollectionDuplicateNameError`` is raised. | add | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0
def maybe_get_topmost_concrete_base(
self: InheritingObjectT, schema: s_schema.Schema
) -> Optional[InheritingObjectT]:
"""Get the topmost non-abstract base."""
lineage = self.get_ancestors(schema).objects(schema)
for ancestor in reversed(lineage):
if not ancestor.get_abstract(schema):
return ancestor
if not self.get_abstract(schema):
return self
return None | Get the topmost non-abstract base. | maybe_get_topmost_concrete_base | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
def get_topmost_concrete_base(
self: InheritingObjectT, schema: s_schema.Schema
) -> InheritingObjectT:
"""Get the topmost non-abstract base."""
base = self.maybe_get_topmost_concrete_base(schema)
if not base:
raise errors.SchemaError(
f'{self.get_verbosename(schema)} has no non-abstract ancestors'
)
return base | Get the topmost non-abstract base. | get_topmost_concrete_base | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
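Both helpers scan the ancestor line from the root downward and return the first non-abstract class found, falling back to the object itself. A toy version over a plain list of ancestors (the ToyType hierarchy is invented; 'ancestors' is assumed to be ordered nearest-first, as get_ancestors() is, so reversing it starts the scan at the root):

# Toy version of maybe_get_topmost_concrete_base / get_topmost_concrete_base.
from dataclasses import dataclass
from typing import Optional


@dataclass
class ToyType:
    name: str
    abstract: bool
    ancestors: list["ToyType"]   # nearest ancestor first


def topmost_concrete_base(t: ToyType) -> Optional[ToyType]:
    for ancestor in reversed(t.ancestors):   # walk from the root downward
        if not ancestor.abstract:
            return ancestor
    return t if not t.abstract else None


anytype = ToyType('anytype', abstract=True, ancestors=[])
anyint = ToyType('anyint', abstract=True, ancestors=[anytype])
int64 = ToyType('int64', abstract=False, ancestors=[anyint, anytype])
ticket_id = ToyType('ticket_id', abstract=False, ancestors=[int64, anyint, anytype])

assert topmost_concrete_base(ticket_id) is int64   # root-most concrete ancestor
assert topmost_concrete_base(anyint) is None       # abstract all the way up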
def ordered_descendants(
self: InheritingObjectT, schema: s_schema.Schema
) -> List[InheritingObjectT]:
"""Return class descendants in ancestral order."""
graph = {}
for descendant in self.descendants(schema):
graph[descendant] = topological.DepGraphEntry(
item=descendant,
deps=ordered.OrderedSet(
descendant.get_bases(schema).objects(schema),
),
extra=False,
)
return list(topological.sort(graph, allow_unresolved=True)) | Return class descendants in ancestral order. | ordered_descendants | python | geldata/gel | edb/schema/objects.py | https://github.com/geldata/gel/blob/master/edb/schema/objects.py | Apache-2.0 |
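The ordering comes from a topological sort keyed on each descendant's bases, so a base always precedes its subclasses. The same idea can be sketched with the standard library's graphlib instead of the project's topological module; the class names below are invented.

# Toy equivalent of ordered_descendants using graphlib (Python 3.9+).
# Each descendant depends on its bases, so bases sort before subclasses.
from graphlib import TopologicalSorter

# Invented hierarchy: Animal <- Mammal <- (Dog, Cat); Dog <- Puppy.
BASES = {
    'Mammal': ['Animal'],
    'Dog': ['Mammal'],
    'Cat': ['Mammal'],
    'Puppy': ['Dog'],
}

descendants_of_animal = ['Puppy', 'Cat', 'Dog', 'Mammal']

graph = {
    d: [b for b in BASES[d] if b in descendants_of_animal]  # only in-set deps
    for d in descendants_of_animal
}
ordered = list(TopologicalSorter(graph).static_order())

# Ancestral order: every base appears before its subclasses.
assert ordered.index('Mammal') < ordered.index('Dog') < ordered.index('Puppy')
assert ordered.index('Mammal') < ordered.index('Cat')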
def minimize_class_set_by_most_generic(
schema: s_schema.Schema, classes: Iterable[so.InheritingObjectT]
) -> List[so.InheritingObjectT]:
"""Minimize the given set of objects by filtering out all subclasses."""
classes = list(classes)
mros = [set(p.get_ancestors(schema).objects(schema)) for p in classes]
count = len(classes)
smap = itertools.starmap
# Return only those entries that do not have other entries in their mro
result = [
scls for i, scls in enumerate(classes)
if not any(smap(set.__contains__,
((mros[i], classes[j])
for j in range(count) if j != i)))
]
return result | Minimize the given set of objects by filtering out all subclasses. | minimize_class_set_by_most_generic | python | geldata/gel | edb/schema/utils.py | https://github.com/geldata/gel/blob/master/edb/schema/utils.py | Apache-2.0 |
def minimize_class_set_by_least_generic(
schema: s_schema.Schema, classes: Iterable[so.InheritingObjectT]
) -> List[so.InheritingObjectT]:
"""Minimize the given set of objects by filtering out all superclasses."""
classes = list(classes)
mros = [set(p.get_ancestors(schema).objects(schema)) | {p}
for p in classes]
count = len(classes)
smap = itertools.starmap
# Return only those entries that are not present in other entries' mro
result = [
scls for i, scls in enumerate(classes)
if not any(smap(set.__contains__,
((mros[j], classes[i])
for j in range(count) if j != i)))
]
return result | Minimize the given set of objects by filtering out all superclasses. | minimize_class_set_by_least_generic | python | geldata/gel | edb/schema/utils.py | https://github.com/geldata/gel/blob/master/edb/schema/utils.py | Apache-2.0 |
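The two minimizers are mirror images: the first keeps a class only if no other member of the set appears among its ancestors (dropping subclasses), the second keeps a class only if it appears in no other member's ancestor set (dropping superclasses). The starmap formulation can be restated with plain set membership; the hierarchy below is invented for illustration and assumes no duplicate entries.

# Plain-set restatement of the two minimizers. ANCESTORS maps each invented
# class name to the set of its strict ancestors.
ANCESTORS = {
    'Object': set(),
    'Named': {'Object'},
    'Type': {'Named', 'Object'},
    'ScalarType': {'Type', 'Named', 'Object'},
}


def by_most_generic(classes: list[str]) -> list[str]:
    # Drop any class that has another member of the set among its ancestors.
    return [
        c for c in classes
        if not any(other in ANCESTORS[c] for other in classes if other != c)
    ]


def by_least_generic(classes: list[str]) -> list[str]:
    # Drop any class that appears in another member's ancestor set,
    # i.e. is a superclass of some other member.
    return [
        c for c in classes
        if not any(c in ANCESTORS[other] for other in classes if other != c)
    ]


classes = ['Named', 'ScalarType', 'Type']
assert by_most_generic(classes) == ['Named']        # only the most generic survives
assert by_least_generic(classes) == ['ScalarType']  # only the least generic survives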
def find_pointer_suggestions(
schema: s_schema.Schema,
item_type: Optional[so.ObjectMeta],
parent: Optional[so.Object],
) -> Iterable[Tuple[so.Object, str]]:
    """
    Suggests pointers (properties or links) from the parent object type.
    If a pointer is not the expected item type, suggest the .name notation.
    """
    from . import pointers as s_pointers
    from . import sources as s_sources
if not isinstance(parent, s_sources.Source):
return ()
pointers_with_names = parent.get_pointers(schema).items(schema)
pointers = (pointer for _, pointer in pointers_with_names)
suggestions = ((s, s.get_displayname(schema)) for s in pointers)
if item_type is not s_pointers.Pointer:
# Prefix with .
suggestions = ((s, "." + n) for s, n in suggestions)
    return suggestions | Suggests pointers (properties or links) from the parent object type.
If a pointer is not the expected item type, suggest the .name notation. | find_pointer_suggestions | python | geldata/gel | edb/schema/utils.py | https://github.com/geldata/gel/blob/master/edb/schema/utils.py | Apache-2.0