code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class wavelet_squash_ff_sptr(object): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> this = _wavelet_swig.new_wavelet_squash_ff_sptr(*args) <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> def __deref__(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr___deref__(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _wavelet_swig.delete_wavelet_squash_ff_sptr <NEW_LINE> __del__ = lambda self : None; <NEW_LINE> def history(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_history(self) <NEW_LINE> <DEDENT> def output_multiple(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_output_multiple(self) <NEW_LINE> <DEDENT> def relative_rate(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_relative_rate(self) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_start(self) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_stop(self) <NEW_LINE> <DEDENT> def nitems_read(self, *args, **kwargs): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_nitems_read(self, *args, **kwargs) <NEW_LINE> <DEDENT> def nitems_written(self, *args, **kwargs): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_nitems_written(self, *args, **kwargs) <NEW_LINE> <DEDENT> def detail(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_detail(self) <NEW_LINE> <DEDENT> def set_detail(self, *args, **kwargs): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_set_detail(self, *args, **kwargs) <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_name(self) <NEW_LINE> <DEDENT> def input_signature(self): <NEW_LINE> <INDENT> 
return _wavelet_swig.wavelet_squash_ff_sptr_input_signature(self) <NEW_LINE> <DEDENT> def output_signature(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_output_signature(self) <NEW_LINE> <DEDENT> def unique_id(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_unique_id(self) <NEW_LINE> <DEDENT> def to_basic_block(self): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_to_basic_block(self) <NEW_LINE> <DEDENT> def check_topology(self, *args, **kwargs): <NEW_LINE> <INDENT> return _wavelet_swig.wavelet_squash_ff_sptr_check_topology(self, *args, **kwargs)
Proxy of C++ boost::shared_ptr<(wavelet_squash_ff)> class
6259903a1f5feb6acb163dc0
class QOMCommand: <NEW_LINE> <INDENT> name: str <NEW_LINE> help: str <NEW_LINE> def __init__(self, args: argparse.Namespace): <NEW_LINE> <INDENT> if args.socket is None: <NEW_LINE> <INDENT> raise QMPError("No QMP socket path or address given") <NEW_LINE> <DEDENT> self.qmp = QEMUMonitorProtocol( QEMUMonitorProtocol.parse_address(args.socket) ) <NEW_LINE> self.qmp.connect() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def register(cls, subparsers: Subparsers) -> None: <NEW_LINE> <INDENT> subparser = subparsers.add_parser(cls.name, help=cls.help, description=cls.help) <NEW_LINE> cls.configure_parser(subparser) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def configure_parser(cls, parser: argparse.ArgumentParser) -> None: <NEW_LINE> <INDENT> default_path = os.environ.get('QMP_SOCKET') <NEW_LINE> parser.add_argument( '--socket', '-s', dest='socket', action='store', help='QMP socket path or address (addr:port).' ' May also be set via QMP_SOCKET environment variable.', default=default_path ) <NEW_LINE> parser.set_defaults(cmd_class=cls) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_path_prop_arg(cls, parser: argparse.ArgumentParser) -> None: <NEW_LINE> <INDENT> parser.add_argument( 'path_prop', metavar='<path>.<property>', action='store', help="QOM path and property, separated by a period '.'" ) <NEW_LINE> <DEDENT> def run(self) -> int: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def qom_list(self, path: str) -> List[ObjectPropertyInfo]: <NEW_LINE> <INDENT> rsp = self.qmp.command('qom-list', path=path) <NEW_LINE> assert isinstance(rsp, list) <NEW_LINE> return [ObjectPropertyInfo.make(x) for x in rsp] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def command_runner( cls: Type[CommandT], args: argparse.Namespace ) -> int: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cmd = cls(args) <NEW_LINE> return cmd.run() <NEW_LINE> <DEDENT> except QMPError as err: <NEW_LINE> <INDENT> print(f"{type(err).__name__}: {err!s}", file=sys.stderr) <NEW_LINE> return -1 
<NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def entry_point(cls) -> int: <NEW_LINE> <INDENT> parser = argparse.ArgumentParser(description=cls.help) <NEW_LINE> cls.configure_parser(parser) <NEW_LINE> args = parser.parse_args() <NEW_LINE> return cls.command_runner(args)
Represents a QOM sub-command. :param args: Parsed arguments, as returned from parser.parse_args.
6259903a07d97122c4217e6b
class Configuration(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.config = self.load() <NEW_LINE> <DEDENT> def _get(self, key_components): <NEW_LINE> <INDENT> value = self.config <NEW_LINE> for k in key_components: <NEW_LINE> <INDENT> value = value[k] <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> key_components = key.split('.') <NEW_LINE> try: <NEW_LINE> <INDENT> return self._get(key_components) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> <DEDENT> def load(self): <NEW_LINE> <INDENT> env_path = os.environ.get('CONFIG_ENV') or 'config/development.yaml' <NEW_LINE> if not os.path.exists(env_path): <NEW_LINE> <INDENT> raise Exception('{0} does not exist'.format(env_path)) <NEW_LINE> <DEDENT> stream = open(env_path, 'r') <NEW_LINE> return yaml.safe_load(stream)
Loads a yaml configuration file.
6259903acad5886f8bdc5963
class openrave_exception(Exception): <NEW_LINE> <INDENT> def __init__( self, app_error ): <NEW_LINE> <INDENT> Exception.__init__( self ) <NEW_LINE> self._pimpl = app_error <NEW_LINE> <DEDENT> def __str__( self ): <NEW_LINE> <INDENT> return str(self._pimpl) <NEW_LINE> <DEDENT> def __unicode__( self ): <NEW_LINE> <INDENT> return unicode(self._pimpl) <NEW_LINE> <DEDENT> def __getattribute__(self, attr): <NEW_LINE> <INDENT> my_pimpl = super(openrave_exception, self).__getattribute__("_pimpl") <NEW_LINE> try: <NEW_LINE> <INDENT> return getattr(my_pimpl, attr) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return super(openrave_exception,self).__getattribute__(attr)
wrap up the C++ openrave_exception
6259903a8e05c05ec3f6f742
class SecondClass(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.enabled = False <NEW_LINE> self.logger = logging.getLogger(__name__) <NEW_LINE> <DEDENT> def enable_system(self): <NEW_LINE> <INDENT> self.enabled = True <NEW_LINE> self.logger.warning('Enabling system.') <NEW_LINE> self.logger.info('Still enabling system.') <NEW_LINE> <DEDENT> def disable_system(self): <NEW_LINE> <INDENT> self.enabled = False <NEW_LINE> self.logger.warning('Disabling system.') <NEW_LINE> self.logger.info('Still disabling system.')
This class causes log entries for "system".
6259903ad164cc6175822143
class WeirdNumber: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> raise ChallengeNotAttempted
================================ Python OOP challenge level 0b010 ================================ Once upon the time, there was a weird number. Nobody knew what it was, but it existed. Some weird people called it infinity, although nobody knew what that word meant either. It seemed to be able to defy the rules of all basic mathematical operations, that is Let n be some random integer or float point number. Let w be the weird number w + n == w w + w == w w - n == w w - w == 0 w * n == w w * w == Nan W * 0 == 0 w / n == w (n!=0) w / w == 1 w / 0 == NaN w > n There is, of course, only one such weird number. So it always equals to itself. But it never equals to anything else. WeirdNumber() == WeirdNumber() w != n Hint: to construct an instance of NaN (not a number), use float("NaN")
6259903ad99f1b3c44d06874
class DbRefreshThread(Thread): <NEW_LINE> <INDENT> def __init__(self, settings, plugins, graph, refresh_interval, *args, **kwargs): <NEW_LINE> <INDENT> self.settings = settings <NEW_LINE> self.plugins = plugins <NEW_LINE> self.graph = graph <NEW_LINE> self.refresh_interval = refresh_interval <NEW_LINE> self.logger = logging.getLogger(__name__) <NEW_LINE> Thread.__init__(self, *args, **kwargs) <NEW_LINE> <DEDENT> def crash(self): <NEW_LINE> <INDENT> os._exit(1) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> initial_url = self.settings.database <NEW_LINE> while True: <NEW_LINE> <INDENT> self.logger.debug("Updating Graph from Database.") <NEW_LINE> try: <NEW_LINE> <INDENT> if self.settings.database != initial_url: <NEW_LINE> <INDENT> self.crash() <NEW_LINE> <DEDENT> with closing(Session()) as session: <NEW_LINE> <INDENT> self.graph.update_from_db(session) <NEW_LINE> <DEDENT> self.plugins.log_periodic_graph_update(success=True) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.plugins.log_periodic_graph_update(success=False) <NEW_LINE> self.plugins.log_exception(None, None, *sys.exc_info()) <NEW_LINE> logging.exception("Failed to refresh graph") <NEW_LINE> self.crash() <NEW_LINE> <DEDENT> sleep(self.refresh_interval)
Background thread for refreshing the in-memory cache of the graph.
6259903a73bcbd0ca4bcb458
@urls.register <NEW_LINE> class Extensions(generic.View): <NEW_LINE> <INDENT> url_regex = r'nova/extensions/$' <NEW_LINE> @rest_utils.ajax() <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> result = api.nova.list_extensions(request) <NEW_LINE> return {'items': [e.to_dict() for e in result]}
API for nova extensions.
6259903a8da39b475be043bd
class ImportName(ImportStmt): <NEW_LINE> <INDENT> pass
import_name: 'import' dotted_as_names
6259903ab57a9660fecd2c4a
class documentStatusType (pyxb.binding.datatypes.token, pyxb.binding.basis.enumeration_mixin): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'documentStatusType') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('http://www.ech.ch/xmlns/eCH-0039/2/eCH-0039-2-0.xsd', 131, 1) <NEW_LINE> _Documentation = 'Status: Zustand des Dokuments in Bezug auf Ver\xe4nderbarkeit und G\xfcltigkeit.'
Status: Zustand des Dokuments in Bezug auf Veränderbarkeit und Gültigkeit.
6259903a07d97122c4217e6c
class EnumToMapTestCase(PyrseasTestCase): <NEW_LINE> <INDENT> def test_enum(self): <NEW_LINE> <INDENT> self.db.execute_commit(DROP_STMT) <NEW_LINE> expmap = {'labels': ['red', 'green', 'blue']} <NEW_LINE> dbmap = self.db.execute_and_map(CREATE_ENUM_STMT) <NEW_LINE> self.assertEqual(dbmap['schema public']['type t1'], expmap)
Test mapping of created enum types
6259903a15baa7234946316a
class Alarms(Cluster): <NEW_LINE> <INDENT> cluster_id = 0x0009 <NEW_LINE> ep_attribute = "alarms" <NEW_LINE> attributes = { 0x0000: ("alarm_count", t.uint16_t) } <NEW_LINE> server_commands = { 0x0000: ("reset", (t.uint8_t, t.uint16_t), False), 0x0001: ("reset_all", (), False), 0x0002: ("get_alarm", (), False), 0x0003: ("reset_log", (), False), 0x0004: ("publish_event_log", (), False), } <NEW_LINE> client_commands = { 0x0000: ("alarm", (t.uint8_t, t.uint16_t), False), 0x0001: ( "get_alarm_response", ( t.uint8_t, t.Optional(t.uint8_t), t.Optional(t.uint16_t), t.Optional(t.uint32_t), ), True, ), 0x0002: ("get_event_log", (), False), }
Attributes and commands for sending notifications and configuring alarm functionality.
6259903a3c8af77a43b68823
class Context: <NEW_LINE> <INDENT> def __init__( self, hauto: 'Hautomate', event: str, *, event_data: Dict, target: 'Intent', when: pendulum.DateTime, parent: Union['Intent', 'Hautomate'] ): <NEW_LINE> <INDENT> self._id = next(_context_id) <NEW_LINE> self._hauto = hauto <NEW_LINE> self.event = event <NEW_LINE> self.event_data = event_data <NEW_LINE> self.target = target <NEW_LINE> self.parent = parent <NEW_LINE> self._when_ts = when.in_timezone('UTC').timestamp() <NEW_LINE> self._created_ts = pendulum.now(tz='UTC').timestamp() <NEW_LINE> <DEDENT> @property <NEW_LINE> def hauto(self): <NEW_LINE> <INDENT> return self._hauto <NEW_LINE> <DEDENT> @property <NEW_LINE> def when(self) -> Union[pendulum.DateTime, None]: <NEW_LINE> <INDENT> return pendulum.from_timestamp(self._when_ts, tz='UTC') <NEW_LINE> <DEDENT> @property <NEW_LINE> def created_at(self) -> pendulum.DateTime: <NEW_LINE> <INDENT> return pendulum.from_timestamp(self._created_ts, tz='UTC') <NEW_LINE> <DEDENT> def asdict(self): <NEW_LINE> <INDENT> d = { 'hauto': self.hauto, 'event': self.event, 'event_data': self.event_data, 'target': self.target, 'when': self.when, 'parent': self.parent, } <NEW_LINE> return d
Execution context under which an Intent is fired. Contexts hold a lot of relevant information as to why a specific intent was triggered.
6259903a596a897236128e6e
class BatchLoadRetryLimitReached(DynamoError): <NEW_LINE> <INDENT> pass
Retry limit hit when trying to batch load data. This is usually due to resource limits being exceded.
6259903a71ff763f4b5e896a
class INDataCenter(DataCenter): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def PRODUCTION(cls): <NEW_LINE> <INDENT> return DataCenter.Environment("https://www.zohoapis.in", cls().get_iam_url(), cls().get_file_upload_url()) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def SANDBOX(cls): <NEW_LINE> <INDENT> return DataCenter.Environment("https://sandbox.zohoapis.in", cls().get_iam_url(), cls().get_file_upload_url()) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def DEVELOPER(cls): <NEW_LINE> <INDENT> return DataCenter.Environment("https://developer.zohoapis.in", cls().get_iam_url(), cls().get_file_upload_url()) <NEW_LINE> <DEDENT> def get_iam_url(self): <NEW_LINE> <INDENT> return "https://accounts.zoho.in/oauth/v2/token" <NEW_LINE> <DEDENT> def get_file_upload_url(self): <NEW_LINE> <INDENT> return "https://content.zohoapis.in"
This class represents the properties of Zoho CRM in IN Domain.
6259903a9b70327d1c57ff3d
class Pledge(models.Model): <NEW_LINE> <INDENT> account = models.ForeignKey('account.Account') <NEW_LINE> pact = models.ForeignKey('pact.Pact') <NEW_LINE> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ('account', 'pact',) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'{0} {1}'.format(self.account.username, self.pact.name) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Pledge, self).save(*args, **kwargs) <NEW_LINE> if self.pact.has_enough_pledges and self.pact.is_ongoing: <NEW_LINE> <INDENT> if not self.pact.goal_met: <NEW_LINE> <INDENT> self.pact.set_goal_suceeded()
A through model that connects a given user with a pact that they have pledged to.
6259903a0fa83653e46f60aa
class NamedNode(Node): <NEW_LINE> <INDENT> name = str <NEW_LINE> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.name) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.name == other.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Represents a named node in a graph.
6259903a8c3a8732951f7726
class ReplyTopicTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.board = Board.objects.create(name="Django", description="Django Board") <NEW_LINE> self.username = 'john' <NEW_LINE> self.password = 'django123' <NEW_LINE> user = User.objects.create_user(username=self.username, email='[email protected]', password=self.password) <NEW_LINE> self.topic = Topic.objects.create(subject="Test", board=self.board, starter=user) <NEW_LINE> self.post = Post(message="Hello world!", topic=self.topic, created_by=user) <NEW_LINE> self.url = reverse('reply_topic', kwargs={"board_pk": self.board.pk, "topic_pk": self.topic.pk})
Base Test for all other Test Cases with this page The setUp will persist through all other Tests that inherit from this
6259903a94891a1f408b9fdf
class AuctionItem(): <NEW_LINE> <INDENT> def __init__(self, attributes): <NEW_LINE> <INDENT> self.starttime = int(attributes[0]) <NEW_LINE> self.listuser = attributes[1] <NEW_LINE> self.name = attributes[3] <NEW_LINE> self.reserve = float(attributes[4]) <NEW_LINE> self.stoptime = int(attributes[5]) <NEW_LINE> self.open = True <NEW_LINE> self.bids = [0] <NEW_LINE> self.highbidder = '' <NEW_LINE> self.buyprice = 0 <NEW_LINE> <DEDENT> def update_time(self, time): <NEW_LINE> <INDENT> if int(time) >= self.stoptime and self.open == True: <NEW_LINE> <INDENT> self.open = False <NEW_LINE> self.close_auction() <NEW_LINE> <DEDENT> <DEDENT> def new_bid(self, bid): <NEW_LINE> <INDENT> if self.open == True: <NEW_LINE> <INDENT> if float(bid[4]) > float(max(self.bids)): <NEW_LINE> <INDENT> if float(bid[4]) > float(self.reserve): <NEW_LINE> <INDENT> self.highbidder = bid[1] <NEW_LINE> <DEDENT> self.bids.append(float(bid[4])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def close_auction(self): <NEW_LINE> <INDENT> if self.highbidder == '': <NEW_LINE> <INDENT> self.sold = "UNSOLD" <NEW_LINE> self.buyprice = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.sold = "SOLD" <NEW_LINE> if float(self.bids[-2]) > float(self.reserve): <NEW_LINE> <INDENT> self.buyprice = self.bids[-2] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.buyprice = self.reserve <NEW_LINE> <DEDENT> <DEDENT> if len(self.bids) > 1: <NEW_LINE> <INDENT> self.bids.pop(0) <NEW_LINE> self.nobids = len(self.bids) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.nobids = 0 <NEW_LINE> <DEDENT> print(self.output_record()) <NEW_LINE> <DEDENT> def output_record(self): <NEW_LINE> <INDENT> self.record = (str(self.stoptime) + '|' + str(self.name) + '|' + self.highbidder + '|' + self.sold + '|' + "{:.2f}".format(self.buyprice) + '|' + str(self.nobids) + '|' + "{:.2f}".format(max(self.bids)) + '|' + "{:.2f}".format(min(self.bids)) ) <NEW_LINE> return self.record
An Auction Item
6259903a26068e7796d4db17
class Webhook(db.Base, ModelMixin, SlugMixin): <NEW_LINE> <INDENT> __tablename__ = 'webhooks' <NEW_LINE> slug = sqlalchemy.Column(sqlalchemy.String(30), primary_key=True) <NEW_LINE> name = sqlalchemy.Column(sqlalchemy.String(30), nullable=False) <NEW_LINE> url = sqlalchemy.Column(sqlalchemy.String(250), nullable=False) <NEW_LINE> verify = sqlalchemy.Column(sqlalchemy.Boolean) <NEW_LINE> date_added = sqlalchemy.Column(sqlalchemy.DateTime, nullable=False) <NEW_LINE> added_by = sqlalchemy.Column(sqlalchemy.String(30), sqlalchemy.ForeignKey('users.username')) <NEW_LINE> def __init__(self, slug=None, name=None, url=None, verify=None, date_added=None, added_by=None): <NEW_LINE> <INDENT> super() <NEW_LINE> self.slug = slug <NEW_LINE> self.name = name <NEW_LINE> self.url = url <NEW_LINE> self.verify = verify <NEW_LINE> self.date_added = date_added <NEW_LINE> self.added_by = added_by <NEW_LINE> <DEDENT> def is_secure(self): <NEW_LINE> <INDENT> return self.url.startswith('https') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add(cls, slug, name, url, verify=False, added_by=None): <NEW_LINE> <INDENT> if cls.get(slug): <NEW_LINE> <INDENT> raise WebhookError('Webhook slug already exists.') <NEW_LINE> <DEDENT> now = datetime.datetime.now() <NEW_LINE> added_by_username = None <NEW_LINE> if added_by: <NEW_LINE> <INDENT> if not isinstance(added_by, User): <NEW_LINE> <INDENT> raise TypeError('Expected instance of User not {}.'.format(type(added_by))) <NEW_LINE> <DEDENT> added_by_username = added_by.username <NEW_LINE> <DEDENT> webhook = cls(slug, name, url, verify, now, added_by_username) <NEW_LINE> db.session.add(webhook) <NEW_LINE> db.session.commit() <NEW_LINE> return webhook <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Webhook '{}'>".format(self.slug)
Model defining multipurpose webhooks. slug: String uniquely identifying webhook. name: Webhook name. url: URL to be requested. verify: Whether to force certificate checking if URL uses SSL. date_added: Date and time webhook was added to the database. added_by: Username of the user who added the webhook. Class also has property added_user created by the foreign key back reference to the respective User object.
6259903a73bcbd0ca4bcb459
class Node(object): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self._value = value <NEW_LINE> self._white = True <NEW_LINE> self._gray = False <NEW_LINE> self._black = False <NEW_LINE> self._pred = None <NEW_LINE> self._neighbors = [] <NEW_LINE> self._distance = 0 <NEW_LINE> <DEDENT> def _add_neighbor(self, other_node): <NEW_LINE> <INDENT> self._neighbors.append(other_node) <NEW_LINE> <DEDENT> def get_neighbors(self): <NEW_LINE> <INDENT> if self._neighbors: <NEW_LINE> <INDENT> return self._neighbors <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def distance(self): <NEW_LINE> <INDENT> return self._distance <NEW_LINE> <DEDENT> @distance.setter <NEW_LINE> def distance(self, value): <NEW_LINE> <INDENT> self._distance = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value): <NEW_LINE> <INDENT> print("Unable to change value of node once set") <NEW_LINE> <DEDENT> @property <NEW_LINE> def gray(self): <NEW_LINE> <INDENT> return self._gray <NEW_LINE> <DEDENT> @gray.setter <NEW_LINE> def gray(self, value): <NEW_LINE> <INDENT> if value: <NEW_LINE> <INDENT> self._gray = True <NEW_LINE> self._white = False <NEW_LINE> <DEDENT> elif value is False: <NEW_LINE> <INDENT> print("Can not un-visit a Node.") <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def black(self): <NEW_LINE> <INDENT> return self._black <NEW_LINE> <DEDENT> @black.setter <NEW_LINE> def black(self, value): <NEW_LINE> <INDENT> if value: <NEW_LINE> <INDENT> self._black = True <NEW_LINE> self._white = False <NEW_LINE> <DEDENT> elif value is False: <NEW_LINE> <INDENT> print("Can not un-visit a Node.") <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def pred(self): <NEW_LINE> <INDENT> if self._pred == self: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._pred <NEW_LINE> 
<DEDENT> <DEDENT> @pred.setter <NEW_LINE> def pred(self, node): <NEW_LINE> <INDENT> self._pred = node <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self._value) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Node({})".format(self._value)
Node object which has a value, colors to determine at what stage it has been inspected, a predecessor, distance from origin vertex, and peer awareness.
6259903a287bf620b6272dbb
class NativeFastqReader(genomics_reader.GenomicsReader): <NEW_LINE> <INDENT> def __init__(self, input_path): <NEW_LINE> <INDENT> super(NativeFastqReader, self).__init__() <NEW_LINE> fastq_path = input_path.encode('utf8') <NEW_LINE> if fastq_path.endswith('.gz'): <NEW_LINE> <INDENT> options = fastq_pb2.FastqReaderOptions( compression_type=fastq_pb2.FastqReaderOptions.GZIP) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> options = fastq_pb2.FastqReaderOptions() <NEW_LINE> <DEDENT> self._reader = fastq_reader.FastqReader.from_file(fastq_path, options) <NEW_LINE> self.header = None <NEW_LINE> <DEDENT> def query(self): <NEW_LINE> <INDENT> raise NotImplementedError('Can not query a FASTQ file') <NEW_LINE> <DEDENT> def iterate(self): <NEW_LINE> <INDENT> return self._reader.iterate() <NEW_LINE> <DEDENT> def __exit__(self, exit_type, exit_value, exit_traceback): <NEW_LINE> <INDENT> self._reader.__exit__(exit_type, exit_value, exit_traceback)
Class for reading from native FASTQ files. Most users will want to use FastqReader instead, because it dynamically dispatches between reading native FASTQ files and TFRecord files based on the filename's extension.
6259903a1d351010ab8f4cec
class ReturnType(ArgumentType): <NEW_LINE> <INDENT> def __init__(self, function, arg_type): <NEW_LINE> <INDENT> super(ReturnType, self).__init__(function, 0, arg_type, None) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> spelling = self._clang_type.spelling.replace('const', '') <NEW_LINE> return_type = 'sapi::StatusOr<{}>'.format(spelling) <NEW_LINE> return_type = 'sapi::Status' if self.is_void() else return_type <NEW_LINE> return return_type
Class representing function return type. Attributes: return_type: sapi::StatusOr<T> where T is original return type, or sapi::Status for functions returning void
6259903a23e79379d538d6d1
class Screenboard( GetableAPIResource, CreateableAPIResource, UpdatableAPIResource, DeletableAPIResource, ActionAPIResource, ListableAPIResource, ): <NEW_LINE> <INDENT> _resource_name = "screen" <NEW_LINE> @classmethod <NEW_LINE> def share(cls, board_id): <NEW_LINE> <INDENT> return super(Screenboard, cls)._trigger_action("POST", "screen/share", board_id) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def revoke(cls, board_id): <NEW_LINE> <INDENT> return super(Screenboard, cls)._trigger_action("DELETE", "screen/share", board_id)
A wrapper around Screenboard HTTP API.
6259903ab5575c28eb7135b2
class RateLimitFault(webob.exc.HTTPException): <NEW_LINE> <INDENT> def __init__(self, message, details, retry_time): <NEW_LINE> <INDENT> hdrs = RateLimitFault._retry_after(retry_time) <NEW_LINE> self.wrapped_exc = webob.exc.HTTPTooManyRequests(headers=hdrs) <NEW_LINE> self.content = { "overLimit": { "code": self.wrapped_exc.status_int, "message": message, "details": details, "retryAfter": hdrs['Retry-After'], }, } <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _retry_after(retry_time): <NEW_LINE> <INDENT> delay = int(math.ceil(retry_time - time.time())) <NEW_LINE> retry_after = delay if delay > 0 else 0 <NEW_LINE> headers = {'Retry-After': '%d' % retry_after} <NEW_LINE> return headers <NEW_LINE> <DEDENT> @webob.dec.wsgify(RequestClass=Request) <NEW_LINE> def __call__(self, request): <NEW_LINE> <INDENT> user_locale = request.best_match_language() <NEW_LINE> content_type = request.best_match_content_type() <NEW_LINE> metadata = {"attributes": {"overLimit": ["code", "retryAfter"]}} <NEW_LINE> self.content['overLimit']['message'] = i18n.translate(self.content['overLimit']['message'], user_locale) <NEW_LINE> self.content['overLimit']['details'] = i18n.translate(self.content['overLimit']['details'], user_locale) <NEW_LINE> xml_serializer = XMLDictSerializer(metadata, XMLNS_V11) <NEW_LINE> serializer = { 'application/xml': xml_serializer, 'application/json': JSONDictSerializer(), }[content_type] <NEW_LINE> content = serializer.serialize(self.content) <NEW_LINE> self.wrapped_exc.body = content <NEW_LINE> self.wrapped_exc.content_type = content_type <NEW_LINE> return self.wrapped_exc
Rate-limited request response.
6259903a71ff763f4b5e896c
class TestManipulator(BaseManipulator): <NEW_LINE> <INDENT> def __init__(self, target_shape, **kwargs): <NEW_LINE> <INDENT> self.target_shape = target_shape <NEW_LINE> <DEDENT> def manipulate(self): <NEW_LINE> <INDENT> target_shape = self.target_to_valid_geom(self.target_shape) <NEW_LINE> status_html = self.do_template("0") <NEW_LINE> return self.result(target_shape, status_html) <NEW_LINE> <DEDENT> class Options(BaseManipulator.Options): <NEW_LINE> <INDENT> name = 'TestManipulator' <NEW_LINE> supported_geom_fields = ['PolygonField'] <NEW_LINE> html_templates = {'0':'manipulators/valid.html', }
This manipulator does nothing but ensure the geometry is clean.
6259903a0fa83653e46f60ac
class ENDMDL: <NEW_LINE> <INDENT> def __init__(self, line): <NEW_LINE> <INDENT> pass
ENDMDL class The ENDMDL records are paired with MODEL records to group individual structures found in a coordinate entry.
6259903ab830903b9686ed62
class InitDB(CkanCommand): <NEW_LINE> <INDENT> summary = __doc__.split('\n')[0] <NEW_LINE> usage = __doc__ <NEW_LINE> max_args = 0 <NEW_LINE> min_args = 0 <NEW_LINE> def command(self): <NEW_LINE> <INDENT> self._load_config() <NEW_LINE> import ckan.model as model <NEW_LINE> model.Session.remove() <NEW_LINE> model.Session.configure(bind=model.meta.engine) <NEW_LINE> log = logging.getLogger('ckanext.geometry') <NEW_LINE> import geo_model <NEW_LINE> geo_model.init_tables() <NEW_LINE> log.info("DB_Geometry tables are setup... SUCCESS!")
Initialise the extension's database tables
6259903a26068e7796d4db19
class ApiGetExportedFlowResultsHandlerTest(test_lib.GRRBaseTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(ApiGetExportedFlowResultsHandlerTest, self).setUp() <NEW_LINE> self.handler = flow_plugin.ApiGetExportedFlowResultsHandler() <NEW_LINE> self.client_id = self.SetupClients(1)[0] <NEW_LINE> <DEDENT> def testWorksCorrectlyWithTestOutputPluginOnFlowWithSingleResult(self): <NEW_LINE> <INDENT> with test_lib.FakeTime(42): <NEW_LINE> <INDENT> flow_urn = flow.GRRFlow.StartFlow( flow_name=test_lib.DummyFlowWithSingleReply.__name__, client_id=self.client_id, token=self.token) <NEW_LINE> for _ in test_lib.TestFlowHelper(flow_urn, token=self.token): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> result = self.handler.Handle( flow_plugin.ApiGetExportedFlowResultsArgs( client_id=self.client_id, flow_id=flow_urn.Basename(), plugin_name=test_plugins.TestInstantOutputPlugin.plugin_name), token=self.token) <NEW_LINE> chunks = list(result.GenerateContent()) <NEW_LINE> self.assertListEqual( chunks, ["Start: %s" % utils.SmartStr(flow_urn), "Values of type: RDFString", "First pass: oh (source=%s)" % utils.SmartStr(self.client_id), "Second pass: oh (source=%s)" % utils.SmartStr(self.client_id), "Finish: %s" % utils.SmartStr(flow_urn)])
Tests for ApiGetExportedFlowResultsHandler.
6259903a6fece00bbacccb7f
class PeriodicCallback(SessionCallback): <NEW_LINE> <INDENT> def __init__(self, document, callback, period, id=None): <NEW_LINE> <INDENT> super(PeriodicCallback, self).__init__(document, callback, id) <NEW_LINE> self._period = period <NEW_LINE> <DEDENT> @property <NEW_LINE> def period(self): <NEW_LINE> <INDENT> return self._period <NEW_LINE> <DEDENT> def _copy_with_changed_callback(self, new_callback): <NEW_LINE> <INDENT> return PeriodicCallback(self._document, new_callback, self._period, self._id)
Represent a callback to execute periodically on the IOLoop at a specified periodic time interval.
6259903abaa26c4b54d5047a
class _ReadRequest: <NEW_LINE> <INDENT> MAX_DATA_LENGTH = 20 <NEW_LINE> def __init__(self, mem, addr, length, ed): <NEW_LINE> <INDENT> self.mem = mem <NEW_LINE> self.addr = addr <NEW_LINE> self._bytes_left = length <NEW_LINE> self.data = bytearray() <NEW_LINE> self.ed = ed <NEW_LINE> self._current_addr = addr <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self._request_new_chunk() <NEW_LINE> <DEDENT> def resend(self): <NEW_LINE> <INDENT> logger.debug('Sending write again...') <NEW_LINE> self._request_new_chunk() <NEW_LINE> <DEDENT> def _request_new_chunk(self): <NEW_LINE> <INDENT> new_len = self._bytes_left <NEW_LINE> if new_len > _ReadRequest.MAX_DATA_LENGTH: <NEW_LINE> <INDENT> new_len = _ReadRequest.MAX_DATA_LENGTH <NEW_LINE> <DEDENT> logger.debug('Requesting new chunk of {}bytes at 0x{:X}'.format( new_len, self._current_addr)) <NEW_LINE> pk = CRTPPacket() <NEW_LINE> pk.set_header(CRTPPort.MEM, CHAN_READ) <NEW_LINE> pk.data = struct.pack('<BIB', self.mem.id, self._current_addr, new_len) <NEW_LINE> reply = struct.unpack('<BBBBB', pk.data[:-1]) <NEW_LINE> self.ed.send_packet(pk, expected_reply=reply, timeout=1) <NEW_LINE> <DEDENT> def add_data(self, addr, data): <NEW_LINE> <INDENT> data_len = len(data) <NEW_LINE> if not addr == self._current_addr: <NEW_LINE> <INDENT> logger.warning( 'Address did not match when adding data to read request!') <NEW_LINE> return <NEW_LINE> <DEDENT> self.data += data <NEW_LINE> self._bytes_left -= data_len <NEW_LINE> self._current_addr += data_len <NEW_LINE> if self._bytes_left > 0: <NEW_LINE> <INDENT> self._request_new_chunk() <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
Class used to handle memory reads that will split up the read in multiple packets if necessary
6259903a50485f2cf55dc154
class ResponseError(Response): <NEW_LINE> <INDENT> def __init__( self, error, error_code=None, http_status=status.HTTP_400_BAD_REQUEST, content_type=None, detail=None, ): <NEW_LINE> <INDENT> reply = { "response": { "success": False, "error": error, "error_code": error_code, "detail": detail, } } <NEW_LINE> super().__init__(data=reply, status=http_status, content_type=content_type)
Common error response object. When an exception is not sufficient and a response can still be returned (e.g. certain errors to correct) we can use ResponseError to standardise this response format.
6259903ad164cc6175822147
class IntegrationTestCase(TestCase): <NEW_LINE> <INDENT> print_stdout_stderr_on_teardown = False <NEW_LINE> processes = {} <NEW_LINE> def tearDown(self): <NEW_LINE> <INDENT> super(IntegrationTestCase, self).tearDown() <NEW_LINE> for pid, process in self.processes.items(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> process.kill() <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if self.print_stdout_stderr_on_teardown: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> stdout = process.stdout.read() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> stdout = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> stderr = process.stderr.read() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> stderr = None <NEW_LINE> <DEDENT> print('Process "%s"' % (process.pid)) <NEW_LINE> print('Stdout: %s' % (stdout)) <NEW_LINE> print('Stderr: %s' % (stderr)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def add_process(self, process): <NEW_LINE> <INDENT> self.processes[process.pid] = process <NEW_LINE> <DEDENT> def remove_process(self, process): <NEW_LINE> <INDENT> if process.pid in self.processes: <NEW_LINE> <INDENT> del self.processes[process.pid] <NEW_LINE> <DEDENT> <DEDENT> def assertProcessIsRunning(self, process): <NEW_LINE> <INDENT> if not process: <NEW_LINE> <INDENT> raise ValueError('process is None') <NEW_LINE> <DEDENT> return_code = process.poll() <NEW_LINE> if return_code is not None: <NEW_LINE> <INDENT> if process.stdout: <NEW_LINE> <INDENT> stdout = process.stdout.read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> stdout = '' <NEW_LINE> <DEDENT> if process.stderr: <NEW_LINE> <INDENT> stderr = process.stderr.read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> stderr = '' <NEW_LINE> <DEDENT> msg = ('Process exited with code=%s.\nStdout:\n%s\n\nStderr:\n%s' % (return_code, stdout, stderr)) <NEW_LINE> self.fail(msg) <NEW_LINE> <DEDENT> <DEDENT> def assertProcessExited(self, proc): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> status = proc.status() <NEW_LINE> 
<DEDENT> except psutil.NoSuchProcess: <NEW_LINE> <INDENT> status = 'exited' <NEW_LINE> <DEDENT> if status not in ['exited', 'zombie']: <NEW_LINE> <INDENT> self.fail('Process with pid "%s" is still running' % (proc.pid))
Base test class for integration tests to inherit from. It includes various utility functions and assert methods for working with processes.
6259903ad53ae8145f919638
class DatabasesGetTestCase(BaseTestGenerator): <NEW_LINE> <INDENT> scenarios = [ ('Check Databases Node URL', dict(url='/browser/database/obj/')) ] <NEW_LINE> def runTest(self): <NEW_LINE> <INDENT> server_data = parent_node_dict["database"][-1] <NEW_LINE> server_id = server_data["server_id"] <NEW_LINE> db_id = server_data['db_id'] <NEW_LINE> db_con = database_utils.connect_database(self, utils.SERVER_GROUP, server_id, db_id) <NEW_LINE> try: <NEW_LINE> <INDENT> if db_con["info"] == "Database connected.": <NEW_LINE> <INDENT> response = self.tester.get( self.url + str(utils.SERVER_GROUP) + '/' + str( server_id) + '/' + str(db_id), follow_redirects=True) <NEW_LINE> self.assertEquals(response.status_code, 200) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("Could not connect to database.") <NEW_LINE> <DEDENT> <DEDENT> except Exception as exception: <NEW_LINE> <INDENT> raise Exception("Error while getting database. %s" % exception) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> database_utils.disconnect_database(self, server_id, db_id)
This class will fetch database added under last added server.
6259903a91af0d3eaad3b006
class TestCustomerFilter(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testCustomerFilter(self): <NEW_LINE> <INDENT> model = squareconnect.models.customer_filter.CustomerFilter()
CustomerFilter unit test stubs
6259903a8a43f66fc4bf3360
class CourseDatesFragmentView(EdxFragmentView): <NEW_LINE> <INDENT> template_name = 'course_experience/course-dates-fragment.html' <NEW_LINE> def render_to_fragment(self, request, course_id=None, **kwargs): <NEW_LINE> <INDENT> course_key = CourseKey.from_string(course_id) <NEW_LINE> course = get_course_with_access(request.user, 'load', course_key, check_if_enrolled=False) <NEW_LINE> course_date_blocks = get_course_date_blocks(course, request.user, request, num_assignments=1) <NEW_LINE> dates_tab_enabled = DatesTab.is_enabled(course, request.user) <NEW_LINE> if course_home_mfe_dates_tab_is_active(course_key): <NEW_LINE> <INDENT> dates_tab_link = get_microfrontend_url(course_key=course.id, view_name='dates') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dates_tab_link = reverse('dates', args=[course.id]) <NEW_LINE> <DEDENT> context = { 'course_date_blocks': [block for block in course_date_blocks if block.title != 'current_datetime'], 'dates_tab_link': dates_tab_link, 'dates_tab_enabled': dates_tab_enabled, } <NEW_LINE> html = render_to_string(self.template_name, context) <NEW_LINE> dates_fragment = Fragment(html) <NEW_LINE> self.add_fragment_resource_urls(dates_fragment) <NEW_LINE> return dates_fragment
A fragment to important dates within a course.
6259903a0a366e3fb87ddbb8
class VeryLazyProcessMemoryDumpLoader(LazyProcessMemoryDumpLoader): <NEW_LINE> <INDENT> def _load_memory_mappings(self): <NEW_LINE> <INDENT> _mappings = [] <NEW_LINE> default_ctypes = types.load_ctypes_default() <NEW_LINE> for mmap_fname, start, end, permissions, offset, major_device, minor_device, inode, pathname in self.metalines: <NEW_LINE> <INDENT> log.debug('Loading %s - %s' % (mmap_fname, pathname)) <NEW_LINE> fname = os.path.sep.join([self.dumpname, mmap_fname]) <NEW_LINE> mmap = FilenameBackedMemoryMapping(fname, start, end, permissions, offset, major_device, minor_device, inode, pathname=pathname) <NEW_LINE> mmap.set_ctypes(default_ctypes) <NEW_LINE> _mappings.append(mmap) <NEW_LINE> <DEDENT> _target_platform = target.TargetPlatform(_mappings, cpu_bits=self._cpu_bits, os_name=self._os_name) <NEW_LINE> self._memory_handler = MemoryHandler(_mappings, _target_platform, self.dumpname) <NEW_LINE> self._memory_handler.reset_mappings() <NEW_LINE> return
Always use a filename backed memory mapping.
6259903a1d351010ab8f4cee
class Presqoop(): <NEW_LINE> <INDENT> NECESSARY_ARGS = { '--presto-host': 'presto_host', '--presto-port': 'presto_port', '--presto-user': 'presto_user', '--presto-catalog': 'presto_catalog', '--presto-schema': 'presto_schema', '--table': 'table' } <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.__args = self.__set_args() <NEW_LINE> self.__check_args() <NEW_LINE> <DEDENT> def __set_args(self): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser() <NEW_LINE> parser.add_argument('execute_type') <NEW_LINE> parser.add_argument('--presto-host', action='store', dest='presto_port',type=str, help="set presto port") <NEW_LINE> parser.add_argument('--presto-port', action='store', dest='presto_port',type=int, help="set presto port") <NEW_LINE> parser.add_argument('--presto-user', action='store', dest='presto_user', type=str, help="set presto user") <NEW_LINE> parser.add_argument( '--presto-catalog', action='store', dest='presto_catalog', type=str, help="set presto catalog" ) <NEW_LINE> parser.add_argument('--presto-schema', action='store', dest='presto_schema', type=str, help="set presto schema") <NEW_LINE> parser.add_argument('--tabel', action='store', nargs='*', dest='table', type=str, help="set table names") <NEW_LINE> parser.add_argument('--log-path', action='store', dest='log_path', type=str, help="set log path") <NEW_LINE> parser.add_argument('-l', '--list', action='store_true', dest='config_list', defalut=False, help="list config") <NEW_LINE> parser.add_argument('--presto', action='store', dest='config_presto', type=str, help="set presto config name") <NEW_LINE> parser.add_argument('--log', action='store', dest='config_log', type=str, help="set log config name") <NEW_LINE> args = parser.parse_args() <NEW_LINE> args_key = list(map(lambda kv: kv[0], args._get_kwargs())) <NEW_LINE> args_value = list(map(lambda kv: kv[1], args._get_kwargs())) <NEW_LINE> self.__args_dict = dict(zip(args_key, args_value)) <NEW_LINE> return args <NEW_LINE> <DEDENT> def 
__check_args(self): <NEW_LINE> <INDENT> for necessary_arg in SqlFlowExecutor.NECESSARY_ARGS.values(): <NEW_LINE> <INDENT> if self.__args_dict[necessary_arg] is None: <NEW_LINE> <INDENT> logging.error( "Please provide all necessary arguments: {}".format(SqlFlowExecutor.NECESSARY_ARGS.keys()) ) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __set_session(self): <NEW_LINE> <INDENT> session = requests.session() <NEW_LINE> request_retry = requests.adapters.HTTPAdapter(max_retries=3) <NEW_LINE> session.mount('https://',request_retry) <NEW_LINE> session.mount('http://',request_retry) <NEW_LINE> return session <NEW_LINE> <DEDENT> def __get_presto_connection(self): <NEW_LINE> <INDENT> return prestodb.dbapi.connect( host=self.__args.preto_host, port=self.__args.presto_port, user=self.__args.presto_user, catalog=self.__args.presto_catalog, schema=self.__args.presto_schema, )
A Sqoop like tools to import/export datas by using presto **Basic** 基于 presto 做的数据导入/导出脚本,功能仿照 sqoop 设计,尽量实现 sqoop 的功能 .. version v1.0
6259903a66673b3332c315ca
class Attributes(object): <NEW_LINE> <INDENT> __metaclass__ = AttributesMeta <NEW_LINE> _wrapper = False <NEW_LINE> default = None <NEW_LINE> nillable = True <NEW_LINE> min_occurs = 0 <NEW_LINE> max_occurs = 1 <NEW_LINE> schema_tag = '{%s}element' % spyne.const.xml_ns.xsd <NEW_LINE> translations = None <NEW_LINE> sqla_column_args = None <NEW_LINE> exc_mapper = False <NEW_LINE> exc_table = False <NEW_LINE> exc_interface = False <NEW_LINE> logged = True <NEW_LINE> unique = None <NEW_LINE> db_type = None <NEW_LINE> index = None
The class that holds the constraints for the given type.
6259903a30dc7b76659a0a05
class Dice(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> dummy = False <NEW_LINE> <DEDENT> def diceroll(self, die_no = 1, die_type = 10, die_weight = 0, die_tries = 1): <NEW_LINE> <INDENT> ttl_roll = 0 <NEW_LINE> for tries in range(0, die_tries): <NEW_LINE> <INDENT> for roll in range(0, die_no): <NEW_LINE> <INDENT> a_roll = randint(1, die_type + 1) + die_weight <NEW_LINE> if a_roll > die_type: <NEW_LINE> <INDENT> a_roll = die_type <NEW_LINE> <DEDENT> ttl_roll += a_roll <NEW_LINE> <DEDENT> <DEDENT> ttl_roll = int(ttl_roll/die_tries) <NEW_LINE> return(ttl_roll) <NEW_LINE> <DEDENT> def best_roll(self, die_no = 1, die_type = 10, die_weight = 0, die_tries = 1, best_of = 2): <NEW_LINE> <INDENT> roll_a = self.diceroll(die_no, die_type, die_weight, die_tries) <NEW_LINE> for roll in range(0, best_of): <NEW_LINE> <INDENT> roll_b = self.diceroll(die_no, die_type, die_weight, die_tries) <NEW_LINE> if roll_b > roll_a: <NEW_LINE> <INDENT> roll_a = roll_b <NEW_LINE> <DEDENT> <DEDENT> return(roll_a)
dice roll handler
6259903a07d97122c4217e71
class LinuxInstaller(cr.Installer): <NEW_LINE> <INDENT> @property <NEW_LINE> def enabled(self): <NEW_LINE> <INDENT> return cr.LinuxPlatform.GetInstance().is_active <NEW_LINE> <DEDENT> def Uninstall(self, targets, arguments): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Install(self, targets, arguments): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Reinstall(self, targets, arguments): <NEW_LINE> <INDENT> pass
An implementation of cr.Installer for the linux platform. This does nothing, the linux runner works from the output directory, there is no need to install anywhere.
6259903a73bcbd0ca4bcb45d
class BrandView( RestView ): <NEW_LINE> <INDENT> route_base = 'brand' <NEW_LINE> Controller = BrandCtrl
The brand view.
6259903abaa26c4b54d5047c
class TemporalVarianceParser(): <NEW_LINE> <INDENT> def __call__(self, criteria_str: str) -> tp.Dict[str, tp.Any]: <NEW_LINE> <INDENT> ret = { 'variance_type': "", 'xml_parent_path': "", 'variance_csv_col': "", 'waveform_type': "", 'waveform_param': int(), 'population': None } <NEW_LINE> xml_parent = { 'M': './/env_dynamics/motion_throttle', 'BC': './/env_dynamics/blocks/carry_throttle', 'BM': './/env_dynamics/blocks/manipulation_penalty', } <NEW_LINE> variance_col = { 'BC': "swarm_motion_throttle", 'M': "swarm_motion_throttle", 'BM': "block_manip_penalty", } <NEW_LINE> res = re.search("BC|BM|M", criteria_str) <NEW_LINE> assert res is not None, "FATAL: Bad variance type in criteria '{0}'".format(criteria_str) <NEW_LINE> variance_type = str(res.group(0)) <NEW_LINE> ret['variance_type'] = variance_type <NEW_LINE> ret['xml_parent_path'] = xml_parent[variance_type] <NEW_LINE> ret['variance_csv_col'] = variance_col[variance_type] <NEW_LINE> res = re.search("Sine|Square|Sawtooth|Step[UD]|Constant", criteria_str) <NEW_LINE> assert res is not None, "FATAL: Bad waveform type in criteria '{0}'".format(criteria_str) <NEW_LINE> waveform_type = str(res.group(0)) <NEW_LINE> if 'Step' in waveform_type: <NEW_LINE> <INDENT> res = re.search("Step[UD][0-9]+", criteria_str) <NEW_LINE> assert res is not None, "FATAL: Bad step specification type in criteria '{0}'".format( criteria_str) <NEW_LINE> ret['waveform_param'] = int(res.group(0)[5:]) <NEW_LINE> <DEDENT> ret['waveform_type'] = waveform_type <NEW_LINE> res = re.search(r"\.Z[0-9]+", criteria_str) <NEW_LINE> if res is not None: <NEW_LINE> <INDENT> ret['population'] = int(res.group(0)[2:]) <NEW_LINE> <DEDENT> return ret
Enforces the cmdline definition of the :class:`TemporalVariance` batch criteria described in :ref:`ln-bc-temporal-variance` .
6259903a26238365f5fadd2a
class Director: <NEW_LINE> <INDENT> def __init__(self, builder): <NEW_LINE> <INDENT> self._builder = builder <NEW_LINE> <DEDENT> def constructCar(self): <NEW_LINE> <INDENT> self._builder.createNewCar() <NEW_LINE> self._builder.addEngine("eng") <NEW_LINE> self._builder.addTyres("mrf") <NEW_LINE> self._builder.addSpeedometer("speed") <NEW_LINE> <DEDENT> def getCar(self): <NEW_LINE> <INDENT> return self._builder.car
Director: in charge of building the product using an object of Concrete Builder
6259903ac432627299fa41cd
class Meta: <NEW_LINE> <INDENT> ordering = ['-create_time']
统一以降序排序
6259903a91af0d3eaad3b008
class QueueInput(FeedfreeInput): <NEW_LINE> <INDENT> def __init__(self, ds, queue=None): <NEW_LINE> <INDENT> assert isinstance(ds, DataFlow), ds <NEW_LINE> self.queue = queue <NEW_LINE> self.ds = ds <NEW_LINE> <DEDENT> def _size(self): <NEW_LINE> <INDENT> return self.ds.size() <NEW_LINE> <DEDENT> def _setup(self, inputs): <NEW_LINE> <INDENT> logger.info("Setting up the queue for CPU prefetching ...") <NEW_LINE> self._input_placehdrs = [v.build_placeholder_reuse() for v in inputs] <NEW_LINE> assert len(self._input_placehdrs) > 0, "QueueInput has to be used with some inputs!" <NEW_LINE> if self.queue is None: <NEW_LINE> <INDENT> self.queue = tf.FIFOQueue( 50, [x.dtype for x in self._input_placehdrs], name='input_queue') <NEW_LINE> <DEDENT> self.thread = EnqueueThread(self.queue, self.ds, self._input_placehdrs) <NEW_LINE> <DEDENT> def _get_callbacks(self): <NEW_LINE> <INDENT> from ..callbacks.concurrency import StartProcOrThread <NEW_LINE> cb = StartProcOrThread(self.thread) <NEW_LINE> cb.chief_only = False <NEW_LINE> return [cb] <NEW_LINE> <DEDENT> def _get_input_tensors(self): <NEW_LINE> <INDENT> with tf.device('/cpu:0'): <NEW_LINE> <INDENT> ret = self.queue.dequeue(name='input_deque') <NEW_LINE> if isinstance(ret, tf.Tensor): <NEW_LINE> <INDENT> ret = [ret] <NEW_LINE> <DEDENT> assert len(ret) == len(self._input_placehdrs) <NEW_LINE> for qv, v in zip(ret, self._input_placehdrs): <NEW_LINE> <INDENT> qv.set_shape(v.get_shape()) <NEW_LINE> <DEDENT> return ret
Enqueue datapoints from a DataFlow to a TF queue. And the model receives dequeued tensors.
6259903a73bcbd0ca4bcb45e
class CourseCategory(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=64, unique=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "%s" % self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = '课程大类' <NEW_LINE> verbose_name = verbose_name_plural
课程大类 eg: 前端 后端
6259903a8a43f66fc4bf3362
class DictContainer(dict): <NEW_LINE> <INDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> value = super(DictContainer, self).__getitem__(key) <NEW_LINE> if type(value) == dict: <NEW_LINE> <INDENT> value = DictContainer(value) <NEW_LINE> self[key] = value <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def __getattribute__(self, key): <NEW_LINE> <INDENT> if key in self: <NEW_LINE> <INDENT> value = super(DictContainer, self).__getitem__(key) <NEW_LINE> if type(value) == dict: <NEW_LINE> <INDENT> value = DictContainer(value) <NEW_LINE> self[key] = value <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> return super(DictContainer, self).__getattribute__(key) <NEW_LINE> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> if hasattr(self, key) and key not in self: <NEW_LINE> <INDENT> super(DictContainer, self).__setattr__(key, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self[key] = value
dict whose members can be accessed via attribute lookup. One thing to note: You can have an entry in your container that is visible instead of a standard dict method. So, for instance, you can have this happen:: >>> d = DictContainer({'keys': 'key'}) >>> d.keys() Traceback (most recent call last): File "<stdin>", line 1, in <module> TypeError: 'str' object is not callable So, as a safety precaution, you should be sure to access things via the dict methods:: >>> d = DictContainer({'keys': 'key'}) >>> dict.keys(d) ['keys'] The special methods like __getitem__(), __getattr__(), setattr(), etc that are invoked through alternate syntax rather than called directly as a method are immune to this so you can do this with no ill effects:: >>> d.__setattr__ = 1000 >>> d.__getattr__ = 10 >>> print d.__setattr__ 1000 >>> print d.__getattr__ 10
6259903a8a349b6b43687418
class ParallelForIterator(object): <NEW_LINE> <INDENT> def __init__(self, calculations, manager, pool = 1): <NEW_LINE> <INDENT> self.manager = manager <NEW_LINE> self.pooler = get_pooler( size = pool ) <NEW_LINE> self.resiter = RechargeableIterator() <NEW_LINE> self.calcsiter = iter( calculations ) <NEW_LINE> self.resume() <NEW_LINE> <DEDENT> def _ongoing(self, orderer): <NEW_LINE> <INDENT> while not self.manager.is_full(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> identifiers = self.pooler.pack( calcsiter = self.calcsiter, manager = self.manager, ) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> self.suspend() <NEW_LINE> break <NEW_LINE> <DEDENT> for identifier in identifiers: <NEW_LINE> <INDENT> orderer.job_submitted( identifier = identifier ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _terminating(self, orderer): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def next(self, orderer): <NEW_LINE> <INDENT> if not self.resiter.has_next(): <NEW_LINE> <INDENT> result = next(self.manager.results) <NEW_LINE> self.pooler.unpack( result = result, resiter = self.resiter ) <NEW_LINE> <DEDENT> self.process( orderer = orderer ) <NEW_LINE> assert self.resiter.has_next() <NEW_LINE> r = next(self.resiter) <NEW_LINE> return ( r.identifier, r ) <NEW_LINE> <DEDENT> def suspend(self): <NEW_LINE> <INDENT> self.process = self._terminating <NEW_LINE> <DEDENT> def resume(self): <NEW_LINE> <INDENT> self.process = self._ongoing
Creates an iterator that executes calls on a Manager-like object calculations - an iterable of calculations yielding ( target, args, kwargs ) tuples manager - execution manager
6259903a1d351010ab8f4cf0
class TestBaseNeoQuantitiesArrayTypes(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.base = BaseNeo() <NEW_LINE> <DEDENT> def test_quantities_array_int(self): <NEW_LINE> <INDENT> value = quantities.Quantity([1, 2, 3, 4, 5], dtype=numpy.int, units=quantities.s) <NEW_LINE> self.base.annotate(data=value) <NEW_LINE> result = {'data': value} <NEW_LINE> self.assertDictEqual(result, self.base.annotations) <NEW_LINE> <DEDENT> def test_quantities_array_uint(self): <NEW_LINE> <INDENT> value = quantities.Quantity([1, 2, 3, 4, 5], dtype=numpy.uint, units=quantities.meter) <NEW_LINE> self.base.annotate(data=value) <NEW_LINE> result = {'data': value} <NEW_LINE> self.assertDictEqual(result, self.base.annotations) <NEW_LINE> <DEDENT> def test_quantities_array_float(self): <NEW_LINE> <INDENT> value = [1, 2, 3, 4, 5] * quantities.kg <NEW_LINE> self.base.annotate(data=value) <NEW_LINE> result = {'data': value} <NEW_LINE> self.assertDictEqual(result, self.base.annotations) <NEW_LINE> <DEDENT> def test_quantities_array_str(self): <NEW_LINE> <INDENT> value = quantities.Quantity([1, 2, 3, 4, 5], dtype=numpy.str, units=quantities.meter) <NEW_LINE> self.assertRaises(ValueError, self.base.annotate, data=value)
TestCase to make sure annotations are properly checked for quantities arrays
6259903a1f5feb6acb163dc8
@attr.s <NEW_LINE> class CJKTagger(AnomalousTagger): <NEW_LINE> <INDENT> def classify(self, name: TransliteratedName) -> Optional[bool]: <NEW_LINE> <INDENT> hist = name.unicode_block_histogram <NEW_LINE> re_chinese_japanese = re.compile(r"^(ja|zh-*|lzh|wuu)") <NEW_LINE> if not re_chinese_japanese.match(name.language): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> contains_cjk = any(block.startswith("CJK") for block in hist) <NEW_LINE> return contains_cjk
Tags names as anomalous/non-anomalous based on their Hiragana/Katakana characters. Analyzes Japanese, Modern Chinese variants and Classical Chinese. Words are anomalous if they do not contain any CJK.
6259903a16aa5153ce4016c3
@dataclasses.dataclass <NEW_LINE> class AddressBookDataClass: <NEW_LINE> <INDENT> key: int <NEW_LINE> name: str <NEW_LINE> phone_number: str <NEW_LINE> address: str <NEW_LINE> email: str <NEW_LINE> birthday: str <NEW_LINE> age: int
Create dataclass with 7 fields - key (int), name (str), phone_number (str), address (str), email (str), birthday (str), age (int)
6259903a21bff66bcd723e3f
class WithLength(Generator): <NEW_LINE> <INDENT> DIRS = { 'lenmin':int, 'lenmax':int, 'length':int, 'lenvar':int } <NEW_LINE> def __init__(self, lenmin=None, lenmax=None): <NEW_LINE> <INDENT> self.lenmin, self.lenmax = lenmin, lenmax <NEW_LINE> mm = 'lenmin' in self.params or 'lenmax' in self.params <NEW_LINE> lv = 'length' in self.params or 'lenvar' in self.params <NEW_LINE> assert not (mm and lv), "%s: not both 'length'/'lenvar' & 'lenmin'/'lenmax'" % self <NEW_LINE> if self.type is not None and not mm and not 'length' in self.params: <NEW_LINE> <INDENT> clen = re.match(r'(var)?(char|bit)\((\d+)\)', self.type) <NEW_LINE> self.lenmin, self.lenmax = None, None <NEW_LINE> if clen: <NEW_LINE> <INDENT> self.lenmax = int(clen.group(3)) <NEW_LINE> if re.match('var(char|bit)', self.type): <NEW_LINE> <INDENT> if 'lenvar' in self.params: <NEW_LINE> <INDENT> self.lenmin = self.lenmax - 2 * self.params['lenvar'] <NEW_LINE> del self.params['lenvar'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.lenmin = int(self.lenmax * 3 / 4) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> assert self.params.get('lenvar', 0) == 0, "%s: non zero 'lenvar' on CHARS(*)" % self <NEW_LINE> self.lenmin = self.lenmax <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.lenmin, self.lenmax = None, None <NEW_LINE> <DEDENT> if 'lenmax' in self.params: <NEW_LINE> <INDENT> self.lenmax = self.params['lenmax'] <NEW_LINE> <DEDENT> if 'lenmin' in self.params: <NEW_LINE> <INDENT> self.lenmin = self.params['lenmin'] <NEW_LINE> <DEDENT> if 'length' in self.params or 'lenvar' in self.params: <NEW_LINE> <INDENT> length = self.params.get('length', int((lenmin+lenmax)/2)) <NEW_LINE> lenvar = self.params.get('lenvar', 0) <NEW_LINE> self.lenmin, self.lenmax = length-lenvar, length+lenvar <NEW_LINE> <DEDENT> if self.lenmin is not None and self.lenmax is None: <NEW_LINE> <INDENT> self.lenmax = int(self.lenmin * 4 / 3) <NEW_LINE> <DEDENT> elif self.lenmax is not None and self.lenmin is None: 
<NEW_LINE> <INDENT> self.lenmin = int(self.lenmax * 3 / 4) <NEW_LINE> <DEDENT> elif self.lenmin is None and self.lenmax is None: <NEW_LINE> <INDENT> self.lenmin, self.lenmax = lenmin, lenmax <NEW_LINE> <DEDENT> assert 0 <= self.lenmin and self.lenmin <= self.lenmax, "{0}: inconsistent length [{1},{2}]". format(self, self.lenmin, self.lenmax) <NEW_LINE> self.cleanParams(WithLength.DIRS)
Set {min,max}len attributes.
6259903ae76e3b2f99fd9be2
class Ipv6ExpressRouteCircuitPeeringConfig(Model): <NEW_LINE> <INDENT> _attribute_map = { 'primary_peer_address_prefix': {'key': 'primaryPeerAddressPrefix', 'type': 'str'}, 'secondary_peer_address_prefix': {'key': 'secondaryPeerAddressPrefix', 'type': 'str'}, 'microsoft_peering_config': {'key': 'microsoftPeeringConfig', 'type': 'ExpressRouteCircuitPeeringConfig'}, 'route_filter': {'key': 'routeFilter', 'type': 'RouteFilter'}, 'state': {'key': 'state', 'type': 'str'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(Ipv6ExpressRouteCircuitPeeringConfig, self).__init__(**kwargs) <NEW_LINE> self.primary_peer_address_prefix = kwargs.get('primary_peer_address_prefix', None) <NEW_LINE> self.secondary_peer_address_prefix = kwargs.get('secondary_peer_address_prefix', None) <NEW_LINE> self.microsoft_peering_config = kwargs.get('microsoft_peering_config', None) <NEW_LINE> self.route_filter = kwargs.get('route_filter', None) <NEW_LINE> self.state = kwargs.get('state', None)
Contains IPv6 peering config. :param primary_peer_address_prefix: The primary address prefix. :type primary_peer_address_prefix: str :param secondary_peer_address_prefix: The secondary address prefix. :type secondary_peer_address_prefix: str :param microsoft_peering_config: The Microsoft peering configuration. :type microsoft_peering_config: ~azure.mgmt.network.v2017_10_01.models.ExpressRouteCircuitPeeringConfig :param route_filter: The reference of the RouteFilter resource. :type route_filter: ~azure.mgmt.network.v2017_10_01.models.RouteFilter :param state: The state of peering. Possible values are: 'Disabled' and 'Enabled'. Possible values include: 'Disabled', 'Enabled' :type state: str or ~azure.mgmt.network.v2017_10_01.models.ExpressRouteCircuitPeeringState
6259903acad5886f8bdc5967
@disruptor(tactics, dtype="tCOMB", weight=1, args={'node': ('Node to combine with.', None, Node)}) <NEW_LINE> class sd_combine(SwapperDisruptor): <NEW_LINE> <INDENT> def setup(self, dm, user_input): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def get_nodes(self, node): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> nodes = [node] if node.is_term() else node.cc.frozen_node_list <NEW_LINE> if len(nodes) == 1 and not nodes[0].is_term(): <NEW_LINE> <INDENT> node = nodes[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return nodes <NEW_LINE> <DEDENT> def set_seed(self, prev_data): <NEW_LINE> <INDENT> SwapperDisruptor.set_seed(self, prev_data) <NEW_LINE> prev_content = prev_data.content <NEW_LINE> if not isinstance(prev_content, Node): <NEW_LINE> <INDENT> prev_data.add_info('DONT_PROCESS_THIS_KIND_OF_DATA') <NEW_LINE> return prev_data <NEW_LINE> <DEDENT> source = self.get_nodes(prev_content) <NEW_LINE> param = self.get_nodes(self.node) <NEW_LINE> if len(source) == 0 or len(param) == 0: <NEW_LINE> <INDENT> prev_data.add_info('DONT_PROCESS_THIS_KIND_OF_DATA') <NEW_LINE> return prev_data <NEW_LINE> <DEDENT> swap_nb = len(source) if len(source) < len(param) else len(param) <NEW_LINE> swap_nb = int(math.ceil(swap_nb / 2.0)) <NEW_LINE> random.shuffle(source) <NEW_LINE> random.shuffle(param) <NEW_LINE> for i in range(swap_nb): <NEW_LINE> <INDENT> self._swap_nodes(source[i], param[i])
Merge two nodes by swapping some roots' children
6259903a50485f2cf55dc158
class TestThumbtackExamples(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.actual_output = [] <NEW_LINE> self.db = DB() <NEW_LINE> self.longMessage = True <NEW_LINE> self.test_data_dir = '{}/data/'.format(os.path.dirname(__file__)) <NEW_LINE> <DEDENT> def test_basic_commands_1(self): <NEW_LINE> <INDENT> self.worker('basic-1') <NEW_LINE> <DEDENT> def test_basic_commands_2(self): <NEW_LINE> <INDENT> self.worker('basic-2') <NEW_LINE> <DEDENT> def test_transactional_commands_1(self): <NEW_LINE> <INDENT> self.worker('transactional-1') <NEW_LINE> <DEDENT> def test_transactional_commands_2(self): <NEW_LINE> <INDENT> self.worker('transactional-2') <NEW_LINE> <DEDENT> def test_transactional_commands_3(self): <NEW_LINE> <INDENT> self.worker('transactional-3') <NEW_LINE> <DEDENT> def test_transactional_commands_4(self): <NEW_LINE> <INDENT> self.worker('transactional-4') <NEW_LINE> <DEDENT> def worker(self, title): <NEW_LINE> <INDENT> out_file = '{}{}-out.txt'.format(self.test_data_dir, title) <NEW_LINE> expected_output = self.read_lines(out_file) <NEW_LINE> queries = self.read_lines('{}{}-in.txt'.format(self.test_data_dir, title)) <NEW_LINE> self.run_queries(queries) <NEW_LINE> self.assertEqual(len(self.actual_output), len(expected_output), 'Query results differ in length from {}'.format(out_file)) <NEW_LINE> for i, actual in enumerate(self.actual_output): <NEW_LINE> <INDENT> self.assertEqual(str(actual), expected_output[i], 'Query result line {} does not match {}'.format(i+1, out_file)) <NEW_LINE> <DEDENT> <DEDENT> def run_queries(self, queries): <NEW_LINE> <INDENT> for query in queries: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = self.db.run(query) <NEW_LINE> if result != None: <NEW_LINE> <INDENT> self.actual_output.append(result) <NEW_LINE> <DEDENT> <DEDENT> except SystemExit: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def read_lines(self, filename): <NEW_LINE> <INDENT> with open(filename, 'r') as f: 
<NEW_LINE> <INDENT> return [line.strip() for line in f if line.strip() != None]
Tests the examples at http://www.thumbtack.com/challenges.
6259903a26238365f5fadd2c
class Detect(Function): <NEW_LINE> <INDENT> def __init__(self, num_classes, bkg_label, cfg): <NEW_LINE> <INDENT> self.num_classes = num_classes <NEW_LINE> self.background_label = bkg_label <NEW_LINE> self.variance = cfg['variance'] <NEW_LINE> <DEDENT> def forward(self, predictions, prior): <NEW_LINE> <INDENT> loc, conf = predictions <NEW_LINE> if loc.size(2) > 4: <NEW_LINE> <INDENT> loc_data = loc[:,:,:-1].data <NEW_LINE> iou_data = loc[:,:,-1].data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> loc_data = loc.data <NEW_LINE> <DEDENT> conf_data = conf.data <NEW_LINE> prior_data = prior.data <NEW_LINE> num = loc_data.size(0) <NEW_LINE> self.num_priors = prior_data.size(0) <NEW_LINE> self.boxes = torch.zeros(1, self.num_priors, 4) <NEW_LINE> self.scores = torch.zeros(1, self.num_priors, self.num_classes) <NEW_LINE> if loc_data.is_cuda: <NEW_LINE> <INDENT> self.boxes = self.boxes.cuda() <NEW_LINE> self.scores = self.scores.cuda() <NEW_LINE> <DEDENT> if num == 1: <NEW_LINE> <INDENT> conf_preds = conf_data.unsqueeze(0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> conf_preds = conf_data.view(num, num_priors, self.num_classes) <NEW_LINE> self.boxes.expand_(num, self.num_priors, 4) <NEW_LINE> self.scores.expand_(num, self.num_priors, self.num_classes) <NEW_LINE> <DEDENT> for i in range(num): <NEW_LINE> <INDENT> decoded_boxes = decode(loc_data[i], prior_data, self.variance) <NEW_LINE> conf_scores = conf_preds[i].clone() <NEW_LINE> self.boxes[i] = decoded_boxes <NEW_LINE> self.scores[i] = conf_scores <NEW_LINE> <DEDENT> return self.boxes, self.scores
At test time, Detect is the final layer of SSD. Decode location preds, apply non-maximum suppression to location predictions based on conf scores and threshold to a top_k number of output predictions for both confidence score and locations.
6259903ad53ae8145f91963c
class AlertRequest(Model): <NEW_LINE> <INDENT> def __init__(self, ticker: str=None, event_type: str=None, price: float=None): <NEW_LINE> <INDENT> self.swagger_types = { 'ticker': str, 'event_type': str, 'price': float } <NEW_LINE> self.attribute_map = { 'ticker': 'ticker', 'event_type': 'eventType', 'price': 'price' } <NEW_LINE> self._ticker = ticker <NEW_LINE> self._event_type = event_type <NEW_LINE> self._price = price <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> 'AlertRequest': <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def ticker(self) -> str: <NEW_LINE> <INDENT> return self._ticker <NEW_LINE> <DEDENT> @ticker.setter <NEW_LINE> def ticker(self, ticker: str): <NEW_LINE> <INDENT> if ticker is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `ticker`, must not be `None`") <NEW_LINE> <DEDENT> self._ticker = ticker <NEW_LINE> <DEDENT> @property <NEW_LINE> def event_type(self) -> str: <NEW_LINE> <INDENT> return self._event_type <NEW_LINE> <DEDENT> @event_type.setter <NEW_LINE> def event_type(self, event_type: str): <NEW_LINE> <INDENT> allowed_values = ["upCross", "downCross"] <NEW_LINE> if event_type not in allowed_values: <NEW_LINE> <INDENT> raise ValueError( "Invalid value for `event_type` ({0}), must be one of {1}" .format(event_type, allowed_values) ) <NEW_LINE> <DEDENT> self._event_type = event_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def price(self) -> float: <NEW_LINE> <INDENT> return self._price <NEW_LINE> <DEDENT> @price.setter <NEW_LINE> def price(self, price: float): <NEW_LINE> <INDENT> if price is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `price`, must not be `None`") <NEW_LINE> <DEDENT> self._price = price
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259903a8a349b6b4368741a
class NotebookLoader(object): <NEW_LINE> <INDENT> def __init__(self, path=None): <NEW_LINE> <INDENT> self.shell = InteractiveShell.instance() <NEW_LINE> self.path = path <NEW_LINE> <DEDENT> def load_module(self, fullname): <NEW_LINE> <INDENT> path = find_notebook(fullname, self.path) <NEW_LINE> print ("importing Jupyter notebook from %s" % path) <NEW_LINE> with io.open(path, 'r', encoding='utf-8') as f: <NEW_LINE> <INDENT> nb = nbformat.read(f, 4) <NEW_LINE> <DEDENT> mod = types.ModuleType(fullname) <NEW_LINE> mod.__file__ = path <NEW_LINE> mod.__loader__ = self <NEW_LINE> mod.__dict__['get_ipython'] = get_ipython <NEW_LINE> sys.modules[fullname] = mod <NEW_LINE> save_user_ns = self.shell.user_ns <NEW_LINE> self.shell.user_ns = mod.__dict__ <NEW_LINE> try: <NEW_LINE> <INDENT> for cell in nb.cells: <NEW_LINE> <INDENT> if cell['cell_type'] == 'code' and cell['source'].startswith('# EXPORT'): <NEW_LINE> <INDENT> code = self.shell.input_transformer_manager.transform_cell(cell.source) <NEW_LINE> exec(code, mod.__dict__) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> self.shell.user_ns = save_user_ns <NEW_LINE> <DEDENT> return mod
Module Loader for Jupyter Notebooks
6259903a287bf620b6272dc1
class RecentInterface(object): <NEW_LINE> <INDENT> open_new = QtCore.pyqtSignal(str) <NEW_LINE> select = QtCore.pyqtSignal(str) <NEW_LINE> convert = QtCore.pyqtSignal(str) <NEW_LINE> def __init__(self, conf, category): <NEW_LINE> <INDENT> super(RecentInterface, self).__init__() <NEW_LINE> self.category = category <NEW_LINE> self.conf = conf <NEW_LINE> self.name = category <NEW_LINE> if self.name == 'm3database': <NEW_LINE> <INDENT> self.name = 'Misura3 database' <NEW_LINE> self.label = self.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.label = 'Recent {}'.format(self.name.capitalize()) <NEW_LINE> <DEDENT> <DEDENT> def getNameSigList(self): <NEW_LINE> <INDENT> tab = self.conf['recent_' + self.category] <NEW_LINE> logging.debug('getNameSigList', self.category, tab) <NEW_LINE> nsl = [] <NEW_LINE> for i, row in enumerate(reversed(tab[1:])): <NEW_LINE> <INDENT> sig = row[0] <NEW_LINE> name = row[0] <NEW_LINE> if self.category == 'file': <NEW_LINE> <INDENT> if row[1] != '': <NEW_LINE> <INDENT> name = row[1] + ' (' + iutils.shorten(row[0]) + ')' <NEW_LINE> <DEDENT> <DEDENT> if self.category == 'server': <NEW_LINE> <INDENT> name0 = row[0].replace('//', '/').split('/')[1] <NEW_LINE> name = row[1] + '@' + name <NEW_LINE> <DEDENT> nsl.append([name, sig, row]) <NEW_LINE> <DEDENT> return nsl <NEW_LINE> <DEDENT> def clear_recent(self): <NEW_LINE> <INDENT> logging.debug('ConfWidget: Clearing recent entries') <NEW_LINE> tname = 'recent_' + self.category <NEW_LINE> self.conf[tname] = self.conf[tname][0] <NEW_LINE> self.conf.save() <NEW_LINE> self.conf.emit(QtCore.SIGNAL('rem()')) <NEW_LINE> <DEDENT> def new(self, *a): <NEW_LINE> <INDENT> if self.category in ['server']: <NEW_LINE> <INDENT> path = QtGui.QInputDialog.getText(self, _('Specify a new server address'), _( 'Address'), text='https://IP:3880/RPC')[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d= self.conf.last_directory(self.category) <NEW_LINE> path = QtGui.QFileDialog.getOpenFileName( self.parent(), _("Open a 
new ") + self.category, d) <NEW_LINE> <DEDENT> if not path: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.open_new.emit(path) <NEW_LINE> self.select.emit(path) <NEW_LINE> <DEDENT> def data_import(self, *a): <NEW_LINE> <INDENT> d= self.conf.last_directory(self.category) <NEW_LINE> file_filter = '' <NEW_LINE> for converter in dataimport.data_importers: <NEW_LINE> <INDENT> file_filter += '{} ({});;'.format(_(converter.name), converter.file_pattern.replace(';', ' ')) <NEW_LINE> print('adding filter', file_filter) <NEW_LINE> <DEDENT> path = QtGui.QFileDialog.getOpenFileName( self.parent(), _("Data import"), d, file_filter) <NEW_LINE> self.convert.emit(path)
Common functions for recent elements management
6259903a0a366e3fb87ddbbc
class SingleFolderQuerySet(FolderQuerySet): <NEW_LINE> <INDENT> def __init__(self, account, folder): <NEW_LINE> <INDENT> from .collections import FolderCollection <NEW_LINE> folder_collection = FolderCollection(account=account, folders=[folder]) <NEW_LINE> super().__init__(folder_collection=folder_collection) <NEW_LINE> <DEDENT> def _copy_cls(self): <NEW_LINE> <INDENT> return self.__class__(account=self.folder_collection.account, folder=self.folder_collection.folders[0])
A helper class with simpler argument types
6259903a23e79379d538d6d7
class TestIR13AASN(BaseWFC3): <NEW_LINE> <INDENT> detector = 'ir' <NEW_LINE> def test_ir_13aasn(self): <NEW_LINE> <INDENT> asn_file = 'iabg21010_asn.fits' <NEW_LINE> self.get_input_file(asn_file) <NEW_LINE> subprocess.call(['calwf3.e', asn_file, '-v']) <NEW_LINE> flist = ['iabg21a1q', 'iabg21a2q', 'iabg21a3q', 'iabg21a4q', 'iabg21a5q'] <NEW_LINE> outputs = [('iabg21011_crj.fits', 'iabg21011_crj_ref.fits')] <NEW_LINE> for rn in flist: <NEW_LINE> <INDENT> outputs += [(rn + '_flt.fits', rn + '_flt_ref.fits'), (rn + '_ima.fits', rn + '_ima_ref.fits')] <NEW_LINE> <DEDENT> self.compare_outputs(outputs)
Tests for WFC3/IR - Subarray dark associations of small sizes.
6259903a30dc7b76659a0a09
class ModelMetaclass(type): <NEW_LINE> <INDENT> def __new__(cls, name,bases,attrs): <NEW_LINE> <INDENT> if name=='Model': <NEW_LINE> <INDENT> return type.__new__(cls,name,bases,attrs) <NEW_LINE> <DEDENT> tableName = attrs.get('__table__',None) or name <NEW_LINE> logging.info("found moduel:%s (table:%s)" % (name,tableName)) <NEW_LINE> mappings = dict() <NEW_LINE> fileds = [] <NEW_LINE> filed_keys = [] <NEW_LINE> primaryKey_key = None <NEW_LINE> primaryKey = None <NEW_LINE> for k,v in attrs.items(): <NEW_LINE> <INDENT> if isinstance(v,Field): <NEW_LINE> <INDENT> logging.info("found the field:(%s,%s)" % (k,v)) <NEW_LINE> mappings[k] = v <NEW_LINE> if v.primary_key: <NEW_LINE> <INDENT> if primaryKey: <NEW_LINE> <INDENT> raise StandardError('Duplicate primary key for field:%s' % k) <NEW_LINE> <DEDENT> primaryKey = v.name <NEW_LINE> primaryKey_key = k <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fileds.append(v.name) <NEW_LINE> filed_keys.append(k) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not primaryKey: <NEW_LINE> <INDENT> raise StandardError('primary key not found') <NEW_LINE> <DEDENT> for k in mappings.keys(): <NEW_LINE> <INDENT> attrs.pop(k) <NEW_LINE> <DEDENT> escaped_field = list(map(lambda f : '%s' % f,fileds)) <NEW_LINE> attrs['__mappings__'] = mappings <NEW_LINE> attrs['__table__'] = tableName <NEW_LINE> attrs['__primary_key__'] = primaryKey <NEW_LINE> attrs['__primary_key_key__'] = primaryKey_key <NEW_LINE> attrs['__fileds__'] = fileds <NEW_LINE> attrs['__fileds_keys__'] = filed_keys <NEW_LINE> attrs['__select__'] = 'select %s,%s from %s ' % (primaryKey,', '.join(escaped_field),tableName) <NEW_LINE> attrs['__insert__'] = 'insert into %s (%s,%s) values(%s)' % (tableName,', '.join(escaped_field),primaryKey,create_args(len(escaped_field)+1)) <NEW_LINE> attrs['__update__'] = 'update %s set %s where %s = ?' % (tableName,', '.join(map(lambda f : '%s = ?' % f,fileds)),primaryKey) <NEW_LINE> attrs['__delete__'] = 'delete from %s where %s = ?' 
% (tableName,primaryKey) <NEW_LINE> return type.__new__(cls,name,bases,attrs)
docstring for ModelMetaclass
6259903ab830903b9686ed65
class DMLServerJob(DMLJob): <NEW_LINE> <INDENT> def __init__( self, job_uuid, dataset_uuid, round_num, statistics ): <NEW_LINE> <INDENT> self.job_type = JobTypes.JOB_STATS.name <NEW_LINE> self.job_uuid = job_uuid <NEW_LINE> self.dataset_uuid = dataset_uuid <NEW_LINE> self.round_num = round_num <NEW_LINE> self.statistics = statistics
DML Job for submitting Statistics Holds information specifically needed for submitting statistics
6259903aec188e330fdf9a70
class Eng_AnyRunning(MultistateDerivedParameterNode, EngRunning): <NEW_LINE> <INDENT> name = 'Eng (*) Any Running' <NEW_LINE> @classmethod <NEW_LINE> def can_operate(cls, available): <NEW_LINE> <INDENT> return 'Eng (*) N1 Max' in available or 'Eng (*) N2 Max' in available or 'Eng (*) Np Max' in available or 'Eng (*) Fuel Flow Max' in available <NEW_LINE> <DEDENT> def derive(self, eng_n1=P('Eng (*) N1 Max'), eng_n2=P('Eng (*) N2 Max'), eng_np=P('Eng (*) Np Max'), fuel_flow=P('Eng (*) Fuel Flow Max')): <NEW_LINE> <INDENT> self.array = self.determine_running(eng_n1, eng_n2, eng_np, fuel_flow)
Discrete parameter describing when any engines are running. This is useful with 'Eng (*) All Running' to detect if not all engines are running.
6259903aa4f1c619b294f773
class SantaRuleQueryHandler(RuleQueryHandler): <NEW_LINE> <INDENT> MODEL_CLASS = rule_models.SantaRule
Handler for querying santa rules.
6259903a26068e7796d4db1f
class SystemRole(System): <NEW_LINE> <INDENT> NAMES = ['GUEST', 'USER', 'PUBLISHER', 'ADMIN', 'OWNER'] <NEW_LINE> SEARCH_ARGS = [ ("_links", argtype.OBJECT, Verbosity.RECORD, argmod.STRING_DEFAULTS, argmod.DEFAULT, None, None, False), ("username", argtype.STRING, Verbosity.BRIEF, argmod.STRING_DEFAULTS, argmod.DEFAULT, None, None, True), ("role", argtype.STRING, Verbosity.BRIEF, argmod.STRING_DEFAULTS, argmod.DEFAULT, None, None, True) ]
Model of a Tapis system role
6259903a8da39b475be043c7
class Axes(vtk.vtkActor): <NEW_LINE> <INDENT> def __init__(self, center=(0,0,0), color=(0,0,1) ): <NEW_LINE> <INDENT> self.src = vtk.vtkAxes() <NEW_LINE> self.mapper = vtk.vtkPolyDataMapper() <NEW_LINE> self.mapper.SetInputConnection(self.src.GetOutputPort()) <NEW_LINE> self.SetMapper(self.mapper) <NEW_LINE> self.SetColor(color) <NEW_LINE> self.SetOrigin(center) <NEW_LINE> <DEDENT> def SetColor(self, color): <NEW_LINE> <INDENT> self.GetProperty().SetColor(color) <NEW_LINE> <DEDENT> def SetOrigin(self, center=(0,0,0)): <NEW_LINE> <INDENT> self.src.SetOrigin(center[0], center[1], center[2])
axes (x,y,z)
6259903a596a897236128e78
class RemoveFileIfExists(xtask.Task): <NEW_LINE> <INDENT> def __init__(self, file_path): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.file_path = xtask.parse_path(file_path) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> if not self.file_path.exists(): <NEW_LINE> <INDENT> self._info("Doing nothing: Location {} does not exist.""".format(self.file_path)) <NEW_LINE> return True <NEW_LINE> <DEDENT> if not self.file_path.is_file(): <NEW_LINE> <INDENT> self._info("Doing nothing: Location {} is not a file.".format(self.file_path)) <NEW_LINE> return True <NEW_LINE> <DEDENT> self._info("Removing file {}".format(self.file_path)) <NEW_LINE> try: <NEW_LINE> <INDENT> self.file_path.unlink() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self._exception("An error occured during removal.") <NEW_LINE> return False <NEW_LINE> <DEDENT> return True
Remove a file if it exists.
6259903a1d351010ab8f4cf4
class QPythonConsole(QObject, MooseWidget): <NEW_LINE> <INDENT> write_output = pyqtSignal(str) <NEW_LINE> prompt_changed = pyqtSignal(str) <NEW_LINE> def __init__(self, locals=None, filename="Python Console", **kwds): <NEW_LINE> <INDENT> super(QPythonConsole, self).__init__(**kwds) <NEW_LINE> self.console = InteractiveConsole(locals, filename) <NEW_LINE> self.current_prompt = "" <NEW_LINE> self.more = False <NEW_LINE> self.prompt = "" <NEW_LINE> self._setPrompt() <NEW_LINE> sys.excepthook = sys.__excepthook__ <NEW_LINE> self.setup() <NEW_LINE> <DEDENT> def _setPrompt(self): <NEW_LINE> <INDENT> if self.more: <NEW_LINE> <INDENT> self.prompt = "... " <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.prompt = ">>> " <NEW_LINE> <DEDENT> self.prompt_changed.emit(self.prompt) <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> self.write_output.emit(data.rstrip()) <NEW_LINE> <DEDENT> @pyqtSlot(str) <NEW_LINE> def _newLine(self, line): <NEW_LINE> <INDENT> old_stdout = sys.stdout <NEW_LINE> old_stderr = sys.stderr <NEW_LINE> output = StringIO() <NEW_LINE> sys.stdout = output <NEW_LINE> sys.stderr = output <NEW_LINE> try: <NEW_LINE> <INDENT> self.more = self.console.push(str(line).decode("utf-8")) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> sys.stdout = old_stdout <NEW_LINE> sys.stderr = old_stderr <NEW_LINE> self.write_output.emit(str(output.getvalue().rstrip())) <NEW_LINE> <DEDENT> self._setPrompt()
A python interactive interpreter that emits signals for output and has a slot for input, allowing to be hooked up to Qt widgets. Signals: write_output: Some output was written to the console. Argument is the output. prompt_changed: The prompt needs to be changed. This is for line continuation.
6259903a10dbd63aa1c71dae
class FlipDimension(gui_base.GuiCommandNeedsSelection): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Draft_FlipDimension, self).__init__(name=_tr("Flip dimension")) <NEW_LINE> <DEDENT> def GetResources(self): <NEW_LINE> <INDENT> _tip = ("Flip the normal direction of the selected dimensions " "(linear, radial, angular).\n" "If other objects are selected they are ignored.") <NEW_LINE> return {'Pixmap': 'Draft_FlipDimension', 'MenuText': QT_TRANSLATE_NOOP("Draft_FlipDimension", "Flip dimension"), 'ToolTip': QT_TRANSLATE_NOOP("Draft_FlipDimension", _tip)} <NEW_LINE> <DEDENT> def Activated(self): <NEW_LINE> <INDENT> super(Draft_FlipDimension, self).Activated() <NEW_LINE> for o in Gui.Selection.getSelection(): <NEW_LINE> <INDENT> if utils.get_type(o) in ("Dimension", "AngularDimension"): <NEW_LINE> <INDENT> self.doc.openTransaction("Flip dimension") <NEW_LINE> _cmd = "App.activeDocument()." + o.Name + ".Normal" <NEW_LINE> _cmd += " = " <NEW_LINE> _cmd += "App.activeDocument()." + o.Name + ".Normal.negative()" <NEW_LINE> Gui.doCommand(_cmd) <NEW_LINE> self.doc.commitTransaction() <NEW_LINE> self.doc.recompute()
The Draft FlipDimension command definition. Flip the normal direction of the selected dimensions. It inherits `GuiCommandNeedsSelection` to set up the document and other behavior. See this class for more information.
6259903a1f5feb6acb163dcc
class RoadStep(BaseObject): <NEW_LINE> <INDENT> line = StringField('When line') <NEW_LINE> start_time = TimeField('Start of step') <NEW_LINE> end_time = TimeField('End of step') <NEW_LINE> departure = StringField('Departure station') <NEW_LINE> arrival = StringField('Arrival station') <NEW_LINE> duration = DeltaField('Duration of this step')
A step on a roadmap.
6259903a66673b3332c315d0
class FloorResultTypes(Enum): <NEW_LINE> <INDENT> INCREMENT_FLOOR = -1 <NEW_LINE> DECREMENT_FLOOR = +1 <NEW_LINE> END_GAME = auto()
Enumerates the outcomes from a dungeon floor.
6259903a379a373c97d9a202
class KNearestNeighbor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def train(self, X, y): <NEW_LINE> <INDENT> self.X_train = X <NEW_LINE> self.y_train = y <NEW_LINE> <DEDENT> def predict(self, X, k=1, num_loops=0): <NEW_LINE> <INDENT> if num_loops == 0: <NEW_LINE> <INDENT> dists = self.compute_distances_no_loops(X) <NEW_LINE> <DEDENT> elif num_loops == 1: <NEW_LINE> <INDENT> dists = self.compute_distances_one_loop(X) <NEW_LINE> <DEDENT> elif num_loops == 2: <NEW_LINE> <INDENT> dists = self.compute_distances_two_loops(X) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid value %d for num_loops' % num_loops) <NEW_LINE> <DEDENT> return self.predict_labels(dists, k=k) <NEW_LINE> <DEDENT> def compute_distances_two_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> for j in range(num_train): <NEW_LINE> <INDENT> dists[i, j] = np.linalg.norm(X[i]-self.X_train[j]) <NEW_LINE> <DEDENT> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_one_loop(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> dists[i] = np.linalg.norm(X[i]-self.X_train, axis=1) <NEW_LINE> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_no_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> dists = -2 * np.dot(X, self.X_train.T) <NEW_LINE> sumX = np.sum(np.square(X), axis=1, keepdims=True) <NEW_LINE> sumXt = np.sum(np.square(self.X_train), axis=1) <NEW_LINE> dists = np.add(dists, sumX) <NEW_LINE> dists = np.add(dists, sumXt) <NEW_LINE> dists = np.sqrt(dists) <NEW_LINE> return dists 
<NEW_LINE> <DEDENT> def predict_labels(self, dists, k=1): <NEW_LINE> <INDENT> num_test = dists.shape[0] <NEW_LINE> y_pred = np.zeros(num_test) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> closest_y = [] <NEW_LINE> k_nearest = np.argsort(dists[i])[:k] <NEW_LINE> closest_y = self.y_train[k_nearest] <NEW_LINE> y_pred[i] = np.argmax(np.bincount(closest_y)) <NEW_LINE> <DEDENT> return y_pred
a kNN classifier with L2 distance
6259903a30c21e258be999e8
class MGMSG_MOT_SET_EEPROMPARAMS(Message): <NEW_LINE> <INDENT> id = 0x4b9 <NEW_LINE> is_long_cmd = True <NEW_LINE> parameters = [('chan_ident', 'H'), ('msg_id', 'H')]
Used to save the parameter settings for the specified message. These settings may have been altered either through the various method calls or through user interaction with the GUI (specifically, by clicking on the ‘Settings’ button found in the lower right hand corner of the user interface). :param chan_ident: channel number (0x01, 0x02) :type chan_ident: int - msg_id
6259903ad164cc617582214f
class TestV1PodAntiAffinity(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testV1PodAntiAffinity(self): <NEW_LINE> <INDENT> pass
V1PodAntiAffinity unit test stubs
6259903ad99f1b3c44d06880
class BQ_Client(object): <NEW_LINE> <INDENT> def __init__(self, credentials, dataset_id, table_id, google_project_id): <NEW_LINE> <INDENT> self.credentials = credentials <NEW_LINE> self.google_project_id = google_project_id <NEW_LINE> self.dataset_id = dataset_id <NEW_LINE> self.table_id = table_id <NEW_LINE> self.client = None <NEW_LINE> self.table = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_connected(self): <NEW_LINE> <INDENT> assert False, "TODO: `is_connected` not implemented yet" <NEW_LINE> <DEDENT> def parse_credentials(self, credentials): <NEW_LINE> <INDENT> self.credentials = credentials <NEW_LINE> <DEDENT> def connect(self, to_table=True): <NEW_LINE> <INDENT> self.client = connect_to_bigquery(credentials=self.credentials, gcp_project_id=self.google_project_id) <NEW_LINE> if to_table: <NEW_LINE> <INDENT> self.table = get_bigquery_table(self.client, dataset_id=self.dataset_id, table_id=self.table_id, project_id=self.google_project_id) <NEW_LINE> <DEDENT> <DEDENT> def connect_to_table(self, table_id=None, dataset_id=None, project_id=None): <NEW_LINE> <INDENT> if table_id is None: <NEW_LINE> <INDENT> self.table_id = table_id <NEW_LINE> <DEDENT> if dataset_id is None: <NEW_LINE> <INDENT> dataset_id = self.dataset_id <NEW_LINE> <DEDENT> if project_id is None: <NEW_LINE> <INDENT> project_id = self.google_project_id <NEW_LINE> <DEDENT> self.table = get_bigquery_table(self.client, dataset_id=dataset_id, table_id=table_id, project_id=project_id) <NEW_LINE> <DEDENT> def get_table(self, table_id, dataset_id, project_id): <NEW_LINE> <INDENT> return get_bigquery_table(self.client, dataset_id=dataset_id, table_id=table_id, project_id=project_id) <NEW_LINE> <DEDENT> def create_table(self, table_id, schema): <NEW_LINE> <INDENT> dataset_ref = self.client.dataset(self.dataset_id) <NEW_LINE> table_ref = dataset_ref.table(table_id) <NEW_LINE> table = bigquery.Table(table_ref, schema=schema) <NEW_LINE> table = self.client.create_table(table) <NEW_LINE> assert 
table.table_id == table_id, "Something went wrong" <NEW_LINE> return True <NEW_LINE> <DEDENT> def store(self, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> logger.debug("PASSING processed message to database (id={})".format(data.get("id", ""))) <NEW_LINE> errors = self.client.insert_rows(self.table, [data]) <NEW_LINE> assert errors == [], "Errors in inserting data: {}".format(str(errors)) <NEW_LINE> logger.debug("SUCCESS sending a message to database (id={})".format(data.get("id", ""))) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def insert(self, data): <NEW_LINE> <INDENT> self.store(data)
Args: credentials: (str) path to json file with credentials dataset_id: (str) table_id: (str) google_project_id: (str)
6259903a8a43f66fc4bf3368
class RequestFailed(Exception): <NEW_LINE> <INDENT> def __init__(self, url, http_code, open_code, open_msg): <NEW_LINE> <INDENT> self._url = url <NEW_LINE> self._http_code = int(http_code) <NEW_LINE> self._open_code = int(open_code) <NEW_LINE> self._open_msg = open_msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr((self._url, self._http_code, self._open_code, self._open_msg))
class to represent a failed RESTful call
6259903a82261d6c527307b2
class ValidateConfigTestCase(TestCase): <NEW_LINE> <INDENT> def test_bad_object_in_array(self): <NEW_LINE> <INDENT> schema = { "type": "object", "properties": { "array_of_objs": { "type": "array", "items": {"type": "object", "required": ["r"]}, }, }, } <NEW_LINE> with self.assertRaises(ConfigError) as c: <NEW_LINE> <INDENT> validate_config(schema, {"array_of_objs": [{}]}, ("base",)) <NEW_LINE> <DEDENT> self.assertEqual(c.exception.path, ["base", "array_of_objs", "<item 0>"])
Test cases for synapse.config._util.validate_config
6259903a596a897236128e7a
class OTestpointBroker(Wrapper): <NEW_LINE> <INDENT> def register(self, registrar): <NEW_LINE> <INDENT> registrar.register_argument('loglevel', 3, 'log level - [0,3]') <NEW_LINE> registrar.register_infile_name('otestpoint-broker.xml') <NEW_LINE> registrar.register_outfile_name('otestpoint-broker.log') <NEW_LINE> <DEDENT> def run(self, ctx): <NEW_LINE> <INDENT> if not ctx.args.infile: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> argstr = '%s ' '--daemonize ' '--loglevel %d ' '--logfile %s ' '--pidfile %s ' % (ctx.args.infile, ctx.args.loglevel, ctx.args.outfile, ctx.args.default_pidfilename) <NEW_LINE> ctx.run('otestpoint-broker', argstr, genpidfile=False) <NEW_LINE> <DEDENT> def stop(self, ctx): <NEW_LINE> <INDENT> ctx.stop()
Instantiate an otestpoint-broker instance on this node.
6259903a0a366e3fb87ddbc0
class TypeValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> TYPE_UNSPECIFIED = 0 <NEW_LINE> PLAIN_TEXT = 1 <NEW_LINE> HTML = 2
Required. If the type is not set or is `TYPE_UNSPECIFIED`, returns an `INVALID_ARGUMENT` error. Values: TYPE_UNSPECIFIED: The content type is not specified. PLAIN_TEXT: Plain text HTML: HTML
6259903a4e696a045264e70f
class AddSecurityGroup(command.Command): <NEW_LINE> <INDENT> log = logging.getLogger(__name__ + ".AddSecurityGroup") <NEW_LINE> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(AddSecurityGroup, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( 'container', metavar='<container>', help='ID or name of the container to add security group.') <NEW_LINE> parser.add_argument( 'security_group', metavar='<security_group>', help='Security group ID or name for specified container. ') <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> client = _get_client(self, parsed_args) <NEW_LINE> opts = {} <NEW_LINE> opts['id'] = parsed_args.container <NEW_LINE> opts['security_group'] = parsed_args.security_group <NEW_LINE> opts = zun_utils.remove_null_parms(**opts) <NEW_LINE> try: <NEW_LINE> <INDENT> client.containers.add_security_group(**opts) <NEW_LINE> print("Request to add security group for container %s " "has been accepted." % parsed_args.container) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Add security group for container %(container)s failed: " "%(e)s" % {'container': parsed_args.container, 'e': e})
Add security group for specified container.
6259903a30dc7b76659a0a0d
class ImportVisitor(ast.NodeVisitor): <NEW_LINE> <INDENT> def __init__(self, filename, options): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.options = options or {} <NEW_LINE> self.calls = [] <NEW_LINE> <DEDENT> def visit_Call(self, node): <NEW_LINE> <INDENT> if node.func.id == 'gql': <NEW_LINE> <INDENT> self.calls.append(node) <NEW_LINE> <DEDENT> <DEDENT> def node_query(self, node): <NEW_LINE> <INDENT> if isinstance(node, ast.Call): <NEW_LINE> <INDENT> assert node.args <NEW_LINE> arg = node.args[0] <NEW_LINE> if not isinstance(arg, ast.Str): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError(type(node)) <NEW_LINE> <DEDENT> return arg.s
This class visits all the gql calls.
6259903a23e79379d538d6dc
class ICarouselTile(IListTile): <NEW_LINE> <INDENT> autoplay = schema.Bool( title=_(u'Auto play'), required=False, default=True, ) <NEW_LINE> form.no_omit(ITileEditForm, 'uuids') <NEW_LINE> form.widget(uuids=TextLinesSortableFieldWidget) <NEW_LINE> uuids = schema.List( title=_(u'Elements'), value_type=schema.TextLine(), required=False, readonly=False, )
A carousel based on the Galleria JavaScript image gallery framework.
6259903ae76e3b2f99fd9be8
class SQSDecodeError(MssapiClientError): <NEW_LINE> <INDENT> def __init__(self, reason, message): <NEW_LINE> <INDENT> super(SQSDecodeError, self).__init__(reason, message) <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'SQSDecodeError: %s' % self.reason <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'SQSDecodeError: %s' % self.reason
Error when decoding an SQS message.
6259903acad5886f8bdc596a
class Toolbar(Base): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def cast(arg): <NEW_LINE> <INDENT> return Toolbar() <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return str() <NEW_LINE> <DEDENT> @property <NEW_LINE> def parentUserInterface(self): <NEW_LINE> <INDENT> return UserInterface() <NEW_LINE> <DEDENT> @property <NEW_LINE> def controls(self): <NEW_LINE> <INDENT> return ToolbarControls()
Provides access to a toolbar in the user interface. A toolbar is a collection of toolbar controls.
6259903a30c21e258be999ea
@destructiveTest <NEW_LINE> @skipIf(salt.utils.is_windows(), 'No mtab on Windows') <NEW_LINE> @skipIf(salt.utils.is_darwin(), 'No mtab on Darwin') <NEW_LINE> class DiskModuleVirtualizationTest(ModuleCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> if os.path.isfile('/etc/mtab'): <NEW_LINE> <INDENT> shutil.move('/etc/mtab', '/tmp/mtab') <NEW_LINE> <DEDENT> <DEDENT> def test_no_mtab(self): <NEW_LINE> <INDENT> ret = self.run_function('disk.usage') <NEW_LINE> self.assertDictEqual(ret, {}) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> if os.path.isfile('/tmp/mtab'): <NEW_LINE> <INDENT> shutil.move('/tmp/mtab', '/etc/mtab')
Test to make sure we return a clean result under Docker. Refs #8976 This is factored into its own class so that we can have some certainty that setUp() and tearDown() are run.
6259903a26238365f5fadd32
class SensorAccelerometer(Element): <NEW_LINE> <INDENT> def __init__( self, site, cutoff: float=None, name: str=None, noise: float=None, user: str="0 0 ...", ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.site = site <NEW_LINE> self.cutoff = cutoff <NEW_LINE> self.name = name <NEW_LINE> self.noise = noise <NEW_LINE> self.user = user <NEW_LINE> self._attribute_names = ['site', 'cutoff', 'name', 'noise', 'user']
This element creates a 3-axis accelerometer. The sensor is mounted at a site, and has the same position and orientation as the site frame. This sensor outputs three numbers, which are the linear acceleration of the site (including gravity) in local coordinates. :param site: Site where the sensor is mounted. The accelerometer is centered and aligned with the site local frame. :param cutoff: When this value is positive, it limits the absolute value of the sensor output. It is also used to normalize the sensor output in the sensor data plots in HAPTIX and simulate.cpp. :param name: Name of the sensor. :param noise: The standard deviation of zero-mean Gaussian noise added to the sensor output, when the sensornoise attribute of flag is enabled. Sensor noise respects the sensor data type: quaternions and unit vectors remain normalized, non-negative quantities remain non-negative. :param user: See User parameters.
6259903ac432627299fa41d5
class tektronixDPO72004C(tektronixDPO70000): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.__dict__.setdefault('_instrument_id', 'DPO72004C') <NEW_LINE> super(tektronixDPO72004C, self).__init__(*args, **kwargs) <NEW_LINE> self._analog_channel_count = 4 <NEW_LINE> self._digital_channel_count = 0 <NEW_LINE> self._bandwidth = 20e9 <NEW_LINE> self._init_channels()
Tektronix DPO72004C IVI oscilloscope driver
6259903a004d5f362081f8d2
class Commit: <NEW_LINE> <INDENT> cves = dict() <NEW_LINE> def __init__(self, id, links): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.links = links <NEW_LINE> <DEDENT> def add_to_dictionary(self): <NEW_LINE> <INDENT> Commit.cves[self.id] = self.links
Class for representing commits as ID<->links
6259903a596a897236128e7c
class FrequencyBand(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=10, help_text="Ex. C, Ku, Ka, S, X") <NEW_LINE> start = models.FloatField(help_text="Start frequency of this band in GHz") <NEW_LINE> stop = models.FloatField(help_text="Stop frequency of this band in GHz") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ["start"] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_frequency_band(frequency): <NEW_LINE> <INDENT> return FrequencyBand.objects.filter(start__lt=frequency, stop__gt=frequency)[0] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{0}-Band ({1}-{2} GHz)".format(self.name, str(self.start), str(self.stop))
Represents a frequency band with start and stop range in GHz
6259903a63f4b57ef0086662
class Collections(enum.Enum): <NEW_LINE> <INDENT> PROJECTS = ( 'projects', 'projects/{projectsId}', {}, [u'projectsId'], True ) <NEW_LINE> PROJECTS_INSTANCECONFIGS = ( 'projects.instanceConfigs', '{+name}', { '': 'projects/{projectsId}/instanceConfigs/{instanceConfigsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_INSTANCES = ( 'projects.instances', '{+name}', { '': 'projects/{projectsId}/instances/{instancesId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_INSTANCES_BACKUPS = ( 'projects.instances.backups', '{+name}', { '': 'projects/{projectsId}/instances/{instancesId}/backups/' '{backupsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_INSTANCES_DATABASES = ( 'projects.instances.databases', '{+name}', { '': 'projects/{projectsId}/instances/{instancesId}/databases/' '{databasesId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_INSTANCES_DATABASES_OPERATIONS = ( 'projects.instances.databases.operations', '{+name}', { '': 'projects/{projectsId}/instances/{instancesId}/databases/' '{databasesId}/operations/{operationsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_INSTANCES_DATABASES_SESSIONS = ( 'projects.instances.databases.sessions', '{+name}', { '': 'projects/{projectsId}/instances/{instancesId}/databases/' '{databasesId}/sessions/{sessionsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_INSTANCES_OPERATIONS = ( 'projects.instances.operations', '{+name}', { '': 'projects/{projectsId}/instances/{instancesId}/operations/' '{operationsId}', }, [u'name'], True ) <NEW_LINE> def __init__(self, collection_name, path, flat_paths, params, enable_uri_parsing): <NEW_LINE> <INDENT> self.collection_name = collection_name <NEW_LINE> self.path = path <NEW_LINE> self.flat_paths = flat_paths <NEW_LINE> self.params = params <NEW_LINE> self.enable_uri_parsing = enable_uri_parsing
Collections for all supported apis.
6259903a15baa72349463177
class ComponentTests(ossie.utils.testing.ScaComponentTestCase): <NEW_LINE> <INDENT> def testScaBasicBehavior(self): <NEW_LINE> <INDENT> execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False) <NEW_LINE> execparams = dict([(x.id, any.from_any(x.value)) for x in execparams]) <NEW_LINE> self.launch(execparams) <NEW_LINE> self.assertNotEqual(self.comp, None) <NEW_LINE> self.assertEqual(self.comp.ref._non_existent(), False) <NEW_LINE> self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True) <NEW_LINE> expectedProps = [] <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True)) <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True)) <NEW_LINE> props = self.comp.query([]) <NEW_LINE> props = dict((x.id, any.from_any(x.value)) for x in props) <NEW_LINE> for expectedProp in expectedProps: <NEW_LINE> <INDENT> self.assertEquals(props.has_key(expectedProp.id), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_uses(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_usesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_provides(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_providesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a(port.get_repid()), True) <NEW_LINE> <DEDENT> self.comp.start() <NEW_LINE> self.comp.stop() <NEW_LINE> self.comp.releaseObject()
Test for all component implementations in vector_to_stream_cc
6259903a3c8af77a43b6882a
class MockWriter: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def write(self, write_bytes: bytes): <NEW_LINE> <INDENT> global _READER, _NEXT_EXCEPTION <NEW_LINE> if _NEXT_EXCEPTION is not None: <NEW_LINE> <INDENT> exception = _NEXT_EXCEPTION <NEW_LINE> _NEXT_EXCEPTION = None <NEW_LINE> raise exception <NEW_LINE> <DEDENT> if _READER is not None: <NEW_LINE> <INDENT> _READER.set_cmd(write_bytes) <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass
Mock implementation of the writer of an asyncio telnet connection.
6259903a0a366e3fb87ddbc2
class RenderPodcastTestCase(TestCase): <NEW_LINE> <INDENT> def test_render_defaults(self): <NEW_LINE> <INDENT> node = tags.RenderPodcastNode() <NEW_LINE> self.assertEquals("single", node.render_type) <NEW_LINE> self.assertEquals("episode", node.var_in_context) <NEW_LINE> self.assertEquals("sodes/renders/single.html", node.render_template) <NEW_LINE> <DEDENT> def test_init(self): <NEW_LINE> <INDENT> node = tags.RenderPodcastNode("asdf", "yoyo") <NEW_LINE> self.assertEquals("asdf", node.render_type) <NEW_LINE> self.assertEquals("yoyo", node.var_in_context) <NEW_LINE> self.assertEquals("sodes/renders/asdf.html", node.render_template) <NEW_LINE> <DEDENT> def test_template_switch(self): <NEW_LINE> <INDENT> node = tags.RenderPodcastNode("asdf") <NEW_LINE> self.assertEquals("asdf", node.render_type) <NEW_LINE> self.assertEquals("sodes/renders/asdf.html", node.render_template)
{% render_podcast %} template tag
6259903a23e79379d538d6dd
@ns.route('/', endpoint=ep_1) <NEW_LINE> class UserCollection(Resource): <NEW_LINE> <INDENT> @ns.doc('list_users') <NEW_LINE> @ns.marshal_list_with(user_model) <NEW_LINE> @ns.response(200, 'User found') <NEW_LINE> @api.marshal_with(user_model) <NEW_LINE> def get(self): <NEW_LINE> <INDENT> print_url_for('GET', ep_1) <NEW_LINE> return DAO.users, 200 <NEW_LINE> <DEDENT> @ns.doc('create_users') <NEW_LINE> @ns.expect(user_model) <NEW_LINE> @ns.marshal_with(user_model, code=201) <NEW_LINE> @ns.response(201, 'User created') <NEW_LINE> def post(self): <NEW_LINE> <INDENT> print_url_for('POST', ep_1) <NEW_LINE> return DAO.create(api.payload), 201
Retrieves a list of all users and creates a new user.
6259903a71ff763f4b5e8978
class S3SupplierModel(S3Model): <NEW_LINE> <INDENT> names = ["proc_supplier", "proc_supplier_id", ] <NEW_LINE> def model(self): <NEW_LINE> <INDENT> T = current.T <NEW_LINE> db = current.db <NEW_LINE> s3 = current.response.s3 <NEW_LINE> location_id = self.gis_location_id <NEW_LINE> tablename = "proc_supplier" <NEW_LINE> table = self.define_table(tablename, Field("name", notnull=True, unique=True, length=128, label = T("Name")), location_id(), Field("phone", label = T("Phone"), requires = IS_NULL_OR(s3_phone_requires)), Field("contact", label = T("Contact")), Field("website", label = T("Website"), requires = IS_NULL_OR(IS_URL()), represent = s3_url_represent), s3.comments(), *(s3.address_fields() + s3.meta_fields())) <NEW_LINE> s3.crud_strings[tablename] = Storage( title_create = T("Add Supplier"), title_display = T("Supplier Details"), title_list = T("List Suppliers"), title_update = T("Edit Supplier"), title_search = T("Search Suppliers"), subtitle_create = T("Add Supplier"), subtitle_list = T("Suppliers"), label_list_button = T("List Suppliers"), label_create_button = T("Add Supplier"), label_delete_button = T("Delete Supplier"), msg_record_created = T("Supplier added"), msg_record_modified = T("Supplier updated"), msg_record_deleted = T("Supplier deleted"), msg_list_empty = T("No Suppliers currently registered")) <NEW_LINE> supplier_id = S3ReusableField("supplier_id", db.proc_supplier, sortby="name", requires = IS_NULL_OR(IS_ONE_OF(db, "proc_supplier.id", "%(name)s", sort=True)), represent = self.proc_supplier_represent, label = T("Supplier"), comment=S3AddResourceLink(c="proc", f="supplier", label=T("Add Supplier")), ondelete = "RESTRICT") <NEW_LINE> self.add_component("proc_plan", proc_supplier="supplier_id") <NEW_LINE> self.add_component("asset_asset", proc_supplier="supplier_id") <NEW_LINE> return Storage( proc_supplier_id = supplier_id ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def proc_supplier_represent(id): <NEW_LINE> <INDENT> db = current.db 
<NEW_LINE> s3db = current.s3db <NEW_LINE> messages = current.messages <NEW_LINE> NONE = messages.NONE <NEW_LINE> UNKNOWN_OPT = messages.UNKNOWN_OPT <NEW_LINE> if not id: <NEW_LINE> <INDENT> return NONE <NEW_LINE> <DEDENT> table = s3db.proc_supplier <NEW_LINE> query = (table.id == id) <NEW_LINE> record = db(query).select(table.name, limitby=(0, 1)).first() <NEW_LINE> if record: <NEW_LINE> <INDENT> return record.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return UNKNOWN_OPT
Suppliers @ToDo: Are these really different enough from Orgs to be worth separating? e.g. Donor Orgs vs Purchases
6259903a9b70327d1c57ff44
class GanadoresTemporadasNode(template.Node): <NEW_LINE> <INDENT> def __init__(self, num_fotos, varname): <NEW_LINE> <INDENT> self.num_fotos = int(num_fotos) <NEW_LINE> self.varname = varname <NEW_LINE> <DEDENT> def render(self, context): <NEW_LINE> <INDENT> temporadas = Temporada.objects.all() <NEW_LINE> if temporadas: <NEW_LINE> <INDENT> tmp=temporadas[0].get_last_temporada() <NEW_LINE> if tmp: <NEW_LINE> <INDENT> usuarios = [] <NEW_LINE> for foto in tmp.foto_set.all()[:self.num_fotos]: <NEW_LINE> <INDENT> usuarios.append(foto.codigo_user) <NEW_LINE> <DEDENT> context[self.varname] = usuarios <NEW_LINE> <DEDENT> <DEDENT> return ''
retorna un numero de usuarios, aquellos que ganaron la temporada mas cercana, igual a num_fotos
6259903ae76e3b2f99fd9bea
class Critic(nn.Module): <NEW_LINE> <INDENT> def __init__(self, state_size, action_size, seed, fcs1_units=128, fc2_units=32, fc3_units=32): <NEW_LINE> <INDENT> super(Critic, self).__init__() <NEW_LINE> self.seed = torch.manual_seed(seed) <NEW_LINE> self.fcs1 = nn.Linear(state_size, fcs1_units) <NEW_LINE> self.fc2 = nn.Linear(fcs1_units+action_size, fc2_units) <NEW_LINE> self.fc3 = nn.Linear(fc2_units, fc3_units) <NEW_LINE> self.fc4 = nn.Linear(fc3_units, 1) <NEW_LINE> self.reset_parameters() <NEW_LINE> <DEDENT> def reset_parameters(self): <NEW_LINE> <INDENT> self.fcs1.weight.data.uniform_(*hidden_init(self.fcs1)) <NEW_LINE> self.fc2.weight.data.uniform_(*hidden_init(self.fc2)) <NEW_LINE> self.fc3.weight.data.uniform_(*hidden_init(self.fc3)) <NEW_LINE> self.fc4.weight.data.uniform_(-3e-3, 3e-3) <NEW_LINE> <DEDENT> def forward(self, state, action): <NEW_LINE> <INDENT> xs = F.leaky_relu(self.fcs1(state)) <NEW_LINE> x = torch.cat((xs, action), dim=1) <NEW_LINE> x = F.leaky_relu(self.fc2(x)) <NEW_LINE> x = F.leaky_relu(self.fc3(x)) <NEW_LINE> return self.fc4(x)
Critic (Value) Model.
6259903a73bcbd0ca4bcb467
class Batch(object): <NEW_LINE> <INDENT> def __init__(self, example_list, hps, vocab): <NEW_LINE> <INDENT> self.pad_id = vocab.word2id(data.PAD_TOKEN) <NEW_LINE> self.init_encoder_seq(example_list, hps) <NEW_LINE> self.init_decoder_seq(example_list, hps) <NEW_LINE> self.store_orig_strings(example_list) <NEW_LINE> <DEDENT> def init_encoder_seq(self, example_list, hps): <NEW_LINE> <INDENT> max_enc_seq_len = max([ex.enc_len for ex in example_list]) <NEW_LINE> for ex in example_list: <NEW_LINE> <INDENT> ex.pad_encoder_input(max_enc_seq_len, self.pad_id) <NEW_LINE> <DEDENT> self.enc_batch = np.zeros((hps.batch_size, max_enc_seq_len), dtype=np.int32) <NEW_LINE> self.enc_lens = np.zeros((hps.batch_size), dtype=np.int32) <NEW_LINE> self.enc_padding_mask = np.zeros((hps.batch_size, max_enc_seq_len), dtype=np.float32) <NEW_LINE> for i, ex in enumerate(example_list): <NEW_LINE> <INDENT> self.enc_batch[i, :] = ex.enc_input[:] <NEW_LINE> self.enc_lens[i] = ex.enc_len <NEW_LINE> for j in xrange(ex.enc_len): <NEW_LINE> <INDENT> self.enc_padding_mask[i][j] = 1 <NEW_LINE> <DEDENT> <DEDENT> if hps.pointer_gen: <NEW_LINE> <INDENT> self.max_art_oovs = max([len(ex.article_oovs) for ex in example_list]) <NEW_LINE> self.art_oovs = [ex.article_oovs for ex in example_list] <NEW_LINE> self.enc_batch_extend_vocab = np.zeros((hps.batch_size, max_enc_seq_len), dtype=np.int32) <NEW_LINE> for i, ex in enumerate(example_list): <NEW_LINE> <INDENT> self.enc_batch_extend_vocab[i, :] = ex.enc_input_extend_vocab[:] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def init_decoder_seq(self, example_list, hps): <NEW_LINE> <INDENT> for ex in example_list: <NEW_LINE> <INDENT> ex.pad_decoder_inp_targ(hps.max_dec_steps, self.pad_id) <NEW_LINE> <DEDENT> self.dec_batch = np.zeros((hps.batch_size, hps.max_dec_steps), dtype=np.int32) <NEW_LINE> self.target_batch = np.zeros((hps.batch_size, hps.max_dec_steps), dtype=np.int32) <NEW_LINE> self.dec_padding_mask = np.zeros((hps.batch_size, hps.max_dec_steps), dtype=np.float32) 
<NEW_LINE> for i, ex in enumerate(example_list): <NEW_LINE> <INDENT> self.dec_batch[i, :] = ex.dec_input[:] <NEW_LINE> self.target_batch[i, :] = ex.target[:] <NEW_LINE> for j in xrange(ex.dec_len): <NEW_LINE> <INDENT> self.dec_padding_mask[i][j] = 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def store_orig_strings(self, example_list): <NEW_LINE> <INDENT> self.original_articles = [ex.original_article for ex in example_list] <NEW_LINE> self.original_abstracts = [ex.original_abstract for ex in example_list] <NEW_LINE> self.original_abstracts_sents = [ex.original_abstract_sents for ex in example_list]
Class representing a minibatch of train/val/test examples for text summarization.
6259903a91af0d3eaad3b012