code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class IdNamesPJContainer(PJContainer): <NEW_LINE> <INDENT> _pj_mapping_key = None <NEW_LINE> def __init__(self, table=None, parent_key=None): <NEW_LINE> <INDENT> super(IdNamesPJContainer, self).__init__(table, parent_key) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _pj_remove_documents(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def _cache_get_key(self, id, doc): <NEW_LINE> <INDENT> return id <NEW_LINE> <DEDENT> def _locate(self, obj, id, doc): <NEW_LINE> <INDENT> obj._v_name = id <NEW_LINE> obj._v_parent = self <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> obj = self._cache.get(key) <NEW_LINE> if obj is not None: <NEW_LINE> <INDENT> return obj <NEW_LINE> <DEDENT> if self._cache_complete: <NEW_LINE> <INDENT> raise KeyError(key) <NEW_LINE> <DEDENT> filter = self._pj_get_items_filter() <NEW_LINE> obj = self.find_one(filter, id=key) <NEW_LINE> if obj is None: <NEW_LINE> <INDENT> raise KeyError(key) <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> if self._cache_complete: <NEW_LINE> <INDENT> return key in self._cache <NEW_LINE> <DEDENT> return self.raw_find_one(id=key) is not None <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> if self._cache_complete: <NEW_LINE> <INDENT> return iter(self._cache) <NEW_LINE> <DEDENT> result = self.raw_find(None) <NEW_LINE> return iter(str(row['id']) for row in result) <NEW_LINE> <DEDENT> def iteritems(self): <NEW_LINE> <INDENT> if self._cache_complete: <NEW_LINE> <INDENT> return self._cache.items() <NEW_LINE> <DEDENT> result = self.raw_find(self._pj_get_items_filter()) <NEW_LINE> items = [(row['id'], self._load_one(row['id'], row['data'])) for row in result] <NEW_LINE> self._cache_mark_complete() <NEW_LINE> return iter(items) <NEW_LINE> <DEDENT> def _real_setitem(self, key, value): <NEW_LINE> <INDENT> if value._p_oid is None: <NEW_LINE> <INDENT> self._pj_jar.insert(value, key) <NEW_LINE> <DEDENT> super(IdNamesPJContainer, 
self)._real_setitem(key, value) | A container that uses the PostGreSQL table UID as the name/key. | 6259903707d97122c4217e06 |
class ChangedSingleDateNotification(TimeBasedInfoMixin, TransitionMessage): <NEW_LINE> <INDENT> subject = pgettext('email', 'The details of activity "{title}" have changed') <NEW_LINE> template = 'messages/changed_single_date' <NEW_LINE> context = { 'title': 'activity.title', } <NEW_LINE> @property <NEW_LINE> def action_link(self): <NEW_LINE> <INDENT> return self.obj.activity.get_absolute_url() <NEW_LINE> <DEDENT> action_title = pgettext('email', 'View activity') <NEW_LINE> def get_recipients(self): <NEW_LINE> <INDENT> return [ participant.user for participant in self.obj.activity.accepted_participants ] | Notification when slot details (date, time or location) changed for a single date activity | 62599037a4f1c619b294f73c |
class DeferredDrawCallbackProperty(CallbackProperty): <NEW_LINE> <INDENT> @defer_draw <NEW_LINE> def notify(self, *args, **kwargs): <NEW_LINE> <INDENT> super(DeferredDrawCallbackProperty, self).notify(*args, **kwargs) | A callback property where drawing is deferred until
after notify has called all callback functions. | 62599037d53ae8145f9195ce |
@benchmark.Enabled('android') <NEW_LINE> class RendererMemoryBlinkMemoryMobile(_MemoryInfra): <NEW_LINE> <INDENT> page_set = page_sets.BlinkMemoryMobilePageSet <NEW_LINE> def SetExtraBrowserOptions(self, options): <NEW_LINE> <INDENT> super(RendererMemoryBlinkMemoryMobile, self).SetExtraBrowserOptions( options) <NEW_LINE> options.AppendExtraBrowserArgs([ '--ignore-certificate-errors', ]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def Name(cls): <NEW_LINE> <INDENT> return 'memory.blink_memory_mobile' <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def ValueCanBeAddedPredicate(cls, value, is_first_result): <NEW_LINE> <INDENT> return (not _IGNORED_STATS_RE.search(value.name) and 'renderer_processes' in value.name) | Timeline based benchmark for measuring memory consumption on mobile
sites on which blink's memory consumption is relatively high. | 625990378a349b6b436873ab |
class CephBrokerRsp(object): <NEW_LINE> <INDENT> def __init__(self, encoded_rsp): <NEW_LINE> <INDENT> self.api_version = None <NEW_LINE> self.rsp = json.loads(encoded_rsp) <NEW_LINE> <DEDENT> @property <NEW_LINE> def exit_code(self): <NEW_LINE> <INDENT> return self.rsp.get('exit-code') <NEW_LINE> <DEDENT> @property <NEW_LINE> def exit_msg(self): <NEW_LINE> <INDENT> return self.rsp.get('stderr') | Ceph broker response.
Response is json-decoded and contents provided as methods/properties.
The API is versioned and defaults to version 1. | 6259903716aa5153ce401657 |
@cbpi.step <NEW_LINE> class BoilStep(StepBase): <NEW_LINE> <INDENT> temp = Property.Number("Temperature", configurable=True, default_value=100, description="Target temperature for boiling") <NEW_LINE> kettle = StepProperty.Kettle("Kettle", description="Kettle in which the boiling step takes place") <NEW_LINE> timer = Property.Number("Timer in Minutes", configurable=True, default_value=90, description="Timer is started when target temperature is reached") <NEW_LINE> hop_1 = Property.Number("Hop 1 Addition", configurable=True, description="Fist Hop alert") <NEW_LINE> hop_1_added = Property.Number("",default_value=None) <NEW_LINE> hop_2 = Property.Number("Hop 2 Addition", configurable=True, description="Second Hop alert") <NEW_LINE> hop_2_added = Property.Number("", default_value=None) <NEW_LINE> hop_3 = Property.Number("Hop 3 Addition", configurable=True) <NEW_LINE> hop_3_added = Property.Number("", default_value=None, description="Second Hop alert") <NEW_LINE> def init(self): <NEW_LINE> <INDENT> self.set_target_temp(self.temp, self.kettle) <NEW_LINE> <DEDENT> @cbpi.action("Start Timer Now") <NEW_LINE> def start(self): <NEW_LINE> <INDENT> if self.is_timer_finished() is None: <NEW_LINE> <INDENT> self.start_timer(int(self.timer) * 60) <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.stop_timer() <NEW_LINE> self.set_target_temp(self.temp, self.kettle) <NEW_LINE> <DEDENT> def finish(self): <NEW_LINE> <INDENT> self.set_target_temp(0, self.kettle) <NEW_LINE> <DEDENT> def check_hop_timer(self, number, value): <NEW_LINE> <INDENT> if self.__getattribute__("hop_%s_added" % number) is not True and time.time() > ( self.timer_end - (int(self.timer) * 60 - int(value) * 60)): <NEW_LINE> <INDENT> self.__setattr__("hop_%s_added" % number, True) <NEW_LINE> self.notify("Hop Alert", "Please add Hop %s" % number, timeout=None) <NEW_LINE> <DEDENT> <DEDENT> def execute(self): <NEW_LINE> <INDENT> if self.get_kettle_temp(self.kettle) >= float(self.temp): <NEW_LINE> 
<INDENT> if self.is_timer_finished() is None: <NEW_LINE> <INDENT> self.start_timer(int(self.timer) * 60) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.check_hop_timer(1, self.hop_1) <NEW_LINE> self.check_hop_timer(2, self.hop_2) <NEW_LINE> self.check_hop_timer(3, self.hop_3) <NEW_LINE> <DEDENT> <DEDENT> if self.is_timer_finished() == True: <NEW_LINE> <INDENT> self.next() | Just put the decorator @cbpi.step on top of a method | 6259903730c21e258be99978 |
class LoanEvents(Base): <NEW_LINE> <INDENT> __tablename__ = 'loan_events' <NEW_LINE> event_types = ('fee', 'interest', 'payment') <NEW_LINE> event_type_enum = Enum(*event_types, name="event_type") <NEW_LINE> tx_id = Column(Integer, primary_key=True, autoincrement=True) <NEW_LINE> loan_id = Column(Integer, primary_key=False, autoincrement=False, unique=False) <NEW_LINE> event_type = Column(event_type_enum) <NEW_LINE> post_date = Column(Date) <NEW_LINE> amt = Column(Numeric) | Loan events model | 62599037b830903b9686ed2e |
@patch.dict('os.environ', {'GENIE_BYPASS_HOME_CONFIG': '1'}) <NEW_LINE> class TestingHiveJob(unittest.TestCase): <NEW_LINE> <INDENT> def test_default_command_tag(self): <NEW_LINE> <INDENT> job = pygenie.jobs.HiveJob() <NEW_LINE> assert_equals( job.get('default_command_tags'), [u'type:hive'] ) <NEW_LINE> <DEDENT> def test_cmd_args_explicit(self): <NEW_LINE> <INDENT> job = pygenie.jobs.HiveJob() .command_arguments('explicitly stating command args') .script('select * from something') .property('source', 'tester') .property_file('properties.hive') <NEW_LINE> assert_equals( job.cmd_args, u'explicitly stating command args' ) <NEW_LINE> <DEDENT> def test_cmd_args_constructed_script_code(self): <NEW_LINE> <INDENT> job = pygenie.jobs.HiveJob() .script('select * from something') .parameter('foo', 'fizz') .parameter('bar', 'buzz') .hiveconf('hconf1', 'h1') .property('prop1', 'p1') .property('prop2', 'p2') .property_file('properties_1.hive') .property_file('properties_2.hive') <NEW_LINE> assert_equals( job.cmd_args, " ".join([ "-i properties_1.hive -i properties_2.hive", "--hiveconf hconf1=h1 --hiveconf prop1=p1 --hiveconf prop2=p2", "-i _hive_parameters.txt", "-f script.hive" ]) ) <NEW_LINE> <DEDENT> @patch('pygenie.jobs.hive.is_file') <NEW_LINE> def test_cmd_args_constructed_script_file(self, is_file): <NEW_LINE> <INDENT> is_file.return_value = True <NEW_LINE> job = pygenie.jobs.HiveJob() .script('/Users/hive/test.hql') .parameter('hello', 'hi') .parameter('goodbye', 'bye') .property('p1', 'v1') .property('p2', 'v2') .property_file('props_1.hive') .property_file('props_2.hive') <NEW_LINE> assert_equals( " ".join([ "-i props_1.hive -i props_2.hive", "--hiveconf p1=v1 --hiveconf p2=v2", "-i _hive_parameters.txt", "-f test.hql" ]), job.cmd_args ) <NEW_LINE> <DEDENT> @patch('pygenie.jobs.hive.is_file') <NEW_LINE> def test_cmd_args_post_cmd_args(self, is_file): <NEW_LINE> <INDENT> is_file.return_value = True <NEW_LINE> job = pygenie.jobs.HiveJob() .script('/Users/hive/test.hql') 
.parameter('hello', 'hi') .parameter('goodbye', 'bye') .property('p1', 'v1') .property('p2', 'v2') .post_cmd_args('a') .post_cmd_args(['a', 'b', 'c']) .post_cmd_args('d e f') <NEW_LINE> assert_equals( " ".join([ "--hiveconf p1=v1 --hiveconf p2=v2", "-i _hive_parameters.txt", "-f test.hql", "a b c d e f" ]), job.cmd_args ) | Test HiveJob. | 62599037d10714528d69ef40 |
class OutboundNatRule(SubResource): <NEW_LINE> <INDENT> _validation = { 'backend_address_pool': {'required': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'allocated_outbound_ports': {'key': 'properties.allocatedOutboundPorts', 'type': 'int'}, 'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[SubResource]'}, 'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } <NEW_LINE> def __init__(self, backend_address_pool, id=None, allocated_outbound_ports=None, frontend_ip_configurations=None, provisioning_state=None, name=None, etag=None): <NEW_LINE> <INDENT> super(OutboundNatRule, self).__init__(id=id) <NEW_LINE> self.allocated_outbound_ports = allocated_outbound_ports <NEW_LINE> self.frontend_ip_configurations = frontend_ip_configurations <NEW_LINE> self.backend_address_pool = backend_address_pool <NEW_LINE> self.provisioning_state = provisioning_state <NEW_LINE> self.name = name <NEW_LINE> self.etag = etag | Outbound NAT pool of the load balancer.
:param id: Resource ID.
:type id: str
:param allocated_outbound_ports: The number of outbound ports to be used
for NAT.
:type allocated_outbound_ports: int
:param frontend_ip_configurations: The Frontend IP addresses of the load
balancer.
:type frontend_ip_configurations:
list[~azure.mgmt.network.v2016_12_01.models.SubResource]
:param backend_address_pool: A reference to a pool of DIPs. Outbound
traffic is randomly load balanced across IPs in the backend IPs.
:type backend_address_pool:
~azure.mgmt.network.v2016_12_01.models.SubResource
:param provisioning_state: Gets the provisioning state of the PublicIP
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param name: The name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str | 62599037507cdc57c63a5f04 |
class ImageDownloadViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Image.objects.all() <NEW_LINE> serializer_class = ImageSerializer <NEW_LINE> def list(self, request, **kwargs): <NEW_LINE> <INDENT> images = request.query_params.getlist('images', []) <NEW_LINE> import sys <NEW_LINE> from json import dumps <NEW_LINE> sys.stderr.write('labels in query: %s\n' % dumps(images)) <NEW_LINE> if len(images) > 0: <NEW_LINE> <INDENT> queryset = Image.objects.filter(id__in=images) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> queryset = Image.objects.none() <NEW_LINE> <DEDENT> filenames = [] <NEW_LINE> for image in list(queryset): <NEW_LINE> <INDENT> filenames.append(image.file.path) <NEW_LINE> <DEDENT> sys.stderr.write('filenames: %s\n' % dumps(filenames)) <NEW_LINE> zip_subdir = "files" <NEW_LINE> zip_filename = "%s.zip" % zip_subdir <NEW_LINE> s = StringIO.StringIO() <NEW_LINE> zf = zipfile.ZipFile(s, "w") <NEW_LINE> for fpath in filenames: <NEW_LINE> <INDENT> fdir, fname = os.path.split(fpath) <NEW_LINE> zip_path = os.path.join(zip_subdir, fname) <NEW_LINE> zf.write(fpath, zip_path) <NEW_LINE> <DEDENT> zf.close() <NEW_LINE> resp = HttpResponse(s.getvalue(), content_type = "application/x-zip-compressed") <NEW_LINE> resp['Content-Disposition'] = 'attachment; filename=%s' % zip_filename <NEW_LINE> return resp | API endpoint that allows users to be viewed or edited. | 625990379b70327d1c57fef0 |
class Solution: <NEW_LINE> <INDENT> def levelOrder(self, root): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> res, q_lvl = [], [root] <NEW_LINE> while q_lvl != []: <NEW_LINE> <INDENT> pre, tmp = [], [] <NEW_LINE> for node in q_lvl: <NEW_LINE> <INDENT> pre.append(node.val) <NEW_LINE> l, r = node.left, node.right <NEW_LINE> if l: <NEW_LINE> <INDENT> tmp.append(l) <NEW_LINE> <DEDENT> if r: <NEW_LINE> <INDENT> tmp.append(r) <NEW_LINE> <DEDENT> <DEDENT> res.append(pre) <NEW_LINE> q_lvl = tmp <NEW_LINE> <DEDENT> return res | @param root: The root of binary tree.
@return: Level order in a list of lists of integers | 62599037711fe17d825e1551 |
class TracDataset(Dataset): <NEW_LINE> <INDENT> def __init__( self, data_df: pd.DataFrame, tokenizer: Callable, max_seq_length:int = None, ): <NEW_LINE> <INDENT> self.data_df = data_df <NEW_LINE> self.tokenizer = tokenizer <NEW_LINE> if max_seq_length is None: <NEW_LINE> <INDENT> self._max_seq_length = self._get_max_len(data_df,tokenizer) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._max_seq_length = max_seq_length <NEW_LINE> <DEDENT> self.train_df = self.data_df[self.data_df.split == 'train'] <NEW_LINE> self.train_size = len(self.train_df) <NEW_LINE> self.val_df = self.data_df[self.data_df.split == 'dev'] <NEW_LINE> self.val_size = len(self.val_df) <NEW_LINE> self.test_df = self.data_df[self.data_df.split == 'test'] <NEW_LINE> self.test_size = len(self.test_df) <NEW_LINE> self._simple_vectorizer = SimpleVectorizer(tokenizer, self._max_seq_length) <NEW_LINE> self._lookup_dict = { 'train': (self.train_df, self.train_size), 'val': (self.val_df, self.val_size), 'test': (self.test_df, self.test_size) } <NEW_LINE> self.set_split('train') <NEW_LINE> <DEDENT> def _get_max_len(self,data_df: pd.DataFrame, tokenizer: Callable): <NEW_LINE> <INDENT> len_func = lambda x: len(self.tokenizer.encode_plus(x)['input_ids']) <NEW_LINE> max_len = data_df.text.map(len_func).max() <NEW_LINE> return max_len <NEW_LINE> <DEDENT> def set_split(self, split="train"): <NEW_LINE> <INDENT> self._target_split = split <NEW_LINE> self._target_df, self._target_size = self._lookup_dict[split] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._target_size <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> row = self._target_df.iloc[index] <NEW_LINE> indices, attention_masks = self._simple_vectorizer.vectorize(row.text) <NEW_LINE> label = row.label <NEW_LINE> return {'x_data': indices, 'x_attn_mask': attention_masks, 'x_index': index, 'y_target': label} <NEW_LINE> <DEDENT> def get_num_batches(self, batch_size): <NEW_LINE> <INDENT> return len(self) // 
batch_size | PyTorch dataset class | 62599037796e427e5384f8e7 |
class XMLReader(object): <NEW_LINE> <INDENT> def __init__(self, xml_path): <NEW_LINE> <INDENT> self.logger = get_logger(self.__class__.__module__) <NEW_LINE> self.xml_path = xml_path <NEW_LINE> <DEDENT> def read_metadata(self): <NEW_LINE> <INDENT> self.logger.debug("Starting to parse XML file " + self.xml_path) <NEW_LINE> root_node = self._find_root() <NEW_LINE> result_data = self._parse_xml_node_to_dict(root_node) <NEW_LINE> return GenericMetaData(result_data) <NEW_LINE> <DEDENT> def read_only_element(self, tag_name): <NEW_LINE> <INDENT> root_node = self._find_root() <NEW_LINE> gid_node = root_node.getElementsByTagName(tag_name) <NEW_LINE> if gid_node is None: <NEW_LINE> <INDENT> self.logger.warning("Invalid XML, missing " + tag_name + " tag!!!") <NEW_LINE> return None <NEW_LINE> <DEDENT> return self.get_node_text(gid_node[0]) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_node_text(node): <NEW_LINE> <INDENT> for text_child in node.childNodes: <NEW_LINE> <INDENT> if text_child.nodeType == Node.TEXT_NODE: <NEW_LINE> <INDENT> return str(text_child.data).lstrip().rstrip() <NEW_LINE> <DEDENT> <DEDENT> return '' <NEW_LINE> <DEDENT> def parse_xml_content_to_dict(self, xml_data): <NEW_LINE> <INDENT> root = xml.dom.minidom.parseString(xml_data) <NEW_LINE> root = root.childNodes[-1] <NEW_LINE> return self._parse_xml_node_to_dict(root) <NEW_LINE> <DEDENT> def _find_root(self): <NEW_LINE> <INDENT> doc_xml = xml.dom.minidom.parse(self.xml_path) <NEW_LINE> for child_node in doc_xml.childNodes: <NEW_LINE> <INDENT> if child_node.nodeType == Node.ELEMENT_NODE: <NEW_LINE> <INDENT> return child_node <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def _parse_xml_node_to_dict(self, root_node): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for node in root_node.childNodes: <NEW_LINE> <INDENT> if node.nodeType == Node.ELEMENT_NODE: <NEW_LINE> <INDENT> result[node.nodeName] = self.get_node_text(node) <NEW_LINE> result_meta = self._parse_xml_node_to_dict(node) <NEW_LINE> 
if result_meta.keys() is not None and len(result_meta.keys()) > 0: <NEW_LINE> <INDENT> result[node.nodeName] = result_meta <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result | Reader for XML with meta-data on generic entities (e.g. Project, Operation). | 6259903773bcbd0ca4bcb3f3 |
class DeviceTypeCategory(db.Model): <NEW_LINE> <INDENT> friendly_name = "Device Type Category" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String(50), unique=True) <NEW_LINE> devicetypes = db.relationship('DeviceType', backref='devicetypecategory', lazy='dynamic') <NEW_LINE> def save(self): <NEW_LINE> <INDENT> db.session.add(self) <NEW_LINE> try: <NEW_LINE> <INDENT> db.session.commit() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> db.session.rollback() <NEW_LINE> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.name | Represents the Category of a DeviceType
Example : Firewall, Router, etc... | 62599037b5575c28eb71357f |
class User(base.Base, a.ColRepr, a.Updatable): <NEW_LINE> <INDENT> __tablename__ = "users" <NEW_LINE> id = s.Column("id", s.Integer, primary_key=True) <NEW_LINE> sub = s.Column("sub", s.String, nullable=False) <NEW_LINE> name = s.Column("name", s.String, nullable=False) <NEW_LINE> nickname = s.Column("nickname", s.String, nullable=False) <NEW_LINE> picture = s.Column("picture", s.String, nullable=False) <NEW_LINE> email = s.Column("email", s.String, nullable=False) <NEW_LINE> email_verified = s.Column("email_verified", s.String, nullable=False) <NEW_LINE> updated_at = s.Column("updated_at", s.String, nullable=False) <NEW_LINE> uploads = o.relationship("File", back_populates="uploader") <NEW_LINE> audit_logs = o.relationship("AuditLog", back_populates="user") <NEW_LINE> __table_args__ = ( ) | An user, as returned by OAuth2. | 62599037a8ecb0332587238a |
class VirtualNodeGroup(model_base.BASEV2, models_v2.HasId): <NEW_LINE> <INDENT> __tablename__ = 'virtual_node_group' <NEW_LINE> VirtualNodeGroupType = sa.Enum(cst.VSWITCH_GROUP, cst.VROUTER_GROUP, cst.BRIDGE_GROUP, name='virtual_node_group_type') <NEW_LINE> description = sa.Column(sa.String(255), nullable=True) <NEW_LINE> type = sa.Column(VirtualNodeGroupType, nullable=False) <NEW_LINE> physical_node_attached = sa.Column(sa.String(36), sa.ForeignKey('physical_node.id'), nullable=False) <NEW_LINE> virtual_network = sa.Column(sa.String(36), sa.ForeignKey('virtual_network.id'), nullable=False) <NEW_LINE> def __init__(self, id=None, description=None, type=None, physical_node_attached=None, virtual_network=None): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.description = description <NEW_LINE> self.type = type <NEW_LINE> self.physical_node_attached = physical_node_attached <NEW_LINE> self.virtual_network = virtual_network | Is a group of virtual nodes that belongs to the same physical node.
The association between the Virtual Node Group and Physical Node is
necessary for the mapping between virtual and physical networks. | 62599037c432627299fa4163 |
class State(BaseModel, Base): <NEW_LINE> <INDENT> __tablename__ = "states" <NEW_LINE> name = Column(String(128), nullable=False) <NEW_LINE> cities = relationship("City", backref="state", cascade="delete") <NEW_LINE> if getenv("HBNB_TYPE_STORAGE") != "db": <NEW_LINE> <INDENT> @property <NEW_LINE> def cities(self): <NEW_LINE> <INDENT> city_list = [] <NEW_LINE> for city in list(models.storage.all(City).values()): <NEW_LINE> <INDENT> if city.state_id == self.id: <NEW_LINE> <INDENT> city_list.append(city) <NEW_LINE> <DEDENT> <DEDENT> return city_list | Represents a state for a MySQL database.
Inherits from SQLAlchemy Base and links to the MySQL table states.
Attributes:
__tablename__ (str): The name of the MySQL table to store States.
name (sqlalchemy String): The name of the State.
cities (sqlalchemy relationship): The State-City relationship. | 625990371d351010ab8f4c86 |
class AddWatcher(Command): <NEW_LINE> <INDENT> name = "add" <NEW_LINE> options = [('', 'start', False, "start immediately the watcher")] <NEW_LINE> properties = ['name', 'cmd'] <NEW_LINE> def message(self, *args, **opts): <NEW_LINE> <INDENT> if len(args) < 2: <NEW_LINE> <INDENT> raise ArgumentError("Invalid number of arguments") <NEW_LINE> <DEDENT> return self.make_message(name=args[0], cmd=" ".join(args[1:]), start=opts.get('start', False)) <NEW_LINE> <DEDENT> def execute(self, arbiter, props): <NEW_LINE> <INDENT> options = props.get('options', {}) <NEW_LINE> watcher = arbiter.add_watcher(props['name'], props['cmd'], args=props.get('args'), **options) <NEW_LINE> if props.get('start', False): <NEW_LINE> <INDENT> watcher.start() <NEW_LINE> <DEDENT> <DEDENT> def validate(self, props): <NEW_LINE> <INDENT> super(AddWatcher, self).validate(props) <NEW_LINE> if 'options' in props: <NEW_LINE> <INDENT> options = props.get('options') <NEW_LINE> if not isinstance(options, dict): <NEW_LINE> <INDENT> raise MessageError("'options' property should be an object") <NEW_LINE> <DEDENT> for key, val in props['options'].items(): <NEW_LINE> <INDENT> validate_option(key, val) | Add a watcher
=============
This command add a watcher dynamically to a arbiter.
ZMQ Message
-----------
::
{
"command": "add",
"properties": {
"cmd": "/path/to/commandline --option"
"name": "nameofwatcher"
"args": [],
"options": {},
"start": false
}
}
A message contains 2 properties:
- cmd: Full command line to execute in a process
- args: array, arguments passed to the command (optional)
- name: name of watcher
- options: options of a watcher
- start: start the watcher after the creation
The response return a status "ok".
Command line
------------
::
$ circusctl add [--start] <name> <cmd>
Options
+++++++
- <name>: name of the watcher to create
- <cmd>: full command line to execute in a process
- --start: start the watcher immediately | 62599037be8e80087fbc01eb |
class ValueMetadata: <NEW_LINE> <INDENT> def __init__(self, data: MetaDataType) -> None: <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self) -> str: <NEW_LINE> <INDENT> return self.data["type"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def readable(self) -> Optional[bool]: <NEW_LINE> <INDENT> return self.data.get("readable") <NEW_LINE> <DEDENT> @property <NEW_LINE> def writeable(self) -> Optional[bool]: <NEW_LINE> <INDENT> return self.data.get("writeable") <NEW_LINE> <DEDENT> @property <NEW_LINE> def label(self) -> Optional[str]: <NEW_LINE> <INDENT> return self.data.get("label") <NEW_LINE> <DEDENT> @property <NEW_LINE> def description(self) -> Optional[str]: <NEW_LINE> <INDENT> return self.data.get("description") <NEW_LINE> <DEDENT> @property <NEW_LINE> def min(self) -> Optional[int]: <NEW_LINE> <INDENT> return self.data.get("min") <NEW_LINE> <DEDENT> @property <NEW_LINE> def max(self) -> Optional[int]: <NEW_LINE> <INDENT> return self.data.get("max") <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit(self) -> Optional[str]: <NEW_LINE> <INDENT> return self.data.get("unit") <NEW_LINE> <DEDENT> @property <NEW_LINE> def states(self) -> dict: <NEW_LINE> <INDENT> return self.data.get("states", {}) <NEW_LINE> <DEDENT> @property <NEW_LINE> def cc_specific(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> return self.data.get("ccSpecific", {}) <NEW_LINE> <DEDENT> @property <NEW_LINE> def value_change_options(self) -> List[str]: <NEW_LINE> <INDENT> return self.data.get("valueChangeOptions", []) <NEW_LINE> <DEDENT> @property <NEW_LINE> def allow_manual_entry(self) -> Optional[bool]: <NEW_LINE> <INDENT> return self.data.get("allowManualEntry") <NEW_LINE> <DEDENT> @property <NEW_LINE> def value_size(self) -> Optional[int]: <NEW_LINE> <INDENT> return self.data.get("valueSize") <NEW_LINE> <DEDENT> def update(self, data: MetaDataType) -> None: <NEW_LINE> <INDENT> self.data.update(data) | Represent metadata on a value instance. 
| 6259903726238365f5fadcc1 |
class ExpressionTree: <NEW_LINE> <INDENT> def __init__(self, exp_str): <NEW_LINE> <INDENT> self._exp_tree = None <NEW_LINE> self._buildTree(exp_str) <NEW_LINE> <DEDENT> def evaluate(self, var_dict): <NEW_LINE> <INDENT> return self._evalTree(self._exp_tree, var_dict) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._buildString(self._exp_tree) <NEW_LINE> <DEDENT> def _buildString(self, tree_node): <NEW_LINE> <INDENT> if tree_node.left is None and tree_node.right is None: <NEW_LINE> <INDENT> return str(tree_node.element) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> exp_str = '(' <NEW_LINE> exp_str += self._buildString(tree_node.left) <NEW_LINE> exp_str += str(tree_node.element) <NEW_LINE> exp_str += self._buildString(tree_node.right) <NEW_LINE> exp_str += ')' <NEW_LINE> return exp_str <NEW_LINE> <DEDENT> <DEDENT> def _evalTree(self, subtree, var_dict): <NEW_LINE> <INDENT> if subtree.left is None and subtree.right is None: <NEW_LINE> <INDENT> if '0' <= subtree.element <= '9': <NEW_LINE> <INDENT> return int(subtree.element) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert subtree.element in var_dict, 'Invalid variable.' <NEW_LINE> return var_dict[subtree.element] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> lvalue = self._evalTree(subtree.left, var_dict) <NEW_LINE> rvalue = self._evalTree(subtree.right, var_dict) <NEW_LINE> print(subtree.element) <NEW_LINE> return self._computeOp(lvalue, subtree.element, rvalue) <NEW_LINE> <DEDENT> <DEDENT> def _computeOp(self, left, op, right): <NEW_LINE> <INDENT> op_func = { '+': lambda left, right: left + right, '-': lambda left, right: left - right, '*': lambda left, right: left * right, '/': lambda left, right: left / right, '%': lambda left, right: left % right, } <NEW_LINE> assert op in op_func, 'Invalid operator.' 
<NEW_LINE> return op_func[op](left, right) <NEW_LINE> <DEDENT> def _buildTree(self, exp_str): <NEW_LINE> <INDENT> expQ = Queue() <NEW_LINE> for token in exp_str: <NEW_LINE> <INDENT> expQ.put(token) <NEW_LINE> <DEDENT> self._exp_tree = _ExpTreeNode(None) <NEW_LINE> self._recBuildTree(self._exp_tree, expQ) <NEW_LINE> <DEDENT> def _recBuildTree(self, cur_node, expQ): <NEW_LINE> <INDENT> token = expQ.get() <NEW_LINE> if token == '(': <NEW_LINE> <INDENT> cur_node.left = _ExpTreeNode(None) <NEW_LINE> self._recBuildTree(cur_node.left, expQ) <NEW_LINE> cur_node.element = expQ.get() <NEW_LINE> cur_node.right = _ExpTreeNode(None) <NEW_LINE> self._recBuildTree(cur_node.right, expQ) <NEW_LINE> expQ.get() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cur_node.element = token | 表达式树
操作符存储在内节点操作数存储在叶子节点的二叉树
*
/ + -
/ \ / 9 3 8 4
(9 + 3) * (8 - 4)
Expression Tree Abstract Data Type, 可以实现二元操作符
ExpressionTree(exp_str): user string as constructor param
evaluate(var_dict): evaluates the expression and returns the numeric result
toString(): constructs and returns a string represention of the expression
Usage:
vars = {'a': 5, 'b': 12}
exp_tree = ExpressionTree("a/(b-3)")
print('The result =', exp_tree.evaluate(vars)) | 62599037a4f1c619b294f73d |
class SelectWithDisable(object): <NEW_LINE> <INDENT> def __init__(self, multiple=False): <NEW_LINE> <INDENT> self.multiple = multiple <NEW_LINE> <DEDENT> def __call__(self, field, **kwargs): <NEW_LINE> <INDENT> kwargs.setdefault('id', field.id) <NEW_LINE> if self.multiple: <NEW_LINE> <INDENT> kwargs['multiple'] = 'multiple' <NEW_LINE> <DEDENT> html = [u'<select %s>' % html_params(name=field.name, **kwargs)] <NEW_LINE> for val, label, selected, disabled in field.iter_choices(): <NEW_LINE> <INDENT> html.append(self.render_option(val, label, selected, disabled)) <NEW_LINE> <DEDENT> html.append(u'</select>') <NEW_LINE> return HTMLString(u''.join(html)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def render_option(cls, value, label, selected, disabled): <NEW_LINE> <INDENT> options = {'value': value} <NEW_LINE> if selected: <NEW_LINE> <INDENT> options['selected'] = u'selected' <NEW_LINE> <DEDENT> if disabled: <NEW_LINE> <INDENT> options['disabled'] = u'disabled' <NEW_LINE> <DEDENT> return HTMLString(u'<option %s>%s</option>' % (html_params(**options), escape(unicode(label)))) | Renders a select field.
If `multiple` is True, then the `size` property should be specified on
rendering to make the field useful.
The field must provide an `iter_choices()` method which the widget will
call on rendering; this method must yield tuples of
`(value, label, selected, disabled)`. | 6259903721bff66bcd723dd4 |
class ContiguousConstraint(BaseConstraint):
    """Multi-hour sections may only be scheduled across
    contiguous timeblocks in the same room."""

    required = True

    def check_schedule(self, schedule):
        """Scan every scheduled section; return a ConstraintViolation for the
        first one spanning noncontiguous timeslots or two rooms, else None."""
        for sec in schedule.class_sections.itervalues():
            slots = sec.assigned_roomslots
            if len(slots) <= 1:
                continue
            expected_room = slots[0].room
            previous = slots[0].timeslot
            for rs in slots[1:]:
                if not util.contiguous(previous, rs.timeslot):
                    return ConstraintViolation(
                        self.__class__.__name__,
                        "Section id {} had noncontiguous rooms"
                        .format(sec.id))
                if rs.room.name != expected_room.name:
                    return ConstraintViolation(
                        self.__class__.__name__,
                        "Section id {} is in 2 different rooms"
                        .format(sec.id))
                previous = rs.timeslot
        return None

    def check_schedule_section(self, section, start_roomslot, schedule):
        """Check whether scheduling `section` at `start_roomslot` would
        occupy a contiguous run of timeslots in that room."""
        room = start_roomslot.room
        slots = room.get_roomslots_by_duration(start_roomslot,
                                               section.duration)
        if not slots:
            return ConstraintViolation(
                self.__class__.__name__,
                "Section won't be assigned any roomslots")
        previous = start_roomslot.timeslot
        for rs in slots[1:]:
            if not util.contiguous(previous, rs.timeslot):
                return ConstraintViolation(
                    self.__class__.__name__,
                    "Insufficiently many contiguous timeslots to schedule")
            previous = rs.timeslot
        return None

    def check_move_section(self, section, start_roomslot, schedule):
        # Moving is constrained exactly like fresh scheduling.
        return self.check_schedule_section(section, start_roomslot, schedule)

    def check_unschedule_section(self, section, schedule):
        # Unscheduling can never violate contiguity.
        return None

    def check_swap_sections(self, section1, section2, schedule):
        # Swaps exchange already-valid slot runs, so nothing to check.
        return None
class RFSarsaStockTrader(StockTrader):
    """A stock trader whose internal learner is a random forest sarsa matrix."""

    def __init__(self, name: str, utility: float, exchange: StockExchange,
                 actions: tuple, epsilon: float, learning_rate: float,
                 discount_factor: float):
        # Base class handles trader bookkeeping; we only attach the learner.
        super().__init__(name, utility, exchange)
        self.learner = RandomForestSarsaMatrix(
            actions, epsilon, learning_rate, discount_factor)
class Item(models.Model):
    """Data-definition (sample) model: declares one field per common type,
    plus audit columns maintained by application code.

    Reference — Django model field reference:
    https://docs.djangoproject.com/ja/2.1/ref/models/fields/
    """

    # Sample 2: free-form memo text.
    sample_2 = models.TextField(
        verbose_name='サンプル項目2 メモ',
        blank=True,
        null=True,
    )
    # Sample 3: integer.
    sample_3 = models.IntegerField(
        verbose_name='サンプル項目3 整数',
        blank=True,
        null=True,
    )
    # Sample 4: floating point.
    sample_4 = models.FloatField(
        verbose_name='サンプル項目4 浮動小数点',
        blank=True,
        null=True,
    )
    # Sample 5: fixed-point decimal (up to 5 digits, 2 after the point).
    sample_5 = models.DecimalField(
        verbose_name='サンプル項目5 固定小数点',
        max_digits=5,
        decimal_places=2,
        blank=True,
        null=True,
    )
    # Sample 6: boolean (required — no blank/null).
    sample_6 = models.BooleanField(
        verbose_name='サンプル項目6 ブール値',
    )
    # Sample 7: date.
    sample_7 = models.DateField(
        verbose_name='サンプル項目7 日付',
        blank=True,
        null=True,
    )
    # Sample 8: datetime.
    sample_8 = models.DateTimeField(
        verbose_name='サンプル項目8 日時',
        blank=True,
        null=True,
    )
    # Sample 9: fixed choice list.
    sample_9_choice = (
        (1, '選択1'),
        (2, '選択2'),
        (3, '選択3'),
    )
    sample_9 = models.IntegerField(
        verbose_name='サンプル項目9_選択肢(固定)',
        choices=sample_9_choice,
        blank=True,
        null=True,
    )
    # Sample 10: choice backed by the User master table.
    sample_10 = models.ForeignKey(
        User,
        verbose_name='サンプル項目10_選択肢(マスタ連動)',
        blank=True,
        null=True,
        related_name='sample_10',
        on_delete=models.SET_NULL,
    )
    # Audit columns: filled in by application code, not user-editable.
    created_by = models.ForeignKey(
        User,
        verbose_name='作成者',
        blank=True,
        null=True,
        related_name='CreatedBy',
        on_delete=models.SET_NULL,
        editable=False,
    )
    created_at = models.DateTimeField(
        verbose_name='作成時間',
        blank=True,
        null=True,
        editable=False,
    )
    updated_by = models.ForeignKey(
        User,
        verbose_name='更新者',
        blank=True,
        null=True,
        related_name='UpdatedBy',
        on_delete=models.SET_NULL,
        editable=False,
    )
    updated_at = models.DateTimeField(
        verbose_name='更新時間',
        blank=True,
        null=True,
        editable=False,
    )

    def __str__(self):
        # The memo field doubles as the display name.
        return self.sample_2

    class Meta:
        verbose_name = 'サンプル'
        verbose_name_plural = 'サンプル'
class MMUCache(Cache): <NEW_LINE> <INDENT> def __init__( self, size: str, assoc: Optional[int] = 4, tag_latency: Optional[int] = 1, data_latency: Optional[int] = 1, response_latency: Optional[int] = 1, mshrs: Optional[int] = 20, tgts_per_mshr: Optional[int] = 12, writeback_clean: Optional[bool] = True, prefetcher: BasePrefetcher = StridePrefetcher(), ): <NEW_LINE> <INDENT> super(MMUCache, self).__init__() <NEW_LINE> self.size = size <NEW_LINE> self.assoc = assoc <NEW_LINE> self.tag_latency = tag_latency <NEW_LINE> self.data_latency = data_latency <NEW_LINE> self.response_latency = response_latency <NEW_LINE> self.mshrs = mshrs <NEW_LINE> self.tgts_per_mshr = tgts_per_mshr <NEW_LINE> self.writeback_clean = writeback_clean <NEW_LINE> self.prefetcher = prefetcher | A simple Memory Management Unit (MMU) cache with default values. | 625990376e29344779b017be |
class GetInlineGameHighScores(Object):
    """Returns game high scores and some part of the high score table in
    the range of the specified user; for bots only.

    Attributes:
        ID (:obj:`str`): ``GetInlineGameHighScores``

    Args:
        inline_message_id (:obj:`str`):
            Inline message identifier

        user_id (:obj:`int`):
            User identifier

    Returns:
        GameHighScores

    Raises:
        :class:`telegram.Error`
    """

    ID = "getInlineGameHighScores"

    def __init__(self, inline_message_id, user_id, extra=None, **kwargs):
        # Extra kwargs are accepted for API compatibility but discarded.
        self.extra = extra
        self.inline_message_id = inline_message_id
        self.user_id = user_id

    @staticmethod
    def read(q: dict, *args) -> "GetInlineGameHighScores":
        """Build an instance from a raw response dict."""
        return GetInlineGameHighScores(
            q.get('inline_message_id'), q.get('user_id'))
class Pool(OnnxOpConverter):
    """A helper class for pool op converters."""

    name = ""

    @classmethod
    def _impl_v1(cls, inputs, attr, params):
        """Convert an ONNX pooling node: resolve padding, then apply AttrCvt."""
        converter, data = cls._run_calculation(inputs, attr, params)
        return converter([data], attr, params)

    @classmethod
    def _run_calculation(cls, inputs, attr, params):
        """Resolve `auto_pad`/`storage_order` and build the attribute
        converter; returns (AttrCvt, data).  `data` may be wrapped with
        explicit padding for max-style pooling under SAME_* auto-padding."""
        data = inputs[0]
        input_shape = infer_shape(data)
        input_dtype = infer_type(data).checked_type.dtype
        ndim = len(input_shape)

        if "auto_pad" in attr:
            attr["auto_pad"] = attr["auto_pad"].decode("utf-8")
            if attr["auto_pad"] in ("SAME_UPPER", "SAME_LOWER"):
                if cls.name == "avg_pool":
                    # Average pooling: explicit per-spatial-axis pad pairs.
                    pad_pairs = [
                        get_pad_pair(
                            input_shape[2 + axis],
                            attr["kernel_shape"][axis],
                            attr.get("strides", [1] * ndim)[axis],
                            attr["auto_pad"],
                        )
                        for axis in range(ndim - 2)
                    ]
                    # Interleave (before..., after...) as ONNX expects.
                    attr["pads"] = tuple(
                        v for pair in zip(*pad_pairs) for v in pair
                    )
                else:
                    # Max-style pooling: pad with the dtype minimum so the
                    # padding value never wins the max.
                    dtype_info = np.iinfo if "int" in input_dtype else np.finfo
                    pad_val = dtype_info(np.dtype(input_dtype)).min
                    data = autopad(
                        data,
                        attr.get("strides", [1] * (ndim - 2)),
                        attr["kernel_shape"],
                        [1] * ndim,
                        pad_value=pad_val,
                        mode=attr["auto_pad"],
                    )
            elif attr["auto_pad"] == "VALID":
                attr["pads"] = tuple(0 for _ in range(ndim - 2))
            elif attr["auto_pad"] == "NOTSET":
                pass
            else:
                msg = 'Value {} in attribute "auto_pad" of operator {} is invalid.'
                raise tvm.error.OpAttributeInvalid(
                    msg.format(attr["auto_pad"], cls.name))
            attr.pop("auto_pad")

        if "storage_order" in attr:
            attr["layout"] = onnx_storage_order2layout(
                attr["storage_order"], dims=(ndim - 2), op_name=cls.name
            )
        else:
            attr["layout"] = onnx_default_layout(dims=(ndim - 2),
                                                 op_name=cls.name)

        converter = AttrCvt(
            op_name=dimension_picker(cls.name),
            transforms={
                "kernel_shape": "pool_size",
                "pads": ("padding", 0),
                "dilations": ("dilation", 1),
            },
            ignores=["storage_order"],
            custom_check=dimension_constraint(),
        )
        return converter, data
class Deposit(BalanceManager): <NEW_LINE> <INDENT> def __init__(self, client, denomination, transfer_all, amount, testing=0): <NEW_LINE> <INDENT> super().__init__(client, denomination, transfer_all, amount, testing=testing, chains={'home'}) | Deposit only version of Balance Manager | 62599037287bf620b6272d56 |
class ConnectedComponents(object):
    """Gives the connected components of an undirected graph.

    Uses DFS to find connected components.
    """

    def __init__(self, G, order=None):
        # One component id per vertex, filled in by the DFS pass.
        self.id = np.zeros([G.get_v()], dtype=int)
        # Default visiting order is simply 0..V-1.
        self.order = range(G.get_v()) if order is None else order
        searcher = DFS(G, self.order, self.id)
        self.count = searcher.get_count()

    def are_connected(self, v, w):
        """True iff vertices v and w share a component id."""
        return self.id[v] == self.id[w]

    def get_count(self):
        """Number of connected components found."""
        return self.count

    def get_id(self, v):
        """Component id of vertex v."""
        return self.id[v]
class HiveTableWrapper(object):
    """A wrapper class for using `with` guards with tables created through
    Hive, ensuring deletion even if an exception occurs."""

    def __init__(self, hive, table_name, table_spec):
        self.hive = hive
        self.table_name = table_name
        self.table_spec = table_spec

    def __enter__(self):
        # Create on entry; `if not exists` keeps this idempotent.
        self.hive.run_stmt_in_hive(
            'create table if not exists %s %s'
            % (self.table_name, self.table_spec))
        return self.table_name

    def __exit__(self, typ, value, traceback):
        # Always drop the table, even when the guarded block raised.
        self.hive.run_stmt_in_hive(
            'drop table if exists %s' % self.table_name)
class AgentNotifierApi(sg_rpc.SecurityGroupAgentRpcApiMixin):
    """Agent side of the linux bridge rpc API.

    API version history:
        1.0 - Initial version.
        1.1 - Added get_active_networks_info, create_dhcp_port,
              and update_dhcp_port methods.
    """

    def __init__(self, topic):
        self.topic = topic
        target = messaging.Target(topic=topic, version='1.0')
        self.client = n_rpc.get_client(target)
        # Fan-out topics for network deletion and port updates.
        self.topic_network_delete = topics.get_topic_name(
            topic, topics.NETWORK, topics.DELETE)
        self.topic_port_update = topics.get_topic_name(
            topic, topics.PORT, topics.UPDATE)

    def network_delete(self, context, network_id):
        """Broadcast deletion of a network to all agents."""
        ctxt = self.client.prepare(topic=self.topic_network_delete,
                                   fanout=True)
        ctxt.cast(context, 'network_delete', network_id=network_id)

    def port_update(self, context, port, physical_network, vlan_id):
        """Broadcast a port update to all agents."""
        ctxt = self.client.prepare(topic=self.topic_port_update, fanout=True)
        ctxt.cast(context, 'port_update', port=port,
                  physical_network=physical_network, vlan_id=vlan_id)
class One(KeyCode):
    """The "1" key.

    Shift+1 is ! on american keyboards.
    """

    pass
class GWCSTypeMeta(ExtensionTypeMeta):
    """Keeps track of `GWCSType` subclasses that are created so that they
    can be stored automatically by astropy extensions for ASDF."""

    def __new__(mcls, name, bases, attrs):
        cls = super(GWCSTypeMeta, mcls).__new__(mcls, name, bases, attrs)
        # Register only types belonging to the stsci.edu/gwcs standard.
        if cls.organization == 'stsci.edu' and cls.standard == 'gwcs':
            _gwcs_types.add(cls)
        return cls
class EditPetForm(FlaskForm): <NEW_LINE> <INDENT> photo_url = StringField('Pet photo URL', validators=[InputRequired()]) <NEW_LINE> notes = TextAreaField("Notes about the pet:") <NEW_LINE> available = BooleanField("Available?") | Form to edit pet | 62599037b57a9660fecd2be9 |
class PARity(SCPINode, SCPIQuery, SCPISet):
    """`SYSTem:COMMunicate:SERial:PARity
    <http://www.rohde-schwarz.com/webhelp/smb100a_webhelp/Content/007a87c5bc084b7e.htm#ID_ed84c7fa71cc127d0a00206a0162bb19-92f11b0771cc127d0a00206a012bc823-en-US>`_

    Arguments: EVEN, NONE, ODD, ONE, ZERO
    """

    __slots__ = ()
    _cmd = "PARity"
    args = ["EVEN", "NONE", "ODD", "ONE", "ZERO"]
class XMLGenerator(object):
    """A XML Generator based on lxml.etree.

    Args:
        f (file-like object): the output stream
        pretty (bool): if the output XML should be nicely broken into
            multiple lines and indented
        skip_stringify (bool): assumes the dicts passed into `element` and
            `element_leaf` already contain string values
    """

    def __init__(self, f, pretty=False, skip_stringify=False):
        self.__f = f
        self.__pretty = pretty
        self.__skip_stringify = skip_stringify

    def __enter__(self):
        # Open the incremental lxml writer; depth drives pretty-printing.
        self.__context = xmlfile(self.__f, encoding='ascii')
        self._xf = self.__context.__enter__()
        self._depth = 0
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        return self.__context.__exit__(exc_type, exc_value, traceback)

    def _stringify(self, d):
        """Convert attribute values to str ('{:g}' for floats), unless
        stringification was disabled at construction."""
        if self.__skip_stringify:
            return d
        return {
            key: ('{:g}'.format(val) if isinstance(val, float) else str(val))
            for key, val in iteritems(d)
        }

    def _indent(self):
        # Tabs proportional to depth; only in pretty mode, inside the root.
        if self.__pretty and self._depth > 0:
            self._xf.write('\t' * self._depth)

    def _newline(self):
        if self.__pretty and self._depth > 0:
            self._xf.write('\n')

    class __XMLElementContextManager(object):
        """Context manager for one nested element; maintains the
        generator's depth bookkeeping around the lxml element context."""

        def __init__(self, generator, tag, attrs):
            self.__gen = generator
            self.__tag = tag
            self.__attrs = attrs

        def __enter__(self):
            gen = self.__gen
            gen._indent()
            gen._depth += 1
            self.__context = gen._xf.element(self.__tag, self.__attrs)
            entered = self.__context.__enter__()
            gen._newline()
            return entered

        def __exit__(self, exc_type, exc_value, traceback):
            gen = self.__gen
            gen._depth -= 1
            gen._indent()
            result = self.__context.__exit__(exc_type, exc_value, traceback)
            gen._newline()
            return result

    def element(self, tag, attrs=None):
        """Open a nested element; use as a `with` block."""
        return self.__XMLElementContextManager(
            self, tag, self._stringify(attrs or {}))

    def element_leaf(self, tag, attrs=None, text=None):
        """Emit a complete leaf element with optional text content."""
        self._indent()
        with self._xf.element(tag, self._stringify(attrs or {})):
            if text:
                self._xf.write(text)
        self._newline()
class TriggerSingleton(QObject): <NEW_LINE> <INDENT> TRIGGER_FAST = 1 <NEW_LINE> TRIGGER_MED = 4 <NEW_LINE> TRIGGER_SLOW = 8 <NEW_LINE> ALL_TRIGGERS = ( TRIGGER_FAST, TRIGGER_MED, TRIGGER_SLOW ) <NEW_LINE> triggered = pyqtSignal(int) <NEW_LINE> instance = None <NEW_LINE> def __init__(self, parent=None): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self._count = -1 <NEW_LINE> self._refreshTimer = QTimer(self) <NEW_LINE> self._refreshTimer.setSingleShot(False) <NEW_LINE> self._refreshTimer.setInterval(200) <NEW_LINE> self._refreshTimer.start() <NEW_LINE> self._refreshTimer.timeout.connect( self._onTriggered ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get(): <NEW_LINE> <INDENT> if TriggerSingleton.instance == None: <NEW_LINE> <INDENT> TriggerSingleton.instance = TriggerSingleton() <NEW_LINE> WorkerSingleton.get().registerSingleton( TriggerSingleton.instance ) <NEW_LINE> <DEDENT> return TriggerSingleton.instance <NEW_LINE> <DEDENT> @pyqtSlot() <NEW_LINE> def _onTriggered(self): <NEW_LINE> <INDENT> self._count += 1 <NEW_LINE> for t in TriggerSingleton.ALL_TRIGGERS: <NEW_LINE> <INDENT> if self._count % t == 0: <NEW_LINE> <INDENT> self.triggered.emit(t) | A timer to sync updates of sensors | 6259903750485f2cf55dc0ed |
class ConverterProxy(Converter):
    """Proxy that wraps a `Converter` and provides default
    implementations of optional methods."""

    def __init__(self, delegate, extension):
        if not isinstance(delegate, Converter):
            raise TypeError("Converter must implement the asdf.extension.Converter interface")
        self._delegate = delegate
        self._extension = extension
        self._class_name = get_class_name(delegate)

        # Expand the delegate's tag patterns against the extension's tags.
        matched = set()
        for pattern in delegate.tags:
            if not isinstance(pattern, str):
                raise TypeError("Converter property 'tags' must contain str values")
            matched.update(
                t.tag_uri for t in extension.tags
                if uri_match(pattern, t.tag_uri)
            )

        # Multiple tags require the delegate to disambiguate via select_tag.
        if len(matched) > 1 and not hasattr(delegate, "select_tag"):
            raise RuntimeError(
                "Converter handles multiple tags for this extension, "
                "but does not implement a select_tag method."
            )

        self._tags = sorted(matched)

        self._types = []
        for candidate in delegate.types:
            if not isinstance(candidate, (str, type)):
                raise TypeError("Converter property 'types' must contain str or type values")
            self._types.append(candidate)

    @property
    def tags(self):
        """Relevant tag URIs, sorted."""
        return self._tags

    @property
    def types(self):
        """Types (or type names) handled by the delegate."""
        return self._types

    def select_tag(self, obj, ctx):
        """Delegate tag selection, falling back to the single known tag."""
        chooser = getattr(self._delegate, "select_tag", None)
        if chooser is None:
            return self._tags[0]
        return chooser(obj, self._tags, ctx)

    def to_yaml_tree(self, obj, tag, ctx):
        return self._delegate.to_yaml_tree(obj, tag, ctx)

    def from_yaml_tree(self, node, tag, ctx):
        return self._delegate.from_yaml_tree(node, tag, ctx)

    @property
    def delegate(self):
        return self._delegate

    @property
    def extension(self):
        return self._extension

    @property
    def package_name(self):
        return self.extension.package_name

    @property
    def package_version(self):
        return self.extension.package_version

    @property
    def class_name(self):
        return self._class_name

    def __eq__(self, other):
        # Identity of both delegate and extension defines proxy equality.
        if not isinstance(other, ConverterProxy):
            return False
        return (other.delegate is self.delegate
                and other.extension is self.extension)

    def __hash__(self):
        return hash((id(self.delegate), id(self.extension)))

    def __repr__(self):
        if self.package_name is None:
            package_description = "(none)"
        else:
            package_description = "{}=={}".format(
                self.package_name, self.package_version)
        return "<ConverterProxy class: {} package: {}>".format(
            self.class_name, package_description,
        )
class OutputChart(Chart): <NEW_LINE> <INDENT> def __init__(self, workload_info, zipped_size=1000, title="", description="", label="", axis_label=""): <NEW_LINE> <INDENT> super(OutputChart, self).__init__(workload_info, zipped_size) <NEW_LINE> self.title = title <NEW_LINE> self.description = description <NEW_LINE> self.label = label <NEW_LINE> self.axis_label = axis_label <NEW_LINE> <DEDENT> def _map_iteration_values(self, iteration): <NEW_LINE> <INDENT> return iteration <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> return {"title": self.title, "description": self.description, "widget": self.widget, "data": super(OutputChart, self).render(), "label": self.label, "axis_label": self.axis_label} | Base class for charts related to scenario output. | 6259903750485f2cf55dc0ee |
class RateLimitException(TraktException): <NEW_LINE> <INDENT> http_code = 429 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.message = 'Rate Limit Exceeded' | TraktException type to be raised when a 429 return code is recieved | 62599037dc8b845886d54721 |
class UserData(object):
    """Encapsulates user information associated with each Item instance.

    Works by holding a reference to the owning Item and writing the
    attached values into its ``properties`` dict.
    """

    def __init__(self, data, parent):
        """Initialize from a raw ``data`` dict.

        :param data: dict that may contain 'rating' and/or 'wantto' entries
        :param parent: owning object exposing a ``properties`` dict
        """
        self.parent = parent
        self.rating = None
        self.wantto = None
        if 'rating' in data:
            self.addRating(data['rating'])
        # BUG FIX: the original checked for the misspelled key 'wannto'
        # and routed the payload to addRating(); check 'wantto' and route
        # it to addWantTo().
        if 'wantto' in data:
            self.addWantTo(data['wantto'])

    def addRating(self, rating: dict):
        """Attach rating info and copy it onto the parent's properties.

        ``rating['dateOf']`` must hold 'y', 'm', 'd' keys; it is converted
        to a ``datetime.date`` stored under ``parent.properties['dateOf']``
        (overwriting the raw dict copied in the loop above).
        """
        self.rating = rating
        for key, val in self.rating.items():
            self.parent.properties[key] = val
        date_of = self.rating['dateOf']
        self.parent.properties['dateOf'] = date(
            year=date_of['y'],
            month=date_of['m'],
            day=date_of['d'],
        )

    def addWantTo(self, wantto):
        """Attach want-to info and copy it onto the parent's properties."""
        self.wantto = wantto
        for key, val in self.wantto.items():
            self.parent.properties[key] = val

    def hasRating(self):
        """True if rating data has been attached."""
        return self.rating is not None

    def hasWantTo(self):
        """True if want-to data has been attached."""
        return self.wantto is not None

    def serialize(self):
        """Return a dict with the attached data, keyed correctly.

        BUG FIX: the original stored the want-to payload under the
        'rating' key, mislabeling (and potentially clobbering) the data.
        """
        serial = {}
        if self.rating is not None:
            serial['rating'] = self.rating
        if self.wantto is not None:
            serial['wantto'] = self.wantto
        return serial
class UnexpectedSituationWarning(PathWarning): <NEW_LINE> <INDENT> pass | Raised to alert the user/developer of a situation that should theoretically not be possible | 62599037be383301e0254985 |
class File(A10BaseClass):
    """Local file management.

    :param DeviceProxy: The device proxy for REST operations and session
        handling.  Refer to `common/device_proxy.py`.

    Class ``File`` supports CRUD operations and inherits from
    `common/A10BaseClass`; it is the "PARENT" class for this module.

    URL for this object: `https://<Hostname|Ip address>//axapi/v3/file`.
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.required = []
        self.b_key = "file"
        self.a10_url = "/axapi/v3/file"
        self.DeviceProxy = ""
        # Each supported file category starts out as an empty dict.
        for attr in ("ssl_cert_key", "bw_list", "ip_map_list", "syslog",
                     "health_external", "auth_portal", "aflex",
                     "health_postfile", "web_category_license", "log_backup",
                     "ssl_crl", "debug_monitor", "system_backup", "policy",
                     "auth_portal_image", "class_list", "dnssec_ds",
                     "local_uri_file", "wsdl", "ssl_key", "A10WW_license",
                     "ca_cert", "axdebug", "xml_schema", "startup_config",
                     "auth_saml_idp", "ssl_cert", "dnssec_dnskey"):
            setattr(self, attr, {})
        # Caller-supplied attributes override the defaults above.
        for keys, value in kwargs.items():
            setattr(self, keys, value)
class ImgSlide(tornado.web.UIModule):
    """Module for Image slide.

    The post info may be passed positionally or as the ``info`` keyword.
    """

    def render(self, *args, **kwargs):
        """Render the image-slide partial for the given post info.

        BUG FIX: the original used ``kwargs.get('info', args[0])``, which
        evaluates ``args[0]`` eagerly and raises IndexError whenever the
        module is invoked with only the ``info`` keyword argument.
        """
        if 'info' in kwargs:
            info = kwargs['info']
        else:
            info = args[0]
        return self.render_string('modules/info/img_slide.html',
                                  post_info=info)
class TestCSApiResponseForPaginatedListExtendedGeofence(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testCSApiResponseForPaginatedListExtendedGeofence(self): <NEW_LINE> <INDENT> pass | CSApiResponseForPaginatedListExtendedGeofence unit test stubs | 6259903794891a1f408b9faf |
class SMTPFactory(protocol.ServerFactory): <NEW_LINE> <INDENT> protocol = LocalSMTPServer <NEW_LINE> domain = LOCAL_FQDN <NEW_LINE> timeout = 600 <NEW_LINE> encrypted_only = False <NEW_LINE> def __init__(self, soledad_sessions, keymanager_sessions, sendmail_opts, deferred=None, retries=3): <NEW_LINE> <INDENT> self._soledad_sessions = soledad_sessions <NEW_LINE> self._keymanager_sessions = keymanager_sessions <NEW_LINE> self._sendmail_opts = sendmail_opts <NEW_LINE> <DEDENT> def buildProtocol(self, addr): <NEW_LINE> <INDENT> p = self.protocol( self._soledad_sessions, self._keymanager_sessions, self._sendmail_opts, encrypted_only=self.encrypted_only) <NEW_LINE> p.factory = self <NEW_LINE> p.host = LOCAL_FQDN <NEW_LINE> p.challengers = {"LOGIN": LOGINCredentials, "PLAIN": PLAINCredentials} <NEW_LINE> return p | Factory for an SMTP server with encrypted gatewaying capabilities. | 62599037baa26c4b54d50418 |
class BookListItem(ThreeLineAvatarListItem): <NEW_LINE> <INDENT> def __init__(self, book, **kwargs): <NEW_LINE> <INDENT> super().__init__(text=book.title, secondary_text=book.subtitle, tertiary_text=f"Price: {book.price}", **kwargs) <NEW_LINE> self.book = book <NEW_LINE> image = AsyncImageLeftWidget(source=self.book.image) <NEW_LINE> self.add_widget(image) <NEW_LINE> <DEDENT> def on_release(self): <NEW_LINE> <INDENT> BooksTab.screens["book_info"].load_screen(self.book) <NEW_LINE> BooksTab.screen_manager.transition.direction = "left" <NEW_LINE> BooksTab.screen_manager.switch_to(BooksTab.screens["book_info"]) | List item with the cover and short information about the book. | 6259903773bcbd0ca4bcb3f8 |
class Listener(tweepy.StreamListener):
    """Consumes Tweets from the Twitter Streaming API.

    This class overrides the default methods of the tweepy.StreamListener
    class in order to clean Tweets and write them to MySQL.
    """

    def on_status(self, tweet):
        """Clean one status object and persist it and its entities."""
        # Strip 4-byte UTF-8 sequences the DB cannot store, then decode.
        tweet = json.loads(utf8mb4.sub(u'', tweet.json))
        tweet['created_at'] = qactweet.util.isoformat(tweet['created_at'])
        tweet['text'] = htmlparser.unescape(tweet['text'])
        tweet['source'] = qactweet.util.strip_tags(tweet['source'])

        sql = u'INSERT IGNORE INTO status VALUES (%s, %s, %s, %s, %s);'
        c.execute(sql, (tweet['id'], tweet['user']['id'],
                        tweet['created_at'], tweet['text'], tweet['source']))

        user = tweet['user']
        user['created_at'] = qactweet.util.isoformat(user['created_at'])
        user['description'] = htmlparser.unescape(user['description'])
        sql = u'INSERT IGNORE INTO user VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s);'
        c.execute(sql, (user['id'], user['screen_name'], user['name'],
                        user['created_at'], user['description'],
                        user['location'], user['followers_count'],
                        user['friends_count'], user['statuses_count']))

        sql = u'INSERT INTO hashtag VALUES (%s, %s);'
        for tag in tweet['entities']['hashtags']:
            c.execute(sql, (tweet['id'], tag['text']))

        sql = u'INSERT INTO url VALUES (%s, %s);'
        for link in tweet['entities']['urls']:
            try:
                # Unshortening touches the network; skip URLs that fail.
                c.execute(sql, (tweet['id'],
                                qactweet.util.unshorten(link['expanded_url'])))
            except IOError:
                continue

        sql = u'INSERT INTO user_mention VALUES (%s, %s, %s, %s);'
        for mention in tweet['entities']['user_mentions']:
            c.execute(sql, (tweet['id'], mention['id'],
                            mention['screen_name'], mention['name']))

        # Geo: prefer exact coordinates, fall back to the profile location.
        if tweet['coordinates']:
            lon, lat = tweet['coordinates']['coordinates']
            queue.put((tweet['id'], lon, lat))
        elif tweet['user']['location']:
            lon, lat = qactweet.geo.parse_location(tweet['user']['location'])
            if lon is not None:
                queue.put((tweet['id'], lon, lat))

        sql = u'INSERT IGNORE INTO sentiment VALUES (%s, %s, %s);'
        words = qactweet.sentiment.preprocess(tweet['text'])
        pos, neg = qactweet.sentiment.score(
            words,
            qactweet.sentiment.positive_words,
            qactweet.sentiment.negative_words)
        c.execute(sql, (tweet['id'], pos, neg))
        conn.commit()

    def on_error(self, status_code):
        """Abort the stream by raising on any HTTP error code."""
        raise tweepy.error.TweepError(status_code)
class PriorityQueue(object): <NEW_LINE> <INDENT> def __init__(self, key, items=[]): <NEW_LINE> <INDENT> self.keys = dict((item, item) for item in items) <NEW_LINE> self.heap = list((key(item), item) for item in items) <NEW_LINE> self.key = key <NEW_LINE> heapq.heapify(self.heap) <NEW_LINE> <DEDENT> def contains(self, item): <NEW_LINE> <INDENT> return item in self.keys <NEW_LINE> <DEDENT> def get(self, item): <NEW_LINE> <INDENT> return item if self.contains(item) else None <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> prio, item = heapq.heappop(self.heap) <NEW_LINE> return self.keys.pop(item) <NEW_LINE> <DEDENT> def pop_big(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> prio, item = self.heap[-1] <NEW_LINE> self.heap.remove((prio,item)) <NEW_LINE> return self.keys.pop(item) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def push(self, item): <NEW_LINE> <INDENT> if not self.contains(item): <NEW_LINE> <INDENT> self.keys.update({item:item}) <NEW_LINE> heapq.heappush(self.heap, (self.key(item), item)) <NEW_LINE> <DEDENT> <DEDENT> def push_replace(self, item): <NEW_LINE> <INDENT> raise NotImplemented <NEW_LINE> <DEDENT> def peek(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pr, item = self.heap[0] <NEW_LINE> return item <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return len(self.keys) | A priority queue based on heapq and hashmap.
This is a overly complicated solution for something that shouldn't be that
hard. My input to the program was.
* be able to choose key to sort after.
* be unique and uniqueness aka __eq__() should not have something to
do with key.
* be able to pop from both ends.
* be fast lookup.
I think a sorted array would have been better than this, even though the
lookup with b-search in a sorted array is worst-case, it's less memory use
and overhead then keeping track of two datastructures. | 62599037ac7a0e7691f73658 |
class Compare(Expression): <NEW_LINE> <INDENT> def __init__(self, lhs, op, rhs): <NEW_LINE> <INDENT> super(Compare, self).__init__(lhs, op, rhs) <NEW_LINE> <DEDENT> def compute(self, memory): <NEW_LINE> <INDENT> if memory.is_variable(self.lhs): <NEW_LINE> <INDENT> lhs_val = memory.get_value(self.lhs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lhs_val = str(self.lhs) <NEW_LINE> <DEDENT> if memory.is_variable(self.rhs): <NEW_LINE> <INDENT> rhs_val = memory.get_value(self.rhs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rhs_val = str(self.rhs) <NEW_LINE> <DEDENT> if self.operand == '>': <NEW_LINE> <INDENT> return (int(lhs_val) > int(rhs_val)) <NEW_LINE> <DEDENT> elif self.operand == '<': <NEW_LINE> <INDENT> return (int(lhs_val) < int(rhs_val)) <NEW_LINE> <DEDENT> elif self.operand == '==': <NEW_LINE> <INDENT> return (str(lhs_val) == str(rhs_val)) | description of class | 62599037d4950a0f3b1116f7 |
class PostForm(FlaskForm): <NEW_LINE> <INDENT> post = TextAreaField('Say something', validators=[ DataRequired(), Length(min=1, max=140)]) <NEW_LINE> submit = SubmitField('Submit') | Blog submission form | 6259903766673b3332c31565 |
class GameObject(EventPublisher): <NEW_LINE> <INDENT> def __init__(self, **options): <NEW_LINE> <INDENT> EventPublisher.__init__(self) <NEW_LINE> if options["name"] in ["level", "game"]: <NEW_LINE> <INDENT> raise Exception("Invalid object name: `%s`" % options["name"]) <NEW_LINE> <DEDENT> self.__name = options["name"] <NEW_LINE> self.__game = options["game"] <NEW_LINE> <DEDENT> def modify_result_of(self, actual, method): <NEW_LINE> <INDENT> def wrapped(*args, **kwargs): <NEW_LINE> <INDENT> return method(*actual(*args, **kwargs)) <NEW_LINE> <DEDENT> setattr(self, actual.__name__, wrapped) <NEW_LINE> <DEDENT> def run_after(self, actual, method): <NEW_LINE> <INDENT> def wrapped(*args, **kwargs): <NEW_LINE> <INDENT> result = actual(*args, **kwargs) <NEW_LINE> method(*args, **kwargs) <NEW_LINE> return result <NEW_LINE> <DEDENT> setattr(self, actual.__name__, wrapped) <NEW_LINE> <DEDENT> def _extract_list_values(self, values): <NEW_LINE> <INDENT> if isinstance(values, (list, tuple)): <NEW_LINE> <INDENT> values = list( map( self.game.interpreter.evaluate_expression, map(str, values), ) ) <NEW_LINE> <DEDENT> elif isinstance(values, str): <NEW_LINE> <INDENT> values = self.game.interpreter.evaluate_expression(values) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("List should be lists...") <NEW_LINE> <DEDENT> return values <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.__name <NEW_LINE> <DEDENT> @property <NEW_LINE> def game(self): <NEW_LINE> <INDENT> return self.__game | Base class of all game objects. | 62599037507cdc57c63a5f0a |
class Deck(Hand): <NEW_LINE> <INDENT> def populate(self): <NEW_LINE> <INDENT> for s in Card.SUITS: <NEW_LINE> <INDENT> for r in Card.RANKS: <NEW_LINE> <INDENT> self.add(Card(r, s, False)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def shuffle(self): <NEW_LINE> <INDENT> random.shuffle(self.cards) <NEW_LINE> <DEDENT> def deal(self, hands, per_hand=1): <NEW_LINE> <INDENT> for i in range(per_hand): <NEW_LINE> <INDENT> for hand in hands: <NEW_LINE> <INDENT> if self.cards: <NEW_LINE> <INDENT> top_card = self.cards[0] <NEW_LINE> self.give(top_card, hand) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Can't continue deal. out of cards") | A Deck of playing cards
uses all of the hand functions from above
use populate after creating the deck as deck.populate()
this will fill out the deck with all 52 cards
use deck.shuffle(to shuffle the deck randomly
deck.deal(hands,per_hand) will take a list of player hands,
and how many cards for each hand
if the deck runs out of cards it will print out of cards | 6259903715baa7234946310b |
class WikipediaApi: <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> self.wiki_api_url: str = "https://fr.wikipedia.org/w/api.php" <NEW_LINE> <DEDENT> def _search_page_by_title(self, title: str) -> Dict[str, Any]: <NEW_LINE> <INDENT> params: Dict = { "action": "query", "format": "json", "list": "search", "srsearch": title, } <NEW_LINE> req = requests.get(self.wiki_api_url, params=params) <NEW_LINE> data = req.json() <NEW_LINE> return { "page_id": data["query"]["search"][0]["pageid"], "title": data["query"]["search"][0]["title"], } <NEW_LINE> <DEDENT> def _search_page_by_geo(self, coords: Dict[str, float]) -> Dict[str, Any]: <NEW_LINE> <INDENT> lat = str(coords["lat"]) <NEW_LINE> lng = str(coords["lng"]) <NEW_LINE> params: Dict = { "action": "query", "format": "json", "list": "geosearch", "gscoord": f"{lat}|{lng}", } <NEW_LINE> req = requests.get(self.wiki_api_url, params=params) <NEW_LINE> data = req.json()["query"]["geosearch"][0] <NEW_LINE> return { "page_id": data["pageid"], "title": data["title"], } <NEW_LINE> <DEDENT> def _get_page_summary(self, page_id: int) -> str: <NEW_LINE> <INDENT> params: Dict = { "action": "query", "format": "json", "prop": "extracts", "pageids": page_id, "formatversion": "latest", "exsentences": 3, "explaintext": True, } <NEW_LINE> req = requests.get(self.wiki_api_url, params=params) <NEW_LINE> return req.json()["query"]["pages"][0]["extract"] <NEW_LINE> <DEDENT> def _get_page_url(self, page_id: int) -> str: <NEW_LINE> <INDENT> params: Dict = { "action": "query", "format": "json", "prop": "info", "inprop": "url", "pageids": page_id, } <NEW_LINE> req = requests.get(self.wiki_api_url, params=params) <NEW_LINE> return req.json()["query"]["pages"][str(page_id)]["fullurl"] <NEW_LINE> <DEDENT> def get_page_info( self, gmaps_title: str, gmaps_coords: Dict[str, float] ) -> Dict[str, Any]: <NEW_LINE> <INDENT> page_id = self._search_page_by_geo(gmaps_coords) <NEW_LINE> page_info = { "page_info": { "title": page_id["title"], 
"summary": self._get_page_summary(page_id["page_id"]), "url": self._get_page_url(page_id["page_id"]), }, "search_type": "coords", } <NEW_LINE> return page_info | Wikipedia API interaction class.
| 6259903726068e7796d4dab9 |
class GraphqlWsTransport: <NEW_LINE> <INDENT> TIMEOUT: float = 60.0 <NEW_LINE> async def connect(self, timeout: Optional[float] = None) -> None: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> async def send(self, message: dict) -> None: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> async def receive(self, timeout: Optional[float] = None) -> dict: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> async def disconnect(self, timeout: Optional[float] = None) -> None: <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> async def wait_disconnect(self, timeout: Optional[float] = None) -> None: <NEW_LINE> <INDENT> raise NotImplementedError() | Transport interface for the `GraphqlWsClient`. | 6259903773bcbd0ca4bcb3f9 |
class TestClientMoveMultipleUDFs(TestUDFServerBase): <NEW_LINE> <INDENT> @defer.inlineCallbacks <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> yield super(TestClientMoveMultipleUDFs, self).setUp() <NEW_LINE> yield self.wait_for_nirvana(.2) <NEW_LINE> self.other_udf = yield self.create_udf('TestUDF2') <NEW_LINE> self.other_udf_id = self.other_udf.id <NEW_LINE> self.other_udf_dir = self.other_udf.path <NEW_LINE> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def test_simple_file_move(self): <NEW_LINE> <INDENT> yield self.make_file('my_udf', 'test_file', self.my_udf.node_id) <NEW_LINE> yield self.main.wait_for_nirvana(last_event_interval=0.3) <NEW_LINE> fname = self.my_udf_dir + "/test_file" <NEW_LINE> dest_fname = self.other_udf_dir + "/test_file" <NEW_LINE> os.rename(fname, dest_fname) <NEW_LINE> yield self.check('my_udf_dir', 'my_udf_id') <NEW_LINE> yield self.check('other_udf_dir', 'other_udf_id') <NEW_LINE> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def test_dir_move(self): <NEW_LINE> <INDENT> yield self.make_dir('my_udf', 'test_dir', self.my_udf.node_id) <NEW_LINE> yield self.main.wait_for_nirvana(last_event_interval=0.3) <NEW_LINE> fname = self.my_udf_dir + "/test_dir" <NEW_LINE> dest_fname = self.other_udf_dir + "/test_dir" <NEW_LINE> os.rename(fname, dest_fname) <NEW_LINE> yield self.check('my_udf_dir', 'my_udf_id') <NEW_LINE> yield self.check('other_udf_dir', 'other_udf_id') | Moves on the client (between UDFs), e.g:
1) jack has two UDFs
2) jack moves (on the filesystem) a file from udf1 to udf2
3) jack moves (on the filesystem) a dir from udf1 to udf2 | 62599037d6c5a102081e3296 |
class Scene(object): <NEW_LINE> <INDENT> def __init__(self, folder='', duration=0): <NEW_LINE> <INDENT> self.actor_register = [] <NEW_LINE> self.folder = folder <NEW_LINE> self.duration = duration <NEW_LINE> <DEDENT> def tick(self,dt): <NEW_LINE> <INDENT> for actor in self.actor_register: <NEW_LINE> <INDENT> actor.tick(dt) <NEW_LINE> <DEDENT> <DEDENT> def paint(self): <NEW_LINE> <INDENT> for actor in self.actor_register: <NEW_LINE> <INDENT> actor.paint() <NEW_LINE> <DEDENT> <DEDENT> def configure(self): <NEW_LINE> <INDENT> os.chdir(self.folder) <NEW_LINE> for filename in glob("*.py"): <NEW_LINE> <INDENT> curdir = os.path.dirname(os.path.abspath(__file__)) <NEW_LINE> mysubdir= "/".join((curdir,self.folder)) <NEW_LINE> actor_file= "/".join((mysubdir,filename)) <NEW_LINE> new_actor = load_source(filename,actor_file) <NEW_LINE> if hasattr(new_actor, 'register'): <NEW_LINE> <INDENT> self.actor_registr.append(new_actor.register()) | Collects all actors in scene folder | 625990370a366e3fb87ddb56 |
class MecabTokenizer(object): <NEW_LINE> <INDENT> def __init__(self, do_lower_case=False, never_split=None, normalize_text=True, mecab_option=None): <NEW_LINE> <INDENT> self.do_lower_case = do_lower_case <NEW_LINE> self.never_split = never_split if never_split is not None else [] <NEW_LINE> self.normalize_text = normalize_text <NEW_LINE> import MeCab <NEW_LINE> self.mecab = MeCab.Tagger(mecab_option) if mecab_option is not None else MeCab.Tagger() <NEW_LINE> <DEDENT> def tokenize(self, text, never_split=None, **kwargs): <NEW_LINE> <INDENT> if self.normalize_text: <NEW_LINE> <INDENT> text = unicodedata.normalize("NFKC", text) <NEW_LINE> <DEDENT> never_split = self.never_split + (never_split if never_split is not None else []) <NEW_LINE> tokens = [] <NEW_LINE> mecab_output = self.mecab.parse(text) <NEW_LINE> cursor = 0 <NEW_LINE> for line in mecab_output.split("\n"): <NEW_LINE> <INDENT> if line == "EOS": <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> token, _ = line.split("\t") <NEW_LINE> token_start = text.index(token, cursor) <NEW_LINE> token_end = token_start + len(token) <NEW_LINE> if self.do_lower_case and token not in never_split: <NEW_LINE> <INDENT> token = token.lower() <NEW_LINE> <DEDENT> tokens.append(token) <NEW_LINE> cursor = token_end <NEW_LINE> <DEDENT> return tokens | Runs basic tokenization with MeCab morphological parser. | 625990378e05c05ec3f6f713 |
class SingleFilePrinter(ProgressPrinter): <NEW_LINE> <INDENT> def __init__(self, output, outputRDFobjects=None): <NEW_LINE> <INDENT> self.outputRDFobjects = outputRDFobjects <NEW_LINE> self.myout = output <NEW_LINE> <DEDENT> def printProgress(self, count, path, leaves, note): <NEW_LINE> <INDENT> self.myout.count = count <NEW_LINE> <DEDENT> def printSolution(self, path): <NEW_LINE> <INDENT> self.myout.path = path <NEW_LINE> self.myout.color = "#00ff00" <NEW_LINE> self.myout.output(self.outputRDFobjects) | Print the final solution to a output file. output is a pynt.output.BaseOutput class. | 6259903771ff763f4b5e890a |
@ddt.ddt <NEW_LINE> class TeamMembershipTest(SharedModuleStoreTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TeamMembershipTest, self).setUp() <NEW_LINE> self.user1 = UserFactory.create(username='user1') <NEW_LINE> self.user2 = UserFactory.create(username='user2') <NEW_LINE> self.team1 = CourseTeamFactory(course_id=COURSE_KEY1, team_id='team1') <NEW_LINE> self.team2 = CourseTeamFactory(course_id=COURSE_KEY2, team_id='team2') <NEW_LINE> self.team_membership11 = CourseTeamMembership(user=self.user1, team=self.team1) <NEW_LINE> self.team_membership11.save() <NEW_LINE> self.team_membership12 = CourseTeamMembership(user=self.user2, team=self.team1) <NEW_LINE> self.team_membership12.save() <NEW_LINE> self.team_membership21 = CourseTeamMembership(user=self.user1, team=self.team2) <NEW_LINE> self.team_membership21.save() <NEW_LINE> <DEDENT> def test_membership_last_activity_set(self): <NEW_LINE> <INDENT> current_last_activity = self.team_membership11.last_activity_at <NEW_LINE> self.assertIsNotNone(current_last_activity) <NEW_LINE> self.team_membership11.save() <NEW_LINE> self.assertEqual(self.team_membership11.last_activity_at, current_last_activity) <NEW_LINE> <DEDENT> @ddt.data( (None, None, None, 3), ('user1', None, None, 2), ('user1', [COURSE_KEY1], None, 1), ('user1', None, 'team1', 1), ('user2', None, None, 1), ) <NEW_LINE> @ddt.unpack <NEW_LINE> def test_get_memberships(self, username, course_ids, team_id, expected_count): <NEW_LINE> <INDENT> self.assertEqual( CourseTeamMembership.get_memberships(username=username, course_ids=course_ids, team_id=team_id).count(), expected_count ) <NEW_LINE> <DEDENT> @ddt.data( ('user1', COURSE_KEY1, True), ('user2', COURSE_KEY1, True), ('user2', COURSE_KEY2, False), ) <NEW_LINE> @ddt.unpack <NEW_LINE> def test_user_in_team_for_course(self, username, course_id, expected_value): <NEW_LINE> <INDENT> user = getattr(self, username) <NEW_LINE> self.assertEqual( CourseTeamMembership.user_in_team_for_course(user, 
course_id), expected_value ) | Tests for the TeamMembership model. | 62599037a4f1c619b294f73f |
class Validator(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def semver(version=None): <NEW_LINE> <INDENT> from re import match <NEW_LINE> try: <NEW_LINE> <INDENT> return bool(match(r'^%s$' % SEMVER_MATCH, version)) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def command_available(command, abort=False): <NEW_LINE> <INDENT> from distutils.spawn import find_executable <NEW_LINE> if bool(find_executable(command)): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif abort: <NEW_LINE> <INDENT> raise IOError('"%s" not found or not executable' % command) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def linted(filepath): <NEW_LINE> <INDENT> from subprocess import call, CalledProcessError <NEW_LINE> linters = [['pylint', '--rcfile=/dev/null', '--reports=n', '--disable=locally-disabled,locally-enabled'], ['pydocstyle'], ['flake8']] <NEW_LINE> for linter in linters: <NEW_LINE> <INDENT> linter.append(filepath) <NEW_LINE> if not Validator.command_available(linter[0]): <NEW_LINE> <INDENT> print('WARNING: disabling %s tests, binary not found ' 'in current PATH' % linter[0]) <NEW_LINE> <DEDENT> <DEDENT> linters = [linter for linter in linters if Validator.command_available(linter[0])] <NEW_LINE> try: <NEW_LINE> <INDENT> returncodes = [not bool(call(linter)) for linter in linters] <NEW_LINE> return all(returncodes) <NEW_LINE> <DEDENT> except CalledProcessError: <NEW_LINE> <INDENT> return False | Class wrapper for validation logics. | 62599037c432627299fa4169 |
class Solution: <NEW_LINE> <INDENT> def twoSum2(self, nums, target): <NEW_LINE> <INDENT> if not nums or len(nums) < 2: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> nums.sort() <NEW_LINE> count = 0 <NEW_LINE> left, right = 0, len(nums) - 1 <NEW_LINE> while left < right: <NEW_LINE> <INDENT> if nums[left] + nums[right] > target: <NEW_LINE> <INDENT> count += right - left <NEW_LINE> right -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> left += 1 <NEW_LINE> <DEDENT> <DEDENT> return count | @param nums: an array of integer
@param target: An integer
@return: an integer | 6259903730c21e258be9997f |
class HandlerService(DefaultHandlerService): <NEW_LINE> <INDENT> class DefaultXGBoostAlgoModeInferenceHandler(default_inference_handler.DefaultInferenceHandler): <NEW_LINE> <INDENT> def default_model_fn(self, model_dir): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> booster, format = serve_utils.get_loaded_booster(model_dir) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ModelLoadInferenceError("Unable to load model: {}".format(str(e))) <NEW_LINE> <DEDENT> return booster, format <NEW_LINE> <DEDENT> def default_input_fn(self, input_data, input_content_type): <NEW_LINE> <INDENT> if len(input_data) == 0: <NEW_LINE> <INDENT> raise NoContentInferenceError() <NEW_LINE> <DEDENT> dtest, content_type = serve_utils.parse_content_data(input_data, input_content_type) <NEW_LINE> return dtest, content_type <NEW_LINE> <DEDENT> def default_predict_fn(self, data, model): <NEW_LINE> <INDENT> booster, model_format = model <NEW_LINE> dtest, content_type = data <NEW_LINE> try: <NEW_LINE> <INDENT> return serve_utils.predict(booster, model_format, dtest, content_type) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise BadRequestInferenceError(str(e)) <NEW_LINE> <DEDENT> <DEDENT> def default_output_fn(self, prediction, accept): <NEW_LINE> <INDENT> accept_type = accept.lower() <NEW_LINE> try: <NEW_LINE> <INDENT> if accept_type == content_types.CSV or accept_type == 'csv': <NEW_LINE> <INDENT> if SAGEMAKER_BATCH: <NEW_LINE> <INDENT> return_data = "\n".join(map(str, prediction.tolist())) + '\n' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return_data = ",".join(map(str, prediction.tolist())) <NEW_LINE> <DEDENT> encoded_prediction = return_data.encode("utf-8") <NEW_LINE> <DEDENT> elif accept_type == content_types.JSON or accept_type == 'json': <NEW_LINE> <INDENT> encoded_prediction = encoder.encode(prediction, accept_type) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("{} is not an accepted Accept type. 
Please choose one of the following:" " ['{}', '{}'].".format(accept, content_types.CSV, content_types.JSON)) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise UnsupportedMediaTypeInferenceError( "Encoding to accept type {} failed with exception: {}".format(accept, e)) <NEW_LINE> <DEDENT> return encoded_prediction <NEW_LINE> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> transformer = XGBMMSTransformer(default_inference_handler=self.DefaultXGBoostAlgoModeInferenceHandler()) <NEW_LINE> super(HandlerService, self).__init__(transformer=transformer) | Handler service that is executed by the model server.
Determines specific default inference handlers to use based on the type MXNet model being used.
This class extends ``DefaultHandlerService``, which define the following:
- The ``handle`` method is invoked for all incoming inference requests to the model server.
- The ``initialize`` method is invoked at model server start up.
Based on: https://github.com/awslabs/mxnet-model-server/blob/v1.0.8/docs/custom_service.md | 62599037d164cc61758220e5 |
class ResourceOptions: <NEW_LINE> <INDENT> parent: Optional['Resource'] <NEW_LINE> depends_on: Optional[List['Resource']] <NEW_LINE> protect: Optional[bool] <NEW_LINE> provider: Optional['ProviderResource'] <NEW_LINE> providers: Mapping[str, 'ProviderResource'] <NEW_LINE> def __init__(self, parent: Optional['Resource'] = None, depends_on: Optional[List['Resource']] = None, protect: Optional[bool] = None, provider: Optional['ProviderResource'] = None, providers: Optional[Mapping[str, 'ProviderResource']] = None) -> None: <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> self.depends_on = depends_on <NEW_LINE> self.protect = protect <NEW_LINE> self.provider = provider <NEW_LINE> self.providers = providers | ResourceOptions is a bag of optional settings that control a resource's behavior. | 625990371f5feb6acb163d64 |
class ApeTagger(MutagenTagger): <NEW_LINE> <INDENT> opener = {"ape": MonkeysAudio, "mpc": Musepack} <NEW_LINE> def save_tag(self): <NEW_LINE> <INDENT> tag = self.tag <NEW_LINE> for key, values in tag.items(): <NEW_LINE> <INDENT> if isinstance(values, APETextValue): <NEW_LINE> <INDENT> del tag[key] <NEW_LINE> <DEDENT> <DEDENT> tb = self.tag_frame.tb <NEW_LINE> lines = tb.get_text(tb.get_start_iter(), tb.get_end_iter()).splitlines() <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> key, val = line.split("=", 1) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key = key.strip() <NEW_LINE> val = val.strip() <NEW_LINE> if val: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> tag[key].value += "\0" + val.decode() <NEW_LINE> <DEDENT> except (KeyError, AttributeError): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> tag[key] = APETextValue(val.decode(), 0) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> print("Unacceptable key", key) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> tag.save() <NEW_LINE> <DEDENT> def load_tag(self): <NEW_LINE> <INDENT> tag = self.tag <NEW_LINE> lines = [] <NEW_LINE> primaries = "TITLE", "ARTIST", "AUTHOR", "ALBUM", "TRACKNUMBER", "TRACKTOTAL", "GENRE", "DATE" <NEW_LINE> for key in primaries: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> values = tag[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> lines.append(key + "=") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for val in values: <NEW_LINE> <INDENT> lines.append(key + "=" + val) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for key, values in tag.items(): <NEW_LINE> <INDENT> if key not in primaries and isinstance(values, APETextValue): <NEW_LINE> <INDENT> for val in values: <NEW_LINE> <INDENT> lines.append(key + "=" + val) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.tag_frame.tb.set_text("\n".join(lines)) <NEW_LINE> <DEDENT> def __init__(self, pathname, extension): <NEW_LINE> <INDENT> 
MutagenTagger.__init__(self, pathname) <NEW_LINE> try: <NEW_LINE> <INDENT> self.tag = self.opener[extension](pathname) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.tag = APEv2(pathname) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("ape tag not found") <NEW_LINE> self.tag = None <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("ape tag found on non-native format") <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> print("failed to create tagger for native format") <NEW_LINE> self.tag = None <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.tag.add_tags() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("ape tag found on native format") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("no existing ape tags found") <NEW_LINE> <DEDENT> <DEDENT> self.tag_frame = FreeTagFrame() <NEW_LINE> self.tag_frame.set_hexpand(True) <NEW_LINE> self.tag_frame.set_vexpand(True) <NEW_LINE> self.add(self.tag_frame) <NEW_LINE> self.tag_frame.show() | APEv2 tagging with Mutagen. | 6259903721bff66bcd723dda |
@register <NEW_LINE> class AssertHandler(WithScopeHandler): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> def Assert(condition, message, span): <NEW_LINE> <INDENT> return tvm.tir.AssertStmt(condition, tvm.runtime.convert(message), self.body, span=span) <NEW_LINE> <DEDENT> super().__init__(Assert, concise_scope=True, def_symbol=False) | With scope handler tir.Assert(condition, message) | 625990379b70327d1c57fef8 |
class QuasiNewton(OptimizationMethod): <NEW_LINE> <INDENT> def __init__(self, problem, method = "GB", condition = "goldstein",rho = 0.1, sigma = 0.7, tau = 0.1, xi = 9): <NEW_LINE> <INDENT> super().__init__(problem, condition, rho, sigma, tau, xi) <NEW_LINE> acceptedMethods=['DFP','GB','BFGS','BB'] <NEW_LINE> if not method in acceptedMethods: <NEW_LINE> <INDENT> print('Not acceptable method.') <NEW_LINE> print('Acceptable methods are: ',acceptedMethods) <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> if (method == 'DFP'): <NEW_LINE> <INDENT> self.updateHess = hessianUpdaterMethods.DFPmethod <NEW_LINE> <DEDENT> elif (method == 'BB'): <NEW_LINE> <INDENT> self.updateHess = hessianUpdaterMethods.BBmethod <NEW_LINE> <DEDENT> elif (method == 'BFGS'): <NEW_LINE> <INDENT> self.updateHess = hessianUpdaterMethods.BFGSmethod <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.updateHess = hessianUpdaterMethods.GBmethod <NEW_LINE> <DEDENT> <DEDENT> def initialHessian(self, x_0): <NEW_LINE> <INDENT> return identity(len(x_0)) <NEW_LINE> <DEDENT> def calculateDirection(self, x_k, H_k): <NEW_LINE> <INDENT> s_k = -H_k.dot(self.fgradient(x_k)) <NEW_LINE> return s_k <NEW_LINE> <DEDENT> def updateHessian(self, x_k, x_k1, H_k): <NEW_LINE> <INDENT> delta = x_k1 - x_k <NEW_LINE> gamma = self.fgradient(x_k1) - self.fgradient(x_k) <NEW_LINE> H_k1 = self.updateHess(H_k, delta, gamma) <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> L = cho.cho_factor(H_k1) <NEW_LINE> break <NEW_LINE> <DEDENT> except cho.LinAlgError: <NEW_LINE> <INDENT> print("LinAlgError: Matrix was possibly non PSD. Adding identity to compensate.") <NEW_LINE> H_k1 += identity(len(x_k)) <NEW_LINE> <DEDENT> <DEDENT> return H_k1 | Subclass to OptimizationMethod. Uses quasi-newton solver.
Args:
problem: OptimizationProblem object.
method: {Optional} {Default = "GB"} String containing name of
method to be uesd for updating hessian.
condition: {Optional} {Default = "goldstein"} String containing
type of condition for inexact line search.
rho: {Optional} {Default = 0.1} Parameter for inexact line search
sigma: {Optional} {Default = 0.7} Parameter for inexact line search
tau: {Optional} {Default = 0.1} Parameter for inexact line search
xi: {Optional} {Default = 9} Parameter for inexact line search | 62599037507cdc57c63a5f0c |
class BucketList(object): <NEW_LINE> <INDENT> def __init__(self, title, activities={}): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.activities = activities <NEW_LINE> <DEDENT> def add_activity(self, activity): <NEW_LINE> <INDENT> self.activities[activity.name] = activity <NEW_LINE> return self.activities <NEW_LINE> <DEDENT> def remove_activity(self, activity): <NEW_LINE> <INDENT> if activity.name in self.activities.keys(): <NEW_LINE> <INDENT> del self.activities[activity.name] <NEW_LINE> <DEDENT> return self.activities <NEW_LINE> <DEDENT> def change_title(self, title): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> return self.title | This class describes the structure of the BucketList object | 62599037e76e3b2f99fd9b7f |
class State(object): <NEW_LINE> <INDENT> def __init__(self, activity, zone, todo): <NEW_LINE> <INDENT> self.zone, self.activity, self.todo = zone, activity, todo <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s-%s-%s" % (self.zone, self.activity, sorted(self.todo)) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(repr(self)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.activity == other.activity and self.zone == other.zone and self.todo == other.todo | The state contains the position of the traveler (zone), the activity participated
and --lagged variable (autoregressive process)--, excluding timeslice. | 6259903796565a6dacd2d845 |
class CallbackModule(object): <NEW_LINE> <INDENT> CALLBACK_VERSION = 2.0 <NEW_LINE> CALLBACK_TYPE = 'notification' <NEW_LINE> CALLBACK_NAME = 'collector' <NEW_LINE> CALLBACK_NEEDS_WHITELIST = False <NEW_LINE> def v2_runner_on_ok(self, result): <NEW_LINE> <INDENT> data = result._result <NEW_LINE> try: <NEW_LINE> <INDENT> log_entry = data["custom_log"] <NEW_LINE> print(log_entry) <NEW_LINE> custom_log = open('./logs/log', 'a') <NEW_LINE> custom_log.write(log_entry + "\n") <NEW_LINE> custom_log.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass | Ansible callback plugin for collect result into a common repository for the platform | 6259903782261d6c5273077e |
class LookupTable(ParameterLayer):
    """A lookup table layer (word embedding layer).

    Converts a word into a dense representation.  When given a sentence,
    which is a vector of words (as integers), a matrix of
    vectors/embeddings for each word in the sentence is returned.  A
    table of dimensions embedding_dim by vocab_size is learnt.

    input shape  - (nin, batch_size)
    output shape - (embedding_dim, nin * batch_size)
    weight shape - (vocab_size, embedding_dim)

    Arguments:
        vocab_size (int): Number of words in the vocabulary
        embedding_dim (int): Desired size of the word embedding
        init (Initializer): Initializer object for the layer weights
        update (bool, optional): Whether to update the table in bprop
        pad_idx (int, optional): Row index whose embedding is pinned to
            zeros (used for padding tokens)
        name (str, optional): Layer name. Defaults to "LookupTableLayer"
    """

    def __init__(self, vocab_size, embedding_dim, init, update=True,
                 pad_idx=None, name=None):
        super(LookupTable, self).__init__(init, name)
        self.embedding_dim = embedding_dim
        self.vocab_size = vocab_size
        self.update = update
        self.pad_idx = pad_idx
        # Transposed output buffer, lazily allocated in allocate().
        self.outputs_t = None

    def __str__(self):
        return "LookupTable Layer : %d inputs, (%d, %d) outputs size" % (
            self.nin, self.embedding_dim, self.nin)

    def configure(self, in_obj):
        """Derive input/output/weight shapes from the upstream object."""
        super(LookupTable, self).configure(in_obj)
        (self.nin, self.nsteps) = interpret_in_shape(self.in_shape)
        self.out_shape = (self.embedding_dim, self.nin)
        if self.weight_shape is None:
            # One embedding row per vocabulary entry.
            self.weight_shape = (self.vocab_size, self.embedding_dim)
        return self

    def allocate(self, shared_outputs=None):
        """Allocate backend buffers and zero gradients / pad row."""
        super(LookupTable, self).allocate(shared_outputs=shared_outputs)
        if self.inputs is None:
            # Integer word-index buffer, flattened across the batch.
            self.inputs = self.be.zeros((1, self.nin * self.be.bsz),
                                        dtype=np.int32)
        self.dW[:] = 0
        if self.pad_idx is not None:
            # Padding tokens must contribute a zero embedding.
            self.W[self.pad_idx] = 0
        if self.outputs_t is None:
            self.outputs_t = self.be.empty_like(self.outputs.T)

    def fprop(self, inputs, inference=False):
        """Gather the embedding row for each input index."""
        self.inputs[:] = inputs.reshape(self.inputs.shape)
        self.outputs_t[:] = self.W.take(self.inputs, axis=0)
        self.outputs[:] = self.outputs_t.T
        return self.outputs

    def bprop(self, error, alpha=1.0, beta=0):
        """Scatter-accumulate gradients into dW (if updating)."""
        if self.update:
            self.dW[:] = 0
            # Backend fused kernel accumulates per-index gradients and
            # keeps the pad_idx row zeroed.
            self.be.compound_bprop_lut(self.nin, self.inputs, error,
                                       self.outputs_t, self.dW,
                                       self.pad_idx, alpha, beta)
        return self.deltas
class proxy(ExploitResult, CommonAttackMethods):
    """Output of an attack plugin that gives a proxy to the w3af user.

    :author: Andres Riancho ([email protected])
    """

    def __init__(self, proxyDaemonObject):
        ExploitResult.__init__(self)
        self._proxyDaemon = proxyDaemonObject

    def end(self):
        # Abstract: concrete proxy shells must know how to shut down.
        msg = 'You should implement the end method of classes that inherit from "proxy"'
        raise BaseFrameworkException(msg)

    def get_name(self):
        # Abstract: concrete proxy shells must identify themselves.
        msg = 'You should implement the get_name method of classes that inherit from "proxy"'
        raise BaseFrameworkException(msg)

    def __repr__(self):
        bind_addr = '%s:%s' % (self._proxyDaemon.get_bind_ip(),
                               self._proxyDaemon.get_bind_port())
        return '<proxy server at: ' + bind_addr + '>'

    __str__ = __repr__
class YMDHMService(SAServiceBase):
    """Service for a year/month/day/hour/minute (YMDHM) value.

    NOTE(review): the original docstring was just "todo"; the exact
    payload semantics are not visible here -- confirm against other
    SAServiceBase subclasses.
    """

    # Service type identifier for this service.
    ID = 0x18

    def read_value(self):
        # Subclasses must provide the concrete read implementation.
        raise NotImplementedError
class FileselWidget(QtGui.QWidget):
    """Custom widget for file selection.

    A line edit plus a "Select" button that opens a file dialog.  The
    ``type`` argument picks the dialog: 'openfile', 'opendir' or
    'savefile'.
    """

    def __init__(self, type='openfile', title=None, parent=None):
        super(FileselWidget, self).__init__(parent)
        self.type = type
        self.value = ''
        mainlayout = QtGui.QGridLayout(self)
        if isinstance(title, str):
            # Optional caption above the selector row.
            mainlayout.addWidget(QtGui.QLabel(title), 0, 0)
        layout = QtGui.QHBoxLayout()
        self.text = QtGui.QLineEdit()
        self.text.setFixedWidth(300)
        layout.addWidget(self.text)
        self.button = QtGui.QPushButton("Select")
        layout.addWidget(self.button)
        mainlayout.addLayout(layout, 1, 0)
        self.button.clicked.connect(self.filesel)
        self.setContentsMargins(0, 0, 0, 0)
        self.layout().setContentsMargins(0, 0, 0, 0)

    def filesel(self):
        """Open the dialog matching ``self.type`` and store the choice."""
        if self.type == 'openfile':
            self.value = str(QtGui.QFileDialog(self).getOpenFileName())
        elif self.type == 'opendir':
            self.value = str(QtGui.QFileDialog(self).getExistingDirectory())
        elif self.type == 'savefile':
            self.value = str(QtGui.QFileDialog(self).getSaveFileName())
        self.text.setText(self.value)

    def setvalue(self, value):
        """Programmatically set the selected path."""
        self.value = value
        self.text.setText(value)

    def getvalue(self):
        """Return the current text (the user may have edited it by hand)."""
        return self.text.text()
class Window:
    """Base class for all classes in wizSDK; tracks the wizard101 app
    window.

    A ``None`` handle means "no specific window": the window is then
    always considered active and the rect spans the whole screen.
    """

    def __init__(self, handle=None):
        self.window_handle = handle

    def is_active(self) -> bool:
        """True when our window is the foreground window (or handle-less)."""
        if not self.window_handle:
            return True
        return self.window_handle == user32.GetForegroundWindow()

    def set_active(self):
        """Bring the window to the foreground; returns self for chaining."""
        if self.window_handle and not self.is_active():
            user32.SetForegroundWindow(self.window_handle)
        return self

    def get_rect(self) -> tuple:
        """Return (left, top, width, height) of the window or screen."""
        if not self.window_handle:
            return (0, 0, *screen_size())
        bounds = ctypes.wintypes.RECT()
        user32.GetWindowRect(self.window_handle, ctypes.byref(bounds))
        width = bounds.right - bounds.left
        height = bounds.bottom - bounds.top
        return (bounds.left, bounds.top, width, height)
class ThrottlingProtocol(ProtocolWrapper):
    """Protocol for ThrottlingFactory.

    Reports read/written byte counts to the factory and exposes the
    hooks the factory uses to pause/resume traffic in each direction.
    """

    def write(self, data):
        # Account for outgoing bytes before delegating.
        self.factory.registerWritten(len(data))
        ProtocolWrapper.write(self, data)

    def writeSequence(self, seq):
        # BUG FIX: reduce(operator.add, map(len, seq)) raised TypeError
        # for an empty sequence (no initializer); sum() returns 0 there
        # and is otherwise identical.
        self.factory.registerWritten(sum(map(len, seq)))
        ProtocolWrapper.writeSequence(self, seq)

    def dataReceived(self, data):
        self.factory.registerRead(len(data))
        ProtocolWrapper.dataReceived(self, data)

    def registerProducer(self, producer, streaming):
        # Remember the producer so throttleWrites()/unthrottleWrites()
        # can pause/resume it while it is registered.
        self.producer = producer
        ProtocolWrapper.registerProducer(self, producer, streaming)

    def unregisterProducer(self):
        del self.producer
        ProtocolWrapper.unregisterProducer(self)

    def throttleReads(self):
        self.transport.pauseProducing()

    def unthrottleReads(self):
        self.transport.resumeProducing()

    def throttleWrites(self):
        # Only meaningful while a producer is registered.
        if hasattr(self, "producer"):
            self.producer.pauseProducing()

    def unthrottleWrites(self):
        if hasattr(self, "producer"):
            self.producer.resumeProducing()
class RmBugIsOpenTestCase(TestCase):
    """Tests for :func:`robottelo.decorators.rm_bug_is_open`."""

    def setUp(self):
        # Back up the module-level helpers we monkey-patch, then stub
        # the closed-status list so statuses 1 and 2 count as "closed".
        self.rm_backup = decorators._get_redmine_bug_status_id
        self.stat_backup = decorators._redmine_closed_issue_statuses
        decorators._redmine_closed_issue_statuses = lambda: [1, 2]
        self.bug_id = gen_integer()

    def tearDown(self):
        # Restore the patched helpers so other tests see the real ones.
        decorators._get_redmine_bug_status_id = self.rm_backup
        decorators._redmine_closed_issue_statuses = self.stat_backup

    def test_bug_is_open(self):
        # Any status outside the stubbed closed list means "open".
        decorators._get_redmine_bug_status_id = lambda bug_id: 0
        self.assertTrue(decorators.rm_bug_is_open(self.bug_id))
        decorators._get_redmine_bug_status_id = lambda bug_id: 3
        self.assertTrue(decorators.rm_bug_is_open(self.bug_id))

    def test_bug_is_closed(self):
        # Statuses 1 and 2 are in the stubbed closed list.
        decorators._get_redmine_bug_status_id = lambda bug_id: 1
        self.assertFalse(decorators.rm_bug_is_open(self.bug_id))
        decorators._get_redmine_bug_status_id = lambda bug_id: 2
        self.assertFalse(decorators.rm_bug_is_open(self.bug_id))

    def test_bug_lookup_fails(self):
        # A failed lookup (BugFetchError) must be treated as "not open".
        def bomb(_):
            raise decorators.BugFetchError
        decorators._get_redmine_bug_status_id = bomb
        self.assertFalse(decorators.rm_bug_is_open(self.bug_id))
class ColdDeadWater(Story):
    """You suddenly discover that you are conscious. Darkness envelops
    you. You struggle to remember anything, even your own name.

    Slowly, you begin to remember.
    """

    __name__ = 'Cold Dead Water'
    __version__ = '0.0.1'

    # Story configuration: data files, safe-zone node prefix and the
    # starting / armory node IDs on the map.
    options = {
        'excel_file': './stories/colddeadwater/data/map.xlsx',
        'map_file': './stories/colddeadwater/data/map.csv',
        'node_file': './stories/colddeadwater/data/nodes.csv',
        'safe_prefix': 'HQ',
        'start_at': 'HQ8',
        'armory_at': 'HQ4'
    }

    def initialize(self):
        """Seed the map with random loot and zombies.

        Builds a weighted loot pool (class objects repeated per desired
        frequency), drops one random item on ~65% of nodes, spawns a
        zombie on ~66% of nodes outside the safe zone, then sorts each
        node's contents.
        """
        # Weighted loot pool: list repetition controls drop frequency.
        d = list()
        d += [MP5] * 10
        d += [Model70] * 10
        d += [Model870] * 10
        d += [M1911] * 10
        d += [Magazine9mm] * 40
        d += [Magazine45ACP] * 40
        d += [Box12Gauge] * 40
        d += [BoxRifleCartridges] * 30
        d += [Crowbar] * 20
        d += [Frag] * 20
        d += [Radio] * 10
        d += [ClifBar] * 20
        d += [Burrito] * 20
        d += [CivilWarTrenchcoat] * 5
        d += [LeatherBoots] * 5
        d += [MotorcycleHelmet] * 5
        d += [RippedJeans] * 10
        d += [Tshirt] * 15
        d += [FreeBSDshirt] * 5
        d += [VibramFivefinger] * 2
        d += [Crate] * 5
        # Scatter loot: pick a random node, instantiate a random item.
        for i in range(int(len(self.map.nodes) * 0.65)):
            node = random.choice(list(self.map.nodes.values()))
            node.objects.append(random.choice(d)())
        # Spawn zombies everywhere except safe (HQ-prefixed) nodes.
        for i in range(int(len(self.map.nodes) * 0.66)):
            node = random.choice(list(self.map.nodes.values()))
            if not node.id.startswith(self.options.get('safe_prefix')):
                z = enemies.Zombie()
                z.move_to(node)
        for node in self.map.nodes.values():
            node.sort()

    def on_player_connect(self, player):
        """Give a new player a random identity and starter gear."""
        player.gender = random.choice(['M', 'F'])
        player.name = construct_name(player.gender)
        player.occupation = construct_occupation()
        # Let the player be referred to by parts of their own name.
        player.nouns.update(set(player.name.lower().split()))
        player.description = english.resolve_single(
            player, player.__class__.description)
        player.do('wakeup')
        # Starter loadout: a pistol and three magazines.
        player.equip(M1911())
        player.inventory += [Magazine45ACP(), Magazine45ACP(), Magazine45ACP()]
        player.send(serialize.full_character(player))
        return player

    def on_player_disconnect(self, player):
        # Disconnecting players burst into flames rather than linger.
        player.do('combust')
class check_crc(gr.basic_block):
    """GNU Radio block that checks the CRC-32C trailer of CSP packets.

    PDUs arriving on the 'in' message port are republished on 'ok' or
    'fail' depending on the CRC check.  Packets whose CSP header says
    CRC is unused pass straight to 'ok' unless ``force`` is set.
    """

    def __init__(self, include_header, verbose, force=False):
        # include_header: include the 4-byte CSP header in the CRC input
        # verbose: print per-packet diagnostics
        # force: check the CRC even when the header's CRC flag is unset
        gr.basic_block.__init__(
            self,
            name='check_crc',
            in_sig=[],
            out_sig=[])
        self.include_header = include_header
        self.verbose = verbose
        self.force = force
        self.message_port_register_in(pmt.intern('in'))
        self.set_msg_handler(pmt.intern('in'), self.handle_msg)
        self.message_port_register_out(pmt.intern('ok'))
        self.message_port_register_out(pmt.intern('fail'))

    def handle_msg(self, msg_pmt):
        """Validate one PDU and republish it on 'ok' or 'fail'."""
        msg = pmt.cdr(msg_pmt)
        if not pmt.is_u8vector(msg):
            print('[ERROR] Received invalid message type. Expected u8vector')
            return
        packet = pmt.u8vector_elements(msg)
        try:
            # The first 4 bytes are the CSP header.
            header = csp_header.CSP(packet[:4])
        except ValueError as e:
            if self.verbose:
                print(e)
            return
        if not self.force and not header.crc:
            # Header says no CRC: forward unchanged.
            if self.verbose:
                print('CRC not used')
            self.message_port_pub(pmt.intern('ok'), msg_pmt)
        else:
            if len(packet) < 8:
                # Need at least a header plus a 4-byte CRC trailer.
                if self.verbose:
                    print('Malformed CSP packet (too short)')
                return
            # CRC covers the header only when include_header is set; the
            # last 4 bytes are always the transmitted CRC.
            crc = crc32c.crc(packet[:-4] if self.include_header
                             else packet[4:-4])
            packet_crc = struct.unpack('>I', bytes(packet[-4:]))[0]
            if crc == packet_crc:
                if self.verbose:
                    print('CRC OK')
                self.message_port_pub(pmt.intern('ok'), msg_pmt)
            else:
                if self.verbose:
                    print('CRC failed')
                self.message_port_pub(pmt.intern('fail'), msg_pmt)
class ExportEnvironmentForm(forms.ModelForm):
    """Form for exporting SDIs to LTI with fields populated in the
    front-end.
    """

    # Hidden because the UUID is filled in by front-end code, not typed
    # by the user.
    sdios_environment_uuid = forms.CharField(widget=forms.HiddenInput)

    class Meta:
        model = EnvironmentMap
        fields = ["name", "lti_environment_key", "sdios_environment_uuid"]
class ChunkBySlice(unittest.TestCase): <NEW_LINE> <INDENT> def generate_tests(self, num, disabled=None): <NEW_LINE> <INDENT> disabled = disabled or [] <NEW_LINE> tests = [] <NEW_LINE> for i in range(num): <NEW_LINE> <INDENT> test = {'name': 'test%i' % i} <NEW_LINE> if i in disabled: <NEW_LINE> <INDENT> test['disabled'] = '' <NEW_LINE> <DEDENT> tests.append(test) <NEW_LINE> <DEDENT> return tests <NEW_LINE> <DEDENT> def run_all_combos(self, num_tests, disabled=None): <NEW_LINE> <INDENT> tests = self.generate_tests(num_tests, disabled=disabled) <NEW_LINE> for total in range(1, num_tests + 1): <NEW_LINE> <INDENT> res = [] <NEW_LINE> res_disabled = [] <NEW_LINE> for chunk in range(1, total+1): <NEW_LINE> <INDENT> f = chunk_by_slice(chunk, total) <NEW_LINE> res.append(list(f(tests, {}))) <NEW_LINE> if disabled: <NEW_LINE> <INDENT> f.disabled = True <NEW_LINE> res_disabled.append(list(f(tests, {}))) <NEW_LINE> <DEDENT> <DEDENT> lengths = [len([t for t in c if 'disabled' not in t]) for c in res] <NEW_LINE> self.assertLessEqual(max(lengths) - min(lengths), 1) <NEW_LINE> self.assertEqual(list(chain.from_iterable(res)), list(tests)) <NEW_LINE> if disabled: <NEW_LINE> <INDENT> lengths = [len(c) for c in res_disabled] <NEW_LINE> self.assertLessEqual(max(lengths) - min(lengths), 1) <NEW_LINE> self.assertEqual(list(chain.from_iterable(res_disabled)), list(tests)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_chunk_by_slice(self): <NEW_LINE> <INDENT> chunk = chunk_by_slice(1, 1) <NEW_LINE> self.assertEqual(list(chunk([], {})), []) <NEW_LINE> self.run_all_combos(num_tests=1) <NEW_LINE> self.run_all_combos(num_tests=10, disabled=[1, 2]) <NEW_LINE> num_tests = 67 <NEW_LINE> disabled = list(i for i in xrange(num_tests) if i % 4 == 0) <NEW_LINE> self.run_all_combos(num_tests=num_tests, disabled=disabled) | Test chunking related filters | 6259903750485f2cf55dc0f5 |
class PluginActivationError(PluginError):
    """Error raised when there was an error during plugin activation.

    NOTE(review): ``PluginError.__init__`` is never invoked here, and
    the ``.arg()`` chaining relies on the QString return of
    ``QApplication.translate`` (PyQt API v1) -- confirm both against
    the rest of the code base.
    """

    def __init__(self, name):
        # Localized message with the plugin module name substituted for
        # the %1 placeholder.
        self._errorMessage = QApplication.translate(
            "PluginError",
            "Error activating plugin module: %1") .arg(name)
class HardTimeoutException(OTSException):
    """Exception that is raised when a hard timeout occurs."""

    # Framework error number identifying hard-timeout failures.
    errno = 6001
class Constraint(helpers.RateObject):
    """Represents an inequality constraint.

    This class is nothing but a thin wrapper around an `Expression`,
    and represents the constraint that the wrapped expression is
    non-positive.
    """

    def __init__(self, expression):
        # The wrapped expression; exposed read-only via ``expression``.
        self._expression = expression

    @property
    def expression(self):
        """The wrapped `Expression` (read-only)."""
        return self._expression
class Point:
    """A 2-D point with ``x``/``y`` coordinates.

    Supports component-wise addition (``p + q``) and scalar
    multiplication (``p * k`` and ``k * p``); both return new points.
    """

    def __init__(self, x=0, y=0):
        self.x = x
        self.y = y

    def __str__(self):
        return f"A point with coordinates x:{self.x}, y:{self.y}"

    def __repr__(self):
        return f"Point({self.x},{self.y})"

    def __add__(self, other):
        """Component-wise sum; returns a new Point."""
        return Point(self.x + other.x, self.y + other.y)

    def __mul__(self, factor):
        """Scale by *factor*, returning a NEW Point.

        BUG FIX: the original mutated ``self`` in place and returned it,
        so ``p * 2`` silently changed ``p``.  Operators should not
        mutate their operands, and ``__add__`` already returns a new
        Point; the returned value is unchanged.
        """
        return Point(self.x * factor, self.y * factor)

    # Backward-compatible addition: allow ``k * p`` as well as ``p * k``.
    __rmul__ = __mul__
class CANErrorFrame(object):
    """CAN Error frame: a dominant error-flag burst followed by a
    recessive delimiter, optionally preceded by inter-frame space."""

    def __init__(self, flag_bits=6, ifs_bits=0):
        # Error flags are constrained to the 6..12 bit range.
        self.flag_bits = min(12, max(6, flag_bits))
        self.ifs_bits = max(0, ifs_bits)

    def __repr__(self):
        return 'CANErrorFrame({}, {})'.format(self.flag_bits, self.ifs_bits)

    def __str__(self):
        return '(error)'

    def get_edges(self, t, bit_period):
        """Return (time, level) edge tuples for this frame starting at t."""
        result = []
        if self.ifs_bits > 0:
            # Recessive inter-frame space before the error flags.
            result.append((t, 1))
            t += self.ifs_bits * bit_period
        # Dominant error flags.
        result.append((t, 0))
        t += self.flag_bits * bit_period
        # Recessive delimiter (8 bits), closed by a final recessive edge.
        result.append((t, 1))
        t += 8 * bit_period
        result.append((t, 1))
        return result

    def __eq__(self, other):
        if not isinstance(other, CANErrorFrame):
            return False
        return str(self) == str(other)

    def __ne__(self, other):
        return not self.__eq__(other)
class Type(object):
    """Type of image feature.

    Attributes:
        TYPE_UNSPECIFIED (int): Unspecified feature type.
        FACE_DETECTION (int): Run face detection.
        LANDMARK_DETECTION (int): Run landmark detection.
        LOGO_DETECTION (int): Run logo detection.
        LABEL_DETECTION (int): Run label detection.
        TEXT_DETECTION (int): Run OCR.
        DOCUMENT_TEXT_DETECTION (int): Run dense text document OCR.
            Takes precedence when both DOCUMENT_TEXT_DETECTION and
            TEXT_DETECTION are present.
        SAFE_SEARCH_DETECTION (int): Run computer vision models to
            compute image safe-search properties.
        IMAGE_PROPERTIES (int): Compute a set of image properties, such
            as the image's dominant colors.
        CROP_HINTS (int): Run crop hints.
        WEB_DETECTION (int): Run web detection.
    """

    TYPE_UNSPECIFIED = 0
    FACE_DETECTION = 1
    LANDMARK_DETECTION = 2
    LOGO_DETECTION = 3
    LABEL_DETECTION = 4
    TEXT_DETECTION = 5
    DOCUMENT_TEXT_DETECTION = 11
    SAFE_SEARCH_DETECTION = 6
    IMAGE_PROPERTIES = 7
    CROP_HINTS = 9
    WEB_DETECTION = 10
class ReportView(APIView):
    """Main handler for /report/{id}."""

    def get(self, request, report_slug=None):
        """Render the report page (or redirect to the first report /
        the device list when devices are unconfigured)."""
        try:
            if report_slug is None:
                # No slug: redirect to the first report alphabetically.
                reports = Report.objects.order_by('slug')
                return HttpResponseRedirect(
                    reverse('report-view', args=[reports[0].slug]))
            else:
                report = Report.objects.get(slug=report_slug)
        except:
            raise Http404
        logging.debug('Received request for report page: %s' % report_slug)
        # Any enabled device still holding placeholder credentials means
        # setup is incomplete -- send the user to the device list.
        for device in Device.objects.all():
            if (device.enabled and ('host.or.ip' in device.host or
                                    device.username == '<username>' or
                                    device.password == '<password>')):
                return HttpResponseRedirect(reverse('device-list'))
        form_init = {'ignore_cache': request.user.userprofile.ignore_cache}
        form = create_report_criteria_form(initial=form_init, report=report)
        profile = request.user.userprofile
        return render_to_response('report.html',
                                  {'report': report,
                                   'developer': profile.developer,
                                   'maps_version': profile.maps_version,
                                   'maps_api_key': profile.maps_api_key,
                                   'formstyle': FORMSTYLE,
                                   'form': form},
                                  context_instance=RequestContext(request))

    def post(self, request, report_slug):
        """Validate the criteria form and return the JSON widget
        definitions for the report, or the form HTML with status 400."""
        try:
            report = Report.objects.get(slug=report_slug)
        except:
            raise Http404
        logger.debug("Received POST for report %s, with params: %s" %
                     (report_slug, request.POST))
        form = create_report_criteria_form(request.POST, request.FILES,
                                           report=report)
        if form.is_valid():
            formdata = form.cleaned_data
            if formdata['debug']:
                # Fresh logs make debug captures easier to read.
                logger.debug("Debugging report and rotating logs now ...")
                management.call_command('rotate_logs')
            logger.debug("Report %s validated form: %s" %
                         (report_slug, formdata))
            profile = request.user.userprofile
            timezone = pytz.timezone(profile.timezone)
            definition = []
            now = datetime.datetime.now(timezone)
            # First element is report-level metadata; the rest are
            # per-widget definitions.
            definition.append({'datetime': str(date(now, 'jS F Y H:i:s')),
                               'timezone': str(timezone),
                               'debug': formdata['debug']})
            # Group widgets into rows (query is ordered by row, col).
            lastrow = -1
            rows = []
            for w in Widget.objects.filter(report=report).order_by('row',
                                                                   'col'):
                if w.row != lastrow:
                    lastrow = w.row
                    rows.append([])
                rows[-1].append(Widget.objects.get_subclass(id=w.id))
            for row in rows:
                for w in row:
                    widget_def = {"widgettype": w.widgettype().split("."),
                                  "posturl": "/report/%s/widget/%d/jobs/" %
                                             (report.slug, w.id),
                                  "options": w.uioptions,
                                  "widgetid": w.id,
                                  "row": w.row,
                                  "width": w.width,
                                  "height": w.height,
                                  "criteria": form.criteria()
                                  }
                    definition.append(widget_def)
            logger.debug("Sending widget definitions for report %s: %s" %
                         (report_slug, definition))
            return HttpResponse(json.dumps(definition))
        else:
            # Invalid criteria: return the rendered form for re-display.
            return HttpResponse(str(form), status=400)
class SourceCatalogHGPS(SourceCatalog):
    """HESS Galactic plane survey (HGPS) source catalog.

    Note: this catalog isn't publicly available yet.  For now you need
    to be a H.E.S.S. member with an account at MPIK to fetch it.
    """

    name = 'hgps'
    description = 'H.E.S.S. Galactic plane survey (HGPS) source catalog'
    source_object_class = SourceCatalogObjectHGPS

    def __init__(self, filename=None, hdu='HGPS_SOURCES'):
        if not filename:
            # Default location inside the (member-only) HGPS analysis tree.
            filename = Path(os.environ['HGPS_ANALYSIS']) / 'data/catalogs/HGPS3/release/HGPS_v0.4.fits'
        self.filename = str(filename)
        self.hdu_list = fits.open(str(filename))
        if hdu == 'HGPS_SOURCES':
            table = Table.read(self.hdu_list['HGPS_SOURCES'])
            # Auxiliary tables only exist for the main source list.
            self.components = Table.read(self.hdu_list['HGPS_COMPONENTS'])
            self.associations = Table.read(self.hdu_list['HGPS_ASSOCIATIONS'])
            self.identifications = Table.read(self.hdu_list['HGPS_IDENTIFICATIONS'])
        elif hdu == 'HGPS_SOURCES_PA':
            table = Table.read(self.hdu_list['HGPS_SOURCES_PA'])
        elif hdu == 'HESS_GALACTIC':
            table = Table.read(self.hdu_list['HESS_GALACTIC_SOURCES'])
        else:
            raise ValueError("Must be one of the following: 'HGPS_SOURCES',"
                             "'HGPS_SOURCES_PA' or 'HESS_GALACTIC'")
        super(SourceCatalogHGPS, self).__init__(table=table)

    def _make_source_object(self, index):
        """Build one source, attaching component / association info when
        the auxiliary tables were loaded."""
        source = super(SourceCatalogHGPS, self)._make_source_object(index)
        if hasattr(self, 'components'):
            if source.data['Components'] != '':
                self._attach_component_info(source)
        if hasattr(self, 'associations'):
            self._attach_association_info(source)
        return source

    def _attach_component_info(self, source):
        # Resolve the comma-separated component IDs into component objects.
        source.components = []
        lookup = SourceCatalog(self.components,
                               source_name_key='Component_ID')
        for name in source.data['Components'].split(', '):
            component = HGPSGaussComponent(data=lookup[name].data)
            source.components.append(component)

    def _attach_association_info(self, source):
        # Collect every association row matching this source's name.
        source.associations = []
        _ = source.data['Source_Name'] == self.associations['Source_Name']
        source.associations = list(self.associations['Association_Name'][_])
class IllegalArgument(Exception):
    """An IllegalArgument exception indicates an illegal or invalid
    argument was passed into a procedure.

    Attributes:
     - message

    NOTE(review): Thrift-generated serialization code (Python 2 era --
    note ``iteritems``); change the .thrift IDL rather than this class.
    """

    # (field id, type, name, type args, default) per Thrift field.
    thrift_spec = (
        None,  # field 0 is unused
        (1, TType.STRING, 'message', None, None, ),
    )

    def __init__(self, message=None,):
        self.message = message

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: C-accelerated decoding when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decoding.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.message = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field: skip it for forward compatibility.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        # Fast path: C-accelerated encoding when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('IllegalArgument')
        if self.message != None:
            oprot.writeFieldBegin('message', TType.STRING, 1)
            oprot.writeString(self.message)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields to validate.
        return

    def __str__(self):
        return repr(self)

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class start_share(object):
    """<+description of block+>

    NOTE(review): SWIG-generated proxy class for the C++ block; edit
    the underlying C++/SWIG sources rather than this wrapper.
    Instances are created via the static ``make()`` factory only.
    """

    # SWIG ownership flag for the wrapped C++ object.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    def __init__(self, *args, **kwargs):
        # Direct construction is forbidden; use make() instead.
        raise AttributeError("No constructor defined")
    __repr__ = _swig_repr

    def make():
        """Factory: create a new start_share block instance."""
        return _pmt_cpp_swig.start_share_make()
    make = staticmethod(make)

    # Destructor hook for the wrapped C++ object.
    __swig_destroy__ = _pmt_cpp_swig.delete_start_share
    __del__ = lambda self : None
class Solution:
    """Deep-copy a complex linked list.

    Each ``RandomListNode`` carries a ``label``, a ``next`` pointer and
    a ``random`` pointer to an arbitrary node (or None).  ``Clone``
    returns the head of a fully independent copy -- no node of the
    input list is reused.  (Docstring translated from the original
    Chinese problem statement.)
    """

    def Clone(self, pHead):
        """Return a deep copy of the list headed by *pHead*.

        O(n) time, O(1) extra space, via three passes: interleave the
        copies after their originals, wire the ``random`` pointers
        through the interleaving, then unzip the two lists (restoring
        the original).

        NOTE: two alternative implementations that followed the first
        ``return`` in the original were unreachable dead code (one even
        referenced a non-existent ``result.none``); they were removed.
        """
        if not pHead:
            return None
        # Pass 1: insert a copy of every node right after the original,
        # producing A -> A' -> B -> B' -> ...
        cur = pHead
        while cur:
            copyNode = RandomListNode(cur.label)
            copyNode.next = cur.next
            cur.next = copyNode
            cur = copyNode.next
        # Pass 2: a copy's random target is the node right after the
        # original's random target (its copy).
        cur = pHead
        while cur:
            copyList = cur.next
            if cur.random:
                copyList.random = cur.random.next
            cur = copyList.next
        # Pass 3: unzip the interleaved list back into two lists,
        # restoring the original and returning the copy's head.
        cur = pHead
        result = pHead.next
        while cur.next:
            temp = cur.next
            cur.next = temp.next
            cur = temp
        return result
class CcTextDelegate(QStyledItemDelegate, UpdateEditorGeometry):
    """Delegate for text data.

    Provides an auto-completing line editor whose completion items come
    from all existing values of the custom column being edited.
    """

    def __init__(self, parent):
        QStyledItemDelegate.__init__(self, parent)
        self.table_widget = parent

    def createEditor(self, parent, option, index):
        """Create the completing editor, seeded with the column's
        existing custom values."""
        m = index.model()
        col = m.column_map[index.column()]
        editor = EditWithComplete(parent)
        # Single-value field: no separator between completions.
        editor.set_separator(None)
        complete_items = sorted(list(m.db.all_custom(label=m.db.field_metadata.key_to_label(col))), key=sort_key)
        editor.update_items_cache(complete_items)
        return editor

    def setEditorData(self, editor, index):
        editor.setText(get_val_for_textlike_columns(index))
        editor.selectAll()

    def setModelData(self, editor, model, index):
        val = editor.text()
        model.setData(index, (val), Qt.EditRole)
class Sqrt(UnaryNode): <NEW_LINE> <INDENT> def __init__(self, child): <NEW_LINE> <INDENT> super(Sqrt, self).__init__(child) <NEW_LINE> self.in_vars = child.in_vars <NEW_LINE> self.out_vars = child.out_vars <NEW_LINE> self.name = 'sqrt(' + child.name + ')' | A class for storing STL Sqrt nodes
Inherits Node | 6259903730c21e258be99986 |
class GetTopTagsResultSet(ResultSet): <NEW_LINE> <INDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) | Retrieve the value for the "Response" output from this choreography execution. ((xml) The response from Last.fm.) | 6259903707d97122c4217e15 |
@PIPELINES.register_module <NEW_LINE> class RandomRotate(object): <NEW_LINE> <INDENT> def __init__(self, angles=None): <NEW_LINE> <INDENT> self.angles = angles <NEW_LINE> <DEDENT> def _rotate_img(self, results): <NEW_LINE> <INDENT> angle = self.angle <NEW_LINE> height, width = results['img_shape'][:2] <NEW_LINE> heightNew = int(width * fabs(sin(radians(angle))) + height * fabs(cos(radians(angle)))) <NEW_LINE> widthNew = int(height * fabs(sin(radians(angle))) + width * fabs(cos(radians(angle)))) <NEW_LINE> matRotation = cv2.getRotationMatrix2D((width / 2, height / 2), angle, 1) <NEW_LINE> matRotation[0, 2] += (widthNew - width) / 2 <NEW_LINE> matRotation[1, 2] += (heightNew - height) / 2 <NEW_LINE> imgRotation = cv2.warpAffine(results['img'], matRotation, (widthNew, heightNew), borderValue=(255, 255, 255)) <NEW_LINE> results['img'] = imgRotation <NEW_LINE> for key in results.get('bbox_fields', []): <NEW_LINE> <INDENT> gt_boxes_ret = [] <NEW_LINE> for poly in results[key]: <NEW_LINE> <INDENT> rot_array = [] <NEW_LINE> for i in range(0, len(poly), 2): <NEW_LINE> <INDENT> rot_array.append(np.array([int(poly[i]), int(poly[i + 1])])) <NEW_LINE> <DEDENT> rot_array = np.array([rot_array]) <NEW_LINE> rot_array = cv2.transform(rot_array, matRotation).squeeze().reshape(len(poly)) <NEW_LINE> gt_boxes_ret.append(rot_array) <NEW_LINE> <DEDENT> results[key] = gt_boxes_ret <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, results): <NEW_LINE> <INDENT> assert isinstance(self.angles, (list, tuple)) <NEW_LINE> if len(self.angles) == 1: <NEW_LINE> <INDENT> angle = self.angles[0] <NEW_LINE> <DEDENT> elif len(self.angles) == 2: <NEW_LINE> <INDENT> angle_max = max(self.angles) <NEW_LINE> angle_min = min(self.angles) <NEW_LINE> angle = np.random.randint(angle_min, angle_max) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> angle = np.random.choice(self.angles) <NEW_LINE> <DEDENT> self.angle = angle <NEW_LINE> self._rotate_img(results) <NEW_LINE> return results <NEW_LINE> <DEDENT> def 
__repr__(self): <NEW_LINE> <INDENT> repr_str = self.__class__.__name__ <NEW_LINE> repr_str += '(angles={})'.format(self.angles) <NEW_LINE> return repr_str | Description:
randomly rotate images and corresponding annotations
angles: contains single value or multiple values
if angles contains single value, this value represents `rotating fixed angle`;
if angles contains two values, angles represents `rotating random angle in the interval ranged by these two values`
if angles contains more than two values, angles represents `randomly choosing a value in this list as rotation angle`
Property:
angles(int/tuple/list(tuple)): rotation angles | 62599037d6c5a102081e329f |
class BlockStorageAnalyst(interface.Analyst): <NEW_LINE> <INDENT> image = interface.Parameter("analyst.block-storage.image") <NEW_LINE> partition = interface.Parameter("analyst.block-storage.partition") <NEW_LINE> block_size = interface.Parameter("analyst.block-storage.block-size") <NEW_LINE> output = interface.Parameter("analyst.block-storage.output") <NEW_LINE> def analyse(self, execution, sample): <NEW_LINE> <INDENT> logger.info("Attaching '%s' for block-storage analysis", self.image) <NEW_LINE> device = volume.attach(self.image) <NEW_LINE> try: <NEW_LINE> <INDENT> partition_device = volume.partition(device, self.partition) <NEW_LINE> volume.wait_for_partition(partition_device) <NEW_LINE> logger.info("Generating a hash of each block on the partition") <NEW_LINE> output_filename = self.output.format(execution=execution) <NEW_LINE> handle = bz2.BZ2File(output_filename, "w") <NEW_LINE> for block in blocks(partition_device, self.block_size): <NEW_LINE> <INDENT> checksum = hashlib.md5(block) <NEW_LINE> handle.write(checksum.hexdigest()) <NEW_LINE> handle.write("\n") <NEW_LINE> <DEDENT> logger.info("Finished block-storage analysis, detaching volume") <NEW_LINE> handle.close() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> volume.detach(device) <NEW_LINE> <DEDENT> <DEDENT> def discard(self, execution): <NEW_LINE> <INDENT> output_filename = self.output.format(execution=execution) <NEW_LINE> try: <NEW_LINE> <INDENT> os.unlink(output_filename) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass | Block storage digital forensic analyst.
Parameters:
analyst.block-storage.image -- the name of the image file.
analyst.block-storage.partition -- the index of the partition.
analyst.block-storage.block-size -- the block size of the partition.
analyst.block-storage.output -- the name of the output file. | 625990371d351010ab8f4c93 |
class Rest(object): <NEW_LINE> <INDENT> def make_command(self, fn): <NEW_LINE> <INDENT> argspec = inspect.getargspec(fn) <NEW_LINE> def self_fn(*args): <NEW_LINE> <INDENT> argcount = fn.func_code.co_argcount <NEW_LINE> if argcount > len(args)+1 and fn.__doc__: <NEW_LINE> <INDENT> return fn.__doc__ <NEW_LINE> <DEDENT> if argspec.varargs: <NEW_LINE> <INDENT> return fn(self, *args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return fn(self, *args[:argcount-1]) <NEW_LINE> <DEDENT> <DEDENT> return self_fn; <NEW_LINE> <DEDENT> def set_endpoint(self, endpoint): <NEW_LINE> <INDENT> self.endpoint = endpoint <NEW_LINE> <DEDENT> def set_credential(self, api_id, api_key): <NEW_LINE> <INDENT> self.api_id = api_id <NEW_LINE> self.api_key = api_key <NEW_LINE> <DEDENT> def set_auth_type(self, auth_type): <NEW_LINE> <INDENT> self.auth_type = auth_type <NEW_LINE> <DEDENT> def set_headers(self, headers={}): <NEW_LINE> <INDENT> self.headers = headers <NEW_LINE> <DEDENT> def make_auth(self, method, path, head={}, data=None): <NEW_LINE> <INDENT> if not hasattr(self,"auth_type"): <NEW_LINE> <INDENT> self.auth_type = None <NEW_LINE> <DEDENT> if self.auth_type == "basic": <NEW_LINE> <INDENT> return (self.api_id, self.api_key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def make_querystring(self, **kwargs): <NEW_LINE> <INDENT> qstr = "" <NEW_LINE> for k, v in kwargs.iteritems(): <NEW_LINE> <INDENT> if qstr: <NEW_LINE> <INDENT> qstr += "&" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> qstr = "?" 
<NEW_LINE> <DEDENT> import re <NEW_LINE> v = re.sub(r"\s+", r"+", v) <NEW_LINE> qstr += "%s=%s" % (k,v) <NEW_LINE> <DEDENT> return qstr <NEW_LINE> <DEDENT> def make_path(self, path_fmt, **kwargs): <NEW_LINE> <INDENT> fmt = Template(path_fmt) <NEW_LINE> path = fmt.safe_substitute(**kwargs) <NEW_LINE> return path <NEW_LINE> <DEDENT> def make_url(self, path): <NEW_LINE> <INDENT> url = self.endpoint + path <NEW_LINE> return url <NEW_LINE> <DEDENT> def _filter_dict(self, fields): <NEW_LINE> <INDENT> def filter_dict_by_fields(d): <NEW_LINE> <INDENT> newD = {} <NEW_LINE> for k in fields: <NEW_LINE> <INDENT> if k in d: <NEW_LINE> <INDENT> newD[k] = d[k] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return filter_dict_by_fields <NEW_LINE> <DEDENT> def filter_list_dict(self, json, *fields): <NEW_LINE> <INDENT> if fields: <NEW_LINE> <INDENT> json = map(lambda d: dict([(k, d[k]) for k in fields if k in d]), json) <NEW_LINE> <DEDENT> return json <NEW_LINE> <DEDENT> def filter_dict(self, json, *fields): <NEW_LINE> <INDENT> if fields: <NEW_LINE> <INDENT> json = dict([(k, json[k]) for k in fields if k in json]) <NEW_LINE> <DEDENT> return json <NEW_LINE> <DEDENT> def get(self, path, headers={}): <NEW_LINE> <INDENT> url = self.make_url(path) <NEW_LINE> auth = self.make_auth("GET", path) <NEW_LINE> if hasattr(self, "headers"): <NEW_LINE> <INDENT> headers.update(self.headers) <NEW_LINE> <DEDENT> response = requests.get(url, auth=auth, headers=headers) <NEW_LINE> return response <NEW_LINE> <DEDENT> def post(self, path, data=None, headers={}): <NEW_LINE> <INDENT> url = self.make_url(path) <NEW_LINE> auth = self.make_auth("POST", path) <NEW_LINE> if hasattr(self, "headers"): <NEW_LINE> <INDENT> headers.update(self.headers) <NEW_LINE> <DEDENT> response = requests.post(url, auth=auth, headers=headers) <NEW_LINE> return response | classdocs | 62599037d99f1b3c44d0681e |
class KnowledgeGraph(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._rels = {} <NEW_LINE> <DEDENT> def add(self, subject=None, rel=None, target=None, certainty=1): <NEW_LINE> <INDENT> previous = self.get(subject, rel, target) <NEW_LINE> if previous: <NEW_LINE> <INDENT> for p in previous: <NEW_LINE> <INDENT> self._rels[rel].remove(p) <NEW_LINE> self._rels[rel].append((p[0], p[1], certainty)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if rel not in self._rels: <NEW_LINE> <INDENT> self._rels[rel] = [] <NEW_LINE> <DEDENT> self._rels[rel].append((subject, target, certainty)) <NEW_LINE> <DEDENT> <DEDENT> def get(self, subject=None, rel=None, target=None, certainty=None): <NEW_LINE> <INDENT> if rel is None or rel not in self._rels: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> out = [] <NEW_LINE> entries = self._rels[rel] <NEW_LINE> for i in entries: <NEW_LINE> <INDENT> if subject and i[0] != subject: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if target and i[1] != target: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if certainty is not None and i[2] != certainty: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> out.append(i) <NEW_LINE> <DEDENT> if len(out) == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return out | The graph stores relationships by key, with a tuple of the subject, the
target, and the certainty of each relationship.
Hamlet is located in Elsinore:
{'located':('hamlet', 'elsinore', 1)}
Elsinore is the location of Hamlet:
{'location':('elsinore', 'hamlet', 1)} | 62599037baa26c4b54d50422 |
class LoginUserView(APIView): <NEW_LINE> <INDENT> def post(self, request, *args): <NEW_LINE> <INDENT> data = request.data <NEW_LINE> username = data.get('username') <NEW_LINE> password = data.get('password') <NEW_LINE> user = authenticate(username=username, password=password) <NEW_LINE> if user: <NEW_LINE> <INDENT> payload = jwt_payload_handler(user) <NEW_LINE> token = {'token': jwt.encode(payload, SECRET_KEY), 'status': 'success' } <NEW_LINE> return Response(token) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> token = {'error': 'Invalid credentials', 'status': 'failed' } <NEW_LINE> return Response(token) | Log in user API view. | 6259903707d97122c4217e16 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.