code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class TestCandidateFlagsPage(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return CandidateFlagsPage( pagination = openfec_sdk.models.offset_info.OffsetInfo( count = 56, page = 56, pages = 56, per_page = 56, ), results = [ openfec_sdk.models.candidate_flags.CandidateFlags( candidate_id = '0', federal_funds_flag = True, has_raised_funds = True, ) ] ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return CandidateFlagsPage( ) <NEW_LINE> <DEDENT> <DEDENT> def testCandidateFlagsPage(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True)
CandidateFlagsPage unit test stubs
62599063435de62698e9d51d
class VnfOnboardingReply(Model): <NEW_LINE> <INDENT> def __init__(self, onboarded_vnf_pkg_info_id: str=None, vnfd_id: str=None): <NEW_LINE> <INDENT> self.swagger_types = { "onboarded_vnf_pkg_info_id": str, "vnfd_id": str } <NEW_LINE> self.attribute_map = { "onboarded_vnf_pkg_info_id": "onboardedVnfPkgInfoId", "vnfd_id": "vnfdId" } <NEW_LINE> self._onboarded_vnf_pkg_info_id = onboarded_vnf_pkg_info_id <NEW_LINE> self._vnfd_id = vnfd_id <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> "VnfOnboardingReply": <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def onboarded_vnf_pkg_info_id(self) -> str: <NEW_LINE> <INDENT> return self._onboarded_vnf_pkg_info_id <NEW_LINE> <DEDENT> @onboarded_vnf_pkg_info_id.setter <NEW_LINE> def onboarded_vnf_pkg_info_id(self, onboarded_vnf_pkg_info_id: str): <NEW_LINE> <INDENT> if onboarded_vnf_pkg_info_id is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `onboarded_vnf_pkg_info_id`, must not be `None`") <NEW_LINE> <DEDENT> self._onboarded_vnf_pkg_info_id = onboarded_vnf_pkg_info_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def vnfd_id(self) -> str: <NEW_LINE> <INDENT> return self._vnfd_id <NEW_LINE> <DEDENT> @vnfd_id.setter <NEW_LINE> def vnfd_id(self, vnfd_id: str): <NEW_LINE> <INDENT> if vnfd_id is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `vnfd_id`, must not be `None`") <NEW_LINE> <DEDENT> self._vnfd_id = vnfd_id
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599063d6c5a102081e3839
class ParamType(object): <NEW_LINE> <INDENT> PRIMPAR_LABEL = 0x20 <NEW_LINE> HND = 0x10 <NEW_LINE> ADR = 0x08 <NEW_LINE> LCS = 0x84 <NEW_LINE> LAB1 = 0xA0 <NEW_LINE> LC0 = 0x00 <NEW_LINE> LC1 = 0x81 <NEW_LINE> LC2 = 0x82 <NEW_LINE> LC4 = 0x83 <NEW_LINE> LCA = 0x81 <NEW_LINE> LV1 = 0xC1 <NEW_LINE> LV2 = 0xC2 <NEW_LINE> LV4 = 0xC3 <NEW_LINE> LVA = 0xC1 <NEW_LINE> GV0 = 0x60 <NEW_LINE> GV1 = 0xE1 <NEW_LINE> GV2 = 0xE2 <NEW_LINE> GV4 = 0xE3 <NEW_LINE> GVA = 0xE1 <NEW_LINE> FLOAT = 0xFF
Parameter types that are used by the VM.
62599063be8e80087fbc079c
class ValidateRigControlsDefaults(pyblish.api.InstancePlugin): <NEW_LINE> <INDENT> order = colorbleed.api.ValidateContentsOrder + 0.05 <NEW_LINE> label = "Rig Controls Defaults" <NEW_LINE> hosts = ["maya"] <NEW_LINE> families = ["colorbleed.rig"] <NEW_LINE> actions = [colorbleed.api.RepairAction, colorbleed.maya.action.SelectInvalidAction] <NEW_LINE> CONTROLLER_DEFAULTS = { "translateX": 0, "translateY": 0, "translateZ": 0, "rotateX": 0, "rotateY": 0, "rotateZ": 0, "scaleX": 1, "scaleY": 1, "scaleZ": 1 } <NEW_LINE> def process(self, instance): <NEW_LINE> <INDENT> invalid = self.get_invalid(instance) <NEW_LINE> if invalid: <NEW_LINE> <INDENT> raise RuntimeError("Controls have non-default values: " "%s" % invalid) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def get_invalid(cls, instance): <NEW_LINE> <INDENT> invalid = list() <NEW_LINE> for control in get_controls(instance): <NEW_LINE> <INDENT> if cls.get_non_default_attributes(control): <NEW_LINE> <INDENT> invalid.append(control) <NEW_LINE> <DEDENT> <DEDENT> return invalid <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_non_default_attributes(cls, control): <NEW_LINE> <INDENT> invalid = [] <NEW_LINE> for attr, default in cls.CONTROLLER_DEFAULTS.items(): <NEW_LINE> <INDENT> if cmds.attributeQuery(attr, node=control, exists=True): <NEW_LINE> <INDENT> plug = "{}.{}".format(control, attr) <NEW_LINE> locked = cmds.getAttr(plug, lock=True) <NEW_LINE> if locked: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> value = cmds.getAttr(plug) <NEW_LINE> if value != default: <NEW_LINE> <INDENT> cls.log.warning("Control non-default value: " "%s = %s" % (plug, value)) <NEW_LINE> invalid.append(plug) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return invalid <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def repair(cls, instance): <NEW_LINE> <INDENT> invalid = cls.get_invalid(instance) <NEW_LINE> if not invalid: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> with undo_chunk(): <NEW_LINE> <INDENT> for control in invalid: 
<NEW_LINE> <INDENT> invalid_plugs = cls.get_non_default_attributes(control) <NEW_LINE> if invalid_plugs: <NEW_LINE> <INDENT> for plug in invalid_plugs: <NEW_LINE> <INDENT> attr = plug.split(".")[-1] <NEW_LINE> default = cls.CONTROLLER_DEFAULTS[attr] <NEW_LINE> cls.log.info("Setting %s to %s" % (plug, default)) <NEW_LINE> cmds.setAttr(plug, default)
Validate rig controller default values. Controls must have the transformation attributes on their default values of translate zero, rotate zero and scale one when they are unlocked attributes.
6259906301c39578d7f142bf
class DNSServerIDL(object): <NEW_LINE> <INDENT> thrift_spec = (None, (1, TType.STRUCT, 'addr', (Shared.ttypes.NetworkAddressIDL, Shared.ttypes.NetworkAddressIDL.thrift_spec), None), (2, TType.I32, 'primary', None, None), (3, TType.STRING, 'vrfName', None, None)) <NEW_LINE> def __init__(self, addr = None, primary = None, vrfName = None): <NEW_LINE> <INDENT> self.addr = addr <NEW_LINE> self.primary = primary <NEW_LINE> self.vrfName = vrfName <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid,) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.addr = Shared.ttypes.NetworkAddressIDL() <NEW_LINE> self.addr.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.primary = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 3: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.vrfName = iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not 
None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('DNSServerIDL') <NEW_LINE> if self.addr != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('addr', TType.STRUCT, 1) <NEW_LINE> self.addr.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.primary != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('primary', TType.I32, 2) <NEW_LINE> oprot.writeI32(self.primary) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.vrfName != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('vrfName', TType.STRING, 3) <NEW_LINE> oprot.writeString(self.vrfName) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> def validate(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = [ '%s=%r' % (key, value) for (key, value,) in self.__dict__.iteritems() ] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
DNS Server IDL Attributes: - addr - primary - vrfName
625990634f6381625f19a02e
class CmdUnconnectedConnect(COMMAND_DEFAULT_CLASS): <NEW_LINE> <INDENT> key = 'connect' <NEW_LINE> aliases = ['conn', 'con', 'co'] <NEW_LINE> locks = 'cmd:all()' <NEW_LINE> arg_regex = r'\s.*?|$' <NEW_LINE> def func(self): <NEW_LINE> <INDENT> session = self.caller <NEW_LINE> if _throttle(session, maxlim=5, timeout=5 * 60, storage=_LATEST_FAILED_LOGINS): <NEW_LINE> <INDENT> session.msg('|RYou made too many connection attempts. Try again in a few minutes.|n') <NEW_LINE> return <NEW_LINE> <DEDENT> args = self.args <NEW_LINE> parts = [part.strip() for part in re.split(r"\"", args) if part.strip()] <NEW_LINE> if len(parts) == 1: <NEW_LINE> <INDENT> parts = parts[0].split(None, 1) <NEW_LINE> if len(parts) == 1 and parts[0].lower() == 'guest': <NEW_LINE> <INDENT> enabled, new_account = create_guest_account(session) <NEW_LINE> if new_account: <NEW_LINE> <INDENT> session.sessionhandler.login(session, new_account) <NEW_LINE> <DEDENT> if enabled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(parts) != 2: <NEW_LINE> <INDENT> session.msg('\n\r Usage (without <>): connect <name> <password>') <NEW_LINE> return <NEW_LINE> <DEDENT> name, password = parts <NEW_LINE> account = create_normal_account(session, name, password) <NEW_LINE> if account: <NEW_LINE> <INDENT> session.sessionhandler.login(session, account)
connect to the game Usage (at login screen): connect accountname password connect "account name" "pass word" Use the create command to first create an account before logging in. If you have spaces in your name, enclose it in double quotes.
62599063009cb60464d02c4c
class InvalidRequestError(Error): <NEW_LINE> <INDENT> pass
The request was invalid.
6259906399cbb53fe68325f8
@attr.s <NEW_LINE> class EntityCollections(object): <NEW_LINE> <INDENT> _parent = attr.ib(repr=False, cmp=False, hash=False) <NEW_LINE> _availiable_collections = attr.ib(repr=False, cmp=False, hash=False) <NEW_LINE> _filters = attr.ib(cmp=False, hash=False, default=attr.Factory(dict)) <NEW_LINE> _collection_cache = attr.ib(repr=False, cmp=False, hash=False, init=False, default=attr.Factory(dict)) <NEW_LINE> @classmethod <NEW_LINE> def for_appliance(cls, appliance): <NEW_LINE> <INDENT> return cls(parent=appliance, availiable_collections=load_appliance_collections()) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def for_entity(cls, entity, collections): <NEW_LINE> <INDENT> return cls(parent=entity, availiable_collections=collections, filters={'parent': entity}) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def declared(cls, **spec): <NEW_LINE> <INDENT> @cached_property <NEW_LINE> def collections(self): <NEW_LINE> <INDENT> return cls.for_entity(self, spec) <NEW_LINE> <DEDENT> collections.spec = spec <NEW_LINE> return collections <NEW_LINE> <DEDENT> def __dir__(self): <NEW_LINE> <INDENT> internal_dir = dir(super(EntityCollections, self)) <NEW_LINE> return internal_dir + self._availiable_collections.keys() <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> if name not in self._availiable_collections: <NEW_LINE> <INDENT> sorted_collection_keys = self._availiable_collections.keys() <NEW_LINE> sorted_collection_keys.sort() <NEW_LINE> raise AttributeError('Collection [{}] not known to object, available collections: {}' .format(name, sorted_collection_keys)) <NEW_LINE> <DEDENT> if name not in self._collection_cache: <NEW_LINE> <INDENT> item_filters = self._filters.copy() <NEW_LINE> cls_and_or_filter = self._availiable_collections[name] <NEW_LINE> if isinstance(cls_and_or_filter, tuple): <NEW_LINE> <INDENT> item_filters.update(cls_and_or_filter[1]) <NEW_LINE> cls_or_verpick = cls_and_or_filter[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cls_or_verpick = 
cls_and_or_filter <NEW_LINE> <DEDENT> if isinstance(cls_or_verpick, VersionPick): <NEW_LINE> <INDENT> cls = cls_or_verpick.pick(self._parent.appliance.version) <NEW_LINE> try: <NEW_LINE> <INDENT> logger.info( '[COLLECTIONS] Version picked collection %s as %s.%s', name, cls.__module__, cls.__name__) <NEW_LINE> <DEDENT> except (AttributeError, TypeError, ValueError): <NEW_LINE> <INDENT> logger.exception('[COLLECTIONS] Is the collection %s truly a collection?', name) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> cls = cls_or_verpick <NEW_LINE> <DEDENT> self._collection_cache[name] = cls(self._parent, filters=item_filters) <NEW_LINE> <DEDENT> return self._collection_cache[name]
Caches instances of collection objects for use by the collections accessor The appliance object has a ``collections`` attribute. This attribute is an instance of this class. It is initialized with an appliance object and locally stores a cache of all known good collections.
625990638e7ae83300eea7a2
class SearchCondition(object): <NEW_LINE> <INDENT> EQUAL = 0 <NEW_LINE> LIKE = 1 <NEW_LINE> FULL_TEXT = 2 <NEW_LINE> GTE = 3 <NEW_LINE> LTE = 4 <NEW_LINE> NOT = 5 <NEW_LINE> _VALUES_TO_NAMES = { 0: "EQUAL", 1: "LIKE", 2: "FULL_TEXT", 3: "GTE", 4: "LTE", 5: "NOT", } <NEW_LINE> _NAMES_TO_VALUES = { "EQUAL": 0, "LIKE": 1, "FULL_TEXT": 2, "GTE": 3, "LTE": 4, "NOT": 5, }
<p>Different search operators that can be used with the entity search fields</p> <li>EQUAL : Simply matches for equality. Applicable for name, and parent entity id</li> <li>LIKE : Check for the condition %$FIELD% condition. Applicable for name, and description</li> <li>FULL_TEXT : Does a full text search. Only applicable for the FULL_TEXT field.</li> <li>GTE : Greater than or equal. Only applicable for created time, updated time and shared count.</li> <li>LTE : Less than or equal. Only applicable for created time, updated time and shared count.</li>
62599063009cb60464d02c4d
class MultiAgentSearchAgent(Agent): <NEW_LINE> <INDENT> def __init__(self, evalFn = 'scoreEvaluationFunction', depth = '2'): <NEW_LINE> <INDENT> self.index = 0 <NEW_LINE> self.evaluationFunction = util.lookup(evalFn, globals()) <NEW_LINE> self.depth = int(depth) <NEW_LINE> <DEDENT> def isTerminal(self, state, depth, agent): <NEW_LINE> <INDENT> return depth == self.depth or state.isWin() or state.isLose() or state.getLegalActions(agent) == 0
This class provides some common elements to all of your multi-agent searchers. Any methods defined here will be available to the MinimaxPacmanAgent, AlphaBetaPacmanAgent & ExpectimaxPacmanAgent. You *do not* need to make any changes here, but you can if you want to add functionality to all your adversarial search agents. Please do not remove anything, however. Note: this is an abstract class: one that should not be instantiated. It's only partially specified, and designed to be extended. Agent (game.py) is another abstract class.
6259906363d6d428bbee3e13
class UserBan( Base ): <NEW_LINE> <INDENT> __tablename__ = 'user_bans' <NEW_LINE> __table_args__ = {'extend_existing': True} <NEW_LINE> id = Column( Integer, primary_key = True ) <NEW_LINE> id_user = Column( Integer, ForeignKey( 'users.id') ) <NEW_LINE> date_ban = Column( Date ) <NEW_LINE> reason = Column( String ) <NEW_LINE> user = relationship( 'User', backref='fk_user_ban' )
Describe columns in table UserBans
625990634e4d562566373b1c
class PostDevelopCommand(develop): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> develop.run(self) <NEW_LINE> copyfile()
Post-installation for development mode.
6259906376e4537e8c3f0c98
class ModalTimerOperator(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "wm.implicit_event_loop" <NEW_LINE> bl_label = "Event Loop For Noodle Node Editor" <NEW_LINE> _timer = None <NEW_LINE> def modal(self, context, event): <NEW_LINE> <INDENT> global jobs <NEW_LINE> if event.type in {'ESC'}: <NEW_LINE> <INDENT> self.cancel(context) <NEW_LINE> return {'CANCELLED'} <NEW_LINE> <DEDENT> if event.type == 'TIMER': <NEW_LINE> <INDENT> for job in jobs[:]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> job.send(context) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> print("first run of job?") <NEW_LINE> job.__next__() <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> jobs.remove(job) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> traceback.print_exc() <NEW_LINE> jobs.remove(job) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return {'PASS_THROUGH'} <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> global loop_running <NEW_LINE> if loop_running: <NEW_LINE> <INDENT> print("loop already running!") <NEW_LINE> return {'CANCELLED'} <NEW_LINE> <DEDENT> loop_running = True <NEW_LINE> wm = context.window_manager <NEW_LINE> self._timer = wm.event_timer_add(0.1, context.window) <NEW_LINE> wm.modal_handler_add(self) <NEW_LINE> return {'RUNNING_MODAL'} <NEW_LINE> <DEDENT> def cancel(self, context): <NEW_LINE> <INDENT> global loop_running <NEW_LINE> wm = context.window_manager <NEW_LINE> wm.event_timer_remove(self._timer) <NEW_LINE> loop_running = False <NEW_LINE> return {'CANCELLED'}
Operator which runs its self from a timer
625990637d847024c075daec
class PublicKeyAlgorithm(Sequence): <NEW_LINE> <INDENT> _fields = [ ('algorithm', PublicKeyAlgorithmId), ('parameters', Any, {'optional': True}), ] <NEW_LINE> _oid_pair = ('algorithm', 'parameters') <NEW_LINE> _oid_specs = { 'rsa': Null, 'dsa': DSAParams, 'ec': ECDomainParameters, }
Original Name: AlgorithmIdentifier Source: https://tools.ietf.org/html/rfc5280#page-18
625990634e4d562566373b1d
class Application(object): <NEW_LINE> <INDENT> instance = None <NEW_LINE> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> if Application.instance is None: <NEW_LINE> <INDENT> Application.instance = object.__new__(cls) <NEW_LINE> <DEDENT> return Application.instance <NEW_LINE> <DEDENT> def __init__(self, size, name, resizable=False): <NEW_LINE> <INDENT> super(Application, self).__init__() <NEW_LINE> self._size = size <NEW_LINE> self._name = name <NEW_LINE> self._resizable = resizable <NEW_LINE> self._window = pg.window.Window(*size, resizable=resizable) <NEW_LINE> self._window.set_minimum_size(*size) <NEW_LINE> self._window.set_caption(name) <NEW_LINE> self._window.maximize() <NEW_LINE> self._window.push_handlers(on_key_press=self._skip_escape) <NEW_LINE> self._events = AppEvents() <NEW_LINE> self._window.push_handlers( **{ev: getattr(self._events, "do_" + ev[3:]) for ev in EVENTS} ) <NEW_LINE> <DEDENT> def _clear(self): <NEW_LINE> <INDENT> self._window.clear() <NEW_LINE> pg.gl.glClearColor(0.2, 0.3, 0.3, 1) <NEW_LINE> <DEDENT> def _skip_escape(self, symbol, mod): <NEW_LINE> <INDENT> if symbol == pg.window.key.ESCAPE: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def _get_window(self): <NEW_LINE> <INDENT> return self._window <NEW_LINE> <DEDENT> window = property(_get_window) <NEW_LINE> def _get_size(self): <NEW_LINE> <INDENT> self._size = self._window.get_size() <NEW_LINE> return self._size <NEW_LINE> <DEDENT> def _set_size(self, val): <NEW_LINE> <INDENT> self._size = val <NEW_LINE> self._window.set_size(*val) <NEW_LINE> <DEDENT> size = property(_get_size, _set_size) <NEW_LINE> w = property(lambda self: self._window.width) <NEW_LINE> h = property(lambda self: self._window.height) <NEW_LINE> def _get_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def _set_name(self, val): <NEW_LINE> <INDENT> self._name = val <NEW_LINE> self._window.set_caption(val) <NEW_LINE> <DEDENT> name = property(_get_name, _set_name) <NEW_LINE> def 
run(self, debug=False): <NEW_LINE> <INDENT> self._window.push_handlers(on_draw=self._clear) <NEW_LINE> pg.gl.glBlendFunc(pg.gl.GL_SRC_ALPHA, pg.gl.GL_ONE_MINUS_SRC_ALPHA) <NEW_LINE> pg.gl.glEnable(pg.gl.GL_BLEND) <NEW_LINE> with profile(debug): <NEW_LINE> <INDENT> pg.app.run() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def quit(): <NEW_LINE> <INDENT> pg.app.exit() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def process(cls, obj): <NEW_LINE> <INDENT> self = cls.instance <NEW_LINE> self._events.push_handlers(obj) <NEW_LINE> if hasattr(obj, "on_update"): <NEW_LINE> <INDENT> pg.clock.schedule_interval(obj.on_update, 1 / 60) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def remove(cls, obj): <NEW_LINE> <INDENT> self = cls.instance <NEW_LINE> self._events.remove_handlers() <NEW_LINE> if hasattr(obj, "on_update"): <NEW_LINE> <INDENT> pg.clock.unschedule(obj.on_update)
Base Application
62599063435de62698e9d51e
class Runnable(object): <NEW_LINE> <INDENT> thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> raise AttributeError("No constructor defined - class is abstract") <NEW_LINE> <DEDENT> __repr__ = _swig_repr <NEW_LINE> __swig_destroy__ = _mt.delete_Runnable <NEW_LINE> def run(self) -> "void": <NEW_LINE> <INDENT> return _mt.Runnable_run(self)
Proxy of C++ sys::Runnable class.
625990633539df3088ecd9b4
class Button(pygame.sprite.Sprite): <NEW_LINE> <INDENT> __press = pygame.mixer.Sound('sounds\\button.wav') <NEW_LINE> def __init__(self, num): <NEW_LINE> <INDENT> pygame.sprite.Sprite.__init__(self) <NEW_LINE> self.__font = pygame.font.Font('fonts\\Square.ttf', 30) <NEW_LINE> self.__text = ('START', 'CONTROLS', 'QUIT', 'BACK', 'RESUME', 'RETURN TO TITLE SCREEN')[num] <NEW_LINE> self.image = self.__font.render(self.__text, True, (255, 255, 255)) <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.rect.center = ((160, 200), (160, 250), (160, 300), (280, 440), (320, 200), (320, 280))[num] <NEW_LINE> self.__collided = False <NEW_LINE> <DEDENT> def get_pressed(self): <NEW_LINE> <INDENT> self.__collided = self.rect.collidepoint(pygame.mouse.get_pos()) <NEW_LINE> if self.__collided and pygame.mouse.get_pressed()[0]: <NEW_LINE> <INDENT> Button.__press.play() <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.image = self.__font.render(self.__text, True, ((255, 255, 255), (255, 0, 0))[self.__collided])
button class
625990642ae34c7f260ac7fd
class ProtocolFileRole(str, Enum): <NEW_LINE> <INDENT> MAIN = "main" <NEW_LINE> LABWARE = "labware"
The purpose of a given file in a protocol. Args: MAIN: The protocol's main file. In a JSON protocol, this is will be the JSON file. In a Python protocol, this is the file that exports the main `run` method. LABWARE: A labware definition file, loadable by a Python file in the same protocol.
6259906438b623060ffaa3dc
class LabelingTaskOrderingFilter(OrderingFilter): <NEW_LINE> <INDENT> REPLACE_REQUEST_FIELDS = { 'labeler': 'labeler_name', } <NEW_LINE> def get_ordering(self, request, queryset, view): <NEW_LINE> <INDENT> ordering = super().get_ordering(request, queryset, view) <NEW_LINE> return [self._replace_request_field(field) for field in ordering or []] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _replace_request_field(cls, field: str) -> str: <NEW_LINE> <INDENT> for searched, needed in cls.REPLACE_REQUEST_FIELDS.items(): <NEW_LINE> <INDENT> if searched in field: <NEW_LINE> <INDENT> return field.replace(searched, needed) <NEW_LINE> <DEDENT> <DEDENT> return field
To keep the API query names consistent with the `LabelingTaskFilterSet`, the ordering must be applied to the different field that is requested.
62599064d6c5a102081e383b
class ClientConnectorStatistics(ConnectorStatistics): <NEW_LINE> <INDENT> def init(self): <NEW_LINE> <INDENT> self._stats = { 'created_at': 0, 'last_received_pdu_at': 0, 'last_sent_pdu_at': 0, 'last_received_elink_at': 0, 'last_sent_elink_at': 0, 'last_seqNum_at': 0, 'last_seqNum': None, 'connected_at': 0, 'bound_at': 0, 'disconnected_at': 0, 'connected_count': 0, 'bound_count': 0, 'disconnected_count': 0, }
One client connector statistics holder
62599064d7e4931a7ef3d710
class TestUtilsPrune(unittest.TestCase): <NEW_LINE> <INDENT> pass
:param m: number of instances / samples :param L: number of labels / classes :param T: number of individual classifiers in the original ensemble :param H: size of the pruned sub-ensemble, number of individual classifiers
625990642ae34c7f260ac7fe
class TaskPanelHeightsPage(TaskPanelPage): <NEW_LINE> <INDENT> def getForm(self): <NEW_LINE> <INDENT> return FreeCADGui.PySideUic.loadUi(":/panels/PageHeightsEdit.ui") <NEW_LINE> <DEDENT> def initPage(self, obj): <NEW_LINE> <INDENT> self.safeHeight = PathGui.QuantitySpinBox(self.form.safeHeight, obj, 'SafeHeight') <NEW_LINE> self.clearanceHeight = PathGui.QuantitySpinBox(self.form.clearanceHeight, obj, 'ClearanceHeight') <NEW_LINE> <DEDENT> def getTitle(self, obj): <NEW_LINE> <INDENT> return translate("Path", "Heights") <NEW_LINE> <DEDENT> def getFields(self, obj): <NEW_LINE> <INDENT> self.safeHeight.updateProperty() <NEW_LINE> self.clearanceHeight.updateProperty() <NEW_LINE> <DEDENT> def setFields(self, obj): <NEW_LINE> <INDENT> self.safeHeight.updateSpinBox() <NEW_LINE> self.clearanceHeight.updateSpinBox() <NEW_LINE> <DEDENT> def getSignalsForUpdate(self, obj): <NEW_LINE> <INDENT> signals = [] <NEW_LINE> signals.append(self.form.safeHeight.editingFinished) <NEW_LINE> signals.append(self.form.clearanceHeight.editingFinished) <NEW_LINE> return signals <NEW_LINE> <DEDENT> def pageUpdateData(self, obj, prop): <NEW_LINE> <INDENT> if prop in ['SafeHeight', 'ClearanceHeight']: <NEW_LINE> <INDENT> self.setFields(obj)
Page controller for heights.
6259906497e22403b383c623
class networkgen(): <NEW_LINE> <INDENT> def __init__(self,**kwargs): <NEW_LINE> <INDENT> self.net_params = {} <NEW_LINE> for k,v in kwargs.items(): <NEW_LINE> <INDENT> self.net_params[k] = v <NEW_LINE> <DEDENT> self.data = pd.read_csv(self.net_params['protein_nodes_file']) <NEW_LINE> self.data.columns = ['uniprot_id','id', 'pathology'] <NEW_LINE> self.prot_list = combinations(self.data.uniprot_id,2) <NEW_LINE> <DEDENT> def get_pv(self): <NEW_LINE> <INDENT> edge_list = [] <NEW_LINE> pvalue_params = {} <NEW_LINE> pvalue_params['seed'] = 214 <NEW_LINE> pvalue_params['N'] = 500 <NEW_LINE> pvalue_params['drug_file'] = self.net_params['drug_file'] <NEW_LINE> pvalue_params['targets_file'] = self.net_params['targets_file'] <NEW_LINE> for (p1,p2) in self.prot_list: <NEW_LINE> <INDENT> if p1 != p2: <NEW_LINE> <INDENT> pvalue_params['proteinA'] = p1 <NEW_LINE> pvalue_params['proteinB'] = p2 <NEW_LINE> pval = Pvalue(**pvalue_params) <NEW_LINE> if pval.get_pvalue() <= 0.05: <NEW_LINE> <INDENT> edge_list.append((p1,p2)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return edge_list
A class to generate a network of protein based on their p-value from tanimoto score
62599064e64d504609df9f59
class icCellAddressInvalidError(Exception): <NEW_LINE> <INDENT> def __init__(self, args=None, user=None): <NEW_LINE> <INDENT> self.args = args
Ошибка некорректного адреса ячейки.
62599064a17c0f6771d5d731
class Tabular(object): <NEW_LINE> <INDENT> def __init__(self, columns, toprule=False, bottomrule=False): <NEW_LINE> <INDENT> self.header = r'\begin{tabular}{' + columns + r'}' <NEW_LINE> self.numcols = len(re.findall('l|c|r|p{.*?}', columns)) <NEW_LINE> self.rows = [] <NEW_LINE> self.footer = r'\end{tabular}' <NEW_LINE> self.toprule = toprule <NEW_LINE> self.bottomrule = bottomrule <NEW_LINE> <DEDENT> def row(self, *args): <NEW_LINE> <INDENT> if len(args) > self.numcols: <NEW_LINE> <INDENT> raise IndexError('too many columns specified') <NEW_LINE> <DEDENT> argtxt = ' '.join(args) <NEW_LINE> if any([True for sym in ('&', r'\\') if sym in argtxt]): <NEW_LINE> <INDENT> raise ValueError("arguments should not contain & or \\\\.") <NEW_LINE> <DEDENT> self.rows.append(' ' + r' & '.join(args) + r'\\') <NEW_LINE> <DEDENT> def midrule(self): <NEW_LINE> <INDENT> self.rows.append(r' \midrule') <NEW_LINE> <DEDENT> def hline(self): <NEW_LINE> <INDENT> self.rows.append(r' \hline') <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> table = [self.header] <NEW_LINE> if self.toprule: <NEW_LINE> <INDENT> table += [r' \toprule'] <NEW_LINE> <DEDENT> table += self.rows <NEW_LINE> if self.toprule: <NEW_LINE> <INDENT> table += [r' \bottomrule'] <NEW_LINE> <DEDENT> table += [self.footer] <NEW_LINE> return '\n'.join(table)
Simple LaTeX tabular generator.
62599064627d3e7fe0e085a2
class UserEvent(object): <NEW_LINE> <INDENT> def __init__(self, event_context, user_id, visitor_attributes, bot_filtering=None): <NEW_LINE> <INDENT> self.event_context = event_context <NEW_LINE> self.user_id = user_id <NEW_LINE> self.visitor_attributes = visitor_attributes <NEW_LINE> self.bot_filtering = bot_filtering <NEW_LINE> self.uuid = self._get_uuid() <NEW_LINE> self.timestamp = self._get_time() <NEW_LINE> <DEDENT> def _get_time(self): <NEW_LINE> <INDENT> return int(round(time.time() * 1000)) <NEW_LINE> <DEDENT> def _get_uuid(self): <NEW_LINE> <INDENT> return str(uuid.uuid4())
Class respresenting User Event.
625990640a50d4780f70694b
class Resolver: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._cache = {} <NEW_LINE> <DEDENT> def __call__(self, what): <NEW_LINE> <INDENT> if hasattr(what, '__iter__'): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for host in what: <NEW_LINE> <INDENT> result.extend(self(host)) <NEW_LINE> <DEDENT> result = list(set(result)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if what not in self._cache: <NEW_LINE> <INDENT> self._cache[what] = socket.gethostbyname_ex(str(what))[2] <NEW_LINE> <DEDENT> result = self._cache[what] <NEW_LINE> <DEDENT> dynobj.debug('%s -> %s', what, result) <NEW_LINE> return result
Implement a simple domain name to address resolver. Use the an instance as a function to resolve host names.
62599064a219f33f346c7f1e
class AbortError(Exception): <NEW_LINE> <INDENT> pass
Any fatal errors that would prevent handroll from proceeding should signal with the ``AbortError`` exception.
62599064f548e778e596cca1
class RequestMetadata(proto.Message): <NEW_LINE> <INDENT> domain = proto.Field( proto.STRING, number=1, ) <NEW_LINE> session_id = proto.Field( proto.STRING, number=2, ) <NEW_LINE> user_id = proto.Field( proto.STRING, number=3, ) <NEW_LINE> allow_missing_ids = proto.Field( proto.BOOL, number=4, ) <NEW_LINE> device_info = proto.Field( proto.MESSAGE, number=5, message="DeviceInfo", )
Meta information related to the job searcher or entity conducting the job search. This information is used to improve the performance of the service. Attributes: domain (str): Required if [allow_missing_ids][google.cloud.talent.v4.RequestMetadata.allow_missing_ids] is unset or ``false``. The client-defined scope or source of the service call, which typically is the domain on which the service has been implemented and is currently being run. For example, if the service is being run by client Foo, Inc., on job board www.foo.com and career site www.bar.com, then this field is set to "foo.com" for use on the job board, and "bar.com" for use on the career site. Note that any improvements to the model for a particular tenant site rely on this field being set correctly to a unique domain. The maximum number of allowed characters is 255. session_id (str): Required if [allow_missing_ids][google.cloud.talent.v4.RequestMetadata.allow_missing_ids] is unset or ``false``. A unique session identification string. A session is defined as the duration of an end user's interaction with the service over a certain period. Obfuscate this field for privacy concerns before providing it to the service. Note that any improvements to the model for a particular tenant site rely on this field being set correctly to a unique session ID. The maximum number of allowed characters is 255. user_id (str): Required if [allow_missing_ids][google.cloud.talent.v4.RequestMetadata.allow_missing_ids] is unset or ``false``. A unique user identification string, as determined by the client. To have the strongest positive impact on search quality make sure the client-level is unique. Obfuscate this field for privacy concerns before providing it to the service. Note that any improvements to the model for a particular tenant site rely on this field being set correctly to a unique user ID. The maximum number of allowed characters is 255. 
allow_missing_ids (bool): Only set when any of [domain][google.cloud.talent.v4.RequestMetadata.domain], [session_id][google.cloud.talent.v4.RequestMetadata.session_id] and [user_id][google.cloud.talent.v4.RequestMetadata.user_id] isn't available for some reason. It is highly recommended not to set this field and provide accurate [domain][google.cloud.talent.v4.RequestMetadata.domain], [session_id][google.cloud.talent.v4.RequestMetadata.session_id] and [user_id][google.cloud.talent.v4.RequestMetadata.user_id] for the best service experience. device_info (google.cloud.talent_v4.types.DeviceInfo): The type of device used by the job seeker at the time of the call to the service.
6259906467a9b606de54762e
class ConstantLeaf: <NEW_LINE> <INDENT> def __init__(self, v): <NEW_LINE> <INDENT> self.v = v <NEW_LINE> <DEDENT> def to_str(self, indent="", feature_names=None): <NEW_LINE> <INDENT> return indent + "Constant(" + str(self.v) + ")" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> X = np.atleast_2d(X) <NEW_LINE> if isinstance(self.v, int): <NEW_LINE> <INDENT> dtype = 'int32' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dtype = 'float' <NEW_LINE> <DEDENT> outputs = np.zeros(X.shape[0], dtype=dtype) <NEW_LINE> outputs[:] = self.v <NEW_LINE> return outputs <NEW_LINE> <DEDENT> def fill_predict(self, X, outputs, mask): <NEW_LINE> <INDENT> outputs[mask] = self.v
Decision tree node which always predicts the same value.
62599064435de62698e9d520
class DescribeCdnDataResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Interval = None <NEW_LINE> self.Data = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Interval = params.get("Interval") <NEW_LINE> if params.get("Data") is not None: <NEW_LINE> <INDENT> self.Data = [] <NEW_LINE> for item in params.get("Data"): <NEW_LINE> <INDENT> obj = ResourceData() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Data.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId")
DescribeCdnData response structure.
62599064379a373c97d9a736
class Dao(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def db_2_file(db_helper, table, dst_file, column=0): <NEW_LINE> <INDENT> data = Dao.read_all(db_helper, table) <NEW_LINE> with codecs.open(dst_file, 'wb') as dst_fp: <NEW_LINE> <INDENT> for d_tuple in data: <NEW_LINE> <INDENT> if column >= 1: <NEW_LINE> <INDENT> info = unicode(d_tuple[column-1]) <NEW_LINE> info = re.sub(u'\n', u' ', info) <NEW_LINE> info = info.strip() <NEW_LINE> if info: <NEW_LINE> <INDENT> dst_fp.write(info + u'\n') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> info = u'' <NEW_LINE> for i in range(len(d_tuple)): <NEW_LINE> <INDENT> info += u'\t' + unicode(d_tuple[i]) <NEW_LINE> <DEDENT> info = info.strip() <NEW_LINE> dst_fp.write(info + u'\n') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def read_all(db_helper, table): <NEW_LINE> <INDENT> sql = "SELECT * FROM " + table <NEW_LINE> infos = db_helper.read(sql) <NEW_LINE> return infos
数据库访问对象
62599064442bda511e95d8e6
class ISliderPage(Interface): <NEW_LINE> <INDENT> pass
marker interface for a page that implements a slider
6259906416aa5153ce401bf5
@dataclass <NEW_LINE> class ScaleByYogi(GradientTransformation[GenericGradientState, Weights], Generic[Weights]): <NEW_LINE> <INDENT> b1: RealNumeric = 0.9 <NEW_LINE> b2: RealNumeric = 0.999 <NEW_LINE> eps: RealNumeric = 1e-3 <NEW_LINE> eps_root: RealNumeric = 0.0 <NEW_LINE> initial_accumulator_value: RealNumeric = 1e-6 <NEW_LINE> def init(self, parameters: Weights) -> GenericGradientState: <NEW_LINE> <INDENT> return GenericGradientState( scale_by_yogi(self.b1, self.b2, self.eps, self.eps_root).init(parameters)) <NEW_LINE> <DEDENT> def update(self, gradient: Weights, state: GenericGradientState, parameters: Optional[Weights]) -> Tuple[Weights, GenericGradientState]: <NEW_LINE> <INDENT> return GenericGradientState.wrap( *scale_by_yogi(self.b1, self.b2, self.eps, self.eps_root).update(gradient, state.data, parameters))
Rescale updates according to the Yogi algorithm. References: [Zaheer et al, 2018](https://papers.nips.cc/paper/2018/hash/90365351ccc7437a1309dc64e4db32a3-Abstract.html) # noqa, pylint: disable=line-too-long Args: b1: decay rate for the exponentially weighted average of grads. b2: decay rate for the exponentially weighted average of variance of grads. eps: term added to the denominator to improve numerical stability. eps_root: term added to the denominator inside the square-root to improve numerical stability when backpropagating gradients through the rescaling. initial_accumulator_value: The starting value for accumulators. Only positive values are allowed.
6259906499cbb53fe68325fc
class Url(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Url" <NEW_LINE> verbose_name_plural = "Urls" <NEW_LINE> <DEDENT> uuid = models.UUIDField( verbose_name="UUID", default=uuid.uuid4, editable=False, unique=True) <NEW_LINE> url = models.URLField( unique=False, verbose_name="URL", blank=False) <NEW_LINE> title = models.CharField( max_length=256, blank=True, verbose_name="Text") <NEW_LINE> domain = models.CharField( max_length=96, blank=True, verbose_name="Domain", default=DEFAULT_DOMAIN) <NEW_LINE> short_url = models.CharField( max_length=256, blank=True, unique=True, verbose_name="Short URL") <NEW_LINE> slug = models.SlugField( unique=False, verbose_name="Slug", default=short_url) <NEW_LINE> clicks = models.IntegerField( blank=True, verbose_name="Clicks", default=0) <NEW_LINE> create_dttm = models.DateTimeField( verbose_name="Created", blank=True, default=datetime.now) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f"URL: {self.url}. Short url: {self.short_url}"
Url Model
625990641b99ca40022900c2
class Stack: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.store = [] <NEW_LINE> <DEDENT> def push(self, item): <NEW_LINE> <INDENT> self.store.append(item) <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> return self.store.pop() <NEW_LINE> <DEDENT> def isempty(self): <NEW_LINE> <INDENT> return not self.store
implementation of a stack
625990648e7ae83300eea7a6
class SubTaskError(AppError): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> message = ''.join(['Sub task error: ', message]) <NEW_LINE> super().__init__(message)
This class describe error which appear when user try to work with incorrect sub task
625990648e7ae83300eea7a7
class Bullet_two(GameSprite): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__('/home/guobin/图片/bullet2.png',-2) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.rect.y += self.speed <NEW_LINE> if self.rect.bottom < 0: <NEW_LINE> <INDENT> self.kill()
子弹精灵
625990647047854f46340ad3
class BRAINSAlignMSP(SEMLikeCommandLine): <NEW_LINE> <INDENT> input_spec = BRAINSAlignMSPInputSpec <NEW_LINE> output_spec = BRAINSAlignMSPOutputSpec <NEW_LINE> _cmd = " BRAINSAlignMSP " <NEW_LINE> _outputs_filenames = { "OutputresampleMSP": "OutputresampleMSP.nii", "resultsDir": "resultsDir", } <NEW_LINE> _redirect_x = False
title: Align Mid Saggital Brain (BRAINS) category: Utilities.BRAINS description: Resample an image into ACPC alignement ACPCDetect
62599064a219f33f346c7f20
class StackMixin(BaseEvaluator): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self._stack = [0] <NEW_LINE> <DEDENT> def _check_stack(self, var): <NEW_LINE> <INDENT> idx = int(var[1]) <NEW_LINE> stack_size = len(self._stack) <NEW_LINE> if stack_size <= idx: <NEW_LINE> <INDENT> extend = [0] * (idx - stack_size + 1) <NEW_LINE> self._stack.extend(extend) <NEW_LINE> <DEDENT> return idx <NEW_LINE> <DEDENT> def __getitem__(self, variable): <NEW_LINE> <INDENT> return self._stack[self._check_stack(variable)] <NEW_LINE> <DEDENT> def __setitem__(self, variable, value): <NEW_LINE> <INDENT> self._stack[self._check_stack(variable)] = value <NEW_LINE> self._debug("|".join( " x{}: {:03} ".format(i, v) for i, v in enumerate(self._stack) ), end=False, right=True) <NEW_LINE> <DEDENT> def _set_parameters(self, *args): <NEW_LINE> <INDENT> self._stack.extend(args)
This mixin manages the stack of a program.
62599064627d3e7fe0e085a4
class HttpJsonRequest: <NEW_LINE> <INDENT> def __init__(self, url, encoding): <NEW_LINE> <INDENT> self._encoding = encoding <NEW_LINE> self._http = urlopen(url) <NEW_LINE> self._data_str = None <NEW_LINE> self._data = None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'<HttpJsonRequest: {self.url}>' <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self._http.url <NEW_LINE> <DEDENT> @property <NEW_LINE> def raw_data(self): <NEW_LINE> <INDENT> if self._data_str is None: <NEW_LINE> <INDENT> self._data_str = self._http.read() <NEW_LINE> <DEDENT> return self._data_str <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> if self._data is None: <NEW_LINE> <INDENT> self._data = _json.loads(self.raw_data.decode(self._encoding)) <NEW_LINE> <DEDENT> return self._data
Class for handling HTTP/JSON requests. Use `HttpJsonClient` to create an instance.
62599064462c4b4f79dbd120
class DatacenterHAFullTopo( Topo ): <NEW_LINE> <INDENT> def build( self, numRacks=4, numHostsPerRack=4, numHASwitches=2 ): <NEW_LINE> <INDENT> if numHASwitches >= 16: <NEW_LINE> <INDENT> raise Exception( "Please use less than 16 HA switches" ) <NEW_LINE> <DEDENT> self.racks = [] <NEW_LINE> rootSwitches = [] <NEW_LINE> lastRootSwitch = None <NEW_LINE> for i in irange( 1, numHASwitches ): <NEW_LINE> <INDENT> rootSwitch = self.addSwitch( 's%s' % i ) <NEW_LINE> rootSwitches.append( rootSwitch ) <NEW_LINE> if lastRootSwitch: <NEW_LINE> <INDENT> self.addLink( lastRootSwitch, rootSwitch ) <NEW_LINE> <DEDENT> lastRootSwitch = rootSwitch <NEW_LINE> <DEDENT> if numHASwitches > 1: <NEW_LINE> <INDENT> self.addLink( lastRootSwitch, rootSwitches[0] ) <NEW_LINE> <DEDENT> for i in irange( 1, numRacks ): <NEW_LINE> <INDENT> rack = self.buildRack( i, numHostsPerRack=numHostsPerRack, numHASwitches=numHASwitches ) <NEW_LINE> self.racks.append( rack ) <NEW_LINE> for j in range( numHASwitches ): <NEW_LINE> <INDENT> self.addLink( rootSwitches[j], rack[j] ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def buildRack( self, loc, numHostsPerRack, numHASwitches ): <NEW_LINE> <INDENT> switches = [] <NEW_LINE> for n in irange( 1, numHASwitches ): <NEW_LINE> <INDENT> dpid = ( loc * 16 ) + n <NEW_LINE> switch = self.addSwitch( 's%sr%s' % (n, loc), dpid='%x' % dpid ) <NEW_LINE> switches.append( switch ) <NEW_LINE> <DEDENT> for n in irange( 1, numHostsPerRack ): <NEW_LINE> <INDENT> host = self.addHost( 'h%sr%s' % ( n, loc ) ) <NEW_LINE> for switch in switches: <NEW_LINE> <INDENT> self.addLink( switch, host ) <NEW_LINE> <DEDENT> <DEDENT> return switches
Configurable Datacenter Topology
62599064f548e778e596cca3
class SalesforceAutomaticFields(SalesforceBaseTest): <NEW_LINE> <INDENT> start_date = (datetime.now() + timedelta(days=-1)).strftime("%Y-%m-%dT00:00:00Z") <NEW_LINE> @staticmethod <NEW_LINE> def expected_sync_streams(): <NEW_LINE> <INDENT> return { 'Account', 'Contact', 'Lead', 'Opportunity', 'User', } <NEW_LINE> <DEDENT> def automatic_fields_test(self): <NEW_LINE> <INDENT> expected_streams = self.expected_sync_streams() <NEW_LINE> conn_id = connections.ensure_connection(self, original_properties=False) <NEW_LINE> found_catalogs = self.run_and_verify_check_mode(conn_id) <NEW_LINE> test_catalogs_automatic_fields = [catalog for catalog in found_catalogs if catalog.get('stream_name') in expected_streams] <NEW_LINE> self.perform_and_verify_table_and_field_selection( conn_id, test_catalogs_automatic_fields, select_all_fields=False, ) <NEW_LINE> record_count_by_stream = self.run_and_verify_sync(conn_id) <NEW_LINE> synced_records = runner.get_records_from_target_output() <NEW_LINE> for stream in expected_streams: <NEW_LINE> <INDENT> with self.subTest(stream=stream): <NEW_LINE> <INDENT> expected_keys = self.expected_automatic_fields().get(stream) <NEW_LINE> data = synced_records.get(stream) <NEW_LINE> record_messages_keys = [set(row['data'].keys()) for row in data['messages'] if row['action'] == 'upsert'] <NEW_LINE> self.assertGreater( record_count_by_stream.get(stream, -1), 0, msg="The number of records is not over the stream max limit") <NEW_LINE> for actual_keys in record_messages_keys: <NEW_LINE> <INDENT> self.assertSetEqual(expected_keys, actual_keys)
Test that with no fields selected for a stream automatic fields are still replicated
62599064e5267d203ee6cf4b
class Solution: <NEW_LINE> <INDENT> def rotate(self, matrix): <NEW_LINE> <INDENT> n = len(matrix) <NEW_LINE> for i in range(n / 2): <NEW_LINE> <INDENT> for j in range(n): <NEW_LINE> <INDENT> matrix[i][j], matrix[n - 1 - i][j] = matrix[n - 1 - i][j], matrix[i][j] <NEW_LINE> <DEDENT> <DEDENT> for i in range(n): <NEW_LINE> <INDENT> for j in range(i, n): <NEW_LINE> <INDENT> matrix[i][j], matrix[j][i] = matrix[j][i], matrix[i][j]
@param matrix: A list of lists of integers @return: Nothing
6259906444b2445a339b74ed
class Meta: <NEW_LINE> <INDENT> model = Notification <NEW_LINE> fields = ('id', 'timestamp', 'unread', 'description')
Defines fields to be returned to user
62599064435de62698e9d522
class UserRegisterForm(UserCreationForm): <NEW_LINE> <INDENT> email = forms.EmailField(required=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ['username', 'email', 'password1', 'password2']
Form to create new user
62599064435de62698e9d523
class TestObserverEvents(TestCase, RegexTestCase): <NEW_LINE> <INDENT> def test_on_newtab_changed(self): <NEW_LINE> <INDENT> patterns = ( 'Services.obs.addObserver(NewTabObserver, "newtab-url-changed", false);', ( 'var foo = Cc["foo"].getService(Ci.nsIObserverService);' 'foo.addObserver(NewTabObserver, "newtab-url-changed", false);' ) ) <NEW_LINE> warning = { 'id': ('js_entity_values', 'nsIObserverService', 'newtab_url_changed'), 'signing_severity': 'high' } <NEW_LINE> def fail(script): <NEW_LINE> <INDENT> self.setUp() <NEW_LINE> self.run_script(script) <NEW_LINE> self.assert_failed(with_warnings=[warning]) <NEW_LINE> <DEDENT> for pattern in patterns: <NEW_LINE> <INDENT> yield fail, pattern
Tests that code related to observer events trigger warnings.
6259906438b623060ffaa3de
class CmdlineTest(Test): <NEW_LINE> <INDENT> def __init__(self, input_idx): <NEW_LINE> <INDENT> Test.__init__(self, input_idx) <NEW_LINE> self.fio = (None, None, None) <NEW_LINE> <DEDENT> def cmd(self): <NEW_LINE> <INDENT> c = [] <NEW_LINE> if self.prefix != None: <NEW_LINE> <INDENT> c.extend(self.prefix) <NEW_LINE> <DEDENT> c.extend(self.input()[0]) <NEW_LINE> return c <NEW_LINE> <DEDENT> def sio(self): <NEW_LINE> <INDENT> return self.input()[1] <NEW_LINE> <DEDENT> def body(self): <NEW_LINE> <INDENT> self.open_stdio() <NEW_LINE> proc = subprocess.Popen(self.cmd(), stdin=self.fio[0], stdout=self.fio[1], stderr=self.fio[2]) <NEW_LINE> while True: <NEW_LINE> <INDENT> time.sleep(0.1) <NEW_LINE> retcode = proc.poll() <NEW_LINE> if retcode != None: <NEW_LINE> <INDENT> proc.wait() <NEW_LINE> if retcode < 0: <NEW_LINE> <INDENT> self.result = TestResult.CRASH <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.check_offline(): <NEW_LINE> <INDENT> self.result = TestResult.MISMATCH <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.result = TestResult.NORMAL <NEW_LINE> <DEDENT> <DEDENT> break <NEW_LINE> <DEDENT> if self.check_hang(): <NEW_LINE> <INDENT> self.result = TestResult.HANG <NEW_LINE> util.kill_process(proc.pid) <NEW_LINE> break <NEW_LINE> <DEDENT> if self.check_online(): <NEW_LINE> <INDENT> self.result = TestResult.MISMATCH <NEW_LINE> util.kill_process(proc.pid) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> self.close_stdio() <NEW_LINE> <DEDENT> def check_hang(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def check_online(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def check_offline(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def open_stdio(self): <NEW_LINE> <INDENT> sin, sout, serr = self.sio() <NEW_LINE> if sin != None: <NEW_LINE> <INDENT> self.fio[0] = open(sin) <NEW_LINE> <DEDENT> if sout != None: <NEW_LINE> <INDENT> self.fio[1] = open(sout, 'a') <NEW_LINE> <DEDENT> if serr != None: <NEW_LINE> <INDENT> self.fio[2] 
= open(serr, 'a') <NEW_LINE> <DEDENT> <DEDENT> def close_stdio(self): <NEW_LINE> <INDENT> sin, sout, serr = self.sio() <NEW_LINE> if sin != None: <NEW_LINE> <INDENT> self.fio[0].close() <NEW_LINE> <DEDENT> if sout != None: <NEW_LINE> <INDENT> self.fio[1].close() <NEW_LINE> <DEDENT> if serr != None: <NEW_LINE> <INDENT> self.fio[2].close()
Represents cmdline tests. The input format is a tuple (A, I) in which A is a list of arguments and I is a tuple of standard I/O files (stdin, stdout, stderr). (None means using default)
62599064d7e4931a7ef3d712
class FixedWithPts(Mortgage): <NEW_LINE> <INDENT> def __init__(self, loan, r, months, pts): <NEW_LINE> <INDENT> Mortgage.__init__(self, loan, r, months) <NEW_LINE> self.pts = pts <NEW_LINE> self.paid = [loan * (pts/100)] <NEW_LINE> self.legend = 'Fixed, ' + str(round(r*100, 2)) + '%, ' + str(pts) + ' points'
先支付贷款总额的pts%,剩下的部分可以享受低利率
6259906492d797404e3896ea
class PageContext(object): <NEW_LINE> <INDENT> def __init__(self, course, repo, commit_sha, flow_session, in_sandbox=False, page_uri=None): <NEW_LINE> <INDENT> self.course = course <NEW_LINE> self.repo = repo <NEW_LINE> self.commit_sha = commit_sha <NEW_LINE> self.flow_session = flow_session <NEW_LINE> self.in_sandbox = in_sandbox <NEW_LINE> self.page_uri = page_uri
.. attribute:: course .. attribute:: repo .. attribute:: commit_sha .. attribute:: flow_session May be None. .. attribute:: page_uri Note that this is different from :class:`course.utils.FlowPageContext`, which is used internally by the flow views.
62599064be8e80087fbc07a2
class User(Resource): <NEW_LINE> <INDENT> def post(self): <NEW_LINE> <INDENT> response = Response() <NEW_LINE> request_validators = [ {'payload': {'data': json.loads(request.data), 'schema': UserCreatePayloadSchema}}, ] <NEW_LINE> ValidateRequest.validate(response, request_validators) <NEW_LINE> if response.errors: <NEW_LINE> <INDENT> response.message = "Invalid Request" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if not response.errors: <NEW_LINE> <INDENT> response.status_code = 200 <NEW_LINE> <DEDENT> return response.to_dict()
This resource will handle all operations on User
625990643617ad0b5ee0786b
class RevResBottleneck(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, stride, preactivate, bottleneck_factor=4): <NEW_LINE> <INDENT> super(RevResBottleneck, self).__init__() <NEW_LINE> mid_channels = out_channels // bottleneck_factor <NEW_LINE> if preactivate: <NEW_LINE> <INDENT> self.conv1 = pre_conv1x1_block( in_channels=in_channels, out_channels=mid_channels) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.conv1 = conv1x1( in_channels=in_channels, out_channels=mid_channels) <NEW_LINE> <DEDENT> self.conv2 = pre_conv3x3_block( in_channels=mid_channels, out_channels=mid_channels, stride=stride) <NEW_LINE> self.conv3 = pre_conv1x1_block( in_channels=mid_channels, out_channels=out_channels) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.conv1(x) <NEW_LINE> x = self.conv2(x) <NEW_LINE> x = self.conv3(x) <NEW_LINE> return x
RevNet bottleneck block for residual path in RevNet unit. Parameters: ---------- in_channels : int Number of input channels. out_channels : int Number of output channels. stride : int or tuple/list of 2 int Strides of the convolution. preactivate : bool Whether use pre-activation for the first convolution block. bottleneck_factor : int, default 4 Bottleneck factor.
625990645fdd1c0f98e5f69e
class T85(Modular_joint): <NEW_LINE> <INDENT> def __init__(self, id, eds_file): <NEW_LINE> <INDENT> self.__reduction_ratio = 188.24 <NEW_LINE> Modular_joint.__init__(self, id, eds_file, self.__reduction_ratio)
T85 control base on the canopen
625990643d592f4c4edbc5f8
class PageCyclerV2Top10Mobile(_PageCyclerV2): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def Name(cls): <NEW_LINE> <INDENT> return 'page_cycler_v2.top_10_mobile' <NEW_LINE> <DEDENT> def CreateStorySet(self, options): <NEW_LINE> <INDENT> return page_sets.Top10MobilePageSet(run_no_page_interactions=True, cache_temperatures=[ cache_temperature.PCV1_COLD, cache_temperature.PCV1_WARM])
Page load time benchmark for the top 10 mobile web pages. Runs against pages recorded in November, 2013.
62599064cb5e8a47e493cd12
class UserModelForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('first_name', 'middle_name', 'last_name', 'phone', 'city', 'photo', 'position' ) <NEW_LINE> widgets = { 'first_name': widgets.TextInput(attrs={ 'placeholder': _('First name') }), 'middle_name': widgets.TextInput(attrs={ 'placeholder': _('Middle name') }), 'last_name': widgets.TextInput(attrs={ 'placeholder': _('Last name') }), }
...
625990647047854f46340ad4
class Token(object): <NEW_LINE> <INDENT> __slots__ = ('type', 'value', 'directive', 'end_of_context') <NEW_LINE> def __init__(self, type, value=None, directive=None, end_of_context=False): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> self.value = value <NEW_LINE> self.directive = directive <NEW_LINE> self.end_of_context = end_of_context <NEW_LINE> <DEDENT> def as_tuple(self): <NEW_LINE> <INDENT> return (self.type, self.value, self.directive, self.end_of_context) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Token(%r, %r, %r)>' % self.as_tuple()[:3] <NEW_LINE> <DEDENT> __str__ = __repr__ <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return unicode(self.__str__()) <NEW_LINE> <DEDENT> def __cmp__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> raise TypeError('Can\'t compare %s with %s' % (repr(other), repr(self.__class__))) <NEW_LINE> <DEDENT> return cmp(other.as_tuple(), self.as_tuple())
A token prepresents a part of a document with references to a directive that processes that token.
625990644e4d562566373b22
class GlobalG(GlobalGCore): <NEW_LINE> <INDENT> def __init__(self, log=False): <NEW_LINE> <INDENT> GlobalGCore.__init__(self, log) <NEW_LINE> <DEDENT> def __getattribute__(self, name='x', *l): <NEW_LINE> <INDENT> if name.startswith('__') and name.endswith('__') or name in dir(GlobalGCore): <NEW_LINE> <INDENT> return GlobalGCore.__getattribute__(self, name, *l) <NEW_LINE> <DEDENT> log = global_g_paras[id(self)].log <NEW_LINE> return TransportToRootFrame(name,log) <NEW_LINE> <DEDENT> def __setattr__(self, name, v): <NEW_LINE> <INDENT> log = global_g_paras[id(self)].log <NEW_LINE> transport = TransportToRootFrame(name,log) <NEW_LINE> transport(v)
TODO: for dev-tips: after every operating in IPython@spyder, will read this instance 10+ times some times will read attr like "__xx__" but not in dir(object): in this case don't return anything just raise the Exception if is instance.__getattribute__(name) try to use getattr(instance, name) instead
62599064f548e778e596cca5
class UserInfoResponse: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swaggerTypes = { 'result': 'UserInfoResult', 'status': 'str', 'error_message': 'str', 'composedOn': 'long' } <NEW_LINE> self.result = None <NEW_LINE> self.status = None <NEW_LINE> self.error_message = None <NEW_LINE> self.composedOn = None
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259906445492302aabfdbf7
class MicrosoftAzureTestUrl(object): <NEW_LINE> <INDENT> def __init__( self, credentials, subscription_id, api_version='2014-04-01-preview', accept_language='en-US', long_running_operation_retry_timeout=30, generate_client_request_id=True, base_url=None, filepath=None): <NEW_LINE> <INDENT> self.config = MicrosoftAzureTestUrlConfiguration(credentials, subscription_id, api_version, accept_language, long_running_operation_retry_timeout, generate_client_request_id, base_url, filepath) <NEW_LINE> self._client = ServiceClient(self.config.credentials, self.config) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self.group = GroupOperations( self._client, self.config, self._serialize, self._deserialize)
Some cool documentation. :ivar config: Configuration for client. :vartype config: MicrosoftAzureTestUrlConfiguration :ivar group: Group operations :vartype group: .operations.GroupOperations :param credentials: Credentials needed for the client to connect to Azure. :type credentials: :mod:`A msrestazure Credentials object<msrestazure.azure_active_directory>` :param subscription_id: Subscription Id. :type subscription_id: str :param api_version: API Version with value '2014-04-01-preview'. :type api_version: str :param accept_language: Gets or sets the preferred language for the response. :type accept_language: str :param long_running_operation_retry_timeout: Gets or sets the retry timeout in seconds for Long Running Operations. Default value is 30. :type long_running_operation_retry_timeout: int :param generate_client_request_id: When set to true a unique x-ms-client-request-id value is generated and included in each request. Default is true. :type generate_client_request_id: bool :param str base_url: Service URL :param str filepath: Existing config
6259906432920d7e50bc7762
class Inventory: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.items = {} <NEW_LINE> self.types = {} <NEW_LINE> self.total_count = 0 <NEW_LINE> for data in ItemTypes: <NEW_LINE> <INDENT> self.types[data.slot] = data <NEW_LINE> self.items[data.slot] = 0 <NEW_LINE> <DEDENT> <DEDENT> def set_item(self, item, count): <NEW_LINE> <INDENT> self.total_count += (count - self.items[item.slot]) <NEW_LINE> self.items[item.slot] = count <NEW_LINE> <DEDENT> def add_item(self, item, count): <NEW_LINE> <INDENT> self.total_count += count <NEW_LINE> self.items[item.slot] = self.items[item.slot] + count <NEW_LINE> <DEDENT> def remove_item(self, item, count): <NEW_LINE> <INDENT> if self.items[item.slot] >= count: <NEW_LINE> <INDENT> self.items[item.slot] = self.items[item.slot] - count <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def get_item_count(self, item): <NEW_LINE> <INDENT> return self.items[item.slot] <NEW_LINE> <DEDENT> def get_slot_count(self, slot): <NEW_LINE> <INDENT> return self.items[slot] <NEW_LINE> <DEDENT> def get_item_base_price(self, item): <NEW_LINE> <INDENT> return item.price <NEW_LINE> <DEDENT> def get_slot_base_price(self, slot): <NEW_LINE> <INDENT> return self.types[slot].price <NEW_LINE> <DEDENT> def get_item_base_count(self, item): <NEW_LINE> <INDENT> return item.count <NEW_LINE> <DEDENT> def get_slot_base_count(self, slot): <NEW_LINE> <INDENT> return self.types[slot].count <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> for data in ItemTypes: <NEW_LINE> <INDENT> self.items[data.slot] = 0 <NEW_LINE> <DEDENT> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> holder = {} <NEW_LINE> for item in ItemTypes: <NEW_LINE> <INDENT> holder["item" + str(item.slot)] = self.item_to_dict(item, self.get_item_count(item)) <NEW_LINE> <DEDENT> return holder <NEW_LINE> <DEDENT> def item_to_dict(self, item, count): <NEW_LINE> <INDENT> holder = {} <NEW_LINE> holder["name"] = item.name <NEW_LINE> holder["slot"] = item.slot 
<NEW_LINE> holder["size"] = item.size <NEW_LINE> holder["base_price"] = item.price <NEW_LINE> holder["base_count"] = item.count <NEW_LINE> holder["count"] = count <NEW_LINE> return holder
Inventory holds dictory of item counts where key is the item type and value is the count
625990644e4d562566373b23
class ROISelector(object): <NEW_LINE> <INDENT> def __init__(self, image): <NEW_LINE> <INDENT> self.__image = image.copy() <NEW_LINE> <DEDENT> def SelectArea(self, winName="Select an area", winPos=(400, 400)): <NEW_LINE> <INDENT> self.__ResetPoints() <NEW_LINE> self.__winName = winName <NEW_LINE> cv2.namedWindow(winName, cv2.WINDOW_AUTOSIZE) <NEW_LINE> cv2.setMouseCallback(winName, self.__OnMouseOver) <NEW_LINE> cv2.moveWindow(winName, winPos[0], winPos[1]) <NEW_LINE> self.__Update() <NEW_LINE> while True: <NEW_LINE> <INDENT> ch = cv2.waitKey(1) <NEW_LINE> if ch is 27 or ch is ord("q"): <NEW_LINE> <INDENT> cv2.destroyWindow(winName) <NEW_LINE> return None, False <NEW_LINE> <DEDENT> elif ch is 13 or ch is 32: <NEW_LINE> <INDENT> corners = self.__SetCorners() <NEW_LINE> if corners is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> cv2.destroyWindow(winName) <NEW_LINE> return corners, True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __ResetPoints(self): <NEW_LINE> <INDENT> self.leftPoints = None <NEW_LINE> self.rightPoints = None <NEW_LINE> <DEDENT> def __Update(self): <NEW_LINE> <INDENT> if self.leftPoints is None or self.rightPoints is None: <NEW_LINE> <INDENT> cv2.imshow(self.__winName, self.__image) <NEW_LINE> return <NEW_LINE> <DEDENT> image = self.__image.copy() <NEW_LINE> cv2.rectangle(image, self.leftPoints, self.rightPoints, (0, 0, 255), 1) <NEW_LINE> cv2.imshow(self.__winName, image) <NEW_LINE> <DEDENT> def __SetCorners(self): <NEW_LINE> <INDENT> if self.leftPoints is None or self.rightPoints is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> upLeft = (min(self.leftPoints[0], self.rightPoints[0]), min(self.leftPoints[1], self.rightPoints[1])) <NEW_LINE> downRight = (max(self.leftPoints[0], self.rightPoints[0]), max(self.leftPoints[1], self.rightPoints[1])) <NEW_LINE> points = [] <NEW_LINE> points.append(upLeft) <NEW_LINE> points.append(downRight) <NEW_LINE> return points <NEW_LINE> <DEDENT> def __OnMouseOver(self, event, x, y, flags, param): 
<NEW_LINE> <INDENT> if flags & cv2.EVENT_FLAG_LBUTTON: <NEW_LINE> <INDENT> self.leftPoints = x, y <NEW_LINE> <DEDENT> if flags & cv2.EVENT_FLAG_RBUTTON: <NEW_LINE> <INDENT> self.rightPoints = x, y <NEW_LINE> <DEDENT> self.__Update()
This class returns the corners of the selected area as: [(UpperLeftCorner), (LowerRightCorner)]. Use the Left Mouse Button to set one corner and the Right Mouse Button to set the opposite corner; the selection is normalized to upper-left/lower-right automatically. Click on the image to set the area. Keys: Enter/SPACE - OK; ESC/q - Exit (Cancel)
62599064adb09d7d5dc0bc86
class ValloxStateProxy: <NEW_LINE> <INDENT> def __init__(self, hass, client): <NEW_LINE> <INDENT> self._hass = hass <NEW_LINE> self._client = client <NEW_LINE> self._metric_cache = {} <NEW_LINE> self._profile = None <NEW_LINE> self._valid = False <NEW_LINE> <DEDENT> def fetch_metric(self, metric_key): <NEW_LINE> <INDENT> _LOGGER.debug("Fetching metric key: %s", metric_key) <NEW_LINE> if not self._valid: <NEW_LINE> <INDENT> raise OSError("Device state out of sync.") <NEW_LINE> <DEDENT> if metric_key not in vlxDevConstants.__dict__: <NEW_LINE> <INDENT> raise KeyError("Unknown metric key: {}".format(metric_key)) <NEW_LINE> <DEDENT> return self._metric_cache[metric_key] <NEW_LINE> <DEDENT> def get_profile(self): <NEW_LINE> <INDENT> _LOGGER.debug("Returning profile") <NEW_LINE> if not self._valid: <NEW_LINE> <INDENT> raise OSError("Device state out of sync.") <NEW_LINE> <DEDENT> return PROFILE_TO_STR_REPORTABLE[self._profile] <NEW_LINE> <DEDENT> async def async_update(self, event_time): <NEW_LINE> <INDENT> _LOGGER.debug("Updating Vallox state cache") <NEW_LINE> try: <NEW_LINE> <INDENT> self._metric_cache = await self._client.fetch_metrics() <NEW_LINE> self._profile = await self._client.get_profile() <NEW_LINE> self._valid = True <NEW_LINE> <DEDENT> except OSError as err: <NEW_LINE> <INDENT> _LOGGER.error("Error during state cache update: %s", err) <NEW_LINE> self._valid = False <NEW_LINE> <DEDENT> async_dispatcher_send(self._hass, SIGNAL_VALLOX_STATE_UPDATE)
Helper class to reduce websocket API calls.
625990643539df3088ecd9ba
class Meta: <NEW_LINE> <INDENT> fields = ['trigger_conditions', 'end_conditions', 'task_templates']
This class contains the serializer metadata.
625990649c8ee82313040d16
class ArrayType(TypeDecorator): <NEW_LINE> <INDENT> impl = String <NEW_LINE> def process_bind_param(self, value, dialect): <NEW_LINE> <INDENT> return json.dumps(value) <NEW_LINE> <DEDENT> def process_result_value(self, value, dialect): <NEW_LINE> <INDENT> return json.loads(value) <NEW_LINE> <DEDENT> def copy(self, **kw): <NEW_LINE> <INDENT> return ArrayType(self.impl.length)
SQLite-like databases do not support arrays. Let's use a custom type decorator. See http://docs.sqlalchemy.org/en/latest/core/types.html#sqlalchemy.types.TypeDecorator
625990643cc13d1c6d466e5f
class SynapseSite(Site): <NEW_LINE> <INDENT> def __init__(self, logger_name, site_tag, config, resource, *args, **kwargs): <NEW_LINE> <INDENT> Site.__init__(self, resource, *args, **kwargs) <NEW_LINE> self.site_tag = site_tag <NEW_LINE> proxied = config.get("x_forwarded", False) <NEW_LINE> self.requestFactory = SynapseRequestFactory(self, proxied) <NEW_LINE> self.access_logger = logging.getLogger(logger_name) <NEW_LINE> <DEDENT> def log(self, request): <NEW_LINE> <INDENT> pass
Subclass of a twisted http Site that does access logging with python's standard logging
6259906476e4537e8c3f0c9f
class ZincArtifactFactory(object): <NEW_LINE> <INDENT> def __init__(self, workdir, context, zinc_utils): <NEW_LINE> <INDENT> self._workdir = workdir <NEW_LINE> self.context = context <NEW_LINE> self.zinc_utils = zinc_utils <NEW_LINE> self._classes_dirs_base = os.path.join(self._workdir, 'classes') <NEW_LINE> self._analysis_files_base = os.path.join(self._workdir, 'analysis') <NEW_LINE> safe_mkdir(self._classes_dirs_base) <NEW_LINE> safe_mkdir(self._analysis_files_base) <NEW_LINE> <DEDENT> def artifact_for_target(self, target): <NEW_LINE> <INDENT> targets = [target] <NEW_LINE> sources_by_target = {target: ZincArtifactFactory._calculate_sources(target)} <NEW_LINE> factory = self <NEW_LINE> return _ZincArtifact(factory, targets, sources_by_target, *self._artifact_args([target])) <NEW_LINE> <DEDENT> def merged_artifact(self, artifacts): <NEW_LINE> <INDENT> targets = list(itertools.chain.from_iterable([a.targets for a in artifacts])) <NEW_LINE> sources_by_target = dict(itertools.chain.from_iterable( [a.sources_by_target.items() for a in artifacts])) <NEW_LINE> factory = self <NEW_LINE> return _MergedZincArtifact(artifacts, factory, targets, sources_by_target, *self._artifact_args(targets)) <NEW_LINE> <DEDENT> def analysis_file_for_targets(self, targets): <NEW_LINE> <INDENT> artifact_data = self._artifact_args(targets) <NEW_LINE> return AnalysisFileSpec(artifact_data[2], artifact_data[1]) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def portable(analysis_file): <NEW_LINE> <INDENT> return analysis_file + '.portable' <NEW_LINE> <DEDENT> def _artifact_args(self, targets): <NEW_LINE> <INDENT> artifact_id = Target.maybe_readable_identify(targets) <NEW_LINE> classes_dir = os.path.join(self._classes_dirs_base, artifact_id) <NEW_LINE> analysis_file = os.path.join(self._analysis_files_base, artifact_id) + '.analysis' <NEW_LINE> return artifact_id, classes_dir, analysis_file <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _calculate_sources(target): <NEW_LINE> <INDENT> sources = 
[] <NEW_LINE> srcs = [os.path.join(target.target_base, src) for src in target.sources if src.endswith('.scala')] <NEW_LINE> sources.extend(srcs) <NEW_LINE> if (isinstance(target, ScalaLibrary) or isinstance(target, ScalaTests)) and target.java_sources: <NEW_LINE> <INDENT> sources.extend(resolve_target_sources(target.java_sources, '.java')) <NEW_LINE> <DEDENT> return sources
Creates objects representing zinc artifacts.
62599064379a373c97d9a73a
class LogisticNormal(TransformedDistribution): <NEW_LINE> <INDENT> arg_constraints = {'loc': constraints.real, 'scale': constraints.positive} <NEW_LINE> support = constraints.simplex <NEW_LINE> has_rsample = True <NEW_LINE> def __init__(self, loc, scale, validate_args=None): <NEW_LINE> <INDENT> base_dist = Normal(loc, scale) <NEW_LINE> super(LogisticNormal, self).__init__(base_dist, StickBreakingTransform(), validate_args=validate_args) <NEW_LINE> self._event_shape = torch.Size([s + 1 for s in self._event_shape]) <NEW_LINE> <DEDENT> def expand(self, batch_shape, _instance=None): <NEW_LINE> <INDENT> new = self._get_checked_instance(LogisticNormal, _instance) <NEW_LINE> batch_shape = torch.Size(batch_shape) <NEW_LINE> base_dist = self.base_dist.expand(batch_shape + self.base_dist.batch_shape[-1:]) <NEW_LINE> super(LogisticNormal, new).__init__(base_dist, StickBreakingTransform(), validate_args=False) <NEW_LINE> new._event_shape = self._event_shape <NEW_LINE> new._validate_args = self._validate_args <NEW_LINE> return new <NEW_LINE> <DEDENT> @property <NEW_LINE> def loc(self): <NEW_LINE> <INDENT> return self.base_dist.loc <NEW_LINE> <DEDENT> @property <NEW_LINE> def scale(self): <NEW_LINE> <INDENT> return self.base_dist.scale
Creates a logistic-normal distribution parameterized by :attr:`loc` and :attr:`scale` that define the base `Normal` distribution transformed with the `StickBreakingTransform` such that:: X ~ LogisticNormal(loc, scale) Y = log(X / (1 - X.cumsum(-1)))[..., :-1] ~ Normal(loc, scale) Args: loc (float or Tensor): mean of the base distribution scale (float or Tensor): standard deviation of the base distribution Example:: >>> # logistic-normal distributed with mean=(0, 0, 0) and stddev=(1, 1, 1) >>> # of the base Normal distribution >>> m = distributions.LogisticNormal(torch.tensor([0.0] * 3), torch.tensor([1.0] * 3)) >>> m.sample() tensor([ 0.7653, 0.0341, 0.0579, 0.1427])
6259906491f36d47f2231a1d
class MobileDetectionMiddleware(object): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> if request.GET.get('cordova', None): <NEW_LINE> <INDENT> if request.GET.get('cordova') == 'true': <NEW_LINE> <INDENT> request.session['cordova'] = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request.session['cordova'] = False <NEW_LINE> <DEDENT> <DEDENT> if request.session.get('cordova', False): <NEW_LINE> <INDENT> request.is_cordova = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request.is_cordova = False <NEW_LINE> <DEDENT> is_mobile = False <NEW_LINE> if request.META.has_key('HTTP_USER_AGENT'): <NEW_LINE> <INDENT> user_agent = request.META['HTTP_USER_AGENT'] <NEW_LINE> pattern = "(ipad|iphone)" <NEW_LINE> prog = re.compile(pattern, re.IGNORECASE) <NEW_LINE> match = prog.search(user_agent) <NEW_LINE> if match: <NEW_LINE> <INDENT> request.is_ios = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request.is_ios = False <NEW_LINE> <DEDENT> pattern = "(android)" <NEW_LINE> prog = re.compile(pattern, re.IGNORECASE) <NEW_LINE> match = prog.search(user_agent) <NEW_LINE> if match: <NEW_LINE> <INDENT> request.is_android = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request.is_android = False <NEW_LINE> <DEDENT> pattern = "(up.browser|up.link|mmp|ipad|android|symbian|smartphone|midp|wap|phone|windows ce|pda|mobile|mini|palm|netfront)" <NEW_LINE> prog = re.compile(pattern, re.IGNORECASE) <NEW_LINE> match = prog.search(user_agent) <NEW_LINE> if match: <NEW_LINE> <INDENT> is_mobile = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if request.META.has_key('HTTP_ACCEPT'): <NEW_LINE> <INDENT> http_accept = request.META['HTTP_ACCEPT'] <NEW_LINE> pattern = "application/vnd\.wap\.xhtml\+xml" <NEW_LINE> prog = re.compile(pattern, re.IGNORECASE) <NEW_LINE> match = prog.search(http_accept) <NEW_LINE> if match: <NEW_LINE> <INDENT> is_mobile = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not is_mobile: <NEW_LINE> <INDENT> user_agents_test = ("w3c 
", "acs-", "alav", "alca", "amoi", "audi", "avan", "benq", "bird", "blac", "blaz", "brew", "cell", "cldc", "cmd-", "dang", "doco", "eric", "hipt", "inno", "ipaq", "java", "jigs", "kddi", "keji", "leno", "lg-c", "lg-d", "lg-g", "lge-", "maui", "maxo", "midp", "mits", "mmef", "mobi", "mot-", "moto", "mwbp", "nec-", "newt", "noki", "xda", "palm", "pana", "pant", "phil", "play", "port", "prox", "qwap", "sage", "sams", "sany", "sch-", "sec-", "send", "seri", "sgh-", "shar", "sie-", "siem", "smal", "smar", "sony", "sph-", "symb", "t-mo", "teli", "tim-", "tosh", "tsm-", "upg1", "upsi", "vk-v", "voda", "wap-", "wapa", "wapi", "wapp", "wapr", "webc", "winw", "winw", "xda-",) <NEW_LINE> test = user_agent[0:4].lower() <NEW_LINE> if test in user_agents_test: <NEW_LINE> <INDENT> is_mobile = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> request.is_mobile = is_mobile
Useful middleware to detect if the user is on a mobile device. http://djangosnippets.org/snippets/2001/
625990642ae34c7f260ac804
class Restaurant(): <NEW_LINE> <INDENT> def __init__(self, restaurant_name, cuisine_type): <NEW_LINE> <INDENT> self.restaurant_name = restaurant_name <NEW_LINE> self.cuisine_type = cuisine_type <NEW_LINE> <DEDENT> def describe_restaurant(self): <NEW_LINE> <INDENT> print(self.restaurant_name.title() + " serves delicious " + self.cuisine_type + "!") <NEW_LINE> <DEDENT> def open_restaurant(self): <NEW_LINE> <INDENT> print(self.restaurant_name.title() + " is open for business!")
Class to define attributes for restaurant instances and indicate whether they're open or not
6259906456ac1b37e6303875
class _IsString(object): <NEW_LINE> <INDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, basestring)
Helper class to be used when checking equality when you don't know what the ID is but you want to check that it's an ID
6259906499cbb53fe6832600
class FeatureParser(object): <NEW_LINE> <INDENT> def __init__(self, debug_level=0, use_fuzziness=1, feature_cleaner=FeatureValueCleaner()): <NEW_LINE> <INDENT> self._scanner = GenBankScanner(debug_level) <NEW_LINE> self.use_fuzziness = use_fuzziness <NEW_LINE> self._cleaner = feature_cleaner <NEW_LINE> <DEDENT> def parse(self, handle): <NEW_LINE> <INDENT> self._consumer = _FeatureConsumer(self.use_fuzziness, self._cleaner) <NEW_LINE> self._scanner.feed(handle, self._consumer) <NEW_LINE> return self._consumer.data
Parse GenBank files into Seq + Feature objects (OBSOLETE). Direct use of this class is discouraged, and may be deprecated in a future release of Biopython. Please use Bio.SeqIO.parse(...) or Bio.SeqIO.read(...) instead.
6259906416aa5153ce401bf9
class DataProblem: <NEW_LINE> <INDENT> def __init__(self, json_file_path): <NEW_LINE> <INDENT> self._depot_ = 0 <NEW_LINE> self._json_path = json_file_path <NEW_LINE> self.data = DataTransformItem(self._json_path) <NEW_LINE> <DEDENT> def fit(self): <NEW_LINE> <INDENT> self.data.transform() <NEW_LINE> return self <NEW_LINE> <DEDENT> @property <NEW_LINE> def _vehicle(self): <NEW_LINE> <INDENT> _vehicle_capacity = self.data.get_vehicle_capacity() <NEW_LINE> return Vehicle(_vehicle_capacity) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _num_vehicles(self): <NEW_LINE> <INDENT> return len(self.data.get_vehicle_capacity()) <NEW_LINE> <DEDENT> def locations(self): <NEW_LINE> <INDENT> _depot_location = self.depot() <NEW_LINE> node_locations = [(_loc[0], _loc[1]) for _index, _loc in self.data.get_node_locations()] <NEW_LINE> node_locations.insert(0, _depot_location) <NEW_LINE> return node_locations <NEW_LINE> <DEDENT> def depot(self): <NEW_LINE> <INDENT> return self.data.get_depot() <NEW_LINE> <DEDENT> def demand(self): <NEW_LINE> <INDENT> node_demands = [_demand for _index, _demand in self.data.get_node_demand()] <NEW_LINE> node_demands.insert(0, 0) <NEW_LINE> return node_demands
Init data for the VRP problem: vehicle, nodes, locations, demand, etc.
6259906497e22403b383c629
class IpClusterBackend(object): <NEW_LINE> <INDENT> def __init__(self, ctx): <NEW_LINE> <INDENT> self.ctx = ctx <NEW_LINE> <DEDENT> def run(self, loop, mapPlugin): <NEW_LINE> <INDENT> from IPython import parallel <NEW_LINE> client = parallel.Client() <NEW_LINE> view = client.load_balanced_view() <NEW_LINE> try: <NEW_LINE> <INDENT> return view.map_sync(LoopWrapper(loop), mapPlugin.getWorkload()) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> pass
Backend based on IPython cluster. Will distribute the workload among the available engines.
625990641b99ca40022900c4
class RunNoWorries(Run): <NEW_LINE> <INDENT> def __init__(self, cmd, **kwargs): <NEW_LINE> <INDENT> super(RunNoWorries, self).__init__(cmd, **kwargs) <NEW_LINE> self._post_exitcode_log_failure = self.log.debug
When the exitcode is >0, log.debug instead of log.error
6259906421bff66bcd724382
class LinearDeformation(odl.Operator): <NEW_LINE> <INDENT> def __init__(self, fspace, vspace, grid, sigma): <NEW_LINE> <INDENT> self.grid = grid <NEW_LINE> self.sigma = sigma <NEW_LINE> super().__init__(odl.ProductSpace(fspace, vspace), fspace, False) <NEW_LINE> <DEDENT> def _call(self, x): <NEW_LINE> <INDENT> f, alphas = x <NEW_LINE> extension = f.space.extension(f.ntuple) <NEW_LINE> out_values = np.zeros(f.size) <NEW_LINE> for i, point in enumerate(self.range.points()): <NEW_LINE> <INDENT> point += v(point, self.grid, alphas, self.sigma) <NEW_LINE> if point in extension.domain: <NEW_LINE> <INDENT> out_values[i] = extension(point) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out_values[i] = 0 <NEW_LINE> <DEDENT> <DEDENT> return out_values
A linear deformation given by: ``g(x) = f(x + v(x))`` Where ``f(x)`` is the input template and ``v(x)`` is the translation at point ``x``. ``v(x)`` is computed using gaussian kernels with midpoints at ``grid``.
62599064cb5e8a47e493cd13
class FileHandler(object): <NEW_LINE> <INDENT> def __init__(self, rootdir, default_file): <NEW_LINE> <INDENT> logging.debug("fh: user specified rootdir: %s", rootdir) <NEW_LINE> rootdir = os.path.abspath(os.path.realpath(os.path.abspath(rootdir))) <NEW_LINE> logging.debug("fh: absolute rootdir is: %s", rootdir) <NEW_LINE> self._rootdir = rootdir <NEW_LINE> self._default_file = default_file <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> return FileRequestHandler(self._rootdir, self._default_file) <NEW_LINE> <DEDENT> @property <NEW_LINE> def rootdir(self): <NEW_LINE> <INDENT> return self._rootdir <NEW_LINE> <DEDENT> @property <NEW_LINE> def default_file(self): <NEW_LINE> <INDENT> return self._default_file
File handler class
625990640a50d4780f70694e
class ParticalTree(object): <NEW_LINE> <INDENT> def __init__(self,root,df,momentumScale = 1): <NEW_LINE> <INDENT> self.tree = Treeview(root) <NEW_LINE> self.df = df <NEW_LINE> self.ms = momentumScale <NEW_LINE> fieldnames = list(self.df.columns.values) <NEW_LINE> self.tree['columns'] = fieldnames <NEW_LINE> self.tree.column('#0',width=100) <NEW_LINE> for field in fieldnames: <NEW_LINE> <INDENT> self.tree.column(field,width=80) <NEW_LINE> self.tree.heading(field, text=field) <NEW_LINE> <DEDENT> for i,row in self.df.iterrows(): <NEW_LINE> <INDENT> values = [row[field] for field in fieldnames] <NEW_LINE> if not self.tree.exists(row['mother1']): <NEW_LINE> <INDENT> parent = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parent = row['mother1'] <NEW_LINE> <DEDENT> self.tree.insert( iid = row["id"], index = 'end', parent=parent, text='', values=values ) <NEW_LINE> <DEDENT> self.tree.bind('<<TreeviewSelect>>',self.handelSelect) <NEW_LINE> self.tree.pack() <NEW_LINE> <DEDENT> def getChildren(self,id): <NEW_LINE> <INDENT> children = list(self.tree.get_children(id)) <NEW_LINE> grandChildren = [] <NEW_LINE> for child in children: <NEW_LINE> <INDENT> grandChildren.extend(self.getChildren(child)) <NEW_LINE> <DEDENT> children.extend(grandChildren) <NEW_LINE> return children <NEW_LINE> <DEDENT> def getSelection(self): <NEW_LINE> <INDENT> rawSelection = self.tree.selection() <NEW_LINE> selection = [] <NEW_LINE> for row in rawSelection: <NEW_LINE> <INDENT> selection.extend(self.getChildren(row)) <NEW_LINE> <DEDENT> selection.extend(rawSelection) <NEW_LINE> return selection <NEW_LINE> <DEDENT> def handelSelect(self,event): <NEW_LINE> <INDENT> plt.clf() <NEW_LINE> selection = self.getSelection() <NEW_LINE> self.plotSelection(selection) <NEW_LINE> <DEDENT> def plotSelection(self,selection): <NEW_LINE> <INDENT> for particalId in selection: <NEW_LINE> <INDENT> print('.',end="",flush=True) <NEW_LINE> particals = self.df[self.df["id"] == float(particalId)] <NEW_LINE> for i,partical in 
particals.iterrows(): <NEW_LINE> <INDENT> x = [partical['xProd'], (partical['xProd'] + partical['px']) * self.ms] <NEW_LINE> y = [partical['yProd'], (partical['yProd'] + partical['py']) * self.ms] <NEW_LINE> plt.plot(x,y) <NEW_LINE> <DEDENT> <DEDENT> plt.show() <NEW_LINE> print("")
description of class
625990647d847024c075daf4
class QualificationDegree(ModelSQL, ModelView): <NEW_LINE> <INDENT> __name__ = 'rrhh.qualification.degree' <NEW_LINE> name = fields.Char('Name', required=True, translate=True) <NEW_LINE> active = fields.Boolean('Active') <NEW_LINE> @staticmethod <NEW_LINE> def default_active(): <NEW_LINE> <INDENT> return True
Qualification degree
625990643539df3088ecd9bb
class Plugin(GlancesPlugin): <NEW_LINE> <INDENT> def __init__(self, args=None, config=None): <NEW_LINE> <INDENT> super(Plugin, self).__init__(args=args, config=config) <NEW_LINE> self.display_curse = True <NEW_LINE> self.reset() <NEW_LINE> self.OPENSTACK = ThreadOpenStack() <NEW_LINE> self.OPENSTACK.start() <NEW_LINE> <DEDENT> def exit(self): <NEW_LINE> <INDENT> self.OPENSTACK.stop() <NEW_LINE> super(Plugin, self).exit() <NEW_LINE> <DEDENT> @GlancesPlugin._check_decorator <NEW_LINE> @GlancesPlugin._log_result_decorator <NEW_LINE> def update(self): <NEW_LINE> <INDENT> stats = self.get_init_value() <NEW_LINE> if import_error_tag: <NEW_LINE> <INDENT> return stats <NEW_LINE> <DEDENT> if self.input_method == 'local': <NEW_LINE> <INDENT> stats = self.OPENSTACK.stats <NEW_LINE> <DEDENT> self.stats = stats <NEW_LINE> return self.stats <NEW_LINE> <DEDENT> def msg_curse(self, args=None, max_width=None): <NEW_LINE> <INDENT> ret = [] <NEW_LINE> if not self.stats or self.stats == {} or self.is_disabled(): <NEW_LINE> <INDENT> return ret <NEW_LINE> <DEDENT> if 'instance-type' in self.stats and 'instance-id' in self.stats and 'region' in self.stats: <NEW_LINE> <INDENT> msg = 'Cloud ' <NEW_LINE> ret.append(self.curse_add_line(msg, "TITLE")) <NEW_LINE> msg = '{} instance {} ({})'.format( self.stats['instance-type'], self.stats['instance-id'], self.stats['region'] ) <NEW_LINE> ret.append(self.curse_add_line(msg)) <NEW_LINE> <DEDENT> return ret
Glances' cloud plugin. The goal of this plugin is to retrieve additional information concerning the datacenter where the host is connected. See https://github.com/nicolargo/glances/issues/1029 stats is a dict
625990643539df3088ecd9bc
class OutboundRule(SubResource): <NEW_LINE> <INDENT> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'allocated_outbound_ports': {'key': 'properties.allocatedOutboundPorts', 'type': 'int'}, 'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[SubResource]'}, 'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'protocol': {'key': 'properties.protocol', 'type': 'str'}, 'enable_tcp_reset': {'key': 'properties.enableTcpReset', 'type': 'bool'}, 'idle_timeout_in_minutes': {'key': 'properties.idleTimeoutInMinutes', 'type': 'int'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(OutboundRule, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.etag = kwargs.get('etag', None) <NEW_LINE> self.allocated_outbound_ports = kwargs.get('allocated_outbound_ports', None) <NEW_LINE> self.frontend_ip_configurations = kwargs.get('frontend_ip_configurations', None) <NEW_LINE> self.backend_address_pool = kwargs.get('backend_address_pool', None) <NEW_LINE> self.provisioning_state = kwargs.get('provisioning_state', None) <NEW_LINE> self.protocol = kwargs.get('protocol', None) <NEW_LINE> self.enable_tcp_reset = kwargs.get('enable_tcp_reset', None) <NEW_LINE> self.idle_timeout_in_minutes = kwargs.get('idle_timeout_in_minutes', None)
Outbound rule of the load balancer. :param id: Resource ID. :type id: str :param name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :type name: str :param etag: A unique read-only string that changes whenever the resource is updated. :type etag: str :param allocated_outbound_ports: The number of outbound ports to be used for NAT. :type allocated_outbound_ports: int :param frontend_ip_configurations: The Frontend IP addresses of the load balancer. :type frontend_ip_configurations: list[~azure.mgmt.network.v2018_12_01.models.SubResource] :param backend_address_pool: A reference to a pool of DIPs. Outbound traffic is randomly load balanced across IPs in the backend IPs. :type backend_address_pool: ~azure.mgmt.network.v2018_12_01.models.SubResource :param provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :type provisioning_state: str :param protocol: Protocol - TCP, UDP or All. Possible values include: "Tcp", "Udp", "All". :type protocol: str or ~azure.mgmt.network.v2018_12_01.models.OutboundRulePropertiesFormatProtocol :param enable_tcp_reset: Receive bidirectional TCP Reset on TCP flow idle timeout or unexpected connection termination. This element is only used when the protocol is set to TCP. :type enable_tcp_reset: bool :param idle_timeout_in_minutes: The timeout for the TCP idle connection. :type idle_timeout_in_minutes: int
625990641f5feb6acb164309
class List(ZenityBase): <NEW_LINE> <INDENT> def __init__(self, items, **kwargs): <NEW_LINE> <INDENT> self.dialog = 'list' <NEW_LINE> self.items = items <NEW_LINE> self.columns = [] <NEW_LINE> try: <NEW_LINE> <INDENT> kwargs['print-column'] = kwargs.pop('print_column') <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> super().__init__(self.dialog, **kwargs) <NEW_LINE> <DEDENT> def parse(self): <NEW_LINE> <INDENT> for option, value in self.options.items(): <NEW_LINE> <INDENT> self.cmd.extend([f'--{option}', value]) <NEW_LINE> <DEDENT> for item in self.items: <NEW_LINE> <INDENT> print(item) <NEW_LINE> for key in item.keys(): <NEW_LINE> <INDENT> if key not in self.columns: <NEW_LINE> <INDENT> self.cmd.extend(['--column', key]) <NEW_LINE> self.columns.append(key) <NEW_LINE> <DEDENT> <DEDENT> for value in item.values(): <NEW_LINE> <INDENT> self.cmd.extend([value])
List dialog
62599064d7e4931a7ef3d714
class Plugin: <NEW_LINE> <INDENT> def __init__(self, iface): <NEW_LINE> <INDENT> self.iface = iface <NEW_LINE> self.plugin_dir = os.path.dirname(__file__) <NEW_LINE> self.auto_curve_enabled = False <NEW_LINE> <DEDENT> def initGui(self): <NEW_LINE> <INDENT> self.toolbar = self.iface.addToolBar("Autocurve") <NEW_LINE> self.auto_curve_action = QAction( QIcon(os.path.join(self.plugin_dir, 'icon.svg')), 'Autocurve', self.toolbar, ) <NEW_LINE> self.auto_curve_action.setCheckable(True) <NEW_LINE> self.auto_curve_action.toggled.connect(self.toggle_auto_curve) <NEW_LINE> self.toolbar.addAction(self.auto_curve_action) <NEW_LINE> self.watched_layers = set() <NEW_LINE> self._prevent_recursion = False <NEW_LINE> self.watch_layer(self.iface.activeLayer()) <NEW_LINE> self.iface.currentLayerChanged.connect(self.watch_layer) <NEW_LINE> enabled = QgsSettings().value("autocurve/enabled", None) == 'true' <NEW_LINE> self.auto_curve_action.setChecked(enabled) <NEW_LINE> <DEDENT> def unload(self): <NEW_LINE> <INDENT> self.iface.mainWindow().removeToolBar(self.toolbar) <NEW_LINE> for layer in self.watched_layers: <NEW_LINE> <INDENT> if not sip.isdeleted(layer): <NEW_LINE> <INDENT> layer.geometryChanged.disconnect(self.add_to_changelog) <NEW_LINE> layer.featureAdded.disconnect(self.add_to_changelog) <NEW_LINE> layer.editCommandStarted.connect(self.reset_changelog) <NEW_LINE> layer.editCommandEnded.connect(self.curvify) <NEW_LINE> <DEDENT> <DEDENT> self.watched_layers = set() <NEW_LINE> <DEDENT> def toggle_auto_curve(self, checked): <NEW_LINE> <INDENT> self.auto_curve_enabled = checked <NEW_LINE> QgsSettings().setValue("autocurve/enabled", str(self.auto_curve_enabled).lower()) <NEW_LINE> <DEDENT> def watch_layer(self, layer): <NEW_LINE> <INDENT> if layer and layer.type() == QgsMapLayerType.VectorLayer and layer not in self.watched_layers: <NEW_LINE> <INDENT> layer.geometryChanged.connect(self.add_to_changelog) <NEW_LINE> layer.featureAdded.connect(self.add_to_changelog) <NEW_LINE> 
layer.editCommandStarted.connect(self.reset_changelog) <NEW_LINE> layer.editCommandEnded.connect(self.curvify) <NEW_LINE> self.watched_layers.add(layer) <NEW_LINE> <DEDENT> <DEDENT> def reset_changelog(self): <NEW_LINE> <INDENT> self.changed_fids = set() <NEW_LINE> <DEDENT> def add_to_changelog(self, fid, geometry=None): <NEW_LINE> <INDENT> self.changed_fids.add(fid) <NEW_LINE> <DEDENT> def curvify(self): <NEW_LINE> <INDENT> if not self.auto_curve_enabled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not self.changed_fids: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self._prevent_recursion: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> params = { "DISTANCE": QgsSettings().value("/qgis/digitizing/convert_to_curve_distance_tolerance", 1e-6), "ANGLE": QgsSettings().value("/qgis/digitizing/convert_to_curve_angle_tolerance", 1e-6), } <NEW_LINE> alg = QgsApplication.processingRegistry().createAlgorithmById('native:converttocurves') <NEW_LINE> layer = self.iface.activeLayer() <NEW_LINE> layer.selectByIds(list(self.changed_fids)) <NEW_LINE> self._prevent_recursion = True <NEW_LINE> AlgorithmExecutor.execute_in_place(alg, params) <NEW_LINE> self._prevent_recursion = False <NEW_LINE> layer.removeSelection()
QGIS Plugin Implementation.
6259906476e4537e8c3f0ca1
class stopwatch: <NEW_LINE> <INDENT> def __init__(self, name = "stopwatch"): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.startTime = time.time() <NEW_LINE> self.elapsedTime = 0.0 <NEW_LINE> self.running = False <NEW_LINE> self.lap = {} <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> if not self.running: <NEW_LINE> <INDENT> self.startTime = time.time() <NEW_LINE> self.running = True <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if self.running: <NEW_LINE> <INDENT> self.elapsedTime += time.time() - self.startTime <NEW_LINE> self.running = False <NEW_LINE> <DEDENT> <DEDENT> def getElapsedTime(self): <NEW_LINE> <INDENT> if self.running: <NEW_LINE> <INDENT> return self.elapsedTime + (time.time() - self.startTime) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.elapsedTime <NEW_LINE> <DEDENT> <DEDENT> def stopLapTimer(self, lap = "lap timer"): <NEW_LINE> <INDENT> self.lap[lap] = self.getElapsedTime() <NEW_LINE> return self.lap[lap] <NEW_LINE> <DEDENT> def getLapTime(self, lap = "lap timer"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.lap[lap] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.lap[lap] = self.getElapsedTime() <NEW_LINE> return self.lap[lap] <NEW_LINE> <DEDENT> <DEDENT> def getFormattedTime(self, lap = None): <NEW_LINE> <INDENT> if lap == None: <NEW_LINE> <INDENT> _et = self.getElapsedTime() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _et = self.getLapTime(lap) <NEW_LINE> <DEDENT> _et += 0.005 <NEW_LINE> _hh = int(_et / 3600) <NEW_LINE> _et -= (_hh * 3600) <NEW_LINE> _mm = int(_et / 60) <NEW_LINE> _et -= (_mm * 60) <NEW_LINE> _ss = int(_et) <NEW_LINE> _et -= _ss <NEW_LINE> _ds = int(_et * 100) <NEW_LINE> return "%.2d:%.2d:%.2d.%.2d" % (_hh, _mm, _ss, _ds)
Implements a timer with multiple named lap timers. A newly created timer is NOT running. Use start() and stop() to begin/end. Check boolean '.running'. A lap timer is created on reference.
625990642ae34c7f260ac805
class Rar( Archive ): <NEW_LINE> <INDENT> def scan_files( self, ext = 'nds' ): <NEW_LINE> <INDENT> list_archive = subprocess.Popen( [ 'unrar', 'lb', self.path ], stdout = subprocess.PIPE, stderr = subprocess.PIPE ) <NEW_LINE> for filename in list_archive.stdout.readlines(): <NEW_LINE> <INDENT> filename = filename.rstrip() <NEW_LINE> if re.search( "\.%s$" % ext, filename, flags = re.IGNORECASE ): <NEW_LINE> <INDENT> self.file_list.append( filename ) <NEW_LINE> <DEDENT> <DEDENT> list_archive.wait() <NEW_LINE> <DEDENT> def extract( self, archive_file, path ): <NEW_LINE> <INDENT> decompress = subprocess.Popen( [ 'unrar', 'x', '-y', self.path, archive_file, path ], stdout = subprocess.PIPE, stderr = subprocess.PIPE ) <NEW_LINE> decompress.wait() <NEW_LINE> return "%s/%s" % ( path, archive_file )
Rar archive handler
62599064498bea3a75a5918f
class QmlRepresentationMode(six.with_metaclass(EnumMetaclass, QObject)): <NEW_LINE> <INDENT> pass
A QML ready version of rtneuron.RepresentationMode
62599064a8370b77170f1aed
class TestTestClass(unittest.TestCase): <NEW_LINE> <INDENT> def test_answer(self): <NEW_LINE> <INDENT> t = TestClass() <NEW_LINE> sa, a = t.answer_me("Why?") <NEW_LINE> self.failUnless(a == 42)
Test class tester
6259906456ac1b37e6303876
class TipoHabitacion(models.Model): <NEW_LINE> <INDENT> objects = TipoHabitacionManager() <NEW_LINE> slug = models.SlugField(max_length=20, unique=True) <NEW_LINE> descripcion = models.CharField(max_length=100) <NEW_LINE> minimo = models.PositiveSmallIntegerField(default=2, help_text = "Ocupación mínima Adultos") <NEW_LINE> maximo = models.PositiveIntegerField(default=4, help_text= "Ocupación máxima adultos") <NEW_LINE> children = models.IntegerField(default=2, help_text= "Máxima ocupación de niños") <NEW_LINE> maximo_total = models.IntegerField(default=5, help_text = "Ocupación máxima entre adultos y niños") <NEW_LINE> is_base = models.BooleanField(help_text = "Marcar si es un referencia para cupo y precio") <NEW_LINE> is_individual = models.BooleanField(default=False, help_text="Indica si se refere a ocupación individual") <NEW_LINE> parent = models.ForeignKey('self', blank=True, null=True, related_name='subtipos', limit_choices_to={'is_base': True}) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "Tipo de Habitación" <NEW_LINE> verbose_name_plural = "Tipos de Habitación" <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.descripcion <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> if self.minimo > self.maximo: <NEW_LINE> <INDENT> raise ValidationError('La ocupación mínima debe ser menor que la máxima') <NEW_LINE> <DEDENT> if self.is_individual: <NEW_LINE> <INDENT> self.minimo = 1 <NEW_LINE> self.maximo = 1 <NEW_LINE> <DEDENT> if self.is_base and (not self.parent==None): <NEW_LINE> <INDENT> raise ValidationError('Una habitación base no puede estar ligada a otra') <NEW_LINE> <DEDENT> if not self.is_base and (self.maximo != self.minimo): <NEW_LINE> <INDENT> raise ValidationError('No puede existir rango para habitaciones que no son de tipo base') <NEW_LINE> <DEDENT> if not self.is_base and (self.parent.maximo < self.minimo): <NEW_LINE> <INDENT> raise ValidationError('El máximo no puede superar al de la habitación base') 
<NEW_LINE> <DEDENT> <DEDENT> def is_valid(self, adultos, ninyos): <NEW_LINE> <INDENT> return (adultos<=self.maximo) and (adultos>=self.minimo) and (ninyos>=0) and (ninyos<=self.children) and (self.maximo_total >= (adultos+ninyos)) <NEW_LINE> <DEDENT> def natural_key(self): <NEW_LINE> <INDENT> return (self.slug, )
Define los tipos de habitacion posibles. Una habitación se define por su ocupación mínima y máxima. Si una habitación es de un tipo base indica que habrá un contrato con cupo y precios especiales para este tipo de habitación
6259906416aa5153ce401bfb
class RawMeasurements(GenericAPIView): <NEW_LINE> <INDENT> permission_classes = (DjangoModelPermissionsOrAnonReadOnly,) <NEW_LINE> queryset = RawMeasurement.objects.none() <NEW_LINE> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> tide_gauge = TideGauge.objects.get(slug=self.kwargs['tide_gauge_slug']) <NEW_LINE> serializer = RawMeasurementListSerializer(data=request.data) <NEW_LINE> if not serializer.is_valid(): <NEW_LINE> <INDENT> raise InvalidDataError(serializer.errors) <NEW_LINE> <DEDENT> serializer.save(tide_gauge=tide_gauge) <NEW_LINE> return Response({'detail': 'OK.'}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> def get_serializer(self, instance=None, data=None): <NEW_LINE> <INDENT> assert instance is None and data is None, ( 'instance={}, data={}'.format(instance, data)) <NEW_LINE> return RawMeasurementListSerializer.child
Store raw measurements for a tide gauge. Predictions must be given in a JSON array. Note that any bad prediction in the list will cause the entire batch to be dropped.
625990648e71fb1e983bd1ea
class SilverServiceTaxi(Car): <NEW_LINE> <INDENT> def __init__(self, name, fuel, fancy): <NEW_LINE> <INDENT> super().__init__(name, fuel) <NEW_LINE> self.price_per_km = 1.20 <NEW_LINE> self.current_fare_distance = 0 <NEW_LINE> self.fanciness = fancy*self.price_per_km <NEW_LINE> self.flagfall = float(4.50) <NEW_LINE> self.fare = 0 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{}, {}km on current fare, ${:.2f}/km plus flagfall of ${:.2f}".format(super().__str__(), self.current_fare_distance, self.fanciness, self.flagfall) <NEW_LINE> <DEDENT> def get_fare(self): <NEW_LINE> <INDENT> return (self.fanciness * self.current_fare_distance) + self.flagfall <NEW_LINE> <DEDENT> def start_fare(self): <NEW_LINE> <INDENT> self.current_fare_distance = 0 <NEW_LINE> <DEDENT> def drive(self, distance): <NEW_LINE> <INDENT> distance_driven = super().drive(distance) <NEW_LINE> self.current_fare_distance += distance_driven <NEW_LINE> return distance_driven
specialised version of a Car that includes fare costs
625990647cff6e4e811b7165
class Cat(Pet): <NEW_LINE> <INDENT> def sound(self): <NEW_LINE> <INDENT> print('Мяу!')
Кошка - является Домашним Животным
625990648e7ae83300eea7ac
class Extractor(object): <NEW_LINE> <INDENT> def __init__(self, dataset, imgs_per_gpu, workers_per_gpu, dist_mode=False): <NEW_LINE> <INDENT> from openselfsup import datasets <NEW_LINE> if isinstance(dataset, Dataset): <NEW_LINE> <INDENT> self.dataset = dataset <NEW_LINE> <DEDENT> elif isinstance(dataset, dict): <NEW_LINE> <INDENT> self.dataset = datasets.build_dataset(dataset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError( 'dataset must be a Dataset object or a dict, not {}'.format( type(dataset))) <NEW_LINE> <DEDENT> self.data_loader = datasets.build_dataloader( self.dataset, imgs_per_gpu, workers_per_gpu, dist=dist_mode, shuffle=False) <NEW_LINE> self.dist_mode = dist_mode <NEW_LINE> self.avg_pool = nn.AdaptiveAvgPool2d((1, 1)) <NEW_LINE> <DEDENT> def _forward_func(self, runner, **x): <NEW_LINE> <INDENT> backbone_feat = runner.model(mode='extract', **x) <NEW_LINE> last_layer_feat = runner.model.module.neck([backbone_feat[-1]])[0] <NEW_LINE> last_layer_feat = last_layer_feat.view(last_layer_feat.size(0), -1) <NEW_LINE> return dict(feature=last_layer_feat.cpu()) <NEW_LINE> <DEDENT> def __call__(self, runner): <NEW_LINE> <INDENT> func = lambda **x: self._forward_func(runner, **x) <NEW_LINE> if self.dist_mode: <NEW_LINE> <INDENT> feats = dist_forward_collect( func, self.data_loader, runner.rank, len(self.dataset), ret_rank=-1)['feature'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> feats = nondist_forward_collect(func, self.data_loader, len(self.dataset))['feature'] <NEW_LINE> <DEDENT> return feats
Feature extractor. Args: dataset (Dataset | dict): A PyTorch dataset or dict that indicates the dataset. imgs_per_gpu (int): Number of images on each GPU, i.e., batch size of each GPU. workers_per_gpu (int): How many subprocesses to use for data loading for each GPU. dist_mode (bool): Use distributed extraction or not. Default: False.
62599064f548e778e596cca8
class Settings: <NEW_LINE> <INDENT> def __init__(self, default=True): <NEW_LINE> <INDENT> self.default = default <NEW_LINE> self.authenPath = self.getPath("authen") <NEW_LINE> accounts = self.setupAccounts() <NEW_LINE> self.hotmailAcc = accounts[0] <NEW_LINE> self.gmailAcc = accounts[1] <NEW_LINE> self.unknownAcc = accounts[2] <NEW_LINE> self.spamListPath = self.getPath("spam") <NEW_LINE> self.spamList = self.getSpamList() <NEW_LINE> <DEDENT> def getPath(self, fileType): <NEW_LINE> <INDENT> _file = fl.FileSystem() <NEW_LINE> if self.default is True: <NEW_LINE> <INDENT> _file.useDefaultPath(fileType) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if fileType == "authen": <NEW_LINE> <INDENT> print("Please create an authenticaton.txt file.") <NEW_LINE> <DEDENT> elif fileType == "spam": <NEW_LINE> <INDENT> print("Please create a spam_list.txt file.") <NEW_LINE> <DEDENT> <DEDENT> _file.createFile() <NEW_LINE> return _file.filePath <NEW_LINE> <DEDENT> def setupAccounts(self): <NEW_LINE> <INDENT> if fl.FileSystem.isEmpty(self.authenPath): <NEW_LINE> <INDENT> text = input("Please enter your email address or environment " "variable\n> ") <NEW_LINE> fl.FileSystem.editFile(self.authenPath, text) <NEW_LINE> text = getpass.getpass("Please enter your email password or " "environment variable\n> ") <NEW_LINE> fl.FileSystem.editFile(self.authenPath, text) <NEW_LINE> <DEDENT> acc = acs.AccountsManager() <NEW_LINE> acc.getAccounts(self.authenPath) <NEW_LINE> acc.categorize() <NEW_LINE> return (acc.hotmailAcc, acc.gmailAcc, acc.uncategorizedAcc) <NEW_LINE> <DEDENT> def getSpamList(self): <NEW_LINE> <INDENT> if fl.FileSystem.isEmpty(self.spamListPath): <NEW_LINE> <INDENT> self.addSpam() <NEW_LINE> <DEDENT> with open(self.spamListPath, 'r') as rf: <NEW_LINE> <INDENT> spamList = rf.read().split("\n") <NEW_LINE> <DEDENT> spamList.pop() <NEW_LINE> return spamList <NEW_LINE> <DEDENT> def addSpam(self): <NEW_LINE> <INDENT> text = input("Please add the spam email address to delete.\n> ") 
<NEW_LINE> fl.FileSystem.editFile(self.spamListPath, text)
Get and set all of the basic required information neccessary for the email app to work.
625990645fdd1c0f98e5f6a2
@registry.register_module('dataset') <NEW_LINE> class ConcatDataset(_ConcatDataset): <NEW_LINE> <INDENT> def __init__(self, datasets): <NEW_LINE> <INDENT> super(ConcatDataset, self).__init__(datasets) <NEW_LINE> self.CLASSES = datasets[0].CLASSES <NEW_LINE> if hasattr(datasets[0], 'flag'): <NEW_LINE> <INDENT> flags = [] <NEW_LINE> for i in range(0, len(datasets)): <NEW_LINE> <INDENT> flags.append(datasets[i].flag) <NEW_LINE> <DEDENT> self.flag = np.concatenate(flags) <NEW_LINE> <DEDENT> <DEDENT> def get_cat_ids(self, idx): <NEW_LINE> <INDENT> if idx < 0: <NEW_LINE> <INDENT> if -idx > len(self): <NEW_LINE> <INDENT> raise ValueError( 'absolute value of index should not exceed dataset length') <NEW_LINE> <DEDENT> idx = len(self) + idx <NEW_LINE> <DEDENT> dataset_idx = bisect.bisect_right(self.cumulative_sizes, idx) <NEW_LINE> if dataset_idx == 0: <NEW_LINE> <INDENT> sample_idx = idx <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sample_idx = idx - self.cumulative_sizes[dataset_idx - 1] <NEW_LINE> <DEDENT> return self.datasets[dataset_idx].get_cat_ids(sample_idx)
A wrapper of concatenated dataset. Same as :obj:`torch.utils.data.dataset.ConcatDataset`, but concat the group flag for image aspect ratio. Args: datasets (list[:obj:`Dataset`]): A list of datasets.
625990643d592f4c4edbc5fc
class QDesignerTaskMenuExtension(): <NEW_LINE> <INDENT> def preferredEditAction(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def taskActions(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, QDesignerTaskMenuExtension=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None)
QDesignerTaskMenuExtension() QDesignerTaskMenuExtension(QDesignerTaskMenuExtension)
62599064ac7a0e7691f73c04
class DeclaredOn(Relationship): <NEW_LINE> <INDENT> pass
Describes the relationship between a type and the properties that instances of that type can have.
6259906463d6d428bbee3e18
class RecipesManagerException(RuntimeError): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.parameter = value <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self.parameter)
RecipesManager Error
625990644e4d562566373b26
class EventProposer_C(pymc.Metropolis): <NEW_LINE> <INDENT> def __init__(self, stochastic, *args, **kwargs): <NEW_LINE> <INDENT> return super(self.__class__, self).__init__(stochastic, *args, **kwargs) <NEW_LINE> <DEDENT> def propose(self): <NEW_LINE> <INDENT> tau = 1./(self.adaptive_scale_factor * self.proposal_sd)**2 <NEW_LINE> time = pymc.rnormal(self.stochastic.value.time, tau) <NEW_LINE> censored = rand.random() > 0.5 <NEW_LINE> self.stochastic.value = _Survival._event(time, censored) <NEW_LINE> <DEDENT> def competence(self, stochastic): <NEW_LINE> <INDENT> if stochastic.dtype == _Survival._event: <NEW_LINE> <INDENT> return 3 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0
Simple proposal distribution for Event objects for use in Metropolis samplers.
62599064627d3e7fe0e085aa
class BayModel(cluster_template.ClusterTemplate): <NEW_LINE> <INDENT> SSH_AUTHORIZED_KEY = 'ssh_authorized_key' <NEW_LINE> deprecate_msg = _('Please use OS::Magnum::ClusterTemplate instead.') <NEW_LINE> support_status = support.SupportStatus( status=support.HIDDEN, message=deprecate_msg, version='11.0.0', previous_status=support.SupportStatus( status=support.DEPRECATED, message=deprecate_msg, version='9.0.0', previous_status=support.SupportStatus( status=support.SUPPORTED, version='5.0.0'), substitute_class=cluster_template.ClusterTemplate ) ) <NEW_LINE> def translation_rules(self, props): <NEW_LINE> <INDENT> if props.get(self.SSH_AUTHORIZED_KEY): <NEW_LINE> <INDENT> return [ translation.TranslationRule( props, translation.TranslationRule.DELETE, [self.SSH_AUTHORIZED_KEY] ) ]
A resource for the BayModel in Magnum. This resource has been deprecated by ClusterTemplate. BayModel is an object that stores template information about the bay which is used to create new bays consistently.
62599064b7558d5895464abe
class TokenizedTweet: <NEW_LINE> <INDENT> SEPARATOR_CHARS = "" <NEW_LINE> STRIP_CHARS = "" <NEW_LINE> def __init__(self, tweet, sender, extra=None): <NEW_LINE> <INDENT> self.tweet = tweet <NEW_LINE> self.sender = sender <NEW_LINE> self.extra = extra
Represents one tokenized tweet. Given a raw tweet and associated data, use this class to tokenize it and hold the resulting files. Has fields for the tweet's sender, its original author (if a retweet), the retweet type (RT, MT), length apart from the retweet header, and a field for extra data specific to a source.
625990648da39b475be04909
class UserProfileInfo(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User, on_delete=models.CASCADE) <NEW_LINE> portfolio_site = models.URLField(blank=True) <NEW_LINE> profile_pic = models.ImageField(upload_to='profile_pics', blank=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.user.username
Model class to add additional information that the django.contrib.auth.models.User class doesn't have. We don't directly inherit from that User class, as it may cause the DB to think that there are multiple instances of the User class. Instead we use a one-to-one field relationship.
62599064460517430c432be4