code | docstring | _id
---|---|---|
class Frankeinstein(MetaBadge): <NEW_LINE> <INDENT> id="frankeinstein" <NEW_LINE> model = models.History <NEW_LINE> one_time_only = True <NEW_LINE> title = _("Frankeinstein") <NEW_LINE> description = _("Cloned a cancelled or deprecated object") <NEW_LINE> link_to_doc = "PLMObject/1_common.html#attributes" <NEW_LINE> level = "2" <NEW_LINE> def get_progress(self, user): <NEW_LINE> <INDENT> cloned = self.model.objects.filter(user=user, action__in=("Clone", "cloned")) <NEW_LINE> if not cloned : <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> cloned = [c.plmobject for c in cloned if c.plmobject.is_cancelled or c.plmobject.is_deprecated] <NEW_LINE> progress = 0 if not cloned else 1 <NEW_LINE> return progress | Badge won by a user who cloned a cancelled or deprecated object | 62599041e64d504609df9d32
@ns.route('/') <NEW_LINE> class BookList(Resource): <NEW_LINE> <INDENT> @ns.doc('list_todos') <NEW_LINE> @ns.marshal_list_with(book) <NEW_LINE> def get(self): <NEW_LINE> <INDENT> return DAO.books <NEW_LINE> <DEDENT> @ns.doc('create_todo') <NEW_LINE> @ns.expect(book) <NEW_LINE> @ns.marshal_with(book, code=201) <NEW_LINE> def post(self): <NEW_LINE> <INDENT> return DAO.create(api.payload), 201 | Shows a list of all todos, and lets you POST to add new tasks | 625990413eb6a72ae038b924 |
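The `BookList` row above assumes Flask-RESTX (or flask-restplus) scaffolding that is not part of the stored sample: the `api`, `ns`, `book`, and `DAO` names. A minimal sketch of that context follows; all definitions here are hypothetical stand-ins, not taken from the original project.

```python
# Hypothetical wiring for the BookList resource above (not part of the dataset row).
from flask import Flask
from flask_restx import Api, fields

app = Flask(__name__)
api = Api(app)
ns = api.namespace("books", description="Book operations")

# Schema used by @ns.marshal_with / @ns.expect in the resource above.
book = ns.model("Book", {
    "id": fields.Integer(readonly=True),
    "title": fields.String(required=True),
})

class BookDAO:
    """In-memory stand-in for the DAO the resource calls."""
    def __init__(self):
        self.books = []

    def create(self, data):
        data["id"] = len(self.books) + 1
        self.books.append(data)
        return data

DAO = BookDAO()
```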
class GoogleFusionTableService: <NEW_LINE> <INDENT> def __init__(self, service): <NEW_LINE> <INDENT> self.service = service <NEW_LINE> <DEDENT> def save_location(self, location): <NEW_LINE> <INDENT> sql_query = "INSERT INTO {} (Address, Location) VALUES ('{}', '{},{}')" .format(settings.TABLE_ID, location['address'], location['latitude'], location['longitude']) <NEW_LINE> query_statement = self.service.query().sql(sql=sql_query) <NEW_LINE> return query_statement.execute() <NEW_LINE> <DEDENT> def purge_table(self): <NEW_LINE> <INDENT> sql_query = "DELETE FROM {}".format(settings.TABLE_ID) <NEW_LINE> query_statement = self.service.query().sql(sql=sql_query) <NEW_LINE> return query_statement.execute() | Google fusion table service to save location records and purge whole table | 62599041a79ad1619776b342 |
class BridgesResource(CustomErrorHandlingResource, CSPResource): <NEW_LINE> <INDENT> isLeaf = True <NEW_LINE> def __init__(self, distributor, schedule, N=1, useForwardedHeader=False, includeFingerprints=True): <NEW_LINE> <INDENT> gettext.install("bridgedb", unicode=True) <NEW_LINE> CSPResource.__init__(self) <NEW_LINE> self.distributor = distributor <NEW_LINE> self.schedule = schedule <NEW_LINE> self.nBridgesToGive = N <NEW_LINE> self.useForwardedHeader = useForwardedHeader <NEW_LINE> self.includeFingerprints = includeFingerprints <NEW_LINE> <DEDENT> def render(self, request): <NEW_LINE> <INDENT> self.setCSPHeader(request) <NEW_LINE> try: <NEW_LINE> <INDENT> response = self.getBridgeRequestAnswer(request) <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> logging.exception(err) <NEW_LINE> response = self.renderAnswer(request) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> def getClientIP(self, request): <NEW_LINE> <INDENT> return getClientIP(request, self.useForwardedHeader) <NEW_LINE> <DEDENT> def getBridgeRequestAnswer(self, request): <NEW_LINE> <INDENT> bridgeLines = None <NEW_LINE> interval = self.schedule.intervalStart(time.time()) <NEW_LINE> ip = self.getClientIP(request) <NEW_LINE> logging.info("Replying to web request from %s. Parameters were %r" % (ip, request.args)) <NEW_LINE> if ip: <NEW_LINE> <INDENT> bridgeRequest = HTTPSBridgeRequest() <NEW_LINE> bridgeRequest.client = ip <NEW_LINE> bridgeRequest.isValid(True) <NEW_LINE> bridgeRequest.withIPversion(request.args) <NEW_LINE> bridgeRequest.withPluggableTransportType(request.args) <NEW_LINE> bridgeRequest.withoutBlockInCountry(request) <NEW_LINE> bridgeRequest.generateFilters() <NEW_LINE> bridges = self.distributor.getBridges(bridgeRequest, interval) <NEW_LINE> bridgeLines = [replaceControlChars(bridge.getBridgeLine( bridgeRequest, self.includeFingerprints)) for bridge in bridges] <NEW_LINE> <DEDENT> return self.renderAnswer(request, bridgeLines) <NEW_LINE> <DEDENT> def getResponseFormat(self, request): <NEW_LINE> <INDENT> format = request.args.get("format", None) <NEW_LINE> if format and len(format): <NEW_LINE> <INDENT> format = format[0] <NEW_LINE> <DEDENT> return format <NEW_LINE> <DEDENT> def renderAnswer(self, request, bridgeLines=None): <NEW_LINE> <INDENT> rtl = False <NEW_LINE> format = self.getResponseFormat(request) <NEW_LINE> if format == 'plain': <NEW_LINE> <INDENT> request.setHeader("Content-Type", "text/plain") <NEW_LINE> try: <NEW_LINE> <INDENT> rendered = bytes('\n'.join(bridgeLines)) <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> rendered = replaceErrorPage(request, err, html=False) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> request.setHeader("Content-Type", "text/html; charset=utf-8") <NEW_LINE> qrcode = None <NEW_LINE> qrjpeg = generateQR(bridgeLines) <NEW_LINE> if qrjpeg: <NEW_LINE> <INDENT> qrcode = 'data:image/jpeg;base64,%s' % base64.b64encode(qrjpeg) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> langs = translations.getLocaleFromHTTPRequest(request) <NEW_LINE> rtl = translations.usingRTLLang(langs) <NEW_LINE> template = lookup.get_template('bridges.html') <NEW_LINE> rendered = template.render(strings, rtl=rtl, lang=langs[0], answer=bridgeLines, qrcode=qrcode) <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> rendered = replaceErrorPage(request, err) <NEW_LINE> <DEDENT> <DEDENT> return rendered | This resource displays bridge lines in response to a request. | 625990413eb6a72ae038b923 |
class GCDaySteps(GCDaySection): <NEW_LINE> <INDENT> def __init__(self, raw_html): <NEW_LINE> <INDENT> super().__init__(raw_html, tag="STEPS") <NEW_LINE> self.total = None <NEW_LINE> self.goal = None <NEW_LINE> self.avg = None <NEW_LINE> self.distance = None <NEW_LINE> <DEDENT> def parse(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.parse_steps_count() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.parse_steps_stats() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def parse_steps_count(self): <NEW_LINE> <INDENT> container = self.soup.find_all("div", {"class": "span4 text-center charts"})[0] <NEW_LINE> total = container.find_all("div", {"class": "data-bit"})[ 0].text <NEW_LINE> self.total = utils.parse_num(total) <NEW_LINE> goal = container.find_all("div", {"class": "h5"})[0].text.strip().split( " ")[ -1].strip() <NEW_LINE> self.goal = utils.parse_num(goal) <NEW_LINE> <DEDENT> def parse_steps_stats(self): <NEW_LINE> <INDENT> container = self.soup.find_all("div", { "class": "span8 daily-summary-stats-placeholder"})[0] <NEW_LINE> container = container.find_all("div", {"class": "row-fluid top-xl"})[0] <NEW_LINE> container = container.find_all("div", {"class": "data-bit"}) <NEW_LINE> self.distance = utils.parse_num(container[1].text.split("km")[0]) <NEW_LINE> self.avg = utils.parse_num(container[2].text) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return { "total": self.total, "goal": self.goal, "avg": self.avg, "distance": self.distance } | Standard activity in the Garmin Connect timeline of day.
Common features are total, goal, distance, and daily avg. | 6259904123e79379d538d7c1
class UbuntuNetworking(Networking, UbuntuPlugin): <NEW_LINE> <INDENT> trace_host = "archive.ubuntu.com" <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> super(UbuntuNetworking, self).setup() <NEW_LINE> self.add_copy_specs([ "/etc/resolvconf", "/etc/ufw", "/var/log/ufw.Log", "/etc/resolv.conf"]) <NEW_LINE> self.add_cmd_output("/usr/sbin/ufw status") <NEW_LINE> self.add_cmd_output("/usr/sbin/ufw app list") <NEW_LINE> if self.get_option("traceroute"): <NEW_LINE> <INDENT> self.add_cmd_output("/usr/sbin/traceroute -n %s" % self.trace_host) | Network-related information for Ubuntu-based distributions
| 62599041b830903b9686eddb |
class PositionWiseFFN(nn.Block): <NEW_LINE> <INDENT> def __init__(self, ffn_num_hiddens, ffn_num_outputs, **kwargs): <NEW_LINE> <INDENT> super(PositionWiseFFN, self).__init__(**kwargs) <NEW_LINE> self.dense1 = nn.Dense(ffn_num_hiddens, flatten=False, activation='relu') <NEW_LINE> self.dense2 = nn.Dense(ffn_num_outputs, flatten=False) <NEW_LINE> <DEDENT> def forward(self, X): <NEW_LINE> <INDENT> return self.dense2(self.dense1(X)) | Positionwise feed-forward network.
Defined in :numref:`sec_transformer` | 6259904107d97122c4217f62 |
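As a quick illustration of the `PositionWiseFFN` row above, here is a hedged usage sketch assuming MXNet/Gluon with its NumPy interface and the class definition from this row; the dense layers act on the last axis, so every position is transformed independently.

```python
# Usage sketch for PositionWiseFFN above; assumes MXNet is installed.
from mxnet import np, npx
npx.set_np()

ffn = PositionWiseFFN(ffn_num_hiddens=4, ffn_num_outputs=8)
ffn.initialize()
X = np.ones((2, 3, 4))   # (batch size, number of positions, feature size)
Y = ffn(X)               # Dense layers with flatten=False apply per position
print(Y.shape)           # (2, 3, 8)
```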
class NodeController(object): <NEW_LINE> <INDENT> def __init__(self, nodeParams, logFile=[]): <NEW_LINE> <INDENT> self.logFile = logFile <NEW_LINE> self.logTime = 0 <NEW_LINE> self.nodeParams = nodeParams <NEW_LINE> <DEDENT> def controlNode(self): <NEW_LINE> <INDENT> self.processCommands() <NEW_LINE> self.monitorFormationStatus() <NEW_LINE> self.executeNode() <NEW_LINE> self.logData() <NEW_LINE> <DEDENT> def executeNode(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def processFCCommands(self, FCComm): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def processNodeCommands(self, comm): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def processCommands(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def logData(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def monitorNodeUpdates(self): <NEW_LINE> <INDENT> if 'nodeStatus' not in self.nodeParams.__dict__ or 'clock' not in self.nodeParams.__dict__: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for node in self.nodeParams.nodeStatus: <NEW_LINE> <INDENT> if (node.lastStateUpdateTime + self.nodeParams.config.nodeUpdateTimeout) < self.nodeParams.clock.getTime(): <NEW_LINE> <INDENT> node.updating = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> node.updating = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def checkNodeLinks(self): <NEW_LINE> <INDENT> thisNode = self.nodeParams.config.nodeId - 1 <NEW_LINE> for i in range(self.nodeParams.config.maxNumNodes): <NEW_LINE> <INDENT> if (i == thisNode): <NEW_LINE> <INDENT> self.nodeParams.linkStatus[thisNode][i] = LinkStatus.GoodLink <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if (self.nodeParams.nodeStatus[i].present and (self.nodeParams.clock.getTime() - self.nodeParams.nodeStatus[i].lastMsgRcvdTime) < 1.5*self.nodeParams.config.commConfig['frameLength']): <NEW_LINE> <INDENT> self.nodeParams.linkStatus[thisNode][i] = LinkStatus.GoodLink <NEW_LINE> <DEDENT> elif (self.nodeParams.nodeStatus[i].updating == True): <NEW_LINE> <INDENT> self.nodeParams.linkStatus[thisNode][i] = LinkStatus.IndirectLink <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if (self.nodeParams.linkStatus[thisNode][i] != LinkStatus.NoLink): <NEW_LINE> <INDENT> self.nodeParams.linkStatus[thisNode][i] = LinkStatus.BadLink <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def monitorFormationStatus(self): <NEW_LINE> <INDENT> self.monitorNodeUpdates() <NEW_LINE> self.checkNodeLinks() | Generic node controller to subtype for specific vehicle types.
Attributes:
logFile: File object for logging node data.
logTime: Time of last write to log file.
nodeConfig: NodeConfig instance that stores configuration data for this node. | 62599041097d151d1a2c232b |
class _ValueCtxManager(Awaitable[_T], AsyncContextManager[_T]): <NEW_LINE> <INDENT> def __init__(self, value_obj: "Value", coro: Awaitable[Any], *, acquire_lock: bool): <NEW_LINE> <INDENT> self.value_obj = value_obj <NEW_LINE> self.coro = coro <NEW_LINE> self.raw_value = None <NEW_LINE> self.__original_value = None <NEW_LINE> self.__acquire_lock = acquire_lock <NEW_LINE> self.__lock = self.value_obj.get_lock() <NEW_LINE> <DEDENT> def __await__(self) -> _T: <NEW_LINE> <INDENT> return self.coro.__await__() <NEW_LINE> <DEDENT> async def __aenter__(self) -> _T: <NEW_LINE> <INDENT> if self.__acquire_lock is True: <NEW_LINE> <INDENT> await self.__lock.acquire() <NEW_LINE> <DEDENT> self.raw_value = await self <NEW_LINE> if not isinstance(self.raw_value, (list, dict)): <NEW_LINE> <INDENT> raise TypeError( "Type of retrieved value must be mutable (i.e. " "list or dict) in order to use a config value as " "a context manager." ) <NEW_LINE> <DEDENT> self.__original_value = pickle.loads(pickle.dumps(self.raw_value, -1)) <NEW_LINE> return self.raw_value <NEW_LINE> <DEDENT> async def __aexit__(self, exc_type, exc, tb): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if isinstance(self.raw_value, dict): <NEW_LINE> <INDENT> raw_value = _str_key_dict(self.raw_value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raw_value = self.raw_value <NEW_LINE> <DEDENT> if raw_value != self.__original_value: <NEW_LINE> <INDENT> await self.value_obj.set(self.raw_value) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if self.__acquire_lock is True: <NEW_LINE> <INDENT> self.__lock.release() | Context manager implementation of config values.
This class allows mutable config values to be both "get" and "set" from
within an async context manager.
The context manager can only be used to get and set a mutable data type,
i.e. `dict`s or `list`s. This is because this class's ``raw_value``
attribute must contain a reference to the object being modified within the
context manager.
It should also be noted that the use of this context manager implies
the acquisition of the value's lock when the ``acquire_lock`` kwarg
to ``__init__`` is set to ``True``. | 6259904130c21e258be99aca |
class DownloadDmsFileRequest(object): <NEW_LINE> <INDENT> swagger_types = { 'cloud_db_instance_no': 'str', 'file_name': 'str' } <NEW_LINE> attribute_map = { 'cloud_db_instance_no': 'cloudDBInstanceNo', 'file_name': 'fileName' } <NEW_LINE> def __init__(self, cloud_db_instance_no=None, file_name=None): <NEW_LINE> <INDENT> self._cloud_db_instance_no = None <NEW_LINE> self._file_name = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.cloud_db_instance_no = cloud_db_instance_no <NEW_LINE> self.file_name = file_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def cloud_db_instance_no(self): <NEW_LINE> <INDENT> return self._cloud_db_instance_no <NEW_LINE> <DEDENT> @cloud_db_instance_no.setter <NEW_LINE> def cloud_db_instance_no(self, cloud_db_instance_no): <NEW_LINE> <INDENT> if cloud_db_instance_no is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `cloud_db_instance_no`, must not be `None`") <NEW_LINE> <DEDENT> self._cloud_db_instance_no = cloud_db_instance_no <NEW_LINE> <DEDENT> @property <NEW_LINE> def file_name(self): <NEW_LINE> <INDENT> return self._file_name <NEW_LINE> <DEDENT> @file_name.setter <NEW_LINE> def file_name(self, file_name): <NEW_LINE> <INDENT> if file_name is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `file_name`, must not be `None`") <NEW_LINE> <DEDENT> self._file_name = file_name <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DownloadDmsFileRequest): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62599041d6c5a102081e33e9 |
class BaseBlocking(object): <NEW_LINE> <INDENT> def __init__(self, ref_attr_index, target_attr_index): <NEW_LINE> <INDENT> self.ref_attr_index = ref_attr_index <NEW_LINE> self.target_attr_index = target_attr_index <NEW_LINE> self.refids = None <NEW_LINE> self.targetids = None <NEW_LINE> self.is_fitted = False <NEW_LINE> <DEDENT> def _fit(self, refset, targetset): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def _iter_blocks(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def _cleanup(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def fit(self, refset, targetset): <NEW_LINE> <INDENT> self._fit(refset, targetset) <NEW_LINE> self.refids = [(i, r[0]) for i, r in enumerate(refset)] <NEW_LINE> self.targetids = [(i, r[0]) for i, r in enumerate(targetset)] <NEW_LINE> self.is_fitted = True <NEW_LINE> <DEDENT> def iter_blocks(self): <NEW_LINE> <INDENT> assert self.is_fitted <NEW_LINE> return self._iter_blocks() <NEW_LINE> <DEDENT> def iter_indice_blocks(self): <NEW_LINE> <INDENT> assert self.is_fitted <NEW_LINE> for block1, block2 in self._iter_blocks(): <NEW_LINE> <INDENT> yield [r[0] for r in block1], [r[0] for r in block2] <NEW_LINE> <DEDENT> <DEDENT> def iter_id_blocks(self): <NEW_LINE> <INDENT> assert self.is_fitted <NEW_LINE> for block1, block2 in self._iter_blocks(): <NEW_LINE> <INDENT> yield [r[1] for r in block1], [r[1] for r in block2] <NEW_LINE> <DEDENT> <DEDENT> def iter_pairs(self): <NEW_LINE> <INDENT> assert self.is_fitted <NEW_LINE> for block1, block2 in self.iter_blocks(): <NEW_LINE> <INDENT> for val1 in block1: <NEW_LINE> <INDENT> for val2 in block2: <NEW_LINE> <INDENT> yield val1, val2 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def iter_indice_pairs(self): <NEW_LINE> <INDENT> assert self.is_fitted <NEW_LINE> for block1, block2 in self.iter_indice_blocks(): <NEW_LINE> <INDENT> for val1 in block1: <NEW_LINE> <INDENT> for val2 in block2: <NEW_LINE> <INDENT> yield val1, val2 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def iter_id_pairs(self): <NEW_LINE> <INDENT> assert self.is_fitted <NEW_LINE> for block1, block2 in self.iter_id_blocks(): <NEW_LINE> <INDENT> for val1 in block1: <NEW_LINE> <INDENT> for val2 in block2: <NEW_LINE> <INDENT> yield val1, val2 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> self.is_fitted = True <NEW_LINE> self._cleanup() | An abstract general blocking object that exposes
the API that should be common to all blocking objects | 62599041c432627299fa4263
class AzureActiveDirectoryValidation(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'jwt_claim_checks': {'key': 'jwtClaimChecks', 'type': 'JwtClaimChecks'}, 'allowed_audiences': {'key': 'allowedAudiences', 'type': '[str]'}, } <NEW_LINE> def __init__( self, *, jwt_claim_checks: Optional["JwtClaimChecks"] = None, allowed_audiences: Optional[List[str]] = None, **kwargs ): <NEW_LINE> <INDENT> super(AzureActiveDirectoryValidation, self).__init__(**kwargs) <NEW_LINE> self.jwt_claim_checks = jwt_claim_checks <NEW_LINE> self.allowed_audiences = allowed_audiences | The configuration settings of the Azure Active Directory token validation flow.
:ivar jwt_claim_checks: The configuration settings of the checks that should be made while
validating the JWT Claims.
:vartype jwt_claim_checks: ~azure.mgmt.web.v2020_12_01.models.JwtClaimChecks
:ivar allowed_audiences: The list of audiences that can make successful
authentication/authorization requests.
:vartype allowed_audiences: list[str] | 625990414e696a045264e783 |
class HostList(GenericDropDownList): <NEW_LINE> <INDENT> _model = m_hosts.HostListModel <NEW_LINE> _label = 'Host select list' <NEW_LINE> def selected_host(self): <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> def choose_host(self, host_name): <NEW_LINE> <INDENT> self.value = host_name | DropDown list of hosts | 62599041287bf620b6272eaa |
class Dataset(_StructuralElement): <NEW_LINE> <INDENT> def __init__(self, name, comment='', attributes=dict(), data=np.empty(0), display_name='', scale_name='', quantity='', unit='', display_unit='', is_scale=False, scales=[] ): <NEW_LINE> <INDENT> _StructuralElement.__init__(self, name, comment, attributes) <NEW_LINE> self.data = data <NEW_LINE> self._display_name = display_name <NEW_LINE> self.quantity = quantity <NEW_LINE> self.unit = unit <NEW_LINE> self._display_unit = display_unit <NEW_LINE> self.is_scale = is_scale <NEW_LINE> self.scale_name = scale_name <NEW_LINE> self.scales = scales <NEW_LINE> <DEDENT> @property <NEW_LINE> def display_data(self): <NEW_LINE> <INDENT> return convert_unit(self.data, self.unit, self.display_unit) <NEW_LINE> <DEDENT> @display_data.setter <NEW_LINE> def display_data(self, value): <NEW_LINE> <INDENT> self.data = convert_unit(value, self.display_unit, self.unit) <NEW_LINE> <DEDENT> @property <NEW_LINE> def display_name(self): <NEW_LINE> <INDENT> return self._display_name if self._display_name else self.name <NEW_LINE> <DEDENT> @display_name.setter <NEW_LINE> def display_name(self, value): <NEW_LINE> <INDENT> self._display_name = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def display_unit(self): <NEW_LINE> <INDENT> return self._display_unit if self._display_unit else self.unit <NEW_LINE> <DEDENT> @display_unit.setter <NEW_LINE> def display_unit(self, value): <NEW_LINE> <INDENT> self._display_unit = value <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.display_unit and not self.unit: <NEW_LINE> <INDENT> return ('ERROR', 'display_unit was set but no unit') <NEW_LINE> <DEDENT> if self.minimum != None and (self.data < self.minimum).any(): <NEW_LINE> <INDENT> return ('WARNING', 'some values are less than the allowed minimum (%s)' % self.minimum) <NEW_LINE> <DEDENT> if self.maximum != None and (self.data > self.maximum).any(): <NEW_LINE> <INDENT> return ('WARNING', 'some values are greater than the allowed maximum (%s)' % self.maximum) <NEW_LINE> <DEDENT> return 'OK' <NEW_LINE> <DEDENT> @property <NEW_LINE> def d(self): <NEW_LINE> <INDENT> return self.data <NEW_LINE> <DEDENT> dd = display_data <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "dataset(name='%s', data=%s, comment='%s', display_name='%s', quantity='%s', unit='%s', display_unit='%s', is_scale='%s', scale_name='%s')" % (self.name, self.data, self.comment, self.display_name, self.quantity, self.unit, self.display_unit, self.is_scale, self.scale_name) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> equal = self.name == other.name and (self.data == other.data).all() and self.comment == other.comment and self.display_name == other.display_name and self.quantity == other.quantity and self.unit == other.unit and self.display_unit == other.display_unit and self.is_scale == other.is_scale and self.scale_name == other.scale_name <NEW_LINE> for ri in range(len(self.scales)): <NEW_LINE> <INDENT> equal = equal and self.scales[ri] == other.scales[ri] <NEW_LINE> <DEDENT> return equal | SDF Dataset | 6259904182261d6c52730826 |
class ListaEncadeada: <NEW_LINE> <INDENT> class No: <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self._value = value <NEW_LINE> self.next = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self.head = None <NEW_LINE> self.tail = None <NEW_LINE> self._size = 0 <NEW_LINE> <DEDENT> def append(self, value): <NEW_LINE> <INDENT> if self.tail is None: <NEW_LINE> <INDENT> self.head = self.tail = ListaEncadeada.No(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.tail.next = ListaEncadeada.No(value) <NEW_LINE> self.tail = self.tail.next <NEW_LINE> <DEDENT> self._size += 1 <NEW_LINE> <DEDENT> def addFirst(self, value): <NEW_LINE> <INDENT> novo = ListaEncadeada.No(value) <NEW_LINE> if self.head is None: <NEW_LINE> <INDENT> self.tail = novo <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> novo.next = self.head <NEW_LINE> <DEDENT> self.head = novo <NEW_LINE> self._size += 1 <NEW_LINE> <DEDENT> def remove(self, valor): <NEW_LINE> <INDENT> while self.head is not None and self.head.value == valor: <NEW_LINE> <INDENT> self.head = self.head.next <NEW_LINE> self._size -= 1 <NEW_LINE> <DEDENT> if self.head is not None: <NEW_LINE> <INDENT> last = self.head <NEW_LINE> i = self.head.next <NEW_LINE> while i is not None: <NEW_LINE> <INDENT> if i.value == valor: <NEW_LINE> <INDENT> last.next = i.next <NEW_LINE> self._size -= 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> last = i <NEW_LINE> <DEDENT> i = i.next <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.tail = last <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.tail = None <NEW_LINE> <DEDENT> <DEDENT> def pop(self): <NEW_LINE> <INDENT> if self.tail is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> value = self.tail.value <NEW_LINE> if self.head is self.tail: <NEW_LINE> <INDENT> self.head = self.tail = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iterator = self.head <NEW_LINE> while iterator.next is not self.tail: <NEW_LINE> <INDENT> iterator = iterator.next <NEW_LINE> <DEDENT> iterator.next = None <NEW_LINE> self.tail = iterator <NEW_LINE> <DEDENT> self._size -= 1 <NEW_LINE> return value <NEW_LINE> <DEDENT> def removeFirst(self): <NEW_LINE> <INDENT> if self.head is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.head is self.tail: <NEW_LINE> <INDENT> self.head = self.tail = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.head = self.head.next <NEW_LINE> <DEDENT> self._size -= 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return self._size <NEW_LINE> <DEDENT> @property <NEW_LINE> def first(self): <NEW_LINE> <INDENT> return self.head <NEW_LINE> <DEDENT> @property <NEW_LINE> def last(self): <NEW_LINE> <INDENT> return self.tail | Implements a linked list (Lista Encadeada) data structure. | 62599042507cdc57c63a6060
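A short usage sketch for the linked list above. The identifiers are Portuguese (`No` is the node class, `novo`/`valor` mean "new"/"value"); the values below were chosen only to exercise `remove()` and `pop()`.

```python
# Illustrative use of ListaEncadeada from the row above.
lista = ListaEncadeada()
for valor in (1, 2, 3, 2):
    lista.append(valor)       # 1 -> 2 -> 3 -> 2
lista.addFirst(0)             # 0 -> 1 -> 2 -> 3 -> 2
lista.remove(2)               # drops every node holding 2: 0 -> 1 -> 3
ultimo = lista.pop()          # returns 3, list is now 0 -> 1
print(lista.size, ultimo)     # 2 3
```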
class UserInfo(models.Model): <NEW_LINE> <INDENT> uName = models.CharField("用户名", max_length=20) <NEW_LINE> uPwd = models.CharField("密码", max_length=100) <NEW_LINE> uEmail = models.EmailField("电子邮件") <NEW_LINE> uPhone = models.DecimalField("手机号", max_digits=11, decimal_places=0) <NEW_LINE> uAddr = models.CharField("收货地址", max_length=200) <NEW_LINE> uTime = models.DateTimeField(auto_now_add=True) | User information table | 62599041379a373c97d9a2ed
class Setup(CLIRunnable): <NEW_LINE> <INDENT> action = 'setup' <NEW_LINE> def execute(self, args): <NEW_LINE> <INDENT> username, secret, endpoint_url, timeout = self.get_user_input() <NEW_LINE> api_key = get_api_key(self.client, username, secret, endpoint_url=endpoint_url) <NEW_LINE> path = '~/.softlayer' <NEW_LINE> if args.get('--config'): <NEW_LINE> <INDENT> path = args.get('--config') <NEW_LINE> <DEDENT> config_path = os.path.expanduser(path) <NEW_LINE> self.env.out( format_output(config_table({'username': username, 'api_key': api_key, 'endpoint_url': endpoint_url, 'timeout': timeout}))) <NEW_LINE> if not confirm('Are you sure you want to write settings to "%s"?' % config_path, default=True): <NEW_LINE> <INDENT> raise CLIAbort('Aborted.') <NEW_LINE> <DEDENT> config = configparser.RawConfigParser() <NEW_LINE> config.read(config_path) <NEW_LINE> try: <NEW_LINE> <INDENT> config.add_section('softlayer') <NEW_LINE> <DEDENT> except configparser.DuplicateSectionError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> config.set('softlayer', 'username', username) <NEW_LINE> config.set('softlayer', 'api_key', api_key) <NEW_LINE> config.set('softlayer', 'endpoint_url', endpoint_url) <NEW_LINE> config_file = os.fdopen(os.open(config_path, (os.O_WRONLY | os.O_CREAT), 0o600), 'w') <NEW_LINE> try: <NEW_LINE> <INDENT> config.write(config_file) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> config_file.close() <NEW_LINE> <DEDENT> return "Configuration Updated Successfully" <NEW_LINE> <DEDENT> def get_user_input(self): <NEW_LINE> <INDENT> defaults = get_settings_from_client(self.client) <NEW_LINE> timeout = defaults['timeout'] <NEW_LINE> while True: <NEW_LINE> <INDENT> username = self.env.input( 'Username [%s]: ' % defaults['username']) or defaults['username'] <NEW_LINE> if username: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> while True: <NEW_LINE> <INDENT> secret = self.env.getpass( 'API Key or Password [%s]: ' % defaults['api_key']) or defaults['api_key'] <NEW_LINE> if secret: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> while True: <NEW_LINE> <INDENT> endpoint_type = self.env.input( 'Endpoint (public|private|custom): ') <NEW_LINE> endpoint_type = endpoint_type.lower() <NEW_LINE> if not endpoint_type: <NEW_LINE> <INDENT> endpoint_url = API_PUBLIC_ENDPOINT <NEW_LINE> break <NEW_LINE> <DEDENT> if endpoint_type == 'public': <NEW_LINE> <INDENT> endpoint_url = API_PUBLIC_ENDPOINT <NEW_LINE> break <NEW_LINE> <DEDENT> elif endpoint_type == 'private': <NEW_LINE> <INDENT> endpoint_url = API_PRIVATE_ENDPOINT <NEW_LINE> break <NEW_LINE> <DEDENT> elif endpoint_type == 'custom': <NEW_LINE> <INDENT> endpoint_url = self.env.input( 'Endpoint URL [%s]: ' % defaults['endpoint_url'] ) or defaults['endpoint_url'] <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return username, secret, endpoint_url, timeout | usage: sl config setup [options]
Setup configuration | 625990426e29344779b01917 |
class FilterDataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, input_dataset, predicate): <NEW_LINE> <INDENT> super(FilterDataset, self).__init__() <NEW_LINE> self._input_dataset = input_dataset <NEW_LINE> @function.Defun(*nest.flatten( sparse.as_dense_types(input_dataset.output_types, input_dataset.output_classes))) <NEW_LINE> def tf_predicate(*args): <NEW_LINE> <INDENT> dense_shapes = sparse.as_dense_shapes(input_dataset.output_shapes, input_dataset.output_classes) <NEW_LINE> for arg, shape in zip(args, nest.flatten(dense_shapes)): <NEW_LINE> <INDENT> arg.set_shape(shape) <NEW_LINE> <DEDENT> nested_args = nest.pack_sequence_as(input_dataset.output_types, args) <NEW_LINE> nested_args = sparse.deserialize_sparse_tensors( nested_args, input_dataset.output_types, input_dataset.output_shapes, input_dataset.output_classes) <NEW_LINE> if _should_unpack_args(nested_args): <NEW_LINE> <INDENT> ret = predicate(*nested_args) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret = predicate(nested_args) <NEW_LINE> <DEDENT> ret = ops.convert_to_tensor(ret, dtype=dtypes.bool) <NEW_LINE> if not (ret.dtype == dtypes.bool and ret.shape.is_compatible_with(tensor_shape.scalar())): <NEW_LINE> <INDENT> raise ValueError("`predicate` must return a scalar boolean tensor.") <NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> self._predicate = tf_predicate <NEW_LINE> self._predicate.add_to_graph(ops.get_default_graph()) <NEW_LINE> <DEDENT> def _as_variant_tensor(self): <NEW_LINE> <INDENT> return gen_dataset_ops.filter_dataset( self._input_dataset._as_variant_tensor(), other_arguments=self._predicate.captured_inputs, predicate=self._predicate, output_types=nest.flatten( sparse.as_dense_types(self.output_types, self.output_classes)), output_shapes=nest.flatten( sparse.as_dense_shapes(self.output_shapes, self.output_classes))) <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_classes(self): <NEW_LINE> <INDENT> return self._input_dataset.output_classes <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_shapes(self): <NEW_LINE> <INDENT> return self._input_dataset.output_shapes <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_types(self): <NEW_LINE> <INDENT> return self._input_dataset.output_types | A `Dataset` that filters its input according to a predicate function. | 6259904226068e7796d4dc0b |
class ImageOps: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def resize_image(cls, image_body, size, fit_to_size=False): <NEW_LINE> <INDENT> image_file = BytesIO(image_body) <NEW_LINE> try: <NEW_LINE> <INDENT> image = Image.open(image_file) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> format = image.format <NEW_LINE> image = cls.adjust_image_orientation(image) <NEW_LINE> if not fit_to_size: <NEW_LINE> <INDENT> image.thumbnail(PROFILE_PICTURE_SIZES[size], Image.ANTIALIAS) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> image = PILOps.fit(image, PROFILE_PICTURE_SIZES[size], method=Image.ANTIALIAS, centering=(0.5, 0.5)) <NEW_LINE> <DEDENT> output = BytesIO() <NEW_LINE> if format.lower() == 'jpg': <NEW_LINE> <INDENT> format = 'jpeg' <NEW_LINE> <DEDENT> image.save(output, format=format, quality=95) <NEW_LINE> return output <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def adjust_image_orientation(cls, image): <NEW_LINE> <INDENT> if hasattr(image, '_getexif'): <NEW_LINE> <INDENT> exif = image._getexif() <NEW_LINE> if exif: <NEW_LINE> <INDENT> for tag, value in list(exif.items()): <NEW_LINE> <INDENT> decoded = TAGS.get(tag, tag) <NEW_LINE> if decoded == 'Orientation': <NEW_LINE> <INDENT> if value == 6: <NEW_LINE> <INDENT> image = image.rotate(-90) <NEW_LINE> <DEDENT> if value == 8: <NEW_LINE> <INDENT> image = image.rotate(90) <NEW_LINE> <DEDENT> if value == 3: <NEW_LINE> <INDENT> image = image.rotate(180) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return image <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def image_size(cls, url, headers=None): <NEW_LINE> <INDENT> if not headers: headers = {} <NEW_LINE> req = urllib.request.Request(url, data=None, headers=headers) <NEW_LINE> file = urllib.request.urlopen(req) <NEW_LINE> size = file.headers.get("content-length") <NEW_LINE> if size: <NEW_LINE> <INDENT> size = int(size) <NEW_LINE> <DEDENT> p = ImageFile.Parser() <NEW_LINE> while True: <NEW_LINE> <INDENT> data = file.read(1024) <NEW_LINE> if not data: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> p.feed(data) <NEW_LINE> if p.image: <NEW_LINE> <INDENT> return p.image.size <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> file.close() <NEW_LINE> return None, None | Module that holds all image operations. Since there's no state,
everything is a classmethod. | 625990428a43f66fc4bf3456 |
class FiberPrinter: <NEW_LINE> <INDENT> def __init__(self, val): <NEW_LINE> <INDENT> self.val = val <NEW_LINE> state = self.val['state_'] <NEW_LINE> d = gdb.types.make_enum_dict(state.type) <NEW_LINE> d = dict((v, k) for k, v in d.items()) <NEW_LINE> self.state = d[int(state)] <NEW_LINE> <DEDENT> def state_to_string(self): <NEW_LINE> <INDENT> if self.state == "folly::fibers::Fiber::INVALID": <NEW_LINE> <INDENT> return "Invalid" <NEW_LINE> <DEDENT> if self.state == "folly::fibers::Fiber::NOT_STARTED": <NEW_LINE> <INDENT> return "Not started" <NEW_LINE> <DEDENT> if self.state == "folly::fibers::Fiber::READY_TO_RUN": <NEW_LINE> <INDENT> return "Ready to run" <NEW_LINE> <DEDENT> if self.state == "folly::fibers::Fiber::RUNNING": <NEW_LINE> <INDENT> return "Running" <NEW_LINE> <DEDENT> if self.state == "folly::fibers::Fiber::AWAITING": <NEW_LINE> <INDENT> return "Awaiting" <NEW_LINE> <DEDENT> if self.state == "folly::fibers::Fiber::AWAITING_IMMEDIATE": <NEW_LINE> <INDENT> return "Awaiting immediate" <NEW_LINE> <DEDENT> if self.state == "folly::fibers::Fiber::YIELDED": <NEW_LINE> <INDENT> return "Yielded" <NEW_LINE> <DEDENT> return "Unknown" <NEW_LINE> <DEDENT> def backtrace_available(self): <NEW_LINE> <INDENT> return self.state != "folly::fibers::Fiber::INVALID" and self.state != "folly::fibers::Fiber::NOT_STARTED" and self.state != "folly::fibers::Fiber::RUNNING" <NEW_LINE> <DEDENT> def children(self): <NEW_LINE> <INDENT> result = collections.OrderedDict() <NEW_LINE> result["state"] = self.state_to_string() <NEW_LINE> result["backtrace available"] = self.backtrace_available() <NEW_LINE> return result.items() <NEW_LINE> <DEDENT> def to_string(self): <NEW_LINE> <INDENT> return "folly::fibers::Fiber" <NEW_LINE> <DEDENT> def display_hint(self): <NEW_LINE> <INDENT> return "folly::fibers::Fiber" | Print a folly::fibers::Fiber | 625990428da39b475be044b3 |
class test_sort_and_remove(unittest.TestCase): <NEW_LINE> <INDENT> def test_sort_(self): <NEW_LINE> <INDENT> input_ = [9, 4, -1, 429, -62, 3, 0, 9, -1000, 17] <NEW_LINE> output_ = [-1000, -62, -1, 0, 3, 4, 9, 9, 17, 429] <NEW_LINE> self.assertEqual(sort_(input_), output_) <NEW_LINE> <DEDENT> def test_remove_(self): <NEW_LINE> <INDENT> input_ = [9, 4, -1, 429, -62, 3, 0, 9, -1000, 17] <NEW_LINE> output_ = [9, 4, 429, 3, 0, 9, 17] <NEW_LINE> self.assertEqual(remove_(input_), output_) <NEW_LINE> <DEDENT> def test_sort_and_remove(self): <NEW_LINE> <INDENT> input_ = [9, 4, -1, 429, -62, 3, 0, 9, -1000, 17] <NEW_LINE> output_ = [0, 3, 4, 9, 9, 17, 429] <NEW_LINE> self.assertEqual(sort_and_remove(input_), output_) | Performs unit tests on sort_, remove_, and sort_and_remove
| 6259904230dc7b76659a0af4 |
class PKCS1Primitives(ISO18033Primitives): <NEW_LINE> <INDENT> def OS2IP(self, X): <NEW_LINE> <INDENT> xLen = len(X) <NEW_LINE> x = 0 <NEW_LINE> for i in range(0, xLen): <NEW_LINE> <INDENT> char2int = ord(X[i]) <NEW_LINE> pow = xLen - i - 1 <NEW_LINE> x = x + 256 ** pow * char2int <NEW_LINE> if self.explain: <NEW_LINE> <INDENT> print('256**', pow, '* ord(' + X[i] + ')->', char2int, '=', 256 ** pow * char2int, '|', 'x =', x) <NEW_LINE> <DEDENT> <DEDENT> return x <NEW_LINE> <DEDENT> def I2OSP(self, x, xLen): <NEW_LINE> <INDENT> assert x > 0, 'x should be non-negative integer' <NEW_LINE> assert x <= 256 ** xLen, 'integer too large' <NEW_LINE> x = int(x) <NEW_LINE> X = '' <NEW_LINE> for char in range(0, xLen): <NEW_LINE> <INDENT> if self.explain: <NEW_LINE> <INDENT> print(chr(x % 256), x) <NEW_LINE> <DEDENT> X = X + chr(x % 256) <NEW_LINE> x = x - x % 256 <NEW_LINE> x = x / 256 <NEW_LINE> <DEDENT> X = X[::-1] <NEW_LINE> return X | Defined in section 4 of RSA PKCS#1 | 62599042711fe17d825e15ff |
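A worked example of the two conversions above: `OS2IP` reads an octet string as a base-256 integer with the most significant octet first, and `I2OSP` inverts it. Plain arithmetic is enough here, so no instance of the class is needed.

```python
# OS2IP("AB"): base 256, most significant octet first.
x = 256**1 * ord("A") + 256**0 * ord("B")   # 256*65 + 66
assert x == 16706

# I2OSP(16706, 2): peel off base-256 digits, then reverse the string.
low, high = 16706 % 256, (16706 // 256) % 256   # 66 ('B'), 65 ('A')
assert chr(high) + chr(low) == "AB"
```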
class NameHandler(BaseHandler): <NEW_LINE> <INDENT> @required_login <NEW_LINE> def post(self, *args, **kwargs): <NEW_LINE> <INDENT> user_id = self.session.data["user_id"] <NEW_LINE> name = self.json_args.get("name") <NEW_LINE> if name in (None, ""): <NEW_LINE> <INDENT> return self.write(dict(errcode=RET.PARAMERR,errmsg="params error")) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.db.execute_rowcount("update ih_user_profile set up_name=%s where up_user_id=%s",name,user_id) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error(e) <NEW_LINE> return self.write(dict(errcode=RET.DBERR, errmsg="name hae exist")) <NEW_LINE> <DEDENT> self.session.data["name"] = name <NEW_LINE> try: <NEW_LINE> <INDENT> self.session.sava() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error(e) <NEW_LINE> <DEDENT> self.write(dict(errcode=RET.OK,errmsg="OK")) | Modify the user name | 62599042e64d504609df9d33
class Frenzied(SpecialRule): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Frenzied, self).__init__(name="Frenzied") | Rule book p.69
Frenzied troops have the Extra Attack and Immune to Psychology special rules | 62599042d164cc617582223c |
class Https(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Switch = None <NEW_LINE> self.Http2 = None <NEW_LINE> self.OcspStapling = None <NEW_LINE> self.VerifyClient = None <NEW_LINE> self.CertInfo = None <NEW_LINE> self.ClientCertInfo = None <NEW_LINE> self.Spdy = None <NEW_LINE> self.SslStatus = None <NEW_LINE> self.Hsts = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Switch = params.get("Switch") <NEW_LINE> self.Http2 = params.get("Http2") <NEW_LINE> self.OcspStapling = params.get("OcspStapling") <NEW_LINE> self.VerifyClient = params.get("VerifyClient") <NEW_LINE> if params.get("CertInfo") is not None: <NEW_LINE> <INDENT> self.CertInfo = ServerCert() <NEW_LINE> self.CertInfo._deserialize(params.get("CertInfo")) <NEW_LINE> <DEDENT> if params.get("ClientCertInfo") is not None: <NEW_LINE> <INDENT> self.ClientCertInfo = ClientCert() <NEW_LINE> self.ClientCertInfo._deserialize(params.get("ClientCertInfo")) <NEW_LINE> <DEDENT> self.Spdy = params.get("Spdy") <NEW_LINE> self.SslStatus = params.get("SslStatus") <NEW_LINE> if params.get("Hsts") is not None: <NEW_LINE> <INDENT> self.Hsts = Hsts() <NEW_LINE> self.Hsts._deserialize(params.get("Hsts")) <NEW_LINE> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | Domain name HTTPS configuration.
| 62599042d53ae8145f919722 |
class OsWalk(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(OsWalk, self).__init__() <NEW_LINE> <DEDENT> def walk(self, directory): <NEW_LINE> <INDENT> return os.walk(directory) | An OsWalk object so that unit-test mocks will work on it. Other
routines may need to be added as they are needed | 6259904207d97122c4217f64 |
class QuestionView(ListCreateAPIView): <NEW_LINE> <INDENT> serializer_class = QuestionSerializer <NEW_LINE> queryset = Question.objects.all() | Question API endpoint | 6259904226238365f5fade1e |
class profile_loader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.events = event_handler() <NEW_LINE> self.t = tab_completer.tabCompleter() <NEW_LINE> readline.set_completer_delims('\t') <NEW_LINE> readline.parse_and_bind("tab: complete") <NEW_LINE> readline.set_completer(self.t.pathCompleter) <NEW_LINE> <DEDENT> def load_file(self): <NEW_LINE> <INDENT> file_path = raw_input("Choose Test Profile [.csv] > ") <NEW_LINE> print(" File Chosen : {}".format(file_path)) <NEW_LINE> self.file = open(file_path) <NEW_LINE> self.profile = csv.DictReader(self.file) <NEW_LINE> self.read_row() <NEW_LINE> <DEDENT> def set_start_time(self): <NEW_LINE> <INDENT> self.start_time = time.time() <NEW_LINE> <DEDENT> def run_continuous(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> while(1): <NEW_LINE> <INDENT> self.time = time.time() - self.start_time <NEW_LINE> if(self.time > self.test_time): <NEW_LINE> <INDENT> self.events.function['set_curr'](current=self.current) <NEW_LINE> volt, curr = self.events.function['read_load']() <NEW_LINE> self.print_profile_row(volt, curr) <NEW_LINE> self.read_row() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> print('Profile end') <NEW_LINE> print('Reseting Profile') <NEW_LINE> self.reset_profile() <NEW_LINE> self.set_start_time() <NEW_LINE> self.run_continuous() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> print('Wrong csv row name: Try step,time,current') <NEW_LINE> <DEDENT> <DEDENT> def read_row(self): <NEW_LINE> <INDENT> row = next(self.profile) <NEW_LINE> try: <NEW_LINE> <INDENT> self.step = int(row['step']) <NEW_LINE> self.test_time = float(row['time']) <NEW_LINE> self.current = float(row['current']) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> print('Wrong csv row name: Try step,time,current') <NEW_LINE> <DEDENT> <DEDENT> def print_profile_row(self, vload, iload): <NEW_LINE> <INDENT> print('{:2.2f}s| step = {} | ts = {:2.2f}s | Isp = {:2.2f}A | Vload = {:2.2f}V | Iload = {:2.2f}A'.format( self.time, self.step, self.test_time, self.current, vload, iload)) <NEW_LINE> <DEDENT> def reset_profile(self): <NEW_LINE> <INDENT> self.file.seek(0) <NEW_LINE> self.profile = csv.DictReader(self.file) <NEW_LINE> self.read_row() | Load and run through test profile defined in a .csv. | 62599042097d151d1a2c232d |
class MotorPacketBits(ctypes.LittleEndianStructure): <NEW_LINE> <INDENT> _fields_ = [("motor_id", ctypes.c_uint8, 2), ("negative", ctypes.c_uint8, 1), ("speed", ctypes.c_uint8, 5)] | The bits for the packet sent to the motors.
Note that the mbed's processor is little endian, which is why a
``LittleEndianStructure`` is used.
See Also
--------
MotorPacket | 6259904210dbd63aa1c71e9d |
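The "See Also: MotorPacket" note suggests the bitfield above is paired with a raw single-byte view; the sketch below shows one plausible pairing. The `MotorPacket` union here is an assumption for illustration, not the library's own definition.

```python
import ctypes

class MotorPacket(ctypes.Union):          # assumed companion type, illustration only
    _fields_ = [("b", MotorPacketBits),
                ("as_byte", ctypes.c_uint8)]

pkt = MotorPacket()
pkt.b.motor_id = 2     # low 2 bits
pkt.b.negative = 1     # next bit: direction/sign
pkt.b.speed = 17       # top 5 bits
raw = bytes([pkt.as_byte])   # 0b10001110 -> one byte to send to the mbed
```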
class FdbInterface(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> @catch_exceptions <NEW_LINE> def add(mac, dev, dst_ip=None, namespace=None, **kwargs): <NEW_LINE> <INDENT> priv_ip_lib.add_bridge_fdb(mac, dev, dst_ip=dst_ip, namespace=namespace, **kwargs) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @catch_exceptions <NEW_LINE> def append(mac, dev, dst_ip=None, namespace=None, **kwargs): <NEW_LINE> <INDENT> priv_ip_lib.append_bridge_fdb(mac, dev, dst_ip=dst_ip, namespace=namespace, **kwargs) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @catch_exceptions <NEW_LINE> def replace(mac, dev, dst_ip=None, namespace=None, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> priv_ip_lib.delete_bridge_fdb(mac, dev, namespace=namespace, **kwargs) <NEW_LINE> <DEDENT> except (RuntimeError, OSError, netlink_exceptions.NetlinkError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> priv_ip_lib.add_bridge_fdb(mac, dev, dst_ip=dst_ip, namespace=namespace, **kwargs) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @catch_exceptions <NEW_LINE> def delete(mac, dev, dst_ip=None, namespace=None, **kwargs): <NEW_LINE> <INDENT> priv_ip_lib.delete_bridge_fdb(mac, dev, dst_ip=dst_ip, namespace=namespace, **kwargs) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def show(dev=None, namespace=None, **kwargs): <NEW_LINE> <INDENT> def find_device_name(ifindex, devices): <NEW_LINE> <INDENT> for device in (device for device in devices if device['index'] == ifindex): <NEW_LINE> <INDENT> return device['name'] <NEW_LINE> <DEDENT> <DEDENT> ret = collections.defaultdict(list) <NEW_LINE> fdbs = priv_ip_lib.list_bridge_fdb(namespace=namespace, **kwargs) <NEW_LINE> devices = ip_lib.get_devices_info(namespace) <NEW_LINE> for fdb in fdbs: <NEW_LINE> <INDENT> name = find_device_name(fdb['ifindex'], devices) <NEW_LINE> if dev and dev != name: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> master = find_device_name(ip_lib.get_attr(fdb, 'NDA_MASTER'), devices) <NEW_LINE> fdb_info = {'mac': ip_lib.get_attr(fdb, 'NDA_LLADDR'), 'master': master, 'vlan': ip_lib.get_attr(fdb, 'NDA_VLAN'), 'dst_ip': ip_lib.get_attr(fdb, 'NDA_DST')} <NEW_LINE> ret[name].append(fdb_info) <NEW_LINE> <DEDENT> return ret | Provide basic functionality to edit the FDB table | 62599042d99f1b3c44d06962 |
class amg(pyamgcl_ext.amg): <NEW_LINE> <INDENT> def __init__(self, A, prm={}): <NEW_LINE> <INDENT> Acsr = A.tocsr() <NEW_LINE> self.shape = A.shape <NEW_LINE> pyamgcl_ext.amg.__init__(self, Acsr.indptr, Acsr.indices, Acsr.data, prm) | Algebraic multigrid hierarchy to be used as a preconditioner | 625990421d351010ab8f4de3 |
class MatchesDetail(View): <NEW_LINE> <INDENT> def get(self,request,match_id): <NEW_LINE> <INDENT> match = get_object_or_404(Matches, pk=match_id) <NEW_LINE> context = {'match': match} <NEW_LINE> return render(request, 'matches/detail.html', context) | View method for 'cricteam' app's matches' details page.
:param request:
:param match_id:
:return: HttpResponse | 6259904294891a1f408ba059 |
class UsageTrigger(InstanceResource): <NEW_LINE> <INDENT> def update(self, **kwargs): <NEW_LINE> <INDENT> return self.parent.update(self.name, **kwargs) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> return self.parent.delete(self.name) | A usage trigger resource | 625990421d351010ab8f4de4 |
class SimulationResult: <NEW_LINE> <INDENT> def __init__(self, molecule_collection): <NEW_LINE> <INDENT> self.molecule_collection = molecule_collection <NEW_LINE> self.trace = [] <NEW_LINE> self.time = [] <NEW_LINE> <DEDENT> def add_timepoint(self, time): <NEW_LINE> <INDENT> self.trace.append(self.molecule_collection.count()) <NEW_LINE> self.time.append(time) | handles and stores a simulation result for one species | 62599042b5575c28eb71362c |
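A tiny usage sketch for the result holder above; the collection class below is a hypothetical stand-in that only provides the `count()` method the row's code calls.

```python
class MoleculeCollection:                 # hypothetical stand-in with count()
    def __init__(self, n):
        self.n = n
    def count(self):
        return self.n

coll = MoleculeCollection(100)
result = SimulationResult(coll)
for t in (0.0, 0.5, 1.0):
    coll.n -= 10                          # pretend the simulation consumed molecules
    result.add_timepoint(t)
print(result.time, result.trace)          # [0.0, 0.5, 1.0] [90, 80, 70]
```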
class DescribeFavorRepositoryPersonalRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RepoName = None <NEW_LINE> self.Limit = None <NEW_LINE> self.Offset = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RepoName = params.get("RepoName") <NEW_LINE> self.Limit = params.get("Limit") <NEW_LINE> self.Offset = params.get("Offset") | DescribeFavorRepositoryPersonal request parameter structure
| 6259904282261d6c52730827 |
class mrp_replacement_resource(models.Model): <NEW_LINE> <INDENT> _name = 'mrp.replacement.resource' <NEW_LINE> _description = 'Replacement resource' <NEW_LINE> _rec_name = 'resource_id' <NEW_LINE> resource_id = fields.Many2one('mrp.resource', string='Resource', required=True, ondelete='cascade') <NEW_LINE> new_resource_id = fields.Many2one('mrp.resource', string='New Resource', required=True, ondelete='cascade') <NEW_LINE> line_ids = fields.One2many('mrp.replacement.resource.line', 'modif_resource_id', string='Line') <NEW_LINE> is_valid = fields.Boolean(string='Valid', default=False) <NEW_LINE> validation_date = fields.Date(string='Validation Date') <NEW_LINE> @api.onchange('resource_id') <NEW_LINE> def _onchange_resource_id(self): <NEW_LINE> <INDENT> list_resource = [] <NEW_LINE> if self.resource_id: <NEW_LINE> <INDENT> rl_resource_obj = self.env['mrp.rl.resource'] <NEW_LINE> rl_obj = self.env['mrp.routing.line'] <NEW_LINE> rl_resource_ids = rl_resource_obj.search([('resource_id', '=', self.resource_id.id)]) <NEW_LINE> for rl_resource in rl_resource_ids: <NEW_LINE> <INDENT> if rl_resource.rl_category_id: <NEW_LINE> <INDENT> routing_line_ids = rl_obj.search([('rl_resource_category_ids', '=', rl_resource.rl_category_id.id)]) <NEW_LINE> for routing_line in routing_line_ids: <NEW_LINE> <INDENT> if routing_line.product_id: <NEW_LINE> <INDENT> dict_replacement_resource_line = {'routing_id':routing_line.routing_id.id, 'product_id': routing_line.product_id.id, 'routing_line_id': routing_line.id, 'resource_valid_id': rl_resource.id} <NEW_LINE> list_resource.append((0, 0, dict_replacement_resource_line)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.new_resource_id = False <NEW_LINE> <DEDENT> self.line_ids = list_resource <NEW_LINE> <DEDENT> @api.multi <NEW_LINE> def action_validate(self): <NEW_LINE> <INDENT> if not self.resource_id: <NEW_LINE> <INDENT> raise except_orm(_('There is not resource.')) <NEW_LINE> <DEDENT> if not self.new_resource_id: <NEW_LINE> <INDENT> raise except_orm(_('There is not new resource')) <NEW_LINE> <DEDENT> if not self.line_ids: <NEW_LINE> <INDENT> raise except_orm(_('There is not lines.')) <NEW_LINE> <DEDENT> rl_res_rs = self.env['mrp.rl.resource'] <NEW_LINE> for line in self.line_ids: <NEW_LINE> <INDENT> if line.resource_valid_id: <NEW_LINE> <INDENT> rl_res_rs += line.resource_valid_id <NEW_LINE> <DEDENT> <DEDENT> if rl_res_rs: <NEW_LINE> <INDENT> rl_res_rs.write({'resource_id': self.new_resource_id.id}) <NEW_LINE> self.write({'is_valid':True}) <NEW_LINE> <DEDENT> self.write({'validation_date': fields.Date.today()}) <NEW_LINE> return {'type': 'ir.actions.act_window_close'} | Replacement resource | 62599042d7e4931a7ef3d33c |
class Pipeline(object): <NEW_LINE> <INDENT> def __init__(self, input_stage): <NEW_LINE> <INDENT> self._input_stage = input_stage <NEW_LINE> self._output_stages = input_stage.getLeaves() <NEW_LINE> self._input_stage.build() <NEW_LINE> <DEDENT> def put(self, task): <NEW_LINE> <INDENT> self._input_stage.put(task) <NEW_LINE> <DEDENT> def get(self, timeout=None): <NEW_LINE> <INDENT> result = None <NEW_LINE> for stage in self._output_stages: <NEW_LINE> <INDENT> result = stage.get(timeout) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def results(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> result = self.get() <NEW_LINE> if result is None: break <NEW_LINE> yield result | A pipeline of stages. | 6259904245492302aabfd7a1 |
class Model(object): <NEW_LINE> <INDENT> def __init__(self, input_dim, *layers): <NEW_LINE> <INDENT> self.x = PlaceHolder(input_dim) <NEW_LINE> last_layer = self.x <NEW_LINE> for layer in layers: <NEW_LINE> <INDENT> if layer == "linear": <NEW_LINE> <INDENT> last_layer = Linear(32, last_layer) <NEW_LINE> <DEDENT> if layer == "relu": <NEW_LINE> <INDENT> last_layer = Relu(last_layer) <NEW_LINE> <DEDENT> if layer == "conv": <NEW_LINE> <INDENT> last_layer = Conv(8, 3, 3, precursor=last_layer) <NEW_LINE> <DEDENT> if layer == "maxpool": <NEW_LINE> <INDENT> last_layer = Maxpool(last_layer) <NEW_LINE> <DEDENT> if layer == "flatten": <NEW_LINE> <INDENT> last_layer = Flatten(last_layer) <NEW_LINE> <DEDENT> <DEDENT> self.pred_layer = Linear(10, last_layer) <NEW_LINE> self.loss_layer = SoftMaxXent(self.pred_layer) <NEW_LINE> <DEDENT> def train(self, l_rate, x_batch, y_batch): <NEW_LINE> <INDENT> self.x.load(x_batch) <NEW_LINE> self.loss_layer.fpass(y_batch) <NEW_LINE> self.loss_layer.bprop(l_rate, y_batch) <NEW_LINE> pass <NEW_LINE> <DEDENT> def evaluate(self, x_batch, y_batch): <NEW_LINE> <INDENT> self.x.load(x_batch) <NEW_LINE> self.pred_layer.fpass() <NEW_LINE> return np.mean(np.equal(np.argmax(self.pred_layer.output, 1), np.argmax(y_batch, 1)).astype(float)) | Used to create complete models. Persistently stores variable
values between updates. Exposes handles for training the model
and for checking accuracy | 625990420fa83653e46f61a0 |
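A sketch of how the `Model` above might be driven. The layer classes (`PlaceHolder`, `Linear`, `Relu`, ...) are assumed to come from the same module as the row's code, and the data below is synthetic, purely for illustration.

```python
import numpy as np

# Hypothetical driver for the from-scratch Model above.
rng = np.random.default_rng(0)
x_batch = rng.normal(size=(64, 784))                 # 64 flattened 28x28 inputs
y_batch = np.eye(10)[rng.integers(0, 10, size=64)]   # one-hot labels

model = Model(784, "linear", "relu")    # input -> Linear(32) -> ReLU -> Linear(10) -> softmax/xent
for step in range(100):
    model.train(0.01, x_batch, y_batch)
print("batch accuracy:", model.evaluate(x_batch, y_batch))
```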
class WarningsValueListEntry(_messages.Message): <NEW_LINE> <INDENT> class CodeValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> CLEANUP_FAILED = 0 <NEW_LINE> DEPRECATED_RESOURCE_USED = 1 <NEW_LINE> DISK_SIZE_LARGER_THAN_IMAGE_SIZE = 2 <NEW_LINE> INJECTED_KERNELS_DEPRECATED = 3 <NEW_LINE> NEXT_HOP_ADDRESS_NOT_ASSIGNED = 4 <NEW_LINE> NEXT_HOP_CANNOT_IP_FORWARD = 5 <NEW_LINE> NEXT_HOP_INSTANCE_NOT_FOUND = 6 <NEW_LINE> NEXT_HOP_INSTANCE_NOT_ON_NETWORK = 7 <NEW_LINE> NEXT_HOP_NOT_RUNNING = 8 <NEW_LINE> NOT_CRITICAL_ERROR = 9 <NEW_LINE> NO_RESULTS_ON_PAGE = 10 <NEW_LINE> REQUIRED_TOS_AGREEMENT = 11 <NEW_LINE> RESOURCE_NOT_DELETED = 12 <NEW_LINE> SINGLE_INSTANCE_PROPERTY_TEMPLATE = 13 <NEW_LINE> UNREACHABLE = 14 <NEW_LINE> <DEDENT> class DataValueListEntry(_messages.Message): <NEW_LINE> <INDENT> key = _messages.StringField(1) <NEW_LINE> value = _messages.StringField(2) <NEW_LINE> <DEDENT> code = _messages.EnumField('CodeValueValuesEnum', 1) <NEW_LINE> data = _messages.MessageField('DataValueListEntry', 2, repeated=True) <NEW_LINE> message = _messages.StringField(3) | A WarningsValueListEntry object.
Enums:
CodeValueValuesEnum: [Output Only] A warning code, if applicable. For
example, Compute Engine returns NO_RESULTS_ON_PAGE if there are no
results in the response.
Messages:
DataValueListEntry: A DataValueListEntry object.
Fields:
code: [Output Only] A warning code, if applicable. For example, Compute
Engine returns NO_RESULTS_ON_PAGE if there are no results in the
response.
data: [Output Only] Metadata about this warning in key: value format.
For example: "data": [ { "key": "scope", "value": "zones/us-east1-d" }
message: [Output Only] A human-readable description of the warning code. | 6259904207f4c71912bb06f8 |
class TemporaryRedirect(Response): <NEW_LINE> <INDENT> status_code = 307 | 307 Temporary Redirect
Should be used to tell clients to resubmit the request to another URI.
HTTP/1.1 introduced the 307 status code to reiterate the originally
intended semantics of the 302 ("Found") status code. A 307 response
indicates that the REST API is not going to process the client's request.
Instead, the client should resubmit the request to the URI specified by
the response message's Location header.
A REST API can use this status code to assign a temporary URI to the
client's requested resource. For example, a 307 response can be used to
shift a client request over to another host. | 6259904230dc7b76659a0af6 |
class TestPath(object): <NEW_LINE> <INDENT> def test_regular_isdir_isfile_islink(self): <NEW_LINE> <INDENT> host = test_base.ftp_host_factory() <NEW_LINE> testdir = "/home/sschwarzer" <NEW_LINE> host.chdir(testdir) <NEW_LINE> assert not host.path.isdir("notthere") <NEW_LINE> assert not host.path.isfile("notthere") <NEW_LINE> assert not host.path.islink("notthere") <NEW_LINE> assert not host.path.isdir("/notthere/notthere") <NEW_LINE> assert not host.path.isfile("/notthere/notthere") <NEW_LINE> assert not host.path.islink("/notthere/notthere") <NEW_LINE> assert host.path.isdir(testdir) <NEW_LINE> assert not host.path.isfile(testdir) <NEW_LINE> assert not host.path.islink(testdir) <NEW_LINE> testfile = "/home/sschwarzer/index.html" <NEW_LINE> assert not host.path.isdir(testfile) <NEW_LINE> assert host.path.isfile(testfile) <NEW_LINE> assert not host.path.islink(testfile) <NEW_LINE> testlink = "/home/sschwarzer/osup" <NEW_LINE> assert not host.path.isdir(testlink) <NEW_LINE> assert not host.path.isfile(testlink) <NEW_LINE> assert host.path.islink(testlink) <NEW_LINE> <DEDENT> def test_workaround_for_spaces(self): <NEW_LINE> <INDENT> host = test_base.ftp_host_factory() <NEW_LINE> testdir = "/home/sschwarzer" <NEW_LINE> host.chdir(testdir) <NEW_LINE> testfile = "/home/dir with spaces/file with spaces" <NEW_LINE> assert not host.path.isdir(testfile) <NEW_LINE> assert host.path.isfile(testfile) <NEW_LINE> assert not host.path.islink(testfile) <NEW_LINE> <DEDENT> def test_inaccessible_home_directory_and_whitespace_workaround(self): <NEW_LINE> <INDENT> host = test_base.ftp_host_factory( session_factory=SessionWithInaccessibleLoginDirectory) <NEW_LINE> with pytest.raises(ftputil.error.InaccessibleLoginDirError): <NEW_LINE> <INDENT> host._dir("/home dir") <NEW_LINE> <DEDENT> <DEDENT> def test_isdir_isfile_islink_with_exception(self): <NEW_LINE> <INDENT> host = test_base.ftp_host_factory(ftp_host_class=FailingFTPHost) <NEW_LINE> testdir = "/home/sschwarzer" <NEW_LINE> host.chdir(testdir) <NEW_LINE> FTPOSError = ftputil.error.FTPOSError <NEW_LINE> with pytest.raises(FTPOSError): <NEW_LINE> <INDENT> host.path.isdir("index.html") <NEW_LINE> <DEDENT> with pytest.raises(FTPOSError): <NEW_LINE> <INDENT> host.path.isfile("index.html") <NEW_LINE> <DEDENT> with pytest.raises(FTPOSError): <NEW_LINE> <INDENT> host.path.islink("index.html") <NEW_LINE> <DEDENT> <DEDENT> def test_exists(self): <NEW_LINE> <INDENT> host = test_base.ftp_host_factory() <NEW_LINE> testdir = "/home/sschwarzer" <NEW_LINE> host.chdir(testdir) <NEW_LINE> assert host.path.exists("index.html") <NEW_LINE> assert not host.path.exists("notthere") <NEW_LINE> host = test_base.ftp_host_factory(ftp_host_class=FailingFTPHost) <NEW_LINE> with pytest.raises(ftputil.error.FTPOSError): <NEW_LINE> <INDENT> host.path.exists("index.html") | Test operations in `FTPHost.path`. | 62599042d53ae8145f919724 |
class EpistasisClassifierMixin: <NEW_LINE> <INDENT> def _fit_additive(self, X=None, y=None): <NEW_LINE> <INDENT> self.Additive = EpistasisLinearRegression( order=1, model_type=self.model_type) <NEW_LINE> self.Additive.add_gpm(self.gpm) <NEW_LINE> self.Additive.epistasis = EpistasisMap( sites=self.Additive.Xcolumns, ) <NEW_LINE> self.Additive.fit(X=X, y=y) <NEW_LINE> return self <NEW_LINE> <DEDENT> def _fit_classifier(self, X=None, y=None): <NEW_LINE> <INDENT> add_coefs = self.Additive.epistasis.values <NEW_LINE> add_X = self.Additive._X(data=X) <NEW_LINE> X = add_X * add_coefs <NEW_LINE> y = binarize(y.reshape(1, -1), self.threshold)[0] <NEW_LINE> self.classes = y <NEW_LINE> super().fit(X=X, y=y) <NEW_LINE> return self <NEW_LINE> <DEDENT> def fit_transform(self, X=None, y=None, **kwargs): <NEW_LINE> <INDENT> self.fit(X=X, y=y, **kwargs) <NEW_LINE> ypred = self.predict(X=X) <NEW_LINE> gpm = GenotypePhenotypeMap.read_dataframe( dataframe=self.gpm.data[ypred==1], wildtype=self.gpm.wildtype, mutations=self.gpm.mutations ) <NEW_LINE> return gpm <NEW_LINE> <DEDENT> def predict(self, X=None): <NEW_LINE> <INDENT> Xadd = self.Additive._X(data=X) <NEW_LINE> X = Xadd * self.Additive.epistasis.values <NEW_LINE> return super().predict(X=X) <NEW_LINE> <DEDENT> def predict_transform(self, X=None, y=None): <NEW_LINE> <INDENT> x = self.predict(X=X) <NEW_LINE> y[x <= 0.5] = self.threshold <NEW_LINE> return y <NEW_LINE> <DEDENT> def predict_log_proba(self, X=None): <NEW_LINE> <INDENT> Xadd = self.Additive._X(data=X) <NEW_LINE> X = Xadd * self.Additive.epistasis.values <NEW_LINE> return super().predict_log_proba(X) <NEW_LINE> <DEDENT> def predict_proba(self, X=None): <NEW_LINE> <INDENT> Xadd = self.Additive._X(data=X) <NEW_LINE> X = Xadd * self.Additive.epistasis.values <NEW_LINE> return super().predict_proba(X=X) | A Mixin class for epistasis classifiers
| 6259904207d97122c4217f66 |
class ExpectRemoteRef(object): <NEW_LINE> <INDENT> def __init__(self, rrclass): <NEW_LINE> <INDENT> self.rrclass = rrclass <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.rrclass) | Define an expected RemoteReference in the args to an L{Expect} class | 6259904207f4c71912bb06f9 |
class LocationInfoAction(Action): <NEW_LINE> <INDENT> name = "info" <NEW_LINE> target_type = "location" <NEW_LINE> @classmethod <NEW_LINE> def setup_cl_args(cls, parser): <NEW_LINE> <INDENT> parser.add_argument( "code", nargs="?", default=current_location_code(), help="Print info for the supplied location code." ) <NEW_LINE> <DEDENT> def __init__(self, code): <NEW_LINE> <INDENT> super(LocationInfoAction, self).__init__(code) <NEW_LINE> self._code = code <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> description = 'Description' <NEW_LINE> timezone = 'Timezone' <NEW_LINE> lat_long = 'Lat/Long' <NEW_LINE> active = 'Active' <NEW_LINE> code = 'Code' <NEW_LINE> host = 'Host' <NEW_LINE> filesystem_root = 'Filesystem root' <NEW_LINE> output = Output() <NEW_LINE> output.header_names = [ code, description, host, filesystem_root, timezone, lat_long, ] <NEW_LINE> output.add_item( { code: self.location.code, description: self.location.description, host: self.location.host, filesystem_root: self.location.filesystem_root, timezone: self.location.timezone, lat_long: "{l.latitude}, {l.longitude}".format(l=self.location), }, color_all=Style.bright, ) <NEW_LINE> title = " {l.name} ".format(l=self.location) <NEW_LINE> if not self.location.active: <NEW_LINE> <INDENT> title += " [INACTIVE]" <NEW_LINE> <DEDENT> title += " " <NEW_LINE> output.title = title <NEW_LINE> output.dump() <NEW_LINE> <DEDENT> def undo(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> location = Location.get(self.code) <NEW_LINE> <DEDENT> except LocationError as e: <NEW_LINE> <INDENT> raise ActionError( 'Could not determine location from code: "{c}"'. format(c=self.code) ) <NEW_LINE> <DEDENT> self._location = location <NEW_LINE> <DEDENT> @property <NEW_LINE> def code(self): <NEW_LINE> <INDENT> return self._code <NEW_LINE> <DEDENT> @property <NEW_LINE> def location(self): <NEW_LINE> <INDENT> return self._location | Print information about a location. | 62599042d99f1b3c44d06964 |
class PhysicalProvider(TefloProvider): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(PhysicalProvider, self).__init__() <NEW_LINE> pass | Physical provider class. | 625990426fece00bbacccc79 |
class Item: <NEW_LINE> <INDENT> def __init__(self, x, y, height, width, board): <NEW_LINE> <INDENT> self.__x = x <NEW_LINE> self.__y = y <NEW_LINE> self.__height = height <NEW_LINE> self.__width = width <NEW_LINE> self.__matrix = [] <NEW_LINE> <DEDENT> def get_x(self): <NEW_LINE> <INDENT> return self.__x <NEW_LINE> <DEDENT> def set_x(self,x): <NEW_LINE> <INDENT> self.__x = x <NEW_LINE> <DEDENT> x = property(get_x,set_x) <NEW_LINE> def get_y(self): <NEW_LINE> <INDENT> return self.__y <NEW_LINE> <DEDENT> def set_y(self,x): <NEW_LINE> <INDENT> self.__y = x <NEW_LINE> <DEDENT> y = property(get_y,set_y) <NEW_LINE> def get_he(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> def set_he(self,x): <NEW_LINE> <INDENT> self.__height = x <NEW_LINE> <DEDENT> height = property(get_he,set_he) <NEW_LINE> def get_wi(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> def set_wi(self,x): <NEW_LINE> <INDENT> self.__width = x <NEW_LINE> <DEDENT> width = property(get_wi,set_wi) <NEW_LINE> def get_mat(self): <NEW_LINE> <INDENT> return self.__matrix <NEW_LINE> <DEDENT> def set_mat(self,x): <NEW_LINE> <INDENT> self.__matrix = x <NEW_LINE> <DEDENT> matrix = property(get_mat,set_mat) <NEW_LINE> def get_matrix(self): <NEW_LINE> <INDENT> return self.matrix <NEW_LINE> <DEDENT> def check_overlap(self, item): <NEW_LINE> <INDENT> if self.x < ( item.x + item.width) and ( self.x + self.width) > item.x and self.y < ( item.y + item.height) and ( self.y + self.height) > item.y: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | General item class to define various
obstacles and objects | 625990421d351010ab8f4de6 |
class WebMediaSlice: <NEW_LINE> <INDENT> def __init__(self, source): <NEW_LINE> <INDENT> self.source = source <NEW_LINE> _, self.name = split(source) <NEW_LINE> self.width = int(re.match(r'^.+-(\d+)\..+$', source).groups()[0]) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> media_type = 'image' <NEW_LINE> if self.is_video: <NEW_LINE> <INDENT> media_type = 'video' <NEW_LINE> <DEDENT> return '<WebMediaSlice: {}w ({})>'.format(self.width, media_type) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_video(self): <NEW_LINE> <INDENT> return not self.source.endswith('.jpg') | A slice is a single processed file that makes up part of the output for
a given source media file.
For example, if canoe-trip.mp4 was the source file, you might see slices
like canoe-trip-640.mp4 and canoe-trip-1080.mp4. | 62599042507cdc57c63a6064 |
class PublicAPIStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.GetInfo = channel.unary_unary( '/v1alpha.PublicAPI/GetInfo', request_serializer=api__pb2.GetInfoRequest.SerializeToString, response_deserializer=api__pb2.GetInfoResponse.FromString, ) <NEW_LINE> self.ListPods = channel.unary_unary( '/v1alpha.PublicAPI/ListPods', request_serializer=api__pb2.ListPodsRequest.SerializeToString, response_deserializer=api__pb2.ListPodsResponse.FromString, ) <NEW_LINE> self.InspectPod = channel.unary_unary( '/v1alpha.PublicAPI/InspectPod', request_serializer=api__pb2.InspectPodRequest.SerializeToString, response_deserializer=api__pb2.InspectPodResponse.FromString, ) <NEW_LINE> self.ListImages = channel.unary_unary( '/v1alpha.PublicAPI/ListImages', request_serializer=api__pb2.ListImagesRequest.SerializeToString, response_deserializer=api__pb2.ListImagesResponse.FromString, ) <NEW_LINE> self.InspectImage = channel.unary_unary( '/v1alpha.PublicAPI/InspectImage', request_serializer=api__pb2.InspectImageRequest.SerializeToString, response_deserializer=api__pb2.InspectImageResponse.FromString, ) <NEW_LINE> self.ListenEvents = channel.unary_stream( '/v1alpha.PublicAPI/ListenEvents', request_serializer=api__pb2.ListenEventsRequest.SerializeToString, response_deserializer=api__pb2.ListenEventsResponse.FromString, ) <NEW_LINE> self.GetLogs = channel.unary_stream( '/v1alpha.PublicAPI/GetLogs', request_serializer=api__pb2.GetLogsRequest.SerializeToString, response_deserializer=api__pb2.GetLogsResponse.FromString, ) | PublicAPI defines the read-only APIs that will be supported.
These will be handled over TCP sockets. | 62599042d10714528d69eff0 |
class TestDiceRoller(unittest.TestCase): <NEW_LINE> <INDENT> def test_multi_roll(self): <NEW_LINE> <INDENT> d = dice_roll.DiceRoller(dice_roll.Dice(6)) <NEW_LINE> t = d.roll(4) <NEW_LINE> self.assertTrue(len(t) == 4) | Test multiple roll functionality | 625990426e29344779b0191b |
class MySpider(scrapy.spiders.Spider): <NEW_LINE> <INDENT> name = 'myFirst' <NEW_LINE> page_index = 1 <NEW_LINE> def start_requests(self): <NEW_LINE> <INDENT> urls = [ 'http://lab.scrapyd.cn/' ] <NEW_LINE> for url in urls: <NEW_LINE> <INDENT> yield scrapy.Request(url=url, callback=self.parse) <NEW_LINE> <DEDENT> <DEDENT> def parse(self, response): <NEW_LINE> <INDENT> page = self.page_index <NEW_LINE> self.page_index += 1 <NEW_LINE> filename = 'parse_html/scrapyd-%s.html' % page <NEW_LINE> body = response.body <NEW_LINE> with open(filename, 'wb') as fp: <NEW_LINE> <INDENT> fp.write(body) <NEW_LINE> <DEDENT> select_quote = response.css('div.quote') <NEW_LINE> data = [] <NEW_LINE> for div in select_quote: <NEW_LINE> <INDENT> text = div.css('.text::text').extract_first() <NEW_LINE> author = div.css('.author::text').extract_first() <NEW_LINE> tags = '' <NEW_LINE> for tag in div.css('.tags .tag'): <NEW_LINE> <INDENT> tags += tag.css('::text').extract_first() <NEW_LINE> <DEDENT> data.append({ 'text': text, 'author': author, 'tags': tags }) <NEW_LINE> <DEDENT> with open('parse_html/scrapyd-%s.json' % page, 'w') as fp: <NEW_LINE> <INDENT> fp.write(json.dumps(data, ensure_ascii=False, indent=2)) <NEW_LINE> <DEDENT> self.log('保存文件 %s ' % filename) <NEW_LINE> next_page = response.css('.page-navigator .next a::attr(href)').extract_first() <NEW_LINE> if next_page is not None: <NEW_LINE> <INDENT> next_page_url = response.urljoin(next_page) <NEW_LINE> yield scrapy.Request(url=next_page_url, callback=self.parse) | Defines a spider class; the class name is arbitrary, but it must inherit from scrapy.Spider
var name: the spider's name, must be unique
fun start_requests: the method that runs the spider and requests the corresponding pages
fun parse: the callback method invoked after a page is requested; all subsequent parsing happens here | 6259904245492302aabfd7a3
class MinimaxPlayer(IsolationPlayer): <NEW_LINE> <INDENT> def get_move(self, game, time_left): <NEW_LINE> <INDENT> self.time_left = time_left <NEW_LINE> best_move = (-1, -1) <NEW_LINE> try: <NEW_LINE> <INDENT> return self.minimax(game, self.search_depth) <NEW_LINE> <DEDENT> except SearchTimeout: <NEW_LINE> <INDENT> return best_move <NEW_LINE> <DEDENT> return best_move <NEW_LINE> <DEDENT> def minimax(self, game, depth): <NEW_LINE> <INDENT> def max_value(game, depth): <NEW_LINE> <INDENT> if self.time_left() < self.TIMER_THRESHOLD: <NEW_LINE> <INDENT> raise SearchTimeout() <NEW_LINE> <DEDENT> if depth == 0 or not game.get_legal_moves(): <NEW_LINE> <INDENT> return self.score(game, self) <NEW_LINE> <DEDENT> if not game.get_legal_moves(): <NEW_LINE> <INDENT> return game.utility(self) <NEW_LINE> <DEDENT> depth -= 1 <NEW_LINE> v = float("-inf") <NEW_LINE> for m in game.get_legal_moves(): <NEW_LINE> <INDENT> v = max(v, min_value(game.forecast_move(m), depth)) <NEW_LINE> <DEDENT> return v <NEW_LINE> <DEDENT> def min_value(game, depth): <NEW_LINE> <INDENT> if self.time_left() < self.TIMER_THRESHOLD: <NEW_LINE> <INDENT> raise SearchTimeout() <NEW_LINE> <DEDENT> if depth == 0: <NEW_LINE> <INDENT> return self.score(game, self) <NEW_LINE> <DEDENT> if not game.get_legal_moves(): <NEW_LINE> <INDENT> return game.utility(self) <NEW_LINE> <DEDENT> depth -= 1 <NEW_LINE> v = float("inf") <NEW_LINE> for m in game.get_legal_moves(): <NEW_LINE> <INDENT> v = min(v, max_value(game.forecast_move(m), depth)) <NEW_LINE> <DEDENT> return v <NEW_LINE> <DEDENT> if self.time_left() < self.TIMER_THRESHOLD: <NEW_LINE> <INDENT> raise SearchTimeout() <NEW_LINE> <DEDENT> if not game.get_legal_moves(): <NEW_LINE> <INDENT> return game.utility(self) <NEW_LINE> <DEDENT> if depth == 0: <NEW_LINE> <INDENT> return self.score(game, game.active_player) <NEW_LINE> <DEDENT> main_score = float('-inf') <NEW_LINE> best_move = (-1, -1) <NEW_LINE> legal_moves = game.get_legal_moves() <NEW_LINE> if legal_moves: <NEW_LINE> <INDENT> best_move = legal_moves[random.randint(0, len(legal_moves) - 1)] <NEW_LINE> <DEDENT> for each_move in legal_moves: <NEW_LINE> <INDENT> game_subbranch = game.forecast_move(each_move) <NEW_LINE> score = min_value(game_subbranch, depth - 1) <NEW_LINE> if score > main_score: <NEW_LINE> <INDENT> best_move = each_move <NEW_LINE> main_score = score <NEW_LINE> <DEDENT> <DEDENT> return best_move | Game-playing agent that chooses a move using depth-limited minimax
search. You must finish and test this player to make sure it properly uses
minimax to return a good move before the search time limit expires. | 625990428e71fb1e983bcd96 |
class FlowListMixin(object): <NEW_LINE> <INDENT> ns_map = None <NEW_LINE> ns_map_absolute = False <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.ns_map = kwargs.get('ns_map', {}) <NEW_LINE> super(FlowListMixin, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def flows(self): <NEW_LINE> <INDENT> return self.ns_map.keys() <NEW_LINE> <DEDENT> def get_flow_namespace(self, flow_class): <NEW_LINE> <INDENT> namespace = self.ns_map.get(flow_class) <NEW_LINE> if namespace is None: <NEW_LINE> <INDENT> raise FlowRuntimeError("{} are not registred in {}".format(flow_class, self)) <NEW_LINE> <DEDENT> if not self.ns_map_absolute: <NEW_LINE> <INDENT> return "{}:{}".format(self.request.resolver_match.namespace, namespace) <NEW_LINE> <DEDENT> <DEDENT> def get_process_url(self, process, url_type='detail'): <NEW_LINE> <INDENT> namespace = self.get_flow_namespace(process.flow_class) <NEW_LINE> return reverse('{}:{}'.format(namespace, url_type), args=[process.pk]) <NEW_LINE> <DEDENT> def get_task_url(self, task, url_type=None): <NEW_LINE> <INDENT> namespace = self.get_flow_namespace(task.process.flow_class) <NEW_LINE> return task.flow_task.get_task_url( task, url_type=url_type if url_type else 'guess', user=self.request.user, namespace=namespace) | Mixin for a list view that contains multiple flows. | 625990420fa83653e46f61a2
class FormDelivery(Form): <NEW_LINE> <INDENT> title = StringField(label='Заголовок', validators=[ validators.Required(message='Это поле необходимо заполнить'), validators.Length(max=255, message='Поле не должно превышать более %(max)s символов') ]) <NEW_LINE> description = TextAreaField(label='Описание', validators=[ validators.Required(message='Это поле необходимо заполнить') ]) <NEW_LINE> cost = FloatField(label='Стоимость доставки', default=0) <NEW_LINE> status = BooleanField(label='Активен') <NEW_LINE> submit = SubmitField('Сохранить') | Delivery | 6259904273bcbd0ca4bcb553
class ExtraContextTemplateView(TemplateView): <NEW_LINE> <INDENT> extra_context = None <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(ExtraContextTemplateView, self).get_context_data(**kwargs) <NEW_LINE> if self.extra_context is not None: <NEW_LINE> <INDENT> context.update(self.extra_context) <NEW_LINE> <DEDENT> return context | Extends TemplateView to accept a dictionary of additional context.
Example usage in URL config:
url(r'^foo/$', ExtraContextTemplateView.as_view(
template_name='foo.html', extra_context={'foo': 'bar'})), | 62599042e76e3b2f99fd9cd3 |
class RestBadParameter(RestDispException404): <NEW_LINE> <INDENT> pass; | Bad parameter. | 625990428a43f66fc4bf345a |
class WampRawSocketServerFactory(WampRawSocketFactory): <NEW_LINE> <INDENT> protocol = WampRawSocketServerProtocol <NEW_LINE> def __init__(self, factory, serializers=None): <NEW_LINE> <INDENT> assert(callable(factory)) <NEW_LINE> self._factory = factory <NEW_LINE> if serializers is None: <NEW_LINE> <INDENT> serializers = get_serializes() <NEW_LINE> if not serializers: <NEW_LINE> <INDENT> raise Exception("could not import any WAMP serializers") <NEW_LINE> <DEDENT> <DEDENT> self._serializers = {ser.RAWSOCKET_SERIALIZER_ID: ser for ser in serializers} | Base class for Twisted-based WAMP-over-RawSocket server factories. | 625990423eb6a72ae038b929 |
class TestJob1(interface.TurbiniaJob): <NEW_LINE> <INDENT> NAME = 'testjob1' <NEW_LINE> def create_tasks(self, evidence): <NEW_LINE> <INDENT> return None | Test job. | 62599042d164cc6175822240 |
class URLScene(NormalizedScene): <NEW_LINE> <INDENT> def __init__(self, metadata_url, band_urls=None): <NEW_LINE> <INDENT> self._metadata_url = metadata_url <NEW_LINE> if band_urls is None: <NEW_LINE> <INDENT> self._band_base_url = metadata_url.rsplit('/', maxsplit=1)[0] <NEW_LINE> <DEDENT> elif isinstance(band_urls, str): <NEW_LINE> <INDENT> self._band_base_url = band_urls <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._band_base_url = None <NEW_LINE> self.band_urls = band_urls <NEW_LINE> <DEDENT> <DEDENT> @cached_property <NEW_LINE> def metadata(self): <NEW_LINE> <INDENT> with urllib.request.urlopen(self._metadata_url) as f: <NEW_LINE> <INDENT> metadata = json.loads(f.read().decode('utf-8')) <NEW_LINE> <DEDENT> return metadata <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def band_urls(self): <NEW_LINE> <INDENT> scene_id_str = self.metadata['scene_id'] <NEW_LINE> band_urls = {band: '{}/{}_{}.tif'.format(self._band_base_url, scene_id_str, band) for band in band_names} <NEW_LINE> return band_urls | Lazy NormalizedScene that is read via urlopen | 62599042596a897236128f13 |
class IPhoneWidget(ITextWidget): <NEW_LINE> <INDENT> pass | A Phone widget ( html5 type="tel") | 6259904291af0d3eaad3b0ed |
class CurrencyDoesntExistError(Exception): <NEW_LINE> <INDENT> pass | Raised when the exchange rate for either of the currency codes doesn't exist
on the given date; an appropriate message will be shown. | 625990428c3a8732951f7823
class TestConcave: <NEW_LINE> <INDENT> def test_no_surfaces(self): <NEW_LINE> <INDENT> S = [Point(0.9, 0.5, 0.5)] <NEW_LINE> R = [Point(0.1, 0.501, 0.501)] <NEW_LINE> walls = [] <NEW_LINE> model = Model(walls, S, R) <NEW_LINE> with pytest.raises(ValueError): <NEW_LINE> <INDENT> list(model.mirrors()) <NEW_LINE> <DEDENT> <DEDENT> def test_single_surface(self, impedance1): <NEW_LINE> <INDENT> S = [Point(0.7, 0.5, 0.5)] <NEW_LINE> R = [Point(0.3, 0.501, 0.501)] <NEW_LINE> corners1 = [ Point(0.0, 0.0, 0.0), Point(1.0, 0.0, 0.0), Point(1.0, 1.0, 0.0), Point(0.0, 1.0, 0.0) ] <NEW_LINE> wall1 = Wall(corners1, Point(0.5, 0.5, 0.0), impedance1) <NEW_LINE> model = Model([wall1], S, R, max_order=3) <NEW_LINE> mirrors = list(model.mirrors()) <NEW_LINE> assert(len(mirrors)==2) <NEW_LINE> mirrors = model.determine() <NEW_LINE> for mirror in mirrors: <NEW_LINE> <INDENT> assert( mirror.effective.all() == True ) <NEW_LINE> <DEDENT> <DEDENT> def test_single_surface_wrong_orientation(self, impedance1): <NEW_LINE> <INDENT> S = [Point(0.7, 0.5, 0.5)] <NEW_LINE> R = [Point(0.3, 0.501, 0.501)] <NEW_LINE> corners1m = [ Point(0.0, 0.0, 0.0), Point(0.0, 1.0, 0.0), Point(1.0, 1.0, 0.0), Point(1.0, 0.0, 0.0) ] <NEW_LINE> wall1m = Wall(corners1m, Point(0.5, 0.5, 0.0), impedance1) <NEW_LINE> model = Model([wall1m], S, R, max_order=10) <NEW_LINE> mirrors = list(model.mirrors()) <NEW_LINE> assert( len(mirrors) == 1) <NEW_LINE> mirrors = list(model.determine()) <NEW_LINE> assert( mirrors[0].effective.all() == True ) <NEW_LINE> <DEDENT> def test_single_surface_receiver_both_sides(self, impedance1): <NEW_LINE> <INDENT> S = [Point(0.7, 0.5, 0.5)] <NEW_LINE> R = [Point(0.3, 0.501, 0.501), Point(0.3, 0.501, 0.501)] <NEW_LINE> corners1 = [ Point(0.0, 0.0, 0.0), Point(1.0, 0.0, 0.0), Point(1.0, 1.0, 0.0), Point(0.0, 1.0, 0.0) ] <NEW_LINE> wall1 = Wall(corners1, Point(0.5, 0.5, 0.0), impedance1) <NEW_LINE> model = Model([wall1], S, R, max_order=10) <NEW_LINE> mirrors = list(model.mirrors()) <NEW_LINE> """One reflecting surface should give an additional so two sources.""" <NEW_LINE> assert( len(mirrors) == 2) <NEW_LINE> mirrors = model.determine() <NEW_LINE> """And both are effective.""" <NEW_LINE> for mirror in mirrors: <NEW_LINE> <INDENT> assert(mirror.effective.all() == True ) | Tests for :class:`ism.Model`.
| 62599042c432627299fa4266 |
class KNNRegresser(KNN): <NEW_LINE> <INDENT> def _predict_one(self, array, smoothing, probability): <NEW_LINE> <INDENT> k_nearest_indices = self._k_nearest_indices(array) <NEW_LINE> k_nearest_labels = self._y[k_nearest_indices] <NEW_LINE> if self.weights == "uniform": <NEW_LINE> <INDENT> my_weights = np.repeat(1, repeats=self.k) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> k_nearest_distances = self._euclidean_distances(array, k_nearest_indices=k_nearest_indices) <NEW_LINE> my_weights = 1 / (k_nearest_distances + smoothing) <NEW_LINE> <DEDENT> label = np.matmul(my_weights, k_nearest_labels) / np.sum(my_weights) <NEW_LINE> return label | This class is the child class of the KNN class that performs
KNN regression. | 625990426fece00bbacccc7b |
class RecordBuilder(object): <NEW_LINE> <INDENT> def __init__(self, rel_path=None): <NEW_LINE> <INDENT> super(RecordBuilder, self).__init__() <NEW_LINE> self.rel_path = rel_path <NEW_LINE> self.lf = { "local_version": 0, "local_size": 0, "local_mtime": 0, "local_permission": 0, } <NEW_LINE> self.rf = { "remote_version": 0, "remote_size": 0, "remote_mtime": 0, "remote_permission": 0, "remote_public": None, "remote_encryption": None, } <NEW_LINE> <DEDENT> def _update_int(self, obj, key, value): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> obj[key] = value <NEW_LINE> <DEDENT> <DEDENT> def _update_null(self, obj, key, value): <NEW_LINE> <INDENT> obj[key] = value <NEW_LINE> <DEDENT> def localFromInfo(self, info): <NEW_LINE> <INDENT> return self.local(info.version, info.size, info.mtime, info.permission) <NEW_LINE> <DEDENT> def local(self, version=None, size=None, mtime=None, permission=None, **kwargs): <NEW_LINE> <INDENT> self._update_int(self.lf, "local_version", version) <NEW_LINE> self._update_int(self.lf, "local_size", size) <NEW_LINE> self._update_int(self.lf, "local_mtime", mtime) <NEW_LINE> self._update_int(self.lf, "local_permission", permission) <NEW_LINE> return self <NEW_LINE> <DEDENT> def remoteFromInfo(self, info): <NEW_LINE> <INDENT> return self.remote(info.version, info.size, info.mtime, info.permission) <NEW_LINE> <DEDENT> def remote(self, version=None, size=None, mtime=None, permission=None, **kwargs): <NEW_LINE> <INDENT> self._update_int(self.rf, "remote_version", version) <NEW_LINE> self._update_int(self.rf, "remote_size", size) <NEW_LINE> self._update_int(self.rf, "remote_mtime", mtime) <NEW_LINE> self._update_int(self.rf, "remote_permission", permission) <NEW_LINE> self._update_null(self.rf, "remote_public", kwargs.get('public', None)) <NEW_LINE> self._update_null(self.rf, "remote_encryption", kwargs.get('encryption', None)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def build(self): <NEW_LINE> <INDENT> record = {} <NEW_LINE> if self.rel_path is not None: <NEW_LINE> <INDENT> record['rel_path'] = self.rel_path <NEW_LINE> <DEDENT> record.update(self.lf) <NEW_LINE> record.update(self.rf) <NEW_LINE> return record | docstring for RecordBuilder | 62599042507cdc57c63a6066 |
class TenantAdd(Command): <NEW_LINE> <INDENT> log = logging.getLogger(__name__) <NEW_LINE> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(TenantAdd, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( 'tenant_name', metavar='<tenant-name>', help='Tenant name', ) <NEW_LINE> parser.add_argument( '--authz', metavar='<authz-intraextension-uuid>', help='Authz IntraExtension UUID', ) <NEW_LINE> parser.add_argument( '--admin', metavar='<admin-intraextension-uuid>', help='Admin IntraExtension UUID', ) <NEW_LINE> parser.add_argument( '--desc', metavar='<tenant-description-str>', help='Tenant description', ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> post_data = dict() <NEW_LINE> post_data["tenant_name"] = parsed_args.tenant_name <NEW_LINE> if parsed_args.authz: <NEW_LINE> <INDENT> post_data["tenant_intra_authz_extension_id"] = parsed_args.authz <NEW_LINE> <DEDENT> if parsed_args.admin: <NEW_LINE> <INDENT> post_data["tenant_intra_admin_extension_id"] = parsed_args.admin <NEW_LINE> <DEDENT> if parsed_args.desc: <NEW_LINE> <INDENT> post_data["tenant_description"] = parsed_args.desc <NEW_LINE> <DEDENT> tenants = self.app.get_url(self.app.url_prefix+"/tenants", post_data=post_data, authtoken=True) <NEW_LINE> return ( ("id", "name", "description", "intra_authz_extension_id", "intra_admin_extension_id"), (( tenant_id, tenants[tenant_id]["name"], tenants[tenant_id]["description"], tenants[tenant_id]["intra_authz_extension_id"], tenants[tenant_id]["intra_admin_extension_id"], ) for tenant_id in tenants) ) | Add a tenant. | 6259904223e79379d538d7c8 |
class EscenaJuego(Escena): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Escena.__init__(self) <NEW_LINE> self.fondo = cargar_imagen("fondo.jpg") <NEW_LINE> self.pelota = Pelota() <NEW_LINE> self.jugador = Jugador() <NEW_LINE> self.muro = Muro() <NEW_LINE> self.puntos = 0 <NEW_LINE> self.puntuacion = Texto("Puntos: ") <NEW_LINE> self.vidas = 3 <NEW_LINE> self.t_vidas = Texto("Vidas: ") <NEW_LINE> p.key.set_repeat(1, 25) <NEW_LINE> puerto = "COM3" <NEW_LINE> baudios = 9600 <NEW_LINE> try: <NEW_LINE> <INDENT> self.arduino = ArduinoUNO(puerto, baudios) <NEW_LINE> self.arduino.leer() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.arduino = None <NEW_LINE> <DEDENT> <DEDENT> def leer_eventos(self, eventos): <NEW_LINE> <INDENT> for evento in eventos: <NEW_LINE> <INDENT> if evento.type == p.KEYDOWN: <NEW_LINE> <INDENT> self.jugador.mover(evento.key) <NEW_LINE> <DEDENT> <DEDENT> if self.arduino: <NEW_LINE> <INDENT> self.jugador.moverConAcelerometro(self.arduino.leer()) <NEW_LINE> <DEDENT> <DEDENT> def actualizar(self): <NEW_LINE> <INDENT> self.pelota.actualizar() <NEW_LINE> self.pelota.colision(self.jugador) <NEW_LINE> self.puntos += self.pelota.colisionMultiple(self.muro.ladrillos) <NEW_LINE> self.vidas -= self.pelota.se_salio(self.jugador.rect) <NEW_LINE> if self.vidas == 0: <NEW_LINE> <INDENT> self.cambiar_escena(EscenaJuegoTerminado(self.puntos)) <NEW_LINE> <DEDENT> <DEDENT> def dibujar(self, pantalla): <NEW_LINE> <INDENT> pantalla.blit(self.fondo, (0, 0)) <NEW_LINE> pantalla.blit(self.puntuacion.mostrar(str(self.puntos)), (0, 0)) <NEW_LINE> pantalla.blit(self.t_vidas.mostrar(str(self.vidas)), (560, 0)) <NEW_LINE> pantalla.blit(self.pelota.image, self.pelota.rect) <NEW_LINE> pantalla.blit(self.jugador.image, self.jugador.rect) <NEW_LINE> for i in range(len(self.muro.ladrillos)): <NEW_LINE> <INDENT> pantalla.blit(self.muro.image, self.muro.ladrillos[i]) | Class that defines the main scene of the video game. | 625990424e696a045264e786
class about(): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> response = current.response <NEW_LINE> request = current.request <NEW_LINE> T = current.T <NEW_LINE> view = path.join(request.folder, "private", "templates", "DRRPP", "views", "about.html") <NEW_LINE> try: <NEW_LINE> <INDENT> response.view = open(view, "rb") <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> from gluon.http import HTTP <NEW_LINE> raise HTTP("404", "Unable to open Custom View: %s" % view) <NEW_LINE> <DEDENT> response.title = T("About") <NEW_LINE> return dict( title=T("About"), ) | Custom About page | 62599042d7e4931a7ef3d340 |
class ReportIndex(TemplateView): <NEW_LINE> <INDENT> template_name = 'ereports/index.html' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> reports = [] <NEW_LINE> for report in ReportConfiguration.objects.filter(published=True).order_by('group', 'name'): <NEW_LINE> <INDENT> reports.append(report) <NEW_LINE> <DEDENT> return {'reports': reports} | Index of all available reports for an office
| 6259904224f1403a92686231 |
class ExpFitSGCombinedSNR(ExpFitCombinedSNR): <NEW_LINE> <INDENT> def __init__(self, files, ifos=None): <NEW_LINE> <INDENT> ExpFitCombinedSNR.__init__(self, files, ifos=ifos) <NEW_LINE> self.get_newsnr = ranking.get_newsnr_sgveto | ExpFitCombinedSNR but with sine-Gaussian veto added to the single
detector ranking | 62599042b57a9660fecd2d46 |
class CIFARBasicBlockV2(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, channels, stride, downsample=False, **kwargs): <NEW_LINE> <INDENT> super(CIFARBasicBlockV2, self).__init__(**kwargs) <NEW_LINE> self.bn1 = nn.BatchNorm2d(in_channels) <NEW_LINE> self.conv1 = _conv3x3(in_channels, channels, stride) <NEW_LINE> self.bn2 = nn.BatchNorm2d(channels) <NEW_LINE> self.conv2 = _conv3x3(channels, channels, 1) <NEW_LINE> if downsample: <NEW_LINE> <INDENT> self.downsample = nn.Conv2d(in_channels, channels, 1, stride, bias=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.downsample = None <NEW_LINE> <DEDENT> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> residual = x <NEW_LINE> x = self.bn1(x) <NEW_LINE> x = F.relu(x) <NEW_LINE> x = self.conv1(x) <NEW_LINE> x = self.bn2(x) <NEW_LINE> x = F.relu(x) <NEW_LINE> x = self.conv2(x) <NEW_LINE> if self.downsample: <NEW_LINE> <INDENT> residual = self.downsample(residual) <NEW_LINE> <DEDENT> return x + residual | BasicBlock V2 from
`"Identity Mappings in Deep Residual Networks"
<https://arxiv.org/abs/1603.05027>`_ paper.
This is used for ResNet V2 for 18, 34 layers.
Parameters
----------
in_channels : int
Number of input channels.
channels : int
Number of output channels.
stride : int
Stride size.
downsample : bool, default False
Whether to down sample the input.
norm_layer : object
Normalization layer used (default: :class:`nn.BatchNorm`)
Can be :class:`nn.BatchNorm` or :class:`other normalization`.
norm_kwargs : dict
Additional `norm_layer` arguments | 6259904229b78933be26aa28 |
class CudaErrorCheck(CustomCodeNode): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(f'{self.err_check_function.function_name}(cudaPeekAtLastError());', [], []) <NEW_LINE> <DEDENT> err_check_function = CudaErrorCheckDefinition() <NEW_LINE> required_global_declarations = [err_check_function] <NEW_LINE> headers = ['<cuda.h>'] | Checks whether the last call to the CUDA API was successful and panics in the negative case.
.. code:: c++
# define gpuErrchk(ans) { gpuAssert((ans), __PRETTY_FUNCTION__, __FILE__, __LINE__); }
inline static void gpuAssert(cudaError_t code, const char* function, const char *file, int line, bool abort=true)
{
if (code != cudaSuccess)
{
fprintf(stderr,"CUDA error: %s: %s:%d\n", cudaGetErrorString(code), file, line);
if (abort) exit(code);
}
}
...
gpuErrchk(cudaPeekAtLastError()); | 625990421f5feb6acb163ebe |
class ILiegeUrbanLayer(IDefaultBrowserLayer): <NEW_LINE> <INDENT> pass | Marker interface that defines a browser layer. | 62599042d53ae8145f919728 |
class PMarkov(PStochasticPattern): <NEW_LINE> <INDENT> def __init__(self, nodes=None): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> if isinstance(nodes, list): <NEW_LINE> <INDENT> learner = MarkovLearner() <NEW_LINE> for value in nodes: <NEW_LINE> <INDENT> learner.register(value) <NEW_LINE> <DEDENT> self.nodes = learner.markov.nodes <NEW_LINE> <DEDENT> elif isinstance(nodes, dict): <NEW_LINE> <INDENT> self.nodes = nodes <NEW_LINE> <DEDENT> elif nodes: <NEW_LINE> <INDENT> raise ValueError("Invalid value for nodes") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.nodes = {} <NEW_LINE> <DEDENT> self.node = None <NEW_LINE> <DEDENT> def randomize(self): <NEW_LINE> <INDENT> for node in list(self.nodes.keys()): <NEW_LINE> <INDENT> self.nodes[node] = [] <NEW_LINE> for other in list(self.nodes.keys()): <NEW_LINE> <INDENT> prob = self.rng.randint(0, 10) <NEW_LINE> self.nodes[node] += [other] * prob <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.node is None and len(self.nodes) > 0: <NEW_LINE> <INDENT> self.node = self.rng.choice(list(self.nodes.keys())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.node = self.rng.choice(self.nodes[self.node]) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> self.node = self.rng.choice(list(self.nodes.keys())) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> print("No such node: %s" % self.node) <NEW_LINE> <DEDENT> <DEDENT> if self.node is None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return self.node <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_sequence(self, sequence): <NEW_LINE> <INDENT> learner = MarkovLearner() <NEW_LINE> for value in sequence: <NEW_LINE> <INDENT> learner.register(value) <NEW_LINE> <DEDENT> return PMarkov(learner.markov) | PMarkov: First-order Markov chain generator.
| 62599042e64d504609df9d36 |
class Location: <NEW_LINE> <INDENT> def __init__(self, city: str = None, postoffice: int = None): <NEW_LINE> <INDENT> self.city = city <NEW_LINE> self.postoffice = postoffice | Location were to deliver a thing
>>> place = Location('Lwiw', 121)
>>> place.__class__.__name__
'Location' | 6259904207f4c71912bb06fd |
class ActorViewset(ModelViewSet): <NEW_LINE> <INDENT> queryset = Actor.objects.all() <NEW_LINE> serializer_class = ActorSerializer | A simple ViewSet for viewing and editing actors. | 6259904215baa7234946325d
class Paviani(Benchmark): <NEW_LINE> <INDENT> def __init__(self, dimensions=10): <NEW_LINE> <INDENT> Benchmark.__init__(self, dimensions) <NEW_LINE> self.bounds = zip([2.001] * self.dimensions, [9.999] * self.dimensions) <NEW_LINE> self.global_optimum = [9.350266 for _ in range(self.dimensions)] <NEW_LINE> self.fglob = -45.7784684040686 <NEW_LINE> <DEDENT> def evaluator(self, x, *args): <NEW_LINE> <INDENT> self.fun_evals += 1 <NEW_LINE> return sum(log(x-2)**2.0 + log(10.0 - x)**2.0) - prod(x)**0.2 | Paviani test objective function.
This class defines the Paviani global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{Paviani}}(\mathbf{x}) = \sum_{i=1}^{10} \left[\log^{2}\left(10 - x_i\right) + \log^{2}\left(x_i -2\right)\right] - \left(\prod_{i=1}^{10} x_i^{10} \right)^{0.2}
Here, :math:`n` represents the number of dimensions and :math:`x_i \in [2.001, 9.999]` for :math:`i=1,...,n`.
*Global optimum*: :math:`f(x_i) = -45.7784684040686` for :math:`x_i = 9.350266` for :math:`i=1,...,n` | 6259904266673b3332c316c5 |
class Card(object): <NEW_LINE> <INDENT> values = {"T": 10, "J": 11, "Q": 12, "K": 13, "A": 14} <NEW_LINE> def __init__(self, value, suit): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.value = int(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.value = Card.values[value.upper()] <NEW_LINE> <DEDENT> self.suit = suit.upper() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> beeldjes = ["T", "J", "Q", "K", "A"] <NEW_LINE> if self.value < 10: <NEW_LINE> <INDENT> return str(self.value) + self.suit <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return beeldjes[self.value - 10] + self.suit <NEW_LINE> <DEDENT> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self.value > other.value <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> return self.value >= other.value <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.value == other.value | Standard playing card | 62599042507cdc57c63a6068 |
class Strategy(metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def run(self): <NEW_LINE> <INDENT> pass | The context uses this interface to invoke the algorithm defined by
a concrete strategy | 6259904207d97122c4217f6b
class White(Token): <NEW_LINE> <INDENT> whiteStrs = { ' ' : '<SP>', '\t': '<TAB>', '\n': '<LF>', '\r': '<CR>', '\f': '<FF>', u'\u00A0': '<NBSP>', u'\u1680': '<OGHAM_SPACE_MARK>', u'\u180E': '<MONGOLIAN_VOWEL_SEPARATOR>', u'\u2000': '<EN_QUAD>', u'\u2001': '<EM_QUAD>', u'\u2002': '<EN_SPACE>', u'\u2003': '<EM_SPACE>', u'\u2004': '<THREE-PER-EM_SPACE>', u'\u2005': '<FOUR-PER-EM_SPACE>', u'\u2006': '<SIX-PER-EM_SPACE>', u'\u2007': '<FIGURE_SPACE>', u'\u2008': '<PUNCTUATION_SPACE>', u'\u2009': '<THIN_SPACE>', u'\u200A': '<HAIR_SPACE>', u'\u200B': '<ZERO_WIDTH_SPACE>', u'\u202F': '<NNBSP>', u'\u205F': '<MMSP>', u'\u3000': '<IDEOGRAPHIC_SPACE>', } <NEW_LINE> def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): <NEW_LINE> <INDENT> super(White, self).__init__() <NEW_LINE> self.matchWhite = ws <NEW_LINE> self.setWhitespaceChars("".join(c for c in self.whiteChars if c not in self.matchWhite)) <NEW_LINE> self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) <NEW_LINE> self.mayReturnEmpty = True <NEW_LINE> self.errmsg = "Expected " + self.name <NEW_LINE> self.minLen = min <NEW_LINE> if max > 0: <NEW_LINE> <INDENT> self.maxLen = max <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.maxLen = _MAX_INT <NEW_LINE> <DEDENT> if exact > 0: <NEW_LINE> <INDENT> self.maxLen = exact <NEW_LINE> self.minLen = exact <NEW_LINE> <DEDENT> <DEDENT> def parseImpl(self, instring, loc, doActions=True): <NEW_LINE> <INDENT> if instring[loc] not in self.matchWhite: <NEW_LINE> <INDENT> raise ParseException(instring, loc, self.errmsg, self) <NEW_LINE> <DEDENT> start = loc <NEW_LINE> loc += 1 <NEW_LINE> maxloc = start + self.maxLen <NEW_LINE> maxloc = min(maxloc, len(instring)) <NEW_LINE> while loc < maxloc and instring[loc] in self.matchWhite: <NEW_LINE> <INDENT> loc += 1 <NEW_LINE> <DEDENT> if loc - start < self.minLen: <NEW_LINE> <INDENT> raise ParseException(instring, loc, self.errmsg, self) <NEW_LINE> <DEDENT> return loc, instring[start:loc] | Special matching class for matching whitespace. Normally,
whitespace is ignored by pyparsing grammars. This class is included
when some whitespace structures are significant. Define with
a string containing the whitespace characters to be matched; default
is ``" \t\r\n"``. Also takes optional ``min``,
``max``, and ``exact`` arguments, as defined for the
:class:`Word` class. | 62599042b57a9660fecd2d47 |
class MSG_srec_end_ok(RXMessage): <NEW_LINE> <INDENT> _format = '>BBB' <NEW_LINE> _filter = [(True, 0), (True, 0), (True, 1)] <NEW_LINE> def __init__(self, raw): <NEW_LINE> <INDENT> super().__init__(expected_id=RSP_ID, raw=raw) | sent in response to an internal part of an S-record | 6259904282261d6c5273082a |
class JvoltDipole: <NEW_LINE> <INDENT> def __init__(self, VoltDpinfo): <NEW_LINE> <INDENT> self.dipole = VoltDpinfo.DIPOLE <NEW_LINE> try: <NEW_LINE> <INDENT> self.Rx1x = float(VoltDpinfo.Rx1x) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.Rx1y = float(VoltDpinfo.Rx1y) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.Rx1 = float(VoltDpinfo.Rx1y) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.Rx1File = VoltDpinfo.Rx1File <NEW_LINE> self.Rx1East = float(VoltDpinfo.Rx1East) <NEW_LINE> self.Rx1North = float(VoltDpinfo.Rx1North) <NEW_LINE> self.Rx1Elev = float(VoltDpinfo.Rx1Elev) <NEW_LINE> self.Rx2File = VoltDpinfo.Rx2File <NEW_LINE> self.Rx2East = float(VoltDpinfo.Rx2East) <NEW_LINE> self.Rx2North = float(VoltDpinfo.Rx2North) <NEW_LINE> self.Rx2Elev = float(VoltDpinfo.Rx2Elev) <NEW_LINE> self.Vp = float(VoltDpinfo.Vp) <NEW_LINE> self.Vp_err = float(VoltDpinfo.Vp_err) <NEW_LINE> self.Rho = float(VoltDpinfo.Rho) <NEW_LINE> self.flagRho = VoltDpinfo.Rho_QC <NEW_LINE> self.Stack = float(VoltDpinfo.Stack) <NEW_LINE> try: <NEW_LINE> <INDENT> self.Mx = float(VoltDpinfo.Mx) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.Mx = -99.9 <NEW_LINE> <DEDENT> self.Mx_err = float(VoltDpinfo.Mx_err) <NEW_LINE> self.flagMx = VoltDpinfo.Mx_QC <NEW_LINE> self.flagBad = VoltDpinfo.Status <NEW_LINE> self.TimeBase = VoltDpinfo.TimeBase <NEW_LINE> self.Vs = np.asarray(VoltDpinfo.Vs) <NEW_LINE> self.direction = True <NEW_LINE> <DEDENT> def getXplotpoint(self, Idp): <NEW_LINE> <INDENT> if not self.direction: <NEW_LINE> <INDENT> self.Rx1 = self.Rx1x <NEW_LINE> Idp.Tx1 = Idp.Tx1x <NEW_LINE> <DEDENT> if (self.Rx1 > Idp.Tx1): <NEW_LINE> <INDENT> x = Idp.Tx1 + ((self.Rx1 - Idp.Tx1) / 2.0) <NEW_LINE> <DEDENT> elif (self.Rx1 < Idp.Tx1): <NEW_LINE> <INDENT> x = Idp.Tx1 - ((Idp.Tx1 - self.Rx1) / 2.0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print <NEW_LINE> <DEDENT> return[x] <NEW_LINE> <DEDENT> def getZplotpoint(self, Idp): <NEW_LINE> <INDENT> if not self.direction: <NEW_LINE> <INDENT> self.Rx1 = self.Rx1x <NEW_LINE> Idp.Tx1 = Idp.Tx1x <NEW_LINE> <DEDENT> z = -(abs(Idp.Tx1 - self.Rx1)) / 2.0 <NEW_LINE> return[z] <NEW_LINE> <DEDENT> def calcRho(self): <NEW_LINE> <INDENT> return[self.Rho] | object containing voltage information | 6259904245492302aabfd7a7 |
class BatchTransform(Transform): <NEW_LINE> <INDENT> def __init__(self, job_queue, job_definition, parameters=None, container_overrides=None, **kwargs): <NEW_LINE> <INDENT> super(BatchTransform, self).__init__(**kwargs) <NEW_LINE> self.job_queue = job_queue <NEW_LINE> self.job_definition = job_definition <NEW_LINE> self.parameters = parameters <NEW_LINE> self.container_overrides = container_overrides <NEW_LINE> <DEDENT> def setup(self, name): <NEW_LINE> <INDENT> setup = super(BatchTransform, self).setup(name) <NEW_LINE> setup['type'] = 'batch' <NEW_LINE> setup['params'] = { 'jobName': name, 'jobQueue':self.job_queue, 'jobDefinition': self.job_definition, 'parameters': {}, 'containerOverrides': {} } <NEW_LINE> if self.parameters: <NEW_LINE> <INDENT> setup['params']['parameters'] = {**setup['params']['parameters'], **self.parameters} <NEW_LINE> <DEDENT> if self.container_overrides: <NEW_LINE> <INDENT> setup['params']['containerOverrides'] = {**setup['params']['containerOverrides'], **self.container_overrides} <NEW_LINE> <DEDENT> return setup | docstring for . | 625990420fa83653e46f61a6 |
class removeByInner_args(object): <NEW_LINE> <INDENT> def __init__(self, boxId=None, innerPath=None,): <NEW_LINE> <INDENT> self.boxId = boxId <NEW_LINE> self.innerPath = innerPath <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I64: <NEW_LINE> <INDENT> self.boxId = iprot.readI64() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.innerPath = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('removeByInner_args') <NEW_LINE> if self.boxId is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('boxId', TType.I64, 1) <NEW_LINE> oprot.writeI64(self.boxId) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.innerPath is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('innerPath', TType.STRING, 2) <NEW_LINE> oprot.writeString(self.innerPath.encode('utf-8') if sys.version_info[0] == 2 else self.innerPath) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- boxId
- innerPath | 62599042e76e3b2f99fd9cd7 |
class ProtocolError(object): <NEW_LINE> <INDENT> pass | Invalid protocol exception raised for AddOn Traffic. | 6259904223849d37ff852386 |
class TestUpdateUserHandler(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.__interactor = Mock(UpdateUserInteractor) <NEW_LINE> interactor_factory = Mock(InteractorFactory) <NEW_LINE> interactor_factory.create = Mock(return_value=self.__interactor) <NEW_LINE> self.__target = UpdateUserHandler(interactor_factory, None) <NEW_LINE> self.__target.session = Mock(Session) <NEW_LINE> params = {"id": "id"} <NEW_LINE> self.__user = User.from_dict(params) <NEW_LINE> self.__get_page = lambda: self.__target.get_page(params) <NEW_LINE> <DEDENT> def test_is_instance_of_authenticated_handler(self): <NEW_LINE> <INDENT> self.assertIsInstance(self.__target, AuthenticatedHandler) <NEW_LINE> <DEDENT> def test_executes_interactor(self): <NEW_LINE> <INDENT> self.__get_page() <NEW_LINE> self.__interactor.execute.assert_called_with(self.__user) | Unit tests for the UpdateUserHandler class | 62599042d164cc6175822244 |
class OggPacket(_vorbisenc.ogg_packet): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(OggPacket, self).__init__() | An ogg packet wrapper.
| 6259904291af0d3eaad3b0f1 |
class directoryServiceStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.GetFileServer = channel.unary_unary( '/directoryService/GetFileServer', request_serializer=service__pb2.SimpleRequest.SerializeToString, response_deserializer=service__pb2.SimpleReply.FromString, ) <NEW_LINE> self.GetSlaves = channel.unary_unary( '/directoryService/GetSlaves', request_serializer=service__pb2.SimpleRequest.SerializeToString, response_deserializer=service__pb2.SimpleReply.FromString, ) | Directory server
| 625990428c3a8732951f7827 |
class Error(Exception): <NEW_LINE> <INDENT> pass | Base Class for all exceptions | 6259904215baa7234946325f |
@skipIfFailed(TestDesign, TestDesign.test_functions_defined.__name__, tag=A1.display_guess_matrix.__name__) <NEW_LINE> class TestDisplayGuessMatrix(TestFunctionality): <NEW_LINE> <INDENT> def test_display_01(self): <NEW_LINE> <INDENT> with RedirectStdIO(stdinout=True) as stdio: <NEW_LINE> <INDENT> self.a1.display_guess_matrix(1, 6, ()) <NEW_LINE> <DEDENT> actual = stdio.stdinout <NEW_LINE> expected = self.load_test_data('display_01.out') <NEW_LINE> self.assertEqual(actual, expected) <NEW_LINE> <DEDENT> def test_display_02(self): <NEW_LINE> <INDENT> with RedirectStdIO(stdinout=True) as stdio: <NEW_LINE> <INDENT> self.a1.display_guess_matrix(2, 6, (0,)) <NEW_LINE> <DEDENT> actual = stdio.stdinout <NEW_LINE> expected = self.load_test_data('display_02.out') <NEW_LINE> self.assertEqual(actual, expected) <NEW_LINE> <DEDENT> def test_display_03(self): <NEW_LINE> <INDENT> with RedirectStdIO(stdinout=True) as stdio: <NEW_LINE> <INDENT> self.a1.display_guess_matrix(3, 6, (26, 14)) <NEW_LINE> <DEDENT> actual = stdio.stdinout <NEW_LINE> expected = self.load_test_data('display_03.out') <NEW_LINE> self.assertEqual(actual, expected) <NEW_LINE> <DEDENT> def test_display_04(self): <NEW_LINE> <INDENT> with RedirectStdIO(stdinout=True) as stdio: <NEW_LINE> <INDENT> self.a1.display_guess_matrix(9, 9, (26, 14, 0, 12, 14, 26, 0, 0)) <NEW_LINE> <DEDENT> actual = stdio.stdinout <NEW_LINE> expected = self.load_test_data('display_04.out') <NEW_LINE> self.assertEqual(actual, expected) | Tests display_guess_matrix | 625990423c8af77a43b688a3 |
class LoginPageLocators(object): <NEW_LINE> <INDENT> USERNAME = (By.ID, 'username') <NEW_LINE> PASSWORD = (By.ID, 'password') <NEW_LINE> SUBMIT = (By.NAME, 'op') | A class for login page locators. All login page locators should be defined here | 62599042e76e3b2f99fd9cd9
class CannotBindAction(APIException): <NEW_LINE> <INDENT> name = 'cannot-bind-action' | This exception is raised when an attempt is made to assign an Action to a
Target that does not exist. | 625990428a43f66fc4bf3460 |
class EventSendPort(EventPort, SendPort): <NEW_LINE> <INDENT> nineml_type = 'EventSendPort' | EventSendPort
An |EventSendPort| is a port that can transmit discrete events at
points in time. For example, an integrate-and-fire could 'send' events to
notify other components that it had fired. | 625990421f5feb6acb163ec2 |
class SingleColumnDataExampleWithoutSlices(Tracker): <NEW_LINE> <INDENT> tracks = ("track1", "track2", "track3") <NEW_LINE> def __call__(self, track, slice=None): <NEW_LINE> <INDENT> s = [random.randint(0, 20) for x in range(40)] <NEW_LINE> random.shuffle(s) <NEW_LINE> return odict((("data", s),)) | return a single column of data. | 625990428a349b6b43687517 |
class Driver(object): <NEW_LINE> <INDENT> instance = None <NEW_LINE> @classmethod <NEW_LINE> def get_instance(cls): <NEW_LINE> <INDENT> if cls.instance is None: <NEW_LINE> <INDENT> cls.instance = Driver() <NEW_LINE> <DEDENT> return cls.instance <NEW_LINE> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self.driver = webdriver.Firefox(executable_path=DRIVER_PATH) <NEW_LINE> <DEDENT> def get_driver(self): <NEW_LINE> <INDENT> return self.driver <NEW_LINE> <DEDENT> def stop_instance(self): <NEW_LINE> <INDENT> self.driver.quit() <NEW_LINE> instance = None <NEW_LINE> <DEDENT> def clear_cookies(self): <NEW_LINE> <INDENT> self.driver.delete_all_cookies() <NEW_LINE> <DEDENT> def navigate(self, url): <NEW_LINE> <INDENT> self.driver.get(url) | Singleton class for interacting with the selenium webdriver object | 6259904229b78933be26aa2a |
class Package(object): <NEW_LINE> <INDENT> def __init__(self, kind, name, version): <NEW_LINE> <INDENT> self.kind = kind <NEW_LINE> self.name = name <NEW_LINE> self.version = version <NEW_LINE> self.required_by = [] <NEW_LINE> self.morphology = None <NEW_LINE> self.dependencies = None <NEW_LINE> self.is_build_dep = False <NEW_LINE> self.version_in_use = version <NEW_LINE> <DEDENT> def __cmp__(self, other): <NEW_LINE> <INDENT> return cmp(self.name, other.name) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Package %s-%s>' % (self.name, self.version) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if len(self.required_by) > 0: <NEW_LINE> <INDENT> required_msg = ', '.join(self.required_by) <NEW_LINE> required_msg = ', required by: ' + required_msg <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> required_msg = '' <NEW_LINE> <DEDENT> return '%s-%s%s' % (self.name, self.version, required_msg) <NEW_LINE> <DEDENT> def add_required_by(self, item): <NEW_LINE> <INDENT> self.required_by.append('%s-%s' % (item.name, item.version)) <NEW_LINE> <DEDENT> def match(self, kind, name, version): <NEW_LINE> <INDENT> return (self.kind == kind and self.name == name and self.version == version) <NEW_LINE> <DEDENT> def set_morphology(self, morphology): <NEW_LINE> <INDENT> self.morphology = morphology <NEW_LINE> <DEDENT> def set_dependencies(self, dependencies): <NEW_LINE> <INDENT> self.dependencies = dependencies <NEW_LINE> <DEDENT> def set_is_build_dep(self, is_build_dep): <NEW_LINE> <INDENT> self.is_build_dep = is_build_dep <NEW_LINE> <DEDENT> def set_version_in_use(self, version_in_use): <NEW_LINE> <INDENT> self.version_in_use = version_in_use | A package in the processing queue.
In order to provide helpful errors, this item keeps track of what
packages depend on it, and hence of why it was added to the queue. | 6259904207d97122c4217f6e |
class TestReupholster(TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> delete_all_locations() <NEW_LINE> cls.domain = create_domain('locations-test') <NEW_LINE> cls.domain.locations_enabled = True <NEW_LINE> bootstrap_location_types(cls.domain.name) <NEW_LINE> cls.state = make_loc("Florida", type='state') <NEW_LINE> cls.district = make_loc("Duval", type='district', parent=cls.state) <NEW_LINE> cls.block = make_loc("Jacksonville", type='block', parent=cls.district) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> delete_all_locations() <NEW_LINE> <DEDENT> def test_replace_all_ids(self): <NEW_LINE> <INDENT> original_result = set([r['id'] for r in Location.get_db().view( 'locations/by_name', reduce=False, ).all()]) <NEW_LINE> new_result = set(SQLLocation.objects.location_ids()) <NEW_LINE> self.assertEqual(original_result, new_result) <NEW_LINE> <DEDENT> def test_all_for_domain_by_type(self): <NEW_LINE> <INDENT> original_result = [r['id'] for r in Location.get_db().view( 'locations/by_type', reduce=False, startkey=[self.domain.name], endkey=[self.domain.name, {}], ).all()] <NEW_LINE> new_result = SQLLocation.objects.filter(domain=self.domain.name).location_ids() <NEW_LINE> self.assertEqual(set(original_result), set(new_result)) <NEW_LINE> <DEDENT> def _blocks_by_type(self, loc_id, reduce=False): <NEW_LINE> <INDENT> return Location.get_db().view('locations/by_type', reduce=reduce, startkey=[self.domain.name, 'block', loc_id], endkey=[self.domain.name, 'block', loc_id, {}], ) <NEW_LINE> <DEDENT> def test_count_by_type(self): <NEW_LINE> <INDENT> from custom.intrahealth.report_calcs import _locations_per_type <NEW_LINE> original_result = (self._blocks_by_type(self.state._id, reduce=True) .one()['value']) <NEW_LINE> new_result = _locations_per_type(self.domain.name, 'block', self.state) <NEW_LINE> self.assertEqual(original_result, new_result) <NEW_LINE> <DEDENT> def test_filter_by_type(self): <NEW_LINE> <INDENT> original_result = [r['id'] for r in self._blocks_by_type(self.state._id)] <NEW_LINE> new_result = (self.state.sql_location .get_descendants(include_self=True) .filter(domain=self.domain.name, location_type__name='block') .location_ids()) <NEW_LINE> self.assertEqual(original_result, list(new_result)) <NEW_LINE> <DEDENT> def test_filter_by_type_no_root(self): <NEW_LINE> <INDENT> original_result = [r['id'] for r in self._blocks_by_type(None)] <NEW_LINE> new_result = (SQLLocation.objects .filter(domain=self.domain.name, location_type__name='block') .location_ids()) <NEW_LINE> self.assertEqual(original_result, list(new_result)) | These tests were written to drive removal of specific queries. It
is safe to delete this when the reupholstering of Location is done
and someone has written test coverage for the methods used in here. | 62599042b830903b9686ede1
class ManagerNotificationWrapper(object): <NEW_LINE> <INDENT> def __init__(self, operation, resource_type, host=None): <NEW_LINE> <INDENT> self.operation = operation <NEW_LINE> self.resource_type = resource_type <NEW_LINE> RESOURCE_TYPES.add(resource_type) <NEW_LINE> self.host = host <NEW_LINE> <DEDENT> def __call__(self, f): <NEW_LINE> <INDENT> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = f(*args, **kwargs) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _send_notification( self.operation, self.resource_type, args[1], self.host) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> return wrapper | Send event notifications for ``Manager`` methods.
Sends a notification if the wrapped Manager method does not raise an
``Exception`` (such as ``keystone.exception.NotFound``).
:param resource_type: type of resource being affected
:param host: host of the resource (optional) | 62599042baa26c4b54d50578 |
class ParametricBuffer(BalancedExemplarsBuffer): <NEW_LINE> <INDENT> def __init__( self, max_size: int, groupby=None, selection_strategy: Optional["ExemplarsSelectionStrategy"] = None, ): <NEW_LINE> <INDENT> super().__init__(max_size) <NEW_LINE> assert groupby in {None, "task", "class", "experience"}, ( "Unknown grouping scheme. Must be one of {None, 'task', " "'class', 'experience'}" ) <NEW_LINE> self.groupby = groupby <NEW_LINE> ss = selection_strategy or RandomExemplarsSelectionStrategy() <NEW_LINE> self.selection_strategy = ss <NEW_LINE> self.seen_groups = set() <NEW_LINE> self._curr_strategy = None <NEW_LINE> <DEDENT> def update(self, strategy: "SupervisedTemplate", **kwargs): <NEW_LINE> <INDENT> new_data = strategy.experience.dataset <NEW_LINE> new_groups = self._make_groups(strategy, new_data) <NEW_LINE> self.seen_groups.update(new_groups.keys()) <NEW_LINE> lens = self.get_group_lengths(len(self.seen_groups)) <NEW_LINE> group_to_len = {} <NEW_LINE> for group_id, ll in zip(self.seen_groups, lens): <NEW_LINE> <INDENT> group_to_len[group_id] = ll <NEW_LINE> <DEDENT> for group_id, new_data_g in new_groups.items(): <NEW_LINE> <INDENT> ll = group_to_len[group_id] <NEW_LINE> if group_id in self.buffer_groups: <NEW_LINE> <INDENT> old_buffer_g = self.buffer_groups[group_id] <NEW_LINE> old_buffer_g.update_from_dataset(strategy, new_data_g) <NEW_LINE> old_buffer_g.resize(strategy, ll) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_buffer = _ParametricSingleBuffer( ll, self.selection_strategy ) <NEW_LINE> new_buffer.update_from_dataset(strategy, new_data_g) <NEW_LINE> self.buffer_groups[group_id] = new_buffer <NEW_LINE> <DEDENT> <DEDENT> for group_id, class_buf in self.buffer_groups.items(): <NEW_LINE> <INDENT> self.buffer_groups[group_id].resize( strategy, group_to_len[group_id] ) <NEW_LINE> <DEDENT> <DEDENT> def _make_groups(self, strategy, data): <NEW_LINE> <INDENT> if self.groupby is None: <NEW_LINE> <INDENT> return {0: data} <NEW_LINE> <DEDENT> elif self.groupby == "task": <NEW_LINE> <INDENT> return self._split_by_task(data) <NEW_LINE> <DEDENT> elif self.groupby == "experience": <NEW_LINE> <INDENT> return self._split_by_experience(strategy, data) <NEW_LINE> <DEDENT> elif self.groupby == "class": <NEW_LINE> <INDENT> return self._split_by_class(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert False, "Invalid groupby key. Should never get here." <NEW_LINE> <DEDENT> <DEDENT> def _split_by_class(self, data): <NEW_LINE> <INDENT> class_idxs = {} <NEW_LINE> for idx, target in enumerate(data.targets): <NEW_LINE> <INDENT> if target not in class_idxs: <NEW_LINE> <INDENT> class_idxs[target] = [] <NEW_LINE> <DEDENT> class_idxs[target].append(idx) <NEW_LINE> <DEDENT> new_groups = {} <NEW_LINE> for c, c_idxs in class_idxs.items(): <NEW_LINE> <INDENT> new_groups[c] = AvalancheSubset(data, indices=c_idxs) <NEW_LINE> <DEDENT> return new_groups <NEW_LINE> <DEDENT> def _split_by_experience(self, strategy, data): <NEW_LINE> <INDENT> exp_id = strategy.clock.train_exp_counter + 1 <NEW_LINE> return {exp_id: data} <NEW_LINE> <DEDENT> def _split_by_task(self, data): <NEW_LINE> <INDENT> new_groups = {} <NEW_LINE> for task_id in data.task_set: <NEW_LINE> <INDENT> new_groups[task_id] = data.task_set[task_id] <NEW_LINE> <DEDENT> return new_groups | Stores samples for replay using a custom selection strategy and
grouping. | 6259904216aa5153ce4017bc |
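A minimal usage sketch for the buffer above, assuming it is the ParametricBuffer shipped with the Avalanche continual-learning library (avalanche.training.storage_policy) and that Avalanche's ReplayPlugin accepts a storage_policy argument; the max_size value and the "class" grouping are illustrative choices, not part of the original row.

# Sketch: plug the buffer into a replay-based strategy (assumed Avalanche API).
from avalanche.training.plugins import ReplayPlugin
from avalanche.training.storage_policy import (
    ParametricBuffer,
    RandomExemplarsSelectionStrategy,
)

# Keep at most 200 exemplars, balanced per class, selected at random.
storage_policy = ParametricBuffer(
    max_size=200,
    groupby="class",
    selection_strategy=RandomExemplarsSelectionStrategy(),
)

# The plugin calls storage_policy.update(strategy) after each training experience.
replay = ReplayPlugin(mem_size=200, storage_policy=storage_policy)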
class PostOwnStatus(permissions.BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if request.method in permissions.SAFE_METHODS: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return obj.user_profile.id == request.user.id | Allow users to update their own status. | 62599042d99f1b3c44d0696c |
class NewTaskSchema(colander.Schema): <NEW_LINE> <INDENT> name = colander.SchemaNode( colander.String(), title=u"Nom du document", description=u"Ce nom n'apparaît pas dans le document final", validator=colander.Length(max=255), default=deferred_default_name, missing="", ) <NEW_LINE> customer_id = customer_choice_node_factory( default=deferred_default_customer, query_func=_get_customers_options, ) <NEW_LINE> project_id = colander.SchemaNode( colander.Integer(), title=u"Projet dans lequel insérer le document", widget=deferred_project_widget, default=deferred_default_project ) <NEW_LINE> phase_id = colander.SchemaNode( colander.Integer(), title=u"Dossier dans lequel insérer le document", widget=deferred_phases_widget, default=deferred_default_phase, missing=colander.drop, ) <NEW_LINE> business_type_id = colander.SchemaNode( colander.Integer(), title=u"Type d'affaire", widget=deferred_business_type_widget, default=deferred_business_type_default, ) | schema used to initialize a new task | 6259904221a7993f00c67237 |
class Associator: <NEW_LINE> <INDENT> def __init__(self, profile_name, debug=False, vpc_id=None, vpc_region=None): <NEW_LINE> <INDENT> self.debug = debug <NEW_LINE> self.vpc_id = None <NEW_LINE> self.vpc_region = None <NEW_LINE> self.profile_name = profile_name <NEW_LINE> self.utility = Utils(debug=self.debug) <NEW_LINE> session = boto3.session.Session(profile_name=self.profile_name) <NEW_LINE> self.client = self.utility.get_client(session, 'route53') <NEW_LINE> if self.debug: <NEW_LINE> <INDENT> LOGGER.setLevel(logging.DEBUG) <NEW_LINE> <DEDENT> if vpc_id: <NEW_LINE> <INDENT> self.vpc_id = vpc_id <NEW_LINE> <DEDENT> if vpc_region: <NEW_LINE> <INDENT> self.vpc_region = vpc_region <NEW_LINE> <DEDENT> <DEDENT> def associate_zones_to_vpc(self, pretty=False): <NEW_LINE> <INDENT> zones = self.list_hosted_zones() <NEW_LINE> results = [] <NEW_LINE> for zone in zones: <NEW_LINE> <INDENT> LOGGER.debug("zone: %s %s %s", zone, str(zones[zone]), lineno()) <NEW_LINE> already_exists = False <NEW_LINE> for vpcs in zones[zone]: <NEW_LINE> <INDENT> if vpcs['VPCId'] == self.vpc_id: <NEW_LINE> <INDENT> already_exists = True <NEW_LINE> <DEDENT> <DEDENT> if not already_exists: <NEW_LINE> <INDENT> self.utility.associate_vpc_to_zone(self.client, zone, self.vpc_id, self.vpc_region) <NEW_LINE> results.append('Successfully associated ' 'zone: '+str(zone)+' with vpc: '+str(self.vpc_id)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> results.append('Not creating - an association ' 'already exists between ' 'zone: '+str(zone)+' and vpc: '+str(self.vpc_id)) <NEW_LINE> <DEDENT> <DEDENT> if pretty: <NEW_LINE> <INDENT> return self.utility.pretty(results) <NEW_LINE> <DEDENT> return results <NEW_LINE> <DEDENT> def list_hosted_zones(self, pretty=False): <NEW_LINE> <INDENT> results = self.utility.get_hosted_zones(self.client) <NEW_LINE> zones = {} <NEW_LINE> for item in results: <NEW_LINE> <INDENT> LOGGER.debug("id: %s %s", str(item['Id']), lineno()) <NEW_LINE> results = self.utility.get_zone_vpc_associations(self.client, item['Id']) <NEW_LINE> LOGGER.debug("zone results: %s %s",str(results), lineno()) <NEW_LINE> zones[item['Id']] = results <NEW_LINE> <DEDENT> if pretty: <NEW_LINE> <INDENT> return self.utility.pretty(zones) <NEW_LINE> <DEDENT> return zones | Creates an Associator object | 6259904291af0d3eaad3b0f3 |
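A minimal usage sketch of the class above, calling only the methods it defines; the AWS profile name, VPC id, and region are placeholder values.

# Placeholder profile/VPC values -- substitute real ones from your AWS account.
associator = Associator(
    profile_name="default",
    vpc_id="vpc-0123456789abcdef0",
    vpc_region="us-east-1",
    debug=False,
)

# Show every hosted zone and the VPCs it is already associated with.
print(associator.list_hosted_zones(pretty=True))

# Associate the VPC with any zone that is not yet linked to it.
for line in associator.associate_zones_to_vpc():
    print(line)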