code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class RestClientError(Exception):
    """Exception for ZFS REST API client errors.

    :param status: HTTP status code of the failed request.
    :param name: short machine-readable error name.
    :param message: human-readable detail; when omitted, the standard
        HTTP reason phrase for *status* is used instead.
    """

    def __init__(self, status, name="ERR_INTERNAL", message=None):
        super(RestClientError, self).__init__(message)
        self.code = status
        self.name = name
        # BUG FIX: the original unconditionally overwrote an explicitly
        # supplied message with the generic HTTP reason phrase; only fall
        # back to the reason phrase when no message was given.
        if message is None and status in http_client.responses:
            message = http_client.responses[status]
        self.msg = message

    def __str__(self):
        return "%d %s %s" % (self.code, self.name, self.msg)
Exception for ZFS REST API client errors.
62599061be8e80087fbc0752
class AlexaRefreshToken(HttpRunner):
    """HttpRunner testcase: exchange an Alexa refresh token for a new access token.

    Config variables:
        refresh_token (str): required, supplied by the caller.
        client_id (str): optional, defaults to ``alexa_settings.client_id``.

    Exported variables:
        access_token (str): extracted from the token endpoint response body.
    """

    config = (
        Config("Alexa 使用 refresh token 重新获取 access token")
        .variables(**{
            # Default client_id; callers may override it with their own variables.
            "client_id": alexa_settings.client_id
        })
    )

    teststeps = [
        Step(
            RunRequest("Alexa 使用 refresh token 重新获取 access token")
            # Resolve the API definition (its URL, etc.) from the ORM by name.
            .with_variables(**{"api": "${get_api_from_orm_by_name(alexaRefreshToken)}"})
            .post("${getattr($api, url)}")
            # Standard OAuth2 refresh_token grant payload.
            .with_data({
                "client_id": "$client_id",
                "grant_type": "refresh_token",
                "refresh_token": "$refresh_token"
            })
            # Export the renewed token for downstream steps.
            .extract()
            .with_jmespath("body.access_token", "access_token")
            .validate()
            .assert_equal("status_code", 200)
            .assert_equal("body.token_type", "bearer", "响应中 token_type 字段应该为 bearer")
        ),
    ]
Refresh access token assigned by vesync with refresh token. Config Variables: - refresh_token (str): required - client_id (str): optional, default to 'alexa_settings.client_id' Export Variables: - access_token (str)
62599061f548e778e596cc55
class ProviderTopology(JujuTopology):
    """Topology information for a MetricsEndpointProvider charm."""

    @property
    def scrape_identifier(self):
        """Return the unique Prometheus scrape-job identifier for this unit's charm."""
        # Compose the identifier from the topology fields, truncating the
        # model UUID to its first 7 characters.
        parts = [self.model, self.model_uuid[:7], self.application, self.charm_name]
        return "juju_{}_prometheus_scrape".format("_".join(parts))
Class for initializing topology information for MetricsEndpointProvider.
625990617cff6e4e811b7112
class ClassifierBaseSGD(BaseSGD, ClassifierMixin):
    """Base class for dense and sparse classification using SGD."""

    def __init__(self, loss="hinge", penalty='l2', alpha=0.0001, rho=0.85,
                 fit_intercept=True, n_iter=5, shuffle=False, verbose=0,
                 n_jobs=1):
        super(ClassifierBaseSGD, self).__init__(loss=loss, penalty=penalty,
                                                alpha=alpha, rho=rho,
                                                fit_intercept=fit_intercept,
                                                n_iter=n_iter,
                                                shuffle=shuffle,
                                                verbose=verbose)
        self.n_jobs = int(n_jobs)

    def _get_loss_function(self):
        """Set ``self.loss_function`` from the ``self.loss`` name.

        :raises ValueError: if the loss name is not one of
            "hinge", "log", "modified_huber".
        """
        loss_functions = {
            "hinge": Hinge(),
            "log": Log(),
            "modified_huber": ModifiedHuber(),
        }
        try:
            self.loss_function = loss_functions[self.loss]
        except KeyError:
            raise ValueError("The loss %s is not supported. " % self.loss)

    def _get_class_weight(self, class_weight, classes, y):
        """Return a C-ordered float64 per-class weight vector.

        ``{}`` gives uniform weights; ``'auto'`` weights classes by inverse
        frequency (normalized so the weights sum to n_classes); otherwise
        *class_weight* is treated as a mapping with 1.0 as the default.
        """
        if class_weight == {}:
            weight = np.ones(classes.shape[0], dtype=np.float64, order='C')
        elif class_weight == 'auto':
            weight = np.array([1.0 / np.sum(y == i) for i in classes],
                              dtype=np.float64, order='C')
            weight *= classes.shape[0] / np.sum(weight)
        else:
            weight = np.zeros(classes.shape[0], dtype=np.float64, order='C')
            for i, c in enumerate(classes):
                # NOTE(review): looks up by position ``i`` rather than the
                # class label ``c`` — confirm this is the intended key.
                weight[i] = class_weight.get(i, 1.0)
        return weight

    def predict(self, X):
        """Predict class labels for the samples in X."""
        scores = self.decision_function(X)
        if self.classes.shape[0] == 2:
            # BUG FIX: ``np.int`` was removed in NumPy 1.20+; the builtin
            # ``int`` is the documented replacement with identical results.
            indices = np.array(scores > 0, dtype=int)
        else:
            indices = scores.argmax(axis=1)
        return self.classes[np.ravel(indices)]

    def predict_proba(self, X):
        """Return P(positive class) — binary classification with log loss only.

        :raises NotImplementedError: for any other loss or class count.
        """
        if (isinstance(self.loss_function, Log) and
                self.classes.shape[0] == 2):
            return 1.0 / (1.0 + np.exp(-self.decision_function(X)))
        else:
            raise NotImplementedError("%s loss does not provide "
                                      "this functionality" % self.loss)
Base class for dense and sparse classification using SGD.
6259906155399d3f05627bec
class ProductionConfig(BaseConfig):
    """Production configuration.

    Class-level defaults are placeholders; if ``instance/production.cfg``
    exists, its ``[keys]`` section overrides the secrets below at import time.
    """

    # NOTE(review): placeholder secret — must be overridden via the instance
    # config file in any real deployment.
    SECRET_KEY = 'my_precious'
    DEBUG = False
    SQLALCHEMY_DATABASE_URI = 'postgresql://username:password@host:port/database'
    DEBUG_TB_ENABLED = False

    config_path = os.path.join(basedir, 'instance', 'production.cfg')
    if os.path.isfile(config_path):
        config = configparser.ConfigParser()
        with open(config_path) as configfile:
            # BUG FIX: readfp() was deprecated and removed in Python 3.12;
            # read_file() is the drop-in replacement.
            config.read_file(configfile)
        SECRET_KEY = config.get('keys', 'SECRET_KEY')
        SECURITY_PASSWORD_SALT = config.get('keys', 'SECURITY_PASSWORD_SALT')
Production configuration.
625990613617ad0b5ee0781b
class LoadBalancer(resource.Resource):
    """A resource to link a neutron load-balancer pool with Nova servers.

    Each server listed in ``members`` is registered as a member of the
    given pool; the created member IDs are stored in resource data keyed
    by server ID so that updates and deletes can locate them later.
    """

    PROPERTIES = (
        POOL_ID, PROTOCOL_PORT, MEMBERS,
    ) = (
        'pool_id', 'protocol_port', 'members',
    )

    properties_schema = {
        POOL_ID: properties.Schema(
            properties.Schema.STRING,
            _('The ID of the load balancing pool.'),
            required=True,
            update_allowed=True
        ),
        PROTOCOL_PORT: properties.Schema(
            properties.Schema.INTEGER,
            _('Port number on which the servers are running on the members.'),
            required=True
        ),
        MEMBERS: properties.Schema(
            properties.Schema.LIST,
            _('The list of Nova server IDs load balanced.'),
            default=[],
            update_allowed=True
        ),
    }

    def handle_create(self):
        """Register every configured server as a member of the pool."""
        pool = self.properties[self.POOL_ID]
        client = self.neutron()
        nova_client = self.nova()
        protocol_port = self.properties[self.PROTOCOL_PORT]
        for member in self.properties.get(self.MEMBERS):
            # Resolve the Nova server ID to an IP address for the member record.
            address = nova_utils.server_to_ipaddress(nova_client, member)
            lb_member = client.create_member({
                'member': {
                    'pool_id': pool,
                    'address': address,
                    'protocol_port': protocol_port}})['member']
            # Persist the neutron member id keyed by the server id.
            self.data_set(member, lb_member['id'])

    def handle_update(self, json_snippet, tmpl_diff, prop_diff):
        """Reconcile pool membership when the MEMBERS property changes."""
        if self.MEMBERS in prop_diff:
            members = set(prop_diff[self.MEMBERS])
            rd_members = self.data()
            old_members = set(rd_members.keys())
            client = self.neutron()
            # Remove members no longer configured; a 404 means the member is
            # already gone and is deliberately ignored.
            for member in old_members - members:
                member_id = rd_members[member]
                try:
                    client.delete_member(member_id)
                except NeutronClientException as ex:
                    if ex.status_code != 404:
                        raise ex
                self.data_delete(member)
            pool = self.properties[self.POOL_ID]
            nova_client = self.nova()
            protocol_port = self.properties[self.PROTOCOL_PORT]
            # Add newly configured members, mirroring handle_create().
            for member in members - old_members:
                address = nova_utils.server_to_ipaddress(nova_client, member)
                lb_member = client.create_member({
                    'member': {
                        'pool_id': pool,
                        'address': address,
                        'protocol_port': protocol_port}})['member']
                self.data_set(member, lb_member['id'])

    def handle_delete(self):
        """Delete all pool members, tolerating already-deleted (404) ones."""
        client = self.neutron()
        for member in self.properties.get(self.MEMBERS):
            member_id = self.data().get(member)
            try:
                client.delete_member(member_id)
            except NeutronClientException as ex:
                if ex.status_code != 404:
                    raise ex
            self.data_delete(member)
A resource to link a neutron pool with servers.
6259906145492302aabfdba7
class Plugin(object):
    """Represents the base methods a Plugin must implement."""

    def __init__(self, name):
        self.name = name
        # PID of the most recently spawned subprocess, if any.
        self.pid = None

    def run(self, url, checks, output, auth):
        """Execute the plugin; subclasses must override this.

        :raises NotImplementedError: always, in the base class.
        """
        # BUG FIX: the original *returned* the exception instead of raising
        # it, so calling an unimplemented plugin silently yielded an
        # exception object rather than failing.
        raise NotImplementedError("Method has not been implemented")

    def __exec_process__(self, cmd, good_ret=0):
        """Run *cmd* and return its decoded stdout.

        :raises ProcessException: if the return code differs from *good_ret*.
        """
        logging.info("Attempting to exec {0}".format(cmd[0]))
        proc = Popen(cmd, stdout=PIPE, stderr=PIPE)
        self.pid = proc.pid
        stdout, stderr = proc.communicate()
        returncode = proc.returncode
        if returncode != good_ret:
            logging.error("Failed to execute {0} successfully".format(cmd[0]))
            msg = "Proc returned {0} when command {1} was used. Message is {2}"
            msg = msg.format(returncode, ' '.join(cmd), str(stderr, 'utf-8'))
            raise ProcessException(msg)
        else:
            logging.info("Successfully executed {0}".format(cmd[0]))
            return stdout.decode('utf-8')

    def kill(self):
        """Terminate the spawned process (group) with SIGTERM."""
        if self.pid is not None:
            # NOTE(review): killpg expects a process *group* id; this assumes
            # the child leads its own group — confirm before relying on it.
            os.killpg(self.pid, signal.SIGTERM)
        else:
            raise Exception("PID is not set")
Represents the base methods a Plugin must implement.
6259906167a9b606de547608
@route(r"/")
class Index(RequestHandler):
    """Render the project's README.md (converted to HTML) as the index page."""

    def get(self):
        path = os.path.join(self.settings['root_path'], 'README.md')
        html = ''
        if os.path.isfile(path):
            # FIX: use a context manager so the file is closed even if
            # read() raises (the original leaked the handle on error).
            with open(path) as md_file:
                md = md_file.read()
            html = markdown(md)
        self.render('default/index.html', html=html)
喵星人 wiki
625990614f88993c371f1085
class JSONStorage(Storage):
    """Store the data in a JSON file."""

    def __init__(self, path, create_dirs=False, **kwargs):
        """Open (creating if necessary) the JSON file at *path*.

        Extra keyword arguments are forwarded to ``json.dumps`` on write.
        """
        super(JSONStorage, self).__init__()
        touch(path, create_dirs=create_dirs)  # ensure the file exists
        self.kwargs = kwargs
        self._handle = open(path, 'r+')

    def close(self):
        """Release the underlying file handle."""
        self._handle.close()

    def read(self):
        """Return the deserialized file contents, or ``None`` if the file is empty."""
        # Seek to the end first to learn the current file size.
        self._handle.seek(0, os.SEEK_END)
        if not self._handle.tell():
            return None
        self._handle.seek(0)
        return json.load(self._handle)

    def write(self, data):
        """Serialize *data* and overwrite the file contents with it."""
        self._handle.seek(0)
        self._handle.write(json.dumps(data, **self.kwargs))
        self._handle.flush()
        # Drop any trailing bytes left over from a previously longer document.
        self._handle.truncate()
Store the data in a JSON file.
625990618a43f66fc4bf385c
class NetAppsSecureDyn(LEDMTree):
    """LEDM tree wrapper for /DevMgmt/NetAppsSecureDyn.xml."""

    def __init__(self, data=ledm_templates.NET_APPS_SECURE_DYN):
        super().__init__(data)

    @property
    def state(self):
        """The tree's "State" value ("enabled" or "disabled")."""
        return self.get("State")

    @state.setter
    def state(self, value):
        """Set "State", rejecting anything but "enabled"/"disabled"."""
        expected_values = ["enabled", "disabled"]
        if value not in expected_values:
            # BUG FIX: the original message contained the invalid escape
            # "\E" where a newline ("\n") was clearly intended.
            raise ValueError("state incorrect settings\nExpected: {}\nReceived: {}".format(expected_values, value))
        self.set("State", value)
NetAppsSecureDyn tree /DevMgmt/NetAppsSecureDyn.xml
6259906156b00c62f0fb3f98
class WizardGenerarContabilidad(models.TransientModel):
    """Wizard that generates accounting entries for the selected savings records.

    Iterates the savings selected in the action context and calls
    ``generar_contabilidad()`` on each, rejecting records that are not in
    an approved/disbursed state.
    """

    _name = 'saving.wizard.generate.accounting'
    _description = 'Asistente Generar Contabilidad'

    @api.multi
    def generate_accounting(self):
        """Generate accounting for the savings in ``active_ids``.

        :raises UserError: if any selected saving is not in state
            'aprobado' or 'desembolso'.
        :return: action dict closing the wizard window.
        """
        context = dict(self._context or {})
        active_ids = context.get('active_ids', []) or []
        for record in self.env['saving.management.saving'].browse(active_ids):
            # Only approved ('aprobado') or disbursed ('desembolso') savings
            # may be posted to accounting.
            if record.state not in ('aprobado', 'desembolso'):
                raise UserError(_("Selected saving(s) cannot be confirmed."))
            record.generar_contabilidad()
        return {'type': 'ir.actions.act_window_close'}
DCLS
6259906116aa5153ce401ba9
class ActionParameterType(models.Model):
    """Defines the type of parameter the ``ActionParameter`` model stores.

    e.g. "amount" for an ``Action`` called "payment sent".
    """

    # Human-readable type name, e.g. "amount".
    name = models.CharField(
        max_length=64,
        verbose_name=_('Action parameter type'),
    )

    def __unicode__(self):
        # Python 2-style string representation (pre-Django-2.x model).
        return self.name
Defines the type of parameter the ``ActionParameter`` model stores. e.g. "amount" for an ``Action`` called "payment sent".
625990618e7ae83300eea75a
class KB(threading.Thread):
    """Threaded keyboard-event detection/handling thread.

    Parameters:
        stopper: threading event used to signal other threads to stop.
        q: queue that printable key presses are pushed onto.
    """

    def __init__(self, stopper, q):
        threading.Thread.__init__(self)
        self.q = q
        self.stopper = stopper
        self.listener = keyboard.Listener(on_press=self.on_press)

    def on_press(self, key):
        """Queue printable keys; on ESC, signal stop and end the listener."""
        try:
            if key == keyboard.Key.esc:
                self.stopper.set()
                self.listener.stop()
                print("ESC pressed")
                # MyException propagates (only AttributeError is caught below).
                raise MyException(key)
            self.q.put(key.char)
        except AttributeError:
            # Non-printable keys carry no ``.char`` attribute.
            print('special key {0} pressed'.format(key))

    def run(self):
        # The pynput listener runs in its own thread once started.
        self.listener.start()
Threaded Keyboard Event detection handling thread. Parameters: (1) stopper : threaded event for stopping threads (2) q : the queue to place the keyboard stuff too
62599061fff4ab517ebceef4
class TwitterTimelineExtractor(TwitterExtractor):
    """Extractor for Tweets from a user's timeline."""

    subcategory = "timeline"
    # Matches profile URLs (anything except /search) in group 1, plus the
    # /i/user/<id> and /intent/user?user_id=<id> forms with the numeric
    # user ID in group 2.
    pattern = (BASE_PATTERN + r"/(?!search)(?:([^/?#]+)/?(?:$|[?#])"
               r"|i(?:/user/|ntent/user\?user_id=)(\d+))")
    test = (
        ("https://twitter.com/supernaturepics", {
            "range": "1-40",
            "url": "c570ac1aae38ed1463be726cc46f31cac3d82a40",
        }),
        ("https://twitter.com/realDonaldTrump", {
            "exception": exception.NotFoundError,
        }),
        ("https://mobile.twitter.com/supernaturepics?p=i"),
        ("https://www.twitter.com/id:2976459548"),
        ("https://twitter.com/i/user/2976459548"),
        ("https://twitter.com/intent/user?user_id=2976459548"),
    )

    def __init__(self, match):
        TwitterExtractor.__init__(self, match)
        # When the URL carried a numeric user ID, address the user by ID.
        user_id = match.group(2)
        if user_id:
            self.user = "id:" + user_id

    def tweets(self):
        """Return the user's timeline tweets from the API."""
        return self.api.user_tweets(self.user)
Extractor for Tweets from a user's timeline
625990617d43ff2487427f76
class QuotaRequestOneResourceSubmitResponse(msrest.serialization.Model):
    """Response for a quota submission request.

    All attributes except ``properties`` are read-only: they are populated
    by the server and ignored when sending a request.
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'provisioning_state': {'readonly': True},
        'message': {'readonly': True},
        'request_submit_time': {'readonly': True},
    }

    # Maps attribute names to their wire keys and serialization types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'message': {'key': 'properties.message', 'type': 'str'},
        'request_submit_time': {'key': 'properties.requestSubmitTime', 'type': 'iso-8601'},
        'properties': {'key': 'properties.properties.properties', 'type': 'QuotaProperties'},
    }

    def __init__(
        self,
        *,
        properties: Optional["QuotaProperties"] = None,
        **kwargs
    ):
        """:param properties: Quota properties for the resource."""
        super(QuotaRequestOneResourceSubmitResponse, self).__init__(**kwargs)
        # Server-populated fields start out as None.
        self.id = None
        self.name = None
        self.type = None
        self.provisioning_state = None
        self.message = None
        self.request_submit_time = None
        self.properties = properties
Response for the quota submission request. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: The quota request ID. :vartype id: str :ivar name: The name of the quota request. :vartype name: str :ivar type: Type of resource. "Microsoft.Capacity/ServiceLimits". :vartype type: str :ivar provisioning_state: The quota request status. Possible values include: "Accepted", "Invalid", "Succeeded", "Failed", "InProgress". :vartype provisioning_state: str or ~azure.mgmt.reservations.models.QuotaRequestState :ivar message: User friendly status message. :vartype message: str :ivar request_submit_time: The time when the quota request was submitted using format: yyyy-MM-ddTHH:mm:ssZ as specified by the ISO 8601 standard. :vartype request_submit_time: ~datetime.datetime :param properties: Quota properties for the resource. :type properties: ~azure.mgmt.reservations.models.QuotaProperties
62599061627d3e7fe0e08558
class FlyBehavior(abc.ABC):
    """Abstract strategy interface for fly behaviors.

    BUG FIX: the original declared ``__metaclass__ = abc.ABCMeta``, which
    is the Python 2 spelling and has no effect on Python 3, so the class
    was not actually abstract. Deriving from ``abc.ABC`` restores the
    intent: subclasses must implement ``fly()``.
    """

    @abc.abstractmethod
    def fly(self):
        """Perform the flying behavior; must be overridden."""
        return
FlyBehavior abstract base class for fly-behavior strategies; takes no parameters.
6259906144b2445a339b74c7
class BilinearResizeLayer(Layer):
    """The implementation of ``BilinearResizeLayer``.

    Resizes the input with bilinear interpolation, either to an explicit
    ``shape`` or by the ``fx``/``fy`` scale factors of the resize_param.
    """

    def __init__(self, LayerParameter):
        super(BilinearResizeLayer, self).__init__(LayerParameter)
        param = LayerParameter.resize_param
        # Explicit output size, only when a shape was given in the prototxt.
        dsize = [int(dim) for dim in param.shape.dim] if param.HasField('shape') else None
        self._param = {'dsize': dsize,
                       'fx': float(param.fx),
                       'fy': float(param.fy),
                       'data_format': 'NCHW'}

    def Setup(self, bottom):
        super(BilinearResizeLayer, self).Setup(bottom)
        input = bottom[0] if isinstance(bottom, list) else bottom
        # With a second bottom blob, resize to that blob's spatial size
        # (dims 2 and 3 of an NCHW shape).
        if isinstance(bottom, list) and len(bottom) > 1:
            dshape = ops.Shape(bottom[1])
            self._param['dsize'] = (dshape[2], dshape[3])
        return ops.BilinearResize(input, **self._param)
The implementation of ``BilinearResizeLayer``. Parameters ---------- shape : caffe_pb2. BlobShape The output shape. Refer `ResizeParameter.shape`_. fx : float The scale factor of height. Refer `ResizeParameter.fx`_. fy : float The scale factor of width. Refer `ResizeParameter.fy`_.
62599061097d151d1a2c273d
class Game():
    """Abstract base class managing basic game concepts.

    Owns the pygame window, a frame counter and a list of bouncing balls,
    and runs the input -> simulate -> paint cycle until the user quits.
    """

    def __init__(self, name, width, height):
        self.name = name
        self.width = width
        self.height = height
        self.running = True
        self.frame = 0
        pygame.display.init()
        pygame.font.init()
        pygame.display.set_caption(name)
        self.screen = pygame.display.set_mode([width, height])
        # Start with a single demo ball.
        self.balls = [BouncingBall(100, 50, 30, 1, 2)]

    def runGame(self):
        """Main loop: poll events, update the world, and repaint until quit."""
        while self.running:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.running = False
            self.handle_input()
            self.update_simulation()
            self.paint()
        pygame.quit()

    def paint(self):
        """Clear to black, draw all objects, and flip the display buffer."""
        self.screen.fill((0, 0, 0))
        self.render_objects()
        pygame.display.flip()

    def handle_input(self):
        """Stop the game when the 'q' key is held down."""
        if pygame.key.get_pressed()[K_q]:
            print("User initiated a QUIT")
            self.running = False

    def update_simulation(self):
        """Advance each ball one step within the window bounds."""
        self.frame += 1
        for b in self.balls:
            b.update(self.width, self.height)

    def render_objects(self):
        """Draw each ball onto the screen surface."""
        for b in self.balls:
            b.draw(self.screen)
Game is an abstract base class to manage basic game concepts
625990613cc13d1c6d466e10
class CmdPlayers(MuxCommand):
    """list all registered players

    Usage:
      @players [nr]

    Lists statistics about the Players registered with the game. It will
    list the <nr> amount of latest registered players. If not given,
    <nr> defaults to 10.
    """

    key = "@players"
    aliases = ["@listplayers"]
    locks = "cmd:perm(listplayers) or perm(Wizards)"
    help_category = "System"

    def func(self):
        """Build the statistics tables and send them to the caller."""
        caller = self.caller
        # Optional numeric argument limits how many recent players to show.
        if self.args and self.args.isdigit():
            nlim = int(self.args)
        else:
            nlim = 10
        nplayers = PlayerDB.objects.count()
        # Per-typeclass counts, rendered alongside percentages of the total.
        dbtotals = PlayerDB.objects.object_totals()
        typetable = EvTable("{wtypeclass{n", "{wcount{n", "{w%%{n", border="cells", align="l")
        for path, count in dbtotals.items():
            typetable.add_row(path, count, "%.2f" % ((float(count) / nplayers) * 100))
        # The nlim most recently created players (slice keeps the newest).
        plyrs = PlayerDB.objects.all().order_by("db_date_created")[max(0, nplayers - nlim):]
        latesttable = EvTable("{wcreated{n", "{wdbref{n", "{wname{n", "{wtypeclass{n", border="cells", align="l")
        for ply in plyrs:
            latesttable.add_row(utils.datetime_format(ply.date_created), ply.dbref, ply.key, ply.path)
        string = "\n{wPlayer typeclass distribution:{n\n%s" % typetable
        string += "\n{wLast %s Players created:{n\n%s" % (min(nplayers, nlim), latesttable)
        caller.msg(string)
list all registered players Usage: @players [nr] Lists statistics about the Players registered with the game. It will list the <nr> amount of latest registered players If not given, <nr> defaults to 10.
625990614e4d562566373ad5
class GuessEval(messages.Message):
    """GuessEval -- evaluation form message."""

    # Textual evaluation of the latest guess.
    guessEval = messages.StringField(1)
    # Presumably the serialized current game state — confirm against producer.
    gameState = messages.StringField(2)
    firstPlayerHistory = messages.StringField(3)
    secondPlayerHistory = messages.StringField(4)
GuessEval -- evaluation form message
62599061a219f33f346c7ed5
class OneCardMonthlyTransactionsList(GenericAPIView):
    """Get one month of OneCard transactions for a user."""

    allowed_methods = ('GET',)
    authentication_classes = (OneCardTokenAuthentication,)
    serializer_class = OneCardMonthlyTransactionsSerializer

    def get(self, request, username, year, month, format=None):
        """Return the month's transactions, or a 400 on a bad year/month."""
        if not self.ensure_valid_number(year, month):
            return self.get_response_with_params_errors()
        data = core.OneCardTransactions(username).get_monthly(year, month)
        serializer = BaseOneCardSerializer(data)
        return Response(serializer.data)

    def ensure_valid_number(self, year, month):
        """Return True iff *year* >= 2015 and *month* is within 1..12."""
        try:
            y, m = int(year), int(month)
        except ValueError:
            return False
        return y >= 2015 and 1 <= m <= 12

    def get_response_with_params_errors(self):
        """Uniform 400 response for invalid year/month parameters."""
        return Response({
            'code': CODE_PARAMS_ERROR,
            'message': MESSAGE_PARAMS_ERROR,
            'result': None,
        }, status=status.HTTP_400_BAD_REQUEST)
Get one-month of transactions of OneCard
6259906124f1403a92686435
class BzrBranchFormat4(BranchFormat):
    """Bzr branch format 4.

    This format has:
     - a revision-history file
     - a branch-lock lock file (shared with the bzrdir)

    It does not support binding.
    """

    def initialize(self, a_controldir, name=None, repository=None,
                   append_revisions_only=None):
        """Create a format-4 branch in *a_controldir*.

        :raises errors.UpgradeRequired: this old format cannot restrict
            history to appended revisions.
        :raises errors.IncompatibleFormat: the bzrdir format is unsupported.
        """
        if append_revisions_only:
            raise errors.UpgradeRequired(a_controldir.user_url)
        if repository is not None:
            raise NotImplementedError(
                "initialize(repository=<not None>) on %r" % (self,))
        # BUG FIX: the original tested `not [<list comprehension>]`; a
        # non-empty list is always truthy, so IncompatibleFormat was never
        # raised. any() performs the intended "at least one compatible
        # format" check.
        if not any(isinstance(a_controldir._format, format)
                   for format in self._compatible_bzrdirs):
            raise errors.IncompatibleFormat(self, a_controldir._format)
        utf8_files = [('revision-history', b''),
                      ('branch-name', b''),
                      ]
        mutter('creating branch %r in %s', self, a_controldir.user_url)
        branch_transport = a_controldir.get_branch_transport(self, name=name)
        control_files = lockable_files.LockableFiles(
            branch_transport, 'branch-lock', lockable_files.TransportLock)
        control_files.create_lock()
        try:
            control_files.lock_write()
        except errors.LockContention:
            # Proceed without the lock; remember not to unlock later.
            lock_taken = False
        else:
            lock_taken = True
        try:
            for (filename, content) in utf8_files:
                branch_transport.put_bytes(
                    filename, content, mode=a_controldir._get_file_mode())
        finally:
            if lock_taken:
                control_files.unlock()
        branch = self.open(a_controldir, name, _found=True,
                           found_repository=None)
        self._run_post_branch_init_hooks(a_controldir, name, branch)
        return branch

    def __init__(self):
        super(BzrBranchFormat4, self).__init__()
        # Imported here to avoid a circular import with bzrdir.
        from .bzrdir import (
            BzrDirFormat4,
            BzrDirFormat5,
            BzrDirFormat6,
            )
        self._matchingcontroldir = BzrDirFormat6()
        self._compatible_bzrdirs = [BzrDirFormat4, BzrDirFormat5,
                                    BzrDirFormat6]

    def network_name(self):
        """The network name is the matching control dir's format string."""
        return self._matchingcontroldir.get_format_string()

    def get_format_description(self):
        return "Branch format 4"

    def open(self, a_controldir, name=None, _found=False,
             ignore_fallbacks=False, found_repository=None,
             possible_transports=None):
        """Open a BzrBranch4 from *a_controldir*.

        :param _found: must be True; format probing is not implemented here.
        :raises errors.NoColocatedBranchSupport: on a non-default branch name.
        """
        if name is None:
            name = a_controldir._get_selected_branch()
        if name != "":
            raise errors.NoColocatedBranchSupport(self)
        if not _found:
            raise NotImplementedError
        if found_repository is None:
            found_repository = a_controldir.open_repository()
        return BzrBranch4(_format=self,
                          _control_files=a_controldir._control_files,
                          a_controldir=a_controldir,
                          name=name,
                          _repository=found_repository,
                          possible_transports=possible_transports)

    def __str__(self):
        return "Bazaar-NG branch format 4"

    def supports_leaving_lock(self):
        return False

    supports_reference_locations = False
Bzr branch format 4. This format has: - a revision-history file. - a branch-lock lock file [ to be shared with the bzrdir ] It does not support binding.
625990617d847024c075daa3
class ReportDesign(Element):
    """A ReportDesign defines a report available in the SMC.

    Provides access to generating reports as SMC tasks and to retrieving
    the generated report files.
    """

    typeof = "report_design"

    def generate(
        self, start_time=0, end_time=0, senders=None, wait_for_finish=False,
        timeout=5, **kw
    ):
        """Launch report generation as an SMC task.

        :param start_time: optional period start; only applied together
            with *end_time*.
        :param end_time: optional period end.
        :param senders: optional sender elements to restrict the report to.
        :param bool wait_for_finish: poll until the task completes.
        :param int timeout: poll timeout in seconds.
        :return: result of ``Task.execute``.
        """
        if start_time and end_time:
            kw.setdefault("params", {}).update({"start_time": start_time, "end_time": end_time})
        if senders:
            # Resolve element objects/names into API references.
            kw.setdefault("json", {}).update({"senders": element_resolver(senders)})
        return Task.execute(
            self, "generate", timeout=timeout, wait_for_finish=wait_for_finish, **kw
        )

    @property
    def report_files(self):
        """List of generated Report files for this design."""
        return [Report(**report) for report in self.make_request(resource="report_files")]
A ReportDesign defines a report available in the SMC. This class provides access to generating these reports and exporting into a format supported by the SMC. Example of generating a report, and providing a callback once the report is complete which exports the report:: >>> def export_my_report(task): ... if task.resource: ... report = task.resource[0] ... print("My report reference: %s" % report) ... report.export_pdf('/Users/foo/myfile.pdf') ... >>> >>> report = ReportDesign('Application and Web Security') >>> poller = report.generate(wait_for_finish=True) >>> poller.add_done_callback(export_my_report) >>> while not poller.done(): ... poller.wait(3) ... My report reference: Report(name=Application and Web Security #1515375369483)
6259906145492302aabfdba9
class CobraAdminUser(db.Model):
    """Admin user account.

    :role: 1-super admin, 2-admin, 3-rule admin
    """

    __tablename__ = 'user'

    id = db.Column(INTEGER(unsigned=True), primary_key=True, autoincrement=True, nullable=None)
    username = db.Column(db.String(64), nullable=True, default=None, unique=True)
    # Stored hashed via generate_password_hash(), never in plaintext.
    password = db.Column(db.String(256), nullable=True, default=None)
    role = db.Column(TINYINT(2), nullable=True, default=None)
    last_login_time = db.Column(db.DateTime, nullable=True, default=None)
    last_login_ip = db.Column(db.String(16), nullable=True, default=None)
    created_at = db.Column(db.DateTime, nullable=True, default=None)
    updated_at = db.Column(db.DateTime, nullable=True, default=None)

    def __init__(self, username, password, role, last_login_time, last_login_ip, created_at, updated_at):
        self.username = username
        # Hash the plaintext password before storing it.
        self.generate_password(password)
        self.role = role
        self.last_login_time = last_login_time
        self.last_login_ip = last_login_ip
        self.created_at = created_at
        self.updated_at = updated_at

    def __repr__(self):
        return "<CobraAdminUser %r-%r>" % (self.username, self.role)

    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password, password)

    def generate_password(self, password):
        """Hash *password* and store the hash on the instance."""
        self.password = generate_password_hash(password)
:role: 1-super admin, 2-admin, 3-rule admin
6259906191f36d47f22319f6
class InputHandler(object):
    """Console-command dispatcher.

    ``_commands_callbacks`` maps each command string to the list of
    callbacks registered for it, so a command can have multiple callbacks.
    """

    _commands_callbacks = {}

    @classmethod
    def register_command(cls, command, callback):
        """Register *callback* to run whenever *command* is entered."""
        cls._commands_callbacks.setdefault(command, []).append(callback)

    @classmethod
    def init(cls):
        """Start the background thread that listens for console input."""
        ThreadHandler.create_thread(InputHandler._listen)

    @classmethod
    def _listen(cls):
        # Block on stdin, then dispatch whatever the user typed.
        return cls.handle_input(input())

    @classmethod
    def handle_input(cls, command):
        """Dispatch *command*; print help and return False when unknown."""
        if not command.strip():
            return True
        if cls.parse_command(command) is False:
            out('Unknown command:', command)
            cmdlist = ', '.join(cls._commands_callbacks.keys())
            out('Try using one of these:\n\t', cmdlist)
            return False
        return True

    @classmethod
    def parse_command(cls, text):
        """Run callbacks of the first registered command prefixing *text*."""
        for command in cls._commands_callbacks:
            if text[:len(command)].lower() == command:
                cls._run_all_cmds(command, text)
                return True
        return False

    @classmethod
    def _run_all_cmds(cls, command, text):
        # Fan the raw input text out to every callback for this command.
        for callback in cls._commands_callbacks[command]:
            callback(text)
_commands_callbacks is where register_command adds the callback for a certain command. The structure is a dictionary with key=command and value is a list to support multiple callbacks per command.
625990612ae34c7f260ac7b5
class LoadBalancerHealthCheck:
    """Model of a NIFCLOUD LoadBalancer health check."""

    def __init__(self, target='ICMP', interval=5, unhealthy_threshold=1):
        self.target = target
        self.interval = interval
        self.unhealthy_threshold = unhealthy_threshold

    def __eq__(self, other):
        # Only exact same-type instances with identical attributes are equal.
        if other is None or type(self) != type(other):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self.__eq__(other)

    def parse_describe(self, res):
        """Populate this object from a Describe* XML response dict.

        *res* carries the parsed body under 'xml_body' and the namespace
        mapping under 'xml_namespace'; missing HealthCheck leaves the
        current values untouched.
        """
        ns = res['xml_namespace']
        health_check = res['xml_body'].find('.//{{{nc}}}HealthCheck'.format(**ns))
        if health_check is None:
            return
        self.target = health_check.find('.//{{{nc}}}Target'.format(**ns)).text
        self.interval = int(
            health_check.find('.//{{{nc}}}Interval'.format(**ns)).text)
        self.unhealthy_threshold = int(
            health_check.find('.//{{{nc}}}UnhealthyThreshold'.format(**ns)).text)
Model of NIFCLOUD LoadBalancer HealthCheck
62599061379a373c97d9a6f3
class Xsd11AnyAttribute(XsdAnyAttribute): <NEW_LINE> <INDENT> def _parse(self) -> None: <NEW_LINE> <INDENT> super(Xsd11AnyAttribute, self)._parse() <NEW_LINE> self._parse_not_constraints() <NEW_LINE> <DEDENT> def is_matching(self, name: Optional[str], default_namespace: Optional[str] = None, **kwargs: Any) -> bool: <NEW_LINE> <INDENT> if name is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif not name or name[0] == '{': <NEW_LINE> <INDENT> namespace = get_namespace(name) <NEW_LINE> <DEDENT> elif not default_namespace: <NEW_LINE> <INDENT> namespace = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name = '{%s}%s' % (default_namespace, name) <NEW_LINE> namespace = default_namespace <NEW_LINE> <DEDENT> if '##defined' in self.not_qname and name in self.maps.attributes: <NEW_LINE> <INDENT> xsd_attribute = self.maps.attributes[name] <NEW_LINE> if isinstance(xsd_attribute, tuple): <NEW_LINE> <INDENT> if xsd_attribute[1] is self.schema: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> elif xsd_attribute.schema is self.schema: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return name not in self.not_qname and self.is_namespace_allowed(namespace)
Class for XSD 1.1 *anyAttribute* declarations. .. <anyAttribute id = ID namespace = ((##any | ##other) | List of (anyURI | (##targetNamespace | ##local)) ) notNamespace = List of (anyURI | (##targetNamespace | ##local)) notQName = List of (QName | ##defined) processContents = (lax | skip | strict) : strict {any attributes with non-schema namespace . . .}> Content: (annotation?) </anyAttribute>
62599061a17c0f6771d5d70c
class Brick(GRectangle): <NEW_LINE> <INDENT> def collideb(self, ball): <NEW_LINE> <INDENT> if self.contains(ball.left, ball.bottom): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.contains(ball.right, ball.bottom): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.contains(ball.left, ball.top): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.contains(ball.right, ball.top): <NEW_LINE> <INDENT> return True
An instance is a brick. This class contains a method to detect collision with the ball. You may wish to add more features to this class. The attributes of this class are those inherited from GRectangle. LIST MORE ATTRIBUTES (AND THEIR INVARIANTS) HERE IF NECESSARY
62599061627d3e7fe0e0855a
class CountSearch(ModelView): <NEW_LINE> <INDENT> __name__ = 'stock.inventory.count.search' <NEW_LINE> search = fields.Reference( "Search", [ ('product.product', "Product"), ], required=True, domain={ 'product.product': [ ('type', '=', 'goods'), ('consumable', '=', False), ], }, help="The item that's counted.") <NEW_LINE> @classmethod <NEW_LINE> def default_search(cls): <NEW_LINE> <INDENT> return 'product.product,-1'
Stock Inventory Count
62599061435de62698e9d4d7
class Plugin(plugin.AdminPlugin, plugin.OnReadyPlugin): <NEW_LINE> <INDENT> def __init__(self, *args, always_watch_messages=list(), **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, always_watch_messages=always_watch_messages, **kwargs) <NEW_LINE> self.always_watch_messages=always_watch_messages <NEW_LINE> <DEDENT> async def action(self): <NEW_LINE> <INDENT> to_del = list() <NEW_LINE> for ui in self.public_namespace.ui_messages: <NEW_LINE> <INDENT> ui_inst_dict = self.public_namespace.ui_messages[ui] <NEW_LINE> ui_json = self.public_namespace.ui_jsons[self.public_namespace.ui_messages[ui][UI_NAME]] <NEW_LINE> if ui_json[LIFESPAN]>=0 and (time.time()-ui_inst_dict[LAST_UPDATED])>ui_json[LIFESPAN]: <NEW_LINE> <INDENT> if ui_json[ONDELETE]: <NEW_LINE> <INDENT> result, is_success = ui_helper.do_eval(ui_json[ONDELETE], self.public_namespace.ui_messages[ui], ui_json) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> await self.bot.delete_message(ui_inst_dict[UI_INSTANCE].message) <NEW_LINE> <DEDENT> except discord.NotFound: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> msg_id, channel_id = ui.split(':') <NEW_LINE> for msg in self.always_watch_messages: <NEW_LINE> <INDENT> if msg.id==msg_id and msg.channel.id==channel_id: <NEW_LINE> <INDENT> self.always_watch_messages.remove(msg) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> to_del.append(ui) <NEW_LINE> <DEDENT> <DEDENT> for i in to_del: <NEW_LINE> <INDENT> del(self.public_namespace.ui_messages[i])
UIdea plugin to terminate UIs. The lifespan of your UI should be declared in the config .json for it. For more information about UIdea, see GitHub : <https://github.com/IdeaBot/UIdea> Your interaction with this will probably never be evident. If it is evident, I've probably done something wrong. **NOTE:** This is run periodically and independently, so it could delete something mid-action in extreme cases
625990614428ac0f6e659c02
class CodeEntry(object): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> self.code = None <NEW_LINE> with open(self.path, "r") as code_file: <NEW_LINE> <INDENT> self.code = code_file.read() <NEW_LINE> <DEDENT> self.url_pattern = r"//.*?(http://rosettacode.org/wiki/[^\s]+)" <NEW_LINE> self.url_regexp = re.compile(self.url_pattern) <NEW_LINE> <DEDENT> def extract_url(self): <NEW_LINE> <INDENT> for line in self.code.splitlines(): <NEW_LINE> <INDENT> matches = self._extract_url_string(line) <NEW_LINE> if matches: <NEW_LINE> <INDENT> return matches.group(1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _extract_url_string(self, string): <NEW_LINE> <INDENT> return self.url_regexp.search(string) <NEW_LINE> <DEDENT> def generate_json_doc(self): <NEW_LINE> <INDENT> json_str = "" <NEW_LINE> with tempfile.NamedTemporaryFile() as temp: <NEW_LINE> <INDENT> subprocess.call(["rustdoc", "-o", temp.name, "--output-format", "json", self.path]) <NEW_LINE> json_str = temp.read().decode("utf-8") <NEW_LINE> <DEDENT> return json_str <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _extract_docs(json_doc): <NEW_LINE> <INDENT> json_str = json.loads(json_doc) <NEW_LINE> jsonpath_expr = jsonpath.parse("$..module.attrs") <NEW_LINE> matches = [match.value for match in jsonpath_expr.find(json_str)] <NEW_LINE> if matches: <NEW_LINE> <INDENT> matches = matches[0] <NEW_LINE> module_docs = [] <NEW_LINE> for match in matches: <NEW_LINE> <INDENT> if 'fields' in match and match['fields'][0] == 'doc': <NEW_LINE> <INDENT> module_docs.append(match['fields'][1]) <NEW_LINE> <DEDENT> <DEDENT> return module_docs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> def make_wiki_markup(self): <NEW_LINE> <INDENT> json_doc = self.generate_json_doc() <NEW_LINE> raw_docs = CodeEntry._extract_docs(json_doc) <NEW_LINE> docs = "\n".join(raw_docs) <NEW_LINE> jinja_env = jinja2.Environment( loader=jinja2.PackageLoader('rosettarobot', 
'templates'), block_start_string='<%', block_end_string='%>', variable_start_string='%%', variable_end_string='%%', comment_start_string='<#', comment_end_string='#>', ) <NEW_LINE> template = jinja_env.get_template("mediawiki_template.jinja2") <NEW_LINE> return template.render(header=None, documentation=docs, code=self.code, output=None) <NEW_LINE> <DEDENT> def make_github_markup(self): <NEW_LINE> <INDENT> json_doc = self.generate_json_doc() <NEW_LINE> raw_docs = CodeEntry._extract_docs(json_doc) <NEW_LINE> docs = "\n".join(raw_docs) <NEW_LINE> jinja_env = jinja2.Environment( loader=jinja2.PackageLoader('rosettarobot', 'templates'), ) <NEW_LINE> template = jinja_env.get_template("github_template.jinja2") <NEW_LINE> return template.render(header=None, documentation=docs, code=self.code, output=None)
Code information.
62599061adb09d7d5dc0bc3a
class StyleReader: <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self._filename = filename <NEW_LINE> if not self._filename.endswith('.sld'): <NEW_LINE> <INDENT> self._filename += '.sld' <NEW_LINE> <DEDENT> if not os.path.isabs(self._filename): <NEW_LINE> <INDENT> self._filename = os.path.join( os.path.dirname(__file__), self._filename ) <NEW_LINE> <DEDENT> self._read_sld_file() <NEW_LINE> <DEDENT> def _read_sld_file(self): <NEW_LINE> <INDENT> with open(self._filename, encoding='utf-8') as fd: <NEW_LINE> <INDENT> root = ElementTree.fromstring(fd.read()) <NEW_LINE> if not root.tag.endswith('StyledLayerDescriptor'): <NEW_LINE> <INDENT> raise StyleReaderError( "File {} is not a valid SLD".format(self._filename) ) <NEW_LINE> <DEDENT> ns_prefix = root.tag[:root.tag.index('}')+1] <NEW_LINE> ns_dict = {'sld': ns_prefix[1:-1]} <NEW_LINE> rs_node = root.find('.//sld:RasterSymbolizer', ns_dict) <NEW_LINE> if rs_node is None: <NEW_LINE> <INDENT> raise StyleReaderError( "File {} is not a valid SLD: no RasterSymbolizer defined".format( self._filename )) <NEW_LINE> <DEDENT> cl_node = rs_node.find('sld:ColorMap', ns_dict) <NEW_LINE> if cl_node is None: <NEW_LINE> <INDENT> raise StyleReaderError( "File {} is not a valid SLD: no ColorMap defined".format( self._filename )) <NEW_LINE> <DEDENT> cl_entries = cl_node.findall('sld:ColorMapEntry', ns_dict) <NEW_LINE> if not cl_entries: <NEW_LINE> <INDENT> raise StyleReaderError( "No color entries defined in file {}".format( self._filename )) <NEW_LINE> <DEDENT> self._vc = OrderedDict() <NEW_LINE> for ce in cl_entries: <NEW_LINE> <INDENT> v = int(ce.attrib.get('quantity')) <NEW_LINE> h = ce.attrib.get('color').lstrip('#') <NEW_LINE> rgb = tuple(int(h[i:i+2], 16) for i in (0, 2, 4)) <NEW_LINE> self._vc[v] = rgb <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_values(self): <NEW_LINE> <INDENT> return self._vc.keys() <NEW_LINE> <DEDENT> def get_rgb_color(self, value): <NEW_LINE> <INDENT> return self._vc[value] <NEW_LINE> 
<DEDENT> def set_band_colors(self, ds, ib=1): <NEW_LINE> <INDENT> from osgeo import gdal <NEW_LINE> colors = gdal.ColorTable() <NEW_LINE> for v in self.get_values(): <NEW_LINE> <INDENT> colors.SetColorEntry(v, self.get_rgb_color(v)) <NEW_LINE> <DEDENT> band = ds.GetRasterBand(ib) <NEW_LINE> band.SetRasterColorTable(colors) <NEW_LINE> band.SetRasterColorInterpretation(gdal.GCI_PaletteIndex) <NEW_LINE> band = None
Raster style reader :param str filename: input SLD file
62599061a8370b77170f1a9e
class End(six.with_metaclass(abc.ABCMeta)): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def operation_stats(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def add_idle_action(self, action): <NEW_LINE> <INDENT> raise NotImplementedError()
Common type for entry-point objects on both sides of an operation.
62599061baa26c4b54d50973
class Primitive_guard (Primitive): <NEW_LINE> <INDENT> def __init__ (self, G, name, attributes): <NEW_LINE> <INDENT> interfaces = { 'inputs': ['data', 'cond'], 'outputs': ['data'] } <NEW_LINE> super ().setup (name, G, attributes, interfaces) <NEW_LINE> self.append_rule (Intg (self.input.cond)) <NEW_LINE> self.append_rule (Implies (Intg(self.output.data), Intg(self.input.data))) <NEW_LINE> self.append_rule (Implies (Conf(self.input.data), Conf(self.output.data)))
Guard primitive This primitive guards the control the data flow in a protocol. Input data is only transferred to the output interfaces if the condition on the input interfaces is true.
625990610a50d4780f706927
class CheckPayView(View): <NEW_LINE> <INDENT> def post(self,request): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> if not user.is_authenticated(): <NEW_LINE> <INDENT> return JsonResponse({'res': 0, 'errmsg': '用户未登录'}) <NEW_LINE> <DEDENT> order_id = request.POST.get('order_id') <NEW_LINE> if not order_id: <NEW_LINE> <INDENT> return JsonResponse({'res': 1, 'errmsg': '无效的订单id'}) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> order = OrderInfo.objects.get(order_id=order_id, user=user, pay_method=3, order_status=1) <NEW_LINE> <DEDENT> except OrderInfo.DoesNotExist: <NEW_LINE> <INDENT> return JsonResponse({'res': 2, 'errmsg': '订单错误'}) <NEW_LINE> <DEDENT> alipay = AliPay( appid='2016092500594368', app_notify_url=None, app_private_key_path=os.path.join(settings.BASE_DIR, 'apps/order/app_private_key.pem'), alipay_public_key_path=os.path.join(settings.BASE_DIR, 'apps/order/alipay_public_key.pem'), sign_type="RSA2", debug=True ) <NEW_LINE> while True: <NEW_LINE> <INDENT> response = alipay.api_alipay_trade_query(order_id) <NEW_LINE> code = response.get('code') <NEW_LINE> if code == '10000' and response.get('trade_status') == 'TRADE_SUCCESS': <NEW_LINE> <INDENT> trade_no = response.get('trade_no') <NEW_LINE> order.trade_no = trade_no <NEW_LINE> order.order_status = 4 <NEW_LINE> order.save() <NEW_LINE> return JsonResponse({'res':3, 'message':'支付成功'}) <NEW_LINE> <DEDENT> elif code == '40004' or (code == '10000' and response.get('trade_status') == 'WAIT_BUYER_PAY'): <NEW_LINE> <INDENT> time.sleep(5) <NEW_LINE> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return JsonResponse({'res':4, 'errmsg':'支付失败'})
查看订单支付的结果
62599061a219f33f346c7ed7
class LegacyChargeSummary(ChargeSummary): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'kind': {'required': True}, 'billing_period_id': {'readonly': True}, 'usage_start': {'readonly': True}, 'usage_end': {'readonly': True}, 'azure_charges': {'readonly': True}, 'charges_billed_separately': {'readonly': True}, 'marketplace_charges': {'readonly': True}, 'currency': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'e_tag': {'key': 'eTag', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'billing_period_id': {'key': 'properties.billingPeriodId', 'type': 'str'}, 'usage_start': {'key': 'properties.usageStart', 'type': 'str'}, 'usage_end': {'key': 'properties.usageEnd', 'type': 'str'}, 'azure_charges': {'key': 'properties.azureCharges', 'type': 'float'}, 'charges_billed_separately': {'key': 'properties.chargesBilledSeparately', 'type': 'float'}, 'marketplace_charges': {'key': 'properties.marketplaceCharges', 'type': 'float'}, 'currency': {'key': 'properties.currency', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, e_tag: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(LegacyChargeSummary, self).__init__(e_tag=e_tag, **kwargs) <NEW_LINE> self.kind = 'legacy' <NEW_LINE> self.billing_period_id = None <NEW_LINE> self.usage_start = None <NEW_LINE> self.usage_end = None <NEW_LINE> self.azure_charges = None <NEW_LINE> self.charges_billed_separately = None <NEW_LINE> self.marketplace_charges = None <NEW_LINE> self.currency = None
Legacy charge summary. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: Resource Id. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :ivar e_tag: eTag of the resource. To handle concurrent update scenario, this field will be used to determine whether the user is updating the latest version or not. :vartype e_tag: str :ivar kind: Required. Specifies the kind of charge summary.Constant filled by server. Possible values include: "legacy", "modern". :vartype kind: str or ~azure.mgmt.consumption.models.ChargeSummaryKind :ivar billing_period_id: The id of the billing period resource that the charge belongs to. :vartype billing_period_id: str :ivar usage_start: Usage start date. :vartype usage_start: str :ivar usage_end: Usage end date. :vartype usage_end: str :ivar azure_charges: Azure Charges. :vartype azure_charges: float :ivar charges_billed_separately: Charges Billed separately. :vartype charges_billed_separately: float :ivar marketplace_charges: Marketplace Charges. :vartype marketplace_charges: float :ivar currency: Currency Code. :vartype currency: str
62599061f548e778e596cc59
class Schur(Basis): <NEW_LINE> <INDENT> def __init__(self, A): <NEW_LINE> <INDENT> SymSuperfunctionsAlgebra.Basis.__init__( self, A, prefix='s')
Class of the type I super Schur.
6259906129b78933be26ac2c
class WorkItemTests(PlacelessSetup, unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(WorkItemTests, self).setUp() <NEW_LINE> setUpZCML(self) <NEW_LINE> setUpIntIds(self) <NEW_LINE> setUpRelationCatalog(self) <NEW_LINE> self.root = rootFolder() <NEW_LINE> testing.setUpWorkLists(self.root) <NEW_LINE> testing.setUpIndices(self) <NEW_LINE> generateContent(self.root) <NEW_LINE> from quotationtool.workflow.interfaces import IWorkList <NEW_LINE> self.worklist = zope.component.getUtility(IWorkList, name='editor', context=self.root) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> tearDown(self) <NEW_LINE> <DEDENT> def test_Label(self): <NEW_LINE> <INDENT> startRemove(TestRequest().principal.id, self.root['foo2']) <NEW_LINE> item = self.worklist.pop() <NEW_LINE> view = zope.component.queryMultiAdapter((item, TestRequest()), name='label') <NEW_LINE> self.assertTrue(isinstance(view(), unicode))
Tests for components common to all/different workitems
62599061379a373c97d9a6f4
@register_relay_node <NEW_LINE> class CompileEngine(NodeBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> raise RuntimeError("Cannot construct a CompileEngine") <NEW_LINE> <DEDENT> def lower(self, source_func, target=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> key = _get_cache_key(source_func, target) <NEW_LINE> return _backend._CompileEngineLower(self, key) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> import traceback <NEW_LINE> msg = traceback.format_exc() <NEW_LINE> msg += "Error during compile func\n" <NEW_LINE> msg += "--------------------------\n" <NEW_LINE> msg += source_func.astext(show_meta_data=False) <NEW_LINE> msg += "--------------------------\n" <NEW_LINE> raise RuntimeError(msg) <NEW_LINE> <DEDENT> <DEDENT> def jit(self, source_func, target=None): <NEW_LINE> <INDENT> key = _get_cache_key(source_func, target) <NEW_LINE> return _backend._CompileEngineJIT(self, key) <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> _backend._CompileEngineClear(self) <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> res = _backend._CompileEngineListItems(self) <NEW_LINE> assert len(res) % 2 == 0 <NEW_LINE> return [(res[2*i], res[2*i+1]) for i in range(len(res) // 2)] <NEW_LINE> <DEDENT> def dump(self): <NEW_LINE> <INDENT> items = self.items() <NEW_LINE> res = "====================================\n" <NEW_LINE> res += "CompilerEngine dump, %d items cached\n" % len(items) <NEW_LINE> for k, v in items: <NEW_LINE> <INDENT> res += "------------------------------------\n" <NEW_LINE> res += "target={}\n".format(k.target) <NEW_LINE> res += "use_count={}\n".format(v.use_count) <NEW_LINE> res += "func_name={}\n".format(v.cached_func.func_name) <NEW_LINE> res += k.source_func.astext() + "\n" <NEW_LINE> <DEDENT> res += "===================================\n" <NEW_LINE> return res
CompileEngine to get lowered code.
625990617d847024c075daa5
class BaseAlembicCommand(distutils.core.Command): <NEW_LINE> <INDENT> user_options = [ ('config=', 'c', 'Configuration file (YAML or Python)'), ('debug', 'd', 'Print debug logs'), ] <NEW_LINE> def initialize_options(self): <NEW_LINE> <INDENT> self.config = None <NEW_LINE> self.debug = False <NEW_LINE> <DEDENT> def finalize_options(self): <NEW_LINE> <INDENT> if self.config is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.config = os.environ['CLICHE_CONFIG'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise distutils.errors.DistutilsOptionError( 'The -c/--config option or CLICHE_CONFIG environment ' 'variable is required' ) <NEW_LINE> <DEDENT> <DEDENT> if not os.path.isfile(self.config): <NEW_LINE> <INDENT> raise distutils.errors.DistutilsOptionError( self.config + ' cannot be found' ) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> from cliche.cli import initialize_app <NEW_LINE> from cliche.orm import Base, get_alembic_config, import_all_modules <NEW_LINE> from cliche.web.app import app <NEW_LINE> from cliche.web.db import get_database_engine <NEW_LINE> import_all_modules() <NEW_LINE> <DEDENT> except ImportError as e: <NEW_LINE> <INDENT> raise ImportError('dependencies are not resolved yet; run ' '"pip install -e ." 
first\n' + str(e)) <NEW_LINE> <DEDENT> if self.debug: <NEW_LINE> <INDENT> print('These mapping classes are loaded into ORM registry:', file=sys.stderr) <NEW_LINE> for cls in Base._decl_class_registry.values(): <NEW_LINE> <INDENT> if isinstance(cls, type): <NEW_LINE> <INDENT> print('- {0.__module__}.{0.__name__}'.format(cls), file=sys.stderr) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> initialize_app(self.config) <NEW_LINE> with app.app_context(): <NEW_LINE> <INDENT> engine = get_database_engine() <NEW_LINE> <DEDENT> config = get_alembic_config(engine) <NEW_LINE> self.alembic_process(config) <NEW_LINE> <DEDENT> def alembic_process(self, config): <NEW_LINE> <INDENT> raise NotImplementedError('override alembic_process() method')
Base class for commands provided by Alembic.
625990617d847024c075daa6
class SetBWListRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(SetBWListRequest, self).__init__( '/regions/{regionId}/blackwhite:list', 'POST', header, version) <NEW_LINE> self.parameters = parameters
设置黑白名单
625990613617ad0b5ee0781f
class Solution: <NEW_LINE> <INDENT> def lastPosition(self, nums, target): <NEW_LINE> <INDENT> if nums is None or len(nums) == 0: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> start, end = 0, len(nums) - 1 <NEW_LINE> while start + 1 < end: <NEW_LINE> <INDENT> mid = (start + end) // 2 <NEW_LINE> if nums[mid] == target: <NEW_LINE> <INDENT> start = mid <NEW_LINE> <DEDENT> elif nums[mid] < target: <NEW_LINE> <INDENT> start = mid <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> end = mid <NEW_LINE> <DEDENT> <DEDENT> if nums[end] == target: <NEW_LINE> <INDENT> return end <NEW_LINE> <DEDENT> if nums[start] == target: <NEW_LINE> <INDENT> return start <NEW_LINE> <DEDENT> return -1
@param nums: An integer array sorted in ascending order @param target: An integer @return: An integer
625990614a966d76dd5f05c5
class SlackApiError(SlackBaseError): <NEW_LINE> <INDENT> def __init__(self, response: dict): <NEW_LINE> <INDENT> self.msg = 'Slack error - {}'.format(response.get('error')) <NEW_LINE> super(HandlerBaseError, self).__init__(self.msg)
Slack API error class
6259906191f36d47f22319f7
class RebuildTenantRelations(ZenPackMigration): <NEW_LINE> <INDENT> version = Version(2, 2, 0) <NEW_LINE> def migrate(self, pack): <NEW_LINE> <INDENT> results = ICatalogTool(pack.dmd.Devices).search(Tenant) <NEW_LINE> LOG.info("starting: %s total devices", results.total) <NEW_LINE> progress = ProgressLogger( LOG, prefix="progress", total=results.total, interval=PROGRESS_LOG_INTERVAL) <NEW_LINE> objects_migrated = 0 <NEW_LINE> for brain in results: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.updateRelations(brain.getObject()): <NEW_LINE> <INDENT> objects_migrated += 1 <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> LOG.exception( "error updating relationships for %s", brain.id) <NEW_LINE> <DEDENT> progress.increment() <NEW_LINE> <DEDENT> LOG.info( "finished: %s of %s devices required migration", objects_migrated, results.total) <NEW_LINE> <DEDENT> def updateRelations(self, device): <NEW_LINE> <INDENT> for relname in (x[0] for x in Tenant._relations): <NEW_LINE> <INDENT> rel = getattr(aq_base(device), relname, None) <NEW_LINE> if not rel or not isinstance(rel, RelationshipBase): <NEW_LINE> <INDENT> device.buildRelations() <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> return False
Rebuilds relations on all Tenant objects. This is necessary anytime new relations are added to Tenant. The code is generic enough to work simply by updating the version at which the migrate script should run. Update the version anytime you add a new relationship to Tenant. No other changes to this script are necessary.
625990614f88993c371f1087
class User(Base,UserMixin): <NEW_LINE> <INDENT> __tablename__ = 'user' <NEW_LINE> ROLE_USER = 10 <NEW_LINE> ROLE_COMPANY = 20 <NEW_LINE> ROLE_ADMIN = 30 <NEW_LINE> id = db.Column(db.Integer,primary_key=True) <NEW_LINE> username = db.Column(db.String(32),unique=True,index=True,nullable=False) <NEW_LINE> email = db.Column(db.String(64),unique=True,index=True,nullable=False) <NEW_LINE> coms = db.relationship('ComInfo') <NEW_LINE> _password = db.Column('password',db.String(256),nullable=False) <NEW_LINE> role = db.Column(db.SmallInteger,default=ROLE_USER) <NEW_LINE> realname = db.Column(db.String(32)) <NEW_LINE> resume = db.Column(db.String(128)) <NEW_LINE> phone = db.Column(db.String(12)) <NEW_LINE> exp = db.Column(db.String(24)) <NEW_LINE> status = db.Column(db.Boolean, default=True) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<User:{}>'.format(self.username) <NEW_LINE> <DEDENT> @property <NEW_LINE> def password(self): <NEW_LINE> <INDENT> return self._password <NEW_LINE> <DEDENT> @password.setter <NEW_LINE> def password(self,orig_password): <NEW_LINE> <INDENT> self._password = generate_password_hash(orig_password) <NEW_LINE> <DEDENT> def check_password(self, password): <NEW_LINE> <INDENT> return check_password_hash(self._password,password) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_admin(self): <NEW_LINE> <INDENT> return self.role == self.ROLE_ADMIN <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_company(self): <NEW_LINE> <INDENT> return self.role == self.ROLE_COMPANY <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_disable(self): <NEW_LINE> <INDENT> return not self.status
用户信息类
62599061379a373c97d9a6f5
class BlochOp(OpBase): <NEW_LINE> <INDENT> def __init__(self, theta, phi, name=None): <NEW_LINE> <INDENT> super(BlochOp, self).__init__() <NEW_LINE> self.theta = theta <NEW_LINE> self.phi = phi <NEW_LINE> if name is None: <NEW_LINE> <INDENT> self.name = 'Bloch Op (theta=%s, phi=%s)' % (theta, phi) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __list__(self): <NEW_LINE> <INDENT> t = self.theta <NEW_LINE> p = self.phi <NEW_LINE> return [[np.cos(2 * t), np.sin(2 * t) * np.exp(-1.j * p)], [np.sin(2 * t) * np.exp(1.j * p), -np.cos(2 * t)]] <NEW_LINE> <DEDENT> def __eigen__(self, i): <NEW_LINE> <INDENT> t = self.theta <NEW_LINE> p = self.phi <NEW_LINE> if i is 0: <NEW_LINE> <INDENT> return [np.cos(t), np.sin(t) * np.exp(1.j * p)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [- np.sin(t) * np.exp(-1.j * p), np.cos(t)] <NEW_LINE> <DEDENT> <DEDENT> def __u__(self): <NEW_LINE> <INDENT> t = self.theta <NEW_LINE> p = self.phi <NEW_LINE> return [[np.cos(t), - np.sin(t) * np.exp(-1.j * p)], [np.sin(t) * np.exp(1.j * p), np.cos(t)]] <NEW_LINE> <DEDENT> def __invu__(self): <NEW_LINE> <INDENT> t = self.theta <NEW_LINE> p = self.phi <NEW_LINE> return [[np.cos(t), np.sin(t) * np.exp(-1.j * p)], [-np.sin(t) * np.exp(1.j * p), np.cos(t)]]
single particle operator on bloch sphere This class offers methods related to operators on bloch sphere
625990613539df3088ecd96e
class OperationDisplay(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'provider': {'key': 'provider', 'type': 'str'}, 'resource': {'key': 'resource', 'type': 'str'}, 'operation': {'key': 'operation', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, provider: Optional[str] = None, resource: Optional[str] = None, operation: Optional[str] = None, description: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(OperationDisplay, self).__init__(**kwargs) <NEW_LINE> self.provider = provider <NEW_LINE> self.resource = resource <NEW_LINE> self.operation = operation <NEW_LINE> self.description = description
Operation details. :param provider: The service provider. :type provider: str :param resource: Resource on which the operation is performed. :type resource: str :param operation: The operation type. :type operation: str :param description: The operation description. :type description: str
625990612c8b7c6e89bd4ec0
class ToggleAction(gtk.ToggleAction, _ActionBase): <NEW_LINE> <INDENT> def __init__(self, keypresses=(), name=None, label=None, tooltip=None, stock_id=None, preference_name=None, default=True): <NEW_LINE> <INDENT> if name is None: <NEW_LINE> <INDENT> name = label <NEW_LINE> <DEDENT> gtk.ToggleAction.__init__(self, name=name, label=label, tooltip=tooltip, stock_id=stock_id) <NEW_LINE> _ActionBase.__init__(self, label, keypresses) <NEW_LINE> self.preference_name = preference_name <NEW_LINE> self.default = default <NEW_LINE> <DEDENT> def load_from_preferences(self): <NEW_LINE> <INDENT> if self.preference_name is not None: <NEW_LINE> <INDENT> self.set_active(Preferences.entry( self.preference_name, default=bool(self.default))) <NEW_LINE> <DEDENT> <DEDENT> def save_to_preferences(self): <NEW_LINE> <INDENT> if self.preference_name is not None: <NEW_LINE> <INDENT> Preferences.entry(self.preference_name, value=self.get_active())
A custom Action class based on gtk.ToggleAction. Pass additional arguments such as keypresses.
625990618da39b475be048ba
class ImprimirCertificadoRTNPersonaNaturalPstMenuView(LoginRequiredMixin, DetailView, MenuPSTMixin): <NEW_LINE> <INDENT> model = Pst <NEW_LINE> template_name = 'registro/funcionario/imprimir_certificado_persona_natural_rtn_menu_pst.html' <NEW_LINE> context_object_name = "pst" <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(ImprimirCertificadoRTNPersonaNaturalPstMenuView, self).get_context_data(**kwargs) <NEW_LINE> pst = Pst.objects.get(id=int(self.kwargs['pk']), cached=True) <NEW_LINE> rtn = models.CertificacionRTN.objects.get(pst=pst) <NEW_LINE> direccion = models.Direccion.objects.get(pst=pst) <NEW_LINE> context['direccion'] = direccion <NEW_LINE> context['certificacion'] = rtn <NEW_LINE> context['pst'] = pst <NEW_LINE> return context
Vista utilizada para mostrar el comprobante de certificacion en el menu pst solo si ya lo tiene registrado
625990614e4d562566373ad8
class PersonBuilder: <NEW_LINE> <INDENT> def __init__(self, person: Optional[Person] = None) -> None: <NEW_LINE> <INDENT> if person is None: <NEW_LINE> <INDENT> self.person = Person() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.person = person <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def works(self) -> "PersonJobBuilder": <NEW_LINE> <INDENT> return PersonJobBuilder(self.person) <NEW_LINE> <DEDENT> @property <NEW_LINE> def lives(self) -> "PersonAddressBuilder": <NEW_LINE> <INDENT> return PersonAddressBuilder(self.person) <NEW_LINE> <DEDENT> def build(self) -> Person: <NEW_LINE> <INDENT> return self.person
Builder to build a person. Contains sub-builders to build the person's job and address.
62599061d6c5a102081e37f6
class PositionWeightMatrix(GenericPositionMatrix): <NEW_LINE> <INDENT> def __init__(self, alphabet, counts): <NEW_LINE> <INDENT> GenericPositionMatrix.__init__(self, alphabet, counts) <NEW_LINE> for i in range(self.length): <NEW_LINE> <INDENT> total = sum(float(self[letter][i]) for letter in alphabet) <NEW_LINE> for letter in alphabet: <NEW_LINE> <INDENT> self[letter][i] /= total <NEW_LINE> <DEDENT> <DEDENT> for letter in alphabet: <NEW_LINE> <INDENT> self[letter] = tuple(self[letter]) <NEW_LINE> <DEDENT> <DEDENT> def log_odds(self, background=None): <NEW_LINE> <INDENT> values = {} <NEW_LINE> alphabet = self.alphabet <NEW_LINE> if background is None: <NEW_LINE> <INDENT> background = dict.fromkeys(self.alphabet, 1.0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> background = dict(background) <NEW_LINE> <DEDENT> total = sum(background.values()) <NEW_LINE> for letter in alphabet: <NEW_LINE> <INDENT> background[letter] /= total <NEW_LINE> values[letter] = [] <NEW_LINE> <DEDENT> for i in range(self.length): <NEW_LINE> <INDENT> for letter in alphabet: <NEW_LINE> <INDENT> b = background[letter] <NEW_LINE> if b > 0: <NEW_LINE> <INDENT> p = self[letter][i] <NEW_LINE> if p > 0: <NEW_LINE> <INDENT> logodds = math.log(p / b, 2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logodds = -math.inf <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> p = self[letter][i] <NEW_LINE> if p > 0: <NEW_LINE> <INDENT> logodds = math.inf <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logodds = math.nan <NEW_LINE> <DEDENT> <DEDENT> values[letter].append(logodds) <NEW_LINE> <DEDENT> <DEDENT> pssm = PositionSpecificScoringMatrix(alphabet, values) <NEW_LINE> return pssm
Class for the support of weight calculations on the Position Matrix.
6259906163d6d428bbee3df1
class VonAgentError(Exception): <NEW_LINE> <INDENT> def __init__(self, error_code: ErrorCode, message: str): <NEW_LINE> <INDENT> self.error_code = error_code <NEW_LINE> self.message = message
Error class for von_agent operation.
62599061460517430c432bbc
class XingAuth(ConsumerBasedOAuth): <NEW_LINE> <INDENT> AUTH_BACKEND = XingBackend <NEW_LINE> AUTHORIZATION_URL = XING_AUTHORIZATION_URL <NEW_LINE> REQUEST_TOKEN_URL = XING_REQUEST_TOKEN_URL <NEW_LINE> ACCESS_TOKEN_URL = XING_ACCESS_TOKEN_URL <NEW_LINE> SETTINGS_KEY_NAME = 'XING_CONSUMER_KEY' <NEW_LINE> SETTINGS_SECRET_NAME = 'XING_CONSUMER_SECRET' <NEW_LINE> SCOPE_SEPARATOR = '+' <NEW_LINE> def user_data(self, access_token, *args, **kwargs): <NEW_LINE> <INDENT> key, secret = self.get_key_and_secret() <NEW_LINE> consumer = oauth.Consumer(key=key, secret=secret) <NEW_LINE> client = oauth.Client(consumer, access_token) <NEW_LINE> resp, content = client.request(XING_CHECK_AUTH, 'GET') <NEW_LINE> profile = simplejson.loads(content)['users'][0] <NEW_LINE> try: <NEW_LINE> <INDENT> return { 'user_id': profile['id'], 'id': profile['id'], 'first_name': profile['first_name'], 'last_name': profile['last_name'], 'email': profile['active_email'] } <NEW_LINE> <DEDENT> except (KeyError, IndexError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def auth_complete(self, *args, **kwargs): <NEW_LINE> <INDENT> oauth_problem = self.request.GET.get('oauth_problem') <NEW_LINE> if oauth_problem: <NEW_LINE> <INDENT> if oauth_problem == 'user_refused': <NEW_LINE> <INDENT> raise AuthCanceled(self, '') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AuthUnknownError(self, 'Xing error was %s' % oauth_problem) <NEW_LINE> <DEDENT> <DEDENT> return super(XingAuth, self).auth_complete(*args, **kwargs) <NEW_LINE> <DEDENT> def unauthorized_token(self): <NEW_LINE> <INDENT> request_token_url = self.REQUEST_TOKEN_URL <NEW_LINE> scope = self.get_scope_argument() <NEW_LINE> if scope: <NEW_LINE> <INDENT> request_token_url = request_token_url + '?' + urlencode(scope) <NEW_LINE> <DEDENT> request = self.oauth_request( token=None, url=request_token_url, extra_params=self.request_token_extra_arguments() ) <NEW_LINE> response = self.fetch_response(request) <NEW_LINE> return Token.from_string(response)
Xing OAuth authentication mechanism
62599061a8370b77170f1aa0
class ZmeyObjectPropertiesPanel(bpy.types.Panel): <NEW_LINE> <INDENT> bl_label = "Zmey Object" <NEW_LINE> bl_idname = "ZMEY_OBJECT" <NEW_LINE> bl_space_type = "PROPERTIES" <NEW_LINE> bl_region_type = "WINDOW" <NEW_LINE> bl_context = "object" <NEW_LINE> def draw(self, context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> obj = context.object <NEW_LINE> layout.row().prop(obj.zmey_props, "enabled", toggle=True) <NEW_LINE> if obj.zmey_props.enabled: <NEW_LINE> <INDENT> layout.row().prop(obj.zmey_props, "type") <NEW_LINE> box = layout.box() <NEW_LINE> zmey_type = bpy.context.scene.world.zmey_scene_types.types[int(obj.zmey_props.type)] <NEW_LINE> box.box().prop( obj.zmey_props, "mesh_export", text="Export Mesh" if zmey_type.mesh_reference == None else "Override Type Mesh") <NEW_LINE> obj.zmey_props.components.draw_type(box)
Zmey Object Properties
625990614428ac0f6e659c05
@Operations.register_operation("drop_constraint") <NEW_LINE> @BatchOperations.register_operation("drop_constraint", "batch_drop_constraint") <NEW_LINE> class DropConstraintOp(MigrateOperation): <NEW_LINE> <INDENT> def __init__(self, constraint_name, table_name, type_=None, schema=None): <NEW_LINE> <INDENT> self.constraint_name = constraint_name <NEW_LINE> self.table_name = table_name <NEW_LINE> self.constraint_type = type_ <NEW_LINE> self.schema = schema <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_constraint(cls, constraint): <NEW_LINE> <INDENT> types = { "unique_constraint": "unique", "foreign_key_constraint": "foreignkey", "primary_key_constraint": "primary", "check_constraint": "check", "column_check_constraint": "check", } <NEW_LINE> constraint_table = sqla_compat._table_for_constraint(constraint) <NEW_LINE> return cls( constraint.name, constraint_table.name, schema=constraint_table.schema, type_=types[constraint.__visit_name__] ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> @util._with_legacy_names([("type", "type_")]) <NEW_LINE> def drop_constraint( cls, operations, name, table_name, type_=None, schema=None): <NEW_LINE> <INDENT> op = cls(name, table_name, type_=type_, schema=schema) <NEW_LINE> return operations.invoke(op) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def batch_drop_constraint(cls, operations, name, type_=None): <NEW_LINE> <INDENT> op = cls( name, operations.impl.table_name, type_=type_, schema=operations.impl.schema ) <NEW_LINE> return operations.invoke(op)
Represent a drop constraint operation.
62599061adb09d7d5dc0bc3c
class Repository(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'repository' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String, nullable=False) <NEW_LINE> owner_id = db.Column(db.Integer, db.ForeignKey('github_user.id'), nullable=False) <NEW_LINE> owner = db.relationship('GitHubUser', back_populates='repositories')
Table containing references to repository sources of pull requests. Subclasses ``wptdash.app.db.Model``
625990618e71fb1e983bd19d
class RagelCppLexer(DelegatingLexer): <NEW_LINE> <INDENT> name = 'Ragel in CPP Host' <NEW_LINE> aliases = ['ragel-cpp'] <NEW_LINE> filenames = ['*.rl'] <NEW_LINE> def __init__(self, **options): <NEW_LINE> <INDENT> super(RagelCppLexer, self).__init__(CppLexer, RagelEmbeddedLexer, **options) <NEW_LINE> <DEDENT> def analyse_text(text): <NEW_LINE> <INDENT> return '@LANG: c++' in text
A lexer for `Ragel`_ in a CPP host file. *New in Pygments 1.1*
625990613539df3088ecd96f
class Trend(object): <NEW_LINE> <INDENT> def __init__(self, x=None, y=None): <NEW_LINE> <INDENT> self.fx, self.fy = fx, fy <NEW_LINE> <DEDENT> def plot(self, page, **kw): <NEW_LINE> <INDENT> return page.line(fx, fy, **kw)
A line fx vs. fy. Note that this may be infinite extent, and should probably have a mechanism for recalculating given new axes limits.
625990611f037a2d8b9e53d4
class EffiEdgeResUnit(HybridBlock): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, strides, exp_factor, se_factor, mid_from_in, use_skip, bn_epsilon, bn_use_global_stats, activation, **kwargs): <NEW_LINE> <INDENT> super(EffiEdgeResUnit, self).__init__(**kwargs) <NEW_LINE> self.residual = (in_channels == out_channels) and (strides == 1) and use_skip <NEW_LINE> self.use_se = se_factor > 0 <NEW_LINE> mid_channels = in_channels * exp_factor if mid_from_in else out_channels * exp_factor <NEW_LINE> with self.name_scope(): <NEW_LINE> <INDENT> self.conv1 = conv3x3_block( in_channels=in_channels, out_channels=mid_channels, bn_epsilon=bn_epsilon, bn_use_global_stats=bn_use_global_stats, activation=activation) <NEW_LINE> if self.use_se: <NEW_LINE> <INDENT> self.se = SEBlock( channels=mid_channels, reduction=(exp_factor * se_factor), mid_activation=activation) <NEW_LINE> <DEDENT> self.conv2 = conv1x1_block( in_channels=mid_channels, out_channels=out_channels, strides=strides, bn_epsilon=bn_epsilon, bn_use_global_stats=bn_use_global_stats, activation=None) <NEW_LINE> <DEDENT> <DEDENT> def hybrid_forward(self, F, x): <NEW_LINE> <INDENT> if self.residual: <NEW_LINE> <INDENT> identity = x <NEW_LINE> <DEDENT> x = self.conv1(x) <NEW_LINE> if self.use_se: <NEW_LINE> <INDENT> x = self.se(x) <NEW_LINE> <DEDENT> x = self.conv2(x) <NEW_LINE> if self.residual: <NEW_LINE> <INDENT> x = x + identity <NEW_LINE> <DEDENT> return x
EfficientNet-Edge edge residual unit. Parameters: ---------- in_channels : int Number of input channels. out_channels : int Number of output channels. strides : int or tuple/list of 2 int Strides of the second convolution layer. exp_factor : int Factor for expansion of channels. se_factor : int SE reduction factor for each unit. mid_from_in : bool Whether to use input channel count for middle channel count calculation. use_skip : bool Whether to use skip connection. bn_epsilon : float Small float added to variance in Batch norm. bn_use_global_stats : bool Whether global moving statistics is used instead of local batch-norm for BatchNorm layers. activation : str Name of activation function.
6259906166673b3332c31acf
class Events: <NEW_LINE> <INDENT> class Key: <NEW_LINE> <INDENT> OperationCode = "code" <NEW_LINE> Time = "time" <NEW_LINE> Description = "description" <NEW_LINE> Extra = "extra" <NEW_LINE> <DEDENT> class ExtraInformation: <NEW_LINE> <INDENT> CloseReason = "订单关闭原因" <NEW_LINE> Payments = "支付详情" <NEW_LINE> PayId = "支付单号" <NEW_LINE> PayFee = "实际支付金额" <NEW_LINE> PayFailReason = "支付失败原因" <NEW_LINE> ShipInfo = "物流信息" <NEW_LINE> RefundReason = "退款原因" <NEW_LINE> RefundNumber = "退款单号" <NEW_LINE> RefundDenyReason = "退款申请未通过原因" <NEW_LINE> RefundFailReason = "退款在支付平台处理失败原因" <NEW_LINE> <DEDENT> class Description: <NEW_LINE> <INDENT> Confirm = "用户已经确认了订单, 等待支付" <NEW_LINE> UserClose = "用户主动关闭订单" <NEW_LINE> Paying = "用户正在付款, 等待支付结果" <NEW_LINE> PayingSuccess = "用户已支付成功" <NEW_LINE> PayingFailed = "用户支付失败" <NEW_LINE> OrderRetry = "支付失败, 转入重试" <NEW_LINE> OrderTimedOut = "订单超时, 系统关闭订单" <NEW_LINE> Shipped = "商品已经交付物流" <NEW_LINE> Delieverd = "商品已经送达" <NEW_LINE> Recieved = "用户确认收货" <NEW_LINE> RecieveTimingExcced = "超时系统自动确认收货" <NEW_LINE> RequestRefund = "用户申请退款" <NEW_LINE> RefundDenied = "用户退款申请已拒绝" <NEW_LINE> RefundApproved = "用户退款申请已通过, 等待支付平台处理退款" <NEW_LINE> RefundSuccess = "支付平台退款完成" <NEW_LINE> RefundFailed = "支付平台退款失败"
事件相关设置
625990610c0af96317c578c8
class UpdateProfile(UpdateView): <NEW_LINE> <INDENT> model = Patient <NEW_LINE> form_class = ProfileForm <NEW_LINE> template_name = 'HealthApps/patient_profile.html' <NEW_LINE> success_url = '/profile' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(UpdateView, self).get_context_data(**kwargs) <NEW_LINE> context['user_type'] = get_user_type(self.request.user) <NEW_LINE> return context <NEW_LINE> <DEDENT> def user_matches_patient(self, request, **kwargs): <NEW_LINE> <INDENT> if request.user.is_authenticated(): <NEW_LINE> <INDENT> patient = Patient.objects.get(pk=kwargs['pk']) <NEW_LINE> return patient.user.id == request.user.id <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if not self.user_matches_patient(request, **kwargs): <NEW_LINE> <INDENT> return HttpResponseRedirect('/login') <NEW_LINE> <DEDENT> return super(UpdateProfile, self).dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> old_patient = self.request.user.patient <NEW_LINE> self.object = form.save() <NEW_LINE> CreateLogItem.li_patient_edit(self.object, old_patient) <NEW_LINE> return HttpResponseRedirect(self.success_url)
View for updating profile
625990613539df3088ecd970
class RedBaronProvider(provider.ProviderBase): <NEW_LINE> <INDENT> @red_src(dump=False) <NEW_LINE> def analyze(self, red, deep=2, with_formatting=False): <NEW_LINE> <INDENT> return "\n".join(red.__help__(deep=deep, with_formatting=False)) <NEW_LINE> <DEDENT> @red_src() <NEW_LINE> @red_validate([validators.OptionalRegionValidator(), validators.SingleNodeValidator(), validators.TypeValidator(["def"])]) <NEW_LINE> def rename_arg(self, red, start, end, oldname, newname): <NEW_LINE> <INDENT> for arg in red.arguments: <NEW_LINE> <INDENT> if arg.target.value == oldname: <NEW_LINE> <INDENT> arg.target.value = newname <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Expected argument %s to be one of %s" % (oldname, [arg.target.value for arg in red.arguments])) <NEW_LINE> <DEDENT> namenodes = red.value.find_all("name", value=oldname) <NEW_LINE> for node in namenodes: <NEW_LINE> <INDENT> node.value = newname <NEW_LINE> <DEDENT> return red <NEW_LINE> <DEDENT> @red_src(dump=False) <NEW_LINE> @red_validate([validators.OptionalRegionValidator(), validators.SingleNodeValidator(), validators.TypeValidator(["def"])]) <NEW_LINE> def get_args(self, red, start, end): <NEW_LINE> <INDENT> args = [] <NEW_LINE> for arg in red.arguments: <NEW_LINE> <INDENT> if isinstance(arg, (redbaron.ListArgumentNode, redbaron.DictArgumentNode)): <NEW_LINE> <INDENT> args.append((arg.dumps(), None)) <NEW_LINE> continue <NEW_LINE> <DEDENT> target = arg.target.value <NEW_LINE> if arg.value: <NEW_LINE> <INDENT> value = arg.value.dumps() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> args.append((target, value)) <NEW_LINE> <DEDENT> return args <NEW_LINE> <DEDENT> @red_src() <NEW_LINE> @red_validate([validators.OptionalRegionValidator(), validators.SingleNodeValidator(), validators.TypeValidator(["def"])]) <NEW_LINE> def add_arg(self, red, start, end, index, arg): <NEW_LINE> <INDENT> red.arguments.insert(index, arg) <NEW_LINE> return red 
<NEW_LINE> <DEDENT> @red_src(dump=False) <NEW_LINE> @red_validate([validators.MandatoryRegionValidator()]) <NEW_LINE> def get_parents(self, red, start, end): <NEW_LINE> <INDENT> parents = [] <NEW_LINE> current = redlib.get_node_of_region(red, start, end) <NEW_LINE> while current != red: <NEW_LINE> <INDENT> region = current.absolute_bounding_box <NEW_LINE> nodetype = current.type <NEW_LINE> start = redlib.Position(*region.top_left.to_tuple()) <NEW_LINE> end = redlib.Position(*region.bottom_right.to_tuple()) <NEW_LINE> current = current.parent <NEW_LINE> if parents and parents[-1].start == start and parents[-1].end == end: <NEW_LINE> <INDENT> parents.pop() <NEW_LINE> <DEDENT> parents.append(redlib.Parent(nodetype, start, end)) <NEW_LINE> <DEDENT> return parents
Provider for inspecting and transforming source code via redbaron.
62599061379a373c97d9a6f7
class FramesSequence(FramesStream): <NEW_LINE> <INDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> _len = len(self) <NEW_LINE> if isinstance(key, slice): <NEW_LINE> <INDENT> return (self.get_frame(_k) for _k in xrange(*key.indices(_len))) <NEW_LINE> <DEDENT> elif isinstance(key, collections.Iterable): <NEW_LINE> <INDENT> if isinstance(key, np.ndarray) and key.dtype == np.bool: <NEW_LINE> <INDENT> return (self.get_frame(_k) for _k in np.arange(len(self))[key]) <NEW_LINE> <DEDENT> if any(_k < -_len or _k >= _len for _k in key): <NEW_LINE> <INDENT> raise IndexError("Keys out of range") <NEW_LINE> <DEDENT> return (self.get_frame(_k if _k >= 0 else _len + _k) for _k in key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if key < -_len or key >= _len: <NEW_LINE> <INDENT> raise IndexError("Key out of range") <NEW_LINE> <DEDENT> return self.get_frame(key if key >= 0 else _len + key) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self[:] <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def __len__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_frame(self, ind): <NEW_LINE> <INDENT> pass
Baseclass for wrapping data buckets that have random access. Support random access. Supports standard slicing and fancy slicing, but returns a generator. Must be finite length.
62599061a79ad1619776b626
class GUI: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.user_id_var = None <NEW_LINE> self.result_list = None <NEW_LINE> self.ratings = None <NEW_LINE> self.movie_names = None <NEW_LINE> self._load_data() <NEW_LINE> self._create_gui() <NEW_LINE> <DEDENT> def _create_gui(self): <NEW_LINE> <INDENT> root = Tk() <NEW_LINE> root.title("Movie Recommender System") <NEW_LINE> frame = Frame(root) <NEW_LINE> frame.pack(fill=BOTH, expand=True, padx=5, pady=5) <NEW_LINE> top_panel = Frame(frame) <NEW_LINE> top_panel.pack(side=TOP, fill=X) <NEW_LINE> Label(top_panel, text="User ID:").pack(side=LEFT) <NEW_LINE> self.user_id_var = IntVar() <NEW_LINE> self.user_id_var.set(1) <NEW_LINE> Entry(top_panel, textvariable=self.user_id_var, width=10).pack(side=LEFT) <NEW_LINE> Button(top_panel, text="Find Movies", command=self.find_movies).pack(side=RIGHT) <NEW_LINE> result_panel = Frame(frame) <NEW_LINE> result_panel.pack(side=TOP, fill=BOTH, expand=True) <NEW_LINE> Label(result_panel, text="Top 5 Recommended Movies").pack(side=TOP) <NEW_LINE> self.result_list = Listbox(result_panel, height=8) <NEW_LINE> self.result_list.pack(side=LEFT, fill=BOTH, expand=True) <NEW_LINE> root.mainloop() <NEW_LINE> <DEDENT> def _load_data(self): <NEW_LINE> <INDENT> self.ratings = np.array(pd.read_csv(RATINGS_DATA, index_col=0)) <NEW_LINE> items = pd.read_csv(MOVIES_DATA, index_col=0) <NEW_LINE> self.movie_names = items.Name <NEW_LINE> <DEDENT> def find_movies(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user_id = self.user_id_var.get() <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> tkMessageBox.showerror("Input Error", "Please enter a number as User ID") <NEW_LINE> return <NEW_LINE> <DEDENT> self.result_list.delete(0, END) <NEW_LINE> try: <NEW_LINE> <INDENT> movies = ur.getmemovies(self.ratings, user_id, self.movie_names) <NEW_LINE> for item in movies: <NEW_LINE> <INDENT> self.result_list.insert(END, item) <NEW_LINE> <DEDENT> <DEDENT> except IndexError: <NEW_LINE> 
<INDENT> tkMessageBox.showerror("Input Error", "Invalid User ID") <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> tkMessageBox.showerror("Unexpected Error", e)
Implement a user interface for the movie recommender system. The GUI allows user to input a user id to find the top 5 recommended movies based on user's rating data.
625990612c8b7c6e89bd4ec2
class RedirectDashboard(BrowserView): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> url = self.context.absolute_url() + '/wp-admin-dashboard' <NEW_LINE> self.request.response.redirect(url)
Redirect to wp-admin-dashboard
625990618e7ae83300eea760
class DeciderTests(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.decider = Decider(100, 0.05) <NEW_LINE> <DEDENT> def test_decide(self): <NEW_LINE> <INDENT> pump = Pump('127.0.0.1', 1000) <NEW_LINE> actions = { 'PUMP_IN': pump.PUMP_IN, 'PUMP_OUT': pump.PUMP_OUT, 'PUMP_OFF': pump.PUMP_OFF, } <NEW_LINE> self.assertEqual(self.decider.decide(130, 'PUMP_OFF', actions), 1) <NEW_LINE> self.assertEqual(self.decider.decide(40, 'PUMP_OFF', actions), -1) <NEW_LINE> self.assertEqual(self.decider.decide(105, 'PUMP_OFF', actions), 0) <NEW_LINE> self.assertEqual(self.decider.decide(140, 'PUMP_IN', actions), 0) <NEW_LINE> self.assertEqual(self.decider.decide(85, 'PUMP_OUT', actions), 0) <NEW_LINE> self.assertEqual(self.decider.decide(110, 'PUMP_OUT', actions), -1)
This method does a setup for unit testing Decider
62599061097d151d1a2c2743
class FramedProtocol(asyncio.BaseProtocol): <NEW_LINE> <INDENT> def frame_received(self, frame): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def eof_received(self): <NEW_LINE> <INDENT> pass
An interface for protocols expecting to receive framed data. This class should be implemented almost identically to the ``asyncio.Protocol`` class, with the substitution of a ``frame_received()`` method for the ``data_received()`` method.
62599061b7558d5895464a97
class ManifestEntryIGNORE(ManifestPathEntry): <NEW_LINE> <INDENT> tag = 'IGNORE' <NEW_LINE> @classmethod <NEW_LINE> def from_list(cls, data): <NEW_LINE> <INDENT> assert data[0] == cls.tag <NEW_LINE> return cls(cls.process_path(data)) <NEW_LINE> <DEDENT> def to_list(self): <NEW_LINE> <INDENT> return (self.tag, self.encoded_path)
Ignored path
625990614e4d562566373ada
class Tipue(LateTask): <NEW_LINE> <INDENT> name = "local_search" <NEW_LINE> def gen_tasks(self): <NEW_LINE> <INDENT> self.site.scan_posts() <NEW_LINE> kw = { "translations": self.site.config['TRANSLATIONS'], "output_folder": self.site.config['OUTPUT_FOLDER'], "filters": self.site.config['FILTERS'], "timeline": self.site.timeline, } <NEW_LINE> posts = self.site.timeline[:] <NEW_LINE> dst_path = os.path.join(kw["output_folder"], "assets", "js", "tipuesearch_content.json") <NEW_LINE> def save_data(): <NEW_LINE> <INDENT> pages = [] <NEW_LINE> for lang in kw["translations"]: <NEW_LINE> <INDENT> for post in posts: <NEW_LINE> <INDENT> if post.is_draft or post.is_private or post.publish_later: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> text = post.text(lang, strip_html=True) <NEW_LINE> text = text.replace('^', '') <NEW_LINE> data = {} <NEW_LINE> data["title"] = post.title(lang) <NEW_LINE> data["text"] = text <NEW_LINE> data["tags"] = ",".join(post.tags) <NEW_LINE> data["url"] = post.permalink(lang, absolute=True) <NEW_LINE> pages.append(data) <NEW_LINE> <DEDENT> <DEDENT> output = json.dumps({"pages": pages}, indent=2) <NEW_LINE> makedirs(os.path.dirname(dst_path)) <NEW_LINE> with codecs.open(dst_path, "wb+", "utf8") as fd: <NEW_LINE> <INDENT> fd.write(output) <NEW_LINE> <DEDENT> <DEDENT> task = { "basename": str(self.name), "name": dst_path, "targets": [dst_path], "actions": [(save_data, [])], 'uptodate': [config_changed(kw)], 'calc_dep': ['_scan_locs:sitemap'] } <NEW_LINE> yield apply_filters(task, kw['filters']) <NEW_LINE> asset_folder = os.path.join(os.path.dirname(__file__), "files") <NEW_LINE> for task in copy_tree(asset_folder, kw["output_folder"]): <NEW_LINE> <INDENT> task["basename"] = str(self.name) <NEW_LINE> yield apply_filters(task, kw['filters'])
Render the blog posts as JSON data.
6259906144b2445a339b74ca
class DeleteSubnetResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId")
DeleteSubnet返回参数结构体
62599061e5267d203ee6cf29
class win32tz(datetime.tzinfo): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.data = win32tz_data(name) <NEW_LINE> <DEDENT> def utcoffset(self, dt): <NEW_LINE> <INDENT> if self._isdst(dt): <NEW_LINE> <INDENT> return datetime.timedelta(minutes=self.data.dstoffset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return datetime.timedelta(minutes=self.data.stdoffset) <NEW_LINE> <DEDENT> <DEDENT> def dst(self, dt): <NEW_LINE> <INDENT> if self._isdst(dt): <NEW_LINE> <INDENT> minutes = self.data.dstoffset - self.data.stdoffset <NEW_LINE> return datetime.timedelta(minutes=minutes) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return datetime.timedelta(0) <NEW_LINE> <DEDENT> <DEDENT> def tzname(self, dt): <NEW_LINE> <INDENT> if self._isdst(dt): <NEW_LINE> <INDENT> return self.data.dstname <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.data.stdname <NEW_LINE> <DEDENT> <DEDENT> def _isdst(self, dt): <NEW_LINE> <INDENT> dat = self.data <NEW_LINE> dston = pickNthWeekday(dt.year, dat.dstmonth, dat.dstdayofweek, dat.dsthour, dat.dstminute, dat.dstweeknumber) <NEW_LINE> dstoff = pickNthWeekday(dt.year, dat.stdmonth, dat.stddayofweek, dat.stdhour, dat.stdminute, dat.stdweeknumber) <NEW_LINE> if dston < dstoff: <NEW_LINE> <INDENT> return (dston <= dt.replace(tzinfo=None) < dstoff) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return not (dstoff <= dt.replace(tzinfo=None) < dston) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<win32tz - {0!s}>".format(self.data.display)
tzinfo class based on win32's timezones available in the registry. >>> local = win32tz('Central Standard Time') >>> oct1 = datetime.datetime(month=10, year=2004, day=1, tzinfo=local) >>> dec1 = datetime.datetime(month=12, year=2004, day=1, tzinfo=local) >>> oct1.dst() datetime.timedelta(0, 3600) >>> dec1.dst() datetime.timedelta(0) >>> braz = win32tz('E. South America Standard Time') >>> braz.dst(oct1) datetime.timedelta(0) >>> braz.dst(dec1) datetime.timedelta(0, 3600)
62599061be8e80087fbc075a
class conformation_scorer (object) : <NEW_LINE> <INDENT> def __init__ (self, old_residue, new_residue) : <NEW_LINE> <INDENT> from scitbx.array_family import flex <NEW_LINE> old_residue_atoms = old_residue.atoms() <NEW_LINE> self.new_residue_atoms = new_residue.atoms() <NEW_LINE> n_atoms = self.new_residue_atoms.size() <NEW_LINE> self.new_residue_selection = flex.bool(n_atoms, False) <NEW_LINE> self.selection_mappings = flex.size_t(n_atoms, 0) <NEW_LINE> for i_seq, old_atom in enumerate(old_residue_atoms) : <NEW_LINE> <INDENT> for j_seq, new_atom in enumerate(self.new_residue_atoms) : <NEW_LINE> <INDENT> if (old_atom.name == new_atom.name) : <NEW_LINE> <INDENT> self.new_residue_selection[j_seq] = True <NEW_LINE> self.selection_mappings[j_seq] = i_seq <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.sites_old = old_residue_atoms.extract_xyz() <NEW_LINE> self.sites_cart = self.new_residue_atoms.extract_xyz() <NEW_LINE> self.dist_min = None <NEW_LINE> <DEDENT> def update (self, sites_cart, selection) : <NEW_LINE> <INDENT> first_i_seq = selection[0] <NEW_LINE> if (self.new_residue_selection[first_i_seq]) : <NEW_LINE> <INDENT> dist = abs(col(sites_cart[first_i_seq]) - col(self.sites_old[self.selection_mappings[first_i_seq]])) <NEW_LINE> if (dist < self.dist_min) : <NEW_LINE> <INDENT> self.sites_cart = sites_cart <NEW_LINE> self.dist_min = dist <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def reset (self, sites_cart, selection) : <NEW_LINE> <INDENT> self.sites_cart = sites_cart <NEW_LINE> first_i_seq = selection[0] <NEW_LINE> if (self.new_residue_selection[first_i_seq]) : <NEW_LINE> <INDENT> self.dist_min = abs(col(sites_cart[first_i_seq]) - col(self.sites_old[self.selection_mappings[first_i_seq]])) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> self.dist_min = sys.maxint <NEW_LINE> <DEDENT> <DEDENT> def apply_final (self) : <NEW_LINE> <INDENT> self.new_residue_atoms.set_xyz(self.sites_cart)
Stand-in for the conformation scoring class in mmtbx.refinement.real_space; instead of calculating fit to the map, this simply uses the change in position of the first atom being moved at each rotation. This allows us to superimpose the conformations for those atoms which are present in both the old and the new residues.
625990614428ac0f6e659c07
class TextColumn(Column): <NEW_LINE> <INDENT> def __init__(self, name: str): <NEW_LINE> <INDENT> super().__init__(name, 'text')
TextColumn class representing a column with data type "text".
62599061cc0a2c111447c639
@implementer(ITensorSource) <NEW_LINE> class ElasticSearch(Source): <NEW_LINE> <INDENT> def __init__(self, *a, **kw): <NEW_LINE> <INDENT> Source.__init__(self, *a, **kw) <NEW_LINE> self.url = self.config.get('url', 'http://localhost:9200').rstrip('\n') <NEW_LINE> user = self.config.get('user') <NEW_LINE> passwd = self.config.get('password') <NEW_LINE> self.client = elasticsearch.ElasticSearch(self.url, user, passwd) <NEW_LINE> <DEDENT> @defer.inlineCallbacks <NEW_LINE> def get(self): <NEW_LINE> <INDENT> stats = yield self.client.stats() <NEW_LINE> node_stats = yield self.client.node_stats() <NEW_LINE> status = {'green': 2, 'yellow': 1, 'red': 0}[stats['status']] <NEW_LINE> nodes = stats['nodes']['count']['total'] <NEW_LINE> index_count = stats['indices']['count'] <NEW_LINE> shards = stats['indices']['shards']['total'] <NEW_LINE> shards_primary = stats['indices']['shards']['primaries'] <NEW_LINE> docs = stats['indices']['docs']['count'] <NEW_LINE> store = stats['indices']['store']['size_in_bytes'] <NEW_LINE> events = [ self.createEvent('ok', 'Status', status, prefix='cluster.status'), self.createEvent('ok', 'Nodes', nodes, prefix='cluster.nodes'), self.createEvent('ok', 'Indices', index_count, prefix='indices'), self.createEvent('ok', 'Shards', shards, prefix='shards.total'), self.createEvent('ok', 'Primary shards', shards_primary, prefix='shards.primary'), self.createEvent('ok', 'Documents', shards_primary, prefix='documents.total'), self.createEvent('ok', 'Documents', shards_primary, prefix='documents.rate', aggregation=Counter64), self.createEvent('ok', 'Store size', store, prefix='documents.size'), ] <NEW_LINE> nodes = {} <NEW_LINE> for k, v in node_stats['nodes'].items(): <NEW_LINE> <INDENT> node_name = v['host'] <NEW_LINE> if v.get('attributes', {}).get('client', 'false') == 'true': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if node_name not in nodes: <NEW_LINE> <INDENT> nodes[node_name] = { 'search': v['indices']['search']['query_total'], 'delete': 
v['indices']['indexing']['delete_total'], 'index': v['indices']['indexing']['index_total'], 'get': v['indices']['get']['total'] } <NEW_LINE> <DEDENT> <DEDENT> for node, ms in nodes.items(): <NEW_LINE> <INDENT> for mname, m in ms.items(): <NEW_LINE> <INDENT> events.append(self.createEvent('ok', mname, m, prefix='nodes.%s.%s' % (node, mname), aggregation=Counter64)) <NEW_LINE> <DEDENT> <DEDENT> defer.returnValue(events)
Reads elasticsearch metrics **Configuration arguments:** :param url: Elasticsearch base URL (default: http://localhost:9200) :type url: str. :param user: Basic auth username :type user: str. :param password: Password :type password: str. **Metrics:** :(service name).cluster.status: Cluster status (Red=0, Yellow=1, Green=2) :(service name).cluster.nodes: Cluster node count :(service name).indices: Total indices in cluster :(service name).shards.total: Total number of shards :(service name).shards.primary: Number of primary shards :(service name).documents.total: Total documents :(service name).documents.rate: Documents per second :(service name).documents.size: Size of document store in bytes
62599061f548e778e596cc5c
class TestGameEditorial(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testGameEditorial(self): <NEW_LINE> <INDENT> pass
GameEditorial unit test stubs
62599061a219f33f346c7edb
class L2ProductFunctionalQ1(NumpyMatrixBasedOperator): <NEW_LINE> <INDENT> sparse = False <NEW_LINE> def __init__(self, grid, function, boundary_info=None, dirichlet_data=None, order=2, name=None): <NEW_LINE> <INDENT> assert grid.reference_element(0) in {square} <NEW_LINE> assert function.shape_range == tuple() <NEW_LINE> self.dim_source = grid.size(grid.dim) <NEW_LINE> self.dim_range = 1 <NEW_LINE> self.grid = grid <NEW_LINE> self.boundary_info = boundary_info <NEW_LINE> self.function = function <NEW_LINE> self.dirichlet_data = dirichlet_data <NEW_LINE> self.order = order <NEW_LINE> self.name = name <NEW_LINE> self.build_parameter_type(inherits=(function, dirichlet_data)) <NEW_LINE> <DEDENT> def _assemble(self, mu=None): <NEW_LINE> <INDENT> mu = self.parse_parameter(mu) <NEW_LINE> g = self.grid <NEW_LINE> bi = self.boundary_info <NEW_LINE> F = self.function(g.quadrature_points(0, order=self.order), mu=mu) <NEW_LINE> q, w = g.reference_element.quadrature(order=self.order) <NEW_LINE> if g.dim == 2: <NEW_LINE> <INDENT> SF = np.array(((1-q[..., 0])*(1-q[..., 1]), (1-q[..., 1])*(q[..., 0]), (q[..., 0])*(q[..., 1]), (q[..., 1])*(1-q[..., 0]))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> SF_INTS = np.einsum('ei,pi,e,i->ep', F, SF, g.integration_elements(0), w).ravel() <NEW_LINE> SF_I = g.subentities(0, g.dim).ravel() <NEW_LINE> I = np.array(coo_matrix((SF_INTS, (np.zeros_like(SF_I), SF_I)), shape=(1, g.size(g.dim))).todense()).ravel() <NEW_LINE> if bi is not None and bi.has_dirichlet: <NEW_LINE> <INDENT> DI = bi.dirichlet_boundaries(g.dim) <NEW_LINE> if self.dirichlet_data is not None: <NEW_LINE> <INDENT> I[DI] = self.dirichlet_data(g.centers(g.dim)[DI], mu=mu) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> I[DI] = 0 <NEW_LINE> <DEDENT> <DEDENT> return NumpyMatrixOperator(I.reshape((1, -1)))
|Functional| representing the scalar product with an L2-|Function| for bilinear finite elements. Boundary treatment can be performed by providing `boundary_info` and `dirichlet_data`, in which case the DOFs corresponding to Dirichlet boundaries are set to the values provided by `dirichlet_data`. The current implementation works in two dimensions, but can be trivially extended to arbitrary dimensions. Parameters ---------- grid |Grid| over which to assemble the functional. function The |Function| with which to take the scalar product. boundary_info |BoundaryInfo| determining the Dirichlet boundaries or `None`. If `None`, no boundary treatment is performed. dirichlet_data |Function| providing the Dirichlet boundary values. If `None`, constant-zero boundary is assumed. order Order of the Gauss quadrature to use for numerical integration. name The name of the functional.
625990613539df3088ecd971
class Test0003PuppetIndexesDropped(unittest.TestCase): <NEW_LINE> <INDENT> @patch.object(migration, 'get_collection') <NEW_LINE> def test_migration(self, mock_get_collection): <NEW_LINE> <INDENT> migration.migrate() <NEW_LINE> mock_get_collection.assert_called_once_with('units_puppet_module') <NEW_LINE> calls = [call('name_1_version_1_author_1'), call('author_1'), call('tag_list_1')] <NEW_LINE> mock_get_collection.return_value.drop_index.assert_has_calls(calls)
Test the migration of dropping the puppet module indexes
625990613617ad0b5ee07823
class CuTarget(CcTarget): <NEW_LINE> <INDENT> def __init__(self, name, type, srcs, deps, visibility, tags, warning, defs, incs, extra_cppflags, extra_linkflags, kwargs): <NEW_LINE> <INDENT> srcs = var_to_list(srcs) <NEW_LINE> deps = var_to_list(deps) <NEW_LINE> extra_cppflags = var_to_list(extra_cppflags) <NEW_LINE> extra_linkflags = var_to_list(extra_linkflags) <NEW_LINE> super(CuTarget, self).__init__( name=name, type=type, srcs=srcs, deps=deps, visibility=visibility, tags=tags, warning=warning, defs=defs, incs=incs, export_incs=[], optimize=None, linkflags=None, extra_cppflags=extra_cppflags, extra_linkflags=extra_linkflags, kwargs=kwargs) <NEW_LINE> self._add_tags('lang:cu') <NEW_LINE> <DEDENT> def _get_cu_flags(self): <NEW_LINE> <INDENT> nvcc_flags = [] <NEW_LINE> if self.attr.get('warning', '') == 'no': <NEW_LINE> <INDENT> nvcc_flags.append('-w') <NEW_LINE> <DEDENT> defs = self.attr.get('defs', []) <NEW_LINE> nvcc_flags += [('-D' + macro) for macro in defs] <NEW_LINE> if (self.blade.get_options().profile == 'release' or self.attr.get('always_optimize')): <NEW_LINE> <INDENT> nvcc_flags += self._get_optimize_flags() <NEW_LINE> <DEDENT> nvcc_flags += self.attr.get('extra_cppflags', []) <NEW_LINE> incs = self._get_incs_list() <NEW_LINE> return nvcc_flags, incs
This class is derived from CcTarget and is the base class of cu_library, cu_binary etc.
6259906145492302aabfdbaf
class ComputeVpnTunnelsListRequest(_messages.Message): <NEW_LINE> <INDENT> filter = _messages.StringField(1) <NEW_LINE> maxResults = _messages.IntegerField(2, variant=_messages.Variant.UINT32, default=500) <NEW_LINE> orderBy = _messages.StringField(3) <NEW_LINE> pageToken = _messages.StringField(4) <NEW_LINE> project = _messages.StringField(5, required=True) <NEW_LINE> region = _messages.StringField(6, required=True)
A ComputeVpnTunnelsListRequest object. Fields: filter: Sets a filter {expression} for filtering listed resources. Your {expression} must be in the format: field_name comparison_string literal_string. The field_name is the name of the field you want to compare. Only atomic field types are supported (string, number, boolean). The comparison_string must be either eq (equals) or ne (not equals). The literal_string is the string value to filter to. The literal value must be valid for the type of field you are filtering by (string, number, boolean). For string fields, the literal value is interpreted as a regular expression using RE2 syntax. The literal value must match the entire field. For example, to filter for instances that do not have a name of example-instance, you would use name ne example- instance. You can filter on nested fields. For example, you could filter on instances that have set the scheduling.automaticRestart field to true. Use filtering on nested fields to take advantage of labels to organize and search for results based on label values. To filter on multiple expressions, provide each separate expression within parentheses. For example, (scheduling.automaticRestart eq true) (zone eq us-central1-f). Multiple expressions are treated as AND expressions, meaning that resources must match all expressions to pass the filters. maxResults: The maximum number of results per page that should be returned. If the number of available results is larger than maxResults, Compute Engine returns a nextPageToken that can be used to get the next page of results in subsequent list requests. Acceptable values are 0 to 500, inclusive. (Default: 500) orderBy: Sorts list results by a certain order. By default, results are returned in alphanumerical order based on the resource name. You can also sort results in descending order based on the creation timestamp using orderBy="creationTimestamp desc". 
This sorts results based on the creationTimestamp field in reverse chronological order (newest result first). Use this to sort resources like operations so that the newest operation is returned first. Currently, only sorting by name or creationTimestamp desc is supported. pageToken: Specifies a page token to use. Set pageToken to the nextPageToken returned by a previous list request to get the next page of results. project: Project ID for this request. region: Name of the region for this request.
625990613eb6a72ae038bd34
class DestinationResponse(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.systran_types = { 'error': 'ErrorResponse', 'total': 'int', 'offset': 'int', 'destinations': 'list[Destination]' } <NEW_LINE> self.attribute_map = { 'error': 'error', 'total': 'total', 'offset': 'offset', 'destinations': 'destinations' } <NEW_LINE> self.error = None <NEW_LINE> self.total = None <NEW_LINE> self.offset = None <NEW_LINE> self.destinations = None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> properties = [] <NEW_LINE> for p in self.__dict__: <NEW_LINE> <INDENT> if p != 'systran_types' and p != 'attribute_map': <NEW_LINE> <INDENT> properties.append('{prop}={val!r}'.format(prop=p, val=self.__dict__[p])) <NEW_LINE> <DEDENT> <DEDENT> return '<{name} {props}>'.format(name=__name__, props=' '.join(properties))
NOTE: This class is auto generated by the systran code generator program. Do not edit the class manually.
625990614f88993c371f1089
class User(AbstractUser): <NEW_LINE> <INDENT> @property <NEW_LINE> def access_token(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.social_auth.first().extra_data[u'access_token'] <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> class Meta(object): <NEW_LINE> <INDENT> get_latest_by = 'date_joined' <NEW_LINE> db_table = 'ecommerce_user'
Custom user model for use with OIDC.
62599061a8ecb033258728ec
class BasePreprocessor(object): <NEW_LINE> <INDENT> def __init__(self, name=None, shape=None, *args, **kwargs): <NEW_LINE> <INDENT> self._shape = shape <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> def process(self, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def provide_data(self): <NEW_LINE> <INDENT> return [(self._name, self._shape)] <NEW_LINE> <DEDENT> @property <NEW_LINE> def provide_output(self): <NEW_LINE> <INDENT> return [self._name] <NEW_LINE> <DEDENT> @property <NEW_LINE> def provide_input(self): <NEW_LINE> <INDENT> return [self._name]
Base class for preprocessors
6259906197e22403b383c5e2
class ADJUSTMENT(Aggregate): <NEW_LINE> <INDENT> adjno = String(32) <NEW_LINE> adjdesc = String(80, required=True) <NEW_LINE> adjamt = Decimal(required=True) <NEW_LINE> adjdate = DateTime()
OFX Section 12.5.2.4
625990629c8ee82313040cf4
class A2_XML(SetupMixin, unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> SetupMixin.setUp(self, ET.fromstring(XML_LONG)) <NEW_LINE> <DEDENT> def test_1_HouseDiv(self): <NEW_LINE> <INDENT> l_xml = self.m_xml.house_div <NEW_LINE> self.assertEqual(l_xml.attrib['Name'], TESTING_HOUSE_NAME) <NEW_LINE> self.assertEqual(l_xml.attrib['Active'], TESTING_HOUSE_ACTIVE) <NEW_LINE> self.assertEqual(l_xml.attrib['Key'], TESTING_HOUSE_KEY) <NEW_LINE> self.assertEqual(l_xml.find('UUID').text, TESTING_HOUSE_UUID) <NEW_LINE> <DEDENT> def test_2_ScheduleSect(self): <NEW_LINE> <INDENT> l_xml = self.m_xml.schedule_sect <NEW_LINE> l_len = len(l_xml) <NEW_LINE> self.assertEqual(l_len, 4) <NEW_LINE> <DEDENT> def test_3_Schedule(self): <NEW_LINE> <INDENT> l_xml = self.m_xml.schedule <NEW_LINE> self.assertEqual(l_xml.attrib['Name'], TESTING_SCHEDULE_NAME_0) <NEW_LINE> self.assertEqual(l_xml.attrib['Active'], TESTING_SCHEDULE_ACTIVE_0) <NEW_LINE> self.assertEqual(l_xml.attrib['Key'], TESTING_SCHEDULE_KEY_0) <NEW_LINE> self.assertEqual(l_xml.find('UUID').text, TESTING_SCHEDULE_UUID_0)
Be sure that we load the data properly as a whole test. Detailed test of xml is in the test_schedule_xml module.
62599062a17c0f6771d5d70f
class TestSqlGroupsAdapter(GroupsAdapterTester, _BaseSqlAdapterTester): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestSqlGroupsAdapter, self).setUp() <NEW_LINE> databasesetup.setup_database() <NEW_LINE> self.adapter = SqlGroupsAdapter(databasesetup.Group, databasesetup.User, databasesetup.DBSession)
Test suite for the SQL group source adapter
62599062627d3e7fe0e08560
class _Clearable: <NEW_LINE> <INDENT> def clear(self: _HasRedisClientAndKey) -> None: <NEW_LINE> <INDENT> self.redis.unlink(self.key)
Mixin class that implements clearing (emptying) a Redis-backed collection.
625990623d592f4c4edbc5b2
class XsdMinInclusiveFacet(XsdFacet): <NEW_LINE> <INDENT> base_type: BaseXsdType <NEW_LINE> _ADMITTED_TAGS = XSD_MIN_INCLUSIVE, <NEW_LINE> def _parse_value(self, elem: ElementType) -> None: <NEW_LINE> <INDENT> value = elem.attrib['value'] <NEW_LINE> self.value, errors = cast(LaxDecodeType, self.base_type.decode(value, 'lax')) <NEW_LINE> for e in errors: <NEW_LINE> <INDENT> self.parse_error("invalid restriction: {}".format(e.reason)) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, value: Any) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if value < self.value: <NEW_LINE> <INDENT> reason = "value has to be greater or equal than {!r}".format(self.value) <NEW_LINE> raise XMLSchemaValidationError(self, value, reason) <NEW_LINE> <DEDENT> <DEDENT> except TypeError as err: <NEW_LINE> <INDENT> raise XMLSchemaValidationError(self, value, str(err)) from None
XSD *minInclusive* facet. .. <minInclusive fixed = boolean : false id = ID value = anySimpleType {any attributes with non-schema namespace . . .}> Content: (annotation?) </minInclusive>
6259906276e4537e8c3f0c62
@tf_export('keras.layers.FlexPooling') <NEW_LINE> class FlexPooling(Layer): <NEW_LINE> <INDENT> def __init__(self, features, neighborhoods, data_format='simple', name=None): <NEW_LINE> <INDENT> super(FlexPooling, self).__init__(name=name) <NEW_LINE> self.features = features <NEW_LINE> self.neighborhoods = neighborhoods <NEW_LINE> self.data_format = data_format <NEW_LINE> <DEDENT> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return tensor_shape.TensorShape(input_shape) <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> self.built = True <NEW_LINE> <DEDENT> def call(self, inputs): <NEW_LINE> <INDENT> if not isinstance(inputs, list): <NEW_LINE> <INDENT> raise ValueError('A flexconv layer should be called ' 'on a list of inputs.') <NEW_LINE> <DEDENT> features = ops.convert_to_tensor(inputs[0], dtype=self.dtype) <NEW_LINE> neighborhoods = ops.convert_to_tensor(inputs[1], dtype=tf.int32) <NEW_LINE> if self.data_format == 'expanded': <NEW_LINE> <INDENT> features = _remove_dim(features, 2) <NEW_LINE> neighborhoods = _remove_dim(neighborhoods, 2) <NEW_LINE> <DEDENT> y, _ = _flex_pooling(features, neighborhoods) <NEW_LINE> if self.data_format == 'expanded': <NEW_LINE> <INDENT> y = tf.expand_dims(y, axis=2) <NEW_LINE> <DEDENT> return y
flex pooling layer. This layer performs a max-pooling operation over elements in arbitrary neighborhoods. When `data_format` is 'simple', the input shape should have rank 3, otherwise rank 4 and dimension 2 should be 1. Remarks: In contrast to traditional pooling, this operation has no option for sub-sampling. Arguments: features: A `Tensor` of the format [B, Din, (1), N]. neighborhoods: A `Tensor` of the format [B, K, (1), N] (tf.int32). name: A string, the name of the layer.
62599062a219f33f346c7edd
class BGEError(Exception): <NEW_LINE> <INDENT> pass
SDK 错误
62599062379a373c97d9a6fa
class ModelEntity: <NEW_LINE> <INDENT> def __init__(self, entity_id, model, history_index=-1, connected=True): <NEW_LINE> <INDENT> self.entity_id = entity_id <NEW_LINE> self.model = model <NEW_LINE> self._history_index = history_index <NEW_LINE> self.connected = connected <NEW_LINE> self.connection = model.connection() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<{} entity_id="{}">'.format(type(self).__name__, self.entity_id) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.safe_data[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> name = name.replace('_', '-') <NEW_LINE> if name in self.safe_data: <NEW_LINE> <INDENT> return self.safe_data[name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return bool(self.data) <NEW_LINE> <DEDENT> def on_change(self, callable_): <NEW_LINE> <INDENT> self.model.add_observer( callable_, self.entity_type, 'change', self.entity_id) <NEW_LINE> <DEDENT> def on_remove(self, callable_): <NEW_LINE> <INDENT> self.model.add_observer( callable_, self.entity_type, 'remove', self.entity_id) <NEW_LINE> <DEDENT> @property <NEW_LINE> def entity_type(self): <NEW_LINE> <INDENT> if hasattr(self.__class__, "type_name_override") and callable(self.__class__.type_name_override): <NEW_LINE> <INDENT> return self.__class__.type_name_override() <NEW_LINE> <DEDENT> def first_lower(s): <NEW_LINE> <INDENT> if len(s) == 0: <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return s[0].lower() + s[1:] <NEW_LINE> <DEDENT> <DEDENT> return first_lower(self.__class__.__name__) <NEW_LINE> <DEDENT> @property <NEW_LINE> def current(self): <NEW_LINE> <INDENT> return self._history_index == -1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def dead(self): <NEW_LINE> <INDENT> return ( self.data is None or self.model.state.entity_data( self.entity_type, self.entity_id, -1) is None 
) <NEW_LINE> <DEDENT> @property <NEW_LINE> def alive(self): <NEW_LINE> <INDENT> return not self.dead <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self.model.state.entity_data( self.entity_type, self.entity_id, self._history_index) <NEW_LINE> <DEDENT> @property <NEW_LINE> def safe_data(self): <NEW_LINE> <INDENT> if self.data is None: <NEW_LINE> <INDENT> raise DeadEntityException( "Entity {}:{} is dead - its attributes can no longer be " "accessed. Use the .previous() method on this object to get " "a copy of the object at its previous state.".format( self.entity_type, self.entity_id)) <NEW_LINE> <DEDENT> return self.data <NEW_LINE> <DEDENT> def previous(self): <NEW_LINE> <INDENT> return self.model.state.get_entity( self.entity_type, self.entity_id, self._history_index - 1, connected=False) <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self._history_index == -1: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> new_index = self._history_index + 1 <NEW_LINE> connected = ( new_index == len(self.model.state.entity_history( self.entity_type, self.entity_id)) - 1 ) <NEW_LINE> return self.model.state.get_entity( self.entity_type, self.entity_id, self._history_index - 1, connected=connected) <NEW_LINE> <DEDENT> def latest(self): <NEW_LINE> <INDENT> if self._history_index == -1: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> return self.model.state.get_entity(self.entity_type, self.entity_id)
An object in the Model tree
6259906232920d7e50bc771d
class element_exists_and_does_not_contain_expected_text(object): <NEW_LINE> <INDENT> def __init__(self, locator, expected_text): <NEW_LINE> <INDENT> self.locator = locator <NEW_LINE> self.expected_text = expected_text <NEW_LINE> <DEDENT> def __call__(self, driver): <NEW_LINE> <INDENT> element = driver.find_element(*self.locator) <NEW_LINE> if not element: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> element_content = element.get_attribute('innerText').strip() <NEW_LINE> if self.expected_text not in element_content: <NEW_LINE> <INDENT> return element <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
An expectation for checking that an element exists and its innerText attribute does not contain expected text. This class is meant to be used in combination with Selenium's `WebDriverWait::until()`. For example: ``` custom_wait = WebDriverWait(browser, 10) smart_ballot_tracker_element = custom_wait.until(element_exists_and_does_not_contain_expected_text((By.ID, "my_id"), "my expected text")) ``` :param locator: Selenium locator used to find the element. For example: `(By.ID, "my_id")` :param expected_text: Text expected to not be present in element's innerText attribute (parameter type: string) :return: The WebElement once its innerText attribute contains expected_text
6259906245492302aabfdbb1
class MessageBuilder(object): <NEW_LINE> <INDENT> ASCII_CODES = [0, 1, 2, 4] <NEW_LINE> DATAFRAME_CODES = [3] <NEW_LINE> BINARY_CODE = [5] <NEW_LINE> GET_RESULT_CLASS = lambda: Message <NEW_LINE> @classmethod <NEW_LINE> def unpack_all(cls, mbytes, mtype): <NEW_LINE> <INDENT> if mtype in cls.ASCII_CODES: <NEW_LINE> <INDENT> return AsciiMessage(mbytes, mtype) <NEW_LINE> <DEDENT> elif mtype in cls.DATAFRAME_CODES: <NEW_LINE> <INDENT> return DataFrame(rawdf=mbytes) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Application does not support message type {}'.format(mtype)) <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def pack_all(messageobj): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def average_dataframes(dflist): <NEW_LINE> <INDENT> n = len(dflist) <NEW_LINE> if n > 1 and all(dflist[0].check_same_structure(dflist[n]) for n in range(1, n)): <NEW_LINE> <INDENT> meanobj = DataFrame() <NEW_LINE> meanobj.__dict__ = copy.deepcopy(dflist[-1].__dict__) <NEW_LINE> meanobj.smoothed = True <NEW_LINE> newcoils = meanobj.give_coils() <NEW_LINE> smoothedcoils = [CoilBuilder.average(cs) for cs in zip(*[d.give_coils() for d in dflist])] <NEW_LINE> for n, s in zip(newcoils, smoothedcoils): <NEW_LINE> <INDENT> n = s <NEW_LINE> <DEDENT> return meanobj <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise KeyError("The DataFrame objects you are trying to average have a different structure")
Give a bytestring to build the relevant message object (either DataFrame or AsciiMessage)
625990624f6381625f19a00e
class InvalidArgumentError(RuntimeError): <NEW_LINE> <INDENT> pass
! @brief Exception class raised for invalid target names.
625990628a43f66fc4bf3865
class Timer(object): <NEW_LINE> <INDENT> def __init__(self, task_description='elapsed time', verbose=False): <NEW_LINE> <INDENT> self.verbose = verbose <NEW_LINE> self.task_description = task_description <NEW_LINE> self.laps = OrderedDict() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.start(lap="__enter__") <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> sys.stdout.write('{}...'.format(self.task_description)) <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, *args): <NEW_LINE> <INDENT> self.stop() <NEW_LINE> backspace = '\b\b\b' <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> sys.stdout.flush() <NEW_LINE> if self.elapsed_raw() < 1.0: <NEW_LINE> <INDENT> sys.stdout.write(backspace + ':' + '{:.2f}ms\n'.format( self.elapsed_raw() * 1000)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sys.stdout.write(backspace + ': ' + '{}\n'.format( self.elapsed())) <NEW_LINE> <DEDENT> sys.stdout.flush() <NEW_LINE> <DEDENT> <DEDENT> def start(self, lap=None): <NEW_LINE> <INDENT> t = time.time() <NEW_LINE> first = None if len(self.laps) == 0 else self.laps.iteritems().next()[0] <NEW_LINE> if first is None: <NEW_LINE> <INDENT> self.laps["__enter__"] = t <NEW_LINE> <DEDENT> if lap is not None: <NEW_LINE> <INDENT> self.laps[lap] = t <NEW_LINE> <DEDENT> return t <NEW_LINE> <DEDENT> def lap(self, lap="__lap__"): <NEW_LINE> <INDENT> t = time.time() <NEW_LINE> self.laps[lap] = t <NEW_LINE> return t <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> return self.lap(lap="__exit__") <NEW_LINE> <DEDENT> def get_lap(self, lap="__exit__"): <NEW_LINE> <INDENT> return self.lap[lap] <NEW_LINE> <DEDENT> def elapsed_raw(self, start="__enter__", end="__exit__"): <NEW_LINE> <INDENT> return self.laps[end] - self.laps[start] <NEW_LINE> <DEDENT> def elapsed(self, start="__enter__", end="__exit__"): <NEW_LINE> <INDENT> hours, rem = divmod(self.elapsed_raw(start, end), 3600) <NEW_LINE> minutes, seconds = divmod(rem, 60) <NEW_LINE> 
return "{:0>2}:{:0>2}:{:05.2f}".format( int(hours), int(minutes), seconds)
Timer object usable as a context manager, or for manual timing. Based on code from http://coreygoldberg.blogspot.com/2012/06/python-timer-class-context-manager-for.html # noqa As a context manager, do: from timer import Timer url = 'https://github.com/timeline.json' with Timer() as t: r = requests.get(url) print 'fetched %r in %.2f millisecs' % (url, t.elapsed*1000)
625990621f037a2d8b9e53d6
class AbstractTkPushButton(AbstractTkControl): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def shell_text_changed(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def shell_icon_changed(self, icon): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def shell_icon_size_changed(self, icon_size): <NEW_LINE> <INDENT> raise NotImplementedError
The abstract toolkit interface for a PushButton.
6259906266673b3332c31ad3
class FirewallPolicyNatRule(FirewallPolicyRule): <NEW_LINE> <INDENT> _validation = { 'rule_type': {'required': True}, 'priority': {'maximum': 65000, 'minimum': 100}, } <NEW_LINE> _attribute_map = { 'rule_type': {'key': 'ruleType', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'priority': {'key': 'priority', 'type': 'int'}, 'action': {'key': 'action', 'type': 'FirewallPolicyNatRuleAction'}, 'translated_address': {'key': 'translatedAddress', 'type': 'str'}, 'translated_port': {'key': 'translatedPort', 'type': 'str'}, 'rule_condition': {'key': 'ruleCondition', 'type': 'FirewallPolicyRuleCondition'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(FirewallPolicyNatRule, self).__init__(**kwargs) <NEW_LINE> self.rule_type = 'FirewallPolicyNatRule' <NEW_LINE> self.action = kwargs.get('action', None) <NEW_LINE> self.translated_address = kwargs.get('translated_address', None) <NEW_LINE> self.translated_port = kwargs.get('translated_port', None) <NEW_LINE> self.rule_condition = kwargs.get('rule_condition', None)
Firewall Policy NAT Rule. All required parameters must be populated in order to send to Azure. :param rule_type: Required. The type of the rule.Constant filled by server. Possible values include: "FirewallPolicyNatRule", "FirewallPolicyFilterRule". :type rule_type: str or ~azure.mgmt.network.v2019_07_01.models.FirewallPolicyRuleType :param name: The name of the rule. :type name: str :param priority: Priority of the Firewall Policy Rule resource. :type priority: int :param action: The action type of a Nat rule. :type action: ~azure.mgmt.network.v2019_07_01.models.FirewallPolicyNatRuleAction :param translated_address: The translated address for this NAT rule. :type translated_address: str :param translated_port: The translated port for this NAT rule. :type translated_port: str :param rule_condition: The match conditions for incoming traffic. :type rule_condition: ~azure.mgmt.network.v2019_07_01.models.FirewallPolicyRuleCondition
62599062a8ecb033258728ee
class Zoomify(object): <NEW_LINE> <INDENT> def __init__(self, width, height, tile_size=256, tileformat='jpg'): <NEW_LINE> <INDENT> self.tile_size = tile_size <NEW_LINE> self.tileformat = tileformat <NEW_LINE> imagesize = (width, height) <NEW_LINE> tiles = (math.ceil(width / tile_size), math.ceil(height / tile_size)) <NEW_LINE> self.tierSizeInTiles = [] <NEW_LINE> self.tierSizeInTiles.append(tiles) <NEW_LINE> self.tierImageSize = [] <NEW_LINE> self.tierImageSize.append(imagesize) <NEW_LINE> while (imagesize[0] > tile_size or imagesize[1] > tile_size): <NEW_LINE> <INDENT> imagesize = (math.floor(imagesize[0] / 2), math.floor(imagesize[1] / 2)) <NEW_LINE> tiles = (math.ceil(imagesize[0] / tile_size), math.ceil(imagesize[1] / tile_size)) <NEW_LINE> self.tierSizeInTiles.append(tiles) <NEW_LINE> self.tierImageSize.append(imagesize) <NEW_LINE> <DEDENT> self.tierSizeInTiles.reverse() <NEW_LINE> self.tierImageSize.reverse() <NEW_LINE> self.numberOfTiers = len(self.tierSizeInTiles) <NEW_LINE> self.tileCountUpToTier = [] <NEW_LINE> self.tileCountUpToTier[0] = 0 <NEW_LINE> for i in range(1, self.numberOfTiers + 1): <NEW_LINE> <INDENT> self.tileCountUpToTier.append( self.tierSizeInTiles[i - 1][0] * self.tierSizeInTiles[i - 1][1] + self.tileCountUpToTier[i - 1] ) <NEW_LINE> <DEDENT> <DEDENT> def tilefilename(self, x, y, z): <NEW_LINE> <INDENT> tileIndex = x + y * self.tierSizeInTiles[z][0] + self.tileCountUpToTier[z] <NEW_LINE> return os.path.join("TileGroup%.0f" % math.floor(tileIndex / 256), "%s-%s-%s.%s" % (z, x, y, self.tileformat))
Tiles compatible with the Zoomify viewer ----------------------------------------
62599062435de62698e9d4de
class Webserver(Thread): <NEW_LINE> <INDENT> def __init__(self, port=8000, root='.'): <NEW_LINE> <INDENT> Thread.__init__(self) <NEW_LINE> Handler = AcrylServe <NEW_LINE> Handler.www_root = root <NEW_LINE> Handler.log_error = lambda x, *y: None <NEW_LINE> Handler.log_message = lambda x, *y: None <NEW_LINE> self.httpd = ReuseAddressServer(("", port), Handler) <NEW_LINE> self.kill_received = False <NEW_LINE> <DEDENT> def serve_forever(self): <NEW_LINE> <INDENT> while not self.kill_received: <NEW_LINE> <INDENT> self.handle_request() <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.httpd.serve_forever() <NEW_LINE> self.join(1)
A single-threaded webserver to serve while generation. :param port: port to listen on :param root: serve this directory under /
6259906256b00c62f0fb3fa2
class DR(Conversion): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def to_dict(): <NEW_LINE> <INDENT> return {"type": "datetime"} <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def x12_to_python(raw): <NEW_LINE> <INDENT> if raw is None or raw == "": <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> d1, punct, d2 = raw.partition('-') <NEW_LINE> if d1 is None or d2 is None or d1 == "" or d2 == "": <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> yy1, mm1, dd1 = int(d1[0:4]), int(d1[4:6]), int(d1[6:8]) <NEW_LINE> yy2, mm2, dd2 = int(d2[0:4]), int(d2[4:6]), int(d2[6:8]) <NEW_LINE> return datetime.date(yy1, mm1, dd1), datetime.date(yy2, mm2, dd2) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def python_to_x12(value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> d1, d2 = value <NEW_LINE> return "%s-%s" % (d1.strftime("4Y%m%d"), d2.strftime("%Y%m%d"))
Convert between DR format dates to proper DateTime objects.
62599062a79ad1619776b628
class ECParameters(univ.Choice): <NEW_LINE> <INDENT> componentType = namedtype.NamedTypes( namedtype.NamedType("namedCurve", univ.ObjectIdentifier()), namedtype.NamedType("implicitCurve", univ.Null()), namedtype.NamedType("specifiedCurve", SpecifiedECDomain()), )
RFC5480: Elliptic Curve Cryptography Subject Public Key Information ECParameters ::= CHOICE { namedCurve OBJECT IDENTIFIER -- implicitCurve NULL -- specifiedCurve SpecifiedECDomain }
6259906238b623060ffaa3bc