Columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24)
class Task: <NEW_LINE> <INDENT> def __init__(self, taskname=''): <NEW_LINE> <INDENT> self.data = dict() <NEW_LINE> self.setName(taskname) <NEW_LINE> <DEDENT> def setName(self, name): <NEW_LINE> <INDENT> if name != '': <NEW_LINE> <INDENT> self.data["name"] = name <NEW_LINE> <DEDENT> <DEDENT> def setCommand(self, command, TransferToServer=True): <NEW_LINE> <INDENT> if TransferToServer: <NEW_LINE> <INDENT> command = Pathmap.toServer(command) <NEW_LINE> <DEDENT> self.data["command"] = command <NEW_LINE> <DEDENT> def setFiles(self, files, TransferToServer=True): <NEW_LINE> <INDENT> if "files" not in self.data: <NEW_LINE> <INDENT> self.data["files"] = [] <NEW_LINE> <DEDENT> for afile in files: <NEW_LINE> <INDENT> if TransferToServer: <NEW_LINE> <INDENT> afile = Pathmap.toServer(afile) <NEW_LINE> <DEDENT> self.data["files"].append(afile)
Missing DocString
625990865fc7496912d49020
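A minimal usage sketch for the Task record above. The class relies on a Pathmap helper (with a toServer() mapping function) that the snippet does not show, so the sketch passes TransferToServer=False to avoid it; the task name, command, and file path are made up.

# Hypothetical usage; Pathmap.toServer is only needed when TransferToServer is True.
task = Task('render_frame_001')
task.setCommand('render --scene shot01', TransferToServer=False)
task.setFiles(['/local/shot01.blend'], TransferToServer=False)
print(task.data)   # {'name': 'render_frame_001', 'command': 'render --scene shot01', 'files': ['/local/shot01.blend']}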
class HighAvailabilitySettingsJson(object): <NEW_LINE> <INDENT> swagger_types = { 'ha_enabled': 'bool', 'sync_interval_in_hours': 'int', 'url': 'str' } <NEW_LINE> attribute_map = { 'ha_enabled': 'haEnabled', 'sync_interval_in_hours': 'syncIntervalInHours', 'url': 'url' } <NEW_LINE> def __init__(self, ha_enabled=None, sync_interval_in_hours=None, url=None): <NEW_LINE> <INDENT> self._ha_enabled = None <NEW_LINE> self._sync_interval_in_hours = None <NEW_LINE> self._url = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.ha_enabled = ha_enabled <NEW_LINE> self.sync_interval_in_hours = sync_interval_in_hours <NEW_LINE> self.url = url <NEW_LINE> <DEDENT> @property <NEW_LINE> def ha_enabled(self): <NEW_LINE> <INDENT> return self._ha_enabled <NEW_LINE> <DEDENT> @ha_enabled.setter <NEW_LINE> def ha_enabled(self, ha_enabled): <NEW_LINE> <INDENT> if ha_enabled is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `ha_enabled`, must not be `None`") <NEW_LINE> <DEDENT> self._ha_enabled = ha_enabled <NEW_LINE> <DEDENT> @property <NEW_LINE> def sync_interval_in_hours(self): <NEW_LINE> <INDENT> return self._sync_interval_in_hours <NEW_LINE> <DEDENT> @sync_interval_in_hours.setter <NEW_LINE> def sync_interval_in_hours(self, sync_interval_in_hours): <NEW_LINE> <INDENT> if sync_interval_in_hours is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `sync_interval_in_hours`, must not be `None`") <NEW_LINE> <DEDENT> self._sync_interval_in_hours = sync_interval_in_hours <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self._url <NEW_LINE> <DEDENT> @url.setter <NEW_LINE> def url(self, url): <NEW_LINE> <INDENT> if url is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `url`, must not be `None`") <NEW_LINE> <DEDENT> self._url = url <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(HighAvailabilitySettingsJson, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, HighAvailabilitySettingsJson): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62599086be7bc26dc9252c0b
class OrderInfo: <NEW_LINE> <INDENT> def __init__(self, dictionary=None): <NEW_LINE> <INDENT> if dictionary is None: <NEW_LINE> <INDENT> dictionary = {} <NEW_LINE> <DEDENT> self.dict = dictionary <NEW_LINE> self.name = dictionary["name"] if "name" in dictionary else None <NEW_LINE> self.phone_number = dictionary["phone_number"] if "phone_number" in dictionary else None <NEW_LINE> self.email = dictionary["email"] if "email" in dictionary else None <NEW_LINE> self.shipping_address = ShippingAddress(dictionary["shipping_address"]) if "shipping_address" in dictionary else None <NEW_LINE> for index, value in self.dict.items(): <NEW_LINE> <INDENT> if not hasattr(self, index): <NEW_LINE> <INDENT> setattr(self, index, helper.setBvar(value))
This object represents information about an order.[See on Telegram API](https://core.telegram.org/bots/api#orderinfo) - - - - - **Fields**: - `name`: `string` - Optional. User name - `phone_number`: `string` - Optional. User's phone number - `email`: `string` - Optional. User email - `shipping_address`: `ShippingAddress` - Optional. User shipping address
625990867047854f46340f20
class FrameSize(Message): <NEW_LINE> <INDENT> set_type(FRAME_SIZE) <NEW_LINE> def __init__(self, size=None): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> <DEDENT> def decode(self, buf): <NEW_LINE> <INDENT> self.size = buf.read_ulong() <NEW_LINE> <DEDENT> def encode(self, buf): <NEW_LINE> <INDENT> if self.size is None: <NEW_LINE> <INDENT> raise EncodeError('Frame size not set') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> buf.write_ulong(self.size) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> raise EncodeError('Frame size wrong type ' '(expected int, got %r)' % (type(self.size),)) <NEW_LINE> <DEDENT> <DEDENT> def dispatch(self, listener, timestamp): <NEW_LINE> <INDENT> listener.onFrameSize(self.size, timestamp)
A frame size message. This determines the maximum number of bytes for the frame body in the RTMP stream. @ivar size: Number of bytes for RTMP frame bodies. @type size: C{int}
62599086adb09d7d5dc0c0c6
class Label (pyxb.binding.datatypes.IDREF): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'Label') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/Volumes/SamsungSSD/git/ShExSolbrig/ShEx/ShEx/static/xsd/ShEx.xsd', 178, 4) <NEW_LINE> _Documentation = None
An atomic simple type.
62599086ad47b63b2c5a93be
class Embedder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, text_embedding_vectors: torch.Tensor): <NEW_LINE> <INDENT> super(Embedder, self).__init__() <NEW_LINE> self.embeddings = nn.Embedding.from_pretrained( embeddings=text_embedding_vectors, freeze=True ) <NEW_LINE> <DEDENT> def forward(self, x) -> nn.Embedding: <NEW_LINE> <INDENT> x_vec = self.embeddings(x) <NEW_LINE> return x_vec
Converts the words indicated by ids into vectors.
62599086a8370b77170f1f38
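A short usage sketch for the Embedder module above, assuming the record's own torch / torch.nn imports are in scope; the vocabulary size and embedding dimension are illustrative, and random vectors stand in for real pretrained embeddings.

import torch

pretrained = torch.randn(100, 300)        # hypothetical: 100-word vocabulary, 300-dim vectors
embedder = Embedder(pretrained)
token_ids = torch.tensor([[1, 5, 7]])     # one sequence of three token ids
vectors = embedder(token_ids)
print(vectors.shape)                      # torch.Size([1, 3, 300])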
class PortProfileBindingAlreadyExists(exceptions.QuantumException): <NEW_LINE> <INDENT> message = _("PortProfileBinding for port profile %(pp_id)s to " "port %(port_id)s already exists")
Binding cannot be created, since it already exists
625990864c3428357761be27
class CheckQueryTypeFieldsNameMatchVisitor(Visitor): <NEW_LINE> <INDENT> def __init__(self, query_type: str) -> None: <NEW_LINE> <INDENT> self.query_type = query_type <NEW_LINE> self.in_query_type = False <NEW_LINE> <DEDENT> def enter_object_type_definition( self, node: ObjectTypeDefinitionNode, key: Any, parent: Any, path: List[Any], ancestors: List[Any], ) -> None: <NEW_LINE> <INDENT> if node.name.value == self.query_type: <NEW_LINE> <INDENT> self.in_query_type = True <NEW_LINE> <DEDENT> <DEDENT> def leave_object_type_definition( self, node: ObjectTypeDefinitionNode, key: Any, parent: Any, path: List[Any], ancestors: List[Any], ) -> None: <NEW_LINE> <INDENT> if node.name.value == self.query_type: <NEW_LINE> <INDENT> self.in_query_type = False <NEW_LINE> <DEDENT> <DEDENT> def enter_field_definition( self, node: FieldDefinitionNode, key: Any, parent: Any, path: List[Any], ancestors: List[Any], ) -> None: <NEW_LINE> <INDENT> if self.in_query_type: <NEW_LINE> <INDENT> field_name = node.name.value <NEW_LINE> type_node = get_ast_with_non_null_and_list_stripped(node.type) <NEW_LINE> queried_type_name = type_node.name.value <NEW_LINE> if field_name != queried_type_name: <NEW_LINE> <INDENT> raise SchemaStructureError( 'Query type\'s field name "{}" does not match corresponding queried type ' 'name "{}"'.format(field_name, queried_type_name) )
Check that every query type field's name is identical to the type it queries. If not, raise SchemaStructureError.
625990867047854f46340f22
class OrganizedEventsList(LoginRequiredMixin, ListView): <NEW_LINE> <INDENT> template_name = 'events/organized_events.html' <NEW_LINE> allow_empty = False <NEW_LINE> model = Event <NEW_LINE> ordering = '-start_date' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return super().get_queryset().for_organizer(self.request.user).annotate( num_registered=Count('registrations', filter=Q(registrations__status=Registration.statuses.REGISTERED)), num_waitinglist=Count('registrations', filter=Q(registrations__status=Registration.statuses.WAITINGLIST)), num_cancelled=Count('registrations', filter=Q(registrations__status=Registration.statuses.CANCELLED)), show_registration_details=QExpr(Q(end_date__gte=date.today())), ) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> counts = Registration.objects.values_list('status').annotate(count=Count('status')) <NEW_LINE> kwargs['counts'] = {status.id: count for status, count in counts} <NEW_LINE> return super().get_context_data(**kwargs)
List of events a user has organizer access to.
6259908660cbc95b06365b22
class MessageID: <NEW_LINE> <INDENT> def __init__(self, domain=None, idstring=None): <NEW_LINE> <INDENT> self.domain = domain or DNS_NAME <NEW_LINE> self.idstring = idstring <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> timeval = time.time() <NEW_LINE> utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(timeval)) <NEW_LINE> try: <NEW_LINE> <INDENT> pid = os.getpid() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pid = 1 <NEW_LINE> <DEDENT> randint = random.randrange(100000) <NEW_LINE> if self.idstring is None: <NEW_LINE> <INDENT> idstring = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> idstring = '.' + self.idstring <NEW_LINE> <DEDENT> msgid = '<%s.%s.%s%s@%s>' % (utcdate, pid, randint, idstring, self.domain) <NEW_LINE> return msgid
Returns a string suitable for RFC 2822 compliant Message-ID, e.g: <[email protected]> Optional idstring if given is a string used to strengthen the uniqueness of the message id. Based on django.core.mail.message.make_msgid
62599086f9cc0f698b1c6082
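The MessageID record above depends on module-level DNS_NAME, time, os, and random imports that the snippet omits. The standalone sketch below reproduces the same id-construction logic so it can be run in isolation; the example.org domain and the "newsletter" idstring are placeholders.

import os
import random
import time

def make_msgid(domain="example.org", idstring=None):
    # Mirrors MessageID.__call__: UTC timestamp, pid, random integer, optional idstring.
    utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(time.time()))
    suffix = '' if idstring is None else '.' + idstring
    return '<%s.%s.%s%s@%s>' % (utcdate, os.getpid(), random.randrange(100000), suffix, domain)

print(make_msgid(idstring="newsletter"))   # e.g. <20240101120000.1234.56789.newsletter@example.org>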
class TestContactOutputOnly(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testContactOutputOnly(self): <NEW_LINE> <INDENT> pass
ContactOutputOnly unit test stubs
6259908663b5f9789fe86cd6
class Enemy(GameSprite): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__('./images/enemy1.png') <NEW_LINE> self.speed = random.randint(1, 3) <NEW_LINE> self.rect.bottom = 0 <NEW_LINE> max_x = SCREEN_RECT.width - self.rect.width <NEW_LINE> self.rect.x = random.randrange(0, max_x, self.rect.width) <NEW_LINE> <DEDENT> def update(self, current_time, rate=100): <NEW_LINE> <INDENT> super().update() <NEW_LINE> if self.rect.y > SCREEN_RECT.height: <NEW_LINE> <INDENT> self.kill() <NEW_LINE> <DEDENT> self.last_time = current_time <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> pass
Enemy aircraft.
62599086e1aae11d1e7cf5ca
class Lambda(mixins.FilterStmtsMixin, LocalsDictNodeNG): <NEW_LINE> <INDENT> _astroid_fields = ("args", "body") <NEW_LINE> _other_other_fields = ("locals",) <NEW_LINE> name = "<lambda>" <NEW_LINE> is_lambda = True <NEW_LINE> def implicit_parameters(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> if self.args.arguments and self.args.arguments[0].name == "self": <NEW_LINE> <INDENT> if isinstance(self.parent.scope(), ClassDef): <NEW_LINE> <INDENT> return "method" <NEW_LINE> <DEDENT> <DEDENT> return "function" <NEW_LINE> <DEDENT> def __init__(self, lineno=None, col_offset=None, parent=None): <NEW_LINE> <INDENT> self.locals = {} <NEW_LINE> self.args = [] <NEW_LINE> self.body = [] <NEW_LINE> super().__init__(lineno, col_offset, parent) <NEW_LINE> <DEDENT> def postinit(self, args, body): <NEW_LINE> <INDENT> self.args = args <NEW_LINE> self.body = body <NEW_LINE> <DEDENT> def pytype(self): <NEW_LINE> <INDENT> if "method" in self.type: <NEW_LINE> <INDENT> return "%s.instancemethod" % BUILTINS <NEW_LINE> <DEDENT> return "%s.function" % BUILTINS <NEW_LINE> <DEDENT> def display_type(self): <NEW_LINE> <INDENT> if "method" in self.type: <NEW_LINE> <INDENT> return "Method" <NEW_LINE> <DEDENT> return "Function" <NEW_LINE> <DEDENT> def callable(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def argnames(self): <NEW_LINE> <INDENT> if self.args.arguments: <NEW_LINE> <INDENT> names = _rec_get_names(self.args.arguments) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> names = [] <NEW_LINE> <DEDENT> if self.args.vararg: <NEW_LINE> <INDENT> names.append(self.args.vararg) <NEW_LINE> <DEDENT> if self.args.kwarg: <NEW_LINE> <INDENT> names.append(self.args.kwarg) <NEW_LINE> <DEDENT> return names <NEW_LINE> <DEDENT> def infer_call_result(self, caller, context=None): <NEW_LINE> <INDENT> return self.body.infer(context) <NEW_LINE> <DEDENT> def scope_lookup(self, node, name, offset=0): <NEW_LINE> <INDENT> if node in self.args.defaults or node in self.args.kw_defaults: <NEW_LINE> <INDENT> frame = self.parent.frame() <NEW_LINE> offset = -1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> frame = self <NEW_LINE> <DEDENT> return frame._scope_lookup(node, name, offset) <NEW_LINE> <DEDENT> def bool_value(self, context=None): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def get_children(self): <NEW_LINE> <INDENT> yield self.args <NEW_LINE> yield self.body
Class representing an :class:`ast.Lambda` node. >>> node = astroid.extract_node('lambda arg: arg + 1') >>> node <Lambda.<lambda> l.1 at 0x7f23b2e41518>
62599086656771135c48ade7
class AdcptMWVSRecoveredDriver(SimpleDatasetDriver): <NEW_LINE> <INDENT> def _build_parser(self, stream_handle): <NEW_LINE> <INDENT> parser_config = { DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.adcpt_m_wvs', DataSetDriverConfigKeys.PARTICLE_CLASS: 'AdcptMWVSInstrumentDataParticle' } <NEW_LINE> parser = AdcptMWVSParser(parser_config, stream_handle, self._exception_callback) <NEW_LINE> return parser
The adcpt_m_wvs driver class extends the SimpleDatasetDriver.
625990864428ac0f6e65a09c
class Graphical_metaclass(type): <NEW_LINE> <INDENT> def __new__(cls,name,bases,classdict:dict): <NEW_LINE> <INDENT> if name == "Graphical": <NEW_LINE> <INDENT> return type.__new__(cls,name,bases,classdict) <NEW_LINE> <DEDENT> if callable(classdict.get('plugin')): <NEW_LINE> <INDENT> return type.__new__(cls,name,bases,classdict) <NEW_LINE> <DEDENT> formula = classdict.get("formula") <NEW_LINE> _args = classdict.get("args") <NEW_LINE> if "extension" in classdict: <NEW_LINE> <INDENT> _extension = classdict.get("extension") <NEW_LINE> has_extension = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> has_extension = False <NEW_LINE> _extension = None <NEW_LINE> <DEDENT> if isinstance(_args,dict): <NEW_LINE> <INDENT> key_value = _args <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key_value = {} <NEW_LINE> for key,value in _args: <NEW_LINE> <INDENT> key_value[key] = value <NEW_LINE> <DEDENT> <DEDENT> classdict["formula"] = formula <NEW_LINE> classdict["_args"] = key_value <NEW_LINE> if has_extension: <NEW_LINE> <INDENT> if hasattr(_extension, '__iter__'): <NEW_LINE> <INDENT> temp = {} <NEW_LINE> for extend in _extension: <NEW_LINE> <INDENT> if not isinstance(extend,Extension): <NEW_LINE> <INDENT> raise TypeError("\'%s\' not a extension object"%extend) <NEW_LINE> <DEDENT> temp[extend.get_name()] = extend.get_function() <NEW_LINE> <DEDENT> _extension = temp <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(_extension,Extension): <NEW_LINE> <INDENT> raise TypeError("%r not a extension object"%_extension) <NEW_LINE> <DEDENT> _extension = {_extension.get_name():_extension.get_function()} <NEW_LINE> <DEDENT> classdict["extension"] = _extension <NEW_LINE> <DEDENT> return type.__new__(cls,name,bases,classdict)
This is the metaclass of the Graphical class and should not be modified.
6259908697e22403b383ca66
class PublisherACL(object): <NEW_LINE> <INDENT> def __init__(self, blacklist): <NEW_LINE> <INDENT> self.blacklist = blacklist <NEW_LINE> <DEDENT> def user_is_blacklisted(self, user): <NEW_LINE> <INDENT> for blacklisted_user in self.blacklist.get('users', []): <NEW_LINE> <INDENT> if re.match(blacklisted_user, user): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def cmd_is_blacklisted(self, cmd): <NEW_LINE> <INDENT> for blacklisted_module in self.blacklist.get('modules', []): <NEW_LINE> <INDENT> if isinstance(cmd, str): <NEW_LINE> <INDENT> funs_to_check = [cmd] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> funs_to_check = cmd <NEW_LINE> <DEDENT> for fun in funs_to_check: <NEW_LINE> <INDENT> if re.match(blacklisted_module, fun): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False
Represents the publisher ACL and provides methods to query the ACL for given operations
625990867c178a314d78e9a1
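A hedged usage sketch for the PublisherACL record above (it assumes the module-level re import that the class relies on); the blacklist patterns here are invented for illustration.

acl = PublisherACL({'users': ['root', r'admin.*'], 'modules': [r'cmd\..*']})
print(acl.user_is_blacklisted('admin42'))                  # True: matches 'admin.*'
print(acl.user_is_blacklisted('alice'))                    # False
print(acl.cmd_is_blacklisted('cmd.run'))                   # True: matches 'cmd\..*'
print(acl.cmd_is_blacklisted(['test.ping', 'cmd.shell']))  # True: list entries are checked too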
class AdamParameters(_OptimizationParameters): <NEW_LINE> <INDENT> def __init__(self, learning_rate, beta1=0.9, beta2=0.999, epsilon=1e-08, lazy_adam=True, sum_inside_sqrt=True, use_gradient_accumulation=True): <NEW_LINE> <INDENT> super(AdamParameters, self).__init__(learning_rate, use_gradient_accumulation) <NEW_LINE> if beta1 < 0. or beta1 >= 1.: <NEW_LINE> <INDENT> raise ValueError('beta1 must be between 0. and 1; got {}.'.format(beta1)) <NEW_LINE> <DEDENT> if beta2 < 0. or beta2 >= 1.: <NEW_LINE> <INDENT> raise ValueError('beta2 must be between 0. and 1; got {}.'.format(beta2)) <NEW_LINE> <DEDENT> if epsilon <= 0.: <NEW_LINE> <INDENT> raise ValueError('epsilon must be positive; got {}.'.format(epsilon)) <NEW_LINE> <DEDENT> if not use_gradient_accumulation and not lazy_adam: <NEW_LINE> <INDENT> raise ValueError( 'When disabling Lazy Adam, gradient accumulation must be used.') <NEW_LINE> <DEDENT> self.beta1 = beta1 <NEW_LINE> self.beta2 = beta2 <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.lazy_adam = lazy_adam <NEW_LINE> self.sum_inside_sqrt = sum_inside_sqrt
Optimization parameters for Adam.
62599086ec188e330fdfa41a
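A brief sketch of the validation behaviour in the AdamParameters record above, assuming its _OptimizationParameters base class is importable from the same module; the learning-rate value is arbitrary.

params = AdamParameters(learning_rate=0.001)        # defaults: beta1=0.9, beta2=0.999, epsilon=1e-08
print(params.beta1, params.beta2, params.epsilon)

try:
    AdamParameters(learning_rate=0.001, beta1=1.5)  # out of [0, 1): rejected by the constructor
except ValueError as err:
    print(err)                                      # beta1 must be between 0. and 1; got 1.5.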
class NormalizeFeatures(BaseTransform): <NEW_LINE> <INDENT> def __init__(self, attrs: List[str] = ["x"]): <NEW_LINE> <INDENT> self.attrs = attrs <NEW_LINE> <DEDENT> def __call__(self, data: Union[Data, HeteroData]): <NEW_LINE> <INDENT> for store in data.stores: <NEW_LINE> <INDENT> for key, value in store.items(*self.attrs): <NEW_LINE> <INDENT> value = value - value.min() <NEW_LINE> value.div_(value.sum(dim=-1, keepdim=True).clamp_(min=1.)) <NEW_LINE> store[key] = value <NEW_LINE> <DEDENT> <DEDENT> return data <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f'{self.__class__.__name__}()'
Row-normalizes the attributes given in :obj:`attrs` to sum-up to one. Args: attrs (List[str]): The names of attributes to normalize. (default: :obj:`["x"]`)
6259908623849d37ff852c29
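A standalone sketch of the row normalisation applied by the NormalizeFeatures transform above, written directly against a tensor so it runs without torch_geometric; the feature matrix is made up.

import torch

x = torch.tensor([[1.0, 3.0],
                  [2.0, 2.0]])
x = x - x.min()                                      # shift by the global minimum, as in __call__
x = x / x.sum(dim=-1, keepdim=True).clamp(min=1.0)   # each row now sums to one
print(x)   # tensor([[0.0000, 1.0000], [0.5000, 0.5000]])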
class FitData(object): <NEW_LINE> <INDENT> def __init__(self, group, xvalues, yvalues, d_sample): <NEW_LINE> <INDENT> self.group = group <NEW_LINE> self.xvalues = xvalues <NEW_LINE> self.yvalues = yvalues <NEW_LINE> self.d_sample = d_sample
Fitting data
62599086ad47b63b2c5a93c2
@versioned_properties <NEW_LINE> class NameServerEntity(SharedVersionedEntity): <NEW_LINE> <INDENT> label = 'NameServer' <NEW_LINE> state_label = 'NameServerState' <NEW_LINE> properties = { 'ip': VersionedProperty(is_identity=True) }
Model a name server node in the graph.
62599086e1aae11d1e7cf5cb
class Invoker(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> from click.testing import CliRunner <NEW_LINE> self.runner = CliRunner() <NEW_LINE> from ymp.cli import main <NEW_LINE> self.main = main <NEW_LINE> self.initialized = False <NEW_LINE> self.toclean = [] <NEW_LINE> <DEDENT> def call(self, *args, standalone_mode=False, **kwargs): <NEW_LINE> <INDENT> if not self.initialized: <NEW_LINE> <INDENT> ymp.config.ConfigMgr.CONF_USER_FNAME = "ymp_user.yml" <NEW_LINE> ymp.get_config().unload() <NEW_LINE> <DEDENT> if not os.path.exists("cmd.sh"): <NEW_LINE> <INDENT> with open("cmd.sh", "w") as f: <NEW_LINE> <INDENT> f.write("#!/bin/bash -x\n") <NEW_LINE> <DEDENT> <DEDENT> argstr = " ".join(shlex.quote(arg) for arg in args) <NEW_LINE> with open("cmd.sh", "w") as f: <NEW_LINE> <INDENT> f.write(f"PATH={os.environ['PATH']} ymp {argstr} \"$@\"\n") <NEW_LINE> <DEDENT> result = self.runner.invoke(self.main, args, **kwargs, standalone_mode=standalone_mode) <NEW_LINE> with open("out.log", "w") as f: <NEW_LINE> <INDENT> f.write(result.output) <NEW_LINE> <DEDENT> if result.exception and not standalone_mode: <NEW_LINE> <INDENT> raise result.exception <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def call_raises(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.call(*args, standalone_mode=True, **kwargs) <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> for toclean in self.toclean: <NEW_LINE> <INDENT> shutil.rmtree(toclean, ignore_errors=True)
Wrap invoking shell command Handles writing of out.log and cmd.sh as well as reloading ymp config on each call.
62599086bf627c535bcb3043
class Deposit: <NEW_LINE> <INDENT> def __init__ (self, pURI=None): <NEW_LINE> <INDENT> self.uri = pURI <NEW_LINE> self.qual = pURI <NEW_LINE> self.proto = None <NEW_LINE> <DEDENT> def show (self, outFile=sys.stdout): <NEW_LINE> <INDENT> if outFile is not None: <NEW_LINE> <INDENT> outFile.write("{}\n".format(self.to_string())) <NEW_LINE> <DEDENT> <DEDENT> def to_string (self): <NEW_LINE> <INDENT> info = "" if self.proto is None else self.proto <NEW_LINE> other = "://" if self.proto is not None else "" <NEW_LINE> s = "{}{}{}".format(info, other, self.uri) <NEW_LINE> return s <NEW_LINE> <DEDENT> def to_uri (self, s, forceLocal=True): <NEW_LINE> <INDENT> drive = None <NEW_LINE> if isinstance(s, str): <NEW_LINE> <INDENT> pos = s.rfind( "/" ) <NEW_LINE> if pos == -1: <NEW_LINE> <INDENT> r = s.replace("\\", "/") <NEW_LINE> if len(r) >= 2: <NEW_LINE> <INDENT> if r[0].isalpha() and r[1] == ":": <NEW_LINE> <INDENT> drive = r[:2] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> r = s <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> assert False <NEW_LINE> <DEDENT> (self.proto, self.uri) = self.str_proto(r) <NEW_LINE> addQual = 0 <NEW_LINE> if forceLocal: <NEW_LINE> <INDENT> if drive is not None: <NEW_LINE> <INDENT> addQual = 1 <NEW_LINE> <DEDENT> <DEDENT> if addQual: <NEW_LINE> <INDENT> self.qual = "file:///" + self.uri <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.qual = self.uri <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def str_proto (self, s): <NEW_LINE> <INDENT> assert isinstance(s, str) <NEW_LINE> pos = s.find("://") <NEW_LINE> if 1 < pos < 6: <NEW_LINE> <INDENT> tup = (s[:pos], s[pos+3:]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tup = (None, s) <NEW_LINE> <DEDENT> return tup
Deposit class for URLs/URIs
625990863617ad0b5ee07cc1
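A small usage sketch for the Deposit record above; the Windows-style path and the HTTPS URL below are illustrative.

d = Deposit()
d.to_uri(r"C:\data\report.txt")         # backslash path with a drive letter
print(d.qual)                           # file:///C:/data/report.txt

d2 = Deposit()
d2.to_uri("https://example.org/data")   # protocol prefix is split off into d2.proto
print(d2.proto, d2.uri)                 # https example.org/data
print(d2.to_string())                   # https://example.org/data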
class GiftiToCSV(SimpleInterface): <NEW_LINE> <INDENT> input_spec = _GiftiToCSVInputSpec <NEW_LINE> output_spec = _GiftiToCSVOutputSpec <NEW_LINE> def _run_interface(self, runtime): <NEW_LINE> <INDENT> gii = nb.load(self.inputs.in_file) <NEW_LINE> data = gii.darrays[0].data <NEW_LINE> if self.inputs.itk_lps: <NEW_LINE> <INDENT> data[:, :2] *= -1 <NEW_LINE> <DEDENT> csvdata = np.hstack((data, np.zeros((data.shape[0], 3)))) <NEW_LINE> out_file = fname_presuffix( self.inputs.in_file, newpath=runtime.cwd, use_ext=False, suffix="points.csv" ) <NEW_LINE> np.savetxt( out_file, csvdata, delimiter=",", header="x,y,z,t,label,comment", fmt=["%.5f"] * 4 + ["%d"] * 2, ) <NEW_LINE> self._results["out_file"] = out_file <NEW_LINE> return runtime
Converts GIfTI files to CSV to make them amenable to use with ``antsApplyTransformsToPoints``.
6259908644b2445a339b7714
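A hedged usage sketch of the GiftiToCSV interface above, following the usual nipype SimpleInterface pattern; the surface file name is a placeholder and the input/output spec classes are assumed to be defined alongside the record.

# Hypothetical invocation; "lh.sphere.gii" stands in for a real GIfTI surface file.
gii2csv = GiftiToCSV(in_file="lh.sphere.gii", itk_lps=True)
result = gii2csv.run()
print(result.outputs.out_file)   # path of the generated ...points.csv file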
class WeblateMemory(MachineTranslation): <NEW_LINE> <INDENT> name = "Weblate Translation Memory" <NEW_LINE> rank_boost = 2 <NEW_LINE> cache_translations = False <NEW_LINE> same_languages = True <NEW_LINE> accounting_key = "internal" <NEW_LINE> do_cleanup = False <NEW_LINE> def convert_language(self, language): <NEW_LINE> <INDENT> return get_machinery_language(language) <NEW_LINE> <DEDENT> def is_supported(self, source, language): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def is_rate_limited(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def download_translations( self, source, language, text: str, unit, user, search: bool, threshold: int = 75, ): <NEW_LINE> <INDENT> for result in Memory.objects.lookup( source, language, text, user, unit.translation.component.project, unit.translation.component.project.use_shared_tm, ).iterator(): <NEW_LINE> <INDENT> quality = self.comparer.similarity(text, result.source) <NEW_LINE> if quality < 10 or (quality < threshold and not search): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> yield { "text": result.target, "quality": quality, "service": self.name, "origin": result.get_origin_display(), "source": result.source, }
Translation service using strings already translated in Weblate.
62599086283ffb24f3cf5411
class Section(BreadCrumbsMixin, BaseTree): <NEW_LINE> <INDENT> title = models.CharField(max_length=255, verbose_name=_('Title')) <NEW_LINE> slug = models.SlugField(max_length=255, unique=True, verbose_name=_('Slug')) <NEW_LINE> sort = models.IntegerField(default=500, verbose_name=_('Sort')) <NEW_LINE> metatitle = models.CharField(max_length=2000, blank=True, verbose_name=_('Title')) <NEW_LINE> keywords = models.CharField(max_length=2000, blank=True, verbose_name=_('Keywords')) <NEW_LINE> description = models.CharField(max_length=2000, blank=True, verbose_name=_('Description')) <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('midnight_news:news_list', kwargs={'slug': self.slug}) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> class MPTTMeta: <NEW_LINE> <INDENT> order_insertion_by = ['sort'] <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('NewsSection') <NEW_LINE> verbose_name_plural = _('NewsSections')
News category model.
625990867b180e01f3e49e1d
class Attraction: <NEW_LINE> <INDENT> def __init__( self, ID=None, name=None, url=None, attr_type=None, location=None, num_reviews=0 ): <NEW_LINE> <INDENT> self._ID = ID <NEW_LINE> self._name = name <NEW_LINE> self._url = url <NEW_LINE> self._attr_type = attr_type <NEW_LINE> self._location = location <NEW_LINE> self._num_reviews = num_reviews <NEW_LINE> <DEDENT> @property <NEW_LINE> def ID(self): <NEW_LINE> <INDENT> return self._ID <NEW_LINE> <DEDENT> @ID.setter <NEW_LINE> def ID(self, value): <NEW_LINE> <INDENT> self._ID = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, value): <NEW_LINE> <INDENT> self._name = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self._url <NEW_LINE> <DEDENT> @url.setter <NEW_LINE> def url(self, value): <NEW_LINE> <INDENT> self._url = value <NEW_LINE> self._ID = int( re.match(r"/Attraction_Review-g\d*-d(\d*)-", value).group(1) ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def attr_type(self): <NEW_LINE> <INDENT> return self._attr_type <NEW_LINE> <DEDENT> @attr_type.setter <NEW_LINE> def attr_type(self, value): <NEW_LINE> <INDENT> self._attr_type = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def location(self): <NEW_LINE> <INDENT> return self._location <NEW_LINE> <DEDENT> @location.setter <NEW_LINE> def location(self, value): <NEW_LINE> <INDENT> self._location = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_reviews(self): <NEW_LINE> <INDENT> return self._num_reviews <NEW_LINE> <DEDENT> @num_reviews.setter <NEW_LINE> def num_reviews(self, value): <NEW_LINE> <INDENT> self._num_reviews = value
TripAdvisor attraction class. Attributes ---------- ID : int TripAdvisor attraction id name : str the attraction's full, human readable, name url : str the relative URL to the attraction's TripAdvisor page attr_type : str the attraction's type according to TripAdvisor's classification system location : list the coordinate pair in [Lat, Lon] of an attraction's location num_reviews : dict the number of reviews in the three most frequent review languages
625990865fcc89381b266f16
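A short usage sketch for the Attraction record above (the module needs re imported); the relative URL is a made-up example that follows the TripAdvisor pattern parsed by the url setter's regular expression.

spot = Attraction(name="Central Park", attr_type="Park")
spot.url = "/Attraction_Review-g60763-d105127-Reviews-Central_Park.html"
print(spot.ID)            # 105127, extracted from the d<digits> segment of the URL
print(spot.num_reviews)   # 0 (default)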
class CourseAverages(Stat.Stat): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__('CourseAverages') <NEW_LINE> self.course_averages = {} <NEW_LINE> <DEDENT> def calc_stats(self): <NEW_LINE> <INDENT> for path in self.paths: <NEW_LINE> <INDENT> with open(path, mode = 'r') as infile: <NEW_LINE> <INDENT> reader = csv.reader(infile, quoting = csv.QUOTE_MINIMAL) <NEW_LINE> next(reader) <NEW_LINE> course_name = next(reader)[1] <NEW_LINE> round_scores = [] <NEW_LINE> for row in reader: <NEW_LINE> <INDENT> round_scores.append(int(row[5])) <NEW_LINE> <DEDENT> avg_score = sum(round_scores)/len(round_scores) <NEW_LINE> if course_name not in self.course_averages.keys(): <NEW_LINE> <INDENT> self.course_averages[course_name] = [avg_score] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.course_averages[course_name].append(avg_score) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for course_name in self.course_averages.keys(): <NEW_LINE> <INDENT> recorded_values = self.course_averages[course_name] <NEW_LINE> total_score = sum(recorded_values) <NEW_LINE> total_games = len(recorded_values) <NEW_LINE> self.course_averages[course_name] = total_score/total_games <NEW_LINE> <DEDENT> <DEDENT> def write_stats(self): <NEW_LINE> <INDENT> with open(self.csv_dump, mode="w") as outfile: <NEW_LINE> <INDENT> writer = csv.writer(outfile, delimiter=',', quotechar='"',quoting=csv.QUOTE_MINIMAL) <NEW_LINE> for key, value in self.course_averages.items(): <NEW_LINE> <INDENT> writer.writerow([key, round(value,2)])
Calculates course average score
62599086aad79263cf43032c
class TestAutoScalingInstancePoolChooseInstances(unittest.TestCase): <NEW_LINE> <INDENT> class Instance(object): <NEW_LINE> <INDENT> def __init__(self, num_outstanding_requests, can_accept_requests=True): <NEW_LINE> <INDENT> self.num_outstanding_requests = num_outstanding_requests <NEW_LINE> self.remaining_request_capacity = 10 - num_outstanding_requests <NEW_LINE> self.can_accept_requests = can_accept_requests <NEW_LINE> <DEDENT> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> api_server.test_setup_stubs() <NEW_LINE> self.mox = mox.Mox() <NEW_LINE> self.servr = AutoScalingServerFacade( instance_factory=instance.InstanceFactory(object(), 10)) <NEW_LINE> self.mox.StubOutWithMock(self.servr, '_split_instances') <NEW_LINE> self.mox.StubOutWithMock(self.servr._condition, 'wait') <NEW_LINE> self.time = 10 <NEW_LINE> self.mox.stubs.Set(time, 'time', lambda: self.time) <NEW_LINE> <DEDENT> def advance_time(self, *unused_args): <NEW_LINE> <INDENT> self.time += 10 <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.mox.UnsetStubs() <NEW_LINE> <DEDENT> def test_choose_instance_required_available(self): <NEW_LINE> <INDENT> instance1 = self.Instance(1) <NEW_LINE> instance2 = self.Instance(2) <NEW_LINE> instance3 = self.Instance(3) <NEW_LINE> instance4 = self.Instance(4) <NEW_LINE> self.servr._split_instances().AndReturn((set([instance3, instance4]), set([instance1, instance2]))) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertEqual(instance3, self.servr._choose_instance(15)) <NEW_LINE> self.mox.VerifyAll() <NEW_LINE> <DEDENT> def test_choose_instance_no_instances(self): <NEW_LINE> <INDENT> self.servr._split_instances().AndReturn((set([]), set([]))) <NEW_LINE> self.servr._condition.wait(5).WithSideEffects(self.advance_time) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertEqual(None, self.servr._choose_instance(15)) <NEW_LINE> self.mox.VerifyAll() <NEW_LINE> <DEDENT> def test_choose_instance_no_instance_that_can_accept_requests(self): <NEW_LINE> <INDENT> instance1 = self.Instance(1, can_accept_requests=False) <NEW_LINE> self.servr._split_instances().AndReturn((set([]), set([instance1]))) <NEW_LINE> self.servr._condition.wait(5).WithSideEffects(self.advance_time) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertEqual(None, self.servr._choose_instance(15)) <NEW_LINE> self.mox.VerifyAll() <NEW_LINE> <DEDENT> def test_choose_instance_required_full(self): <NEW_LINE> <INDENT> instance1 = self.Instance(1) <NEW_LINE> instance2 = self.Instance(2) <NEW_LINE> instance3 = self.Instance(10) <NEW_LINE> instance4 = self.Instance(10) <NEW_LINE> self.servr._split_instances().AndReturn((set([instance3, instance4]), set([instance1, instance2]))) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertEqual(instance2, self.servr._choose_instance(15)) <NEW_LINE> self.mox.VerifyAll() <NEW_LINE> <DEDENT> def test_choose_instance_must_wait(self): <NEW_LINE> <INDENT> instance1 = self.Instance(10) <NEW_LINE> instance2 = self.Instance(10) <NEW_LINE> self.servr._split_instances().AndReturn((set([instance1]), set([instance2]))) <NEW_LINE> self.servr._condition.wait(5).WithSideEffects(self.advance_time) <NEW_LINE> self.mox.ReplayAll() <NEW_LINE> self.assertIsNone(self.servr._choose_instance(15)) <NEW_LINE> self.mox.VerifyAll()
Tests for server.AutoScalingServer._choose_instance.
6259908644b2445a339b7715
class TwilioView(View): <NEW_LINE> <INDENT> response_text = None <NEW_LINE> @method_decorator(csrf_exempt) <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return super(TwilioView, self).dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> return self.request.POST <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> data = self.get_data() <NEW_LINE> return self.handle_request(data) <NEW_LINE> <DEDENT> def handle_request(self, data): <NEW_LINE> <INDENT> return self.get_response(data) <NEW_LINE> <DEDENT> def get_response_text(self): <NEW_LINE> <INDENT> return self.response_text <NEW_LINE> <DEDENT> def get_response(self, message, **kwargs): <NEW_LINE> <INDENT> response = MessagingResponse() <NEW_LINE> response_text = self.get_response_text() <NEW_LINE> if response_text: <NEW_LINE> <INDENT> response.message(response_text) <NEW_LINE> <DEDENT> response = HttpResponse(response.to_xml(), content_type='application/xml') <NEW_LINE> return response
Base view for Twilio callbacks
6259908666673b3332c31f72
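A minimal sketch of how a subclass of the TwilioView record above might be wired into a Django project; the view name, URL path, and response text are invented for illustration.

# views.py -- reply to every inbound SMS with a fixed acknowledgement (hypothetical subclass)
class ThanksView(TwilioView):
    response_text = "Thanks, we received your message."

# urls.py -- hypothetical route for the Twilio webhook
from django.urls import path

urlpatterns = [
    path("twilio/inbound/", ThanksView.as_view()),
]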
class BrickletPiezoBuzzer(Device): <NEW_LINE> <INDENT> DEVICE_IDENTIFIER = 214 <NEW_LINE> DEVICE_DISPLAY_NAME = 'Piezo Buzzer Bricklet' <NEW_LINE> CALLBACK_BEEP_FINISHED = 3 <NEW_LINE> CALLBACK_MORSE_CODE_FINISHED = 4 <NEW_LINE> FUNCTION_BEEP = 1 <NEW_LINE> FUNCTION_MORSE_CODE = 2 <NEW_LINE> FUNCTION_GET_IDENTITY = 255 <NEW_LINE> def __init__(self, uid, ipcon): <NEW_LINE> <INDENT> Device.__init__(self, uid, ipcon) <NEW_LINE> self.api_version = (2, 0, 0) <NEW_LINE> self.response_expected[BrickletPiezoBuzzer.FUNCTION_BEEP] = BrickletPiezoBuzzer.RESPONSE_EXPECTED_FALSE <NEW_LINE> self.response_expected[BrickletPiezoBuzzer.FUNCTION_MORSE_CODE] = BrickletPiezoBuzzer.RESPONSE_EXPECTED_FALSE <NEW_LINE> self.response_expected[BrickletPiezoBuzzer.CALLBACK_BEEP_FINISHED] = BrickletPiezoBuzzer.RESPONSE_EXPECTED_ALWAYS_FALSE <NEW_LINE> self.response_expected[BrickletPiezoBuzzer.CALLBACK_MORSE_CODE_FINISHED] = BrickletPiezoBuzzer.RESPONSE_EXPECTED_ALWAYS_FALSE <NEW_LINE> self.response_expected[BrickletPiezoBuzzer.FUNCTION_GET_IDENTITY] = BrickletPiezoBuzzer.RESPONSE_EXPECTED_ALWAYS_TRUE <NEW_LINE> self.callback_formats[BrickletPiezoBuzzer.CALLBACK_BEEP_FINISHED] = '' <NEW_LINE> self.callback_formats[BrickletPiezoBuzzer.CALLBACK_MORSE_CODE_FINISHED] = '' <NEW_LINE> <DEDENT> def beep(self, duration): <NEW_LINE> <INDENT> self.ipcon.send_request(self, BrickletPiezoBuzzer.FUNCTION_BEEP, (duration,), 'I', '') <NEW_LINE> <DEDENT> def morse_code(self, morse): <NEW_LINE> <INDENT> self.ipcon.send_request(self, BrickletPiezoBuzzer.FUNCTION_MORSE_CODE, (morse,), '60s', '') <NEW_LINE> <DEDENT> def get_identity(self): <NEW_LINE> <INDENT> return GetIdentity(*self.ipcon.send_request(self, BrickletPiezoBuzzer.FUNCTION_GET_IDENTITY, (), '', '8s 8s c 3B 3B H')) <NEW_LINE> <DEDENT> def register_callback(self, id, callback): <NEW_LINE> <INDENT> if callback is None: <NEW_LINE> <INDENT> self.registered_callbacks.pop(id, None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.registered_callbacks[id] = callback
Creates 1kHz beep
625990862c8b7c6e89bd5359
class Entities(RDFStatInterface): <NEW_LINE> <INDENT> def __init__(self, results): <NEW_LINE> <INDENT> super(Entities, self).__init__(results) <NEW_LINE> self.results['count'] = 0 <NEW_LINE> <DEDENT> def count(self, s, p, o, s_blank, o_l, o_blank, statement): <NEW_LINE> <INDENT> if statement.subject.is_resource and not dh.query_distinct_subject(s, 0): <NEW_LINE> <INDENT> self.results['count'] += 1 <NEW_LINE> dh.set_distinct_subject(s, 0) <NEW_LINE> <DEDENT> <DEDENT> def voidify(self, void_model, dataset): <NEW_LINE> <INDENT> result_node = RDF.Node(literal=str(self.results['count']), datatype=ns_xs.integer.uri) <NEW_LINE> void_model.append(RDF.Statement(dataset, ns_void.entities, result_node)) <NEW_LINE> <DEDENT> def sparql(self, endpoint): <NEW_LINE> <INDENT> pass
count entities (triples with a URI as subject, distinct)
6259908663b5f9789fe86cdc
class ChangePasswordForm(Form): <NEW_LINE> <INDENT> old_password = PasswordField('Old password', validators=[Required()]) <NEW_LINE> password = PasswordField('New password', validators=[ Required(), EqualTo('password2', message='Passwords must match')]) <NEW_LINE> password2 = PasswordField('Confirm new password', validators=[Required()]) <NEW_LINE> submit = SubmitField('Update Password')
Password change form.
62599086adb09d7d5dc0c0ce
class BankAccount(Base): <NEW_LINE> <INDENT> __tablename__ = 'bankaccount' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> name = Column(String, nullable=False) <NEW_LINE> description = Column(String) <NEW_LINE> currency = Column(String, nullable=False) <NEW_LINE> institute_id = Column(Integer, ForeignKey('institute.id')) <NEW_LINE> @sqlalchemy.orm.validates('name') <NEW_LINE> def validate_name(self, key, name): <NEW_LINE> <INDENT> assert re.match('[\d-]+', name), ( "Valid account names should be account numbers! e.g. 12312-213123") <NEW_LINE> return name <NEW_LINE> <DEDENT> @sqlalchemy.orm.validates('currency') <NEW_LINE> def validate_currency(self, key, name): <NEW_LINE> <INDENT> assert pycountry.currencies.get(letter=name), ( "Currency should be the iso letter, e.g. EUR, AUD, USD") <NEW_LINE> return name <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def find_by_name(self, session, name): <NEW_LINE> <INDENT> return session.query(BankAccount).filter_by(name=name).first() <NEW_LINE> <DEDENT> def rollback_batch(self, session, batchid): <NEW_LINE> <INDENT> batch = session.query(ImportBatch).filter_by(id = batchid).first() <NEW_LINE> if batch is not None: <NEW_LINE> <INDENT> session.delete(batch)
A bank account
625990863317a56b869bf2fe
class TwoLevelsDownList(NetAppObject): <NEW_LINE> <INDENT> _group3_stats = None <NEW_LINE> @property <NEW_LINE> def group3_stats(self): <NEW_LINE> <INDENT> return self._group3_stats <NEW_LINE> <DEDENT> @group3_stats.setter <NEW_LINE> def group3_stats(self, val): <NEW_LINE> <INDENT> if val != None: <NEW_LINE> <INDENT> self.validate('group3_stats', val) <NEW_LINE> <DEDENT> self._group3_stats = val <NEW_LINE> <DEDENT> _zfield2 = None <NEW_LINE> @property <NEW_LINE> def zfield2(self): <NEW_LINE> <INDENT> return self._zfield2 <NEW_LINE> <DEDENT> @zfield2.setter <NEW_LINE> def zfield2(self, val): <NEW_LINE> <INDENT> if val != None: <NEW_LINE> <INDENT> self.validate('zfield2', val) <NEW_LINE> <DEDENT> self._zfield2 = val <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_api_name(): <NEW_LINE> <INDENT> return "two-levels-down-list" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_desired_attrs(): <NEW_LINE> <INDENT> return [ 'group3-stats', 'zfield2', ] <NEW_LINE> <DEDENT> def describe_properties(self): <NEW_LINE> <INDENT> return { 'group3_stats': { 'class': ThreeLevelsDownList, 'is_list': True, 'required': 'optional' }, 'zfield2': { 'class': basestring, 'is_list': False, 'required': 'optional' }, }
Two levels deep and of list type
625990863617ad0b5ee07cc5
class WalletUser(models.Model): <NEW_LINE> <INDENT> public_key = models.CharField(max_length=200, unique=True) <NEW_LINE> username = models.CharField(max_length=50, null=True) <NEW_LINE> created = models.DateTimeField('created', auto_now_add=True) <NEW_LINE> active_date = models.DateTimeField('active date', auto_now_add=True) <NEW_LINE> status = models.IntegerField('0:normal,1:frozen,2:suspend,3.deleted', default=0) <NEW_LINE> address = models.CharField(max_length=50,null=True) <NEW_LINE> avatar = models.CharField(max_length=50,null=True) <NEW_LINE> email = models.CharField(max_length=50,null=True) <NEW_LINE> gender = models.IntegerField('0:unknown,1:male,2:female', default=0) <NEW_LINE> mobile = models.CharField(max_length=50,null=True) <NEW_LINE> product_number = models.IntegerField(null=True) <NEW_LINE> USERNAME_FIELD = 'public_key' <NEW_LINE> REQUIRED_FIELDS = ['public_key'] <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('created', 'public_key') <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.public_key
The wallet user model.
6259908676e4537e8c3f10f4
class BucketItem(object): <NEW_LINE> <INDENT> def __init__(self, key, modification_date, etag, size, storage_class, owner=None): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> self.modification_date = modification_date <NEW_LINE> self.etag = etag <NEW_LINE> self.size = size <NEW_LINE> self.storage_class = storage_class <NEW_LINE> self.owner = owner
The contents of an Amazon S3 bucket.
625990864a966d76dd5f0a5a
class TestAuthMetadata(TGAuthMetadata): <NEW_LINE> <INDENT> def get_user(self, identity, userid): <NEW_LINE> <INDENT> if ':' in userid: <NEW_LINE> <INDENT> return userid.split(':')[0] <NEW_LINE> <DEDENT> return super(TestAuthMetadata, self).get_user(identity, userid) <NEW_LINE> <DEDENT> def get_groups(self, identity, userid): <NEW_LINE> <INDENT> if userid: <NEW_LINE> <INDENT> parts = userid.split(':') <NEW_LINE> return parts[1:2] <NEW_LINE> <DEDENT> return super(TestAuthMetadata, self).get_groups(identity, userid) <NEW_LINE> <DEDENT> def get_permissions(self, identity, userid): <NEW_LINE> <INDENT> if userid: <NEW_LINE> <INDENT> parts = userid.split(':') <NEW_LINE> return parts[2:] <NEW_LINE> <DEDENT> return super(TestAuthMetadata, self).get_permissions(identity, userid) <NEW_LINE> <DEDENT> def authenticate(self, environ, identity): <NEW_LINE> <INDENT> return None
Provides a way to look up the user, groups and permissions given the current identity. This has to be specialized for each storage backend. By default it returns empty lists for groups and permissions and None for the user.
62599086d8ef3951e32c8c18
class HelpGenDirective(AbstractHelpGenDirective): <NEW_LINE> <INDENT> def _get_help_files(self, az_cli): <NEW_LINE> <INDENT> create_invoker_and_load_cmds_and_args(az_cli) <NEW_LINE> return get_all_help(az_cli) <NEW_LINE> <DEDENT> def _load_doc_source_map(self): <NEW_LINE> <INDENT> map_path = os.path.join(get_cli_repo_path(), DOC_SOURCE_MAP_PATH) <NEW_LINE> with open(map_path) as open_file: <NEW_LINE> <INDENT> return json.load(open_file) <NEW_LINE> <DEDENT> <DEDENT> def _get_doc_source_content(self, doc_source_map, help_file): <NEW_LINE> <INDENT> is_command = isinstance(help_file, CliCommandHelpFile) <NEW_LINE> result = None <NEW_LINE> if not is_command: <NEW_LINE> <INDENT> top_group_name = help_file.command.split()[0] if help_file.command else 'az' <NEW_LINE> doc_source_value = doc_source_map[top_group_name] if top_group_name in doc_source_map else '' <NEW_LINE> result = '{}:docsource: {}'.format(self._INDENT, doc_source_value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> top_command_name = help_file.command.split()[0] if help_file.command else '' <NEW_LINE> if top_command_name in doc_source_map: <NEW_LINE> <INDENT> result = '{}:docsource: {}'.format(self._INDENT, doc_source_map[top_command_name]) <NEW_LINE> <DEDENT> <DEDENT> return result
General CLI Sphinx Directive The Core CLI has a doc source map to determine help text source for core cli commands. Extension help processed here will have no doc source
62599086099cdd3c636761b5
class OVSTrunkSkeleton(agent.TrunkSkeleton): <NEW_LINE> <INDENT> def __init__(self, ovsdb_handler): <NEW_LINE> <INDENT> super(OVSTrunkSkeleton, self).__init__() <NEW_LINE> self.ovsdb_handler = ovsdb_handler <NEW_LINE> registry.unsubscribe(self.handle_trunks, resources.TRUNK) <NEW_LINE> <DEDENT> def handle_trunks(self, trunk, event_type): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def handle_subports(self, subports, event_type): <NEW_LINE> <INDENT> trunk_id = subports[0].trunk_id <NEW_LINE> if self.ovsdb_handler.manages_this_trunk(trunk_id): <NEW_LINE> <INDENT> if event_type not in (events.CREATED, events.DELETED): <NEW_LINE> <INDENT> LOG.error(_LE("Unknown or unimplemented event %s"), event_type) <NEW_LINE> return <NEW_LINE> <DEDENT> ctx = self.ovsdb_handler.context <NEW_LINE> try: <NEW_LINE> <INDENT> LOG.debug("Event %s for subports: %s", event_type, subports) <NEW_LINE> if event_type == events.CREATED: <NEW_LINE> <INDENT> status = self.ovsdb_handler.wire_subports_for_trunk( ctx, trunk_id, subports) <NEW_LINE> <DEDENT> elif event_type == events.DELETED: <NEW_LINE> <INDENT> subport_ids = [subport.port_id for subport in subports] <NEW_LINE> status = self.ovsdb_handler.unwire_subports_for_trunk( trunk_id, subport_ids) <NEW_LINE> <DEDENT> self.ovsdb_handler.report_trunk_status(ctx, trunk_id, status) <NEW_LINE> <DEDENT> except oslo_messaging.MessagingException as e: <NEW_LINE> <INDENT> LOG.error(_LE( "Error on event %(event)s for subports " "%(subports)s: %(err)s"), {'event': event_type, 'subports': subports, 'trunk_id': trunk_id, 'err': e})
It processes Neutron Server events to create the physical resources associated to a logical trunk in response to user initiated API events (such as trunk subport add/remove). It collaborates with the OVSDBHandler to implement the trunk control plane.
6259908626068e7796d4e4b8
class BaseMixin(object): <NEW_LINE> <INDENT> __table_args__ = {'extend_existing': True} <NEW_LINE> @declared_attr <NEW_LINE> def __tablename__(cls): <NEW_LINE> <INDENT> def _join(match): <NEW_LINE> <INDENT> word = match.group() <NEW_LINE> if len(word) > 1: <NEW_LINE> <INDENT> return ('_%s_%s' % (word[:-1], word[-1])).lower() <NEW_LINE> <DEDENT> return '_' + word.lower() <NEW_LINE> <DEDENT> return _camelcase_re.sub(_join, cls.__name__).lstrip('_')
Use this class as a mixin for your models; it will define your tablenames automatically: MyModel will be called my_model in the database. :: from sqlalchemy import Table, Column, Integer, String, Boolean, ForeignKey, Date from flask.ext.appbuilder import Base class MyModel(BaseMixin, Base): id = Column(Integer, primary_key=True) name = Column(String(50), unique = True, nullable=False)
625990863346ee7daa33841d
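The BaseMixin record above relies on a module-level _camelcase_re pattern that the snippet omits. The standalone sketch below assumes the usual camel-case splitting regex used by Flask-AppBuilder-style code and reproduces the __tablename__ derivation so it can be tested on its own.

import re

# Assumed pattern: a run of capitals followed by a lowercase letter or digit.
_camelcase_re = re.compile(r'([A-Z]+)(?=[a-z0-9])')

def derive_tablename(class_name):
    def _join(match):
        word = match.group()
        if len(word) > 1:
            return ('_%s_%s' % (word[:-1], word[-1])).lower()
        return '_' + word.lower()
    return _camelcase_re.sub(_join, class_name).lstrip('_')

print(derive_tablename('MyModel'))       # my_model
print(derive_tablename('HTTPRequest'))   # http_request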
class UidAlreadyExistsError(exceptions.AlreadyExistsError): <NEW_LINE> <INDENT> default_message = 'The user with the provided uid already exists' <NEW_LINE> def __init__(self, message, cause, http_response): <NEW_LINE> <INDENT> exceptions.AlreadyExistsError.__init__(self, message, cause, http_response)
The user with the provided uid already exists.
62599086fff4ab517ebcf38d
class NOAAPredictIndicesLightCurve(LightCurve): <NEW_LINE> <INDENT> def peek(self, **plot_args): <NEW_LINE> <INDENT> figure = plt.figure() <NEW_LINE> axes = plt.gca() <NEW_LINE> axes = self.data['sunspot'].plot(color='b') <NEW_LINE> self.data['sunspot low'].plot(linestyle='--', color='b') <NEW_LINE> self.data['sunspot high'].plot(linestyle='--', color='b') <NEW_LINE> axes.set_ylim(0) <NEW_LINE> axes.set_title('Solar Cycle Sunspot Number Prediction') <NEW_LINE> axes.set_ylabel('Sunspot Number') <NEW_LINE> axes.yaxis.grid(True, 'major') <NEW_LINE> axes.xaxis.grid(True, 'major') <NEW_LINE> axes.legend() <NEW_LINE> figure.show() <NEW_LINE> return figure <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _get_default_uri(self): <NEW_LINE> <INDENT> return "http://services.swpc.noaa.gov/text/predicted-sunspot-radio-flux.txt" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _get_url_for_date_range(*args, **kwargs): <NEW_LINE> <INDENT> return NOAAPredictIndicesLightCurve._get_default_uri() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _parse_csv(filepath): <NEW_LINE> <INDENT> header = '' <NEW_LINE> with open(filepath, 'r') as fp: <NEW_LINE> <INDENT> line = fp.readline() <NEW_LINE> while line.startswith((":", "#")): <NEW_LINE> <INDENT> header += line <NEW_LINE> line = fp.readline() <NEW_LINE> <DEDENT> fields = ('yyyy', 'mm', 'sunspot', 'sunspot low', 'sunspot high', 'radio flux', 'radio flux low', 'radio flux high') <NEW_LINE> data = read_csv(filepath, delim_whitespace=True, names = fields, comment='#', skiprows=2, dtype={'yyyy':np.str, 'mm':np.str}) <NEW_LINE> data = data.dropna(how='any') <NEW_LINE> timeindex = [datetime.datetime.strptime(x + y, '%Y%m') for x,y in zip(data['yyyy'], data['mm'])] <NEW_LINE> data['time']=timeindex <NEW_LINE> data = data.set_index('time') <NEW_LINE> data = data.drop('mm',1) <NEW_LINE> data = data.drop('yyyy',1) <NEW_LINE> return {'comments': header}, data
NOAA Solar Cycle Predicted Progression The predictions are updated monthly and are produced by ISES. Observed values are initially the preliminary values which are replaced with the final values as they become available. The following predicted values are available. * The predicted RI sunspot number is the official International Sunspot Number and is issued by the `Solar Influences Data Analysis Center (SIDC) <http://sidc.oma.be>`_ in Brussels, Belgium. * The predicted radio flux at 10.7 cm is produced by `Penticton/Ottawa <http://www.ngdc.noaa.gov/stp/solar/flux.html>`_ and the units are in sfu. Examples -------- >>> from sunpy import lightcurve as lc >>> noaa = lc.NOAAPredictIndicesLightCurve.create() >>> noaa.peek() # doctest: +SKIP References ---------- * `Solar and Geomagnetic Indices Data Archive <http://legacy-www.swpc.noaa.gov/Data/index.html#indices>`_ * `Predicted solar indices <http://services.swpc.noaa.gov/text/predicted-sunspot-radio-flux.txt>`_ * `NOAA plots of Solar Cycle Progression <http://www.swpc.noaa.gov/products/solar-cycle-progression>`_ * `NOAA Product List <http://www.swpc.noaa.gov/products-and-data>`_
6259908655399d3f0562808a
class FakeDriver(base.BaseDriver): <NEW_LINE> <INDENT> def __init__(self, conf, url, default_exchange=None, allowed_remote_exmods=None): <NEW_LINE> <INDENT> super(FakeDriver, self).__init__(conf, url, default_exchange, allowed_remote_exmods) <NEW_LINE> self._exchange_manager = FakeExchangeManager(default_exchange) <NEW_LINE> <DEDENT> def require_features(self, requeue=True): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _check_serialize(message): <NEW_LINE> <INDENT> json.dumps(message) <NEW_LINE> <DEDENT> def _send(self, target, ctxt, message, wait_for_reply=None, timeout=None): <NEW_LINE> <INDENT> self._check_serialize(message) <NEW_LINE> exchange = self._exchange_manager.get_exchange(target.exchange) <NEW_LINE> reply_q = None <NEW_LINE> if wait_for_reply: <NEW_LINE> <INDENT> reply_q = moves.queue.Queue() <NEW_LINE> <DEDENT> exchange.deliver_message(target.topic, ctxt, message, server=target.server, fanout=target.fanout, reply_q=reply_q) <NEW_LINE> if wait_for_reply: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> reply, failure = reply_q.get(timeout=timeout) <NEW_LINE> if failure: <NEW_LINE> <INDENT> raise failure <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return reply <NEW_LINE> <DEDENT> <DEDENT> except moves.queue.Empty: <NEW_LINE> <INDENT> raise oslo_messaging.MessagingTimeout( 'No reply on topic %s' % target.topic) <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def send(self, target, ctxt, message, wait_for_reply=None, timeout=None, call_monitor_timeout=None, retry=None): <NEW_LINE> <INDENT> return self._send(target, ctxt, message, wait_for_reply, timeout) <NEW_LINE> <DEDENT> def send_notification(self, target, ctxt, message, version, retry=None): <NEW_LINE> <INDENT> self._send(target, ctxt, message) <NEW_LINE> <DEDENT> def listen(self, target, batch_size, batch_timeout): <NEW_LINE> <INDENT> exchange = target.exchange or self._default_exchange <NEW_LINE> listener = FakeListener(self._exchange_manager, [oslo_messaging.Target( topic=target.topic, server=target.server, exchange=exchange), oslo_messaging.Target( topic=target.topic, exchange=exchange)]) <NEW_LINE> return base.PollStyleListenerAdapter(listener, batch_size, batch_timeout) <NEW_LINE> <DEDENT> def listen_for_notifications(self, targets_and_priorities, pool, batch_size, batch_timeout): <NEW_LINE> <INDENT> targets = [ oslo_messaging.Target( topic='%s.%s' % (target.topic, priority), exchange=target.exchange) for target, priority in targets_and_priorities] <NEW_LINE> listener = FakeListener(self._exchange_manager, targets, pool) <NEW_LINE> return base.PollStyleListenerAdapter(listener, batch_size, batch_timeout) <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> pass
Fake driver used for testing. This driver passes messages in memory, and should only be used for unit tests.
6259908699fddb7c1ca63b96
class ISanitizedHTMLContentFragmentField(IHTMLContentFragmentField): <NEW_LINE> <INDENT> pass
A :class:`Text` type that also requires the object implement an interface descending from :class:`.ISanitizedHTMLContentFragment`. .. versionadded:: 1.2.0
62599086a05bb46b3848bee2
class MediaIoBaseUpload(MediaUpload): <NEW_LINE> <INDENT> @util.positional(3) <NEW_LINE> def __init__(self, fd, mimetype, chunksize=DEFAULT_CHUNK_SIZE, resumable=False): <NEW_LINE> <INDENT> super(MediaIoBaseUpload, self).__init__() <NEW_LINE> self._fd = fd <NEW_LINE> self._mimetype = mimetype <NEW_LINE> if not (chunksize == -1 or chunksize > 0): <NEW_LINE> <INDENT> raise InvalidChunkSizeError() <NEW_LINE> <DEDENT> self._chunksize = chunksize <NEW_LINE> self._resumable = resumable <NEW_LINE> self._fd.seek(0, os.SEEK_END) <NEW_LINE> self._size = self._fd.tell() <NEW_LINE> <DEDENT> def chunksize(self): <NEW_LINE> <INDENT> return self._chunksize <NEW_LINE> <DEDENT> def mimetype(self): <NEW_LINE> <INDENT> return self._mimetype <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return self._size <NEW_LINE> <DEDENT> def resumable(self): <NEW_LINE> <INDENT> return self._resumable <NEW_LINE> <DEDENT> def getbytes(self, begin, length): <NEW_LINE> <INDENT> self._fd.seek(begin) <NEW_LINE> return self._fd.read(length) <NEW_LINE> <DEDENT> def has_stream(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def stream(self): <NEW_LINE> <INDENT> return self._fd <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> raise NotImplementedError('MediaIoBaseUpload is not serializable.')
A MediaUpload for io.Base objects. Note that the Python file object is compatible with io.Base and can be used with this class also. fh = io.BytesIO('...Some data to upload...') media = MediaIoBaseUpload(fh, mimetype='image/png', chunksize=1024*1024, resumable=True) farm.animals().insert( id='cow', name='cow.png', media_body=media).execute() Depending on the platform you are working on, you may pass -1 as the chunksize, which indicates that the entire file should be uploaded in a single request. If the underlying platform supports streams, such as Python 2.6 or later, then this can be very efficient as it avoids multiple connections, and also avoids loading the entire file into memory before sending it. Note that Google App Engine has a 5MB limit on request size, so you should never set your chunksize larger than 5MB, or to -1.
62599086aad79263cf430331
class Solution: <NEW_LINE> <INDENT> def maxSubArray(self, nums): <NEW_LINE> <INDENT> ans = nums[0] <NEW_LINE> sum = 0 <NEW_LINE> for i in range(len(nums)): <NEW_LINE> <INDENT> sum += nums[i] <NEW_LINE> if sum > ans: <NEW_LINE> <INDENT> ans = sum <NEW_LINE> <DEDENT> if sum < 0: <NEW_LINE> <INDENT> sum = 0 <NEW_LINE> <DEDENT> <DEDENT> return ans
@param nums: A list of integers @return: An integer indicating the sum of the maximum subarray
625990865fc7496912d49026
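
The method above is Kadane's algorithm: it keeps a running sum that resets to zero whenever it drops below zero, while tracking the best value seen so far. A quick check using the class exactly as defined in the record:

s = Solution()
s.maxSubArray([-2, 1, -3, 4, -1, 2, 1, -5, 4])  # 6, from the subarray [4, -1, 2, 1]
s.maxSubArray([-3, -1, -2])                     # -1, the best single-element subarray
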
class CosineSimilarityDescriptionAndDDG(BaseFeature): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def feature_labels(): <NEW_LINE> <INDENT> return ['Cosine Similarity: Profile Description and DDG Description'] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def score(query, profile, data=None): <NEW_LINE> <INDENT> if not len(profile.description): <NEW_LINE> <INDENT> return [-1] <NEW_LINE> <DEDENT> vectorspace = VectorSpace([]) <NEW_LINE> tokenized_description = LowerTokenizer.tokenize(profile.description) <NEW_LINE> description_vector = vectorspace.vector_for_document( tokenized_document=tokenized_description, update=True) <NEW_LINE> ddg_description = DuckDuckDescription.query(query.lower()) <NEW_LINE> ddg_vector = [] <NEW_LINE> if ddg_description: <NEW_LINE> <INDENT> ddg_text = ddg_description['description']['text'] <NEW_LINE> ddg_tokenized = LowerTokenizer.tokenize(ddg_text) <NEW_LINE> ddg_vector = vectorspace.vector_for_document( tokenized_document=ddg_tokenized, update=True) <NEW_LINE> <DEDENT> if not len(ddg_vector): <NEW_LINE> <INDENT> return [-1] <NEW_LINE> <DEDENT> return [cossim(description_vector, ddg_vector)]
Cosine similarity between the profile description and a description retrieved from the DuckDuckGo search engine.
62599086dc8b845886d55131
class test04UnregisterHandlingTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> my_resvn_setup() <NEW_LINE> initA.reserve(my_rtype) <NEW_LINE> <DEDENT> def testUnregisterReleasesReservation(self): <NEW_LINE> <INDENT> resvnA = initA.getReservation() <NEW_LINE> self.assertEqual(resvnA.key, initA.key) <NEW_LINE> self.assertEqual(resvnA.getRtypeNum(), my_rtype) <NEW_LINE> res = initA.unregister() <NEW_LINE> self.assertEqual(res, 0) <NEW_LINE> time.sleep(1) <NEW_LINE> resvnA = initA.getReservation() <NEW_LINE> self.assertEqual(resvnA.key, None) <NEW_LINE> self.assertEqual(resvnA.rtype, None) <NEW_LINE> <DEDENT> def testUnregisterDoesNotReleaseReservation(self): <NEW_LINE> <INDENT> resvnA = initA.getReservation() <NEW_LINE> self.assertEqual(resvnA.key, initA.key) <NEW_LINE> self.assertEqual(resvnA.getRtypeNum(), my_rtype) <NEW_LINE> res = initB.unregister() <NEW_LINE> self.assertEqual(res, 0) <NEW_LINE> resvnA = initA.getReservation() <NEW_LINE> self.assertEqual(resvnA.key, initA.key) <NEW_LINE> self.assertEqual(resvnA.getRtypeNum(), my_rtype)
Test how PGR RESERVE Exclusive Access reservation is handled during unregistration
62599086bf627c535bcb304b
class C6ZGate(CompositeGate): <NEW_LINE> <INDENT> def __init__(self, ctl1, ctl2, ctl3, ctl4, ctl5, ctl6, tgt, circ=None): <NEW_LINE> <INDENT> super().__init__("c6z", [], [ctl1, ctl2, ctl3, ctl4, ctl5, ctl6, tgt], circ) <NEW_LINE> self.c5x(ctl1, ctl2, ctl3, ctl4, ctl5, ctl6) <NEW_LINE> self.cu1(-pi/2, ctl6, tgt) <NEW_LINE> self.c5x(ctl1, ctl2, ctl3, ctl4, ctl5, ctl6) <NEW_LINE> self.cu1(pi/2, ctl6, tgt) <NEW_LINE> self.ccccx(ctl1, ctl2, ctl3, ctl4, ctl5) <NEW_LINE> self.cu1(-pi/4, ctl5, tgt) <NEW_LINE> self.ccccx(ctl1, ctl2, ctl3, ctl4, ctl5) <NEW_LINE> self.cu1(pi/4, ctl5, tgt) <NEW_LINE> self.cccx(ctl1, ctl2, ctl3, ctl4) <NEW_LINE> self.cu1(-pi/8, ctl4, tgt) <NEW_LINE> self.cccx(ctl1, ctl2, ctl3, ctl4) <NEW_LINE> self.cu1(pi/8, ctl4, tgt) <NEW_LINE> self.ccx(ctl1, ctl2, ctl3) <NEW_LINE> self.cu1(-pi/16, ctl3, tgt) <NEW_LINE> self.ccx(ctl1, ctl2, ctl3) <NEW_LINE> self.cu1(pi/16, ctl3, tgt) <NEW_LINE> self.cx(ctl1, ctl2) <NEW_LINE> self.cu1(-pi/32, ctl2, tgt) <NEW_LINE> self.cx(ctl1, ctl2) <NEW_LINE> self.cu1(pi/32, ctl2, tgt) <NEW_LINE> self.cu1(pi/32, ctl1, tgt) <NEW_LINE> <DEDENT> def reapply(self, circ): <NEW_LINE> <INDENT> self._modifiers(circ.c6z(self.arg[0], self.arg[1], self.arg[2], self.arg[3], self.arg[4], self.arg[5], self.arg[6]))
c6z gate.
62599086656771135c48adec
class CreateManagementGroupDetails(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'version': {'readonly': True}, 'updated_time': {'readonly': True}, 'updated_by': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'version': {'key': 'version', 'type': 'int'}, 'updated_time': {'key': 'updatedTime', 'type': 'iso-8601'}, 'updated_by': {'key': 'updatedBy', 'type': 'str'}, 'parent': {'key': 'parent', 'type': 'CreateParentGroupInfo'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(CreateManagementGroupDetails, self).__init__(**kwargs) <NEW_LINE> self.version = None <NEW_LINE> self.updated_time = None <NEW_LINE> self.updated_by = None <NEW_LINE> self.parent = kwargs.get('parent', None)
The details of a management group used during creation. Variables are only populated by the server, and will be ignored when sending a request. :ivar version: The version number of the object. :vartype version: int :ivar updated_time: The date and time when this object was last updated. :vartype updated_time: ~datetime.datetime :ivar updated_by: The identity of the principal or process that updated the object. :vartype updated_by: str :param parent: (Optional) The ID of the parent management group used during creation. :type parent: ~azure.mgmt.managementgroups.models.CreateParentGroupInfo
625990867cff6e4e811b75ba
class UrlTestCases(KALiteTestCase): <NEW_LINE> <INDENT> def validate_url(self, url, status_code=200, find_str=None): <NEW_LINE> <INDENT> resp = Client().get(url) <NEW_LINE> self.assertEquals(resp.status_code, status_code, "%s (check status code=%d != %d)" % (url, status_code, resp.status_code)) <NEW_LINE> if find_str is not None: <NEW_LINE> <INDENT> self.assertTrue(find_str in resp.content, "%s (check content)" % url) <NEW_LINE> <DEDENT> <DEDENT> def test_urls(self): <NEW_LINE> <INDENT> settings.DEBUG=False <NEW_LINE> self.validate_url('/') <NEW_LINE> self.validate_url('/exercisedashboard/') <NEW_LINE> self.validate_url(reverse('login')) <NEW_LINE> self.validate_url(reverse('facility_user_signup'), status_code=302) <NEW_LINE> self.validate_url('/math/') <NEW_LINE> self.validate_url('/content/', status_code=404) <NEW_LINE> self.validate_url('/accounts/login/', status_code=404) <NEW_LINE> self.validate_url('/accounts/register/', status_code=404)
Walk through a set of URLs, and validate very basic properties (status code, some text) A good test to weed out untested view/template errors
62599086d486a94d0ba2db2e
class Handler(object): <NEW_LINE> <INDENT> def spawn(self, target, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError
Base class for Handler classes
62599086fff4ab517ebcf38f
class sqlite_connection(sql.sql_connection, transactions.transactional_connection): <NEW_LINE> <INDENT> def __init__(self, connector): <NEW_LINE> <INDENT> verify_type(connector, SQLiteConnector) <NEW_LINE> super().__init__(connector) <NEW_LINE> self._connection = None <NEW_LINE> self._cursor = None <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> self.verify_closed() <NEW_LINE> super().open() <NEW_LINE> self._connection = sqlite3.connect(str(self._connector)) <NEW_LINE> self._cursor = self._connection.cursor() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.verify_open() <NEW_LINE> super().close() <NEW_LINE> if self._cursor is not None: <NEW_LINE> <INDENT> self._cursor.close() <NEW_LINE> self._cursor = None <NEW_LINE> <DEDENT> if self._connection is not None: <NEW_LINE> <INDENT> self._connection.close() <NEW_LINE> self._connection = None <NEW_LINE> <DEDENT> <DEDENT> def begin(self): <NEW_LINE> <INDENT> self.verify_open() <NEW_LINE> self._cursor.begin() <NEW_LINE> <DEDENT> def commit(self): <NEW_LINE> <INDENT> self.verify_open() <NEW_LINE> self._cursor.commit() <NEW_LINE> <DEDENT> def rollback(self): <NEW_LINE> <INDENT> self.verify_open() <NEW_LINE> self._cursor.rollback() <NEW_LINE> <DEDENT> def _execute(self, command): <NEW_LINE> <INDENT> self.verify_open() <NEW_LINE> self._cursor.execute(command) <NEW_LINE> return SQLiteRecordSet(self._cursor) <NEW_LINE> <DEDENT> def _call(self, name, *parameters): <NEW_LINE> <INDENT> raise OperationNotSupportedError('Operation not supported.')
A sqlite_connection manages the state for a connection to a SQLite database, providing an interface for executing queries and commands.
625990863317a56b869bf300
@public <NEW_LINE> class AQueue(_QueuesBase): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> def on_get(self, request, response): <NEW_LINE> <INDENT> if self._name not in config.switchboards: <NEW_LINE> <INDENT> not_found(response) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> okay(response, self._resource_as_json(self._name)) <NEW_LINE> <DEDENT> <DEDENT> def on_post(self, request, response): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> validator = Validator(list_id=str, text=str) <NEW_LINE> values = validator(request) <NEW_LINE> <DEDENT> except ValueError as error: <NEW_LINE> <INDENT> bad_request(response, str(error)) <NEW_LINE> return <NEW_LINE> <DEDENT> list_id = values['list_id'] <NEW_LINE> mlist = getUtility(IListManager).get_by_list_id(list_id) <NEW_LINE> if mlist is None: <NEW_LINE> <INDENT> bad_request(response, 'No such list: {}'.format(list_id)) <NEW_LINE> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> filebase = inject_text( mlist, values['text'], switchboard=self._name) <NEW_LINE> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> bad_request(response, str(error)) <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> location = self.api.path_to( 'queues/{}/{}'.format(self._name, filebase)) <NEW_LINE> created(response, location)
A single queue.
6259908692d797404e389918
class StorageBucketsDeleteResponse(_messages.Message): <NEW_LINE> <INDENT> pass
An empty StorageBucketsDelete response.
625990864428ac0f6e65a0a6
class powerlaw(distribution): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(powerlaw, self).__init__() <NEW_LINE> self.name = 'power law' <NEW_LINE> self.n_para = 1 <NEW_LINE> <DEDENT> def _loglikelihood(self, alpha_, xmin, logsum_N): <NEW_LINE> <INDENT> alpha, = alpha_ <NEW_LINE> logsum, N = logsum_N <NEW_LINE> logll = - alpha * logsum - N * np.log(float(mp.zeta(alpha, xmin))) <NEW_LINE> return -logll <NEW_LINE> <DEDENT> def _fitting(self, xmin=1): <NEW_LINE> <INDENT> freq = self.freq[self.freq[:, 0] >= xmin] <NEW_LINE> sumlog, N = np.sum(freq[:, -1] * np.log(freq[:, 0])), np.sum(freq[:, -1]) <NEW_LINE> if xmin not in self.N_xmin: <NEW_LINE> <INDENT> self.N_xmin[xmin] = N <NEW_LINE> <DEDENT> res = minimize(self._loglikelihood, x0=(2.5), method='SLSQP', tol=1e-15, args=(xmin, (sumlog, N)), bounds=((1. + 1e-6, 5.0),)) <NEW_LINE> aic = 2 * res.fun + 2 * self.n_para <NEW_LINE> fits = {} <NEW_LINE> fits['alpha'] = res.x[0] <NEW_LINE> return (res.x[0], -res.fun, aic), fits <NEW_LINE> <DEDENT> def _get_ccdf(self, xmin): <NEW_LINE> <INDENT> alpha = self.fitting_res[xmin][1]['alpha'] <NEW_LINE> total, ccdf = 1., [] <NEW_LINE> normfactor = 1. / float(mp.zeta(alpha, xmin)) <NEW_LINE> for x in range(xmin, self.xmax): <NEW_LINE> <INDENT> total -= x**(-alpha) * normfactor <NEW_LINE> ccdf.append([x, total]) <NEW_LINE> <DEDENT> return np.asarray(ccdf)
Discrete power law distributions, given by P(x) ~ x^(-alpha)
6259908699fddb7c1ca63b97
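
The _loglikelihood method above implements the discrete power law P(x) = x^(-alpha) / zeta(alpha, xmin), returning the negative log-likelihood alpha * sum(log x_i) + N * log(zeta(alpha, xmin)) that SLSQP then minimizes. A stand-alone sketch of that quantity, assuming numpy and mpmath are available (the class itself additionally relies on scipy's minimize and a distribution base class not shown in the record):

import numpy as np
from mpmath import zeta

def neg_loglikelihood(alpha, data, xmin=1):
    # keep only observations at or above xmin, as _fitting does
    data = np.asarray([x for x in data if x >= xmin], dtype=float)
    n = len(data)
    # zeta(alpha, xmin) is the Hurwitz zeta function normalising the discrete power law
    return alpha * np.sum(np.log(data)) + n * np.log(float(zeta(alpha, xmin)))

neg_loglikelihood(2.5, [1, 1, 2, 3, 5, 8], xmin=1)
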
class Environment(objects.VarContainer): <NEW_LINE> <INDENT> readonly = ["ADDR", "CLIENT_ADDR", "HOST", "HTTP_SOFTWARE", "PATH_SEP", "PHP_VERSION", "WEB_ROOT"] <NEW_LINE> item_deleters = ["NONE"] <NEW_LINE> def __init__(self, value={}, readonly=[]): <NEW_LINE> <INDENT> self.readonly += readonly <NEW_LINE> self.defaults = {} <NEW_LINE> super().__init__(value) <NEW_LINE> self.defaults = copy.copy(dict(self)) <NEW_LINE> <DEDENT> def __setitem__(self, name, value): <NEW_LINE> <INDENT> if name in ["", "__DEFAULTS__"] or not utils.ascii.isgraph(name): <NEW_LINE> <INDENT> raise KeyError("illegal name: '{}'".format(name)) <NEW_LINE> <DEDENT> if name in self.readonly and name in self.keys(): <NEW_LINE> <INDENT> raise AttributeError("«{}» variable is read-only".format(name)) <NEW_LINE> <DEDENT> if value == "%%DEFAULT%%": <NEW_LINE> <INDENT> if name in self.defaults.keys(): <NEW_LINE> <INDENT> value = self.defaults[name] <NEW_LINE> super().__setitem__(name, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError("'%s' have no default value" % name) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> super().__setitem__(name, value) <NEW_LINE> <DEDENT> if name not in self.defaults.keys(): <NEW_LINE> <INDENT> self.defaults[name] = self[name] <NEW_LINE> <DEDENT> <DEDENT> def _isattr(self, name): <NEW_LINE> <INDENT> return re.match("^[A-Z][A-Z0-9_]+$", name) <NEW_LINE> <DEDENT> def update(self, dic): <NEW_LINE> <INDENT> readonly = self.readonly <NEW_LINE> self.readonly = [] <NEW_LINE> if "__DEFAULTS__" in dic.keys(): <NEW_LINE> <INDENT> self.defaults = copy.copy(dict(dic.pop("__DEFAULTS__"))) <NEW_LINE> <DEDENT> elif hasattr(dic, "defaults"): <NEW_LINE> <INDENT> self.defaults = copy.copy(dict(dic.defaults)) <NEW_LINE> <DEDENT> for key, value in dic.items(): <NEW_LINE> <INDENT> if key in self.keys() and key not in readonly and self[key] != self.defaults[key]: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> super().update({key: value}) <NEW_LINE> <DEDENT> self.readonly = readonly <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.defaults = {} <NEW_LINE> super().clear()
Environment Variables Instantiate a dict()-like object that stores PhpSploit environment variables. Unlike settings, the env vars object works exactly the same way as its parent (MetaDict), except that some items (env vars) are tagged as read-only. This behavior only applies if the concerned variable already exists; to set a tagged variable's value, it must not exist yet. Example: >>> Env = Environment() >>> Env.HOST = "foo" >>> Env.HOST = "bar" AttributeError: «HOST» variable is read-only >>> Env.HOST 'foo' >>> del Env.HOST >>> Env.HOST = "bar" >>> Env.HOST 'bar'
62599086be7bc26dc9252c12
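
A sketch of the %%DEFAULT%% reset behaviour implemented in __setitem__ above. It assumes the attribute-style access demonstrated in the record's own docstring, which is provided by the VarContainer/MetaDict parent class not shown here:

Env = Environment()
Env.FOO = "bar"           # first assignment also records "bar" as FOO's default
Env.FOO = "baz"           # FOO is not in the read-only list, so it can be changed
Env.FOO = "%%DEFAULT%%"   # restores the recorded default
Env.FOO                   # 'bar'
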
class InstanceNormalization(Layer): <NEW_LINE> <INDENT> def __init__(self, epsilon=1e-5): <NEW_LINE> <INDENT> super(InstanceNormalization, self).__init__() <NEW_LINE> self.epsilon = epsilon <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> self.scale = self.add_weight(name="scale", shape=input_shape[-1:], initializer="glorot_uniform", trainable=True) <NEW_LINE> self.offset = self.add_weight(name="offset", shape=input_shape[-1:], initializer="zeros", trainable=True) <NEW_LINE> <DEDENT> def call(self, x): <NEW_LINE> <INDENT> mean, variance = tf.nn.moments(x, axes=[1, 2], keepdims=True) <NEW_LINE> inv = tf.math.rsqrt(variance + self.epsilon) <NEW_LINE> normalized = (x - mean) * inv <NEW_LINE> return self.scale * normalized + self.offset
Instance Normalization Layer (https://arxiv.org/abs/1607.08022). Applies Instance Normalization for each channel in each data sample in a batch. Args: epsilon: a small positive decimal number to avoid dividing by 0
62599086ad47b63b2c5a93cc
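
A usage sketch for the layer above, assuming Layer is tf.keras.layers.Layer (so the first __call__ triggers build) and channels-last input. Each sample and channel is normalised over its spatial axes as (x - mean) / sqrt(variance + epsilon), then scaled and shifted by the learned per-channel weights:

import tensorflow as tf

x = tf.random.normal([2, 32, 32, 3])        # (batch, height, width, channels)
layer = InstanceNormalization(epsilon=1e-5)
y = layer(x)                                # builds scale/offset of shape (3,) on first call
y.shape                                     # TensorShape([2, 32, 32, 3]), same as the input
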
class CommonSampleForm(FlaskForm): <NEW_LINE> <INDENT> notes = TextAreaField('Notes', [DataRequired(message='Notes are not filled in')]) <NEW_LINE> parameters = TextAreaField('Parameters', [DataRequired(message='Parameters are not filled in')]) <NEW_LINE> platform = SelectField( 'Platform', [DataRequired(message='Platform is not selected')], coerce=str, choices=[(p.value, p.description) for p in Platform] ) <NEW_LINE> version = SelectField('Version', [DataRequired(message='Version is not selected')], coerce=int) <NEW_LINE> @staticmethod <NEW_LINE> def validate_version(form, field) -> None: <NEW_LINE> <INDENT> version = CCExtractorVersion.query.filter(CCExtractorVersion.id == field.data).first() <NEW_LINE> if version is None: <NEW_LINE> <INDENT> raise ValidationError('Invalid version selected')
Form to submit common sample data.
6259908626068e7796d4e4bc
class CheckpointableDataStructure(checkpointable_lib.CheckpointableBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._layers = [] <NEW_LINE> self.trainable = True <NEW_LINE> self._extra_variables = [] <NEW_LINE> <DEDENT> def _track_value(self, value, name): <NEW_LINE> <INDENT> if isinstance(value, checkpointable_lib.CheckpointableBase): <NEW_LINE> <INDENT> self._track_checkpointable(value, name=name) <NEW_LINE> if isinstance(value, variables.Variable): <NEW_LINE> <INDENT> self._extra_variables.append(value) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( ("Only checkpointable objects (such as Layers or Optimizers) may be " "stored in a List object. Got %s, which does not inherit from " "CheckpointableBase.") % (value,)) <NEW_LINE> <DEDENT> if isinstance(value, (base_layer.Layer, CheckpointableDataStructure)): <NEW_LINE> <INDENT> if value not in self._layers: <NEW_LINE> <INDENT> self._layers.append(value) <NEW_LINE> if hasattr(value, "_use_resource_variables"): <NEW_LINE> <INDENT> value._use_resource_variables = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def layers(self): <NEW_LINE> <INDENT> return self._layers <NEW_LINE> <DEDENT> @property <NEW_LINE> def trainable_weights(self): <NEW_LINE> <INDENT> return layer_utils.gather_trainable_weights( trainable=self.trainable, sub_layers=self.layers, extra_variables=self._extra_variables) <NEW_LINE> <DEDENT> @property <NEW_LINE> def non_trainable_weights(self): <NEW_LINE> <INDENT> return layer_utils.gather_non_trainable_weights( trainable=self.trainable, sub_layers=self.layers, extra_variables=self._extra_variables) <NEW_LINE> <DEDENT> @property <NEW_LINE> def weights(self): <NEW_LINE> <INDENT> return self.trainable_weights + self.non_trainable_weights <NEW_LINE> <DEDENT> @property <NEW_LINE> def trainable_variables(self): <NEW_LINE> <INDENT> return self.trainable_weights <NEW_LINE> <DEDENT> @property <NEW_LINE> def non_trainable_variables(self): <NEW_LINE> <INDENT> return self.non_trainable_weights <NEW_LINE> <DEDENT> @property <NEW_LINE> def variables(self): <NEW_LINE> <INDENT> return self.weights <NEW_LINE> <DEDENT> @property <NEW_LINE> def updates(self): <NEW_LINE> <INDENT> aggregated = [] <NEW_LINE> for layer in self.layers: <NEW_LINE> <INDENT> aggregated += layer.updates <NEW_LINE> <DEDENT> return aggregated <NEW_LINE> <DEDENT> @property <NEW_LINE> def losses(self): <NEW_LINE> <INDENT> aggregated = [] <NEW_LINE> for layer in self.layers: <NEW_LINE> <INDENT> aggregated += layer.losses <NEW_LINE> <DEDENT> return aggregated <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return id(self) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self is other
Base class for data structures which contain checkpointable objects.
625990867cff6e4e811b75bc
class ChainVolatilityManager(QtWidgets.QWidget): <NEW_LINE> <INDENT> IMPV_CHANGE_STEP = 0.001 <NEW_LINE> def __init__(self, chain, parent=None): <NEW_LINE> <INDENT> super(ChainVolatilityManager, self).__init__(parent) <NEW_LINE> self.chain = chain <NEW_LINE> self.initUi() <NEW_LINE> <DEDENT> def initUi(self): <NEW_LINE> <INDENT> self.monitor = ChainVolatilityMonitor(self.chain) <NEW_LINE> buttonCallIncrease = QtWidgets.QPushButton(u'看涨+' + ('%.1f%%' %(self.IMPV_CHANGE_STEP*100))) <NEW_LINE> buttonCallDecrease = QtWidgets.QPushButton(u'看涨-' + ('%.1f%%' %(self.IMPV_CHANGE_STEP*100))) <NEW_LINE> buttonPutIncrease = QtWidgets.QPushButton(u'看跌+' + ('%.1f%%' %(self.IMPV_CHANGE_STEP*100))) <NEW_LINE> buttonPutDecrease = QtWidgets.QPushButton(u'看跌-' + ('%.1f%%' %(self.IMPV_CHANGE_STEP*100))) <NEW_LINE> buttonCallReset = QtWidgets.QPushButton(u'看涨重置') <NEW_LINE> buttonPutReset = QtWidgets.QPushButton(u'看跌重置') <NEW_LINE> buttonRefresh = QtWidgets.QPushButton(u'刷新') <NEW_LINE> buttonCallIncrease.clicked.connect(self.callIncrease) <NEW_LINE> buttonCallDecrease.clicked.connect(self.callDecrease) <NEW_LINE> buttonPutIncrease.clicked.connect(self.putIncrease) <NEW_LINE> buttonPutDecrease.clicked.connect(self.putDecrease) <NEW_LINE> buttonCallReset.clicked.connect(self.callReset) <NEW_LINE> buttonPutReset.clicked.connect(self.putReset) <NEW_LINE> buttonRefresh.clicked.connect(self.monitor.refresh) <NEW_LINE> hbox = QtWidgets.QHBoxLayout() <NEW_LINE> hbox.addStretch() <NEW_LINE> hbox.addWidget(buttonCallIncrease) <NEW_LINE> hbox.addWidget(buttonCallReset) <NEW_LINE> hbox.addWidget(buttonCallDecrease) <NEW_LINE> hbox.addStretch() <NEW_LINE> hbox.addWidget(buttonRefresh) <NEW_LINE> hbox.addStretch() <NEW_LINE> hbox.addWidget(buttonPutIncrease) <NEW_LINE> hbox.addWidget(buttonPutReset) <NEW_LINE> hbox.addWidget(buttonPutDecrease) <NEW_LINE> hbox.addStretch() <NEW_LINE> vbox = QtWidgets.QVBoxLayout() <NEW_LINE> vbox.addLayout(hbox) <NEW_LINE> vbox.addWidget(self.monitor) <NEW_LINE> self.setLayout(vbox) <NEW_LINE> <DEDENT> def callIncrease(self): <NEW_LINE> <INDENT> for option in self.chain.callDict.values(): <NEW_LINE> <INDENT> option.pricingImpv += self.IMPV_CHANGE_STEP <NEW_LINE> <DEDENT> self.monitor.refresh() <NEW_LINE> <DEDENT> def callDecrease(self): <NEW_LINE> <INDENT> for option in self.chain.callDict.values(): <NEW_LINE> <INDENT> option.pricingImpv -= self.IMPV_CHANGE_STEP <NEW_LINE> <DEDENT> self.monitor.refresh() <NEW_LINE> <DEDENT> def callReset(self): <NEW_LINE> <INDENT> for option in self.chain.callDict.values(): <NEW_LINE> <INDENT> option.pricingImpv = option.midImpv <NEW_LINE> <DEDENT> self.monitor.refresh() <NEW_LINE> <DEDENT> def putIncrease(self): <NEW_LINE> <INDENT> for option in self.chain.putDict.values(): <NEW_LINE> <INDENT> option.pricingImpv += self.IMPV_CHANGE_STEP <NEW_LINE> <DEDENT> self.monitor.refresh() <NEW_LINE> <DEDENT> def putDecrease(self): <NEW_LINE> <INDENT> for option in self.chain.putDict.values(): <NEW_LINE> <INDENT> option.pricingImpv -= self.IMPV_CHANGE_STEP <NEW_LINE> <DEDENT> self.monitor.refresh() <NEW_LINE> <DEDENT> def putReset(self): <NEW_LINE> <INDENT> for option in self.chain.putDict.values(): <NEW_LINE> <INDENT> option.pricingImpv = option.midImpv <NEW_LINE> <DEDENT> self.monitor.refresh()
Option chain volatility management.
625990863617ad0b5ee07ccb
class Constraint(object): <NEW_LINE> <INDENT> def __init__(self, i=0, j=0, distribution=""): <NEW_LINE> <INDENT> self.distribution = distribution <NEW_LINE> self.sampled_duration = 0 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> to_print = "" <NEW_LINE> to_print += self.distribution <NEW_LINE> return to_print <NEW_LINE> <DEDENT> def dtype(self): <NEW_LINE> <INDENT> if self.distribution is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self.distribution[0] == "U": <NEW_LINE> <INDENT> return "uniform" <NEW_LINE> <DEDENT> elif self.distribution[0] == "N": <NEW_LINE> <INDENT> return "gaussian" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "unknown" <NEW_LINE> <DEDENT> <DEDENT> def resample(self, random_state): <NEW_LINE> <INDENT> sample = None <NEW_LINE> if self.distribution[0] == "N": <NEW_LINE> <INDENT> sample = norm_sample(self.mu, self.sigma, random_state) <NEW_LINE> <DEDENT> elif self.distribution[0] == "U": <NEW_LINE> <INDENT> sample = uniform_sample(self.dist_lb, self.dist_ub, random_state) <NEW_LINE> <DEDENT> self.sampled_duration = round(sample) <NEW_LINE> return self.sampled_duration <NEW_LINE> <DEDENT> @property <NEW_LINE> def mu(self): <NEW_LINE> <INDENT> name_split = self.distribution.split("_") <NEW_LINE> if len(name_split) != 3 or name_split[0] != "N": <NEW_LINE> <INDENT> raise ValueError("No mu for non-normal dist") <NEW_LINE> <DEDENT> return float(name_split[1]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def sigma(self): <NEW_LINE> <INDENT> name_split = self.distribution.split("_") <NEW_LINE> if len(name_split) != 3 or name_split[0] != "N": <NEW_LINE> <INDENT> raise ValueError("No sigma for non-normal dist") <NEW_LINE> <DEDENT> return float(name_split[2]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def dist_ub(self): <NEW_LINE> <INDENT> name_split = self.distribution.split("_") <NEW_LINE> if len(name_split) != 3 or name_split[0] != "U": <NEW_LINE> <INDENT> raise ValueError("No upper bound for non-uniform dist") <NEW_LINE> <DEDENT> return float(name_split[2]) * 1000 <NEW_LINE> <DEDENT> @property <NEW_LINE> def dist_lb(self): <NEW_LINE> <INDENT> name_split = self.distribution.split("_") <NEW_LINE> if len(name_split) != 3 or name_split[0] != "U": <NEW_LINE> <INDENT> raise ValueError("No lower bound for non-uniform dist") <NEW_LINE> <DEDENT> return float(name_split[1]) * 1000
Represents a contingent constraint between two nodes in the PSTN i: starting node j: ending node The duration between i and j is represented as a contingent constraint with a probability distribution i ------> j
62599086ec188e330fdfa426
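
The distribution above is encoded in a string: "N_<mu>_<sigma>" for a Gaussian and "U_<lb>_<ub>" for a uniform, whose bounds are multiplied by 1000 by the properties. A small sketch exercising only the parsing logic shown in the record; resample is omitted because it depends on the norm_sample/uniform_sample helpers that are not part of the record:

c = Constraint(distribution="N_5_1")
c.dtype()              # 'gaussian'
c.mu, c.sigma          # (5.0, 1.0)

u = Constraint(distribution="U_2_4")
u.dtype()              # 'uniform'
u.dist_lb, u.dist_ub   # (2000.0, 4000.0) -- the properties scale the parsed bounds by 1000
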
class BaseLeapTest(unittest.TestCase): <NEW_LINE> <INDENT> __name__ = "leap_test" <NEW_LINE> _system = platform.system() <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.setUpEnv() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> cls.tearDownEnv() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def setUpEnv(cls): <NEW_LINE> <INDENT> cls.old_path = os.environ['PATH'] <NEW_LINE> cls.old_home = os.environ['HOME'] <NEW_LINE> cls.old_xdg_config = None <NEW_LINE> if "XDG_CONFIG_HOME" in os.environ: <NEW_LINE> <INDENT> cls.old_xdg_config = os.environ["XDG_CONFIG_HOME"] <NEW_LINE> <DEDENT> cls.tempdir = tempfile.mkdtemp(prefix="leap_tests-") <NEW_LINE> cls.home = cls.tempdir <NEW_LINE> bin_tdir = os.path.join( cls.tempdir, 'bin') <NEW_LINE> os.environ["PATH"] = bin_tdir <NEW_LINE> os.environ["HOME"] = cls.tempdir <NEW_LINE> os.environ["XDG_CONFIG_HOME"] = os.path.join(cls.tempdir, ".config") <NEW_LINE> cls._init_events() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _init_events(cls): <NEW_LINE> <INDENT> cls._server = events_server.ensure_server( emit_addr="tcp://127.0.0.1:0", reg_addr="tcp://127.0.0.1:0") <NEW_LINE> events_client.configure_client( emit_addr="tcp://127.0.0.1:%d" % cls._server.pull_port, reg_addr="tcp://127.0.0.1:%d" % cls._server.pub_port) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownEnv(cls): <NEW_LINE> <INDENT> events_client.shutdown() <NEW_LINE> cls._server.shutdown() <NEW_LINE> os.environ["PATH"] = cls.old_path <NEW_LINE> os.environ["HOME"] = cls.old_home <NEW_LINE> if cls.old_xdg_config is not None: <NEW_LINE> <INDENT> os.environ["XDG_CONFIG_HOME"] = cls.old_xdg_config <NEW_LINE> <DEDENT> leap_assert( cls.tempdir.startswith('/tmp/leap_tests-') or (cls.tempdir.startswith('/tmp/') and cls.tempdir.startswith(tempfile.gettempdir()) and 'leap_tests-' in cls.tempdir) or cls.tempdir.startswith('/var/folder'), "beware! tried to remove a dir which does not " "live in temporal folder!") <NEW_LINE> shutil.rmtree(cls.tempdir) <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> raise NotImplementedError("abstract base class") <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> raise NotImplementedError("abstract base class") <NEW_LINE> <DEDENT> def _missing_test_for_plat(self, do_raise=False): <NEW_LINE> <INDENT> if do_raise: <NEW_LINE> <INDENT> raise NotImplementedError( "This test is not implemented " "for the running platform: %s" % self._system) <NEW_LINE> <DEDENT> <DEDENT> def get_tempfile(self, filename): <NEW_LINE> <INDENT> return os.path.join(self.tempdir, filename) <NEW_LINE> <DEDENT> def touch(self, filepath): <NEW_LINE> <INDENT> folder, filename = os.path.split(filepath) <NEW_LINE> if not os.path.isdir(folder): <NEW_LINE> <INDENT> mkdir_p(folder) <NEW_LINE> <DEDENT> self.assertTrue(os.path.isdir(folder)) <NEW_LINE> with open(filepath, 'w') as fp: <NEW_LINE> <INDENT> fp.write(' ') <NEW_LINE> <DEDENT> self.assertTrue(os.path.isfile(filepath)) <NEW_LINE> <DEDENT> def chmod600(self, filepath): <NEW_LINE> <INDENT> check_and_fix_urw_only(filepath)
Base Leap TestCase
625990864a966d76dd5f0a60
class DaemonCommands(object): <NEW_LINE> <INDENT> namespace = 'daemon' <NEW_LINE> @staticmethod <NEW_LINE> def start(): <NEW_LINE> <INDENT> started = daemonizer.start() <NEW_LINE> if not started: <NEW_LINE> <INDENT> return 'daemon already started' <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def stop(): <NEW_LINE> <INDENT> stopped = daemonizer.stop() <NEW_LINE> if stopped: <NEW_LINE> <INDENT> return 'daemon stopped' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'daemon not running' <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def restart(): <NEW_LINE> <INDENT> daemonizer.restart() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def status(): <NEW_LINE> <INDENT> status, pid = daemonizer.status() <NEW_LINE> if status == daemonizer.STATUS_STOPPED: <NEW_LINE> <INDENT> return 'daemon is (probably) stopped' <NEW_LINE> <DEDENT> elif status == daemonizer.STATUS_CORRUPTED: <NEW_LINE> <INDENT> if pid: <NEW_LINE> <INDENT> return 'daemon pidfile exists but process does not seem ' 'to be running (pid: {0}). you should probably clean ' 'the files in {1} and manually check if there' ' is a daemon running somewhere' .format(pid, config.pysource_dir) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'daemon seems to be in an unstable state. manually ' 'remove the files in {0} and kill leftover daemon ' 'processes (if there are any)' .format(config.pysource_dir) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return 'daemon is (probably) running (pid: {0})'.format(pid) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def commands(cls): <NEW_LINE> <INDENT> return [cls.start, cls.stop, cls.restart, cls.status]
Daemon related commands
625990865fdd1c0f98e5faf5
class PackageLookupError(Exception): <NEW_LINE> <INDENT> def __init__(self, product): <NEW_LINE> <INDENT> message = u'Package lookup failed ' u'for product "{0}"'.format(product.title) <NEW_LINE> Exception.__init__(self, message) <NEW_LINE> self.product = product
Exception for package not found in `data_map.yaml`
625990865fc7496912d49028
class DelNodes(Base): <NEW_LINE> <INDENT> __tablename__ = "delnodes" <NEW_LINE> taxid = Column(String, primary_key=True)
Deleted nodes. Not sure if I want to support updating the database or if I should just force reconstruction. tax_id -- deleted node id
62599086dc8b845886d55135
class DefaultValueParameterWidget(GenericParameterWidget): <NEW_LINE> <INDENT> def __init__(self, parameter, parent=None): <NEW_LINE> <INDENT> super(DefaultValueParameterWidget, self).__init__(parameter, parent) <NEW_LINE> self.radio_button_layout = QHBoxLayout() <NEW_LINE> self.input_button_group = QButtonGroup() <NEW_LINE> for i in range(len(self._parameter.labels)): <NEW_LINE> <INDENT> if '%s' in self._parameter.labels[i]: <NEW_LINE> <INDENT> label = ( self._parameter.labels[i] % self._parameter.options[i]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> label = self._parameter.labels[i] <NEW_LINE> <DEDENT> radio_button = QRadioButton(label) <NEW_LINE> self.radio_button_layout.addWidget(radio_button) <NEW_LINE> self.input_button_group.addButton(radio_button, i) <NEW_LINE> if self._parameter.value == self._parameter.options[i]: <NEW_LINE> <INDENT> radio_button.setChecked(True) <NEW_LINE> <DEDENT> <DEDENT> self.custom_value = QDoubleSpinBox() <NEW_LINE> self.custom_value.setSingleStep(0.1) <NEW_LINE> if self._parameter.options[-1]: <NEW_LINE> <INDENT> self.custom_value.setValue(self._parameter.options[-1]) <NEW_LINE> <DEDENT> self.radio_button_layout.addWidget(self.custom_value) <NEW_LINE> self.toggle_custom_value() <NEW_LINE> self.inner_input_layout.addLayout(self.radio_button_layout) <NEW_LINE> self.input_button_group.buttonClicked.connect( self.toggle_custom_value) <NEW_LINE> <DEDENT> def raise_invalid_type_exception(self): <NEW_LINE> <INDENT> message = 'Expecting element type of %s' % ( self._parameter.element_type.__name__) <NEW_LINE> err = ValueError(message) <NEW_LINE> return err <NEW_LINE> <DEDENT> def get_parameter(self): <NEW_LINE> <INDENT> radio_button_checked_id = self.input_button_group.checkedId() <NEW_LINE> if radio_button_checked_id == -1: <NEW_LINE> <INDENT> self._parameter.value = None <NEW_LINE> <DEDENT> elif radio_button_checked_id == len(self._parameter.options) - 1: <NEW_LINE> <INDENT> self._parameter.options[radio_button_checked_id] = self.custom_value.value() <NEW_LINE> self._parameter.value = self.custom_value.value() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._parameter.value = self._parameter.options[ radio_button_checked_id] <NEW_LINE> <DEDENT> return self._parameter <NEW_LINE> <DEDENT> def set_value(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value_index = self._parameter.options.index(value) <NEW_LINE> self.input_button_group.button(value_index).setChecked(True) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> last_index = len(self._parameter.options) - 1 <NEW_LINE> self.input_button_group.button(last_index).setChecked( True) <NEW_LINE> self.custom_value.setValue(value) <NEW_LINE> <DEDENT> self.toggle_custom_value() <NEW_LINE> <DEDENT> def toggle_custom_value(self): <NEW_LINE> <INDENT> radio_button_checked_id = self.input_button_group.checkedId() <NEW_LINE> if (radio_button_checked_id == len(self._parameter.options) - 1): <NEW_LINE> <INDENT> self.custom_value.setDisabled(False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.custom_value.setDisabled(True)
Widget class for Default Value Parameter.
62599086a8370b77170f1f48
class CornersProblem(search.SearchProblem): <NEW_LINE> <INDENT> def __init__(self, startingGameState): <NEW_LINE> <INDENT> self.walls = startingGameState.getWalls() <NEW_LINE> self.startingPosition = startingGameState.getPacmanPosition() <NEW_LINE> top, right = self.walls.height-2, self.walls.width-2 <NEW_LINE> self.corners = ((1,1), (1,top), (right, 1), (right, top)) <NEW_LINE> for corner in self.corners: <NEW_LINE> <INDENT> if not startingGameState.hasFood(*corner): <NEW_LINE> <INDENT> print('Warning: no food in corner ' + str(corner)) <NEW_LINE> <DEDENT> <DEDENT> self._expanded = 0 <NEW_LINE> self.goalStates = { hash(self.corners[0]): False, hash(self.corners[1]): False, hash(self.corners[2]): False, hash(self.corners[3]): False } <NEW_LINE> self.costFn = lambda x: 1 <NEW_LINE> <DEDENT> def getStartState(self): <NEW_LINE> <INDENT> state = (self.startingPosition[0], self.startingPosition[1], self.goalStates[hash(self.corners[0])], self.goalStates[hash(self.corners[1])], self.goalStates[hash(self.corners[2])], self.goalStates[hash(self.corners[3])]) <NEW_LINE> return state <NEW_LINE> <DEDENT> def isGoalState(self, state): <NEW_LINE> <INDENT> Goal = all(state[2:]) <NEW_LINE> return Goal <NEW_LINE> <DEDENT> def getSuccessors(self, state): <NEW_LINE> <INDENT> successors = [] <NEW_LINE> for action in [Directions.NORTH, Directions.SOUTH, Directions.EAST, Directions.WEST]: <NEW_LINE> <INDENT> x,y = state[0:2] <NEW_LINE> dx, dy = Actions.directionToVector(action) <NEW_LINE> nextx, nexty = int(x + dx), int(y + dy) <NEW_LINE> hitsWall = self.walls[nextx][nexty] <NEW_LINE> if not hitsWall: <NEW_LINE> <INDENT> nextPos = (nextx, nexty) <NEW_LINE> print("Position", nextPos) <NEW_LINE> print(self.corners) <NEW_LINE> if nextPos in self.corners: <NEW_LINE> <INDENT> print("Corner", nextPos) <NEW_LINE> self.goalStates[hash(nextPos)] = True <NEW_LINE> <DEDENT> nextState = (nextx, nexty, self.goalStates[hash(self.corners[0])], self.goalStates[hash(self.corners[1])], self.goalStates[hash(self.corners[2])], self.goalStates[hash(self.corners[3])] ) <NEW_LINE> cost = self.costFn(nextState[0:2]) <NEW_LINE> successors.append( ( nextState, action, cost) ) <NEW_LINE> <DEDENT> <DEDENT> self._expanded += 1 <NEW_LINE> return successors <NEW_LINE> <DEDENT> def getCostOfActions(self, actions): <NEW_LINE> <INDENT> if actions == None: return 999999 <NEW_LINE> x,y= self.startingPosition <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> dx, dy = Actions.directionToVector(action) <NEW_LINE> x, y = int(x + dx), int(y + dy) <NEW_LINE> if self.walls[x][y]: return 999999 <NEW_LINE> <DEDENT> return len(actions)
This search problem finds paths through all four corners of a layout. You must select a suitable state space and successor function
62599086ad47b63b2c5a93ce
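
The state used above is a 6-tuple: Pacman's (x, y) position followed by one visited flag per corner, and isGoalState simply checks that every flag is true. A tiny illustration of that encoding, independent of the game state (which is why no startingGameState is constructed here):

state = (3, 7, True, True, True, False)   # at (3, 7); the fourth corner is still unvisited
all(state[2:])                            # False -> not a goal state yet

state = (1, 1, True, True, True, True)
all(state[2:])                            # True -> all four corners visited

Note that getSuccessors in the record updates the shared self.goalStates dict rather than deriving the corner flags from the state argument, so the flags effectively track global exploration progress rather than per-path progress.
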
class zstack_kvm_image_file_checker(checker_header.TestChecker): <NEW_LINE> <INDENT> def check(self): <NEW_LINE> <INDENT> super(zstack_kvm_image_file_checker, self).check() <NEW_LINE> image = self.test_obj.image <NEW_LINE> backupStorages = image.backupStorageRefs <NEW_LINE> image_url = backupStorages[0].installPath <NEW_LINE> host = test_lib.lib_get_backup_storage_host(image.backupStorageUuid) <NEW_LINE> self.judge(test_lib.lib_check_file_exist(host, image_url))
Check kvm image file existence. If it is in backup storage, return self.judge(True). If not, return self.judge(False)
62599086e1aae11d1e7cf5d1
class RechargeMemberThirdPayRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TranNetMemberCode = None <NEW_LINE> self.MemberFillAmt = None <NEW_LINE> self.Commission = None <NEW_LINE> self.Ccy = None <NEW_LINE> self.PayChannelType = None <NEW_LINE> self.PayChannelAssignMerNo = None <NEW_LINE> self.PayChannelTranSeqNo = None <NEW_LINE> self.EjzbOrderNo = None <NEW_LINE> self.MrchCode = None <NEW_LINE> self.EjzbOrderContent = None <NEW_LINE> self.Remark = None <NEW_LINE> self.ReservedMsgOne = None <NEW_LINE> self.ReservedMsgTwo = None <NEW_LINE> self.ReservedMsgThree = None <NEW_LINE> self.Profile = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TranNetMemberCode = params.get("TranNetMemberCode") <NEW_LINE> self.MemberFillAmt = params.get("MemberFillAmt") <NEW_LINE> self.Commission = params.get("Commission") <NEW_LINE> self.Ccy = params.get("Ccy") <NEW_LINE> self.PayChannelType = params.get("PayChannelType") <NEW_LINE> self.PayChannelAssignMerNo = params.get("PayChannelAssignMerNo") <NEW_LINE> self.PayChannelTranSeqNo = params.get("PayChannelTranSeqNo") <NEW_LINE> self.EjzbOrderNo = params.get("EjzbOrderNo") <NEW_LINE> self.MrchCode = params.get("MrchCode") <NEW_LINE> self.EjzbOrderContent = params.get("EjzbOrderContent") <NEW_LINE> self.Remark = params.get("Remark") <NEW_LINE> self.ReservedMsgOne = params.get("ReservedMsgOne") <NEW_LINE> self.ReservedMsgTwo = params.get("ReservedMsgTwo") <NEW_LINE> self.ReservedMsgThree = params.get("ReservedMsgThree") <NEW_LINE> self.Profile = params.get("Profile") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
RechargeMemberThirdPay request parameter structure.
62599086656771135c48adee
class _NullContextManager(ContextManager[None]): <NEW_LINE> <INDENT> def __exit__( self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType], ) -> None: <NEW_LINE> <INDENT> pass
A context manager which does nothing.
62599086adb09d7d5dc0c0d6
class VersionLib(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> @cache.cache() <NEW_LINE> def get_version_by_id(id): <NEW_LINE> <INDENT> m = tools.mysql_conn('r') <NEW_LINE> m.Q("SELECT id, version, os_type, ctime, what_news, update_is_recommend, update_is_force, app_id, dl_url, channel, status FROM o_version WHERE id = %s;", (id, )) <NEW_LINE> res = m.fetch_one() <NEW_LINE> return res <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_version_id(os_type, app_version, uid_ext): <NEW_LINE> <INDENT> version_list = VersionLib.get_version_list(os_type) <NEW_LINE> for item in version_list: <NEW_LINE> <INDENT> if VersionLib.version_compare(app_version, item['version']) < 0 and (item['rate'] == '[]' or uid_ext in json.loads(item['rate'])): <NEW_LINE> <INDENT> return item['id'] <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @cache.cache() <NEW_LINE> def get_version_list(os_type): <NEW_LINE> <INDENT> m = tools.mysql_conn('r') <NEW_LINE> m.Q("SELECT id, version, os_type, ctime, what_news, update_is_recommend, update_is_force, app_id, dl_url, channel, status, rate FROM o_version WHERE os_type = %s ORDER BY ctime LIMIT 10;", (os_type, )) <NEW_LINE> res = m.fetch_all() <NEW_LINE> return res <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @cache.cache() <NEW_LINE> def get_update_is_force(os_type, app_version): <NEW_LINE> <INDENT> m = tools.mysql_conn('r') <NEW_LINE> m.Q("SELECT update_is_force FROM o_version WHERE os_type = %s and version = %s;", (os_type, app_version)) <NEW_LINE> res = m.fetch_one() <NEW_LINE> return True if (res and res['update_is_force']) else False <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_version(os_type, app_version, uid_ext = ''): <NEW_LINE> <INDENT> version_id = VersionLib.get_version_id(os_type, app_version, uid_ext) <NEW_LINE> if version_id: <NEW_LINE> <INDENT> version = VersionLib.get_version_by_id(version_id) <NEW_LINE> version['update_is_force'] = VersionLib.get_update_is_force(os_type, app_version) <NEW_LINE> return version <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def version_compare(version_1, version_2): <NEW_LINE> <INDENT> version_1 = VersionLib.get_version_value(version_1) <NEW_LINE> version_2 = VersionLib.get_version_value(version_2) <NEW_LINE> if version_1 and version_2 and len(version_1) == len(version_2): <NEW_LINE> <INDENT> for ii in range(len(version_1)): <NEW_LINE> <INDENT> if version_1[ii] > version_2[ii]: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> elif version_1[ii] < version_2[ii]: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def get_version_value(version): <NEW_LINE> <INDENT> if isinstance(version, str): <NEW_LINE> <INDENT> res = [int(x) for x in version.split('.')] <NEW_LINE> for ii in range(4): <NEW_LINE> <INDENT> if len(res) - 1 < ii: <NEW_LINE> <INDENT> res.append(0) <NEW_LINE> <DEDENT> <DEDENT> return res <NEW_LINE> <DEDENT> return None
Helpers for fetching app version records and comparing dotted version strings.
625990863617ad0b5ee07ccd
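
version_compare and get_version_value above give a purely numeric comparison by padding each dotted version string to four components, so they can be exercised without a database connection:

VersionLib.version_compare("1.2", "1.2.0")    # 0  -- both pad to [1, 2, 0, 0]
VersionLib.version_compare("1.10", "1.9.3")   # 1  -- numeric, not lexicographic
VersionLib.version_compare("2.0", "2.0.1")    # -1

If the two versions parse to lists of different lengths (for example, one of them has more than four components), the method falls through its length check and returns None.
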
class EquipmentDimmEntry(ManagedObject): <NEW_LINE> <INDENT> consts = EquipmentDimmEntryConsts() <NEW_LINE> naming_props = set([u'id']) <NEW_LINE> mo_meta = MoMeta("EquipmentDimmEntry", "equipmentDimmEntry", "dimm-entry[id]", VersionMeta.Version202m, "InputOutput", 0x3f, [], [""], [u'equipmentDimmMapping'], [], ["Get"]) <NEW_LINE> prop_meta = { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version202m, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version202m, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []), "id": MoPropertyMeta("id", "id", "ushort", VersionMeta.Version202m, MoPropertyMeta.NAMING, 0x8, None, None, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version202m, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []), "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []), "smbiosname": MoPropertyMeta("smbiosname", "smbiosname", "string", VersionMeta.Version202m, MoPropertyMeta.READ_ONLY, None, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version202m, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), } <NEW_LINE> prop_map = { "childAction": "child_action", "dn": "dn", "id": "id", "rn": "rn", "sacl": "sacl", "smbiosname": "smbiosname", "status": "status", } <NEW_LINE> def __init__(self, parent_mo_or_dn, id, **kwargs): <NEW_LINE> <INDENT> self._dirty_mask = 0 <NEW_LINE> self.id = id <NEW_LINE> self.child_action = None <NEW_LINE> self.sacl = None <NEW_LINE> self.smbiosname = None <NEW_LINE> self.status = None <NEW_LINE> ManagedObject.__init__(self, "EquipmentDimmEntry", parent_mo_or_dn, **kwargs)
This is the EquipmentDimmEntry class.
62599086ec188e330fdfa428
class Canto(base.ThreadPoolText): <NEW_LINE> <INDENT> defaults = [ ("fetch", False, "Whether to fetch new items on update"), ("feeds", [], "List of feeds to display, empty for all"), ("one_format", "{name}: {number}", "One feed display format"), ("all_format", "{number}", "All feeds display format"), ] <NEW_LINE> def __init__(self, **config): <NEW_LINE> <INDENT> base.ThreadPoolText.__init__(self, "", **config) <NEW_LINE> self.add_defaults(Canto.defaults) <NEW_LINE> <DEDENT> def poll(self): <NEW_LINE> <INDENT> if not self.feeds: <NEW_LINE> <INDENT> arg = "-a" <NEW_LINE> if self.fetch: <NEW_LINE> <INDENT> arg += "u" <NEW_LINE> <DEDENT> output = self.all_format.format(number=self.call_process(["canto", arg])[:-1]) <NEW_LINE> return output <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.fetch: <NEW_LINE> <INDENT> call(["canto", "-u"]) <NEW_LINE> <DEDENT> return "".join( [ self.one_format.format( name=feed, number=self.call_process(["canto", "-n", feed])[:-1] ) for feed in self.feeds ] )
Display RSS feed updates using the canto console reader
625990867c178a314d78e9a8
class EncryptionEnabledError(Exception): <NEW_LINE> <INDENT> pass
Raised when high security encryption is enabled
62599086d8ef3951e32c8c1c
class OrderedSampler(Sampler): <NEW_LINE> <INDENT> def __init__( self, dataset: Dataset, batch_size: int, drop_last_batch: bool = True, events: List[SamplerEvent] = None, transformations: Callable[[Dict[str, Any]], Dict[str, Any]] = None ): <NEW_LINE> <INDENT> super().__init__(dataset, batch_size, 0, drop_last_batch, events, transformations) <NEW_LINE> self.batch_idx = 0 <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> for event in self.events: event.before_sampling(self, self.batch_size) <NEW_LINE> if (self.drop_last_batch and self.batch_idx + self.batch_size > len(self.dataset)): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> if self.batch_idx >= len(self.dataset): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> batch = self.dataset[self.batch_idx: min(self.batch_idx + self.batch_size, len(self.dataset))] <NEW_LINE> batch = self._transform(batch) <NEW_LINE> self.batch_idx += self.batch_size <NEW_LINE> for event in self.events: event.after_sampling(self, batch) <NEW_LINE> return batch <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> super().reset() <NEW_LINE> self.batch_idx = 0
The OrderedSampler samples the dataset in a sequential order.
6259908663b5f9789fe86ce6
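
The batching walk performed by __next__ above is plain sequential slicing. As an illustration of the drop_last_batch check only (the real class also needs the Sampler base class, a Dataset, and any registered events, none of which appear in the record):

data = list(range(10))
batch_size = 3
# drop_last_batch=True stops as soon as batch_idx + batch_size would overrun the dataset
batches = [data[i:i + batch_size] for i in range(0, len(data) - batch_size + 1, batch_size)]
# [[0, 1, 2], [3, 4, 5], [6, 7, 8]] -- the trailing partial batch [9] is dropped
# with drop_last_batch=False the final batch would be data[9:10] == [9]
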
class PublishExtension(object): <NEW_LINE> <INDENT> def __init__(self, scheduler): <NEW_LINE> <INDENT> self.scheduler = scheduler <NEW_LINE> self.datasets = dict() <NEW_LINE> handlers = { "publish_list": self.list, "publish_put": self.put, "publish_get": self.get, "publish_delete": self.delete, } <NEW_LINE> self.scheduler.handlers.update(handlers) <NEW_LINE> self.scheduler.extensions["publish"] = self <NEW_LINE> <DEDENT> def put(self, stream=None, keys=None, data=None, name=None, client=None): <NEW_LINE> <INDENT> with log_errors(): <NEW_LINE> <INDENT> if name in self.datasets: <NEW_LINE> <INDENT> raise KeyError("Dataset %s already exists" % name) <NEW_LINE> <DEDENT> self.scheduler.client_desires_keys(keys, "published-%s" % tokey(name)) <NEW_LINE> self.datasets[name] = {"data": data, "keys": keys} <NEW_LINE> return {"status": "OK", "name": name} <NEW_LINE> <DEDENT> <DEDENT> def delete(self, stream=None, name=None): <NEW_LINE> <INDENT> with log_errors(): <NEW_LINE> <INDENT> out = self.datasets.pop(name, {"keys": []}) <NEW_LINE> self.scheduler.client_releases_keys( out["keys"], "published-%s" % tokey(name) ) <NEW_LINE> <DEDENT> <DEDENT> def list(self, *args): <NEW_LINE> <INDENT> with log_errors(): <NEW_LINE> <INDENT> return list(sorted(self.datasets.keys(), key=str)) <NEW_LINE> <DEDENT> <DEDENT> def get(self, stream, name=None, client=None): <NEW_LINE> <INDENT> with log_errors(): <NEW_LINE> <INDENT> return self.datasets.get(name, None)
An extension for the scheduler to manage collections * publish-list * publish-put * publish-get * publish-delete
62599086bf627c535bcb3051
class MultiplierDrawing(Drawing): <NEW_LINE> <INDENT> def __init__(self, **kargs): <NEW_LINE> <INDENT> super().__init__(**kargs) <NEW_LINE> self._settings.update({ 'strokes' : 3 }) <NEW_LINE> self.apply_settings(**kargs) <NEW_LINE> <DEDENT> def draw(self, display): <NEW_LINE> <INDENT> for _ in range(self._strokes): <NEW_LINE> <INDENT> super().draw(display)
Draws multiple times, like making multiple 'brushstrokes'.
625990864c3428357761be38
class FruitFlyMutation(FruitFly): <NEW_LINE> <INDENT> def __init__(self, genome, parent, start = 0, goal = 0): <NEW_LINE> <INDENT> super(FruitFlyMutation, self).__init__(genome, parent, start, goal) <NEW_LINE> self.generation = self.distance_to_goal() <NEW_LINE> <DEDENT> def distance_to_goal(self): <NEW_LINE> <INDENT> generation = 0 <NEW_LINE> genome = [0] + copy.copy(self.genome) + [len(self.genome) + 1] <NEW_LINE> for i in range(1, len(self.genome) + 2): <NEW_LINE> <INDENT> if not (genome[i - 1] == genome[i] - 1 or genome[i - 1] == genome[i] + 1): <NEW_LINE> <INDENT> generation += 1 <NEW_LINE> <DEDENT> <DEDENT> return generation * 5 + self.moved_genes <NEW_LINE> <DEDENT> def create_children(self): <NEW_LINE> <INDENT> if not self.children: <NEW_LINE> <INDENT> for i in range(2, len(self.genome) + 1): <NEW_LINE> <INDENT> for j in xrange(len(self.goal) - i + 1): <NEW_LINE> <INDENT> genome = copy.copy(self.genome) <NEW_LINE> mutated_genome = mutation(i, j, genome) <NEW_LINE> child = FruitFlyMutation(mutated_genome, self) <NEW_LINE> self.children.append(child)
A next generation of a fruit fly, with a mutated genome.
625990868a349b6b43687ddc
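
distance_to_goal above frames the genome with sentinels 0 and len(genome)+1 and counts broken adjacencies (neighbours that do not differ by exactly 1), weighting each break by 5 before adding moved_genes from the parent class. The counting step on its own, restated as a stand-alone helper for clarity:

def count_breakpoints(genome):
    framed = [0] + list(genome) + [len(genome) + 1]
    # a breakpoint is any neighbouring pair that is not consecutive in either direction
    return sum(1 for a, b in zip(framed, framed[1:]) if abs(a - b) != 1)

count_breakpoints([3, 1, 2])   # 3 -> contributes 3 * 5 to distance_to_goal
count_breakpoints([1, 2, 3])   # 0 -> the sorted genome has no breakpoints
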
class GetAnswers(APIView): <NEW_LINE> <INDENT> def get(self, request, unique_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> question = Questionnaire.objects.get(unique_id=unique_id) <NEW_LINE> answers = question.answers <NEW_LINE> return JsonResponse({ 'answers': answers }) <NEW_LINE> <DEDENT> except Questionnaire.DoesNotExist: <NEW_LINE> <INDENT> msg = 'Object does not exist.' <NEW_LINE> raise serializers.ValidationError(msg, code='authorization')
API to get answers.
62599086fff4ab517ebcf395
class StoredUploadedFile(FieldFile): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> File.__init__(self, None, name) <NEW_LINE> self.field = self <NEW_LINE> <DEDENT> @property <NEW_LINE> def storage(self): <NEW_LINE> <INDENT> return get_storage() <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError('Static files are read-only') <NEW_LINE> <DEDENT> def delete(self, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError('Static files are read-only') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
A wrapper for uploaded files that is compatible to the FieldFile class, i.e. you can use instances of this class in templates just like you use the value of FileFields (e.g. `{{ my_file.url }}`)
6259908697e22403b383ca76
class BranchesItem(BoxLayout): <NEW_LINE> <INDENT> date = StringProperty("") <NEW_LINE> sha = StringProperty("") <NEW_LINE> name = StringProperty("") <NEW_LINE> commiter = StringProperty("") <NEW_LINE> subject = StringProperty("") <NEW_LINE> published = BooleanProperty(False) <NEW_LINE> republish = BooleanProperty(False) <NEW_LINE> merge = BooleanProperty(False) <NEW_LINE> def __del__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass
BranchesItem: on the branches screen, each element of the other-branches list uses this class for display.
625990863317a56b869bf303
class NewPostHandler(Handler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> if self.user: <NEW_LINE> <INDENT> self.render("post-form.html") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.redirect("/login") <NEW_LINE> <DEDENT> <DEDENT> def post(self): <NEW_LINE> <INDENT> if not self.user: <NEW_LINE> <INDENT> return self.redirect('/login') <NEW_LINE> <DEDENT> subject = self.request.get('subject') <NEW_LINE> content = self.request.get('content') <NEW_LINE> if subject and content: <NEW_LINE> <INDENT> post = Post(parent=Utils.blog_key(), subject=subject, content=content, user=self.user) <NEW_LINE> post.put() <NEW_LINE> return self.redirect('/blog/%s' % str(post.key().id())) <NEW_LINE> <DEDENT> elif not subject: <NEW_LINE> <INDENT> error = "Subject must be provided!" <NEW_LINE> self.render("post-form.html", subject=subject, content=content, error=error) <NEW_LINE> <DEDENT> elif not content: <NEW_LINE> <INDENT> error = "Content must be provided!" <NEW_LINE> self.render("post-form.html", subject=subject, content=content, error=error) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> error = "Subject and content must be provided!" <NEW_LINE> self.render("post-form.html", subject=subject, content=content, error=error)
This class represents request handler for the new post form. It checks whether the user is logged in or not. If the user is logged in, it takes form input values and create a new post.
625990864a966d76dd5f0a64
class NullDevice(object): <NEW_LINE> <INDENT> def write(self, x): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> pass
Class to capture any output to stdout from the function under test. It provides non-operative write and flush methods so that output from the run_setup function is swallowed instead of being printed to the console. The write method is what would normally print stdout to the console, while the flush method is simply a requirement for an object receiving stdout content.
62599086be7bc26dc9252c15
class OpenCommand(object): <NEW_LINE> <INDENT> def __init__(self, launcher): <NEW_LINE> <INDENT> self.launcher = launcher <NEW_LINE> self._isFetchingArgs = False <NEW_LINE> <DEDENT> def on_quasimode_start(self): <NEW_LINE> <INDENT> if self._isFetchingArgs: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._isFetchingArgs = True <NEW_LINE> for _ in self.launcher.update(): <NEW_LINE> <INDENT> yield <NEW_LINE> <DEDENT> self.valid_args = self.launcher.get_namespace() <NEW_LINE> self._isFetchingArgs = False <NEW_LINE> <DEDENT> valid_args = [] <NEW_LINE> def __call__(self, ensoapi, target=None): <NEW_LINE> <INDENT> if not target: <NEW_LINE> <INDENT> seldict = ensoapi.get_selection() <NEW_LINE> if seldict.get("files"): <NEW_LINE> <INDENT> for file in seldict["files"]: <NEW_LINE> <INDENT> subprocess.Popen( ["open", file] ) <NEW_LINE> <DEDENT> <DEDENT> elif seldict.get("text"): <NEW_LINE> <INDENT> filename = seldict["text"].strip() <NEW_LINE> if os.path.isabs(filename): <NEW_LINE> <INDENT> subprocess.Popen( ["open", filename] ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> webbrowser.open(filename) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.launcher.get_target(target).launch()
Opens an application, folder, or URL.
6259908623849d37ff852c39
class TestCorpcPreFwd(commontestsuite.CommonTestSuite): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.get_test_info() <NEW_LINE> log_mask = os.getenv("D_LOG_MASK", "INFO") <NEW_LINE> crt_phy_addr = os.getenv("CRT_PHY_ADDR_STR", "ofi+sockets") <NEW_LINE> ofi_interface = os.getenv("OFI_INTERFACE", "eth0") <NEW_LINE> ofi_share_addr = os.getenv("CRT_CTX_SHARE_ADDR", "0") <NEW_LINE> ofi_ctx_num = os.getenv("CRT_CTX_NUM", "0") <NEW_LINE> self.pass_env = ' -x D_LOG_MASK={!s} -x CRT_PHY_ADDR_STR={!s}' ' -x OFI_INTERFACE={!s} -x CRT_CTX_SHARE_ADDR={!s}' ' -x CRT_CTX_NUM={!s}'.format( log_mask, crt_phy_addr, ofi_interface, ofi_share_addr, ofi_ctx_num) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.logger.info("tearDown begin") <NEW_LINE> os.environ.pop("CRT_PHY_ADDR_STR", "") <NEW_LINE> os.environ.pop("OFI_INTERFACE", "") <NEW_LINE> os.environ.pop("D_LOG_MASK", "") <NEW_LINE> os.environ.pop("CRT_TEST_SERVER", "") <NEW_LINE> self.free_port() <NEW_LINE> self.logger.info("tearDown end\n") <NEW_LINE> <DEDENT> def test_corpc_version(self): <NEW_LINE> <INDENT> testmsg = self.shortDescription() <NEW_LINE> servers = self.get_server_list() <NEW_LINE> if servers: <NEW_LINE> <INDENT> all_servers = ','.join(servers) <NEW_LINE> hosts = ''.join([' -H ', all_servers]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hosts = ''.join([' -H ', gethostname().split('.')[0]]) <NEW_LINE> <DEDENT> (cmd, prefix) = self.add_prefix_logdir() <NEW_LINE> srv_args = 'tests/test_corpc_prefwd' + ' --name service_group --is_service ' <NEW_LINE> cmdstr = "{!s} {!s} -N 5 {!s} {!s} {!s}".format( cmd, hosts, self.pass_env, prefix, srv_args) <NEW_LINE> srv_rtn = self.execute_cmd(testmsg, cmdstr) <NEW_LINE> if srv_rtn: <NEW_LINE> <INDENT> self.fail("Corpc version test failed, return code %d" % srv_rtn) <NEW_LINE> <DEDENT> return srv_rtn
Execute corpc pre-fwd tests
62599086283ffb24f3cf541f
class Tfluiddb_service_objects_status(object): <NEW_LINE> <INDENT> def __init__(self, status=None, nTagValues=None,): <NEW_LINE> <INDENT> self.status = status <NEW_LINE> self.nTagValues = nTagValues <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and ( self.__dict__ == other.__dict__) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - status - nTagValues
62599086dc8b845886d55139
class PostProcessorBaseClass: <NEW_LINE> <INDENT> def __init__(self, spec: Any) -> None: <NEW_LINE> <INDENT> self._spec = spec <NEW_LINE> self._casedir = Path(spec.casedir) <NEW_LINE> self._fields: Dict[str, FieldBaseClass] = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def casedir(self) -> Path: <NEW_LINE> <INDENT> return self._casedir
Base class for post processors.
62599086099cdd3c636761ba
class Frame(wx.Frame): <NEW_LINE> <INDENT> def __init__(self, image, parent=None, id=-1, pos=wx.DefaultPosition, title='Hello, Python!'): <NEW_LINE> <INDENT> temp = image.ConvertToBitmap() <NEW_LINE> size = temp.GetWidth(), temp.GetHeight() <NEW_LINE> wx.Frame.__init__(self, parent, id, title, pos, size) <NEW_LINE> self.bmp = wx.StaticBitmap(parent=self, bitmap=temp)
Frame class that displays an image.
62599086283ffb24f3cf5420
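A minimal usage sketch for the Frame above, assuming a sample PNG at 'photo.png'; the file path and the wx.App boilerplate are illustrative, not from the original.

import wx

app = wx.App(False)
image = wx.Image('photo.png', wx.BITMAP_TYPE_PNG)  # assumed sample file
frame = Frame(image)                               # sizes itself to the bitmap
frame.Show()
app.MainLoop()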
class IDeprecatedManageAddDeleteDirective(Interface): <NEW_LINE> <INDENT> class_ = GlobalObject( title="Class", required=True)
Call manage_afterAdd & co for these contained content classes.
625990863617ad0b5ee07cd1
class Float(Field): <NEW_LINE> <INDENT> type = 'float' <NEW_LINE> _slots = { '_digits': None, 'group_operator': None, } <NEW_LINE> def __init__(self, string=None, digits=None, **kwargs): <NEW_LINE> <INDENT> super(Float, self).__init__(string=string, _digits=digits, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def digits(self): <NEW_LINE> <INDENT> if callable(self._digits): <NEW_LINE> <INDENT> with fields._get_cursor() as cr: <NEW_LINE> <INDENT> return self._digits(cr) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return self._digits <NEW_LINE> <DEDENT> <DEDENT> def _setup_digits(self, env): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _setup_regular(self, env): <NEW_LINE> <INDENT> super(Float, self)._setup_regular(env) <NEW_LINE> self._setup_digits(env) <NEW_LINE> <DEDENT> _related__digits = property(attrgetter('_digits')) <NEW_LINE> _related_group_operator = property(attrgetter('group_operator')) <NEW_LINE> _description_digits = property(attrgetter('digits')) <NEW_LINE> _column_digits = property(lambda self: not callable(self._digits) and self._digits) <NEW_LINE> _column_digits_compute = property(lambda self: callable(self._digits) and self._digits) <NEW_LINE> _column_group_operator = property(attrgetter('group_operator')) <NEW_LINE> def convert_to_cache(self, value, record, validate=True): <NEW_LINE> <INDENT> value = float(value or 0.0) <NEW_LINE> digits = self.digits <NEW_LINE> return float_round(value, precision_digits=digits[1]) if digits else value
The precision digits are given by the ``digits`` attribute. :param digits: a pair (total, decimal), or a function taking a database cursor and returning a pair (total, decimal)
62599086167d2b6e312b8357
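A hedged sketch of declaring such Float fields on a model, assuming an Odoo 8-style ``openerp`` import; the model and field names are placeholders, and the digits values simply follow the (total, decimal) convention from the docstring.

# Illustrative only: model and field names are assumptions.
from openerp import models, fields

class ProductMargin(models.Model):
    _name = 'x.product.margin'   # hypothetical model

    # fixed precision: 16 digits total, 2 after the decimal point
    price = fields.Float(string='Price', digits=(16, 2))

    # precision resolved at runtime from a function taking a database cursor
    weight = fields.Float(string='Weight', digits=lambda cr: (16, 4))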
class Device(Connect): <NEW_LINE> <INDENT> def auth(self): <NEW_LINE> <INDENT> payload = json.dumps({ "login": True }) <NEW_LINE> return json.loads(self.request("POST", "/v1/api/device/auth", payload)) <NEW_LINE> <DEDENT> def id(self): <NEW_LINE> <INDENT> return self.request("GET", "/v1/api/device/id") <NEW_LINE> <DEDENT> def hello(self): <NEW_LINE> <INDENT> return self.request("GET", "/v1/api/device/hello")
Revive API device calls: Provides methods: revive.device.auth() revive.device.id() revive.device.hello()
6259908660cbc95b06365b2c
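Since the Connect base class is not shown, a sketch of the equivalent raw HTTP calls the three methods make; the host is a placeholder, and only the paths and the {"login": true} payload come from the Device class above.

import json
import requests

base = "http://ads.example.com"   # hypothetical host

hello  = requests.get(base + "/v1/api/device/hello").text
dev_id = requests.get(base + "/v1/api/device/id").text
auth   = requests.post(base + "/v1/api/device/auth",
                       data=json.dumps({"login": True})).json()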
class dbOperate: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.gDbIP ="localhost" <NEW_LINE> self.gDbUser ="root" <NEW_LINE> self.gDbPass ="1234" <NEW_LINE> self.gDbName ="zj" <NEW_LINE> <DEDENT> def setDbConInfo(self,dbIP,dbUser,dbPass,dbName): <NEW_LINE> <INDENT> self.gDbIP =dbIP <NEW_LINE> self.gDbUser =dbUser <NEW_LINE> self.gDbPass =dbPass <NEW_LINE> self.gDbName =dbName <NEW_LINE> <DEDENT> def connectDb(self): <NEW_LINE> <INDENT> self.db =pymysql.connect(self.gDbIP,self.gDbUser,self.gDbPass,self.gDbName ) <NEW_LINE> <DEDENT> def select(self,sel): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cursor = self.db.cursor() <NEW_LINE> cursor.execute(sel) <NEW_LINE> data =cursor.fetchall() <NEW_LINE> cursor.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("except "+sel) <NEW_LINE> data ="" <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def updateInsertDelete(self,updateSentence): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cursor = self.db.cursor() <NEW_LINE> row =cursor.execute(updateSentence) <NEW_LINE> self.db.commit() <NEW_LINE> cursor.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("except "+updateSentence) <NEW_LINE> self.db.rollback() <NEW_LINE> <DEDENT> <DEDENT> def commit(self): <NEW_LINE> <INDENT> self.db.commit() <NEW_LINE> <DEDENT> def closeDb(self): <NEW_LINE> <INDENT> self.db.close()
Configure the database connection info: dbIP, dbUser, dbPass, dbName
625990865fc7496912d4902b
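A minimal usage sketch of dbOperate; the credentials, table, and SQL statements are placeholders, only the method names come from the class above.

db = dbOperate()
db.setDbConInfo('localhost', 'root', 'secret', 'zj')   # placeholder credentials
db.connectDb()
rows = db.select('SELECT id, name FROM users LIMIT 10')  # hypothetical table
for row in rows:
    print(row)
db.updateInsertDelete("UPDATE users SET name = 'bob' WHERE id = 1")
db.closeDb()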
class Emissor(ABC): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def envia(self, mensagem): <NEW_LINE> <INDENT> pass
Abstract class used as a template for creating message emitters.
625990863346ee7daa338423
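A concrete emitter sketch, assuming Emissor is importable as defined above; the EmissorConsole name and print-based behavior are illustrative.

class EmissorConsole(Emissor):
    """Illustrative concrete emitter that just prints the message."""
    def envia(self, mensagem):
        print('Enviando: {}'.format(mensagem))

EmissorConsole().envia('hello')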
class MailUser(db.Model): <NEW_LINE> <INDENT> __tablename__ = "MailUser" <NEW_LINE> id = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> username = db.Column(db.String(50),nullable=False) <NEW_LINE> email = db.Column(db.String(50),index=True,unique=True) <NEW_LINE> def __init__(self,username,email): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.email = email
Mail user database model
6259908623849d37ff852c3d
@dataclass <NEW_LINE> class DMPseudoModelScan(DMModelScan): <NEW_LINE> <INDENT> _coupling: str = 'pseudo' <NEW_LINE> def mediator_total_width(self): <NEW_LINE> <INDENT> return self.mediator_partial_width_quarks() + self.mediator_partial_width_dm() + self.mediator_partial_width_gluon() <NEW_LINE> <DEDENT> def mediator_partial_width_quarks(self): <NEW_LINE> <INDENT> width = 0 <NEW_LINE> v = 246 <NEW_LINE> for mq in Quarks: <NEW_LINE> <INDENT> yq = np.sqrt(2) * mq.value / v <NEW_LINE> iwidth = np.select([self.mmed < 2*mq.value, self.mmed >= 2*mq.value], [0, 3 * self.gq**2 * yq**2 * self.mmed / (16 * PI) * beta(mq.value, self.mmed)], default=np.nan) <NEW_LINE> width += iwidth <NEW_LINE> <DEDENT> return width <NEW_LINE> <DEDENT> def mediator_partial_width_dm(self): <NEW_LINE> <INDENT> width = np.select([self.mmed < 2*self.mdm, self.mmed >= 2*self.mdm], [0, self.gdm **2 * self.mmed / (8 * PI) * beta(self.mdm, self.mmed)], default=np.nan) <NEW_LINE> return width <NEW_LINE> <DEDENT> def mediator_partial_width_gluon(self): <NEW_LINE> <INDENT> alphas = 0.130 <NEW_LINE> v = 246 <NEW_LINE> width = alphas ** 2 * self.gq**2 * self.mmed**3 / (32 * PI**3 * v**2) <NEW_LINE> width = width * np.abs(self.fps(4 * (Quarks.top.value / self.mmed)**2))**2 <NEW_LINE> return width <NEW_LINE> <DEDENT> def fps(self,simple): <NEW_LINE> <INDENT> tau = simple.astype(complex) <NEW_LINE> return tau * (np.arctan(1. / np.sqrt(tau - 1)))**2
Specific implementation of a parameter scan for a pseudoscalar mediator.
625990864527f215b58eb761
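For reference, the partial widths coded above correspond to the standard pseudoscalar-mediator expressions below; the loop function f_PS is read directly off the fps method, while the kinematic factor β is not shown in the snippet and is assumed to be the usual sqrt(1 - 4m^2/M^2) (with v = 246 GeV and alpha_s = 0.130 as in the code).

\Gamma_{\chi\bar\chi} = \frac{g_{\mathrm{DM}}^2\, m_{\mathrm{med}}}{8\pi}\,\beta(m_{\mathrm{DM}}, m_{\mathrm{med}}),
\qquad
\Gamma_{q\bar q} = \sum_{q} \frac{3\, g_q^2\, y_q^2\, m_{\mathrm{med}}}{16\pi}\,\beta(m_q, m_{\mathrm{med}}),
\qquad y_q = \frac{\sqrt{2}\, m_q}{v},

\Gamma_{gg} = \frac{\alpha_s^2\, g_q^2\, m_{\mathrm{med}}^3}{32\pi^3 v^2}
\left| f_{\mathrm{PS}}\!\left(\frac{4 m_t^2}{m_{\mathrm{med}}^2}\right) \right|^2,
\qquad
f_{\mathrm{PS}}(\tau) = \tau \arctan^2\!\frac{1}{\sqrt{\tau - 1}},
\qquad
\beta(m, M) \equiv \sqrt{1 - \frac{4m^2}{M^2}} \;\text{(assumed)}.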
class OrgDescView(View): <NEW_LINE> <INDENT> def get(self, request, org_id): <NEW_LINE> <INDENT> course_org = CourseOrg.objects.get(id=int(org_id)) <NEW_LINE> current_page = 'desc' <NEW_LINE> has_fav = False <NEW_LINE> if request.user.is_authenticated(): <NEW_LINE> <INDENT> if UserFavorite.objects.filter(user=request.user, fav_id=org_id, fav_type=2): <NEW_LINE> <INDENT> has_fav = True <NEW_LINE> <DEDENT> <DEDENT> return render(request, 'org-detail-desc.html', {'course_org': course_org, 'current_page': current_page, 'has_fav': has_fav, })
Organization introduction page
625990867b180e01f3e49e26
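A hypothetical URLconf entry wiring OrgDescView to an org_id; the module path and pattern name are assumptions, and the old-style django.conf.urls.url matches the pre-1.10 Django idioms used in the view (is_authenticated() as a method).

# URLconf sketch; import path and route name are assumptions.
from django.conf.urls import url
from .views import OrgDescView

urlpatterns = [
    url(r'^desc/(?P<org_id>\d+)/$', OrgDescView.as_view(), name='org_desc'),
]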
class Message(MessageAPI): <NEW_LINE> <INDENT> __slots__ = [ 'to', 'sender', 'value', 'data', 'depth', 'gas', 'code', '_code_address', 'create_address', 'should_transfer_value', 'is_static', '_storage_address' ] <NEW_LINE> logger = logging.getLogger('eth.vm.message.Message') <NEW_LINE> def __init__(self, gas: int, to: Address, sender: Address, value: int, data: BytesOrView, code: bytes, depth: int=0, create_address: Address=None, code_address: Address=None, should_transfer_value: bool=True, is_static: bool=False) -> None: <NEW_LINE> <INDENT> validate_uint256(gas, title="Message.gas") <NEW_LINE> self.gas: int = gas <NEW_LINE> if to != CREATE_CONTRACT_ADDRESS: <NEW_LINE> <INDENT> validate_canonical_address(to, title="Message.to") <NEW_LINE> <DEDENT> self.to = to <NEW_LINE> validate_canonical_address(sender, title="Message.sender") <NEW_LINE> self.sender = sender <NEW_LINE> validate_uint256(value, title="Message.value") <NEW_LINE> self.value = value <NEW_LINE> validate_is_bytes_or_view(data, title="Message.data") <NEW_LINE> self.data = data <NEW_LINE> validate_is_integer(depth, title="Message.depth") <NEW_LINE> validate_gte(depth, minimum=0, title="Message.depth") <NEW_LINE> self.depth = depth <NEW_LINE> validate_is_bytes(code, title="Message.code") <NEW_LINE> self.code = code <NEW_LINE> if create_address is not None: <NEW_LINE> <INDENT> validate_canonical_address(create_address, title="Message.storage_address") <NEW_LINE> <DEDENT> self.storage_address = create_address <NEW_LINE> if code_address is not None: <NEW_LINE> <INDENT> validate_canonical_address(code_address, title="Message.code_address") <NEW_LINE> <DEDENT> self.code_address = code_address <NEW_LINE> validate_is_boolean(should_transfer_value, title="Message.should_transfer_value") <NEW_LINE> self.should_transfer_value = should_transfer_value <NEW_LINE> validate_is_boolean(is_static, title="Message.is_static") <NEW_LINE> self.is_static = is_static <NEW_LINE> <DEDENT> @property <NEW_LINE> def code_address(self) -> Address: <NEW_LINE> <INDENT> if self._code_address is not None: <NEW_LINE> <INDENT> return self._code_address <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.to <NEW_LINE> <DEDENT> <DEDENT> @code_address.setter <NEW_LINE> def code_address(self, value: Address) -> None: <NEW_LINE> <INDENT> self._code_address = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def storage_address(self) -> Address: <NEW_LINE> <INDENT> if self._storage_address is not None: <NEW_LINE> <INDENT> return self._storage_address <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.to <NEW_LINE> <DEDENT> <DEDENT> @storage_address.setter <NEW_LINE> def storage_address(self, value: Address) -> None: <NEW_LINE> <INDENT> self._storage_address = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_create(self) -> bool: <NEW_LINE> <INDENT> return self.to == CREATE_CONTRACT_ADDRESS <NEW_LINE> <DEDENT> @property <NEW_LINE> def data_as_bytes(self) -> bytes: <NEW_LINE> <INDENT> return bytes(self.data)
A message for VM computation.
62599086adb09d7d5dc0c0dd
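A construction sketch for Message; the 20-byte placeholder addresses and numeric values are made up, and the import path is inferred from the class's logger name (eth.vm.message), which may differ by py-evm version.

from eth.vm.message import Message   # assumed module path

sender = b'\x01' * 20   # canonical 20-byte addresses (placeholders)
to = b'\x02' * 20

msg = Message(
    gas=21000,
    to=to,
    sender=sender,
    value=0,
    data=b'',
    code=b'',
)
print(msg.is_create)        # False: `to` is not CREATE_CONTRACT_ADDRESS
print(msg.storage_address)  # falls back to `to` when no create_address is given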
class TestRegisterObject(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.x86 = TritonContext(ARCH.X86) <NEW_LINE> self.x64 = TritonContext(ARCH.X86_64) <NEW_LINE> self.arm = TritonContext(ARCH.ARM32) <NEW_LINE> self.aarch = TritonContext(ARCH.AARCH64) <NEW_LINE> <DEDENT> def test_object(self): <NEW_LINE> <INDENT> self.assertEqual(self.x86.registers.eax, self.x86.getRegister('eax')) <NEW_LINE> self.assertEqual(self.x64.registers.rax, self.x64.getRegister('RaX')) <NEW_LINE> self.assertEqual(self.arm.registers.r0, self.arm.getRegister('R0')) <NEW_LINE> self.assertEqual(self.aarch.registers.x9, self.aarch.getRegister('x9'))
Test register object
62599086091ae356687067c5
class index_select_syn_node(object): <NEW_LINE> <INDENT> node_type = DATA_TYPE.INDEX_SELECT <NEW_LINE> def __init__(self, target, indices): <NEW_LINE> <INDENT> self._target = target <NEW_LINE> self._indices = [] <NEW_LINE> for i in indices: <NEW_LINE> <INDENT> self._indices.append(Expr_syn_node(i)) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def target(self): <NEW_LINE> <INDENT> return self._target <NEW_LINE> <DEDENT> @property <NEW_LINE> def ID(self): <NEW_LINE> <INDENT> if self._target.node_type == DATA_TYPE.ID: <NEW_LINE> <INDENT> return self._target.ID <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def target_type(self): <NEW_LINE> <INDENT> return self._target.node_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def indices(self): <NEW_LINE> <INDENT> return self._indices
Syntax node representing index select operations
62599086bf627c535bcb3057