code: string (length 4 to 4.48k)
docstring: string (length 1 to 6.45k)
_id: string (length 24)
class PhpLanguageModel(language_model.LanguageModel): <NEW_LINE> <INDENT> _SCHEMA_TYPE_TO_PHP_TYPE = { 'any': 'object', 'boolean': 'bool', 'integer': 'int', 'long': 'string', 'number': 'double', 'string': 'string', 'uint32': 'string', 'uint64': 'string', 'int32': 'int', 'int64': 'string', 'double': 'double', 'float': 'float', } <NEW_LINE> PHP_KEYWORDS = frozenset(( 'abstract', 'and', 'array', 'as', 'break', 'case', 'catch', 'cfunction', 'class', 'clone', 'const', 'continue', 'declare', 'default', 'do', 'else', 'elseif', 'enddeclare', 'endfor', 'endforeach', 'endif', 'endswitch', 'endwhile', 'extends', 'final', 'for', 'foreach', 'function', 'global', 'goto', 'if', 'implements', 'interface', 'instanceof', 'list', 'namespace', 'new', 'old_function', 'or', 'private', 'protected', 'public', 'static', 'switch', 'throw', 'try', 'use', 'var', 'while', 'xor', )) <NEW_LINE> PHP_TYPES = frozenset(( 'bool', 'boolean', 'int', 'integer', 'float', 'double', 'string', 'array', 'object', 'null', 'resource', )) <NEW_LINE> RESERVED_CLASS_NAMES = PHP_KEYWORDS | PHP_TYPES <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(PhpLanguageModel, self).__init__(class_name_delimiter='.') <NEW_LINE> <DEDENT> def GetCodeTypeFromDictionary(self, def_dict): <NEW_LINE> <INDENT> json_type = def_dict.get('type', 'string') <NEW_LINE> json_format = def_dict.get('format') <NEW_LINE> php_type = (self._SCHEMA_TYPE_TO_PHP_TYPE.get(json_format) or self._SCHEMA_TYPE_TO_PHP_TYPE.get(json_type, json_type)) <NEW_LINE> return php_type <NEW_LINE> <DEDENT> def CodeTypeForArrayOf(self, type_name): <NEW_LINE> <INDENT> return type_name <NEW_LINE> <DEDENT> def CodeTypeForMapOf(self, type_name): <NEW_LINE> <INDENT> return type_name <NEW_LINE> <DEDENT> def ToMemberName(self, s, unused_api): <NEW_LINE> <INDENT> return s.replace('-', '_').replace('.', '_')
A LanguageModel tuned for PHP.
625990658e7ae83300eea7c7
class INodes(object): <NEW_LINE> <INDENT> std_interval = np.array([0.0, 0.0]) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._num_nodes = None <NEW_LINE> self._nodes = None <NEW_LINE> self._interval = None <NEW_LINE> <DEDENT> def init(self, n_nodes, interval=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def transform(self, interval): <NEW_LINE> <INDENT> assert_is_instance(interval, np.ndarray, descriptor="Interval", checking_obj=self) <NEW_LINE> assert_condition(interval.size == 2, ValueError, message="Intervals must be of size 2: {} ({:s})".format(interval, class_name(interval)), checking_obj=self) <NEW_LINE> assert_condition(interval[0] < interval[1], ValueError, message="Interval must be positive: {:.2f} > {:.2f}".format(interval[0], interval[1]), checking_obj=self) <NEW_LINE> _old_interval = self.interval <NEW_LINE> self._interval = interval <NEW_LINE> self._nodes = (self.nodes - _old_interval[0]) * (interval[1] - interval[0]) / (_old_interval[1] - _old_interval[0]) + interval[0] <NEW_LINE> assert_condition(self._nodes[0] - self._interval[0] <= 1e-16 and self._nodes[-1] - self._interval[1] <= 1e-16, RuntimeError, message="Newly computed nodes do not match new interval: {} NOT IN {}" .format(self._nodes, self._interval), checking_obj=self) <NEW_LINE> LOG.debug("Nodes: %s" % self._nodes.tolist()) <NEW_LINE> <DEDENT> @property <NEW_LINE> def interval(self): <NEW_LINE> <INDENT> return self._interval <NEW_LINE> <DEDENT> @interval.setter <NEW_LINE> def interval(self, interval): <NEW_LINE> <INDENT> self.transform(interval) <NEW_LINE> <DEDENT> @property <NEW_LINE> def nodes(self): <NEW_LINE> <INDENT> return self._nodes <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_nodes(self): <NEW_LINE> <INDENT> return self._num_nodes <NEW_LINE> <DEDENT> @num_nodes.setter <NEW_LINE> def num_nodes(self, num_nodes): <NEW_LINE> <INDENT> self._num_nodes = num_nodes <NEW_LINE> <DEDENT> def print_lines_for_log(self): <NEW_LINE> <INDENT> _lines = { 'Type': class_name(self), 'Number Nodes': "%d" % self.num_nodes } <NEW_LINE> return _lines <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "INodes<0x%x>(n=%d, nodes=%s)" % (id(self), self.num_nodes, self.nodes) <NEW_LINE> <DEDENT> def __copy__(self): <NEW_LINE> <INDENT> copy = self.__class__.__new__(self.__class__) <NEW_LINE> copy.__dict__.update(self.__dict__) <NEW_LINE> return copy <NEW_LINE> <DEDENT> def __deepcopy__(self, memo): <NEW_LINE> <INDENT> copy = self.__class__.__new__(self.__class__) <NEW_LINE> memo[id(self)] = copy <NEW_LINE> for item, value in self.__dict__.items(): <NEW_LINE> <INDENT> setattr(copy, item, deepcopy(value, memo)) <NEW_LINE> <DEDENT> return copy
Provider for integration nodes. This is an abstract interface for providers of integration nodes.
62599065cc0a2c111447c66c
class Serializer(six.with_metaclass(SerializerMeta, SerializerBase)): <NEW_LINE> <INDENT> default_getter = operator.attrgetter <NEW_LINE> def __init__(self, instance=None, many=False, **kwargs): <NEW_LINE> <INDENT> super(Serializer, self).__init__(**kwargs) <NEW_LINE> self.instance = instance <NEW_LINE> self.many = many <NEW_LINE> self._data = None <NEW_LINE> <DEDENT> def _serialize(self, instance, fields): <NEW_LINE> <INDENT> serialized_value = {} <NEW_LINE> for name, getter, to_value, call, required, pass_self in fields: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if pass_self: <NEW_LINE> <INDENT> result = getter(self, instance) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = getter(instance) <NEW_LINE> if required or result is not None: <NEW_LINE> <INDENT> if call: <NEW_LINE> <INDENT> result = result() <NEW_LINE> <DEDENT> if to_value: <NEW_LINE> <INDENT> result = to_value(result) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except (KeyError, AttributeError, TypeError, ValueError) as e: <NEW_LINE> <INDENT> if required: <NEW_LINE> <INDENT> raise SerializationError(str(e)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> serialized_value[name] = result <NEW_LINE> <DEDENT> <DEDENT> return serialized_value <NEW_LINE> <DEDENT> def to_value(self, instance): <NEW_LINE> <INDENT> fields = self._compiled_fields <NEW_LINE> if self.many: <NEW_LINE> <INDENT> serialize = self._serialize <NEW_LINE> return [serialize(o, fields) for o in instance] <NEW_LINE> <DEDENT> return self._serialize(instance, fields) <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> if self._data is None: <NEW_LINE> <INDENT> self._data = self.to_value(self.instance) <NEW_LINE> <DEDENT> return self._data
The Serializer class is used as a base for custom serializers. A Serializer class is also a subclass of Field class, which allows nesting Serializers. A new serializer is defined by subclassing the `Serializer` class, then adding each `Field` as a class variable. Example: : class FooSerializer(Serializer): foo = Field() bar = Field() foo = Foo(foo='hello', bar=5) serialized_obj = FooSerializer(foo).data # serialized_obj = {'foo': 'hello', 'bar': 5} :param instance: The object or iterable of objects to serialize. :param bool many: If `instance` is an iterable of objects, set `many` to `True` to serialize it as a list.
625990657047854f46340aed
@register_plugin <NEW_LINE> class I13StxmLoader(BaseLoader): <NEW_LINE> <INDENT> def __init__(self, name='I13StxmLoader'): <NEW_LINE> <INDENT> super(I13StxmLoader, self).__init__(name) <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> exp = self.exp <NEW_LINE> data_obj = exp.create_data_object('in_data', 'tomo') <NEW_LINE> data_obj.backing_file = h5py.File(self.exp.meta_data.get("data_file"), 'r') <NEW_LINE> data_obj.data = data_obj.backing_file['/entry1/merlin_sw_hdf/merlin_tot'] <NEW_LINE> sh = data_obj.data.shape <NEW_LINE> rotation_angle = data_obj.backing_file['entry1/merlin_sw_hdf/t1_theta'].value.astype(float)[:,0] <NEW_LINE> data_obj.meta_data.set('rotation_angle', rotation_angle) <NEW_LINE> data_obj.set_axis_labels('rotation_angle.degrees', 'x.pixel') <NEW_LINE> data_obj.add_pattern('PROJECTION', core_dims=(1,),slice_dims=(0,)) <NEW_LINE> data_obj.add_pattern('SINOGRAM', core_dims=(0,1),slice_dims=()) <NEW_LINE> data_obj.set_shape(sh) <NEW_LINE> self.set_data_reduction_params(data_obj)
A class to load tomography data from the STXM.
62599065a219f33f346c7f40
class APIRouter(base_wsgi.Router): <NEW_LINE> <INDENT> ExtensionManager = None <NEW_LINE> @classmethod <NEW_LINE> def factory(cls, global_config, **local_config): <NEW_LINE> <INDENT> return cls() <NEW_LINE> <DEDENT> def __init__(self, ext_mgr=None): <NEW_LINE> <INDENT> if ext_mgr is None: <NEW_LINE> <INDENT> if self.ExtensionManager: <NEW_LINE> <INDENT> ext_mgr = self.ExtensionManager() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception(_("Must specify an ExtensionManager class")) <NEW_LINE> <DEDENT> <DEDENT> mapper = ProjectMapper() <NEW_LINE> self.resources = {} <NEW_LINE> self._setup_routes(mapper, ext_mgr) <NEW_LINE> self._setup_ext_routes(mapper, ext_mgr) <NEW_LINE> self._setup_extensions(ext_mgr) <NEW_LINE> super(APIRouter, self).__init__(mapper) <NEW_LINE> <DEDENT> def _setup_ext_routes(self, mapper, ext_mgr): <NEW_LINE> <INDENT> for resource in ext_mgr.get_resources(): <NEW_LINE> <INDENT> LOG.debug(_('Extended resource: %s'), resource.collection) <NEW_LINE> wsgi_resource = wsgi.Resource(resource.controller) <NEW_LINE> self.resources[resource.collection] = wsgi_resource <NEW_LINE> kargs = dict( controller=wsgi_resource, collection=resource.collection_actions, member=resource.member_actions) <NEW_LINE> if resource.parent: <NEW_LINE> <INDENT> kargs['parent_resource'] = resource.parent <NEW_LINE> <DEDENT> mapper.resource(resource.collection, resource.collection, **kargs) <NEW_LINE> if resource.custom_routes_fn: <NEW_LINE> <INDENT> resource.custom_routes_fn(mapper, wsgi_resource) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _setup_extensions(self, ext_mgr): <NEW_LINE> <INDENT> for extension in ext_mgr.get_controller_extensions(): <NEW_LINE> <INDENT> ext_name = extension.extension.name <NEW_LINE> collection = extension.collection <NEW_LINE> controller = extension.controller <NEW_LINE> if collection not in self.resources: <NEW_LINE> <INDENT> LOG.warning(_('Extension %(ext_name)s: Cannot extend ' 'resource %(collection)s: No such resource') % locals()) <NEW_LINE> continue <NEW_LINE> <DEDENT> LOG.debug(_('Extension %(ext_name)s extending resource: ' '%(collection)s') % locals()) <NEW_LINE> resource = self.resources[collection] <NEW_LINE> resource.register_actions(controller) <NEW_LINE> resource.register_extensions(controller) <NEW_LINE> <DEDENT> <DEDENT> def _setup_routes(self, mapper, ext_mgr): <NEW_LINE> <INDENT> raise NotImplementedError
Routes requests on the OpenStack API to the appropriate controller and method.
6259906545492302aabfdc15
class RawString(piSetting): <NEW_LINE> <INDENT> def _validate(self, value): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def _convert_to_kodi_setting(self, value): <NEW_LINE> <INDENT> return str(value) <NEW_LINE> <DEDENT> def _convert_to_piconfig_setting(self, value): <NEW_LINE> <INDENT> return str(value)
Class to handle strings that pass directly from the config.txt to Kodi and back again, such as codec serial numbers.
62599065e76e3b2f99fda139
class Redis(redis.Redis): <NEW_LINE> <INDENT> def __init__(self, app): <NEW_LINE> <INDENT> app.config.setdefault('REDIS_HOST', 'localhost') <NEW_LINE> app.config.setdefault('REDIS_PORT', 6379) <NEW_LINE> app.config.setdefault('REDIS_DB', 0) <NEW_LINE> app.config.setdefault('REDIS_PASSWORD', None) <NEW_LINE> app.config.setdefault('REDIS_CONFIG_KEY_PREFIX', 'CONFIG') <NEW_LINE> self.app = app <NEW_LINE> self.config_key_prefix = app.config['REDIS_CONFIG_KEY_PREFIX'] <NEW_LINE> super(Redis, self).__init__( host=app.config['REDIS_HOST'], port=app.config['REDIS_PORT'], db=app.config['REDIS_DB'], password=app.config['REDIS_PASSWORD']) <NEW_LINE> <DEDENT> def set_config(self, name, value): <NEW_LINE> <INDENT> self.set('%s_%s' % (self.config_key_prefix, name), json.dumps(value)) <NEW_LINE> <DEDENT> def get_config(self, name): <NEW_LINE> <INDENT> result = self.get('%s_%s' % (self.config_key_prefix, name)) <NEW_LINE> if result is None: <NEW_LINE> <INDENT> result = self.app.config[name] <NEW_LINE> self.set_config(name, result) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = json.loads(result) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def erase_config(self): <NEW_LINE> <INDENT> keys = self.keys('%s*' % self.config_key_prefix) <NEW_LINE> if not keys: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for k in keys: <NEW_LINE> <INDENT> self.delete(k)
Redis Flask wrapper, inspired by: https://github.com/satori/flask-redis Can also provide Flask configuration values using the regular cache paradigm. Use Redis.get_config() to retrieve any Flask configuration value. This class will return the value stored in Redis or, if it is not (yet) stored, retrieve it from the Flask config and store it for further use. This allows config changes at runtime.
6259906591af0d3eaad3b562
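A minimal usage sketch for the wrapper above, assuming a Flask app and a reachable Redis server; 'DEBUG' stands in for any Flask config key:

    from flask import Flask

    app = Flask(__name__)
    r = Redis(app)  # connects with the REDIS_* defaults set in __init__

    # First call: 'DEBUG' is not in Redis yet, so it is read from
    # app.config, stored under 'CONFIG_DEBUG', and returned.
    value = r.get_config('DEBUG')

    # Runtime override: later get_config('DEBUG') calls now return True
    # from Redis without touching app.config.
    r.set_config('DEBUG', True)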
class Console: <NEW_LINE> <INDENT> pass
Welcome to ttproto console. - Use [tab] to complete - Use help(object) to print help messages. - Quit using ctrl+d
625990653539df3088ecd9d8
class UserList(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'value': {'readonly': True}, 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[User]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(UserList, self).__init__(**kwargs) <NEW_LINE> self.value = None <NEW_LINE> self.next_link = None
Collection of users. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: The list of users. :vartype value: list[~azure.mgmt.databoxedge.v2020_05_01_preview.models.User] :ivar next_link: Link to the next set of results. :vartype next_link: str
62599065baa26c4b54d509de
class TableSpikers(object): <NEW_LINE> <INDENT> def __init__(self, N, tau_out=5.0): <NEW_LINE> <INDENT> self.N = N <NEW_LINE> self.spike_table = None <NEW_LINE> self.tau_out = tau_out <NEW_LINE> <DEDENT> def prepare(self, tmax, dt): <NEW_LINE> <INDENT> self.out = np.zeros(self.N) <NEW_LINE> <DEDENT> def evolve(self, t, dt): <NEW_LINE> <INDENT> if self.spike_table is not None: <NEW_LINE> <INDENT> i = int_r(t/dt) <NEW_LINE> if i < len(self.spike_table): <NEW_LINE> <INDENT> self.spike = self.spike_table[i] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.spike = np.zeros(self.N, dtype=bool) <NEW_LINE> <DEDENT> <DEDENT> self.out += (1000.0*self.spike - self.out*dt)/self.tau_out
A layer of neurons that spike at times indicated by a boolean array. Set `spike_table` (for example by overloading `prepare` method) to decide when firing should happen. Attributes ---------- spike_table: (n_steps, n_neurons) matrix Boolean matrix identifying the steps when each neuron fires.
625990653617ad0b5ee0788a
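A hedged sketch of driving the layer above by hand: build a boolean spike table, then step the simulation. int_r is assumed to round t/dt to the nearest integer, as the source implies:

    import numpy as np

    layer = TableSpikers(N=3, tau_out=5.0)
    dt, tmax = 0.1, 1.0
    n_steps = int(tmax / dt)

    # One row per time step, one boolean column per neuron.
    layer.spike_table = np.zeros((n_steps, layer.N), dtype=bool)
    layer.spike_table[2, 0] = True  # neuron 0 fires at step 2

    layer.prepare(tmax, dt)
    for step in range(n_steps):
        layer.evolve(step * dt, dt)  # low-pass filters spikes into layer.out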
class IndexedWordForms(object): <NEW_LINE> <INDENT> def __init__(self, semcor, wfs): <NEW_LINE> <INDENT> self.semcor = semcor <NEW_LINE> self.wfs = wfs <NEW_LINE> self.lemma_idx = create_lemma_index(wfs) <NEW_LINE> self.lemma_fname_idx = {} <NEW_LINE> for lemma, wfs in self.lemma_idx.items(): <NEW_LINE> <INDENT> self.lemma_fname_idx[lemma] = create_fname_index(wfs) <NEW_LINE> <DEDENT> self.fname_lemma_idx = invert_index(self.lemma_fname_idx) <NEW_LINE> self.btypes_idx = {} <NEW_LINE> <DEDENT> def filter_lemmas_with_only_one_sense_per_document(self): <NEW_LINE> <INDENT> self.lemma_fname_idx = filter_lemma_fname_index(self.lemma_fname_idx) <NEW_LINE> self.fname_lemma_idx = invert_index(self.lemma_fname_idx) <NEW_LINE> <DEDENT> def initialize_btypes_index(self): <NEW_LINE> <INDENT> self.btypes_idx = BTypePairDictionary(self) <NEW_LINE> <DEDENT> def get_pairs(self, min_lemmas=1, min_instances=1): <NEW_LINE> <INDENT> pairs = self.btypes_idx.keys() <NEW_LINE> pairs = [pair for pair in pairs if len(self.btypes_idx[pair]['ALL']) >= min_instances and len(self.btypes_idx[pair]['LEMMAS']) >= min_lemmas] <NEW_LINE> return pairs <NEW_LINE> <DEDENT> def print_lemma_fname_index(self): <NEW_LINE> <INDENT> for lemma in sorted(self.lemma_fname_idx): <NEW_LINE> <INDENT> fname_idx = self.lemma_fname_idx[lemma] <NEW_LINE> wf_count = sum([len(wfs) for wfs in fname_idx.values()]) <NEW_LINE> print("\n%s (%d)" % (lemma, wf_count)) <NEW_LINE> for fname, wfs in fname_idx.items(): <NEW_LINE> <INDENT> base = os.path.basename(fname) <NEW_LINE> btypes = set([wf.synset.btypes for wf in wfs]) <NEW_LINE> for wf in wfs: <NEW_LINE> <INDENT> print(' ', base, wf.synset.btypes, wf) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> print() <NEW_LINE> <DEDENT> def print_btypes_index(self, n=None): <NEW_LINE> <INDENT> self.btypes_idx.print_index(n) <NEW_LINE> <DEDENT> def print_btypes_index_summary(self): <NEW_LINE> <INDENT> self.btypes_idx.print_summary()
Class that provides an interface to a set of WordForms. This is in addition to the lemma_idx on Semcor, which is a simple index from lemmas to WordForm lists, which contains all WordForms in the corpus. This class is intended to store the results of specific searches and has more than one way to access the data. Attributes: lemma_idx : dict ( string -> list of WordForms ) lemma_fname_idx : dict ( string -> dict ( string -> list of WordForms ) ) fname_lemma_idx : dict ( string -> dict ( string -> list of WordForms ) ) btypes_idx : BTypePairDictionary Index to store wordforms that go with pairs of basic types (btypes). The key is always an ordered pair of btypes and excludes btypes with spaces in them (meaning a sense is associated with two basic types, for now we avoid dealing with these).
6259906599cbb53fe683261e
class ViewInspector(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.instance_schemas = WeakKeyDictionary() <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> if instance in self.instance_schemas: <NEW_LINE> <INDENT> return self.instance_schemas[instance] <NEW_LINE> <DEDENT> self.view = instance <NEW_LINE> return self <NEW_LINE> <DEDENT> def __set__(self, instance, other): <NEW_LINE> <INDENT> self.instance_schemas[instance] = other <NEW_LINE> if other is not None: <NEW_LINE> <INDENT> other.view = instance <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def view(self): <NEW_LINE> <INDENT> assert self._view is not None, ( "Schema generation REQUIRES a view instance. (Hint: you accessed " "`schema` from the view class rather than an instance.)" ) <NEW_LINE> return self._view <NEW_LINE> <DEDENT> @view.setter <NEW_LINE> def view(self, value): <NEW_LINE> <INDENT> self._view = value <NEW_LINE> <DEDENT> @view.deleter <NEW_LINE> def view(self): <NEW_LINE> <INDENT> self._view = None <NEW_LINE> <DEDENT> def get_link(self, path, method, base_url): <NEW_LINE> <INDENT> raise NotImplementedError(".get_link() must be overridden.")
Descriptor class on APIView. Subclasses provide per-view schema generation.
625990652ae34c7f260ac821
class BootResource(Object, metaclass=BootResourceType): <NEW_LINE> <INDENT> id = ObjectField.Checked( "id", check(int), readonly=True) <NEW_LINE> type = ObjectField.Checked( "type", check(str), check(str), readonly=True) <NEW_LINE> name = ObjectField.Checked( "name", check(str), check(str), readonly=True) <NEW_LINE> architecture = ObjectField.Checked( "architecture", check(str), check(str), readonly=True) <NEW_LINE> subarches = ObjectField.Checked( "subarches", check_optional(str), check_optional(str), default=None, readonly=True) <NEW_LINE> sets = ObjectField.Checked( "sets", mapping_of(BootResourceSet), default=None, readonly=True) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return super(BootResource, self).__repr__( fields={"type", "name", "architecture"}) <NEW_LINE> <DEDENT> async def delete(self): <NEW_LINE> <INDENT> await self._handler.delete(id=self.id)
A boot resource.
62599065009cb60464d02c72
class Dimensions(messages.Message): <NEW_LINE> <INDENT> os_family = messages.EnumField(OSFamily, 1) <NEW_LINE> backend = messages.EnumField(Backend, 2) <NEW_LINE> hostname = messages.StringField(3) <NEW_LINE> num_cpus = messages.IntegerField(4) <NEW_LINE> memory_gb = messages.FloatField(5) <NEW_LINE> disk_gb = messages.IntegerField(6) <NEW_LINE> linux_flavor = messages.EnumField(LinuxFlavor, 7) <NEW_LINE> os_version = messages.StringField(8)
Represents the dimensions of a machine.
6259906545492302aabfdc16
class ThreadScan(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, queue): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.queue = queue <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> site = self.queue.get(False) <NEW_LINE> <DEDENT> except Empty: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> req = Request(site, headers={"User-Agent": user_agent}) <NEW_LINE> connection = urlopen(req) <NEW_LINE> found.append(site) <NEW_LINE> self.queue.task_done() <NEW_LINE> <DEDENT> except HTTPError: <NEW_LINE> <INDENT> error.append(site) <NEW_LINE> self.queue.task_done() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.queue.task_done() <NEW_LINE> <DEDENT> <DEDENT> except KeyboardInterrupt as e: <NEW_LINE> <INDENT> print("\n\033[36m[?] \033[0mCTRL+C Detected...")
Thread for the admin-page scan.
62599065379a373c97d9a758
class ResourceGroup(Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'location': {'required': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'properties': {'key': 'properties', 'type': 'ResourceGroupProperties'}, 'location': {'key': 'location', 'type': 'str'}, 'managed_by': {'key': 'managedBy', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, } <NEW_LINE> def __init__(self, *, location: str, name: str=None, properties=None, managed_by: str=None, tags=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(ResourceGroup, self).__init__(**kwargs) <NEW_LINE> self.id = None <NEW_LINE> self.name = name <NEW_LINE> self.properties = properties <NEW_LINE> self.location = location <NEW_LINE> self.managed_by = managed_by <NEW_LINE> self.tags = tags
Resource group information. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: The ID of the resource group. :vartype id: str :param name: The name of the resource group. :type name: str :param properties: :type properties: ~azure.mgmt.resource.resources.v2016_09_01.models.ResourceGroupProperties :param location: Required. The location of the resource group. It cannot be changed after the resource group has been created. It must be one of the supported Azure locations. :type location: str :param managed_by: The ID of the resource that manages this resource group. :type managed_by: str :param tags: The tags attached to the resource group. :type tags: dict[str, str]
625990655166f23b2e244b0b
class GeneralTeamPackageTable(BasePackageTable): <NEW_LINE> <INDENT> default_title = "All team packages" <NEW_LINE> slug = 'general' <NEW_LINE> @property <NEW_LINE> def packages(self): <NEW_LINE> <INDENT> if self.tag: <NEW_LINE> <INDENT> return self.scope.packages.filter( data__key=self.tag).order_by('name') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.scope.packages.all().order_by('name')
This table displays the packages information of a team in a simple fashion. It must receive a :class:`Team <distro_tracker.core.models.Team>` as scope
625990658e71fb1e983bd200
class OrderSTATUS(Enum): <NEW_LINE> <INDENT> ORDER_STATE_PENDING = 0 <NEW_LINE> ORDER_STATE_CLOSED = 1 <NEW_LINE> ORDER_STATE_CANCELED = 2 <NEW_LINE> ORDER_STATE_UNKNOWN = 3 <NEW_LINE> ORDER_STATE_PARTIALLY_FILLED = 4
Status of an order: pending, closed, canceled, unknown, or partially filled.
6259906597e22403b383c647
class RequestMethod: <NEW_LINE> <INDENT> GET = 'get' <NEW_LINE> POST = 'post' <NEW_LINE> PATCH = 'patch' <NEW_LINE> PUT = 'put' <NEW_LINE> DELETE = 'delete'
Class holding HTTP request method constants.
625990651f037a2d8b9e5408
class Neighbour(object): <NEW_LINE> <INDENT> def __init__(self, types=None): <NEW_LINE> <INDENT> self.types = types <NEW_LINE> <DEDENT> def build_list(self, x, t): <NEW_LINE> <INDENT> if self.types == None: <NEW_LINE> <INDENT> i = range(len(t)) <NEW_LINE> return np.array(list(it.combinations(i, 2)), dtype=np.int64) <NEW_LINE> <DEDENT> t1, t2 = self.types <NEW_LINE> i1 = np.arange(len(t))[t == t1] <NEW_LINE> if t1 == t2: <NEW_LINE> <INDENT> return np.array(list(it.combinations(i1, 2)), dtype=np.int64) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> i2 = np.arange(len(t))[t == t2] <NEW_LINE> return np.array(list(it.product(i1, i2)), dtype=np.int64)
Base Neighbour class.
62599065009cb60464d02c73
class MockSensor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(MockSensor, self).__init__() <NEW_LINE> <DEDENT> def begin(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def readTempC(self): <NEW_LINE> <INDENT> return 25.
Mock temperature sensor that always reports 25 °C, for testing without hardware.
62599065a219f33f346c7f42
class Alert(AbstractAction): <NEW_LINE> <INDENT> def __init__(self, text): <NEW_LINE> <INDENT> self.text = text <NEW_LINE> <DEDENT> def __call__(self, world, entity): <NEW_LINE> <INDENT> world.infobox.write(self.text)
On call, writes text to the infobox.
6259906599fddb7c1ca6396c
class AxesHandler(AbstractAxesHandler, AxesBaseHandler): <NEW_LINE> <INDENT> def user_login_failed(self, sender, credentials: dict, request=None, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def user_logged_in(self, sender, request, user, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def user_logged_out(self, sender, request, user, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_failures(self, request, credentials: dict = None) -> int: <NEW_LINE> <INDENT> return 0
Signal bare handler implementation without any storage backend.
62599065e76e3b2f99fda13b
class WindowsFactoryBase: <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self, func, *args): <NEW_LINE> <INDENT> self._func = func <NEW_LINE> self._args = args <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def _java_cls(self, sc): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _jwindow(self, sc): <NEW_LINE> <INDENT> return self._java_cls(sc).__getattr__(self._func)(*self._args) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s(%s)" % (self._func, ", ".join(str(arg) for arg in self._args))
WindowFactory represents an intended window that will be instantiated later when we have access to a SparkContext. Typical usage is that a user constructs one of these (using the functions in this module), then passes it to one of the window-related methods of :class:`TimeSeriesDataFrame`, where we have a SparkContext. In those methods, we have this factory construct the actual window object the user wanted.
625990653cc13d1c6d466e7f
class TellCoreServer(object): <NEW_LINE> <INDENT> def __init__(self, port_client, port_events): <NEW_LINE> <INDENT> self.proc = None <NEW_LINE> self.port_client = port_client <NEW_LINE> self.port_events = port_events <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.proc = [] <NEW_LINE> for telldus, port in ( (TELLDUS_CLIENT, self.port_client), (TELLDUS_EVENTS, self.port_events)): <NEW_LINE> <INDENT> args = shlex.split(SOCAT_SERVER.format( type=telldus, port=port)) <NEW_LINE> self.proc.append(subprocess.Popen( args, stdin=subprocess.DEVNULL, stderr=subprocess.DEVNULL, stdout=subprocess.DEVNULL )) <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if self.proc: <NEW_LINE> <INDENT> for proc in self.proc: <NEW_LINE> <INDENT> proc.kill() <NEW_LINE> <DEDENT> <DEDENT> self.proc = None
Server for tellcore.
62599065d6c5a102081e3861
class Square: <NEW_LINE> <INDENT> def __init__(self, __size=0): <NEW_LINE> <INDENT> if not isinstance(__size, int): <NEW_LINE> <INDENT> raise TypeError('size must be an integer') <NEW_LINE> <DEDENT> elif __size < 0: <NEW_LINE> <INDENT> raise ValueError('size must be >= 0') <NEW_LINE> <DEDENT> self.__size = __size <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__size * self.__size
This class defines a square (based on 2-square.py). Args: size (int): size of the square Attributes: size (int): size of the square
625990654a966d76dd5f0631
@register(PathResourceID.CLIPBOARD) <NEW_LINE> @attr.s(slots=True) <NEW_LINE> class ClipboardRecord(BaseElement): <NEW_LINE> <INDENT> top = attr.ib(default=0, type=int) <NEW_LINE> left = attr.ib(default=0, type=int) <NEW_LINE> bottom = attr.ib(default=0, type=int) <NEW_LINE> right = attr.ib(default=0, type=int) <NEW_LINE> resolution = attr.ib(default=0, type=int) <NEW_LINE> @classmethod <NEW_LINE> def read(cls, fp): <NEW_LINE> <INDENT> return cls(*decode_fixed_point(read_fmt('5i4x', fp))) <NEW_LINE> <DEDENT> def write(self, fp): <NEW_LINE> <INDENT> return write_fmt(fp, '5i4x', *encode_fixed_point(attr.astuple(self)))
Clipboard record. ..py:attribute: top ..py:attribute: left ..py:attribute: bottom ..py:attribute: right ..py:attribute: resolution
62599065dd821e528d6da51f
class PyKerasApplications(PythonPackage): <NEW_LINE> <INDENT> homepage = "http://keras.io" <NEW_LINE> url = "https://github.com/keras-team/keras-applications/archive/1.0.4.tar.gz" <NEW_LINE> version('1.0.7', sha256='8580a885c8abe4bf8429cb0e551f23e79b14eda73d99138cfa1d355968dd4b0a') <NEW_LINE> version('1.0.6', sha256='2cb412c97153160ec267b238e958d281ac3532b139cab42045c2d7086a157c21') <NEW_LINE> version('1.0.4', sha256='37bd2f3ba9c0e0105c193999b1162fd99562cf43e5ef06c73932950ecc46d085') <NEW_LINE> version('1.0.3', sha256='35b663a4933ee3c826a9349d19048221c997f0dd5ea24dd598c05cf90c72879d') <NEW_LINE> version('1.0.2', sha256='6d8923876a7f7f2d459dd7efe3b10830f316f714b707f0c136e7f00c63035338') <NEW_LINE> version('1.0.1', sha256='05ad1a73fddd22ed73ae59065b554e7ea13d05c3d4c6755ac166702b88686db5') <NEW_LINE> depends_on('py-setuptools', type='build')
Sample Deep Learning application in Keras. Keras depends on this package to run properly.
62599065498bea3a75a5919e
class DataQueue(object): <NEW_LINE> <INDENT> queue_ttl = 86400 <NEW_LINE> queue_max_age = 3600 <NEW_LINE> def __init__(self, key, redis_client, batch=0, compress=False, json=True): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> self.redis_client = redis_client <NEW_LINE> self.batch = batch <NEW_LINE> self.compress = compress <NEW_LINE> self.json = json <NEW_LINE> <DEDENT> def dequeue(self, batch=None): <NEW_LINE> <INDENT> if batch is None: <NEW_LINE> <INDENT> batch = self.batch <NEW_LINE> <DEDENT> with self.redis_client.pipeline() as pipe: <NEW_LINE> <INDENT> pipe.multi() <NEW_LINE> pipe.lrange(self.key, 0, batch - 1) <NEW_LINE> if batch != 0: <NEW_LINE> <INDENT> pipe.ltrim(self.key, batch, -1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pipe.ltrim(self.key, 1, 0) <NEW_LINE> <DEDENT> result = pipe.execute()[0] <NEW_LINE> if self.compress: <NEW_LINE> <INDENT> result = [util.decode_gzip(item, encoding=None) for item in result] <NEW_LINE> <DEDENT> if self.json: <NEW_LINE> <INDENT> result = [simplejson.loads(item, encoding='utf-8') for item in result] <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def _push(self, pipe, items, batch): <NEW_LINE> <INDENT> for i in range(0, len(items), batch): <NEW_LINE> <INDENT> pipe.rpush(self.key, *items[i:i + batch]) <NEW_LINE> <DEDENT> pipe.expire(self.key, self.queue_ttl) <NEW_LINE> <DEDENT> def enqueue(self, items, batch=None, pipe=None): <NEW_LINE> <INDENT> if batch is None: <NEW_LINE> <INDENT> batch = self.batch <NEW_LINE> <DEDENT> if batch == 0: <NEW_LINE> <INDENT> batch = len(items) <NEW_LINE> <DEDENT> if self.json: <NEW_LINE> <INDENT> items = [simplejson.dumps(item, encoding='utf-8').encode('utf-8') for item in items] <NEW_LINE> <DEDENT> if self.compress: <NEW_LINE> <INDENT> items = [util.encode_gzip(item, encoding=None) for item in items] <NEW_LINE> <DEDENT> if pipe is not None: <NEW_LINE> <INDENT> self._push(pipe, items, batch) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with redis_pipeline(self.redis_client) as pipe: <NEW_LINE> <INDENT> self._push(pipe, items, batch) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def ready(self, batch=None): <NEW_LINE> <INDENT> if batch is None: <NEW_LINE> <INDENT> batch = self.batch <NEW_LINE> <DEDENT> with self.redis_client.pipeline() as pipe: <NEW_LINE> <INDENT> pipe.ttl(self.key) <NEW_LINE> pipe.llen(self.key) <NEW_LINE> ttl, size = pipe.execute() <NEW_LINE> <DEDENT> if ttl < 0: <NEW_LINE> <INDENT> age = -1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> age = max(self.queue_ttl - ttl, 0) <NEW_LINE> <DEDENT> return bool(size > 0 and (size >= batch or age >= self.queue_max_age)) <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return self.redis_client.llen(self.key)
A Redis based queue which stores binary or JSON encoded items in lists. The queue uses a single static queue key. The lists maintain a TTL value corresponding to the time data has been last put into the queue.
62599065d7e4931a7ef3d723
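A minimal round-trip sketch for the queue above, assuming a reachable Redis server and the redis package:

    import redis

    client = redis.StrictRedis(host='localhost', port=6379, db=0)
    queue = DataQueue('queue:test', client, batch=10, json=True)

    queue.enqueue([{'id': 1}, {'id': 2}, {'id': 3}])
    if queue.ready():            # True once the batch size or max age is reached
        items = queue.dequeue()  # atomically reads and trims up to `batch` items
    print(queue.size())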
class GraphAlgoInterface: <NEW_LINE> <INDENT> def get_graph(self) -> GraphInterface: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def load_from_json(self, file_name: str) -> bool: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def save_to_json(self, file_name: str) -> bool: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def shortest_path(self, id1: int, id2: int) -> (float, list): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def connected_component(self, id1: int) -> list: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def connected_components(self) -> List[list]: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def plot_graph(self) -> None: <NEW_LINE> <INDENT> raise NotImplementedError
This abstract class represents an interface of a graph.
62599065baa26c4b54d509e0
class Callback(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def set(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def notify_unary_operations(self, obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def notify_bitwise_operations(self, obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def notify_comparisons(self, obj): <NEW_LINE> <INDENT> pass
callback before the operations
62599065be8e80087fbc07c4
class LinkedQueue: <NEW_LINE> <INDENT> class _Node: <NEW_LINE> <INDENT> __slots__ = '_item', '_next' <NEW_LINE> def __init__(self, item, next=None): <NEW_LINE> <INDENT> self._item = item <NEW_LINE> self._next = next <NEW_LINE> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self._head = None <NEW_LINE> self._tail = None <NEW_LINE> self._size = 0 <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._size <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> return self._size == 0 <NEW_LINE> <DEDENT> def enqueue(self, i): <NEW_LINE> <INDENT> new_node = self._Node(i) <NEW_LINE> if self.is_empty(): <NEW_LINE> <INDENT> self._head = new_node <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._tail._next = new_node <NEW_LINE> <DEDENT> self._tail = new_node <NEW_LINE> self._size += 1 <NEW_LINE> <DEDENT> def dequeue(self): <NEW_LINE> <INDENT> if self.is_empty(): <NEW_LINE> <INDENT> raise Empty('The Queue is empty.') <NEW_LINE> <DEDENT> value = self._head._item <NEW_LINE> self._head = self._head._next <NEW_LINE> if self.is_empty(): <NEW_LINE> <INDENT> self._tail = None <NEW_LINE> <DEDENT> self._size -= 1 <NEW_LINE> return value <NEW_LINE> <DEDENT> def front(self): <NEW_LINE> <INDENT> if self.is_empty(): <NEW_LINE> <INDENT> raise Empty('The Queue is empty.') <NEW_LINE> <DEDENT> return self._head._item
Singly Linked List Queue implementation.
62599065435de62698e9d545
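Basic FIFO behaviour of the queue above; Empty is the exception class the source assumes to be in scope:

    q = LinkedQueue()
    q.enqueue('a')
    q.enqueue('b')
    assert q.front() == 'a'    # peek without removing
    assert q.dequeue() == 'a'  # FIFO order
    assert len(q) == 1 and not q.is_empty()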
class OrganizationProfile(UserProfile): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> app_label = 'api' <NEW_LINE> <DEDENT> is_organization = models.BooleanField(default=True) <NEW_LINE> creator = models.ForeignKey(User) <NEW_LINE> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> super(OrganizationProfile, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> def remove_user_from_organization(self, user): <NEW_LINE> <INDENT> for group in user.groups.filter('%s#' % self.user.username): <NEW_LINE> <INDENT> user.groups.remove(group) <NEW_LINE> <DEDENT> <DEDENT> def is_organization_owner(self, user): <NEW_LINE> <INDENT> has_owner_group = user.groups.filter( name='%s#%s' % (self.user.username, Team.OWNER_TEAM_NAME)) <NEW_LINE> return True if has_owner_group else False
Organization: Extends the user profile for organization-specific info * What does this do? - it has a creator - it has owner(s), through permissions/group - has members, through permissions/group - no login access, no password? no registration like a normal user? - created by a user who becomes the organization owner * What relationships?
62599065a8ecb03325872954
class BucketlistSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> items = BucketlistItemSerializer(many=True, read_only=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Bucketlist <NEW_LINE> fields = ('id', 'name', 'items', 'date_created', 'date_modified', 'created_by', 'user')
Bucketlist Model serializer class.
625990657d43ff2487427fae
class RateLimit: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.limit: int = 0 <NEW_LINE> self.remaining: int = 0 <NEW_LINE> self.reset: int = 0 <NEW_LINE> self.reset_time: float = time.time() <NEW_LINE> <DEDENT> def wait_for_next_request(self): <NEW_LINE> <INDENT> if not self.reset_time: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.remaining < 1: <NEW_LINE> <INDENT> while self.reset_time > time.time(): <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update_rate_limit(self, response: Response, reset_time: int): <NEW_LINE> <INDENT> self.limit = int(response.headers.get("X-RateLimit-Limit", -1)) <NEW_LINE> self.remaining = int(response.headers.get("X-RateLimit-Remaining", -1)) <NEW_LINE> self.reset = int(response.headers.get("X-RateLimit-Reset", -1)) <NEW_LINE> self.reset_time = time.time() + self.reset <NEW_LINE> if self.limit < 0 or self.remaining < 0 or self.reset_time < 0: <NEW_LINE> <INDENT> self.limit = 0 <NEW_LINE> self.remaining = 0 <NEW_LINE> self.reset = reset_time <NEW_LINE> self.reset_time = time.time() + self.reset <NEW_LINE> raise HttpNoXRateLimitHeader("There is no XRateLimit Header!")
Meetup API rate limit: wait for the next request window if needed. Raises: HttpNoXRateLimitHeader: Raised when the HTTP response has no X-RateLimit header.
6259906599cbb53fe6832620
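A hedged sketch of the intended call pattern: block until the window allows a request, perform it, then refresh the counters from the response headers. The requests package and the Meetup URL are illustrative only:

    import requests

    limiter = RateLimit()
    for url in ('https://api.meetup.com/self', 'https://api.meetup.com/self'):
        limiter.wait_for_next_request()  # sleeps while remaining < 1
        response = requests.get(url)
        try:
            # reset_time=60 is the fallback window used when headers are absent
            limiter.update_rate_limit(response, reset_time=60)
        except HttpNoXRateLimitHeader:
            pass  # counters were reset to the fallback window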
class VoteCountAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ['votes', 'choice', 'contest_batch']
Modify default layout of admin form
625990652ae34c7f260ac824
class StdLogListener (LogListener): <NEW_LINE> <INDENT> def __init__ (self, level = LOG_INFO, info_out = sys.stdout, error_out = sys.stderr, *a, **k): <NEW_LINE> <INDENT> super (StdLogListener, self).__init__ (*a, **k) <NEW_LINE> self.level = level <NEW_LINE> self.info_output = info_out <NEW_LINE> self.error_output = error_out <NEW_LINE> <DEDENT> def on_message (self, node, level, msg): <NEW_LINE> <INDENT> if level >= self.level: <NEW_LINE> <INDENT> out = self.info_output if level <= LOG_INFO else self.error_output <NEW_LINE> out.write ('[' + node.get_path_name () + '] ' + level[1].upper () + ': ' + msg + '\n')
This is a log listener that writes all messages to the given files. It filters out messages below a given log level.
625990655166f23b2e244b0d
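A sketch of the filtering behaviour under stated assumptions: the module's level constants are ordered (priority, name) tuples and nodes expose get_path_name(), as the code above implies; both stand-ins below are hypothetical:

    import sys

    # Hypothetical level constants matching the (priority, name) usage above.
    LOG_DEBUG, LOG_INFO, LOG_ERROR = (0, 'debug'), (1, 'info'), (2, 'error')

    class Node:
        def get_path_name(self):
            return 'root.child'

    listener = StdLogListener(level=LOG_INFO)
    listener.on_message(Node(), LOG_DEBUG, 'dropped: below the threshold')
    listener.on_message(Node(), LOG_ERROR, 'written to error_out')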
class ExceptionThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> threading.Thread.__init__(self, *args, **kwargs) <NEW_LINE> self._eq = Queue.Queue() <NEW_LINE> <DEDENT> def run(self, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> threading.Thread.run(self, *args, **kwargs) <NEW_LINE> self._eq.put(None) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> tb = traceback.format_exc() <NEW_LINE> self._eq.put(tb) <NEW_LINE> <DEDENT> <DEDENT> def join(self, *args, **kwargs): <NEW_LINE> <INDENT> threading.Thread.join(self, *args, **kwargs) <NEW_LINE> exception = self._eq.get() <NEW_LINE> if exception: <NEW_LINE> <INDENT> raise Exception(exception)
Wrapper around a python :class:`Thread` which will raise an exception on join if the child threw an unhandled exception.
625990655fdd1c0f98e5f6c0
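A short sketch of the raise-on-join behaviour:

    def boom():
        raise ValueError('child failed')

    t = ExceptionThread(target=boom)
    t.start()
    try:
        t.join()  # re-raises the child's formatted traceback as an Exception
    except Exception as exc:
        print(exc)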
class Usage(Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'unit': {'required': True, 'constant': True}, 'current_value': {'required': True}, 'limit': {'required': True}, 'name': {'required': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'unit': {'key': 'unit', 'type': 'str'}, 'current_value': {'key': 'currentValue', 'type': 'long'}, 'limit': {'key': 'limit', 'type': 'long'}, 'name': {'key': 'name', 'type': 'UsageName'}, } <NEW_LINE> unit = "Count" <NEW_LINE> def __init__(self, current_value, limit, name): <NEW_LINE> <INDENT> super(Usage, self).__init__() <NEW_LINE> self.id = None <NEW_LINE> self.current_value = current_value <NEW_LINE> self.limit = limit <NEW_LINE> self.name = name
Describes network resource usage. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource identifier. :vartype id: str :ivar unit: An enum describing the unit of measurement. Default value: "Count" . :vartype unit: str :param current_value: The current value of the usage. :type current_value: long :param limit: The limit of usage. :type limit: long :param name: The name of the type of usage. :type name: ~azure.mgmt.network.v2017_10_01.models.UsageName
62599065aad79263cf42ff00
class MessageSchema(ma.Schema): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> fields = ('mid','bid','uid','message','created')
Schema serializing message records (mid, bid, uid, message, created).
625990657d847024c075db13
class Shell(object): <NEW_LINE> <INDENT> def __init__(self, *iargs, **ikwargs): <NEW_LINE> <INDENT> self.initargs = iargs <NEW_LINE> self.initkwargs = ikwargs <NEW_LINE> <DEDENT> def callme(self, *args, **kwargs): <NEW_LINE> <INDENT> print("args:", args) <NEW_LINE> print("kwargs:", kwargs)
A simple class for testing object wrappers.
625990651f037a2d8b9e5409
class MockFactory(LocalPiFactory): <NEW_LINE> <INDENT> def __init__( self, revision=os.getenv('GPIOZERO_MOCK_REVISION', 'a02082'), pin_class=os.getenv('GPIOZERO_MOCK_PIN_CLASS', MockPin)): <NEW_LINE> <INDENT> super(MockFactory, self).__init__() <NEW_LINE> self._revision = revision <NEW_LINE> if not issubclass(pin_class, MockPin): <NEW_LINE> <INDENT> if isinstance(pin_class, bytes): <NEW_LINE> <INDENT> pin_class = pin_class.decode('ascii') <NEW_LINE> <DEDENT> dist = pkg_resources.get_distribution('gpiozero') <NEW_LINE> group = 'gpiozero_mock_pin_classes' <NEW_LINE> pin_class = pkg_resources.load_entry_point(dist, group, pin_class.lower()) <NEW_LINE> <DEDENT> self.pin_class = pin_class <NEW_LINE> <DEDENT> def _get_revision(self): <NEW_LINE> <INDENT> return self._revision <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.pins.clear() <NEW_LINE> self._reservations.clear() <NEW_LINE> <DEDENT> def pin(self, spec, pin_class=None, **kwargs): <NEW_LINE> <INDENT> if pin_class is None: <NEW_LINE> <INDENT> pin_class = self.pin_class <NEW_LINE> <DEDENT> n = self._to_gpio(spec) <NEW_LINE> try: <NEW_LINE> <INDENT> pin = self.pins[n] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pin = pin_class(self, n, **kwargs) <NEW_LINE> self.pins[n] = pin <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if issubclass(pin_class, MockPWMPin) != isinstance(pin, MockPWMPin): <NEW_LINE> <INDENT> raise ValueError('pin %d is already in use as a %s' % (n, pin.__class__.__name__)) <NEW_LINE> <DEDENT> <DEDENT> return pin
Factory for generating mock pins. The *revision* parameter specifies what revision of Pi the mock factory pretends to be (this affects the result of the :attr:`pi_info` attribute as well as where pull-ups are assumed to be). The *pin_class* attribute specifies which mock pin class will be generated by the :meth:`pin` method by default. This can be changed after construction by modifying the :attr:`pin_class` attribute.
62599065009cb60464d02c75
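A hedged usage sketch, assuming gpiozero is installed; swapping in the mock factory lets device code run without Pi hardware:

    from gpiozero import Device, LED

    Device.pin_factory = MockFactory()  # later devices receive mock pins

    led = LED(17)
    led.on()
    pin = Device.pin_factory.pin(17)  # same MockPin instance the LED drives
    print(pin.state)                  # 1 while the LED is on
    Device.pin_factory.reset()        # forget all pins and reservations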
class Error(Exception): <NEW_LINE> <INDENT> def __init__(self, msg=''): <NEW_LINE> <INDENT> self.message = msg <NEW_LINE> Exception.__init__(self, msg) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.message <NEW_LINE> <DEDENT> __str__ = __repr__
Base class for custom exceptions.
6259906563d6d428bbee3e27
class QuestionsPage(MethodView): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return redirect(url_for('ui.home'), 303)
Encapsulates the views for the questions page
625990650a50d4780f70695e
class EmitterMixin(object): <NEW_LINE> <INDENT> __metaclass__ = EmitterMeta <NEW_LINE> Meta = Meta <NEW_LINE> def emit(self, content, request=None, emitter=None): <NEW_LINE> <INDENT> emitter = emitter or self.determine_emitter(request) <NEW_LINE> emitter = emitter(self, request=request, response=content) <NEW_LINE> response = emitter.emit() <NEW_LINE> assert isinstance(response, HttpResponse), "Emitter must return HttpResponse" <NEW_LINE> if isinstance(content, Paginator): <NEW_LINE> <INDENT> linked_resources = [] <NEW_LINE> if content.next_page: <NEW_LINE> <INDENT> linked_resources.append('<{0}>; rel="next"'.format( content.next_page)) <NEW_LINE> <DEDENT> if content.previous_page: <NEW_LINE> <INDENT> linked_resources.append( '<{0}>; rel="previous"'.format(content.previous_page)) <NEW_LINE> <DEDENT> response["Link"] = ", ".join(linked_resources) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def to_simple(content, simple, serializer=None): <NEW_LINE> <INDENT> return simple <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def determine_emitter(cls, request): <NEW_LINE> <INDENT> default_emitter = cls._meta.emitters[0] <NEW_LINE> if not request: <NEW_LINE> <INDENT> return default_emitter <NEW_LINE> <DEDENT> if request.method == 'OPTIONS': <NEW_LINE> <INDENT> return JSONEmitter <NEW_LINE> <DEDENT> accept = request.META.get('HTTP_ACCEPT', '*/*') <NEW_LINE> if accept == '*/*': <NEW_LINE> <INDENT> return default_emitter <NEW_LINE> <DEDENT> base_format = mimeparse.best_match(cls._meta.emitters_dict.keys(), accept) <NEW_LINE> return cls._meta.emitters_dict.get( base_format, default_emitter)
Serialize response. .. autoclass:: adrest.mixin.emitter.Meta :members: Example: :: class SomeResource(): class Meta: emit_fields = ['pk', 'user', 'customfield'] emit_related = { 'user': { fields: ['username'] } } def to_simple__customfield(self, user): return "I'm hero! " + user.username
62599065a219f33f346c7f44
class TextSequenceGenerator(keras.utils.Sequence): <NEW_LINE> <INDENT> def __init__(self, mode="train", batch_size=16, img_size=(224, 224), no_channels=3, shuffle=True): <NEW_LINE> <INDENT> self.imgs, self.labels = [], [] <NEW_LINE> if mode == "train": <NEW_LINE> <INDENT> base_train = 'path-to-train-folder/train' <NEW_LINE> for folder in tqdm(id_labels): <NEW_LINE> <INDENT> label_path = os.path.join(base_train, folder, 'images') <NEW_LINE> fn_paths = sorted(os.listdir(label_path)) <NEW_LINE> for fn_path in fn_paths: <NEW_LINE> <INDENT> self.imgs.append(os.path.join(label_path, fn_path)) <NEW_LINE> self.labels.append(folder) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif mode == "val": <NEW_LINE> <INDENT> base_val = 'path-to-val-folder/val' <NEW_LINE> with open('path-to-val-folder/val/val_annotations.txt') as f: <NEW_LINE> <INDENT> for line in f: <NEW_LINE> <INDENT> fn_path = os.path.join( base_val, "images", line.split('\t')[0]) <NEW_LINE> id_label = line.split('\t')[1] <NEW_LINE> self.imgs.append(fn_path) <NEW_LINE> self.labels.append(id_label) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.ids = range(len(self.imgs)) <NEW_LINE> self.img_size = img_size <NEW_LINE> self.img_w, self.img_h = self.img_size <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.no_channels = no_channels <NEW_LINE> self.shuffle = shuffle <NEW_LINE> self.on_epoch_end() <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return int(np.floor(len(self.ids) / self.batch_size)) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> indexes = self.indexes[index * self.batch_size:(index + 1) * self.batch_size] <NEW_LINE> ids = [self.ids[k] for k in indexes] <NEW_LINE> X, y = self.__data_generation(ids) <NEW_LINE> return X, y <NEW_LINE> <DEDENT> def on_epoch_end(self): <NEW_LINE> <INDENT> self.indexes = np.arange(len(self.ids)) <NEW_LINE> if self.shuffle: <NEW_LINE> <INDENT> np.random.shuffle(self.indexes) <NEW_LINE> <DEDENT> <DEDENT> def __data_generation(self, ids): <NEW_LINE> <INDENT> size = len(ids) <NEW_LINE> X = np.empty( (size, self.img_w, self.img_h, self.no_channels), dtype=np.float32 ) <NEW_LINE> Y = np.empty((size, 100), dtype=np.float32) <NEW_LINE> for i, id_ in enumerate(ids): <NEW_LINE> <INDENT> img = image.load_img(self.imgs[id_], target_size=(224, 224)) <NEW_LINE> img = image.img_to_array(img) <NEW_LINE> img = np.expand_dims(img, axis=0) <NEW_LINE> img = preprocess_input(img) <NEW_LINE> X[i, ] = img <NEW_LINE> Y[i] = wv_label_mapping[self.labels[id_]] <NEW_LINE> <DEDENT> return X, Y
Generates data for Keras
625990652c8b7c6e89bd4f2c
class Zwierz: <NEW_LINE> <INDENT> def __init__(self, gatunek, wiek): <NEW_LINE> <INDENT> self.gatunek = gatunek <NEW_LINE> self.wiek = wiek <NEW_LINE> <DEDENT> def podaj_gatunek(self): <NEW_LINE> <INDENT> print('lis')
First class.
62599065462c4b4f79dbd144
class Like(TimeStampModel): <NEW_LINE> <INDENT> creator = models.ForeignKey(user_model.User, on_delete=models.PROTECT, null=True) <NEW_LINE> image = models.ForeignKey(Image, on_delete=models.PROTECT, null=True, related_name='likes') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "User: {} - Image Caption: {}".format(self.creator.username, self.image.caption)
Like Model
62599065435de62698e9d546
class User(AbstractUser, BaseModel): <NEW_LINE> <INDENT> phone = models.CharField(max_length=20, verbose_name='手机号码') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'mak_user' <NEW_LINE> verbose_name = '用户' <NEW_LINE> verbose_name_plural = verbose_name
User model class.
625990654a966d76dd5f0633
class GloAvgConv(nn.Module): <NEW_LINE> <INDENT> def __init__( self, C_in, C_out, init=nn.init.kaiming_normal, bias = True, activation = nn.ReLU(inplace=True) ): <NEW_LINE> <INDENT> super(GloAvgConv, self).__init__() <NEW_LINE> self.conv_avg = nn.Conv2d(in_channels = C_in, out_channels = C_out, kernel_size = (1, 1), stride = (1, 1), bias = bias) <NEW_LINE> self.bn_avg = nn.BatchNorm2d(C_out) <NEW_LINE> self.activation = activation <NEW_LINE> init(self.conv_avg.weight) <NEW_LINE> if bias: <NEW_LINE> <INDENT> nn.init.constant(self.conv_avg.bias, 0) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> nsample = x.size()[3] <NEW_LINE> x = self.activation(self.bn_avg(self.conv_avg(x))) <NEW_LINE> x = F.max_pool2d(x, kernel_size = (1, nsample)).squeeze(3) <NEW_LINE> return x
Input shape: (B, C_in, 1, nsample) Output shape: (B, C_out, npoint)
625990654e4d562566373b45
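A small shape check for the module above, assuming PyTorch; it follows the documented (B, C_in, 1, nsample) -> (B, C_out, npoint) contract with npoint = 1 here:

    import torch

    layer = GloAvgConv(C_in=64, C_out=128)
    x = torch.randn(8, 64, 1, 32)  # (B, C_in, npoint=1, nsample=32)
    out = layer(x)
    print(out.shape)               # torch.Size([8, 128, 1])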
class BruteForce(SolvingAlgorithm): <NEW_LINE> <INDENT> def __init__(self, pattern_size, colors, attempts): <NEW_LINE> <INDENT> super().__init__(pattern_size, colors, attempts) <NEW_LINE> self.guessed = [] <NEW_LINE> <DEDENT> def guess_pattern(self): <NEW_LINE> <INDENT> if len(self.guessed) == 0: <NEW_LINE> <INDENT> self.guessed = list(map(lambda x: list(x), list(product(self.colors, repeat=self.pattern_size))))[:self.attempts+1] <NEW_LINE> <DEDENT> <DEDENT> def decide_next_step(self, evaluation, iteration): <NEW_LINE> <INDENT> if evaluation == [str(1) for _ in range(self.pattern_size)]: <NEW_LINE> <INDENT> return "finish" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "continue"
Class represents Brute Force solving algorithm.
62599065460517430c432bf3
class ExtruderModel(Model): <NEW_LINE> <INDENT> Type = 'OSEExtruder' <NEW_LINE> def __init__(self, obj, placement=Placement(), origin_translation_offset=Vector()): <NEW_LINE> <INDENT> super(ExtruderModel, self).__init__(obj) <NEW_LINE> self.placement = placement <NEW_LINE> self.origin_translation_offset = origin_translation_offset <NEW_LINE> <DEDENT> def execute(self, obj): <NEW_LINE> <INDENT> obj.Shape = Extruder.make( self.placement, self.origin_translation_offset) <NEW_LINE> <DEDENT> def __getstate__(self) -> Union[str, tuple]: <NEW_LINE> <INDENT> return self.Type <NEW_LINE> <DEDENT> def __setstate__(self, state: str) -> None: <NEW_LINE> <INDENT> if state: <NEW_LINE> <INDENT> self.Type = state
Encapsulates the data (i.e. topography and shape) for a Extruder, and is separate from the "view" or GUI representation. Based on: https://wiki.opensourceecology.org/wiki/File:Simpleextruderassy.fcstd See: https://wiki.opensourceecology.org/wiki/File:Finalextruder.png
62599065a8370b77170f1b0c
class LinkedList(object): <NEW_LINE> <INDENT> def __init__(self, head=None, tail=None): <NEW_LINE> <INDENT> self.head = head <NEW_LINE> self.tail = tail <NEW_LINE> <DEDENT> def insert(self, data): <NEW_LINE> <INDENT> new_token = Token(data) <NEW_LINE> if self.head is None: <NEW_LINE> <INDENT> self.head = new_token <NEW_LINE> self.tail = new_token <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prev_token = self.tail <NEW_LINE> prev_token.set_next_token(new_token) <NEW_LINE> new_token.set_prev_token(prev_token) <NEW_LINE> self.tail = new_token <NEW_LINE> <DEDENT> <DEDENT> def merge(self, left_phrase, right_phrase): <NEW_LINE> <INDENT> merged_token = Token('{} {}'.format(left_phrase.get_token(), right_phrase.get_token()), next_token=right_phrase.get_next(), prev_token=left_phrase.get_prev()) <NEW_LINE> previous = left_phrase.get_prev() <NEW_LINE> previous.set_next_token(merged_token) <NEW_LINE> return merged_token <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> current = self.head <NEW_LINE> size = 0 <NEW_LINE> while current: <NEW_LINE> <INDENT> size += 1 <NEW_LINE> print('{}'.format(current.get_token())) <NEW_LINE> current = current.get_next() <NEW_LINE> <DEDENT> return size <NEW_LINE> <DEDENT> def get_phrases(self): <NEW_LINE> <INDENT> current = self.head <NEW_LINE> phrases = [] <NEW_LINE> while current: <NEW_LINE> <INDENT> if current.is_phrase(): <NEW_LINE> <INDENT> phrases.append(current.get_token()) <NEW_LINE> <DEDENT> current = current.get_next() <NEW_LINE> <DEDENT> return phrases <NEW_LINE> <DEDENT> def stats(self): <NEW_LINE> <INDENT> print('Head of list is {}'.format(self.head.get_token())) <NEW_LINE> print('Tail of list is {}'.format(self.tail.get_token())) <NEW_LINE> print('Size of list is {}'.format(self.size()))
Simple linkedlist implementation for storing tokens from the sentence. It assumes that the list is always populated from the beginning of the sentence.
62599065be8e80087fbc07c6
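A short sketch of populating the list and merging two adjacent tokens; Token is the node class the source assumes, exposing get_token/get_next/get_prev and the setters used above:

    ll = LinkedList()
    for word in ('new', 'york', 'city'):
        ll.insert(word)

    left = ll.head.get_next()       # token 'york'
    right = left.get_next()         # token 'city'
    merged = ll.merge(left, right)  # 'york city', spliced in after 'new'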
class NormalDistribution(Distribution): <NEW_LINE> <INDENT> def __init__(self, mean, precision): <NEW_LINE> <INDENT> super(NormalDistribution, self).__init__(['mean', 'precision']) <NEW_LINE> self._mean = as_tensor(mean) <NEW_LINE> self._precision = as_tensor(precision) <NEW_LINE> <DEDENT> def _statistic(self, statistic, name): <NEW_LINE> <INDENT> if statistic == 'entropy': <NEW_LINE> <INDENT> return tf.multiply(constants.HALF, constants.LOG2PIE - tf.log(self._precision), name) <NEW_LINE> <DEDENT> elif statistic == 1: <NEW_LINE> <INDENT> return self._mean <NEW_LINE> <DEDENT> elif statistic == 'var': <NEW_LINE> <INDENT> return tf.reciprocal(self._precision, name) <NEW_LINE> <DEDENT> elif statistic == 2: <NEW_LINE> <INDENT> return tf.add(tf.square(self.statistic(1)), self.statistic('var'), name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(NormalDistribution, self)._statistic(statistic, name) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def sample_rank(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def log_likelihood(x, mean, precision, name=None): <NEW_LINE> <INDENT> chi2 = evaluate_statistic(x, 2) - 2 * evaluate_statistic(x, 1) * evaluate_statistic(mean, 1) + evaluate_statistic(mean, 2) <NEW_LINE> return tf.multiply(constants.HALF, evaluate_statistic(precision, 'log') - constants.LOG2PI - evaluate_statistic(precision, 1) * chi2, name) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def linear_log_likelihood(x, y, theta, tau, name=None): <NEW_LINE> <INDENT> y, x, theta, tau = map(as_tensor, [y, x, theta, tau]) <NEW_LINE> chi2 = evaluate_statistic(y, 2) - constants.TWO * tf.reduce_sum( evaluate_statistic(y[..., None], 1) * evaluate_statistic(x, 1) * evaluate_statistic(theta, 1), axis=-1 ) + tf.reduce_sum( evaluate_statistic(x, 'outer') * evaluate_statistic(theta, 'outer'), axis=(-1, -2) ) <NEW_LINE> ll = tf.multiply(constants.HALF, evaluate_statistic(tau, 'log') - constants.LOG2PI - evaluate_statistic(tau, 1) * chi2, name) <NEW_LINE> return ll
Univariate normal distribution. Parameters ---------- mean : tf.Tensor mean of the normal distribution precision : tf.Tensor precision of the normal distribution
625990653d592f4c4edbc61b
class File(content.Content): <NEW_LINE> <INDENT> icon("file.png") <NEW_LINE> content.name(_(u"File")) <NEW_LINE> content.schema(IFile) <NEW_LINE> content.require(security.CanAddContent) <NEW_LINE> data = BlobProperty(IFile['data'])
A file content type storing the data in blobs.
625990658e71fb1e983bd204
class QuoraQDA(QuoraClassifier): <NEW_LINE> <INDENT> def __init__(self, features, targets): <NEW_LINE> <INDENT> classifier = QDA(reg_param=0.5) <NEW_LINE> QuoraClassifier.__init__(self, classifier, features, targets)
Classifier implementation encapsulating scikit-learn's Quadratic Discriminant Analysis (QDA) classifier.
625990658e7ae83300eea7cc
class AirPressure(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'air_pressure' <NEW_LINE> id = db.Column(db.INTEGER, primary_key=True) <NEW_LINE> created_datetime = db.Column(db.DATETIME, nullable=False) <NEW_LINE> value = db.Column(db.FLOAT, nullable=False) <NEW_LINE> def __init__(self, value): <NEW_LINE> <INDENT> self.created_datetime = datetime.datetime.now(timezone('UTC')) <NEW_LINE> self.value = value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'AirPressure: {0}, {1}, {2}'.format(self.id, self.created_datetime, self.value)
A class representing air pressure.
6259906532920d7e50bc7785
class GenericSignedAttestation(_messages.Message): <NEW_LINE> <INDENT> class ContentTypeValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> CONTENT_TYPE_UNSPECIFIED = 0 <NEW_LINE> SIMPLE_SIGNING_JSON = 1 <NEW_LINE> <DEDENT> contentType = _messages.EnumField('ContentTypeValueValuesEnum', 1) <NEW_LINE> serializedPayload = _messages.BytesField(2) <NEW_LINE> signatures = _messages.MessageField('Signature', 3, repeated=True)
An attestation wrapper that uses the Grafeas `Signature` message. This attestation must define the `plaintext` that the `signatures` verify and any metadata necessary to interpret that plaintext. The signatures should always be over the `plaintext` bytestring. Enums: ContentTypeValueValuesEnum: Type (for example schema) of the attestation payload that was signed. The verifier must ensure that the provided type is one that the verifier supports, and that the attestation payload is a valid instantiation of that type (for example by validating a JSON schema). Fields: contentType: Type (for example schema) of the attestation payload that was signed. The verifier must ensure that the provided type is one that the verifier supports, and that the attestation payload is a valid instantiation of that type (for example by validating a JSON schema). serializedPayload: The serialized payload that is verified by one or more `signatures`. The encoding and semantic meaning of this payload must match what is set in `content_type`. signatures: One or more signatures over `serialized_payload`. Verifier implementations should consider this attestation message verified if at least one `signature` verifies `serialized_payload`. See `Signature` in common.proto for more details on signature structure and verification.
6259906501c39578d7f142d4
class Gmetric(object): <NEW_LINE> <INDENT> def __init__(self, metric_type, name, units, maximum, mcast=None, debug=0): <NEW_LINE> <INDENT> self.bin = "/usr/bin/gmetric" <NEW_LINE> self.metric_type = metric_type <NEW_LINE> self.name = name <NEW_LINE> self.units = units <NEW_LINE> self.maximum = maximum <NEW_LINE> self.mcast = mcast <NEW_LINE> self.mcast_if = "" <NEW_LINE> self.debug = debug <NEW_LINE> self.version = 2 <NEW_LINE> if mcast is None: <NEW_LINE> <INDENT> self.get_mc_channel() <NEW_LINE> <DEDENT> <DEDENT> def send(self, value, float_num=0): <NEW_LINE> <INDENT> if float_num: <NEW_LINE> <INDENT> value = "%.3f" % value <NEW_LINE> <DEDENT> cmd_v2 = "%s --type=%s --name=%s --value=%s --units=%s " "--tmax=%s --mcast_channel=%s %s" % (self.bin, self.metric_type, self.name, value, self.units, self.maximum, self.mcast, self.mcast_if) <NEW_LINE> cmd_v3 = "%s -c /etc/gmond.conf --type=%s --name=%s --value=%s " "--units=%s --tmax=%s" % (self.bin, self.metric_type, self.name, value, self.units, self.maximum) <NEW_LINE> if self.version == 2: <NEW_LINE> <INDENT> cmd = cmd_v2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cmd = cmd_v3 <NEW_LINE> <DEDENT> LOG.debug('COMMAND: {0}'.format(cmd)) <NEW_LINE> if self.debug == 1: <NEW_LINE> <INDENT> ret = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret = os.system(cmd) <NEW_LINE> <DEDENT> if ret != 0: <NEW_LINE> <INDENT> LOG.warn('There was an error running: {0} Switching to ganglia ' 'version 3...'.format(cmd)) <NEW_LINE> if self.version == 2: <NEW_LINE> <INDENT> self.version = 3 <NEW_LINE> LOG.debug('COMMAND: {0}'.format(cmd_v3)) <NEW_LINE> ret = os.system(cmd_v3) <NEW_LINE> if ret != 0: <NEW_LINE> <INDENT> LOG.error('Version 3 fails as well!') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> LOG.info('INFO version 3 works.') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def get_mc_channel(self): <NEW_LINE> <INDENT> conf = "/etc/gmond.conf" <NEW_LINE> if os.path.exists(conf): <NEW_LINE> <INDENT> regex = re.compile("^mcast_channel\s+([\d.]+)") <NEW_LINE> regex2 = re.compile("^mcast_if\s+(\w+)") <NEW_LINE> try: <NEW_LINE> <INDENT> conf_fd = open(conf, 'r') <NEW_LINE> lines = conf_fd.readlines() <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> metric = regex.search(line) <NEW_LINE> if metric: <NEW_LINE> <INDENT> self.mcast = metric.group(1) <NEW_LINE> <DEDENT> metric = regex2.search(line) <NEW_LINE> if metric: <NEW_LINE> <INDENT> self.mcast_if = "--mcast_if=%s" % metric.group(1) <NEW_LINE> <DEDENT> <DEDENT> conf_fd.close() <NEW_LINE> return 1 <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> LOG.error("Couldn't find mcast_channel in conf: {0}".format(conf)) <NEW_LINE> sys.exit(9) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> LOG.error('Conf does not exist: {0}'.format(conf)) <NEW_LINE> sys.exit(9)
Send metrics to gmond.
625990652c8b7c6e89bd4f2e
class CurrentPhases(PhaseStatus): <NEW_LINE> <INDENT> def __getitem__(self, nominal_phase: SinglePhaseKind) -> SinglePhaseKind: <NEW_LINE> <INDENT> return self.terminal.traced_phases.current(nominal_phase) <NEW_LINE> <DEDENT> def __setitem__(self, nominal_phase: SinglePhaseKind, traced_phase: SinglePhaseKind) -> bool: <NEW_LINE> <INDENT> return self.terminal.traced_phases.set_current(nominal_phase, traced_phase)
The traced phases in the current state of the network.
62599065462c4b4f79dbd146
class Filter(AxObj): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.field = '' <NEW_LINE> self.op = 'EQ' <NEW_LINE> self.value = '' <NEW_LINE> self._init_kwargs(kwargs, [ 'field', 'op', 'value', ]) <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if not self.field: <NEW_LINE> <INDENT> raise ValueError("Missing Filter field") <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def field(self): <NEW_LINE> <INDENT> return self._field <NEW_LINE> <DEDENT> @field.setter <NEW_LINE> def field(self, val): <NEW_LINE> <INDENT> self._assert_type_string("field", val) <NEW_LINE> self._field = val <NEW_LINE> <DEDENT> @property <NEW_LINE> def op(self): <NEW_LINE> <INDENT> return self._op <NEW_LINE> <DEDENT> @op.setter <NEW_LINE> def op(self, val): <NEW_LINE> <INDENT> self._assert_type_string("op", val) <NEW_LINE> self._assert_value("op", val, FILTER_OPS) <NEW_LINE> self._op = val <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, val): <NEW_LINE> <INDENT> self._value = val <NEW_LINE> <DEDENT> def __cmp__(self, other): <NEW_LINE> <INDENT> if self.field != other.field: <NEW_LINE> <INDENT> return cmp(self.field, other.field) <NEW_LINE> <DEDENT> if self.op != other.op: <NEW_LINE> <INDENT> return cmp(self.op, other.op) <NEW_LINE> <DEDENT> if self.value != other.value: <NEW_LINE> <INDENT> return cmp(self.value, other.value) <NEW_LINE> <DEDENT> return 0 <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return (u"Filter({self.field} {self.op} '{self.value}')" .format(self=self) )
Field filter representation.
62599065097d151d1a2c27ab
class Eleve: <NEW_LINE> <INDENT> def __init__(self, id_el, nom): <NEW_LINE> <INDENT> self.id_eleve = id_el <NEW_LINE> self.nom_eleve = nom
Represents an élève (student) with an id and a name.
62599065d6c5a102081e3865
class PostgreSQLServer(SQLServer): <NEW_LINE> <INDENT> def __init__(self, model_meta, migration_root, host, port, user, database, password=None, interactive=False, run_env=None): <NEW_LINE> <INDENT> super(PostgreSQLServer, self).__init__(model_meta, migration_root) <NEW_LINE> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self.user = user <NEW_LINE> self.database = database <NEW_LINE> self.password = password <NEW_LINE> self.interactive = interactive <NEW_LINE> self.run_env = run_env <NEW_LINE> <DEDENT> def _get_connection_string(self, database): <NEW_LINE> <INDENT> driver = host_check.get_postgresql_driver_name() <NEW_LINE> password = self.password <NEW_LINE> if driver == 'pg8000' and not password: <NEW_LINE> <INDENT> pfilepath = os.environ.get('PGPASSFILE') <NEW_LINE> if pfilepath: <NEW_LINE> <INDENT> password = pgpass.get_password_from_file(pfilepath, self.host, str(self.port), self.database, self.user) <NEW_LINE> <DEDENT> <DEDENT> extra_args = {'client_encoding': 'utf8'} <NEW_LINE> return str(URL('postgresql+' + driver, username=self.user, password=password, host=self.host, port=str(self.port), database=database, query=extra_args)) <NEW_LINE> <DEDENT> def connect(self, init=False): <NEW_LINE> <INDENT> LOG.debug("Connecting to database...") <NEW_LINE> LOG.debug("Checking if database is running at [%s:%s]", self.host, str(self.port)) <NEW_LINE> if self.user: <NEW_LINE> <INDENT> check_db = ['psql', '-h', self.host, '-p', str(self.port), '-U', self.user, '-d', self.database, '-c', 'SELECT version();'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> check_db = ['psql', '-h', self.host, '-p', str(self.port), '-c', 'SELECT version();'] <NEW_LINE> <DEDENT> if not self.interactive: <NEW_LINE> <INDENT> check_db.append('--no-password') <NEW_LINE> <DEDENT> env = self.run_env if self.run_env else os.environ <NEW_LINE> env = env.copy() <NEW_LINE> if self.password: <NEW_LINE> <INDENT> env['PGPASSWORD'] = self.password <NEW_LINE> <DEDENT> err, code = util.call_command(check_db, env) <NEW_LINE> if code: <NEW_LINE> <INDENT> LOG.debug(err) <NEW_LINE> return DBStatus.FAILED_TO_CONNECT <NEW_LINE> <DEDENT> if init: <NEW_LINE> <INDENT> if not self._create_schema(): <NEW_LINE> <INDENT> return DBStatus.SCHEMA_INIT_ERROR <NEW_LINE> <DEDENT> <DEDENT> return self.check_schema() <NEW_LINE> <DEDENT> def get_connection_string(self): <NEW_LINE> <INDENT> return self._get_connection_string(self.database) <NEW_LINE> <DEDENT> def get_db_location(self): <NEW_LINE> <INDENT> return self.host + ":" + str(self.port) + "/" + self.database
Handler for PostgreSQL.
62599065498bea3a75a591a0
class WindowsRegistryListEvent(time_events.FiletimeEvent): <NEW_LINE> <INDENT> DATA_TYPE = 'windows:registry:list' <NEW_LINE> def __init__( self, filetime, key_path, list_name, list_values, timestamp_description=None, value_name=None): <NEW_LINE> <INDENT> if timestamp_description is None: <NEW_LINE> <INDENT> timestamp_description = eventdata.EventTimestamp.WRITTEN_TIME <NEW_LINE> <DEDENT> super(WindowsRegistryListEvent, self).__init__( filetime, timestamp_description) <NEW_LINE> self.key_path = key_path <NEW_LINE> self.list_name = list_name <NEW_LINE> self.list_values = list_values <NEW_LINE> self.value_name = value_name
Convenience class for a list retrieved from the Registry e.g. MRU. Attributes: key_path: string containing the Windows Registry key path. list_name: string containing the name of the list. list_values: string containing the list values. value_name: string containing the Windows Registry value name.
6259906538b623060ffaa3f1
class MockClearDirTest(ClearDirTest): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.manifest_file = self.temp_dir.joinpath('removed_files') <NEW_LINE> <DEDENT> def test_clear_dir(self): <NEW_LINE> <INDENT> def track_file(path): <NEW_LINE> <INDENT> with self.manifest_file.open('a') as manifest: <NEW_LINE> <INDENT> print(path, file=manifest) <NEW_LINE> <DEDENT> <DEDENT> with unittest.mock.patch('os.remove', track_file): <NEW_LINE> <INDENT> clear_dir(self.temp_dir) <NEW_LINE> <DEDENT> cleared_files = self.manifest_file.read_text().split('\n') <NEW_LINE> for path in self.test_file_paths: <NEW_LINE> <INDENT> self.assertIn(str(path), cleared_files)
Mock out os.remove in clear_dir.
6259906599cbb53fe6832623
class RouterStatusEntryV3(RouterStatusEntry): <NEW_LINE> <INDENT> TYPE_ANNOTATION_NAME = 'network-status-consensus-3' <NEW_LINE> ATTRIBUTES = dict(RouterStatusEntry.ATTRIBUTES, **{ 'digest': (None, _parse_r_line), 'or_addresses': ([], _parse_a_line), 'identifier_type': (None, _parse_id_line), 'identifier': (None, _parse_id_line), 'bandwidth': (None, _parse_w_line), 'measured': (None, _parse_w_line), 'is_unmeasured': (False, _parse_w_line), 'unrecognized_bandwidth_entries': ([], _parse_w_line), 'exit_policy': (None, _parse_p_line), 'protocols': ({}, _parse_pr_line), 'microdescriptor_hashes': ([], _parse_m_line), }) <NEW_LINE> PARSER_FOR_LINE = dict(RouterStatusEntry.PARSER_FOR_LINE, **{ 'a': _parse_a_line, 'w': _parse_w_line, 'p': _parse_p_line, 'pr': _parse_pr_line, 'id': _parse_id_line, 'm': _parse_m_line, }) <NEW_LINE> @classmethod <NEW_LINE> def content(cls: Type['stem.descriptor.router_status_entry.RouterStatusEntryV3'], attr: Optional[Mapping[str, str]] = None, exclude: Sequence[str] = ()) -> bytes: <NEW_LINE> <INDENT> return _descriptor_content(attr, exclude, ( ('r', '%s p1aag7VwarGxqctS7/fS0y5FU+s oQZFLYe9e4A7bOkWKR7TaNxb0JE %s %s 9001 0' % (_random_nickname(), _random_date(), _random_ipv4_address())), ('s', 'Fast Named Running Stable Valid'), )) <NEW_LINE> <DEDENT> def _name(self, is_plural: bool = False) -> str: <NEW_LINE> <INDENT> return 'Router status entries (v3)' if is_plural else 'Router status entry (v3)' <NEW_LINE> <DEDENT> def _required_fields(self) -> Tuple[str, ...]: <NEW_LINE> <INDENT> return ('r', 's') <NEW_LINE> <DEDENT> def _single_fields(self) -> Tuple[str, ...]: <NEW_LINE> <INDENT> return ('r', 's', 'v', 'w', 'p', 'pr')
Information about an individual router stored within a version 3 network status document. :var list or_addresses: **\*** relay's OR addresses, this is a tuple listing of the form (address (**str**), port (**int**), is_ipv6 (**bool**)) :var str identifier_type: identity digest key type :var str identifier: base64 encoded identity digest :var str digest: **\*** router's upper-case hex digest :var int bandwidth: bandwidth measured to be available by the relay, this is an arbitrary units (currently kilobytes per second) heuristic generated by the Bandwidth authorities to weight relay selection :var int measured: *bandwidth* vote provided by a bandwidth authority :var bool is_unmeasured: *bandwidth* measurement isn't based on three or more measurements :var list unrecognized_bandwidth_entries: **\*** bandwidth weighting information that isn't yet recognized :var stem.exit_policy.MicroExitPolicy exit_policy: router's exit policy :var dict protocols: mapping of protocols to their supported versions :var list microdescriptor_hashes: **\*** tuples of two values, the list of consensus methods for generating a set of digests and the 'algorithm => digest' mappings **\*** attribute is either required when we're parsed with validation or has a default value; others are left as **None** if undefined .. versionchanged:: 1.5.0 Added the identifier and identifier_type attributes. .. versionchanged:: 1.6.0 Added the protocols attribute.
62599065adb09d7d5dc0bcaa
class ApiGenericMixin(object): <NEW_LINE> <INDENT> def finalize_response(self, request, response, *args, **kwargs): <NEW_LINE> <INDENT> if not isinstance(response, Response): <NEW_LINE> <INDENT> return response <NEW_LINE> <DEDENT> response_data = {"result": True, "code": "OK", "message": "success", "data": []} <NEW_LINE> if ( response.data is None or (isinstance(response.data, (list, tuple))) or (isinstance(response.data, dict) and not ("code" in response.data and "result" in response.data)) ): <NEW_LINE> <INDENT> response_data["data"] = response.data <NEW_LINE> response.data = response_data <NEW_LINE> <DEDENT> if response.status_code in [status.HTTP_201_CREATED, status.HTTP_204_NO_CONTENT]: <NEW_LINE> <INDENT> response.status_code = status.HTTP_200_OK <NEW_LINE> <DEDENT> return super(ApiGenericMixin, self).finalize_response(request, response, *args, **kwargs)
Common helper functions for API view classes.
625990652ae34c7f260ac828
class TZscoreLinear(ScoreTransformModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__('tzl') <NEW_LINE> self.rscore_max = 150 <NEW_LINE> self.rscore_min = 0 <NEW_LINE> self.tscore_mean = 50 <NEW_LINE> self.tscore_std = 10 <NEW_LINE> self.tscore_stdnum = 4 <NEW_LINE> <DEDENT> def set_data(self, rawdf=None, scorefields=None): <NEW_LINE> <INDENT> self.rawdf = rawdf <NEW_LINE> self.scorefields = scorefields <NEW_LINE> <DEDENT> def set_parameters(self, rscore_max=150, rscore_min=0, tscore_std=10, tscore_mean=50, tscore_stdnum=4): <NEW_LINE> <INDENT> self.rscore_max = rscore_max <NEW_LINE> self.rscore_min = rscore_min <NEW_LINE> self.tscore_mean = tscore_mean <NEW_LINE> self.tscore_std = tscore_std <NEW_LINE> self.tscore_stdnum = tscore_stdnum <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> super().run() <NEW_LINE> self.outdf = self.rawdf[self.scorefields] <NEW_LINE> for sf in self.scorefields: <NEW_LINE> <INDENT> rmean, rstd = self.outdf[sf].describe().loc[['mean', 'std']].values <NEW_LINE> self.outdf[sf+'_zscore'] = self.outdf[sf].apply(lambda x: min(max((x - rmean) / rstd, -self.tscore_stdnum), self.tscore_stdnum)) <NEW_LINE> self.outdf[sf+'_tscore'] = self.outdf[sf+'_zscore'].apply(lambda x: x * self.tscore_std + self.tscore_mean) <NEW_LINE> <DEDENT> <DEDENT> def report(self): <NEW_LINE> <INDENT> pl.report_stats_describe(self.rawdf) <NEW_LINE> <DEDENT> def plot(self, mode='raw'): <NEW_LINE> <INDENT> super().plot()
Get z-scores by the linear formula (x - mean) / std, then rescale them to T-scores.
6259906591f36d47f2231a2f
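A minimal numeric sketch of the transform that run() applies, assuming the default parameters (T-mean 50, T-std 10, z clipped at +/-4); only pandas is required.

    import pandas as pd

    raw = pd.Series([60, 70, 80, 90, 100])
    z = ((raw - raw.mean()) / raw.std()).clip(-4, 4)   # linear z-score, clipped
    t = z * 10 + 50                                    # rescale to T-scores
    print(t.round(2).tolist())   # [37.35, 43.68, 50.0, 56.32, 62.65]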
class ListColumnRepresentation(ColumnRepresentation): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(ListColumnRepresentation, self).__init__() <NEW_LINE> <DEDENT> def as_feature_spec(self, logical_column): <NEW_LINE> <INDENT> return tf.VarLenFeature(logical_column.domain.dtype) <NEW_LINE> <DEDENT> def as_batched_placeholder(self, logical_column): <NEW_LINE> <INDENT> return tf.sparse_placeholder( logical_column.domain.dtype, [None] + logical_column.shape.tf_shape().as_list())
Represent the column using a variable size.
62599065a17c0f6771d5d746
class VivisectRemoteTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_basic(self): <NEW_LINE> <INDENT> testfile = helpers.getTestPath('windows', 'amd64', 'firefox.exe') <NEW_LINE> good = vivisect.VivWorkspace() <NEW_LINE> good.loadFromFile(testfile) <NEW_LINE> host = 'localhost' <NEW_LINE> port = 0x4097 <NEW_LINE> with tempfile.TemporaryDirectory() as tmpd: <NEW_LINE> <INDENT> tmpf = tempfile.NamedTemporaryFile(dir=tmpd, delete=False) <NEW_LINE> try: <NEW_LINE> <INDENT> proc = mp.Process(target=runServer, args=(tmpf.name, port,)) <NEW_LINE> proc.daemon = True <NEW_LINE> proc.start() <NEW_LINE> time.sleep(0.5) <NEW_LINE> retry = 0 <NEW_LINE> conn = False <NEW_LINE> while retry < 5: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> server = v_r_server.connectToServer(host, port) <NEW_LINE> conn = True <NEW_LINE> break <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> retry += 1 <NEW_LINE> time.sleep(0.2) <NEW_LINE> <DEDENT> <DEDENT> if not conn: <NEW_LINE> <INDENT> self.fail('Could not connect to %s:%s' % (host, port)) <NEW_LINE> <DEDENT> wslist = server.listWorkspaces() <NEW_LINE> self.assertEqual(len(wslist), 1) <NEW_LINE> self.assertEqual(server.getServerVersion(), 20130820) <NEW_LINE> othr = v_r_server.getServerWorkspace(server, wslist[0]) <NEW_LINE> retry = 0 <NEW_LINE> while retry < 5: <NEW_LINE> <INDENT> locs = othr.getLocations() <NEW_LINE> if len(locs) != 1389: <NEW_LINE> <INDENT> retry += 1 <NEW_LINE> time.sleep(0.2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> self.assertEqual(len(othr.getLocations()), 1389) <NEW_LINE> self.assertEqual(set(othr.getLocations()), set(good.getLocations())) <NEW_LINE> self.assertEqual(set(othr.getXrefs()), set(good.getXrefs())) <NEW_LINE> try: <NEW_LINE> <INDENT> othr.server = None <NEW_LINE> q = othr.chan_lookup.get(othr.rchan) <NEW_LINE> if q: <NEW_LINE> <INDENT> q.puts((v_const.VWE_AUTOANALFIN, None)) <NEW_LINE> <DEDENT> proc.terminate() <NEW_LINE> proc.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> tmpf.close() <NEW_LINE> os.unlink(tmpf.name)
So... what would be fun is basically a chain of remote workspaces all tied together in interesting configurations.
625990655fdd1c0f98e5f6c4
class FileBackupTask(BackupTask): <NEW_LINE> <INDENT> checker_fqdn = models.CharField(max_length=255, choices=settings.FILE_BACKUP_CHECKERS, verbose_name=_(u"Checker fqdn"), help_text=_(u"Machine fqdn where this backups shoud be checked.")) <NEW_LINE> directory = models.CharField(max_length=255, help_text=_(u'Directory where files shoud be.')) <NEW_LINE> days_in_hard_drive = models.IntegerField(blank=False, null=False, default=180, help_text=_(u'Number of days that this backup shoud be on disk at most.')) <NEW_LINE> max_backup_month = models.IntegerField(blank=False, null=False, default=7, help_text=_(u'Number of backups that shoud to be on disk after a month.')) <NEW_LINE> objects = TaskManager() <NEW_LINE> @staticmethod <NEW_LINE> def get_fbp(machine, filename): <NEW_LINE> <INDENT> logger.debug('Searching FileBackupProduct for filename %s and machine %s', filename, machine) <NEW_LINE> for fbp in FileBackupProduct.objects.filter(file_backup_task__machine=machine, file_backup_task__active=True): <NEW_LINE> <INDENT> if fbp.file_pattern.get_re(machine).match(filename): <NEW_LINE> <INDENT> return fbp <NEW_LINE> <DEDENT> <DEDENT> logger.debug('There is no FileBackupProduct for machine %s', machine) <NEW_LINE> return None
File backup task
62599065f548e778e596ccca
class Constant(CExpression): <NEW_LINE> <INDENT> def __init__(self, s): <NEW_LINE> <INDENT> self.s = str(s) <NEW_LINE> <DEDENT> def add_includes(self, program): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def to_c_string(self, root=False): <NEW_LINE> <INDENT> return self.s
A predefined C constant
62599065aad79263cf42ff04
class FeedConfigException(StreamApiException): <NEW_LINE> <INDENT> status_code = 400 <NEW_LINE> code = 6
Raised when there are missing or misconfigured custom fields
6259906532920d7e50bc7787
class TenCrops(object): <NEW_LINE> <INDENT> def __init__(self, size, mean=[0.0, 0.0, 0.0], std=[1.0, 1.0, 1.0], interpolation=Image.BILINEAR): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> self.interpolation = interpolation <NEW_LINE> self.mean = mean <NEW_LINE> self.std = std <NEW_LINE> self.fiveCrops = FiveCrops(self.size, self.mean, self.std, self.interpolation, True) <NEW_LINE> <DEDENT> def __call__(self, img, inv, flow): <NEW_LINE> <INDENT> return self.fiveCrops(img, inv, flow) <NEW_LINE> <DEDENT> def randomize_parameters(self): <NEW_LINE> <INDENT> pass
Generates four corner and center crops and their horizontally flipped versions
6259906556ac1b37e6303885
class School(NonEssentialBusinessBaseLocation[SchoolState]): <NEW_LINE> <INDENT> state_type = SchoolState
Implements a simple school
625990650a50d4780f706960
class Subject(): <NEW_LINE> <INDENT> def add_observer(self, obs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def remove_observer(self, obs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update_observers(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_state(self): <NEW_LINE> <INDENT> pass
Subject interface
62599065b7558d5895464acf
class Time: <NEW_LINE> <INDENT> pass
Represents a time of day, with attributes hour, minute, and second.
625990654428ac0f6e659c73
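The record above is only a stub; one plausible completion matching the docstring's hour/minute/second attributes (an assumption, not the author's code):

    class Time:
        """Represents a time of day with hour, minute, and second."""
        def __init__(self, hour=0, minute=0, second=0):
            self.hour = hour
            self.minute = minute
            self.second = second

        def __repr__(self):
            return '{:02d}:{:02d}:{:02d}'.format(self.hour, self.minute, self.second)

    print(Time(9, 5, 30))   # 09:05:30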
class ConsulMapParser(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.log = logging.getLogger(self.__class__.__name__) <NEW_LINE> <DEDENT> def GetMap(self, cache_info, data): <NEW_LINE> <INDENT> entries = collections.defaultdict(dict) <NEW_LINE> for line in json.loads(cache_info.read()): <NEW_LINE> <INDENT> key = line.get('Key', '').split('/') <NEW_LINE> value = line.get('Value', '') <NEW_LINE> if not value or not key: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> value = base64.b64decode(value) <NEW_LINE> name = str(key[-2]) <NEW_LINE> entry_piece = key[-1] <NEW_LINE> entries[name][entry_piece] = value <NEW_LINE> <DEDENT> for name, entry in entries.iteritems(): <NEW_LINE> <INDENT> map_entry = self._ReadEntry(name, entry) <NEW_LINE> if map_entry is None: <NEW_LINE> <INDENT> self.log.warn('Could not create entry from line %r in cache, skipping', entry) <NEW_LINE> continue <NEW_LINE> <DEDENT> if not data.Add(map_entry): <NEW_LINE> <INDENT> self.log.warn('Could not add entry %r read from line %r in cache', map_entry, entry) <NEW_LINE> <DEDENT> <DEDENT> return data
A base class for parsing nss_files module cache.
62599065097d151d1a2c27ad
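A sketch of the input shape GetMap() expects: a JSON list of Consul KV records whose Value is base64-encoded and whose last two Key segments become the entry name and field. The 'passwd/jaq/...' keys here are hypothetical.

    import base64, json

    records = [
        {'Key': 'passwd/jaq/uid', 'Value': base64.b64encode(b'37').decode()},
        {'Key': 'passwd/jaq/shell', 'Value': base64.b64encode(b'/bin/zsh').decode()},
    ]
    print(json.dumps(records))   # feed this (as a file-like cache) to GetMap()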
class Relationship: <NEW_LINE> <INDENT> def __init__(self, id, subject, predicate, object, synset): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.subject = subject <NEW_LINE> self.predicate = predicate <NEW_LINE> self.object = object <NEW_LINE> self.synset = synset <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{0}: {1} {2} {3}".format(self.id, self.subject, self.predicate, self.object) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self)
Relationships. Ex: 'man - jumping over - fire hydrant'. Attributes: subject (int), predicate (string), object (int), rel_canon (Synset).
62599065fff4ab517ebcef5d
class TaskMetrics(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.SubmittedCount = None <NEW_LINE> self.PendingCount = None <NEW_LINE> self.RunnableCount = None <NEW_LINE> self.StartingCount = None <NEW_LINE> self.RunningCount = None <NEW_LINE> self.SucceedCount = None <NEW_LINE> self.FailedInterruptedCount = None <NEW_LINE> self.FailedCount = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.SubmittedCount = params.get("SubmittedCount") <NEW_LINE> self.PendingCount = params.get("PendingCount") <NEW_LINE> self.RunnableCount = params.get("RunnableCount") <NEW_LINE> self.StartingCount = params.get("StartingCount") <NEW_LINE> self.RunningCount = params.get("RunningCount") <NEW_LINE> self.SucceedCount = params.get("SucceedCount") <NEW_LINE> self.FailedInterruptedCount = params.get("FailedInterruptedCount") <NEW_LINE> self.FailedCount = params.get("FailedCount")
Task statistical metrics
62599065e76e3b2f99fda141
class AccountAgedTrialBalance(orm.TransientModel): <NEW_LINE> <INDENT> _inherit = "open.invoices.webkit" <NEW_LINE> _name = "account.aged.trial.balance.webkit" <NEW_LINE> _description = "Aged partner balanced" <NEW_LINE> def _get_current_fiscalyear(self, cr, uid, context=None): <NEW_LINE> <INDENT> user_obj = self.pool['res.users'] <NEW_LINE> company = user_obj.browse(cr, uid, uid, context=context).company_id <NEW_LINE> fyear_obj = self.pool['account.period'] <NEW_LINE> today = date.today().strftime(DATE_FORMAT) <NEW_LINE> fyear_ids = fyear_obj.search( cr, uid, [('date_start', '>=', today), ('date_stop', '<=', today), ('company_id', '=', company.id)], limit=1, context=context) <NEW_LINE> if fyear_ids: <NEW_LINE> <INDENT> return fyear_ids[0] <NEW_LINE> <DEDENT> <DEDENT> _columns = { 'filter': fields.selection( [('filter_period', 'Periods')], "Filter by", required=True), 'fiscalyear_id': fields.many2one( 'account.fiscalyear', 'Fiscal Year', required=True), 'period_to': fields.many2one('account.period', 'End Period', required=True), 'subsidiary_ids': fields.many2many('res.company.subsidiary', 'subsidiary_report_aged_partner_rel', 'aged_partner_id', 'subsidiary_id', string="Filters on subsidiarys"), } <NEW_LINE> _defaults = { 'filter': 'filter_period', 'fiscalyear_id': _get_current_fiscalyear, } <NEW_LINE> def onchange_fiscalyear(self, cr, uid, ids, fiscalyear=False, period_id=False, date_to=False, until_date=False, context=None): <NEW_LINE> <INDENT> res = super(AccountAgedTrialBalance, self).onchange_fiscalyear( cr, uid, ids, fiscalyear=fiscalyear, period_id=period_id, date_to=date_to, until_date=until_date, context=context ) <NEW_LINE> filters = self.onchange_filter(cr, uid, ids, filter='filter_period', fiscalyear_id=fiscalyear, context=context) <NEW_LINE> res['value'].update({ 'period_from': filters['value']['period_from'], 'period_to': filters['value']['period_to'], }) <NEW_LINE> return res <NEW_LINE> <DEDENT> def _print_report(self, cr, uid, ids, data, context=None): <NEW_LINE> <INDENT> data = self.pre_print_report(cr, uid, ids, data, context=context) <NEW_LINE> return {'type': 'ir.actions.report.xml', 'report_name': 'account.account_aged_trial_balance_webkit', 'datas': data}
Launches the aged partner balance report. This report is based on the Open Invoice report and shares much of its logic.
625990657c178a314d78e78d
class LinkEnableCfg(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.b_key = "link-enable-cfg" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.ena_sequence = "" <NEW_LINE> self.enaeth = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value)
This class does not support CRUD operations; please use the parent. :param ena_sequence: {"description": "Sequence number (Specify the physical port number)", "minimum": 1, "type": "number", "maximum": 16, "format": "number"} :param enaeth: {"type": "number", "description": "Specify the physical port number (Ethernet interface number)", "format": "interface"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
625990653539df3088ecd9df
class CollectionUsage(object): <NEW_LINE> <INDENT> def __init__(self, available=None, maximum_allowed=None): <NEW_LINE> <INDENT> self.available = available <NEW_LINE> self.maximum_allowed = maximum_allowed <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> args = {} <NEW_LINE> if 'available' in _dict: <NEW_LINE> <INDENT> args['available'] = _dict['available'] <NEW_LINE> <DEDENT> if 'maximum_allowed' in _dict: <NEW_LINE> <INDENT> args['maximum_allowed'] = _dict['maximum_allowed'] <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'available') and self.available is not None: <NEW_LINE> <INDENT> _dict['available'] = self.available <NEW_LINE> <DEDENT> if hasattr(self, 'maximum_allowed') and self.maximum_allowed is not None: <NEW_LINE> <INDENT> _dict['maximum_allowed'] = self.maximum_allowed <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
Summary of the collection usage in the environment. :attr int available: (optional) Number of active collections in the environment. :attr int maximum_allowed: (optional) Total number of collections allowed in the environment.
625990654a966d76dd5f0637
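A quick round-trip sketch for the (de)serialization helpers above:

    usage = CollectionUsage._from_dict({'available': 2, 'maximum_allowed': 4})
    assert usage == CollectionUsage(available=2, maximum_allowed=4)
    print(usage)   # {"available": 2, "maximum_allowed": 4}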
class IServiceRequest(Interface): <NEW_LINE> <INDENT> pass
Marker interface for the ServiceRequest
62599065796e427e5384feb8
class CoordinationNumberAttributeGenerator: <NEW_LINE> <INDENT> def generate_features(self, entries): <NEW_LINE> <INDENT> feat_values = [] <NEW_LINE> feat_headers = [] <NEW_LINE> if not isinstance(entries, list): <NEW_LINE> <INDENT> raise ValueError("Argument should be of type list of " "CrystalStructureEntry's") <NEW_LINE> <DEDENT> elif (entries and not isinstance(entries[0], CrystalStructureEntry)): <NEW_LINE> <INDENT> raise ValueError("Argument should be of type list of " "CrystalStructureEntry's") <NEW_LINE> <DEDENT> feat_headers.append("mean_Coordination") <NEW_LINE> feat_headers.append("var_Coordination") <NEW_LINE> feat_headers.append("min_Coordination") <NEW_LINE> feat_headers.append("max_Coordination") <NEW_LINE> for entry in entries: <NEW_LINE> <INDENT> temp_list = [] <NEW_LINE> output = entry.compute_voronoi_tessellation() <NEW_LINE> mean = output.face_count_average() <NEW_LINE> variance = output.face_count_variance() <NEW_LINE> minimum = output.face_count_minimum() <NEW_LINE> maximum = output.face_count_maximum() <NEW_LINE> temp_list.append(mean) <NEW_LINE> temp_list.append(variance) <NEW_LINE> temp_list.append(minimum) <NEW_LINE> temp_list.append(maximum) <NEW_LINE> feat_values.append(temp_list) <NEW_LINE> <DEDENT> features = pd.DataFrame(feat_values, columns=feat_headers) <NEW_LINE> return features
Class to compute attributes based on the coordination number. Uses the Voronoi tessellation to define the coordination network. DEV NOTE (LW 15Jul15): Could benefit from adding a face size cutoff, where atoms are only defined as coordinated if the face between them is larger than a certain fraction of the surface area of both cells. Otherwise faces on the cells that are only present due to numerical issues will be counted as neighbors. The metallic glass community commonly removes any faces smaller than 1% of the total surface area of a cell.
625990653539df3088ecd9e0
class AverageMeter(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.val = 0 <NEW_LINE> self.avg = 0 <NEW_LINE> self.sum = 0 <NEW_LINE> self.count = 0 <NEW_LINE> <DEDENT> def update(self, val, n=1): <NEW_LINE> <INDENT> self.val = val <NEW_LINE> self.sum += val * n <NEW_LINE> self.count += n <NEW_LINE> if self.count != 0: <NEW_LINE> <INDENT> self.avg = self.sum / self.count
Computes and stores the average and current value
62599065be8e80087fbc07ca
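Typical training-loop usage of the meter above; the (loss, batch size) pairs are made up for illustration:

    meter = AverageMeter()
    for loss, batch_size in [(0.9, 32), (0.7, 32), (0.5, 16)]:
        meter.update(loss, n=batch_size)
    print(round(meter.avg, 4))   # 0.74, the batch-size-weighted mean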
class ServiceCatalog(object): <NEW_LINE> <INDENT> def __init__(self, resource_dict, region_name=None): <NEW_LINE> <INDENT> self.catalog = resource_dict <NEW_LINE> self.region_name = region_name <NEW_LINE> <DEDENT> def get_token(self): <NEW_LINE> <INDENT> token = {'id': self.catalog['token']['id'], 'expires': self.catalog['token']['expires']} <NEW_LINE> try: <NEW_LINE> <INDENT> token['user_id'] = self.catalog['user']['id'] <NEW_LINE> token['tenant_id'] = self.catalog['token']['tenant']['id'] <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return token <NEW_LINE> <DEDENT> def url_for(self, attr=None, filter_value=None, service_type='identity', endpoint_type='publicURL'): <NEW_LINE> <INDENT> catalog = self.catalog.get('serviceCatalog', []) <NEW_LINE> if not catalog: <NEW_LINE> <INDENT> raise exceptions.EmptyCatalog('The service catalog is empty.') <NEW_LINE> <DEDENT> for service in catalog: <NEW_LINE> <INDENT> if service['type'] != service_type: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> endpoints = service['endpoints'] <NEW_LINE> for endpoint in endpoints: <NEW_LINE> <INDENT> if self.region_name and endpoint.get('region') != self.region_name: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not filter_value or endpoint.get(attr) == filter_value: <NEW_LINE> <INDENT> return endpoint[endpoint_type] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> raise exceptions.EndpointNotFound('Endpoint not found.') <NEW_LINE> <DEDENT> def get_endpoints(self, service_type=None, endpoint_type=None): <NEW_LINE> <INDENT> sc = {} <NEW_LINE> for service in self.catalog.get('serviceCatalog', []): <NEW_LINE> <INDENT> if service_type and service_type != service['type']: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> sc[service['type']] = [] <NEW_LINE> for endpoint in service['endpoints']: <NEW_LINE> <INDENT> if endpoint_type and endpoint_type not in endpoint.keys(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> sc[service['type']].append(endpoint) <NEW_LINE> <DEDENT> <DEDENT> return sc
Helper methods for dealing with a Keystone Service Catalog.
62599065a8ecb0332587295a
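A minimal sketch of url_for() against a hand-built Keystone v2-style catalog; the endpoint URL is a placeholder, not a real service:

    catalog = ServiceCatalog({
        'token': {'id': 'tok', 'expires': 'later'},
        'serviceCatalog': [{
            'type': 'identity',
            'endpoints': [{'region': 'r1', 'publicURL': 'http://keystone.example/v2.0'}],
        }],
    })
    print(catalog.url_for(service_type='identity'))   # http://keystone.example/v2.0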
class TestPluginCiscoNXOSEnrichmentN3KModels(TestPluginCiscoNXOSEnrichment, unittest.TestCase): <NEW_LINE> <INDENT> resource_id = 'n3k_3048T' <NEW_LINE> snmp_community = 'n3k_3048T' <NEW_LINE> results_data_file = 'n3k_3048T.results.json' <NEW_LINE> resource_model = '3048T'
Test plugin's handling of N3K Model NXOS devices.
62599065d486a94d0ba2d70b
class LazyGroup(click.Group): <NEW_LINE> <INDENT> def __init__(self, import_name, **kwargs): <NEW_LINE> <INDENT> self._import_name = import_name <NEW_LINE> super().__init__(**kwargs) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def _impl(self): <NEW_LINE> <INDENT> module, name = self._import_name.split(':', 1) <NEW_LINE> return getattr(import_module(module), name) <NEW_LINE> <DEDENT> def get_command(self, ctx, cmd_name): <NEW_LINE> <INDENT> return self._impl.get_command(ctx, cmd_name) <NEW_LINE> <DEDENT> def list_commands(self, ctx): <NEW_LINE> <INDENT> return self._impl.list_commands(ctx) <NEW_LINE> <DEDENT> def invoke(self, ctx): <NEW_LINE> <INDENT> return self._impl.invoke(ctx) <NEW_LINE> <DEDENT> def get_usage(self, ctx): <NEW_LINE> <INDENT> return self._impl.get_usage(ctx) <NEW_LINE> <DEDENT> def get_params(self, ctx): <NEW_LINE> <INDENT> return self._impl.get_params(ctx)
A click Group that imports the actual implementation only when needed. This allows for more resilient CLIs where the top-level command does not fail when a subcommand is broken enough to fail at import time.
62599065adb09d7d5dc0bcac
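A hedged usage sketch: 'mypkg.cli:real_cli' is a hypothetical module path holding the real click.Group that should only be imported when a command actually runs.

    import click  # LazyGroup subclasses click.Group

    cli = LazyGroup(import_name='mypkg.cli:real_cli', name='mypkg',
                    help='Top-level entry point; subcommands import lazily.')

    if __name__ == '__main__':
        cli()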
class GetCodeList: <NEW_LINE> <INDENT> def __init__(self, verbose=False): <NEW_LINE> <INDENT> self.verbose = verbose <NEW_LINE> self.url = TOSHO_1ST_LIST_URL <NEW_LINE> <DEDENT> def get_response(self): <NEW_LINE> <INDENT> print("url: ", self.url) <NEW_LINE> with urllib.request.urlopen(self.url) as response: <NEW_LINE> <INDENT> return response.read() <NEW_LINE> <DEDENT> <DEDENT> def get_data_url(self, tag, attr, html): <NEW_LINE> <INDENT> parser = Parser(tag, attr) <NEW_LINE> parser.feed(str(html)) <NEW_LINE> for attr in parser.attrs: <NEW_LINE> <INDENT> if "data_j.xls" in attr: <NEW_LINE> <INDENT> return "http://www.jpx.co.jp" + attr <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_data_from_url(self, link): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> socket = urllib.request.urlopen(link) <NEW_LINE> xls = pd.ExcelFile(socket) <NEW_LINE> df_all = xls.parse(xls.sheet_names[0], header=0, index_col=None) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("use pandas read_excel") <NEW_LINE> df_all = pd.read_excel(link, header=0, index_col=None) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> data_1st_df = df_all.ix[df_all["市場・商品区分"]=="市場第一部(内国株)", :] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("avoided using the deprecated .ix indexer") <NEW_LINE> data_1st_df = df_all.loc[df_all["市場・商品区分"]=="市場第一部(内国株)", :] <NEW_LINE> <DEDENT> if self.verbose: <NEW_LINE> <INDENT> print("[Get_Code_List:get_data_from_url]: len(data_1st_df): {}".format(len(data_1st_df))) <NEW_LINE> <DEDENT> return data_1st_df <NEW_LINE> <DEDENT> def get_new_stock_code(self): <NEW_LINE> <INDENT> link = self.get_data_url("a", "href", self.get_response()) <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("[Get_Code_List:get_new_stock_code]: data url link: {}".format(link)) <NEW_LINE> <DEDENT> data_df = self.get_data_from_url(link) <NEW_LINE> return data_df
GetCodeList class. Retrieves the list of stock codes (銘柄リスト) for the Tokyo Stock Exchange First Section.
62599065fff4ab517ebcef5e
class GuardianSpider(scrapy.Spider): <NEW_LINE> <INDENT> name = "guardian" <NEW_LINE> start_urls = ["https://www.theguardian.com/au"] <NEW_LINE> def __init__(self, num_of_days=2, *args, **kwargs): <NEW_LINE> <INDENT> super(GuardianSpider, self).__init__(*args, **kwargs) <NEW_LINE> self.num_of_days = self.crawl_day_count = int(num_of_days) <NEW_LINE> <DEDENT> def parse(self, response): <NEW_LINE> <INDENT> primary_tabs = response.xpath( '//ul[@class="menu-group menu-group--primary"]/li[@class="menu-item js-navigation-item"]') <NEW_LINE> for index, tab in enumerate(primary_tabs): <NEW_LINE> <INDENT> if index != 1: <NEW_LINE> <INDENT> category = tab.xpath('./@data-section-name').extract_first() <NEW_LINE> for secondary_tab in tab.xpath('ul/li/a'): <NEW_LINE> <INDENT> sub_category = secondary_tab.xpath('./text()').extract_first() <NEW_LINE> sub_category_url = secondary_tab.xpath('./@href').extract_first() <NEW_LINE> date_to_process = datetime.today().date() <NEW_LINE> while self.num_of_days: <NEW_LINE> <INDENT> formatted_date = date_to_process.strftime('%Y/%b/%d').lower() <NEW_LINE> news_url = "{}/{}/all".format(sub_category_url, formatted_date) <NEW_LINE> yield scrapy.Request( response.urljoin(news_url), callback=self.fetch_news_url, meta={ 'category': category, 'sub_category': sub_category, 'date': date_to_process } ) <NEW_LINE> self.num_of_days -= 1 <NEW_LINE> date_to_process = date_to_process - timedelta(days=1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.num_of_days = self.crawl_day_count <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def fetch_news_url(self, response): <NEW_LINE> <INDENT> news_links = response.xpath('//div[@class="fc-item__container"]/a/@href').extract() <NEW_LINE> for news_link in news_links: <NEW_LINE> <INDENT> yield scrapy.Request( response.urljoin(news_link), callback=self.fetch_news_attributes, meta=response.meta ) <NEW_LINE> <DEDENT> <DEDENT> def fetch_news_attributes(self, response): <NEW_LINE> <INDENT> category = response.meta.get('category', '') <NEW_LINE> sub_category = response.meta.get('sub_category', '') <NEW_LINE> creation_date = response.meta.get('date', '').strftime('%Y-%m-%d') <NEW_LINE> item_loader = ItemLoader(item=GuardianItem(), response=response) <NEW_LINE> item_loader.add_xpath('headline', '//h1[contains(@class, "content__headline")]//text()') <NEW_LINE> item_loader.add_xpath('author', '//a[@rel="author"]/span/text()') <NEW_LINE> item_loader.add_xpath('content', '//div[contains(@class, "content__article-body")]//p[not(contains(@class, "Tweet-text"))]') <NEW_LINE> item_loader.add_value('category', category) <NEW_LINE> item_loader.add_value('sub_category', sub_category) <NEW_LINE> item_loader.add_value('url', response.url) <NEW_LINE> item_loader.add_value('creation_date', creation_date) <NEW_LINE> yield item_loader.load_item()
Spider to scrape news articles from Guardian HTML pages.
625990657d847024c075db19
class TestReplace(TestSMIME): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.test_cert_id = 1234 <NEW_LINE> self.api_version = "v2" <NEW_LINE> self.api_url = f"{self.cfixt.base_url}{self.ep_path}/{self.api_version}" <NEW_LINE> self.test_url = f"{self.api_url}/replace/order/{self.test_cert_id}" <NEW_LINE> self.test_csr = TestCertificates.fake_csr() <NEW_LINE> <DEDENT> def test_defaults(self): <NEW_LINE> <INDENT> smime = SMIME(client=self.client) <NEW_LINE> self.assertRaises(Exception, smime.replace) <NEW_LINE> <DEDENT> @responses.activate <NEW_LINE> def test_success(self): <NEW_LINE> <INDENT> responses.add(responses.POST, self.test_url, body='', status=204) <NEW_LINE> smime = SMIME(client=self.client) <NEW_LINE> smime.replace( cert_id=self.test_cert_id, csr=self.test_csr, ) <NEW_LINE> self.assertEqual(len(responses.calls), 1) <NEW_LINE> self.assertEqual(responses.calls[0].request.url, self.test_url)
Test the replace method.
625990658e7ae83300eea7d0
class SearchKeyword(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Key = None <NEW_LINE> self.Value = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Key = params.get("Key") <NEW_LINE> self.Value = params.get("Value") <NEW_LINE> member_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in member_set: <NEW_LINE> <INDENT> member_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(member_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fields are useless." % ",".join(member_set))
Search keyword.
625990653eb6a72ae038bda2
class DeleteMixin(object): <NEW_LINE> <INDENT> def delete(self): <NEW_LINE> <INDENT> db.session.delete(self) <NEW_LINE> db.session.flush() <NEW_LINE> return self
Provides a 'delete' method deleting an object from the DB.
625990654428ac0f6e659c75
class Phoneme: <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.created = True <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_json(cls, json): <NEW_LINE> <INDENT> return cls(json)
A class defining a phoneme object, including several functions for constructing phonemes and extracting information.
62599065462c4b4f79dbd14a
class ExternalUrlsPinger(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, entry, timeout=10, start_now=True): <NEW_LINE> <INDENT> self.results = [] <NEW_LINE> self.entry = entry <NEW_LINE> self.timeout = timeout <NEW_LINE> self.entry_url = '%s%s' % (site, self.entry.get_absolute_url()) <NEW_LINE> threading.Thread.__init__(self) <NEW_LINE> if start_now: <NEW_LINE> <INDENT> self.start() <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> socket.setdefaulttimeout(self.timeout) <NEW_LINE> external_urls = self.find_external_urls(self.entry) <NEW_LINE> external_urls_pingable = self.find_pingback_urls(external_urls) <NEW_LINE> for url, server_name in external_urls_pingable.items(): <NEW_LINE> <INDENT> reply = self.pingback_url(server_name, url) <NEW_LINE> self.results.append(reply) <NEW_LINE> <DEDENT> socket.setdefaulttimeout(None) <NEW_LINE> <DEDENT> def is_external_url(self, url, site_url=site): <NEW_LINE> <INDENT> url_splitted = urlsplit(url) <NEW_LINE> if not url_splitted.netloc: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return url_splitted.netloc != urlsplit(site_url).netloc <NEW_LINE> <DEDENT> def find_external_urls(self, entry): <NEW_LINE> <INDENT> soup = BeautifulSoup(entry.html_content) <NEW_LINE> external_urls = [a['href'] for a in soup.findAll('a') if self.is_external_url(a['href'])] <NEW_LINE> return external_urls <NEW_LINE> <DEDENT> def find_pingback_href(self, content): <NEW_LINE> <INDENT> soup = BeautifulSoup(content) <NEW_LINE> for link in soup.findAll('link'): <NEW_LINE> <INDENT> dict_attr = dict(link.attrs) <NEW_LINE> if 'rel' in dict_attr and 'href' in dict_attr: <NEW_LINE> <INDENT> if dict_attr['rel'].lower() == 'pingback': <NEW_LINE> <INDENT> return dict_attr.get('href') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def find_pingback_urls(self, urls): <NEW_LINE> <INDENT> pingback_urls = {} <NEW_LINE> for url in urls: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> page = urlopen(url) <NEW_LINE> server_url = page.info().get('X-Pingback') or self.find_pingback_href(page.read()) <NEW_LINE> if server_url: <NEW_LINE> <INDENT> server_url_splitted = urlsplit(server_url) <NEW_LINE> if not server_url_splitted.netloc: <NEW_LINE> <INDENT> url_splitted = urlsplit(url) <NEW_LINE> server_url = '%s://%s%s' % (url_splitted.scheme, url_splitted.netloc, server_url) <NEW_LINE> <DEDENT> pingback_urls[url] = server_url <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return pingback_urls <NEW_LINE> <DEDENT> def pingback_url(self, server_name, target_url): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> server = xmlrpclib.ServerProxy(server_name) <NEW_LINE> reply = server.pingback.ping(self.entry_url, target_url) <NEW_LINE> <DEDENT> except (xmlrpclib.Fault, xmlrpclib.ProtocolError): <NEW_LINE> <INDENT> reply = '%s cannot be pinged.' % target_url <NEW_LINE> <DEDENT> return reply
Threaded ExternalUrls Pinger
6259906567a9b606de547644
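A usage sketch: build the pinger without starting the thread, inspect the outbound links, then ping in the background. 'entry' is assumed to expose html_content and get_absolute_url() as the code requires.

    pinger = ExternalUrlsPinger(entry, start_now=False)
    print(pinger.find_external_urls(entry))   # links leaving the site
    pinger.start()                            # pingback each capable URL
    pinger.join()
    print(pinger.results)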
class UnorderedList: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.head = None <NEW_LINE> <DEDENT> def isEmpty(self): <NEW_LINE> <INDENT> return self.head == None <NEW_LINE> <DEDENT> def add(self, item): <NEW_LINE> <INDENT> temp = Node(item) <NEW_LINE> temp.setNext(self.head) <NEW_LINE> self.head = temp <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> current = self.head <NEW_LINE> count = 0 <NEW_LINE> while current != None: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> current = current.getNext() <NEW_LINE> <DEDENT> return count <NEW_LINE> <DEDENT> def search(self, item): <NEW_LINE> <INDENT> current = self.head <NEW_LINE> found = False <NEW_LINE> while current != None and not found: <NEW_LINE> <INDENT> if current.getData() == item: <NEW_LINE> <INDENT> found = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> current = current.getNext() <NEW_LINE> <DEDENT> <DEDENT> return found <NEW_LINE> <DEDENT> def remove(self, item): <NEW_LINE> <INDENT> current = self.head <NEW_LINE> previous = None <NEW_LINE> found = False <NEW_LINE> while not found: <NEW_LINE> <INDENT> if current.getData() == item: <NEW_LINE> <INDENT> found = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> previous = current <NEW_LINE> current = current.getNext() <NEW_LINE> <DEDENT> <DEDENT> if previous == None: <NEW_LINE> <INDENT> self.head = current.getNext() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> previous.setNext(current.getNext())
Here we begin creating the unordered list from a collection of the nodes defined above. As long as we know where to find the first node, each item can be found by following the links; hence the unordered list must contain a reference to the first node.
625990653cc13d1c6d466e87
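A quick usage sketch for the list above (assumes the Node class provides getData/getNext/setNext as used here):

    mylist = UnorderedList()
    for item in [31, 77, 17]:
        mylist.add(item)          # each add pushes a new head
    print(mylist.size())          # 3
    print(mylist.search(77))      # True
    mylist.remove(77)
    print(mylist.size())          # 2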
class BaseModel(object): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> create_time = db.Column(db.String(64), index=True, default=lambda: int(time.time() * 1000), comment="creation time") <NEW_LINE> update_time = db.Column(db.String(64), default=lambda: int(time.time() * 1000), comment="update time") <NEW_LINE> is_delete = db.Column(db.Boolean, default=False, comment="soft-delete flag")
Base model class that adds creation time and update time fields to every model.
625990651f5feb6acb16432f
class ExtranonceCounter(object): <NEW_LINE> <INDENT> def __init__(self, instance_id): <NEW_LINE> <INDENT> if instance_id < 0 or instance_id > 31: <NEW_LINE> <INDENT> raise Exception("Current ExtranonceCounter implementation needs an instance_id in <0, 31>.") <NEW_LINE> <DEDENT> self.counter = instance_id << 58 <NEW_LINE> self.size = struct.calcsize('>q') <NEW_LINE> <DEDENT> def get_size(self): <NEW_LINE> <INDENT> return self.size <NEW_LINE> <DEDENT> def get_new_bin(self): <NEW_LINE> <INDENT> self.counter += 1000000 <NEW_LINE> return struct.pack('>q', self.counter)
Implementation of a counter producing a unique extranonce across all pool instances. This is just a dumb "quick & dirty" solution, but it can be changed at any time without breaking anything.
62599065796e427e5384feba
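Bits 58-62 of the 64-bit counter carry the instance id (instance_id << 58), so up to 32 pool instances hand out non-overlapping extranonces. A quick check:

    import binascii

    c = ExtranonceCounter(instance_id=3)
    print(c.get_size())                        # 8 (a big-endian int64)
    print(binascii.hexlify(c.get_new_bin()))   # b'0c000000000f4240' - id 3 in the high bits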
class IssueCredentialStatus(IntEnum): <NEW_LINE> <INDENT> Null = 0 <NEW_LINE> OfferCredential = 1 <NEW_LINE> RequestCredential = 2 <NEW_LINE> IssueCredential = 3
https://github.com/hyperledger/aries-rfcs/tree/master/features/0036-issue-credential
625990657b25080760ed8883