code (stringlengths 4-4.48k) | docstring (stringlengths 1-6.45k) | _id (stringlengths 24) |
---|---|---|
class ContentProvider(Base): <NEW_LINE> <INDENT> def __init__(self, type=None, original_content_url=None, preview_image_url=None, **kwargs): <NEW_LINE> <INDENT> super(ContentProvider, self).__init__(**kwargs) <NEW_LINE> self.type = type <NEW_LINE> self.original_content_url = original_content_url <NEW_LINE> self.preview_image_url = preview_image_url | Content provider. | 6259904c4428ac0f6e659951 |
class _BaseDiff(object): <NEW_LINE> <INDENT> def __init__(self, a, b): <NEW_LINE> <INDENT> self.a = a <NEW_LINE> self.b = b <NEW_LINE> self._fileobj = None <NEW_LINE> self._indent = 0 <NEW_LINE> self._diff() <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return not self.identical <NEW_LINE> <DEDENT> if not six.PY2: <NEW_LINE> <INDENT> __bool__ = __nonzero__ <NEW_LINE> del __nonzero__ <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def fromdiff(cls, other, a, b): <NEW_LINE> <INDENT> sig = signature(cls.__init__) <NEW_LINE> kwargs = {} <NEW_LINE> for arg in list(sig.parameters.keys())[3:]: <NEW_LINE> <INDENT> if hasattr(other, arg): <NEW_LINE> <INDENT> kwargs[arg] = getattr(other, arg) <NEW_LINE> <DEDENT> <DEDENT> return cls(a, b, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def identical(self): <NEW_LINE> <INDENT> return not any(getattr(self, attr) for attr in self.__dict__ if attr.startswith('diff_')) <NEW_LINE> <DEDENT> @deprecated_renamed_argument('clobber', 'overwrite', '1.3') <NEW_LINE> def report(self, fileobj=None, indent=0, overwrite=False): <NEW_LINE> <INDENT> return_string = False <NEW_LINE> filepath = None <NEW_LINE> if isinstance(fileobj, string_types): <NEW_LINE> <INDENT> if os.path.exists(fileobj) and not overwrite: <NEW_LINE> <INDENT> raise IOError("File {0} exists, aborting (pass in " "overwrite=True to overwrite)".format(fileobj)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> filepath = fileobj <NEW_LINE> fileobj = open(filepath, 'w') <NEW_LINE> <DEDENT> <DEDENT> elif fileobj is None: <NEW_LINE> <INDENT> fileobj = io.StringIO() <NEW_LINE> return_string = True <NEW_LINE> <DEDENT> self._fileobj = fileobj <NEW_LINE> self._indent = indent <NEW_LINE> try: <NEW_LINE> <INDENT> self._report() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if filepath: <NEW_LINE> <INDENT> fileobj.close() <NEW_LINE> <DEDENT> <DEDENT> if return_string: <NEW_LINE> <INDENT> return fileobj.getvalue() <NEW_LINE> <DEDENT> <DEDENT> def _writeln(self, text): <NEW_LINE> <INDENT> self._fileobj.write(indent(text, self._indent) + u('\n')) <NEW_LINE> <DEDENT> def _diff(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def _report(self): <NEW_LINE> <INDENT> raise NotImplementedError | Base class for all FITS diff objects.
When instantiating a FITS diff object, the first two arguments are always
the two objects to diff (two FITS files, two FITS headers, etc.).
Instantiating a ``_BaseDiff`` also causes the diff itself to be executed.
The returned ``_BaseDiff`` instance has a number of attributes that describe
the results of the diff operation.
The most basic attribute, present on all ``_BaseDiff`` instances, is
``.identical`` which is `True` if the two objects being compared are
identical according to the diff method for objects of that type. | 6259904cd53ae8145f919881 |
class VolumeBackupPolicyTests(base.TestCase): <NEW_LINE> <INDENT> NAME = uuid.uuid4().hex <NEW_LINE> OTHER_NAME = uuid.uuid4().hex <NEW_LINE> def tearDown(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.openstack( DELETE_COMMAND % {'id': self.policy_id} ) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> super(VolumeBackupPolicyTests, self).tearDown() <NEW_LINE> <DEDENT> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> super(VolumeBackupPolicyTests, self).setUp() <NEW_LINE> json_output = json.loads(self.openstack( CREATE_COMMAND % {'name': self.NAME} )) <NEW_LINE> self.policy_id = json_output["id"] <NEW_LINE> self.assertOutput(self.NAME, json_output['name']) <NEW_LINE> ver_fixture = fixtures.EnvironmentVariable( 'OS_VBS_API_VERSION', '2' ) <NEW_LINE> self.useFixture(ver_fixture) <NEW_LINE> <DEDENT> def test_policy_list(self): <NEW_LINE> <INDENT> json_output = json.loads(self.openstack( 'vbs policy list -f json ' )) <NEW_LINE> self.assertIn( self.NAME, [img['name'] for img in json_output] ) <NEW_LINE> <DEDENT> def test_policy_set_rename(self): <NEW_LINE> <INDENT> name = uuid.uuid4().hex <NEW_LINE> json_output = json.loads(self.openstack( CREATE_COMMAND % {'name': name} )) <NEW_LINE> policy_id = json_output["id"] <NEW_LINE> self.assertEqual( name, json_output["name"], ) <NEW_LINE> self.openstack( 'vbs policy update --name {name}xx {policy}'.format( name=name, policy=policy_id) ) <NEW_LINE> json_output = json.loads(self.openstack( 'vbs policy show {name}xx -f json'.format(name=name) )) <NEW_LINE> self.assertEqual( name + 'xx', json_output["name"], ) <NEW_LINE> self.openstack( DELETE_COMMAND % {'id': json_output['id']} ) <NEW_LINE> <DEDENT> def test_policy_execute(self): <NEW_LINE> <INDENT> self.openstack( 'vbs policy execute ' + self.policy_id ) | Functional tests for vbs. | 6259904c507cdc57c63a61c0 |
class ProjectsFinishedLib(KPIBase): <NEW_LINE> <INDENT> def __call__(self, doc): <NEW_LINE> <INDENT> details = doc.get("details", {}) <NEW_LINE> if details.get("sample_type") == "Finished Library" and _is_ongoing(doc): <NEW_LINE> <INDENT> self.state += 1 | Open projects which are sequenced as finished libraries | 6259904c1f5feb6acb164016 |
class PypiDownloader(PackageGetter): <NEW_LINE> <INDENT> def __init__(self, client, name, version=None, save_dir=None): <NEW_LINE> <INDENT> self.client = client <NEW_LINE> self.name = name <NEW_LINE> self.versions = self.client.package_releases(self.name) <NEW_LINE> if not self.versions: <NEW_LINE> <INDENT> raise exceptions.NoSuchPackageException( 'Package "{0}" could not be found on PyPI.'.format(name)) <NEW_LINE> logger.error('Package "{0}" could not be found on PyPI.'.format(name)) <NEW_LINE> <DEDENT> self.version = version or self.versions[0] <NEW_LINE> if version and self.client.release_urls(name, version) == []: <NEW_LINE> <INDENT> raise exceptions.NoSuchPackageException( 'Package with name "{0}" and version "{1}" could not be found on PyPI.'.format(name, version)) <NEW_LINE> logger.error('Package with name "{0}" and version "{1}" could not be found on PyPI.'.format(name, version)) <NEW_LINE> <DEDENT> self.save_dir = save_dir or settings.DEFAULT_PKG_SAVE_PATH <NEW_LINE> if self.save_dir == settings.DEFAULT_PKG_SAVE_PATH: <NEW_LINE> <INDENT> self.save_dir += '/SOURCES' <NEW_LINE> <DEDENT> if not os.path.exists(self.save_dir): <NEW_LINE> <INDENT> if self.save_dir != (settings.DEFAULT_PKG_SAVE_PATH + '/SOURCES'): <NEW_LINE> <INDENT> os.makedirs(self.save_dir) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> subprocess.Popen( 'rpmdev-setuptree', stdout=subprocess.PIPE) <NEW_LINE> logger.info('Using rpmdevtools package to make rpmbuild folders tree.') <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> self.save_dir = '/tmp' <NEW_LINE> logger.warn('Package rpmdevtools is missing , using default folder: ' '{0} to store {1}.'.format(self.save_dir, self.name)) <NEW_LINE> logger.warn('Specify folder to store a file (SAVE_DIR) or install rpmdevtools.') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> logger.info('Using {0} as directory to save source.'.format(self.save_dir)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> urls = self.client.release_urls(self.name, self.version) <NEW_LINE> if urls: <NEW_LINE> <INDENT> for url in urls: <NEW_LINE> <INDENT> if url['url'].endswith(".tar.gz"): <NEW_LINE> <INDENT> return url['url'] <NEW_LINE> <DEDENT> <DEDENT> return urls[0]['url'] <NEW_LINE> <DEDENT> return self.client.release_data(self.name, self.version)['release_url'] <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> save_file = '{0}/{1}'.format(self.save_dir, self.url.split('/')[-1]) <NEW_LINE> request.urlretrieve(self.url, save_file) <NEW_LINE> logger.info('Downloaded package from PyPI: {0}.'.format(save_file)) <NEW_LINE> return save_file <NEW_LINE> <DEDENT> def get_name_version(self): <NEW_LINE> <INDENT> return (self.name, self.version) | Class for downloading the package from PyPI. | 6259904c8da39b475be04611 |
class Event(object): <NEW_LINE> <INDENT> def __init__(self, supports_channels=True): <NEW_LINE> <INDENT> self.supports_channels = supports_channels <NEW_LINE> self.handlers = [] <NEW_LINE> <DEDENT> def __call__(self, handler=None, channel=None): <NEW_LINE> <INDENT> if handler is None: <NEW_LINE> <INDENT> def handler_with_channel(handler): <NEW_LINE> <INDENT> return self.__call__(handler, channel) <NEW_LINE> <DEDENT> return handler_with_channel <NEW_LINE> <DEDENT> if channel: <NEW_LINE> <INDENT> if not self.supports_channels: <NEW_LINE> <INDENT> msg = "The %s event does not support channels so the " "handler `%s` could not be registered" <NEW_LINE> raise EventError(msg % self.name, handler.__name__) <NEW_LINE> <DEDENT> channel = re.compile(channel) <NEW_LINE> <DEDENT> self.handlers.append((handler, channel)) <NEW_LINE> return handler <NEW_LINE> <DEDENT> def send(self, request, socket, context, channel=None, *args): <NEW_LINE> <INDENT> for handler, pattern in self.handlers: <NEW_LINE> <INDENT> no_channel = not pattern and channel is None <NEW_LINE> if self.name.endswith("subscribe") and pattern: <NEW_LINE> <INDENT> matches = [pattern.match(args[0])] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> matches = [pattern.match(c) for c in [channel] if pattern] <NEW_LINE> <DEDENT> if no_channel or filter(None, matches): <NEW_LINE> <INDENT> args += (channel, ) if channel and channel not in args else () <NEW_LINE> handler(request, socket, context, *args) | Signal-like object for Socket.IO events that supports
filtering on channels. Registering event handlers is
performed by using the Event instance as a decorator::
@on_message
def message(request, socket, message):
...
Event handlers can also be registered for particular
channels using the channel keyword argument with a
regular expression pattern::
@on_message(channel="^room-")
def message(request, socket, message):
...
The ``on_connect`` event cannot be registered with a
channel pattern since channel subscription occurs
after a connection is established. | 6259904c63d6d428bbee3bec |
class Report(object): <NEW_LINE> <INDENT> sender = '' <NEW_LINE> message = '' <NEW_LINE> def __init__(self, sender=None, message=None, *args, **kwargs): <NEW_LINE> <INDENT> self.sender = sender <NEW_LINE> self.message = message <NEW_LINE> super(Report, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def notify(self, is_connected): <NEW_LINE> <INDENT> print("Sender: %s\nMessage: %s" % (self.sender, self.message)) <NEW_LINE> if self.sender == 'sensor' and is_connected: <NEW_LINE> <INDENT> data = { 'value1': str(self.message), } <NEW_LINE> result = urequests.post(CONFIG['iftt_url'], data=ujson.dumps(data)) <NEW_LINE> if result.status_code == 200: <NEW_LINE> <INDENT> print('Notification successful') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Notification failed') | Report to be given to another service | 6259904c711fe17d825e16ae |
class GroveGpio(GPIO): <NEW_LINE> <INDENT> def __init__(self, pin): <NEW_LINE> <INDENT> super(GroveGpio, self).__init__(pin, GPIO.OUT) <NEW_LINE> <DEDENT> def on(self): <NEW_LINE> <INDENT> self.write(1) <NEW_LINE> <DEDENT> def off(self): <NEW_LINE> <INDENT> self.write(0) | Class for Grove - Relay
Args:
pin(int): number of the digital pin the relay is connected to. | 6259904c26068e7796d4dd65 |
class Rectangle: <NEW_LINE> <INDENT> number_of_instances = 0 <NEW_LINE> print_symbol = "#" <NEW_LINE> def __init__(self, width=0, height=0): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> Rectangle.number_of_instances += 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, width): <NEW_LINE> <INDENT> if type(width) is not int: <NEW_LINE> <INDENT> raise TypeError("width must be an integer") <NEW_LINE> <DEDENT> if width < 0: <NEW_LINE> <INDENT> raise ValueError("width must be >= 0") <NEW_LINE> <DEDENT> self.__width = width <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, height): <NEW_LINE> <INDENT> if type(height) is not int: <NEW_LINE> <INDENT> raise TypeError("height must be an integer") <NEW_LINE> <DEDENT> if height < 0: <NEW_LINE> <INDENT> raise ValueError("height must be >= 0") <NEW_LINE> <DEDENT> self.__height = height <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__width * self.__height <NEW_LINE> <DEDENT> def perimeter(self): <NEW_LINE> <INDENT> if self.__width == 0 or self.__height == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return 2 * (self.__width + self.__height) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> see = "" <NEW_LINE> if self.__width == 0 or self.__height == 0: <NEW_LINE> <INDENT> return see <NEW_LINE> <DEDENT> for i in range(self.__height): <NEW_LINE> <INDENT> for j in range(self.__width): <NEW_LINE> <INDENT> see += str(self.print_symbol) <NEW_LINE> <DEDENT> if i != self.__height - 1: <NEW_LINE> <INDENT> see = see + "\n" <NEW_LINE> <DEDENT> <DEDENT> return see <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> rectangle = "Rectangle({}, {})".format(self.__width, self.__height) <NEW_LINE> return rectangle <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> print("Bye rectangle...") <NEW_LINE> Rectangle.number_of_instances -= 1 | Class Rectangle | 6259904cd99f1b3c44d06abb |
class ConservativeUnpickler (pickle.Unpickler): <NEW_LINE> <INDENT> safe_modules = { "builtins" : set(["set", "sum", "object"]), "copy_reg" : set(["_reconstructor"]), "kupfer.*" : universalset(), } <NEW_LINE> @classmethod <NEW_LINE> def is_safe_symbol(cls, module, name): <NEW_LINE> <INDENT> for pattern in cls.safe_modules: <NEW_LINE> <INDENT> if fnmatch.fnmatchcase(module, pattern): <NEW_LINE> <INDENT> return name in cls.safe_modules[pattern] <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def find_class(self, module, name): <NEW_LINE> <INDENT> if module not in sys.modules: <NEW_LINE> <INDENT> raise pickle.UnpicklingError("Refusing to load module %s" % module) <NEW_LINE> <DEDENT> if not self.is_safe_symbol(module, name): <NEW_LINE> <INDENT> raise pickle.UnpicklingError("Refusing unsafe %s.%s" % (module, name)) <NEW_LINE> <DEDENT> return pickle.Unpickler.find_class(self, module, name) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def loads(cls, pickledata): <NEW_LINE> <INDENT> unpickler = cls(io.BytesIO(pickledata)) <NEW_LINE> return unpickler.load() | An Unpickler that refuses to import new modules
>>> import pickle
>>> import kupfer.objects
>>> ConservativeUnpickler.loads(pickle.dumps(kupfer.objects.FileLeaf("A")))
<builtin.FileLeaf A>
>>> ConservativeUnpickler.loads(pickle.dumps(eval))
Traceback (most recent call last):
...
UnpicklingError: Refusing unsafe __builtin__.eval
>>> import sys
>>> import kupfer.obj.base
>>> pdata = pickle.dumps(kupfer.obj.base.Leaf(1, "A"))
>>> del sys.modules["kupfer.obj.base"]
>>> ConservativeUnpickler.loads(pdata)
Traceback (most recent call last):
...
UnpicklingError: Refusing to load module kupfer.obj.base | 6259904c24f1403a926862de |
class BinaryStatistics(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.nr_pos = 0 <NEW_LINE> self.nr_neg = 0 <NEW_LINE> self.nr_pred_pos = 0 <NEW_LINE> self.nr_pred_neg = 0 <NEW_LINE> self.corr_pos = 0 <NEW_LINE> self.corr_neg = 0 <NEW_LINE> <DEDENT> def feed(self, pred, label): <NEW_LINE> <INDENT> assert pred.shape == label.shape <NEW_LINE> self.nr_pos += (label == 1).sum() <NEW_LINE> self.nr_neg += (label == 0).sum() <NEW_LINE> self.nr_pred_pos += (pred == 1).sum() <NEW_LINE> self.nr_pred_neg += (pred == 0).sum() <NEW_LINE> self.corr_pos += ((pred == 1) & (pred == label)).sum() <NEW_LINE> self.corr_neg += ((pred == 0) & (pred == label)).sum() <NEW_LINE> <DEDENT> @property <NEW_LINE> def precision(self): <NEW_LINE> <INDENT> if self.nr_pred_pos == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return self.corr_pos * 1. / self.nr_pred_pos <NEW_LINE> <DEDENT> @property <NEW_LINE> def recall(self): <NEW_LINE> <INDENT> if self.nr_pos == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return self.corr_pos * 1. / self.nr_pos <NEW_LINE> <DEDENT> @property <NEW_LINE> def false_positive(self): <NEW_LINE> <INDENT> if self.nr_pred_pos == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return 1 - self.precision <NEW_LINE> <DEDENT> @property <NEW_LINE> def false_negative(self): <NEW_LINE> <INDENT> if self.nr_pos == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return 1 - self.recall | Statistics for binary decision,
including precision, recall, false positive, false negative | 6259904cd6c5a102081e353e |
class ThumbModel(MongoDBModel): <NEW_LINE> <INDENT> coll_name = "thumb_doc" <NEW_LINE> fields = ["praise_person_id", "article_id", "create_time", "is_praise"] <NEW_LINE> async def find_or_insert(self, valid_obj): <NEW_LINE> <INDENT> count_docs = await self.collection.count_documents({"praise_person_id": valid_obj["praise_person_id"], "article_id": valid_obj[ 'article_id']}) <NEW_LINE> if count_docs == 0: <NEW_LINE> <INDENT> doc = self.collection.insert_one(valid_obj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> doc = await self.collection.update_one({"praise_person_id": valid_obj["praise_person_id"], "article_id": valid_obj[ 'article_id']}, {'$set': {'is_praise': valid_obj['is_praise'], 'create_time': valid_obj['create_time']}}) <NEW_LINE> <DEDENT> doc = await self.collection.find_one({"praise_person_id": valid_obj["praise_person_id"], "article_id": valid_obj[ 'article_id']}) <NEW_LINE> doc = self.trans_obj_id_str(doc) <NEW_LINE> return doc <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def trans_obj_id_str(docs): <NEW_LINE> <INDENT> if isinstance(docs, list): <NEW_LINE> <INDENT> for doc in docs: <NEW_LINE> <INDENT> doc_id = str(doc.pop("_id")) <NEW_LINE> doc['id'] = doc_id <NEW_LINE> <DEDENT> return docs <NEW_LINE> <DEDENT> elif isinstance(docs, dict): <NEW_LINE> <INDENT> doc_id = str(docs.pop('_id')) <NEW_LINE> docs['id'] = doc_id <NEW_LINE> return docs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "gggggggg" | Thumbs-up (like)
concern_person_id: ID of the person who liked
article_id: article ID
create_time: creation time
is_praise: whether liked | 6259904ce76e3b2f99fd9e2c |
class LinkedList(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.head = None <NEW_LINE> <DEDENT> def inserthead(self, newNode): <NEW_LINE> <INDENT> temp = self.head <NEW_LINE> self.head = newNode <NEW_LINE> self.head.next = temp <NEW_LINE> del temp <NEW_LINE> <DEDENT> def insertEnd(self, newNode): <NEW_LINE> <INDENT> if self.head is None: <NEW_LINE> <INDENT> self.head = newNode <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> lastNode = self.head <NEW_LINE> if lastNode.next is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lastNode = lastNode.next <NEW_LINE> <DEDENT> <DEDENT> lastNode.next = newNode <NEW_LINE> <DEDENT> <DEDENT> def printList(self): <NEW_LINE> <INDENT> currentNode = self.head <NEW_LINE> while True: <NEW_LINE> <INDENT> if currentNode is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(currentNode.data) <NEW_LINE> <DEDENT> currentNode = currentNode.next | Creating a LinkedList. | 6259904c76e4537e8c3f09a7 |
class GetAuthors(AuthenticatedMethod): <NEW_LINE> <INDENT> method_name = 'wp.getAuthors' <NEW_LINE> results_class = WordPressAuthor | Retrieve list of authors in the blog.
Parameters:
None
Returns: `list` of :class:`WordPressAuthor` instances. | 6259904c07d97122c42180c5 |
class VerletListHadressLennardJonesAutoBondsLocal(InteractionLocal, interaction_VerletListHadressLennardJonesAutoBonds): <NEW_LINE> <INDENT> def __init__(self, vl, fixedtupleList): <NEW_LINE> <INDENT> if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): <NEW_LINE> <INDENT> cxxinit(self, interaction_VerletListHadressLennardJonesAutoBonds, vl, fixedtupleList) <NEW_LINE> <DEDENT> <DEDENT> def setPotential(self, type1, type2, potential): <NEW_LINE> <INDENT> if not (pmi._PMIComm and pmi._PMIComm.isActive()) or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): <NEW_LINE> <INDENT> self.cxxclass.setPotential(self, type1, type2, potential) | The (local) Lennard Jones auto bonds interaction using Verlet lists. | 6259904c94891a1f408ba106 |
class POPM(FrameOpt): <NEW_LINE> <INDENT> _framespec = [ Latin1TextSpec('email'), ByteSpec('rating'), ] <NEW_LINE> _optionalspec = [IntegerSpec('count')] <NEW_LINE> @property <NEW_LINE> def HashKey(self): <NEW_LINE> <INDENT> return '%s:%s' % (self.FrameID, self.email) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.rating == other <NEW_LINE> <DEDENT> __hash__ = FrameOpt.__hash__ <NEW_LINE> def __pos__(self): <NEW_LINE> <INDENT> return self.rating <NEW_LINE> <DEDENT> def _pprint(self): <NEW_LINE> <INDENT> return "%s=%r %r/255" % ( self.email, getattr(self, 'count', None), self.rating) | Popularimeter.
This frame keys a rating (out of 255) and a play count to an email
address.
Attributes:
* email -- email this POPM frame is for
* rating -- rating from 0 to 255
* count -- number of times the file has been played (optional) | 6259904c07f4c71912bb0856 |
class nop(object): <NEW_LINE> <INDENT> def __init__(self, name=''): <NEW_LINE> <INDENT> self.name_ = name <NEW_LINE> <DEDENT> def __get__(self, *args): <NEW_LINE> <INDENT> return MayBeCalled() <NEW_LINE> <DEDENT> def __hasattr__(self, attr): <NEW_LINE> <INDENT> if len(self.name_): print('{}::{}'.format(self.name_, attr)) <NEW_LINE> return True <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> if len(self.name_): print('{}::{}'.format(self.name_, attr)) <NEW_LINE> return MayBeCalled() | Nop class to handle misc optional imports
Shamelessly ripped off
from http://stackoverflow.com/questions/24946321/how-do-i-write-a-no-op-or-dummy-class-in-python | 6259904c435de62698e9d22a |
class CsrfExemptSessionAuthentication(authentication.BaseAuthentication): <NEW_LINE> <INDENT> def authenticate(self, request): <NEW_LINE> <INDENT> user = getattr(request._request, 'user', None) <NEW_LINE> if not user or not user.is_active: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return (user, None) | Use Django's session framework for authentication. But bypass the CSRF token check mechanism. | 6259904c21a7993f00c6738b |
class NoConfigKeyException (Exception): <NEW_LINE> <INDENT> pass | A process does not define a config lookup | 6259904cd4950a0f3b111854 |
class AntilifeShell(Spell): <NEW_LINE> <INDENT> name = "Antilife Shell" <NEW_LINE> level = 5 <NEW_LINE> casting_time = "1 action" <NEW_LINE> casting_range = "Self (10-foot radius)" <NEW_LINE> components = ("V", "S") <NEW_LINE> duration = "Concentration, up to 1 hour" <NEW_LINE> magic_school = "Abjuration" <NEW_LINE> classes = ('Druid', ) | A shimmering barrier extends out from you in a 10-foot radius and
moves with you, remaining centered on you and hedging out
creatures other than undead and constructs. The barrier lasts for
the duration. The barrier prevents an affected creature from
passing or reaching through. An affected creature can cast spells
or make attacks with ranged or reach weapons through the
barrier. If you move so that an affected creature is forced to
pass through the barrier, the spell ends. | 6259904ce64d504609df9de1 |
class LoopThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, mqttc): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.mqttc = mqttc <NEW_LINE> self.stopped = False <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while not self.stopped: <NEW_LINE> <INDENT> self.mqttc.loop() <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.mqttc.disconnect() <NEW_LINE> self.stopped = True | It keeps the server alive | 6259904cd10714528d69f09f |
class CertificateDescription(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'etag': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'properties': {'key': 'properties', 'type': 'CertificateProperties'}, 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, properties: Optional["CertificateProperties"] = None, **kwargs ): <NEW_LINE> <INDENT> super(CertificateDescription, self).__init__(**kwargs) <NEW_LINE> self.properties = properties <NEW_LINE> self.id = None <NEW_LINE> self.name = None <NEW_LINE> self.etag = None <NEW_LINE> self.type = None | The X509 Certificate.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar properties: The description of an X509 CA Certificate.
:vartype properties: ~azure.mgmt.iothub.v2021_07_01.models.CertificateProperties
:ivar id: The resource identifier.
:vartype id: str
:ivar name: The name of the certificate.
:vartype name: str
:ivar etag: The entity tag.
:vartype etag: str
:ivar type: The resource type.
:vartype type: str | 6259904cbe383301e0254c3e |
class DotDict(dict): <NEW_LINE> <INDENT> __getattr__ = dict.__getitem__ <NEW_LINE> __setattr__ = dict.__setitem__ <NEW_LINE> __delattr__ = dict.__delitem__ <NEW_LINE> def __init__(self, dct=None): <NEW_LINE> <INDENT> super(DotDict, self).__init__(self) <NEW_LINE> if dct: <NEW_LINE> <INDENT> for key, value in dct.items(): <NEW_LINE> <INDENT> if hasattr(value, 'keys'): <NEW_LINE> <INDENT> value = DotDict(value) <NEW_LINE> <DEDENT> self[key] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> if key in self: <NEW_LINE> <INDENT> return self[key] <NEW_LINE> <DEDENT> raise AttributeError(key) <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> data = dict() <NEW_LINE> for k, v in self.items(): <NEW_LINE> <INDENT> if isinstance(v, DotDict): <NEW_LINE> <INDENT> dict_val = v.as_dict() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dict_val = v <NEW_LINE> <DEDENT> data[k] = dict_val <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def save_yaml(self, file_path): <NEW_LINE> <INDENT> with open(file_path, "w") as f: <NEW_LINE> <INDENT> ruamel.yaml.dump(self.as_dict(), f, ruamel.yaml.RoundTripDumper) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def load_yaml(cls, file_path): <NEW_LINE> <INDENT> with open(file_path, "r") as f: <NEW_LINE> <INDENT> yaml_obj = ruamel.yaml.load(f, ruamel.yaml.RoundTripLoader, preserve_quotes=True) <NEW_LINE> <DEDENT> data = DotDict() <NEW_LINE> return _yaml_to_dot_dict(data, yaml_obj) | A dictionary that supports dot notation as well as dictionary access notation
Examples:
d = DotDict() or d = DotDict({'val1':'first'})
set attributes: d.val2 = 'second' or d['val2'] = 'second'
get attributes: d.val2 or d['val2'] | 6259904c8e05c05ec3f6f86c |
@dataclass <NEW_LINE> class Soa: <NEW_LINE> <INDENT> path: Path <NEW_LINE> members: List[SoaMember] | A structure of arrays, loaded from a Blender array of structures like MeshVertices | 6259904c63b5f9789fe86591 |
class version_code: <NEW_LINE> <INDENT> def __init__ (self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def read (): <NEW_LINE> <INDENT> if os.altsep: <NEW_LINE> <INDENT> code_file = uno.fileUrlToSystemPath(LeenO_path() + os.altsep + 'leeno_version_code') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> code_file = uno.fileUrlToSystemPath(LeenO_path() + os.sep + 'leeno_version_code') <NEW_LINE> <DEDENT> f = open(code_file, 'r') <NEW_LINE> return f.readline() <NEW_LINE> <DEDENT> def write (): <NEW_LINE> <INDENT> if os.altsep: <NEW_LINE> <INDENT> code_file = uno.fileUrlToSystemPath(LeenO_path() + os.altsep + 'leeno_version_code') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> code_file = uno.fileUrlToSystemPath(LeenO_path() + os.sep + 'leeno_version_code') <NEW_LINE> <DEDENT> f = open(code_file, 'r') <NEW_LINE> Ldev = str (int(f.readline().split('LeenO-')[1].split('-')[0].split('.')[-1]) + 1) <NEW_LINE> tempo = ''.join(''.join(''.join(str(datetime.now()).split('.')[0].split(' ')).split('-')).split(':')) <NEW_LINE> of = open(code_file, 'w') <NEW_LINE> new = ( 'LeenO-' + str(LeenoUtils.getGlobalVar('Lmajor')) + '.' + str(LeenoUtils.getGlobalVar('Lminor')) + '.' + LeenoUtils.getGlobalVar('Lsubv').split('.')[0] + '.' + Ldev + '-TESTING-' + tempo[:-6]) <NEW_LINE> of.write(new) <NEW_LINE> of.close() <NEW_LINE> return new | Manages the OXT file name in leeno_version_code | 6259904c16aa5153ce401911 |
class StatusSummary(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'total': {'required': True}, 'failed': {'required': True}, 'success': {'required': True}, 'in_progress': {'required': True}, 'not_yet_started': {'required': True}, 'cancelled': {'required': True}, 'total_character_charged': {'required': True}, } <NEW_LINE> _attribute_map = { 'total': {'key': 'total', 'type': 'int'}, 'failed': {'key': 'failed', 'type': 'int'}, 'success': {'key': 'success', 'type': 'int'}, 'in_progress': {'key': 'inProgress', 'type': 'int'}, 'not_yet_started': {'key': 'notYetStarted', 'type': 'int'}, 'cancelled': {'key': 'cancelled', 'type': 'int'}, 'total_character_charged': {'key': 'totalCharacterCharged', 'type': 'long'}, } <NEW_LINE> def __init__( self, *, total: int, failed: int, success: int, in_progress: int, not_yet_started: int, cancelled: int, total_character_charged: int, **kwargs ): <NEW_LINE> <INDENT> super(StatusSummary, self).__init__(**kwargs) <NEW_LINE> self.total = total <NEW_LINE> self.failed = failed <NEW_LINE> self.success = success <NEW_LINE> self.in_progress = in_progress <NEW_LINE> self.not_yet_started = not_yet_started <NEW_LINE> self.cancelled = cancelled <NEW_LINE> self.total_character_charged = total_character_charged | StatusSummary.
All required parameters must be populated in order to send to Azure.
:param total: Required. Total count.
:type total: int
:param failed: Required. Failed count.
:type failed: int
:param success: Required. Number of Success.
:type success: int
:param in_progress: Required. Number of in progress.
:type in_progress: int
:param not_yet_started: Required. Count of not yet started.
:type not_yet_started: int
:param cancelled: Required. Number of cancelled.
:type cancelled: int
:param total_character_charged: Required. Total characters charged by the API.
:type total_character_charged: long | 6259904c8a43f66fc4bf35bb |
class EdgeMock(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.angles = (0, 0, 0) <NEW_LINE> self.speeds = (0, 0, 0) <NEW_LINE> logger.info('__init__') <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> logger.info('stop') <NEW_LINE> self.speeds = (0, 0, 0) <NEW_LINE> <DEDENT> def output(self, duration, motors): <NEW_LINE> <INDENT> logger.info('moving % for %s', motors, duration) <NEW_LINE> unit_vecs = to_unit_vecs(motors) <NEW_LINE> for i in range(0, 2): <NEW_LINE> <INDENT> self.angles[i] = unit_vecs[i] * duration <NEW_LINE> <DEDENT> self.stop() | Mock low level driver for the OWI Edge | 6259904cd4950a0f3b111855 |
class ReplicapoolInstanceGroupManagersSetAutoHealingPolicyRequest(_messages.Message): <NEW_LINE> <INDENT> instanceGroupManager = _messages.StringField(1, required=True) <NEW_LINE> instanceGroupManagersSetAutoHealingPolicyRequest = _messages.MessageField('InstanceGroupManagersSetAutoHealingPolicyRequest', 2) <NEW_LINE> project = _messages.StringField(3, required=True) <NEW_LINE> zone = _messages.StringField(4, required=True) | A ReplicapoolInstanceGroupManagersSetAutoHealingPolicyRequest object.
Fields:
instanceGroupManager: The name of the instance group manager.
instanceGroupManagersSetAutoHealingPolicyRequest: A
InstanceGroupManagersSetAutoHealingPolicyRequest resource to be passed
as the request body.
project: The Google Developers Console project name.
zone: The name of the zone in which the instance group manager resides. | 6259904cec188e330fdf9cc3 |
class multiGetLastReadMessageIds_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.LIST, 'success', (TType.STRUCT,(LastReadMessageIds, LastReadMessageIds.thrift_spec)), None, ), (1, TType.STRUCT, 'e', (TalkException, TalkException.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None, e=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.e = e <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.success = [] <NEW_LINE> (_etype673, _size670) = iprot.readListBegin() <NEW_LINE> for _i674 in range(_size670): <NEW_LINE> <INDENT> _elem675 = LastReadMessageIds() <NEW_LINE> _elem675.read(iprot) <NEW_LINE> self.success.append(_elem675) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e = TalkException() <NEW_LINE> self.e.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('multiGetLastReadMessageIds_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.LIST, 0) <NEW_LINE> oprot.writeListBegin(TType.STRUCT, len(self.success)) <NEW_LINE> for iter676 in self.success: <NEW_LINE> <INDENT> iter676.write(oprot) <NEW_LINE> <DEDENT> oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e', TType.STRUCT, 1) <NEW_LINE> self.e.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.success) <NEW_LINE> value = (value * 31) ^ hash(self.e) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success
- e | 6259904cd10714528d69f0a0 |
class ConstantGivenFunction(GivenFunction): <NEW_LINE> <INDENT> def __init__(self, value=0): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> GivenFunction.__init__(self, _ConstantFunctionContainer(value)) | A :class:`GivenFunction` that has a constant value on all space.
| 6259904cd99f1b3c44d06abf |
class MrpService(ManualService): <NEW_LINE> <INDENT> @deprecated <NEW_LINE> def __init__( self, identifier: Optional[str], port: int, credentials: Optional[str] = None, properties: Optional[Mapping[str, str]] = None, ) -> None: <NEW_LINE> <INDENT> super().__init__(identifier, Protocol.MRP, port, properties, credentials) | Representation of a MediaRemote Protocol (MRP) service.
**DEPRECATED: Use `pyatv.conf.ManualService` instead.** | 6259904c15baa723494633b2 |
class TwitterBackend(OAuthBackend): <NEW_LINE> <INDENT> name = 'twitter' <NEW_LINE> EXTRA_DATA = [('id', 'id')] <NEW_LINE> def get_user_details(self, response): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> first_name, last_name = response['name'].split(' ', 1) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> first_name = response['name'] <NEW_LINE> last_name = '' <NEW_LINE> <DEDENT> return {USERNAME: response['screen_name'], 'email': '', 'fullname': response['name'], 'first_name': first_name, 'last_name': last_name} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tokens(cls, instance): <NEW_LINE> <INDENT> token = super(TwitterBackend, cls).tokens(instance) <NEW_LINE> if token and 'access_token' in token: <NEW_LINE> <INDENT> token = dict(tok.split('=') for tok in token['access_token'].split('&')) <NEW_LINE> <DEDENT> return token | Twitter OAuth authentication backend | 6259904c30c21e258be99c2b |
class Observation(Base): <NEW_LINE> <INDENT> __tablename__ = 'observation' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> mag = Column(Float) <NEW_LINE> mag_err = Column(Float) <NEW_LINE> bandpass_id = Column(Integer, ForeignKey('bandpass.id')) <NEW_LINE> bandpass = relationship("Bandpass", foreign_keys="[Observation.bandpass_id]") <NEW_LINE> catalog_star_id = Column(Integer, ForeignKey('catalog_star.id', ondelete="CASCADE")) <NEW_LINE> def __init__(self, mag, mag_err): <NEW_LINE> <INDENT> self.mag = mag <NEW_LINE> self.mag_err = mag_err <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Observation(%i)>" % self.id | SQLAlchemy table for representing an `observation`. | 6259904c94891a1f408ba108 |
class BroadlinkRMSwitch(BroadlinkSwitch): <NEW_LINE> <INDENT> def __init__(self, device, config): <NEW_LINE> <INDENT> super().__init__( device, config.get(CONF_COMMAND_ON), config.get(CONF_COMMAND_OFF) ) <NEW_LINE> self._attr_name = config[CONF_NAME] <NEW_LINE> <DEDENT> async def _async_send_packet(self, packet): <NEW_LINE> <INDENT> device = self._device <NEW_LINE> if packet is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> await device.async_request(device.api.send_data, packet) <NEW_LINE> <DEDENT> except (BroadlinkException, OSError) as err: <NEW_LINE> <INDENT> _LOGGER.error("Failed to send packet: %s", err) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True | Representation of a Broadlink RM switch. | 6259904c10dbd63aa1c72003 |
class WebKitTimeEvent(TimestampEvent): <NEW_LINE> <INDENT> def __init__(self, webkit_time, usage, data_type=None): <NEW_LINE> <INDENT> super(WebKitTimeEvent, self).__init__( timelib.Timestamp.FromWebKitTime(webkit_time), usage, data_type=data_type) | Convenience class for a WebKit time-based event. | 6259904c71ff763f4b5e8bcc |
class ExtensionTestMixin(typing.Generic[ExtensionTypeVar], AbstractExtensionTestMixin[ExtensionTypeVar]): <NEW_LINE> <INDENT> def test_as_extension(self) -> None: <NEW_LINE> <INDENT> for config in self.test_values.values(): <NEW_LINE> <INDENT> if config["extension_type"] is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ext = self.ext(config["expected"]) <NEW_LINE> cg_ext = x509.extensions.Extension( oid=self.ext_class.oid, critical=self.ext_class.default_critical, value=config["extension_type"], ) <NEW_LINE> self.assertEqual(ext.as_extension(), cg_ext) <NEW_LINE> for critical in self.critical_values: <NEW_LINE> <INDENT> ext = self.ext(config["expected"], critical=critical) <NEW_LINE> self.assertEqual( ext.as_extension(), x509.extensions.Extension( oid=self.ext_class.oid, critical=critical, value=config["extension_type"] ), ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_config(self) -> None: <NEW_LINE> <INDENT> self.assertTrue(issubclass(self.ext_class, Extension)) <NEW_LINE> self.assertEqual(self.ext_class.key, self.ext_class_key) <NEW_LINE> self.assertEqual(self.ext_class.name, self.ext_class_name) <NEW_LINE> self.assertIsInstance(self.ext_class.oid, ObjectIdentifier) <NEW_LINE> self.assertIsInstance(self.ext_class.key, str) <NEW_LINE> self.assertGreater(len(self.ext_class.key), 0) <NEW_LINE> self.assertIsInstance(self.ext_class.name, str) <NEW_LINE> self.assertGreater(len(self.ext_class.name), 0) <NEW_LINE> self.assertEqual(KEY_TO_EXTENSION[self.ext_class.key], self.ext_class) <NEW_LINE> self.assertEqual(OID_TO_EXTENSION[self.ext_class.oid], self.ext_class) <NEW_LINE> self.assertTrue(hasattr(X509CertMixin, self.ext_class.key)) <NEW_LINE> self.assertIsInstance(getattr(X509CertMixin, self.ext_class.key), cached_property) <NEW_LINE> <DEDENT> def test_extension_type(self) -> None: <NEW_LINE> <INDENT> for config in self.test_values.values(): <NEW_LINE> <INDENT> if config["extension_type"] is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ext = self.ext(config["expected"]) <NEW_LINE> self.assertEqual(ext.extension_type, config["extension_type"]) <NEW_LINE> <DEDENT> <DEDENT> def test_for_builder(self) -> None: <NEW_LINE> <INDENT> for config in self.test_values.values(): <NEW_LINE> <INDENT> if config["extension_type"] is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ext = self.ext(config["expected"]) <NEW_LINE> self.assertEqual(ext.for_builder(), (config["extension_type"], self.ext_class.default_critical)) <NEW_LINE> for critical in self.critical_values: <NEW_LINE> <INDENT> ext = self.ext(config["expected"], critical=critical) <NEW_LINE> self.assertEqual(ext.for_builder(), (config["extension_type"], critical)) | Override generic implementations to use test_value property. | 6259904c004d5f362081f9fb |
class GatewayController(ABC): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def create_address(self, waves_address: str) -> str: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_attempt_list_by_trigger(self, trigger: AttemptListTrigger) -> Optional[TransactionAttemptList]: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_attempt_list_by_id(self, attempt_list_id: str) -> Optional[TransactionAttemptList]: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def query_attempt_lists(self, query: AttemptListQuery) -> List[TransactionAttemptList]: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def validate_waves_address(self, address: str) -> bool: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def check_waves_transaction(self, tx: str) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def check_coin_transaction(self, tx: str) -> None: <NEW_LINE> <INDENT> pass | Defines the API for the Waves Client application.
All possible interfaces should forward their requests to this abstract controller. | 6259904cdc8b845886d549e4 |
@python_2_unicode_compatible <NEW_LINE> class ProductLine(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=64, verbose_name='产品线') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = '产品线' <NEW_LINE> verbose_name_plural = '产品线' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | Product line table | 6259904ca79ad1619776b4a7 |
class Inode: <NEW_LINE> <INDENT> def __init__(self, _id): <NEW_LINE> <INDENT> self.id = _id <NEW_LINE> self.name = '' <NEW_LINE> self.isDir = True <NEW_LINE> self.size = 0 <NEW_LINE> self.permissions = None <NEW_LINE> self.uid = os.getuid() <NEW_LINE> self.gid = os.getgid() <NEW_LINE> self.atime = self.ctime = self.mtime = time() <NEW_LINE> self.children = [] <NEW_LINE> self.parent = None <NEW_LINE> self.version = 1 <NEW_LINE> self.blockMetadata = None <NEW_LINE> self.explored = False <NEW_LINE> self.bid = None | In-memory FS representation.
Inode data structure. | 6259904c8e71fb1e983bceed |
class AnswerViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> serializer_class = AnswerSerializer <NEW_LINE> queryset = Answer.objects.all() <NEW_LINE> permission_classes = (IsOwnerOrIsAuthenticatdThenCreateOnlyOrReadOnly,) <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save( author=self.request.user ) | This viewset automatically provides `list`, `create`, `retrieve`,
`update` and `destroy` actions. | 6259904c45492302aabfd8fb |
class ModernOpenSslServer(_OpenSslServer): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_openssl_path(cls): <NEW_LINE> <INDENT> return ModernOpenSslBuildConfig(CURRENT_PLATFORM).exe_path <NEW_LINE> <DEDENT> def get_verify_argument(cls, client_auth_config: ClientAuthConfigEnum) -> str: <NEW_LINE> <INDENT> options = { ClientAuthConfigEnum.DISABLED: "", ClientAuthConfigEnum.OPTIONAL: f"-verify 1 {cls._CLIENT_CA_PATH}", ClientAuthConfigEnum.REQUIRED: f"-Verify 1 {cls._CLIENT_CA_PATH}", } <NEW_LINE> return options[client_auth_config] <NEW_LINE> <DEDENT> def __init__( self, client_auth_config: ClientAuthConfigEnum = ClientAuthConfigEnum.DISABLED, max_early_data: Optional[int] = None, cipher: Optional[str] = None, prefer_server_order: bool = False, groups: Optional[str] = None, ) -> None: <NEW_LINE> <INDENT> extra_args = [] <NEW_LINE> if prefer_server_order: <NEW_LINE> <INDENT> extra_args.append("-serverpref") <NEW_LINE> <DEDENT> if groups: <NEW_LINE> <INDENT> extra_args.append(f"-groups {groups}") <NEW_LINE> <DEDENT> if max_early_data is not None: <NEW_LINE> <INDENT> extra_args += ["-early_data", f"-max_early_data {max_early_data}"] <NEW_LINE> <DEDENT> super().__init__(client_auth_config, extra_args, cipher) | A wrapper around the OpenSSL 1.1.1 s_server binary. | 6259904c29b78933be26aad6 |
class ApiTest(unittest.TestCase): <NEW_LINE> <INDENT> @unittest.skipUnless(server_available,"local server is not running") <NEW_LINE> def test_01_train(self): <NEW_LINE> <INDENT> r = requests.post('http://127.0.0.1:{}/train'.format(port),json={"mode":"test"}) <NEW_LINE> train_complete = re.sub("\W+","",r.text) <NEW_LINE> self.assertEqual(train_complete,'true') <NEW_LINE> <DEDENT> @unittest.skipUnless(server_available,"local server is not running") <NEW_LINE> def test_02_predict_empty(self): <NEW_LINE> <INDENT> r = requests.post('http://127.0.0.1:{}/predict'.format(port)) <NEW_LINE> self.assertEqual(re.sub('\n|"','',r.text),"[]") <NEW_LINE> r = requests.post('http://127.0.0.1:{}/predict'.format(port),json={"key":"value"}) <NEW_LINE> self.assertEqual(re.sub('\n|"','',r.text),"[]") <NEW_LINE> <DEDENT> @unittest.skipUnless(server_available,"local server is not running") <NEW_LINE> def test_03_predict(self): <NEW_LINE> <INDENT> request_json = {'country':'all','year':'2018','month':'05','day':'01','mode':'test'} <NEW_LINE> r = requests.post('http://127.0.0.1:{}/predict'.format(port),json=request_json) <NEW_LINE> response = json.loads(r.text) <NEW_LINE> self.assertTrue(len(response['y_pred']) > 0) <NEW_LINE> <DEDENT> @unittest.skipUnless(server_available,"local server is not running") <NEW_LINE> def test_04_logs(self): <NEW_LINE> <INDENT> file_name = 'train-test.log' <NEW_LINE> request_json = {'file':'train-test.log'} <NEW_LINE> r = requests.get('http://127.0.0.1:{}/logs/{}'.format(port,file_name)) <NEW_LINE> with open(file_name, 'wb') as f: <NEW_LINE> <INDENT> f.write(r.content) <NEW_LINE> <DEDENT> self.assertTrue(os.path.exists(file_name)) <NEW_LINE> if os.path.exists(file_name): <NEW_LINE> <INDENT> os.remove(file_name) | test the essential functionality | 6259904c3cc13d1c6d466b60 |
class Action(Enum): <NEW_LINE> <INDENT> WEST = (0, -1, 1) <NEW_LINE> EAST = (0, 1, 1) <NEW_LINE> NORTH = (-1, 0, 1) <NEW_LINE> SOUTH = (1, 0, 1) <NEW_LINE> NW = (-1, -1, math.sqrt(2)) <NEW_LINE> SW = (1, -1, math.sqrt(2)) <NEW_LINE> NE = (-1, 1, math.sqrt(2)) <NEW_LINE> SE = (1, 1, math.sqrt(2)) <NEW_LINE> @property <NEW_LINE> def cost(self): <NEW_LINE> <INDENT> return self.value[2] <NEW_LINE> <DEDENT> @property <NEW_LINE> def delta(self): <NEW_LINE> <INDENT> return (self.value[0], self.value[1]) | An action is represented by a 3 element tuple.
The first 2 values are the delta of the action relative
to the current grid position. The third and final value
is the cost of performing the action. | 6259904c94891a1f408ba109 |
class ReplayBuffer: <NEW_LINE> <INDENT> def __init__(self, action_size, buffer_size, batch_size, seed, device): <NEW_LINE> <INDENT> self.device = device <NEW_LINE> self.action_size = action_size <NEW_LINE> self.memory = deque(maxlen=buffer_size) <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.experience = namedtuple("Experience", field_names=["state", "action", "reward", "next_state", "done"]) <NEW_LINE> self.seed = random.seed(seed) <NEW_LINE> <DEDENT> def add(self, state, action, reward, next_state, done): <NEW_LINE> <INDENT> e = self.experience(state, action, reward, next_state, done) <NEW_LINE> self.memory.append(e) <NEW_LINE> <DEDENT> def sample(self): <NEW_LINE> <INDENT> experiences = random.sample(self.memory, k=self.batch_size) <NEW_LINE> states = torch.from_numpy(np.vstack([e.state for e in experiences if e is not None])).float().to(self.device) <NEW_LINE> actions = torch.from_numpy(np.vstack([e.action for e in experiences if e is not None])).float().to(self.device) <NEW_LINE> rewards = torch.from_numpy(np.vstack([e.reward for e in experiences if e is not None])).float().to(self.device) <NEW_LINE> next_states = torch.from_numpy(np.vstack([e.next_state for e in experiences if e is not None])).float().to(self.device) <NEW_LINE> dones = torch.from_numpy(np.vstack([e.done for e in experiences if e is not None]).astype(np.uint8)).float().to(self.device) <NEW_LINE> return (states, actions, rewards, next_states, dones) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.memory) | Fixed-size buffer to store experience tuples. | 6259904cf7d966606f7492cc |
class CompositeShape(Shape): <NEW_LINE> <INDENT> def __init__(self, components: List[Shape]): <NEW_LINE> <INDENT> Shape.__init__(self, wx.Pen(wx.BLACK, 1), 0, 0, 0, 0) <NEW_LINE> self._Shape__Pen.DashStyle = wx.PENSTYLE_SHORT_DASH <NEW_LINE> self.__Components = components <NEW_LINE> self.CalculateEnclosingRectangle() <NEW_LINE> <DEDENT> def CalculateEnclosingRectangle(self): <NEW_LINE> <INDENT> self._Shape__X1 = min([min(c.X1(), c.X2()) for c in self.Components()]) <NEW_LINE> self._Shape__Y1 = min([min(c.Y1(), c.Y2()) for c in self.Components()]) <NEW_LINE> self._Shape__X2 = max([max(c.X1(), c.X2()) for c in self.Components()]) <NEW_LINE> self._Shape__Y2 = max([max(c.Y1(), c.Y2()) for c in self.Components()]) <NEW_LINE> <DEDENT> def Draw(self, dc: wx.DC): <NEW_LINE> <INDENT> for shape in self.Components(): <NEW_LINE> <INDENT> shape.Draw(dc) <NEW_LINE> <DEDENT> if self.Selected(): <NEW_LINE> <INDENT> dc.Pen = self.Pen() <NEW_LINE> dc.DrawRectangle(self.X1(), self.Y1(), self.X2() - self.X1(), self.Y2() - self.Y1()) <NEW_LINE> <DEDENT> <DEDENT> def MoveBy(self, xDelta: int, yDelta: int): <NEW_LINE> <INDENT> for shape in self.Components(): <NEW_LINE> <INDENT> shape.MoveBy(xDelta, yDelta) <NEW_LINE> <DEDENT> self.CalculateEnclosingRectangle() <NEW_LINE> <DEDENT> def Components(self) -> List[Shape]: <NEW_LINE> <INDENT> return self.__Components <NEW_LINE> <DEDENT> def Clone(self) -> Shape: <NEW_LINE> <INDENT> members: List[Shape] = [] <NEW_LINE> for shape in self.Components(): <NEW_LINE> <INDENT> members.append(shape.Clone()) <NEW_LINE> <DEDENT> return CompositeShape(members) | I represent a composite shape in the OOPDraw system,
holding a collection of the shapes that I consist of. | 6259904c8a43f66fc4bf35bf |
class BaseModificationsPlugin(MegrimPlugin): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.tool = "BaseModifications" <NEW_LINE> <DEDENT> def execute(self, args): <NEW_LINE> <INDENT> warnings.simplefilter(action='ignore', category=FutureWarning) <NEW_LINE> os.environ["NUMEXPR_MAX_THREADS"] = str(multiprocessing.cpu_count()) <NEW_LINE> fast5 = args.fast5 <NEW_LINE> bam = BamHandler(args.bam, args) <NEW_LINE> reference = ReferenceGenome(args.fasta) <NEW_LINE> base_mods = BaseModifications( fast5, bam, reference, modification=args.modification, threshold=args.probability, context=args.context, args=args) <NEW_LINE> if args.index: <NEW_LINE> <INDENT> logging.degug( f"saving base-mod coordinates to CSV file [{args.output}]") <NEW_LINE> base_mods.fast5s_to_basemods().to_csv(args.output, sep="\t") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.debug(f"saving data as CSV file [{args.output}]") <NEW_LINE> base_mods.reduce_mapped_methylation_signal().to_csv( args.output, sep="\t", index=False, chunksize=1e6) <NEW_LINE> <DEDENT> logging.debug(f"fin ...") <NEW_LINE> <DEDENT> def arg_params(self, subparsers, parent_parser): <NEW_LINE> <INDENT> argparser = subparsers.add_parser( self.tool, help="base modifications help", parents=[parent_parser]) <NEW_LINE> argparser.add_argument( '-5', '--fast5', metavar="/path/to/FAST5", action='store', help='Path to the FAST5-format sequences', required=True, dest="fast5") <NEW_LINE> argparser.add_argument( '-b', '--bam', metavar="/path/to/BAM", action='store', help='Path to the BAM-format mapping file', required=True, dest="bam") <NEW_LINE> argparser.add_argument( '-f', '--fasta', metavar="/path/to/fasta", action='store', help='Path to the fasta format reference sequence', required=True, dest="fasta") <NEW_LINE> argparser.add_argument( '-p', '--probability', metavar="[0..1]", action='store', help='Base-modification probability. This is a floating point ' 'number between 0 and 1. A stringent selection will be closer to ' '1. [The default is 0.90]', dest="probability", default=0.90, type=float) <NEW_LINE> argparser.add_argument( '-c', '--context', metavar="CG", action='store', help='Base-modification context. Only CpG has been implemented at ' 'present. [The default is CG]', dest="context", default="CG") <NEW_LINE> argparser.add_argument( '-m', '--modification', metavar="[5mC|6mA]", action='store', help='The base modification to score for - this may be either 5mC ' 'or 6mA in this version of the software. [The default is 5mC]', dest="modification", default="5mC") <NEW_LINE> argparser.add_argument( '-x', '--index', action='store_true', dest="index", help="only index the FAST5; do not process the bam files.", default=False) <NEW_LINE> argparser.add_argument( '-o', '--output', metavar="results-file", action='store', dest="output", required=True, help='file path to a file location ' 'where the results will be stored. The results will be stored in ' 'a TSV format.') | BaseModifications plugin class for megrim toolbox. | 6259904c50485f2cf55dc3b4 |
class truncexpon_gen(rv_continuous): <NEW_LINE> <INDENT> def _argcheck(self, b): <NEW_LINE> <INDENT> self.b = b <NEW_LINE> return (b > 0) <NEW_LINE> <DEDENT> def _pdf(self, x, b): <NEW_LINE> <INDENT> return exp(-x)/(-special.expm1(-b)) <NEW_LINE> <DEDENT> def _logpdf(self, x, b): <NEW_LINE> <INDENT> return -x - log(-special.expm1(-b)) <NEW_LINE> <DEDENT> def _cdf(self, x, b): <NEW_LINE> <INDENT> return special.expm1(-x)/special.expm1(-b) <NEW_LINE> <DEDENT> def _ppf(self, q, b): <NEW_LINE> <INDENT> return -special.log1p(q*special.expm1(-b)) <NEW_LINE> <DEDENT> def _munp(self, n, b): <NEW_LINE> <INDENT> if n == 1: <NEW_LINE> <INDENT> return (1-(b+1)*exp(-b))/(-special.expm1(-b)) <NEW_LINE> <DEDENT> elif n == 2: <NEW_LINE> <INDENT> return 2*(1-0.5*(b*b+2*b+2)*exp(-b))/(-special.expm1(-b)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._mom1_sc(n, b) <NEW_LINE> <DEDENT> <DEDENT> def _entropy(self, b): <NEW_LINE> <INDENT> eB = exp(b) <NEW_LINE> return log(eB-1)+(1+eB*(b-1.0))/(1.0-eB) | A truncated exponential continuous random variable.
%(before_notes)s
Notes
-----
The probability density function for `truncexpon` is::
truncexpon.pdf(x, b) = exp(-x) / (1-exp(-b))
for ``0 < x < b``.
`truncexpon` takes ``b`` as a shape parameter.
%(after_notes)s
%(example)s | 6259904cdc8b845886d549e6 |
class AddToFeed(SingleObjectMixin, FormView): <NEW_LINE> <INDENT> queryset = FeedModel.objects.all() <NEW_LINE> pk_url_kwarg = 'feed_id' <NEW_LINE> template_name = 'feeds/add.html' <NEW_LINE> form_class = AddToFeedForm <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> return super(AddToFeed, self).get(request, *args, **kwargs) <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> self.object.date_last_posted = datetime.datetime.now() <NEW_LINE> self.object.save() <NEW_LINE> return super(AddToFeed, self).post(request, *args, **kwargs) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> feed = self.object <NEW_LINE> _ = form.add_to_feed(feed) <NEW_LINE> return redirect('feed', feed.id) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> return super(AddToFeed, self).get_context_data(object=self.object, **kwargs) | Add a new item to the feed. | 6259904c07f4c71912bb085d |
class FlyCamera(): <NEW_LINE> <INDENT> def __init__(self,camIndex=0): <NEW_LINE> <INDENT> bus = PyCapture2.BusManager() <NEW_LINE> numCams = bus.getNumOfCameras() <NEW_LINE> self.camera = PyCapture2.Camera() <NEW_LINE> uid = bus.getCameraFromIndex(0) <NEW_LINE> self.camera.connect(uid) <NEW_LINE> self.camera.startCapture() <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> image = self.camera.retrieveBuffer() <NEW_LINE> imdata = image.getData() <NEW_LINE> row_bytes = float(len(imdata)) / float(image.getRows()); <NEW_LINE> grayimg = np.array(imdata, dtype="uint8"). reshape((image.getRows(), image.getCols()) ); <NEW_LINE> outimg = cv2.cvtColor(grayimg, cv2.COLOR_BAYER_BG2BGR) <NEW_LINE> return 1,outimg | this class does all the gritty image capture stuff you need
to do to get data from the pointgrey cameras | 6259904c8da39b475be04619 |
class SET(SCPINode, SCPIQuery, SCPISet): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "SET" <NEW_LINE> args = [] | SYSTem:TIME:HRTimer:ABSolute:SET
Arguments: | 6259904c91af0d3eaad3b24d |
class Page(models.Model): <NEW_LINE> <INDENT> title = models.CharField(default='', max_length=80) <NEW_LINE> link = models.CharField(default='', max_length=50) <NEW_LINE> body = models.TextField('Content', default='') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.link <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Страницы' <NEW_LINE> verbose_name_plural = 'Статичные страницы' | A page of the main section of the kiteup.ru site | 6259904c76d4e153a661dc8c
class Dict(List): <NEW_LINE> <INDENT> def decode(self, value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> if callable(self.default): <NEW_LINE> <INDENT> return self.default() <NEW_LINE> <DEDENT> return self.default or {} <NEW_LINE> <DEDENT> return dict((k, self.fld.decode(v)) for k, v in value.iteritems()) <NEW_LINE> <DEDENT> def encode(self, value): <NEW_LINE> <INDENT> return dict((k, self.fld.encode(v)) for k, v in value.iteritems()) | A field representing a homogeneous mapping of data.
The elements of the mapping are decoded through another field specified
when the `Dict` is declared. | 6259904c6fece00bbacccde2 |
class ClientMeta(object): <NEW_LINE> <INDENT> def __init__(self, events): <NEW_LINE> <INDENT> self.events = events <NEW_LINE> <DEDENT> def __copy__(self): <NEW_LINE> <INDENT> copied_events = copy.copy(self.events) <NEW_LINE> return ClientMeta(copied_events) | Holds additional client methods.
This class holds additional information for clients. It exists for
two reasons:
* To give advanced functionality to clients
* To namespace additional client attributes from the operation
names which are mapped to methods at runtime. This avoids
ever running into collisions with operation names. | 6259904c379a373c97d9a454 |
class HStoreVirtualMixin(object): <NEW_LINE> <INDENT> def contribute_to_class(self, cls, name): <NEW_LINE> <INDENT> if self.choices: <NEW_LINE> <INDENT> setattr(cls, 'get_%s_display' % self.name, curry(cls._get_FIELD_display, field=self)) <NEW_LINE> <DEDENT> self.attname = name <NEW_LINE> self.name = name <NEW_LINE> self.model = cls <NEW_LINE> self.column = None <NEW_LINE> setattr(cls, name, self) <NEW_LINE> cls._meta.add_field(self) <NEW_LINE> cls._meta.virtual_fields.append(self) <NEW_LINE> <DEDENT> def db_type(self, connection): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def __get__(self, instance, instance_type=None): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> raise AttributeError('Can only be accessed via instance') <NEW_LINE> <DEDENT> return getattr(instance, self.hstore_field_name).get(self.name, self.default) <NEW_LINE> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> hstore_dictionary = getattr(instance, self.hstore_field_name) <NEW_LINE> hstore_dictionary[self.name] = value | must be mixed-in with django fields | 6259904c596a897236128fc3 |
class XMLEquals(object): <NEW_LINE> <INDENT> def __init__(self, expected): <NEW_LINE> <INDENT> self.expected = expected <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s(%r)" % (self.__class__.__name__, self.expected) <NEW_LINE> <DEDENT> def match(self, other): <NEW_LINE> <INDENT> def xml_element_equals(expected_doc, observed_doc): <NEW_LINE> <INDENT> if expected_doc.tag != observed_doc.tag: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if expected_doc.attrib != observed_doc.attrib: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def _sorted_children(doc): <NEW_LINE> <INDENT> return sorted(doc.getchildren(), key=lambda el: el.tag) <NEW_LINE> <DEDENT> expected_children = _sorted_children(expected_doc) <NEW_LINE> observed_children = _sorted_children(observed_doc) <NEW_LINE> if len(expected_children) != len(observed_children): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for expected_el, observed_el in zip(expected_children, observed_children): <NEW_LINE> <INDENT> if not xml_element_equals(expected_el, observed_el): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> parser = etree.XMLParser(remove_blank_text=True) <NEW_LINE> expected_doc = etree.fromstring(self.expected.strip(), parser) <NEW_LINE> observed_doc = etree.fromstring(other.strip(), parser) <NEW_LINE> if xml_element_equals(expected_doc, observed_doc): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return XMLMismatch(self.expected, other) | Parses two XML documents from strings and compares the results.
| 6259904c23e79379d538d927 |
class TestTimeUT1: <NEW_LINE> <INDENT> @pytest.mark.remote_data <NEW_LINE> def test_utc_to_ut1(self): <NEW_LINE> <INDENT> "" <NEW_LINE> t = Time(['2012-06-30 12:00:00', '2012-06-30 23:59:59', '2012-06-30 23:59:60', '2012-07-01 00:00:00', '2012-07-01 12:00:00'], scale='utc') <NEW_LINE> t_ut1_jd = t.ut1.jd <NEW_LINE> t_comp = np.array([2456108.9999932079, 2456109.4999816339, 2456109.4999932083, 2456109.5000047823, 2456110.0000047833]) <NEW_LINE> assert allclose_jd(t_ut1_jd, t_comp) <NEW_LINE> t_back = t.ut1.utc <NEW_LINE> assert allclose_jd(t.jd, t_back.jd) <NEW_LINE> tnow = Time.now() <NEW_LINE> tnow.ut1 <NEW_LINE> <DEDENT> def test_ut1_to_utc(self): <NEW_LINE> <INDENT> with iers_conf.set_temp('auto_download', False): <NEW_LINE> <INDENT> t = Time(['2012-06-30 12:00:00', '2012-06-30 23:59:59', '2012-07-01 00:00:00', '2012-07-01 00:00:01', '2012-07-01 12:00:00'], scale='ut1') <NEW_LINE> t_utc_jd = t.utc.jd <NEW_LINE> t_comp = np.array([2456109.0000010049, 2456109.4999836441, 2456109.4999952177, 2456109.5000067917, 2456109.9999952167]) <NEW_LINE> assert allclose_jd(t_utc_jd, t_comp) <NEW_LINE> t_back = t.utc.ut1 <NEW_LINE> assert allclose_jd(t.jd, t_back.jd) <NEW_LINE> <DEDENT> <DEDENT> def test_delta_ut1_utc(self): <NEW_LINE> <INDENT> with iers_conf.set_temp('auto_download', False): <NEW_LINE> <INDENT> t = Time('2012-06-30 12:00:00', scale='utc') <NEW_LINE> assert not hasattr(t, '_delta_ut1_utc') <NEW_LINE> assert allclose_sec(t.delta_ut1_utc, -0.58682110003124965) <NEW_LINE> assert allclose_sec(t._delta_ut1_utc, -0.58682110003124965) | Test Time.ut1 using IERS tables | 6259904ca8ecb0332587263b |
class EmcItemClass(metaclass=utils.Singleton): <NEW_LINE> <INDENT> def item_selected(self, url, user_data): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def label_get(self, url, user_data): <NEW_LINE> <INDENT> return 'Unknow' <NEW_LINE> <DEDENT> def label_end_get(self, url, user_data): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def icon_get(self, url, user_data): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def icon_end_get(self, url, user_data): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def info_get(self, url, user_data): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def poster_get(self, url, user_data): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def cover_get(self, url, user_data): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def fanart_get(self, url, user_data): <NEW_LINE> <INDENT> return None | TODO Class doc | 6259904c0a366e3fb87dde0f |
class xMsgRegistrar: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.context = zmq.Context.instance() <NEW_LINE> self.proxy = xMsgProxy(self.context, "localhost", 7771) <NEW_LINE> self.registrar_service = xMsgRegService(self.context, RegAddress()) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> xMsgUtil.log("Local ip: %s" % xMsgUtil.get_local_ip()) <NEW_LINE> self.registrar_service.start() <NEW_LINE> xMsgUtil.log("Local registration and discovery server starting") <NEW_LINE> self.proxy.start() <NEW_LINE> self.registrar_service.join() <NEW_LINE> <DEDENT> except AddressInUseException as e: <NEW_LINE> <INDENT> xMsgUtil.log(e.message) | xMsgRegistrar, the main registrar service
The service always runs in a separate thread. Contains two
separate databases to store publishers and subscribers
registration data. The key for the data base is xMsgTopic, constructed as:
* *domain:subject:type*
Creates REP socket server on a default port. Following request will be
serviced:
* Register publisher
* Register subscriber
* Find publisher
* Find subscriber
* Remove publisher
* Remove subscriber | 6259904ce76e3b2f99fd9e32 |
class FlagThumbAction(Action): <NEW_LINE> <INDENT> def thumb_contents(self): <NEW_LINE> <INDENT> filename = self.db.filename('flag', self.nodeid) <NEW_LINE> image = open_image_no_alpha(filename) <NEW_LINE> return scale_image_to_width_png(image, int(self.form['width'].value)) <NEW_LINE> <DEDENT> def handle(self): <NEW_LINE> <INDENT> if self.classname != 'flag': <NEW_LINE> <INDENT> raise ValueError('This action only applies to flags') <NEW_LINE> <DEDENT> if self.nodeid is None: <NEW_LINE> <INDENT> raise ValueError('No id specified to generate thumbnail') <NEW_LINE> <DEDENT> if 'width' not in self.form: <NEW_LINE> <INDENT> raise ValueError('No width specified to generate thumbnail') <NEW_LINE> <DEDENT> if self.form['width'].value != '200': <NEW_LINE> <INDENT> raise ValueError('Invalid width specified to generate thumbnail') <NEW_LINE> <DEDENT> if not self.hasPermission('View', classname='flag', itemid=self.nodeid): <NEW_LINE> <INDENT> raise Unauthorised('You do not have permission to view this flag') <NEW_LINE> <DEDENT> content = cached_bin(self.db, 'thumbnails', 'flag%s-%s.png' % (self.nodeid, self.form['width'].value), self.thumb_contents) <NEW_LINE> self.client.setHeader('Content-Type', 'image/png') <NEW_LINE> return content | Action to return a thumbnail for a flag. | 6259904c76e4537e8c3f09af |
class RepositoryRoot(Container, RepositoryMixin, TranslatedTitleMixin): <NEW_LINE> <INDENT> Title = TranslatedTitleMixin.Title <NEW_LINE> def get_repository_number(self): <NEW_LINE> <INDENT> return u'' <NEW_LINE> <DEDENT> def get_retention_period(self): <NEW_LINE> <INDENT> return u'' <NEW_LINE> <DEDENT> def get_retention_period_annotation(self): <NEW_LINE> <INDENT> return u'' <NEW_LINE> <DEDENT> def get_archival_value(self): <NEW_LINE> <INDENT> return u'' <NEW_LINE> <DEDENT> def get_archival_value_annotation(self): <NEW_LINE> <INDENT> return u'' <NEW_LINE> <DEDENT> def get_custody_period(self): <NEW_LINE> <INDENT> return u'' | A Repositoryroot.
| 6259904c82261d6c527308db |
class GroupMember(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(settings.AUTH_USER_MODEL) <NEW_LINE> group = models.ForeignKey('stronger.Group') <NEW_LINE> joined = models.DateField() <NEW_LINE> approved = models.BooleanField(default=False) <NEW_LINE> approved_by = models.BooleanField(default=False) <NEW_LINE> admin = models.BooleanField(default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ('-joined',) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return "{} - {}".format(self.user, self.group) | User membership for a group. | 6259904c1f037a2d8b9e5281 |
class ViewerBase(QWidget): <NEW_LINE> <INDENT> need_refresh = pyqtSignal(bool) <NEW_LINE> def __init__(self, parent = None): <NEW_LINE> <INDENT> super(ViewerBase, self).__init__(parent) <NEW_LINE> self.t = 0. <NEW_LINE> self.is_refreshing = False <NEW_LINE> self.need_refresh.connect(self.refresh, type = Qt.QueuedConnection) <NEW_LINE> self.delay_timer = QTimer(singleShot = True) <NEW_LINE> self.delay_timer.timeout.connect(self.refresh) <NEW_LINE> <DEDENT> def fast_seek(self, t): <NEW_LINE> <INDENT> if self.is_refreshing: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.t = t <NEW_LINE> self.is_refreshing = True <NEW_LINE> self.need_refresh.emit(True) <NEW_LINE> <DEDENT> def seek(self, t): <NEW_LINE> <INDENT> if self.is_refreshing: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.t = t <NEW_LINE> self.is_refreshing = True <NEW_LINE> self.need_refresh.emit(False) <NEW_LINE> <DEDENT> def refresh(self, fast = False): <NEW_LINE> <INDENT> if fast: <NEW_LINE> <INDENT> print('fast refresh') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> time.sleep(.5) <NEW_LINE> print('slow refresh') <NEW_LINE> <DEDENT> self.is_refreshing = False <NEW_LINE> <DEDENT> def delayed_refresh(self, interval = 50): <NEW_LINE> <INDENT> if self.delay_timer.isActive(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.delay_timer.setInterval(interval) <NEW_LINE> self.delay_timer.start() | Base for SignalViewer, TimeFreqViewer, EpochViewer, ...
This handle seek and fast_seek with TimeSeeker time_changed and fast_time_changed signals | 6259904c23849d37ff8524e7 |
class Discriminator(object): <NEW_LINE> <INDENT> def __init__(self, args, name=''): <NEW_LINE> <INDENT> self.args = args <NEW_LINE> self.name = name <NEW_LINE> self.first_call = True <NEW_LINE> <DEDENT> def __call__(self, x, is_training): <NEW_LINE> <INDENT> with tf.variable_scope(self.name, reuse=tf.AUTO_REUSE): <NEW_LINE> <INDENT> if self.first_call: print(tf.get_variable_scope().name) <NEW_LINE> minshape = 4 <NEW_LINE> filt = self.args.nfilt * 2 <NEW_LINE> maxfilt = 8 * filt <NEW_LINE> basefilt = filt <NEW_LINE> nfilt = basefilt <NEW_LINE> nshape = x.get_shape()[1].value <NEW_LINE> layer = 1 <NEW_LINE> iinput = x <NEW_LINE> while nshape > minshape: <NEW_LINE> <INDENT> if self.first_call: print(iinput) <NEW_LINE> output = unet_conv(iinput, nfilt, 'h{}'.format(layer), is_training, sn=spectral_norm, activation=lrelu, batch_norm=batch_norm if layer != 1 else False) <NEW_LINE> nshape /= 2 <NEW_LINE> nfilt = min(2 * nfilt, maxfilt) <NEW_LINE> layer += 1 <NEW_LINE> iinput = output <NEW_LINE> <DEDENT> if self.first_call: print(output) <NEW_LINE> output = tf.reduce_sum(output, axis=[1, 2]) <NEW_LINE> out = dense(output, 1, sn=spectral_norm, name='out') <NEW_LINE> if self.first_call: <NEW_LINE> <INDENT> print("{}\n".format(out)) <NEW_LINE> self.first_call = False <NEW_LINE> <DEDENT> <DEDENT> return out | A discriminator class. | 6259904d004d5f362081f9fd |
class AccountInvoiceCancel(models.TransientModel): <NEW_LINE> <INDENT> _name = "account.invoice.cancel" <NEW_LINE> _description = "Cancel the Selected Invoices" <NEW_LINE> @api.multi <NEW_LINE> def invoice_cancel(self): <NEW_LINE> <INDENT> context = dict(self._context or {}) <NEW_LINE> active_ids = context.get('active_ids', []) or [] <NEW_LINE> for record in self.env['account.invoice'].browse(active_ids): <NEW_LINE> <INDENT> if record.state in ('cancel', 'paid'): <NEW_LINE> <INDENT> raise UserError(_("Selected invoice(s) cannot be cancelled as they are already in 'Cancelled' or 'Done' state.")) <NEW_LINE> <DEDENT> record.signal_workflow('invoice_cancel') <NEW_LINE> <DEDENT> return {'type': 'ir.actions.act_window_close'} | This wizard will cancel all the selected invoices.
If the 'allow cancelling entries' option is not selected on the journal, a warning message will be shown. | 6259904d10dbd63aa1c72007
class TransportTestCase(unittest.TestCase): <NEW_LINE> <INDENT> klass = None <NEW_LINE> if Crypto is None: <NEW_LINE> <INDENT> skip = "cannot run w/o PyCrypto" <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.transport = proto_helpers.StringTransport() <NEW_LINE> self.proto = self.klass() <NEW_LINE> self.packets = [] <NEW_LINE> def secureRandom(len): <NEW_LINE> <INDENT> return '\x99' * len <NEW_LINE> <DEDENT> self.oldSecureRandom = randbytes.secureRandom <NEW_LINE> randbytes.secureRandom = secureRandom <NEW_LINE> def stubSendPacket(messageType, payload): <NEW_LINE> <INDENT> self.packets.append((messageType, payload)) <NEW_LINE> <DEDENT> self.proto.makeConnection(self.transport) <NEW_LINE> self.proto.sendPacket = stubSendPacket <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> randbytes.secureRandom = self.oldSecureRandom <NEW_LINE> self.oldSecureRandom = None | Base class for transport test cases. | 6259904df7d966606f7492cd |
class Hidden(Text): <NEW_LINE> <INDENT> pass | Field representing ``<input type="hidden">`` | 6259904dd53ae8145f91988c |
class Share_actions(extensions.ExtensionDescriptor): <NEW_LINE> <INDENT> name = 'ShareActions' <NEW_LINE> alias = 'share-actions' <NEW_LINE> namespace = '' <NEW_LINE> updated = '2012-08-14T00:00:00+00:00' <NEW_LINE> def get_controller_extensions(self): <NEW_LINE> <INDENT> controller = ShareActionsController() <NEW_LINE> extension = extensions.ControllerExtension(self, 'shares', controller) <NEW_LINE> return [extension] | Enable share actions. | 6259904ddc8b845886d549e8 |
class HTail: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.rootSection = None <NEW_LINE> self.tipSection = None <NEW_LINE> self.surface = None <NEW_LINE> self.control = None <NEW_LINE> <DEDENT> def set_control(self,gain=1.0,xc_hinge=0.0): <NEW_LINE> <INDENT> ailControl = {"gain" : gain, "x_hinge" : xc_hinge} <NEW_LINE> elevator_control = Control(name="elevator",duplicate_sign=1, hinge_vector=Vector(0,1,0),**ailControl) <NEW_LINE> self.control=[elevator_control] <NEW_LINE> if self.rootSection != None: <NEW_LINE> <INDENT> raise Warning("Htail geometry might already be defined") <NEW_LINE> <DEDENT> <DEDENT> def autoSections(self, croot, ctip, semispan, airfoilLoc, rootPos = np.zeros((3,1)),sectionKwargs={}): <NEW_LINE> <INDENT> AF = FileAirfoil(airfoilLoc) <NEW_LINE> airfoil = {"airfoil" : AF} <NEW_LINE> rootLE = Point(rootPos[0,0]+croot/4.,rootPos[1,0],rootPos[2,0]) <NEW_LINE> self.rootSection = Section(rootLE,croot, controls=self.control,**dict(sectionKwargs,**airfoil)) <NEW_LINE> tipLE = Point(rootPos[0,0]+ctip/4.,rootPos[1,0]+semispan,rootPos[2,0]) <NEW_LINE> self.tipSection = Section(tipLE, ctip, controls=self.control,**dict(sectionKwargs,**airfoil)) <NEW_LINE> <DEDENT> def autoHTail(self,n_spanwise,n_chordwise,name="HTail"): <NEW_LINE> <INDENT> if (self.rootSection == None) or (self.tipSection == None): <NEW_LINE> <INDENT> raise NameError("rootSection or tipSection not defined, run autoSections") <NEW_LINE> <DEDENT> self.surface = Surface(name=name, n_chordwise=n_chordwise, chord_spacing=Spacing.cosine, n_spanwise=n_spanwise, span_spacing=Spacing.cosine, y_duplicate=0.0, sections=[self.rootSection,self.tipSection]) <NEW_LINE> <DEDENT> def set_root_section(self, croot, airfoilLoc, position): <NEW_LINE> <INDENT> self.rootSection = None | This class contains information about the details of
the horizontal tail aerodynamic model.
Namely:
- Geometry information
- Meshing info
The root and tip sections of the wing should be specified.
This class describes multiple ways to build the wing geometry.
Wings are assumed to be symmetric. | 6259904d07d97122c42180ce |
class StorePeek(Peek): <NEW_LINE> <INDENT> pass | Request to get an *item* from the *store*. The request is triggered
once there is an item available in the store. | 6259904dec188e330fdf9cc9 |
class Class2(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.name = "class2" <NEW_LINE> self.R0 = 0. <NEW_LINE> self.K2 = 0. <NEW_LINE> self.K3 = 0. <NEW_LINE> self.K4 = 0. <NEW_LINE> self.reduced = False <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.reduced: <NEW_LINE> <INDENT> return "%15.6f %15.6f %15.6f %15.6f"%(self.R0, self.K2, self.K3,self.K4) <NEW_LINE> <DEDENT> return "%28s %15.6f %15.6f %15.6f %15.6f"%(self.name, self.R0, self.K2, self.K3, self.K4) | Potential defined as
E = K2*(r-R0)^2 + K3*(r-R0)^3 + K4*(r-R0)^4
Input parameters: R0, K2, K3, K4. | 6259904dd99f1b3c44d06ac5 |
class EspeakTtsSender(): <NEW_LINE> <INDENT> def send(self, sender, text, lang='en'): <NEW_LINE> <INDENT> text = text.replace("'", '"') <NEW_LINE> file_path = self.tts_record(text, lang) <NEW_LINE> opus_file = "image.opus" <NEW_LINE> subprocess.check_call(("ffmpeg -y -i {} -c:a libopus {}".format(file_path, opus_file)).split(' ')) <NEW_LINE> os.remove(file_path) <NEW_LINE> return Message(sender, opus_file, "audio/ogg; codecs=opus") <NEW_LINE> <DEDENT> def tts_record(self, text, lang='en'): <NEW_LINE> <INDENT> file_path = "image.wav" <NEW_LINE> cmd = "espeak -v%s -w %s '%s'" % (lang, file_path, text) <NEW_LINE> subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE).wait() <NEW_LINE> return file_path | Uses espeak for text to speech | 6259904d009cb60464d02963
class DialogHelper: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> async def run_dialog( dialog: Dialog, turn_context: TurnContext, accessor: StatePropertyAccessor ): <NEW_LINE> <INDENT> dialog_set = DialogSet(accessor) <NEW_LINE> dialog_set.add(dialog) <NEW_LINE> dialog_context = await dialog_set.create_context(turn_context) <NEW_LINE> results = await dialog_context.continue_dialog() <NEW_LINE> if results.status == DialogTurnStatus.Empty: <NEW_LINE> <INDENT> await dialog_context.begin_dialog(dialog.id) | Dialog Helper implementation. | 6259904d30dc7b76659a0c5f |
class Ldappy(object): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self._ldap_config = Config(config) <NEW_LINE> self._ldap_conn = None <NEW_LINE> self._user_objects = Users(self._ldap_config) <NEW_LINE> self._group_objects = Groups(self._ldap_config) <NEW_LINE> <DEDENT> def authenticate(self, username, password, retries=1): <NEW_LINE> <INDENT> connection = LdapConnection(self._ldap_config) <NEW_LINE> success = False <NEW_LINE> while not success and retries > 0: <NEW_LINE> <INDENT> logger.debug('Authentication attempt for user: {0}'.format(username)) <NEW_LINE> try: <NEW_LINE> <INDENT> connection.connect(username, password) <NEW_LINE> success = True <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> logger.error('Authentication failed with error: {0}'.format(ex)) <NEW_LINE> <DEDENT> retries -= 1 <NEW_LINE> if not success and retries > 0: <NEW_LINE> <INDENT> time.sleep(1) <NEW_LINE> <DEDENT> <DEDENT> connection.disconnect() <NEW_LINE> return success <NEW_LINE> <DEDENT> @property <NEW_LINE> def user_objects(self): <NEW_LINE> <INDENT> return self._user_objects <NEW_LINE> <DEDENT> @property <NEW_LINE> def group_objects(self): <NEW_LINE> <INDENT> return self._group_objects | Wrapper to the ldap lib, one that will make you happy.
ldap docs: https://www.python-ldap.org/doc/html/ldap.html
ldap docs references: https://www.python-ldap.org/docs.html
ldap samples: http://www.grotan.com/ldap/python-ldap-samples.html | 6259904da79ad1619776b4ab |
class BodyguardAnt(Ant): <NEW_LINE> <INDENT> name = 'Bodyguard' <NEW_LINE> implemented = True <NEW_LINE> food_cost = 4 <NEW_LINE> container = True <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> Ant.__init__(self, 2) <NEW_LINE> self.ant = None <NEW_LINE> <DEDENT> def contain_ant(self, ant): <NEW_LINE> <INDENT> self.ant = ant <NEW_LINE> <DEDENT> def action(self, colony): <NEW_LINE> <INDENT> if self.ant is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.ant.action(colony) | BodyguardAnt provides protection to other Ants. | 6259904d8e71fb1e983bcef1 |
class DeployDeviceEnvironment(Action): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(DeployDeviceEnvironment, self).__init__() <NEW_LINE> self.name = "deploy-device-env" <NEW_LINE> self.summary = "deploy device environment" <NEW_LINE> self.description = "deploy device environment" <NEW_LINE> self.env = "" <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> super(DeployDeviceEnvironment, self).validate() <NEW_LINE> if 'lava_test_shell_file' not in self.parameters['deployment_data'].keys(): <NEW_LINE> <INDENT> self.errors = "Invalid deployment data - missing lava_test_shell_file" <NEW_LINE> <DEDENT> if 'env_dut' in self.job.parameters and self.job.parameters['env_dut']: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> yaml.load(self.job.parameters['env_dut']) <NEW_LINE> <DEDENT> except (TypeError, yaml.scanner.ScannerError) as exc: <NEW_LINE> <INDENT> self.errors = exc <NEW_LINE> return <NEW_LINE> <DEDENT> self.env = self.job.parameters['env_dut'] <NEW_LINE> environment = self._create_environment() <NEW_LINE> self.set_namespace_data( action=self.name, label='environment', key='shell_file', value=self.parameters['deployment_data']['lava_test_shell_file'] ) <NEW_LINE> self.set_namespace_data( action=self.name, label='environment', key='env_dict', value=environment ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.info("no device environment specified") <NEW_LINE> <DEDENT> self.set_namespace_data( action=self.name, label='environment', key='line_separator', value=self.parameters['deployment_data'].get('line_separator', LINE_SEPARATOR) ) <NEW_LINE> <DEDENT> def _create_environment(self): <NEW_LINE> <INDENT> conf = yaml.load(self.env) if self.env is not '' else {} <NEW_LINE> if conf.get("purge", False): <NEW_LINE> <INDENT> environ = {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> environ = dict(os.environ) <NEW_LINE> <DEDENT> for var in conf.get("removes", {}): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> del environ[var] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> environ.update(conf.get("overrides", {})) <NEW_LINE> return environ | Create environment found in job parameters 'env_dut' and set it in common_data. | 6259904dcb5e8a47e493cb9c |
class BondType(ParameterType): <NEW_LINE> <INDENT> _VALENCE_TYPE = "Bond" <NEW_LINE> _ELEMENT_NAME = "Bond" <NEW_LINE> length = ParameterAttribute(default=None, unit=unit.angstrom) <NEW_LINE> k = ParameterAttribute( default=None, unit=unit.kilocalorie_per_mole / unit.angstrom**2 ) <NEW_LINE> length_bondorder = MappedParameterAttribute(default=None, unit=unit.angstrom) <NEW_LINE> k_bondorder = MappedParameterAttribute( default=None, unit=unit.kilocalorie_per_mole / unit.angstrom**2 ) <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> has_k = "k" in kwargs.keys() <NEW_LINE> has_k_bondorder = any(["k_bondorder" in key for key in kwargs.keys()]) <NEW_LINE> has_length = "length" in kwargs.keys() <NEW_LINE> has_length_bondorder = any( ["length_bondorder" in key for key in kwargs.keys()] ) <NEW_LINE> if has_k: <NEW_LINE> <INDENT> if has_k_bondorder: <NEW_LINE> <INDENT> raise SMIRNOFFSpecError( "BOTH k and k_bondorder* cannot be specified simultaneously." ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not has_k_bondorder: <NEW_LINE> <INDENT> raise SMIRNOFFSpecError( "Either k or k_bondorder* must be specified." ) <NEW_LINE> <DEDENT> <DEDENT> if has_length: <NEW_LINE> <INDENT> if has_length_bondorder: <NEW_LINE> <INDENT> raise SMIRNOFFSpecError( "BOTH length and length_bondorder* cannot be specified simultaneously." ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not has_length_bondorder: <NEW_LINE> <INDENT> raise SMIRNOFFSpecError( "Either length or length_bondorder* must be specified." ) <NEW_LINE> <DEDENT> <DEDENT> super().__init__(**kwargs) | A SMIRNOFF bond type
.. warning :: This API is experimental and subject to change. | 6259904d63b5f9789fe86599 |
class Frame(Model): <NEW_LINE> <INDENT> movie = models.ForeignKey(TMDBMovie) <NEW_LINE> file = models.ImageField(upload_to='pictures') <NEW_LINE> owner = models.ForeignKey(UserProfile) | Кадр из фильма | 6259904d23e79379d538d929 |
class ClassifierResult(object): <NEW_LINE> <INDENT> def __init__(self, name, classifier_id, classes): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.classifier_id = classifier_id <NEW_LINE> self.classes = classes <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> args = {} <NEW_LINE> if 'name' in _dict: <NEW_LINE> <INDENT> args['name'] = _dict.get('name') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'name\' not present in ClassifierResult JSON' ) <NEW_LINE> <DEDENT> if 'classifier_id' in _dict: <NEW_LINE> <INDENT> args['classifier_id'] = _dict.get('classifier_id') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'classifier_id\' not present in ClassifierResult JSON' ) <NEW_LINE> <DEDENT> if 'classes' in _dict: <NEW_LINE> <INDENT> args['classes'] = [ ClassResult._from_dict(x) for x in (_dict.get('classes')) ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'classes\' not present in ClassifierResult JSON' ) <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'name') and self.name is not None: <NEW_LINE> <INDENT> _dict['name'] = self.name <NEW_LINE> <DEDENT> if hasattr(self, 'classifier_id') and self.classifier_id is not None: <NEW_LINE> <INDENT> _dict['classifier_id'] = self.classifier_id <NEW_LINE> <DEDENT> if hasattr(self, 'classes') and self.classes is not None: <NEW_LINE> <INDENT> _dict['classes'] = [x._to_dict() for x in self.classes] <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | Classifier and score combination.
:attr str name: Name of the classifier.
:attr str classifier_id: The ID of a classifier identified in the image.
:attr list[ClassResult] classes: An array of classes within the classifier. | 6259904d24f1403a926862e3 |
class ArmRoleReceiver(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'name': {'required': True}, 'role_id': {'required': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'role_id': {'key': 'roleId', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, name: str, role_id: str, **kwargs ): <NEW_LINE> <INDENT> super(ArmRoleReceiver, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.role_id = role_id | An arm role receiver.
All required parameters must be populated in order to send to Azure.
:param name: Required. The name of the arm role receiver. Names must be unique across all
receivers within an action group.
:type name: str
:param role_id: Required. The arm role id.
:type role_id: str | 6259904d45492302aabfd8ff |
class count_min_sketch(basic_sketch): <NEW_LINE> <INDENT> def update(self, key, freq = 1): <NEW_LINE> <INDENT> pos = self.proj.hash(key) <NEW_LINE> self.vec[range(self.proj.depth), pos] += freq <NEW_LINE> <DEDENT> def __matmul__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, count_min_sketch): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return count_min_sketch_inner(self, other) | Count-Min sketch | 6259904d3cc13d1c6d466b64 |
class SysIpstat(SysIpstatSchema): <NEW_LINE> <INDENT> cli_command = "/mgmt/tm/sys/ip-stat" <NEW_LINE> def rest(self): <NEW_LINE> <INDENT> response = self.device.get(self.cli_command) <NEW_LINE> response_json = response.json() <NEW_LINE> if not response_json: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> return response_json | To F5 resource for /mgmt/tm/sys/ip-stat
| 6259904d0fa83653e46f630a |
class UwsgiLogEntry(): <NEW_LINE> <INDENT> def __extractEntryData(self, log): <NEW_LINE> <INDENT> p = re.compile( "^.+?\] (.+?) .+? \[(.+?)\] .+? (\d+) bytes .+? \(HTTP.+? (\d+)\)", re.IGNORECASE) <NEW_LINE> m = p.match(log) <NEW_LINE> ip_address = "" <NEW_LINE> date_time = None <NEW_LINE> bytes_count = 0 <NEW_LINE> response_code = "" <NEW_LINE> try: <NEW_LINE> <INDENT> if m is not None: <NEW_LINE> <INDENT> g = m.groups() <NEW_LINE> ip_address = g[0] <NEW_LINE> date_time = datetime.strptime(g[1], "%a %b %d %H:%M:%S %Y") <NEW_LINE> bytes_count = int(g[2]) <NEW_LINE> response_code = int(g[3]) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> return (ip_address, date_time, bytes_count, response_code) <NEW_LINE> <DEDENT> <DEDENT> def __resolveEntryType(self, log): <NEW_LINE> <INDENT> p = re.compile("^\[(.+?)\]", re.IGNORECASE) <NEW_LINE> m = p.match(log) <NEW_LINE> try: <NEW_LINE> <INDENT> if m is not None: <NEW_LINE> <INDENT> t = m.groups()[0].upper() <NEW_LINE> if t in self.ENTRIES: <NEW_LINE> <INDENT> return t <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "ENTRY" <NEW_LINE> <DEDENT> <DEDENT> return "UNKNOWN" <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return "UNKNOWN" <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, log): <NEW_LINE> <INDENT> self.log = log <NEW_LINE> self.ENTRIES = [ "UNKNOWN", "INFO", "ERROR", "DEBUG", "ENTRY" ] <NEW_LINE> self.type = self.__resolveEntryType(log) <NEW_LINE> if self.type == "ENTRY": <NEW_LINE> <INDENT> ipaddr, dt, bc, rc = self.__extractEntryData(log) <NEW_LINE> self.ip_address = ipaddr <NEW_LINE> self.date_time = dt <NEW_LINE> self.bytes_count = bc <NEW_LINE> self.response_code = rc <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ip_address = "" <NEW_LINE> self.date_time = None <NEW_LINE> self.bytes_count = 0 <NEW_LINE> self.response_code = "" | Represents single uWSGI log entry.
Has the following properties:
- type
- ip_address
- date_time
- bytes_count
- response_code | 6259904d07f4c71912bb0860 |
class Post(models.Model): <NEW_LINE> <INDENT> rareuser = models.ForeignKey("RareUser", on_delete=models.CASCADE) <NEW_LINE> category = models.ForeignKey("Category", on_delete=models.CASCADE) <NEW_LINE> title = models.CharField(max_length=75) <NEW_LINE> publication_date = models.DateField(auto_now=False, auto_now_add=False, null=True, blank=True) <NEW_LINE> image_url = models.ImageField(upload_to="headerimages", height_field=None, width_field=None, max_length=None, null=True) <NEW_LINE> content = models.CharField(max_length=5000) <NEW_LINE> approved = models.BooleanField() <NEW_LINE> @property <NEW_LINE> def is_user_author(self): <NEW_LINE> <INDENT> return self.__is_user_author <NEW_LINE> <DEDENT> @is_user_author.setter <NEW_LINE> def is_user_author(self, value): <NEW_LINE> <INDENT> self.__is_user_author = value | Post Model | 6259904d10dbd63aa1c72009 |
class IInternalWorkflowTransition(Interface): <NEW_LINE> <INDENT> pass | Request layer to indicate workflow transitions triggered by
other actions. | 6259904da219f33f346c7c2e |
class AiReviewTerrorismTaskInput(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Definition = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Definition = params.get("Definition") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fields are useless." % ",".join(memeber_set)) | Input parameter type for the sensitive content moderation task
| 6259904ddc8b845886d549ea |
class CallUnits(unittest.TestCase): <NEW_LINE> <INDENT> def testCase000(self): <NEW_LINE> <INDENT> kargs = {} <NEW_LINE> kargs['raw'] = True <NEW_LINE> kargs['rtype'] = False <NEW_LINE> apstr = 'file://///hostname///////////share/a///b//c////////////////////////' <NEW_LINE> retRef = ('raw','', '', 'file://///hostname///////////share/a///b//c////////////////////////') <NEW_LINE> ret = filesysobjects.apppaths.splitapppathx(apstr, appsplit=True, **kargs)[0] <NEW_LINE> self.assertEqual(retRef, ret) | Split network resources IEEE.1003.1/CIFS/SMB/UNC/URI
| 6259904d507cdc57c63a61cc |
class PersistentModelQuerySet(models.QuerySet): <NEW_LINE> <INDENT> def delete(self): <NEW_LINE> <INDENT> self.update(deleted=True) | Model implementing QuerySet for PersistentModel: allows soft-deletion | 6259904d6fece00bbacccde6 |
class NaayaContentTestCase(NaayaTestCase.NaayaTestCase): <NEW_LINE> <INDENT> def afterSetUp(self): <NEW_LINE> <INDENT> self.login() <NEW_LINE> <DEDENT> def beforeTearDown(self): <NEW_LINE> <INDENT> self.logout() <NEW_LINE> <DEDENT> def test_main(self): <NEW_LINE> <INDENT> addNySemProject(self._portal().info, id='doc1', title='doc1', lang='en', submitted=1, budget="112324.234", start_date="12/12/2000", end_date="12/12/2000") <NEW_LINE> addNySemProject(self._portal().info, id='doc1_fr', title='doc1_fr', lang='fr', submitted=1, budget="112324.234", start_date="12/12/2000", end_date="12/12/2000") <NEW_LINE> docs = self._portal().getCatalogedObjectsCheckView(meta_type=['Naaya Semide Project']) <NEW_LINE> for x in docs: <NEW_LINE> <INDENT> if x.getLocalProperty('title', 'en') == 'doc1': <NEW_LINE> <INDENT> meta = x <NEW_LINE> <DEDENT> if x.getLocalProperty('title', 'fr') == 'doc1_fr': <NEW_LINE> <INDENT> meta_fr = x <NEW_LINE> <DEDENT> <DEDENT> self.assertEqual(meta.getLocalProperty('title', 'en'), 'doc1') <NEW_LINE> self.assertEqual(meta_fr.getLocalProperty('title', 'fr'), 'doc1_fr') <NEW_LINE> meta.saveProperties(title='doc1_edited', lang='en', source="Eau de Web", budget="112324.234", start_date="12/12/2000", end_date="12/12/2000") <NEW_LINE> meta_fr.saveProperties(title='doc1_fr_edited', lang='fr', source="Eau de Web", budget="112324.234", start_date="12/12/2000", end_date="12/12/2000") <NEW_LINE> self.assertEqual(meta.getLocalProperty('title', 'en'), 'doc1_edited') <NEW_LINE> self.assertEqual(meta_fr.getLocalProperty('title', 'fr'), 'doc1_fr_edited') <NEW_LINE> self._portal().info.manage_delObjects([meta.id]) <NEW_LINE> self._portal().info.manage_delObjects([meta_fr.id]) <NEW_LINE> brains = self._portal().getCatalogedObjectsCheckView(meta_type=['Naaya Semide Project']) <NEW_LINE> self.assertEqual(len(brains), 0) | TestCase for NaayaContent object
| 6259904d76d4e153a661dc8e |
class DatasetFromNumpy(Dataset): <NEW_LINE> <INDENT> def __init__(self, ds): <NEW_LINE> <INDENT> self.ds = ds <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> x, y = self.ds[0][index], self.ds[1][index] <NEW_LINE> return x, y <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> assert len(self.ds[0]) == len(self.ds[1]) <NEW_LINE> return len(self.ds[0]) | TensorDataset with support of transforms.
| 6259904d462c4b4f79dbce2d |
class CurveHelperFNGeneratorProperties(bpy.types.PropertyGroup) : <NEW_LINE> <INDENT> amplitude : bpy.props.FloatProperty(name = "Amplitude", default = 1) <NEW_LINE> phase_multiplier : bpy.props.FloatProperty(name = "Phase Multiplier", default = 1) <NEW_LINE> phase_offset : bpy.props.FloatProperty(name = "Phase Offset") <NEW_LINE> value_offset : bpy.props.FloatProperty(name = "Value Offset") <NEW_LINE> use_additive : bpy.props.BoolProperty(name = "Additive") <NEW_LINE> function_type_items = [ ('SIN', 'Sine', ""), ('COS', 'Cosine', ""), ('TAN', 'Tangent', ""), ('SQRT', 'Square Root', ""), ('LN', 'Natural Logarithm', ""), ('SINC', 'Normalized Sine', ""), ] <NEW_LINE> function_type : bpy.props.EnumProperty(name = "Type", items = function_type_items) | name : StringProperty() | 6259904d30dc7b76659a0c61 |
class PicamLibError(PicamError): <NEW_LINE> <INDENT> def __init__(self, func, code, lib=None): <NEW_LINE> <INDENT> self.func=func <NEW_LINE> self.code=code <NEW_LINE> self.name=picam_defs.drPicamError.get(code,"Unknown") <NEW_LINE> self.desc=None <NEW_LINE> try: <NEW_LINE> <INDENT> if lib is not None: <NEW_LINE> <INDENT> self.desc=py3.as_str(lib.Picam_GetEnumerationString(PicamEnumeratedType.PicamEnumeratedType_Error,code)) <NEW_LINE> <DEDENT> <DEDENT> except PicamError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> descstr="" if self.desc is None else ": {}".format(self.desc) <NEW_LINE> self.msg="function '{}' raised error {}({}){}".format(func,self.code,self.name,descstr) <NEW_LINE> super().__init__(self.msg) | Generic Picam library error | 6259904d596a897236128fc5 |
class ExtraTreesClassifier(ForestClassifier): <NEW_LINE> <INDENT> def __init__(self, n_estimators=10, criterion="gini", max_depth=10, min_split=1, min_density=0.1, max_features=None, bootstrap=False, compute_importances=True, random_state=None): <NEW_LINE> <INDENT> super(ExtraTreesClassifier, self).__init__( base_estimator=ExtraTreeClassifier(), n_estimators=n_estimators, estimator_params=("criterion", "max_depth", "min_split", "min_density", "max_features", "random_state"), bootstrap=bootstrap, compute_importances=compute_importances, random_state=random_state) <NEW_LINE> self.criterion = criterion <NEW_LINE> self.max_depth = max_depth <NEW_LINE> self.min_split = min_split <NEW_LINE> self.min_density = min_density <NEW_LINE> self.max_features = max_features | An extra-trees classifier.
This class implements a meta estimator that fits a number of
randomized decision trees (a.k.a. extra-trees) on various sub-samples
of the dataset and uses averaging to improve the predictive accuracy
and control over-fitting.
Parameters
----------
n_estimators : integer, optional (default=10)
The number of trees in the forest.
criterion : string, optional (default="gini")
The function to measure the quality of a split. Supported criteria are
"gini" for the Gini impurity and "entropy" for the information gain.
max_depth : integer or None, optional (default=10)
The maximum depth of the tree. If None, then nodes are expanded until
all leaves are pure or until all leaves contain less than min_split
samples.
min_split : integer, optional (default=1)
The minimum number of samples required to split an internal node.
min_density : float, optional (default=0.1)
The minimum density of the `sample_mask` (i.e. the fraction of samples
in the mask). If the density falls below this threshold the mask is
recomputed and the input data is packed which results in data copying.
If `min_density` equals to one, the partitions are always represented
as copies of the original data. Otherwise, partitions are represented
as bit masks (a.k.a. sample masks).
max_features : int or None, optional (default=None)
The number of features to consider when looking for the best split.
If None, all features are considered, otherwise max_features are chosen
at random.
bootstrap : boolean, optional (default=False)
Whether bootstrap samples are used when building trees.
compute_importances : boolean, optional (default=True)
Whether feature importances are computed and stored into the
``feature_importances_`` attribute when calling fit.
random_state : int, RandomState instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If RandomState instance, random_state is the random number generator;
If None, the random number generator is the RandomState instance used
by `np.random`.
Attributes
----------
feature_importances_ : array of shape = [n_features]
The feature mportances (the higher, the more important the feature).
Notes
-----
**References**:
.. [1] P. Geurts, D. Ernst., and L. Wehenkel, "Extremely randomized trees",
Machine Learning, 63(1), 3-42, 2006.
See also
--------
ExtraTreesRegressor, RandomForestClassifier, RandomForestRegressor | 6259904dcb5e8a47e493cb9d |
class TestSimpleMatch(TestCase): <NEW_LINE> <INDENT> def test_simulation(self): <NEW_LINE> <INDENT> team1 = 'First' <NEW_LINE> team2 = 'Second' <NEW_LINE> for __ in range(1000): <NEW_LINE> <INDENT> match = SimpleMatch(team1, team2) <NEW_LINE> self.assertEqual(match.team1, team1) <NEW_LINE> self.assertEqual(match.team2, team2) <NEW_LINE> match.play() <NEW_LINE> if match.score1 > match.score2: <NEW_LINE> <INDENT> self.assertEqual(match.winner, team1) <NEW_LINE> <DEDENT> elif match.score2 > match.score1: <NEW_LINE> <INDENT> self.assertEqual(match.winner, team2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assertIsNone(match.winner) <NEW_LINE> <DEDENT> shortstr = '{:>3}-{:<3}'.format(match.score1, match.score2) <NEW_LINE> self.assertEqual(match.score_str(), shortstr) <NEW_LINE> longstr = '{} {} - {} {}'.format(team1, match.score1, match.score2, team2) <NEW_LINE> self.assertEqual(str(match), longstr) | Basic sanity checks for SimpleMatch. | 6259904db830903b9686ee91 |
class BirdMigrating(DNFZ): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> DNFZ.__init__(self, 'BirdMigrating') <NEW_LINE> self.setspeed(random.uniform(4,16)) <NEW_LINE> self.setyawrate(random.uniform(-0.2,0.2)) <NEW_LINE> self.randalt() <NEW_LINE> <DEDENT> def update(self, deltat=1.0): <NEW_LINE> <INDENT> DNFZ.update(self, deltat) <NEW_LINE> if (self.distance_from_home() > gen_settings.region_width or self.getalt() < self.ground_height() or self.getalt() > self.ground_height() + 1000): <NEW_LINE> <INDENT> self.randpos() <NEW_LINE> self.randalt() | a bird that circles slowly climbing, then dives | 6259904d29b78933be26aad9
class SGD(object): <NEW_LINE> <INDENT> def __init__(self, weights, lr=0.01, momentum=0.9, decay=1e-5): <NEW_LINE> <INDENT> self.v = _copy_weights_to_zeros(weights) <NEW_LINE> self.iterations = 0 <NEW_LINE> self.lr = self.init_lr = lr <NEW_LINE> self.momentum = momentum <NEW_LINE> self.decay = decay <NEW_LINE> <DEDENT> def iterate(self, m: Model): <NEW_LINE> <INDENT> self.lr = self.init_lr / (1 + self.iterations * self.decay) <NEW_LINE> for layer in m.layers: <NEW_LINE> <INDENT> for key in layer.weights.keys(): <NEW_LINE> <INDENT> self.v[key] = self.momentum * self.v[key] + self.lr * layer.gradients[key] <NEW_LINE> layer.weights[key] -= self.v[key] <NEW_LINE> <DEDENT> <DEDENT> self.iterations += 1 | Mini-batch gradient descent | 6259904d8e71fb1e983bcef4
class ProxyTransport(xmlrpc.client.Transport): <NEW_LINE> <INDENT> def __init__(self, schema="http"): <NEW_LINE> <INDENT> xmlrpc.client.Transport.__init__(self) <NEW_LINE> self.schema = schema <NEW_LINE> <DEDENT> def request(self, host, handler, request_body, verbose): <NEW_LINE> <INDENT> self.verbose = verbose <NEW_LINE> url = self.schema + "://" + host + handler <NEW_LINE> request = urllib.request.Request(url) <NEW_LINE> request.add_data(request_body) <NEW_LINE> request.add_header("User-Agent", self.user_agent) <NEW_LINE> request.add_header("Content-Type", "text/xml") <NEW_LINE> proxy_handler = urllib.request.ProxyHandler() <NEW_LINE> opener = urllib.request.build_opener(proxy_handler) <NEW_LINE> f = opener.open(request) <NEW_LINE> return self.parse_response(f) | Provides an XMl-RPC transport routing via a http proxy.
This is done by using urllib2, which in turn uses the environment
variable http_proxy and whatever else it is built to use (e.g. the
windows registry).
NOTE: the environment variable http_proxy should be set correctly.
See checkProxySetting() below.
Written from scratch but inspired by xmlrpc_urllib_transport.py
file from http://starship.python.net/crew/jjkunce/ byself, jjk.
A. Ellerton 2006-07-06 | 6259904d50485f2cf55dc3bb |
class OneshotAction: <NEW_LINE> <INDENT> def __init__(self, action=None, weight=1, watch=0): <NEW_LINE> <INDENT> if action is None: <NEW_LINE> <INDENT> raise UserError("action is required") <NEW_LINE> <DEDENT> self.action = action <NEW_LINE> self.weight = weight <NEW_LINE> self.watch = watch <NEW_LINE> <DEDENT> def __dir__(self): <NEW_LINE> <INDENT> return ["action", "weight", "watch", "msg"] <NEW_LINE> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> if name[0] == "_": <NEW_LINE> <INDENT> super().__setattr__(name, value) <NEW_LINE> return <NEW_LINE> <DEDENT> if name == "action": <NEW_LINE> <INDENT> if not isinstance(value, Action): <NEW_LINE> <INDENT> raise UserError("action must be an instance of Action") <NEW_LINE> <DEDENT> <DEDENT> elif name == "weight": <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise UserError("weight must be an integer") <NEW_LINE> <DEDENT> <DEDENT> elif name == "watch": <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise UserError("watch must be an integer") <NEW_LINE> <DEDENT> <DEDENT> super().__setattr__(name, value) <NEW_LINE> <DEDENT> def msg(self): <NEW_LINE> <INDENT> msg = p4runtime_pb2.ActionProfileAction() <NEW_LINE> msg.action.CopyFrom(self.action.msg()) <NEW_LINE> msg.weight = self.weight <NEW_LINE> msg.watch = self.watch <NEW_LINE> return msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.msg()) <NEW_LINE> <DEDENT> def _repr_pretty_(self, p, cycle): <NEW_LINE> <INDENT> p.text(str(self.msg())) | An action in a oneshot action set.
Construct with OneshotAction(<action (Action instance)>, weight=<weight>, watch=<watch>).
You can set / get attributes action (required), weight (default 1), watch (default 0). | 6259904d07d97122c42180d2 |
class FakeJwtReturnValueNone: <NEW_LINE> <INDENT> return_value = None <NEW_LINE> @classmethod <NEW_LINE> def decode(cls, *args, **kwargs): <NEW_LINE> <INDENT> return {"sub": cls.return_value} | Fake jwt object that returns `None` when executing `decode`. | 6259904d91af0d3eaad3b253 |
class Die: <NEW_LINE> <INDENT> def __init__(self, faces): <NEW_LINE> <INDENT> self.faces = faces <NEW_LINE> self.value = faces[0] <NEW_LINE> self.held = False <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.value) <NEW_LINE> <DEDENT> def roll(self): <NEW_LINE> <INDENT> self.value = choice(self.faces) <NEW_LINE> return self.value | Creates a Die object with arbitrary faces and a roll method.
Arguments:
faces = a list of objects that represent each face
(e.g., [1,2,3,4,5,6])
value = an item in faces that the object is currently 'showing'
held = whether or not the die's value is locked | 6259904d0c0af96317c57778 |
class TestVulnerabilityExceptionApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = swagger_client.api.vulnerability_exception_api.VulnerabilityExceptionApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_create_vulnerability_exception(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_vulnerability_exception(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_vulnerability_exception_expiration(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_vulnerability_exceptions(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_remove_vulnerability_exception(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_update_vulnerability_exception_expiration(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_update_vulnerability_exception_status(self): <NEW_LINE> <INDENT> pass | VulnerabilityExceptionApi unit test stubs | 6259904dec188e330fdf9cce |