code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Pools(Client):
    """Collection of traffic-manager pools exposed via the REST API.

    On construction this fetches the active pool list and caches a
    name -> href mapping in ``self.pools``.
    """

    def __init__(self, host=None, port=None, user=None, password=None,
                 api_version=None, ssl_verify=None):
        super(Pools, self).__init__(host, port, user, password,
                                    api_version, ssl_verify)
        # Trailing slash is significant: pool URLs below are built by
        # direct concatenation.
        self.config_path = '{0}/config/active/pools/'.format(self.api_version)
        self.pools = {}
        pools_list = self._api_get(self.config_path)
        for pool in pools_list['children']:
            self.pools[pool['name']] = pool['href']

    def __repr__(self):
        return '<Stingray Pools: {0}>'.format(self.api_host)

    @classmethod
    def from_client(cls, client):
        """Alternate constructor: reuse the connection settings of an
        existing Client instance."""
        pools = cls(host=client.api_host, port=client.api_port,
                    user=client.api_user, password=client.api_password,
                    api_version=client.api_version,
                    ssl_verify=client.ssl_verify)
        return pools

    def get(self, pool):
        """Return a Pool object for *pool*.

        Raises:
            StingrayAPIClientError: if the pool name is unknown.
        """
        try:
            return Pool.from_client(self, pool, self.pools[pool])
        except KeyError:
            raise StingrayAPIClientError(
                "Pool {0} not found".format(pool)
            )

    def add(self, pool, nodes=None, **pool_props):
        """Create a new pool named *pool* containing *nodes*.

        Args:
            pool: name of the pool to create.
            nodes: list of node strings; required.
            **pool_props: additional property sections merged into the
                request body.

        Returns:
            The newly created Pool object.

        Raises:
            StingrayAPIClientError: if nodes is missing or not a list.
        """
        if nodes is None:
            raise StingrayAPIClientError(
                "No nodes specified, cannot create pool"
            )
        # FIX: use isinstance() instead of a type() comparison so list
        # subclasses are also accepted.
        if not isinstance(nodes, list):
            raise StingrayAPIClientError(
                "Nodes must be specified as a list"
            )
        pool_data = dict(
            properties=dict(
                basic=dict(
                    nodes_table=[]
                )
            )
        )
        for prop in pool_props:
            pool_data['properties'].setdefault(prop, dict())
            for key, value in pool_props[prop].items():
                pool_data['properties'][prop][key] = value
        for node in nodes:
            pool_data['properties']['basic']['nodes_table'].append(dict(
                node=node,
                state='active'
            ))
        add_pool_response = self._api_put(
            '{0}{1}'.format(self.config_path, pool),
            pool_data
        )
        # FIX: config_path already ends with '/'; the original used
        # '{0}/{1}' here, yielding a '//' path inconsistent with the PUT
        # above and with delete() below.
        new_pool = Pool(pool, '{0}{1}'.format(self.config_path, pool),
                        add_pool_response['properties'], self.api_host,
                        self.api_port, self.api_user, self.api_password,
                        self.api_version, self.ssl_verify)
        self.pools[pool] = new_pool.config_path
        return new_pool

    def delete(self, pool):
        """Delete *pool* via the API; drop it from the local cache on
        success and return the raw API response."""
        delete_response = self._api_delete('{0}{1}'.format(
            self.config_path, pool))
        if 'success' in delete_response:
            self.pools.pop(pool)
        return delete_response
Class for interacting with Pools via the REST API
6259905cd7e4931a7ef3d694
class CirculationTest(unittest.TestCase):
    """Test class."""

    def test_get_active_mode_on(self) -> None:
        """ON operating mode is reported as-is, with no target/sub mode."""
        component = _circulation()
        component.operating_mode = OperatingModes.ON

        mode = component.active_mode

        self.assertEqual(OperatingModes.ON, mode.current)
        self.assertIsNone(mode.target)
        self.assertIsNone(mode.sub)

    def test_get_active_mode_off(self) -> None:
        """OFF operating mode is reported as-is, with no target/sub mode."""
        component = _circulation()
        component.operating_mode = OperatingModes.OFF

        mode = component.active_mode

        self.assertEqual(OperatingModes.OFF, mode.current)
        self.assertIsNone(mode.target)
        self.assertIsNone(mode.sub)
Test class.
6259905c45492302aabfdaf9
class AssignMemberView(View):
    """Assign a member to a card (ticket)."""

    def get(self, *args, **kwargs):
        """Return the ticket plus its board's member breakdown
        (assigned, unassigned, and all members)."""
        ticket = Ticket.objects.get(id=kwargs.get('ticket_id'))
        assigned = ticket.assigned.all()
        board = Board.objects.get(id=ticket.lists.board_id)
        unassigned = board.member.exclude(id__in=assigned).values()

        payload = TicketSerializer(ticket).data
        payload['board_id'] = ticket.lists.board_id
        payload['unassigned'] = list(unassigned)
        payload['assigned'] = list(assigned.values())
        payload['members'] = list(
            ticket.lists.board.member.all().values(
                'id', 'first_name', 'last_name', 'email'))
        return JsonResponse(payload, safe=False)

    def post(self, *args, **kwargs):
        """Attach the given user to the ticket; return the updated ticket."""
        user = User.objects.get(id=kwargs.get('user_id'))
        ticket = Ticket.objects.get(id=kwargs.get('ticket_id'))
        ticket.assigned.add(user)
        ticket.save()

        payload = TicketSerializer(ticket).data
        payload['assigned'] = list(ticket.assigned.all().values())
        payload['user'] = UserSerializer(user).data
        return JsonResponse(payload, safe=False)
Assign member to a card
6259905c32920d7e50bc7666
class BaseProgram(object):
    """Implements the general aspects of robot programs and basic server
    communication; manages the parameter dictionary."""

    def __init__(self):
        self.defaults = PARAM_DEFAULTS.copy()
        self.params = PARAM_DEFAULTS.copy()
        self.codes = PARAM_CODES.copy()

    def add_params(self, defaults, codes):
        """Register additional parameters (with defaults) and their codes."""
        self.defaults.update(defaults)
        self.params.update(defaults)
        self.codes.update(codes)

    @property
    def speed(self):
        """Current speed parameter."""
        return self.params['speed']

    def dist_to_time(self, dist):
        """Convert a distance into a drive time at the current speed."""
        return self.params['dist_to_time'] * dist / self.speed

    def angle_to_time(self, angle):
        """Convert an angle into a turn time at the current speed."""
        return self.params['angle_to_time'] * angle / self.speed

    def time_to_dist(self, time):
        """Inverse of dist_to_time."""
        return self.speed * time / self.params['dist_to_time']

    def time_to_angle(self, time):
        """Inverse of angle_to_time."""
        return self.speed * time / self.params['angle_to_time']

    def __call__(self, command):
        """Dispatch a textual command and return a human-readable reply."""
        if command == 'other:beep':
            myro.beep(self.params['beep_len'], self.params['beep_freq'])
            return "successful beep"
        if command == 'other:info':
            return "battery: " + str(myro.getBattery())
        if command.startswith(PARAM_PREFIX):
            code, value = command[len(PARAM_PREFIX):].split('=')
            if code not in self.codes:
                return "invalid code: " + code
            name = self.codes[code]
            # An empty value or "?" queries the current setting.
            if value == "" or value == "?":
                return name + " = " + str(self.params[name])
            # Any prefix of the word "default" restores the default value.
            if "default".startswith(value):
                n = self.defaults[name]
            else:
                try:
                    n = float(value)
                except ValueError:
                    return "NaN: " + value
            self.params[name] = n
            return name + " = " + str(n)

    def start(self):
        pass

    def stop(self):
        myro.stop()

    def reset(self):
        pass

    def loop(self):
        pass
Implements the general aspects of robot programs and basic server communication. Also manages the parameter dictionary.
6259905cbe8e80087fbc06a4
class MarkdownSlideshowCommand(sublime_plugin.TextCommand):
    """Build an HTML slideshow from the current view's Markdown contents
    and (optionally) open it in the default web browser."""

    def run(self, edit, themes=None, theme='default', extensions=None,
            clean=False, output_file=None, browser=True, presenter=False,
            save=None, path=None):
        # FIX: `extensions` previously used a mutable default ([]) shared
        # across calls; use None and normalize here instead.
        opts = {
            'themes': themes,
            'theme': theme,
            'contents': self.view.substr(sublime.Region(0, self.view.size())),
            'extensions': [] if extensions is None else extensions,
            'clean': clean
        }
        # Fall back to the bundled themes directory when none is usable.
        if opts['themes'] is None or not os.path.isdir(opts['themes']):
            opts['themes'] = os.path.join(pkg_path, 'themes')
        # Resolve the output directory; an unusable path falls back to a
        # fresh temporary directory.
        if output_file is None:
            output_path = None
        else:
            output_path = os.path.abspath(os.path.dirname(output_file))
            if not os.path.isdir(output_path):
                output_path = None
        if output_path is None:
            output_path = tempfile.mkdtemp()
            output_file = os.path.join(output_path, 'slide.html')
        slide = converter.Slide(opts)
        html = slide.maker(output_path)
        util.fs_writer(output_file, html)
        if browser:
            url = 'file://' + output_file
            # The io2012 theme supports a presenter-view toggle via a
            # query argument.
            if slide.options['theme'] == 'io2012':
                url += '?presentme='
                url += 'true' if presenter else 'false'
            webbrowser.open_new_tab(url)
slideshow in your web browser from file contents
6259905c498bea3a75a5910d
class semicolon(parser.semicolon):
    """unique_id = selected_waveform_assignment : semicolon"""

    def __init__(self, sString=';'):
        # Delegate to the base rule; sString is kept for interface
        # compatibility even though the base initializer takes no value.
        parser.semicolon.__init__(self)
unique_id = selected_waveform_assignment : semicolon
6259905c91f36d47f223199f
class SurfaceBaseSeries(BaseSeries):
    """A base class for 3D surfaces."""

    is_3Dsurface = True

    def __init__(self):
        super(SurfaceBaseSeries, self).__init__()
        self.surface_color = None

    def get_color_array(self):
        """Evaluate ``surface_color`` over the mesh face centers.

        A callable color is vectorized and applied to the face-center
        coordinates (parameter meshes for parametric surfaces, coordinate
        meshes otherwise); a plain value is broadcast over every point.
        """
        np = import_module('numpy')
        color = self.surface_color
        if not isinstance(color, Callable):
            # Constant color: broadcast to the number of points.
            return color * np.ones(self.nb_of_points)
        func = np.vectorize(color)
        nargs = arity(color)
        if self.is_parametric:
            params = list(map(centers_of_faces,
                              self.get_parameter_meshes()))
            if nargs == 1:
                return func(params[0])
            elif nargs == 2:
                return func(*params)
        coords = list(map(centers_of_faces, self.get_meshes()))
        if nargs == 1:
            return func(coords[0])
        elif nargs == 2:
            return func(*coords[:2])
        else:
            return func(*coords)
A base class for 3D surfaces.
6259905c23849d37ff8526e6
class CancelFulfillmentOrderInputSet(InputSet):
    """An InputSet with methods appropriate for specifying the inputs to
    the CancelFulfillmentOrder Choreo. The InputSet object is used to
    specify input parameters when executing this Choreo."""

    def set_AWSAccessKeyId(self, value):
        """Set the AWSAccessKeyId input for this Choreo."""
        super(CancelFulfillmentOrderInputSet, self)._set_input('AWSAccessKeyId', value)

    def set_AWSMarketplaceId(self, value):
        """Set the AWSMarketplaceId input for this Choreo."""
        super(CancelFulfillmentOrderInputSet, self)._set_input('AWSMarketplaceId', value)

    def set_AWSMerchantId(self, value):
        """Set the AWSMerchantId input for this Choreo."""
        super(CancelFulfillmentOrderInputSet, self)._set_input('AWSMerchantId', value)

    def set_AWSSecretKeyId(self, value):
        """Set the AWSSecretKeyId input for this Choreo."""
        super(CancelFulfillmentOrderInputSet, self)._set_input('AWSSecretKeyId', value)

    def set_Endpoint(self, value):
        """Set the Endpoint input for this Choreo."""
        super(CancelFulfillmentOrderInputSet, self)._set_input('Endpoint', value)

    def set_MWSAuthToken(self, value):
        """Set the MWSAuthToken input for this Choreo."""
        super(CancelFulfillmentOrderInputSet, self)._set_input('MWSAuthToken', value)

    def set_ResponseFormat(self, value):
        """Set the ResponseFormat input for this Choreo."""
        super(CancelFulfillmentOrderInputSet, self)._set_input('ResponseFormat', value)

    def set_SellerFulfillmentOrderId(self, value):
        """Set the SellerFulfillmentOrderId input for this Choreo."""
        super(CancelFulfillmentOrderInputSet, self)._set_input('SellerFulfillmentOrderId', value)
An InputSet with methods appropriate for specifying the inputs to the CancelFulfillmentOrder Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
6259905c8e71fb1e983bd0eb
class Error(Exception):
    """Base ClientDeployInfo Exception type."""

    pass
Base ClientDeployInfo Exception type.
6259905c8e7ae83300eea6ae
class Privileges(plugin.VerbosityMixIn, common.WinProcessFilter):
    """Prints process privileges."""

    name = "privileges"

    table_header = plugin.PluginHeader(
        dict(name="Process", type="_EPROCESS"),
        dict(name="Value", width=3, align="r"),
        dict(name="Privileges", width=40),
        dict(name="Attributes", type="list")
    )

    def collect(self):
        """Yield one (process, value, privilege name, flags) row per
        token privilege of each filtered process."""
        priv_names = self.session.GetParameter("privilege_table")
        for proc in self.filter_processes():
            for priv_value, priv_flags in proc.Token.GetPrivileges():
                # Below verbosity 2, only show privileges that are present.
                if self.plugin_args.verbosity <= 1 and "Present" not in priv_flags:
                    continue
                yield (proc, priv_value, priv_names.get(priv_value),
                       priv_flags)
Prints process privileges.
6259905c16aa5153ce401b04
class BlazeShareBuybackAuthLoaderNotInteractiveTestCase(
        BlazeShareBuybackAuthLoaderTestCase):
    """Test case for passing a non-interactive symbol and a dict of
    resources."""

    def loader_args(self, dates):
        """Strip interactive resources from the parent's bound expression."""
        parent = super(BlazeShareBuybackAuthLoaderNotInteractiveTestCase,
                       self)
        (bound_expr,) = parent.loader_args(dates)
        return swap_resources_into_scope(bound_expr, {})
Test case for passing a non-interactive symbol and a dict of resources.
6259905c4428ac0f6e659b5e
class Campaign:
    """A class for a specific campaign."""

    def __init__(self, parser):
        self.parser = parser
        # Basic metadata straight from the campaign definition.
        self.name = parser.get_text_val("name")
        self.id = parser.get_text_val("id")
        self.description = parser.get_text_val("description")
        # One difficulty tag per selectable difficulty level.
        self.levels = len(parser.get_all(tag="difficulty"))
        self.credits_link = "https://wiki.wesnoth.org/Credits#" + self.id
        self.units_link = "https://units.wesnoth.org/trunk/mainline/en_US/%s.html" % self.id
A class for a specific campaign.
6259905c462c4b4f79dbd027
class IAtlasSaver(_object):
    """Proxy of C++ FIFE::IAtlasSaver class."""

    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, IAtlasSaver, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, IAtlasSaver, name)
    __repr__ = _swig_repr
    __swig_destroy__ = _fife.delete_IAtlasSaver
    __del__ = lambda self: None

    def save(self, *args):
        """Forward to the wrapped C++ save() implementation."""
        return _fife.IAtlasSaver_save(self, *args)

    def __init__(self):
        # Director support: when subclassed from Python, pass self so C++
        # can call back into the Python overrides; otherwise build a
        # plain proxy.
        if self.__class__ == IAtlasSaver:
            director_self = None
        else:
            director_self = self
        this = _fife.new_IAtlasSaver(director_self, )
        try:
            self.this.append(this)
        except:
            self.this = this

    def __disown__(self):
        # Hand ownership of the underlying C++ object over to FIFE.
        self.this.disown()
        _fife.disown_IAtlasSaver(self)
        return weakref_proxy(self)
Proxy of C++ FIFE::IAtlasSaver class
6259905ca17c0f6771d5d6b3
class ComplexPasswordValidator:
    """Validate that the password contains at least one digit and at
    least one letter."""

    def validate(self, password, user=None):
        # NOTE(review): the pattern requires a letter directly adjacent to
        # a digit somewhere in the password (e.g. "a_1" would NOT pass) —
        # confirm that is the intended policy.
        pattern = '([A-Za-z]+[0-9]|[0-9]+[A-Za-z])[A-Za-z0-9]*'
        if re.search(pattern, password) is None:
            raise ValidationError("Пароль не удовлетворяет требованиям", )

    def get_help_text(self):
        return 'Ваш пароль должен содержать как минимум 1 цифру и 1 букву'
Validate that the password contains at least one digit and at least one letter.
6259905c16aa5153ce401b05
class Level():
    """This is a generic super-class used to define a level. Create a
    child class for each level with level-specific info."""

    def __init__(self, player):
        # NOTE(review): __init__ sets `backgroundImage` but draw() reads
        # `background`; child classes presumably assign `background` —
        # confirm before unifying the names.
        self.backgroundImage = None
        self.world_shift = 0
        self.player = player

    def update(self):
        """Per-frame logic hook; overridden by concrete levels."""
        pass

    def draw(self, screen):
        """Blit the background, scrolled at one third of the world shift."""
        screen.blit(self.background, (self.world_shift // 3, 0))
This is a generic super-class used to define a level. Create a child class for each level with level-specific info.
6259905c67a9b606de5475b2
class SFTPServerFile:
    """A wrapper around SFTPServer used to access files it manages."""

    def __init__(self, server):
        self._server = server
        self._file_obj = None

    @asyncio.coroutine
    def _maybe_await(self, result):
        # Server hooks may be plain functions or coroutines; normalize
        # both to a concrete value.
        if asyncio.iscoroutine(result):
            result = yield from result
        return result

    @asyncio.coroutine
    def stat(self, path):
        """Return SFTPAttrs for path, converting os.stat_result values."""
        attrs = yield from self._maybe_await(self._server.stat(path))
        if isinstance(attrs, os.stat_result):
            attrs = SFTPAttrs.from_local(attrs)
        return attrs

    @asyncio.coroutine
    def setstat(self, path, attrs):
        """Apply attrs to path; the server's return value is discarded."""
        yield from self._maybe_await(self._server.setstat(path, attrs))

    @asyncio.coroutine
    def _mode(self, path):
        """Return permission bits for path, or 0 when it is missing or
        not accessible."""
        try:
            return (yield from self.stat(path)).permissions
        except OSError as exc:
            if exc.errno in (errno.ENOENT, errno.EACCES):
                return 0
            else:
                raise
        except SFTPError as exc:
            if exc.code in (FX_NO_SUCH_FILE, FX_PERMISSION_DENIED):
                return 0
            else:
                raise

    @asyncio.coroutine
    def exists(self, path):
        """True when path exists and is accessible."""
        return (yield from self._mode(path)) != 0

    @asyncio.coroutine
    def isdir(self, path):
        """True when path refers to a directory."""
        return stat.S_ISDIR((yield from self._mode(path)))

    @asyncio.coroutine
    def mkdir(self, path):
        """Create a directory at path with default attributes."""
        yield from self._maybe_await(self._server.mkdir(path, SFTPAttrs()))

    @asyncio.coroutine
    def listdir(self, path):
        """Return the server's directory listing for path."""
        return (yield from self._maybe_await(self._server.listdir(path)))

    @asyncio.coroutine
    def open(self, path, mode='rb'):
        """Open path on the server, remember the handle, return self."""
        pflags, _ = _mode_to_pflags(mode)
        file_obj = self._server.open(path, pflags, SFTPAttrs())
        self._file_obj = yield from self._maybe_await(file_obj)
        return self

    @asyncio.coroutine
    def read(self, size, offset):
        """Read size bytes at offset from the open handle."""
        return (yield from self._maybe_await(
            self._server.read(self._file_obj, offset, size)))

    @asyncio.coroutine
    def write(self, data, offset):
        """Write data at offset; returns the byte count reported."""
        return (yield from self._maybe_await(
            self._server.write(self._file_obj, offset, data)))

    @asyncio.coroutine
    def close(self):
        """Close the open handle on the server."""
        yield from self._maybe_await(self._server.close(self._file_obj))
A wrapper around SFTPServer used to access files it manages
6259905c3eb6a72ae038bc82
class Question(models.Model):
    """Possible questions for composing an examination."""

    # The examiner's question text.
    be_asked = models.TextField(verbose_name="вопрос экзаменатора")

    def __str__(self):
        return self.be_asked

    class Meta:
        verbose_name = 'Вопрос'
        verbose_name_plural = 'Вопросы'
Возможные вопросы для составления испытания
6259905c097d151d1a2c2690
class ePOSElement:
    """Base class for the various elements in the ePOS object."""

    # Python 2 style abstract base class declaration.
    __metaclass__ = ABCMeta

    text = None
    attr = {}
    local_attributes = {}
    required_attributes = []

    def __init__(self, text=None, attr=None):
        self.text = text
        if attr is not None:
            self.attr = attr

    @abstractmethod
    def get_tag(self):
        """Return the element's tag name; implemented by subclasses."""
        pass

    def get_attr(self):
        """Return the cleaned attribute dict for serialization."""
        return self.clean_attributes()

    def clean_attributes(self):
        """Overlay instance attrs onto the locally known ones and drop
        any that are unset (None)."""
        cleaned = {}
        for key in self.attr:
            if key in self.local_attributes:
                self.local_attributes[key] = self.attr[key]
        for key, val in self.local_attributes.iteritems():
            if val is not None:
                cleaned[key] = val
        return cleaned

    def get_text(self):
        """Return the element's text content."""
        return self.text
Base class for the various elements in the ePOS object
6259905c009cb60464d02b58
class TestRequirementFileParserTestCases(unittest.TestCase):
    """Test cases for parsing requirements files into packages."""

    def _parse(self, name):
        # Parse a fixture requirements file and return the package list.
        return parse_requirements_file(
            open(os.path.join(BASE_PATH, 'files/' + name)))

    def test_empty_file(self):
        """An empty requirements file yields no packages."""
        self.assertEqual(len(self._parse('requirements_empty.txt')), 0)

    def test_file_with_comments(self):
        """Comment lines are skipped; one real package remains."""
        self.assertEqual(len(self._parse('requirements_comments.txt')), 1)

    def test_file_with_repo_links(self):
        """Repository links count as a single package."""
        self.assertEqual(len(self._parse('requirements_repos.txt')), 1)
Test cases for parsing requirements files into packages
6259905c4e4d562566373a2a
class ElasticsearchFilterOnlyRetriever(ElasticsearchRetriever):
    """Naive "Retriever" that returns all documents that match the given
    filters. No impact of query at all. Helpful for benchmarking, testing
    and if you want to do QA on small documents without an "active"
    retriever."""

    def retrieve(self, query: str, filters: dict = None, top_k: int = 10,
                 index: str = None) -> List[Document]:
        """Fetch up to top_k documents matching filters from index
        (defaults to the document store's index); query is ignored."""
        target_index = self.document_store.index if index is None else index
        return self.document_store.query(
            query=None,
            filters=filters,
            top_k=top_k,
            custom_query=self.custom_query,
            index=target_index,
        )
Naive "Retriever" that returns all documents that match the given filters. No impact of query at all. Helpful for benchmarking, testing and if you want to do QA on small documents without an "active" retriever.
6259905c0c0af96317c57870
class BufferedConsumer(object):
    """Consumer that stores the content in an internal buffer."""

    def __init__(self, bytes_producer):
        self.producer = bytes_producer
        # Register ourselves so the producer can push data into write().
        self.producer.consumer = self
        self.buffer = StringIO()

    def resumeProducing(self):
        """Forward resume to the producer, if any."""
        producer = self.producer
        if producer:
            producer.resumeProducing()

    def stopProducing(self):
        """Forward stop to the producer, if any."""
        producer = self.producer
        if producer:
            producer.stopProducing()

    def pauseProducing(self):
        """Forward pause to the producer, if any."""
        producer = self.producer
        if producer:
            producer.pauseProducing()

    def write(self, content):
        """Append produced content to the internal buffer."""
        self.buffer.write(content)
Consumer that stores the content in a internal buffer.
6259905c8a43f66fc4bf37af
class TestFileResponse(unittest.TestCase):
    """FileResponse unit test stubs."""

    def setUp(self):
        """No fixtures are required yet."""
        pass

    def tearDown(self):
        """Nothing to clean up."""
        pass

    def testFileResponse(self):
        """Placeholder: FileResponse is not yet exercised."""
        pass
FileResponse unit test stubs
6259905ca219f33f346c7e28
class AgnosticClientBase(AgnosticBase):
    """MotorClient and MotorReplicaSetClient common functionality."""

    database_names = AsyncRead()
    server_info = AsyncRead()
    alive = AsyncRead()
    close_cursor = AsyncCommand()
    drop_database = AsyncCommand().unwrap('MotorDatabase')
    disconnect = DelegateMethod()
    tz_aware = ReadOnlyProperty()
    close = DelegateMethod()
    is_primary = ReadOnlyProperty()
    is_mongos = ReadOnlyProperty()
    max_bson_size = ReadOnlyProperty()
    max_message_size = ReadOnlyProperty()
    min_wire_version = ReadOnlyProperty()
    max_wire_version = ReadOnlyProperty()
    max_pool_size = ReadOnlyProperty()
    _ensure_connected = AsyncRead()

    def __init__(self, io_loop, *args, **kwargs):
        check_deprecated_kwargs(kwargs)
        # Inject our framework-aware pool and defer connecting until the
        # client is actually used.
        kwargs['_pool_class'] = functools.partial(
            MotorPool, io_loop, self._framework)
        kwargs['_connect'] = False
        delegate = self.__delegate_class__(*args, **kwargs)
        super(AgnosticClientBase, self).__init__(delegate)
        if io_loop:
            self._framework.check_event_loop(io_loop)
            self.io_loop = io_loop
        else:
            self.io_loop = self._framework.get_event_loop()

    def get_io_loop(self):
        """Return the event loop this client is bound to."""
        return self.io_loop

    def __getattr__(self, name):
        # Attribute (and item) access yields a database wrapper of the
        # right class for the active async framework.
        db_class = create_class_with_framework(
            AgnosticDatabase, self._framework, self.__module__)
        return db_class(self, name)

    __getitem__ = __getattr__

    def get_default_database(self):
        """Return the database named in the connection URI; raise
        ConfigurationError when none was given."""
        attr_name = mangle_delegate_name(
            self.__class__, '__default_database_name')
        default_db_name = getattr(self.delegate, attr_name)
        if default_db_name is None:
            raise pymongo.errors.ConfigurationError(
                'No default database defined')
        return self[default_db_name]
MotorClient and MotorReplicaSetClient common functionality.
6259905c91f36d47f22319a0
class MultiGroupMetaClass(list):
    """Multivalued Group Metaclass. Metaclass used to ensure list
    elements are instances of the right metaclasses."""

    # NOTE(review): `struct` and `struct_metaclass` are resolved from an
    # enclosing scope that is not visible here — presumably closure
    # variables of a factory function; confirm at the definition site.

    def __setitem__(self, index, item):
        """Replace an element, enforcing the expected metaclass."""
        msg = '{} list elements should be instances of {}'.format(
            struct.metadata.name, struct_metaclass)
        assert isinstance(item, struct_metaclass), msg
        return super(MultiGroupMetaClass, self).__setitem__(index, item)

    def append(self, item):
        """Append an element, enforcing the expected metaclass."""
        msg = '{} list elements should be instances of {}'.format(
            struct.metadata.name, struct_metaclass)
        assert isinstance(item, struct_metaclass), msg
        return super(MultiGroupMetaClass, self).append(item)
Multivalued Group Metaclass. Metaclass used to ensure list elements are instances of right metaclasses.
6259905c63d6d428bbee3d99
class PopulateTestData(APIView):
    """Endpoint used to capture live test data, stored as JSON in a text
    file for later Android POST-data testing.

    Example payload: {"locations": '{"lat": 1, "long": 1}'}
    """

    def post(self, request, format=None):
        """Validate and persist posted locations: 201 on success, 400 on
        invalid data."""
        new_location = request.data.get('locations')
        if not validate_data(new_location):
            return HttpResponse(status=400)
        update_locations(new_location)
        return HttpResponse(status=201)
This endpoint is used to acquire live test data and store it in a text file as JSON, so it can later be used to test Android POST data. Example: {"locations": '{"lat": 1, "long": 1}''}
6259905c3c8af77a43b68a52
class EditDistance(MetricBase):
    """Accumulate edit-distance sums and sequence counts over
    mini-batches and report the average distance and instance error.

    Edit distance quantifies how dissimilar two sequences are by counting
    the minimum number of operations required to transform one into the
    other (https://en.wikipedia.org/wiki/Edit_distance).

    Args:
        name: the metric's name.
    """

    def __init__(self, name):
        super(EditDistance, self).__init__(name)
        self.total_distance = .0
        self.seq_num = 0
        self.instance_error = 0

    def update(self, distances, seq_num):
        """Fold one batch of per-sequence distances into the totals.

        Args:
            distances: numpy ndarray of edit distances for the batch.
            seq_num: number of sequences in the batch.

        Raises:
            ValueError: on wrong argument types.
        """
        if not _is_numpy_(distances):
            raise ValueError("The 'distances' must be a numpy ndarray.")
        if not _is_number_(seq_num):
            raise ValueError("The 'seq_num' must be a number(int, float).")
        # Sequences with distance 0 were predicted exactly right.
        exact_matches = np.sum(distances == 0)
        self.seq_num += seq_num
        self.instance_error += seq_num - exact_matches
        self.total_distance += np.sum(distances)

    def eval(self):
        """Return (average distance, instance error rate) over all data
        accumulated so far.

        Raises:
            ValueError: when no batches have been accumulated.
        """
        if self.seq_num == 0:
            raise ValueError(
                "There is no data in EditDistance Metric. Please check layers.edit_distance output has been added to EditDistance."
            )
        avg_distance = self.total_distance / self.seq_num
        avg_instance_error = self.instance_error / float(self.seq_num)
        return avg_distance, avg_instance_error
Edit distance is a way of quantifying how dissimilar two strings (e.g., words) are to one another by counting the minimum number of operations required to transform one string into the other. Refer to https://en.wikipedia.org/wiki/Edit_distance Accumulate edit distance sum and sequence number from mini-batches and compute the average edit_distance and instance error of all batches. Args: name: the metrics name Examples: .. code-block:: python distances, seq_num = fluid.layers.edit_distance(input, label) distance_evaluator = fluid.metrics.EditDistance() for epoch in PASS_NUM: distance_evaluator.reset() for data in batches: loss = exe.run(fetch_list=[cost] + list(edit_distance_metrics)) distance_evaluator.update(distances, seq_num) distance, instance_error = distance_evaluator.eval() In the above example: - 'distance' is the average of the edit distance in a pass. - 'instance_error' is the instance error rate in a pass.
6259905c3539df3088ecd8bf
class JsonTokenAuthMiddleware(BaseJSONWebTokenAuthentication):
    """Token authorization middleware for Django Channels 2."""

    def __init__(self, app):
        self.app = app

    async def __call__(self, scope, receive, send):
        """Resolve the JWT in the scope to a user, degrading to
        AnonymousUser on any failure, then delegate to the inner app."""
        try:
            token = get_jwt_value(scope)
            payload = jwt_decode_handler(token)
            scope["user"] = await get_user(payload.get("user_id"))
        except:  # any auth failure falls back to anonymous access
            scope["user"] = AnonymousUser()
        return await self.app(scope, receive, send)
Token authorization middleware for Django Channels 2
6259905c7047854f463409e4
class AlignAceConsumer(object):
    """The general purpose consumer for the AlignAceScanner (DEPRECATED).

    Should be passed as the consumer to the feed method of the
    AlignAceScanner. After 'consuming' the file, the list of motifs is
    available in the ``motifs`` property. This class is DEPRECATED;
    please use the read() function in this module instead.
    """

    def __init__(self):
        import warnings
        warnings.warn("Bio.Motif.Parsers.AlignAce.AlignAceConsumer is deprecated; please use the read() function in this module instead.", Bio.BiopythonDeprecationWarning)
        self.motifs = []
        self.current_motif = None
        self.param_dict = None

    def parameters(self, line):
        """Start a fresh parameter dictionary."""
        self.param_dict = {}

    def parameter(self, line):
        """Record one 'name = value' parameter line."""
        pieces = line.split("=")
        self.param_dict[pieces[0].strip()] = pieces[1].strip()

    def sequences(self, line):
        """Start collecting input sequence names."""
        self.seq_dict = []

    def sequence(self, line):
        """Record one input sequence name (second tab-separated field)."""
        self.seq_dict.append(line.split("\t")[1])

    def motif(self, line):
        """Begin a new DNA motif."""
        self.current_motif = Motif()
        self.motifs.append(self.current_motif)
        self.current_motif.alphabet = IUPAC.unambiguous_dna

    def motif_hit(self, line):
        """Add one instance (first tab-separated field) to the motif."""
        self.current_motif.add_instance(
            Seq(line.split("\t")[0], IUPAC.unambiguous_dna))

    def motif_score(self, line):
        """Record the motif score (last whitespace-separated token)."""
        self.current_motif.score = float(line.split()[-1])

    def motif_mask(self, line):
        """Record the motif mask line."""
        self.current_motif.set_mask(line.strip("\n\c"))

    def noevent(self, line):
        """Ignore lines with no semantic content."""
        pass

    def version(self, line):
        """Record the program version line."""
        self.ver = line

    def command_line(self, line):
        """Record the command line used to produce the output."""
        self.cmd_line = line
The general purpose consumer for the AlignAceScanner (DEPRECATED). Should be passed as the consumer to the feed method of the AlignAceScanner. After 'consuming' the file, it has the list of motifs in the motifs property. This class is DEPRECATED; please use the read() function in this module instead.
6259905c24f1403a926863e0
class EmbeddingGenerator(nn.Module): <NEW_LINE> <INDENT> def __init__(self, content_layers=[4], style_layers=[1,2,3,4,5]): <NEW_LINE> <INDENT> super(EmbeddingGenerator, self).__init__() <NEW_LINE> pretrained_model = models.vgg19(pretrained=True).features.eval().to(device) <NEW_LINE> for layer in pretrained_model: <NEW_LINE> <INDENT> layer.requires_grad = False <NEW_LINE> <DEDENT> self.pretrained_layers = [] <NEW_LINE> self.content_layers = [] <NEW_LINE> self.style_layers = [] <NEW_LINE> required_layers = max(max(content_layers), max(style_layers)) <NEW_LINE> conv_counter = 0 <NEW_LINE> for i,layer in enumerate(pretrained_model): <NEW_LINE> <INDENT> if isinstance(layer,nn.Conv2d): <NEW_LINE> <INDENT> conv_counter+=1 <NEW_LINE> if conv_counter>required_layers: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if conv_counter in content_layers: <NEW_LINE> <INDENT> self.content_layers.append(i) <NEW_LINE> <DEDENT> if conv_counter in style_layers: <NEW_LINE> <INDENT> self.style_layers.append(i) <NEW_LINE> <DEDENT> <DEDENT> self.pretrained_layers.append(layer) <NEW_LINE> <DEDENT> self.content_layers = set(self.content_layers) <NEW_LINE> self.style_layers = set(self.style_layers) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> content_activations = [] <NEW_LINE> style_activations = [] <NEW_LINE> for i,layer in enumerate(self.pretrained_layers): <NEW_LINE> <INDENT> x = layer(x) <NEW_LINE> if i in self.content_layers: <NEW_LINE> <INDENT> content_activations.append(x.flatten()) <NEW_LINE> <DEDENT> if i in self.style_layers: <NEW_LINE> <INDENT> style_activations.append(x.view(x.shape[1],-1)) <NEW_LINE> <DEDENT> <DEDENT> return content_activations, style_activations
Compute activations in content and style layers using pretrained VGG19 model
6259905c99cbb53fe6832504
class TextInputField(RichTextInputField): <NEW_LINE> <INDENT> pass
Generic model for text input field.
6259905c30dc7b76659a0d92
@api.route('/demo') <NEW_LINE> class DemoResource(Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return { 'status': 'success', 'message': 'get request successful', }
Resource class for demo use
6259905cd99f1b3c44d06cc5
class TranslateProblem(text_problems.Text2TextProblem): <NEW_LINE> <INDENT> def is_generate_per_split(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def approx_vocab_size(self): <NEW_LINE> <INDENT> return 2**15 <NEW_LINE> <DEDENT> def source_data_files(self, dataset_split): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def vocab_data_files(self): <NEW_LINE> <INDENT> return self.source_data_files(problem.DatasetSplit.TRAIN) <NEW_LINE> <DEDENT> def generate_samples(self, data_dir, tmp_dir, dataset_split): <NEW_LINE> <INDENT> train = dataset_split == problem.DatasetSplit.TRAIN <NEW_LINE> datasets = self.source_data_files(dataset_split) <NEW_LINE> tag = "train" if train else "dev" <NEW_LINE> data_path = compile_data(tmp_dir, datasets, "%s-compiled-%s" % (self.name, tag)) <NEW_LINE> if self.vocab_type == text_problems.VocabType.SUBWORD: <NEW_LINE> <INDENT> generator_utils.get_or_generate_vocab( data_dir, tmp_dir, self.vocab_filename, self.approx_vocab_size, self.vocab_data_files()) <NEW_LINE> <DEDENT> return text_problems.text2text_txt_iterator(data_path + ".lang1", data_path + ".lang2")
Base class for translation problems.
6259905c4a966d76dd5f0516
class ParentNodeRelatedField(serializers.PrimaryKeyRelatedField): <NEW_LINE> <INDENT> def get_queryset(self): <NEW_LINE> <INDENT> article = Article.objects.get(id=self.context['view'].kwargs['article_id']) <NEW_LINE> return Comment.objects.filter(article=article, is_deleted=False)
筛选可回复的评论: 1、同一篇文章的所有评论(包括自己的评论) 2、已经被删除的评论不能回复
6259905cf548e778e596cbae
class WebSocketChannel(object): <NEW_LINE> <INDENT> addr = ['127.0.0.1'] <NEW_LINE> closed = 0 <NEW_LINE> def __init__(self, server, conn): <NEW_LINE> <INDENT> self.server = server <NEW_LINE> self.conn = conn <NEW_LINE> <DEDENT> def push(self, producer, send=1): <NEW_LINE> <INDENT> if type(producer) == str: <NEW_LINE> <INDENT> lines = producer.split('\r\n') <NEW_LINE> statusline = lines.pop(0) <NEW_LINE> assert statusline <NEW_LINE> message = email.message_from_string('\r\n'.join(lines)) <NEW_LINE> self.conn.send(unicode(message.get_payload(), 'utf-8')) <NEW_LINE> <DEDENT> <DEDENT> def done(self): <NEW_LINE> <INDENT> pass
Medusa channel for WebSocket server
6259905cb57a9660fecd309f
class CourseRecord(models.Model): <NEW_LINE> <INDENT> class_obj = models.ForeignKey(verbose_name="班级", to="ClassList", on_delete=models.CASCADE) <NEW_LINE> day_num = models.IntegerField(verbose_name="节次", help_text=u"此处填写第几节课或第几天课程...,必须为数字") <NEW_LINE> teacher = models.ForeignKey(verbose_name="讲师", to='UserInfo',limit_choices_to={"depart_id__in":[1002,1003]}, on_delete=models.CASCADE) <NEW_LINE> date = models.DateField(verbose_name="上课日期", auto_now_add=True) <NEW_LINE> course_title = models.CharField(verbose_name='本节课程标题', max_length=64, blank=True, null=True) <NEW_LINE> course_memo = models.TextField(verbose_name='本节课程内容概要', blank=True, null=True) <NEW_LINE> has_homework = models.BooleanField(default=True, verbose_name="本节有作业") <NEW_LINE> homework_title = models.CharField(verbose_name='本节作业标题', max_length=64, blank=True, null=True) <NEW_LINE> homework_memo = models.TextField(verbose_name='作业描述', max_length=500, blank=True, null=True) <NEW_LINE> exam = models.TextField(verbose_name='踩分点', max_length=300, blank=True, null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{0} day{1}".format(self.class_obj, self.day_num)
上课记录表 (班级记录)
6259905c097d151d1a2c2691
class OpBlacklistParser(object): <NEW_LINE> <INDENT> def __init__(self, blacklist_fp, api_model): <NEW_LINE> <INDENT> self.blacklist_fp = blacklist_fp <NEW_LINE> self.api_model = api_model <NEW_LINE> self._cfg_parser = ConfigParser.RawConfigParser(allow_no_value=True) <NEW_LINE> self._cfg_parser.optionxform = str <NEW_LINE> self._cfg_parser.readfp(self.blacklist_fp) <NEW_LINE> err = False <NEW_LINE> for svc_name in self._cfg_parser.sections(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> available_ops = set(api_model[svc_name]['ops']) <NEW_LINE> blacklist_ops = set(self._cfg_parser.options(svc_name)) <NEW_LINE> invalid_ops = blacklist_ops - available_ops <NEW_LINE> if invalid_ops: <NEW_LINE> <INDENT> err = True <NEW_LINE> LOGGER.error('[%s] Invalid operation(s): %s.', svc_name, ', '.join(invalid_ops)) <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> LOGGER.warning('Invalid service name "%s".', svc_name) <NEW_LINE> <DEDENT> <DEDENT> if err: <NEW_LINE> <INDENT> raise BlacklistError('Failure to validate blacklist file.') <NEW_LINE> <DEDENT> <DEDENT> def is_blacklisted(self, svc_name, op_name): <NEW_LINE> <INDENT> return self._cfg_parser.has_option(svc_name, op_name)
Parser for operations blacklist.
6259905ca8370b77170f19f2
class ForwardingTable(_ValidatedDict): <NEW_LINE> <INDENT> def validate(self, dst, entry): <NEW_LINE> <INDENT> if not isinstance(dst, HostEntity): <NEW_LINE> <INDENT> raise ValueError("destination %s is not a host" % dst) <NEW_LINE> <DEDENT> if not isinstance(entry, ForwardingTableEntry): <NEW_LINE> <INDENT> raise ValueError("entry %s isn't a forwarding table entry" % entry) <NEW_LINE> <DEDENT> if entry.dst != dst: <NEW_LINE> <INDENT> raise ValueError("entry destination %s doesn't match key %s" % (entry.dst, dst)) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if not self: <NEW_LINE> <INDENT> return "\t(empty forwarding table)" <NEW_LINE> <DEDENT> return ("=== Forwarding Table ===\n" + "\n".join("\t{}".format(v) for v in self.values()))
A forwarding table for a switch. A `ForwardingTable` instance should be used as a `dict` mapping a destination host to a ForwardingTableEntry (if any route is known).
6259905ca79ad1619776b5cf
class RepairType(models.Model): <NEW_LINE> <INDENT> _name = 'repair.type' <NEW_LINE> _description = 'Vehicle Repair Type' <NEW_LINE> name = fields.Char(string='Repair Type', translate=True) <NEW_LINE> @api.constrains('name') <NEW_LINE> def check_repair_type(self): <NEW_LINE> <INDENT> for repair in self: <NEW_LINE> <INDENT> if self.search_count([ ('id', '!=', repair.id), ('name', 'ilike', repair.name.strip()) ]): <NEW_LINE> <INDENT> raise ValidationError(_('Repair type with this name already exists!'))
Repair Type.
6259905c3d592f4c4edbc501
class BounterInitTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_default_init(self): <NEW_LINE> <INDENT> counter = bounter(7) <NEW_LINE> self.assertEqual(type(counter), HashTable) <NEW_LINE> <DEDENT> def test_no_size_init(self): <NEW_LINE> <INDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> counter = bounter() <NEW_LINE> <DEDENT> <DEDENT> def test_explicit_init(self): <NEW_LINE> <INDENT> counter = bounter(size_mb=2, need_iteration=True) <NEW_LINE> self.assertEqual(type(counter), HashTable) <NEW_LINE> self.assertEqual(counter.buckets(), 2 ** 16) <NEW_LINE> <DEDENT> def test_cms_init_default(self): <NEW_LINE> <INDENT> counter = bounter(size_mb=64, need_iteration=False) <NEW_LINE> self.assertEqual(type(counter), CountMinSketch) <NEW_LINE> self.assertEqual(type(counter.cms), cmsc.CMS_Conservative) <NEW_LINE> self.assertEqual(counter.size(), 2 ** 26) <NEW_LINE> <DEDENT> def test_cms_init_log8(self): <NEW_LINE> <INDENT> counter = bounter(size_mb=1, need_iteration=False, log_counting=8) <NEW_LINE> self.assertEqual(type(counter), CountMinSketch) <NEW_LINE> self.assertEqual(type(counter.cms), cmsc.CMS_Log8) <NEW_LINE> self.assertEqual(counter.size(), 2 ** 20) <NEW_LINE> <DEDENT> def test_ht_log_init(self): <NEW_LINE> <INDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> bounter(size_mb=4, log_counting=8) <NEW_LINE> <DEDENT> <DEDENT> def test_nocounts_init(self): <NEW_LINE> <INDENT> counter = bounter(need_counts=False) <NEW_LINE> self.assertTrue(issubclass(type(counter), CountMinSketch)) <NEW_LINE> self.assertEqual(counter.size(), 4) <NEW_LINE> <DEDENT> def test_sanity_default(self): <NEW_LINE> <INDENT> counter = bounter(size_mb=16) <NEW_LINE> counter.update([u'foo', u'bar', u'foo']) <NEW_LINE> self.assertEqual(counter[u'foo'], 2) <NEW_LINE> self.assertEqual(counter[u'bar'], 1) <NEW_LINE> self.assertEqual(counter.cardinality(), 2) <NEW_LINE> <DEDENT> def test_contains(self): <NEW_LINE> <INDENT> counter = bounter(size_mb=16) <NEW_LINE> 
counter.update([u'foo', u'bar', u'foo']) <NEW_LINE> self.assertTrue('foo' in counter) <NEW_LINE> self.assertFalse('foobar' in counter) <NEW_LINE> <DEDENT> def test_sanity_nocount(self): <NEW_LINE> <INDENT> counter = bounter(need_counts=False) <NEW_LINE> counter.update([u'foo', u'bar', u'foo']) <NEW_LINE> self.assertEqual(counter.total(), 3) <NEW_LINE> self.assertEqual(counter.cardinality(), 2) <NEW_LINE> with self.assertRaises(NotImplementedError): <NEW_LINE> <INDENT> print(counter[u'foo'])
Basic test for factory method. Tests for CountMinSketch and HashTable implementations found in respective subdirectories
6259905ce5267d203ee6ced2
class TestInlineResponse200(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return InlineResponse200( total_result_size = 56, data = [ talon_one.models.customer_profile.CustomerProfile( integration_id = '0', created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), attributes = talon_one.models.attributes.attributes(), account_id = 56, closed_sessions = 56, total_sales = 1.337, loyalty_memberships = [ talon_one.models.loyalty_membership.LoyaltyMembership( joined = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), loyalty_program_id = 56, ) ], audience_memberships = [ talon_one.models.audience_membership.AudienceMembership( id = 56, name = '0', ) ], last_activity = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ) ] ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return InlineResponse200( total_result_size = 56, data = [ talon_one.models.customer_profile.CustomerProfile( integration_id = '0', created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), attributes = talon_one.models.attributes.attributes(), account_id = 56, closed_sessions = 56, total_sales = 1.337, loyalty_memberships = [ talon_one.models.loyalty_membership.LoyaltyMembership( joined = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), loyalty_program_id = 56, ) ], audience_memberships = [ talon_one.models.audience_membership.AudienceMembership( id = 56, name = '0', ) ], last_activity = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ) ], ) <NEW_LINE> <DEDENT> <DEDENT> def testInlineResponse200(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional 
= self.make_instance(include_optional=True)
InlineResponse200 unit test stubs
6259905c3cc13d1c6d466d65
class QuickEntryForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Entry
Form for posting an entry quickly
6259905cd7e4931a7ef3d696
class Pipeline: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._stages = [] <NEW_LINE> self.stats = Stats() <NEW_LINE> <DEDENT> def build(self, configuration): <NEW_LINE> <INDENT> builder = PipelineBuilder() <NEW_LINE> self._stages = builder.build(configuration, self.stats) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> source = self._stages[0] <NEW_LINE> operations = self._stages[1:] <NEW_LINE> log.warning('Entering pipeline') <NEW_LINE> pipeline_start = time() <NEW_LINE> try: <NEW_LINE> <INDENT> for messages in source.run(): <NEW_LINE> <INDENT> start = time() <NEW_LINE> for stage in operations: <NEW_LINE> <INDENT> messages = stage.handle_bulk(messages) <NEW_LINE> if not messages: <NEW_LINE> <INDENT> self.stats.incr('pipeline/dropped') <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> took = time() - start <NEW_LINE> self.stats.incr('pipeline/entries') <NEW_LINE> self.stats.incr('pipeline/stages_total_s', took) <NEW_LINE> <DEDENT> <DEDENT> except StopPipeline as e: <NEW_LINE> <INDENT> log.info("Pipeline stopped on software request: %s", e.args[0]) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> log.info("Pipeline stopped on keyboard request") <NEW_LINE> <DEDENT> took = time() - pipeline_start <NEW_LINE> self.stats.incr('pipeline/total_s', took) <NEW_LINE> self.stats.dump() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def register_stage(name): <NEW_LINE> <INDENT> def decorator(stage_cls): <NEW_LINE> <INDENT> if name in PipelineBuilder._registered: <NEW_LINE> <INDENT> raise Exception("Stage with that name already defined") <NEW_LINE> <DEDENT> PipelineBuilder._registered[name] = stage_cls <NEW_LINE> stage_cls.log = logging.getLogger('root.stage.' + name) <NEW_LINE> return stage_cls <NEW_LINE> <DEDENT> return decorator
Creates a pipeline and handles flow of the data within the logger.
6259905c8e71fb1e983bd0ee
class JsonCluster(Cluster): <NEW_LINE> <INDENT> def __init__(self, cluster_json=None): <NEW_LINE> <INDENT> super(JsonCluster, self).__init__() <NEW_LINE> if cluster_json is None: <NEW_LINE> <INDENT> cluster_json_path = os.path.abspath(os.path.join(os.getcwd(), "cluster.json")) <NEW_LINE> cluster_json = json.load(open(cluster_json_path)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> init_nodes = [RemoteAccount(ninfo["hostname"], ninfo.get("user"), ninfo.get("ssh_args"), ssh_hostname=ninfo.get("ssh_hostname")) for ninfo in cluster_json["nodes"]] <NEW_LINE> <DEDENT> except BaseException as e: <NEW_LINE> <INDENT> raise ValueError("JSON cluster definition invalid", e) <NEW_LINE> <DEDENT> self.available_nodes = collections.deque(init_nodes) <NEW_LINE> self.in_use_nodes = set() <NEW_LINE> self.id_source = 1 <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.available_nodes) + len(self.in_use_nodes) <NEW_LINE> <DEDENT> def num_available_nodes(self): <NEW_LINE> <INDENT> return len(self.available_nodes) <NEW_LINE> <DEDENT> def request(self, nslots): <NEW_LINE> <INDENT> if nslots > self.num_available_nodes(): <NEW_LINE> <INDENT> err_msg = "There aren't enough available nodes to satisfy the resource request. " "Total cluster size: %d, Requested: %d, Already allocated: %d, Available: %d. " % (len(self), nslots, len(self.in_use_nodes), self.num_available_nodes()) <NEW_LINE> err_msg += "Make sure your cluster has enough nodes to run your test or service(s)." 
<NEW_LINE> raise RuntimeError(err_msg) <NEW_LINE> <DEDENT> result = [] <NEW_LINE> for i in range(nslots): <NEW_LINE> <INDENT> node = self.available_nodes.popleft() <NEW_LINE> result.append(ClusterSlot(self, node, slot_id=self.id_source)) <NEW_LINE> self.in_use_nodes.add(self.id_source) <NEW_LINE> self.id_source += 1 <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def free_single(self, slot): <NEW_LINE> <INDENT> assert(slot.slot_id in self.in_use_nodes) <NEW_LINE> self.in_use_nodes.remove(slot.slot_id) <NEW_LINE> self.available_nodes.append(slot.account)
An implementation of Cluster that uses static settings specified in a cluster file.
6259905c15baa723494635b7
class EDTestCasePluginExecuteExecSaxsAddv1_0(EDTestCasePluginExecute): <NEW_LINE> <INDENT> def __init__(self, _strTestName=None): <NEW_LINE> <INDENT> EDTestCasePluginExecute.__init__(self, "EDPluginExecSaxsAddv1_0") <NEW_LINE> self.setDataInputFile(os.path.join(self.getPluginTestsDataHome(), "XSDataInputSaxsAdd_reference.xml")) <NEW_LINE> self.setReferenceDataOutputFile(os.path.join(self.getPluginTestsDataHome(), "XSDataResultSaxsAdd_reference.xml")) <NEW_LINE> <DEDENT> def preProcess(self): <NEW_LINE> <INDENT> EDTestCasePluginExecute.preProcess(self) <NEW_LINE> self.loadTestImage(["bsa_014_07.edf", "bsa_014_07.msk", "Pcon_01Apr_msk.edf"]) <NEW_LINE> strExpectedOutput = self.readAndParseFile (self.getReferenceDataOutputFile()) <NEW_LINE> EDVerbose.DEBUG("strExpectedOutput:" + strExpectedOutput) <NEW_LINE> xsDataResultReference = XSDataResultSaxsAddv1_0.parseString(strExpectedOutput) <NEW_LINE> self.refOutput = xsDataResultReference.getOutputImage().getPath().getValue() <NEW_LINE> EDVerbose.DEBUG(" Output file is %s" % self.refOutput) <NEW_LINE> if not os.path.isdir(os.path.dirname(self.refOutput)): <NEW_LINE> <INDENT> os.makedirs(os.path.dirname(self.refOutput)) <NEW_LINE> <DEDENT> if os.path.isfile(self.refOutput): <NEW_LINE> <INDENT> EDVerbose.DEBUG(" Output file exists %s, I will remove it" % self.refOutput) <NEW_LINE> os.remove(self.refOutput) <NEW_LINE> <DEDENT> <DEDENT> def testExecute(self): <NEW_LINE> <INDENT> self.run() <NEW_LINE> plugin = self.getPlugin() <NEW_LINE> strExpectedOutput = self.readAndParseFile (self.getReferenceDataOutputFile()) <NEW_LINE> EDVerbose.DEBUG("Checking obtained result...") <NEW_LINE> xsDataResultReference = XSDataResultSaxsAddv1_0.parseString(strExpectedOutput) <NEW_LINE> xsDataResultObtained = plugin.getDataOutput() <NEW_LINE> EDAssert.strAlmostEqual(xsDataResultReference.marshal(), xsDataResultObtained.marshal(), "XSDataResult output are the same") <NEW_LINE> outputData = 
openimage(xsDataResultObtained.getOutputImage().getPath().getValue()).data <NEW_LINE> referenceData = openimage(os.path.join(self.getTestsDataImagesHome(), "bsa_014_07.msk")).data <NEW_LINE> EDAssert.arraySimilar(outputData, referenceData , _fAbsMaxDelta=0.8, _fScaledMaxDelta=0.05, _fRfactor=5, _fRelMaxDelta=1.5 , _strComment="Images are the same") <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> self.addTestMethod(self.testExecute)
Those are all execution tests for the EDNA Exec plugin SaxsAddv1_0
6259905c0a50d4780f7068d1
class GraphIds(BrowserView): <NEW_LINE> <INDENT> @json <NEW_LINE> @formreq <NEW_LINE> def __call__(self, deviceIds=(), componentPaths=()): <NEW_LINE> <INDENT> graphIds = set() <NEW_LINE> if isinstance(deviceIds, basestring): <NEW_LINE> <INDENT> deviceIds = [deviceIds] <NEW_LINE> <DEDENT> if isinstance(componentPaths, basestring): <NEW_LINE> <INDENT> componentPaths = [componentPaths] <NEW_LINE> <DEDENT> if not componentPaths: <NEW_LINE> <INDENT> componentPaths = ('',) <NEW_LINE> <DEDENT> for devId in deviceIds: <NEW_LINE> <INDENT> thing = self.context.findDevice(devId) <NEW_LINE> if thing: <NEW_LINE> <INDENT> for compPath in componentPaths: <NEW_LINE> <INDENT> if compPath: <NEW_LINE> <INDENT> thing = getObjByPath(thing, compPath) <NEW_LINE> <DEDENT> for t in thing.getRRDTemplates(): <NEW_LINE> <INDENT> for g in t.getGraphDefs(): <NEW_LINE> <INDENT> graphIds.add(g.id) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return sorted(graphIds)
Get a list of the graph defs available for the given device and component. Adapts DeviceClasses.
6259905ccc0a2c111447c5e1
class KlineIndexHandler(BaseHandler): <NEW_LINE> <INDENT> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cur = self.db.cursor() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error("创建游标失败", e) <NEW_LINE> return self.write(dict(code=RET.DATAERR, errmsg=e)) <NEW_LINE> <DEDENT> sql = f'select source, currency, CAST(amount AS CHAR(50)) as amount,CAST(close_ AS CHAR(50)) as close_,' f'CAST(high_ AS CHAR(50)) as high_,CAST(low_ AS CHAR(50)) as low_,' f'CAST(open_ AS CHAR(50)) as open_, CAST(volume AS CHAR(50)) as volume,' f'DATE_FORMAT(sample_time,"%Y-%m-%d %H:%i:%S") as sample_time ' f'from kline_data_5m' <NEW_LINE> try: <NEW_LINE> <INDENT> print(sql) <NEW_LINE> cur.execute(sql) <NEW_LINE> ret = cur.fetchall() <NEW_LINE> cur.close() <NEW_LINE> self.write(dict(code=RET.OK,errmsg="", data=ret)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> logging.error("查询数据库错误") <NEW_LINE> cur.close() <NEW_LINE> return self.write(dict(code=RET.DATAERR, errmsg=e))
kline 数据
6259905c2c8b7c6e89bd4e13
class TestIDMController(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> contr_params = {"v0": 30, "b": 1.5, "delta": 4, "s0": 2, "noise": 0} <NEW_LINE> vehicles = VehicleParams() <NEW_LINE> vehicles.add( veh_id="test", acceleration_controller=(IDMController, contr_params), routing_controller=(ContinuousRouter, {}), car_following_params=SumoCarFollowingParams( tau=1, accel=1, decel=5), num_vehicles=5) <NEW_LINE> self.env, _, _ = ring_road_exp_setup(vehicles=vehicles) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.env.terminate() <NEW_LINE> self.env = None <NEW_LINE> <DEDENT> def test_get_action(self): <NEW_LINE> <INDENT> self.env.reset() <NEW_LINE> ids = self.env.k.vehicle.get_ids() <NEW_LINE> test_headways = [10, 20, 30, 40, 50] <NEW_LINE> for i, veh_id in enumerate(ids): <NEW_LINE> <INDENT> self.env.k.vehicle.set_headway(veh_id, test_headways[i]) <NEW_LINE> <DEDENT> requested_accel = [ self.env.k.vehicle.get_acc_controller(veh_id).get_action(self.env) for veh_id in ids ] <NEW_LINE> expected_accel = [0.96, 0.99, 0.995556, 0.9975, 0.9984] <NEW_LINE> np.testing.assert_array_almost_equal(requested_accel, expected_accel) <NEW_LINE> test_headways = [0, 0, 0, 0, 0] <NEW_LINE> for i, veh_id in enumerate(ids): <NEW_LINE> <INDENT> self.env.k.vehicle.set_headway(veh_id, test_headways[i]) <NEW_LINE> <DEDENT> [ self.env.k.vehicle.get_acc_controller(veh_id).get_action(self.env) for veh_id in ids ]
Tests that the IDM Controller returning mathematically accurate values.
6259905cbaa26c4b54d508cb
class GANLoss(nn.Module): <NEW_LINE> <INDENT> def __init__(self, gan_mode, target_real_label=1.0, target_fake_label=0.0, random_label = 1, delta_rand = 0.15,reduction = "mean" ): <NEW_LINE> <INDENT> super(GANLoss, self).__init__() <NEW_LINE> self.register_buffer('real_label', torch.tensor(target_real_label)) <NEW_LINE> self.register_buffer('fake_label', torch.tensor(target_fake_label)) <NEW_LINE> self.register_buffer('delta_rand', torch.tensor(delta_rand)) <NEW_LINE> self.gan_mode = gan_mode <NEW_LINE> if gan_mode == 'lsgan': <NEW_LINE> <INDENT> self.loss = nn.MSELoss(reduction = reduction) <NEW_LINE> self.register_buffer('random_label', torch.tensor(0)) <NEW_LINE> <DEDENT> elif gan_mode == 'vanilla': <NEW_LINE> <INDENT> self.loss = nn.BCEWithLogitsLoss(reduction = reduction) <NEW_LINE> self.register_buffer('random_label', torch.tensor(random_label)) <NEW_LINE> <DEDENT> elif gan_mode in ['wgangp']: <NEW_LINE> <INDENT> self.loss = None <NEW_LINE> self.register_buffer('random_label', torch.tensor(0)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError('gan mode %s not implemented' % gan_mode) <NEW_LINE> <DEDENT> <DEDENT> def get_target_tensor(self, prediction, target_is_real): <NEW_LINE> <INDENT> if self.random_label: <NEW_LINE> <INDENT> if target_is_real: <NEW_LINE> <INDENT> labels = torch.FloatTensor(prediction.size()).uniform_(self.real_label - self.delta_rand, self.real_label) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> labels = torch.FloatTensor(prediction.size()).uniform_(self.fake_label,self.fake_label + self.delta_rand) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if target_is_real: <NEW_LINE> <INDENT> target_tensor = self.real_label <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> target_tensor = self.fake_label <NEW_LINE> <DEDENT> labels = target_tensor.expand_as(prediction) <NEW_LINE> <DEDENT> return labels <NEW_LINE> <DEDENT> def __call__(self, prediction, target_is_real): <NEW_LINE> <INDENT> if self.gan_mode in ['lsgan', 
'vanilla']: <NEW_LINE> <INDENT> target_tensor = self.get_target_tensor(prediction, target_is_real).to(prediction) <NEW_LINE> loss = self.loss(prediction, target_tensor) <NEW_LINE> <DEDENT> elif self.gan_mode == 'wgangp': <NEW_LINE> <INDENT> if target_is_real: <NEW_LINE> <INDENT> loss = -prediction <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> loss = prediction <NEW_LINE> <DEDENT> <DEDENT> return loss
Define different GAN objectives. The GANLoss class abstracts away the need to create the target label tensor that has the same size as the input.
6259905c24f1403a926863e1
@attributes(["command"]) <NEW_LINE> class Sudo(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_args(cls, command_args): <NEW_LINE> <INDENT> return cls(command=" ".join(map(shell_quote, command_args)))
Run a shell command on a remote host. :param bytes command: The command to run.
6259905c55399d3f05627b45
class CustomAdd(keras.layers.Add): <NEW_LINE> <INDENT> def compute_mask(self, inputs, mask=None): <NEW_LINE> <INDENT> return mask[0]
Embedding layer with weights returned.
6259905cadb09d7d5dc0bb90
class EmbeddedResourceLinkField(SubField): <NEW_LINE> <INDENT> def __init__( self, resource_spec_class: Union[Type[ResourceSpec], str], alti_key: str = None, optional: bool = False, value_is_id: bool = False, ): <NEW_LINE> <INDENT> self._resource_spec_class = resource_spec_class <NEW_LINE> self.alti_key = alti_key <NEW_LINE> self.optional = optional <NEW_LINE> self.value_is_id = value_is_id <NEW_LINE> <DEDENT> def parse(self, data: str, context: Dict[str, Any]) -> LinkCollection: <NEW_LINE> <INDENT> if isinstance(self._resource_spec_class, str): <NEW_LINE> <INDENT> resource_spec_class: Type[ResourceSpec] = ResourceSpec.get_by_class_name( self._resource_spec_class ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resource_spec_class = self._resource_spec_class <NEW_LINE> <DEDENT> if not self.alti_key: <NEW_LINE> <INDENT> self.alti_key = resource_spec_class.type_name <NEW_LINE> <DEDENT> short_resource_id = data <NEW_LINE> if self.value_is_id: <NEW_LINE> <INDENT> resource_id = short_resource_id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resource_id = resource_spec_class.generate_id(short_resource_id, context) <NEW_LINE> <DEDENT> return LinkCollection(resource_links=[ResourceLink(pred=self.alti_key, obj=resource_id)],)
An EmbeddedResourceLinkField is a ResourceLinkField where the input is the resource id only, not a key/value where the value is a resource id. Examples: A link to a TestResourceSpec resource:: >>> from altimeter.core.graph.field.list_field import ListField >>> from altimeter.core.resource.resource_spec import ResourceSpec >>> class TestResourceSpec(ResourceSpec): type_name="thing" >>> input = {"Thing": ["123", "456"]} >>> field = ListField("Thing", EmbeddedResourceLinkField(TestResourceSpec)) >>> link_collection = field.parse(data=input, context={}) >>> print(link_collection.dict(exclude_unset=True)) {'resource_links': ({'pred': 'thing', 'obj': 'thing:123'}, {'pred': 'thing', 'obj': 'thing:456'})} Args: resource_spec_class: The name of the ResourceSpec class or the ResourceSpec class which this link represents. optional: Whether this key is optional. Defaults to False. value_is_id: Whether the value for this key contains the entire resource id. For AWS resources set this to True if the value is a complete arn.
6259905c4a966d76dd5f0518
class SLBLoadBalancerAttribute(object): <NEW_LINE> <INDENT> def __init__(self, balancer, listeners, backend_servers, extra=None): <NEW_LINE> <INDENT> self.balancer = balancer <NEW_LINE> self.listeners = listeners or [] <NEW_LINE> self.backend_servers = backend_servers or [] <NEW_LINE> self.extra = extra or {} <NEW_LINE> <DEDENT> def is_listening(self, port): <NEW_LINE> <INDENT> for listener in self.listeners: <NEW_LINE> <INDENT> if listener.get('ListenerPort') == port: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def is_attached(self, member): <NEW_LINE> <INDENT> for server in self.backend_servers: <NEW_LINE> <INDENT> if server.get('Serverid') == member.id: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ('<SLBLoadBalancerAttribute id=%s, ports=%s, servers=%s ...>' % (self.balancer.id, self.listeners, self.backend_servers))
This class used to get listeners and backend servers related to a balancer listeners is a ``list`` of ``dict``, each element contains 'ListenerPort' and 'ListenerProtocol' keys. backend_servers is a ``list`` of ``dict``, each element contains 'ServerId' and 'Weight' keys.
6259905c9c8ee82313040c9d
class Unit(np.ndarray): <NEW_LINE> <INDENT> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> shape = len(baseunits) <NEW_LINE> obj = np.zeros(shape).view(cls) <NEW_LINE> if len(args) == 0: <NEW_LINE> <INDENT> return obj <NEW_LINE> <DEDENT> if len(args) == 1 and isinstance(args[0], basestring): <NEW_LINE> <INDENT> if not re.match(r"^[ \t]*$", args[0]): <NEW_LINE> <INDENT> tmp = unit_parser(args[0])[0] <NEW_LINE> if isinstance(tmp, tuple): <NEW_LINE> <INDENT> obj += tmp[1].unit <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> obj += tmp.unit <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif len(args) == 1 and isinstance(args[0], np.ndarray): <NEW_LINE> <INDENT> np.copyto(obj, args[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tmp = np.array(*args) <NEW_LINE> np.copyto(obj, tmp) <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> def __array_finalize__(self, obj): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> def show(self, *args, **kwargs): <NEW_LINE> <INDENT> lst = tuple((baseunits[n], e) for n, e in enumerate(self) if e != 0) <NEW_LINE> return UnitTree(lst).show(*args, **kwargs) <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return np.asscalar(np.any(self)) <NEW_LINE> <DEDENT> def __eq__(self, obj): <NEW_LINE> <INDENT> return not bool(self - obj) <NEW_LINE> <DEDENT> def __ne__(self, obj): <NEW_LINE> <INDENT> return bool(self - obj) <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return self.__bool__() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> unit = [] <NEW_LINE> for n, e in enumerate(self): <NEW_LINE> <INDENT> if not almost_equal(e, 0): <NEW_LINE> <INDENT> s = baseunits[n] <NEW_LINE> if not almost_equal(e, 1): <NEW_LINE> <INDENT> s += "^%g" % e <NEW_LINE> <DEDENT> unit.append(s) <NEW_LINE> <DEDENT> <DEDENT> return "[" + " ".join(unit) + "]"
A unit representation. Units are stored as simple vectors of real numbers, where each element is the exponent of the corresponding base unit. For example, if the base units are 'm', 's', 'kg', the unit [1, -2, 0] would represent an acceleration [m/s^2].
6259905cac7a0e7691f73b08
class ydError(RuntimeError): <NEW_LINE> <INDENT> def __init__(self, errno, errmsg): <NEW_LINE> <INDENT> self.errno = errno <NEW_LINE> self.errmsg = "{0}".format(errmsg) <NEW_LINE> self.args = (errno, errmsg) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.errmsg
Внутреннее исключение, выбрасываемое в случаях: * Таймаут запроса к API * Исчерпание количества попыток запроса к API * Неверные аргументы, переданные в командной строке
6259905c21bff66bcd72428b
class ChewyTester(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smth(self): <NEW_LINE> <INDENT> pass
Unit tests for [???]
6259905c3cc13d1c6d466d66
class IQCSample(ISample): <NEW_LINE> <INDENT> veracis_id = schema.TextLine( title=_(u"QC Veracis Sample ID"), description=_(u"QC Veracis Sample ID"), defaultFactory=assignVeracisId(), required=True, ) <NEW_LINE> source_id_one = schema.TextLine( title=_(u"Primary QC Source Sample ID"), description=_(u"Primary QC Source Sample ID"), required=True, ) <NEW_LINE> source_id_two = schema.TextLine( title=_(u"Secondary QC Source Sample ID"), description=_(u"Secondary QC Source Sample ID"), required=False, ) <NEW_LINE> source_id_three = schema.TextLine( title=_(u"Tertiary QC Source Sample ID"), description=_(u"Tertiary QC Source Sample ID"), required=False, ) <NEW_LINE> date_added = schema.Date( title=_(u"Date QC Sample was added to LIMS"), description=_(u"Date QC Sample was added to LIMS"), defaultFactory=currentDate, required=True, ) <NEW_LINE> added_by = schema.Choice( title=_(u"Operator that Added QC Sample to LIMS"), description=_(u"Operator that Added QC Sample to LIMS"), vocabulary=u"plone.principalsource.Users", required=False, ) <NEW_LINE> source = schema.TextLine( title=_(u"Source of QC Sample"), description=_(u"Source of QC Sample"), required=False, ) <NEW_LINE> description = schema.TextLine( title=_(u"Description of QC Sample"), description=_(u"Description of QC Sample"), required=False, ) <NEW_LINE> fluid_type = schema.Choice( title=_(u"QC Fluid Type"), description=_(u"QC Fluid Type"), values=[_(u"Serum"), _(u"Plasma"), _(u"CSF"), _(u"Tissue")], required=True, ) <NEW_LINE> date_received = schema.Date( title=_(u"Date QC Sample was Received"), description=_(u"Date QC Sample was Received"), required=True, ) <NEW_LINE> comment = schema.Text( title=_(u"Any Notes or Comments About the QC Sample"), description=_(u"Any Notes or Comments About the QC Sample"), required=False, )
QC Sample!
6259905c009cb60464d02b5c
class ConnectionMonitor(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'source': {'required': True}, 'destination': {'required': True}, } <NEW_LINE> _attribute_map = { 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'source': {'key': 'properties.source', 'type': 'ConnectionMonitorSource'}, 'destination': {'key': 'properties.destination', 'type': 'ConnectionMonitorDestination'}, 'auto_start': {'key': 'properties.autoStart', 'type': 'bool'}, 'monitoring_interval_in_seconds': {'key': 'properties.monitoringIntervalInSeconds', 'type': 'int'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ConnectionMonitor, self).__init__(**kwargs) <NEW_LINE> self.location = kwargs.get('location', None) <NEW_LINE> self.tags = kwargs.get('tags', None) <NEW_LINE> self.source = kwargs['source'] <NEW_LINE> self.destination = kwargs['destination'] <NEW_LINE> self.auto_start = kwargs.get('auto_start', True) <NEW_LINE> self.monitoring_interval_in_seconds = kwargs.get('monitoring_interval_in_seconds', 60)
Parameters that define the operation to create a connection monitor. All required parameters must be populated in order to send to Azure. :param location: Connection monitor location. :type location: str :param tags: A set of tags. Connection monitor tags. :type tags: dict[str, str] :param source: Required. Describes the source of connection monitor. :type source: ~azure.mgmt.network.v2018_06_01.models.ConnectionMonitorSource :param destination: Required. Describes the destination of connection monitor. :type destination: ~azure.mgmt.network.v2018_06_01.models.ConnectionMonitorDestination :param auto_start: Determines if the connection monitor will start automatically once created. :type auto_start: bool :param monitoring_interval_in_seconds: Monitoring interval in seconds. :type monitoring_interval_in_seconds: int
6259905c2ae34c7f260ac70d
class TestSetup(unittest.TestCase): <NEW_LINE> <INDENT> layer = EEA_CEFTRANSLATIONS_INTEGRATION_TESTING <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.portal = self.layer['portal'] <NEW_LINE> self.installer = api.portal.get_tool('portal_quickinstaller') <NEW_LINE> <DEDENT> def test_product_installed(self): <NEW_LINE> <INDENT> self.assertTrue(self.installer.isProductInstalled( 'eea.ceftranslations')) <NEW_LINE> <DEDENT> def test_browserlayer(self): <NEW_LINE> <INDENT> from eea.ceftranslations.interfaces import ( IEEACEFTranslationsLayer) <NEW_LINE> from plone.browserlayer import utils <NEW_LINE> self.assertIn(IEEACEFTranslationsLayer, utils.registered_layers())
Test that eea.ceftranslations is properly installed.
6259905ca219f33f346c7e2c
class ControlList(LoginRequiredMixin, View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> start_year_of_check = date.today().year - 1 <NEW_LINE> list_of_coordinated_topics = get_list_of_coordinated_topics(request.user) <NEW_LINE> controls_in_history = Document.objects. filter(is_evaluated=True, control_year__gt=start_year_of_check, coordinator__inspector__username=request.user) <NEW_LINE> list_of_controls = queryset_to_list(controls_in_history, 'control_id') <NEW_LINE> controls = Control.objects.using('kontrole'). filter(rok__gt=start_year_of_check, control_topics__nr_tematu__in=list_of_coordinated_topics) <NEW_LINE> if len(controls_in_history) > 0: <NEW_LINE> <INDENT> controls = controls.exclude(pk__in=list_of_controls) <NEW_LINE> <DEDENT> controls = controls.distinct() <NEW_LINE> ctx = {'controls': controls, 'evaluated_controls': controls_in_history[0:10] } <NEW_LINE> if not controls: <NEW_LINE> <INDENT> messages.info(request, 'Nie masz kontroli do oceny') <NEW_LINE> <DEDENT> return render(request, 'oceniarka/control_list_template.html', ctx)
lista kontroli z danego roku. Pobiera kontrole z historii i bazy po czym wypisuje te których nie ma w historii. Przenosi do widoku control_documents
6259905c8e7ae83300eea6b4
class UserPasswordForm(FlaskForm): <NEW_LINE> <INDENT> username = StringField('User or Team name', validators=[DataRequired(), Regexp(r"^\w(\w| )*\w$", message="At least 2 alphanumeric characters with only spaces in between.") ] ) <NEW_LINE> password = PasswordField('Password', validators=[DataRequired(), Length(min=4)])
Username password form used for login.
6259905c627d3e7fe0e084b2
class BeautiAccountAddMsg(messages.Message): <NEW_LINE> <INDENT> beautician = messages.MessageField(BeauticianAccountEditMsg, 1)
美容師アカウント新規登録依頼メッセージ
6259905c442bda511e95d86d
class RemovalAgent( RequestAgentBase ): <NEW_LINE> <INDENT> def __init__( self, *args, **kwargs ): <NEW_LINE> <INDENT> self.setRequestType( "removal" ) <NEW_LINE> self.setRequestTask( RemovalTask ) <NEW_LINE> RequestAgentBase.__init__( self, *args, **kwargs ) <NEW_LINE> agentName = args[0] <NEW_LINE> self.monitor.registerActivity( "PhysicalRemovalAtt", "Physical removals attempted", "RemovalAgent", "Removal/min", gMonitor.OP_SUM ) <NEW_LINE> self.monitor.registerActivity( "PhysicalRemovalDone", "Successful physical removals", "RemovalAgent", "Removal/min", gMonitor.OP_SUM ) <NEW_LINE> self.monitor.registerActivity( "PhysicalRemovalFail", "Failed physical removals", "RemovalAgent", "Removal/min", gMonitor.OP_SUM ) <NEW_LINE> self.monitor.registerActivity( "PhysicalRemovalSize", "Physically removed size", "RemovalAgent", "Bytes", gMonitor.OP_ACUM ) <NEW_LINE> self.monitor.registerActivity( "ReplicaRemovalAtt", "Replica removal attempted", "RemovalAgent", "Removal/min", gMonitor.OP_SUM ) <NEW_LINE> self.monitor.registerActivity( "ReplicaRemovalDone", "Successful replica removals", "RemovalAgent", "Removal/min", gMonitor.OP_SUM ) <NEW_LINE> self.monitor.registerActivity( "ReplicaRemovalFail", "Failed replica removals", "RemovalAgent", "Removal/min", gMonitor.OP_SUM ) <NEW_LINE> self.monitor.registerActivity( "RemoveFileAtt", "File removal attempted", "RemovalAgent", "Removal/min", gMonitor.OP_SUM ) <NEW_LINE> self.monitor.registerActivity( "RemoveFileDone", "File removal done", "RemovalAgent", "Removal/min", gMonitor.OP_SUM ) <NEW_LINE> self.monitor.registerActivity( "RemoveFileFail", "File removal failed", "RemovalAgent", "Removal/min", gMonitor.OP_SUM ) <NEW_LINE> self.log.info( "%s agent has been constructed" % agentName )
.. class:: RemovalAgent This agent is preocessing 'removal' requests read from RequestClient. Each request is executed in a separate sub-process using ProcessPool and RemovalTask. Config Options -------------- * set the number of requests to be processed in agent's cycle: RequestsPerCycle = 10 * minimal number of sub-processes running together MinProcess = 1 * maximal number of sub-processes running togehter MaxProcess = 4 * results queue size ProcessPoolQueueSize = 10 * request type RequestType = removal * default proxy for handling requests shifterProxy = DataManager
6259905ce64d504609df9ee2
@content( 'User', icon='icon-user', add_view='add_user', tab_order=('properties',), propertysheets = ( ('', UserPropertySheet), ) ) <NEW_LINE> @implementer(IUser) <NEW_LINE> class User(Folder): <NEW_LINE> <INDENT> pwd_manager = BCRYPTPasswordManager() <NEW_LINE> groupids = multireference_sourceid_property(UserToGroup) <NEW_LINE> groups = multireference_source_property(UserToGroup) <NEW_LINE> def __init__(self, password, email): <NEW_LINE> <INDENT> Folder.__init__(self) <NEW_LINE> self.password = self.pwd_manager.encode(password) <NEW_LINE> self.email = email <NEW_LINE> <DEDENT> def check_password(self, password): <NEW_LINE> <INDENT> return self.pwd_manager.check(self.password, password) <NEW_LINE> <DEDENT> def set_password(self, password): <NEW_LINE> <INDENT> self.password = self.pwd_manager.encode(password) <NEW_LINE> <DEDENT> def email_password_reset(self, request): <NEW_LINE> <INDENT> root = request.root <NEW_LINE> sitename = getattr(root, 'title', None) or 'Substance D' <NEW_LINE> principals = find_service(self, 'principals') <NEW_LINE> resets = principals['resets'] <NEW_LINE> reset = resets.add_reset(self) <NEW_LINE> reseturl = request.application_url + request.mgmt_path(reset) <NEW_LINE> message = Message( subject = 'Account information for %s' % sitename, recipients = [self.email], body = render('templates/resetpassword_email.pt', dict(reseturl=reseturl)) ) <NEW_LINE> mailer = get_mailer(request) <NEW_LINE> mailer.send(message)
Represents a user.
6259905ccb5e8a47e493cc99
class AbsoluteConstraint(NeuralConstraint): <NEW_LINE> <INDENT> def __init__( self, time_step_spec, action_spec, constraint_network, error_loss_fn=tf.compat.v1.losses.mean_squared_error, comparator_fn=tf.greater, absolute_value=0.0, name='AbsoluteConstraint'): <NEW_LINE> <INDENT> self._absolute_value = absolute_value <NEW_LINE> self._comparator_fn = comparator_fn <NEW_LINE> self._error_loss_fn = error_loss_fn <NEW_LINE> super(AbsoluteConstraint, self).__init__( time_step_spec, action_spec, constraint_network, error_loss_fn=self._error_loss_fn, name=name) <NEW_LINE> <DEDENT> def __call__(self, observation, actions=None): <NEW_LINE> <INDENT> predicted_values, _ = self._constraint_network( observation, training=False) <NEW_LINE> is_satisfied = self._comparator_fn( predicted_values, self._absolute_value) <NEW_LINE> return tf.cast(is_satisfied, tf.float32)
Class for representing a trainable absolute value constraint. This constraint class implements an absolute value constraint such as ``` expected_value(action) >= absolute_value ``` or ``` expected_value(action) <= absolute_value ```
6259905c3c8af77a43b68a54
class no_autoflush(object): <NEW_LINE> <INDENT> def __init__(self, session): <NEW_LINE> <INDENT> self.session = session <NEW_LINE> self.autoflush = session.autoflush <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.session.autoflush = False <NEW_LINE> <DEDENT> def __exit__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.session.autoflush = self.autoflush
A content manager that disables sqlalchemy's autoflush, restoring it afterwards. Adapted from https://bitbucket.org/zzzeek/sqlalchemy/wiki/UsageRecipes/DisableAutoflush
6259905c8e7ae83300eea6b5
class TestInterfaceVrrpPriority(BaseActionTestCase): <NEW_LINE> <INDENT> action_cls = interface_vrrp_priority <NEW_LINE> def test_action(self): <NEW_LINE> <INDENT> action = self.get_action_instance() <NEW_LINE> mock_callback = MockCallback() <NEW_LINE> kwargs = { 'username': '', 'name': '10/0/2', 'ip': '', 'vrid': '10', 'priority': '200', 'int_type': 'tengigabitethernet', 'ip_version': '4', 'password': '', 'port': '22', 'test': True, 'callback': mock_callback.callback } <NEW_LINE> action.run(**kwargs) <NEW_LINE> expected_xml = ( '<config><interface xmlns="urn:brocade.com:mgmt:brocade-interface"' '><tengigabitethernet><name>10/0/2</name><vrrp xmlns="urn:brocade.' 'com:mgmt:brocade-vrrp"><vrid>10</vrid><version>3</version><priori' 'ty>200</priority></vrrp></tengigabitethernet></interface></config' '>' ) <NEW_LINE> self.assertTrue(expected_xml, mock_callback.returned_data)
Test holder class
6259905c3539df3088ecd8c3
class AuthProtocol(auth_token.AuthProtocol): <NEW_LINE> <INDENT> def _build_user_headers(self, token_info): <NEW_LINE> <INDENT> rval = super(AuthProtocol, self)._build_user_headers(token_info) <NEW_LINE> rval['X-Auth-Url'] = self.auth_uri <NEW_LINE> return rval
Subclass of keystoneclient auth_token middleware which also sets the 'X-Auth-Url' header to the value specified in the config.
6259905c99cbb53fe6832508
class ExpectationFailed(BadRequest): <NEW_LINE> <INDENT> code = 417 <NEW_LINE> message = "Expectation failed" <NEW_LINE> detail = ("The server could not meet the requirements indicated in" " the request's Expect header(s).")
Can't. always. get. what you want.
6259905c435de62698e9d42c
class MetadataItem: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def decode_int(s): <NEW_LINE> <INDENT> if s.startswith("0x"): <NEW_LINE> <INDENT> return int(s, 16) <NEW_LINE> <DEDENT> elif s.startswith("0"): <NEW_LINE> <INDENT> return int(s, 8) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return int(s, 10) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def decode_str(s): <NEW_LINE> <INDENT> return cumulus.util.uri_decode_pathname(s) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def raw_str(s): <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def decode_user(s): <NEW_LINE> <INDENT> items = s.split() <NEW_LINE> uid = MetadataItem.decode_int(items[0]) <NEW_LINE> name = None <NEW_LINE> if len(items) > 1: <NEW_LINE> <INDENT> if items[1].startswith("(") and items[1].endswith(")"): <NEW_LINE> <INDENT> name = MetadataItem.decode_str(items[1][1:-1]) <NEW_LINE> <DEDENT> <DEDENT> return (uid, name) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def decode_device(s): <NEW_LINE> <INDENT> (major, minor) = map(MetadataItem.decode_int, s.split("/")) <NEW_LINE> return (major, minor) <NEW_LINE> <DEDENT> class Items: pass <NEW_LINE> def __init__(self, fields, object_store): <NEW_LINE> <INDENT> self.fields = fields <NEW_LINE> self.object_store = object_store <NEW_LINE> self.keys = [] <NEW_LINE> self.items = self.Items() <NEW_LINE> for (k, v) in fields.items(): <NEW_LINE> <INDENT> if k in self.field_types: <NEW_LINE> <INDENT> decoder = self.field_types[k] <NEW_LINE> setattr(self.items, k, decoder(v)) <NEW_LINE> self.keys.append(k) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def data(self): <NEW_LINE> <INDENT> objects = self.fields['data'].split() <NEW_LINE> objects.reverse() <NEW_LINE> stack = [objects] <NEW_LINE> def follow_ref(refstr): <NEW_LINE> <INDENT> if len(stack) >= MAX_RECURSION_DEPTH: raise OverflowError <NEW_LINE> objects = self.object_store.get(refstr).split() <NEW_LINE> objects.reverse() <NEW_LINE> stack.append(objects) <NEW_LINE> <DEDENT> while 
len(stack) > 0: <NEW_LINE> <INDENT> top = stack[-1] <NEW_LINE> if len(top) == 0: <NEW_LINE> <INDENT> stack.pop() <NEW_LINE> continue <NEW_LINE> <DEDENT> ref = top.pop() <NEW_LINE> if len(ref) > 0 and ref[0] == '@': <NEW_LINE> <INDENT> follow_ref(ref[1:]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield ref
Metadata for a single file (or directory or...) from a snapshot.
6259905c462c4b4f79dbd02d
class GitHubRepoHandler(GithubClientMixin, BaseHandler): <NEW_LINE> <INDENT> async def get(self, user, repo): <NEW_LINE> <INDENT> response = await self.github_client.get_repo(user, repo) <NEW_LINE> default_branch = json.loads(response_text(response))["default_branch"] <NEW_LINE> new_url = self.from_base( "/", self.format_prefix, "github", user, repo, "tree", default_branch ) <NEW_LINE> self.log.info("Redirecting %s to %s", self.request.uri, new_url) <NEW_LINE> self.redirect(new_url)
redirect /github/user/repo to .../tree/master
6259905c379a373c97d9a64c
@pytest.mark.django_db <NEW_LINE> class TestConfirmAccountLink: <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> self.user = UserFactory() <NEW_LINE> self.data = { 'first_name': self.user.first_name, 'last_name': self.user.last_name, 'email': self.user.email, 'password': self.user.password, 'confirm_password': self.user.password, 'acct_type': 'IND', 'toc': True, } <NEW_LINE> <DEDENT> def test_confirm_email_response(self, mailoutbox, client): <NEW_LINE> <INDENT> response = client.get('/accounts/register/') <NEW_LINE> current_site = get_current_site(response.wsgi_request) <NEW_LINE> site_name = current_site.name <NEW_LINE> domain = current_site.domain <NEW_LINE> use_https = False <NEW_LINE> token = default_token_generator.make_token(self.user) <NEW_LINE> context = { 'user': self.user, 'token': token, 'uid': urlsafe_base64_encode(force_bytes(self.user.pk)), 'protocol': 'https' if use_https else 'http', 'domain': domain, 'site_name': site_name, } <NEW_LINE> subject, from_email, to_email = 'Welcome to Ibhuku.com. Confirm your email.', 'Ibhuku Team <[email protected]>', self.user.email <NEW_LINE> text_content = render_to_string('emails/registration.txt', context) <NEW_LINE> html_content = render_to_string('emails/registration.html', context) <NEW_LINE> msg = EmailMultiAlternatives( subject, text_content, from_email, [to_email]) <NEW_LINE> msg.attach_alternative(html_content, "text/html") <NEW_LINE> msg.send() <NEW_LINE> assert len(mailoutbox) == 1, 'Returns 1 if email was sent.' <NEW_LINE> mail = mailoutbox[0] <NEW_LINE> assert mail.subject == 'Welcome to Ibhuku.com. Confirm your email.', 'Should return email subject line of sent email.' <NEW_LINE> assert mail.from_email == 'Ibhuku Team <[email protected]>', 'Should return "from email" found in email.' <NEW_LINE> assert list(mail.to) == [ '[email protected]'], 'Should return confirm link email to.' 
<NEW_LINE> <DEDENT> def test_confirm_account_link_lib(self, mailoutbox, client): <NEW_LINE> <INDENT> response = client.get('/acounts/register/') <NEW_LINE> token = default_token_generator.make_token(self.user) <NEW_LINE> mail = confirm_account_link( self.user, self.user.email, token, request=response.wsgi_request) <NEW_LINE> assert len(mailoutbox) == 1, 'Returns 1 if email has been sent.' <NEW_LINE> sent_mail = mailoutbox[0] <NEW_LINE> assert sent_mail.subject == 'Welcome to Ibhuku.com. Confirm your email.', 'Should return email subject line of sent email.' <NEW_LINE> assert sent_mail.from_email == 'Ibhuku Team <[email protected]>', 'Should return "from email" found in email.' <NEW_LINE> assert list(sent_mail.to) == [ '[email protected]'], 'Should return user email used to register.'
Test ConfrimAccountLink Function.
6259905cf7d966606f7493cc
class GyroscopeV1(CDPDataItem): <NEW_LINE> <INDENT> type = 0x012A <NEW_LINE> definition = [DIUInt64Attr('network_time'), DIInt32Attr('x'), DIInt32Attr('y'), DIInt32Attr('z'), DIUInt16Attr('scale')]
CDP Data Item: Ciholas Data Protocol Gyroscope V1 Definition
6259905c3539df3088ecd8c4
class FeedbackDetail(ResourceDetail): <NEW_LINE> <INDENT> def before_get_object(self, view_kwargs): <NEW_LINE> <INDENT> event = None <NEW_LINE> if view_kwargs.get('event_id'): <NEW_LINE> <INDENT> event = safe_query(self, Event, 'id', view_kwargs['event_id'], 'event_id') <NEW_LINE> <DEDENT> elif view_kwargs.get('event_identifier'): <NEW_LINE> <INDENT> event = safe_query(self, Event, 'identifier', view_kwargs['event_identifier'], 'event_identifier') <NEW_LINE> <DEDENT> if event: <NEW_LINE> <INDENT> feedback = safe_query(self, Feedback, 'event_id', event.id, 'event_id') <NEW_LINE> view_kwargs['id'] = feedback.id <NEW_LINE> <DEDENT> <DEDENT> decorators = (api.has_permission('is_user_itself', fetch='user_id', fetch_as="user_id", model=Feedback, methods="PATCH,DELETE"),) <NEW_LINE> schema = FeedbackSchema <NEW_LINE> data_layer = {'session': db.session, 'model': Feedback}
Feedback Resource
6259905c8da39b475be0480e
class DiceLoss(nn.Module): <NEW_LINE> <INDENT> def __init__(self, smooth: Optional[float] = 1e-8, square_denominator: Optional[bool] = False, with_logits: Optional[bool] = True, reduction: Optional[str] = "mean") -> None: <NEW_LINE> <INDENT> super(DiceLoss, self).__init__() <NEW_LINE> self.reduction = reduction <NEW_LINE> self.with_logits = with_logits <NEW_LINE> self.smooth = smooth <NEW_LINE> self.square_denominator = square_denominator <NEW_LINE> <DEDENT> def forward(self, input: Tensor, target: Tensor, mask: Optional[Tensor] = None) -> Tensor: <NEW_LINE> <INDENT> flat_input = input.view(-1) <NEW_LINE> flat_target = target.view(-1) <NEW_LINE> if self.with_logits: <NEW_LINE> <INDENT> flat_input = torch.sigmoid(flat_input) <NEW_LINE> <DEDENT> if mask is not None: <NEW_LINE> <INDENT> mask = mask.view(-1).float() <NEW_LINE> flat_input = flat_input * mask <NEW_LINE> flat_target = flat_target * mask <NEW_LINE> <DEDENT> interection = torch.sum(flat_input * flat_target, -1) <NEW_LINE> if not self.square_denominator: <NEW_LINE> <INDENT> return 1 - ((2 * interection + self.smooth) / (flat_input.sum() + flat_target.sum() + self.smooth)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1 - ((2 * interection + self.smooth) / (torch.sum(torch.square(flat_input,), -1) + torch.sum(torch.square(flat_target), -1) + self.smooth)) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"Dice Loss smooth:{self.smooth}"
Dice coefficient for short, is an F1-oriented statistic used to gauge the similarity of two sets. Given two sets A and B, the vanilla dice coefficient between them is given as follows: Dice(A, B) = 2 * True_Positive / (2 * True_Positive + False_Positive + False_Negative) = 2 * |A and B| / (|A| + |B|) Math Function: U-NET: https://arxiv.org/abs/1505.04597.pdf dice_loss(p, y) = 1 - numerator / denominator numerator = 2 * \sum_{1}^{t} p_i * y_i + smooth denominator = \sum_{1}^{t} p_i + \sum_{1} ^{t} y_i + smooth if square_denominator is True, the denominator is \sum_{1}^{t} (p_i ** 2) + \sum_{1} ^{t} (y_i ** 2) + smooth V-NET: https://arxiv.org/abs/1606.04797.pdf Args: smooth (float, optional): a manual smooth value for numerator and denominator. square_denominator (bool, optional): [True, False], specifies whether to square the denominator in the loss function. with_logits (bool, optional): [True, False], specifies whether the input tensor is normalized by Sigmoid/Softmax funcs. True: the loss combines a `sigmoid` layer and the `BCELoss` in one single class. False: the loss contains `BCELoss`. Shape: - input: (*) - target: (*) - mask: (*) 0,1 mask for the input sequence. - Output: Scalar loss Examples: >>> loss = DiceLoss() >>> input = torch.randn(3, 1, requires_grad=True) >>> target = torch.empty(3, dtype=torch.long).random_(5) >>> output = loss(input, target) >>> output.backward()
6259905c1f037a2d8b9e537f
class ISEOConfigTitleSchema_accessibilitypage(Interface): <NEW_LINE> <INDENT> accessibilitypage_title = schema.TextLine( title=_("label_accessibilitypage_title", default=u"Accessibility Title"), required=False) <NEW_LINE> accessibilitypage_description = schema.Text( title=_("label_accessibilitypage_description", default=u"Accessibility Description"), required=False) <NEW_LINE> accessibilitypage_keywords = schema.List( title=_("label_accessibilitypage_keywords", default=u"Accessibility Keywords"), description=_("help_keywords", default=u"You can enter multiple keywords - one pr. line."), value_type=schema.TextLine(), required=False)
Schema for Title accessibilitypages
6259905cac7a0e7691f73b0a
class MinHashSignature(Signature): <NEW_LINE> <INDENT> def hash_functions(self): <NEW_LINE> <INDENT> def hash_factory(n): <NEW_LINE> <INDENT> return lambda x: hash("salt" + unicode(n) + unicode(x) + "salt") <NEW_LINE> <DEDENT> return [ hash_factory(_) for _ in range(self.dim) ] <NEW_LINE> <DEDENT> def sign(self, s): <NEW_LINE> <INDENT> sig = [ float("inf") ] * self.dim <NEW_LINE> for hash_ix, hash_fn in enumerate(self.hashes): <NEW_LINE> <INDENT> sig[hash_ix] = min(hash_fn(value) for value in s) <NEW_LINE> <DEDENT> return sig
Creates signatures for sets/tuples using minhash.
6259905c8a43f66fc4bf37b5
class TestSay(SpeakerbotClientTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestSay, self).setUp() <NEW_LINE> self.client._post = Mock() <NEW_LINE> <DEDENT> def test_calls_post_with_correct_arguments(self): <NEW_LINE> <INDENT> arg_dicts = [ { 'text': 'foo', 'record_utterance': False }, { 'text': 'foo', 'record_utterance': True } ] <NEW_LINE> for arg_dict in arg_dicts: <NEW_LINE> <INDENT> self.client._post = Mock() <NEW_LINE> self.client.say(arg_dict['text'], arg_dict['record_utterance']) <NEW_LINE> self.client._post.assert_called_once_with('say/', { 'speech-text': arg_dict['text'], 'record_utterance': str(arg_dict['record_utterance']).lower() }) <NEW_LINE> <DEDENT> <DEDENT> def test_defaults_record_utterance(self): <NEW_LINE> <INDENT> text = 'foo' <NEW_LINE> self.client.say(text) <NEW_LINE> self.client._post.assert_called_once_with('say/', { 'speech-text': text, 'record_utterance': 'true' })
Test say()
6259905c8e71fb1e983bd0f3
class Molecule(Body_data): <NEW_LINE> <INDENT> def __init__(self, data, molecule_id): <NEW_LINE> <INDENT> Body_data.__init__(self, data.atom_style) <NEW_LINE> self.extract(data, "molecule", molecule_id)
stores the body_data associated with a specific molecule
6259905c7d43ff2487427f24
class AgentSequence(BaseHandler): <NEW_LINE> <INDENT> @require() <NEW_LINE> def get(self): <NEW_LINE> <INDENT> form = Form(self.request.arguments, list_schema) <NEW_LINE> sql = 'select s.short_name, em.amount eamount, em.remark eremark, ac.amount aamount, ac.remark aremark, ' 'em.created_by, em.created_at ' 'from external_money em left join account_sequence ac ' 'on em.id=ac.trade_id and ac.type=6 join supplier s ' 'where em.source=3 and em.flag=0 and em.deleted=0 and s.id=em.uid ' <NEW_LINE> params = [] <NEW_LINE> if form.agent_id.value: <NEW_LINE> <INDENT> sql += ' and s.agent_id = %s ' <NEW_LINE> params.append(form.agent_id.value) <NEW_LINE> <DEDENT> if form.start_date.value: <NEW_LINE> <INDENT> sql += ' and em.created_at > %s ' <NEW_LINE> params.append(form.start_date.value) <NEW_LINE> <DEDENT> if form.end_date.value: <NEW_LINE> <INDENT> sql += ' and em.created_at < %s ' <NEW_LINE> params.append(form.end_date.value) <NEW_LINE> <DEDENT> page = Paginator(self, sql, params) <NEW_LINE> self.render('finance/agent/sequence.html', form=form, page=page)
代理商资金明细
6259905c63b5f9789fe8679b
class NucleotideArray(SequenceArray): <NEW_LINE> <INDENT> def __init__(self, input_obj, name='', description='', validate=False): <NEW_LINE> <INDENT> super().__init__(input_obj, name=name, seqtype='nucl', description=description, validate=validate) <NEW_LINE> <DEDENT> def to_codonarray(self): <NEW_LINE> <INDENT> return CodonArray(deepcopy(self), name=self.name, description=self.description) <NEW_LINE> <DEDENT> def basecomp(self): <NEW_LINE> <INDENT> basecomp_of = super().composition(self, seqtype=self.seqtype) <NEW_LINE> for key, comp in basecomp_of.items(): <NEW_LINE> <INDENT> basecomp_of[key]['AT'] = comp['A'] + comp['T'] <NEW_LINE> basecomp_of[key]['GC'] = comp['G'] + comp['C'] <NEW_LINE> <DEDENT> return basecomp_of <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def nucleotide_to_codon(nucleotide_str): <NEW_LINE> <INDENT> if len(nucleotide_str) % 3 != 0: <NEW_LINE> <INDENT> raise ValueError('SequenceArray length is not a multiple of three ({0}).'.format(len(nucleotide_str))) <NEW_LINE> <DEDENT> for j in range(0, len(nucleotide_str), 3): <NEW_LINE> <INDENT> if j+3 <= len(nucleotide_str): <NEW_LINE> <INDENT> yield nucleotide_str[j:j+3]
Nucleotide sequence array object constructor This is a special type of SequenceArray for nucleotide sequences containing additional methods specific for handling nucleotide sequence data. On instantiation, it constructs a SequenceArray object whose seqtype is set to 'nucl'. NucleotideArray is suitable for both protein-coding and non-protein coding nucleotide sequences. However, if sequences are protein-coding, it is better to use the CodonArray object as this contains methods useful for protein-coding sequences such as the ability count by nucleotide triplets and to translate to amino acid sequences. If the array contains in-frame protein-coding sequence, NucleotideArray can construct a CodonArray using the method `to_codonarray`. However, NucleotideArray cannot differentiate by itself whether a sequence is coding or non-coding, and if coding, whether it is in-frame or not, therefore it is up to the user to judge whether it is appropriate to represent the sequences as plain nucleotides through NucleotideArray or as protein-coding sequences through CodonArray.
6259905c56b00c62f0fb3ef4
class MediaAiAnalysisCoverItem(AbstractModel):
    """Intelligent cover information.

    Attributes:
        CoverPath: storage path of the generated cover.
        Confidence: confidence score of the cover.
    """

    def __init__(self):
        self.CoverPath = None
        self.Confidence = None

    def _deserialize(self, params):
        """Populate attributes from `params`, warning about unknown keys."""
        self.CoverPath = params.get("CoverPath")
        self.Confidence = params.get("Confidence")
        # Keys in params with no matching attribute are silently dropped;
        # warn so callers know data was ignored. (Fixes the misspelled
        # local name and the "fileds" typo in the warning message.)
        unknown = set(params.keys()) - set(vars(self).keys())
        if unknown:
            warnings.warn("%s fields are useless." % ",".join(unknown))
智能封面信息
6259905c16aa5153ce401b0d
class SearchViewTest(TestCase):
    """Tests for the search view."""

    def test_SearchOk(self):
        """The search page without a query returns HTTP 200."""
        self.assertEqual(self.client.get(reverse('search')).status_code, 200)

    def test_SearchWithQueryOk(self):
        """The search page with a query string returns HTTP 200."""
        url = reverse('search') + "?q=petition"
        self.assertEqual(self.client.get(url).status_code, 200)
Test search view
6259905c4a966d76dd5f051c
class BooleanField(Field):
    """Field subclass for boolean values.

    Maps to column type 'boolean', is never a primary key, and defaults
    to False unless another default is supplied.
    """

    def __init__(self, name=None, default=False):
        # Bug fix: the original called `super.__init__(...)` on the builtin
        # `super` type itself (a TypeError at runtime); it must be invoked
        # as `super()`.
        super().__init__(name, 'boolean', False, default)
Field subclass storing a boolean value in a 'boolean' column; never a primary key, defaulting to False unless overridden.
6259905c56ac1b37e63037fc
class NumberedFormatter(FormatterMixin):
    """Format :class:`Chord` objects into standard numbered notation.

    This is the inverse of :class:`NumberedParser`.
    """

    def notation_for_chord(self, chord):
        """Return the numbered-notation string for *chord*."""
        # Minor chords are rendered with a leading '-' and no type suffix;
        # all other chord types use the configured notation table.
        if chord.chord_type == Chord.MINOR:
            minor, chord_type = '-', ''
        else:
            minor, chord_type = '', self.CHORD_TYPE_NOTATION.get(chord.chord_type, '')
        return "{minor}{mod}{degree}{chord_type}{extension}{sustained}{addition}{bass}".format(
            minor=minor,
            mod=self.MODIFIER_NOTATION.get(chord.modifier, ''),
            degree=chord.degree,
            chord_type=chord_type,
            extension=self.get_extension_notation(chord),
            sustained='sus4' if chord.sustained else '',
            addition='add%s' % chord.addition if chord.addition else '',
            bass=self.get_bass_notation(chord),
        )

    def get_bass_notation(self, chord):
        """Return the '/bass' suffix for *chord*, or '' when it has no bass."""
        if not chord.bass:
            return ''
        return '/{modifier}{degree}'.format(
            degree=chord.bass,
            modifier=self.MODIFIER_NOTATION.get(chord.bass_modifier, ''))
Formats :class:`Chord`s into the standard numbered notation. This is the inverse of :class:`NumberedParser`.
6259905cb57a9660fecd30a5
@set_module('numpy')
class errstate(contextlib.ContextDecorator):
    """Context manager (and, via ContextDecorator, function decorator) for
    floating-point error handling.

    On entry the error treatment is set with `seterr` (and `seterrcall`
    when `call` is given); on exit both are restored to their previous
    values.

    Parameters
    ----------
    call : callable or log-like object, optional
        Error handler forwarded to `seterrcall`; left untouched when omitted.
    **kwargs
        Keyword arguments forwarded to `seterr` (e.g. ``divide='warn'``).
    """

    def __init__(self, *, call=_Unspecified, **kwargs):
        # _Unspecified is a sentinel distinguishing "no call handler given"
        # from an explicit call=None.
        self.call = call
        self.kwargs = kwargs

    def __enter__(self):
        # Save the previous error state so __exit__ can restore it.
        self.oldstate = seterr(**self.kwargs)
        if self.call is not _Unspecified:
            self.oldcall = seterrcall(self.call)

    def __exit__(self, *exc_info):
        # Restore whatever was in effect before entering the context.
        seterr(**self.oldstate)
        if self.call is not _Unspecified:
            seterrcall(self.oldcall)
errstate(**kwargs) Context manager for floating-point error handling. Using an instance of `errstate` as a context manager allows statements in that context to execute with a known error handling behavior. Upon entering the context the error handling is set with `seterr` and `seterrcall`, and upon exiting it is reset to what it was before. .. versionchanged:: 1.17.0 `errstate` is also usable as a function decorator, saving a level of indentation if an entire function is wrapped. See :py:class:`contextlib.ContextDecorator` for more information. Parameters ---------- kwargs : {divide, over, under, invalid} Keyword arguments. The valid keywords are the possible floating-point exceptions. Each keyword should have a string value that defines the treatment for the particular error. Possible values are {'ignore', 'warn', 'raise', 'call', 'print', 'log'}. See Also -------- seterr, geterr, seterrcall, geterrcall Notes ----- For complete documentation of the types of floating-point exceptions and treatment options, see `seterr`. Examples -------- >>> olderr = np.seterr(all='ignore') # Set error handling to known state. >>> np.arange(3) / 0. array([nan, inf, inf]) >>> with np.errstate(divide='warn'): ... np.arange(3) / 0. array([nan, inf, inf]) >>> np.sqrt(-1) nan >>> with np.errstate(invalid='raise'): ... np.sqrt(-1) Traceback (most recent call last): File "<stdin>", line 2, in <module> FloatingPointError: invalid value encountered in sqrt Outside the context the error handling behavior has not changed: >>> np.geterr() {'divide': 'ignore', 'over': 'ignore', 'under': 'ignore', 'invalid': 'ignore'}
6259905cbe8e80087fbc06ae
class ValidationTest(APITestCase):
    """Validation tests for date fields."""

    def test_if_date_is_future(self):
        """Constructing NotFuture with a future date must raise ValidationError."""
        tomorrow = datetime.datetime.now() + datetime.timedelta(days=1)
        with self.assertRaises(ValidationError):
            NotFuture(timezone('Europe/Rome').localize(tomorrow))
Check if the date is from future
6259905c009cb60464d02b5f
class MMAEFilterBank(object):
    """Fixed Multiple Model Adaptive Estimator (MMAE).

    Runs a bank of independent Kalman filters, maintains the probability
    that each filter is the correct model, and blends the filters' states
    (weighted by probability) into a single estimate.

    Parameters
    ----------
    filters : list
        Filter objects exposing ``predict``/``update``, ``x``, ``P`` and
        ``likelihood``.
    p : sequence of float
        Initial model probabilities; same length as `filters`.
    dim_x : int
        Dimension of the state vector.
    H : unused
        Kept for backward compatibility of the signature.

    References
    ----------
    Zarchan and Musoff, "Fundamentals of Kalman Filtering: A Practical
    Approach", 3rd ed.
    """

    def __init__(self, filters, p, dim_x, H=None):
        assert len(filters) == len(p)
        assert dim_x > 0
        self.filters = filters
        self.p = asarray(p)
        self.dim_x = dim_x
        self.x = None  # blended estimate; computed in update()

    def predict(self, u=0):
        """Propagate every filter in the bank by one step."""
        for f in self.filters:
            f.predict(u)

    def update(self, z, R=None, H=None):
        """Incorporate measurement `z`, reweight models and blend estimates."""
        # Update each filter and scale its model probability by the
        # filter's measurement likelihood, then renormalize.
        for i, f in enumerate(self.filters):
            f.update(z, R, H)
            self.p[i] *= f.likelihood
        self.p /= sum(self.p)

        # Blended state: probability-weighted sum of the filter states.
        # (Bug fix: the column-vector branch used to re-zero self.x inside
        # the accumulation loop, discarding every filter's contribution
        # except the last one.)
        is_row_vector = (self.filters[0].x.ndim == 1)
        if is_row_vector:
            self.x = zeros(self.dim_x)
        else:
            self.x = zeros((self.dim_x, 1))
        for f, p in zip(self.filters, self.p):
            self.x += dot(f.x, p)

        # Blended covariance: weighted spread-of-the-means term plus each
        # filter's own covariance. (Bug fix: the loop used to zip the
        # *components* of self.x against the filters, mixing unrelated
        # indices and silently truncating when dim_x != len(filters); the
        # residual must be the full f.x - self.x for each filter.)
        self.P = zeros(self.filters[0].P.shape)
        for f, p in zip(self.filters, self.p):
            y = f.x - self.x
            self.P += p * (outer(y, y) + f.P)
Implements the fixed Multiple Model Adaptive Estimator (MMAE). This is a bank of independent Kalman filters. This estimator computes the likelihood that each filter is the correct one, and blends their state estimates weighted by their likelihood to produce the state estimate. Examples -------- ..code: ca = make_ca_filter(dt, noise_factor=0.6) cv = make_ca_filter(dt, noise_factor=0.6) cv.F[:,2] = 0 # remove acceleration term cv.P[2,2] = 0 cv.Q[2,2] = 0 filters = [cv, ca] bank = MMAEFilterBank(filters, p=(0.5, 0.5), dim_x=3) for z in zs: bank.predict() bank.update(z) References ---------- Zarchan and Musoff. "Fundamentals of Kalman filtering: A Practical Approach." AIAA, third edition.
6259905c3cc13d1c6d466d6a
class Request(pydantic.BaseModel):
    """Request model for GET https://api.binance.com/api/v3/openOrders."""

    # Trading pair; optional -- per the API docs, omitting it queries all
    # symbols (at a higher request weight).
    symbol: typing.Optional[SYMBOL]
    # Request timestamp in milliseconds.
    timestamp: TIMESTAMP_MS
    # Milliseconds after `timestamp` during which the request stays valid
    # (optional; server-side default presumably 5000 -- confirm with docs).
    recvWindow: typing.Optional[RECV_WINDOW]
Request model for endpoint GET https://api.binance.com/api/v3/openOrders Model Fields: ------------- symbol : str If symbol is ommited, request weight = 40 (optional) timestamp : float Timestamp in millisecond recvWindow : int Number of milliseconds after timestamp the request is valid for (optional) Default = 5000
6259905ce5267d203ee6ced5
class mMulScalar(MulScalar):
    """In-place scalar multiplication/division mixin.

    Each operator converts `other` to this object's dtype and applies the
    operation to every element of ``self.flat``, returning ``self``.
    """

    __slots__ = ()

    def __imul__(self, other):
        """In-place element-wise multiplication by a scalar."""
        other = convert(other, self.dtype)
        # Index-only iteration: the element value is re-read on assignment,
        # so the unused loop variable from enumerate() is dropped.
        for i in range(len(self.flat)):
            self.flat[i] *= other
        return self

    def __itruediv__(self, other):
        """In-place element-wise true division by a scalar."""
        other = convert(other, self.dtype)
        for i in range(len(self.flat)):
            self.flat[i] /= other
        return self

    def __ifloordiv__(self, other):
        """In-place element-wise floor division by a scalar."""
        other = convert(other, self.dtype)
        for i in range(len(self.flat)):
            self.flat[i] //= other
        return self
Inplace multiplication.
6259905ca219f33f346c7e30
class RefundError(HelcimError):
    """Exception raised to handle refund errors."""

    pass
Exception to handle refund errors.
6259905c009cb60464d02b60
class FrozenJobErrorHandler(ErrorHandler):
    """Detects a frozen job: the output file has not been updated for
    `timeout` seconds. Correction switches ALGO from Fast to Normal, or
    tightens SYMPREC if ALGO is already Normal.
    """

    # Runs while the job is in progress (monitor-style handler).
    is_monitor = True

    def __init__(self, output_filename="vasp.out", timeout=21600):
        """
        Args:
            output_filename (str): File whose modification time is watched.
            timeout (int): Seconds of inactivity before the job is
                considered frozen (default 21600 = 6 hours).
        """
        self.output_filename = output_filename
        self.timeout = timeout

    def check(self):
        """Return True when the output file is older than `timeout` seconds,
        else None."""
        st = os.stat(self.output_filename)
        if time.time() - st.st_mtime > self.timeout:
            return True
        return None

    def correct(self):
        """Back up outputs and adjust the INCAR, then report the actions."""
        backup(VASP_BACKUP_FILES | {self.output_filename})
        vi = VaspInput.from_directory(".")
        actions = []
        # First remedy: ALGO Fast -> Normal; if already Normal (or other),
        # tighten the symmetry tolerance instead.
        if vi["INCAR"].get("ALGO", "Normal").lower() == "fast":
            actions.append({"dict": "INCAR", "action": {"_set": {"ALGO": "Normal"}}})
        else:
            actions.append({"dict": "INCAR", "action": {"_set": {"SYMPREC": 1e-8}}})
        VaspModder(vi=vi).apply_actions(actions)
        return {"errors": ["Frozen job"], "actions": actions}
Detects an error when the output file has not been updated in timeout seconds. Changes ALGO to Normal from Fast
6259905c2ae34c7f260ac711
class OraDirectory(OraObject):
    """Oracle DIRECTORY object."""

    def __init__(self, directoryOwner="", directoryName=""):
        """Initialize with owner and name; the object type is DIRECTORY."""
        OraObject.__init__(self, directoryOwner, directoryName, "DIRECTORY")

    def getPath(self, db):
        """Return the filesystem path of this directory, or '' when unknown."""
        rows = db.executeAll(directorySql["pathFromName"], [self.getName()])
        return rows[0][0] if rows else ""
Oracle directory object
6259905cfff4ab517ebcee4f
class LutronCasetaLight(LutronCasetaDevice, SwitchDevice):
    """Representation of a Lutron Caseta switch."""

    async def async_turn_on(self, **kwargs):
        """Turn the device on through the smartbridge."""
        self._smartbridge.turn_on(self._device_id)

    async def async_turn_off(self, **kwargs):
        """Turn the device off through the smartbridge."""
        self._smartbridge.turn_off(self._device_id)

    @property
    def is_on(self):
        """True when the bridge reports a current_state greater than zero."""
        return self._state["current_state"] > 0

    async def async_update(self):
        """Refresh the cached device state from the smartbridge and log it."""
        self._state = self._smartbridge.get_device_by_id(self._device_id)
        _LOGGER.debug(self._state)
Representation of a Lutron Caseta switch.
6259905c32920d7e50bc7670
class reviews(models.Model):
    """Goodreads review attached one-to-one to a book."""

    # The reviewed book; deleting the book cascades to the review.
    book = models.OneToOneField(books, null=True, on_delete=models.CASCADE, )
    # Goodreads review identifier.
    grReviewId = models.TextField()
    # Creation time stored as an integer (epoch-style -- confirm units
    # against the importer).
    dateCreated = models.BigIntegerField()
    # Reviewer's display name and profile URL.
    userName = models.TextField()
    userUrl = models.TextField()
    # URL of the review itself.
    url = models.TextField()
    # Truncated and full review text.
    body = models.TextField()
    fullBody = models.TextField()
    # Star rating and vote count.
    rating = models.IntegerField()
    votes = models.IntegerField()
    # Spoiler flag/content stored as text.
    spoiler = models.TextField()
reviews
6259905c23e79379d538db26
class GPIODevice(Device):
    """Extends :class:`Device`: a generic single-pin GPIO device.

    Provides the services common to all single-pin GPIO devices, in
    particular ensuring that no two devices share the same pin.

    :param int pin:
        The GPIO pin the device is connected to. :exc:`GPIOPinMissing` is
        raised when omitted; :exc:`GPIOPinInUse` when the pin is already
        claimed by another device.
    """

    def __init__(self, pin=None):
        super(GPIODevice, self).__init__()
        # Set _pin first so close() works even if construction fails below.
        self._pin = None
        if pin is None:
            raise GPIOPinMissing('No pin given')
        if isinstance(pin, int):
            # Integer pin numbers are resolved through the pin factory.
            pin = pin_factory(pin)
        with _PINS_LOCK:
            # Global registry guarantees one device per pin.
            if pin in _PINS:
                raise GPIOPinInUse(
                    'pin %r is already in use by another gpiozero object' % pin
                )
            _PINS.add(pin)
        self._pin = pin
        self._active_state = True
        self._inactive_state = False

    def _state_to_value(self, state):
        # Map the raw pin state onto a boolean "active" value.
        return bool(state == self._active_state)

    def _read(self):
        try:
            return self._state_to_value(self.pin.state)
        except (AttributeError, TypeError):
            # A closed device has no pin; raise a meaningful error instead.
            self._check_open()
            raise

    def close(self):
        """Release the pin and remove it from the global registry."""
        super(GPIODevice, self).close()
        with _PINS_LOCK:
            pin = self._pin
            self._pin = None
            if pin in _PINS:
                _PINS.remove(pin)
                pin.close()

    @property
    def closed(self):
        """True once close() has released the pin."""
        return self._pin is None

    def _check_open(self):
        try:
            super(GPIODevice, self)._check_open()
        except DeviceClosed as e:
            # Re-raise with the GPIO-specific exception type.
            raise GPIODeviceClosed(str(e))

    @property
    def pin(self):
        """The underlying pin object, or None when closed."""
        return self._pin

    @property
    def value(self):
        """Current boolean state of the device."""
        return self._read()

    def __repr__(self):
        try:
            return "<gpiozero.%s object on pin %r, is_active=%s>" % (
                self.__class__.__name__,
                self.pin, self.is_active)
        except DeviceClosed:
            return "<gpiozero.%s object closed>" % self.__class__.__name__
Extends :class:`Device`. Represents a generic GPIO device and provides the services common to all single-pin GPIO devices (like ensuring two GPIO devices do not share a :attr:`pin`). :param int pin: The GPIO pin (in BCM numbering) that the device is connected to. If this is ``None``, :exc:`GPIOPinMissing` will be raised. If the pin is already in use by another device, :exc:`GPIOPinInUse` will be raised.
6259905c91f36d47f22319a4
class CapsuleLayer(nn.Module):
    """Capsule layer: either a primary (convolutional) capsule layer or a
    routing layer, selected by `use_routing`.
    """

    def __init__(self, in_unit, in_channel, num_unit, unit_size, use_routing, num_routing, cuda_enabled):
        """
        Args:
            in_unit: capsule unit size of the previous layer.
            in_channel: number of input channels/capsules.
            num_unit: number of capsule units in this layer.
            unit_size: output size of each capsule unit.
            use_routing: True for dynamic routing, False for conv units.
            num_routing: number of routing iterations.
            cuda_enabled: move routing logits to GPU when True.
        """
        super(CapsuleLayer, self).__init__()
        self.in_unit = in_unit
        self.in_channel = in_channel
        self.num_unit = num_unit
        self.use_routing = use_routing
        self.num_routing = num_routing
        self.cuda_enabled = cuda_enabled
        if self.use_routing:
            # Transformation matrices between capsule layers.
            self.weight = nn.Parameter(torch.randn(1, in_channel, num_unit, unit_size, in_unit))
        else:
            # Primary capsules: one conv per unit (32 out-channels, 9x9 kernel, stride 2).
            self.conv_units = nn.ModuleList([
                nn.Conv2d(self.in_channel, 32, 9, 2) for u in range(self.num_unit)
            ])

    def forward(self, x):
        """Dispatch to the routing or non-routing forward pass."""
        if self.use_routing:
            return self.routing(x)
        else:
            return self.no_routing(x)

    def routing(self, x):
        """Dynamic routing-by-agreement between capsule layers."""
        batch_size = x.size(0)
        x = x.transpose(1, 2)
        # Replicate the input once per output unit; trailing dim for matmul.
        x = torch.stack([x] * self.num_unit, dim=2).unsqueeze(4)
        batch_weight = torch.cat([self.weight] * batch_size, dim=0)
        # Prediction vectors u_hat = W x.
        u_hat = torch.matmul(batch_weight, x)
        # Routing logits, initialized to zero each forward pass.
        b_ij = Variable(torch.zeros(1, self.in_channel, self.num_unit, 1))
        if self.cuda_enabled:
            b_ij = b_ij.cuda()
        num_iterations = self.num_routing
        for iteration in range(num_iterations):
            # Coupling coefficients: softmax of logits over output units.
            c_ij = F.softmax(b_ij, dim=2)
            c_ij = torch.cat([c_ij] * batch_size, dim=0).unsqueeze(4)
            # Weighted sum over input capsules, then squash nonlinearity.
            s_j = (c_ij * u_hat).sum(dim=1, keepdim=True)
            v_j = caps_utils.squash(s_j, dim=3)
            v_j1 = torch.cat([v_j] * self.in_channel, dim=1)
            # Agreement between predictions and outputs updates the logits.
            u_vj1 = torch.matmul(u_hat.transpose(3, 4), v_j1).squeeze(4).mean(dim=0, keepdim=True)
            b_ij = b_ij + u_vj1
        return v_j.squeeze(1)

    def no_routing(self, x):
        """Primary capsule layer: apply each conv unit, flatten, squash."""
        unit = [self.conv_units[i](x) for i, l in enumerate(self.conv_units)]
        unit = torch.stack(unit, dim=1)
        batch_size = x.size(0)
        unit = unit.view(batch_size, self.num_unit, -1)
        # NOTE(review): `squash` here is unqualified while routing() uses
        # caps_utils.squash -- confirm both names resolve to the same function.
        return squash(unit, dim=2)
The core implementation of the idea of capsules
6259905c8e71fb1e983bd0f5
class Standard:
    """Standardization Specifications, exposed as classmethods.

    A Specification maps an (N, M+L) DataFrame to a (2, M+L) DataFrame
    whose first row holds per-column ``loc`` values and second row
    per-column ``scale`` values; callers standardize as (df - loc)/scale.
    """

    Specification = Callable[[DataFrame], DataFrame]

    @staticmethod
    def _mean(df: DataFrame) -> Series:
        """Column means of *df*, as a Series named 'mean'."""
        avg = df.mean()
        avg.name = 'mean'
        return avg

    @staticmethod
    def _stack_as_rows(top: Series, bottom: Series):
        """Stack two Series as the rows of a 2-row DataFrame."""
        return concat([top, bottom], axis=1).T

    @classmethod
    def none(cls, df: DataFrame) -> DataFrame:
        """No standardization: an empty DataFrame."""
        return DataFrame()

    @classmethod
    def mean_and_range(cls, df: DataFrame) -> DataFrame:
        """loc = column mean, scale = column max - min (named 'range')."""
        spread = df.max() - df.min()
        spread.name = 'range'
        return cls._stack_as_rows(cls._mean(df), spread)

    @classmethod
    def mean_and_std(cls, df: DataFrame) -> DataFrame:
        """loc = column mean, scale = column standard deviation."""
        dispersion = df.std()
        dispersion.name = 'std'
        return cls._stack_as_rows(cls._mean(df), dispersion)
Encapsulates Specifications for standardizing data as ``classmethods``. A Specification is a function taking an (N,M+L) DataFrame ``df `` (to be standardized) as input and returning a (2,M+L) DataFrame. The first row of the return contains (,M+L) ``loc`` values, the second row (,M+L) ``scale`` values. Ultimately the Store class standardizes ``df`` to ``(df - loc)/scale``.
6259905c45492302aabfdb03
class WildCard:
    """Filename matching with shell patterns (fnmatch), supporting several
    patterns separated by `sep`.

    >>> w = WildCard("*.nc|*.pdf")
    >>> w.filter(["foo.nc", "bar.pdf", "hello.txt"])
    ['foo.nc', 'bar.pdf']
    >>> w.filter("foo.nc")
    ['foo.nc']
    """

    def __init__(self, wildcard, sep="|"):
        """
        Args:
            wildcard: pattern string, e.g. ``"*.nc|*.pdf"``; an empty/falsy
                value matches everything ("*").
            sep: separator between individual patterns.
        """
        self.pats = ["*"]
        if wildcard:
            self.pats = wildcard.split(sep)

    def __str__(self):
        return f"<{self.__class__.__name__}, patterns = {self.pats}>"

    def filter(self, names):
        """Return the subset of `names` matching at least one pattern.

        Bug fix: a name matching several patterns used to be returned once
        per matching pattern; each name now appears at most once, in order.
        """
        names = list_strings(names)
        return [name for name in names if self.match(name)]

    def match(self, name):
        """True if `name` matches at least one of the patterns."""
        return any(fnmatch.fnmatch(name, pat) for pat in self.pats)
This object provides an easy-to-use interface for filename matching with shell patterns (fnmatch). >>> w = WildCard("*.nc|*.pdf") >>> w.filter(["foo.nc", "bar.pdf", "hello.txt"]) ['foo.nc', 'bar.pdf'] >>> w.filter("foo.nc") ['foo.nc']
6259905c01c39578d7f1424c
class InjectDependency():
    """Class decorator that injects named dependencies into class fields.

    Decorate a class whose fields are initialized to `Inject`; each named
    dependency field is replaced with a shared DependencyProxy that can
    later be bound to a real implementation via setDependency().
    """

    # Global registry: dependency name -> DependencyProxy.
    dependencies = {}

    def __init__(self, *dependenciesToInject):
        self.dependenciesToInject = dependenciesToInject

    def __call__(self, targetObject):
        # Validate and inject each requested dependency, then return the
        # (modified) class so the decorator is transparent.
        for name in self.dependenciesToInject:
            InjectDependency._throwIfNoSuchField(name, targetObject)
            InjectDependency._throwIfFieldNotDesignatedForInjection(name, targetObject)
            InjectDependency._registerIfNotRegistered(name)
            InjectDependency._inject(name, targetObject)
        return targetObject

    @staticmethod
    def _registerIfNotRegistered(name):
        # Unknown dependencies start as proxies around NotRegistered.
        if (name not in InjectDependency.dependencies):
            InjectDependency.dependencies[name] = DependencyProxy(NotRegistered)

    @staticmethod
    def _inject(dependencyName, targetObject):
        # Replace the class field with the shared proxy.
        setattr(targetObject, dependencyName, InjectDependency.dependencies[dependencyName])

    @staticmethod
    def manualInject(fieldName, value, targetObject):
        """Directly set `fieldName` on `targetObject`, bypassing the registry."""
        InjectDependency._throwIfNoSuchField(fieldName, targetObject)
        setattr(targetObject, fieldName, value)

    @staticmethod
    def setDependency(name, value):
        """Bind the real implementation behind the named proxy."""
        InjectDependency._registerIfNotRegistered(name)
        InjectDependency.dependencies[name].setProxyTarget(value)

    @staticmethod
    def _throwIfNoSuchField(name, targetClass):
        ERROR_STR = 'Cant _inject dependency. No field "%s" in class "%s"'
        if (name not in targetClass.__dict__):
            raise DependencyInjectionException(ERROR_STR % (name, targetClass.__name__))

    @staticmethod
    def _throwIfFieldNotDesignatedForInjection(name, targetClass):
        ERROR_STR = 'Field "%s.%s" not designated for injection. Should be initially set to "Inject" instead of "%s"'
        if (targetClass.__dict__[name] != Inject):
            raise DependencyInjectionException(ERROR_STR % (
                targetClass.__name__, name, targetClass.__dict__[name]))
Dependency injection decorator for classes.
6259905c7b25080760ed87f5
class InternalCompose(object):
    """Chain several augmentations, threading (img, boxes, labels) through each.

    Args:
        transforms (list): callables of signature
            ``(img, boxes, labels) -> (img, boxes, labels)``, applied in order.

    Example:
        >>> augmentations.Compose([
        >>>     transforms.CenterCrop(10),
        >>>     transforms.ToTensor(),
        >>> ])
    """

    def __init__(self, transforms):
        self.transforms = transforms

    def __call__(self, img, boxes=None, labels=None):
        for transform in self.transforms:
            img, boxes, labels = transform(img, boxes, labels)
        return img, boxes, labels
Composes several augmentations together. Args: transforms (List[Transform]): list of transforms to compose. Example: >>> augmentations.Compose([ >>> transforms.CenterCrop(10), >>> transforms.ToTensor(), >>> ])
6259905c63d6d428bbee3d9d