code (stringlengths 4–4.48k) | docstring (stringlengths 1–6.45k) | _id (stringlengths 24–24) |
---|---|---|
class ActionType: <NEW_LINE> <INDENT> CREATE = 'create' <NEW_LINE> UPDATE = 'update' <NEW_LINE> DELETE = 'delete' <NEW_LINE> CHOICES = ( (CREATE, _('Created')), (UPDATE, _('Updated')), (DELETE, _('Deleted')) ) | Available action types for ``History`` | 625990835fcc89381b266ee3 |
class Answer_5_3(): <NEW_LINE> <INDENT> inputFileName = '/home/inoue/python/python.txt' <NEW_LINE> def Step1(self): <NEW_LINE> <INDENT> with open(self.inputFileName) as txt: <NEW_LINE> <INDENT> for line in txt: <NEW_LINE> <INDENT> result = re.match(r'[A-Z].*',line) <NEW_LINE> if result: <NEW_LINE> <INDENT> print(result.group()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def Step2(self): <NEW_LINE> <INDENT> with open(self.inputFileName) as txt: <NEW_LINE> <INDENT> for line in txt: <NEW_LINE> <INDENT> result = re.match(r'[A-Z][^ ]*',line) <NEW_LINE> if result: <NEW_LINE> <INDENT> print(result.group()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def Step3(self): <NEW_LINE> <INDENT> with open(self.inputFileName) as txt: <NEW_LINE> <INDENT> for line in txt: <NEW_LINE> <INDENT> if line != '\n': <NEW_LINE> <INDENT> print(re.split(r', |,| ',line)[3]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def Step4(self): <NEW_LINE> <INDENT> with open(self.inputFileName) as txt: <NEW_LINE> <INDENT> lines = txt.read() <NEW_LINE> <DEDENT> print(re.findall(r'\bc\w*\b',lines)) <NEW_LINE> <DEDENT> def Step5(self): <NEW_LINE> <INDENT> with open(self.inputFileName) as txt: <NEW_LINE> <INDENT> lines = txt.read() <NEW_LINE> <DEDENT> print(re.sub(r'peach',r'apple',lines)) | Answers to the regular expression exercises | 6259908399fddb7c1ca63b60 |
class InverseTransform(Transform): <NEW_LINE> <INDENT> def __init__(self, transform): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._transform = transform <NEW_LINE> <DEDENT> def forward(self, inputs, context=None): <NEW_LINE> <INDENT> return self._transform.inverse(inputs, context) <NEW_LINE> <DEDENT> def inverse(self, inputs, context=None): <NEW_LINE> <INDENT> return self._transform(inputs, context) | Creates a transform that is the inverse of a given transform. | 625990837d847024c075deea |
class ListenerMixin(object): <NEW_LINE> <INDENT> _EVENTS = tuple() <NEW_LINE> _EVENT_PARSER = Xlib.protocol.rq.EventField(None) <NEW_LINE> def _run(self): <NEW_LINE> <INDENT> self._display_stop = Xlib.display.Display() <NEW_LINE> self._display_record = Xlib.display.Display() <NEW_LINE> with display_manager(self._display_stop) as dm: <NEW_LINE> <INDENT> self._context = dm.record_create_context( 0, [Xlib.ext.record.AllClients], [{ 'core_requests': (0, 0), 'core_replies': (0, 0), 'ext_requests': (0, 0, 0, 0), 'ext_replies': (0, 0, 0, 0), 'delivered_events': (0, 0), 'device_events': self._EVENTS, 'errors': (0, 0), 'client_started': False, 'client_died': False}]) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._initialize(self._display_stop) <NEW_LINE> self._mark_ready() <NEW_LINE> if self.suppress: <NEW_LINE> <INDENT> with display_manager(self._display_record) as dm: <NEW_LINE> <INDENT> self._suppress_start(dm) <NEW_LINE> <DEDENT> <DEDENT> self._display_record.record_enable_context( self._context, self._handler) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if self.suppress: <NEW_LINE> <INDENT> with display_manager(self._display_stop) as dm: <NEW_LINE> <INDENT> self._suppress_stop(dm) <NEW_LINE> <DEDENT> <DEDENT> self._display_record.record_free_context(self._context) <NEW_LINE> self._display_stop.close() <NEW_LINE> self._display_record.close() <NEW_LINE> <DEDENT> <DEDENT> def _stop_platform(self): <NEW_LINE> <INDENT> if not hasattr(self, '_context'): <NEW_LINE> <INDENT> self.wait() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with display_manager(self._display_stop) as dm: <NEW_LINE> <INDENT> dm.record_disable_context(self._context) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def _suppress_start(self, display): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def _suppress_stop(self, display): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @property <NEW_LINE> def _event_mask(self): <NEW_LINE> <INDENT> return functools.reduce(operator.__or__, self._EVENTS, 0) <NEW_LINE> <DEDENT> @AbstractListener._emitter <NEW_LINE> def _handler(self, events): <NEW_LINE> <INDENT> if not self.running: <NEW_LINE> <INDENT> raise self.StopException() <NEW_LINE> <DEDENT> data = events.data <NEW_LINE> while data and len(data): <NEW_LINE> <INDENT> event, data = self._EVENT_PARSER.parse_binary_value( data, self._display_record.display, None, None) <NEW_LINE> self._handle(self._display_stop, event) <NEW_LINE> <DEDENT> <DEDENT> def _initialize(self, display): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _handle(self, display, event): <NEW_LINE> <INDENT> pass | A mixin for *X* event listeners.
Subclasses should set a value for :attr:`_EVENTS` and implement
:meth:`_handle`. | 6259908397e22403b383ca0c |
class ProgressBar(object): <NEW_LINE> <INDENT> colorList = { "red": 1, "yellow": 3, "green": 2, "magenta": 5, } <NEW_LINE> def __init__(self, number): <NEW_LINE> <INDENT> self.__current = 0 <NEW_LINE> self.__max = number <NEW_LINE> self.__done = {} <NEW_LINE> <DEDENT> def incMax(self, number): <NEW_LINE> <INDENT> self.__max += number <NEW_LINE> <DEDENT> def decCurrent(self): <NEW_LINE> <INDENT> self.__current -= 1 <NEW_LINE> <DEDENT> def incCurrent(self): <NEW_LINE> <INDENT> self.__current += 1 <NEW_LINE> <DEDENT> def display(self, info=""): <NEW_LINE> <INDENT> if not info in self.__done: <NEW_LINE> <INDENT> self.incCurrent() <NEW_LINE> self.__done[info] = True <NEW_LINE> <DEDENT> progress = (self.__current * 100) / self.__max <NEW_LINE> if progress < 35: <NEW_LINE> <INDENT> color = ProgressBar.colorList["red"] <NEW_LINE> <DEDENT> elif progress < 70: <NEW_LINE> <INDENT> color = ProgressBar.colorList["magenta"] <NEW_LINE> <DEDENT> elif progress < 90: <NEW_LINE> <INDENT> color = ProgressBar.colorList["yellow"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> color = ProgressBar.colorList["green"] <NEW_LINE> <DEDENT> if self.__current != 1: <NEW_LINE> <INDENT> sys.stdout.write("\r") <NEW_LINE> <DEDENT> sys.stdout.write( "Generating \033[34m%-10s\033[39m : [\033[3%dm%-60s\033[39m] %3d%%" % (info, color, "#" * (int((float(progress) / 100) * 60)), progress)) <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> def finish(self): <NEW_LINE> <INDENT> sys.stdout.write("\n") | Progress bar definition (used in console mode) | 625990835fc7496912d48ff1 |
class LocalsDictNodeNG(LookupMixIn, NodeNG): <NEW_LINE> <INDENT> def qname(self): <NEW_LINE> <INDENT> if self.parent is None: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> return '%s.%s' % (self.parent.frame().qname(), self.name) <NEW_LINE> <DEDENT> def frame(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def scope(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def _scope_lookup(self, node, name, offset=0): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> stmts = node._filter_stmts(self.locals[name], self, offset) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> stmts = () <NEW_LINE> <DEDENT> if stmts: <NEW_LINE> <INDENT> return self, stmts <NEW_LINE> <DEDENT> if self.parent: <NEW_LINE> <INDENT> pscope = self.parent.scope() <NEW_LINE> if not isinstance(pscope, Function): <NEW_LINE> <INDENT> pscope = pscope.root() <NEW_LINE> <DEDENT> return pscope.scope_lookup(node, name) <NEW_LINE> <DEDENT> return builtin_lookup(name) <NEW_LINE> <DEDENT> def set_local(self, name, stmt): <NEW_LINE> <INDENT> self.locals.setdefault(name, []).append(stmt) <NEW_LINE> <DEDENT> __setitem__ = set_local <NEW_LINE> def _append_node(self, child): <NEW_LINE> <INDENT> self.body.append(child) <NEW_LINE> child.parent = self <NEW_LINE> <DEDENT> def add_local_node(self, child_node, name=None): <NEW_LINE> <INDENT> if name != '__class__': <NEW_LINE> <INDENT> self._append_node(child_node) <NEW_LINE> <DEDENT> self.set_local(name or child_node.name, child_node) <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self.locals[item][0] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.keys()) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self.locals.keys() <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> return [self[key] for key in self.keys()] <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> return zip(self.keys(), self.values()) <NEW_LINE> <DEDENT> def has_key(self, name): <NEW_LINE> <INDENT> return self.locals.has_key(name) <NEW_LINE> <DEDENT> __contains__ = has_key | this class provides locals handling common to Module, Function
and Class nodes, including a dict-like interface for direct access
to locals information | 62599083d8ef3951e32c8be5 |
class LockError(Exception): <NEW_LINE> <INDENT> pass | Raised for any errors related to locks. | 625990834428ac0f6e65a03c |
class StupidGit(object): <NEW_LINE> <INDENT> def abuse(self, a, b, c): <NEW_LINE> <INDENT> self.argue(a, b, c) <NEW_LINE> <DEDENT> def argue(self, a, b, c): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> spam(a, b, c) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.ex = sys.exc_info() <NEW_LINE> self.tr = inspect2.trace() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def contradiction(self): <NEW_LINE> <INDENT> pass | A longer,
indented
docstring. | 625990837b180e01f3e49deb |
class TrainClassificatorView(grok.View): <NEW_LINE> <INDENT> grok.context(IAMQPErrorClassificationFolder) <NEW_LINE> grok.require('zope2.View') <NEW_LINE> grok.name('train-classificator') <NEW_LINE> def __call__(self): <NEW_LINE> <INDENT> context = self.context.aq_inner <NEW_LINE> form = UploadForm(context, self.request) <NEW_LINE> view = UploadFormWrapper(context, self.request) <NEW_LINE> view = view.__of__(context) <NEW_LINE> view.form_instance = form <NEW_LINE> self.form_wrapper = view <NEW_LINE> return super(TrainClassificatorView,self).__call__() | sample view class | 6259908366673b3332c31f0d |
class GetSensitiveOpSettingRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(GetSensitiveOpSettingRequest, self).__init__( '/regions/{regionId}/sensitiveOpSetting', 'GET', header, version) <NEW_LINE> self.parameters = parameters | Get operation protection settings information | 6259908326068e7796d4e450 |
class TestConnectionManagerTimings(Base): <NEW_LINE> <INDENT> @defer.inlineCallbacks <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> yield super(TestConnectionManagerTimings, self).setUp() <NEW_LINE> self.sm.connection.handshake_timeout = 0 <NEW_LINE> <DEDENT> def check(self, n_from, event, result): <NEW_LINE> <INDENT> self.sm.state = getattr(StateManager, n_from) <NEW_LINE> self.sm.handle_default(event) <NEW_LINE> return self.wait_event(result) <NEW_LINE> <DEDENT> def test_handshaketimeout_checkversion(self): <NEW_LINE> <INDENT> self.sm.connection.state = ConnectionManager.WU_WN <NEW_LINE> return self.check('READY', 'SYS_CONNECTION_MADE', 'SYS_HANDSHAKE_TIMEOUT') <NEW_LINE> <DEDENT> def test_handshaketimeout_setcapabilities(self): <NEW_LINE> <INDENT> return self.check('CHECK_VERSION', 'SYS_PROTOCOL_VERSION_OK', 'SYS_HANDSHAKE_TIMEOUT') <NEW_LINE> <DEDENT> def test_handshaketimeout_authenticate(self): <NEW_LINE> <INDENT> return self.check('SET_CAPABILITIES', 'SYS_SET_CAPABILITIES_OK', 'SYS_HANDSHAKE_TIMEOUT') <NEW_LINE> <DEDENT> def test_waiting_triggers(self): <NEW_LINE> <INDENT> self.sm.connection.waiting_timeout = 0 <NEW_LINE> return self.check('READY', 'SYS_CONNECTION_FAILED', 'SYS_CONNECTION_RETRY') <NEW_LINE> <DEDENT> def test_waiting_behaviour(self): <NEW_LINE> <INDENT> timeouts = [2, 4, 8, 16, 32, 64, 120, 120, 120] <NEW_LINE> self.sm.connection.waiting_timeout = 1 <NEW_LINE> for t in timeouts: <NEW_LINE> <INDENT> self.sm.state = StateManager.READY <NEW_LINE> self.sm.handle_default('SYS_CONNECTION_FAILED') <NEW_LINE> self.assertEqual(self.sm.connection.waiting_timeout, t) | Times handled by ConnectionManager. | 62599083796e427e53850289 |
class RefundOverflowException(PrException): <NEW_LINE> <INDENT> error_code = 61 <NEW_LINE> error_msg = "the sum of refunds for a payment may not exceed the " + "payment's original amount" | refund overflow | 62599083099cdd3c63676181 |
class Key(Validator): <NEW_LINE> <INDENT> tag = 'key' <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Key, self).__init__(*args, **kwargs) <NEW_LINE> self.key_name = args[0] <NEW_LINE> <DEDENT> def _is_valid(self, value): <NEW_LINE> <INDENT> if self.key_name in value: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def fail(self, value): <NEW_LINE> <INDENT> return '\'%s\' is missing from the map.' % (self.key_name) | Custom Key validator | 625990833317a56b869bf2cb |
class Data(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> filename = os.path.join(_data_dir, 'snapshot_*.json') <NEW_LINE> self.filenames = glob.glob(filename) + glob.glob(filename + '.gz') <NEW_LINE> self.data = None <NEW_LINE> <DEDENT> def load(self, n_files=None): <NEW_LINE> <INDENT> d = list() <NEW_LINE> filenames = self.filenames <NEW_LINE> if n_files is not None: <NEW_LINE> <INDENT> filenames = filenames[-n_files:] <NEW_LINE> <DEDENT> for f in filenames: <NEW_LINE> <INDENT> print('reading {}'.format(f)) <NEW_LINE> opener = open <NEW_LINE> if f.endswith('.gz'): <NEW_LINE> <INDENT> opener = gzip.open <NEW_LINE> <DEDENT> d.append(json.load(opener(f))) <NEW_LINE> <DEDENT> self.data = d <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> out = ['{} entries:'.format(len(self.filenames))] <NEW_LINE> if self.data is not None: <NEW_LINE> <INDENT> for k in self.data[0]: <NEW_LINE> <INDENT> out += ['{} {} entries '.format(k, len(self.data[0][k]))] <NEW_LINE> <DEDENT> <DEDENT> return '\n'.join(out) | Processing the json snapshots. There are the following records in snapshot: ['curr', 'instr', 'orderbook', 'lasttrades', 'summary']
Each of these is a per-instrument record.
summary: can be flattened, only need to look up strike and maturity from instr. This should be the starting point.
orderbook:
lasttrades: | 625990833617ad0b5ee07c5f |
class Version: <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.__dict__.update(kwargs) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> keys = sorted(self.__dict__) <NEW_LINE> items = ("{}={!r}".format(k, self.__dict__[k]) for k in keys) <NEW_LINE> return "{}({})".format(type(self).__name__, ", ".join(items)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ | Helper class for version info. | 62599083283ffb24f3cf53b0 |
class Inline(Raw, ResourceBound): <NEW_LINE> <INDENT> def __init__(self, resource, patchable=False, **kwargs): <NEW_LINE> <INDENT> self.target_reference = ResourceReference(resource) <NEW_LINE> self.patchable = patchable <NEW_LINE> def schema(): <NEW_LINE> <INDENT> def _response_schema(): <NEW_LINE> <INDENT> if self.resource == self.target: <NEW_LINE> <INDENT> return {"$ref": "#"} <NEW_LINE> <DEDENT> return {"$ref": self.resource.routes["schema"].rule_factory(self.resource)} <NEW_LINE> <DEDENT> if not not self.patchable: <NEW_LINE> <INDENT> return _response_schema() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return _response_schema(), self.target.schema.patchable.request <NEW_LINE> <DEDENT> <DEDENT> super(Inline, self).__init__(schema, **kwargs) <NEW_LINE> <DEDENT> def rebind(self, resource): <NEW_LINE> <INDENT> if self.target_reference.value == 'self': <NEW_LINE> <INDENT> return self.__class__( 'self', patchable=self.patchable, default=self.default, attribute=self.attribute, nullable=self.nullable, title=self.title, description=self.description, io=self.io ).bind(resource) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> <DEDENT> @cached_property <NEW_LINE> def target(self): <NEW_LINE> <INDENT> return self.target_reference.resolve(self.resource) <NEW_LINE> <DEDENT> def format(self, item): <NEW_LINE> <INDENT> return self.target.schema.format(item) <NEW_LINE> <DEDENT> def convert(self, item): <NEW_LINE> <INDENT> return self.target.schema.convert(item, patchable=self.patchable) | Formats and converts items in a :class:`ModelResource` using the resource's ``schema``.
:param resource: a resource reference as in :class:`ToOne`
:param bool patchable: whether to allow partial objects | 62599083656771135c48adb8 |
class VehicleDeleteView(CustomUserMixin, DeleteView): <NEW_LINE> <INDENT> model = Vehicle <NEW_LINE> template_name = 'buildings/administrative/vehicles/vehicle_delete_confirm.html' <NEW_LINE> def test_func(self): <NEW_LINE> <INDENT> return BuildingPermissions.can_edit_unit( user=self.request.user, building=self.get_object().unit.building, ) <NEW_LINE> <DEDENT> def get_object(self, queryset=None): <NEW_LINE> <INDENT> return get_object_or_404( Vehicle, unit_id=self.kwargs['u_pk'], pk=self.kwargs['v_pk'], ) <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> return reverse( 'buildings:unit_detail', args=[self.kwargs['b_pk'], self.kwargs['u_pk']] ) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> context['unit'] = self.get_object().unit <NEW_LINE> context['active_units'] = True <NEW_LINE> context['building'] = self.get_object().unit.building <NEW_LINE> return context <NEW_LINE> <DEDENT> def delete(self, request, *args, **kwargs): <NEW_LINE> <INDENT> messages.success( self.request, _('Vehículo eliminado exitosamente.') ) <NEW_LINE> return super().delete(request, *args, **kwargs) | Vehicle delete view. Users are redirected to a view
in which they will be asked to confirm the
permanent deletion of a vehicle. | 6259908392d797404e3898e4 |
class ShrunkCovariance(EmpiricalCovariance): <NEW_LINE> <INDENT> def __init__(self, store_precision=True, shrinkage=0.1): <NEW_LINE> <INDENT> self.store_precision = store_precision <NEW_LINE> self.shrinkage = shrinkage <NEW_LINE> <DEDENT> def fit(self, X, assume_centered=False): <NEW_LINE> <INDENT> empirical_cov = empirical_covariance(X, assume_centered=assume_centered) <NEW_LINE> covariance = shrunk_covariance(empirical_cov, self.shrinkage) <NEW_LINE> self._set_estimates(covariance) <NEW_LINE> return self | Covariance estimator with shrinkage
Parameters
----------
store_precision : bool
Specify if the estimated precision is stored
shrinkage: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Attributes
----------
`covariance_` : array-like, shape (n_features, n_features)
Estimated covariance matrix
`precision_` : array-like, shape (n_features, n_features)
Estimated pseudo inverse matrix.
(stored only if store_precision is True)
`shrinkage`: float, 0 <= shrinkage <= 1
coefficient in the convex combination used for the computation
of the shrunk estimate.
Notes
-----
The regularized covariance is given by
(1 - shrinkage)*cov
+ shrinkage*mu*np.identity(n_features)
where mu = trace(cov) / n_features | 625990837c178a314d78e972 |
class SoftDeadlineExceeded(Exception): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(SoftDeadlineExceeded, self).__init__( 'Operation exceeded deadline.') | Raised when an overall client operation takes too long. | 62599083167d2b6e312b831e |
class FrontmatterExtractor(FrontmatterComposerMixin): <NEW_LINE> <INDENT> def extract(self, source_file): <NEW_LINE> <INDENT> data, source = self.get_data(source_file) <NEW_LINE> return data | Extract frontmatter from a source file. | 62599083aad79263cf4302cc |
class ItemRuleBuilderLower(ItemRuleBuilder): <NEW_LINE> <INDENT> def __init__(self, tokenizer=None, keyword_processor=None, kp_lower=None): <NEW_LINE> <INDENT> super(ItemRuleBuilderLower, self).__init__(tokenizer, keyword_processor) <NEW_LINE> self.kp_lower = self.build_kp(case_sensitive=False) if kp_lower is None else kp_lower <NEW_LINE> <DEDENT> def _keyword_match_lower(self, claim): <NEW_LINE> <INDENT> return self._keyword_match(claim, custom_kp=self.kp_lower) <NEW_LINE> <DEDENT> def lower_match_rule(self): <NEW_LINE> <INDENT> item = self.item <NEW_LINE> if len(item['prioritized_docids']) < 1: <NEW_LINE> <INDENT> cts = [c for c in item['claim_tokens'] if c not in STOPWORDS] <NEW_LINE> claim = ' '.join(cts) <NEW_LINE> finded_keys = self._keyword_match_lower(claim) <NEW_LINE> item['prioritized_docids'].extend(list(finded_keys)) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def google_disambiguious_rule(self): <NEW_LINE> <INDENT> item = self.item <NEW_LINE> return self <NEW_LINE> <DEDENT> def more_rules(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> @property <NEW_LINE> def rules(self): <NEW_LINE> <INDENT> return lambda x: self.exact_match_rule(x) .eliminate_articles_rule() .docid_based_rule() .singularize_rule() .lower_match_rule() | docstring for ItemRuleBuilderLower | 62599083283ffb24f3cf53b2 |
class ContextsServicer(object): <NEW_LINE> <INDENT> def ListContexts(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def GetContext(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def CreateContext(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def UpdateContext(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def DeleteContext(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def DeleteAllContexts(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') | Manages contexts.
Refer to the [Dialogflow documentation](https://dialogflow.com/docs/contexts)
for more details about contexts.
| 62599083ec188e330fdfa3bc |
class InputFnOps(collections.namedtuple('InputFnOps', ['features', 'labels', 'default_inputs'])): <NEW_LINE> <INDENT> pass | A return type for an input_fn (deprecated).
THIS CLASS IS DEPRECATED. Please use tf.estimator.export.ServingInputReceiver
instead.
This return type is currently only supported for serving input_fn.
Training and eval input_fn should return a `(features, labels)` tuple.
The expected return values are:
features: A dict of string to `Tensor` or `SparseTensor`, specifying the
features to be passed to the model.
labels: A `Tensor`, `SparseTensor`, or a dict of string to `Tensor` or
`SparseTensor`, specifying labels for training or eval. For serving, set
`labels` to `None`.
default_inputs: a dict of string to `Tensor` or `SparseTensor`, specifying
the input placeholders (if any) that this input_fn expects to be fed.
Typically, this is used by a serving input_fn, which expects to be fed
serialized `tf.Example` protos. | 6259908397e22403b383ca0f |
class ComplexConv1d(_ComplexConvNd): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1): <NEW_LINE> <INDENT> kernel_size = single(kernel_size) <NEW_LINE> stride = single(stride) <NEW_LINE> padding = padding <NEW_LINE> dilation = single(dilation) <NEW_LINE> super(ComplexConv1d, self).__init__(in_channels, out_channels, kernel_size, stride, padding, dilation, False, single(0)) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> if self.padding: <NEW_LINE> <INDENT> x = F.pad(x, (self.padding, self.padding), 'reflect') <NEW_LINE> <DEDENT> real_part = F.conv1d(x, self.A, None, stride=self.stride, padding=0, dilation=self.dilation, groups=2) <NEW_LINE> spl = self.in_channels // 2 <NEW_LINE> weight_B = torch.cat([self.B[:spl].data * (-1), self.B[spl:].data]) <NEW_LINE> idea_part = F.conv1d(x, weight_B, None, stride=self.stride, padding=0, dilation=self.dilation, groups=2) <NEW_LINE> return real_part + idea_part | Complex Convolution 1d | 62599083656771135c48adb9 |
class FlowList(ListResource): <NEW_LINE> <INDENT> def __init__(self, version): <NEW_LINE> <INDENT> super(FlowList, self).__init__(version) <NEW_LINE> self._solution = {} <NEW_LINE> self._uri = '/Flows'.format(**self._solution) <NEW_LINE> <DEDENT> def stream(self, limit=None, page_size=None): <NEW_LINE> <INDENT> limits = self._version.read_limits(limit, page_size) <NEW_LINE> page = self.page(page_size=limits['page_size'], ) <NEW_LINE> return self._version.stream(page, limits['limit'], limits['page_limit']) <NEW_LINE> <DEDENT> def list(self, limit=None, page_size=None): <NEW_LINE> <INDENT> return list(self.stream(limit=limit, page_size=page_size, )) <NEW_LINE> <DEDENT> def page(self, page_token=values.unset, page_number=values.unset, page_size=values.unset): <NEW_LINE> <INDENT> params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, }) <NEW_LINE> response = self._version.page( 'GET', self._uri, params=params, ) <NEW_LINE> return FlowPage(self._version, response, self._solution) <NEW_LINE> <DEDENT> def get_page(self, target_url): <NEW_LINE> <INDENT> response = self._version.domain.twilio.request( 'GET', target_url, ) <NEW_LINE> return FlowPage(self._version, response, self._solution) <NEW_LINE> <DEDENT> def get(self, sid): <NEW_LINE> <INDENT> return FlowContext(self._version, sid=sid, ) <NEW_LINE> <DEDENT> def __call__(self, sid): <NEW_LINE> <INDENT> return FlowContext(self._version, sid=sid, ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Twilio.Preview.Studio.FlowList>' | PLEASE NOTE that this class contains preview products that are subject
to change. Use them with caution. If you currently do not have developer
preview access, please contact [email protected]. | 62599083adb09d7d5dc0c06c |
class OneTimeBootDevice(ManagedObject): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ManagedObject.__init__(self, "OneTimeBootDevice") <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def class_id(): <NEW_LINE> <INDENT> return "oneTimeBootDevice" <NEW_LINE> <DEDENT> DEVICE = "Device" <NEW_LINE> DN = "Dn" <NEW_LINE> NAME = "Name" <NEW_LINE> RN = "Rn" <NEW_LINE> STATUS = "Status" <NEW_LINE> CONST_DEVICE_HUU = "huu" <NEW_LINE> CONST_DEVICE_HV = "hv" <NEW_LINE> CONST_DEVICE_NONE = "none" <NEW_LINE> CONST_DEVICE_SCU = "scu" | This class contains the relevant properties and constant supported by this MO. | 625990833346ee7daa3383eb |
class PForTestCase(test.TestCase): <NEW_LINE> <INDENT> def _run_targets(self, targets1, targets2=None, run_init=True): <NEW_LINE> <INDENT> targets1 = nest.flatten(targets1) <NEW_LINE> targets2 = ([] if targets2 is None else nest.flatten(targets2)) <NEW_LINE> assert len(targets1) == len(targets2) or not targets2 <NEW_LINE> if run_init: <NEW_LINE> <INDENT> init = variables.global_variables_initializer() <NEW_LINE> self.evaluate(init) <NEW_LINE> <DEDENT> return self.evaluate(targets1 + targets2) <NEW_LINE> <DEDENT> def run_and_assert_equal(self, targets1, targets2, rtol=1e-4, atol=1e-5): <NEW_LINE> <INDENT> outputs = self._run_targets(targets1, targets2) <NEW_LINE> outputs = nest.flatten(outputs) <NEW_LINE> n = len(outputs) // 2 <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> if outputs[i + n].dtype != np.object: <NEW_LINE> <INDENT> self.assertAllClose(outputs[i + n], outputs[i], rtol=rtol, atol=atol) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.assertAllEqual(outputs[i + n], outputs[i]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _test_loop_fn(self, loop_fn, iters, parallel_iterations=None, rtol=1e-4, atol=1e-5): <NEW_LINE> <INDENT> t1 = pfor_control_flow_ops.pfor(loop_fn, iters=iters, parallel_iterations=parallel_iterations) <NEW_LINE> loop_fn_dtypes = nest.map_structure(lambda x: x.dtype, t1) <NEW_LINE> t2 = pfor_control_flow_ops.for_loop(loop_fn, loop_fn_dtypes, iters=iters, parallel_iterations=parallel_iterations) <NEW_LINE> self.run_and_assert_equal(t1, t2, rtol=rtol, atol=atol) | Base class for test cases. | 62599083167d2b6e312b831f |
class ExecutionHandler(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractmethod <NEW_LINE> def execute_order(self, event): <NEW_LINE> <INDENT> raise NotImplementedError("Abstract Method supports no implement.") | The ExecutionHandler abstract class handles the interaction between a set of order objects generated by a Portfolio and the ultimate set of Fill objects that actually occur in the market.
The handlers can be used to subclass simulated brokerages or live brokerages, with identical interfaces. This allows strategies to be backtested in a very similar manner to the live trading engine. | 62599083dc8b845886d550cd |
class RedHatLdap(Ldap, RedHatPlugin): <NEW_LINE> <INDENT> packages = ('openldap', 'nss-pam-ldapd') <NEW_LINE> files = ('/etc/ldap.conf', '/etc/pam_ldap.conf') <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> super(RedHatLdap, self).setup() <NEW_LINE> self.add_copy_specs([ "/etc/openldap", "/etc/nslcd.conf", "/etc/pam_ldap.conf" ]) <NEW_LINE> <DEDENT> def postproc(self): <NEW_LINE> <INDENT> self.do_file_sub("/etc/nslcd.conf", r"(\s*bindpw\s*)\S+", r"\1********") <NEW_LINE> self.do_file_sub("/etc/pam_ldap.conf", r"(\s*bindpw\s*)\S+", r"\1********") | LDAP related information for RedHat based distribution
| 625990835fdd1c0f98e5fa93 |
class TestDisallowLongAbbreviationAllowsShortGroupingPrefix(ParserTestCase): <NEW_LINE> <INDENT> parser_signature = Sig(prefix_chars='+', allow_abbrev=False) <NEW_LINE> argument_signatures = [ Sig('+r'), Sig('+c', action='count'), ] <NEW_LINE> failures = ['+r', '+c +r'] <NEW_LINE> successes = [ ('', NS(r=None, c=None)), ('+ra', NS(r='a', c=None)), ('+rcc', NS(r='cc', c=None)), ('+cc', NS(r=None, c=2)), ('+cc +ra', NS(r='a', c=2)), ('+ccrcc', NS(r='cc', c=2)), ] | Short option grouping works with custom prefix and allow_abbrev=False | 6259908371ff763f4b5e92c0 |
class Point(object): <NEW_LINE> <INDENT> def __init__(self, x, y, z=0): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.z = z <NEW_LINE> <DEDENT> def get_x(self): <NEW_LINE> <INDENT> return self.x <NEW_LINE> <DEDENT> def get_y(self): <NEW_LINE> <INDENT> return self.y <NEW_LINE> <DEDENT> def get_z(self): <NEW_LINE> <INDENT> return self.z <NEW_LINE> <DEDENT> def set_x(self, x): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> return self <NEW_LINE> <DEDENT> def set_y(self, y): <NEW_LINE> <INDENT> self.y = y <NEW_LINE> return self <NEW_LINE> <DEDENT> def set_z(self, z): <NEW_LINE> <INDENT> self.z = z <NEW_LINE> return self <NEW_LINE> <DEDENT> def dist_to(self, other_point): <NEW_LINE> <INDENT> return math.sqrt( pow(self.x - other_point.x, 2) + pow(self.y - other_point.y, 2) + pow(self.z - other_point.z, 2)) <NEW_LINE> <DEDENT> def to_unit_vector(self): <NEW_LINE> <INDENT> mag = self.dist_to(Point(0, 0, 0)) <NEW_LINE> if mag == 0: <NEW_LINE> <INDENT> return Point(0, 0, 0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Point(self.x / mag, self.y / mag, self.z / mag) <NEW_LINE> <DEDENT> <DEDENT> def to_list(self): <NEW_LINE> <INDENT> return [self.x, self.y, self.z] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "X: {0}, Y: {1}, Z: {2}".format(self.x, self.y, self.z) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Point({0}, {1}, {2})".format(self.x, self.y, self.z) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(str(self)) <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return Point(self.x + other.x, self.y + other.y, self.z + other.z) <NEW_LINE> <DEDENT> def __mult__(self, scalar): <NEW_LINE> <INDENT> return Point(scalar * self.x, scalar * self.y, scalar * self.z) <NEW_LINE> <DEDENT> def __eq__(self, val): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return val.x == self.x and val.y == self.y and val.z == self.z <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False | Simple geometric point class. That is all. | 625990834428ac0f6e65a042 |
class X509(certificate.Certificate): <NEW_LINE> <INDENT> def __init__(self, data, name=None, created=None, id=None): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> super().__init__(name=name, created=created, id=id) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def managed_type(cls): <NEW_LINE> <INDENT> return "certificate" <NEW_LINE> <DEDENT> @property <NEW_LINE> def format(self): <NEW_LINE> <INDENT> return "X.509" <NEW_LINE> <DEDENT> def get_encoded(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, X509): <NEW_LINE> <INDENT> return (self._data == other._data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> result = self.__eq__(other) <NEW_LINE> return not result | This class represents X.509 certificates. | 6259908392d797404e3898e6 |
class BlueprintOptionsFlowHandler(config_entries.OptionsFlow): <NEW_LINE> <INDENT> def __init__(self, config_entry: ConfigEntry): <NEW_LINE> <INDENT> self.config_entry = config_entry <NEW_LINE> self.options = dict(config_entry.options) <NEW_LINE> <DEDENT> async def async_step_init(self, user_input: Optional[ConfigType] = None): <NEW_LINE> <INDENT> return await self.async_step_user() <NEW_LINE> <DEDENT> async def async_step_user(self, user_input: Optional[ConfigType] = None): <NEW_LINE> <INDENT> if user_input is not None: <NEW_LINE> <INDENT> self.options.update(user_input) <NEW_LINE> return await self._update_options() <NEW_LINE> <DEDENT> return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(x, default=self.options.get(x, True)): bool for x in sorted(PLATFORMS) } ), ) <NEW_LINE> <DEDENT> async def _update_options(self): <NEW_LINE> <INDENT> return self.async_create_entry( title=self.config_entry.data.get(CONF_USERNAME), data=self.options ) | Blueprint config flow options handler. | 62599083adb09d7d5dc0c06e |
class AbortException(Exception): <NEW_LINE> <INDENT> pass | This exception is used for when the user wants to quit algorithms mid-way.
The `AbortException` can for instance be sent by pygame input, and caught
by whatever is running the algorithm. | 62599083283ffb24f3cf53b5 |
class TestSystem(unittest.TestCase): <NEW_LINE> <INDENT> BOOK_EXAMPLE = [[1, -2, 11, 3], [1, 3, -2, -5], [1, -1/1, -3, 8/2], [1, 2, 0, -3]] <NEW_LINE> BANERJEE2 = [[1, -2, 11, 3], [1, 3, -2, -5], [1, -1, -3, 4], [1, 4, 0, -6]] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.system = System(TestSystem.BOOK_EXAMPLE) <NEW_LINE> <DEDENT> def test_can_create(self): <NEW_LINE> <INDENT> assert self.system != None <NEW_LINE> <DEDENT> def test_solve(self): <NEW_LINE> <INDENT> self.system.dump() <NEW_LINE> sol = self.system.solve() <NEW_LINE> sol.dump() <NEW_LINE> sol.as_system() <NEW_LINE> <DEDENT> def test_to_list_book(self): <NEW_LINE> <INDENT> assert self.system.to_list() == TestSystem.BOOK_EXAMPLE, "array not recovered" <NEW_LINE> <DEDENT> def test_to_list_banerjee2(self): <NEW_LINE> <INDENT> system2 = System(TestSystem.BANERJEE2) <NEW_LINE> assert system2.to_list() == TestSystem.BANERJEE2, "array not recovered correctly" | this is not a true unit test set, more a driver to check that it works | 625990834a966d76dd5f09fa |
class SubunitLogObserver(logobserver.LogLineObserver, TestResult): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> logobserver.LogLineObserver.__init__(self) <NEW_LINE> TestResult.__init__(self) <NEW_LINE> try: <NEW_LINE> <INDENT> from subunit import TestProtocolServer, PROGRESS_CUR, PROGRESS_SET <NEW_LINE> from subunit import PROGRESS_PUSH, PROGRESS_POP <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> raise ImportError("subunit is not importable, but is required for " "SubunitLogObserver support.") <NEW_LINE> <DEDENT> self.PROGRESS_CUR = PROGRESS_CUR <NEW_LINE> self.PROGRESS_SET = PROGRESS_SET <NEW_LINE> self.PROGRESS_PUSH = PROGRESS_PUSH <NEW_LINE> self.PROGRESS_POP = PROGRESS_POP <NEW_LINE> self.warningio = StringIO() <NEW_LINE> self.protocol = TestProtocolServer(self, self.warningio) <NEW_LINE> self.skips = [] <NEW_LINE> self.seen_tags = set() <NEW_LINE> <DEDENT> def outLineReceived(self, line): <NEW_LINE> <INDENT> self.protocol.lineReceived(line + '\n') <NEW_LINE> <DEDENT> def errLineReceived(self, line): <NEW_LINE> <INDENT> self.protocol.lineReceived(line + '\n') <NEW_LINE> <DEDENT> def stopTest(self, test): <NEW_LINE> <INDENT> TestResult.stopTest(self, test) <NEW_LINE> self.step.setProgress('tests', self.testsRun) <NEW_LINE> <DEDENT> def addSuccess(self, test): <NEW_LINE> <INDENT> TestResult.addSuccess(self, test) <NEW_LINE> self.addAResult(test, SUCCESS, 'SUCCESS') <NEW_LINE> <DEDENT> def addSkip(self, test, detail): <NEW_LINE> <INDENT> if hasattr(TestResult, 'addSkip'): <NEW_LINE> <INDENT> TestResult.addSkip(self, test, detail) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.skips.append((test, detail)) <NEW_LINE> <DEDENT> self.addAResult(test, SKIPPED, 'SKIPPED', detail) <NEW_LINE> <DEDENT> def addError(self, test, err): <NEW_LINE> <INDENT> TestResult.addError(self, test, err) <NEW_LINE> self.issue(test, err) <NEW_LINE> <DEDENT> def addFailure(self, test, err): <NEW_LINE> <INDENT> TestResult.addFailure(self, test, err) <NEW_LINE> self.issue(test, err) <NEW_LINE> <DEDENT> def addAResult(self, test, result, text, log=""): <NEW_LINE> <INDENT> tr = aTestResult(tuple(test.id().split('.')), result, text, log) <NEW_LINE> self.step.build.build_status.addTestResult(tr) <NEW_LINE> <DEDENT> def issue(self, test, err): <NEW_LINE> <INDENT> self.addAResult(test, FAILURE, 'FAILURE', err) <NEW_LINE> self.step.setProgress('tests failed', len(self.failures) + len(self.errors)) <NEW_LINE> <DEDENT> expectedTests = 0 <NEW_LINE> contextLevel = 0 <NEW_LINE> def tags(self, new_tags, gone_tags): <NEW_LINE> <INDENT> self.seen_tags.update(new_tags) | Observe a log that may contain subunit output.
This class extends TestResult to receive the callbacks from the subunit
parser in the most direct fashion. | 6259908360cbc95b06365af6 |
class ConnectionProxy(object): <NEW_LINE> <INDENT> def execute(self, conn, execute, clauseelement, *multiparams, **params): <NEW_LINE> <INDENT> return execute(clauseelement, *multiparams, **params) <NEW_LINE> <DEDENT> def cursor_execute(self, execute, cursor, statement, parameters, context, executemany): <NEW_LINE> <INDENT> return execute(cursor, statement, parameters, context) | Allows interception of statement execution by Connections.
Either or both of the ``execute()`` and ``cursor_execute()``
may be implemented to intercept compiled statement and
cursor level executions, e.g.::
    class MyProxy(ConnectionProxy):
        def execute(self, conn, execute, clauseelement, *multiparams, **params):
            print "compiled statement:", clauseelement
            return execute(clauseelement, *multiparams, **params)
        def cursor_execute(self, execute, cursor, statement, parameters, context, executemany):
            print "raw statement:", statement
            return execute(cursor, statement, parameters, context)
The ``execute`` argument is a function that will fulfill the default
execution behavior for the operation. The signature illustrated
in the example should be used.
The proxy is installed into an :class:`~sqlalchemy.engine.Engine` via
the ``proxy`` argument::
    e = create_engine('someurl://', proxy=MyProxy()) | 62599083dc8b845886d550cf |
class FollowUser(object): <NEW_LINE> <INDENT> def __init__(self, Repository): <NEW_LINE> <INDENT> self.repository = Repository <NEW_LINE> <DEDENT> def execute(self, User, userToFollow): <NEW_LINE> <INDENT> user = self.repository.getUserById(User) <NEW_LINE> try: <NEW_LINE> <INDENT> uToFollow = self.repository.getUserById(userToFollow) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise Exception(f'The user {nickname} does not exist') <NEW_LINE> <DEDENT> if user.nickname == uToFollow.nickname: <NEW_LINE> <INDENT> raise Exception('you can not follow yourself') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.repository.setFollowing(user, uToFollow) | docstring for followTo. | 625990835fdd1c0f98e5fa95 |
class CapitalizeStr: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> from weakref import WeakKeyDictionary <NEW_LINE> self._instance_data = WeakKeyDictionary() <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> return self._instance_data[instance] <NEW_LINE> <DEDENT> def __set__(self, instance, value: str): <NEW_LINE> <INDENT> if not isinstance(value, str): <NEW_LINE> <INDENT> raise ValueError("Value {} is not str".format(value)) <NEW_LINE> <DEDENT> self._instance_data[instance] = value.capitalize() <NEW_LINE> <DEDENT> def __delete__(self): <NEW_LINE> <INDENT> raise AttributeError("Cannot delete attribute") | Descriptor | 62599083ad47b63b2c5a9367 |
class Options: <NEW_LINE> <INDENT> def __init__(self, rs): <NEW_LINE> <INDENT> self.rs = rs <NEW_LINE> self.selectedOpts = set() <NEW_LINE> <DEDENT> def selection(self): <NEW_LINE> <INDENT> return self.selectedOpts <NEW_LINE> <DEDENT> def toggle(self, option): <NEW_LINE> <INDENT> if option not in self.selectedOpts: <NEW_LINE> <INDENT> dfsPath = list(self.rs.forwardDfs(option)) <NEW_LINE> for dfsOpt in dfsPath: <NEW_LINE> <INDENT> self.selectedOpts.add(dfsOpt) <NEW_LINE> conflicts = self.rs.conflicts[dfsOpt] <NEW_LINE> for conflict in conflicts: <NEW_LINE> <INDENT> self.__deselectOption(conflict) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.__deselectOption(option) <NEW_LINE> <DEDENT> <DEDENT> def __deselectOption(self, option): <NEW_LINE> <INDENT> reverseDfsPath = list(self.rs.reverseDfs(option)) <NEW_LINE> for dfsOpt in reverseDfsPath: <NEW_LINE> <INDENT> if dfsOpt in self.selectedOpts: <NEW_LINE> <INDENT> self.selectedOpts.remove(dfsOpt) | Options represents the state of RuleSet and enables selection and
deselection of options in the RuleSet. It maintains a set of selected
options.
Attributes:
rs: A RuleSet object to use.
selectedOpts: A set of selected options at any point. | 625990837cff6e4e811b7557 |
class TokenManager(BaseTokenManager): <NEW_LINE> <INDENT> def __init__(self, file_location, log): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.file_location = file_location <NEW_LINE> self.log = log <NEW_LINE> <DEDENT> def post_refresh_callback(self, authorizer): <NEW_LINE> <INDENT> config = configparser.ConfigParser() <NEW_LINE> config.read(self.file_location) <NEW_LINE> config["credentials"]["refresh_token"] = authorizer.refresh_token <NEW_LINE> self.log.append_log("Set refresh token: " + repr(redact_praw(authorizer.refresh_token))) <NEW_LINE> with open(self.file_location, "w") as f: <NEW_LINE> <INDENT> config.write(f) <NEW_LINE> <DEDENT> <DEDENT> def pre_refresh_callback(self, authorizer): <NEW_LINE> <INDENT> if authorizer.refresh_token is None: <NEW_LINE> <INDENT> config = configparser.ConfigParser() <NEW_LINE> config.read(self.file_location) <NEW_LINE> authorizer.refresh_token = config["credentials"]["refresh_token"] <NEW_LINE> self.log.append_log("Loaded refresh token: " + repr(redact_praw(authorizer.refresh_token))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.log.append_log("Already have loaded refresh token: " + repr(redact_praw(authorizer.refresh_token))) | Custom token manager for new Reddit/PRAW refresh token managing | 6259908376e4537e8c3f1095 |
class SortishSampler(Sampler): <NEW_LINE> <INDENT> def __init__(self, data_source, key, bs): <NEW_LINE> <INDENT> self.data_source,self.key,self.bs = data_source,key,bs <NEW_LINE> <DEDENT> def __len__(self): return len(self.data_source) <NEW_LINE> def __iter__(self): <NEW_LINE> <INDENT> idxs = np.random.permutation(len(self.data_source)) <NEW_LINE> sz = self.bs*50 <NEW_LINE> ck_idx = [idxs[i:i+sz] for i in range(0, len(idxs), sz)] <NEW_LINE> sort_idx = np.concatenate([sorted(s, key=self.key, reverse=True) for s in ck_idx]) <NEW_LINE> sz = self.bs <NEW_LINE> ck_idx = [sort_idx[i:i+sz] for i in range(0, len(sort_idx), sz)] <NEW_LINE> max_ck = np.argmax([self.key(ck[0]) for ck in ck_idx]) <NEW_LINE> ck_idx[0],ck_idx[max_ck] = ck_idx[max_ck],ck_idx[0] <NEW_LINE> sort_idx = np.concatenate(np.random.permutation(ck_idx[1:])) <NEW_LINE> sort_idx = np.concatenate((ck_idx[0], sort_idx)) <NEW_LINE> return iter(sort_idx) | Returns an iterator that traverses the data in randomly ordered batches that are approximately the same size.
The max key size batch is always returned in the first call because of pytorch cuda memory allocation sequencing.
Without that max key returned first multiple buffers may be allocated when the first created isn't large enough
to hold the next in the sequence. | 62599083e1aae11d1e7cf59e |
@unique <NEW_LINE> class WebAuthNUserVerificationRequirement(IntEnum): <NEW_LINE> <INDENT> ANY = 0 <NEW_LINE> REQUIRED = 1 <NEW_LINE> PREFERRED = 2 <NEW_LINE> DISCOURAGED = 3 <NEW_LINE> @classmethod <NEW_LINE> def from_string(cls, value): <NEW_LINE> <INDENT> return getattr(cls, value.upper().replace("-", "_")) | Maps to WEBAUTHN_USER_VERIFICATION_REQUIREMENT_*.
https://github.com/microsoft/webauthn/blob/master/webauthn.h#L335 | 625990837047854f46340ecc |
class Image(models.Model): <NEW_LINE> <INDENT> url = models.URLField() <NEW_LINE> picture = models.ImageField(upload_to=FULL, max_length=500) <NEW_LINE> gallery = models.ForeignKey('Gallery', related_name='images') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('Image') <NEW_LINE> verbose_name_plural = _('Images') <NEW_LINE> app_label = 'galleries' <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'%s' % self.url | An Image model for a specific gallery, it represents an image with
multiple thumbnails | 625990835fdd1c0f98e5fa96 |
class OpenWeatherApi: <NEW_LINE> <INDENT> def __init__(self, city_name: str, country_slug: str): <NEW_LINE> <INDENT> self.base_url = settings.OPEN_WEATHER_API_BASE_URL <NEW_LINE> self.city_name = city_name <NEW_LINE> self.country_slug = country_slug <NEW_LINE> <DEDENT> def get_country_city_weather_data(self) -> (int, dict): <NEW_LINE> <INDENT> api_url = f"{self.base_url}weather" <NEW_LINE> request_params = dict( q=f"{self.city_name},{self.country_slug}", appid=settings.OPEN_WEATHER_API_ID_KEY, units=ApiConstants.UNITS.value ) <NEW_LINE> response = requests.get(api_url, params=request_params) <NEW_LINE> if response.status_code == ApiConstants.OK_STATUS_CODE.value: <NEW_LINE> <INDENT> return response.status_code, response.json() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> LogResponse.save_log_response( status_code=response.status_code, response_data=response.json() ) <NEW_LINE> exception_message = f"The API response with status code " f"{response.status_code} and data\n {response.json()}" <NEW_LINE> raise APIRequestException(exception_message) | Class to handle request to open weather map API | 6259908397e22403b383ca15 |
class MajorBlues(MajorPentatonic): <NEW_LINE> <INDENT> @property <NEW_LINE> def intervals(self): <NEW_LINE> <INDENT> intervals = super(MajorBlues, self).intervals <NEW_LINE> intervals.insert(2, Dim(4)) <NEW_LINE> return intervals | Major Blues is the same as the Major Pentatonic but it adds a
diminished 4th and consists of 6 notes. | 625990838a349b6b43687d75 |
class RingBuffer1d(object): <NEW_LINE> <INDENT> def __init__(self, length, dtype=None): <NEW_LINE> <INDENT> self.offset = 0 <NEW_LINE> self._data = np.zeros(length, dtype=dtype) <NEW_LINE> self.stored = 0 <NEW_LINE> <DEDENT> def fill(self, number): <NEW_LINE> <INDENT> self._data.fill(number) <NEW_LINE> self.offset = 0 <NEW_LINE> <DEDENT> def append(self, data): <NEW_LINE> <INDENT> data = np.asarray(data) <NEW_LINE> if len(self._data) == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if len(data) >= len(self._data): <NEW_LINE> <INDENT> self._data[:] = data[-len(self._data):] <NEW_LINE> self.offset = 0 <NEW_LINE> self.stored = len(self._data) <NEW_LINE> <DEDENT> elif len(self._data) - self.offset >= len(data): <NEW_LINE> <INDENT> self._data[self.offset:self.offset + len(data)] = data <NEW_LINE> self.offset = self.offset + len(data) <NEW_LINE> self.stored += len(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._data[self.offset:] = data[:len(self._data) - self.offset] <NEW_LINE> self._data[:len(data) - (len(self._data) - self.offset)] = data[-len(data) + (len(self._data) - self.offset):] <NEW_LINE> self.offset = len(data) - (len(self._data) - self.offset) <NEW_LINE> self.stored += len(data) <NEW_LINE> <DEDENT> self.read = self._read <NEW_LINE> <DEDENT> def read(self, number=None, step=1): <NEW_LINE> <INDENT> logging.error('**READ**') <NEW_LINE> logging.error(np.array([])) <NEW_LINE> return np.array([]) <NEW_LINE> <DEDENT> def _read(self, number=None, step=1): <NEW_LINE> <INDENT> if number == None: <NEW_LINE> <INDENT> number = len(self._data) // step <NEW_LINE> <DEDENT> number *= step <NEW_LINE> assert abs(number) <= len(self._data), 'Number to read*step must be smaller then length' <NEW_LINE> if number < 0: <NEW_LINE> <INDENT> if abs(number) <= self.offset: <NEW_LINE> <INDENT> return self._data[self.offset + number: self.offset: step] <NEW_LINE> <DEDENT> spam = (self.offset - 1) % step <NEW_LINE> return np.concatenate( (self._data[step - spam - 1 + self.offset + number:: step], self._data[spam: self.offset: step])) <NEW_LINE> <DEDENT> if number - (len(self._data) - self.offset) > 0: <NEW_LINE> <INDENT> spam = ((self.offset + number) - self.offset - 1) % step <NEW_LINE> return np.concatenate( (self._data[self.offset:self.offset + number:step], self._data[spam: number - (len(self._data) - self.offset): step])) <NEW_LINE> <DEDENT> logging.error('*********NUM*******') <NEW_LINE> logging.error(number) <NEW_LINE> return self._data[self.offset: self.offset + number: step].copy() | This class implements an array being written in as a ring and that can
be read from continuously, either ending with the newest data or starting with the
oldest. It returns a numpy array copy of the data. | 6259908355399d3f0562802b |
class House(Model, BaseModel): <NEW_LINE> <INDENT> owner = ForeignKey(Merchant) <NEW_LINE> landlord = CharField(max_length=255) <NEW_LINE> landphone = CharField(max_length=20) <NEW_LINE> house_type = CharField(max_length=255) <NEW_LINE> house_address = CharField(max_length=255) <NEW_LINE> house_square = CharField(max_length=255) <NEW_LINE> house_decoration = CharField(max_length=255) <NEW_LINE> house_apartment = CharField(max_length=255) <NEW_LINE> house_twords = CharField(max_length=20) <NEW_LINE> house_floor = CharField(max_length=255) <NEW_LINE> house_year = CharField(max_length=255) <NEW_LINE> house_set = CharField(max_length=255) <NEW_LINE> house_money = CharField(max_length=255) <NEW_LINE> pay_type = CharField(max_length=255) <NEW_LINE> house_deposit = CharField(max_length=255) <NEW_LINE> rent_time = CharField(max_length=255) <NEW_LINE> created_at = DateTimeField(auto_now_add=True) | A Merchant has many houses to rent to renters | 6259908371ff763f4b5e92c4 |
@implementer(IArchiver) <NEW_LINE> class MailArchive: <NEW_LINE> <INDENT> name = 'mail-archive' <NEW_LINE> @staticmethod <NEW_LINE> def list_url(mlist): <NEW_LINE> <INDENT> if mlist.archive_policy is ArchivePolicy.public: <NEW_LINE> <INDENT> return urljoin(config.archiver.mail_archive.base_url, quote(mlist.posting_address)) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def permalink(mlist, msg): <NEW_LINE> <INDENT> if mlist.archive_policy is not ArchivePolicy.public: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> message_id_hash = msg.get('x-message-id-hash') <NEW_LINE> if message_id_hash is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return urljoin(config.archiver.mail_archive.base_url, message_id_hash) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def archive_message(mlist, msg): <NEW_LINE> <INDENT> if mlist.archive_policy is ArchivePolicy.public: <NEW_LINE> <INDENT> config.switchboards['out'].enqueue( msg, listname=mlist.fqdn_listname, recipients=[config.archiver.mail_archive.recipient]) | Public archiver at the Mail-Archive.com.
Messages get archived at http://go.mail-archive.com. | 625990834428ac0f6e65a046 |
class FrameMoveCenter(ActionMoveCenter): <NEW_LINE> <INDENT> def __init__(self, main_frame, smbedit): <NEW_LINE> <INDENT> super(FrameMoveCenter, self).__init__(main_frame, smbedit) <NEW_LINE> self.setTitle("Move Center") <NEW_LINE> v_box = QVBoxLayout() <NEW_LINE> self._gui_move_center_by_block_id(v_box) <NEW_LINE> self._gui_move_center_by_vector(v_box) <NEW_LINE> v_box.addStretch() <NEW_LINE> self.setLayout(v_box) <NEW_LINE> self.button_block_id.pressed.connect(self.button_press_block_id) <NEW_LINE> self.button_vector.pressed.connect(self.button_press_vector) <NEW_LINE> <DEDENT> def _gui_move_center_by_block_id(self, box): <NEW_LINE> <INDENT> self.button_block_id = QPushButton() <NEW_LINE> self.button_block_id.setText("Move to") <NEW_LINE> self.button_block_id.setToolTip("Move to a specific block") <NEW_LINE> self.block_id_combobox = QComboBox() <NEW_LINE> self.block_id_combobox.setStyleSheet("combobox-popup: 0;") <NEW_LINE> self.block_id_combobox.setMaxVisibleItems(10) <NEW_LINE> target_ids = { 123: "Build Block", 94: "Undeathinator", 347: "Shop Module", 56: "Gravity Unit", } <NEW_LINE> for block_id, name in target_ids.items(): <NEW_LINE> <INDENT> self.block_id_combobox.addItem(name, block_id) <NEW_LINE> <DEDENT> grid = QGridLayout() <NEW_LINE> grid.addWidget(self.block_id_combobox, 0, 0) <NEW_LINE> grid.addWidget(self.button_block_id, 1, 0) <NEW_LINE> line = QFrame() <NEW_LINE> line.setFrameShape(QFrame.HLine) <NEW_LINE> grid.addWidget(line) <NEW_LINE> box.addLayout(grid) <NEW_LINE> <DEDENT> def _gui_move_center_by_vector(self, box): <NEW_LINE> <INDENT> self.variable_x = QLineEdit() <NEW_LINE> self.variable_y = QLineEdit() <NEW_LINE> self.variable_z = QLineEdit() <NEW_LINE> self.variable_x.setText('0') <NEW_LINE> self.variable_y.setText('0') <NEW_LINE> self.variable_z.setText('0') <NEW_LINE> self.variable_x.setValidator(QIntValidator()) <NEW_LINE> self.variable_y.setValidator(QIntValidator()) <NEW_LINE> self.variable_z.setValidator(QIntValidator()) <NEW_LINE> self.button_vector = QPushButton() <NEW_LINE> self.button_vector.setText("Move by") <NEW_LINE> self.button_vector.setToolTip("Move in a direction") <NEW_LINE> grid = QGridLayout() <NEW_LINE> grid.addWidget(QLabel("Right"), 0, 0) <NEW_LINE> grid.addWidget(QLabel("Up"), 0, 1) <NEW_LINE> grid.addWidget(QLabel("Forward"), 0, 2) <NEW_LINE> grid.addWidget(self.variable_x, 1, 0) <NEW_LINE> grid.addWidget(self.variable_y, 1, 1) <NEW_LINE> grid.addWidget(self.variable_z, 1, 2) <NEW_LINE> grid.addWidget(self.button_vector, 2, 0, 1, 3) <NEW_LINE> box.addLayout(grid) <NEW_LINE> <DEDENT> def disable(self): <NEW_LINE> <INDENT> self.button_block_id.setEnabled(False) <NEW_LINE> self.button_vector.setEnabled(False) <NEW_LINE> <DEDENT> def enable(self): <NEW_LINE> <INDENT> self.button_block_id.setEnabled(True) <NEW_LINE> self.button_vector.setEnabled(True) | @type button_block_id: QPushButton
@type button_vector: QPushButton | 6259908399fddb7c1ca63b66 |
class SyndicatedFeed(object): <NEW_LINE> <INDENT> __slots__ = [ '__title', '__items', ] <NEW_LINE> def __init__(self, content_stream: io.BytesIO): <NEW_LINE> <INDENT> assert hasattr(content_stream, 'read'), "Input must be a stream." <NEW_LINE> parsed_feed = feedparser.parse(content_stream) <NEW_LINE> if not parsed_feed.get('version', ''): <NEW_LINE> <INDENT> raise Exception("Feed type was not determined, not proceeding further.") <NEW_LINE> <DEDENT> self.__title = None <NEW_LINE> feed = parsed_feed.get('feed', None) <NEW_LINE> if feed: <NEW_LINE> <INDENT> self.__title = feed.get('title', None) <NEW_LINE> <DEDENT> self.__items = [] <NEW_LINE> for raw_entry in parsed_feed.get('entries', []): <NEW_LINE> <INDENT> item = SyndicatedFeedItem(raw_entry) <NEW_LINE> self.__items.append(item) <NEW_LINE> <DEDENT> <DEDENT> def title(self) -> Optional[str]: <NEW_LINE> <INDENT> return self.__title <NEW_LINE> <DEDENT> def items(self) -> List[SyndicatedFeedItem]: <NEW_LINE> <INDENT> return self.__items | Parsed feed object. | 6259908363b5f9789fe86c81 |
class getSquareChatMember_args(object): <NEW_LINE> <INDENT> def __init__(self, request=None,): <NEW_LINE> <INDENT> self.request = request <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.request = GetSquareChatMemberRequest() <NEW_LINE> self.request.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('getSquareChatMember_args') <NEW_LINE> if self.request is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('request', TType.STRUCT, 1) <NEW_LINE> self.request.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- request | 62599083be7bc26dc9252be2 |
class AlignedVolumeNotFoundException(MaterializationEngineException): <NEW_LINE> <INDENT> pass | Error raised when an aligned_volume is not found | 625990837047854f46340ece
class CrawlinoManager(metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> def __init__(self, running_config: RunningConfig): <NEW_LINE> <INDENT> current_config.running_config = running_config <NEW_LINE> <DEDENT> def update_global_config(self, crawler): <NEW_LINE> <INDENT> current_config.add_crawler_crawler(crawler) | Abstract class for run managers
This class also manages the app config contexts | 625990833317a56b869bf2d0 |
class Temperature(db.Model, CRUDMixin): <NEW_LINE> <INDENT> id = db.Column(db.Integer(), primary_key=True) <NEW_LINE> mac_address = db.Column(db.String(17)) <NEW_LINE> android_id = db.Column(db.String(16)) <NEW_LINE> device_id = db.Column(db.String(16)) <NEW_LINE> system_time = db.Column(db.DateTime) <NEW_LINE> time_stamp_device = db.Column(db.DateTime) <NEW_LINE> temperature = db.Column(db.Float()) <NEW_LINE> client_hash = db.Column(db.String(64)) <NEW_LINE> participant_number = db.Column(db.Integer) <NEW_LINE> study_number = db.Column(db.Integer) | A Temperature is a single temperature log for a specific device/participant | 62599083ec188e330fdfa3c4 |
@hashable_attrs <NEW_LINE> class MixTableChange(object): <NEW_LINE> <INDENT> instrument = attr.ib(default=None) <NEW_LINE> rse = attr.ib(default=None) <NEW_LINE> volume = attr.ib(default=None) <NEW_LINE> balance = attr.ib(default=None) <NEW_LINE> chorus = attr.ib(default=None) <NEW_LINE> reverb = attr.ib(default=None) <NEW_LINE> phaser = attr.ib(default=None) <NEW_LINE> tremolo = attr.ib(default=None) <NEW_LINE> tempoName = attr.ib(default='') <NEW_LINE> tempo = attr.ib(default=None) <NEW_LINE> hideTempo = attr.ib(default=True) <NEW_LINE> wah = attr.ib(default=None) <NEW_LINE> useRSE = attr.ib(default=False) <NEW_LINE> rse = attr.ib(default=attr.Factory(RSEInstrument)) <NEW_LINE> @property <NEW_LINE> def isJustWah(self): <NEW_LINE> <INDENT> return (self.instrument is None and self.volume is None and self.balance is None and self.chorus is None and self.reverb is None and self.phaser is None and self.tremolo is None and self.tempo is None and self.wah is not None) | A MixTableChange describes a change in mix parameters. | 62599083d8ef3951e32c8beb |
class TeacherDetailView(View): <NEW_LINE> <INDENT> def get(self, request, teacher_id): <NEW_LINE> <INDENT> teacher = Teacher.objects.get(id=int(teacher_id)) <NEW_LINE> teacher_courses = Course.objects.filter(teacher=teacher) <NEW_LINE> teacher.click_nums += 1 <NEW_LINE> teacher.save() <NEW_LINE> hot_teachers = Teacher.objects.all().order_by('-click_nums')[:3] <NEW_LINE> has_fav_teacher = False <NEW_LINE> has_fav_org = False <NEW_LINE> if request.user.is_authenticated: <NEW_LINE> <INDENT> if UserFavorite.objects.filter(user=request.user, fav_id=teacher_id, fav_type=3): <NEW_LINE> <INDENT> has_fav_teacher = True <NEW_LINE> <DEDENT> if UserFavorite.objects.filter(user=request.user, fav_id=teacher.org.id, fav_type=2): <NEW_LINE> <INDENT> has_fav_org = True <NEW_LINE> <DEDENT> <DEDENT> return render(request, "teacher-detail.html", { 'teacher': teacher, 'teacher_courses': teacher_courses, 'hot_teachers': hot_teachers, 'has_fav_teacher': has_fav_teacher, 'has_fav_org': has_fav_org, }) | Teacher detail page | 62599083fff4ab517ebcf32f
class TestRepository: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__list = [] <NEW_LINE> <DEDENT> def save(self,test): <NEW_LINE> <INDENT> self.__list.append(test) <NEW_LINE> <DEDENT> def listTests(self): <NEW_LINE> <INDENT> return self.__list | Description | 62599083a05bb46b3848beb4
class Bool(CTLS.Bool, PathFormula): <NEW_LINE> <INDENT> pass | A class representing LTL Boolean atomic propositions. | 625990834428ac0f6e65a048 |
class UserGroup(Base, SikrModelMixin): <NEW_LINE> <INDENT> name = Column(String) <NEW_LINE> users = relationship("User", secondary=user_group_table, backref="groups") <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return f"<Group: {self.name}>" | Basic model to group users. | 6259908371ff763f4b5e92c6
class ArchiveView(ArchiveListViewMixin, ListView): <NEW_LINE> <INDENT> def __parse_int_or_none(self, maybe_int): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return int(maybe_int) <NEW_LINE> <DEDENT> except (TypeError, ValueError): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def setup(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.section_id = self.__parse_int_or_none(request.GET.get('section_id')) <NEW_LINE> self.year = self.__parse_int_or_none(request.GET.get('year')) <NEW_LINE> return super().setup(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_template_names(self): <NEW_LINE> <INDENT> return ['archive.html'] | View for http://ubyssey.ca/archive/
Bugs:
Cannot click on "All years" or "All sections" once you have selected a particular year or section | 6259908355399d3f0562802e |
class ParameterValueError(WXpayException): <NEW_LINE> <INDENT> pass | Raised when parameter value is incorrect | 6259908397e22403b383ca18 |
class Article: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__init__(None, 0, '', '', '', '', '', '', '') <NEW_LINE> <DEDENT> def __init__(self, id, category_id, title, thumb, download_links_http, download_links_thunder, update_time, orig_article_url, orig_thumb): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.category_id = category_id <NEW_LINE> self.title = title <NEW_LINE> self.thumb = thumb <NEW_LINE> self.download_links_http = download_links_http <NEW_LINE> self.download_links_thunder = download_links_thunder <NEW_LINE> self.update_time = update_time <NEW_LINE> self.orig_article_url = orig_article_url <NEW_LINE> self.orig_thumb = orig_thumb <NEW_LINE> <DEDENT> def to_sql_insert_data(self): <NEW_LINE> <INDENT> return (self.category_id, self.title, self.thumb, str(self.download_links_http), str(self.download_links_thunder), self.update_time, self.orig_article_url, self.orig_thumb) | Article data | 62599083d8ef3951e32c8beb
class Plot1DWithPlugins(Plot1D): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> Plot1D.__init__(self, parent) <NEW_LINE> self._plotType = "SCAN" <NEW_LINE> self._toolbar = qt.QToolBar(self) <NEW_LINE> self.addToolBar(self._toolbar) <NEW_LINE> pluginsToolButton = PluginsToolButton(plot=self, parent=self) <NEW_LINE> if PLUGINS_DIR: <NEW_LINE> <INDENT> pluginsToolButton.getPlugins( method="getPlugin1DInstance", directoryList=PLUGINS_DIR) <NEW_LINE> <DEDENT> self._toolbar.addWidget(pluginsToolButton) | Add a plugin toolbutton to a Plot1D | 6259908376e4537e8c3f1099 |
class agilentMSOX3024A(agilent3000A): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.__dict__.setdefault('_instrument_id', 'MSO-X 3024A') <NEW_LINE> super(agilentMSOX3024A, self).__init__(*args, **kwargs) <NEW_LINE> self._analog_channel_count = 4 <NEW_LINE> self._digital_channel_count = 16 <NEW_LINE> self._channel_count = self._analog_channel_count + self._digital_channel_count <NEW_LINE> self._bandwidth = 200e6 <NEW_LINE> self._init_channels() | Agilent InfiniiVision MSOX3024A IVI oscilloscope driver | 625990833346ee7daa3383ef |
class _ScalarAccessIndexer(NDFrameIndexerBase): <NEW_LINE> <INDENT> def _convert_key(self, key): <NEW_LINE> <INDENT> raise AbstractMethodError(self) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if not isinstance(key, tuple): <NEW_LINE> <INDENT> if not is_list_like_indexer(key): <NEW_LINE> <INDENT> key = (key,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Invalid call for scalar access (getting)!") <NEW_LINE> <DEDENT> <DEDENT> key = self._convert_key(key) <NEW_LINE> return self.obj._get_value(*key, takeable=self._takeable) <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> if isinstance(key, tuple): <NEW_LINE> <INDENT> key = tuple(com.apply_if_callable(x, self.obj) for x in key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key = com.apply_if_callable(key, self.obj) <NEW_LINE> <DEDENT> if not isinstance(key, tuple): <NEW_LINE> <INDENT> key = _tuplify(self.ndim, key) <NEW_LINE> <DEDENT> key = list(self._convert_key(key)) <NEW_LINE> if len(key) != self.ndim: <NEW_LINE> <INDENT> raise ValueError("Not enough indexers for scalar access (setting)!") <NEW_LINE> <DEDENT> self.obj._set_value(*key, value=value, takeable=self._takeable) | Access scalars quickly. | 625990835fcc89381b266eea |
class TMC2100(): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.__channel = channel <NEW_LINE> self.registers = TMC2100_register <NEW_LINE> self.fields = TMC2100_fields <NEW_LINE> self.variants = TMC2100_register_variant <NEW_LINE> self.MOTORS = 2 <NEW_LINE> <DEDENT> def showChipInfo(self): <NEW_LINE> <INDENT> print("TMC2100 chip info: The TMC2100 is a standalone driver IC for two-phase stepper motors. Voltage supply: 4.75 - 46V") <NEW_LINE> <DEDENT> def writeRegister(self, registerAddress, value, channel): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def readRegister(self, registerAddress, channel): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def writeRegisterField(self, field, value): <NEW_LINE> <INDENT> return self.writeRegister(field[0], TMC_helpers.field_set(self.readRegister(field[0], self.__channel), field[1], field[2], value), self.__channel) <NEW_LINE> <DEDENT> def readRegisterField(self, field): <NEW_LINE> <INDENT> return TMC_helpers.field_get(self.readRegister(field[0], self.__channel), field[1], field[2]) <NEW_LINE> <DEDENT> def moveBy(self, motor, distance, velocity): <NEW_LINE> <INDENT> if not(0 <= motor < self.MOTORS): <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> position = self.readRegister(self.registers.XACTUAL, self.__channel, signed=True) <NEW_LINE> self.moveTo(motor, position + distance, velocity) <NEW_LINE> return position + distance <NEW_LINE> <DEDENT> def get_pin_state(self): <NEW_LINE> <INDENT> pass | Class for the TMC2100 IC | 625990833617ad0b5ee07c6b |
class Update(Base): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> install_helper = PkgInstall(self.settings, self.options, self.index, self.repo_cache, True) <NEW_LINE> pkg_names = self.options["<package>"] <NEW_LINE> for pkg_name in pkg_names: <NEW_LINE> <INDENT> install_helper.install(pkg_name, self.base_pkg_dir) | Updates a package! | 625990837b180e01f3e49df2 |
class BootstrapInfo: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.LIST, 'profiles', (TType.STRUCT,(BootstrapProfile, BootstrapProfile.thrift_spec)), None, ), ) <NEW_LINE> def __init__(self, profiles=None,): <NEW_LINE> <INDENT> self.profiles = profiles <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.profiles = [] <NEW_LINE> (_etype3, _size0) = iprot.readListBegin() <NEW_LINE> for _i4 in xrange(_size0): <NEW_LINE> <INDENT> _elem5 = BootstrapProfile() <NEW_LINE> _elem5.read(iprot) <NEW_LINE> self.profiles.append(_elem5) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('BootstrapInfo') <NEW_LINE> if self.profiles is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('profiles', TType.LIST, 1) <NEW_LINE> oprot.writeListBegin(TType.STRUCT, len(self.profiles)) <NEW_LINE> for iter6 in self.profiles: <NEW_LINE> <INDENT> iter6.write(oprot) <NEW_LINE> <DEDENT> oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.profiles is None: <NEW_LINE> <INDENT> raise TProtocol.TProtocolException(message='Required field profiles is unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.profiles) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | This structure describes a collection of bootstrap profiles.
<dl>
<dt>profiles:</dt>
<dd>
List of one or more bootstrap profiles, in descending
preference order.
</dd>
</dl>
Attributes:
- profiles | 62599083fff4ab517ebcf332 |
class TypedObject(backend.TypedObject): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_new(cls, *args, **kwargs): <NEW_LINE> <INDENT> obj = super(TypedObject, cls).from_new(*args, **kwargs) <NEW_LINE> if db.exists(obj.key): <NEW_LINE> <INDENT> raise PersistentObjectError("Key already exists in DB") <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_existing(cls, *args, **kwargs): <NEW_LINE> <INDENT> obj = super(TypedObject, cls).from_existing(*args, **kwargs) <NEW_LINE> if not db.exists(obj.key): <NEW_LINE> <INDENT> raise ObjectDNE(obj) <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> if not db.delete(self.key): <NEW_LINE> <INDENT> raise PersistentObjectError("Delete Failed") <NEW_LINE> <DEDENT> super(TypedObject, self).delete() <NEW_LINE> <DEDENT> def exists(self): <NEW_LINE> <INDENT> return db.exists(self.full_key) | Typed Redis Object Class | 62599083adb09d7d5dc0c076 |
class BasicAutoEncoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(BasicAutoEncoder, self).__init__() <NEW_LINE> self.encoder = nn.Sequential( nn.Conv2d(in_channels=1, out_channels=32, kernel_size=5, stride=4, padding=2), nn.ELU(), nn.Conv2d(in_channels=32, out_channels=1, kernel_size=1, stride=1, padding=0), ) <NEW_LINE> self.decoder = nn.Sequential( nn.Conv2d(in_channels=1, out_channels=16, kernel_size=3, stride=1, padding=1), nn.PixelShuffle(4), ) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> encoder_output = self.encoder(x) <NEW_LINE> encoder_output = Relu1.apply(encoder_output) <NEW_LINE> decoder_output = self.decoder(encoder_output) <NEW_LINE> decoder_output = Relu1.apply(decoder_output) <NEW_LINE> return encoder_output, decoder_output | Basic auto encoder as stated in: "Jointly Learning Convolutional Representations to Compress Radiological
Images and Classify Thoracic Diseases in the Compressed Domain"
https://dl.acm.org/doi/abs/10.1145/3293353.3293408 | 6259908399fddb7c1ca63b68 |
class Googletest(CMakePackage): <NEW_LINE> <INDENT> homepage = "https://github.com/google/googletest" <NEW_LINE> url = "https://github.com/google/googletest/tarball/release-1.7.0" <NEW_LINE> version('1.8.1', sha256='8e40a005e098b1ba917d64104549e3da274e31261dedc57d6250fe91391b2e84') <NEW_LINE> version('1.8.0', sha256='d8c33605d23d303b08a912eaee7f84c4e091d6e3d90e9a8ec8aaf7450dfe2568') <NEW_LINE> version('1.7.0', sha256='9639cf8b7f37a4d0c6575f52c01ef167c5f11faee65252296b3ffc2d9acd421b') <NEW_LINE> version('1.6.0', sha256='a61e20c65819eb39a2da85c88622bac703b865ca7fe2bfdcd3da734d87d5521a') <NEW_LINE> variant('gmock', default=False, description='Build with gmock') <NEW_LINE> conflicts('+gmock', when='@:1.7.0') <NEW_LINE> variant('pthreads', default=True, description='Build multithreaded version with pthreads') <NEW_LINE> variant('shared', default=True, description='Build shared libraries (DLLs)') <NEW_LINE> def cmake_args(self): <NEW_LINE> <INDENT> spec = self.spec <NEW_LINE> if '@1.8.0:' in spec: <NEW_LINE> <INDENT> options = ['-DBUILD_GTEST=ON'] <NEW_LINE> if '+gmock' in spec: <NEW_LINE> <INDENT> options.append('-DBUILD_GMOCK=ON') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> options.append('-DBUILD_GMOCK=OFF') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> options = [] <NEW_LINE> <DEDENT> options.append('-Dgtest_disable_pthreads={0}'.format( 'ON' if '+pthreads' in spec else 'OFF')) <NEW_LINE> options.append('-DBUILD_SHARED_LIBS={0}'.format( 'ON' if '+shared' in spec else 'OFF')) <NEW_LINE> return options <NEW_LINE> <DEDENT> @when('@:1.7.0') <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> with working_dir(self.build_directory): <NEW_LINE> <INDENT> install_tree(join_path(self.stage.source_path, 'include'), prefix.include) <NEW_LINE> mkdirp(prefix.lib) <NEW_LINE> if '+shared' in spec: <NEW_LINE> <INDENT> install('libgtest.{0}'.format(dso_suffix), prefix.lib) <NEW_LINE> install('libgtest_main.{0}'.format(dso_suffix), prefix.lib) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> install('libgtest.a', prefix.lib) <NEW_LINE> install('libgtest_main.a', prefix.lib) | Google test framework for C++. Also called gtest. | 62599083091ae3566870675d |
class OraclePlatformVelocityHardLimitException(OracleException): <NEW_LINE> <INDENT> pass | This transaction is over platform-wide velocity limit. Consider increasing velocity limits. | 625990837047854f46340ed2 |
class FaceDetector: <NEW_LINE> <INDENT> def __init__(self, dnn_proto_text='assets/deploy.prototxt', dnn_model='assets/res10_300x300_ssd_iter_140000.caffemodel'): <NEW_LINE> <INDENT> self.face_net = cv2.dnn.readNetFromCaffe(dnn_proto_text, dnn_model) <NEW_LINE> self.detection_result = None <NEW_LINE> <DEDENT> def get_faceboxes(self, image, threshold=0.5): <NEW_LINE> <INDENT> rows, cols, _ = image.shape <NEW_LINE> confidences = [] <NEW_LINE> faceboxes = [] <NEW_LINE> self.face_net.setInput(cv2.dnn.blobFromImage( image, 1.0, (300, 300), (104.0, 177.0, 123.0), False)) <NEW_LINE> detections = self.face_net.forward() <NEW_LINE> for result in detections[0, 0, :, :]: <NEW_LINE> <INDENT> confidence = result[2] <NEW_LINE> if confidence > threshold: <NEW_LINE> <INDENT> x_left_bottom = int(result[3] * cols) <NEW_LINE> y_left_bottom = int(result[4] * rows) <NEW_LINE> x_right_top = int(result[5] * cols) <NEW_LINE> y_right_top = int(result[6] * rows) <NEW_LINE> confidences.append(confidence) <NEW_LINE> faceboxes.append( [x_left_bottom, y_left_bottom, x_right_top, y_right_top]) <NEW_LINE> <DEDENT> <DEDENT> self.detection_result = [faceboxes, confidences] <NEW_LINE> return confidences, faceboxes <NEW_LINE> <DEDENT> def draw_all_result(self, image): <NEW_LINE> <INDENT> for facebox, conf in self.detection_result: <NEW_LINE> <INDENT> cv2.rectangle(image, (facebox[0], facebox[1]), (facebox[2], facebox[3]), (0, 255, 0)) <NEW_LINE> label = "face: %.4f" % conf <NEW_LINE> label_size, base_line = cv2.getTextSize( label, cv2.FONT_HERSHEY_SIMPLEX, 0.5, 1) <NEW_LINE> cv2.rectangle(image, (facebox[0], facebox[1] - label_size[1]), (facebox[0] + label_size[0], facebox[1] + base_line), (0, 255, 0), cv2.FILLED) <NEW_LINE> cv2.putText(image, label, (facebox[0], facebox[1]), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0)) | Detect human face from image | 625990833317a56b869bf2d2 |
class ClientError(Exception): <NEW_LINE> <INDENT> pass | Exception that represents errors in the client, regardless of
implementation. | 625990833617ad0b5ee07c6d |
class MotifFinder(object): <NEW_LINE> <INDENT> def __init__(self, alphabet_strict=1): <NEW_LINE> <INDENT> self.alphabet_strict = alphabet_strict <NEW_LINE> <DEDENT> def find(self, seq_records, motif_size): <NEW_LINE> <INDENT> motif_info = self._get_motif_dict(seq_records, motif_size) <NEW_LINE> return PatternRepository(motif_info) <NEW_LINE> <DEDENT> def _get_motif_dict(self, seq_records, motif_size): <NEW_LINE> <INDENT> if self.alphabet_strict: <NEW_LINE> <INDENT> alphabet = seq_records[0].seq.alphabet <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> alphabet = None <NEW_LINE> <DEDENT> all_motifs = {} <NEW_LINE> for seq_record in seq_records: <NEW_LINE> <INDENT> if alphabet is not None and seq_record.seq.alphabet != alphabet: <NEW_LINE> <INDENT> raise ValueError('Working with alphabet %s and got %s' % ( alphabet, seq_record.seq.alphabet)) <NEW_LINE> <DEDENT> for start in range(len(seq_record.seq) - (motif_size - 1)): <NEW_LINE> <INDENT> motif = str(seq_record.seq[start:start + motif_size]) <NEW_LINE> if alphabet is not None: <NEW_LINE> <INDENT> motif_seq = Seq(motif, alphabet) <NEW_LINE> if _verify_alphabet(motif_seq): <NEW_LINE> <INDENT> all_motifs = self._add_motif(all_motifs, motif) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> all_motifs = self._add_motif(all_motifs, motif) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return all_motifs <NEW_LINE> <DEDENT> def find_differences(self, first_records, second_records, motif_size): <NEW_LINE> <INDENT> first_motifs = self._get_motif_dict(first_records, motif_size) <NEW_LINE> second_motifs = self._get_motif_dict(second_records, motif_size) <NEW_LINE> motif_diffs = {} <NEW_LINE> for cur_key in first_motifs: <NEW_LINE> <INDENT> if cur_key in second_motifs: <NEW_LINE> <INDENT> motif_diffs[cur_key] = first_motifs[cur_key] - second_motifs[cur_key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> motif_diffs[cur_key] = first_motifs[cur_key] <NEW_LINE> <DEDENT> <DEDENT> missing_motifs = list(second_motifs) <NEW_LINE> for added_motif in motif_diffs: <NEW_LINE> <INDENT> if added_motif in missing_motifs: <NEW_LINE> <INDENT> missing_motifs.remove(added_motif) <NEW_LINE> <DEDENT> <DEDENT> for cur_key in missing_motifs: <NEW_LINE> <INDENT> motif_diffs[cur_key] = 0 - second_motifs[cur_key] <NEW_LINE> <DEDENT> return PatternRepository(motif_diffs) <NEW_LINE> <DEDENT> def _add_motif(self, motif_dict, motif_to_add): <NEW_LINE> <INDENT> if motif_to_add in motif_dict: <NEW_LINE> <INDENT> motif_dict[motif_to_add] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> motif_dict[motif_to_add] = 1 <NEW_LINE> <DEDENT> return motif_dict | Find motifs in a set of Sequence Records. | 625990835fdd1c0f98e5fa9c |
class CallbackServer(object): <NEW_LINE> <INDENT> def __init__(self, pool, gateway_client, port=DEFAULT_PYTHON_PROXY_PORT): <NEW_LINE> <INDENT> super(CallbackServer, self).__init__() <NEW_LINE> self.gateway_client = gateway_client <NEW_LINE> self.port = port <NEW_LINE> self.pool = pool <NEW_LINE> self.connections = [] <NEW_LINE> self.lock = RLock() <NEW_LINE> self.is_shutdown = False <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> self.server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) <NEW_LINE> self.server_socket.bind(('localhost', self.port)) <NEW_LINE> self.thread = Thread(target=self.run) <NEW_LINE> self.thread.start() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> self.is_shutdown = False <NEW_LINE> <DEDENT> logger.info('Callback Server Starting') <NEW_LINE> self.server_socket.listen(5) <NEW_LINE> logger.info('Socket listening on {0}'. format(smart_decode(self.server_socket.getsockname()))) <NEW_LINE> while not self.is_shutdown: <NEW_LINE> <INDENT> socket, _ = self.server_socket.accept() <NEW_LINE> input = socket.makefile('rb', 0) <NEW_LINE> connection = CallbackConnection(self.pool, input, socket, self.gateway_client) <NEW_LINE> with self.lock: <NEW_LINE> <INDENT> if not self.is_shutdown: <NEW_LINE> <INDENT> self.connections.append(connection) <NEW_LINE> connection.start() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> connection.socket.shutdown(socket.SHUT_RDWR) <NEW_LINE> connection.socket.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> logger.exception('Error while waiting for a connection.') <NEW_LINE> <DEDENT> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> logger.info('Callback Server Shutting Down') <NEW_LINE> with self.lock: <NEW_LINE> <INDENT> self.is_shutdown = True <NEW_LINE> try: <NEW_LINE> <INDENT> self.server_socket.shutdown(socket.SHUT_RDWR) <NEW_LINE> self.server_socket.close() <NEW_LINE> self.server_socket = None <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> for connection in self.connections: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> connection.socket.shutdown(socket.SHUT_RDWR) <NEW_LINE> connection.socket.close() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.pool.clear() <NEW_LINE> <DEDENT> self.thread.join() <NEW_LINE> self.thread = None | The CallbackServer is responsible for receiving call back connection
requests from the JVM. Usually connections are reused on the Java side,
but there is at least one connection per concurrent thread. | 625990834527f215b58eb72e |
class _FixedFindCallerLogger(logging.Logger): <NEW_LINE> <INDENT> def findCaller(self, stack_info=False): <NEW_LINE> <INDENT> f, name = _find_first_app_frame_and_name(['logging']) <NEW_LINE> if PY3: <NEW_LINE> <INDENT> if stack_info: <NEW_LINE> <INDENT> sinfo = _format_stack(f) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sinfo = None <NEW_LINE> <DEDENT> return f.f_code.co_filename, f.f_lineno, f.f_code.co_name, sinfo <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return f.f_code.co_filename, f.f_lineno, f.f_code.co_name | Change the behavior of findCaller to cope with structlog's extra frames. | 62599083ad47b63b2c5a936f |
class CUBClassDataset(BaseCUBDataset): <NEW_LINE> <INDENT> def __init__(self, param, mode): <NEW_LINE> <INDENT> super(CUBClassDataset, self).__init__(param, mode) <NEW_LINE> self.used_class = self.param.data['use_classes'][0] <NEW_LINE> self.path_dir = os.path.join('/media/john/D/projects/platonicgan', param.data.path_dir, self.param.data['image_folder']) <NEW_LINE> self.list_path = os.path.join('{}/lists/class_sets/{}/'.format(self.metadata_path, self.used_class)) <NEW_LINE> if not os.path.exists(os.path.join(self.list_path, 'train.txt')): <NEW_LINE> <INDENT> self.create_splits() <NEW_LINE> <DEDENT> file_names = self.load_split_files(mode) <NEW_LINE> self.file_names = [] <NEW_LINE> for file_name in file_names: <NEW_LINE> <INDENT> if self.used_class in file_name: <NEW_LINE> <INDENT> self.file_names.append(file_name.rstrip().replace('.jpg', '.png')) <NEW_LINE> <DEDENT> <DEDENT> print('Got {} samples for mode {}'.format(len(self.file_names), mode)) <NEW_LINE> self.dataset_length = len(self.file_names) | Dataset for single bird class in CUB | 6259908392d797404e3898eb |
class BitAndOpExprTests(WithModuleTests): <NEW_LINE> <INDENT> def test_bit_and_op_expr_is_bit_and_op(self): <NEW_LINE> <INDENT> bit_and_op_expr = self.get_expr('1 & 2', 'int') <NEW_LINE> self.assertTrue(bit_and_op_expr.is_bit_and_op()) <NEW_LINE> <DEDENT> def test_bit_and_op_expr_is_no_other_expr(self): <NEW_LINE> <INDENT> bit_and_op_expr = self.get_expr('1 & 2', 'int') <NEW_LINE> self.assertFalse(bit_and_op_expr.is_eq_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_neq_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_gt_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_gt_eq_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_lt_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_lt_eq_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_add_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_sub_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_mul_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_mod_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_div_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_and_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_or_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_bit_or_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_bit_xor_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_bit_shl_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_bit_shr_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_not_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_neg_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_assign_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_address_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_deref_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_array_index_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_comma_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_ternary_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_call()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_cast()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_pre_increment_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_post_increment_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_pre_decrement_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_post_decrement_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_compound_assign_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_struct_ref_op()) <NEW_LINE> self.assertFalse(bit_and_op_expr.is_struct_deref_op()) <NEW_LINE> <DEDENT> def test_repr_returns_correct_repr(self): <NEW_LINE> <INDENT> bit_and_op_expr = self.get_expr('1 & 2', 'int') <NEW_LINE> self.assertEqual(repr(bit_and_op_expr), '<BitAndOpExpr lhs=1 rhs=2>') <NEW_LINE> <DEDENT> def test_str_returns_correct_str(self): <NEW_LINE> <INDENT> bit_and_op_expr = self.get_expr('1 & 2', 'int') <NEW_LINE> self.assertEqual(str(bit_and_op_expr), '1 & 2') | Tests for `BitAndOpExpr`. | 6259908350812a4eaa621953 |
class Profile(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User) <NEW_LINE> partner = models.ForeignKey(Partner, null=True, related_name='user_profiles') <NEW_LINE> full_name = models.CharField(verbose_name=_("Full name"), max_length=128, null=True) <NEW_LINE> change_password = models.BooleanField(default=False, help_text=_("User must change password on next login")) | Extension for the user class | 62599083796e427e53850299 |
class WagtailMvcView(DetailView): <NEW_LINE> <INDENT> page = None <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.page = kwargs.pop('page') <NEW_LINE> return super(WagtailMvcView, self).dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_template_names(self): <NEW_LINE> <INDENT> return [self.page.get_template( self.request, *self.args, **self.kwargs )] <NEW_LINE> <DEDENT> def get_object(self): <NEW_LINE> <INDENT> return self.page <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> ctx = super(WagtailMvcView, self).get_context_data(**kwargs) <NEW_LINE> ctx.update(self.page.get_context( self.request, *self.args, **kwargs )) <NEW_LINE> return ctx | Basic default wagtail mvc view class | 62599083091ae3566870675f |
class StackNotFound(ManausException): <NEW_LINE> <INDENT> def __init__(self, name: str): <NEW_LINE> <INDENT> super().__init__("CloudFormation Stack not found: {}".format(name)) | Error raised when the CloudFormation Stack is not found | 62599083aad79263cf4302d9 |
class Vehicle_Attachment( CSV_Attachment ): <NEW_LINE> <INDENT> source_name= 'vid' <NEW_LINE> gtf_name= 'bus' | A :class:`CSV_Attachment` for the vehicle mapping. | 625990837b180e01f3e49df4 |
@dataclass(eq=False, repr=False) <NEW_LINE> class Resource(betterproto.Message): <NEW_LINE> <INDENT> type: "ResourceType" = betterproto.enum_field(1) <NEW_LINE> name: str = betterproto.string_field(2) | Resource represents any resource that has role-bindings in the system | 625990835fdd1c0f98e5fa9f |
class CheckboxPainter(Painter): <NEW_LINE> <INDENT> DEFAULT_SIZING = DefaultPoliciesViaClass(UseSizeHint) <NEW_LINE> def __init__(self, content): <NEW_LINE> <INDENT> Painter.__init__(self) <NEW_LINE> self.content = content <NEW_LINE> <DEDENT> def buildQWidget(self, widget): <NEW_LINE> <INDENT> checkbox = QCheckBox(self.content.text) <NEW_LINE> return checkbox | Handles creation of the Qt widget for drawing a checkbox | 62599083dc8b845886d550d9 |
class OnProcessIO(EventHandler): <NEW_LINE> <INDENT> def __init__( self, *, target_action: Optional['ExecuteProcess'] = None, on_stdin: Callable[[ProcessIO], Optional[SomeActionsType]] = None, on_stdout: Callable[[ProcessIO], Optional[SomeActionsType]] = None, on_stderr: Callable[[ProcessIO], Optional[SomeActionsType]] = None, **kwargs ) -> None: <NEW_LINE> <INDENT> from ..actions import ExecuteProcess <NEW_LINE> if not isinstance(target_action, (ExecuteProcess, type(None))): <NEW_LINE> <INDENT> raise RuntimeError("OnProcessIO requires an 'ExecuteProcess' action as the target") <NEW_LINE> <DEDENT> super().__init__(matcher=self._matcher, entities=None, **kwargs) <NEW_LINE> self.__target_action = target_action <NEW_LINE> self.__on_stdin = on_stdin <NEW_LINE> self.__on_stdout = on_stdout <NEW_LINE> self.__on_stderr = on_stderr <NEW_LINE> <DEDENT> def _matcher(self, event: Event) -> bool: <NEW_LINE> <INDENT> if not hasattr(event, '__class__'): <NEW_LINE> <INDENT> raise RuntimeError("event '{}' unexpectedly not a class".format(event)) <NEW_LINE> <DEDENT> return ( issubclass(event.__class__, ProcessIO) and ( self.__target_action is None or cast(ProcessIO, event).action == self.__target_action ) ) <NEW_LINE> <DEDENT> def handle(self, event: Event, context: LaunchContext) -> Optional[SomeActionsType]: <NEW_LINE> <INDENT> event = cast(ProcessIO, event) <NEW_LINE> if event.from_stdout and self.__on_stdout is not None: <NEW_LINE> <INDENT> return self.__on_stdout(event) <NEW_LINE> <DEDENT> elif event.from_stderr and self.__on_stderr is not None: <NEW_LINE> <INDENT> return self.__on_stderr(event) <NEW_LINE> <DEDENT> elif event.from_stdin and self.__on_stdin is not None: <NEW_LINE> <INDENT> return self.__on_stdin(event) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> @property <NEW_LINE> def handler_description(self) -> Text: <NEW_LINE> <INDENT> handlers = [] <NEW_LINE> if self.__on_stdin is not None: <NEW_LINE> <INDENT> handlers.append("on_stdin: '{}'".format(self.__on_stdin)) <NEW_LINE> <DEDENT> if self.__on_stdout is not None: <NEW_LINE> <INDENT> handlers.append("on_stdout: '{}'".format(self.__on_stdout)) <NEW_LINE> <DEDENT> if self.__on_stderr is not None: <NEW_LINE> <INDENT> handlers.append("on_stderr: '{}'".format(self.__on_stderr)) <NEW_LINE> <DEDENT> handlers_str = '{' + ', '.join(handlers) + '}' <NEW_LINE> return handlers_str <NEW_LINE> <DEDENT> @property <NEW_LINE> def matcher_description(self) -> Text: <NEW_LINE> <INDENT> if self.__target_action is None: <NEW_LINE> <INDENT> return 'event issubclass of ProcessIO' <NEW_LINE> <DEDENT> return 'event issubclass of ProcessIO and event.action == ExecuteProcess({})'.format( hex(id(self.__target_action)) ) | Convenience class for handling I/O from processes via events. | 625990837cff6e4e811b7561 |
class Solution: <NEW_LINE> <INDENT> def trap(self, height: List[int]) -> int: <NEW_LINE> <INDENT> left = 0 <NEW_LINE> right= len(height)-1 <NEW_LINE> answer = 0 <NEW_LINE> for idx,wall in enumerate(height): <NEW_LINE> <INDENT> left_max = max(height[:idx+1]) <NEW_LINE> right_max = max(height[idx:]) <NEW_LINE> min_wall = min(left_max,right_max) <NEW_LINE> answer += max(min_wall-wall,0) <NEW_LINE> <DEDENT> return answer | brute force | 625990833346ee7daa3383f2 |
class EntityType(type): <NEW_LINE> <INDENT> def __new__(cls, name, parent, prop): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> mod, sep, cm_cls = environ.DATAGATOR_CACHE_BACKEND.rpartition(".") <NEW_LINE> CacheManagerBackend = getattr(importlib.import_module(mod), cm_cls) <NEW_LINE> assert(issubclass(CacheManagerBackend, CacheManager)) <NEW_LINE> <DEDENT> except (ImportError, AssertionError): <NEW_LINE> <INDENT> raise AssertionError("invalid cache backend '{0}'".format( environ.DATAGATOR_CACHE_BACKEND)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prop['store'] = CacheManagerBackend() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> service = DataGatorService() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise RuntimeError("failed to initialize backend service") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prop['service'] = service <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> filename = os.path.join(os.path.dirname(__file__), "schema.json") <NEW_LINE> schema = None <NEW_LINE> if os.access(filename, os.F_OK | os.R_OK): <NEW_LINE> <INDENT> with open(filename, "r") as f: <NEW_LINE> <INDENT> schema = json.load(f) <NEW_LINE> f.close() <NEW_LINE> <DEDENT> <DEDENT> if schema is None: <NEW_LINE> <INDENT> schema = prop['service'].schema <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> raise RuntimeError("failed to initialize schema validator") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> JsonSchemaValidator = jsonschema.validators.create( meta_schema=jsonschema.Draft4Validator.META_SCHEMA, validators=jsonschema.Draft4Validator.VALIDATORS, version=b"draft4", default_types=dict(itertools.chain( jsonschema.Draft4Validator.DEFAULT_TYPES.items(), [('array', (list, tuple)), ])) ) <NEW_LINE> prop['schema'] = JsonSchemaValidator(schema) <NEW_LINE> <DEDENT> return type(to_native(name), parent, prop) <NEW_LINE> <DEDENT> pass | Meta class for initializing class members of the Entity class | 62599083091ae35668706761 |
class WsgiLogErrors(object): <NEW_LINE> <INDENT> omit_exceptions = [ PermissionDenied, Http404, ] <NEW_LINE> def process_exception(self, request, exception): <NEW_LINE> <INDENT> if not type(exception) in self.omit_exceptions: <NEW_LINE> <INDENT> tb_text = traceback.format_exc() <NEW_LINE> url = request.build_absolute_uri() <NEW_LINE> request.META['wsgi.errors'].write('EXCEPTION raised serving: %s\n%s\n' % (url, str(tb_text))) | Log all but the omitted exceptions with tracebacks to web server error_log via wsgi.errors. | 625990835fcc89381b266eed
class StopGroupRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.GroupId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.GroupId = params.get("GroupId") | StopGroup request parameter structure
| 62599083099cdd3c6367618a |
class Auth(BaseAuth): <NEW_LINE> <INDENT> def login(self, user, password): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> host = self.configuration.get('auth', 'imap_host') <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> host = '' <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> secure = _convert_to_bool(self.configuration.get('auth', 'imap_secure')) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> secure = True <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if ':' in host: <NEW_LINE> <INDENT> address, port = host.rsplit(':', maxsplit=1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> address, port = host, 143 <NEW_LINE> <DEDENT> address, port = address.strip('[] '), int(port) <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> raise RuntimeError( 'Failed to parse address %r: %s' % (host, e)) from e <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> connection = imaplib.IMAP4(host=address, port=port) <NEW_LINE> <DEDENT> except (OSError, imaplib.IMAP4.error) as e: <NEW_LINE> <INDENT> raise RuntimeError('Failed to communicate with IMAP server %r: ' '%s' % (host, e)) from e <NEW_LINE> <DEDENT> context = ssl.create_default_context() <NEW_LINE> if not secure: <NEW_LINE> <INDENT> context.check_hostname = False <NEW_LINE> context.verify_mode = ssl.CERT_NONE <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> connection.starttls(context) <NEW_LINE> <DEDENT> except (imaplib.IMAP4.error, ssl.CertificateError) as e: <NEW_LINE> <INDENT> raise RuntimeError('Failed to establish secure connection with %r: %s' % (host, e)) from e <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> connection.login(user, password) <NEW_LINE> <DEDENT> except imaplib.IMAP4.error as e: <NEW_LINE> <INDENT> logger.debug( 'IMAP authentication failed: %s', e, exc_info=True) <NEW_LINE> return "" <NEW_LINE> <DEDENT> connection.logout() <NEW_LINE> return user | Authenticate user with IMAP.
Configuration:
[auth]
type = radicale_imap
imap_host = example.com:143
imap_secure = True | 62599083aad79263cf4302dc |
class Invoice(models.Model): <NEW_LINE> <INDENT> _inherit = 'account.invoice' <NEW_LINE> subs = fields.Boolean(related='invoice_line_ids.subs', string='subs', help="It indicates that the invoice is a Subscription Invoice.", store=True) | Inherits invoice and adds a boolean to invoice to flag Subscription invoices | 625990835fdd1c0f98e5faa1
class RouteFilterRule(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'access': {'key': 'properties.access', 'type': 'str'}, 'route_filter_rule_type': {'key': 'properties.routeFilterRuleType', 'type': 'str'}, 'communities': {'key': 'properties.communities', 'type': '[str]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, id: Optional[str] = None, name: Optional[str] = None, location: Optional[str] = None, access: Optional[Union[str, "Access"]] = None, route_filter_rule_type: Optional[Union[str, "RouteFilterRuleType"]] = None, communities: Optional[List[str]] = None, **kwargs ): <NEW_LINE> <INDENT> super(RouteFilterRule, self).__init__(id=id, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.location = location <NEW_LINE> self.etag = None <NEW_LINE> self.access = access <NEW_LINE> self.route_filter_rule_type = route_filter_rule_type <NEW_LINE> self.communities = communities <NEW_LINE> self.provisioning_state = None | Route Filter Rule Resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:param location: Resource location.
:type location: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param access: The access type of the rule. Possible values include: "Allow", "Deny".
:type access: str or ~azure.mgmt.network.v2019_02_01.models.Access
:param route_filter_rule_type: The rule type of the rule. Valid value is: 'Community'. Possible
values include: "Community".
:type route_filter_rule_type: str or ~azure.mgmt.network.v2019_02_01.models.RouteFilterRuleType
:param communities: The collection for bgp community values to filter on. e.g.
['12076:5010','12076:5020'].
:type communities: list[str]
:ivar provisioning_state: The provisioning state of the resource. Possible values are:
'Updating', 'Deleting', 'Succeeded' and 'Failed'.
:vartype provisioning_state: str | 6259908360cbc95b06365afc |
class CategoryAdminForm(forms.ModelForm): <NEW_LINE> <INDENT> parent = TreeNodeChoiceField(label=_('parent category').capitalize(), required=False, empty_label=_('No parent category'), queryset=Category.tree.all(), level_indicator=u'|--') <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(CategoryAdminForm, self).__init__(*args, **kwargs) <NEW_LINE> rel = ManyToOneRel(Category, 'id') <NEW_LINE> self.fields['parent'].widget = RelatedFieldWidgetWrapper( self.fields['parent'].widget, rel, self.admin_site) <NEW_LINE> <DEDENT> def clean_parent(self): <NEW_LINE> <INDENT> data = self.cleaned_data['parent'] <NEW_LINE> if data == self.instance: <NEW_LINE> <INDENT> raise forms.ValidationError( _('A category cannot be parent of itself.')) <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = Category | Form for Category's Admin | 62599083f548e778e596d0ad |
class CartpolePolicy(ParameterCreatingPolicy): <NEW_LINE> <INDENT> def act(self, observation): <NEW_LINE> <INDENT> self.current_parameter = 0 <NEW_LINE> p = self.param <NEW_LINE> x, x_dot, theta, theta_dot = observation <NEW_LINE> go_right = (p(0) * x + p(0) * x_dot + p(0) * theta + p(0) * theta_dot) > 0 <NEW_LINE> return 1 if go_right else 0 | A policy to solve CartPole-v0 | 6259908371ff763f4b5e92ce |
class AberrationPhantom(BaseMaterial): <NEW_LINE> <INDENT> def __init__(self, temperature: float): <NEW_LINE> <INDENT> super().__init__(temperature) <NEW_LINE> <DEDENT> @property <NEW_LINE> def constant_of_attenuation(self) -> float: <NEW_LINE> <INDENT> temperatures = numpy.array([20.0, 30.0]) <NEW_LINE> measurements = numpy.array([0.5, 0.5]) <NEW_LINE> return self._interpolation(temperatures, measurements) <NEW_LINE> <DEDENT> @property <NEW_LINE> def exponent_of_attenuation(self) -> float: <NEW_LINE> <INDENT> temperatures = numpy.array([20.0, 30.0]) <NEW_LINE> measurements = numpy.array([1.0, 1.0]) <NEW_LINE> return self._interpolation(temperatures, measurements) <NEW_LINE> <DEDENT> @property <NEW_LINE> def non_linearity_coefficient(self) -> float: <NEW_LINE> <INDENT> temperatures = numpy.array([20.0, 30.0]) <NEW_LINE> measurements = numpy.array([0.0, 0.0]) <NEW_LINE> return self._interpolation(temperatures, measurements) <NEW_LINE> <DEDENT> @property <NEW_LINE> def mass_density(self) -> float: <NEW_LINE> <INDENT> temperatures = numpy.array([20.0, 30.0]) <NEW_LINE> measurements = numpy.array([1250.0, 1250.0]) <NEW_LINE> return self._interpolation(temperatures, measurements) <NEW_LINE> <DEDENT> @property <NEW_LINE> def sound_speed(self) -> float: <NEW_LINE> <INDENT> temperatures = numpy.array([20.0, 30.0]) <NEW_LINE> measurements = numpy.array([1640.0, 1640.0]) <NEW_LINE> return self._interpolation(temperatures, measurements) | AberrationPhantom
TODO Need unit tests | 625990835fdd1c0f98e5faa2 |
class RoleMetadata(Base): <NEW_LINE> <INDENT> _allow_duplicates = FieldAttribute(isa='bool', default=False) <NEW_LINE> _dependencies = FieldAttribute(isa='list', default=list) <NEW_LINE> _galaxy_info = FieldAttribute(isa='GalaxyInfo') <NEW_LINE> def __init__(self, owner=None): <NEW_LINE> <INDENT> self._owner = owner <NEW_LINE> super(RoleMetadata, self).__init__() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def load(data, owner, variable_manager=None, loader=None): <NEW_LINE> <INDENT> if not isinstance(data, dict): <NEW_LINE> <INDENT> raise AnsibleParserError("the 'meta/main.yml' for role %s is not a dictionary" % owner.get_name()) <NEW_LINE> <DEDENT> m = RoleMetadata(owner=owner).load_data(data, variable_manager=variable_manager, loader=loader) <NEW_LINE> return m <NEW_LINE> <DEDENT> def _load_dependencies(self, attr, ds): <NEW_LINE> <INDENT> roles = [] <NEW_LINE> if ds: <NEW_LINE> <INDENT> if not isinstance(ds, list): <NEW_LINE> <INDENT> raise AnsibleParserError("Expected role dependencies to be a list.", obj=self._ds) <NEW_LINE> <DEDENT> for role_def in ds: <NEW_LINE> <INDENT> if isinstance(role_def, string_types) or 'role' in role_def or 'name' in role_def: <NEW_LINE> <INDENT> roles.append(role_def) <NEW_LINE> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> def_parsed = RoleRequirement.role_yaml_parse(role_def) <NEW_LINE> if def_parsed.get('name'): <NEW_LINE> <INDENT> role_def['name'] = def_parsed['name'] <NEW_LINE> <DEDENT> roles.append(role_def) <NEW_LINE> <DEDENT> except AnsibleError as exc: <NEW_LINE> <INDENT> raise AnsibleParserError(to_native(exc), obj=role_def, orig_exc=exc) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> current_role_path = None <NEW_LINE> if self._owner: <NEW_LINE> <INDENT> current_role_path = os.path.dirname(self._owner._role_path) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return load_list_of_roles(roles, play=self._owner._play, current_role_path=current_role_path, variable_manager=self._variable_manager, loader=self._loader) <NEW_LINE> <DEDENT> except AssertionError as e: <NEW_LINE> <INDENT> raise AnsibleParserError("A malformed list of role dependencies was encountered.", obj=self._ds, orig_exc=e) <NEW_LINE> <DEDENT> <DEDENT> def _load_galaxy_info(self, attr, ds): <NEW_LINE> <INDENT> return ds <NEW_LINE> <DEDENT> def serialize(self): <NEW_LINE> <INDENT> return dict( allow_duplicates=self._allow_duplicates, dependencies=self._dependencies, ) <NEW_LINE> <DEDENT> def deserialize(self, data): <NEW_LINE> <INDENT> setattr(self, 'allow_duplicates', data.get('allow_duplicates', False)) <NEW_LINE> setattr(self, 'dependencies', data.get('dependencies', [])) | This class wraps the parsing and validation of the optional metadata
within each Role (meta/main.yml). | 625990837b180e01f3e49df6 |
class SparseUniformNeighborSampler(object): <NEW_LINE> <INDENT> def __init__(self, adj,): <NEW_LINE> <INDENT> assert sparse.issparse(adj), "SparseUniformNeighborSampler: not sparse.issparse(adj)" <NEW_LINE> self.adj = adj <NEW_LINE> idx, partial_degrees = np.unique(adj.nonzero()[0], return_counts=True) <NEW_LINE> self.degrees = np.zeros(adj.shape[0]).astype(int) <NEW_LINE> self.degrees[idx] = partial_degrees <NEW_LINE> <DEDENT> def __call__(self, ids, n_samples=128): <NEW_LINE> <INDENT> assert n_samples > 0, 'SparseUniformNeighborSampler: n_samples must be set explicitly' <NEW_LINE> is_cuda = ids.is_cuda <NEW_LINE> ids = to_numpy(ids) <NEW_LINE> tmp = self.adj[ids] <NEW_LINE> sel = np.random.choice(self.adj.shape[1], (ids.shape[0], n_samples)) <NEW_LINE> sel = sel % self.degrees[ids].reshape(-1, 1) <NEW_LINE> tmp = tmp[ np.arange(ids.shape[0]).repeat(n_samples).reshape(-1), np.array(sel).reshape(-1) ] <NEW_LINE> tmp = np.asarray(tmp).squeeze() <NEW_LINE> tmp = Variable(torch.LongTensor(tmp)) <NEW_LINE> if is_cuda: <NEW_LINE> <INDENT> tmp = tmp.cuda() <NEW_LINE> <DEDENT> return tmp | Samples from "sparse 2D edgelist", which looks like
[
[0, 0, 0, 0, ..., 0],
[1, 2, 3, 0, ..., 0],
[1, 3, 0, 0, ..., 0],
...
]
stored as a scipy.sparse.csr_matrix.
The first row is a "dummy node", so there's an "off-by-one" issue vs `feats`.
Have to increment/decrement by 1 in a couple of places. In the regular
uniform sampler, this "dummy node" is at the end.
Ideally, obviously, we'd be doing this sampling on the GPU. But it does not
appear that torch.sparse.LongTensor can support this ATM. | 62599083fff4ab517ebcf33a |
class ConvertToASCIIPreprocessorTestCase(ChatBotTestCase): <NEW_LINE> <INDENT> def test_convert_to_ascii(self): <NEW_LINE> <INDENT> statement = Statement(text=u'Klüft skräms inför på fédéral électoral große') <NEW_LINE> cleaned = preprocessors.convert_to_ascii(statement) <NEW_LINE> normal_text = 'Kluft skrams infor pa federal electoral groe' <NEW_LINE> self.assertEqual(cleaned.text, normal_text) | Make sure that ChatterBot's ASCII conversion preprocessor works as expected. | 6259908392d797404e3898ee |