code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class BaseSourceManager(BaseManager):
    """Source manager: discovers source classes inside modules and registers
    them.  The actual launch of tasks is defined in subclasses."""

    # Subclasses set this to the base class their sources must derive from.
    SOURCE_CLASS = None

    def __init__(self, job_manager, datasource_path="dataload.sources", *args, **kwargs):
        super(BaseSourceManager, self).__init__(job_manager, *args, **kwargs)
        self.conn = get_src_conn()
        self.default_src_path = datasource_path

    def filter_class(self, klass):
        # Hook for subclasses: return a falsy value to skip the class.
        return klass

    def register_classes(self, klasses):
        raise NotImplementedError("implement me in sub-class")

    def find_classes(self, src_module, fail_on_notfound=True):
        """Yield classes in src_module that derive from SOURCE_CLASS."""
        base = self.__class__.SOURCE_CLASS
        found_one = False
        for attr in dir(src_module):
            candidate = getattr(src_module, attr)
            if not (type(candidate) == type and issubclass(candidate, base)):
                continue
            if not self.filter_class(candidate):
                continue
            found_one = True
            logger.debug("Found a class based on %s: '%s'" % (base.__name__, candidate))
            yield candidate
        if not found_one:
            if fail_on_notfound:
                raise UnknownResource("Can't find a class based on %s in module '%s'" % (base.__name__, src_module))
            return []

    def register_source(self, src, fail_on_notfound=True):
        """Register one source: a module name, a module object, or a dict of
        the form {name: [sub_sources]} (registered recursively)."""
        if isinstance(src, str):
            try:
                src_m = importlib.import_module(src)
            except ImportError:
                # fall back to the default datasource package
                try:
                    src_m = importlib.import_module("%s.%s" % (self.default_src_path, src))
                except ImportError:
                    msg = "Can't find module '%s', even in '%s'" % (src, self.default_src_path)
                    logger.error(msg)
                    raise UnknownResource(msg)
        elif isinstance(src, dict):
            assert len(src) == 1, "Should have only one element in source dict '%s'" % src
            _, sub_srcs = list(src.items())[0]
            for sub_src in sub_srcs:
                self.register_source(sub_src, fail_on_notfound)
            return
        else:
            src_m = src
        self.register_classes(self.find_classes(src_m, fail_on_notfound))

    def register_sources(self, sources):
        """Register a list of sources; failures are logged and skipped."""
        assert not isinstance(sources, str), "sources argument is a string, should pass a list"
        self.register.clear()
        for src in sources:
            try:
                # allow missing classes here: not all modules define sources
                self.register_source(src, fail_on_notfound=False)
            except UnknownResource as e:
                logger.info("Can't register source '%s', skip it; %s" % (src, e))
                import traceback
                logger.error(traceback.format_exc())
Base class to provide source management: discovery and registration. The actual launch of tasks must be defined in subclasses.
62599070cc0a2c111447c727
class MusicbrainzTrack(MusicbrainzResource): <NEW_LINE> <INDENT> __tablename__ = 'mbz_Tracks' <NEW_LINE> releases = Column('releases', String) <NEW_LINE> artist_credit = Column('artist_credit', String) <NEW_LINE> length = Column('length', Integer) <NEW_LINE> score = Column('score', String) <NEW_LINE> video = Column('video', String) <NEW_LINE> def __init__(self, data): <NEW_LINE> <INDENT> d = data <NEW_LINE> super().__init__( uri='musicbrainz:track:{}'.format(d.get('id')), id=d.get('id'), type='track', provider='musicbrainz', name=d.get('title'), artist_credit=json.dumps(d.get('artist-credit')), releases=json.dumps(d.get('releases')), score=d.get('score'), length=d.get('length'), video=d.get('video'), )
[ Track resources in Musicbrainz ]
625990704e4d562566373cb3
class ClassDict: <NEW_LINE> <INDENT> def __init__(self, initdata=None): <NEW_LINE> <INDENT> self._dict = {} <NEW_LINE> if initdata is not None: <NEW_LINE> <INDENT> self._dict.update(initdata) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> if key.startswith('_'): <NEW_LINE> <INDENT> super().__setattr__(key, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self[key] = value <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, item): <NEW_LINE> <INDENT> if item.startswith('_'): <NEW_LINE> <INDENT> return super().__getattribute__(item) <NEW_LINE> <DEDENT> return self[item] <NEW_LINE> <DEDENT> def __contains__(self, item): <NEW_LINE> <INDENT> return item in self._dict <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> self._dict[key] = value <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._dict[item] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> msg = '{} instance has no key/attribute "{}"'.format( self.__class__.__name__, item) <NEW_LINE> raise KeyError(msg) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._dict) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._dict) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, ClassDict): <NEW_LINE> <INDENT> return self._dict == other._dict <NEW_LINE> <DEDENT> return self._dict == other <NEW_LINE> <DEDENT> def update(self, other): <NEW_LINE> <INDENT> self._dict.update(other._dict) <NEW_LINE> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> return self._dict.get(key, default) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self._dict.keys() <NEW_LINE> <DEDENT> def classnamelower(self): <NEW_LINE> <INDENT> return self.__class__.__name__.lower()
Dictionary like object accessible in class notation.
62599070adb09d7d5dc0be18
class WhenSendingAnUnsupportedMethodToAttributeListView(TestCaseWithFixtureData): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpTestData(cls): <NEW_LINE> <INDENT> super(WhenSendingAnUnsupportedMethodToAttributeListView, cls).setUpTestData() <NEW_LINE> cls.client = APIClient() <NEW_LINE> cls.response = cls.client.delete( reverse("attributes"), { "type": "color" }, format="json" ) <NEW_LINE> <DEDENT> def test_should_receive_a_400_bad_request_response(self): <NEW_LINE> <INDENT> self.assertTrue(self.response.status_code, status.HTTP_400_BAD_REQUEST)
This class is to show that we expect to receive a bad request when trying to use an unsupported HTTP method
62599070442bda511e95d9ae
class DispatchTestHelperMixin(object):
    """A mixin with helpers for building messages, running dispatch, and
    asserting on the resulting outgoing mail."""

    def clear_message(self):
        self.message = Message()
        self.headers = []

    def set_package_name(self, package_name):
        self.package_name = package_name
        self.add_header('To', '{package}@{pts_fqdn}'.format(
            package=self.package_name, pts_fqdn=PTS_FQDN))

    def set_message_content(self, content):
        self.message.set_payload(content)

    def add_header(self, header_name, header_value):
        self.message.add_header(header_name, header_value)
        self.headers.append((header_name, header_value))

    def set_header(self, header_name, header_value):
        # remove any existing value first so the header is replaced, not duplicated
        if header_name in self.message:
            del self.message[header_name]
        self.add_header(header_name, header_value)

    def run_dispatch(self, sent_to_address=None):
        dispatch.process(
            force_bytes(self.message.as_string(), 'utf-8'),
            sent_to_address
        )

    def subscribe_user_with_keyword(self, email, keyword):
        subscription = Subscription.objects.create_for(
            email=email,
            package_name=self.package.name
        )
        subscription.keywords.add(Keyword.objects.get(name=keyword))

    def subscribe_user_to_package(self, user_email, package, active=True):
        Subscription.objects.create_for(
            package_name=package, email=user_email, active=active)

    def make_address_with_keyword(self, package, keyword):
        return '{package}_{keyword}@{pts_fqdn}'.format(
            package=package, keyword=keyword, pts_fqdn=PTS_FQDN)

    def assert_message_forwarded_to(self, email):
        self.assertTrue(mail.outbox)
        self.assertIn(email, (message.to[0] for message in mail.outbox))

    def assert_forward_content_equal(self, content):
        msg = mail.outbox[0].message()
        self.assertEqual(get_decoded_message_payload(msg), content)

    def assert_all_headers_found(self, headers):
        # every expected header/value pair must be present on every outgoing message
        for outgoing in mail.outbox:
            outgoing = outgoing.message()
            for header_name, header_value in headers:
                self.assertIn(header_name, outgoing)
                self.assertIn(
                    header_value, outgoing.get_all(header_name),
                    '{header_name}: {header_value} not found in {all}'.format(
                        header_name=header_name,
                        header_value=header_value,
                        all=outgoing.get_all(header_name)))

    def assert_header_equal(self, header_name, header_value):
        for outgoing in mail.outbox:
            outgoing = outgoing.message()
            self.assertEqual(outgoing[header_name], header_value)
A mixin containing methods to assist testing dispatch functionality.
62599070be8e80087fbc093c
@registry.register_model <NEW_LINE> class LSTMEncoder(t2t_model.T2TModel): <NEW_LINE> <INDENT> def body(self, features): <NEW_LINE> <INDENT> if self._hparams.initializer == "orthogonal": <NEW_LINE> <INDENT> raise ValueError("LSTM models fail with orthogonal initializer.") <NEW_LINE> <DEDENT> train = self._hparams.mode == tf.estimator.ModeKeys.TRAIN <NEW_LINE> inputs = features.get("inputs") <NEW_LINE> inputs = common_layers.flatten4d3d(inputs) <NEW_LINE> encoder_output, _ = lstm( tf.reverse(inputs, axis=[1]), self._hparams, train, "encoder") <NEW_LINE> return tf.expand_dims(encoder_output, axis=2)
LSTM encoder only.
625990708da39b475be04a9b
class Probe(Function): <NEW_LINE> <INDENT> title = "" <NEW_LINE> def __init__(self, title): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> <DEDENT> def makeCircuit(self, circuit, port, vdd, slew, capa, config, polarity): <NEW_LINE> <INDENT> circuit.C("load_" + port, port, "gnd", capa * 1e-15) <NEW_LINE> circuit.R("load_" + port, port, "gnd", 1e6) <NEW_LINE> <DEDENT> def isOutput(self): <NEW_LINE> <INDENT> return True
Identifies a port as output
6259907023849d37ff852963
class CoerceExpr(Node): <NEW_LINE> <INDENT> expr = Undefined(Node) <NEW_LINE> target_type = Undefined('mypy.types.Type') <NEW_LINE> source_type = Undefined('mypy.types.Type') <NEW_LINE> is_wrapper_class = False <NEW_LINE> def __init__(self, expr: Node, target_type: 'mypy.types.Type', source_type: 'mypy.types.Type', is_wrapper_class: bool) -> None: <NEW_LINE> <INDENT> self.expr = expr <NEW_LINE> self.target_type = target_type <NEW_LINE> self.source_type = source_type <NEW_LINE> self.is_wrapper_class = is_wrapper_class <NEW_LINE> <DEDENT> def accept(self, visitor: NodeVisitor[T]) -> T: <NEW_LINE> <INDENT> return visitor.visit_coerce_expr(self)
Implicit coercion expression. This is used only when compiling/transforming. These are inserted after type checking.
625990701f037a2d8b9e54c1
class Ethnicity(Field): <NEW_LINE> <INDENT> children = ('content', ) <NEW_LINE> def __init__(self, content=None, valid_since=None, inferred=None): <NEW_LINE> <INDENT> super(Ethnicity, self).__init__(valid_since, inferred) <NEW_LINE> self.content = content.lower() <NEW_LINE> <DEDENT> @property <NEW_LINE> def display(self): <NEW_LINE> <INDENT> if self.content: <NEW_LINE> <INDENT> return self.content.replace("_", " ").title()
An ethnicity value. The content will be a string with one of the following values (based on US census definitions): 'white', 'black', 'american_indian', 'alaska_native', 'chinese', 'filipino', 'other_asian', 'japanese', 'korean', 'viatnamese', 'native_hawaiian', 'guamanian', 'chamorro', 'samoan', 'other_pacific_islander', 'other'.
6259907091f36d47f2231ae5
class SchedulePaged(Paged): <NEW_LINE> <INDENT> _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[Schedule]'} } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(SchedulePaged, self).__init__(*args, **kwargs)
A paging container for iterating over a list of :class:`Schedule <azure.mgmt.devtestlabs.models.Schedule>` object
6259907099fddb7c1ca63a29
class TryLink(Event): <NEW_LINE> <INDENT> def __init__(self,url,expectedTitle): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.expectedTitle = expectedTitle
Used when trying an imdb (or any other source) link. This allows us to fetch the title
625990705fc7496912d48ebf
class DapricotConfig(SimpleDapricotConfig): <NEW_LINE> <INDENT> def ready(self): <NEW_LINE> <INDENT> super().ready() <NEW_LINE> self.module.autodiscover()
The default AppConfig for admin which does autodiscovery.
62599070a8370b77170f1c77
class OrderedMultisetPartitionsIntoSets_n(OrderedMultisetPartitionsIntoSets): <NEW_LINE> <INDENT> def __init__(self, n): <NEW_LINE> <INDENT> self._n = n <NEW_LINE> OrderedMultisetPartitionsIntoSets.__init__(self, True) <NEW_LINE> <DEDENT> def _repr_(self): <NEW_LINE> <INDENT> return "Ordered Multiset Partitions into Sets of integer %s" % self._n <NEW_LINE> <DEDENT> def cardinality(self): <NEW_LINE> <INDENT> if self._n <= 5: <NEW_LINE> <INDENT> orders = {0:1, 1:1, 2:2, 3:5, 4:11, 5:25} <NEW_LINE> return ZZ(orders[self._n]) <NEW_LINE> <DEDENT> t = var('t') <NEW_LINE> partspoly = prod(1+t**k for k in range(1,self._n+1)).coefficients() <NEW_LINE> deg = 0 <NEW_LINE> for alpha in composition_iterator_fast(self._n): <NEW_LINE> <INDENT> deg += prod(partspoly[d][0] for d in alpha) <NEW_LINE> <DEDENT> return ZZ(deg) <NEW_LINE> <DEDENT> def _an_element_(self): <NEW_LINE> <INDENT> alpha = Compositions(self._n, max_part=self._n//3+1).an_element() <NEW_LINE> out = [] <NEW_LINE> for a in alpha: <NEW_LINE> <INDENT> if a in {1, 2, 4}: <NEW_LINE> <INDENT> out.append([a]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if a % 2: <NEW_LINE> <INDENT> out.append([a//2+1, a//2]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out.append([a//2, a//2-1, 1]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self.element_class(self, map(frozenset, out)) <NEW_LINE> <DEDENT> def random_element(self): <NEW_LINE> <INDENT> C = Compositions(self._n).random_element() <NEW_LINE> co = [IntegerListsLex(c, min_part=1, max_part=c, min_slope=1).random_element() for c in C] <NEW_LINE> return self.element_class(self, map(frozenset, co)) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for co in _iterator_size(self._n): <NEW_LINE> <INDENT> yield self.element_class(self, co)
Ordered multiset partitions into sets of a fixed integer `n`.
6259907099cbb53fe6832797
class TestModel_Resource(): <NEW_LINE> <INDENT> def test_resource_serialization(self): <NEW_LINE> <INDENT> resource_model_json = {} <NEW_LINE> resource_model_json['resource_id'] = 'testString' <NEW_LINE> resource_model_json['resource_type'] = 'testString' <NEW_LINE> resource_model = Resource.from_dict(resource_model_json) <NEW_LINE> assert resource_model != False <NEW_LINE> resource_model_dict = Resource.from_dict(resource_model_json).__dict__ <NEW_LINE> resource_model2 = Resource(**resource_model_dict) <NEW_LINE> assert resource_model == resource_model2 <NEW_LINE> resource_model_json2 = resource_model.to_dict() <NEW_LINE> assert resource_model_json2 == resource_model_json
Test Class for Resource
625990700a50d4780f706a18
class DistributedSparseDispatcher(object): <NEW_LINE> <INDENT> def __init__(self, data_parallelism, expert_parallelism, gates): <NEW_LINE> <INDENT> self._gates = gates <NEW_LINE> self._dp = data_parallelism <NEW_LINE> self._ep = expert_parallelism <NEW_LINE> assert len(gates) == self._dp.n <NEW_LINE> self._dispatchers = self._dp(SparseDispatcher, self._ep.n, gates) <NEW_LINE> <DEDENT> def dispatch(self, inp): <NEW_LINE> <INDENT> dispatched = self._dp(lambda a, b: a.dispatch(b), self._dispatchers, inp) <NEW_LINE> ret = self._ep(tf.concat, transpose_list_of_lists(dispatched), 0) <NEW_LINE> if ret[0].dtype == tf.float32: <NEW_LINE> <INDENT> ret = self._ep(convert_gradient_to_tensor, ret) <NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> def combine(self, expert_out, multiply_by_gates=True): <NEW_LINE> <INDENT> expert_part_sizes = tf.unstack( tf.stack([d.part_sizes for d in self._dispatchers]), num=self._ep.n, axis=1) <NEW_LINE> expert_output_parts = self._ep(tf.split, expert_out, expert_part_sizes) <NEW_LINE> expert_output_parts_t = transpose_list_of_lists(expert_output_parts) <NEW_LINE> def my_combine(dispatcher, parts): <NEW_LINE> <INDENT> return dispatcher.combine( convert_gradient_to_tensor(tf.concat(parts, 0)), multiply_by_gates=multiply_by_gates) <NEW_LINE> <DEDENT> return self._dp(my_combine, self._dispatchers, expert_output_parts_t) <NEW_LINE> <DEDENT> def expert_to_gates(self): <NEW_LINE> <INDENT> return self._ep( tf.concat, transpose_list_of_lists( self._dp(lambda d: d.expert_to_gates(), self._dispatchers)), 0)
A distributed version of SparseDispatcher. Instead of one batch of input examples, we simultaneously process a list of num_datashards batches of input examples. The per-expert `Tensor`s contain a combination of examples from the different datashards. Each datashard is associated with a particular device and each expert is associated with a particular device. All per-datashard and per-expert `Tensor`s are created on those devices. There is no single-device bottleneck.
62599070f548e778e596ce3b
class Seasontology(Stream): <NEW_LINE> <INDENT> station_one = param.String(default=CMI, doc=STATION_ID) <NEW_LINE> station_two = param.String(default=MRY, doc=STATION_ID) <NEW_LINE> variable = param.ObjectSelector( default=list(VAR_RANGE.keys())[0], objects=list(VAR_RANGE.keys()), ) <NEW_LINE> data = param.Parameter(default="mesonet.agron.iastate.edu", constant=True, precedence=0.) <NEW_LINE> output = parambokeh.view.Plot() <NEW_LINE> def view(self, *args, **kwargs): <NEW_LINE> <INDENT> return plot_stations(self.station_one, self.station_two, self.variable) <NEW_LINE> <DEDENT> def event(self, **kwargs): <NEW_LINE> <INDENT> if not self.output or any(k in kwargs for k in ['station_one', 'station_two']): <NEW_LINE> <INDENT> self.output = hv.DynamicMap(self.view, streams=[self]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(Seasontology, self).event(**kwargs)
Highest level function to run the interactivity
625990704a966d76dd5f0798
class B2MuMuMuMuLinesConf(LineBuilder) : <NEW_LINE> <INDENT> __configuration_keys__ = ('B2MuMuMuMuLinePrescale', 'B2MuMuMuMuLinePostscale', 'D2MuMuMuMuLinePrescale', 'D2MuMuMuMuLinePostscale' ) <NEW_LINE> config_default={ 'B2MuMuMuMuLinePrescale' : 1, 'B2MuMuMuMuLinePostscale' : 1, 'D2MuMuMuMuLinePrescale' : 1, 'D2MuMuMuMuLinePostscale' : 1, } <NEW_LINE> def __init__(self, name = 'B2MuMuMuMu', config = None) : <NEW_LINE> <INDENT> LineBuilder.__init__(self, name, config) <NEW_LINE> default_name=name <NEW_LINE> D_name='D2MuMuMuMu' <NEW_LINE> self.selDefault = makeDefault(default_name) <NEW_LINE> self.selD2MuMuMuMu = makeD2MuMuMuMu(D_name) <NEW_LINE> self.defaultLine = StrippingLine(default_name+"Line", prescale = config['B2MuMuMuMuLinePrescale'], postscale = config['B2MuMuMuMuLinePostscale'], algos = [ self.selDefault ] ) <NEW_LINE> self.D2MuMuMuMuLine = StrippingLine(D_name+"Line", prescale = config['D2MuMuMuMuLinePrescale'], postscale = config['D2MuMuMuMuLinePostscale'], algos = [ self.selD2MuMuMuMu ] ) <NEW_LINE> self.registerLine( self.defaultLine ) <NEW_LINE> self.registerLine( self.D2MuMuMuMuLine )
Builder of: ... Usage: >>> config = { .... } >>> bsConf = Bs2MuMuLinesConf('PrescaledBs2MuMuTest',config) >>> bsLines = bsConf.lines >>> for line in bsLines : >>> print line.name(), line.outputLocation() The lines can be used directly to build a StrippingStream object. Exports as instance data members: selDefault : nominal Bs2mumu stripping line selLoose : loose Bs2MuMu stripping line to understand systematics defaultLine : Stripping line made from selDefault looseLine : Stripping line made from selLoose lines : list of lines: [ defaultLine, looseLine ] Exports as class data member: Bs2MuMuLinesConf.__configuration_keys__ : List of required configuration parameters.
62599070379a373c97d9a8ce
class WorkerAvatar(ModuleAvatar, RSAAvatar): <NEW_LINE> <INDENT> popen = None <NEW_LINE> pid = None <NEW_LINE> key = None <NEW_LINE> version = None <NEW_LINE> args = None <NEW_LINE> workunits = None <NEW_LINE> main_worker = None <NEW_LINE> task_id = None <NEW_LINE> run_task_deferred = None <NEW_LINE> remote = None <NEW_LINE> finished = False <NEW_LINE> def __init__(self, server, name): <NEW_LINE> <INDENT> self.server = server <NEW_LINE> self.name = name <NEW_LINE> node_key = server.priv_key <NEW_LINE> master_key = server.priv_key <NEW_LINE> ModuleAvatar.__init__(self, server.manager._remotes['WORKER']) <NEW_LINE> RSAAvatar.__init__(self, master_key, None, node_key, server.worker_authenticated, True) <NEW_LINE> <DEDENT> def attached(self, mind): <NEW_LINE> <INDENT> self.remote = mind <NEW_LINE> <DEDENT> def detached(self, mind): <NEW_LINE> <INDENT> logger.info('worker:%s - disconnected' % self.name) <NEW_LINE> if self.authenticated: <NEW_LINE> <INDENT> self.server.worker_disconnected(self) <NEW_LINE> <DEDENT> self.remote = None <NEW_LINE> <DEDENT> def get_pid(self, results): <NEW_LINE> <INDENT> deferred = self.remote.callRemote('getpid') <NEW_LINE> deferred.addCallback(self.set_pid) <NEW_LINE> <DEDENT> def set_pid(self, pid): <NEW_LINE> <INDENT> self.pid = pid
Class representing Workers for a Node. This class encapsulates everything about a worker process and task. It also contains all functions that the Worker is capable of calling
6259907092d797404e3897b2
class VirtualWanVpnProfileParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'vpn_server_configuration_resource_id': {'key': 'vpnServerConfigurationResourceId', 'type': 'str'}, 'authentication_method': {'key': 'authenticationMethod', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, vpn_server_configuration_resource_id: Optional[str] = None, authentication_method: Optional[Union[str, "AuthenticationMethod"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(VirtualWanVpnProfileParameters, self).__init__(**kwargs) <NEW_LINE> self.vpn_server_configuration_resource_id = vpn_server_configuration_resource_id <NEW_LINE> self.authentication_method = authentication_method
Virtual Wan Vpn profile parameters Vpn profile generation. :param vpn_server_configuration_resource_id: VpnServerConfiguration partial resource uri with which VirtualWan is associated to. :type vpn_server_configuration_resource_id: str :param authentication_method: VPN client authentication method. Possible values include: "EAPTLS", "EAPMSCHAPv2". :type authentication_method: str or ~azure.mgmt.network.v2019_08_01.models.AuthenticationMethod
62599070d268445f2663a7b4
class RetractCubeForTime(TimedCommand): <NEW_LINE> <INDENT> def __init__(self, timeoutInSeconds): <NEW_LINE> <INDENT> super().__init__('RetractCubeForTime', timeoutInSeconds) <NEW_LINE> self.requires(subsystems.claw) <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> subsystems.claw.retract() <NEW_LINE> <DEDENT> def end(self): <NEW_LINE> <INDENT> subsystems.claw.hold()
Retracts a cube into the claw for a fixed time, then holds it
625990702ae34c7f260ac998
class DataModel:
    """Root of the data model structure; all other wrapper classes are
    initialised from here.

    Each constructor argument is the raw JSON fragment for one section of
    the API response and is converted into the matching wrapper objects.
    Depending on the analysis that was requested, some sections may be
    empty.  Only read access (properties) is exposed.
    """

    def __init__(
        self,
        content="",
        language="",
        version="",
        knowledge=[],
        tokens=[],
        phrases=[],
        sentences=[],
        paragraphs=[],
        topics=[],
        main_sentences=[],
        main_phrases=[],
        main_lemmas=[],
        main_syncons=[],
        entities=[],
        categories=[],
        iptc={},
        standard={},
    ):
        # scalar metadata
        self._content = content
        self._language = language
        self._version = version
        # list sections: one wrapper object per JSON item
        self._knowledge = [Knowledge(**item) for item in knowledge]
        self._tokens = [Token(**item) for item in tokens]
        self._phrases = [Phrase(**item) for item in phrases]
        self._sentences = [Sentence(**item) for item in sentences]
        self._paragraphs = [Paragraph(**item) for item in paragraphs]
        self._topics = [Topic(**item) for item in topics]
        self._main_sentences = [MainSentence(**item) for item in main_sentences]
        self._main_phrases = [MainPhrase(**item) for item in main_phrases]
        self._main_lemmas = [MainLemma(**item) for item in main_lemmas]
        self._main_syncons = [MainSyncon(**item) for item in main_syncons]
        self._entities = [Entity(**item) for item in entities]
        self._categories = [Category(**item) for item in categories]
        # single-object sections: None when the section is absent
        self._iptc = Iptc(**iptc) if iptc else None
        self._standard = Standard(**standard) if standard else None

    @property
    def content(self):
        return self._content

    @property
    def language(self):
        return self._language

    @property
    def version(self):
        return self._version

    @property
    def knowledge(self):
        return self._knowledge

    @property
    def tokens(self):
        return self._tokens

    @property
    def phrases(self):
        return self._phrases

    @property
    def sentences(self):
        return self._sentences

    @property
    def paragraphs(self):
        return self._paragraphs

    @property
    def topics(self):
        return self._topics

    @property
    def main_sentences(self):
        return self._main_sentences

    @property
    def main_phrases(self):
        return self._main_phrases

    @property
    def main_lemmas(self):
        return self._main_lemmas

    @property
    def main_syncons(self):
        return self._main_syncons

    @property
    def entities(self):
        return self._entities

    @property
    def categories(self):
        return self._categories

    @property
    def iptc(self):
        return self._iptc

    @property
    def standard(self):
        return self._standard
This class can be considered the root of the data model structure. All other classes are initialised from here. The ObjectMapper maps the JSON contained in the API response onto this class. Not all the arguments might be valued; it depends on the type of document analysis that was requested. No intricate logic is stored inside these classes, aside from the getter/setter methods. This choice was intentional so that it would be possible, with a small effort, to replace these classes with the definitions of database tables.
625990709c8ee82313040ddf
class TestGitHubHandler(unittest.TestCase): <NEW_LINE> <INDENT> def test_init(self): <NEW_LINE> <INDENT> repo = GitHubHandler(url=TEST_REPO_BLOB_URL) <NEW_LINE> self.assertIsInstance(repo, GitHubHandler) <NEW_LINE> self.assertTrue(hasattr(repo, 'repository')) <NEW_LINE> self.assertTrue(hasattr(repo, 'user')) <NEW_LINE> self.assertTrue(hasattr(repo, 'project')) <NEW_LINE> try: <NEW_LINE> <INDENT> self.assertTrue(hasattr(repo, 'languages')) <NEW_LINE> <DEDENT> except StatusError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_strip_src_url(self): <NEW_LINE> <INDENT> repo = GitHubHandler(url=TEST_REPO_BLOB_URL) <NEW_LINE> self.assertEqual(repo.repository, TEST_REPO_SRC_URL) <NEW_LINE> <DEDENT> def test_get_user_project(self): <NEW_LINE> <INDENT> repo = GitHubHandler(url=TEST_REPO_BLOB_URL) <NEW_LINE> user, project = repo.user, repo.project <NEW_LINE> self.assertEqual((user, project), TEST_USER_PROJ) <NEW_LINE> <DEDENT> def test_languages(self): <NEW_LINE> <INDENT> pass
Tests for GitHubHandler class.
6259907038b623060ffaa4ab
class Parameter(Component, sympy.Symbol): <NEW_LINE> <INDENT> def __new__(cls, name, value=0.0, _export=True): <NEW_LINE> <INDENT> return super(sympy.Symbol, cls).__new__(cls, name) <NEW_LINE> <DEDENT> def __getnewargs__(self): <NEW_LINE> <INDENT> return (self.name, self.value, False) <NEW_LINE> <DEDENT> def __init__(self, name, value=0.0, _export=True): <NEW_LINE> <INDENT> Component.__init__(self, name, _export) <NEW_LINE> self.value = float(value) <NEW_LINE> <DEDENT> def get_value(self): <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> @property <NEW_LINE> def func(self): <NEW_LINE> <INDENT> return sympy.Symbol <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '%s(%s, %s)' % (self.__class__.__name__, repr(self.name), repr(self.value)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self)
Model component representing a named constant floating point number. Parameters are used as reaction rate constants, compartment volumes and initial (boundary) conditions for species. Parameters ---------- value : number, optional The numerical value of the parameter. Defaults to 0.0 if not specified. The provided value is converted to a float before being stored, so any value that cannot be coerced to a float will trigger an exception. Attributes ---------- Identical to Parameters (see above).
6259907016aa5153ce401d88
class RecipeViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> serializer_class = serializers.RecipeSerializer <NEW_LINE> queryset = Recipe.objects.all() <NEW_LINE> authentication_classes = (TokenAuthentication,) <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return self.queryset.filter(user=self.request.user) <NEW_LINE> <DEDENT> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.action == 'retrieve': <NEW_LINE> <INDENT> return serializers.RecipeDetailSerializer <NEW_LINE> <DEDENT> elif self.action == 'upload_image': <NEW_LINE> <INDENT> return serializers.RecipeImageSerializer <NEW_LINE> <DEDENT> return self.serializer_class <NEW_LINE> <DEDENT> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(user=self.request.user) <NEW_LINE> <DEDENT> @action(methods=['POST'], detail=True, url_path='upload-image') <NEW_LINE> def upload_image(self, request, pk=None): <NEW_LINE> <INDENT> recipe = self.get_object() <NEW_LINE> serializer = self.get_serializer( recipe, data=request.data ) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response( serializer.data, status=status.HTTP_200_OK ) <NEW_LINE> <DEDENT> return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST )
Manage recipes in the database
62599070796e427e53850027
class AvitoPhoneImgParser: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.num_img_list = dict() <NEW_LINE> logger.debug('Получаю список картинок цифр из папки "%s"', NUMBERS_DIR) <NEW_LINE> num_file_list = glob.glob("{}/*.png".format(NUMBERS_DIR)) <NEW_LINE> num_file_list = sorted(num_file_list) <NEW_LINE> if len(num_file_list) != 10: <NEW_LINE> <INDENT> raise Exception('Файлов изображений в папке "{}" может быть только 10 -- для ' 'каждой цифры свой файл. Найдено же {}.'.format(NUMBERS_DIR, len(num_file_list))) <NEW_LINE> <DEDENT> logger.debug('Выполняю загрузку файлов изображений и заполнение словаря изображений цифр') <NEW_LINE> for i, path in enumerate(num_file_list): <NEW_LINE> <INDENT> num_im = Image.open(path).convert('L') <NEW_LINE> self.num_img_list[i] = num_im <NEW_LINE> <DEDENT> logger.debug('Закончена загрузка файлов изображений и заполнение словаря изображений цифр') <NEW_LINE> <DEDENT> def parse_from_data(self, byte_data): <NEW_LINE> <INDENT> logger.debug('Открываю изображение телефона от байтового массива') <NEW_LINE> ph_im = Image.open(BytesIO(byte_data)).convert('L') <NEW_LINE> ph_im = crop_im_phone(ph_im) <NEW_LINE> logger.debug('Закончена подготовка изображения') <NEW_LINE> ph_w, ph_h = ph_im.size <NEW_LINE> phone_number = "" <NEW_LINE> logger.debug('Начинаю разбор изображения телефона') <NEW_LINE> t = time.clock() <NEW_LINE> for offset in range(ph_w + 1): <NEW_LINE> <INDENT> for num, im_num in self.num_img_list.items(): <NEW_LINE> <INDENT> num_w, num_h = im_num.size <NEW_LINE> if offset + num_w > ph_w: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> find = True <NEW_LINE> for x in range(num_w): <NEW_LINE> <INDENT> for y in range(num_h): <NEW_LINE> <INDENT> ph_pxl = ph_im.getpixel((x + offset, y)) <NEW_LINE> num_pxl = im_num.getpixel((x, y)) <NEW_LINE> if ph_pxl != num_pxl: <NEW_LINE> <INDENT> find = False <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if find: <NEW_LINE> <INDENT> phone_number += str(num) <NEW_LINE> 
<DEDENT> <DEDENT> <DEDENT> logger.debug('Разбор изображения телефона закончено за {:.3f} секунд'.format(time.clock() - t)) <NEW_LINE> logger.debug('Закончил разбор, телефон: "%s", длина %s символов', phone_number, len(phone_number)) <NEW_LINE> return phone_number
Класс для разбора изображения номера телефона, который дает авито
625990704c3428357761bb63
class Timeline(node.Node): <NEW_LINE> <INDENT> def __init__(self, attrs): <NEW_LINE> <INDENT> super(Timeline, self).__init__("Timeline") <NEW_LINE> self.fps = attrs.getValue("fps") <NEW_LINE> self.resources_acquisition = attrs.getValue("resources_acquisition") <NEW_LINE> self.size = attrs.getValue("size") <NEW_LINE> self.enable = attrs.getValue("enable") <NEW_LINE> self.start_frame = attrs.getValue("start_frame") <NEW_LINE> self.end_frame = attrs.getValue("end_frame") <NEW_LINE> self.scale = attrs.getValue("scale") <NEW_LINE> self.behavior_layers = [] <NEW_LINE> self.actuator_list = None <NEW_LINE> self._function_map = {'ActuatorList': Timeline.attach_actuator_list, 'BehaviorLayer': Timeline.attach_behavior_layer} <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not other: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not isinstance(other, Timeline): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> rdict = self.__dict__ <NEW_LINE> ldict = other.__dict__ <NEW_LINE> for key in rdict.keys(): <NEW_LINE> <INDENT> if (key == "parent_node" or key == "children_node" or key == "_function_map" or key == "behavior_layers"): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if rdict[key] != ldict[key]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> if self.enable == "0": <NEW_LINE> <INDENT> rfld = self.behavior_layers[0].behavior_keyframes[0].diagram <NEW_LINE> lfld = other.behavior_layers[0].behavior_keyframes[0].diagram <NEW_LINE> if rfld != lfld: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if len(self.behavior_layers) != len(other.behavior_layers): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for i in range(len(self.behavior_layers)): <NEW_LINE> <INDENT> if (len(self.behavior_layers[i].behavior_keyframes) != len(other.behavior_layers[i].behavior_keyframes)): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for j in range(len(self.behavior_layers[i].behavior_keyframes)): 
<NEW_LINE> <INDENT> if (self.behavior_layers[i].behavior_keyframes[j] != other.behavior_layers[i].behavior_keyframes[j]): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def attach_behavior_layer(self, behavior_layer): <NEW_LINE> <INDENT> behavior_layer.id = len(self.behavior_layers) <NEW_LINE> self.behavior_layers.append(behavior_layer) <NEW_LINE> <DEDENT> def attach_actuator_list(self, actuator_list): <NEW_LINE> <INDENT> self.actuator_list = actuator_list
Stores informations about Timeline in the xar format
62599070283ffb24f3cf5158
class StandardBars(BaseBars): <NEW_LINE> <INDENT> def __init__(self, metric: str, threshold: int = 50000, batch_size: int = 20000000): <NEW_LINE> <INDENT> BaseBars.__init__(self, metric, batch_size) <NEW_LINE> self.threshold = threshold <NEW_LINE> <DEDENT> def _reset_cache(self): <NEW_LINE> <INDENT> self.open_price = None <NEW_LINE> self.high_price, self.low_price = -np.inf, np.inf <NEW_LINE> self.cum_statistics = {'cum_ticks': 0, 'cum_dollar_value': 0, 'cum_volume': 0, 'cum_buy_volume': 0} <NEW_LINE> <DEDENT> def _extract_bars(self, data: Union[list, tuple, np.ndarray]) -> list: <NEW_LINE> <INDENT> list_bars = [] <NEW_LINE> for row in data: <NEW_LINE> <INDENT> date_time = row[0] <NEW_LINE> self.tick_num += 1 <NEW_LINE> price = np.float(row[1]) <NEW_LINE> volume = row[2] <NEW_LINE> dollar_value = price * volume <NEW_LINE> signed_tick = self._apply_tick_rule(price) <NEW_LINE> if isinstance(self.threshold, (int, float)): <NEW_LINE> <INDENT> threshold = self.threshold <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> threshold = self.threshold.iloc[self.threshold.index.get_loc(date_time, method='pad')] <NEW_LINE> <DEDENT> if self.open_price is None: <NEW_LINE> <INDENT> self.open_price = price <NEW_LINE> <DEDENT> self.high_price, self.low_price = self._update_high_low(price) <NEW_LINE> self.cum_statistics['cum_ticks'] += 1 <NEW_LINE> self.cum_statistics['cum_dollar_value'] += dollar_value <NEW_LINE> self.cum_statistics['cum_volume'] += volume <NEW_LINE> if signed_tick == 1: <NEW_LINE> <INDENT> self.cum_statistics['cum_buy_volume'] += volume <NEW_LINE> <DEDENT> if self.cum_statistics[self.metric] >= threshold: <NEW_LINE> <INDENT> self._create_bars(date_time, price, self.high_price, self.low_price, list_bars) <NEW_LINE> self._reset_cache() <NEW_LINE> <DEDENT> <DEDENT> return list_bars
Contains all of the logic to construct the standard bars from chapter 2. This class shouldn't be used directly. We have added functions to the package such as get_dollar_bars which will create an instance of this class and then construct the standard bars, to return to the user. This is because we wanted to simplify the logic as much as possible, for the end user.
62599070ac7a0e7691f73d97
class DeleteTopicRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TopicName = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TopicName = params.get("TopicName")
DeleteTopic request structure.
62599070d486a94d0ba2d86e
class Employee(object): <NEW_LINE> <INDENT> def __init__(self, fixed_wage): <NEW_LINE> <INDENT> self.fixed_wage = fixed_wage
Employee of a casino
62599070be8e80087fbc093e
class BoliChoice(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=50, verbose_name=_("name")) <NEW_LINE> request_choice = models.BooleanField(default=False, verbose_name=_("Request Choices")) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _("Boli Choice") <NEW_LINE> verbose_name_plural = _("Boli Choices") <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Boli choices so we can add and remove
625990705fc7496912d48ec0
class ErrorListError(TAMError): <NEW_LINE> <INDENT> def __init__(self, errorList): <NEW_LINE> <INDENT> self.errorList = errorList <NEW_LINE> <DEDENT> def errors(self): <NEW_LINE> <INDENT> return self.errorList
Raised to indicate that one or more errors occurred when executing TileSetTemplate.createTiles. It can be queried to retrieve that list.
62599070627d3e7fe0e08737
class KeywordResult(): <NEW_LINE> <INDENT> def __init__(self, normalized_text: str, start_time: float, end_time: float, confidence: float) -> None: <NEW_LINE> <INDENT> self.normalized_text = normalized_text <NEW_LINE> self.start_time = start_time <NEW_LINE> self.end_time = end_time <NEW_LINE> self.confidence = confidence <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, _dict: Dict) -> 'KeywordResult': <NEW_LINE> <INDENT> args = {} <NEW_LINE> if 'normalized_text' in _dict: <NEW_LINE> <INDENT> args['normalized_text'] = _dict.get('normalized_text') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'normalized_text\' not present in KeywordResult JSON' ) <NEW_LINE> <DEDENT> if 'start_time' in _dict: <NEW_LINE> <INDENT> args['start_time'] = _dict.get('start_time') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'start_time\' not present in KeywordResult JSON' ) <NEW_LINE> <DEDENT> if 'end_time' in _dict: <NEW_LINE> <INDENT> args['end_time'] = _dict.get('end_time') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'end_time\' not present in KeywordResult JSON' ) <NEW_LINE> <DEDENT> if 'confidence' in _dict: <NEW_LINE> <INDENT> args['confidence'] = _dict.get('confidence') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'confidence\' not present in KeywordResult JSON' ) <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> return cls.from_dict(_dict) <NEW_LINE> <DEDENT> def to_dict(self) -> Dict: <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'normalized_text') and self.normalized_text is not None: <NEW_LINE> <INDENT> _dict['normalized_text'] = self.normalized_text <NEW_LINE> <DEDENT> if hasattr(self, 'start_time') and self.start_time is not None: <NEW_LINE> <INDENT> _dict['start_time'] = self.start_time <NEW_LINE> <DEDENT> if 
hasattr(self, 'end_time') and self.end_time is not None: <NEW_LINE> <INDENT> _dict['end_time'] = self.end_time <NEW_LINE> <DEDENT> if hasattr(self, 'confidence') and self.confidence is not None: <NEW_LINE> <INDENT> _dict['confidence'] = self.confidence <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> return self.to_dict() <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return json.dumps(self.to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other: 'KeywordResult') -> bool: <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other: 'KeywordResult') -> bool: <NEW_LINE> <INDENT> return not self == other
Information about a match for a keyword from speech recognition results. :attr str normalized_text: A specified keyword normalized to the spoken phrase that matched in the audio input. :attr float start_time: The start time in seconds of the keyword match. :attr float end_time: The end time in seconds of the keyword match. :attr float confidence: A confidence score for the keyword match in the range of 0.0 to 1.0.
62599070f548e778e596ce3d
class network: <NEW_LINE> <INDENT> __slots__ = 'numHiddenNodes', 'inputNodes', 'outputNodes', 'hiddenNodes', 'biasNodes', 'v', 'layer', 'weights' <NEW_LINE> def __init__(self, numHiddenNodes): <NEW_LINE> <INDENT> self.inputNodes = [] <NEW_LINE> self.outputNodes = [1, 2, 3, 4] <NEW_LINE> self.hiddenNodes = [] <NEW_LINE> self.biasNodes = [] <NEW_LINE> self.layer = [[] for i in range(3)] <NEW_LINE> self.v = [0 for i in range(numHiddenNodes + 9)] <NEW_LINE> self.weights = [[0 for i in range(numHiddenNodes + 9)] for j in range(numHiddenNodes + 9)] <NEW_LINE> self.setup(numHiddenNodes) <NEW_LINE> <DEDENT> def setup(self, numHiddenNodes): <NEW_LINE> <INDENT> self.biasNodes.append(5) <NEW_LINE> for i in range(numHiddenNodes): <NEW_LINE> <INDENT> self.hiddenNodes.append(i + 6) <NEW_LINE> <DEDENT> self.biasNodes.append(6 + numHiddenNodes) <NEW_LINE> self.inputNodes.append(7 + numHiddenNodes) <NEW_LINE> self.inputNodes.append(8 + numHiddenNodes) <NEW_LINE> self.layer[0].extend(self.inputNodes) <NEW_LINE> self.layer[0].append(self.biasNodes[1]) <NEW_LINE> self.layer[1].extend(self.hiddenNodes) <NEW_LINE> self.layer[1].append(self.biasNodes[0]) <NEW_LINE> self.layer[2].extend(self.outputNodes) <NEW_LINE> self.v[self.biasNodes[0]] = 1 <NEW_LINE> self.v[self.biasNodes[1]] = 1 <NEW_LINE> self.assignInitialWeights() <NEW_LINE> <DEDENT> def assignInitialWeights(self): <NEW_LINE> <INDENT> for i in self.inputNodes: <NEW_LINE> <INDENT> for j in self.hiddenNodes: <NEW_LINE> <INDENT> self.weights[i][j] = random.uniform(-1.0, 1.0) <NEW_LINE> <DEDENT> <DEDENT> for j in self.hiddenNodes: <NEW_LINE> <INDENT> self.weights[self.biasNodes[1]][j] = random.uniform(-1.0, 1.0) <NEW_LINE> <DEDENT> for i in self.hiddenNodes: <NEW_LINE> <INDENT> for j in self.outputNodes: <NEW_LINE> <INDENT> self.weights[i][j] = random.uniform(-1.0, 1.0) <NEW_LINE> <DEDENT> <DEDENT> for j in self.outputNodes: <NEW_LINE> <INDENT> self.weights[self.biasNodes[0]][j] = random.uniform(-1.0, 1.0)
Class network to create nodes of the neural network
625990707c178a314d78e843
class HeaderAction(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'header_action_type': {'required': True}, 'header_name': {'required': True}, } <NEW_LINE> _attribute_map = { 'header_action_type': {'key': 'headerActionType', 'type': 'str'}, 'header_name': {'key': 'headerName', 'type': 'str'}, 'value': {'key': 'value', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(HeaderAction, self).__init__(**kwargs) <NEW_LINE> self.header_action_type = kwargs['header_action_type'] <NEW_LINE> self.header_name = kwargs['header_name'] <NEW_LINE> self.value = kwargs.get('value', None)
An action that can manipulate an http header. All required parameters must be populated in order to send to Azure. :param header_action_type: Required. Which type of manipulation to apply to the header. Possible values include: "Append", "Delete", "Overwrite". :type header_action_type: str or ~azure.mgmt.frontdoor.models.HeaderActionType :param header_name: Required. The name of the header this action will apply to. :type header_name: str :param value: The value to update the given header name with. This value is not used if the actionType is Delete. :type value: str
62599070379a373c97d9a8d0
class Audio(_AudioBase): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> @staticmethod <NEW_LINE> def from_result(result): <NEW_LINE> <INDENT> if result is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return Audio( file_id=result.get('file_id'), duration=result.get('duration'), mime_type=result.get('mime_type'), file_size=result.get('file_size') )
This object represents a generic audio file (not voice note). Attributes: file_id (str) :Unique identifier for this file duration (int) :Duration of the audio in seconds as defined by sender performer (str) :*Optional.* Performer of the audio as defined by sender or by audio tags title (str) :*Optional.* Title of the audio as defined by sender or by audio tags mime_type (str) :*Optional.* MIME type of the file as defined by sender file_size (int) :*Optional.* File size
625990702ae34c7f260ac99a
class Row(object): <NEW_LINE> <INDENT> def __init__(self, state="start", symbol=">", write=">", direction=">", new_state="start"): <NEW_LINE> <INDENT> self.state = state <NEW_LINE> self.symbol = symbol <NEW_LINE> self.write = write <NEW_LINE> self.direction = direction <NEW_LINE> self.new_state = new_state <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return(" {:16}{:7}{:7}{:7}{:16}".format(self.state, self.symbol, self.write, self.direction, self.new_state))
A row in a Turing machine program
62599070e5267d203ee6d015
class logger: <NEW_LINE> <INDENT> log_console = logging.getLogger('console') <NEW_LINE> log_fileout = logging.getLogger('fileout') <NEW_LINE> enableFileout = False <NEW_LINE> @staticmethod <NEW_LINE> def init(options): <NEW_LINE> <INDENT> log_format = '%(asctime)s - %(levelname)s: %(message)s' <NEW_LINE> log_handler_console = logging.StreamHandler() <NEW_LINE> log_handler_console.setFormatter(logging.Formatter(log_format)) <NEW_LINE> logger.log_console.setLevel(options.loglevel.upper()) <NEW_LINE> logger.log_console.addHandler(log_handler_console) <NEW_LINE> logging.enableFileout = False <NEW_LINE> if options.logfile != '': <NEW_LINE> <INDENT> log_handler_fileout = logging.FileHandler(options.logfile) <NEW_LINE> log_handler_fileout.setFormatter(logging.Formatter(log_format)) <NEW_LINE> logger.log_fileout.setLevel(options.loglevel) <NEW_LINE> logger.log_fileout.addHandler(log_handler_fileout) <NEW_LINE> logger.enableFileout = True <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def debug(msg): <NEW_LINE> <INDENT> logger.printlog('debug', msg) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def info(msg): <NEW_LINE> <INDENT> logger.printlog('info', msg) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def warning(msg): <NEW_LINE> <INDENT> logger.printlog('warning', msg) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def error(msg): <NEW_LINE> <INDENT> logger.printlog('error', msg) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def critical(msg): <NEW_LINE> <INDENT> _, exc_obj, tb = sys.exc_info() <NEW_LINE> lineno = tb.tb_lineno <NEW_LINE> logger.printlog('critical', '%s - line %d - %s' % (msg, lineno, str(exc_obj))) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def printlog(level, msg): <NEW_LINE> <INDENT> getattr(logger.log_console, level)(msg) <NEW_LINE> if logger.enableFileout: <NEW_LINE> <INDENT> getattr(logger.log_fileout, level)(msg)
Configure a simple logger for both console and file output
625990707d847024c075dc89
class Ticumulator: <NEW_LINE> <INDENT> INPUT_FIELDS = ('time', 'bid', 'bidsize', 'ask', 'asksize', 'last', 'lastsize', 'lasttime', 'volume', 'open_interest') <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.time = float('NaN') <NEW_LINE> self.bid = float('NaN') <NEW_LINE> self.bidsize = float('NaN') <NEW_LINE> self.ask = float('NaN') <NEW_LINE> self.asksize = float('NaN') <NEW_LINE> self.last = float('NaN') <NEW_LINE> self.lastsize = float('NaN') <NEW_LINE> self.lasttime = float('NaN') <NEW_LINE> self.volume = float('NaN') <NEW_LINE> self.open_interest = float('NaN') <NEW_LINE> self.open = float('NaN') <NEW_LINE> self.high = float('NaN') <NEW_LINE> self.low = float('NaN') <NEW_LINE> self.close = float('NaN') <NEW_LINE> self.sum_last = 0.0 <NEW_LINE> self.sum_vol = 0.0 <NEW_LINE> <DEDENT> def add(self, what, value): <NEW_LINE> <INDENT> self.time = time.time() <NEW_LINE> if what not in self.INPUT_FIELDS[1:]: <NEW_LINE> <INDENT> raise ValueError("Invalid `what` '{}'".format(what)) <NEW_LINE> <DEDENT> if not math.isfinite(value) or value < 0: <NEW_LINE> <INDENT> raise ValueError("Invalid value {}".format(value)) <NEW_LINE> <DEDENT> setattr(self, what, value) <NEW_LINE> if what == 'last': <NEW_LINE> <INDENT> if math.isnan(self.open): <NEW_LINE> <INDENT> self.open = self.high = self.low = self.close = value <NEW_LINE> <DEDENT> self.high = max(self.high, value) <NEW_LINE> self.low = min(self.low, value) <NEW_LINE> self.close = value <NEW_LINE> <DEDENT> if what == 'lastsize': <NEW_LINE> <INDENT> self.sum_last += self.last * self.lastsize <NEW_LINE> self.sum_vol += self.lastsize <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def vwap(self): <NEW_LINE> <INDENT> return (self.sum_last / self.sum_vol) if self.sum_vol else 0.0 <NEW_LINE> <DEDENT> def bar(self): <NEW_LINE> <INDENT> bar = self.peek() <NEW_LINE> self.open = self.close <NEW_LINE> self.high = self.last <NEW_LINE> self.low = self.last <NEW_LINE> self.sum_last = self.sum_vol = 0.0 <NEW_LINE> return bar 
<NEW_LINE> <DEDENT> def peek(self): <NEW_LINE> <INDENT> return time.time(), self.bid, self.bidsize, self.ask, self.asksize, self.last, self.lastsize, self.lasttime, self.open, self.high, self.low, self.close, self.vwap, self.volume, self.open_interest
Accumulates ticks (bid/ask/last/volume changes) into bars (open/high/low/close/vwap). Bars contains the traditional OHLCV data, as well as bid/ask/last data and volume-weighted average price. You can use the :class:`Bar` namedtuple to wrap the output of this class for convenient attribute access. `bar()` will return data since the last `bar()` call (or creation), allowing you to make bars of any duration you like. Until a tick of each type has been added, the first results may contain ``NaN`` values and the volume may be off. `time` is Unix timestamp (float sec since epoch) of the end of the bar; `lasttime` is Unix time of last trade. `volume` is total cumulative volume for the day. For US stocks, it is divided by 100.
625990701f5feb6acb1644a2
class _MemoryInfra(perf_benchmark.PerfBenchmark): <NEW_LINE> <INDENT> def CreateCoreTimelineBasedMeasurementOptions(self): <NEW_LINE> <INDENT> return CreateCoreTimelineBasedMemoryMeasurementOptions() <NEW_LINE> <DEDENT> def SetExtraBrowserOptions(self, options): <NEW_LINE> <INDENT> SetExtraBrowserOptionsForMemoryMeasurement(options)
Base class for new-generation memory benchmarks based on memory-infra. This benchmark records data using memory-infra (https://goo.gl/8tGc6O), which is part of chrome tracing, and extracts it using timeline-based measurements.
62599070cc0a2c111447c729
class Git: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.base_command = 'git' <NEW_LINE> self.meta_data_base_dir = '.git' <NEW_LINE> self.osx = Osx() <NEW_LINE> <DEDENT> def set_base_command(self, base_command): <NEW_LINE> <INDENT> self.base_command = base_command <NEW_LINE> <DEDENT> def set_redirect_output_to_log(self, redirect_output_to_log=True): <NEW_LINE> <INDENT> self.osx.set_redirect_output_to_log(redirect_output_to_log) <NEW_LINE> <DEDENT> def exec_sub_command(self, sub_command): <NEW_LINE> <INDENT> self.osx.exec_command(self.base_command + ' ' + sub_command) <NEW_LINE> <DEDENT> def exec_sub_command_output(self, sub_command): <NEW_LINE> <INDENT> return self.osx.exec_command_output(self.base_command + ' ' + sub_command) <NEW_LINE> <DEDENT> def get_current_branch(self, path='.'): <NEW_LINE> <INDENT> head_file_path = os.path.join(path, self.meta_data_base_dir, 'HEAD') <NEW_LINE> with open(head_file_path) as fp: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> line = fp.readline().rstrip('\n') <NEW_LINE> refs_heads = line.split(': ')[1] <NEW_LINE> branch_name = refs_heads[len('refs/heads/'):] <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise GitParseMetaDataError("Can't parse branch name from head file %s: %s" % (head_file_path, str(e))) <NEW_LINE> <DEDENT> <DEDENT> refs_heads_path = os.path.normpath(os.path.join(path, self.meta_data_base_dir, refs_heads)) <NEW_LINE> with open(refs_heads_path) as fp2: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> revision = fp2.readline().rstrip('\n') <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise GitParseMetaDataError("Can't parse revision from %s: %s" % (refs_heads_path, str(e))) <NEW_LINE> <DEDENT> <DEDENT> return (branch_name, revision) <NEW_LINE> <DEDENT> def clone(self, url, path): <NEW_LINE> <INDENT> cmd = 'clone ' + url + ' ' + path <NEW_LINE> self.exec_sub_command(cmd) <NEW_LINE> <DEDENT> def get_clean(self, url, path, branch_name='master', revision=None): 
<NEW_LINE> <INDENT> if not os.path.exists(path): <NEW_LINE> <INDENT> self.clone(url, path) <NEW_LINE> <DEDENT> with self.osx.ChangeDirectory(path): <NEW_LINE> <INDENT> self.exec_sub_command('reset --hard') <NEW_LINE> self.exec_sub_command('fetch') <NEW_LINE> self.exec_sub_command('checkout ' + branch_name) <NEW_LINE> self.exec_sub_command('merge origin/' + branch_name) <NEW_LINE> if revision is not None: <NEW_LINE> <INDENT> self.exec_sub_command('reset %s --hard' % revision)
A git command wrapper.
62599070097d151d1a2c2922
class DjangoFilterBackend(BaseFilterBackend): <NEW_LINE> <INDENT> default_filter_set = FilterSet <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> assert django_filters, 'Using DjangoFilterBackend, but django-filter is not installed' <NEW_LINE> <DEDENT> def get_filter_class(self, view): <NEW_LINE> <INDENT> filter_class = getattr(view, 'filter_class', None) <NEW_LINE> filter_fields = getattr(view, 'filter_fields', None) <NEW_LINE> view_model = getattr(view, 'model', None) <NEW_LINE> if filter_class: <NEW_LINE> <INDENT> filter_model = filter_class.Meta.model <NEW_LINE> assert issubclass(filter_model, view_model), 'FilterSet model %s does not match view model %s' % (filter_model, view_model) <NEW_LINE> return filter_class <NEW_LINE> <DEDENT> if filter_fields: <NEW_LINE> <INDENT> class AutoFilterSet(self.default_filter_set): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = view_model <NEW_LINE> fields = filter_fields <NEW_LINE> <DEDENT> <DEDENT> return AutoFilterSet <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def filter_queryset(self, request, queryset, view): <NEW_LINE> <INDENT> filter_class = self.get_filter_class(view) <NEW_LINE> if filter_class: <NEW_LINE> <INDENT> return filter_class(request.QUERY_PARAMS, queryset=queryset) <NEW_LINE> <DEDENT> return queryset
A filter backend that uses django-filter.
625990707047854f46340c69
class CarouselsRequest: <NEW_LINE> <INDENT> url = "https://frontend.vh.yandex.ru/v23/carousels_videohub.json" <NEW_LINE> headers = { "Origin": "https://yandex.ru", "Accept-Encoding": "gzip, deflate, br", "Accept-Language": "ru-RU,ru;q=0.9,en-US;q=0.8,en;q=0.7", "User-Agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36", "Accept": "application/json, text/javascript, */*; q=0.01", } <NEW_LINE> query_params = { "filter": "carousels", "delete_filtered": "0", "locale": "ru", "from": "efir", "service": "ya-main", "disable_trackings": "1", "vitrina_limit": "1", } <NEW_LINE> @classmethod <NEW_LINE> def get_response(cls, tag, offset, limit, num_docs=None, cache_hash=None): <NEW_LINE> <INDENT> params = cls.query_params.copy() <NEW_LINE> params.update({"offset": f"{offset}", "limit": f"{limit}"}) <NEW_LINE> if tag != "common": <NEW_LINE> <INDENT> params["tag"] = tag <NEW_LINE> <DEDENT> if cache_hash: <NEW_LINE> <INDENT> params["cache_hash"] = cache_hash <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> response = requests.request( "GET", cls.url, headers=cls.headers, params=params ) <NEW_LINE> <DEDENT> except ConnectionError: <NEW_LINE> <INDENT> response = Response() <NEW_LINE> carousels_logger.warning("Порвано соединение с ручкой carousels") <NEW_LINE> response.status_code = 500 <NEW_LINE> <DEDENT> return CarouselsData(response)
Ручка возвращает список каруселей, принадлежащих разделу tag - название раздела (limit - offset) число каруселей, который должен вернуть запрос vitrina_limit - задаёт число документов, которые будут возвращаться в данных карусели поскольку данная ручка в коде используется для получения id каруселей, числу документов присвоено минимальное значение по умолчанию
62599070b7558d5895464b8b
class ChannelInfoResult(object): <NEW_LINE> <INDENT> pass
Object to hold channel info results
62599070796e427e53850029
class Braintree(object): <NEW_LINE> <INDENT> def charge(self, params): <NEW_LINE> <INDENT> return braintree.Transaction.sale(params)
Sends data to Braintree.
6259907032920d7e50bc78f8
class TimerStats: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass
This class may be used to keep information on performance of your code
625990704c3428357761bb65
class TestBasicsNoFileListStorage(TestBasics): <NEW_LINE> <INDENT> temporary_file_list = True
Repeat basic tests with temporary file list
62599070aad79263cf430067
class MachineLearningComputeManagementClient(object): <NEW_LINE> <INDENT> def __init__( self, credentials, subscription_id, base_url=None): <NEW_LINE> <INDENT> self.config = MachineLearningComputeManagementClientConfiguration(credentials, subscription_id, base_url) <NEW_LINE> self._client = ServiceClient(self.config.credentials, self.config) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self.api_version = '2017-08-01-preview' <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self.operationalization_clusters = OperationalizationClustersOperations( self._client, self.config, self._serialize, self._deserialize) <NEW_LINE> self.machine_learning_compute = MachineLearningComputeOperations( self._client, self.config, self._serialize, self._deserialize)
These APIs allow end users to operate on Azure Machine Learning Compute resources. They support the following operations:&lt;ul&gt;&lt;li&gt;Create or update a cluster&lt;/li&gt;&lt;li&gt;Get a cluster&lt;/li&gt;&lt;li&gt;Patch a cluster&lt;/li&gt;&lt;li&gt;Delete a cluster&lt;/li&gt;&lt;li&gt;Get keys for a cluster&lt;/li&gt;&lt;li&gt;Check if updates are available for system services in a cluster&lt;/li&gt;&lt;li&gt;Update system services in a cluster&lt;/li&gt;&lt;li&gt;Get all clusters in a resource group&lt;/li&gt;&lt;li&gt;Get all clusters in a subscription&lt;/li&gt;&lt;/ul&gt; :ivar config: Configuration for client. :vartype config: MachineLearningComputeManagementClientConfiguration :ivar operationalization_clusters: OperationalizationClusters operations :vartype operationalization_clusters: azure.mgmt.machinelearningcompute.operations.OperationalizationClustersOperations :ivar machine_learning_compute: MachineLearningCompute operations :vartype machine_learning_compute: azure.mgmt.machinelearningcompute.operations.MachineLearningComputeOperations :param credentials: Credentials needed for the client to connect to Azure. :type credentials: :mod:`A msrestazure Credentials object<msrestazure.azure_active_directory>` :param subscription_id: The Azure subscription ID. :type subscription_id: str :param str base_url: Service URL
62599070d486a94d0ba2d870
class Meta: <NEW_LINE> <INDENT> series_name = 'fan_value' <NEW_LINE> fields = ['value'] <NEW_LINE> tags = ['host', 'type'] <NEW_LINE> autocommit = False
Meta class for the SeriesHelper.
62599070f548e778e596ce3e
class SpaceObjectBaseTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_clear_init(self): <NEW_LINE> <INDENT> test = SpaceObjectBase() <NEW_LINE> setters = ('x', 'y', 'velocity_x', 'velocity_y') <NEW_LINE> for item in setters: <NEW_LINE> <INDENT> self.assertEqual(getattr(test, item), 0) <NEW_LINE> <DEDENT> <DEDENT> def test_property(self): <NEW_LINE> <INDENT> test = SpaceObjectBase() <NEW_LINE> setters = ('x', 'y', 'velocity_x', 'velocity_y','position') <NEW_LINE> for item in setters: <NEW_LINE> <INDENT> value = 5 * 5 + random.randint(0,5) <NEW_LINE> setattr(test, item, value) <NEW_LINE> ret_val = getattr(test, item) <NEW_LINE> self.assertEqual(value, ret_val) <NEW_LINE> <DEDENT> <DEDENT> def test_str(self): <NEW_LINE> <INDENT> test = SpaceObjectBase() <NEW_LINE> ret_val = str(test) <NEW_LINE> self.assertTrue(isinstance(ret_val, str))
Test case docstring
62599070a8370b77170f1c7a
class MillerRabin: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> print('Hello world, I am a MillerRabin object instance!') <NEW_LINE> <DEDENT> def is_prime(self, n, k=128): <NEW_LINE> <INDENT> if n == 2 or n == 3: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if n <= 1 or n % 2 == 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> s = 0 <NEW_LINE> r = n - 1 <NEW_LINE> while r & 1 == 0: <NEW_LINE> <INDENT> s += 1 <NEW_LINE> r // 2 <NEW_LINE> <DEDENT> for _ in range(k): <NEW_LINE> <INDENT> a = randrange(2, n - 1) <NEW_LINE> x = pow(a, r, n) <NEW_LINE> if x != 1 and x != n - 1: <NEW_LINE> <INDENT> j = 1 <NEW_LINE> while j < s and x != n - 1: <NEW_LINE> <INDENT> x = pow(x, 2, n) <NEW_LINE> if x == 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> j += 1 <NEW_LINE> <DEDENT> if x != n - 1: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def generate_prime_candidate(self, length): <NEW_LINE> <INDENT> p = getrandbits(length) <NEW_LINE> p |= (1 << length - 1) | 1 <NEW_LINE> return p <NEW_LINE> <DEDENT> def generate_prime_number(self, length=1024): <NEW_LINE> <INDENT> p = 4 <NEW_LINE> while not is_prime(p, 128): <NEW_LINE> <INDENT> p = generate_prime_candidate(length) <NEW_LINE> <DEDENT> return p
The Miller Rabin algorithm. The algorithm. 1) Generate a prime candidate. 2) Test if the generated number is prime. 3) If the number is not prime, restart from beginning.
62599070f9cc0f698b1c5f23
class Course: <NEW_LINE> <INDENT> def __init__(self, course_name, course_time, course_cost, teacher, admin): <NEW_LINE> <INDENT> self.course_name = course_name <NEW_LINE> self.course_time = course_time <NEW_LINE> self.course_cost = course_cost <NEW_LINE> self.create_time = time.strftime('%Y-%m-%d %H:%M:%S') <NEW_LINE> self.teacher = teacher <NEW_LINE> self.create_admin = admin
创建课程
625990701f037a2d8b9e54c3
class IdPSPForm(FormHandler): <NEW_LINE> <INDENT> form_type = 'idp' <NEW_LINE> signature = ['SAMLResponse', 'TARGET'] <NEW_LINE> def submit(self, opener, res): <NEW_LINE> <INDENT> log.info('Submitting IdP SAML form') <NEW_LINE> data = self.data <NEW_LINE> url = urlparse.urljoin(res.url, data['form']['action']) <NEW_LINE> data = urllib.urlencode({'SAMLResponse': data['SAMLResponse']['value'], 'TARGET': 'cookie'}) <NEW_LINE> request = Request(url, data=data) <NEW_LINE> log.debug("POST: %s" % request.get_full_url()) <NEW_LINE> response = opener.open(request) <NEW_LINE> return request, response
IDP Post-back Form Handler
6259907091f36d47f2231ae7
class Albums: <NEW_LINE> <INDENT> def on_get(self, req, resp): <NEW_LINE> <INDENT> resp.body = json.dumps(list(_albums.values())) <NEW_LINE> resp.status = falcon.HTTP_200 <NEW_LINE> <DEDENT> def on_post(self, req, resp): <NEW_LINE> <INDENT> payload = req.stream.read().decode('utf-8') <NEW_LINE> if not payload: <NEW_LINE> <INDENT> raise falcon.HTTPBadRequest(title='Empty body', description='Valid JSON document required') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> album = json.loads(payload) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise falcon.HTTPBadRequest(title='Invalid body', description='Valid JSON document required') <NEW_LINE> <DEDENT> album.update({'id': uuid.uuid4().hex}) <NEW_LINE> _albums.update({album['id']: album}) <NEW_LINE> resp.set_header('Location', '%s/%s' % (req.uri, album['id'])) <NEW_LINE> resp.body = json.dumps(album) <NEW_LINE> resp.status = falcon.HTTP_201
API resource for the collection of albums.
625990704f6381625f19a101
class IDPool(object): <NEW_LINE> <INDENT> def __init__(self, start = 0): <NEW_LINE> <INDENT> self.free_ids = [] <NEW_LINE> self.new_ids = itertools.count(start) <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> if self.free_ids: <NEW_LINE> <INDENT> return self.free_ids.pop() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.new_ids.next() <NEW_LINE> <DEDENT> <DEDENT> def put_back(self, id): <NEW_LINE> <INDENT> self.free_ids.append(id)
Manage pool of IDs
625990708e7ae83300eea942
class AnalysisFinder: <NEW_LINE> <INDENT> name = 'analysis' <NEW_LINE> def __init__(self, deployment, **args): <NEW_LINE> <INDENT> self.deployment = deployment <NEW_LINE> self.service_account_key = args.get('credentials') <NEW_LINE> with AnalysisAgent.ignore_logging_msg(): <NEW_LINE> <INDENT> self.analysis = AnalysisAgent(deployment=self.deployment, service_account_key=self.service_account_key) <NEW_LINE> <DEDENT> <DEDENT> def find(self, expression): <NEW_LINE> <INDENT> if not self.service_account_key: <NEW_LINE> <INDENT> raise DcpDiagException("No auth information provided, skip checking Secondary Analysis for workflows.") <NEW_LINE> <DEDENT> field_name, field_value = expression.split('=') <NEW_LINE> field_name = re.sub(r"wf([^a-z])", "workflow\\1", field_name) <NEW_LINE> if field_name == 'workflow_uuid': <NEW_LINE> <INDENT> print(f"Searching for workflow with UUID {field_name}...") <NEW_LINE> with self.analysis.ignore_logging_msg(): <NEW_LINE> <INDENT> return self.analysis.query_by_workflow_uuid(uuid=field_value) <NEW_LINE> <DEDENT> <DEDENT> elif field_name == 'bundle_uuid': <NEW_LINE> <INDENT> print(f"Searching for workflow(s) with Bundle {field_name}...") <NEW_LINE> with self.analysis.ignore_logging_msg(): <NEW_LINE> <INDENT> candidates = self.analysis.query_by_bundle(bundle_uuid=field_value) <NEW_LINE> return candidates <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print(f"Sorry I don't know how to find a {field_name}") <NEW_LINE> exit(1)
dcpdig @analysis workflow_uuid=<id> dcpdig @analysis bundle_uuid=<id>
6259907026068e7796d4e1ed
class HourParser(Parser): <NEW_LINE> <INDENT> MIN_VALUE = 0 <NEW_LINE> MAX_VALUE = 23
Custom parser for hours
625990707c178a314d78e844
class Hooks(list): <NEW_LINE> <INDENT> def get_read_hooks(self): <NEW_LINE> <INDENT> return (h for h in self if isinstance(h, AbstractReadHook)) <NEW_LINE> <DEDENT> def get_inject_hooks(self): <NEW_LINE> <INDENT> return (h for h in self if isinstance(h, AbstractInjectHook))
Runtime representation of registered pydov hooks, i.e. a list of instances of AbstractReadHook and/or AbstractInjectHook.
625990702ae34c7f260ac99c
class ConvertWarp(FSLCommand): <NEW_LINE> <INDENT> input_spec = ConvertWarpInputSpec <NEW_LINE> output_spec = ConvertWarpOutputSpec <NEW_LINE> _cmd = 'convertwarp'
Use FSL `convertwarp <http://fsl.fmrib.ox.ac.uk/fsl/fsl-4.1.9/fnirt/warp_utils.html>`_ for combining multiple transforms into one. Examples -------- >>> from nipype.interfaces.fsl import ConvertWarp >>> warputils = ConvertWarp() >>> warputils.inputs.warp1 = "warpfield.nii" >>> warputils.inputs.reference = "T1.nii" >>> warputils.inputs.relwarp = True >>> warputils.inputs.output_type = "NIFTI_GZ" >>> warputils.cmdline # doctest: +ELLIPSIS 'convertwarp --ref=T1.nii --rel --warp1=warpfield.nii --out=T1_concatwarp.nii.gz' >>> res = invwarp.run() # doctest: +SKIP
62599070a17c0f6771d5d803
class AxonError(Exception): <NEW_LINE> <INDENT> def __init__(self, msg, exc): <NEW_LINE> <INDENT> self.msg = msg <NEW_LINE> self.exc = exc <NEW_LINE> super(AxonError, self).__init__(msg)
Pass.
6259907016aa5153ce401d8c
class DefaultActionLog(AbstractActionLog): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def is_error(e: Optional[Tuple[Exception, str]]): <NEW_LINE> <INDENT> return e is not None and not isinstance(e[0], ImmediateRedirectException) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def expand_error_desc(e: Tuple[Exception, str]) -> Tuple[str, Union[str, None], Union[str, None]]: <NEW_LINE> <INDENT> if not isinstance(e[0], Exception): <NEW_LINE> <INDENT> return f'Unknown Error [{e[0]}]', None, None <NEW_LINE> <DEDENT> elif isinstance(e[0], UserActionException): <NEW_LINE> <INDENT> return e[0].__class__.__name__, e[0].internal_message, e[1] <NEW_LINE> <DEDENT> elif hasattr(e, 'message'): <NEW_LINE> <INDENT> return e[0].__class__.__name__, e.message, e[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return e[0].__class__.__name__, str(e[0]), e[1] <NEW_LINE> <DEDENT> <DEDENT> def collect_args(self, request, args_map, action_log_mapper, full_action_name, err_desc, proc_time): <NEW_LINE> <INDENT> log_data = {'args': {}} <NEW_LINE> if action_log_mapper: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> log_data['args'] = action_log_mapper(request) <NEW_LINE> <DEDENT> except Exception as ex: <NEW_LINE> <INDENT> logging.getLogger(__name__).error('Failed to map request info to log: {}'.format(ex)) <NEW_LINE> <DEDENT> <DEDENT> corpora = log_data['args'].get('corpora', []) <NEW_LINE> if len(corpora) == 0: <NEW_LINE> <INDENT> log_data['args']['corpora'] = [args_map.corpname] + args_map.align <NEW_LINE> <DEDENT> if self.is_error(err_desc): <NEW_LINE> <INDENT> err_name, err_msg, err_anchor = self.expand_error_desc(err_desc) <NEW_LINE> log_data['error'] = dict(name=err_name, message=err_msg, anchor=err_anchor) <NEW_LINE> <DEDENT> log_data['date'] = datetime.datetime.today().strftime('%s.%%f' % settings.DEFAULT_DATETIME_FORMAT) <NEW_LINE> log_data['action'] = full_action_name <NEW_LINE> log_data['user_id'] = request.session.get('user', {}).get('id') <NEW_LINE> if proc_time is not None: 
<NEW_LINE> <INDENT> log_data['proc_time'] = proc_time <NEW_LINE> <DEDENT> log_data['request'] = { 'REMOTE_ADDR': request.environ.get('REMOTE_ADDR'), 'HTTP_X_FORWARDED_FOR': request.environ.get('HTTP_X_FORWARDED_FOR'), 'HTTP_USER_AGENT': request.environ.get('HTTP_USER_AGENT') } <NEW_LINE> return log_data <NEW_LINE> <DEDENT> def write_action(self, data: str) -> None: <NEW_LINE> <INDENT> logging.getLogger('QUERY').info(json.dumps(data))
DefaultActionLog stores action logs via standard 'logging' package as initialized and configured by KonText. Custom action arguments are stored in a nested dictionary under the 'args' key. The plug-in stores also - date, user_id, action (name), proc_time and some request properties (client IP, client user agent).
625990704a966d76dd5f079d
class _DummyVariantMergeStrategy(variant_merge_strategy.VariantMergeStrategy): <NEW_LINE> <INDENT> def modify_bigquery_schema(self, schema, info_keys): <NEW_LINE> <INDENT> schema.fields.append(bigquery.TableFieldSchema( name='ADDED_BY_MERGER', type=TableFieldConstants.TYPE_STRING, mode=TableFieldConstants.MODE_NULLABLE))
A dummy strategy. It just adds a new field to the schema.
62599070e76e3b2f99fda2b5
class _dummySerial: <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._read_num = 0 <NEW_LINE> self._data = {} <NEW_LINE> self._data[0] = [0x0D, 0x01, 0x00, 0x01, 0x02, 0x53, 0x45, 0x10, 0x0C, 0x2F, 0x01, 0x01, 0x00, 0x00] <NEW_LINE> self._data[1] = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] <NEW_LINE> self._data[2] = [0x0b, 0x15, 0x00, 0x2a, 0x12, 0x34, 0x41, 0x05, 0x03, 0x01, 0x00, 0x70] <NEW_LINE> self._data[3] = [0x0b, 0x15, 0x00, 0x2a, 0x12, 0x34, 0x41, 0x05, 0x03, 0x01, 0x00, 0x70] <NEW_LINE> self._data[4] = [0x0a, 0x51, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] <NEW_LINE> self._data[5] = [0x0b, 0x15, 0x00, 0x2a, 0x12, 0x34, 0x41, 0x05, 0x03, 0x01, 0x00, 0x70] <NEW_LINE> self._data[6] = [0x0a, 0x51, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] <NEW_LINE> self._data[7] = [0x0a, 0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00] <NEW_LINE> <DEDENT> def write(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def flushInput(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def read(self, data=None): <NEW_LINE> <INDENT> if data is not None or self._read_num >= len(self._data): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> res = self._data[self._read_num] <NEW_LINE> self._read_num = self._read_num + 1 <NEW_LINE> return res <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass
Dummy class for testing
625990703539df3088ecdb4f
class ProtocolTests(TestCase): <NEW_LINE> <INDENT> def test_interfaces(self): <NEW_LINE> <INDENT> proto = Protocol() <NEW_LINE> self.assertTrue(verifyObject(IProtocol, proto)) <NEW_LINE> self.assertTrue(verifyObject(ILoggingContext, proto)) <NEW_LINE> <DEDENT> def test_logPrefix(self): <NEW_LINE> <INDENT> class SomeThing(Protocol): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.assertEqual("SomeThing", SomeThing().logPrefix()) <NEW_LINE> <DEDENT> def test_makeConnection(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> class SomeProtocol(Protocol): <NEW_LINE> <INDENT> def connectionMade(self): <NEW_LINE> <INDENT> result.append(self.transport) <NEW_LINE> <DEDENT> <DEDENT> transport = object() <NEW_LINE> protocol = SomeProtocol() <NEW_LINE> protocol.makeConnection(transport) <NEW_LINE> self.assertEqual(result, [transport])
Tests for L{twisted.internet.protocol.Protocol}.
62599070d486a94d0ba2d871
class AncestorTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.a = TC.objects.create(name="a") <NEW_LINE> self.b = TC.objects.create(name="b") <NEW_LINE> self.c = TC.objects.create(name="c") <NEW_LINE> self.b.parent2 = self.a <NEW_LINE> self.b.save() <NEW_LINE> self.c.parent2 = self.b <NEW_LINE> self.c.save() <NEW_LINE> <DEDENT> def test_ancestors(self): <NEW_LINE> <INDENT> self.failUnlessEqual(list(self.a.get_ancestors()), []) <NEW_LINE> self.failUnlessEqual(list(self.b.get_ancestors()), [self.a]) <NEW_LINE> self.failUnlessEqual( list(self.a.get_ancestors(include_self=True)), [self.a] ) <NEW_LINE> self.failUnlessEqual( list(self.c.get_ancestors(include_self=True)), [self.c, self.b, self.a] ) <NEW_LINE> self.failUnlessEqual( list(self.c.get_ancestors(include_self=True, depth=1)), [self.c, self.b] ) <NEW_LINE> <DEDENT> def test_descendants(self): <NEW_LINE> <INDENT> self.failUnlessEqual(list(self.c.get_descendants()), []) <NEW_LINE> self.failUnlessEqual(list(self.b.get_descendants()), [self.c]) <NEW_LINE> self.failUnlessEqual( list(self.a.get_descendants(include_self=True)), [self.a, self.b, self.c] ) <NEW_LINE> self.failUnlessEqual( list(self.c.get_descendants(include_self=True)), [self.c] ) <NEW_LINE> <DEDENT> def test_children(self): <NEW_LINE> <INDENT> self.failUnlessEqual(list(self.c.get_children()), []) <NEW_LINE> self.failUnlessEqual(list(self.b.get_children()), [self.c])
Testing things to do with ancestors.
62599070a8370b77170f1c7c
class UpdateStudyMutation(graphene.Mutation): <NEW_LINE> <INDENT> class Arguments: <NEW_LINE> <INDENT> id = graphene.ID( required=True, description="The ID of the study to update" ) <NEW_LINE> input = StudyInput( required=True, description="Attributes for the new study" ) <NEW_LINE> <DEDENT> study = graphene.Field(StudyNode) <NEW_LINE> def mutate(self, info, id, input): <NEW_LINE> <INDENT> user = info.context.user <NEW_LINE> if not user.has_perm("studies.change_study"): <NEW_LINE> <INDENT> raise GraphQLError("Not allowed") <NEW_LINE> <DEDENT> if not ( settings.FEAT_DATASERVICE_UPDATE_STUDIES and settings.DATASERVICE_URL ): <NEW_LINE> <INDENT> raise GraphQLError( "Updating studies is not enabled. " "You may need to make sure that the api is configured with a " "valid dataservice url and FEAT_DATASERVICE_UPDATE_STUDIES " "has been set." ) <NEW_LINE> <DEDENT> model, kf_id = from_global_id(id) <NEW_LINE> study = Study.objects.get(kf_id=kf_id) <NEW_LINE> updated_att = [] <NEW_LINE> for attr, val in input.items(): <NEW_LINE> <INDENT> in_val = getattr(study, attr) <NEW_LINE> if (in_val or val) and in_val != val: <NEW_LINE> <INDENT> updated_att.append(attr.replace("_", " ")) <NEW_LINE> <DEDENT> <DEDENT> attributes = sanitize_fields(input) <NEW_LINE> if "short code" in updated_att: <NEW_LINE> <INDENT> short_code = attributes["short_code"] <NEW_LINE> if Study.objects.filter(short_code=short_code).count(): <NEW_LINE> <INDENT> raise GraphQLError("Study short_code provided was not unique.") <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> resp = requests.patch( f"{settings.DATASERVICE_URL}/studies/{kf_id}", json=attributes, timeout=settings.REQUESTS_TIMEOUT, headers=settings.REQUESTS_HEADERS, ) <NEW_LINE> <DEDENT> except requests.exceptions.RequestException as e: <NEW_LINE> <INDENT> raise GraphQLError(f"Problem updating study: {e}") <NEW_LINE> <DEDENT> if not resp.status_code == 200 or "results" not in resp.json(): <NEW_LINE> <INDENT> error = resp.json() <NEW_LINE> if 
"_status" in error: <NEW_LINE> <INDENT> error = error["_status"] <NEW_LINE> <DEDENT> if "message" in error: <NEW_LINE> <INDENT> error = error["message"] <NEW_LINE> <DEDENT> raise GraphQLError(f"Problem updating study: {error}") <NEW_LINE> <DEDENT> attributes = {**input, **resp.json()["results"]} <NEW_LINE> if "created_at" in attributes: <NEW_LINE> <INDENT> attributes["created_at"] = parse(attributes["created_at"]) <NEW_LINE> <DEDENT> for attr, value in attributes.items(): <NEW_LINE> <INDENT> setattr(study, attr, value) <NEW_LINE> <DEDENT> study.save() <NEW_LINE> message = ( f"{user.display_name} updated " f"{', '.join(updated_att)} of study {study.kf_id}" ) <NEW_LINE> event = Event( organization=study.organization, study=study, description=message, event_type="SD_UPD", ) <NEW_LINE> if not user._state.adding: <NEW_LINE> <INDENT> event.user = user <NEW_LINE> <DEDENT> event.save() <NEW_LINE> return CreateStudyMutation(study=study)
Mutation to update an existing study
6259907023849d37ff852969
class Token(TreeDataItem): <NEW_LINE> <INDENT> def __init__(self, text, target_str=None, sentence=None, position=None): <NEW_LINE> <INDENT> data = { 'text': text } <NEW_LINE> super(Token, self).__init__( data=data, target_str=target_str, parent=sentence, position=position, children=None ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def text(self): <NEW_LINE> <INDENT> return self.data['text'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def sentence(self): <NEW_LINE> <INDENT> return self.parent
A single word, symbol, or other minimal element of text.
625990703d592f4c4edbc794
class Coord(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def add(arg1, arg2): <NEW_LINE> <INDENT> return (arg1[0] +arg2[0], arg1[1] +arg2[1]) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def subtract(arg1, arg2): <NEW_LINE> <INDENT> return (arg1[0] -arg2[0], arg1[1] -arg2[1]) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def negate(arg): <NEW_LINE> <INDENT> return (-arg[0], -arg[1]) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def grid_normalize(arg): <NEW_LINE> <INDENT> cx, cy = 0,0 <NEW_LINE> if arg[0] > 0: <NEW_LINE> <INDENT> cx = 1 <NEW_LINE> <DEDENT> elif arg[0] < 0: <NEW_LINE> <INDENT> cx = -1 <NEW_LINE> <DEDENT> if arg[1] > 0: <NEW_LINE> <INDENT> cy = 1 <NEW_LINE> <DEDENT> elif arg[1] < 0: <NEW_LINE> <INDENT> cy = -1 <NEW_LINE> <DEDENT> return (cx, cy) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def linearize(arg, dims): <NEW_LINE> <INDENT> return arg[0] +arg[1]*dims[0] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def unlinearize(lidx, dims): <NEW_LINE> <INDENT> icol = lidx % dims[0] <NEW_LINE> irow = lidx // dims[0] <NEW_LINE> return (icol, irow) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def oob_rect(coord, dims): <NEW_LINE> <INDENT> cx, cy = coord <NEW_LINE> dx, dy = dims <NEW_LINE> return not ((0 <= cx < dx) and (0 <= cy < dy))
Helper class for coordinate objects. Discrete and two dimensions. (col,row)
62599070a8370b77170f1c7d
class DesignTeamFunctionLookup(ArchiveLookup): <NEW_LINE> <INDENT> model = DesignTeamFunction <NEW_LINE> @staticmethod <NEW_LINE> def get_query(request, term): <NEW_LINE> <INDENT> return DesignTeamFunction.objects.filter(title__icontains=term)
lookup with a search filed for the Design Team Function Model
6259907056ac1b37e630393c
class MMError(Exception): <NEW_LINE> <INDENT> def __init__(self, error_dict): <NEW_LINE> <INDENT> super(MMError, self).__init__() <NEW_LINE> self.error_code = error_dict.get('code', None) <NEW_LINE> self.error_message = error_dict.get('message', None) <NEW_LINE> logging.warn(self.to_result()) <NEW_LINE> <DEDENT> def to_result(self): <NEW_LINE> <INDENT> return json.dumps({ 'status': 'error', 'error_code': self.error_code, 'error_message': self.error_message }, indent=4)
An error thrown by a request handler
62599070009cb60464d02deb
class BuildingBlockDeformable(link.Chain): <NEW_LINE> <INDENT> def __init__(self, n_layer, in_channels, mid_channels, out_channels, stride, initialW=None): <NEW_LINE> <INDENT> links = [ ('a', BottleneckA( in_channels, mid_channels, out_channels, stride, initialW)) ] <NEW_LINE> links.append(('b1', BottleneckB(out_channels, mid_channels, initialW))) <NEW_LINE> links.append(('b2', DeformableBottleneckB(out_channels, mid_channels, initialW))) <NEW_LINE> super(BuildingBlockDeformable, self).__init__(**dict(links)) <NEW_LINE> self.forward = links <NEW_LINE> <DEDENT> def __call__(self, x, test=True): <NEW_LINE> <INDENT> for name, func in self.forward: <NEW_LINE> <INDENT> x = func(x, test=test) <NEW_LINE> <DEDENT> return x
A building block that consists of several Bottleneck layers. Args: n_layer (int): Number of layers used in the building block. in_channels (int): Number of channels of input arrays. mid_channels (int): Number of channels of intermediate arrays. out_channels (int): Number of channels of output arrays. stride (int or tuple of ints): Stride of filter application. initialW (4-D array): Initial weight value used in the convolutional layers.
625990707b180e01f3e49cbe
class SentimentRating(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.rating = 0.5 <NEW_LINE> <DEDENT> def update_rating(self, latest_score) -> None: <NEW_LINE> <INDENT> self.rating = (0.2*self.rating + 0.8*latest_score)/1 <NEW_LINE> <DEDENT> def get_rating(self) -> float: <NEW_LINE> <INDENT> return self.rating
This will be the overarching container class for our sentiment ratings, we'll use methods in here to return the current instance of sentiment rating, and also the overarching average rating. The overarching rating weighs the current sentiment more than previous sentiment.
6259907067a9b606de5476fd
class SentinmentCalculator: <NEW_LINE> <INDENT> def __init__(self, tweets, sentiments): <NEW_LINE> <INDENT> self.tweets = tweets <NEW_LINE> self.sentiments = sentiments <NEW_LINE> <DEDENT> def analyze_tweet_sentiments(self): <NEW_LINE> <INDENT> total_sentiments = 0.0 <NEW_LINE> for tweet in self.tweets: <NEW_LINE> <INDENT> total_sentiments += self.average_sentiment(tweet) <NEW_LINE> <DEDENT> return total_sentiments/len(self.tweets) <NEW_LINE> <DEDENT> def average_sentiment(self, tweet): <NEW_LINE> <INDENT> total = 0.0 <NEW_LINE> count = 0 <NEW_LINE> average = 0.0 <NEW_LINE> words = self.extract_words(tweet) <NEW_LINE> for word in words: <NEW_LINE> <INDENT> word = word.replace(" ' ", '') <NEW_LINE> try: <NEW_LINE> <INDENT> curr_sentiment = self.sentiments[word] <NEW_LINE> print("current sentiment of " + word + " : " + str(curr_sentiment)) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> print(word + " not found in sentiments") <NEW_LINE> curr_sentiment = 0.0 <NEW_LINE> <DEDENT> total += curr_sentiment <NEW_LINE> count += 1 <NEW_LINE> <DEDENT> print("Total : " + str(total)) <NEW_LINE> print("Count : " + str(count)) <NEW_LINE> try: <NEW_LINE> <INDENT> average = total/count <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("Divide by zero error!") <NEW_LINE> <DEDENT> print("AVERAGE: " + str(average)) <NEW_LINE> return average <NEW_LINE> <DEDENT> def extract_words(self, tweet): <NEW_LINE> <INDENT> numbers = ['1','2','3','4','5','6','7','8','9'] <NEW_LINE> for i in string.punctuation : <NEW_LINE> <INDENT> tweet = tweet.replace(i, ' ') <NEW_LINE> <DEDENT> for i in numbers: <NEW_LINE> <INDENT> tweet = tweet.replace(i, ' ') <NEW_LINE> <DEDENT> return tweet.split() <NEW_LINE> <DEDENT> @property <NEW_LINE> def sentiments(self): <NEW_LINE> <INDENT> return self.sentiments <NEW_LINE> <DEDENT> @property <NEW_LINE> def tweets(self): <NEW_LINE> <INDENT> return self.tweets
Given a list of TWEETS and dictionary of SENTIMENTS, performs operations to find the average sentiment, etc.
62599070dd821e528d6da5db
class Appointment(EmbeddedDocument): <NEW_LINE> <INDENT> meta = {'collection': 'appointments'} <NEW_LINE> uid = IntField(min_value=1, unique=True, requied=True) <NEW_LINE> datetime = DateTimeField(required=True) <NEW_LINE> patient_id = IntField(min_value=1) <NEW_LINE> new_patient = BooleanField(required=True) <NEW_LINE> applied_on = DateTimeField(requied=True) <NEW_LINE> purpose = StringField(max_length=500) <NEW_LINE> requested_through = StringField(choices=['website', 'phone', 'email']) <NEW_LINE> patient_turned_up = BooleanField()
Represents an appointment.
6259907055399d3f05627dcc
class ConnectionMonitorQueryResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'source_status': {'key': 'sourceStatus', 'type': 'str'}, 'states': {'key': 'states', 'type': '[ConnectionStateSnapshot]'}, } <NEW_LINE> def __init__( self, *, source_status: Optional[Union[str, "ConnectionMonitorSourceStatus"]] = None, states: Optional[List["ConnectionStateSnapshot"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ConnectionMonitorQueryResult, self).__init__(**kwargs) <NEW_LINE> self.source_status = source_status <NEW_LINE> self.states = states
List of connection states snapshots. :param source_status: Status of connection monitor source. Possible values include: "Unknown", "Active", "Inactive". :type source_status: str or ~azure.mgmt.network.v2019_04_01.models.ConnectionMonitorSourceStatus :param states: Information about connection states. :type states: list[~azure.mgmt.network.v2019_04_01.models.ConnectionStateSnapshot]
625990707b25080760ed893d
class BrokenTestException(RichSkipTestException): <NEW_LINE> <INDENT> def __init__(self, item_number, reason): <NEW_LINE> <INDENT> super(BrokenTestException, self).__init__(reason) <NEW_LINE> self.item_number = item_number <NEW_LINE> self.reason = reason <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%s, '%s')" % (self.__class__.__name__, repr(self.item_number), self.reason) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Broken Test: %s (defect #%s)" % (self.reason, self.item_number)
Skip a test because it is known to be broken. This avoids constantly re-running tests that are known to fail or cause an error, yet allows us to still mark them as failures. In a perfect Test Driven Development world, all broken tests would be fixed immediately by the developer that caused them to break. However, test frameworks are not necessarily used in a perfect TDD world (for instance, consider a situation where tests are being written against an existing legacy codebase), so pragmatically we need some process to manage the broken tests. Some example scenarios: * The development team decide that the defect exposed by the test can be tolerated in the short-term, but will allocate resources to fix it in the long-term. * It is not known how to fix the defect exposed by the test. * The defect exposed by the test is caused by a third party product. * The defect exposed by the test cannot be fixed without making incompatible changes to the product's external API (and these changes need to be carefully managed). This differs from the standard unittest.expectedFailure decorator in that it does not attempt to run the test. It is also more flexible (expectedFailure can only be applied to functions as a decorator), and there is support for it in Nose. It is expected that each broken test will have an associated defect item in the project's issue tracker. This must be included in the exception as a separate field.
625990705fcc89381b266db1
class Keyword(Cipher): <NEW_LINE> <INDENT> def __init__(self, key): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> self.arr = Keyword.fill_array(self.key) <NEW_LINE> self.keyword_dict = {key_lttr: letter for letter, key_lttr in zip(list(string.ascii_uppercase), self.arr)} <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fill_array(key): <NEW_LINE> <INDENT> arr = list(key) <NEW_LINE> for letter in string.ascii_uppercase: <NEW_LINE> <INDENT> if letter not in list(key): <NEW_LINE> <INDENT> arr.append(letter) <NEW_LINE> <DEDENT> <DEDENT> return arr <NEW_LINE> <DEDENT> def encrypt(self, text): <NEW_LINE> <INDENT> output = [] <NEW_LINE> text = text.upper() <NEW_LINE> text_list = list(text) <NEW_LINE> for char in text_list: <NEW_LINE> <INDENT> output.append(self.get_key(char)) <NEW_LINE> <DEDENT> return ''.join(output) <NEW_LINE> <DEDENT> def decrypt(self, text): <NEW_LINE> <INDENT> output = [] <NEW_LINE> text = text.upper() <NEW_LINE> text_list = list(text) <NEW_LINE> for char in text_list: <NEW_LINE> <INDENT> output.append(self.keyword_dict.get(char, char)) <NEW_LINE> <DEDENT> return ''.join(output) <NEW_LINE> <DEDENT> def get_key(self, letter): <NEW_LINE> <INDENT> for key, value in self.keyword_dict.items(): <NEW_LINE> <INDENT> if value == letter: <NEW_LINE> <INDENT> return key <NEW_LINE> <DEDENT> <DEDENT> return letter
keyword cipher encrypts the data by matching the letter with a letter in a special alphabet which starts with the keyword that was given & for decryption vice versa.
625990709c8ee82313040de2
class MoveWatchFolderTasksToDoneSignal: <NEW_LINE> <INDENT> def __init__(self, application_preferences): <NEW_LINE> <INDENT> self.application_preferences = application_preferences <NEW_LINE> <DEDENT> def on_move_watch_folder_tasks_to_done_switch_state_set(self, move_watch_folder_tasks_to_done_switch, user_data=None): <NEW_LINE> <INDENT> self.application_preferences.is_watch_folder_move_tasks_to_done_enabled = move_watch_folder_tasks_to_done_switch.get_active()
Handles the signal emitted when the Move Completed Watch Folder Tasks to the Done Folder option is changed in the preferences dialog.
62599070cc0a2c111447c72b
class ReseekFile: <NEW_LINE> <INDENT> def __init__(self, file): <NEW_LINE> <INDENT> self.file = file <NEW_LINE> self.buffer_file = StringIO() <NEW_LINE> self.at_beginning = 1 <NEW_LINE> try: <NEW_LINE> <INDENT> self.beginning = file.tell() <NEW_LINE> <DEDENT> except (IOError, AttributeError): <NEW_LINE> <INDENT> self.beginning = 0 <NEW_LINE> <DEDENT> self._use_buffer = 1 <NEW_LINE> <DEDENT> def seek(self, offset, whence = 0): <NEW_LINE> <INDENT> if whence != 0: <NEW_LINE> <INDENT> raise TypeError("Unexpected whence value of %s; expecting 0" % (whence,)) <NEW_LINE> <DEDENT> if offset != self.beginning: <NEW_LINE> <INDENT> raise TypeError("Unexpected offset value of %r; expecting '%s'" % (offset, self.beginning)) <NEW_LINE> <DEDENT> self.buffer_file.seek(0) <NEW_LINE> self.at_beginning = 1 <NEW_LINE> <DEDENT> def tell(self): <NEW_LINE> <INDENT> if not self.at_beginning: <NEW_LINE> <INDENT> raise TypeError("ReseekFile cannot tell except at the beginning of file") <NEW_LINE> <DEDENT> return self.beginning <NEW_LINE> <DEDENT> def _read(self, size): <NEW_LINE> <INDENT> if size < 0: <NEW_LINE> <INDENT> y = self.file.read() <NEW_LINE> z = self.buffer_file.read() + y <NEW_LINE> if self._use_buffer: <NEW_LINE> <INDENT> self.buffer_file.write(y) <NEW_LINE> <DEDENT> return z <NEW_LINE> <DEDENT> if size == 0: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> x = self.buffer_file.read(size) <NEW_LINE> if len(x) < size: <NEW_LINE> <INDENT> y = self.file.read(size - len(x)) <NEW_LINE> if self._use_buffer: <NEW_LINE> <INDENT> self.buffer_file.write(y) <NEW_LINE> <DEDENT> return x + y <NEW_LINE> <DEDENT> return x <NEW_LINE> <DEDENT> def read(self, size = -1): <NEW_LINE> <INDENT> x = self._read(size) <NEW_LINE> if self.at_beginning and x: <NEW_LINE> <INDENT> self.at_beginning = 0 <NEW_LINE> <DEDENT> self._check_no_buffer() <NEW_LINE> return x <NEW_LINE> <DEDENT> def readline(self): <NEW_LINE> <INDENT> s = self.buffer_file.readline() <NEW_LINE> if s[-1:] == "\n": <NEW_LINE> <INDENT> 
return s <NEW_LINE> <DEDENT> t = self.file.readline() <NEW_LINE> if self._use_buffer: <NEW_LINE> <INDENT> self.buffer_file.write(t) <NEW_LINE> <DEDENT> self._check_no_buffer() <NEW_LINE> return s + t <NEW_LINE> <DEDENT> def readlines(self): <NEW_LINE> <INDENT> s = self.read() <NEW_LINE> lines = [] <NEW_LINE> i, j = 0, s.find("\n") <NEW_LINE> while j > -1: <NEW_LINE> <INDENT> lines.append(s[i:j+1]) <NEW_LINE> i = j+1 <NEW_LINE> j = s.find("\n", i) <NEW_LINE> <DEDENT> if i < len(s): <NEW_LINE> <INDENT> lines.append(s[i:]) <NEW_LINE> <DEDENT> return lines <NEW_LINE> <DEDENT> def _check_no_buffer(self): <NEW_LINE> <INDENT> if self._use_buffer == 0 and self.buffer_file.tell() == len(self.buffer_file.getvalue()): <NEW_LINE> <INDENT> self.seek = getattr(self.file, "seek", None) <NEW_LINE> self.tell = getattr(self.file, "tell", None) <NEW_LINE> self.read = self.file.read <NEW_LINE> self.readline = self.file.readline <NEW_LINE> self.readlines = self.file.readlines <NEW_LINE> del self.buffer_file <NEW_LINE> <DEDENT> <DEDENT> def nobuffer(self): <NEW_LINE> <INDENT> self._use_buffer = 0
Wrap a file handle to allow seeks back to the beginning. Takes a file handle in the constructor. See the module docstring for more documentation.
62599070baa26c4b54d50b5f
class PaymentCharge: <NEW_LINE> <INDENT> QUALNAME = "pyrogram.raw.base.PaymentCharge" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> raise TypeError("Base types can only be used for type checking purposes: " "you tried to use a base type instance as argument, " "but you need to instantiate one of its constructors instead. " "More info: https://docs.pyrogram.org/telegram/base/payment-charge")
This base type has 1 constructor available. Constructors: .. hlist:: :columns: 2 - :obj:`PaymentCharge <pyrogram.raw.types.PaymentCharge>`
62599070e76e3b2f99fda2b7
class GpVarDiagMixinResponse(base_schemas.StrictMappingSchema): <NEW_LINE> <INDENT> var = base_schemas.ListOfFloats()
A mixin response colander schema for the variance of a gaussian process. **Output fields** :ivar var: (*list of float64*) variances of the GP at ``points_to_evaluate``; i.e., diagonal of the ``var`` response from gp_mean_var (:class:`moe.views.schemas.base_schemas.ListOfFloats`) **Example Response** .. sourcecode:: http { "var": ["0.228910114429","0.996177332647","0.228910114429"], }
625990708da39b475be04aa3
class TicketNoteAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ["ticket_id", "created", "notes"] <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = TicketNote
Overrides the default Django admin site display for the Ticket Notes app.
62599070ac7a0e7691f73d9d
class Tumblr(BlogBase): <NEW_LINE> <INDENT> def copy_relations(self, oldinstance): <NEW_LINE> <INDENT> super(Tumblr, self).copy_relations(oldinstance) <NEW_LINE> <DEDENT> def _regex_id(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> blogname = re.match(r'(http://)?([\w_-]+)(.tumblr.com)?(.*)', self.url).group(2) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise goscale_models.WrongAttribute(attribute='url') <NEW_LINE> <DEDENT> return blogname <NEW_LINE> <DEDENT> def _get_data_source_url(self): <NEW_LINE> <INDENT> url = 'http://%s.tumblr.com' % self._regex_id() <NEW_LINE> if self.label: <NEW_LINE> <INDENT> url = '%(url)s/tagged/%(label)s' % {'url': url, 'label': self.label} <NEW_LINE> <DEDENT> return '%s/rss' % url
Tumblr posts
62599070442bda511e95d9b2
class EditProductsView(APIView): <NEW_LINE> <INDENT> permission_classes = (permissions.IsAdminUser,) <NEW_LINE> def post(self, request): <NEW_LINE> <INDENT> data = request.data <NEW_LINE> return product_utils.add_product(data) <NEW_LINE> <DEDENT> def delete(self, request, pk): <NEW_LINE> <INDENT> return product_utils.delete_product(pk) <NEW_LINE> <DEDENT> def patch(self, request, pk): <NEW_LINE> <INDENT> data = request.data <NEW_LINE> return product_utils.update_product(data, pk)
POST: To add a new product DELETE: To delete an existing product PATCH: To update an existing product
6259907001c39578d7f1438f
class tqdm_progress_bar(progress_bar): <NEW_LINE> <INDENT> def __init__(self, iterable, epoch=None, prefix=None): <NEW_LINE> <INDENT> super().__init__(iterable, epoch, prefix) <NEW_LINE> self.tqdm = tqdm( iterable, self.prefix, leave=False, ascii=True, dynamic_ncols=True ) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.tqdm) <NEW_LINE> <DEDENT> def log(self, stats): <NEW_LINE> <INDENT> self.tqdm.set_postfix(self._format_stats(stats), refresh=False) <NEW_LINE> <DEDENT> def print(self, stats, color=None): <NEW_LINE> <INDENT> postfix = self._str_pipes(self._format_stats_full(stats)) <NEW_LINE> if color: <NEW_LINE> <INDENT> self.tqdm.write( "{}{} | {}".format( COLOR_MAPPINGS[color], self.tqdm.desc, postfix ) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.tqdm.write("{} | {}".format(self.tqdm.desc, postfix)) <NEW_LINE> <DEDENT> <DEDENT> def string(self, stats): <NEW_LINE> <INDENT> postfix = self._str_pipes(self._format_stats_full(stats)) <NEW_LINE> return "{} | {}".format(self.prefix, postfix)
Log to tqdm.
62599070fff4ab517ebcf0cf
class ModelContainer(object): <NEW_LINE> <INDENT> def __init__(self, model_num=None): <NEW_LINE> <INDENT> self.num = model_num <NEW_LINE> self.mol = MolList() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> text = "Class containing the data for model %s.\n" % self.num <NEW_LINE> text = text + "\n" <NEW_LINE> text = text + "Objects:\n" <NEW_LINE> for name in dir(self): <NEW_LINE> <INDENT> if name == 'mol': <NEW_LINE> <INDENT> text = text + " mol: The list of %s molecules within the model.\n" % len(self.mol) <NEW_LINE> continue <NEW_LINE> <DEDENT> if name == 'is_empty': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if match("^__", name): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> text = text + " " + name + ": " + repr(getattr(self, name)) + "\n" <NEW_LINE> <DEDENT> return text <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> if self.num != None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for name in dir(self): <NEW_LINE> <INDENT> if name == 'num' or name == 'mol': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if name == 'is_empty': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if match("^__", name): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> if not self.mol.is_empty(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def mol_loop(self): <NEW_LINE> <INDENT> for mol in self.mol: <NEW_LINE> <INDENT> if mol.is_empty(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> yield mol
Class containing all the model specific data.
6259907099cbb53fe683279e
class PrintFormatCommand(GefUnitTestGeneric): <NEW_LINE> <INDENT> def test_cmd_print_format(self): <NEW_LINE> <INDENT> self.assertFailIfInactiveSession(gdb_run_cmd("print-format")) <NEW_LINE> res = gdb_start_silent_cmd("print-format $sp") <NEW_LINE> self.assertNoException(res) <NEW_LINE> self.assertTrue("buf = [" in res) <NEW_LINE> res = gdb_start_silent_cmd("print-format --lang js $sp") <NEW_LINE> self.assertNoException(res) <NEW_LINE> self.assertTrue("var buf = [" in res) <NEW_LINE> res = gdb_start_silent_cmd("set *((int*)$sp) = 0x41414141", after=["print-format --lang hex $sp"]) <NEW_LINE> self.assertNoException(res) <NEW_LINE> self.assertTrue("41414141" in res, f"{res}") <NEW_LINE> res = gdb_start_silent_cmd("print-format --lang iDontExist $sp") <NEW_LINE> self.assertNoException(res) <NEW_LINE> self.assertTrue("Language must be in:" in res)
`print-format` command test module
62599070009cb60464d02ded
class SplitLinesToWordsFn(beam.DoFn): <NEW_LINE> <INDENT> OUTPUT_TAG_SHORT_WORDS = 'tag_short_words' <NEW_LINE> OUTPUT_TAG_CHARACTER_COUNT = 'tag_character_count' <NEW_LINE> def process(self, element): <NEW_LINE> <INDENT> yield pvalue.TaggedOutput( self.OUTPUT_TAG_CHARACTER_COUNT, len(element)) <NEW_LINE> words = re.findall(r'[A-Za-z\']+', element) <NEW_LINE> for word in words: <NEW_LINE> <INDENT> if len(word) <= 3: <NEW_LINE> <INDENT> yield pvalue.TaggedOutput(self.OUTPUT_TAG_SHORT_WORDS, word) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield word
A transform to split a line of text into individual words. This transform will have 3 outputs: - main output: all words that are longer than 3 characters. - short words output: all other words. - character count output: Number of characters in each processed line.
6259907067a9b606de5476fe
@dataclass <NEW_LINE> class TestConfig: <NEW_LINE> <INDENT> client_lang: str <NEW_LINE> server_lang: str <NEW_LINE> version: str <NEW_LINE> def version_ge(self, another: str) -> bool: <NEW_LINE> <INDENT> if self.version == 'master': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return _parse_version(self.version) >= _parse_version(another)
Describes the config for the test suite.
625990707b25080760ed893e
class ServiceInstanceDescriptor(object): <NEW_LINE> <INDENT> def __init__(self, template='', parameterValues=None, descriptor=None, propertySet=Ice._struct_marker): <NEW_LINE> <INDENT> self.template = template <NEW_LINE> self.parameterValues = parameterValues <NEW_LINE> self.descriptor = descriptor <NEW_LINE> if propertySet is Ice._struct_marker: <NEW_LINE> <INDENT> self.propertySet = _M_IceGrid.PropertySetDescriptor() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.propertySet = propertySet <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if other is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif not isinstance(other, _M_IceGrid.ServiceInstanceDescriptor): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.template != other.template: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.parameterValues != other.parameterValues: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.descriptor != other.descriptor: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.propertySet != other.propertySet: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return IcePy.stringify(self, _M_IceGrid._t_ServiceInstanceDescriptor) <NEW_LINE> <DEDENT> __repr__ = __str__
A service template instance descriptor.
625990707c178a314d78e846
class _ConsumedRequestBatch( namedtuple("_ConsumedRequestBatchBase", "batchID objects ack")): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def decodeMessage(cls, msg): <NEW_LINE> <INDENT> r = RequestMessagePackager.unmarshal(msg.body) <NEW_LINE> return cls(batchID=r.batchID, objects=BatchPackager.unmarshal(r.batchState), ack=msg.ack)
Container for a consumed request batch batchID: UUID of the batch objects: sequence of request objects (instances of ModelCommand, ModelInputRow, etc.) ack: function to call to ack the batch: NoneType ack(multiple=False); recipient is responsible for ACK'ing each batch in order to get more messages and also for supporting the "at-least-once" delivery guarantee.
625990703317a56b869bf19f
class PedanticFileWrapper(object): <NEW_LINE> <INDENT> def __init__(self, stream): <NEW_LINE> <INDENT> self.__stream = stream <NEW_LINE> <DEDENT> def seek(self, offset, whence = 0): <NEW_LINE> <INDENT> pos = self.__stream.tell() <NEW_LINE> self.__stream.seek(0, 2) <NEW_LINE> length = self.__stream.tell() <NEW_LINE> if whence == 1: <NEW_LINE> <INDENT> offset = pos + offset <NEW_LINE> <DEDENT> elif whence == 2: <NEW_LINE> <INDENT> offset = length + offset <NEW_LINE> <DEDENT> result = self.__stream.seek(offset) <NEW_LINE> if offset > length: <NEW_LINE> <INDENT> raise IOError("Attempt to seek at offset %d for file of %d bytes" % (offset, length)) <NEW_LINE> <DEDENT> elif offset < 0: <NEW_LINE> <INDENT> raise IOError("Attempt to seek at offset %d for file of %d bytes" % (offset, length)) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def read(self, size = -1): <NEW_LINE> <INDENT> pos = self.__stream.tell() <NEW_LINE> self.__stream.seek(0, 2) <NEW_LINE> length = self.__stream.tell() <NEW_LINE> self.__stream.seek(pos) <NEW_LINE> if pos + size > length: <NEW_LINE> <INDENT> raise IOError("Attempt to read bytes %d to %d from file of %d bytes" % (pos, pos + size, length)) <NEW_LINE> <DEDENT> return self.__stream.read(size) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self.__stream, name) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.__stream) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.__stream)
Pedantic wrapper around a file object. Is guaranteed to raise an IOError if an attempt is made to: - seek to a location larger than the file - read behind the file boundary Only works for random access files that support seek() and tell().
62599070d268445f2663a7b8
class Screen(object): <NEW_LINE> <INDENT> def __init__(self, image_file=None): <NEW_LINE> <INDENT> self.size = SCREENSIZE <NEW_LINE> self.bgcolour = BACKGROUND <NEW_LINE> self.display = pygame.display.set_mode(self.size) <NEW_LINE> self.title = pygame.display.set_caption('Moby Dick') <NEW_LINE> if image_file: <NEW_LINE> <INDENT> self.image = load_image(image_file) <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.image = '' <NEW_LINE> self.rect = Rect(0, 0, 0, 0) <NEW_LINE> <DEDENT> self.show() <NEW_LINE> <DEDENT> def show(self): <NEW_LINE> <INDENT> self.display.fill(self.bgcolour) <NEW_LINE> if self.image != '': <NEW_LINE> <INDENT> self.display.blit(self.image, (0, 0))
Starts a screen and displays background
625990702ae34c7f260ac9a0
class ActivityLogAlertResourcePaged(Paged): <NEW_LINE> <INDENT> _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[ActivityLogAlertResource]'} } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ActivityLogAlertResourcePaged, self).__init__(*args, **kwargs)
A paging container for iterating over a list of :class:`ActivityLogAlertResource <azure.mgmt.monitor.models.ActivityLogAlertResource>` objects
625990708e7ae83300eea947
class WriteAccessRecord(BiffRecord): <NEW_LINE> <INDENT> _REC_ID = 0x005C <NEW_LINE> def __init__(self, owner): <NEW_LINE> <INDENT> uowner = owner[0:0x30] <NEW_LINE> uowner_len = len(uowner) <NEW_LINE> self._rec_data = pack(bytes('%ds%ds' % (uowner_len, 0x70 - uowner_len), encoding='utf8'), bytes(uowner, encoding='utf8'), b' '*(0x70 - uowner_len))
This record is part of the file protection. It contains the name of the user that has saved the file. The user name is always stored as an equal-sized string. All unused characters after the name are filled with space characters. It is not required to write the mentioned string length. Every other length will be accepted too.
625990705166f23b2e244c8a
class SimRunError(StandardError): <NEW_LINE> <INDENT> pass
Generic error for a model simulation run. Attributes include the current results stack.
62599070b7558d5895464b8e
class DarwinKASLRMixin(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def args(cls, parser): <NEW_LINE> <INDENT> super(DarwinKASLRMixin, cls).args(parser) <NEW_LINE> parser.add_argument("--vm_kernel_slide", action=config.IntParser, help="OS X 10.8 and later: kernel ASLR slide.") <NEW_LINE> <DEDENT> def __init__(self, vm_kernel_slide=None, **kwargs): <NEW_LINE> <INDENT> super(DarwinKASLRMixin, self).__init__(**kwargs) <NEW_LINE> if not MOUNTAIN_LION_OR_LATER(self.profile): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if vm_kernel_slide is not None: <NEW_LINE> <INDENT> self.session.SetParameter("vm_kernel_slide", vm_kernel_slide)
Ensures that KASLR slide is computed and stored in the session.
62599070097d151d1a2c2928
class HelloWorld(Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> r = ResponseJson({"message": "hello, world"}) <NEW_LINE> return r.make_response()
hello, world! if this endpoint works then yaaay your code deploys correctly
625990707d847024c075dc8f
class LeadUpdateView(LoginRequiredMixin, generic.UpdateView): <NEW_LINE> <INDENT> template_name = "leads/lead_update.html" <NEW_LINE> queryset = Lead.objects.all() <NEW_LINE> form_class = LeadModelForm <NEW_LINE> def get_success_url(self): <NEW_LINE> <INDENT> return reverse('leads:lead-list')
Replaces the lead_update function with a Django class-based view.
625990704a966d76dd5f07a1
class Embed(Field): <NEW_LINE> <INDENT> def __init__(self, packet): <NEW_LINE> <INDENT> self.packet = packet <NEW_LINE> super(Embed, self).__init__() <NEW_LINE> <DEDENT> def value_to_bytes(self, obj, value, default_endianness=DEFAULT_ENDIANNESS): <NEW_LINE> <INDENT> return value.serialise(default_endianness=default_endianness) <NEW_LINE> <DEDENT> def buffer_to_value(self, obj, buffer, offset, default_endianness=DEFAULT_ENDIANNESS): <NEW_LINE> <INDENT> return self.packet.parse(buffer[offset:], default_endianness=default_endianness)
Embeds another :class:`.PebblePacket`. Useful for implementing repetitive packets. :param packet: The packet to embed. :type packet: .PebblePacket
625990703539df3088ecdb52
class TestLinearPositionListResult(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testLinearPositionListResult(self): <NEW_LINE> <INDENT> pass
LinearPositionListResult unit test stubs
625990701b99ca4002290191