Columns: code (string, 4 – 4.48k chars) | docstring (string, 1 – 6.45k chars) | _id (string, 24 chars)

Each record below shows the code with its docstring, followed by its _id.
class ManualTlsSni01(common.TLSSNI01):
    """TLS-SNI-01 authenticator for the Manual plugin

    :ivar configurator: Authenticator object
    :type configurator: :class:`~certbot.plugins.manual.Authenticator`

    :ivar list achalls: Annotated
        :class:`~certbot.achallenges.KeyAuthorizationAnnotatedChallenge`
        challenges

    :param list indices: Meant to hold indices of challenges in a
        larger array. NginxTlsSni01 is capable of solving many challenges
        at once, which causes an indexing issue within NginxConfigurator,
        which must return all responses in order. Imagine NginxConfigurator
        maintaining state about where all of the http-01 and
        TLS-SNI-01 challenges belong in the response array. This is an
        optional utility.

    :param str challenge_conf: location of the challenge config file
    """

    def perform(self):
        for achall in self.achalls:
            self._setup_challenge_cert(achall)

# _id: 6259904c07d97122c42180b6

class Kusto_Client(object):
    """Kusto client wrapper for Python.

    KustoClient works with both 2.x and 3.x flavors of Python. All primitive types are supported.
    KustoClient takes care of ADAL authentication, parses the response into a typed result set,
    and offers a familiar Python DB API.

    Tests are run using nose.

    Examples
    --------
    To use KustoClient, you can choose between two ways of authentication.

    For the first option, you'll need to have your own AAD application and know your client
    credentials (client_id and client_secret):

    >>> kusto_cluster = 'https://help.kusto.windows.net'
    >>> kusto_client = KustoClient(kusto_cluster, client_id, client_secret='your_app_secret')

    For the second option, you can use KustoClient's client id and authenticate using your
    username and password:

    >>> kusto_cluster = 'https://help.kusto.windows.net'
    >>> client_id = 'e07cf1fb-c6a6-4668-b21a-f74731afa19a'
    >>> kusto_client = KustoClient(kusto_cluster, client_id, username='your_username', password='your_password')
    """

    _DEFAULT_CLIENTID = "db662dc1-0cfe-4e1c-a843-19a68e65be58"

    def __init__(self, conn_kv):
        kusto_cluster = "https://{0}.kusto.windows.net".format(conn_kv["cluster"])
        if all([conn_kv.get("username"), conn_kv.get("password")]):
            kcsb = KustoConnectionStringBuilder.with_aad_user_password_authentication(
                kusto_cluster, conn_kv.get("username"), conn_kv.get("password"))
            if conn_kv.get("tenant") is not None:
                kcsb.authority_id = conn_kv.get("tenant")
        elif all([conn_kv.get("clientid"), conn_kv.get("clientsecret")]):
            kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
                kusto_cluster, conn_kv.get("clientid"), conn_kv.get("clientsecret"),
                conn_kv.get("tenant"))
        elif all([conn_kv.get("clientid"), conn_kv.get("certificate"),
                  conn_kv.get("certificate_thumbprint")]):
            # Note: the original passed the tenant as a default value to the
            # certificate_thumbprint lookup; the thumbprint and tenant are
            # separate arguments here.
            kcsb = KustoConnectionStringBuilder.with_aad_application_certificate_authentication(
                kusto_cluster, conn_kv.get("clientid"), conn_kv.get("certificate"),
                conn_kv.get("certificate_thumbprint"), conn_kv.get("tenant"))
        else:
            kcsb = KustoConnectionStringBuilder.with_aad_device_authentication(kusto_cluster)
            if conn_kv.get("tenant") is not None:
                kcsb.authority_id = conn_kv.get("tenant")
        self.client = KustoClient(kcsb)
        self.client._aad_helper = _MyAadHelper(kcsb, self._DEFAULT_CLIENTID)
        self.mgmt_endpoint_version = "v2" if self.client._mgmt_endpoint.endswith("v2/rest/query") else "v1"
        self.query_endpoint_version = "v2" if self.client._query_endpoint.endswith("v2/rest/query") else "v1"

    def execute(self, kusto_database, query, accept_partial_results=False, timeout=None):
        endpoint_version = self.mgmt_endpoint_version if query.startswith(".") else self.query_endpoint_version
        get_raw_response = True
        response = self.client.execute(kusto_database, query, accept_partial_results, timeout, get_raw_response)
        return KqlResponse(response, endpoint_version)

# _id: 6259904cdc8b845886d549d0

class SSMSStyle(Style):
    """A Pygments style inspired by Microsoft SSMS."""

    background_color = "#ffffff"
    default_style = ""
    styles = {
        Comment: "#008000",
        Keyword: "#0000ff",
        Operator: "#808080",
        Name: "#000000",
        Name.Function: "#ff00ff",
        Name.Builtin: "#01ff00",
        Name.Class: "#0000ff",
        String: "#ff0000",
        Error: "border:#ff0000",
        Number: "#000000",
        Punctuation: "#808080",
    }

# _id: 6259904cbe383301e0254c2e

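A minimal usage sketch for the style above, assuming the record's usual Pygments imports (`from pygments.style import Style` plus the token types); Pygments formatters accept a Style subclass directly:

import the pieces needed to render a snippet:

    from pygments import highlight
    from pygments.lexers import SqlLexer
    from pygments.formatters import HtmlFormatter

    # Render a SQL snippet to HTML using the SSMS-inspired colors.
    html = highlight("SELECT 1 AS x", SqlLexer(), HtmlFormatter(style=SSMSStyle))
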
class SingleUseTokenizer(DelimiterTokenizer):
    """Like a normal delimiter, but for equal opening and closing delimiter tokens.

    Inside the delimiter, no further uses are allowed.
    """

    def __init__(self, context: TokenizationContext, opening_delimiter: str,
                 opening_delimiter_position: StreamPosition,
                 opening_delimiter_position_after: StreamPosition):
        Tokenizer.__init__(self, context)
        self.opening_delimiter = opening_delimiter
        self.opening_delimiter_position = opening_delimiter_position
        self.opening_delimiter_position_after = opening_delimiter_position_after
        self.closing_delimiter = opening_delimiter
        readtable = context.readtable
        opening_delimiter_properties = readtable.query(opening_delimiter)[0]
        self.my_tokenizer_name = opening_delimiter_properties["tokenizer"]
        del opening_delimiter_properties["tokenizer"]
        opening_delimiter_properties["type"] = RT.CLOSING

    def on_close(self):
        readtable = self.context.readtable
        opening_delimiter_properties = readtable.query(self.opening_delimiter)[0]
        opening_delimiter_properties["tokenizer"] = self.my_tokenizer_name
        opening_delimiter_properties["type"] = RT.MACRO

# _id: 6259904c7cff6e4e811b6e4e

class TestEnlightener(unittest.TestCase):
    """Test connections."""

    def setUp(self):
        self.device1 = '99000512002151'
        self.sheet_values = [
            ['99000512002128', '1'],
            ['99000512000619', '2']
        ]
        self.new_values = [
            ['99000512000621', '256'],
            ['99000512000648', '512']
        ]
        self.test_device_list = ['99000512002128', '99000512000619']
        self.time1 = '2018-06-01 00:00:00'
        self.time2 = '2018-06-01 00:05:00'
        self.time3 = '2018-06-01 00:10:00'
        self.time4 = '2018-06-01 00:15:00'
        self.time5 = '2018-06-01 00:20:00'

    def test_get_light_threshold(self):
        comp = compile_light_time(self.device1)
        threshold = comp['light']
        self.assertGreater(int(threshold), 0)

    def test_get_device_list(self):
        bool_test = False
        resp = get_device_list()
        mylist = resp['devices']
        test_device_list = self.test_device_list
        for device in test_device_list:
            if device in mylist:
                bool_test = True
        self.assertEqual(bool_test, True)

    def test_analyze_time_diff(self):
        diff1 = get_time_diff(self.time1, self.time2)
        diff2 = get_time_diff(self.time1, self.time3)
        diff3 = get_time_diff(self.time1, self.time4)
        diff4 = get_time_diff(self.time1, self.time5)
        self.assertLessEqual(diff1, 5)
        self.assertLessEqual(diff2, 10)
        self.assertLessEqual(diff3, 15)
        self.assertGreaterEqual(diff3, 15)
        self.assertGreaterEqual(diff4, 20)

    def test_get_light_threshold_set_get(self):
        device_id = self.device1
        update_light_value(device_id, "1001")
        time.sleep(math.floor(100 / 24))
        req = get_config_for_device(device_id)
        print(req)
        threshold_value = get_light_threshold(req)
        self.assertEqual(threshold_value, '1001')

    def test_get_device_ids(self):
        values = get_device_ids()
        print(values)
        test = False
        if '99000512000647' in values:
            test = True
        elif '99000512000619' in values:
            test = True
        self.assertTrue(test)

    def test_update_device_light_thresholds(self):
        read_write("read")

# _id: 6259904c50485f2cf55dc39f

class AU(AST, namedtuple('AU', ['left', 'right'])):
    """The "strong until" operator (for all paths)."""

    def __str__(self):
        return 'A[{left} U {right}]'.format(left=str(self.left), right=str(self.right))

# _id: 6259904c009cb60464d0294a

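A quick sketch of how AU renders, assuming a trivial AST base class is defined before the class (the real AST base is not part of this record):

    class AST:
        # Assumed stub; the real AST base class is not shown in this record.
        pass

    print(AU(left='p', right='q'))   # -> A[p U q]
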
class INAC():
    """Given a dataset of .wav files you can create, train, save, load and use an AI model."""

    def __init__(self):
        self.preproVars = Preprocessing()
        self.Model = Model(self.preproVars)

    def main(self):
        loadOrCreate = ''
        predictBool = ''
        newOrOld = ''
        while loadOrCreate not in ('c', 'l', 'tl'):
            loadOrCreate = input("Would you like to load, create or train a loaded model(l/c/tl): ").lower()
        if loadOrCreate == 'l':
            predictBool = 'y'
        while predictBool not in ('y', 'n'):
            predictBool = input("Would you like to see predictions from the model at the end(y/n): ").lower()
        if loadOrCreate == 'c':
            self.Model.createModel()
            self.Model.teachModel()
            self.Model.saveModel()
        elif loadOrCreate == 'tl':
            self.Model.loadModel()
            self.Model.teachModel()
            self.Model.saveModel()
        else:
            self.Model.loadModel()
        while predictBool == "y":
            while newOrOld not in ('n', 'o'):
                newOrOld = input("Would you like to record a new sound or use an old sound(n/o): ")
            predictIdx = random.randint(0, self.preproVars.datasetSize)
            self.Model.predict(newOrOld, index=predictIdx)
            predictBool = input("Would you like to see another prediction(y/any char): ").lower()
            newOrOld = ''

# _id: 6259904c462c4b4f79dbce13

class TestAuthorsFile(TempDirTest):
    """Parsing authors file"""

    def runTest(self):
        authors = os.path.join(self.dir, "authors")
        with open(authors, "w") as file:
            file.write(
                "user = Some Body <[email protected]>\n"
                "tricky = E = mc squared\n"
            )
        output = os.path.join(self.dir, "output")
        stdin = BytesIO(b'<log><logentry revision="100"/></log>')
        with patch("svnex.Exporter", self.Exporter), patch("svnex.stdin", TextIOWrapper(stdin, "ascii")):
            svnex.main(os.devnull, "dummy", file=output, git_ref="refs/ref", authors_file=authors)
        self.assertEqual(dict(
            user="Some Body <[email protected]>",
            tricky="E = mc squared",
        ), self.author_map)

    def Exporter(self, *pos, author_map, **kw):
        self.author_map = author_map
        return self.MockExporter()

    class MockExporter:
        def export(self, *pos, **kw):
            pass

# _id: 6259904cd99f1b3c44d06aad

@implementer(IAfterTransition)
class AfterTransition(object):
    """Event sent after any workflow transition happens"""

    def __init__(self, object, old_state, new_state, transition):
        self.object = object
        self.old_state = old_state
        self.new_state = new_state
        self.transition = transition

# _id: 6259904cd6c5a102081e3530

class LatticeOpNode(OperandSetNode):
    """An operation with the following properties:

      - Associative: op(op(A, B), C) == op(A, op(B, C))
      - Commutative: op(A, B) == op(B, A)
      - Idempotent: op(A, A) == op(A) == A

    Neutral elements:

      - Identity: op(Identity, A) == A; op() == Identity
      - Zero: op(Zero, A) == Zero
    """

    _optimize_new = False

    def __init__(self, operands, *args, **why_kwargs):
        why_therefore = why_kwargs.pop(
            'why_identity_implies_all_operands_identity', None)
        why_becauseof = why_kwargs.pop(
            'why_all_operands_identity_implies_identity', None)
        super(LatticeOpNode, self).__init__(operands, *args, **why_kwargs)
        identity = self.identity
        self[identity].equivalent_all(dict.fromkeys(operands, identity),
                                      why_therefore, why_becauseof)

    @classmethod
    def _new(cls, operands, *args, **kwargs):
        if cls._optimize_new:
            identity_const_type = ConstNode.types[cls.identity]
            operands = filternot(instanceof(identity_const_type), operands)
        return super(LatticeOpNode, cls)._new(operands, *args, **kwargs)

    @classmethod
    def _new_no_operands(cls, *args, **kwargs):
        if cls._optimize_new:
            return cls.pgraph.new_const(cls.identity)

    @classmethod
    def _new_one_operand(cls, operand, *args, **kwargs):
        if cls._optimize_new:
            return operand

    def __repr__(self):
        return self._repr_sign.join(map(repr, self._operands)).join('()')

# _id: 6259904cb830903b9686ee84

@python_2_unicode_compatible
class Expection(models.Model):
    """Expectation; used to record expectations."""

    user = models.ForeignKey(verbose_name=_('User'), to=User)
    title = models.CharField(verbose_name=_('Title'), max_length=15)
    description = models.TextField(verbose_name=_('Description'))

    def __str__(self):
        return self.title

    class Meta:
        db_table = 'portfolio_expection'
        verbose_name = _('Expection')
        verbose_name_plural = _('Expection')

# _id: 6259904c30c21e258be99c19

class Docs():
    """Document iterator"""

    def __init__(self, data):
        self.data = data
        self.pids = list(self.data.keys())
        self.i = 0

    def __iter__(self):
        return self

    def __next__(self):
        self.i += 1
        if self.i > len(self.data):
            self.i = 0
            raise StopIteration
        pid = self.pids[self.i - 1]
        text = self.data[pid]
        tokens = pre.preprocess_string(text, filters=FILTERS)
        return TaggedDocument(tokens, [pid])

# _id: 6259904c29b78933be26aacc

class ClassInstance(object):
    """Represents an instance of a user-defined class."""

    def __init__(self, class_entity, namespace):
        self.class_entity = class_entity
        self.namespace = namespace

# _id: 6259904c23849d37ff8524d1

class ConditionalCategoricalDistribution(object):
    """A Categorical distribution conditioned on Tensor inputs via a fc net."""

    def __init__(self, size, interval, hidden_layer_sizes,
                 hidden_activation_fn=tf.nn.tanh, initializers=None,
                 fcnet=None, bias_init=0.0,
                 name="conditional_categorical_distribution"):
        self.size = size
        self.interval = interval
        self.bias_init = bias_init
        if initializers is None:
            initializers = _DEFAULT_INITIALIZERS
        if fcnet is None:
            self.fcnet = snt.nets.MLP(
                output_sizes=hidden_layer_sizes + [size * interval],
                activation=hidden_activation_fn,
                initializers=initializers,
                activate_final=False,
                use_bias=True,
                name=name + "_fcnet")
        else:
            self.fcnet = fcnet

    def condition(self, tensor_list):
        inputs = tf.concat(tensor_list, axis=1)
        raw_return = self.fcnet(inputs) + self.bias_init
        return tf.reshape(raw_return, [-1, self.size, self.interval])

    def __call__(self, *args):
        p = self.condition(args)
        return tf.distributions.Categorical(logits=p)

# _id: 6259904c96565a6dacd2d993

class MethodGroup(object):
    """Base class containing common behaviour for MethodGroups."""

    def __init__(self, group_name, exception_list):
        self._group_name = group_name
        self._exception_list = exception_list

    def group_name(self):
        return self._group_name

    def __str__(self):
        return '<%s "%s">' % (self.__class__.__name__, self._group_name)

    def AddMethod(self, mock_method):
        raise NotImplementedError()

    def MethodCalled(self, mock_method):
        raise NotImplementedError()

    def IsSatisfied(self):
        raise NotImplementedError()

# _id: 6259904c0a50d4780f7067c7

class AbstractQueueDataset(AbstractDataset):
    """Dataset base class, used for datasets with input queue."""

    __metaclass__ = ABCMeta

    def __init__(self, data_dir, dataset_size, input_shape, target_shape,
                 min_examples_in_queue=1024, queue_capacitiy=2048, num_threads=8):
        self._min_examples_in_queue = min_examples_in_queue
        self._queue_capacitiy = queue_capacitiy
        self._num_threads = num_threads
        super(AbstractQueueDataset, self).__init__(data_dir, dataset_size, input_shape, target_shape)

    @light.utils.attr.override
    def reset(self):
        return

    @property
    def min_examples_in_queue(self):
        return self._min_examples_in_queue

    @property
    def queue_capacity(self):
        return self._queue_capacitiy

    @property
    def num_threads(self):
        return self._num_threads

# _id: 6259904c76e4537e8c3f0999

class BrepRegion(object):
    """Represents a brep topological region that has sides."""

    def BoundaryBrep(self):
        pass

    def GetFaceSides(self):
        pass

    BoundingBox = property(lambda self: object(), lambda self, v: None, lambda self: None)
    Brep = property(lambda self: object(), lambda self, v: None, lambda self: None)
    Index = property(lambda self: object(), lambda self, v: None, lambda self: None)
    IsFinite = property(lambda self: object(), lambda self, v: None, lambda self: None)

# _id: 6259904c23e79379d538d912

class EveryValue(Tube):
    """Yields values from the passed iterable in order, looping into infinity."""

    def __init__(self, values, **kwargs):
        self.index = -1
        self.values = list(values)
        self.length = len(self.values)

    def next(self):
        if self.length == 0:
            raise StopIteration
        self.index += 1
        return self.values[self.index % self.length]

# _id: 6259904cd53ae8145f919875

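A usage sketch; EveryValue's __init__ never calls into the Tube base, so a trivial stub (assumed here, not part of the record) is enough to run it:

    class Tube:
        # Assumed stub; the real Tube base class is not shown in this record.
        pass

    looper = EveryValue([1, 2, 3])
    print([looper.next() for _ in range(5)])   # -> [1, 2, 3, 1, 2]
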
class CreateView(generics.ListCreateAPIView):
    """This class defines the create behavior of our rest api."""

    queryset = todo.objects.all()
    serializer_class = TodoSerializer

# _id: 6259904cd7e4931a7ef3d48c

class CreditCourseAdmin(admin.ModelAdmin):
    """Admin for credit courses."""

    search_fields = ("course_key",)

# _id: 6259904cbaa26c4b54d506bf

class Label(InstallerMixin, widgets.DOMWidget):
    """Just some text..."""

    _view_name = Unicode('LabelView', sync=True)
    _view_module = Unicode('nbextensions/ipbs/js/widget_label', sync=True)
    value = CUnicode(sync=True)
    html = Bool(False, sync=True)
    lead = Bool(False, sync=True)
    align = Enum(bs.Alignment, sync=True)
    transform = Enum(bs.Transformation, sync=True)
    context = Enum(bs.Context, default_value=bs.Context.default, sync=True)

    def __init__(self, value=None, **kwargs):
        if value is not None:
            kwargs["value"] = value
        super(Label, self).__init__(**kwargs)

# _id: 6259904cec188e330fdf9cb3

class HttxOption(object):
    """A class representing an individual option.

    The class is implemented as a descriptor. The class locks access
    automatically on access (read/write) to the storage by using the
    lock from the instance object accessing the option.

    @ivar name: Name of the option
    @type name: str
    @ivar defvalue: Default value of the option
    @type defvalue: Opaque type. Each option may have different types
    @ivar storage: Actual storage for the option value(s) on a per
        instance (descriptor) basis
    @type storage: dict
    """

    __slots__ = ['name', 'defvalue', 'storage', '__weakref__']

    def __init__(self, name, defvalue):
        self.name = name
        self.defvalue = defvalue
        self.storage = dict()

    def __get__(self, instance, owner):
        with instance.lock:
            return self.storage.setdefault(instance, deepcopy(self.defvalue))

    def __set__(self, instance, value):
        with instance.lock:
            self.storage[instance] = deepcopy(value)

# _id: 6259904c07d97122c42180b8

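A sketch of the descriptor in use, assuming `from copy import deepcopy` at module level (the class calls it) and a host object that exposes the `lock` the descriptor acquires:

    import threading

    class Options(object):
        # Each access to `timeout` goes through HttxOption.__get__/__set__.
        timeout = HttxOption('timeout', 30)

        def __init__(self):
            self.lock = threading.RLock()   # HttxOption locks via instance.lock

    opts = Options()
    print(opts.timeout)   # 30 -- a deep copy of the default value
    opts.timeout = 60
    print(opts.timeout)   # 60, stored per instance in the descriptor's storage dict
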
class Test_get_job_or_promise(TransactionTestCase):
    """Test the Job._get_job_or_promise classmethod"""

    def setUp(self):
        self.q = Queue(scheduled=True)
        self.timeout = 60
        future = now() + timedelta(seconds=self.timeout / 2)
        self.job = self.q.schedule_call(future, do_nothing)

    def test_get_job_or_promise(self):
        job, promise, timeout = Job._get_job_or_promise(
            self.q.connection, self.q, self.timeout)
        self.assertLessEqual(timeout, self.timeout)
        self.assertIsNone(job)
        self.assertEqual(promise, self.q.name)

    def test_get_no_job_no_promise(self):
        job, promise, timeout = Job._get_job_or_promise(
            self.q.connection, self.q, 1)
        self.assertEqual(timeout, 1)
        self.assertIsNone(job)
        self.assertIsNone(promise)

    def test_get_earlier_job_no_promise(self):
        now_job = self.q.enqueue(do_nothing)
        job, promise, timeout = Job._get_job_or_promise(
            self.q.connection, self.q, 60)
        self.assertEqual(timeout, 60)
        self.assertEqual(now_job.id, job.id)

# _id: 6259904cb57a9660fecd2e92

class TGetSchemasReq(object):
    """
    Attributes:
     - sessionHandle
     - catalogName
     - schemaName
    """

    def __init__(self, sessionHandle=None, catalogName=None, schemaName=None,):
        self.sessionHandle = sessionHandle
        self.catalogName = catalogName
        self.schemaName = schemaName

    def read(self, iprot):
        if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
            iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.sessionHandle = TSessionHandle()
                    self.sessionHandle.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.catalogName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRING:
                    self.schemaName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot._fast_encode is not None and self.thrift_spec is not None:
            oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
            return
        oprot.writeStructBegin('TGetSchemasReq')
        if self.sessionHandle is not None:
            oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
            self.sessionHandle.write(oprot)
            oprot.writeFieldEnd()
        if self.catalogName is not None:
            oprot.writeFieldBegin('catalogName', TType.STRING, 2)
            oprot.writeString(self.catalogName.encode('utf-8') if sys.version_info[0] == 2 else self.catalogName)
            oprot.writeFieldEnd()
        if self.schemaName is not None:
            oprot.writeFieldBegin('schemaName', TType.STRING, 3)
            oprot.writeString(self.schemaName.encode('utf-8') if sys.version_info[0] == 2 else self.schemaName)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.sessionHandle is None:
            raise TProtocolException(message='Required field sessionHandle is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)

# _id: 6259904cbe383301e0254c30

class Place:
    """A Place holds insects and has an exit to another Place."""

    def __init__(self, name, exit=None):
        self.name = name
        self.exit = exit
        self.bees = []
        self.ant = None
        self.entrance = None
        if self.exit != None:
            exit.entrance = self

    def add_insect(self, insect):
        if insect.is_ant():
            if self.ant != None:
                if self.ant.container == True and insect.container == True:
                    assert self.ant.container != True, 'BodyguardAnt cannot contain BodyguardAnt'.format(insect, self)
                elif self.ant.container == True:
                    assert self.ant.has_ant(), 'cannot add third ant'.format(insect, self)
                    self.ant.contain_ant(insect)
                elif self.ant.container == False and insect.container == True:
                    insect.contain_ant(self.ant)
                    self.ant = insect
            else:
                assert self.ant is None, 'Two ants in {0}'.format(self)
                self.ant = insect
        else:
            self.bees.append(insect)
        insect.place = self

    def remove_insect(self, insect):
        if not insect.is_ant():
            self.bees.remove(insect)
        else:
            assert self.ant == insect, '{0} is not in {1}'.format(insect, self)
            if self.ant.queen_count != 1:
                if insect.container == True:
                    self.ant = insect.ant
                    insect.place = None
                else:
                    self.ant = None
                    insect.place = None

    def __str__(self):
        return self.name

# _id: 6259904c8e05c05ec3f6f865

class Body:
    """definition for object with mass"""

    def __init__(self, mass, rx, ry, vx=0, vy=0, fx=0, fy=0, L=0, color='k'):
        self.m = mass
        self.r = np.array([rx, ry])
        self.v = np.array([vx, vy])
        self.f = np.array([fx, fy])
        self.L = L
        self.c = color

    def update(self, dt):
        self.v = self.v + (self.f / self.m) * dt
        self.r = np.mod(self.r + self.v * dt + self.L, 2 * self.L) - self.L

    def leapFrog(self, dt):
        self.v = self.v + (self.f / self.m) * 0.5 * dt
        self.r = np.mod(self.r + self.v * dt + self.L, 2 * self.L) - self.L

    def vHalfStep(self, dt):
        return self.v + (self.f / self.m) * 0.5 * dt

    def distanceTo(self, body):
        dr = self.r - body.r
        return np.sqrt(dr.dot(dr))

    def resetForce(self, fx=0, fy=0):
        self.f = np.array([fx, fy])

    def addForce(self, body, epsilon):
        dr = self.r - body.r
        d = np.sqrt(dr.dot(dr) + epsilon**2)
        df = -G * self.m * body.m * dr / d**3
        self.f = self.f + df

    def Kenergy(self, dt):
        v = self.vHalfStep(dt)
        return 0.5 * self.m * np.sqrt(v.dot(v))

    def Uinteract(self, body):
        return -G * self.m * body.m / self.distanceTo(body)

    def inQuad(self, quad):
        rx, ry = self.r[0], self.r[1]
        qx, qy, qL = quad.r[0], quad.r[1], quad.L
        inX = rx >= qx and rx < qx + qL
        inY = ry >= qy and ry < qy + qL
        if inX and inY:
            return True
        else:
            return False

    def plot(self):
        rx, ry = self.r[0], self.r[1]
        plt.scatter(rx, ry, c=self.c)

# _id: 6259904c16aa5153ce401903

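A short sketch, assuming `import numpy as np` and a module-level gravitational constant G (the record references G but does not define it):

    import numpy as np

    G = 6.674e-11   # assumed value; G is not defined in this record

    a = Body(mass=5.0, rx=0.0, ry=0.0, L=10)
    b = Body(mass=3.0, rx=1.0, ry=1.0, L=10)
    a.resetForce()
    a.addForce(b, epsilon=0.1)   # softened gravitational pull from b
    a.update(0.01)               # Euler step, position wrapped into [-L, L)
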
class AudioUnit(object):
    """Represents an AudioUnit.

    Should not be created directly, but instead be taken from an AHTrack.
    """

    def __init__(self, ah_audiounit):
        self._ah_au = ah_audiounit
        self._desc = None
        self._ah_parameters = {}
        self._ah_parameters_dicts = {}
        self._last_aupreset = ''

    def get_parameters(self, scope=au.kAudioUnitScope_Global, element=0):
        if (scope, element) not in self._ah_parameters:
            self._ah_parameters[(scope, element)] = [Parameter(x) for x in self._ah_au.GetParameterList(scope, element)]
        return self._ah_parameters[(scope, element)]

    def get_parameters_dict(self, scope=au.kAudioUnitScope_Global, element=0):
        if (scope, element) not in self._ah_parameters_dicts:
            params = self.get_parameters()
            names = [p.name for p in params]
            d = dict(zip(names, params))
            self._ah_parameters_dicts[(scope, element)] = d
        return self._ah_parameters_dicts[(scope, element)]

    def load_aupreset(self, aupreset_file):
        self._ah_au.LoadAUPresetFromFile(aupreset_file)
        self._last_aupreset = aupreset_file

    def save_aupreset(self, aupreset_file):
        self._ah_au.SaveAUPresetToFile(aupreset_file)

    name = property(lambda self: self._ah_au.GetName(),
                    doc='Returns the name of the audiounit')
    manu = property(lambda self: self._ah_au.GetManu(),
                    doc='Returns the manufacturer of the audiounit')
    bypass = property(lambda self: self._ah_au.GetBypass(),
                      lambda self, x: self._ah_au.SetBypass(x),
                      doc='If True the audio unit passes its input unchanged through its output.')

    def gui(self):
        launch_gui(self)

    def __str__(self):
        s = '%s by %s' % (self.name, self.manu)
        return s

# _id: 6259904c3c8af77a43b68948

class TensorBoardBmode(tf.keras.callbacks.TensorBoard):
    """TensorBoardBmode extends tf.keras.callbacks.TensorBoard, adding custom processing
    upon setup and after every epoch to store properly processed ultrasound images.
    """

    def __init__(self, val_data, *args, **kwargs):
        self.val_data = val_data
        super().__init__(*args, **kwargs)

    def set_model(self, *args, **kwargs):
        super().set_model(*args, **kwargs)
        dynamic_range = [-60, 0]
        bimg = make_bmode_tf(self.model.inputs[0])
        yhat = self.model.outputs[0]
        if tf.__version__ <= "1.13.0":
            ytgt = self.model.targets[0]
        else:
            ytgt = self.model._targets[0]
        szy = yhat.get_shape().as_list()
        szy[0] = -1
        ytgt = tf.reshape(ytgt, szy)
        yhat, bimg, ytgt = make_tensorboard_images(dynamic_range, yhat, bimg, ytgt)
        self.bsumm = tf.summary.image("Bmode", bimg)
        self.ysumm = tf.summary.image("Target", ytgt)
        self.psumm = tf.summary.image("Output", yhat)

    def on_epoch_end(self, epoch, logs={}):
        super().on_epoch_end(epoch, logs)
        if tf.__version__ <= "1.13.0":
            feed_dict = {
                self.model.inputs[0]: self.val_data[0],
                self.model.targets[0]: self.val_data[1],
            }
        else:
            feed_dict = {
                self.model.inputs[0]: self.val_data[0],
                self.model._targets[0]: self.val_data[1],
            }
        if epoch == 0:
            bs, ys = tf.keras.backend.get_session().run(
                [self.bsumm, self.ysumm], feed_dict=feed_dict)
            self.writer.add_summary(bs, 0)
            self.writer.add_summary(ys, 0)
        if epoch % 10 == 9:
            ps = tf.keras.backend.get_session().run(self.psumm, feed_dict=feed_dict)
            self.writer.add_summary(ps, epoch + 1)
        self.writer.flush()

# _id: 6259904c24f1403a926862d8

class HistorialPagosPDF(PDFTemplateView):
    """PDF that makes it possible to deliver a patient's account statement."""

    filename = 'recibo_de_entrega.pdf'
    show_content_in_browser = True
    template_name = 'historial_pagos.html'
    footer_template = 'footerpdf.html'
    paciente = None
    cmd_options = {
        'margin-top': 20,
        'margin-bottom': 20,
        'page-size': 'Letter'
    }

    def get_paciente(self):
        if self.paciente is None:
            servicio = self.pagos[0].servicio
            self.paciente = servicio.procedimiento.odontograma.paciente
        return self.paciente

    def get_context_data(self, **kwargs):
        context = super(HistorialPagosPDF, self).get_context_data(**kwargs)
        self.pagos = PagoAplicado.objects.all()
        paciente = self.get_paciente
        fecha = datetime.now().strftime("%d/%m/%Y")
        hora = datetime.now().strftime("%I:%M %p")
        context.update({
            'paciente': paciente,
            'pagos': self.pagos,
            'fecha': fecha,
            'hora': hora
        })
        return context

# _id: 6259904c097d151d1a2c2484

class LetterIter:
    """An iterator over letters of the alphabet in ASCII order."""

    def __init__(self, start='a', end='e'):
        self.next_letter = start
        self.end = end

    def __iter__(self):
        return self

    def __next__(self):
        if self.next_letter == self.end:
            raise StopIteration
        letter = self.next_letter
        self.next_letter = chr(ord(letter) + 1)
        return letter

# _id: 6259904cb830903b9686ee85

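LetterIter is self-contained, so this usage sketch runs as-is:

    for letter in LetterIter('a', 'e'):
        print(letter)   # prints a, b, c, d and stops before 'e'
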
class Bars(Weapon):
    """Weapon of player with 4 uses"""

    def __init__(self):
        atk = random.uniform(2.0, 2.4)
        name = "Chocolate Bars"
        uses = 4
        super().__init__(name, atk, uses)

# _id: 6259904cb5575c28eb7136d4

class ActivityTypeView(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet):
    """Retrieve all activity categories."""

    authentication_classes = (authentication.SessionAuthentication, JSONWebTokenAuthentication)
    permission_classes = (IsAuthenticated, IsOwnerOrReadOnly)
    queryset = ActivityTypeModel.objects.all().order_by('indexnum')
    serializer_class = ActivityTypeSerializers

# _id: 6259904c0fa83653e46f62f4

class SubjectOfInvestigation(YAMLRoot):
    """An entity that has the role of being studied in an investigation, study, or experiment"""

    _inherited_slots: ClassVar[List[str]] = []
    class_class_uri: ClassVar[URIRef] = CSOLINK.SubjectOfInvestigation
    class_class_curie: ClassVar[str] = "csolink:SubjectOfInvestigation"
    class_name: ClassVar[str] = "subject of investigation"
    class_model_uri: ClassVar[URIRef] = CSOLINK.SubjectOfInvestigation

# _id: 6259904ca8ecb03325872628

class TensorBoardStepCallback(Callback):
    """Tensorboard basic visualizations by step."""

    def __init__(self, log_dir, logging_per_steps=100, step=0):
        super().__init__()
        self.step = step
        self.logging_per_steps = logging_per_steps
        self.writer = tf.summary.FileWriter(log_dir)

    def on_batch_end(self, batch, logs=None):
        self.step += 1
        if self.step % self.logging_per_steps > 0:
            return
        for name, value in logs.items():
            if name in ['batch', 'size']:
                continue
            summary = tf.Summary()
            summary_value = summary.value.add()
            summary_value.simple_value = value.item()
            summary_value.tag = name
            self.writer.add_summary(summary, self.step)
        self.writer.flush()

    def close(self):
        self.writer.close()

# _id: 6259904c3cc13d1c6d466b4f

class NoramalizeMinMaxFixWindow(object):
    """Normalize data using min max technique"""

    def __init__(self, sampleWindowSize, cliping=False):
        self._cliping = cliping
        self._sampleWindowSize = sampleWindowSize

    def __call__(self, sample):
        data = sample['data']
        low = -1 * torch.ones((data.shape[0], 1))
        high = 1 * torch.ones((data.shape[0], 1))
        maxValues = torch.max(data[:, 0:self._sampleWindowSize], 1, True)[0].float()
        minValues = torch.min(data[:, 0:self._sampleWindowSize], 1, True)[0].float()
        if self._cliping:
            data = torch.max(torch.min(data, maxValues), minValues)
        normalizedData = (high - low) * ((data - minValues) / (maxValues - minValues)) + low
        sample['data'] = normalizedData
        return sample

# _id: 6259904c3617ad0b5ee07555

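A usage sketch, assuming `import torch`; each channel is rescaled to [-1, 1] using the min/max of its first sampleWindowSize samples:

    import torch

    norm = NoramalizeMinMaxFixWindow(sampleWindowSize=5)
    sample = {'data': torch.randn(3, 10)}   # 3 channels, 10 samples each
    out = norm(sample)['data']              # scaled per channel using samples 0..4
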
@pyblish.api.log
class IntegrateMasterRig(pyblish.api.InstancePlugin):
    """Copies asset to its final location"""

    order = pyblish.api.IntegratorOrder
    families = ['scene']
    label = 'Master Rig'

    def process(self, instance):
        sourcePath = os.path.normpath(instance.context.data('currentFile'))
        self.log.debug(sourcePath)
        publishFile = instance.context.data('publishFile')
        self.log.debug(publishFile)
        vstring, version = utils.version_get(publishFile, 'v')
        self.log.debug(vstring + version)
        master_file = publishFile.replace('_{}{}'.format(vstring, version), '')
        self.log.debug('master file: {}'.format(master_file))
        shutil.copy(sourcePath, master_file)

# _id: 6259904c6e29344779b01a58

class Section(object):
    """Section endpoint object."""

    def __init__(self, name, lon, lat, dire):
        self.name = name
        self.lat = np.array(lat)
        self.lon = np.array(lon)
        self.dire = np.array(dire)

    def __str__(self):
        output = 'Section %s\n' % self.name
        output += ' -lon: %s\n' % self.lon
        output += ' -lat: %s\n' % self.lat
        output += ' -dire: %s\n' % self.dire
        return output

# _id: 6259904c45492302aabfd8ea

class exact_length():
    """Filter any read that does not have length ``L``

    :param L: Read filter length
    """

    def __init__(self, L=101):
        self.L = L + 1
        self.failures = 0

    def __call__(self, r1, r2):
        predicate = (len(r1) == self.L) and (len(r2) == self.L)
        if not predicate:
            self.failures += 1
        return predicate

    def __str__(self):
        return "Exact length: {0}".format(self.failures)

# _id: 6259904c711fe17d825e16a9

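A usage sketch; note the constructor stores L + 1, so reads must be exactly L + 1 characters long (likely accounting for a trailing newline):

    f = exact_length(L=101)
    print(f('A' * 102, 'A' * 102))   # True: both reads have length L + 1
    print(f('A' * 100, 'A' * 102))   # False, and the failure counter increments
    print(f)                         # -> Exact length: 1
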
class Meta(object):
    """Meta options for Image model."""

    get_latest_by = 'created_at'
    ordering = ['-promoted', '-created_at']
    permissions = (
        ('can_promote_image', 'Can promote an image'),
        ('can_access_admin_gallery', 'Can access the admin gallery')
    )

# _id: 6259904cd10714528d69f099

class IGcsLayer(Interface):
    """Theme layer."""
    pass

# _id: 6259904c73bcbd0ca4bcb6a0

class TestTierPermission(unittest.TestCase):
    """TierPermission unit test stubs"""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testTierPermission(self):
        pass

# _id: 6259904c8e05c05ec3f6f866

class SingleProduct(Resource, Product):
    """This resource will be used to retrieve a single product"""

    def __init__(self):
        pass

    @jwt_required
    @token_required
    def get(self, product_id):
        return product.get_single_product(product_id)

    @jwt_required
    @admin_only
    @expects_json(product_schema)
    def put(self, product_id):
        product_name = request.get_json("product_name")["product_name"].strip(" ")
        model = request.get_json("model")["model"].strip(" ")
        product_price = request.get_json("product_price")["product_price"]
        quantity = request.get_json("quantity")["quantity"]
        category = request.get_json("category")["category"]
        min_quantity = request.get_json("min_quantity")["min_quantity"]
        if not product_name or not model or not product_price or not quantity or not min_quantity:
            return jsonify({
                "message": "Check all required fields",
                "status": 400
            })
        current_user = get_jwt_identity()["username"].lower()
        product = {
            "product_id": product_id,
            "product_name": product_name,
            "model": model,
            "product_price": product_price,
            "quantity": quantity,
            "category": category,
            "min_quantity": min_quantity,
            "created_by": current_user
        }
        return Product().update_product(**product)

    @jwt_required
    @admin_only
    def delete(self, product_id):
        return product.delete_product(product_id)

# _id: 6259904c462c4b4f79dbce17

class SDRCallback(MetricCallback):
    """SDR callback.

    Args:
        input_key (str): input key to use for dice calculation;
            specifies our y_true.
        output_key (str): output key to use for dice calculation;
            specifies our y_pred.
    """

    def __init__(
        self,
        input_key: str = "targets",
        output_key: str = "logits",
        prefix: str = "sdr",
        mixed_audio_key: str = "input_audio",
    ):
        self.mixed_audio_key = mixed_audio_key
        super().__init__(prefix=prefix, metric_fn=snr, input_key=input_key, output_key=output_key)

    def on_batch_end(self, state):
        output_audios = state.output[self.output_key]
        true_audios = state.input[self.input_key]
        if hasattr(state.model, "module"):
            num_person = state.model.module.num_person
        else:
            num_person = state.model.num_person
        batch = output_audios.shape[0]
        avg_sdr = 0
        for n in range(batch):
            output_audio = output_audios[n, ...]
            true_audio = true_audios[n, ...]
            output_audio = output_audio.detach().cpu().numpy()
            true_audio = true_audio.detach().cpu().numpy()
            sdr_value = sdr(output_audio, true_audio)
            sdr_value = np.mean(sdr_value)
            avg_sdr += sdr_value
        avg_sdr /= batch
        state.metrics.add_batch_value(name=self.prefix, value=avg_sdr)

# _id: 6259904c596a897236128fba

class ChunkedSimplePeakFinder(IPeakFinder):
    """Breaks the spectrum up into nchunks,
    applying the threshold relative to each chunk.
    """

    def __init__(self, threshold=0.05, nchunks=10):
        self.threshold = threshold
        self.nchunks = nchunks

    def find(self, data):
        peaks = []
        pf = SimplePeakFinder(threshold=self.threshold)
        subvalues = np.array_split(data, self.nchunks)
        last_index = 0
        for sv in subvalues:
            chunk_peaks = pf.find(NumericalData(sv))
            for peak in chunk_peaks:
                k = peak.index + last_index
                peaks.append(PeakInfo(k, peak.value))
            last_index += len(sv)
        return peaks

# _id: 6259904c15baa723494633a4

class PatientDataPayload(models.Model):
    """Dumping area for incoming patient data XML snippets"""

    STATUS_CHOICES = (
        ('received', 'Received'),
        ('error', 'Error'),
        ('success', 'Success'),
    )
    raw_data = models.TextField()
    submit_date = models.DateTimeField()
    status = models.CharField(max_length=16, default='received', choices=STATUS_CHOICES)
    error_message = models.TextField(blank=True)

    def save(self, **kwargs):
        if not self.pk:
            self.submit_date = datetime.datetime.now()
        return super(PatientDataPayload, self).save(**kwargs)

    def __unicode__(self):
        msg = u'Raw Data Payload, submitted on: {date}'
        return msg.format(date=self.submit_date)

# _id: 6259904c96565a6dacd2d995

class Theme(object):
    """Base Theme acting as a Base Class for all other themes."""

    Format = flags(
        "NONE",
        "CENTER_HORZ",
        "CENTER_VERT",
        "PAD_HORZ",
        "PAD_VERT",
    )
    Format.CENTER_FULL = Format.CENTER_HORZ | Format.CENTER_VERT
    Format.PAD_FULL = Format.PAD_HORZ | Format.PAD_VERT

    DEFAULT_FONT_COLOR = pg.Color(255, 255, 255)
    DEFAULT_FONT_SIZE = 12
    FONT = None
    FONT_COLOR = pg.Color(255, 255, 255)
    PADDING = 0
    _font_cache = {}

    # Abstract drawing hooks; the original raised the non-callable
    # NotImplemented constant, corrected here to NotImplementedError.
    def drawFrame(self, surf, rect):
        raise NotImplementedError()

    def drawButton(self, surf, rect, state):
        raise NotImplementedError()

    def drawImage(self, surf, rect, image):
        raise NotImplementedError()

    def drawInput(self, surf, rect):
        raise NotImplementedError()

    def drawProgress(self, surf, rect, progress, steps):
        raise NotImplementedError()

    def drawText(self, surf, rect, text, flags_=0):
        if not self.FONT:
            self.FONT = self.generateFont("default", None, self.DEFAULT_FONT_SIZE)
        texts = text.split("\n")
        rect = rect.copy()
        for text in texts:
            text_surf = self.FONT.render(text, True, self.FONT_COLOR)
            if flags_ & Theme.Format.PAD_FULL:
                rect = rect.inflate(-self.PADDING if flags_ & self.Format.PAD_HORZ else 0,
                                    -self.PADDING if flags_ & self.Format.PAD_VERT else 0)
            pos = rect.topleft
            if flags_ & Theme.Format.CENTER_FULL:
                pos = (pos[0] + ((rect.width - text_surf.get_width()) / 2 if flags_ & self.Format.CENTER_HORZ else 0),
                       pos[1] + ((rect.height - text_surf.get_height()) / 2 if flags_ & self.Format.CENTER_VERT else 0))
            surf.blit(text_surf, pos)
            rect.y += text_surf.get_height()
        return text_surf.get_width()

    def generateFont(self, name, face=None, size=12):
        font = self._font_cache.get(name, None)
        face = face if face else pg.font.get_default_font()
        if not font:
            font = pg.font.Font(face, size)
            self._font_cache[name] = font
        return font

    def selectFont(self, name, color=(255, 255, 255)):
        font = self._font_cache.get(name, None)
        if font:
            self.FONT = font
            self.FONT_COLOR = color
            return True
        return False

# _id: 6259904c76e4537e8c3f099d

class HttpTestServer(object):
    """Super simple builtin HTTP test server."""

    def __init__(self, use_ssl=False):
        self.use_ssl = use_ssl
        self.responses = []

    def add_response(self, httptestresponse):
        assert isinstance(httptestresponse, HttpTestResponse), httptestresponse
        self.responses.append(httptestresponse)

    def start(self):
        from multiprocessing import Process
        from socket import socket, SHUT_RDWR
        self.SHUT_RDWR = SHUT_RDWR
        self.socket = socket()
        if self.use_ssl:
            here = path.dirname(__file__)
            self.socket = ssl.wrap_socket(
                self.socket,
                certfile=path.join(here, 'http_testserver.crt'),
                keyfile=path.join(here, 'http_testserver.key'))
        self.socket.bind(('127.0.0.1', 0))
        self.socket.listen(0)
        self.port = self.socket.getsockname()[1]
        self.process = Process(target=self.respond_all)
        self.process.start()
        self.socket.close()

    def join(self):
        self.socket.close()
        self.process.join()

    def respond_all(self):
        for response in self.responses:
            self.respond(response)

    def respond(self, response):
        error = False
        try:
            peersock, peeraddr = self.socket.accept()
        except ssl.SSLError:
            return
        data = peersock.recv(4096)
        if HttpTestCase.to_str(data).startswith(response.method + ' '):
            if response.body is None:
                body = data
                if str != bytes:
                    body = body.decode('utf-8')
            else:
                body = response.body
            peersock.send(
                ('HTTP/1.0 %s Unused Response Title\r\n'
                 'Content-Type: text/plain; utf-8\r\n'
                 '\r\n%s' % (response.code, body)
                 ).encode('utf-8'))
        else:
            peersock.send(
                ('HTTP/1.0 405 Method Not Implemented\r\n'
                 'Content-Type: text/plain; utf-8\r\n'
                 '\r\nUnexpected stuff'
                 ).encode('utf-8'))
            error = True
        peersock.shutdown(self.SHUT_RDWR)
        peersock.close()
        if error:
            raise RuntimeError('request mismatch')

# _id: 6259904cd6c5a102081e3535

class PyFuncBatchEnv(InGraphBatchEnv):
    """Batch of environments inside the TensorFlow graph.

    The batch of environments will be stepped and reset inside of the graph using
    a tf.py_func(). The current batch of observations, actions, rewards, and done
    flags are held in corresponding variables.
    """

    def __init__(self, batch_env):
        self._batch_env = batch_env
        observ_shape = utils.parse_shape(self._batch_env.observation_space)
        observ_dtype = utils.parse_dtype(self._batch_env.observation_space)
        self.action_shape = list(utils.parse_shape(self._batch_env.action_space))
        self.action_dtype = utils.parse_dtype(self._batch_env.action_space)
        with tf.variable_scope('env_temporary'):
            self._observ = tf.Variable(
                tf.zeros((len(self._batch_env),) + observ_shape, observ_dtype),
                name='observ', trainable=False)

    def __getattr__(self, name):
        return getattr(self._batch_env, name)

    def initialize(self, sess):
        pass

    def __len__(self):
        return len(self._batch_env)

    def __getitem__(self, index):
        return self._batch_env[index]

    def simulate(self, action):
        with tf.name_scope('environment/simulate'):
            if action.dtype in (tf.float16, tf.float32, tf.float64):
                action = tf.check_numerics(action, 'action')
            observ_dtype = utils.parse_dtype(self._batch_env.observation_space)
            observ, reward, done = tf.py_func(
                lambda a: self._batch_env.step(a)[:3], [action],
                [observ_dtype, tf.float32, tf.bool], name='step')
            observ = tf.check_numerics(observ, 'observ')
            reward = tf.check_numerics(reward, 'reward')
            reward.set_shape((len(self),))
            done.set_shape((len(self),))
            with tf.control_dependencies([self._observ.assign(observ)]):
                return tf.identity(reward), tf.identity(done)

    def _reset_non_empty(self, indices):
        observ_dtype = utils.parse_dtype(self._batch_env.observation_space)
        observ = tf.py_func(
            self._batch_env.reset, [indices], observ_dtype, name='reset')
        observ = tf.check_numerics(observ, 'observ')
        with tf.control_dependencies([
                tf.scatter_update(self._observ, indices, observ)]):
            return tf.identity(observ)

    @property
    def observ(self):
        return self._observ.read_value()

    def close(self):
        self._batch_env.close()

# _id: 6259904c07f4c71912bb084c

class UntrashInputSet(InputSet):
    """An InputSet with methods appropriate for specifying the inputs to the Untrash
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """

    def set_AccessToken(self, value):
        super(UntrashInputSet, self)._set_input('AccessToken', value)

    def set_ClientID(self, value):
        super(UntrashInputSet, self)._set_input('ClientID', value)

    def set_ClientSecret(self, value):
        super(UntrashInputSet, self)._set_input('ClientSecret', value)

    def set_Fields(self, value):
        super(UntrashInputSet, self)._set_input('Fields', value)

    def set_FileID(self, value):
        super(UntrashInputSet, self)._set_input('FileID', value)

    def set_RefreshToken(self, value):
        super(UntrashInputSet, self)._set_input('RefreshToken', value)

# _id: 6259904c10dbd63aa1c71ff5

class BiasResNetAddUpNoNorm2(nn.Module):
    """A network of residual convolutional layers"""

    def __init__(self, num_init_features, args):
        super().__init__()
        num_layers = args.num_layers
        kernel_size = args.kernel_size
        num_features = num_init_features
        self.reduce_channels = (Linear(num_features, num_features // args.divide_channels)
                                if args.divide_channels > 1 else None)
        num_features = num_features // args.divide_channels
        self.output_channels = num_features
        self.add_up_scale = 1 / (num_layers + 1)
        self.residual_blocks = nn.ModuleList([])
        for _ in range(num_layers):
            self.residual_blocks.append(_ResLayer(num_features, kernel_size, args))

    def forward(self, x, encoder_mask=None, decoder_mask=None, incremental_state=None):
        if self.reduce_channels is not None:
            x = self.reduce_channels(x)
        add_up = self.add_up_scale * x
        for layer in self.residual_blocks:
            x = layer(x, encoder_mask=encoder_mask, decoder_mask=decoder_mask,
                      incremental_state=incremental_state)
            add_up += self.add_up_scale * x
        return add_up

# _id: 6259904c6e29344779b01a5a

class UptimeRobotSensor(UptimeRobotEntity, SensorEntity): <NEW_LINE> <INDENT> @property <NEW_LINE> def native_value(self) -> str: <NEW_LINE> <INDENT> return SENSORS_INFO[self.monitor.status]["value"] <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self) -> str: <NEW_LINE> <INDENT> return SENSORS_INFO[self.monitor.status]["icon"] | Representation of an UptimeRobot sensor. | 6259904c8e71fb1e983bcede
class ListingCAsTestCase(CATestCommon): <NEW_LINE> <INDENT> def test_list_and_get_cas(self): <NEW_LINE> <INDENT> (resp, cas, total, next_ref, prev_ref) = self.ca_behaviors.get_cas() <NEW_LINE> self.assertGreater(total, 0) <NEW_LINE> for item in cas: <NEW_LINE> <INDENT> ca = self.ca_behaviors.get_ca(item) <NEW_LINE> self.assertIsNotNone(ca.model.plugin_name) <NEW_LINE> self.assertIsNotNone(ca.model.ca_id) <NEW_LINE> self.assertIsNotNone(ca.model.plugin_ca_id) <NEW_LINE> <DEDENT> <DEDENT> @depends_on_ca_plugins('snakeoil_ca', 'simple_certificate') <NEW_LINE> def test_list_snakeoil_and_simple_cert_cas(self): <NEW_LINE> <INDENT> (resp, cas, total, next_ref, prev_ref) = self.ca_behaviors.get_cas() <NEW_LINE> self.assertEqual(2, total) <NEW_LINE> <DEDENT> @depends_on_ca_plugins('dogtag') <NEW_LINE> def test_list_dogtag_cas(self): <NEW_LINE> <INDENT> (resp, cas, total, next_ref, prev_ref) = self.ca_behaviors.get_cas() <NEW_LINE> self.assertGreater(total, 0) | Tests for listing CAs.
Must be in a separate class so that we can deselect them
in the parallel CA tests, until we can deselect specific tests
using a decorator. | 6259904c435de62698e9d220 |
@admin.register(Teacher) <NEW_LINE> class TeacherAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ('id', 'get_full_name', 'code') | Creates admin interface for maintaining teachers. | 6259904c50485f2cf55dc3a4 |
class InstanceActionsTestV2(integrated_helpers._IntegratedTestBase): <NEW_LINE> <INDENT> def test_get_instance_actions(self): <NEW_LINE> <INDENT> server = self._create_server() <NEW_LINE> actions = self.api.get_instance_actions(server['id']) <NEW_LINE> self.assertEqual('create', actions[0]['action']) <NEW_LINE> <DEDENT> def test_get_instance_actions_deleted(self): <NEW_LINE> <INDENT> server = self._create_server() <NEW_LINE> self._delete_server(server) <NEW_LINE> self.assertRaises(client.OpenStackApiNotFoundException, self.api.get_instance_actions, server['id']) | Tests Instance Actions API | 6259904cac7a0e7691f738f3 |
class PhreeqcInputLog(object): <NEW_LINE> <INDENT> def __init__(self, filename, dbpath): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.pq_input = open(self.filename,'w') <NEW_LINE> self.pq_input.truncate() <NEW_LINE> self._preamble() <NEW_LINE> self.pq_input.write("\nDATABASE %s" % dbpath) <NEW_LINE> self.pq_input.close() <NEW_LINE> <DEDENT> def _preamble(self): <NEW_LINE> <INDENT> now = datetime.datetime.now() <NEW_LINE> line1 = "#\tIPhreeqc input log.\n" <NEW_LINE> line2 = "#\tDate:\t%i-%i-%i\n" % (now.year, now.month, now.day) <NEW_LINE> line3 = "#\tTime:\t%i:%i:%i\n" % (now.hour, now.minute, now.second) <NEW_LINE> self.pq_input.write(line1 + '\n') <NEW_LINE> self.pq_input.write(line2) <NEW_LINE> self.pq_input.write(line3) <NEW_LINE> <DEDENT> def _buffer(self): <NEW_LINE> <INDENT> self.pq_input.write("\n\n" + '#' + '-'*20 + "\n\n") <NEW_LINE> <DEDENT> def add(self, string): <NEW_LINE> <INDENT> self.pq_input = open(self.filename,'a') <NEW_LINE> self._buffer() <NEW_LINE> self.pq_input.write(string) <NEW_LINE> self.pq_input.close() | Logs IPhreeqc input to a text file.
PhreeqcInputLog is a utility class used by caves.Simulator objects to log
iphreeqc input strings to a .phr text file. The resulting file is useful
for understanding how the code runs and debugging failed models. The log
file is also valid phreeqc input and can be run directly as a phreeqc
script. | 6259904ccb5e8a47e493cb93 |
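A minimal usage sketch, assuming the class above is importable and its module imports datetime (the database path is hypothetical):

    import os
    import tempfile

    log_path = os.path.join(tempfile.mkdtemp(), 'run.phr')
    log = PhreeqcInputLog(log_path, dbpath='/usr/local/share/phreeqc.dat')  # hypothetical path
    log.add('SOLUTION 1\n    pH 7.0\n    temp 25.0\nEND')
    print(open(log_path).read())  # preamble, DATABASE line, separator, then the block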
class Node(object): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.next = None | The basic building block for the linked list.
Attributes:
data: Contain the list item.
next (Node): Reference to the next Node. | 6259904c7cff6e4e811b6e54 |
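A minimal sketch linking two nodes into a list:

    head = Node('first')
    head.next = Node('second')
    print(head.data, '->', head.next.data)  # first -> second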
class GetMaximumLevelWithData(TestMixins.GetWithDataMixin, OptionalParameterTestFixture): <NEW_LINE> <INDENT> PID = 'MAXIMUM_LEVEL' | Get MAXIMUM_LEVEL with extra data. | 6259904c8e05c05ec3f6f867 |
class chi_gen(rv_continuous): <NEW_LINE> <INDENT> def _rvs(self, df): <NEW_LINE> <INDENT> sz, rndm = self._size, self._random_state <NEW_LINE> return np.sqrt(chi2.rvs(df, size=sz, random_state=rndm)) <NEW_LINE> <DEDENT> def _pdf(self, x, df): <NEW_LINE> <INDENT> return np.exp(self._logpdf(x, df)) <NEW_LINE> <DEDENT> def _logpdf(self, x, df): <NEW_LINE> <INDENT> l = np.log(2) - .5*np.log(2)*df - sc.gammaln(.5*df) <NEW_LINE> return l + sc.xlogy(df - 1., x) - .5*x**2 <NEW_LINE> <DEDENT> def _cdf(self, x, df): <NEW_LINE> <INDENT> return sc.gammainc(.5*df, .5*x**2) <NEW_LINE> <DEDENT> def _ppf(self, q, df): <NEW_LINE> <INDENT> return np.sqrt(2*sc.gammaincinv(.5*df, q)) <NEW_LINE> <DEDENT> def _stats(self, df): <NEW_LINE> <INDENT> mu = np.sqrt(2)*sc.gamma(df/2.0+0.5)/sc.gamma(df/2.0) <NEW_LINE> mu2 = df - mu*mu <NEW_LINE> g1 = (2*mu**3.0 + mu*(1-2*df))/np.asarray(np.power(mu2, 1.5)) <NEW_LINE> g2 = 2*df*(1.0-df)-6*mu**4 + 4*mu**2 * (2*df-1) <NEW_LINE> g2 /= np.asarray(mu2**2.0) <NEW_LINE> return mu, mu2, g1, g2 <NEW_LINE> <DEDENT> def _fitstart(self, data): <NEW_LINE> <INDENT> m = data.mean() <NEW_LINE> v = data.var() <NEW_LINE> df = max(np.round(v + m ** 2), 1) <NEW_LINE> return super(chi_gen, self)._fitstart(data, args=(df,)) | A chi continuous random variable.
%(before_notes)s
Notes
-----
The probability density function for `chi` is:
.. math::
f(x, k) = \frac{1}{2^{k/2-1} \Gamma \left( k/2 \right)}
x^{k-1} \exp \left( -x^2/2 \right)
for :math:`x >= 0` and :math:`k > 0` (degrees of freedom, denoted ``df``
in the implementation). :math:`\Gamma` is the gamma function
(`scipy.special.gamma`).
Special cases of `chi` are:
- ``chi(1, loc, scale)`` is equivalent to `halfnorm`
- ``chi(2, 0, scale)`` is equivalent to `rayleigh`
- ``chi(3, 0, scale)`` is equivalent to `maxwell`
`chi` takes ``df`` as a shape parameter.
%(after_notes)s
%(example)s | 6259904c30dc7b76659a0c4e |
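The class above is the generator behind the public scipy.stats.chi instance; a short usage sketch (assuming SciPy is installed):

    import numpy as np
    from scipy import stats

    df = 3  # chi with df=3 is equivalent to the maxwell distribution
    x = np.linspace(0.1, 4.0, 5)
    print(stats.chi.pdf(x, df))                                 # density values
    print(stats.chi.rvs(df, size=1000, random_state=0).mean())  # close to the analytic mean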
class TestCrossBorderQuotesErrors(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return CrossBorderQuotesErrors( quote = [ pbshipping.models.cross_border_quotes_errors_quote.CrossBorderQuotesErrors_quote( quote_currency = '0', quote_lines = [ pbshipping.models.cross_border_quotes_errors_quote_lines.CrossBorderQuotesErrors_quoteLines( line_id = '0', merchant_com_ref_id = '0', quantity = 56, unit_errors = [ pbshipping.models.cross_border_quotes_errors_unit_errors.CrossBorderQuotesErrors_unitErrors( error = 56, message = '0', ) ], ) ], errors = pbshipping.models.cross_border_quotes_errors_errors.CrossBorderQuotesErrors_errors(), ) ] ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return CrossBorderQuotesErrors( ) <NEW_LINE> <DEDENT> <DEDENT> def testCrossBorderQuotesErrors(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True) | CrossBorderQuotesErrors unit test stubs | 6259904c3eb6a72ae038ba75 |
class CheckEqualExceptFor(PfTestBase): <NEW_LINE> <INDENT> def test_equal_except_for(self): <NEW_LINE> <INDENT> from pyfusion.utils.debug import equal_except_for <NEW_LINE> d1 = Dummy(a=1, b=2, abc=3) <NEW_LINE> d2 = Dummy(a=1, b=2) <NEW_LINE> self.assertFalse(equal_except_for(d1, d2)) <NEW_LINE> self.assertTrue(equal_except_for(d1, d1)) <NEW_LINE> self.assertTrue(equal_except_for(d1, d2, ['b', 'abc'])) <NEW_LINE> self.assertTrue(equal_except_for(d1, d2, 'abc')) | Test custom object comparison, which allows specified
attributes to be ignored | 6259904c63b5f9789fe86587
class OpenAIGPTLMHead(nn.Module): <NEW_LINE> <INDENT> def __init__(self, model_embeddings_weights, config): <NEW_LINE> <INDENT> super(OpenAIGPTLMHead, self).__init__() <NEW_LINE> self.n_embd = config.n_embd <NEW_LINE> self.vocab_size = config.vocab_size <NEW_LINE> self.predict_special_tokens = config.predict_special_tokens <NEW_LINE> embed_shape = model_embeddings_weights.shape <NEW_LINE> self.decoder = nn.Linear(embed_shape[1], embed_shape[0], bias=False) <NEW_LINE> self.set_embeddings_weights(model_embeddings_weights) <NEW_LINE> <DEDENT> def set_embeddings_weights(self, model_embeddings_weights, predict_special_tokens=True): <NEW_LINE> <INDENT> self.predict_special_tokens = predict_special_tokens <NEW_LINE> embed_shape = model_embeddings_weights.shape <NEW_LINE> self.decoder.weight = model_embeddings_weights <NEW_LINE> <DEDENT> def forward(self, hidden_state): <NEW_LINE> <INDENT> lm_logits = self.decoder(hidden_state) <NEW_LINE> if not self.predict_special_tokens: <NEW_LINE> <INDENT> lm_logits = lm_logits[..., :self.vocab_size] <NEW_LINE> <DEDENT> return lm_logits | Language Model Head for the transformer | 6259904c3c8af77a43b6894a |
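A minimal weight-tying sketch (assuming PyTorch; the config object is a hypothetical stand-in for the real OpenAIGPTConfig):

    import torch
    import torch.nn as nn
    from types import SimpleNamespace

    config = SimpleNamespace(n_embd=8, vocab_size=10, predict_special_tokens=True)
    embeddings = nn.Embedding(12, 8)  # 10 regular tokens plus 2 special tokens
    head = OpenAIGPTLMHead(embeddings.weight, config)  # decoder shares the embedding matrix
    logits = head(torch.zeros(1, 8))
    print(logits.shape)  # torch.Size([1, 12]); sliced to 10 when predict_special_tokens=False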
class PayLogicMiyu(object): <NEW_LINE> <INDENT> def init_info(self, config, is_sandbox): <NEW_LINE> <INDENT> self.is_sandbox = is_sandbox <NEW_LINE> <DEDENT> def calc_order_info(self, user_id, server_id, order, order_number, now, value): <NEW_LINE> <INDENT> price = int(order.truePrice) <NEW_LINE> product_id = order.productId <NEW_LINE> id_3rd = value <NEW_LINE> timestamp = int(time.time()) <NEW_LINE> info = { "time":timestamp, "server_id":server_id, "server_name":server_id, "product_id":product_id, "product_price":price, "product_count":order.productCount, "product_name":order.subject.encode("utf-8"), "product_description":order.subject.encode("utf-8"), "order_number":order_number, "ext":order_number } <NEW_LINE> info = json.dumps(info) <NEW_LINE> logger.notice("Generate order info" "[user_id=%d][pay_id=%d][order_no=%s][now=%d]" "[server_id=%d][product_id=%s][info=%s]" % (user_id, order.id, order_number, now, server_id, product_id, info)) <NEW_LINE> return info <NEW_LINE> <DEDENT> def check_order_reply(self, user_id, server_id, reply, order_number, now): <NEW_LINE> <INDENT> infos = PayRedisAgent().get_set(server_id, user_id) <NEW_LINE> if len(infos) == 0: <NEW_LINE> <INDENT> logger.warning("No pay info[server_id=%d][user_id=%d]" % (server_id, user_id)) <NEW_LINE> return None <NEW_LINE> <DEDENT> orders = [] <NEW_LINE> for (product_id, pay_order_number) in infos: <NEW_LINE> <INDENT> o = PayPool().get_by_product_id(product_id) <NEW_LINE> if o is None: <NEW_LINE> <INDENT> logger.warning("Order not found[product_id=%s]" % product_id) <NEW_LINE> continue <NEW_LINE> <DEDENT> ADD_GOLD_PRODUCT_ID = "com.anqu.zwsgApp.zwsg99999999" <NEW_LINE> if product_id == ADD_GOLD_PRODUCT_ID: <NEW_LINE> <INDENT> str_list = pay_order_number.split('_') <NEW_LINE> vip = str_list[0] <NEW_LINE> gold = str_list[1] <NEW_LINE> o.gold = gold <NEW_LINE> if vip == "1": <NEW_LINE> <INDENT> o.truePrice = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> o.truePrice = int(gold) / int(float( data_loader.OtherBasicInfo_dict["ratio_pay_price_to_vip_points"].value)) <NEW_LINE> <DEDENT> <DEDENT> orders.append((o, pay_order_number)) <NEW_LINE> <DEDENT> if PayRedisAgent().finish_set(server_id, user_id, infos): <NEW_LINE> <INDENT> for (product_id, pay_order_number) in infos: <NEW_LINE> <INDENT> logger.notice("Finish order[server_id=%d][user_id=%d]" "[product_id=%s][order_number=%s]" % ( server_id, user_id, product_id, pay_order_number)) <NEW_LINE> <DEDENT> return orders <NEW_LINE> <DEDENT> return None | Miyu
| 6259904c30c21e258be99c1f |
class TwitterDialogRedirect(views.OAuthDialogRedirectView): <NEW_LINE> <INDENT> client_class = TwitterClient | View that handles the redirects for the Twitter authorization dialog. | 6259904c15baa723494633a6 |
class Angle: <NEW_LINE> <INDENT> dtype = 'angle' <NEW_LINE> def __init__(self, body=None): <NEW_LINE> <INDENT> self.body = body if body!=None else [] <NEW_LINE> self.buf = [] <NEW_LINE> <DEDENT> def addline(self): <NEW_LINE> <INDENT> line = self.buf <NEW_LINE> if len(line)>2: <NEW_LINE> <INDENT> self.body.append(line) <NEW_LINE> <DEDENT> self.buf = [] <NEW_LINE> <DEDENT> def snap(self, x, y, lim): <NEW_LINE> <INDENT> minl, idx = 1000, None <NEW_LINE> for i in self.body: <NEW_LINE> <INDENT> for j in i: <NEW_LINE> <INDENT> d = (j[0]-x)**2+(j[1]-y)**2 <NEW_LINE> if d < minl:minl,idx = d,(i, i.index(j)) <NEW_LINE> <DEDENT> <DEDENT> return idx if minl**0.5<lim else None <NEW_LINE> <DEDENT> def pick(self, x, y, lim): <NEW_LINE> <INDENT> return self.snap(x, y, lim) <NEW_LINE> <DEDENT> def draged(self, ox, oy, nx, ny, i): <NEW_LINE> <INDENT> i[0][i[1]] = (nx, ny) <NEW_LINE> <DEDENT> def draw(self, dc, f, **key): <NEW_LINE> <INDENT> dc.SetPen(wx.Pen(Setting['color'], width=1, style=wx.SOLID)) <NEW_LINE> dc.SetTextForeground(Setting['tcolor']) <NEW_LINE> linefont = wx.Font(8, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, False) <NEW_LINE> dc.SetFont(linefont) <NEW_LINE> dc.DrawLines([f(*i) for i in self.buf]) <NEW_LINE> for i in self.buf:dc.DrawCircle(f(*i),2) <NEW_LINE> for line in self.body: <NEW_LINE> <INDENT> dc.DrawLines([f(*i) for i in line]) <NEW_LINE> for i in line:dc.DrawCircle(f(*i),2) <NEW_LINE> pts = np.array(line) <NEW_LINE> v1 = pts[:-2]-pts[1:-1] <NEW_LINE> v2 = pts[2:]-pts[1:-1] <NEW_LINE> a = np.sum(v1*v2, axis=1)*1.0 <NEW_LINE> a/=norm(v1,axis=1)*norm(v2,axis=1) <NEW_LINE> ang = np.arccos(a)/np.pi*180 <NEW_LINE> for i,j in zip(ang,line[1:-1]): <NEW_LINE> <INDENT> dc.DrawText('%.0f'%i, f(*j)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def report(self, title): <NEW_LINE> <INDENT> rst = [] <NEW_LINE> for line in self.body: <NEW_LINE> <INDENT> pts = np.array(line) <NEW_LINE> v1 = pts[:-2]-pts[1:-1] <NEW_LINE> v2 = pts[2:]-pts[1:-1] <NEW_LINE> a = np.sum(v1*v2, axis=1)*1.0 <NEW_LINE> a/=norm(v1,axis=1)*norm(v2,axis=1) <NEW_LINE> ang = np.arccos(a)/np.pi*180 <NEW_LINE> rst.append(list(ang.round(1))) <NEW_LINE> <DEDENT> lens = [len(i) for i in rst] <NEW_LINE> maxlen = max(lens) <NEW_LINE> fill = [[0]*(maxlen-i) for i in lens] <NEW_LINE> rst = [i+j for i,j in zip(rst, fill)] <NEW_LINE> titles = ["A{}".format(i+1) for i in range(maxlen)] <NEW_LINE> IPy.show_table(pd.DataFrame(rst, columns=titles), title) | Define the class with line drawing functions | 6259904c96565a6dacd2d996
class PSF(): <NEW_LINE> <INDENT> @property <NEW_LINE> def pixel_scale(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._pixel_scale <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @pixel_scale.setter <NEW_LINE> def pixel_scale(self, pixel_scale): <NEW_LINE> <INDENT> pixel_scale = utils.ensure_unit(pixel_scale, (u.arcsecond / u.pixel)) <NEW_LINE> if pixel_scale <= 0 * u.arcsecond / u.pixel: <NEW_LINE> <INDENT> raise ValueError("Pixel scale should be > 0, got {}!".format(pixel_scale)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._pixel_scale = pixel_scale <NEW_LINE> <DEDENT> self._update_model() <NEW_LINE> <DEDENT> @property <NEW_LINE> def n_pix(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._n_pix <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def peak(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._peak <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def _get_peak(self): <NEW_LINE> <INDENT> centred_psf = self.pixellated(size=(1, 1), offsets=(0, 0)) <NEW_LINE> return centred_psf[0, 0] / u.pixel <NEW_LINE> <DEDENT> def _get_n_pix(self, size=(20, 20)): <NEW_LINE> <INDENT> size = tuple(s + s % 2 for s in size) <NEW_LINE> corner_psf = self.pixellated(size, offsets=(0, 0)) <NEW_LINE> return 1 / ((corner_psf**2).sum()) * u.pixel <NEW_LINE> <DEDENT> def _update_model(self): <NEW_LINE> <INDENT> raise NotImplementedError | Abstract base class representing a 2D point spread function.
Used to calculate a pixellated version of the PSF and associated
parameters useful for point source signal to noise and saturation
limit calculations. | 6259904c3cc13d1c6d466b53 |
class Symbolizer(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.stats = [] <NEW_LINE> <DEDENT> def add_stats(self, val): <NEW_LINE> <INDENT> self.stats.append(val) <NEW_LINE> <DEDENT> def load(self, data): <NEW_LINE> <INDENT> for dat in data: <NEW_LINE> <INDENT> self.add_stats(dat[2]) | separator based method | 6259904c71ff763f4b5e8bc0 |
class RangeCounter(object): <NEW_LINE> <INDENT> def __init__(self, n): <NEW_LINE> <INDENT> if not n > 0: <NEW_LINE> <INDENT> raise ValueError("the number of counters must be positive.") <NEW_LINE> <DEDENT> trees = [] <NEW_LINE> offsets = [] <NEW_LINE> self._n = n <NEW_LINE> base = 1 <NEW_LINE> offset = 0 <NEW_LINE> while base <= self._n: <NEW_LINE> <INDENT> if self._n & base: <NEW_LINE> <INDENT> trees.append(_FCTree(base)) <NEW_LINE> offsets.append(offset) <NEW_LINE> offset += base <NEW_LINE> <DEDENT> base <<= 1 <NEW_LINE> <DEDENT> self._trees = tuple(trees) <NEW_LINE> self._offsets = tuple(offsets) <NEW_LINE> self._lock = threading.Lock() <NEW_LINE> <DEDENT> @property <NEW_LINE> def n(self): <NEW_LINE> <INDENT> return self._n <NEW_LINE> <DEDENT> def increment(self, start, end, count=1): <NEW_LINE> <INDENT> if count <= 0: <NEW_LINE> <INDENT> raise ValueError("count must be positive.") <NEW_LINE> <DEDENT> self._validate_index(start) <NEW_LINE> self._validate_index(end) <NEW_LINE> left_bond = self._left_bound(start) <NEW_LINE> right_bond = self._left_bound(end) <NEW_LINE> with self._lock: <NEW_LINE> <INDENT> while left_bond <= right_bond: <NEW_LINE> <INDENT> offset = self._offsets[left_bond] <NEW_LINE> tree = self._trees[left_bond] <NEW_LINE> tree.increment(max(offset, start) - offset, min(end - offset, tree.leaves - 1), count) <NEW_LINE> left_bond += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def decrement(self, start, end, count=1): <NEW_LINE> <INDENT> if count <= 0: <NEW_LINE> <INDENT> raise ValueError("count must be positive") <NEW_LINE> <DEDENT> self._validate_index(start) <NEW_LINE> self._validate_index(end) <NEW_LINE> left_bond = self._left_bound(start) <NEW_LINE> right_bond = self._left_bound(end) <NEW_LINE> with self._lock: <NEW_LINE> <INDENT> while left_bond <= right_bond: <NEW_LINE> <INDENT> offset = self._offsets[left_bond] <NEW_LINE> tree = self._trees[left_bond] <NEW_LINE> tree.decrement(max(offset, start) - offset, min(end - offset, tree.leaves - 1), count) <NEW_LINE> left_bond += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> self._validate_index(index) <NEW_LINE> tree_index = self._left_bound(index) <NEW_LINE> with self._lock: <NEW_LINE> <INDENT> return self._trees[tree_index][index - self._offsets[tree_index]] <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for i in range(self._n): <NEW_LINE> <INDENT> yield self[i] <NEW_LINE> <DEDENT> <DEDENT> def all(self): <NEW_LINE> <INDENT> bottoms = [] <NEW_LINE> for tree in self._trees: <NEW_LINE> <INDENT> tree.fall_down() <NEW_LINE> bottoms.extend(tree.bottoms()) <NEW_LINE> <DEDENT> return bottoms <NEW_LINE> <DEDENT> def _validate_index(self, index): <NEW_LINE> <INDENT> if not 0 <= index < self._n: <NEW_LINE> <INDENT> raise ValueError("index[%s] is out of range[0...%s]." % (index, self._n)) <NEW_LINE> <DEDENT> <DEDENT> def _left_bound(self, index): <NEW_LINE> <INDENT> i = bisect.bisect_right(self._offsets, index) <NEW_LINE> return i if i == 0 else i - 1 | :type _trees: [_FCTree]
:type _offsets: [int]
:type _n: int | 6259904c26068e7796d4dd5f
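A hypothetical usage sketch, assuming RangeCounter and its _FCTree helper (not shown here) are importable; both ends of a range are inclusive:

    counter = RangeCounter(10)
    counter.increment(2, 7)            # +1 on indices 2..7
    counter.increment(0, 9, count=3)   # +3 on every index
    counter.decrement(5, 5)            # -1 on index 5 only
    print(counter[4])                  # 4
    print(counter.all())               # per-index counts for indices 0..9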
@magics_class <NEW_LINE> class MagicClassWithHelpers(Magics): <NEW_LINE> <INDENT> _parser_store = {} <NEW_LINE> @property <NEW_LINE> def Context(self): <NEW_LINE> <INDENT> if self.shell is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.shell.user_ns <NEW_LINE> <DEDENT> def add_context(self, context): <NEW_LINE> <INDENT> if self.shell is None: <NEW_LINE> <INDENT> class EmptyClass: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.user_ns = {} <NEW_LINE> <DEDENT> <DEDENT> self.shell = EmptyClass() <NEW_LINE> <DEDENT> for k, v in context.items(): <NEW_LINE> <INDENT> self.shell.user_ns[k] = v <NEW_LINE> <DEDENT> <DEDENT> def get_parser(self, parser_class, name): <NEW_LINE> <INDENT> res = MagicClassWithHelpers._parser_store.get(name, None) <NEW_LINE> if res is None: <NEW_LINE> <INDENT> MagicClassWithHelpers._parser_store[name] = parser_class() <NEW_LINE> return MagicClassWithHelpers._parser_store[name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return res <NEW_LINE> <DEDENT> <DEDENT> def get_args(self, line, parser, print_function=print): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> args = parser.parse_cmd(line, context=self.Context) <NEW_LINE> <DEDENT> except SystemExit: <NEW_LINE> <INDENT> print_function(parser.format_usage()) <NEW_LINE> args = None <NEW_LINE> <DEDENT> return args | Provides some functions reused in other classes inherited from *Magics*.
The class should not be registered as it is but should be
used as an ancestor for another class.
It can be registered this way::
def register_file_magics():
from IPython import get_ipython
ip = get_ipython()
ip.register_magics(MagicFile)
.. versionadded:: 0.9 | 6259904ccb5e8a47e493cb94 |
class TxtSettingsDto(object): <NEW_LINE> <INDENT> swagger_types = { 'tag_regexp': 'str', 'translatable_text_regexp': 'str' } <NEW_LINE> attribute_map = { 'tag_regexp': 'tagRegexp', 'translatable_text_regexp': 'translatableTextRegexp' } <NEW_LINE> def __init__(self, tag_regexp=None, translatable_text_regexp=None): <NEW_LINE> <INDENT> self._tag_regexp = None <NEW_LINE> self._translatable_text_regexp = None <NEW_LINE> self.discriminator = None <NEW_LINE> if tag_regexp is not None: <NEW_LINE> <INDENT> self.tag_regexp = tag_regexp <NEW_LINE> <DEDENT> if translatable_text_regexp is not None: <NEW_LINE> <INDENT> self.translatable_text_regexp = translatable_text_regexp <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def tag_regexp(self): <NEW_LINE> <INDENT> return self._tag_regexp <NEW_LINE> <DEDENT> @tag_regexp.setter <NEW_LINE> def tag_regexp(self, tag_regexp): <NEW_LINE> <INDENT> self._tag_regexp = tag_regexp <NEW_LINE> <DEDENT> @property <NEW_LINE> def translatable_text_regexp(self): <NEW_LINE> <INDENT> return self._translatable_text_regexp <NEW_LINE> <DEDENT> @translatable_text_regexp.setter <NEW_LINE> def translatable_text_regexp(self, translatable_text_regexp): <NEW_LINE> <INDENT> self._translatable_text_regexp = translatable_text_regexp <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(TxtSettingsDto, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TxtSettingsDto): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259904cdc8b845886d549d8 |
class RawDataReport(Report): <NEW_LINE> <INDENT> __name__ = 'health_disease_notification.rawdata' <NEW_LINE> @classmethod <NEW_LINE> def parse(cls, report, records, data, localcontext): <NEW_LINE> <INDENT> symptoms = [("R19.7", "Diarrhoea, unspecified"), ("R53.1", "Asthenia (generalized weakness)"), ("R29.5", "Joint pain or stiffness (arthralgia)"), ("R29.7", "Joint swelling "), ("R60.2", "Periarticular oedema"), ("R52.3", "Muscle pain"), ("R52.4", "Back pain"), ("R11.1", "Vomitting"), ("R50.9", "Fever, unspecified "), ("R05", "Cough"), ("R06.0", "Dyspnoea "), ("R06.2", "Wheezing"), ("R07.4", "Chest pain"), ("R60.0", "Swelling of feet"), ("R56.0", "Convulsions, not elsewhere classified"), ("R07.0", "Pain in throat (sore throat)"), ("R21", "Rash"), ("R68.6", "Non-purulent conjunctivitis "), ("R68.7", "Conjunctival hyperaemia"), ("R51", "Headache"), ("R53", "Malaise and fatigue"), ("R17", "Jaundice"), ("R29.1", "Meningeal irritation "), ("R40", "Altered consciousness/somnolence "), ("R40.1", "Stupor"), ("R26", "Paralysis"), ("R04.2", "Cough with Haemorrhage "), ("R58", "Haemorrhage"), ("R04.4", "Epistaxis")] <NEW_LINE> default_symptoms = defaultdict(lambda: u'') <NEW_LINE> ordered_symptoms = defaultdict(default_symptoms.copy) <NEW_LINE> other_symptoms = default_symptoms.copy() <NEW_LINE> symptom_set = set([x for x, _ in symptoms]) <NEW_LINE> for rec in records: <NEW_LINE> <INDENT> my_symptoms = set([x.pathology.code for x in rec.symptoms]) <NEW_LINE> my_yeses = my_symptoms.intersection(symptom_set) <NEW_LINE> my_others = my_symptoms.difference(symptom_set) <NEW_LINE> ordered_symptoms[rec.id].update([(r, u'Yes') for r in my_yeses]) <NEW_LINE> if my_others: <NEW_LINE> <INDENT> other_symptoms[rec.id] = u', '.join(sorted(my_others)) <NEW_LINE> <DEDENT> <DEDENT> xldate = lambda val: val if val else None <NEW_LINE> localcontext.update( ordered_symptoms=ordered_symptoms, symptom_list=symptoms, xldate=xldate, other_symptoms=other_symptoms) <NEW_LINE> return super(RawDataReport, cls).parse(report, records, data, localcontext) | Disease Notification Spreadsheet Export | 6259904c30dc7b76659a0c50 |
class List(MiniLinq): <NEW_LINE> <INDENT> def __init__(self, items): <NEW_LINE> <INDENT> self.items = items <NEW_LINE> <DEDENT> def eval(self, env): <NEW_LINE> <INDENT> return [item.eval(env) for item in self.items] <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, List) and self.items == other.items <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '%s(%s)' % (self.__class__.__name__, self.items) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_jvalue(cls, jvalue): <NEW_LINE> <INDENT> return cls([MiniLinq.from_jvalue(item) for item in jvalue['List']]) <NEW_LINE> <DEDENT> def to_jvalue(self): <NEW_LINE> <INDENT> return {'List': [item.to_jvalue() for item in self.items]} | A list of expressions, embeds the [ ... ] syntax into the
MiniLinq meta-level | 6259904c4428ac0f6e65994d
class IQTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.outlist = [] <NEW_LINE> authenticator = xmlstream.ConnectAuthenticator('otherhost') <NEW_LINE> authenticator.namespace = 'testns' <NEW_LINE> self.xmlstream = xmlstream.XmlStream(authenticator) <NEW_LINE> self.xmlstream.transport = self <NEW_LINE> self.xmlstream.transport.write = self.outlist.append <NEW_LINE> self.xmlstream.connectionMade() <NEW_LINE> self.xmlstream.dataReceived( "<stream:stream xmlns:stream='http://etherx.jabber.org/streams' " "xmlns='testns' from='otherhost' version='1.0'>") <NEW_LINE> self.iq = xmlstream.IQ(self.xmlstream, type='get') <NEW_LINE> <DEDENT> def testBasic(self): <NEW_LINE> <INDENT> self.assertEquals(self.iq['type'], 'get') <NEW_LINE> self.assert_(self.iq['id']) <NEW_LINE> <DEDENT> def testSend(self): <NEW_LINE> <INDENT> self.iq.send() <NEW_LINE> self.assertEquals("<iq type='get' id='%s'/>" % self.iq['id'], self.outlist[-1]) <NEW_LINE> <DEDENT> def testResultResponse(self): <NEW_LINE> <INDENT> def cb(result): <NEW_LINE> <INDENT> self.assertEquals(result['type'], 'result') <NEW_LINE> <DEDENT> d = self.iq.send() <NEW_LINE> d.addCallback(cb) <NEW_LINE> xs = self.xmlstream <NEW_LINE> xs.dataReceived("<iq type='result' id='%s'/>" % self.iq['id']) <NEW_LINE> return d <NEW_LINE> <DEDENT> def testErrorResponse(self): <NEW_LINE> <INDENT> d = self.iq.send() <NEW_LINE> self.assertFailure(d, error.StanzaError) <NEW_LINE> xs = self.xmlstream <NEW_LINE> xs.dataReceived("<iq type='error' id='%s'/>" % self.iq['id']) <NEW_LINE> return d | Tests both IQ and the associated IIQResponseTracker callback. | 6259904cb5575c28eb7136d7 |
class PolicyActions(elements.BaseElement): <NEW_LINE> <INDENT> _FIELDS = ("config", "action") <NEW_LINE> def __init__(self, policy_result="ACCEPT_ROUTE"): <NEW_LINE> <INDENT> super(PolicyActions, self).__init__("actions") <NEW_LINE> self.config = PolicyActionsConfig(policy_result) <NEW_LINE> self.action = None | policy-definition actions element | 6259904c23849d37ff8524d9 |
class AutoNoEmbed: <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> print('Addon "{}" loaded'.format(self.__class__.__name__)) <NEW_LINE> <DEDENT> async def on_member_join(self, member): <NEW_LINE> <INDENT> await self.bot.add_roles(Viewers, self.bot.noembed_role) | Logs join and leave messages. | 6259904c1f037a2d8b9e527a |
@declare_messagetype("ninchat.com/info/channel") <NEW_LINE> class ChannelInfoMessage(Message): <NEW_LINE> <INDENT> __slots__ = Message.__slots__ | Stub for the ninchat.com/info/channel message type.
| 6259904cd6c5a102081e3539 |
class MCRefreshVertexGroups(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.mc_refresh_vertex_groups" <NEW_LINE> bl_label = "MC Refresh Vertex Groups" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> ob = MC_data['recent_object'] <NEW_LINE> if ob is None: <NEW_LINE> <INDENT> ob = bpy.context.object <NEW_LINE> <DEDENT> cloth = get_cloth(ob) <NEW_LINE> refresh(cloth) <NEW_LINE> return {'FINISHED'} | Refresh Vertex Group Weights To Cloth Settings | 6259904c004d5f362081f9f6 |
class Date: <NEW_LINE> <INDENT> def __init__(self, date, event): <NEW_LINE> <INDENT> self._date = date <NEW_LINE> self._event = event <NEW_LINE> self._event_list = [] <NEW_LINE> <DEDENT> def add_event(self): <NEW_LINE> <INDENT> self._event_list.append(self._event) <NEW_LINE> <DEDENT> def get_event_list(self): <NEW_LINE> <INDENT> return self._event_list <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._date | This is a Date class; it makes each canonical date its own
object. You can get the date itself and the events on it,
and add events to the event list.
Parameters: date, which is the canonical date, and the string
event
Returns: the date and the list of events on that date
Pre-conditions: date is the canonical date in string form and event
is a string describing an event on that date
Post-conditions: event list is a list of all events that happened
on date | 6259904cf7d966606f7492c6
class WARPBatchUpdate: <NEW_LINE> <INDENT> def __init__(self, batch_size, d): <NEW_LINE> <INDENT> self.u = np.zeros(batch_size, dtype='int32') <NEW_LINE> self.dU = np.zeros((batch_size, d), order='F') <NEW_LINE> self.v_pos = np.zeros(batch_size, dtype='int32') <NEW_LINE> self.dV_pos = np.zeros((batch_size, d)) <NEW_LINE> self.v_neg = np.zeros(batch_size, dtype='int32') <NEW_LINE> self.dV_neg = np.zeros((batch_size, d)) <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def set_update(self, ix, update): <NEW_LINE> <INDENT> u, v_pos, v_neg, dU, dV_pos, dV_neg = update <NEW_LINE> self.u[ix] = u <NEW_LINE> self.dU[ix] = dU <NEW_LINE> self.v_pos[ix] = v_pos <NEW_LINE> self.dV_pos[ix] = dV_pos <NEW_LINE> self.v_neg[ix] = v_neg <NEW_LINE> self.dV_neg[ix] = dV_neg | Collection of arrays to hold a batch of WARP sgd updates. | 6259904cbaa26c4b54d506c7 |
class Plugin(object): <NEW_LINE> <INDENT> entry_point = None <NEW_LINE> @class_lazy <NEW_LINE> def extra_entry_points(cls): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _load_class_entry_point(cls, entry_point): <NEW_LINE> <INDENT> class_ = entry_point.load() <NEW_LINE> setattr(class_, 'plugin_name', entry_point.name) <NEW_LINE> return class_ <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load_class(cls, identifier, default=None, select=None): <NEW_LINE> <INDENT> identifier = identifier.lower() <NEW_LINE> key = (cls.entry_point, identifier) <NEW_LINE> if key not in PLUGIN_CACHE: <NEW_LINE> <INDENT> if select is None: <NEW_LINE> <INDENT> select = default_select <NEW_LINE> <DEDENT> all_entry_points = list(pkg_resources.iter_entry_points(cls.entry_point, name=identifier)) <NEW_LINE> for extra_identifier, extra_entry_point in cls.extra_entry_points: <NEW_LINE> <INDENT> if identifier == extra_identifier: <NEW_LINE> <INDENT> all_entry_points.append(extra_entry_point) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> selected_entry_point = select(identifier, all_entry_points) <NEW_LINE> <DEDENT> except PluginMissingError: <NEW_LINE> <INDENT> if default is not None: <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> raise <NEW_LINE> <DEDENT> PLUGIN_CACHE[key] = cls._load_class_entry_point(selected_entry_point) <NEW_LINE> <DEDENT> return PLUGIN_CACHE[key] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load_classes(cls, fail_silently=True): <NEW_LINE> <INDENT> all_classes = itertools.chain( pkg_resources.iter_entry_points(cls.entry_point), (entry_point for identifier, entry_point in cls.extra_entry_points), ) <NEW_LINE> for class_ in all_classes: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> yield (class_.name, cls._load_class_entry_point(class_)) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> if fail_silently: <NEW_LINE> <INDENT> log.warning('Unable to load %s %r', cls.__name__, class_.name, exc_info=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def register_temp_plugin(cls, class_, identifier=None, dist='xblock'): <NEW_LINE> <INDENT> from mock import Mock <NEW_LINE> if identifier is None: <NEW_LINE> <INDENT> identifier = class_.__name__.lower() <NEW_LINE> <DEDENT> entry_point = Mock( dist=Mock(key=dist), load=Mock(return_value=class_), ) <NEW_LINE> entry_point.name = identifier <NEW_LINE> def _decorator(func): <NEW_LINE> <INDENT> @functools.wraps(func) <NEW_LINE> def _inner(*args, **kwargs): <NEW_LINE> <INDENT> old = list(cls.extra_entry_points) <NEW_LINE> cls.extra_entry_points.append((identifier, entry_point)) <NEW_LINE> try: <NEW_LINE> <INDENT> return func(*args, **kwargs) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> cls.extra_entry_points = old <NEW_LINE> <DEDENT> <DEDENT> return _inner <NEW_LINE> <DEDENT> return _decorator | Base class for a system that uses entry_points to load plugins.
Implementing classes are expected to have the following attributes:
`entry_point`: The name of the entry point to load plugins from. | 6259904c0c0af96317c5776f |
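A hypothetical sketch of registering and loading a plugin through this base class (the entry-point group and names are illustrative; register_temp_plugin requires the mock package, and load_class relies on module-level helpers such as PLUGIN_CACHE and default_select that are not shown here):

    class BlockPlugin(Plugin):
        entry_point = 'xblock.v1'  # hypothetical entry-point group

    class MyBlock(object):
        pass

    @BlockPlugin.register_temp_plugin(MyBlock, identifier='myblock')
    def demo():
        loaded = BlockPlugin.load_class('myblock')
        # _load_class_entry_point stamps the plugin name onto the class
        assert loaded is MyBlock and loaded.plugin_name == 'myblock'

    demo()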
class ContatoViolencia(models.Model): <NEW_LINE> <INDENT> id_contato = models.AutoField(primary_key=True) <NEW_LINE> nome_contato = models.CharField(max_length=200, null=False, blank=False) <NEW_LINE> numero_contato = models.CharField(max_length=50, null=False, blank=False) <NEW_LINE> ds_contato = models.TextField(null=False, blank=False) <NEW_LINE> categoria_fk = models.ForeignKey(CategoriaContato, on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.nome_contato + " | " + str(self.categoria_fk) | Legend:
id: Unique identifier;
ds_contato: Description;
nome_contato: Name;
categoria: Category.
Fields:
id_contato: Unique contact identifier;
nome_contato: Name of the contact;
ds_contato: Description of the contact;
categoria: Type of contact (NGOs, psychologists, competent agencies) | 6259904ce64d504609df9dde
@add_attribute_self('iterable') <NEW_LINE> class flist(FilterMixin, list): <NEW_LINE> <INDENT> root = list <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> if len(args) == 1 and isinstance(args[0], Iterable): <NEW_LINE> <INDENT> list.__init__(self, args[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> list.__init__(self, args) <NEW_LINE> <DEDENT> <DEDENT> def clear(self): <NEW_LINE> <INDENT> del self[:] <NEW_LINE> return self <NEW_LINE> <DEDENT> def append(self, x): <NEW_LINE> <INDENT> list.append(self, x) <NEW_LINE> return self <NEW_LINE> <DEDENT> def extend(self, iterable): <NEW_LINE> <INDENT> list.extend(self, iterable) <NEW_LINE> return self <NEW_LINE> <DEDENT> def insert(self, i, x): <NEW_LINE> <INDENT> if i < 0: <NEW_LINE> <INDENT> if i == -1: <NEW_LINE> <INDENT> return self.append(x) <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> list.insert(self, i, x) <NEW_LINE> return self <NEW_LINE> <DEDENT> def remove(self, value): <NEW_LINE> <INDENT> list.remove(self, value) <NEW_LINE> return self <NEW_LINE> <DEDENT> def remove_all(self, iterable): <NEW_LINE> <INDENT> for i in iterable: <NEW_LINE> <INDENT> list.remove(self, i) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def remove_slice(self, start=None, end=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def discard_slice(self, start=None, end=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def discard(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> list.remove(self, value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def discard_all(self, iterable): <NEW_LINE> <INDENT> for i in iterable: <NEW_LINE> <INDENT> self.discard(i) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def reverse(self): <NEW_LINE> <INDENT> list.reverse() <NEW_LINE> return self <NEW_LINE> <DEDENT> def sort(self, **p): <NEW_LINE> <INDENT> list.sort(self, **p) <NEW_LINE> return self <NEW_LINE> <DEDENT> __isub__ = discard_all <NEW_LINE> def __add__(self, iterable): <NEW_LINE> <INDENT> return flist(self).extend(iterable) <NEW_LINE> <DEDENT> def __sub__(self, iterable): <NEW_LINE> <INDENT> return flist(self).discard_all(iterable) | Replacement class for list, with better API and many useful methods.
In place methods return 'self' instead of None, better for chaining and returning
Many new methods have been added; they are classified as immutable, mutable, and helpers | 6259904c73bcbd0ca4bcb6a6
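A minimal chaining sketch (assuming flist is importable):

    result = flist(3, 1, 2).append(4).discard(99).sort()
    assert result == [1, 2, 3, 4]    # every mutator returned self
    combined = flist([1, 2]) + [3]   # __add__ builds a new flist
    print(result, combined)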
class ITimeSlot(IContained): <NEW_LINE> <INDENT> tstart = zope.schema.Time( title=u"Time of the start of the event", required=True) <NEW_LINE> duration = zope.schema.Timedelta( title=u"Timedelta of the duration of the event", required=True) <NEW_LINE> activity_type = zope.schema.Choice( title=_("Activity type"), required=False, default="lesson", vocabulary=activity_types) | Time slot designated for an activity. | 6259904cb57a9660fecd2e9a |
class CommaFile(object): <NEW_LINE> <INDENT> _header = None <NEW_LINE> _params = None <NEW_LINE> _primary_key = None <NEW_LINE> def __init__( self, header: comma.typing.OptionalHeaderType = None, primary_key: typing.Optional[str] = None, params: typing.Optional[comma.typing.CommaInfoParamsType] = None, ): <NEW_LINE> <INDENT> if header is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._header = list(map(str, header)) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> raise comma.exceptions.CommaInvalidHeaderException( "`header` does not seem to be an iterable of strings" ) <NEW_LINE> <DEDENT> <DEDENT> self._params = params <NEW_LINE> self._primary_key = primary_key <NEW_LINE> <DEDENT> @property <NEW_LINE> def header(self) -> comma.typing.OptionalHeaderType: <NEW_LINE> <INDENT> return self._header <NEW_LINE> <DEDENT> @header.setter <NEW_LINE> def header(self, value: comma.typing.OptionalHeaderType): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> self._header = None <NEW_LINE> return <NEW_LINE> <DEDENT> validated_header = comma.helpers.validate_header(value) <NEW_LINE> if self._header is not None: <NEW_LINE> <INDENT> old_length = len(self._header) <NEW_LINE> new_length = len(validated_header) <NEW_LINE> if old_length != new_length: <NEW_LINE> <INDENT> warnings.warn( "changing length of header; was {old}, now is {new}".format( old=old_length, new=new_length)) <NEW_LINE> <DEDENT> <DEDENT> self._header = validated_header <NEW_LINE> <DEDENT> @header.deleter <NEW_LINE> def header(self): <NEW_LINE> <INDENT> self._header = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def primary_key(self) -> str: <NEW_LINE> <INDENT> return self._primary_key <NEW_LINE> <DEDENT> @primary_key.setter <NEW_LINE> def primary_key(self, value: str): <NEW_LINE> <INDENT> if value is None or value == "" or value == False: <NEW_LINE> <INDENT> del self.primary_key <NEW_LINE> return <NEW_LINE> <DEDENT> if self.header is None: <NEW_LINE> <INDENT> raise comma.exceptions.CommaNoHeaderException( "cannot set the primary key of a `CommaFile` " "that does not have a header" ) <NEW_LINE> <DEDENT> if value not in self.header: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> header_string = self.header.__repr__() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> header_string = "" <NEW_LINE> <DEDENT> raise comma.exceptions.CommaKeyError( "the requested primary key (" + value + ") is not one of the headers: " + header_string ) <NEW_LINE> <DEDENT> self._primary_key = value <NEW_LINE> <DEDENT> @primary_key.deleter <NEW_LINE> def primary_key(self): <NEW_LINE> <INDENT> self._primary_key = None | Store the metadata associated with a CSV/DSV file. This includes the
`header` (a list of column names) if it exists; the `primary_key` (that is,
whether one of the columns should function as an index for rows); and
the internal parameters, such as dialect and encoding, that are detected
when the table data was loaded. | 6259904c3c8af77a43b6894c |
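A minimal usage sketch, assuming the surrounding comma package (with its helpers and exceptions) is importable:

    cf = CommaFile(header=['id', 'name'])
    cf.primary_key = 'id'     # must be one of the header names
    print(cf.header, cf.primary_key)
    del cf.primary_key        # clears the key; assigning '' or None does the same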
class Hashable(object): <NEW_LINE> <INDENT> def __init__(self, wrapped, tight=False): <NEW_LINE> <INDENT> self.__tight = tight <NEW_LINE> if isinstance(wrapped, list): <NEW_LINE> <INDENT> wrapped = array(wrapped) <NEW_LINE> <DEDENT> self.__wrapped = array(wrapped) if tight else wrapped <NEW_LINE> self.__hash = hash(wrapped.tostring()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'H%s' % (repr(self.__wrapped),) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return all(self.__wrapped == other.__wrapped) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return self.__hash <NEW_LINE> <DEDENT> def unwrap(self): <NEW_LINE> <INDENT> return array(self.__wrapped) if self.__tight else self.__wrapped | Hashable wrapper for ndarray objects.
Instances of ndarray are not hashable, meaning they cannot be added to
sets, nor used as keys in dictionaries. This is by design - ndarray
objects are mutable, and therefore cannot reliably implement the
__hash__() method.
The hashable class allows a way around this limitation. It implements
the required methods for hashable objects in terms of an encapsulated
ndarray object. This can be either a copied instance (which is safer)
or the original object (which requires the user to be careful enough
not to modify it). | 6259904c0a366e3fb87dde03 |
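A minimal sketch using wrapped arrays as dictionary keys (assuming an older NumPy where ndarray.tostring() is still available):

    from numpy import array

    key = Hashable(array([1, 2, 3]), tight=True)   # tight=True copies for safety
    cache = {key: 'expensive result'}
    print(cache[Hashable(array([1, 2, 3]))])       # 'expensive result'
    print(key.unwrap())                            # a fresh copy, since tight=True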
class ReadOnlyFileSystemException(PermissionsException): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> ApiException.__init__(self) <NEW_LINE> self.path = path <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.path: <NEW_LINE> <INDENT> return _("Could not complete the operation on {0}: " "read-only filesystem.").format(self.path) <NEW_LINE> <DEDENT> return _("Could not complete the operation: read-only " "filesystem.") | Used to indicate that the operation was attempted on a
read-only filesystem | 6259904c4e696a045264e82f |
class UserLogout(Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> if 'username' in session: <NEW_LINE> <INDENT> session.pop('username') <NEW_LINE> config.pop('admin_session') <NEW_LINE> print(session) <NEW_LINE> print(config) <NEW_LINE> return {"logout": "success"} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return{"logout": "failed"} | user logout | 6259904c1f037a2d8b9e527b |
class IgnoredTasksTest(fixtures.TestCaseWithNamespace): <NEW_LINE> <INDENT> def test_ignore_future_task(self): <NEW_LINE> <INDENT> future_task = todotxt.Task("(A) 9999-01-01 Start preparing for five-digit years") <NEW_LINE> regular_task = todotxt.Task("(B) Look busy") <NEW_LINE> self.assertEqual([regular_task], pick_action.next_actions([future_task, regular_task], self.namespace)) <NEW_LINE> <DEDENT> def test_only_future_tasks(self): <NEW_LINE> <INDENT> future_task1 = todotxt.Task("(A) 9999-01-01 Start preparing for five-digit years") <NEW_LINE> future_task2 = todotxt.Task("(A) Start preparing for five-digit years t:9999-01-01") <NEW_LINE> self.assertEqual([], pick_action.next_actions([future_task1, future_task2], self.namespace)) | Test that certain tasks are ignored when picking the next action. | 6259904cd6c5a102081e353b |
class RconError(Exception): <NEW_LINE> <INDENT> pass | Generic RCON error | 6259904c287bf620b6273007 |
class WikipediaCa(Platform): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.platformName = "Wikipedia_ca" <NEW_LINE> self.tags = ["education", "wiki"] <NEW_LINE> self.isValidMode = {} <NEW_LINE> self.isValidMode["phonefy"] = False <NEW_LINE> self.isValidMode["usufy"] = True <NEW_LINE> self.isValidMode["searchfy"] = False <NEW_LINE> self.url = {} <NEW_LINE> self.url["usufy"] = "http://ca.wikipedia.org/wiki/Usuari:" + "<usufy>" <NEW_LINE> self.needsCredentials = {} <NEW_LINE> self.needsCredentials["usufy"] = False <NEW_LINE> self.validQuery = {} <NEW_LINE> self.validQuery["usufy"] = ".+" <NEW_LINE> self.notFoundText = {} <NEW_LINE> self.notFoundText["usufy"] = ["no està registrat."] <NEW_LINE> self.fieldsRegExp = {} <NEW_LINE> self.fieldsRegExp["usufy"] = {} <NEW_LINE> self.foundFields = {} | A <Platform> object for WikipediaCa | 6259904cd53ae8145f91987f |
class Coordinate(object): <NEW_LINE> <INDENT> def __init__(self, lat, lng): <NEW_LINE> <INDENT> self.lat = lat <NEW_LINE> self.lon = lng <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.lat == other.lat and self.lon == other.lon <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Coord(%s,%s)"%(self.lat,self.lon) | A geolocation representation.
| 6259904c507cdc57c63a61be |
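A minimal sketch of the value-equality semantics:

    a = Coordinate(18.0, -76.8)
    b = Coordinate(18.0, -76.8)
    assert a == b and not (a != b)
    print(a)  # Coord(18.0,-76.8)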
class SimpleImputerTransformer(BaseEstimator, TransformerMixin): <NEW_LINE> <INDENT> def __init__(self, features_df, target=None): <NEW_LINE> <INDENT> self.features_df = features_df <NEW_LINE> <DEDENT> def fit(self, features_df, target=None): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def transform(self, features_df, target=None): <NEW_LINE> <INDENT> imputer = SimpleImputer(missing_values = np.NaN, strategy='median') <NEW_LINE> imputer = imputer.fit(features_df) <NEW_LINE> imputed = imputer.transform(features_df) <NEW_LINE> features_df = pd.DataFrame(data=imputed) <NEW_LINE> return features_df | This transformer imputes missing values | 6259904c498bea3a75a58f3e |
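A minimal sketch (assuming scikit-learn and pandas are installed; note the transformer re-fits the imputer inside transform()):

    import numpy as np
    import pandas as pd

    df = pd.DataFrame({'a': [1.0, np.nan, 3.0]})
    imputed = SimpleImputerTransformer(df).fit_transform(df)
    print(imputed)  # NaN replaced by the column median, 2.0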
class Katakana(unicode_set): <NEW_LINE> <INDENT> _ranges = [ (0x3099, 0x309C), (0x30A0, 0x30FF), (0x31F0, 0x31FF), (0x32D0, 0x32FE), (0xFF65, 0xFF9F), (0x1B000,), (0x1B164, 0x1B167), (0x1F201, 0x1F202), (0x1F213,), ] | Unicode set for Katakana Unicode Character Range | 6259904c07d97122c42180c2 |
class StellarAccountNotExists(Exception): <NEW_LINE> <INDENT> pass | Raised when a Stellar account does not exist. | 6259904ccb5e8a47e493cb96
class ISERTestCase(ISCSITestCase): <NEW_LINE> <INDENT> driver_name = "cinder.volume.drivers.lvm.LVMISERDriver" <NEW_LINE> base_driver = driver.ISERDriver <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(ISERTestCase, self).setUp() <NEW_LINE> self.configuration = mox.MockObject(conf.Configuration) <NEW_LINE> self.configuration.num_iser_scan_tries = 3 <NEW_LINE> self.configuration.iser_num_targets = 100 <NEW_LINE> self.configuration.iser_target_prefix = 'iqn.2010-10.org.openstack:' <NEW_LINE> self.configuration.iser_ip_address = '0.0.0.0' <NEW_LINE> self.configuration.iser_port = 3260 <NEW_LINE> <DEDENT> def test_get_volume_stats(self): <NEW_LINE> <INDENT> def _fake_get_all_physical_volumes(obj, root_helper, vg_name): <NEW_LINE> <INDENT> return [{}] <NEW_LINE> <DEDENT> def _fake_get_all_volume_groups(obj, vg_name=None, no_suffix=True): <NEW_LINE> <INDENT> return [{'name': 'cinder-volumes', 'size': '5.52', 'available': '0.52', 'lv_count': '2', 'uuid': 'vR1JU3-FAKE-C4A9-PQFh-Mctm-9FwA-Xwzc1m'}] <NEW_LINE> <DEDENT> self.stubs.Set(brick_lvm.LVM, 'get_all_physical_volumes', _fake_get_all_physical_volumes) <NEW_LINE> self.stubs.Set(brick_lvm.LVM, 'get_all_volume_groups', _fake_get_all_volume_groups) <NEW_LINE> self.volume.driver.vg = brick_lvm.LVM('cinder-volumes', 'sudo') <NEW_LINE> stats = self.volume.driver.get_volume_stats(refresh=True) <NEW_LINE> self.assertEqual(stats['total_capacity_gb'], float('5.52')) <NEW_LINE> self.assertEqual(stats['free_capacity_gb'], float('0.52')) <NEW_LINE> self.assertEqual(stats['storage_protocol'], 'iSER') <NEW_LINE> <DEDENT> def test_get_volume_stats2(self): <NEW_LINE> <INDENT> iser_driver = self.base_driver(configuration=self.configuration) <NEW_LINE> stats = iser_driver.get_volume_stats(refresh=True) <NEW_LINE> self.assertEqual(stats['total_capacity_gb'], 'infinite') <NEW_LINE> self.assertEqual(stats['free_capacity_gb'], 'infinite') <NEW_LINE> self.assertEqual(stats['storage_protocol'], 'iSER') | Test Case for ISERDriver. | 6259904c50485f2cf55dc3ab |
class SpynnerPageError(Exception): <NEW_LINE> <INDENT> pass | Error loading page. | 6259904c8e71fb1e983bcee5 |
class Function(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'functions' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String) <NEW_LINE> code = db.Column(db.String) <NEW_LINE> private = db.Column(db.Boolean) <NEW_LINE> def __init__(self, name, code): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.code = code | Model for User Defined Functions | 6259904c23e79379d538d91d |
class MonomorphizationResource(Partializable): <NEW_LINE> <INDENT> def __init__(self, resources): <NEW_LINE> <INDENT> self.resources = resources <NEW_LINE> self.engine = resources.inferrer.engine <NEW_LINE> self.manager = resources.opt_manager <NEW_LINE> self.mono = Monomorphizer(resources, self.engine) <NEW_LINE> <DEDENT> def __call__(self, context): <NEW_LINE> <INDENT> with tracer("monomorphize", engine=self.engine, context=context) as tr: <NEW_LINE> <INDENT> rval = self.mono.run(context) <NEW_LINE> tr.set_results(output=rval) <NEW_LINE> return rval | Performs monomorphization. | 6259904c30dc7b76659a0c53 |
class TestRunnerSimple(): <NEW_LINE> <INDENT> name = 'test_runner_simple' <NEW_LINE> @pytest.fixture(scope="class") <NEW_LINE> def runner( self, atomic_data_fname, tardis_ref_data, generate_reference): <NEW_LINE> <INDENT> config = Configuration.from_yaml( 'tardis/io/tests/data/tardis_configv1_verysimple.yml') <NEW_LINE> config['atom_data'] = atomic_data_fname <NEW_LINE> simulation = Simulation.from_config(config) <NEW_LINE> simulation.run() <NEW_LINE> if not generate_reference: <NEW_LINE> <INDENT> return simulation.runner <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> simulation.runner.hdf_properties = [ 'j_blue_estimator', 'spectrum', 'spectrum_virtual' ] <NEW_LINE> simulation.runner.to_hdf( tardis_ref_data, '', self.name) <NEW_LINE> pytest.skip( 'Reference data was generated during this run.') <NEW_LINE> <DEDENT> <DEDENT> @pytest.fixture(scope='class') <NEW_LINE> def refdata(self, tardis_ref_data): <NEW_LINE> <INDENT> def get_ref_data(key): <NEW_LINE> <INDENT> return tardis_ref_data[os.path.join( self.name, key)] <NEW_LINE> <DEDENT> return get_ref_data <NEW_LINE> <DEDENT> def test_j_blue_estimators(self, runner, refdata): <NEW_LINE> <INDENT> j_blue_estimator = refdata('j_blue_estimator').values <NEW_LINE> npt.assert_allclose( runner.j_blue_estimator, j_blue_estimator) <NEW_LINE> <DEDENT> def test_spectrum(self, runner, refdata): <NEW_LINE> <INDENT> luminosity = u.Quantity(refdata('spectrum/luminosity'), 'erg /s') <NEW_LINE> assert_quantity_allclose( runner.spectrum.luminosity, luminosity) <NEW_LINE> <DEDENT> def test_virtual_spectrum(self, runner, refdata): <NEW_LINE> <INDENT> luminosity = u.Quantity( refdata('spectrum_virtual/luminosity'), 'erg /s') <NEW_LINE> assert_quantity_allclose( runner.spectrum_virtual.luminosity, luminosity) <NEW_LINE> <DEDENT> def test_runner_properties(self, runner): <NEW_LINE> <INDENT> virt_type = np.ndarray <NEW_LINE> props_required_by_modeltohdf5 = dict([ ("virt_packet_last_interaction_type", virt_type), ("virt_packet_last_line_interaction_in_id", virt_type), ("virt_packet_last_line_interaction_out_id", virt_type), ("virt_packet_last_interaction_in_nu", virt_type), ("virt_packet_nus", virt_type), ("virt_packet_energies", virt_type), ]) <NEW_LINE> required_props = props_required_by_modeltohdf5.copy() <NEW_LINE> for prop, prop_type in required_props.items(): <NEW_LINE> <INDENT> actual = getattr(runner, prop) <NEW_LINE> assert type(actual) == prop_type, ( "wrong type of attribute '{}':" "expected {}, found {}".format( prop, prop_type, type(actual))) | Very simple run | 6259904c6fece00bbacccdd8 |
class Reader: <NEW_LINE> <INDENT> def __init__(self, file_obj): <NEW_LINE> <INDENT> self._file_obj = file_obj <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> line = self._file_obj.readline() <NEW_LINE> if line == '': <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> return json.loads(line) | Class to read and parse a text file with a single JSON object per line
Provides an iterator that will iterate over the lines in the file and yield
a dict of fields. | 6259904c596a897236128fbe |
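A minimal sketch feeding the reader an in-memory file (the class's module is assumed to import json, as its code uses json.loads):

    import io

    lines = io.StringIO('{"a": 1}\n{"a": 2}\n')
    for record in Reader(lines):
        print(record['a'])  # 1, then 2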
class UserRetrieveAPIView(RetrieveAPIView): <NEW_LINE> <INDENT> serializer_class = UserDetailSerializer <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> queryset = User.objects.none() <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> return self.request.user | Retrieve authenticated user | 6259904c16aa5153ce40190d |