Columns: code (string, lengths 4 to 4.48k), docstring (string, lengths 1 to 6.45k), _id (string, length 24)
@registerCommand(MODE, 'save', arguments=[ (['--all'], {'action': 'store_true', 'help': 'save all attachments'}), (['path'], {'nargs': '?', 'help': 'path to save to'})]) <NEW_LINE> class SaveAttachmentCommand(Command): <NEW_LINE> <INDENT> def __init__(self, all=False, path=None, **kwargs): <NEW_LINE> <INDENT> Command.__init__(self, **kwargs) <NEW_LINE> self.all = all <NEW_LINE> self.path = path <NEW_LINE> <DEDENT> @inlineCallbacks <NEW_LINE> def apply(self, ui): <NEW_LINE> <INDENT> pcomplete = PathCompleter() <NEW_LINE> savedir = settings.get('attachment_prefix', '~') <NEW_LINE> if self.all: <NEW_LINE> <INDENT> msg = ui.current_buffer.get_selected_message() <NEW_LINE> if not self.path: <NEW_LINE> <INDENT> self.path = yield ui.prompt('save attachments to', text=os.path.join(savedir, ''), completer=pcomplete) <NEW_LINE> <DEDENT> if self.path: <NEW_LINE> <INDENT> if os.path.isdir(os.path.expanduser(self.path)): <NEW_LINE> <INDENT> for a in msg.get_attachments(): <NEW_LINE> <INDENT> dest = a.save(self.path) <NEW_LINE> name = a.get_filename() <NEW_LINE> if name: <NEW_LINE> <INDENT> ui.notify('saved %s as: %s' % (name, dest)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ui.notify('saved attachment as: %s' % dest) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> ui.notify('not a directory: %s' % self.path, priority='error') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise CommandCanceled() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> focus = ui.get_deep_focus() <NEW_LINE> if isinstance(focus, AttachmentWidget): <NEW_LINE> <INDENT> attachment = focus.get_attachment() <NEW_LINE> filename = attachment.get_filename() <NEW_LINE> if not self.path: <NEW_LINE> <INDENT> msg = 'save attachment (%s) to ' % filename <NEW_LINE> initialtext = os.path.join(savedir, filename) <NEW_LINE> self.path = yield ui.prompt(msg, completer=pcomplete, text=initialtext) <NEW_LINE> <DEDENT> if self.path: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> dest = attachment.save(self.path) <NEW_LINE> ui.notify('saved attachment as: %s' % dest) <NEW_LINE> <DEDENT> except (IOError, OSError) as e: <NEW_LINE> <INDENT> ui.notify(str(e), priority='error') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise CommandCanceled()
save attachment(s)
6259906a32920d7e50bc7833
class NotModifiedError(CloudifyClientError): <NEW_LINE> <INDENT> ERROR_CODE = 'not_modified' <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.message
Raised when a 304 not modified error was returned
6259906a91f36d47f2231a85
class AttriDict(dict): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(AttriDict, self).__init__(*args, **kwargs) <NEW_LINE> self.__dict__ = self
Dict whose items can also be accessed as attributes using dot notation
6259906a4527f215b58eb596
class ModuleStream(Entity, EntityReadMixin, EntitySearchMixin): <NEW_LINE> <INDENT> def __init__(self, server_config=None, **kwargs): <NEW_LINE> <INDENT> self._fields = { 'uuid': entity_fields.StringField(), 'name': entity_fields.StringField(), 'description': entity_fields.StringField(), 'context': entity_fields.StringField(), 'arch': entity_fields.StringField(), 'stream': entity_fields.StringField(), 'summary': entity_fields.StringField(), 'version': entity_fields.StringField(), 'module_spec': entity_fields.StringField(), } <NEW_LINE> self._meta = {'api_path': 'katello/api/v2/module_streams'} <NEW_LINE> super().__init__(server_config, **kwargs)
A representation of a Module Stream entity.
6259906a4428ac0f6e659d1e
class Normalize(Transform): <NEW_LINE> <INDENT> def __init__(self, scaler): <NEW_LINE> <INDENT> self.scaler = scaler <NEW_LINE> <DEDENT> def transform_data(self, data): <NEW_LINE> <INDENT> return self.scaler.normalize(data)
Normalize inputs Args: scaler: Scaler object, the scaler to be used to normalize the data Attributes: scaler : Scaler object, the scaler to be used to normalize the data
6259906ad486a94d0ba2d7ab
class CustomParallelUpdater(training.updaters.MultiprocessParallelUpdater): <NEW_LINE> <INDENT> def __init__(self, train_iters, optimizer, converter, devices, accum_grad=1): <NEW_LINE> <INDENT> super(CustomParallelUpdater, self).__init__( train_iters, optimizer, converter=converter, devices=devices) <NEW_LINE> from cupy.cuda import nccl <NEW_LINE> self.accum_grad = accum_grad <NEW_LINE> self.forward_count = 0 <NEW_LINE> self.nccl = nccl <NEW_LINE> <DEDENT> def update_core(self): <NEW_LINE> <INDENT> self.setup_workers() <NEW_LINE> self._send_message(('update', None)) <NEW_LINE> with cuda.Device(self._devices[0]): <NEW_LINE> <INDENT> optimizer = self.get_optimizer('main') <NEW_LINE> batch = self.get_iterator('main').next() <NEW_LINE> x = self.converter(batch, self._devices[0]) <NEW_LINE> loss = self._master(*x) / self.accum_grad <NEW_LINE> loss.backward() <NEW_LINE> loss.unchain_backward() <NEW_LINE> null_stream = cuda.Stream.null <NEW_LINE> if self.comm is not None: <NEW_LINE> <INDENT> gg = gather_grads(self._master) <NEW_LINE> self.comm.reduce(gg.data.ptr, gg.data.ptr, gg.size, self.nccl.NCCL_FLOAT, self.nccl.NCCL_SUM, 0, null_stream.ptr) <NEW_LINE> scatter_grads(self._master, gg) <NEW_LINE> del gg <NEW_LINE> <DEDENT> self.forward_count += 1 <NEW_LINE> if self.forward_count != self.accum_grad: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.forward_count = 0 <NEW_LINE> grad_norm = np.sqrt(sum_sqnorm( [p.grad for p in optimizer.target.params(False)])) <NEW_LINE> logging.info('grad norm={}'.format(grad_norm)) <NEW_LINE> if math.isnan(grad_norm): <NEW_LINE> <INDENT> logging.warning('grad norm is nan. Do not update model.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> optimizer.update() <NEW_LINE> <DEDENT> self._master.cleargrads() <NEW_LINE> if self.comm is not None: <NEW_LINE> <INDENT> gp = gather_params(self._master) <NEW_LINE> self.comm.bcast(gp.data.ptr, gp.size, self.nccl.NCCL_FLOAT, 0, null_stream.ptr) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.update_core() <NEW_LINE> if self.forward_count == 0: <NEW_LINE> <INDENT> self.iteration += 1
Custom Parallel Updater for chainer. Defines the main update routine. Args: train_iter (iterator | dict[str, iterator]): Dataset iterator for the training dataset. It can also be a dictionary that maps strings to iterators. If this is just an iterator, then the iterator is registered by the name ``'main'``. optimizer (optimizer | dict[str, optimizer]): Optimizer to update parameters. It can also be a dictionary that maps strings to optimizers. If this is just an optimizer, then the optimizer is registered by the name ``'main'``. converter (espnet.asr.chainer_backend.asr.CustomConverter): Converter function to build input arrays. Each batch extracted by the main iterator and the ``device`` option are passed to this function. :func:`chainer.dataset.concat_examples` is used by default. device (torch.device): Device to which the training data is sent. Negative value indicates the host memory (CPU). accum_grad (int): The number of gradient accumulation steps. If set to 2, the network parameters are updated once every two iterations, i.e. the effective batch size is doubled.
6259906a63b5f9789fe8694f
class Pdf2svg(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "http://www.cityinthesky.co.uk/opensource/pdf2svg" <NEW_LINE> url = "https://github.com/dawbarton/pdf2svg/archive/v0.2.3.tar.gz" <NEW_LINE> version('0.2.3', 'd398b3b1c1979f554596238a44f12123') <NEW_LINE> version('0.2.2', 'f7e0d2213f9e1422cee9421e18f72553') <NEW_LINE> depends_on('cairo', type='run') <NEW_LINE> depends_on('poppler', type='run')
A simple PDF to SVG converter using the Poppler and Cairo libraries.
6259906a01c39578d7f1432b
@dataclass <NEW_LINE> class Website(): <NEW_LINE> <INDENT> name: str <NEW_LINE> homepage: str <NEW_LINE> seed_urls: List[str] <NEW_LINE> url_patterns: Match <NEW_LINE> relative_url: bool <NEW_LINE> title_class: str <NEW_LINE> body_class: str <NEW_LINE> date_class: str <NEW_LINE> favicon: str <NEW_LINE> next_button_id: str <NEW_LINE> next_request: Callable[[HtmlResponse], Request]
Holds information on a newspaper's structure for scraping. Args: name name of the newspaper. seed_urls URLs to start scraping from. base_url base url to use in case of relative urls. next_page returns next page to scrape. urls seed URLs for scraping. target_patterns regular expressions of links to follow during scraping. relative_url True if URLs in website are relative, False if they are absolute. title_class CSS class of title. body_class CSS class of body.
6259906a7d847024c075dbc7
class HTTPBearerAuth(AuthBase): <NEW_LINE> <INDENT> def __init__(self, token): <NEW_LINE> <INDENT> self.token = token <NEW_LINE> <DEDENT> def __call__(self, r): <NEW_LINE> <INDENT> r.headers['Authorization'] = 'Bearer {}'.format(self.token) <NEW_LINE> return r
Attaches HTTP Bearer Authentication to the given Request object.
6259906a2ae34c7f260ac8d5
class QtSpinBox(QtControl, AbstractTkSpinBox): <NEW_LINE> <INDENT> def create(self, parent): <NEW_LINE> <INDENT> self.widget = EnamlQSpinBox(parent) <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> super(QtSpinBox, self).initialize() <NEW_LINE> shell = self.shell_obj <NEW_LINE> self.set_spin_low(shell.low) <NEW_LINE> self.set_spin_high(shell.high) <NEW_LINE> self.set_spin_step(shell.step) <NEW_LINE> self.set_spin_converter(shell.converter) <NEW_LINE> self.set_spin_wrap(shell.wrap) <NEW_LINE> self.set_spin_value(shell.value) <NEW_LINE> self.set_spin_tracking(shell.tracking) <NEW_LINE> <DEDENT> def bind(self): <NEW_LINE> <INDENT> super(QtSpinBox, self).bind() <NEW_LINE> self.widget.valueChanged.connect(self.on_value_changed) <NEW_LINE> <DEDENT> def shell_value_changed(self, value): <NEW_LINE> <INDENT> self.set_spin_value(value) <NEW_LINE> <DEDENT> def shell_low_changed(self, low): <NEW_LINE> <INDENT> self.set_spin_low(low) <NEW_LINE> <DEDENT> def shell_high_changed(self, high): <NEW_LINE> <INDENT> self.set_spin_high(high) <NEW_LINE> <DEDENT> def shell_step_changed(self, step): <NEW_LINE> <INDENT> self.set_spin_step(step) <NEW_LINE> <DEDENT> def shell_converter_changed(self, converter): <NEW_LINE> <INDENT> self.set_spin_converter(converter) <NEW_LINE> <DEDENT> def shell_wrap_changed(self, wrap): <NEW_LINE> <INDENT> self.set_spin_wrap(wrap) <NEW_LINE> <DEDENT> def shell_tracking_changed(self, tracking): <NEW_LINE> <INDENT> self.set_spin_tracking(tracking) <NEW_LINE> <DEDENT> def on_value_changed(self): <NEW_LINE> <INDENT> self.shell_obj.value = self.widget.value() <NEW_LINE> <DEDENT> def set_spin_value(self, value): <NEW_LINE> <INDENT> self.widget.setValue(value) <NEW_LINE> <DEDENT> def set_spin_low(self, low): <NEW_LINE> <INDENT> self.widget.setMinimum(low) <NEW_LINE> <DEDENT> def set_spin_high(self, high): <NEW_LINE> <INDENT> self.widget.setMaximum(high) <NEW_LINE> <DEDENT> def set_spin_step(self, step): <NEW_LINE> <INDENT> self.widget.setSingleStep(step) <NEW_LINE> <DEDENT> def set_spin_converter(self, converter): <NEW_LINE> <INDENT> self.widget.setConverter(converter) <NEW_LINE> <DEDENT> def set_spin_wrap(self, wrap): <NEW_LINE> <INDENT> self.widget.setWrapping(wrap) <NEW_LINE> <DEDENT> def set_spin_tracking(self, tracking): <NEW_LINE> <INDENT> self.widget.setKeyboardTracking(tracking)
A Qt implementation of SpinBox.
6259906ae1aae11d1e7cf403
class CacheHandler(urllib2.BaseHandler): <NEW_LINE> <INDENT> def __init__(self,cacheLocation): <NEW_LINE> <INDENT> self.cacheLocation = cacheLocation <NEW_LINE> if not os.path.exists(self.cacheLocation): <NEW_LINE> <INDENT> os.mkdir(self.cacheLocation) <NEW_LINE> <DEDENT> <DEDENT> def default_open(self,request): <NEW_LINE> <INDENT> if ((request.get_method() == "GET") and (CachedResponse.ExistsInCache(self.cacheLocation, request.get_full_url()))): <NEW_LINE> <INDENT> return CachedResponse(self.cacheLocation, request.get_full_url(), setCacheHeader=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def http_response(self, request, response): <NEW_LINE> <INDENT> if request.get_method() == "GET": <NEW_LINE> <INDENT> if 'x-cache' not in response.info(): <NEW_LINE> <INDENT> CachedResponse.StoreInCache(self.cacheLocation, request.get_full_url(), response) <NEW_LINE> return CachedResponse(self.cacheLocation, request.get_full_url(), setCacheHeader=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return CachedResponse(self.cacheLocation, request.get_full_url(), setCacheHeader=True) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return response
Stores responses in a persistent on-disk cache. If a subsequent GET request is made for the same URL, the stored response is returned, saving time, resources and bandwidth.
6259906a435de62698e9d5f8
class ButtonHandler(object): <NEW_LINE> <INDENT> ind = 0 <NEW_LINE> def quit(self, event): <NEW_LINE> <INDENT> self.ind += 1 <NEW_LINE> handle_close(event) <NEW_LINE> plt.draw() <NEW_LINE> <DEDENT> def pause(self, event): <NEW_LINE> <INDENT> global state <NEW_LINE> self.ind -= 1 <NEW_LINE> state += 1 <NEW_LINE> plt.draw()
Class created to handle button functionality via .on_clicked()
6259906abaa26c4b54d50a94
class TestSMIME(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.cfixt = self.useFixture(ClientFixture()) <NEW_LINE> self.client = self.cfixt.client <NEW_LINE> self.ep_path = "/smime" <NEW_LINE> self.api_version = "v1" <NEW_LINE> self.api_url = f"{self.cfixt.base_url}{self.ep_path}/{self.api_version}"
Serves as a base class for all tests of the Certificates class.
6259906a7047854f46340ba3
class BaseTest: <NEW_LINE> <INDENT> logger = logger <NEW_LINE> def request(self, method, path, data=None, **args): <NEW_LINE> <INDENT> self.application._status = None <NEW_LINE> self.application._headers = None <NEW_LINE> self.application._answer = None <NEW_LINE> for key in args: <NEW_LINE> <INDENT> args[key.upper()] = args[key] <NEW_LINE> <DEDENT> args["REQUEST_METHOD"] = method.upper() <NEW_LINE> args["PATH_INFO"] = path <NEW_LINE> if data: <NEW_LINE> <INDENT> data = data.encode("utf-8") <NEW_LINE> args["wsgi.input"] = BytesIO(data) <NEW_LINE> args["CONTENT_LENGTH"] = str(len(data)) <NEW_LINE> <DEDENT> self.application._answer = self.application(args, self.start_response) <NEW_LINE> return ( int(self.application._status.split()[0]), dict(self.application._headers), self.application._answer[0].decode("utf-8") if self.application._answer else None) <NEW_LINE> <DEDENT> def start_response(self, status, headers): <NEW_LINE> <INDENT> self.application._status = status <NEW_LINE> self.application._headers = headers
Base class for tests.
6259906aa8370b77170f1bb2
class Reconstructor(TreeMatcher): <NEW_LINE> <INDENT> write_tokens: WriteTokensTransformer <NEW_LINE> def __init__(self, parser: Lark, term_subs: Optional[Dict[str, Callable[[Symbol], str]]]=None) -> None: <NEW_LINE> <INDENT> TreeMatcher.__init__(self, parser) <NEW_LINE> self.write_tokens = WriteTokensTransformer({t.name:t for t in self.tokens}, term_subs or {}) <NEW_LINE> <DEDENT> def _reconstruct(self, tree): <NEW_LINE> <INDENT> unreduced_tree = self.match_tree(tree, tree.data) <NEW_LINE> res = self.write_tokens.transform(unreduced_tree) <NEW_LINE> for item in res: <NEW_LINE> <INDENT> if isinstance(item, Tree): <NEW_LINE> <INDENT> yield from self._reconstruct(item) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield item <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def reconstruct(self, tree: Tree, postproc: Optional[Callable[[Iterable[str]], Iterable[str]]]=None, insert_spaces: bool=True) -> str: <NEW_LINE> <INDENT> x = self._reconstruct(tree) <NEW_LINE> if postproc: <NEW_LINE> <INDENT> x = postproc(x) <NEW_LINE> <DEDENT> y = [] <NEW_LINE> prev_item = '' <NEW_LINE> for item in x: <NEW_LINE> <INDENT> if insert_spaces and prev_item and item and is_id_continue(prev_item[-1]) and is_id_continue(item[0]): <NEW_LINE> <INDENT> y.append(' ') <NEW_LINE> <DEDENT> y.append(item) <NEW_LINE> prev_item = item <NEW_LINE> <DEDENT> return ''.join(y)
A Reconstructor that will, given a full parse Tree, generate source code. Note: The reconstructor cannot generate values from regexps. If you need to produce discarded regexes, such as newlines, use `term_subs` and provide default values for them. Parameters: parser: a Lark instance term_subs: a dictionary of [Terminal name as str] to [output text as str]
6259906a7b180e01f3e49c5a
class SoilTextureCreateAPIView(CreateAPIViewHook): <NEW_LINE> <INDENT> queryset = SoilTexture.objects.all() <NEW_LINE> serializer_class = soil_texture_serializer['SoilTextureDetailSerializer'] <NEW_LINE> permission_classes = [IsAuthenticated]
Creates a single record.
6259906a1b99ca400229012c
class Hook: <NEW_LINE> <INDENT> def __init__(self, display=1, logdir=None, context=5, file=None, format="html"): <NEW_LINE> <INDENT> self.display = display <NEW_LINE> self.logdir = logdir <NEW_LINE> self.context = context <NEW_LINE> self.file = file or sys.stdout <NEW_LINE> self.format = format <NEW_LINE> <DEDENT> def __call__(self, etype, evalue, etb): <NEW_LINE> <INDENT> self.handle((etype, evalue, etb)) <NEW_LINE> <DEDENT> def handle(self, info=None): <NEW_LINE> <INDENT> info = info or sys.exc_info() <NEW_LINE> if self.format == "html": <NEW_LINE> <INDENT> self.file.write(reset()) <NEW_LINE> <DEDENT> formatter = (self.format=="html") and html or text <NEW_LINE> plain = False <NEW_LINE> try: <NEW_LINE> <INDENT> doc = formatter(info, self.context) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> doc = ''.join(traceback.format_exception(*info)) <NEW_LINE> plain = True <NEW_LINE> <DEDENT> if self.display: <NEW_LINE> <INDENT> if plain: <NEW_LINE> <INDENT> doc = doc.replace('&', '&amp;').replace('<', '&lt;') <NEW_LINE> self.file.write('<pre>' + doc + '</pre>\n') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.file.write(doc + '\n') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.file.write('<p>A problem occurred in a Python script.\n') <NEW_LINE> <DEDENT> if self.logdir is not None: <NEW_LINE> <INDENT> suffix = ['.txt', '.html'][self.format=="html"] <NEW_LINE> (fd, path) = tempfile.mkstemp(suffix=suffix, dir=self.logdir) <NEW_LINE> try: <NEW_LINE> <INDENT> file = os.fdopen(fd, 'w') <NEW_LINE> file.write(doc) <NEW_LINE> file.close() <NEW_LINE> msg = '<p> %s contains the description of this error.' % path <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> msg = '<p> Tried to save traceback to %s, but failed.' % path <NEW_LINE> <DEDENT> self.file.write(msg + '\n') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.file.flush() <NEW_LINE> <DEDENT> except: pass
A hook to replace sys.excepthook that shows tracebacks in HTML.
6259906a7c178a314d78e7e2
@runner.MAGPIE_TEST_UI <NEW_LINE> @runner.MAGPIE_TEST_LOCAL <NEW_LINE> class TestCase_MagpieUI_UsersAuth_Local(ti.Interface_MagpieUI_UsersAuth, unittest.TestCase): <NEW_LINE> <INDENT> __test__ = True <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.grp = get_constant("MAGPIE_ADMIN_GROUP") <NEW_LINE> cls.usr = get_constant("MAGPIE_TEST_ADMIN_USERNAME", raise_missing=False, raise_not_set=False) <NEW_LINE> cls.pwd = get_constant("MAGPIE_TEST_ADMIN_PASSWORD", raise_missing=False, raise_not_set=False) <NEW_LINE> cls.app = utils.get_test_magpie_app() <NEW_LINE> cls.url = cls.app <NEW_LINE> cls.version = utils.TestSetup.get_Version(cls) <NEW_LINE> cls.setup_admin() <NEW_LINE> cls.login_admin() <NEW_LINE> cls.test_user_name = get_constant("MAGPIE_TEST_USER", default_value="unittest-user-auth_ui-user-local", raise_missing=False, raise_not_set=False) <NEW_LINE> cls.test_group_name = get_constant("MAGPIE_TEST_GROUP", default_value="unittest-user-auth_ui-group-local", raise_missing=False, raise_not_set=False)
Test any operation that requires logged-in user AuthN/AuthZ, but with privileges lower than ``MAGPIE_ADMIN_GROUP``. Use a local Magpie test application.
6259906af548e778e596cd79
class LWR(DifferentiableMap): <NEW_LINE> <INDENT> def __init__(self, m, n): <NEW_LINE> <INDENT> self._m = m <NEW_LINE> self._n = n <NEW_LINE> self.X = None <NEW_LINE> self.Y = None <NEW_LINE> self.D = None <NEW_LINE> self.ridge_lambda = None <NEW_LINE> <DEDENT> def output_dimension(self): <NEW_LINE> <INDENT> return self._m <NEW_LINE> <DEDENT> def input_dimension(self): <NEW_LINE> <INDENT> return self._n <NEW_LINE> <DEDENT> def forward(self, p): <NEW_LINE> <INDENT> v = np.empty(self._m) <NEW_LINE> for i in range(self._m): <NEW_LINE> <INDENT> v[i] = locally_weighted_regression( p, self.X[i], self.Y[i], self.D[i], self.ridge_lambda[i]) <NEW_LINE> <DEDENT> return v
Embeds the Locally Weighted Regressor. This allows it to be used for interpolation of derivatives on multi-dimensional output.
6259906a1f037a2d8b9e5461
class DummyLocator(object): <NEW_LINE> <INDENT> def getColumnNumber(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def getLineNumber(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def getPublicId(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def getSystemId(self): <NEW_LINE> <INDENT> return None
A dummy locator which is used if no document locator is available.
6259906a6e29344779b01e41
class EntityRelationEmbeddingModel(EntityEmbeddingModel): <NEW_LINE> <INDENT> def __init__( self, triples_factory: TriplesFactory, embedding_dim: int = 50, relation_dim: Optional[int] = None, loss: Optional[Loss] = None, predict_with_sigmoid: bool = False, automatic_memory_optimization: Optional[bool] = None, preferred_device: Optional[str] = None, random_seed: Optional[int] = None, regularizer: Optional[Regularizer] = None, ) -> None: <NEW_LINE> <INDENT> super().__init__( triples_factory=triples_factory, automatic_memory_optimization=automatic_memory_optimization, loss=loss, preferred_device=preferred_device, random_seed=random_seed, regularizer=regularizer, predict_with_sigmoid=predict_with_sigmoid, embedding_dim=embedding_dim, ) <NEW_LINE> if relation_dim is None: <NEW_LINE> <INDENT> relation_dim = embedding_dim <NEW_LINE> <DEDENT> self.relation_dim = relation_dim <NEW_LINE> self.relation_embeddings = get_embedding( num_embeddings=triples_factory.num_relations, embedding_dim=self.relation_dim, device=self.device, )
A base module for most KGE models that have one embedding for entities and one for relations.
6259906a009cb60464d02d27
class WalletSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> username = serializers.CharField(source='user.username') <NEW_LINE> currency = serializers.CharField(source='currency.symbol') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Wallet <NEW_LINE> fields = ('id', 'serial_number', 'balance', 'user', 'username', 'currency')
User wallets
6259906a21bff66bcd724454
class DesignateTempestPlugin(plugins.TempestPlugin): <NEW_LINE> <INDENT> def load_tests(self): <NEW_LINE> <INDENT> base_path = os.path.split(os.path.dirname( os.path.abspath(__file__)))[0] <NEW_LINE> test_dir = "designate_tempest_plugin/tests" <NEW_LINE> full_test_dir = os.path.join(base_path, test_dir) <NEW_LINE> return full_test_dir, base_path <NEW_LINE> <DEDENT> def register_opts(self, conf): <NEW_LINE> <INDENT> config.register_opt_group(conf, project_config.service_available_group, project_config.ServiceAvailableGroup) <NEW_LINE> config.register_opt_group(conf, project_config.dns_group, project_config.DnsGroup) <NEW_LINE> config.register_opt_group(conf, project_config.dns_feature_group, project_config.DnsFeatureGroup) <NEW_LINE> config.register_opt_group(conf, project_config.enforce_scope_group, project_config.EnforceScopeGroup) <NEW_LINE> <DEDENT> def get_opt_lists(self): <NEW_LINE> <INDENT> return [ (project_config.service_available_group.name, project_config.ServiceAvailableGroup), (project_config.dns_group.name, project_config.DnsGroup), (project_config.dns_feature_group.name, project_config.DnsFeatureGroup), ] <NEW_LINE> <DEDENT> def get_service_clients(self): <NEW_LINE> <INDENT> dns_config = config.service_client_config('dns') <NEW_LINE> admin_params = { 'name': 'dns_admin', 'service_version': 'dns.admin', 'module_path': 'designate_tempest_plugin.services.dns.admin', 'client_names': ['QuotasClient'] } <NEW_LINE> v2_params = { 'name': 'dns_v2', 'service_version': 'dns.v2', 'module_path': 'designate_tempest_plugin.services.dns.v2', 'client_names': dns_v2_services.__all__ } <NEW_LINE> admin_params.update(dns_config) <NEW_LINE> v2_params.update(dns_config) <NEW_LINE> return [admin_params, v2_params]
The DesignateTempestPlugin class provides the basic hooks for an external plugin to give Tempest the information it needs to run the plugin.
6259906aaad79263cf42ffa3
class ManagePosixCI(object): <NEW_LINE> <INDENT> def __init__(self, core_ci): <NEW_LINE> <INDENT> self.core_ci = core_ci <NEW_LINE> self.ssh_args = ['-o', 'BatchMode=yes', '-o', 'StrictHostKeyChecking=no', '-i', self.core_ci.ssh_key.key] <NEW_LINE> if self.core_ci.platform == 'freebsd': <NEW_LINE> <INDENT> self.become = ['su', '-l', 'root', '-c'] <NEW_LINE> <DEDENT> elif self.core_ci.platform == 'osx': <NEW_LINE> <INDENT> self.become = ['sudo', '-in', 'PATH=/usr/local/bin:$PATH'] <NEW_LINE> <DEDENT> <DEDENT> def setup(self): <NEW_LINE> <INDENT> self.wait() <NEW_LINE> self.configure() <NEW_LINE> self.upload_source() <NEW_LINE> <DEDENT> def wait(self): <NEW_LINE> <INDENT> for _ in range(1, 90): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.ssh('id') <NEW_LINE> return <NEW_LINE> <DEDENT> except SubprocessError: <NEW_LINE> <INDENT> sleep(10) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> raise ApplicationError('Timeout waiting for %s/%s instance %s.' % (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id)) <NEW_LINE> <DEDENT> def configure(self): <NEW_LINE> <INDENT> self.upload('test/runner/setup/remote.sh', '/tmp') <NEW_LINE> self.ssh('chmod +x /tmp/remote.sh && /tmp/remote.sh %s' % self.core_ci.platform) <NEW_LINE> <DEDENT> def upload_source(self): <NEW_LINE> <INDENT> with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd: <NEW_LINE> <INDENT> remote_source_dir = '/tmp' <NEW_LINE> remote_source_path = os.path.join(remote_source_dir, os.path.basename(local_source_fd.name)) <NEW_LINE> if not self.core_ci.args.explain: <NEW_LINE> <INDENT> lib.pytar.create_tarfile(local_source_fd.name, '.', lib.pytar.ignore) <NEW_LINE> <DEDENT> self.upload(local_source_fd.name, remote_source_dir) <NEW_LINE> self.ssh('rm -rf ~/ansible && mkdir ~/ansible && cd ~/ansible && tar oxzf %s' % remote_source_path) <NEW_LINE> <DEDENT> <DEDENT> def download(self, remote, local): <NEW_LINE> <INDENT> self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local) <NEW_LINE> <DEDENT> def upload(self, local, remote): <NEW_LINE> <INDENT> self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote)) <NEW_LINE> <DEDENT> def ssh(self, command, options=None): <NEW_LINE> <INDENT> if not options: <NEW_LINE> <INDENT> options = [] <NEW_LINE> <DEDENT> if isinstance(command, list): <NEW_LINE> <INDENT> command = ' '.join(pipes.quote(c) for c in command) <NEW_LINE> <DEDENT> run_command(self.core_ci.args, ['ssh', '-tt', '-q'] + self.ssh_args + options + ['-p', str(self.core_ci.connection.port), '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] + self.become + [pipes.quote(command)]) <NEW_LINE> <DEDENT> def scp(self, src, dst): <NEW_LINE> <INDENT> run_command(self.core_ci.args, ['scp'] + self.ssh_args + ['-P', str(self.core_ci.connection.port), '-q', '-r', src, dst])
Manage access to a POSIX instance provided by Ansible Core CI.
6259906a8e7ae83300eea87c
class LinkEmpty(Exception): <NEW_LINE> <INDENT> pass
Empty linked list exception
6259906afff4ab517ebcf009
class TaskUpdate(models.Model): <NEW_LINE> <INDENT> job_exe = models.ForeignKey('job.JobExecution', on_delete=models.PROTECT) <NEW_LINE> task_id = models.CharField(max_length=250) <NEW_LINE> status = models.CharField(max_length=250) <NEW_LINE> timestamp = models.DateTimeField(blank=True, null=True) <NEW_LINE> source = models.CharField(blank=True, max_length=250, null=True) <NEW_LINE> reason = models.CharField(blank=True, max_length=250, null=True) <NEW_LINE> message = models.TextField(blank=True, null=True) <NEW_LINE> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> db_table = 'task_update'
Represents a status update received for a task :keyword job_exe: The job execution that the task belongs to :type job_exe: :class:`django.db.models.ForeignKey` :keyword task_id: The task ID :type task_id: :class:`django.db.models.CharField` :keyword status: The status of the task :type status: :class:`django.db.models.CharField` :keyword timestamp: When the status update occurred (may be None) :type timestamp: :class:`django.db.models.DateTimeField` :keyword source: An optional source of the task status update :type source: :class:`django.db.models.CharField` :keyword reason: An optional reason for the task status update :type reason: :class:`django.db.models.CharField` :keyword message: An optional message related to the task status update :type message: :class:`django.db.models.TextField` :keyword created: When the task update was saved in the database :type created: :class:`django.db.models.DateTimeField`
6259906a9c8ee82313040d7f
class sinogram(object): <NEW_LINE> <INDENT> io = [] <NEW_LINE> meta = [] <NEW_LINE> display = [] <NEW_LINE> analyse = [] <NEW_LINE> process = [] <NEW_LINE> reconstruction = [] <NEW_LINE> data = [] <NEW_LINE> _wisdom_status = 1 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.io = io(self) <NEW_LINE> self.meta = meta(self) <NEW_LINE> self.display = display(self) <NEW_LINE> self.analyse = analyse(self) <NEW_LINE> self.process = process(self) <NEW_LINE> self.reconstruct = reconstruct(self) <NEW_LINE> self.data = data(self) <NEW_LINE> <DEDENT> def message(self, msg): <NEW_LINE> <INDENT> log = logging.getLogger() <NEW_LINE> log.setLevel(logging.DEBUG) <NEW_LINE> log.debug(msg) <NEW_LINE> <DEDENT> def error(self, msg): <NEW_LINE> <INDENT> self.meta.history['error'] = msg <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> def warning(self, msg): <NEW_LINE> <INDENT> self.meta.history['warning'] = msg <NEW_LINE> warnings.warn(msg) <NEW_LINE> <DEDENT> def what_to_do(self): <NEW_LINE> <INDENT> if ~self._pronounce_wisdom(): <NEW_LINE> <INDENT> self._check_min_hist_keys() <NEW_LINE> <DEDENT> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return copy.deepcopy(self) <NEW_LINE> <DEDENT> def _pronounce_wisdom(self): <NEW_LINE> <INDENT> randomator = 0 <NEW_LINE> for ii in range(0, self._wisdom_status): <NEW_LINE> <INDENT> randomator += numpy.random.randint(0, 100) <NEW_LINE> <DEDENT> if (randomator < 20): <NEW_LINE> <INDENT> self._wisdom_status += 1 <NEW_LINE> l = numpy.size(_wisdoms) <NEW_LINE> self.message(_wisdoms[numpy.random.randint(0, l)]) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> return 0 <NEW_LINE> <DEDENT> def _check_min_hist_keys(self): <NEW_LINE> <INDENT> finished = True <NEW_LINE> for k in _min_history: <NEW_LINE> <INDENT> self.message((k in self.meta.history.keys())) <NEW_LINE> if ~(k in self.meta.history.keys()): <NEW_LINE> <INDENT> self.message('You should use ' + k + ' as a next step') <NEW_LINE> finished = False <NEW_LINE> break <NEW_LINE> <DEDENT> if finished: <NEW_LINE> <INDENT> self.message('All basic processing steps were done. Use "reconstruct.FDK" to compute filtered backprojection.') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _check_double_hist(self, new_key): <NEW_LINE> <INDENT> if new_key in self.meta.history.keys(): <NEW_LINE> <INDENT> self.error(new_key + ' is found in the history of operations! Aborting.')
Class that will contain the raw data and links to all operations that we need to process and reconstruct it.
6259906a3539df3088ecda8d
class StateMachine(FileDict): <NEW_LINE> <INDENT> def __init__(self, node_id): <NEW_LINE> <INDENT> filename = os.path.join(config.log_path, '{}.state_machine'.format(node_id)) <NEW_LINE> super().__init__(filename) <NEW_LINE> <DEDENT> def apply(self, command): <NEW_LINE> <INDENT> self.update(command)
Raft Replicated State Machine — dict
6259906ae5267d203ee6cfb5
class SendCustomAlarmMsgResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId")
SendCustomAlarmMsg response structure.
6259906aac7a0e7691f73cd6
class Multilevel_Grid(Grid): <NEW_LINE> <INDENT> def initialize(self): <NEW_LINE> <INDENT> self.levels = [] <NEW_LINE> self.levelOf = {} <NEW_LINE> self.stairs = {} <NEW_LINE> <DEDENT> def add_level(self, rows, cols, **kwargs): <NEW_LINE> <INDENT> grid = Rectangular_Grid(rows, cols, **kwargs) <NEW_LINE> self.add_grid(grid) <NEW_LINE> return grid <NEW_LINE> <DEDENT> def add_grid(self, grid): <NEW_LINE> <INDENT> assert grid not in self.levelOf, "the grid is already part of the maze" <NEW_LINE> level = len(self.levels) <NEW_LINE> self.levelOf[grid] = level <NEW_LINE> self.levels.append(grid) <NEW_LINE> for cell in grid.each(): <NEW_LINE> <INDENT> index = (level, cell.index, "floor") <NEW_LINE> self[index] = cell <NEW_LINE> <DEDENT> <DEDENT> def add_stairs_upward(self, level, downcell, link=False): <NEW_LINE> <INDENT> index1 = downcell.index <NEW_LINE> grid1 = self.levels[level] <NEW_LINE> assert downcell is grid1[index1], "Cell %s is not in level %d." % (str(index1), level) <NEW_LINE> grid2 = self.levels[level+1] <NEW_LINE> upcell = grid2[index1] <NEW_LINE> assert upcell, "There is no cell %s in level %d" % (str(index1), level+1) <NEW_LINE> index2 = (downcell, upcell, "stairs") <NEW_LINE> self[index2] = stairs = Stairwell_Cell(index2, downcell, upcell) <NEW_LINE> self.stairs[stairs] = level <NEW_LINE> if link: <NEW_LINE> <INDENT> stairs.makePassage(downcell) <NEW_LINE> stairs.makePassage(upcell)
a class for multi-level mazes
6259906a435de62698e9d5fa
class Bonus(AbstractDnDEntity): <NEW_LINE> <INDENT> stackable = models.BooleanField(blank=False)
A type of Bonus that can be granted
6259906a7047854f46340ba5
@Document.persistenceSchema.Persistent <NEW_LINE> class DependencyRelation(object): <NEW_LINE> <INDENT> @observable_property <NEW_LINE> def span(self): <NEW_LINE> <INDENT> controller = self.controller <NEW_LINE> target = self.target <NEW_LINE> guard((controller.span is not None) and (controller.span is target.span), ValueError("Dependency relation cannot cross spans")) <NEW_LINE> return controller.span <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Dependency(%s --> %s)' % (self.controller, self.target) <NEW_LINE> <DEDENT> def __init__(self, controller, target): <NEW_LINE> <INDENT> guard((controller.span is not None) and (controller.span is target.span), ValueError("Dependency relation cannot cross spans")) <NEW_LINE> self.controller = controller <NEW_LINE> self.target = target <NEW_LINE> willChange(self.__db__.document, 'dependencies', ObservingContext(action = 'add', object = self)) <NEW_LINE> willChange(self.span, 'dependencies', ObservingContext(action = 'add', object = self)) <NEW_LINE> dependencies = list(self.__db__.root['dependencies']) <NEW_LINE> dependencies.append(self) <NEW_LINE> self.__db__.root['dependencies'] = dependencies <NEW_LINE> didChange(self.span, 'dependencies', ObservingContext(action = 'add', object = self)) <NEW_LINE> didChange(self.__db__.document, 'dependencies', ObservingContext(action = 'add', object = self)) <NEW_LINE> <DEDENT> @observable_property <NEW_LINE> def mnemonic(self): <NEW_LINE> <INDENT> return self.variables.getMnemonic(separator=', ') <NEW_LINE> <DEDENT> @observable_property <NEW_LINE> def variables(self): <NEW_LINE> <INDENT> return VariablesAccessor(self) <NEW_LINE> <DEDENT> @variables.changing <NEW_LINE> def variables(self, value): <NEW_LINE> <INDENT> willChange(self, 'mnemonic', {}) <NEW_LINE> <DEDENT> @variables.changed <NEW_LINE> def variables(self, value): <NEW_LINE> <INDENT> didChange(self, 'mnemonic', {})
A dependency relation between two constituents. Tokens also count as constituents for this purpose
6259906a167d2b6e312b8185
class MLP(object): <NEW_LINE> <INDENT> def __init__(self, rng, input_, n_in, n_hidden, n_out): <NEW_LINE> <INDENT> self.hiddenLayer = HiddenLayer( rng=rng, input_=input_, n_in=n_in, n_out=n_hidden, activation=T.tanh ) <NEW_LINE> self.logRegressionLayer = LogisticRegression( input_=self.hiddenLayer.output, n_in=n_hidden, n_out=n_out ) <NEW_LINE> self.L1 = ( abs(self.hiddenLayer.W).sum() + abs(self.logRegressionLayer.W).sum() ) <NEW_LINE> self.L2_sqr = ( T.sum(self.hiddenLayer.W ** 2) + T.sum(self.logRegressionLayer.W ** 2) ) <NEW_LINE> self.negative_log_likelihood = ( self.logRegressionLayer.negative_log_likelihood ) <NEW_LINE> self.errors = self.logRegressionLayer.errors <NEW_LINE> self.params = self.hiddenLayer.params + self.logRegressionLayer.params <NEW_LINE> self.input = input_
Multi-Layer Perceptron Class A multilayer perceptron is a feed-forward artificial neural network model that has one or more layers of hidden units and nonlinear activations. Intermediate layers usually use the tanh or sigmoid function as their activation (defined here by a ``HiddenLayer`` class) while the top layer is a softmax layer (defined here by a ``LogisticRegression`` class).
6259906a3cc13d1c6d466f35
class ReaktorApiError(ReaktorError): <NEW_LINE> <INDENT> AUTHENTICATION_INVALID = u"AUTHENTICATION_INVALID" <NEW_LINE> DISCOVERY_SERVICE_ACCESS_ERROR = u"DISCOVERY_SERVICE_ACCESS_ERROR" <NEW_LINE> ILLEGAL_ARGUMENT_ERROR = u"ILLEGAL_ARGUMENT_ERROR" <NEW_LINE> UNKNOWN_ENTITY_ERROR = u"UNKNOWN_ENTITY_ERROR" <NEW_LINE> ILLEGAL_CALL = u"ILLEGAL_CALL" <NEW_LINE> REQUESTED_FEATURE_NOT_FOUND = u"Requested feature not found." <NEW_LINE> DOCUMENT_IS_REMOVED = u"Document is removed"
ReaktorError to be thrown by class Reaktor, caused by the remote reaktor api. self.code here is a reaktor error message.
6259906a45492302aabfdcc7
class UpdateIgnoredWarning(object): <NEW_LINE> <INDENT> swagger_types = { 'id': 'str' } <NEW_LINE> attribute_map = { 'id': 'id' } <NEW_LINE> def __init__(self, id=None): <NEW_LINE> <INDENT> self._id = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.id = id <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @id.setter <NEW_LINE> def id(self, id): <NEW_LINE> <INDENT> if id is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `id`, must not be `None`") <NEW_LINE> <DEDENT> self._id = id <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(UpdateIgnoredWarning, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, UpdateIgnoredWarning): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259906a627d3e7fe0e08677
class FakeStream(object): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> def byte_to_seq(self, n): <NEW_LINE> <INDENT> return n <NEW_LINE> <DEDENT> def seq_final_arrival(self, n): <NEW_LINE> <INDENT> return None
Emulates a tcp.Direction with a predetermined data stream. Useful for debugging http message classes.
6259906a1b99ca400229012d
class Essay(AnswerSet): <NEW_LINE> <INDENT> def __init__(self, question): <NEW_LINE> <INDENT> AnswerSet.__init__(self, question) <NEW_LINE> <DEDENT> def toHTML(self, doc): <NEW_LINE> <INDENT> with doc.tag('textarea', name=self.question.getId(), placeholder=_('Your answer here')): <NEW_LINE> <INDENT> doc.text('')
Essay Answer: this class represents an Answer type with a free text area
6259906af548e778e596cd7b
class MLPAnimationCircles(_BaseMLPAnimation): <NEW_LINE> <INDENT> @property <NEW_LINE> def artists(self) -> Tuple[Circle, ...]: <NEW_LINE> <INDENT> return tuple(filter(lambda c: isinstance(c, Circle), self.ax.get_children())) <NEW_LINE> <DEDENT> def _add_particles(self, step: int = 0) -> None: <NEW_LINE> <INDENT> for i in range(self.sm.NUM_PARTICLES): <NEW_LINE> <INDENT> self.ax.add_patch( Circle( xy=(self.r_coords[step][0][i], self.r_coords[step][1][i]), fc=self.colors[step][i] if self.colorspeed else self.dflt_clr, radius=self.sm.RADIUS_PARTICLE, alpha=0.9, lw=0, ) ) <NEW_LINE> <DEDENT> <DEDENT> def _update_particles(self, step: int) -> None: <NEW_LINE> <INDENT> for a in self.artists: <NEW_LINE> <INDENT> a.remove() <NEW_LINE> <DEDENT> self._add_particles(step=step)
Not ideal for simulations with a large number of particles and/or number of steps. It uses ``matplotlib.patches.Circle`` to plot the particles, which results in * being zoom-friendly * particle radius being accurately represented * faster draw times
6259906a1f5feb6acb1643dd
class ThreadPool: <NEW_LINE> <INDENT> def __init__(self, num_threads): <NEW_LINE> <INDENT> self.tasks = Queue(num_threads) <NEW_LINE> for n in range(num_threads): <NEW_LINE> <INDENT> Worker(self.tasks, n) <NEW_LINE> <DEDENT> <DEDENT> def add_task(self, func, *args, **kargs): <NEW_LINE> <INDENT> self.tasks.put((func, args, kargs)) <NEW_LINE> <DEDENT> def map(self, func, args_list): <NEW_LINE> <INDENT> for args in args_list: <NEW_LINE> <INDENT> self.add_task(func, args) <NEW_LINE> <DEDENT> <DEDENT> def wait_completion(self): <NEW_LINE> <INDENT> self.tasks.join()
Pool of threads consuming tasks from a queue
6259906a76e4537e8c3f0d72
@implementer(interfaces.ITransport) <NEW_LINE> class SSHSessionProcessProtocol(protocol.ProcessProtocol): <NEW_LINE> <INDENT> _signalValuesToNames = None <NEW_LINE> def __init__(self, session): <NEW_LINE> <INDENT> self.session = session <NEW_LINE> self.lostOutOrErrFlag = False <NEW_LINE> <DEDENT> def connectionMade(self): <NEW_LINE> <INDENT> if self.session.buf: <NEW_LINE> <INDENT> self.transport.write(self.session.buf) <NEW_LINE> self.session.buf = None <NEW_LINE> <DEDENT> <DEDENT> def outReceived(self, data): <NEW_LINE> <INDENT> self.session.write(data) <NEW_LINE> <DEDENT> def errReceived(self, err): <NEW_LINE> <INDENT> self.session.writeExtended(connection.EXTENDED_DATA_STDERR, err) <NEW_LINE> <DEDENT> def outConnectionLost(self): <NEW_LINE> <INDENT> if self.lostOutOrErrFlag: <NEW_LINE> <INDENT> self.session.conn.sendEOF(self.session) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.lostOutOrErrFlag = True <NEW_LINE> <DEDENT> <DEDENT> def errConnectionLost(self): <NEW_LINE> <INDENT> self.outConnectionLost() <NEW_LINE> <DEDENT> def connectionLost(self, reason = None): <NEW_LINE> <INDENT> self.session.loseConnection() <NEW_LINE> <DEDENT> def _getSignalName(self, signum): <NEW_LINE> <INDENT> if self._signalValuesToNames is None: <NEW_LINE> <INDENT> self._signalValuesToNames = {} <NEW_LINE> for signame in SUPPORTED_SIGNALS: <NEW_LINE> <INDENT> signame = 'SIG' + signame <NEW_LINE> sigvalue = getattr(signal, signame, None) <NEW_LINE> if sigvalue is not None: <NEW_LINE> <INDENT> self._signalValuesToNames[sigvalue] = signame <NEW_LINE> <DEDENT> <DEDENT> for k, v in signal.__dict__.items(): <NEW_LINE> <INDENT> if k.startswith('SIG') and not k.startswith('SIG_'): <NEW_LINE> <INDENT> if v not in self._signalValuesToNames: <NEW_LINE> <INDENT> self._signalValuesToNames[v] = k + '@' + sys.platform <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return self._signalValuesToNames[signum] <NEW_LINE> <DEDENT> def processEnded(self, reason=None): <NEW_LINE> <INDENT> if reason is not None: <NEW_LINE> <INDENT> err = reason.value <NEW_LINE> if err.signal is not None: <NEW_LINE> <INDENT> signame = self._getSignalName(err.signal) <NEW_LINE> if (getattr(os, 'WCOREDUMP', None) is not None and os.WCOREDUMP(err.status)): <NEW_LINE> <INDENT> log.msg('exitSignal: %s (core dumped)' % (signame,)) <NEW_LINE> coreDumped = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.msg('exitSignal: %s' % (signame,)) <NEW_LINE> coreDumped = 0 <NEW_LINE> <DEDENT> self.session.conn.sendRequest( self.session, b'exit-signal', common.NS(networkString(signame[3:])) + chr(coreDumped) + common.NS(b'') + common.NS(b'')) <NEW_LINE> <DEDENT> elif err.exitCode is not None: <NEW_LINE> <INDENT> log.msg('exitCode: %r' % (err.exitCode,)) <NEW_LINE> self.session.conn.sendRequest(self.session, b'exit-status', struct.pack('>L', err.exitCode)) <NEW_LINE> <DEDENT> <DEDENT> self.session.loseConnection() <NEW_LINE> <DEDENT> def getHost(self): <NEW_LINE> <INDENT> return self.session.conn.transport.getHost() <NEW_LINE> <DEDENT> def getPeer(self): <NEW_LINE> <INDENT> return self.session.conn.transport.getPeer() <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> self.session.write(data) <NEW_LINE> <DEDENT> def writeSequence(self, seq): <NEW_LINE> <INDENT> self.session.write(b''.join(seq)) <NEW_LINE> <DEDENT> def loseConnection(self): <NEW_LINE> <INDENT> self.session.loseConnection()
I am both an L{IProcessProtocol} and an L{ITransport}. I am a transport to the remote endpoint and a process protocol to the local subsystem.
6259906ae76e3b2f99fda1f1
class RepeatOr(Repeat): <NEW_LINE> <INDENT> delimiter_regex = "OR"
OR separated
6259906a796e427e5384ff67
class tab(WordprocessingMLElement): <NEW_LINE> <INDENT> pass
This element specifies a single custom tab stop within a set of custom tab stops applied as part of a set of customized paragraph properties in a document. Parent element: tabs
6259906a2c8b7c6e89bd4fd5
class Any(OctetString): <NEW_LINE> <INDENT> tagSet = tag.TagSet() <NEW_LINE> subtypeSpec = constraint.ConstraintsIntersection() <NEW_LINE> typeId = OctetString.getTypeId() <NEW_LINE> @property <NEW_LINE> def tagMap(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._tagMap <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self._tagMap = tagmap.TagMap( {self.tagSet: self}, {eoo.endOfOctets.tagSet: eoo.endOfOctets}, self ) <NEW_LINE> return self._tagMap
Create |ASN.1| schema or value object. |ASN.1| class is based on :class:`~pyasn1.type.base.SimpleAsn1Type`, its objects are immutable and duck-type Python 2 :class:`str` or Python 3 :class:`bytes`. When used in Unicode context, |ASN.1| type assumes "|encoding|" serialisation. Keyword Args ------------ value: :class:`unicode`, :class:`str`, :class:`bytes` or |ASN.1| object :class:`str` (Python 2) or :class:`bytes` (Python 3), alternatively :class:`unicode` object (Python 2) or :class:`str` (Python 3) representing character string to be serialised into octets (note `encoding` parameter) or |ASN.1| object. If `value` is not given, schema object will be created. tagSet: :py:class:`~pyasn1.type.tag.TagSet` Object representing non-default ASN.1 tag(s) subtypeSpec: :py:class:`~pyasn1.type.constraint.ConstraintsIntersection` Object representing non-default ASN.1 subtype constraint(s) encoding: :py:class:`str` Unicode codec ID to encode/decode :class:`unicode` (Python 2) or :class:`str` (Python 3) the payload when |ASN.1| object is used in text string context. binValue: :py:class:`str` Binary string initializer to use instead of the *value*. Example: '10110011'. hexValue: :py:class:`str` Hexadecimal string initializer to use instead of the *value*. Example: 'DEADBEEF'. Raises ------ ~pyasn1.error.ValueConstraintError, ~pyasn1.error.PyAsn1Error On constraint violation or bad initializer. Examples -------- .. code-block:: python class Error(Sequence): ''' ASN.1 specification: Error ::= SEQUENCE { code INTEGER, parameter ANY DEFINED BY code -- Either INTEGER or REAL } ''' componentType=NamedTypes( NamedType('code', Integer()), NamedType('parameter', Any(), openType=OpenType('code', {1: Integer(), 2: Real()})) ) error = Error() error['code'] = 1 error['parameter'] = Integer(1234)
6259906aaad79263cf42ffa6
class OutputDict(dict): <NEW_LINE> <INDENT> def get(self, output_type=None, loss_type=None, hazard_output_id=None, poe=None, quantile=None, statistics=None, variable=None, insured=False): <NEW_LINE> <INDENT> return self[OutputKey(output_type, loss_type, hazard_output_id, poe, quantile, statistics, variable, insured)] <NEW_LINE> <DEDENT> def with_args(self, **kwargs): <NEW_LINE> <INDENT> clone = self.__class__(self) <NEW_LINE> clone.kwargs = self.kwargs <NEW_LINE> clone.kwargs.update(kwargs) <NEW_LINE> return clone <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(OutputDict, self).__init__(*args, **kwargs) <NEW_LINE> self.kwargs = dict() <NEW_LINE> <DEDENT> def write(self, *args, **kwargs): <NEW_LINE> <INDENT> kwargs.update(self.kwargs) <NEW_LINE> output_id = self.get(**kwargs) <NEW_LINE> globals().get(kwargs['output_type'])( kwargs.pop('loss_type'), output_id, *args) <NEW_LINE> <DEDENT> def write_all(self, arg, values, items, *initial_args, **initial_kwargs): <NEW_LINE> <INDENT> if not len(values) or not len(items): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for value, item in itertools.izip(values, items): <NEW_LINE> <INDENT> kwargs = {arg: value} <NEW_LINE> kwargs.update(initial_kwargs) <NEW_LINE> args = list(initial_args) + [item] <NEW_LINE> self.write(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def set(self, container): <NEW_LINE> <INDENT> hazard_output_id = getattr(container, "hazard_output_id") <NEW_LINE> loss_type = getattr(container, "loss_type") <NEW_LINE> poe = getattr(container, "poe", None) <NEW_LINE> quantile = getattr(container, "quantile", None) <NEW_LINE> statistics = getattr(container, "statistics", None) <NEW_LINE> variable = getattr(container, "variable", None) <NEW_LINE> insured = getattr(container, "insured", False) <NEW_LINE> key = OutputKey( output_type=container.output.output_type, loss_type=loss_type, hazard_output_id=hazard_output_id, poe=poe, quantile=quantile, statistics=statistics, variable=variable, insured=insured) <NEW_LINE> assert super( OutputDict, self).get( key, None) is None, "OutputDict can not be updated" <NEW_LINE> self[key] = container.id <NEW_LINE> <DEDENT> def extend(self, output_list): <NEW_LINE> <INDENT> for o in output_list: <NEW_LINE> <INDENT> self.set(o) <NEW_LINE> <DEDENT> return self
A dict keying OutputKey instances to database ID, with convenience setter and getter methods to manage Output containers. It also automatically links an Output type with its specific writer. Risk Calculators create OutputDict instances with Output IDs keyed by OutputKey instances. Worker tasks compute results, then get the proper writer and use it to actually write the results
6259906a4f88993c371f1117
class IndexHandler(tornado.web.RequestHandler): <NEW_LINE> <INDENT> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> self.write('get...') <NEW_LINE> <DEDENT> def post(self, *args, **kwargs): <NEW_LINE> <INDENT> self.write('post...')
Home page handler class
6259906a8a43f66fc4bf3982
class MyGame(arcade.Window): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(WINDOW_WIDTH, WINDOW_HEIGHT, "Sudoku") <NEW_LINE> self.my_grid_sprites = None <NEW_LINE> self.my_textures = None <NEW_LINE> self.my_grid = None <NEW_LINE> self.square_selected = None <NEW_LINE> self.available_numbers = None <NEW_LINE> self.square_selected_possible_vales = None <NEW_LINE> arcade.set_background_color(BACKGROUND_COLOR) <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> self.my_grid_sprites = create_grid_sprites() <NEW_LINE> self.my_textures = create_textures() <NEW_LINE> self.my_grid = create_grid(BOARD_SIZE) <NEW_LINE> self.available_numbers = [BOARD_SIZE for _ in range(BOARD_SIZE)] <NEW_LINE> update_grid_textures(self.square_selected, self.my_grid_sprites, self.my_textures) <NEW_LINE> <DEDENT> def on_draw(self): <NEW_LINE> <INDENT> arcade.start_render() <NEW_LINE> self.my_grid_sprites.draw() <NEW_LINE> draw_grid_numbers(self.my_grid) <NEW_LINE> draw_available_numbers(self.available_numbers) <NEW_LINE> if(self.square_selected != None): <NEW_LINE> <INDENT> draw_possible_values(self.square_selected_possible_vales) <NEW_LINE> <DEDENT> <DEDENT> def on_mouse_press(self, x, y, button, modifiers): <NEW_LINE> <INDENT> if (button == arcade.MOUSE_BUTTON_RIGHT): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.square_selected = squareClicked(x , y) <NEW_LINE> if (self.square_selected == None): <NEW_LINE> <INDENT> update_grid_textures(self.square_selected, self.my_grid_sprites, self.my_textures) <NEW_LINE> self.square_selected_possible_vales = None <NEW_LINE> return <NEW_LINE> <DEDENT> y , x = self.square_selected <NEW_LINE> if (self.my_grid[y][x] != 0): <NEW_LINE> <INDENT> self.square_selected = None <NEW_LINE> update_grid_textures(self.square_selected, self.my_grid_sprites, self.my_textures) <NEW_LINE> self.square_selected_possible_vales = None <NEW_LINE> return <NEW_LINE> <DEDENT> update_grid_textures(self.square_selected, self.my_grid_sprites, self.my_textures) <NEW_LINE> self.square_selected_possible_vales = getPossibleValesInSquare(self.square_selected, self.my_grid) <NEW_LINE> <DEDENT> def on_key_press(self, symbol: int, modifiers: int): <NEW_LINE> <INDENT> if(self.square_selected == None): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if(symbol < 49 or symbol > 57): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> newNumber = symbol - 48 <NEW_LINE> if(newNumber not in self.square_selected_possible_vales): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> y, x = self.square_selected <NEW_LINE> self.my_grid[y][x] = newNumber <NEW_LINE> self.available_numbers[newNumber - 1] = self.available_numbers[newNumber - 1] - 1 <NEW_LINE> self.square_selected = None <NEW_LINE> self.square_selected_possible_vales = None
Main Game Class
6259906a2c8b7c6e89bd4fd6
class CmcalibrateTests(GeneralSetUp): <NEW_LINE> <INDENT> def test_base_command(self): <NEW_LINE> <INDENT> c = Cmcalibrate() <NEW_LINE> self.assertEqual(c.BaseCommand, ''.join(['cd "',getcwd(),'/"; ','cmcalibrate'])) <NEW_LINE> c.Parameters['--mpi'].on() <NEW_LINE> self.assertEqual(c.BaseCommand, ''.join(['cd "',getcwd(),'/"; ','cmcalibrate --mpi'])) <NEW_LINE> <DEDENT> def test_changing_working_dir(self): <NEW_LINE> <INDENT> c = Cmcalibrate(WorkingDir='/tmp/cmcalibrate_test') <NEW_LINE> self.assertEqual(c.BaseCommand, ''.join(['cd "','/tmp/cmcalibrate_test','/"; ','cmcalibrate'])) <NEW_LINE> c = Cmcalibrate() <NEW_LINE> c.WorkingDir = '/tmp/cmcalibrate_test2' <NEW_LINE> self.assertEqual(c.BaseCommand, ''.join(['cd "','/tmp/cmcalibrate_test2','/"; ','cmcalibrate'])) <NEW_LINE> rmdir('/tmp/cmcalibrate_test') <NEW_LINE> rmdir('/tmp/cmcalibrate_test2') <NEW_LINE> <DEDENT> def test_general_cleanUp(self): <NEW_LINE> <INDENT> shutil.rmtree(self.temp_dir) <NEW_LINE> shutil.rmtree(self.temp_dir_spaces)
Tests for the Cmcalibrate application controller
6259906a55399d3f05627d12
@pulumi.output_type <NEW_LINE> class GetAppSecSiemDefinitionsResult: <NEW_LINE> <INDENT> def __init__(__self__, id=None, json=None, output_text=None, siem_definition_name=None): <NEW_LINE> <INDENT> if id and not isinstance(id, str): <NEW_LINE> <INDENT> raise TypeError("Expected argument 'id' to be a str") <NEW_LINE> <DEDENT> pulumi.set(__self__, "id", id) <NEW_LINE> if json and not isinstance(json, str): <NEW_LINE> <INDENT> raise TypeError("Expected argument 'json' to be a str") <NEW_LINE> <DEDENT> pulumi.set(__self__, "json", json) <NEW_LINE> if output_text and not isinstance(output_text, str): <NEW_LINE> <INDENT> raise TypeError("Expected argument 'output_text' to be a str") <NEW_LINE> <DEDENT> pulumi.set(__self__, "output_text", output_text) <NEW_LINE> if siem_definition_name and not isinstance(siem_definition_name, str): <NEW_LINE> <INDENT> raise TypeError("Expected argument 'siem_definition_name' to be a str") <NEW_LINE> <DEDENT> pulumi.set(__self__, "siem_definition_name", siem_definition_name) <NEW_LINE> <DEDENT> @property <NEW_LINE> @pulumi.getter <NEW_LINE> def id(self) -> str: <NEW_LINE> <INDENT> return pulumi.get(self, "id") <NEW_LINE> <DEDENT> @property <NEW_LINE> @pulumi.getter <NEW_LINE> def json(self) -> str: <NEW_LINE> <INDENT> return pulumi.get(self, "json") <NEW_LINE> <DEDENT> @property <NEW_LINE> @pulumi.getter(name="outputText") <NEW_LINE> def output_text(self) -> str: <NEW_LINE> <INDENT> return pulumi.get(self, "output_text") <NEW_LINE> <DEDENT> @property <NEW_LINE> @pulumi.getter(name="siemDefinitionName") <NEW_LINE> def siem_definition_name(self) -> Optional[str]: <NEW_LINE> <INDENT> return pulumi.get(self, "siem_definition_name")
A collection of values returned by getAppSecSiemDefinitions.
6259906a92d797404e389753
@attr.s <NEW_LINE> class PayerNewRequest(FieldsCommentMixin, ApiRequest): <NEW_LINE> <INDENT> payer = Field(default=None, validator=attr.validators.instance_of(Payer)) <NEW_LINE> request_type = 'payer-new' <NEW_LINE> object_fields = ['payer'] <NEW_LINE> def get_hash_values(self): <NEW_LINE> <INDENT> return [self.timestamp, self.merchantid, self.orderid, '', '', self.payer.ref]
Class representing a new payer to be sent to the API.
6259906ae1aae11d1e7cf405
class RegionalQuotaCapability(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'region_name': {'key': 'regionName', 'type': 'str'}, 'cores_used': {'key': 'coresUsed', 'type': 'long'}, 'cores_available': {'key': 'coresAvailable', 'type': 'long'}, } <NEW_LINE> def __init__( self, *, region_name: Optional[str] = None, cores_used: Optional[int] = None, cores_available: Optional[int] = None, **kwargs ): <NEW_LINE> <INDENT> super(RegionalQuotaCapability, self).__init__(**kwargs) <NEW_LINE> self.region_name = region_name <NEW_LINE> self.cores_used = cores_used <NEW_LINE> self.cores_available = cores_available
The regional quota capacity. :param region_name: The region name. :type region_name: str :param cores_used: The number of cores used in the region. :type cores_used: long :param cores_available: The number of cores available in the region. :type cores_available: long
6259906ad268445f2663a755
class TeardownSystemMounts(JailStop): <NEW_LINE> <INDENT> pass
Tear down a jail's mountpoints.
6259906a3539df3088ecda8f
class MsgM25FlashWriteStatus(SBP): <NEW_LINE> <INDENT> _parser = construct.Struct( 'status' / construct.Array(1, construct.Int8ul),) <NEW_LINE> __slots__ = [ 'status', ] <NEW_LINE> def __init__(self, sbp=None, **kwargs): <NEW_LINE> <INDENT> if sbp: <NEW_LINE> <INDENT> super( MsgM25FlashWriteStatus, self).__init__(sbp.msg_type, sbp.sender, sbp.length, sbp.payload, sbp.crc) <NEW_LINE> self.from_binary(sbp.payload) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super( MsgM25FlashWriteStatus, self).__init__() <NEW_LINE> self.msg_type = SBP_MSG_M25_FLASH_WRITE_STATUS <NEW_LINE> self.sender = kwargs.pop('sender', SENDER_ID) <NEW_LINE> self.status = kwargs.pop('status') <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return fmt_repr(self) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_json(s): <NEW_LINE> <INDENT> d = json.loads(s) <NEW_LINE> return MsgM25FlashWriteStatus.from_json_dict(d) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_json_dict(d): <NEW_LINE> <INDENT> sbp = SBP.from_json_dict(d) <NEW_LINE> return MsgM25FlashWriteStatus(sbp, **d) <NEW_LINE> <DEDENT> def from_binary(self, d): <NEW_LINE> <INDENT> p = MsgM25FlashWriteStatus._parser.parse(d) <NEW_LINE> for n in self.__class__.__slots__: <NEW_LINE> <INDENT> setattr(self, n, getattr(p, n)) <NEW_LINE> <DEDENT> <DEDENT> def to_binary(self): <NEW_LINE> <INDENT> c = containerize(exclude_fields(self)) <NEW_LINE> self.payload = MsgM25FlashWriteStatus._parser.build(c) <NEW_LINE> return self.pack() <NEW_LINE> <DEDENT> def to_json_dict(self): <NEW_LINE> <INDENT> self.to_binary() <NEW_LINE> d = super( MsgM25FlashWriteStatus, self).to_json_dict() <NEW_LINE> j = walk_json_dict(exclude_fields(self)) <NEW_LINE> d.update(j) <NEW_LINE> return d
SBP class for message MSG_M25_FLASH_WRITE_STATUS (0x00F3). You can have MSG_M25_FLASH_WRITE_STATUS inherit its fields directly from an inherited SBP object, or construct it inline using a dict of its fields. The flash status message writes to the 8-bit M25 flash status register. The device replies with a MSG_FLASH_DONE message. Parameters ---------- sbp : SBP SBP parent object to inherit from. status : array Byte to write to the M25 flash status register sender : int Optional sender ID, defaults to SENDER_ID (see sbp/msg.py).
6259906a7047854f46340ba7
class ProblematicSearchServers(SearchException): <NEW_LINE> <INDENT> def __init__(self, failed=EmptyI, invalid=EmptyI, unsupported=EmptyI): <NEW_LINE> <INDENT> SearchException.__init__(self) <NEW_LINE> self.failed_servers = failed <NEW_LINE> self.invalid_servers = invalid <NEW_LINE> self.unsupported_servers = unsupported <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = _("Some repositories failed to respond appropriately:\n") <NEW_LINE> for pub, err in self.failed_servers: <NEW_LINE> <INDENT> s += _("{o}:\n{msg}\n").format( o=pub, msg=err) <NEW_LINE> <DEDENT> for pub in self.invalid_servers: <NEW_LINE> <INDENT> s += _("{0} did not return a valid " "response.\n".format(pub)) <NEW_LINE> <DEDENT> if len(self.unsupported_servers) > 0: <NEW_LINE> <INDENT> s += _("Some repositories don't support requested " "search operation:\n") <NEW_LINE> <DEDENT> for pub, err in self.unsupported_servers: <NEW_LINE> <INDENT> s += _("{o}:\n{msg}\n").format( o=pub, msg=err) <NEW_LINE> <DEDENT> return s
This class wraps exceptions which could appear while trying to do a search request.
6259906a45492302aabfdcc9
class EggHolderProductBitCost(MultiObjectiveTestProblem): <NEW_LINE> <INDENT> dim = 6 <NEW_LINE> num_objectives = 2 <NEW_LINE> _bounds = [(0.0, 20.), (0.0, 20.0), (0.0, 20.0), (0.0, 20.0), (0.0, 20.0), (0.0, 20.0)] <NEW_LINE> _ref_point = [0.5, -(w[0]+w[1]+w[2]+w[3]+w[4]+w[5])*20] <NEW_LINE> _max_hv = 1e3 <NEW_LINE> def __init__(self, noise_std: Optional[float] = None, negate: bool = False) -> None: <NEW_LINE> <INDENT> super().__init__(noise_std=noise_std, negate=negate) <NEW_LINE> <DEDENT> def EggHolderProduct(self,X: Tensor) -> Tensor: <NEW_LINE> <INDENT> return EggHolder(torch.stack([X[..., 0], X[..., 1]], dim=-1)) *EggHolder(torch.stack([X[..., 2], X[..., 3]], dim=-1)) *EggHolder(torch.stack([X[..., 4], X[..., 5]], dim=-1)) <NEW_LINE> <DEDENT> def BitCost(self,X: Tensor) -> Tensor: <NEW_LINE> <INDENT> bitCost = torch.zeros(X.shape[0]) <NEW_LINE> for i in range(self.dim): <NEW_LINE> <INDENT> bitCost += X[...,i]*w[i] <NEW_LINE> <DEDENT> return -bitCost <NEW_LINE> <DEDENT> def evaluate_true(self, X: Tensor) -> Tensor: <NEW_LINE> <INDENT> y = self.EggHolderProduct(X=X) <NEW_LINE> bitCost = self.BitCost(X=X) <NEW_LINE> return torch.stack([y, bitCost], dim=-1)
Two objective problem composed of the following discrete objectives: y = ∏_k binom.pmf(b_k, N_k, p_k); Bit_Cost = -(∑_k w_k * b_k)
6259906a7b180e01f3e49c5c
class IsUserOrgAdminForUrlConnection(access.AccessChecker): <NEW_LINE> <INDENT> def checkAccess(self, data, check): <NEW_LINE> <INDENT> if not data.ndb_profile: <NEW_LINE> <INDENT> raise exception.Forbidden(message=access._MESSAGE_NO_PROFILE) <NEW_LINE> <DEDENT> if data.url_connection.organization not in data.ndb_profile.admin_for: <NEW_LINE> <INDENT> raise exception.Forbidden( message=access._MESSAGE_NOT_ORG_ADMIN_FOR_ORG % data.url_connection.organization.id())
AccessChecker that ensures that the logged in user is organization administrator for the connection which is retrieved from the URL data.
6259906afff4ab517ebcf00c
class Lightmap(list): <NEW_LINE> <INDENT> _pixels: List[bytes] = [b"\0" * 3] * 128 * 128 <NEW_LINE> _format = "3s" * 128 * 128 <NEW_LINE> def __getitem__(self, row) -> List[bytes]: <NEW_LINE> <INDENT> row_start = row * 128 <NEW_LINE> return self._pixels[row_start:row_start + 128] <NEW_LINE> <DEDENT> def flat(self) -> bytes: <NEW_LINE> <INDENT> return b"".join(self._pixels) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_tuple(cls, _tuple): <NEW_LINE> <INDENT> out = cls() <NEW_LINE> out._pixels = _tuple <NEW_LINE> return out
Raw pixel bytes, 128x128 RGB_888 image
6259906a009cb60464d02d2b
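A minimal usage sketch for the Lightmap class above, assuming it is importable as written; the byte counts follow directly from the fixed 128x128 RGB_888 layout, and the all-red fill is just an example value:

lm = Lightmap()
print(len(lm.flat()))   # 49152 bytes: 128 * 128 pixels * 3 bytes per pixel
print(len(lm[0]))       # 128 pixels in the first row
lm2 = Lightmap.from_tuple([b"\xff\x00\x00"] * 128 * 128)   # an all-red lightmap
print(lm2[5][7])        # b'\xff\x00\x00'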
class TimeInfoResult(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Duration = None <NEW_LINE> self.EndTs = None <NEW_LINE> self.StartTs = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Duration = params.get("Duration") <NEW_LINE> self.EndTs = params.get("EndTs") <NEW_LINE> self.StartTs = params.get("StartTs") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
TimeInfoResult
6259906aa8370b77170f1bb7
class StringModel: <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> self.msg = msg <NEW_LINE> <DEDENT> def to_list(self): <NEW_LINE> <INDENT> self.strIO_readlines = SP.to_lines(self.msg) <NEW_LINE> self.list = [] <NEW_LINE> for line in self.strIO_readlines: <NEW_LINE> <INDENT> new_line = SP.clearn_enter(line) <NEW_LINE> new_line = SP.clear_space(new_line) <NEW_LINE> if new_line != "": <NEW_LINE> <INDENT> self.list.append(new_line) <NEW_LINE> <DEDENT> <DEDENT> return self.list <NEW_LINE> <DEDENT> def getClassName(self): <NEW_LINE> <INDENT> startName = "class" <NEW_LINE> endName = "(db.Model):" <NEW_LINE> for line in self.list: <NEW_LINE> <INDENT> if startName in line and endName in line: <NEW_LINE> <INDENT> newline = line <NEW_LINE> newline = SP.remove(newline, startName) <NEW_LINE> newline = SP.remove(newline, endName) <NEW_LINE> self.className = newline <NEW_LINE> <DEDENT> <DEDENT> return self.className <NEW_LINE> <DEDENT> def getColumns(self): <NEW_LINE> <INDENT> column_check = "db.Column(" <NEW_LINE> self.columns = [] <NEW_LINE> for line in self.list: <NEW_LINE> <INDENT> if column_check in line: <NEW_LINE> <INDENT> column = Column(line) <NEW_LINE> self.columns.append(column) <NEW_LINE> <DEDENT> <DEDENT> return self.columns
Parse the Class and Column details from a Model string
6259906a2c8b7c6e89bd4fd7
class _ConstraintBodyHeatSource(CommandManager): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(_ConstraintBodyHeatSource, self).__init__() <NEW_LINE> self.pixmap = "FEM_ConstraintHeatflux" <NEW_LINE> self.menuetext = "Constraint body heat source" <NEW_LINE> self.tooltip = "Creates a FEM constraint body heat source" <NEW_LINE> self.is_active = "with_analysis" <NEW_LINE> self.do_activated = "add_obj_on_gui_noset_edit"
The FEM_ConstraintBodyHeatSource command definition
6259906a0a50d4780f7069b9
class DisplayPublisher(Configurable): <NEW_LINE> <INDENT> def _validate_data(self, source, data, metadata=None): <NEW_LINE> <INDENT> if not isinstance(source, basestring): <NEW_LINE> <INDENT> raise TypeError('source must be a str, got: %r' % source) <NEW_LINE> <DEDENT> if not isinstance(data, dict): <NEW_LINE> <INDENT> raise TypeError('data must be a dict, got: %r' % data) <NEW_LINE> <DEDENT> if metadata is not None: <NEW_LINE> <INDENT> if not isinstance(metadata, dict): <NEW_LINE> <INDENT> raise TypeError('metadata must be a dict, got: %r' % data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def publish(self, source, data, metadata=None): <NEW_LINE> <INDENT> if 'text/plain' in data: <NEW_LINE> <INDENT> print(data['text/plain'], file=io.stdout) <NEW_LINE> <DEDENT> <DEDENT> def clear_output(self, stdout=True, stderr=True, other=True): <NEW_LINE> <INDENT> if stdout: <NEW_LINE> <INDENT> print('\033[2K\r', file=io.stdout, end='') <NEW_LINE> io.stdout.flush() <NEW_LINE> <DEDENT> if stderr: <NEW_LINE> <INDENT> print('\033[2K\r', file=io.stderr, end='') <NEW_LINE> io.stderr.flush()
A traited class that publishes display data to frontends. Instances of this class are created by the main IPython object and should be accessed there.
6259906aaad79263cf42ffa8
class CosineNbrAttentionEmbedding(Encoder): <NEW_LINE> <INDENT> def __init__(self, input_dim, is_train, train_dropout=1.0, emb_dim=None, proj_e=None, proj_w=None, scope="attention"): <NEW_LINE> <INDENT> super(CosineNbrAttentionEmbedding, self).__init__() <NEW_LINE> self.input_dim = input_dim <NEW_LINE> self.scope = scope <NEW_LINE> self.is_train = is_train <NEW_LINE> self.dropout = train_dropout <NEW_LINE> if emb_dim: <NEW_LINE> <INDENT> self.emb_dim = emb_dim <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.emb_dim = self.input_dim <NEW_LINE> <DEDENT> with tf.variable_scope(scope): <NEW_LINE> <INDENT> if proj_e: <NEW_LINE> <INDENT> self.proj_e = proj_e <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.proj_e = tf.get_variable( "W_embed", shape=(2 * self.input_dim, self.emb_dim), initializer=tf.glorot_uniform_initializer() ) <NEW_LINE> <DEDENT> if proj_w: <NEW_LINE> <INDENT> self.proj_w = proj_w <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.proj_w = tf.get_variable( "W_attention", shape=(2 * self.input_dim, self.emb_dim), initializer=tf.glorot_uniform_initializer() ) <NEW_LINE> <DEDENT> <DEDENT> if not proj_w: <NEW_LINE> <INDENT> utils.add_variable_summaries(self.proj_w, self.scope + "/W_attention") <NEW_LINE> <DEDENT> <DEDENT> def attend(self, node, neighbors, query, nbr_mask, name=""): <NEW_LINE> <INDENT> node_query = tf.concat([node, query], axis=-1) <NEW_LINE> node_emb = tf.matmul(node_query, self.proj_e) <NEW_LINE> node_emb = tf.expand_dims(node_emb, 1) <NEW_LINE> nbr_scores = tf.squeeze( tf.matmul(node_emb, neighbors, transpose_b=True), axis=1 ) <NEW_LINE> nbr_scores += (1 - nbr_mask) * (-1e7) <NEW_LINE> attention_probs = tf.squeeze(tf.nn.softmax(nbr_scores, axis=-1)) <NEW_LINE> self.add_to_collection("attention_probs", attention_probs) <NEW_LINE> utils.add_histogram_summary(attention_probs, self.scope + "/" + name + "/attention_probs") <NEW_LINE> attention_emb = tf.reduce_sum( tf.expand_dims(attention_probs, -1) * neighbors, 1 ) <NEW_LINE> concat_emb = tf.concat([node, attention_emb], -1) <NEW_LINE> output_emb = tf.matmul(concat_emb, self.proj_w) <NEW_LINE> output = tf.cond( self.is_train, lambda: tf.nn.dropout(output_emb, self.dropout, name="dropout"), lambda: output_emb ) <NEW_LINE> return output <NEW_LINE> <DEDENT> def make_feed_dict(self): <NEW_LINE> <INDENT> return {}
Compose embedding by attending to neighbors using simple dot product. Concatenates and projects (node, query_rel) to an embedding that is used to attend to the neighbor embeddings.
6259906a4f88993c371f1118
class CallbackImageProvider(AbstractImageProvider): <NEW_LINE> <INDENT> def __init__(self,name,image_provider_fn): <NEW_LINE> <INDENT> self.__name = name <NEW_LINE> self.__image_provider_fn = image_provider_fn <NEW_LINE> <DEDENT> def provide_image(self, image_set): <NEW_LINE> <INDENT> return self.__image_provider_fn(image_set,self) <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return self.__name
An image provider proxy that calls the indicated callback functions (presumably in your module) to implement the methods
6259906a8e7ae83300eea881
class Spec(object): <NEW_LINE> <INDENT> def __init__(self, foreground_specs, blur_method='none', blur_level=0, motion_method='none', motion_level=0, motion_angle=45, noise_level=0, compression=0, scale=1.): <NEW_LINE> <INDENT> self.foreground_specs = foreground_specs <NEW_LINE> self.blur_method = blur_method <NEW_LINE> self.blur_level = blur_level <NEW_LINE> self.motion_method = motion_method <NEW_LINE> self.motion_level = motion_level <NEW_LINE> self.motion_angle = motion_angle <NEW_LINE> self.noise_level = noise_level <NEW_LINE> self.compression = compression <NEW_LINE> self.scale = scale
Class to represent the specification of artificial transformations and the types of foreground positions to be applied to each background image.
6259906a2c8b7c6e89bd4fd8
class AbstractActionFlag(object): <NEW_LINE> <INDENT> _immutable_fields_ = ["checkinterval_scaled?"] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._periodic_actions = [] <NEW_LINE> self._nonperiodic_actions = [] <NEW_LINE> self.has_bytecode_counter = False <NEW_LINE> self.fired_actions = None <NEW_LINE> self.checkinterval_scaled = 10000 * TICK_COUNTER_STEP <NEW_LINE> self._rebuild_action_dispatcher() <NEW_LINE> <DEDENT> def fire(self, action): <NEW_LINE> <INDENT> if not action._fired: <NEW_LINE> <INDENT> action._fired = True <NEW_LINE> if self.fired_actions is None: <NEW_LINE> <INDENT> self.fired_actions = [] <NEW_LINE> <DEDENT> self.fired_actions.append(action) <NEW_LINE> self.reset_ticker(-1) <NEW_LINE> <DEDENT> <DEDENT> @not_rpython <NEW_LINE> def register_periodic_action(self, action, use_bytecode_counter): <NEW_LINE> <INDENT> assert isinstance(action, PeriodicAsyncAction) <NEW_LINE> if use_bytecode_counter: <NEW_LINE> <INDENT> self._periodic_actions.append(action) <NEW_LINE> self.has_bytecode_counter = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._periodic_actions.insert(0, action) <NEW_LINE> <DEDENT> self._rebuild_action_dispatcher() <NEW_LINE> <DEDENT> def getcheckinterval(self): <NEW_LINE> <INDENT> return self.checkinterval_scaled // TICK_COUNTER_STEP <NEW_LINE> <DEDENT> def setcheckinterval(self, interval): <NEW_LINE> <INDENT> MAX = sys.maxint // TICK_COUNTER_STEP <NEW_LINE> if interval < 1: <NEW_LINE> <INDENT> interval = 1 <NEW_LINE> <DEDENT> elif interval > MAX: <NEW_LINE> <INDENT> interval = MAX <NEW_LINE> <DEDENT> self.checkinterval_scaled = interval * TICK_COUNTER_STEP <NEW_LINE> self.reset_ticker(-1) <NEW_LINE> <DEDENT> def _rebuild_action_dispatcher(self): <NEW_LINE> <INDENT> periodic_actions = unrolling_iterable(self._periodic_actions) <NEW_LINE> @jit.unroll_safe <NEW_LINE> @objectmodel.dont_inline <NEW_LINE> def action_dispatcher(ec, frame): <NEW_LINE> <INDENT> self.reset_ticker(self.checkinterval_scaled) <NEW_LINE> for action in periodic_actions: <NEW_LINE> <INDENT> action.perform(ec, frame) <NEW_LINE> <DEDENT> list = self.fired_actions <NEW_LINE> if list is not None: <NEW_LINE> <INDENT> self.fired_actions = None <NEW_LINE> for action in list: <NEW_LINE> <INDENT> action._fired = False <NEW_LINE> action.perform(ec, frame) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.action_dispatcher = action_dispatcher
This holds in an integer the 'ticker'. If threads are enabled, it is decremented at each bytecode; when it reaches zero, we release the GIL. And whether we have threads or not, it is forced to zero whenever we fire any of the asynchronous actions.
6259906a92d797404e389754
class ShowSubMenu(InclusionTag): <NEW_LINE> <INDENT> name = 'fix_show_sub_menu' <NEW_LINE> template = 'menu/dummy.html' <NEW_LINE> options = Options( IntegerArgument('levels', default=100, required=False), Argument('root_level', default=None, required=False), IntegerArgument('nephews', default=100, required=False), Argument('template', default='menu/sub_menu.html', required=False), ) <NEW_LINE> def get_context(self, context, levels, root_level, nephews, template): <NEW_LINE> <INDENT> if not root_level and root_level != 0: <NEW_LINE> <INDENT> root_level = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> request = context['request'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return {'template': 'menu/empty.html'} <NEW_LINE> <DEDENT> nodes = menu_pool.get_nodes(request) <NEW_LINE> children = [] <NEW_LINE> include_root = False <NEW_LINE> if root_level is not None and root_level > 0: <NEW_LINE> <INDENT> root_level -= 1 <NEW_LINE> <DEDENT> elif root_level is not None and root_level == 0: <NEW_LINE> <INDENT> include_root = True <NEW_LINE> <DEDENT> for node in nodes: <NEW_LINE> <INDENT> if root_level is None: <NEW_LINE> <INDENT> if node.selected: <NEW_LINE> <INDENT> root_level = node.level <NEW_LINE> <DEDENT> <DEDENT> is_root_ancestor = (node.ancestor and node.level == root_level) <NEW_LINE> root_selected = (node.selected and node.level == root_level) <NEW_LINE> if is_root_ancestor or root_selected: <NEW_LINE> <INDENT> cut_after(node, levels, []) <NEW_LINE> children = node.children <NEW_LINE> for child in children: <NEW_LINE> <INDENT> child.parent = None <NEW_LINE> if child.sibling: <NEW_LINE> <INDENT> cut_after(child, nephews, []) <NEW_LINE> <DEDENT> <DEDENT> if include_root: <NEW_LINE> <INDENT> children = menu_pool.apply_modifiers( [node], request, post_cut=True ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> children = menu_pool.apply_modifiers( children, request, post_cut=True ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> context.update({ 'children': children, 'template': template, 'from_level': 0, 'to_level': 0, 'extra_inactive': 0, 'extra_active': 0 }) <NEW_LINE> return context
show the sub menu of the current nav-node. - levels: how many levels deep - root_level: the level to start the menu at - nephews: the level of descendants of siblings (nephews) to show - template: template used to render the navigation
6259906a63b5f9789fe86955
class App(Cli): <NEW_LINE> <INDENT> format = '%(levelname)s [%(name)s] %(message)s' <NEW_LINE> def __init__(self, app, shell_namespace=None, extra_files=None, bootstrap=None, shutdown=None): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> self.shell_namespace = shell_namespace or {} <NEW_LINE> self.extra_files = extra_files <NEW_LINE> self.bootstrap = bootstrap <NEW_LINE> self.shutdown = shutdown <NEW_LINE> <DEDENT> def command_serve(self, host='', port='8000', level='debug'): <NEW_LINE> <INDENT> logging.basicConfig(level=getattr(logging, level.upper()), format=self.format) <NEW_LINE> if self.bootstrap: <NEW_LINE> <INDENT> logger.info('Bootstraping...') <NEW_LINE> self.bootstrap() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> server_thread = DevServerThread(host, port, self.app) <NEW_LINE> server_thread.start() <NEW_LINE> wait_for_code_change(extra_files=self.extra_files) <NEW_LINE> server_thread.running = False <NEW_LINE> server_thread.join() <NEW_LINE> logger.info('Reloading...') <NEW_LINE> flush_fds() <NEW_LINE> close_fds() <NEW_LINE> os.execvp(sys.executable, [sys.executable] + sys.argv) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> logger.info('Stoping dev-server...') <NEW_LINE> server_thread.running = False <NEW_LINE> server_thread.join() <NEW_LINE> if self.shutdown: <NEW_LINE> <INDENT> self.shutdown() <NEW_LINE> <DEDENT> sys.exit() <NEW_LINE> <DEDENT> <DEDENT> def command_shell(self): <NEW_LINE> <INDENT> from code import interact <NEW_LINE> interact('Namespace %r' % self.shell_namespace, local=self.shell_namespace)
Development application
6259906a01c39578d7f1432e
class VMCTemplateChecksumError(VMCBaseError): <NEW_LINE> <INDENT> pass
This exception is raised when the template tries to replace a file with a checksum that is not recognised.
6259906ad268445f2663a756
class DisableFileSystemRedirection: <NEW_LINE> <INDENT> if is_windows(): <NEW_LINE> <INDENT> _disable = ctypes.windll.kernel32.Wow64DisableWow64FsRedirection <NEW_LINE> _revert = ctypes.windll.kernel32.Wow64RevertWow64FsRedirection <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _disable = '' <NEW_LINE> _revert = '' <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.old_value = ctypes.c_long() <NEW_LINE> self.success = self._disable(ctypes.byref(self.old_value)) <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> if self.success: <NEW_LINE> <INDENT> self._revert(self.old_value)
When a 32-bit program runs on a 64-bit operating system, the paths to C:/Windows/System32 automatically get redirected to the 32-bit version (C:/Windows/SysWow64). If you really do need to access the contents of System32, you need to disable the file system redirector first.
6259906aa8370b77170f1bb8
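A hedged usage sketch for the DisableFileSystemRedirection class above: inside the `with` block a 32-bit process on 64-bit Windows sees the real System32 instead of SysWow64; the file path read here is only an example:

# Only meaningful for a 32-bit Python on 64-bit Windows; the class is a plain context manager.
with DisableFileSystemRedirection():
    with open(r"C:\Windows\System32\drivers\etc\hosts") as hosts:
        print(hosts.readline())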
class DataActionRecord(models.Model): <NEW_LINE> <INDENT> data = models.ForeignKey("Data", null=True, blank=True) <NEW_LINE> action = models.CharField(max_length=24, blank=True, null=True) <NEW_LINE> time = models.DateTimeField(blank=True, null=True) <NEW_LINE> user = models.ForeignKey(User, blank=True, null=True) <NEW_LINE> @receiver([action_message],) <NEW_LINE> def record_handel(sender=None, user=None, instance=None, action=None, **kwargs): <NEW_LINE> <INDENT> form = {} <NEW_LINE> form['user'] = user <NEW_LINE> form['data'] = instance <NEW_LINE> form['time'] = timezone.localtime(timezone.now()) <NEW_LINE> form['action'] = action <NEW_LINE> acrecord, created = DataActionRecord.objects.get_or_create(**form)
Table that records user actions on data
6259906a0c0af96317c57958
class SuperiorHuntersDefense(FeatureSelector): <NEW_LINE> <INDENT> options = {'evasion': Evasion, 'stand against of the tide': StandAgainstTheTide, 'uncanny dodge': UncannyDodge} <NEW_LINE> name = "Superior Hunter's Defense (Select One)" <NEW_LINE> source = "Ranger (Hunter)"
Select a Superior Hunter's Defense option in "feature_choices" in your .py file from one of: evasion, stand against the tide, uncanny dodge
6259906aac7a0e7691f73cda
class InventoryAdapter(object): <NEW_LINE> <INDENT> def __init__(self, env): <NEW_LINE> <INDENT> self.env = env <NEW_LINE> if not hasattr(env, 'intersphinx_cache'): <NEW_LINE> <INDENT> self.env.intersphinx_cache = {} <NEW_LINE> self.env.intersphinx_inventory = {} <NEW_LINE> self.env.intersphinx_named_inventory = {} <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def cache(self): <NEW_LINE> <INDENT> return self.env.intersphinx_cache <NEW_LINE> <DEDENT> @property <NEW_LINE> def main_inventory(self): <NEW_LINE> <INDENT> return self.env.intersphinx_inventory <NEW_LINE> <DEDENT> @property <NEW_LINE> def named_inventory(self): <NEW_LINE> <INDENT> return self.env.intersphinx_named_inventory <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.env.intersphinx_inventory.clear() <NEW_LINE> self.env.intersphinx_named_inventory.clear()
Inventory adapter for environment
6259906a26068e7796d4e12c
class ListSubscriptionsInputSet(InputSet): <NEW_LINE> <INDENT> def set_AccessToken(self, value): <NEW_LINE> <INDENT> super(ListSubscriptionsInputSet, self)._set_input('AccessToken', value) <NEW_LINE> <DEDENT> def set_AccessTokenSecret(self, value): <NEW_LINE> <INDENT> super(ListSubscriptionsInputSet, self)._set_input('AccessTokenSecret', value) <NEW_LINE> <DEDENT> def set_Application(self, value): <NEW_LINE> <INDENT> super(ListSubscriptionsInputSet, self)._set_input('Application', value) <NEW_LINE> <DEDENT> def set_ConsumerKey(self, value): <NEW_LINE> <INDENT> super(ListSubscriptionsInputSet, self)._set_input('ConsumerKey', value) <NEW_LINE> <DEDENT> def set_ConsumerSecret(self, value): <NEW_LINE> <INDENT> super(ListSubscriptionsInputSet, self)._set_input('ConsumerSecret', value) <NEW_LINE> <DEDENT> def set_UserID(self, value): <NEW_LINE> <INDENT> super(ListSubscriptionsInputSet, self)._set_input('UserID', value)
An InputSet with methods appropriate for specifying the inputs to the ListSubscriptions Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
6259906ab7558d5895464b2a
class AbstractRenderer(object): <NEW_LINE> <INDENT> def __init__(self, widget, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def draw(self): <NEW_LINE> <INDENT> pass
AbstractRenderer is the standard interface for renderers. Each renderer have to implement an initialization function __init__ and a draw method to do the actual drawing using OpenGL or by using other, more basic, renderers. Usually the renderers have also some custom functions that they use to update themselves. For example a SphereRenderer implements the function update_positions to move the spheres around without having to regenerate all of the other properties. .. seealso:: :doc:`/graphics` for a tutorial on how to develop a simple renderer. **Parameters** widget: :py:class:`chemlab.graphics.QChemlabWidget` The parent `QChemlabWidget`. Renderers can use the widget to access the camera, lights, and other informations. args, kwargs: Any other argument that they may use.
6259906afff4ab517ebcf00e
class Action(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.command = None <NEW_LINE> self.parameters = None <NEW_LINE> self.type = None <NEW_LINE> self.message = None <NEW_LINE> self._parameter_start_pos = 0 <NEW_LINE> <DEDENT> def setParameterStartPos(self,pos): <NEW_LINE> <INDENT> self._parameter_start_pos = pos <NEW_LINE> <DEDENT> def getParameterStartPos(self): <NEW_LINE> <INDENT> return self._parameter_start_pos <NEW_LINE> <DEDENT> def execute(self,parameters): <NEW_LINE> <INDENT> if self.type == None: <NEW_LINE> <INDENT> return 'No action type' <NEW_LINE> <DEDENT> elif self.type.lower() == 'script': <NEW_LINE> <INDENT> if self.command == None: <NEW_LINE> <INDENT> return 'No command' <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> script_command = self.command <NEW_LINE> if self.parameters != None and type(self.parameters) == str: <NEW_LINE> <INDENT> script_command = '%s %s'%(script_command,self.parameters) <NEW_LINE> if script_command.find('%s') > -1 and len(parameters) > 0: <NEW_LINE> <INDENT> script_command = script_command % tuple(parameters[0:script_command.count('%s')]) <NEW_LINE> <DEDENT> <DEDENT> if self.message != None: <NEW_LINE> <INDENT> if self.message.count('%s') > 0 and parameters != None and len(parameters) > 0: <NEW_LINE> <INDENT> syslog.syslog(syslog.LOG_NOTICE,self.message % tuple(parameters[0:self.message.count('%s')]) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> syslog.syslog(syslog.LOG_NOTICE,self.message) <NEW_LINE> <DEDENT> <DEDENT> exit_status = subprocess.call(script_command, shell=True) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> syslog.syslog(syslog.LOG_ERR, 'Script action failed at %s'%traceback.format_exc()) <NEW_LINE> return 'Execute error' <NEW_LINE> <DEDENT> if exit_status == 0 : <NEW_LINE> <INDENT> return 'OK' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'Error (%d)'%exit_status <NEW_LINE> <DEDENT> <DEDENT> elif self.type.lower() == 'inline': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if len(parameters) > 0: <NEW_LINE> <INDENT> inline_act_parameters = self.parameters % tuple(parameters) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> inline_act_parameters = '' <NEW_LINE> <DEDENT> if self.message != None: <NEW_LINE> <INDENT> if self.message.count('%s') > 0 and parameters != None and len(parameters) > 0: <NEW_LINE> <INDENT> syslog.syslog(syslog.LOG_NOTICE,self.message % tuple(parameters[0:self.message.count('%s')]) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> syslog.syslog(syslog.LOG_NOTICE,self.message) <NEW_LINE> <DEDENT> <DEDENT> return ph_inline_actions.execute(self,inline_act_parameters) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> syslog.syslog(syslog.LOG_ERR, 'Inline action failed at %s'%traceback.format_exc()) <NEW_LINE> return 'Execute error' <NEW_LINE> <DEDENT> <DEDENT> return 'Unknown action type'
Action class, handles actual (system) calls. Set command, parameters (template), type and log message
6259906ad6c5a102081e391c
class NodejsNpmPackAction(BaseAction): <NEW_LINE> <INDENT> NAME = "NpmPack" <NEW_LINE> DESCRIPTION = "Packaging source using NPM" <NEW_LINE> PURPOSE = Purpose.COPY_SOURCE <NEW_LINE> def __init__(self, artifacts_dir, scratch_dir, manifest_path, osutils, subprocess_npm): <NEW_LINE> <INDENT> super(NodejsNpmPackAction, self).__init__() <NEW_LINE> self.artifacts_dir = artifacts_dir <NEW_LINE> self.manifest_path = manifest_path <NEW_LINE> self.scratch_dir = scratch_dir <NEW_LINE> self.osutils = osutils <NEW_LINE> self.subprocess_npm = subprocess_npm <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> package_path = "file:{}".format(self.osutils.abspath(self.osutils.dirname(self.manifest_path))) <NEW_LINE> LOG.debug("NODEJS packaging %s to %s", package_path, self.scratch_dir) <NEW_LINE> tarfile_name = self.subprocess_npm.run(["pack", "-q", package_path], cwd=self.scratch_dir).splitlines()[-1] <NEW_LINE> LOG.debug("NODEJS packed to %s", tarfile_name) <NEW_LINE> tarfile_path = self.osutils.joinpath(self.scratch_dir, tarfile_name) <NEW_LINE> LOG.debug("NODEJS extracting to %s", self.artifacts_dir) <NEW_LINE> self.osutils.extract_tarfile(tarfile_path, self.artifacts_dir) <NEW_LINE> <DEDENT> except NpmExecutionError as ex: <NEW_LINE> <INDENT> raise ActionFailedError(str(ex))
A Lambda Builder Action that packages a Node.js package using NPM to extract the source and remove test resources
6259906a44b2445a339b7559
class Trip(ApiObject): <NEW_LINE> <INDENT> _class_keys = { 'user': User, 'offer': Offer, 'demand': Demand, } <NEW_LINE> clean_date = staticmethod(api_to_date) <NEW_LINE> clean_time = staticmethod(api_to_time) <NEW_LINE> clean_creation_date = staticmethod(api_to_datetime) <NEW_LINE> clean_modification_date = staticmethod(api_to_datetime) <NEW_LINE> @property <NEW_LINE> def trip_type(self): <NEW_LINE> <INDENT> if getattr(self, 'offer', False) and getattr(self, 'demand', False): <NEW_LINE> <INDENT> return TRIP_BOTH <NEW_LINE> <DEDENT> elif getattr(self, 'offer', False): <NEW_LINE> <INDENT> return TRIP_OFFER <NEW_LINE> <DEDENT> elif getattr(self, 'demand', False): <NEW_LINE> <INDENT> return TRIP_DEMAND <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def trip_type_name(self): <NEW_LINE> <INDENT> if self.trip_type == 0: <NEW_LINE> <INDENT> return 'Offer' <NEW_LINE> <DEDENT> elif self.trip_type == 1: <NEW_LINE> <INDENT> return 'Demand' <NEW_LINE> <DEDENT> elif self.trip_type == 2: <NEW_LINE> <INDENT> return 'Both' <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def print_dows(self): <NEW_LINE> <INDENT> if hasattr(self, 'dows'): <NEW_LINE> <INDENT> return u'-'.join([value for (key, value) in DOWS if key in self.dows]) <NEW_LINE> return "dows" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u"%s - %s" % (self.departure_city, self.arrival_city)
Represents a python Trip object, to be manipulated by python views
6259906a460517430c432c4f
class DecodeError(BaseError): <NEW_LINE> <INDENT> pass
Raised if there is an error in decoding an AMF data stream.
6259906a56ac1b37e63038dc
class RdTrkr(object): <NEW_LINE> <INDENT> def __init__(self, pngpath): <NEW_LINE> <INDENT> self.pngpath = pngpath <NEW_LINE> im2 = Image.open(self.pngpath) <NEW_LINE> _lg.info("File: "+os.path.basename(self.pngpath)+" has metadata: "+str(im2.info))
Class to add some image metadata to. Parameters ---------- pngpath: metadata: Returns ------- Notes ------- Example -------- >>> import imgtrkr as it >>> it.RdTrkr('/home/nfs/z3457920/hdrive/repos/test.png')
6259906a0a50d4780f7069ba
class WordpieceTokenizer(object): <NEW_LINE> <INDENT> def __init__(self, vocab, unk_token="'--OOV--'", max_input_chars_per_word=100): <NEW_LINE> <INDENT> self.vocab = vocab <NEW_LINE> self.unk_token = unk_token <NEW_LINE> self.max_input_chars_per_word = max_input_chars_per_word <NEW_LINE> <DEDENT> def tokenize(self, text): <NEW_LINE> <INDENT> text = convert_to_unicode(text) <NEW_LINE> output_tokens = [] <NEW_LINE> for token in whitespace_tokenize(text): <NEW_LINE> <INDENT> chars = list(token) <NEW_LINE> if len(chars) > self.max_input_chars_per_word: <NEW_LINE> <INDENT> output_tokens.append(text) <NEW_LINE> continue <NEW_LINE> <DEDENT> is_bad = False <NEW_LINE> start = 0 <NEW_LINE> sub_tokens = [] <NEW_LINE> while start < len(chars): <NEW_LINE> <INDENT> end = len(chars) <NEW_LINE> cur_substr = None <NEW_LINE> while start < end: <NEW_LINE> <INDENT> substr = "".join(chars[start:end]) <NEW_LINE> if substr in self.vocab: <NEW_LINE> <INDENT> cur_substr = substr <NEW_LINE> break <NEW_LINE> <DEDENT> end -= 1 <NEW_LINE> <DEDENT> if cur_substr is None: <NEW_LINE> <INDENT> is_bad = True <NEW_LINE> break <NEW_LINE> <DEDENT> sub_tokens.append(cur_substr) <NEW_LINE> start = end <NEW_LINE> <DEDENT> if is_bad: <NEW_LINE> <INDENT> output_tokens.append(text) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output_tokens.extend(sub_tokens) <NEW_LINE> <DEDENT> <DEDENT> return output_tokens
Runs WordPiece tokenization.
6259906a76e4537e8c3f0d77
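The WordpieceTokenizer class above does greedy longest-match-first subword splitting; below is a stripped-down sketch of the same loop on a toy vocabulary. The vocab and inputs are illustrative assumptions, and like the simplified code above (and unlike BERT's reference tokenizer) it uses no "##" continuation prefix and falls back to the whole token when nothing matches:

def wordpiece(token, vocab):
    # Greedy longest-match-first: repeatedly peel off the longest prefix present in the vocab.
    pieces, start = [], 0
    while start < len(token):
        end = len(token)
        while start < end and token[start:end] not in vocab:
            end -= 1
        if start == end:            # nothing matched: fall back to the whole token, as above
            return [token]
        pieces.append(token[start:end])
        start = end
    return pieces

vocab = {"un", "aff", "able"}
print(wordpiece("unaffable", vocab))   # ['un', 'aff', 'able']
print(wordpiece("xyz", vocab))         # ['xyz']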
class TestFlagValid(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.flag = basic.flag() <NEW_LINE> <DEDENT> def test_true(self): <NEW_LINE> <INDENT> result = self.flag.parseString('Y') <NEW_LINE> self.assertEqual('Y', result[0]) <NEW_LINE> <DEDENT> def test_false(self): <NEW_LINE> <INDENT> result = self.flag.parseString('N') <NEW_LINE> self.assertEqual('N', result[0]) <NEW_LINE> <DEDENT> def test_unknown(self): <NEW_LINE> <INDENT> result = self.flag.parseString('U') <NEW_LINE> self.assertEqual('U', result[0])
Tests that the flag field accepts and parses valid values.
6259906aaad79263cf42ffaa
class StudyFilter(filters.FilterSet): <NEW_LINE> <INDENT> description = filters.LookupChoiceFilter( lookup_choices=[ ("contains", "Contains (case-sensitive)"), ("icontains", "Contains (case-insensitive)"), ("exact", "Exact"), ] ) <NEW_LINE> created_after_date = filters.DateFilter("date", lookup_expr="gte") <NEW_LINE> created_before_date = filters.DateFilter("date", lookup_expr="lte") <NEW_LINE> created_after_time = filters.DateFilter("time", lookup_expr="gte") <NEW_LINE> created_before_time = filters.DateFilter("time", lookup_expr="lte") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Study <NEW_LINE> fields = ( "id", "uid", "description", "created_after_date", "created_before_date", "created_after_time", "created_before_time", )
Provides filtering functionality for the :class:`~django_dicom.views.study.StudyViewSet`. Available filters are: * *id*: Primary key * *uid*: Study instance UID * *description*: Study description (contains, icontains, or exact) * *created_after_date*: Create after date * *created_before_date*: Create before date * *created_after_time*: Create after time * *created_before_time*: Create before time
6259906a38b623060ffaa44c
class ArchiveError(MBSError): <NEW_LINE> <INDENT> def __init__(self, return_code=None, last_log_line=None): <NEW_LINE> <INDENT> self._return_code = return_code <NEW_LINE> self._last_log_line = last_log_line <NEW_LINE> msg = "Failed to zip and compress your backup" <NEW_LINE> details = "Failed to tar. Tar command returned a non-zero exit status %s" % return_code <NEW_LINE> super(ArchiveError, self).__init__(msg=msg, details=details) <NEW_LINE> <DEDENT> @property <NEW_LINE> def return_code(self): <NEW_LINE> <INDENT> return self._return_code <NEW_LINE> <DEDENT> @return_code.setter <NEW_LINE> def return_code(self, val): <NEW_LINE> <INDENT> self._return_code = val <NEW_LINE> <DEDENT> @property <NEW_LINE> def last_log_line(self): <NEW_LINE> <INDENT> return self._last_log_line <NEW_LINE> <DEDENT> @last_log_line.setter <NEW_LINE> def last_log_line(self, val): <NEW_LINE> <INDENT> self._last_log_line = val <NEW_LINE> <DEDENT> def to_document(self, display_only=False): <NEW_LINE> <INDENT> doc = super(ArchiveError, self).to_document(display_only=display_only) <NEW_LINE> doc["returnCode"] = self.return_code <NEW_LINE> doc["lastLogLine"] = self.last_log_line <NEW_LINE> return doc
Base error for archive errors
6259906a442bda511e95d952
class NZBGetOptionsFlowHandler(OptionsFlow): <NEW_LINE> <INDENT> def __init__(self, config_entry): <NEW_LINE> <INDENT> self.config_entry = config_entry <NEW_LINE> <DEDENT> async def async_step_init(self, user_input: ConfigType | None = None): <NEW_LINE> <INDENT> if user_input is not None: <NEW_LINE> <INDENT> return self.async_create_entry(title="", data=user_input) <NEW_LINE> <DEDENT> options = { vol.Optional( CONF_SCAN_INTERVAL, default=self.config_entry.options.get( CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL ), ): int, } <NEW_LINE> return self.async_show_form(step_id="init", data_schema=vol.Schema(options))
Handle NZBGet client options.
6259906afff4ab517ebcf00f
@node(params=['key', 'caseid']) <NEW_LINE> class InputPathFromKey(Node): <NEW_LINE> <INDENT> def output(self): <NEW_LINE> <INDENT> return _lookupInputKey(self.key, self.caseid) <NEW_LINE> <DEDENT> def show(self): <NEW_LINE> <INDENT> return self.output()
An input path found by looking up its key in INPUT_KEYS in pnlpipe_config.py and substituting its caseid.
6259906a91f36d47f2231a89
class DownloadThread(threading.Thread): <NEW_LINE> <INDENT> _counter = 0 <NEW_LINE> when_all_finished = threading.Event() <NEW_LINE> def __init__(self, url, update_callback): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> DownloadThread._counter += 1 <NEW_LINE> self.url = url <NEW_LINE> self.name = md5(url.encode()).hexdigest() <NEW_LINE> self.filename = unquote(self.url).split('/')[-1] <NEW_LINE> self._percentage = 0 <NEW_LINE> self._update = update_callback <NEW_LINE> <DEDENT> def _download(self, response, count): <NEW_LINE> <INDENT> start_time = datetime.now() <NEW_LINE> while response.length: <NEW_LINE> <INDENT> yield response.read(count) <NEW_LINE> <DEDENT> self.timedelta = datetime.now() - start_time <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> u = req.urlopen(self.url) <NEW_LINE> count = 4096 <NEW_LINE> self.file_size = u.length <NEW_LINE> downloaded = 0 <NEW_LINE> with open(path.join('downloads', self.filename), 'wb') as output: <NEW_LINE> <INDENT> for frame in self._download(u, count): <NEW_LINE> <INDENT> downloaded += count <NEW_LINE> output.write(frame) <NEW_LINE> self._update(downloaded / self.file_size) <NEW_LINE> <DEDENT> self._update(1) <NEW_LINE> <DEDENT> DownloadThread._counter -= 1 <NEW_LINE> if DownloadThread._counter == 0: <NEW_LINE> <INDENT> self.when_all_finished.set()
A simple progress-indicating thread class. _percentage - the current completion percentage of the operation. _update - a function called whenever a new chunk of the file has been downloaded. _finish - a function called after the file has been downloaded.
6259906a009cb60464d02d2e
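A small usage sketch for the DownloadThread class above; the URL is only an example, and run() as written expects a local downloads/ directory to exist:

import os

os.makedirs("downloads", exist_ok=True)        # run() writes into downloads/<filename>

def show_progress(fraction):
    print(f"\rprogress: {fraction:6.1%}", end="", flush=True)

t = DownloadThread("https://example.com/archive.zip", show_progress)   # example URL
t.start()
DownloadThread.when_all_finished.wait()        # set once the last active download finishes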
class StandardCore(Core): <NEW_LINE> <INDENT> def __init__(self, sxc_root): <NEW_LINE> <INDENT> self.root = sxc_root <NEW_LINE> self.__output = StandardOutput() <NEW_LINE> self.__aggregators = None <NEW_LINE> self.__actuators = None <NEW_LINE> self.__utils = StandardUtils(self.__output) <NEW_LINE> self.__source_dir = os.getcwd() <NEW_LINE> <DEDENT> def get_output(self): <NEW_LINE> <INDENT> return self.__output <NEW_LINE> <DEDENT> def __build_plugin_list(self, plugin_type, plugin_factory): <NEW_LINE> <INDENT> plugins = [] <NEW_LINE> type_dir = os.path.join(self.root, plugin_type) <NEW_LINE> for dir in os.listdir(type_dir): <NEW_LINE> <INDENT> full_name = os.path.join(type_dir, dir) <NEW_LINE> plugins.append(plugin_factory(full_name)) <NEW_LINE> <DEDENT> return plugins <NEW_LINE> <DEDENT> def get_aggregators(self): <NEW_LINE> <INDENT> if self.__aggregators is None: <NEW_LINE> <INDENT> self.__aggregators = self.__build_plugin_list( 'aggregators', agg.AggregatorExtension) <NEW_LINE> <DEDENT> return self.__aggregators <NEW_LINE> <DEDENT> def get_actuators(self): <NEW_LINE> <INDENT> if self.__actuators is None: <NEW_LINE> <INDENT> self.__actuators = self.__build_plugin_list( 'actuators', acc.ActuatorExtension) <NEW_LINE> <DEDENT> return self.__actuators <NEW_LINE> <DEDENT> def get_actuator(self, name): <NEW_LINE> <INDENT> all = self.get_actuators() <NEW_LINE> for actuator in all: <NEW_LINE> <INDENT> if actuator.name == name: <NEW_LINE> <INDENT> return actuator <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def get_ordered_aggregators(self): <NEW_LINE> <INDENT> return self.get_aggregators() <NEW_LINE> <DEDENT> def get_utils(self): <NEW_LINE> <INDENT> return self.__utils <NEW_LINE> <DEDENT> def get_source_directory(self): <NEW_LINE> <INDENT> return self.__source_dir
Standard implementation of Core.
6259906a4527f215b58eb59a
class AbstractHistory(models.Model): <NEW_LINE> <INDENT> ACTIONS = ( ("Create", "Create"), ("Delete", "Delete"), ("Modify", "Modify"), ("Revise", "Revise"), ("Promote", "Promote"), ("Demote", "Demote"), ("Cancel", "Cancel"), ("Publish", "Publish"), ("Unpublish", "Unpublish"), ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> action = models.CharField(max_length=50, choices=ACTIONS) <NEW_LINE> details = models.TextField() <NEW_LINE> date = models.DateTimeField(auto_now=True) <NEW_LINE> user = models.ForeignKey(User, related_name="%(class)s_user") <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "History<%s, %s, %s>" % (self.plmobject, self.date, self.action) <NEW_LINE> <DEDENT> def get_day_as_int(self): <NEW_LINE> <INDENT> return self.date.year * 10000 + self.date.month * 100 + self.date.day <NEW_LINE> <DEDENT> def get_day(self): <NEW_LINE> <INDENT> return datetime.date(self.date.year, self.date.month, self.date.day) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def timeline_items(cls, user): <NEW_LINE> <INDENT> return _prefetch_related(cls.objects.all().order_by("-date")) <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> return self.plmobject.title
History model. This model records all events related to :class:`.PLMObject` :model attributes: .. attribute:: plmobject :class:`.PLMObject` of the event .. attribute:: action type of action (see :attr:`.ACTIONS`) .. attribute:: details type of action (see :attr:`.ACTIONS`) .. attribute:: date date of the event .. attribute:: user :class:`~django.contrib.auth.models.User` who made the event :class attribute:
6259906a55399d3f05627d16
class HotelTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.first_hotel = hotel.Hotel( property_name='Party', property_type='HOTEL', local='MIAMI', star_rating=3, week_price=120, weekend_price=100, loyalty_week_price=90, loyalty_weekend_price=90 ) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(self.first_hotel) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_return_hotel_name(self): <NEW_LINE> <INDENT> expected_value = 'Party' <NEW_LINE> self.assertEqual(self.first_hotel.property_name, expected_value) <NEW_LINE> <DEDENT> def test_return_hotel_type(self): <NEW_LINE> <INDENT> expected_value = 'HOTEL' <NEW_LINE> self.assertEqual(self.first_hotel.property_type, expected_value) <NEW_LINE> <DEDENT> def test_return_hotel_local(self): <NEW_LINE> <INDENT> expected_value = 'MIAMI' <NEW_LINE> self.assertEqual(self.first_hotel.property_local, expected_value) <NEW_LINE> <DEDENT> def test_return_hotel_star_rating(self): <NEW_LINE> <INDENT> expected_value = 3 <NEW_LINE> self.assertEqual(self.first_hotel.property_star_rating, expected_value) <NEW_LINE> <DEDENT> def test_return_hotel_week_price(self): <NEW_LINE> <INDENT> expected_value = 120 <NEW_LINE> self.assertEqual(self.first_hotel.week_price, expected_value) <NEW_LINE> <DEDENT> def test_return_hotel_weekend_price(self): <NEW_LINE> <INDENT> expected_value = 100 <NEW_LINE> self.assertEqual(self.first_hotel.weekend_price, expected_value) <NEW_LINE> <DEDENT> def test_return_hotel_loyalty_week_price(self): <NEW_LINE> <INDENT> expected_value = 90 <NEW_LINE> self.assertEqual(self.first_hotel.loyalty_week_price, expected_value) <NEW_LINE> <DEDENT> def test_return_hotel_loyalty_weekend_price(self): <NEW_LINE> <INDENT> expected_value = 90 <NEW_LINE> self.assertEqual(self.first_hotel.loyalty_weekend_price, expected_value)
Tests for the ``Hotel`` class.
6259906a091ae35668706427
class TestUpgrade(test_util.TensorFlowTestCase): <NEW_LINE> <INDENT> def _upgrade(self, old_file_text): <NEW_LINE> <INDENT> in_file = six.StringIO(old_file_text) <NEW_LINE> out_file = six.StringIO() <NEW_LINE> upgrader = tf_upgrade.TensorFlowCodeUpgrader() <NEW_LINE> count, report, errors = ( upgrader.process_opened_file("test.py", in_file, "test_out.py", out_file)) <NEW_LINE> return count, report, errors, out_file.getvalue() <NEW_LINE> <DEDENT> def testParseError(self): <NEW_LINE> <INDENT> _, report, unused_errors, unused_new_text = self._upgrade( "import tensorflow as tf\na + \n") <NEW_LINE> self.assertTrue(report.find("Failed to parse") != -1) <NEW_LINE> <DEDENT> def testReport(self): <NEW_LINE> <INDENT> text = "tf.mul(a, b)\n" <NEW_LINE> _, report, unused_errors, unused_new_text = self._upgrade(text) <NEW_LINE> self.assertTrue(report.find("Renamed function `tf.mul` to `tf.multiply`")) <NEW_LINE> <DEDENT> def testRename(self): <NEW_LINE> <INDENT> text = "tf.mul(a, tf.sub(b, c))\n" <NEW_LINE> _, unused_report, unused_errors, new_text = self._upgrade(text) <NEW_LINE> self.assertEqual(new_text, "tf.multiply(a, tf.subtract(b, c))\n") <NEW_LINE> <DEDENT> def testReorder(self): <NEW_LINE> <INDENT> text = "tf.concat(a, b)\ntf.split(a, b, c)\n" <NEW_LINE> _, unused_report, unused_errors, new_text = self._upgrade(text) <NEW_LINE> self.assertEqual(new_text, "tf.concat(concat_dim=a, values=b)\n" "tf.split(axis=a, num_or_size_splits=b, value=c)\n") <NEW_LINE> <DEDENT> def testKeyword(self): <NEW_LINE> <INDENT> text = "tf.reduce_any(a, reduction_indices=[1, 2])\n" <NEW_LINE> _, unused_report, unused_errors, new_text = self._upgrade(text) <NEW_LINE> self.assertEqual(new_text, "tf.reduce_any(a, axis=[1, 2])\n") <NEW_LINE> <DEDENT> def testComplexExpression(self): <NEW_LINE> <INDENT> text = "(foo + bar)[a].word()" <NEW_LINE> _ = self._upgrade(text) <NEW_LINE> <DEDENT> def testReverse(self): <NEW_LINE> <INDENT> text = "tf.reverse(a, b)\n" <NEW_LINE> _, unused_report, errors, new_text = self._upgrade(text) <NEW_LINE> self.assertEqual(new_text, new_text) <NEW_LINE> self.assertEqual(errors, ["test.py:1: tf.reverse requires manual check."])
Test various APIs that have been changed in 1.0. We also test whether a converted file is executable. test_file_v0_11.py aims to exhaustively test that API changes are convertible and actually work when run with current TensorFlow.
6259906a16aa5153ce401cce
class CompleteUploadRequest(object): <NEW_LINE> <INDENT> deserialized_types = { 'part_e_tags': 'list[ask_smapi_model.v0.catalog.upload.pre_signed_url_item.PreSignedUrlItem]' } <NEW_LINE> attribute_map = { 'part_e_tags': 'partETags' } <NEW_LINE> supports_multiple_types = False <NEW_LINE> def __init__(self, part_e_tags=None): <NEW_LINE> <INDENT> self.__discriminator_value = None <NEW_LINE> self.part_e_tags = part_e_tags <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.deserialized_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) <NEW_LINE> <DEDENT> elif isinstance(value, Enum): <NEW_LINE> <INDENT> result[attr] = value.value <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, CompleteUploadRequest): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
:param part_e_tags: List of (eTag, part number) pairs for each part of the file uploaded. :type part_e_tags: (optional) list[ask_smapi_model.v0.catalog.upload.pre_signed_url_item.PreSignedUrlItem]
6259906a7d847024c075dbcf
class JobDetail(mixins.RetrieveModelMixin, generics.GenericAPIView): <NEW_LINE> <INDENT> queryset = Job.objects.all() <NEW_LINE> lookup_field = 'name' <NEW_LINE> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.request.method == 'GET': <NEW_LINE> <INDENT> return JobDetailSerializer <NEW_LINE> <DEDENT> <DEDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return self.retrieve(request, *args, **kwargs)
API endpoint to return the steps and configuration for the jobs
6259906ad486a94d0ba2d7b3
class PkgManager(AbstractPkgManager): <NEW_LINE> <INDENT> CMDPREFIX_DETECT = 'pkg search -l' <NEW_LINE> CMDPREFIX_UPDATE = 'pkg refresh' <NEW_LINE> CMDPREFIX_INSTALL = 'pkg install' <NEW_LINE> CMDPREFIX_REMOVE = 'pkg uninstall' <NEW_LINE> CMDPREFIX_ADDREPO = 'pkg set-publisher -O' <NEW_LINE> SOURCELISTS_DIR = '' <NEW_LINE> SOURCELISTS_CFG = '/var/pkg/cfg_cache' <NEW_LINE> def __init__(self): self.update_sources = self.update_sources_by_cmd
Image Packaging System Manager (OpenSolaris)
6259906a01c39578d7f1432f
class InstallGOScanners: <NEW_LINE> <INDENT> def install_scanner(self, scannerName): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> scanner_installation_message = subprocess.check_output([GO_CMD, "get", scanner_install_instructions[scannerName]]) <NEW_LINE> print(scanner_installation_message.decode("utf-8")) <NEW_LINE> print("\nINFO: {0} installed successfully!".format(scannerName)) <NEW_LINE> return 1 <NEW_LINE> <DEDENT> except subprocess.CalledProcessError as err: <NEW_LINE> <INDENT> print("\n\nERROR: {0} installation failed with error {1}".format(scannerName, str(err))) <NEW_LINE> return 0
Install the scanners as needed
6259906add821e528d6da57b
class RevokeResponse: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swaggerTypes = { 'result': 'RevokeResult', 'status': 'str', 'error_message': 'str', 'composedOn': 'long' } <NEW_LINE> self.result = None <NEW_LINE> self.status = None <NEW_LINE> self.error_message = None <NEW_LINE> self.composedOn = None
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
6259906a7047854f46340baa
class NutritionOrderSupplement(backboneelement.BackboneElement): <NEW_LINE> <INDENT> resource_name = "NutritionOrderSupplement" <NEW_LINE> def __init__(self, jsondict=None): <NEW_LINE> <INDENT> self.instruction = None <NEW_LINE> self.productName = None <NEW_LINE> self.quantity = None <NEW_LINE> self.schedule = None <NEW_LINE> self.type = None <NEW_LINE> super(NutritionOrderSupplement, self).__init__(jsondict) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(NutritionOrderSupplement, self).elementProperties() <NEW_LINE> js.extend([ ("instruction", "instruction", str, False, None, False), ("productName", "productName", str, False, None, False), ("quantity", "quantity", quantity.Quantity, False, None, False), ("schedule", "schedule", timing.Timing, True, None, False), ("type", "type", codeableconcept.CodeableConcept, False, None, False), ]) <NEW_LINE> return js
Supplement components. Oral nutritional products given in order to add further nutritional value to the patient's diet.
6259906a3cc13d1c6d466f3a
class XMLPayload(StanzaPayload): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> if isinstance(data, StanzaPayload): <NEW_LINE> <INDENT> data = data.as_xml() <NEW_LINE> <DEDENT> if not isinstance(data, ElementClass): <NEW_LINE> <INDENT> raise TypeError("ElementTree.Element required") <NEW_LINE> <DEDENT> self.xml_element_name = data.tag <NEW_LINE> self.element = data <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_xml(cls, element): <NEW_LINE> <INDENT> return cls(element) <NEW_LINE> <DEDENT> def as_xml(self): <NEW_LINE> <INDENT> return self.element <NEW_LINE> <DEDENT> @property <NEW_LINE> def handler_key(self): <NEW_LINE> <INDENT> return self.xml_element_name
Transparent XML payload for stanza. This object can be used for any stanza payload. It doesn't decode the XML element, but instead keeps it in the ElementTree format. :Ivariables: - `xml_element_name`: qualified name of the wrapped element - `element`: the wrapped element :Types: - `xml_element_name`: `unicode` - `element`: :etree:`ElementTree.Element`
6259906ae5267d203ee6cfb8
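A minimal sketch of the XMLPayload class above wrapping an ElementTree element, assuming the surrounding module's ElementClass is the standard ElementTree element type; the namespace below is only an example:

from xml.etree import ElementTree as ET

element = ET.Element("{jabber:x:data}x")     # any namespaced element works
payload = XMLPayload(element)
print(payload.handler_key)                   # '{jabber:x:data}x' -- the qualified tag name
assert payload.as_xml() is element           # the element is kept as-is, not re-parsed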
class WrongAgencyName(Exception): <NEW_LINE> <INDENT> pass
Custom exception
6259906abaa26c4b54d50a9c
class Plugin(PluginInterface): <NEW_LINE> <INDENT> def get_name(self): <NEW_LINE> <INDENT> return "coverage" <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> return { 'config_filename': False // Cmdline(arg='--cov-config') // Doc('Coverage configuration file'), 'report_type': 'html' // Cmdline(arg='--cov-report') // Doc('Coverage report format'), 'report': True, 'append': False // Cmdline(on='--cov-append') // Doc('Append coverage data to existing file'), 'sources': [] // Cmdline(append='--cov') // Doc('Modules or packages for which to track coverage'), } <NEW_LINE> <DEDENT> def activate(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import coverage <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> raise RuntimeError('The coverage plugin requires the coverage package to be installed. Please run `pip install coverage` to install it') <NEW_LINE> <DEDENT> sources = slash_config.root.plugin_config.coverage.sources or None <NEW_LINE> self._cov = coverage.Coverage( data_file=_DATA_FILENAME, config_file=slash_config.root.plugin_config.coverage.config_filename, source=sources, ) <NEW_LINE> if slash_config.root.plugin_config.coverage.append: <NEW_LINE> <INDENT> self._cov.load() <NEW_LINE> <DEDENT> self._reporters = [] <NEW_LINE> for report_type_name in slash_config.root.plugin_config.coverage.report_type.split(','): <NEW_LINE> <INDENT> if report_type_name == 'html': <NEW_LINE> <INDENT> self._reporters.append(self._cov.html_report) <NEW_LINE> <DEDENT> elif report_type_name == 'xml': <NEW_LINE> <INDENT> self._reporters.append(self._cov.xml_report) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError('Unknown report type: {!r}'.format(report_type_name)) <NEW_LINE> <DEDENT> <DEDENT> self._cov.start() <NEW_LINE> <DEDENT> def session_end(self): <NEW_LINE> <INDENT> self._cov.stop() <NEW_LINE> self._cov.save() <NEW_LINE> if slash_config.root.plugin_config.coverage.report: <NEW_LINE> <INDENT> for reporter in self._reporters: <NEW_LINE> <INDENT> reporter()
Enables saving coverage information for test runs For more information see https://slash.readthedocs.org/en/master/builtin_plugins.html#coverage
6259906ad486a94d0ba2d7b4
class ZohoOAuth2(httpx.Auth): <NEW_LINE> <INDENT> SCHEME = "https" <NEW_LINE> BASE = "accounts.zoho.com" <NEW_LINE> def __init__(self, client_id, client_secret, scope, redirect): <NEW_LINE> <INDENT> self.client_id = client_id <NEW_LINE> self.client_secret = client_secret <NEW_LINE> self.scope = scope <NEW_LINE> self.redirect = redirect <NEW_LINE> self.grant_token = None <NEW_LINE> self.access_token = None <NEW_LINE> self.grant_request() <NEW_LINE> self.access_request() <NEW_LINE> <DEDENT> def auth_flow(self, request): <NEW_LINE> <INDENT> request.headers["Authorization"] = f"Zoho-oauthtoken {self.access_token}" <NEW_LINE> response = yield request <NEW_LINE> if response.status_code == 401: <NEW_LINE> <INDENT> self.grant_request() <NEW_LINE> self.access_request() <NEW_LINE> request.headers["Authorization"] = f"Zoho-oauthtoken {self.access_token}" <NEW_LINE> yield request <NEW_LINE> <DEDENT> <DEDENT> def grant_request(self): <NEW_LINE> <INDENT> path = "/oauth/v2/auth" <NEW_LINE> params = { "response_type": "code", "client_id": self.client_id, "scope": self.scope, "redirect_uri": self.redirect, } <NEW_LINE> query = urllib.parse.urlencode(params, True) <NEW_LINE> grant_access_url = urllib.parse.urlunsplit( (self.SCHEME, self.BASE, path, query, "") ) <NEW_LINE> webbrowser.open_new(grant_access_url) <NEW_LINE> httpServer = http.server.HTTPServer(("localhost", 8080), HTTPAuthHandler) <NEW_LINE> httpServer.handle_request() <NEW_LINE> if hasattr(httpServer, "grant_token"): <NEW_LINE> <INDENT> self.grant_token = httpServer.grant_token[0] <NEW_LINE> <DEDENT> <DEDENT> def access_request(self): <NEW_LINE> <INDENT> if self.grant_token is not None: <NEW_LINE> <INDENT> path = "/oauth/v2/token" <NEW_LINE> params = { "code": self.grant_token, "client_id": self.client_id, "client_secret": self.client_secret, "redirect_uri": self.redirect, "grant_type": "authorization_code", } <NEW_LINE> response = httpx.post(f"{self.SCHEME}://{self.BASE}{path}", params=params) <NEW_LINE> response_attr = response.json() <NEW_LINE> self.access_token = response_attr.get("access_token", None)
HTTPX custom authorization class to preform the OAuth 2.0 implementation on the Zoho API. See all OAuth API docs at: https://www.zoho.com/accounts/protocol/oauth.html
6259906a627d3e7fe0e0867e
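A hedged usage sketch for the ZohoOAuth2 class above: constructing the object runs the browser-based grant flow (opening a browser and a local server on port 8080), and httpx then attaches the token on every request; the credentials and endpoint below are placeholders, not real values:

import httpx

auth = ZohoOAuth2(
    client_id="1000.PLACEHOLDER",            # hypothetical credentials
    client_secret="placeholder-secret",
    scope="ZohoCRM.modules.ALL",
    redirect="http://localhost:8080",
)
with httpx.Client(auth=auth) as client:
    resp = client.get("https://www.zohoapis.com/crm/v2/Leads")   # example endpoint
    print(resp.status_code)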
class StateError(GXIntEnum): <NEW_LINE> <INDENT> SERVICE_NOT_ALLOWED = 1 <NEW_LINE> SERVICE_UNKNOWN = 2
DLMS state errors.
6259906a76e4537e8c3f0d79