code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
@implementer(interfaces.IMulticastTransport) <NEW_LINE> class MulticastPort(MulticastMixin, Port): <NEW_LINE> <INDENT> def __init__(self, port, proto, interface='', maxPacketSize=8192, reactor=None, listenMultiple=False): <NEW_LINE> <INDENT> Port.__init__(self, port, proto, interface, maxPacketSize, reactor) <NEW_LINE> self.listenMultiple = listenMultiple <NEW_LINE> <DEDENT> def createSocket(self): <NEW_LINE> <INDENT> skt = Port.createSocket(self) <NEW_LINE> if self.listenMultiple: <NEW_LINE> <INDENT> skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) <NEW_LINE> if hasattr(socket, "SO_REUSEPORT"): <NEW_LINE> <INDENT> skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) <NEW_LINE> <DEDENT> <DEDENT> return skt | UDP Port that supports multicasting. | 62599043507cdc57c63a6089 |
class EngineClient(base.Engine): <NEW_LINE> <INDENT> def __init__(self, transport): <NEW_LINE> <INDENT> serializer = auth_ctx.RpcContextSerializer( auth_ctx.JsonPayloadSerializer()) <NEW_LINE> self._client = messaging.RPCClient( transport, messaging.Target(topic=cfg.CONF.engine.topic), serializer=serializer ) <NEW_LINE> <DEDENT> def start_workflow(self, workflow_name, workflow_input, **params): <NEW_LINE> <INDENT> return self._client.call( auth_ctx.ctx(), 'start_workflow', workflow_name=workflow_name, workflow_input=workflow_input or {}, params=params ) <NEW_LINE> <DEDENT> def on_task_result(self, task_id, result): <NEW_LINE> <INDENT> return self._client.call( auth_ctx.ctx(), 'on_task_result', task_id=task_id, result_data=result.data, result_error=result.error ) <NEW_LINE> <DEDENT> def run_task(self, task_id): <NEW_LINE> <INDENT> return self._client.call(auth_ctx.ctx(), 'run_task', task_id=task_id) <NEW_LINE> <DEDENT> def pause_workflow(self, execution_id): <NEW_LINE> <INDENT> return self._client.call( auth_ctx.ctx(), 'pause_workflow', execution_id=execution_id ) <NEW_LINE> <DEDENT> def resume_workflow(self, execution_id): <NEW_LINE> <INDENT> return self._client.call( auth_ctx.ctx(), 'resume_workflow', execution_id=execution_id ) <NEW_LINE> <DEDENT> def stop_workflow(self, execution_id, state, message=None): <NEW_LINE> <INDENT> return self._client.call( auth_ctx.ctx(), 'stop_workflow', execution_id=execution_id, state=state, message=message ) <NEW_LINE> <DEDENT> def rollback_workflow(self, execution_id): <NEW_LINE> <INDENT> return self._client.call( auth_ctx.ctx(), 'rollback_workflow', execution_id=execution_id ) | RPC Engine client. | 625990436e29344779b0193e |
class itkTernaryAddImageFilterIF2IF2IF2IF2(itkTernaryAddImageFilterIF2IF2IF2IF2_Superclass): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> __swig_destroy__ = _itkTernaryAddImageFilterPython.delete_itkTernaryAddImageFilterIF2IF2IF2IF2 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkTernaryAddImageFilterPython.itkTernaryAddImageFilterIF2IF2IF2IF2_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkTernaryAddImageFilterIF2IF2IF2IF2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New) | Proxy of C++ itkTernaryAddImageFilterIF2IF2IF2IF2 class | 625990431d351010ab8f4e0c |
class NormalDistribution(TweedieDistribution): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(power=0) | Class for the Normal (aka Gaussian) distribution | 625990438e71fb1e983bcdbc |
class SBArduinoSerialCommentResponse(SBArduinoSerial): <NEW_LINE> <INDENT> def respond_to_write(self, data: bytes) -> None: <NEW_LINE> <INDENT> self.append_received_data(b"# Comment", newline=True) <NEW_LINE> self.append_received_data(b"+ OK", newline=True) | Like SBArduinoSerial, but returns a failure response rather than success. | 6259904307d97122c4217f8d |
class ConnectionMonitorListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[ConnectionMonitorResult]'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["ConnectionMonitorResult"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ConnectionMonitorListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value | List of connection monitors.
:param value: Information about connection monitors.
:type value: list[~azure.mgmt.network.v2020_07_01.models.ConnectionMonitorResult] | 6259904350485f2cf55dc273 |
class Resize2DImage(DetectionAugmentation): <NEW_LINE> <INDENT> def __init__(self, pResize): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.p = pResize <NEW_LINE> <DEDENT> def apply(self, input_record): <NEW_LINE> <INDENT> p = self.p <NEW_LINE> image = input_record["image"] <NEW_LINE> short = min(image.shape[:2]) <NEW_LINE> long = max(image.shape[:2]) <NEW_LINE> scale = min(p.short / short, p.long / long) <NEW_LINE> input_record["image"] = cv2.resize(image, None, None, scale, scale, interpolation=cv2.INTER_LINEAR) <NEW_LINE> h, w = image.shape[:2] <NEW_LINE> input_record["im_info"] = np.array([round(h * scale), round(w * scale), scale], dtype=np.float32) | input: image, ndarray(h, w, rgb)
gt_bbox, ndarry(n, 5)
output: image, ndarray(h', w', rgb)
im_info, tuple(h', w', scale)
gt_bbox, ndarray(n, 5) | 6259904324f1403a92686243 |
class Sink(PipelineElement): <NEW_LINE> <INDENT> def __init__(self, handler, args, kwargs): <NEW_LINE> <INDENT> super().__init__(handler, args, kwargs) <NEW_LINE> <DEDENT> def process(self, frame): <NEW_LINE> <INDENT> self.call_handler(frame) | A sink draws from a stream and stores/displays the frames. | 625990430a366e3fb87ddcd3 |
class HelloViewSet(viewsets.ViewSet): <NEW_LINE> <INDENT> serializer_class = serializers.HelloSerializer <NEW_LINE> def list(self, request): <NEW_LINE> <INDENT> a_viewset = [ 'Uses action (list, create, retrieve, update, partial_update)', 'Automatically maps to URLs using Routers', 'Provides more functionality with less code.' ] <NEW_LINE> return Response({'message': 'Hello!', 'a_viewset': a_viewset}) <NEW_LINE> <DEDENT> def create(self, request): <NEW_LINE> <INDENT> serializer = serializers.HelloSerializer(data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> name = serializer.data.get('name') <NEW_LINE> message = 'Hello {0}'.format(name) <NEW_LINE> return Response({'message': message}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> <DEDENT> def retrieve(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'http_method': 'GET'}) <NEW_LINE> <DEDENT> def update(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'http_method': 'PUT'}) <NEW_LINE> <DEDENT> def partial_update(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'http_method': 'PATCH'}) <NEW_LINE> <DEDENT> def destroy(self, request, pk=None): <NEW_LINE> <INDENT> return Response({'http_method': 'DELETE'}) | Test API ViewSet | 62599043e76e3b2f99fd9cf9 |
class WebAPIResponseFormError(WebAPIResponseError): <NEW_LINE> <INDENT> def __init__(self, request, form, *args, **kwargs): <NEW_LINE> <INDENT> fields = {} <NEW_LINE> for field in form.errors: <NEW_LINE> <INDENT> fields[field] = [force_text(e) for e in form.errors[field]] <NEW_LINE> <DEDENT> super(WebAPIResponseFormError, self).__init__( request, INVALID_FORM_DATA, { 'fields': fields }, *args, **kwargs) | An error response designed to return all errors from a form. | 62599043097d151d1a2c2357 |
class IMAPUtil(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> path = os.path.expanduser('~/.imap-utils') <NEW_LINE> self._config = ConfigParser.ConfigParser() <NEW_LINE> self._config.optionxform = str <NEW_LINE> self._config.read(path) <NEW_LINE> self._imap = None <NEW_LINE> try: <NEW_LINE> <INDENT> self._simulate = self._config.getboolean('IMAP', 'simulate') <NEW_LINE> <DEDENT> except ConfigParser.NoOptionError: <NEW_LINE> <INDENT> self._simulate = False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._verbose = self._config.getboolean('IMAP', 'verbose') <NEW_LINE> <DEDENT> except ConfigParser.NoOptionError: <NEW_LINE> <INDENT> self._verbose = False <NEW_LINE> <DEDENT> <DEDENT> def login(self): <NEW_LINE> <INDENT> host = self._config.get('IMAP', 'host') <NEW_LINE> login = self._config.get('IMAP', 'login') <NEW_LINE> password = self._config.get('IMAP', 'password') <NEW_LINE> try: <NEW_LINE> <INDENT> ssl = self._config.getboolean('IMAP', 'ssl') <NEW_LINE> <DEDENT> except ConfigParser.NoOptionError: <NEW_LINE> <INDENT> ssl = False <NEW_LINE> <DEDENT> if ssl: <NEW_LINE> <INDENT> self._imap = imaplib.IMAP4_SSL(host) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._imap = imaplib.IMAP4(host) <NEW_LINE> <DEDENT> res = self._imap.login(login, password) <NEW_LINE> if res[0] != 'OK': <NEW_LINE> <INDENT> sys.stderr.write("login: %s\n" % str(res)) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> def get_ints(self, section, name): <NEW_LINE> <INDENT> raw = self._config.get(section, name) <NEW_LINE> if raw.find(',') == -1: <NEW_LINE> <INDENT> return (int(raw), int(raw)) <NEW_LINE> <DEDENT> raw_read, raw_unread = raw.split(',') <NEW_LINE> return (int(raw_read), int(raw_unread)) <NEW_LINE> <DEDENT> def get_timestamps(self, section, name): <NEW_LINE> <INDENT> read, unread = self.get_ints(section, name) <NEW_LINE> return ( time.localtime(time.time() - (read * 86400)), time.localtime(time.time() - (unread * 86400)) ) | Wrapper class for IMAP utilities. 
It takes care about configuration
and connecting to IMAP. | 6259904307d97122c4217f8e |
class FullyPackedLoops(Parent, UniqueRepresentation): <NEW_LINE> <INDENT> def __init__(self, n): <NEW_LINE> <INDENT> self._n = n <NEW_LINE> Parent.__init__(self, category=FiniteEnumeratedSets()) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for X in SixVertexModel(self._n, boundary_conditions='ice'): <NEW_LINE> <INDENT> yield self.element_class(self, X) <NEW_LINE> <DEDENT> <DEDENT> def _repr_(self): <NEW_LINE> <INDENT> return "Fully packed loops on a %sx%s grid" % (self._n,self._n) <NEW_LINE> <DEDENT> def __contains__(self, fpl): <NEW_LINE> <INDENT> if isinstance(fpl, FullyPackedLoop): <NEW_LINE> <INDENT> return fpl._n == self._n <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def _element_constructor_(self, generator): <NEW_LINE> <INDENT> if isinstance(generator, AlternatingSignMatrix): <NEW_LINE> <INDENT> SVM = generator.to_six_vertex_model() <NEW_LINE> <DEDENT> elif isinstance(generator, SquareIceModel.Element) or isinstance(generator, SixVertexConfiguration): <NEW_LINE> <INDENT> SVM = generator <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> SVM = AlternatingSignMatrix(generator).to_six_vertex_model() <NEW_LINE> <DEDENT> except (TypeError, ValueError): <NEW_LINE> <INDENT> SVM = SixVertexModel(self._n, boundary_conditions='ice')(generator) <NEW_LINE> SVM.to_alternating_sign_matrix() <NEW_LINE> <DEDENT> <DEDENT> if len(SVM) != self._n: <NEW_LINE> <INDENT> raise ValueError("invalid size") <NEW_LINE> <DEDENT> return self.element_class(self, SVM) <NEW_LINE> <DEDENT> Element = FullyPackedLoop <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return self._n <NEW_LINE> <DEDENT> def cardinality(self): <NEW_LINE> <INDENT> return Integer(prod( [ factorial(3*k+1)/factorial(self._n+k) for k in range(self._n)] )) <NEW_LINE> <DEDENT> def _an_element_(self): <NEW_LINE> <INDENT> SVM = SixVertexModel(self._n,boundary_conditions='ice').an_element() <NEW_LINE> return self.element_class(self, SVM) | Class of all fully packed loops on an `n 
\times n` grid.
They are known to be in bijection with alternating sign matrices.
.. SEEALSO::
:class:`AlternatingSignMatrices`
INPUT:
- ``n`` -- the number of row (and column) or grid
EXAMPLES:
This will create an instance to manipulate the fully packed loops of size 3::
sage: FPLs = FullyPackedLoops(3)
sage: FPLs
Fully packed loops on a 3x3 grid
sage: FPLs.cardinality()
7
When using the square ice model, it is known that the number of
configurations is equal to the number of alternating sign matrices::
sage: M = FullyPackedLoops(1)
sage: len(M)
1
sage: M = FullyPackedLoops(4)
sage: len(M)
42
sage: all(len(SixVertexModel(n, boundary_conditions='ice'))
....: == FullyPackedLoops(n).cardinality() for n in range(1, 7))
True | 625990438e05c05ec3f6f7d2 |
class ExternalLoader(TestLoader): <NEW_LINE> <INDENT> name = 'external' <NEW_LINE> def __init__(self, args, extra_params): <NEW_LINE> <INDENT> loader_options = extra_params.pop('loader_options', None) <NEW_LINE> super(ExternalLoader, self).__init__(args, extra_params) <NEW_LINE> if loader_options == '?': <NEW_LINE> <INDENT> raise LoaderError("File loader accepts an option to set the " "external-runner executable.") <NEW_LINE> <DEDENT> self._external_runner = self._process_external_runner( args, loader_options) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _process_external_runner(args, runner): <NEW_LINE> <INDENT> chdir = getattr(args, 'external_runner_chdir', None) <NEW_LINE> test_dir = getattr(args, 'external_runner_testdir', None) <NEW_LINE> if runner: <NEW_LINE> <INDENT> external_runner_and_args = shlex.split(runner) <NEW_LINE> executable = os.path.abspath(external_runner_and_args[0]) <NEW_LINE> runner = " ".join([executable] + external_runner_and_args[1:]) <NEW_LINE> if not os.path.exists(executable): <NEW_LINE> <INDENT> msg = ('Could not find the external runner executable "%s"' % executable) <NEW_LINE> raise LoaderError(msg) <NEW_LINE> <DEDENT> if chdir == 'test': <NEW_LINE> <INDENT> if not test_dir: <NEW_LINE> <INDENT> msg = ('Option "--external-runner-chdir=test" requires ' '"--external-runner-testdir" to be set.') <NEW_LINE> raise LoaderError(msg) <NEW_LINE> <DEDENT> <DEDENT> elif test_dir: <NEW_LINE> <INDENT> msg = ('Option "--external-runner-testdir" requires ' '"--external-runner-chdir=test".') <NEW_LINE> raise LoaderError(msg) <NEW_LINE> <DEDENT> return test.ExternalRunnerSpec(runner, chdir, test_dir) <NEW_LINE> <DEDENT> elif chdir: <NEW_LINE> <INDENT> msg = ('Option "--external-runner-chdir" requires ' '"--external-runner" to be set.') <NEW_LINE> raise LoaderError(msg) <NEW_LINE> <DEDENT> elif test_dir: <NEW_LINE> <INDENT> msg = ('Option "--external-runner-testdir" requires ' '"--external-runner" to be set.') <NEW_LINE> raise LoaderError(msg) 
<NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def discover(self, reference, which_tests=DiscoverMode.DEFAULT): <NEW_LINE> <INDENT> if (not self._external_runner) or (reference is None): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return [(test.ExternalRunnerTest, {'name': reference, 'external_runner': self._external_runner, 'external_runner_argument': reference})] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_type_label_mapping(): <NEW_LINE> <INDENT> return {test.ExternalRunnerTest: 'EXTERNAL'} <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_decorator_mapping(): <NEW_LINE> <INDENT> return {test.ExternalRunnerTest: output.TERM_SUPPORT.healthy_str} | External-runner loader class | 6259904326238365f5fade48 |
class TextAnalysisTests(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.filename = "text_analysis_test_file.txt" <NEW_LINE> with open(self.filename, 'w') as f: <NEW_LINE> <INDENT> f.write("Python is a great tool") <NEW_LINE> <DEDENT> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(self.filename) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_function_runs(self): <NEW_LINE> <INDENT> analyze_text(self.filename) <NEW_LINE> <DEDENT> def test_line_count(self): <NEW_LINE> <INDENT> self.assertEqual(analyze_text(self.filename), 1) | Tests for the `analyze_text()` function | 6259904323849d37ff8523a9 |
class UserRegistrationForm(UserCreationForm): <NEW_LINE> <INDENT> password1 = forms.CharField(label="Password", widget=forms.PasswordInput) <NEW_LINE> password2 = forms.CharField( label="Password Confirmation", widget=forms.PasswordInput) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ['email', 'username', 'password1', 'password2'] <NEW_LINE> <DEDENT> def clean_emil(self): <NEW_LINE> <INDENT> email = self.cleaned_data.get('email') <NEW_LINE> username = self.cleaned_date.get('username') <NEW_LINE> if User.objects.filter(email=email).exclude(username=username): <NEW_LINE> <INDENT> raise forms.ValidateionError(u'Email address must be unique') <NEW_LINE> <DEDENT> return email <NEW_LINE> <DEDENT> def clean_password(self): <NEW_LINE> <INDENT> password1 = self.cleaned_data.get('password1') <NEW_LINE> password2 = self.cleaned_date.get('password2') <NEW_LINE> if not password1 or not password2: <NEW_LINE> <INDENT> raise ValidationError("Please confirm your password") <NEW_LINE> <DEDENT> if password1 != password2: <NEW_LINE> <INDENT> raise ValidationError("Passwords must match") <NEW_LINE> <DEDENT> return password2 | Form used to register a new user | 6259904307f4c71912bb0721 |
class Application(web.Application): <NEW_LINE> <INDENT> def __init__(self, handlers, config='settings.py', **kwargs): <NEW_LINE> <INDENT> options.config = config <NEW_LINE> settings = options.group_dict('app') <NEW_LINE> settings.update(kwargs) <NEW_LINE> super(Application, self).__init__(handlers,**settings) <NEW_LINE> <DEDENT> def start(self, port, address='127.0.0.1', **settings): <NEW_LINE> <INDENT> from tornado.httpserver import HTTPServer <NEW_LINE> server = HTTPServer(self, **settings) <NEW_LINE> server.listen(port, address=address) <NEW_LINE> ioloop.IOLoop.instance().start() | Extension for `tornado.web.Application` with config support.
| 6259904323e79379d538d7ed |
class StatusLogCombiner(): <NEW_LINE> <INDENT> def __init__(self, archiveType, issueArchiveDir): <NEW_LINE> <INDENT> self._statusLogHeader = "" <NEW_LINE> self._type = "unknown" <NEW_LINE> if archiveType == ARCHIVE_TYPE_EXTENDED: <NEW_LINE> <INDENT> issueArchivePath = Path(issueArchiveDir) <NEW_LINE> if issueArchivePath.exists(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> tmpPath = issueArchivePath / "tmp" <NEW_LINE> tmpFileList = tmpPath.glob("status_*.log") <NEW_LINE> tmpFilePath = list(tmpFileList)[0] <NEW_LINE> self._type = tmpFilePath.name.split('_')[1].split('.')[0] <NEW_LINE> statusLogAllPath = issueArchivePath / STAT_LOG_ALL_FILENAME <NEW_LINE> if statusLogAllPath.exists(): <NEW_LINE> <INDENT> statusLogAllPath.unlink() <NEW_LINE> <DEDENT> statusLogAllFile = statusLogAllPath.open(mode='a', newline='') <NEW_LINE> mcgLogPath = issueArchivePath / "usr" / "local" / "data" / "log" / "mcg_ext_log" <NEW_LINE> unpackPath = issueArchivePath / "unpackTmp" <NEW_LINE> if unpackPath.exists(): <NEW_LINE> <INDENT> shutil.rmtree(str(unpackPath), True) <NEW_LINE> <DEDENT> unpackPath.mkdir() <NEW_LINE> fileList = sorted(mcgLogPath.glob("status_*.tar.gz"), key=str, reverse=False) <NEW_LINE> firstFile = True <NEW_LINE> for file in fileList: <NEW_LINE> <INDENT> print("status %s file %s" % (self._type, str(file))) <NEW_LINE> tar = tarfile.open(str(file), 'r') <NEW_LINE> for item in tar: <NEW_LINE> <INDENT> tar.extract(item, str(unpackPath)) <NEW_LINE> <DEDENT> with (unpackPath / tmpFilePath.name).open(mode='r') as infile: <NEW_LINE> <INDENT> if firstFile: <NEW_LINE> <INDENT> firstFile = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._statusLogHeader = infile.readline()[:-1] <NEW_LINE> <DEDENT> for line in infile: <NEW_LINE> <INDENT> statusLogAllFile.write(line) <NEW_LINE> <DEDENT> infile.close() <NEW_LINE> <DEDENT> <DEDENT> with tmpFilePath.open(mode='r') as infile: <NEW_LINE> <INDENT> infile.readline() <NEW_LINE> for line in infile: <NEW_LINE> <INDENT> 
statusLogAllFile.write(line) <NEW_LINE> <DEDENT> infile.close() <NEW_LINE> <DEDENT> statusLogAllFile.close() <NEW_LINE> <DEDENT> except BaseException: <NEW_LINE> <INDENT> exc_type, exc_value, exc_traceback = sys.exc_info() <NEW_LINE> traceback.print_tb(exc_traceback) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._statusLogHeader = "TIMESTAMP;EVENT" <NEW_LINE> print("statusLogCombiner: nothing to do - is a 'normal' issue archive directory") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def getStatusLogHeader(self): <NEW_LINE> <INDENT> return self._statusLogHeader <NEW_LINE> <DEDENT> def getStatusLogType(self): <NEW_LINE> <INDENT> return self._type | classdocs | 62599043b57a9660fecd2d6b |
class TestConfig(unittest.TestCase): <NEW_LINE> <INDENT> test_dir = None <NEW_LINE> cfg_path = None <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls) -> None: <NEW_LINE> <INDENT> cls.test_dir = os.path.join(TEST_DIR, 'config') <NEW_LINE> cls.cfg_path = os.path.join(cls.test_dir, 'smd.cfg') <NEW_LINE> os.mkdir(cls.test_dir) <NEW_LINE> os.chdir(cls.test_dir) <NEW_LINE> <DEDENT> def tearDown(self) -> None: <NEW_LINE> <INDENT> if os.path.exists(self.cfg_path): <NEW_LINE> <INDENT> os.remove(self.cfg_path) <NEW_LINE> <DEDENT> <DEDENT> def test_init_getitem_and_setitem(self) -> None: <NEW_LINE> <INDENT> cfg = smd.utils.Config(self.cfg_path) <NEW_LINE> self.assertFalse(os.path.exists(self.cfg_path)) <NEW_LINE> self.assertEqual(cfg.path, self.cfg_path) <NEW_LINE> self.assertIsInstance(cfg._parser, configparser.ConfigParser) <NEW_LINE> cfg['test'] = 'true' <NEW_LINE> self.assertEqual(cfg['test'], 'true') <NEW_LINE> with self.assertRaises(KeyError): <NEW_LINE> <INDENT> cfg['bad_key'] <NEW_LINE> <DEDENT> <DEDENT> def test_exists(self) -> None: <NEW_LINE> <INDENT> cfg = smd.utils.Config(self.cfg_path) <NEW_LINE> self.assertFalse(cfg.exists()) <NEW_LINE> with open(self.cfg_path, 'w') as fd: <NEW_LINE> <INDENT> fd.write('[DEFAULT]\ntest=true\n\n') <NEW_LINE> <DEDENT> self.assertTrue(cfg.exists()) <NEW_LINE> <DEDENT> def test_load(self) -> None: <NEW_LINE> <INDENT> cfg = smd.utils.Config(self.cfg_path) <NEW_LINE> self.assertFalse(cfg.load()) <NEW_LINE> with self.assertRaises(KeyError): <NEW_LINE> <INDENT> cfg['test'] <NEW_LINE> <DEDENT> with open(self.cfg_path, 'w') as fd: <NEW_LINE> <INDENT> fd.write('[DEFAULT]\ntest=true\n\n') <NEW_LINE> <DEDENT> self.assertTrue(cfg.load()) <NEW_LINE> self.assertEqual(cfg['test'], 'true') <NEW_LINE> <DEDENT> def test_reset(self) -> None: <NEW_LINE> <INDENT> cfg = smd.utils.Config(self.cfg_path) <NEW_LINE> cfg['language'] = 'new lang' <NEW_LINE> self.assertEqual(cfg['language'], 'new lang') <NEW_LINE> old_parser = cfg._parser <NEW_LINE> 
cfg.reset() <NEW_LINE> self.assertEqual(cfg['language'], 'SYSTEM') <NEW_LINE> self.assertEqual(cfg['manga_dir'], '.') <NEW_LINE> <DEDENT> def test_save(self) -> None: <NEW_LINE> <INDENT> cfg = smd.utils.Config(self.cfg_path) <NEW_LINE> self.assertFalse(os.path.exists(self.cfg_path)) <NEW_LINE> cfg.save() <NEW_LINE> self.assertTrue(os.path.exists(self.cfg_path)) | Tests :class:`smd.utils.Config` class. | 62599043ec188e330fdf9b8a |
class GoalType(object): <NEW_LINE> <INDENT> _definitions = list() <NEW_LINE> def __init__(self, value): <NEW_LINE> <INDENT> self._value = value <NEW_LINE> GoalType._definitions.append(self) <NEW_LINE> <DEDENT> def _getValue(self): <NEW_LINE> <INDENT> return self._value | Under the current Monaco implimentation,
The possibilities have been exaustively created and
no new instances of this class should be made | 6259904373bcbd0ca4bcb57b |
class TestPrim(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.graph = Graph() <NEW_LINE> self.graph.add_edge('A', 'B', 4) <NEW_LINE> self.graph.add_edge('A', 'H', 8) <NEW_LINE> self.graph.add_edge('B', 'C', 8) <NEW_LINE> self.graph.add_edge('B', 'H', 11) <NEW_LINE> self.graph.add_edge('C', 'D', 7) <NEW_LINE> self.graph.add_edge('C', 'F', 4) <NEW_LINE> self.graph.add_edge('C', 'I', 2) <NEW_LINE> self.graph.add_edge('D', 'E', 9) <NEW_LINE> self.graph.add_edge('D', 'F', 14) <NEW_LINE> self.graph.add_edge('E', 'F', 10) <NEW_LINE> self.graph.add_edge('F', 'G', 2) <NEW_LINE> self.graph.add_edge('G', 'I', 6) <NEW_LINE> self.graph.add_edge('G', 'H', 1) <NEW_LINE> self.graph.add_edge('H', 'I', 7) <NEW_LINE> <DEDENT> def test_prim_1(self): <NEW_LINE> <INDENT> actual = prim(self.graph, 'B') <NEW_LINE> expected = {'H': 'G', 'C': 'B', 'A': 'B', 'D': 'C', 'I': 'C', 'G': 'F', 'F': 'C', 'B': None, 'E': 'D'} <NEW_LINE> self.assertEqual(actual, expected) <NEW_LINE> <DEDENT> def test_prim_2(self): <NEW_LINE> <INDENT> actual = prim(self.graph, 'A') <NEW_LINE> expected_1 = {'A': None, 'B': 'A', 'C': 'B', 'D': 'C', 'E': 'D', 'F': 'C', 'I': 'C', 'G': 'F', 'H': 'G'} <NEW_LINE> expected_2 = {'F': 'G', 'C': 'F', 'I': 'C', 'D': 'C', 'G': 'H', 'B': 'A', 'E': 'D', 'H': 'A', 'A': None} <NEW_LINE> self.assertTrue(actual == expected_1 or actual == expected_2) | "Test case taken from Cormen, Chapter 23.2, Figure 23.5 | 62599043d53ae8145f91974d |
@gin.configurable <NEW_LINE> class SimpleScene(scene_base.SceneBase): <NEW_LINE> <INDENT> def build_scene(self, pybullet_client): <NEW_LINE> <INDENT> super().build_scene(pybullet_client) <NEW_LINE> visual_shape_id = self._pybullet_client.createVisualShape( shapeType=self._pybullet_client.GEOM_PLANE) <NEW_LINE> collision_shape_id = self._pybullet_client.createCollisionShape( shapeType=self._pybullet_client.GEOM_PLANE) <NEW_LINE> ground_id = self._pybullet_client.createMultiBody( baseMass=0, baseCollisionShapeIndex=collision_shape_id, baseVisualShapeIndex=visual_shape_id) <NEW_LINE> self._pybullet_client.changeDynamics(ground_id, -1, lateralFriction=1.0) <NEW_LINE> self.add_object(ground_id, scene_base.ObjectType.GROUND) <NEW_LINE> <DEDENT> @property <NEW_LINE> def vectorized_map(self) -> Sequence[scene_base.Polygon]: <NEW_LINE> <INDENT> return [] | A scene containing only a planar floor. | 6259904307d97122c4217f8f |
class DoItUnderflowError(DoItError): <NEW_LINE> <INDENT> __slots__ = [] <NEW_LINE> def __init__(self, emsg): <NEW_LINE> <INDENT> DoItError.__init__(self, ERROR_UNDERFLOW, emsg) | Raised when arithmetic operation underflows.
| 625990430fa83653e46f61ca |
class ContentsCollection(list): <NEW_LINE> <INDENT> def __init__(self, item_type, view, *contents): <NEW_LINE> <INDENT> self.item_type = item_type <NEW_LINE> self.info = ProjectContentsView.needed_values[item_type] <NEW_LINE> self.info.collection = self <NEW_LINE> self.editable = view.editable <NEW_LINE> self.extend(contents) <NEW_LINE> <DEDENT> def __setitem__(self, i, y): <NEW_LINE> <INDENT> list.__setitem__(self, i, y) <NEW_LINE> <DEDENT> def append(self, item): <NEW_LINE> <INDENT> list.append(self, item) <NEW_LINE> <DEDENT> def extend(self, items): <NEW_LINE> <INDENT> list.extend(self, items) | each item in the list should have a .collection attribute
which references the Collection itself. doing this by overriding
methods on the Collection to stick the attribute in when
adding the item.
NOTE THAT I AM only overriding select methods so if any other
method is called the template rendering will break mysteriously! | 6259904394891a1f408ba06e |
class ToggleNumberColumnAction(gaupol.ToggleAction): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> gaupol.ToggleAction.__init__(self, "toggle_number_column") <NEW_LINE> fields = gaupol.conf.editor.visible_fields <NEW_LINE> self.props.active = gaupol.fields.NUMBER in fields <NEW_LINE> self.props.label = _("_No.") <NEW_LINE> self.props.tooltip = _('Show or hide the number column') <NEW_LINE> self.action_group = "main-unsafe" <NEW_LINE> <DEDENT> def _affirm_doable(self, application, page): <NEW_LINE> <INDENT> aeidon.util.affirm(page is not None) | Show or hide the number column. | 6259904310dbd63aa1c71ec9 |
class NinjaAnt(Ant): <NEW_LINE> <INDENT> name = 'Ninja' <NEW_LINE> damage = 1 <NEW_LINE> food_cost = 5 <NEW_LINE> blocks_path = False <NEW_LINE> implemented = False <NEW_LINE> def action(self, colony): <NEW_LINE> <INDENT> Insect_place = self.place <NEW_LINE> bees_in_place = Insect_place.bees[:] <NEW_LINE> for bee in bees_in_place: <NEW_LINE> <INDENT> bee.reduce_armor(self.damage) | NinjaAnt does not block the path and damages all bees in its place. | 625990436fece00bbacccca3 |
class Driver(base.DriverBase): <NEW_LINE> <INDENT> def __init__(self, conf, storage): <NEW_LINE> <INDENT> super(Driver, self).__init__(conf) <NEW_LINE> self._storage = storage <NEW_LINE> <DEDENT> @decorators.lazy_property(write=False) <NEW_LINE> def queue_controller(self): <NEW_LINE> <INDENT> stages = _get_storage_pipeline('queue', self.conf) <NEW_LINE> stages.append(self._storage.queue_controller) <NEW_LINE> return stages <NEW_LINE> <DEDENT> @decorators.lazy_property(write=False) <NEW_LINE> def message_controller(self): <NEW_LINE> <INDENT> stages = _get_storage_pipeline('message', self.conf) <NEW_LINE> stages.append(self._storage.message_controller) <NEW_LINE> return stages <NEW_LINE> <DEDENT> @decorators.lazy_property(write=False) <NEW_LINE> def claim_controller(self): <NEW_LINE> <INDENT> stages = _get_storage_pipeline('claim', self.conf) <NEW_LINE> stages.append(self._storage.claim_controller) <NEW_LINE> return stages | Meta-driver for injecting pipelines in front of controllers.
:param storage_conf: For real drivers, this would be used to
configure the storage, but in this case it is simply ignored.
:param conf: Configuration from which to load pipeline settings
:param storage: Storage driver that will service requests as the
last step in the pipeline | 625990438c3a8732951f784b |
class CompressedStaticFilesMixin(object): <NEW_LINE> <INDENT> _new_files = None <NEW_LINE> def post_process(self, *args, **kwargs): <NEW_LINE> <INDENT> files = super(CompressedStaticFilesMixin, self).post_process(*args, **kwargs) <NEW_LINE> if not kwargs.get('dry_run'): <NEW_LINE> <INDENT> files = self.post_process_with_compression(files) <NEW_LINE> <DEDENT> return files <NEW_LINE> <DEDENT> def post_process_with_compression(self, files): <NEW_LINE> <INDENT> hashed_names = {} <NEW_LINE> new_files = set() <NEW_LINE> self.start_tracking_new_files(new_files) <NEW_LINE> for name, hashed_name, processed in files: <NEW_LINE> <INDENT> if hashed_name and not isinstance(processed, Exception): <NEW_LINE> <INDENT> hashed_names[self.clean_name(name)] = hashed_name <NEW_LINE> <DEDENT> yield name, hashed_name, processed <NEW_LINE> <DEDENT> self.stop_tracking_new_files() <NEW_LINE> original_files = set(hashed_names.keys()) <NEW_LINE> hashed_files = set(hashed_names.values()) <NEW_LINE> if self.keep_only_hashed_files: <NEW_LINE> <INDENT> files_to_delete = (original_files | new_files) - hashed_files <NEW_LINE> files_to_compress = hashed_files <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> files_to_delete = set() <NEW_LINE> files_to_compress = original_files | hashed_files <NEW_LINE> <DEDENT> self.delete_files(files_to_delete) <NEW_LINE> for name, compressed_name in self.compress_files(files_to_compress): <NEW_LINE> <INDENT> yield name, compressed_name, True <NEW_LINE> <DEDENT> <DEDENT> def hashed_name(self, *args, **kwargs): <NEW_LINE> <INDENT> name = super(CompressedStaticFilesMixin, self).hashed_name(*args, **kwargs) <NEW_LINE> if self._new_files is not None: <NEW_LINE> <INDENT> self._new_files.add(self.clean_name(name)) <NEW_LINE> <DEDENT> return name <NEW_LINE> <DEDENT> def start_tracking_new_files(self, new_files): <NEW_LINE> <INDENT> self._new_files = new_files <NEW_LINE> <DEDENT> def stop_tracking_new_files(self): <NEW_LINE> <INDENT> self._new_files = None <NEW_LINE> <DEDENT> 
@property <NEW_LINE> def keep_only_hashed_files(self): <NEW_LINE> <INDENT> return getattr(settings, 'WHITENOISE_KEEP_ONLY_HASHED_FILES', False) <NEW_LINE> <DEDENT> def delete_files(self, files_to_delete): <NEW_LINE> <INDENT> for name in files_to_delete: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.unlink(self.path(name)) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> if e.errno != errno.ENOENT: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def compress_files(self, names): <NEW_LINE> <INDENT> extensions = getattr(settings, 'WHITENOISE_SKIP_COMPRESS_EXTENSIONS', None) <NEW_LINE> compressor = Compressor(extensions=extensions, quiet=True) <NEW_LINE> for name in names: <NEW_LINE> <INDENT> if compressor.should_compress(name): <NEW_LINE> <INDENT> path = self.path(name) <NEW_LINE> prefix_len = len(path) - len(name) <NEW_LINE> for compressed_path in compressor.compress(path): <NEW_LINE> <INDENT> compressed_name = compressed_path[prefix_len:] <NEW_LINE> yield name, compressed_name | Wraps a StaticFilesStorage instance to create compressed versions of its
output files and, optionally, to delete the non-hashed files (i.e. those
without the hash in their name) | 62599043e64d504609df9d49 |
class Client(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=128) <NEW_LINE> email = models.EmailField(max_length=128) <NEW_LINE> phone = models.CharField(max_length=20) <NEW_LINE> comments = models.TextField(blank=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name | Creating a client instance
>>> a_client = Client.objects.create(name='david', email='[email protected]', phone=7706338574, comments='optional')
>>> a_client.name
'david'
>>> | 625990431d351010ab8f4e10 |
class SimpleRenderer(Renderer): <NEW_LINE> <INDENT> _template = ViewPageTemplateFile('simple.pt') <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> Renderer.__init__(self, *args) <NEW_LINE> context = aq_inner(self.context) <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> return self._template() <NEW_LINE> <DEDENT> @property <NEW_LINE> def available(self): <NEW_LINE> <INDENT> context = aq_inner(self.context) <NEW_LINE> return context.portal_type == 'News Item' <NEW_LINE> <DEDENT> def words(self): <NEW_LINE> <INDENT> return ['Hello World', 'Morning World'] <NEW_LINE> <DEDENT> def time(self): <NEW_LINE> <INDENT> return time.time() | The renderer will prepare the portlet HTML. | 6259904307d97122c4217f91 |
class EvalPrediction(NamedTuple): <NEW_LINE> <INDENT> predictions: np.ndarray <NEW_LINE> label_ids: np.ndarray | Evaluation eval_model_dir (always contains labels), to be used
to compute metrics. | 6259904363b5f9789fe8645d |
class SupportingFunctionsTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_check_config(self): <NEW_LINE> <INDENT> settings = { "language": 1, "spreadsheet_name": "tests/test_data/good_data.ods", "sheet_name": "Sheet1", "target_folder": "tests/", "log_file": "tests/test_log", "sort": "phonetics", } <NEW_LINE> with self.assertRaises(TypeError) as error: <NEW_LINE> <INDENT> read_data.check_settings(config_file=settings) <NEW_LINE> <DEDENT> self.assertIn("The language is not a string", str(error.exception)) <NEW_LINE> settings["language"] = "Test" <NEW_LINE> settings["target_folder"] = "Fake directory" <NEW_LINE> with self.assertRaises(FileNotFoundError) as error: <NEW_LINE> <INDENT> read_data.check_settings(config_file=settings) <NEW_LINE> <DEDENT> self.assertIn( "The following file doesn't exist: {t}".format(t=settings["target_folder"]), str(error.exception), "Error message wrong", ) <NEW_LINE> <DEDENT> def test_letter_to_number(self): <NEW_LINE> <INDENT> self.assertEqual( read_data.letter_to_number("A"), 0, "Letter to number giving wrong output" ) <NEW_LINE> self.assertEqual( read_data.letter_to_number("B"), 1, "Letter to number giving wrong output" ) <NEW_LINE> self.assertEqual( read_data.letter_to_number("a"), 0, "Letter to number giving wrong output" ) <NEW_LINE> self.assertEqual( read_data.letter_to_number("Z"), 25, "Letter to number giving wrong output" ) <NEW_LINE> with self.assertRaises(AssertionError): <NEW_LINE> <INDENT> read_data.letter_to_number(1) <NEW_LINE> <DEDENT> with self.assertRaises(AssertionError): <NEW_LINE> <INDENT> read_data.letter_to_number("AA") <NEW_LINE> <DEDENT> with self.assertRaises(AssertionError): <NEW_LINE> <INDENT> read_data.letter_to_number("1") <NEW_LINE> <DEDENT> with self.assertRaises(AssertionError): <NEW_LINE> <INDENT> read_data.letter_to_number("") <NEW_LINE> <DEDENT> with self.assertRaises(AssertionError): <NEW_LINE> <INDENT> read_data.letter_to_number(" ") | Tests the functions supporting read_lexicon() | 
62599043004d5f362081f95e |
class CheckSingleStoreAction(argparse.Action): <NEW_LINE> <INDENT> def __call__(self, parser, namespace, values, option_string=None): <NEW_LINE> <INDENT> if getattr(namespace, self.dest, None) is not None: <NEW_LINE> <INDENT> print( 'WARNING: previous optional argument "' + option_string + ' ' + str(getattr(namespace, self.dest)) + '" overwritten by "' + str(option_string) + ' ' + str(values) + '"' ) <NEW_LINE> <DEDENT> setattr(namespace, self.dest, values) | issue a warning when the store action is called multiple times | 62599043cad5886f8bdc59f6 |
class RNNEncoder(AbsEncoder): <NEW_LINE> <INDENT> def __init__( self, input_size: int, rnn_type: str = "lstm", bidirectional: bool = True, use_projection: bool = True, num_layers: int = 4, hidden_size: int = 320, output_size: int = 320, dropout: float = 0.0, subsample: Optional[Sequence[int]] = (2, 2, 1, 1), ): <NEW_LINE> <INDENT> assert check_argument_types() <NEW_LINE> super().__init__() <NEW_LINE> self._output_size = output_size <NEW_LINE> self.rnn_type = rnn_type <NEW_LINE> self.bidirectional = bidirectional <NEW_LINE> self.use_projection = use_projection <NEW_LINE> if rnn_type not in {"lstm", "gru"}: <NEW_LINE> <INDENT> raise ValueError(f"Not supported rnn_type={rnn_type}") <NEW_LINE> <DEDENT> if subsample is None: <NEW_LINE> <INDENT> subsample = np.ones(num_layers + 1, dtype=np.int) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> subsample = subsample[:num_layers] <NEW_LINE> subsample = np.pad( np.array(subsample, dtype=np.int), [1, num_layers - len(subsample)], mode="constant", constant_values=1, ) <NEW_LINE> <DEDENT> rnn_type = ("b" if bidirectional else "") + rnn_type <NEW_LINE> if use_projection: <NEW_LINE> <INDENT> self.enc = torch.nn.ModuleList( [ RNNP( input_size, num_layers, hidden_size, output_size, subsample, dropout, typ=rnn_type, ) ] ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.enc = torch.nn.ModuleList( [ RNN( input_size, num_layers, hidden_size, output_size, dropout, typ=rnn_type, ) ] ) <NEW_LINE> <DEDENT> <DEDENT> def output_size(self) -> int: <NEW_LINE> <INDENT> return self._output_size <NEW_LINE> <DEDENT> def forward( self, xs_pad: torch.Tensor, ilens: torch.Tensor, prev_states: torch.Tensor = None, ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: <NEW_LINE> <INDENT> if prev_states is None: <NEW_LINE> <INDENT> prev_states = [None] * len(self.enc) <NEW_LINE> <DEDENT> assert len(prev_states) == len(self.enc) <NEW_LINE> current_states = [] <NEW_LINE> for module, prev_state in zip(self.enc, prev_states): <NEW_LINE> <INDENT> xs_pad, 
ilens, states = module(xs_pad, ilens, prev_state=prev_state) <NEW_LINE> current_states.append(states) <NEW_LINE> <DEDENT> if self.use_projection: <NEW_LINE> <INDENT> xs_pad.masked_fill_(make_pad_mask(ilens, xs_pad, 1), 0.0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> xs_pad = xs_pad.masked_fill(make_pad_mask(ilens, xs_pad, 1), 0.0) <NEW_LINE> <DEDENT> return xs_pad, ilens, current_states | RNNEncoder class.
Args:
input_size: The number of expected features in the input
output_size: The number of output features
hidden_size: The number of hidden features
bidirectional: If ``True`` becomes a bidirectional LSTM
use_projection: Use projection layer or not
num_layers: Number of recurrent layers
dropout: dropout probability | 625990431f5feb6acb163ee6 |
@BenchBuild.subcommand("log") <NEW_LINE> class BenchBuildLog(cli.Application): <NEW_LINE> <INDENT> @cli.switch(["-E", "--experiment"], str, list=True, help="Experiments to fetch the log for.") <NEW_LINE> def experiment(self, experiments): <NEW_LINE> <INDENT> self._experiments = experiments <NEW_LINE> <DEDENT> @cli.switch(["-e", "--experiment-id"], str, list=True, help="Experiment IDs to fetch the log for.") <NEW_LINE> def experiment_ids(self, experiment_ids): <NEW_LINE> <INDENT> self._experiment_ids = experiment_ids <NEW_LINE> <DEDENT> @cli.switch(["-p", "--project-id"], str, list=True, help="Project IDs to fetch the log for.") <NEW_LINE> def project_ids(self, project_ids): <NEW_LINE> <INDENT> self._project_ids = project_ids <NEW_LINE> <DEDENT> @cli.switch(["-t", "--type"], cli.Set("stdout", "stderr"), list=True, help="Set the output types to print.") <NEW_LINE> def log_type(self, types): <NEW_LINE> <INDENT> self._types = types <NEW_LINE> <DEDENT> _experiments = None <NEW_LINE> _experiment_ids = None <NEW_LINE> _project_ids = None <NEW_LINE> _types = None <NEW_LINE> def main(self, *projects): <NEW_LINE> <INDENT> from benchbuild.utils.schema import Session, Run, RunLog <NEW_LINE> session = Session() <NEW_LINE> exps = self._experiments <NEW_LINE> exp_ids = self._experiment_ids <NEW_LINE> project_ids = self._project_ids <NEW_LINE> types = self._types <NEW_LINE> if types is not None: <NEW_LINE> <INDENT> query = session.query(Run, RunLog).filter(Run.id == RunLog.run_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> query = session.query(Run) <NEW_LINE> <DEDENT> if exps is not None: <NEW_LINE> <INDENT> query = query.filter(Run.experiment_name.in_(exps)) <NEW_LINE> <DEDENT> if exp_ids is not None: <NEW_LINE> <INDENT> query = query.filter(Run.experiment_group.in_(exp_ids)) <NEW_LINE> <DEDENT> if projects is not None: <NEW_LINE> <INDENT> query = query.filter(Run.project_name.in_(projects)) <NEW_LINE> <DEDENT> if project_ids is not None: <NEW_LINE> <INDENT> query = 
query.filter(Run.run_group.in_(project_ids)) <NEW_LINE> <DEDENT> if types is not None: <NEW_LINE> <INDENT> print_logs(query, types) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print_runs(query) | Frontend command to the benchbuild database. | 62599043a8ecb03325872502 |
class U3(KaitaiStruct): <NEW_LINE> <INDENT> SEQ_FIELDS = ["b1", "b2", "b3"] <NEW_LINE> def __init__(self, _io, _parent=None, _root=None): <NEW_LINE> <INDENT> self._io = _io <NEW_LINE> self._parent = _parent <NEW_LINE> self._root = _root if _root else self <NEW_LINE> self._debug = collections.defaultdict(dict) <NEW_LINE> <DEDENT> def _read(self): <NEW_LINE> <INDENT> self._debug['b1']['start'] = self._io.pos() <NEW_LINE> self.b1 = self._io.read_u1() <NEW_LINE> self._debug['b1']['end'] = self._io.pos() <NEW_LINE> self._debug['b2']['start'] = self._io.pos() <NEW_LINE> self.b2 = self._io.read_u1() <NEW_LINE> self._debug['b2']['end'] = self._io.pos() <NEW_LINE> self._debug['b3']['start'] = self._io.pos() <NEW_LINE> self.b3 = self._io.read_u1() <NEW_LINE> self._debug['b3']['end'] = self._io.pos() <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> if hasattr(self, '_m_value'): <NEW_LINE> <INDENT> return self._m_value if hasattr(self, '_m_value') else None <NEW_LINE> <DEDENT> self._m_value = ((self.b1 | (self.b2 << 8)) | (self.b3 << 16)) <NEW_LINE> return self._m_value if hasattr(self, '_m_value') else None | Implements unsigned 24-bit (3 byte) integer.
| 625990430a366e3fb87ddcd7 |
class User(db.Model): <NEW_LINE> <INDENT> __tablename__ = constTablePrefix + 'user' <NEW_LINE> id = db.Column(db.Integer, db.Sequence(__tablename__ + '_id_seq'), autoincrement=True, primary_key=True) <NEW_LINE> create_time = db.Column(db.DateTime, default=datetime.utcnow) <NEW_LINE> machine_id = db.Column(db.String(255), db.ForeignKey('machines.id'), nullable=False, unique=True, doc='唯一ID') <NEW_LINE> machine = db.relationship('GUserMachines', backref='gif_user') <NEW_LINE> followed = db.relationship('User', secondary=tbl_followers, primaryjoin=(tbl_followers.c.follower_id == id), secondaryjoin=(tbl_followers.c.followed_id == id), backref=db.backref('followers', lazy='dynamic'), lazy='dynamic') <NEW_LINE> def follow(self, user): <NEW_LINE> <INDENT> if not self.is_following(user): <NEW_LINE> <INDENT> self.followed.append(user) <NEW_LINE> return self <NEW_LINE> <DEDENT> <DEDENT> def unfollow(self, user): <NEW_LINE> <INDENT> if self.is_following(user): <NEW_LINE> <INDENT> self.followed.remove(user) <NEW_LINE> return self <NEW_LINE> <DEDENT> <DEDENT> def is_following(self, user): <NEW_LINE> <INDENT> return self.followed.filter(tbl_followers.c.followed_id == user.id).count() > 0 <NEW_LINE> <DEDENT> def getBaseJSON(self): <NEW_LINE> <INDENT> return { 'id': self.id, 'machine_id': self.machine_id, 'create_time': format_datetime(self.create_time) } <NEW_LINE> <DEDENT> def getJSON(self): <NEW_LINE> <INDENT> info = self.getBaseJSON() <NEW_LINE> return info <NEW_LINE> <DEDENT> def getJSONEx(self, more=True): <NEW_LINE> <INDENT> info = self.getJSON() <NEW_LINE> return info | 资源专区的用户 | 625990430fa83653e46f61cd |
class Form: <NEW_LINE> <INDENT> def __init__(self, form_obj, server, user, api_key): <NEW_LINE> <INDENT> self.form_obj = form_obj <NEW_LINE> self._api_key = api_key <NEW_LINE> self.server = server <NEW_LINE> self.user = user <NEW_LINE> self.form_id = self.form_obj.attrib['id'] <NEW_LINE> self.name = self.form_obj.find('name').text <NEW_LINE> self.directory = self.form_obj.find('directory').text <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return 7 <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.results) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<{}({}) @ {}>".format(self.directory, self.user, id(self)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> url = 'https://{}.formsite.com/api/users/{}/forms/{}/status'.format( self.server, self.user, self.directory ) <NEW_LINE> params = { 'fs_api_key': self._api_key } <NEW_LINE> resp = requests.get(url, params=params) <NEW_LINE> form_obj = ET.fromstring(resp.text) if resp.ok else None <NEW_LINE> limit_items = form_obj.find('status').findall('.//limit[@type="items"]')[0] <NEW_LINE> limit_results = form_obj.find('status').findall('.//limit[@type="results"]')[0] <NEW_LINE> return AttributeDict({ 'state': form_obj.find("status").find('state').text, 'last_modified': form_obj.find("status").find('last_modified').text, 'limit_items': { 'used': limit_items.find('used').text, 'total': limit_items.find('total').text, }, 'limit_results': { 'used': limit_results.find('used').text, 'total': limit_results.find('total').text, } }) <NEW_LINE> <DEDENT> @property <NEW_LINE> def results(self): <NEW_LINE> <INDENT> _url = 'https://{}.formsite.com/api/users/{}/forms/{}/results'.format( self.server, self.user, self.directory ) <NEW_LINE> params = { 'fs_api_key': self._api_key } <NEW_LINE> resp = requests.get(_url, params=params) <NEW_LINE> form_results_et = 
ET.fromstring(resp.text) if resp.ok else None <NEW_LINE> form_results_obj = form_results_et.find('results') <NEW_LINE> return FormResults(form_results_obj) if form_results_obj else 0 | pythonic interface for individual forms | 6259904394891a1f408ba06f |
class UserAddonAccountDetail(JSONAPIBaseView, generics.RetrieveAPIView, UserMixin, AddonSettingsMixin): <NEW_LINE> <INDENT> permission_classes = ( drf_permissions.IsAuthenticatedOrReadOnly, base_permissions.TokenHasScope, CurrentUser, ) <NEW_LINE> required_read_scopes = [CoreScopes.USER_ADDON_READ] <NEW_LINE> required_write_scopes = [CoreScopes.NULL] <NEW_LINE> serializer_class = AddonAccountSerializer <NEW_LINE> view_category = 'users' <NEW_LINE> view_name = 'user-external_account-detail' <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> user_settings = self.get_addon_settings(check_object_permissions=False) <NEW_LINE> account_id = self.kwargs['account_id'] <NEW_LINE> account = ExternalAccount.load(account_id) <NEW_LINE> if not (account and user_settings.external_accounts.filter(id=account.id).exists()): <NEW_LINE> <INDENT> raise NotFound('Requested addon unavailable') <NEW_LINE> <DEDENT> return account | Detail of an individual external_account authorized by this user *Read-only*
##Permissions
ExternalAccounts are visible only to the user that has ownership of them.
## ExternalAccount Attributes
OSF ExternalAccount entities have the "external_accounts" `type`, with `id` indicating the
`external_account_id` according to the OSF
name type description
=====================================================================================================
display_name string Display name on the third-party service
profile_url string Link to users profile on third-party service *presence varies by service*
provider string short_name of third-party service provider
##Links
See the [JSON-API spec regarding pagination](http://jsonapi.org/format/1.0/#fetching-pagination).
self: the canonical api endpoint of this external_account
#This Request/Response | 6259904307d97122c4217f92 |
class RuleTrie(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.root = DomainNode("", [], 0) <NEW_LINE> <DEDENT> def matchingRulesets(self, fqdn): <NEW_LINE> <INDENT> return self.root.matchingRulesets(fqdn) <NEW_LINE> <DEDENT> def addRuleset(self, ruleset): <NEW_LINE> <INDENT> for target in ruleset.targets: <NEW_LINE> <INDENT> node = self.root <NEW_LINE> parts = list(enumerate(target.split("."))) <NEW_LINE> depth = 0 <NEW_LINE> for (idx, part) in reversed(parts): <NEW_LINE> <INDENT> depth += 1 <NEW_LINE> partNode = node.children.get(part) <NEW_LINE> if not partNode: <NEW_LINE> <INDENT> partNode = DomainNode(part, [], depth) <NEW_LINE> node.addChild(partNode) <NEW_LINE> <DEDENT> if idx == 0: <NEW_LINE> <INDENT> partNode.rulesets.append(ruleset) <NEW_LINE> <DEDENT> node = partNode <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def acceptedScheme(self, url): <NEW_LINE> <INDENT> parsed = urllib.parse.urlparse(url) <NEW_LINE> return parsed.scheme in ("http", "https") <NEW_LINE> <DEDENT> def transformUrl(self, url): <NEW_LINE> <INDENT> parsed = urllib.parse.urlparse(url) <NEW_LINE> if parsed.scheme not in ("http", "https"): <NEW_LINE> <INDENT> raise RuleTransformError("Unknown scheme '{}' in '{}'".format(parsed.scheme, url)) <NEW_LINE> <DEDENT> fqdn = parsed.netloc.lower() <NEW_LINE> matching = self.matchingRulesets(fqdn) <NEW_LINE> for ruleset in matching: <NEW_LINE> <INDENT> newUrl = ruleset.apply(url) <NEW_LINE> if newUrl != url: <NEW_LINE> <INDENT> return RuleMatch(newUrl, ruleset) <NEW_LINE> <DEDENT> <DEDENT> return RuleMatch(url, None) <NEW_LINE> <DEDENT> def prettyPrint(self): <NEW_LINE> <INDENT> self.root.prettyPrint() | Suffix trie for rulesets. | 62599043d99f1b3c44d06991 |
class GetScanHistory(Action): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.logger = logging.getLogger() <NEW_LINE> self.validate_config() <NEW_LINE> <DEDENT> def validate_config(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.config[NEXT_ACTION] <NEW_LINE> self.config[OUTPUT_KEY_NAME] <NEW_LINE> self.config[PATH] <NEW_LINE> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> raise ActionError(f'Invalid action config: missing required setting {e}') <NEW_LINE> <DEDENT> <DEDENT> def execute(self, input_data): <NEW_LINE> <INDENT> self.logger.debug('Running GetScanHistory action') <NEW_LINE> file_names = self.get_file_names() <NEW_LINE> input_data[self.config[OUTPUT_KEY_NAME]] = file_names <NEW_LINE> input_data[NEXT_ACTION] = self.config[NEXT_ACTION] <NEW_LINE> return input_data <NEW_LINE> <DEDENT> def get_file_names(self): <NEW_LINE> <INDENT> self.logger.debug('Fetching file names from directory') <NEW_LINE> try: <NEW_LINE> <INDENT> return FileHelper.get_files_from_dir(self.config[PATH]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise ActionError(f"Action error: can't retrieve content from {self.config[PATH]}") | A class to retrieve scan history from the file system. | 62599043596a897236128f28 |
class HookspecMarker(object): <NEW_LINE> <INDENT> def __init__(self, project_name): <NEW_LINE> <INDENT> self.project_name = project_name <NEW_LINE> <DEDENT> def __call__(self, function, *args, **kwargs): <NEW_LINE> <INDENT> if any(args) or any(kwargs): <NEW_LINE> <INDENT> raise NotImplementedError( "This is a minimal implementation of pluggy") <NEW_LINE> <DEDENT> return function | Dummy implementation. No spec validation. | 6259904350485f2cf55dc278 |
class Downsample(Filter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.start_index: int = 0 <NEW_LINE> self.step: int = 1 <NEW_LINE> <DEDENT> def transform(self, predictors: Matrix) -> Matrix: <NEW_LINE> <INDENT> columns = [i for i in range(self.start_index, predictors.num_columns(), self.step)] <NEW_LINE> return predictors.get_columns(columns) | Filter which gets every Nth column from a matrix, starting at a given index. | 6259904323849d37ff8523ad |
class ConfigurationManager(object): <NEW_LINE> <INDENT> def __init__(self, interactions_file_name: str): <NEW_LINE> <INDENT> self.interactions = file_utilities.get_file_dict(interactions_file_name) <NEW_LINE> <DEDENT> def get_interaction_details(self, interaction_name: str) -> dict: <NEW_LINE> <INDENT> return copy.deepcopy(self.interactions.get(interaction_name)) | A utility used to obtain configuration details for the current application. | 625990433eb6a72ae038b953 |
class BearerAuth(requests.auth.AuthBase): <NEW_LINE> <INDENT> def __init__(self, consumer_key, consumer_secret, proxies=None, user_agent=None): <NEW_LINE> <INDENT> self._consumer_key = consumer_key <NEW_LINE> self._consumer_secret = consumer_secret <NEW_LINE> self.proxies = proxies <NEW_LINE> self.user_agent = user_agent <NEW_LINE> self._bearer_token = self._get_access_token() <NEW_LINE> <DEDENT> def _get_access_token(self): <NEW_LINE> <INDENT> token_url = '%s://%s.%s/%s' % (PROTOCOL, OAUTH2_SUBDOMAIN, DOMAIN, OAUTH2_ENDPOINT) <NEW_LINE> auth = self._consumer_key + ':' + self._consumer_secret <NEW_LINE> b64_bearer_token_creds = base64.b64encode(auth.encode('utf8')) <NEW_LINE> params = {'grant_type': 'client_credentials'} <NEW_LINE> headers = {} <NEW_LINE> headers['User-Agent'] = self.user_agent <NEW_LINE> headers['Authorization'] = 'Basic ' + b64_bearer_token_creds.decode('utf8') <NEW_LINE> headers['Content-Type'] = 'application/x-www-form-urlencoded;charset=UTF-8' <NEW_LINE> try: <NEW_LINE> <INDENT> response = requests.post( token_url, params=params, headers=headers, proxies=self.proxies) <NEW_LINE> data = response.json() <NEW_LINE> return data['access_token'] <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise Exception('Error requesting bearer access token: %s' % e) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, r): <NEW_LINE> <INDENT> auth_list = [ self._consumer_key, self._consumer_secret, self._bearer_token] <NEW_LINE> if all(auth_list): <NEW_LINE> <INDENT> r.headers['Authorization'] = "Bearer %s" % self._bearer_token <NEW_LINE> return r <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception('Not enough keys passed to Bearer token manager.') | Request bearer access token for oAuth2 authentication.
:param consumer_key: Twitter application consumer key
:param consumer_secret: Twitter application consumer secret
:param proxies: Dictionary of proxy URLs (see documentation for python-requests). | 62599043a79ad1619776b372 |
class globals: <NEW_LINE> <INDENT> match = False <NEW_LINE> matchedString = "" <NEW_LINE> matchedVendor = "" <NEW_LINE> partials = False | class providing global accessible variables | 62599043287bf620b6272eda |
class S2TRule(object): <NEW_LINE> <INDENT> def __init__(self, ruleNum): <NEW_LINE> <INDENT> self.id = "%s" % (ruleNum) <NEW_LINE> self.attrs = {} <NEW_LINE> <DEDENT> def set_attribute(self, attr, val): <NEW_LINE> <INDENT> self.attrs[attr] = val <NEW_LINE> <DEDENT> def get_attribute(self, attr): <NEW_LINE> <INDENT> self.attrs.get(attr) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '<S2TRule ' + self.id + '>' <NEW_LINE> <DEDENT> def pp(self): <NEW_LINE> <INDENT> print("<S2TRule %s>" % self.id) <NEW_LINE> for attr, val in self.attrs.items(): <NEW_LINE> <INDENT> print(" %s=\"%s\"" % (attr, val)) | Implements the S2T rule object. An S2T rule consists of an ID
number and a set of conditions including:
-- Optional Conditions: tense, aspect, reltype.
-- Mandatory Condition: relation (the reltype for the new TLINK). | 625990436e29344779b01944 |
class EEPBin(DataReader): <NEW_LINE> <INDENT> def __init__(self, source): <NEW_LINE> <INDENT> DataReader.__init__(self) <NEW_LINE> nsamples = None <NEW_LINE> hdr = {} <NEW_LINE> infile = open(source, "r") <NEW_LINE> while True: <NEW_LINE> <INDENT> line = infile.readline() <NEW_LINE> if not line or line.startswith(';EOH;'): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> line = line.strip() <NEW_LINE> if not line.count(':'): <NEW_LINE> <INDENT> l = line.split() <NEW_LINE> self._props['nchannels'] = int(l[0][1:]) <NEW_LINE> self._props['ntimepoints'] = int(l[1]) <NEW_LINE> self._props['t0'] = float(l[3]) <NEW_LINE> self._props['dt'] = float(l[4]) <NEW_LINE> nsamples = int(l[2]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l = line.split(':') <NEW_LINE> key = l[0].lstrip(';') <NEW_LINE> value = ':'.join(l[1:]) <NEW_LINE> hdr[key] = value <NEW_LINE> <DEDENT> <DEDENT> if hdr.has_key('channels'): <NEW_LINE> <INDENT> self._props['channels'] = hdr['channels'].split() <NEW_LINE> <DEDENT> self._data = np.reshape(np.fromfile(infile, dtype='f'), (nsamples, self._props['nchannels'], self._props['ntimepoints'])) <NEW_LINE> infile.close() <NEW_LINE> <DEDENT> nchannels = property(fget=lambda self: self._props['nchannels'], doc="Number of channels") <NEW_LINE> ntimepoints = property(fget=lambda self: self._props['ntimepoints'], doc="Number of data timepoints") <NEW_LINE> nsamples = property(fget=lambda self: self._data.shape[0], doc="Number of trials/samples") <NEW_LINE> t0 = property(fget=lambda self: self._props['t0'], doc="Relative start time of sampling interval") <NEW_LINE> dt = property(fget=lambda self: self._props['dt'], doc="Time difference between two adjacent samples") <NEW_LINE> channels = property(fget=lambda self: self._props['channels'], doc="List of channel names") | Read-access to binary EEP files.
EEP files are used by *eeprobe* a software for analysing even-related
potentials (ERP), which was developed at the Max-Planck Institute for
Cognitive Neuroscience in Leipzig, Germany.
http://www.ant-neuro.com/products/eeprobe
EEP files consist of a plain text header and a binary data block in a
single file. The header starts with a line of the form
';%d %d %d %g %g' % (Nchannels, Nsamples, Ntrials, t0, dt)
where Nchannels, Nsamples, Ntrials are the numbers of channels, samples
per trial and trials respectively. t0 is the time of the first sample
of a trial relative to the stimulus onset and dt is the sampling interval.
The binary data block consists of single precision floats arranged in the
following way::
<trial1,channel1,sample1>,<trial1,channel1,sample2>,...
<trial1,channel2,sample1>,<trial1,channel2,sample2>,...
.
<trial2,channel1,sample1>,<trial2,channel1,sample2>,...
<trial2,channel2,sample1>,<trial2,channel2,sample2>,... | 6259904321bff66bcd723f5e |
class AbstractSendVoiceAllOf(object): <NEW_LINE> <INDENT> openapi_types = { 'destination_field': 'str' } <NEW_LINE> attribute_map = { 'destination_field': 'destination_field' } <NEW_LINE> def __init__(self, destination_field=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._destination_field = None <NEW_LINE> self.discriminator = None <NEW_LINE> if destination_field is not None: <NEW_LINE> <INDENT> self.destination_field = destination_field <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def destination_field(self): <NEW_LINE> <INDENT> return self._destination_field <NEW_LINE> <DEDENT> @destination_field.setter <NEW_LINE> def destination_field(self, destination_field): <NEW_LINE> <INDENT> allowed_values = ["phone", "cellphone", "phone_failsafe_cellphone", "cellphone_failsafe_phone", "cellphone_phone"] <NEW_LINE> if self.local_vars_configuration.client_side_validation and destination_field not in allowed_values: <NEW_LINE> <INDENT> raise ValueError( "Invalid value for `destination_field` ({0}), must be one of {1}" .format(destination_field, allowed_values) ) <NEW_LINE> <DEDENT> self._destination_field = destination_field <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: 
<NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, AbstractSendVoiceAllOf): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, AbstractSendVoiceAllOf): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict() | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 625990431d351010ab8f4e12 |
class SecurityGroupServerRpcApiMixin(object): <NEW_LINE> <INDENT> def security_group_rules_for_devices(self, context, devices): <NEW_LINE> <INDENT> LOG.debug("Get security group rules " "for devices via rpc %r", devices) <NEW_LINE> return self.call(context, self.make_msg('security_group_rules_for_devices', devices=devices), version='1.1') <NEW_LINE> <DEDENT> def security_group_info_for_devices(self, context, devices): <NEW_LINE> <INDENT> LOG.debug("Get security group information for devices via rpc %r", devices) <NEW_LINE> return self.call(context, self.make_msg('security_group_info_for_devices', devices=devices), version='1.2') | A mix-in that enable SecurityGroup support in plugin rpc. | 62599043d53ae8145f919751 |
class Class(object): <NEW_LINE> <INDENT> def __init__(self, name, value=0, ranks=[]): <NEW_LINE> <INDENT> misc.checkTypeAgainst(type(name), StringType, __file__) <NEW_LINE> misc.checkTypeAgainst(type(value), IntType, __file__) <NEW_LINE> misc.checkTypeAgainst(type(ranks), ListType, __file__) <NEW_LINE> self._name = name <NEW_LINE> self._value = value <NEW_LINE> self._ranks = {} <NEW_LINE> for x in ranks: <NEW_LINE> <INDENT> self.addRank(x) <NEW_LINE> <DEDENT> <DEDENT> def addRank(self, rank): <NEW_LINE> <INDENT> misc.checkTypeAgainst(type(rank), Rank, __file__) <NEW_LINE> self._ranks[rank._name] = rank <NEW_LINE> <DEDENT> def allRanks(self): <NEW_LINE> <INDENT> return self._ranks.values() <NEW_LINE> <DEDENT> def getRank(self, name): <NEW_LINE> <INDENT> return self._ranks[name] if name in self._ranks else None <NEW_LINE> <DEDENT> def getJSON(self): <NEW_LINE> <INDENT> return { "name": self._name, "value": self._value } | Class class is used to group all the relevant ranks together.
Attributes:
_name (str): name of the Class instance
_value (int): value / weight of the instance
_ranks (dict<str, Rank>): list of Rank objects | 625990438e71fb1e983bcdc2 |
class TableNamespace(AssertionNamespace): <NEW_LINE> <INDENT> @assertion <NEW_LINE> def column_contain( self, table, values, column, description=None, category=None, limit=None, report_fails_only=False, ): <NEW_LINE> <INDENT> entry = assertions.ColumnContain( table=table, values=values, column=column, limit=limit, report_fails_only=report_fails_only, description=description, category=category, ) <NEW_LINE> return entry <NEW_LINE> <DEDENT> @assertion <NEW_LINE> def match( self, actual, expected, description=None, category=None, include_columns=None, exclude_columns=None, report_all=True, fail_limit=0, ): <NEW_LINE> <INDENT> entry = assertions.TableMatch( table=actual, expected_table=expected, include_columns=include_columns, exclude_columns=exclude_columns, report_all=report_all, fail_limit=fail_limit, description=description, category=category, ) <NEW_LINE> return entry <NEW_LINE> <DEDENT> @assertion <NEW_LINE> def diff( self, actual, expected, description=None, category=None, include_columns=None, exclude_columns=None, report_all=True, fail_limit=0, ): <NEW_LINE> <INDENT> entry = assertions.TableDiff( table=actual, expected_table=expected, include_columns=include_columns, exclude_columns=exclude_columns, report_all=report_all, fail_limit=fail_limit, report_fail_only=True, description=description, category=category, ) <NEW_LINE> return entry <NEW_LINE> <DEDENT> @assertion <NEW_LINE> def log(self, table, display_index=False, description=None): <NEW_LINE> <INDENT> entry = base.TableLog( table=table, display_index=display_index, description=description ) <NEW_LINE> return entry | Contains logic for regular expression assertions. | 62599043b57a9660fecd2d70 |
class Brew(BaseAdapter): <NEW_LINE> <INDENT> def search(self, query): <NEW_LINE> <INDENT> response = self.command('search', query)[0] <NEW_LINE> if 'No formula found' not in response and 'Error:' not in response: <NEW_LINE> <INDENT> return dict([( self.package_name(line), self.search_info(self.package_name(line)) ) for line in response.splitlines() if line]) <NEW_LINE> <DEDENT> return {} <NEW_LINE> <DEDENT> def search_info(self, query): <NEW_LINE> <INDENT> response = self._execute_command('aero', ['info', query], False)[0] <NEW_LINE> from re import split <NEW_LINE> lines = response.splitlines() <NEW_LINE> idx = lines.index(u' ________________________________________ __________________________________________________ ') <NEW_LINE> return u'\n'.join([u''.join(split(u'\x1b.*?m', l)).replace(u' : ', u'').strip() for l in response.splitlines()[idx+1:idx+4]]) <NEW_LINE> <DEDENT> def info(self, query): <NEW_LINE> <INDENT> if '/' in query: <NEW_LINE> <INDENT> self.command_no_passthru('tap', '/'.join(query.split('/')[:-1])) <NEW_LINE> <DEDENT> response = self.command('info', query)[0] <NEW_LINE> if 'Error:' not in response: <NEW_LINE> <INDENT> response = response.replace(query + ': ', 'version: ') <NEW_LINE> return [line.split(u': ', 1) for line in response.splitlines() if 'homebrew' not in line] <NEW_LINE> <DEDENT> return [['No info available']] <NEW_LINE> <DEDENT> def install(self, query): <NEW_LINE> <INDENT> self.shell('install', query) <NEW_LINE> return {} | Homebrew adapter. | 625990433617ad0b5ee0742d |
class IpFilter(CommandFilter): <NEW_LINE> <INDENT> def match(self, userargs): <NEW_LINE> <INDENT> if userargs[0] == 'ip': <NEW_LINE> <INDENT> for a, b in zip(userargs[1:], userargs[2:]): <NEW_LINE> <INDENT> if a == 'netns': <NEW_LINE> <INDENT> return (b != 'exec') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return True | Specific filter for the ip utility to that does not match exec. | 62599043379a373c97d9a31d |
class LxList(sdb.Command): <NEW_LINE> <INDENT> names = ["linux_list", "lxlist"] <NEW_LINE> @classmethod <NEW_LINE> def _init_parser(cls, name: str) -> argparse.ArgumentParser: <NEW_LINE> <INDENT> parser = super()._init_parser(name) <NEW_LINE> parser.add_argument( "struct_name", help="name of the struct used for entries in the list") <NEW_LINE> parser.add_argument("member", help="name of the node member within the struct") <NEW_LINE> return parser <NEW_LINE> <DEDENT> def _call(self, objs: Iterable[drgn.Object]) -> Iterable[drgn.Object]: <NEW_LINE> <INDENT> sname = get_valid_struct_name(self, self.args.struct_name) <NEW_LINE> for obj in objs: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> yield from list_for_each_entry(sname, obj, self.args.member) <NEW_LINE> <DEDENT> except LookupError as err: <NEW_LINE> <INDENT> raise sdb.CommandError(self.name, str(err)) | Walk a standard Linux doubly-linked list
DESCRIPTION
Given the type of its nodes and the name of its list_node
member, walk a doubly-linked list as defined in the Linux
kernel ('struct list_head' type in include/linux/list.h).
EXAMPLES
Walk all modules in the system:
sdb> addr modules | lxlist module list | member name ! head -n 3
(char [56])"connstat"
(char [56])"rpcsec_gss_krb5"
(char [56])"nfsv4"
...
Walk all root caches of SLUB:
sdb> addr slab_root_caches | lxlist kmem_cache memcg_params.__root_caches_node | member name
(const char *)0xffff90cc010ae620 = "nfs_direct_cache"
(const char *)0xffff90cbdfb96a90 = "nfs_read_data"
(const char *)0xffff90cbdfb96650 = "nfs_inode_cache"
(const char *)0xffff90cbd9983e60 = "t10_pr_reg_cache"
... | 625990438a43f66fc4bf3487 |
class Transform: <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self.filed = config['field'] <NEW_LINE> self.lenMode = config['len'] if 'len' in config else False <NEW_LINE> self.value = config['value'] <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def do(self, rows): <NEW_LINE> <INDENT> data = [] <NEW_LINE> for row in rows: <NEW_LINE> <INDENT> if self.lenMode: <NEW_LINE> <INDENT> if len(str(row[self.filed])) <= self.value: <NEW_LINE> <INDENT> data.append(row) <NEW_LINE> <DEDENT> <DEDENT> elif row[self.filed] <= self.value: <NEW_LINE> <INDENT> data.append(row) <NEW_LINE> <DEDENT> <DEDENT> return data | 某个字段的值大于某个值,则删除该行数据
field: 字段名
value: 用来比较的数值
len: (可选参数)默认值 false 数值比较; 若设置成 true , 则比较字段的长度
配置样例,该配置表示 age 大于 20 则丢弃该行数据:
transfrom:
- type: filter
name: gt
field: age
value: 20
len: false | 62599043e76e3b2f99fd9cff |
class CommandSlice: <NEW_LINE> <INDENT> def GetResources(self): <NEW_LINE> <INDENT> return {'Pixmap' : getIconPath("Part_Slice.svg"), 'MenuText': QtCore.QT_TRANSLATE_NOOP("Part_SplitFeatures","Slice"), 'Accel': "", 'ToolTip': QtCore.QT_TRANSLATE_NOOP("Part_SplitFeatures","Part_Slice: split object by intersections with other objects")} <NEW_LINE> <DEDENT> def Activated(self): <NEW_LINE> <INDENT> if len(FreeCADGui.Selection.getSelectionEx()) > 1 : <NEW_LINE> <INDENT> cmdCreateSliceFeature(name= "Slice", mode= "Split") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mb = QtGui.QMessageBox() <NEW_LINE> mb.setIcon(mb.Icon.Warning) <NEW_LINE> mb.setText(_translate("Part_SplitFeatures", "Select at least two objects, first! First one is the object to be sliced; the rest are objects to slice with.", None)) <NEW_LINE> mb.setWindowTitle(_translate("Part_SplitFeatures","Bad selection", None)) <NEW_LINE> mb.exec_() <NEW_LINE> <DEDENT> <DEDENT> def IsActive(self): <NEW_LINE> <INDENT> if FreeCAD.ActiveDocument: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Command to create Slice feature | 62599043097d151d1a2c235d |
class ImageChunk(InstanceOrExpr, DelegatingMixin): <NEW_LINE> <INDENT> widget2d = Arg(Widget2D) <NEW_LINE> filename = Arg(str) <NEW_LINE> need_redraw = State(bool, True) <NEW_LINE> def usagetrack_draw(self, draw_method, *args, **kws): <NEW_LINE> <INDENT> res = draw_method(*args, **kws) <NEW_LINE> return res <NEW_LINE> <DEDENT> delegate = If( need_redraw, PixelGrabber( WrapMethod(widget2d, 'draw', _self.usagetrack_draw, post_action = Set(need_redraw, False)), filename), Image(...) ) <NEW_LINE> pass | ImageChunk(widget2d) draws widget2d normally, but also captures an image to use
for faster redrawing. In some cases, it can significantly speed up drawing of certain
constant pictures or text, at least for now while our text drawing is inefficient
and we don't yet have the display-list equivalent of ImageChunk.
WARNING: See the caveats below about significant limitations in when and how
ImageChunk can be used. See also DisplayListChunk [more widely applicable].
The default options redraw the image normally at least once per session,
and never store it on disk (except perhaps in temporary files due to implementation kluges).
They draw it normally again (and cache a new image) whenever some variable used to draw it
changes.
Options are provided to change the set of variables whose changes invalidate
the cached image, to record a history of changed images for debugging and testing
purposes, to keep a saved image on disk for rapid startup, to require developer
confirmation of changes to that image, and to bring up debugging panes for control
of these options and browsing of past image versions.
Caveats: ImageChunk works by grabbing pixels from the color buffer immediately after
drawing widget2d. This can only work properly if widget2d is drawn in an unrotated orientation,
and always at the same size in pixels on the screen, but ImageChunk doesn't enforce or check
this. Therefore it's only suitable for use in 2d screen-aligned widget layouts.
It can also be confused by obscuring objects drawn into the same pixels in the color or depth
buffers, if they are drawn first (which is not in general easy to control), so it's only safe
to use when no such obscuring objects will be drawn, or when they're guaranteed to be drawn
later than the ImageChunk.
Likely bug: it doesn't yet do anything to make the lbox aligned at precise pixel boundaries. | 62599043b5575c28eb713643 |
class PinInvalidBounce(PinError, ValueError): <NEW_LINE> <INDENT> pass | Error raised when attempting to assign an invalid bounce time to a pin | 6259904326068e7796d4dc3a |
class Kalman: <NEW_LINE> <INDENT> def __init__(self, x, u, P, A, B, Q, H, R): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.u = u <NEW_LINE> self.P = P <NEW_LINE> self.A = A <NEW_LINE> self.B = B <NEW_LINE> self.Q = Q <NEW_LINE> self.H = H <NEW_LINE> self.R = R <NEW_LINE> self.state = [] <NEW_LINE> self.residual = [] <NEW_LINE> self.epsilon = [] <NEW_LINE> self.sigma = [] <NEW_LINE> self.process_noise = [] <NEW_LINE> <DEDENT> def filter(self, z): <NEW_LINE> <INDENT> for zi in z: <NEW_LINE> <INDENT> self.predict() <NEW_LINE> self.update(zi) <NEW_LINE> <DEDENT> <DEDENT> def filter_epsilon(self, z, threshold, scale): <NEW_LINE> <INDENT> for zi in z: <NEW_LINE> <INDENT> self.predict() <NEW_LINE> self.update(zi) <NEW_LINE> self.adapt_epsilon(threshold, scale) <NEW_LINE> <DEDENT> <DEDENT> def filter_sigma(self, z, n_sigma, scale): <NEW_LINE> <INDENT> for zi in z: <NEW_LINE> <INDENT> self.predict() <NEW_LINE> self.update(zi) <NEW_LINE> self.adapt_sigma(n_sigma, scale) <NEW_LINE> <DEDENT> <DEDENT> def predict(self): <NEW_LINE> <INDENT> self.x = matmul(self.A, self.x) + matmul(self.B, self.u) <NEW_LINE> self.P = matmul(matmul(self.A, self.P), self.A.T) + self.Q <NEW_LINE> <DEDENT> def update(self, z): <NEW_LINE> <INDENT> S = matmul(matmul(self.H, self.P), self.H.T) + self.R; <NEW_LINE> K = matmul(matmul(self.P, self.H.T), inv(S)) <NEW_LINE> residual = z - matmul(self.H, self.x) <NEW_LINE> self.x = self.x + matmul(K, residual) <NEW_LINE> self.P = self.P - matmul(matmul(K, self.H), self.P) <NEW_LINE> self.state.append(self.x) <NEW_LINE> self.residual.append(norm(residual)) <NEW_LINE> self.epsilon.append(matmul(residual.T, matmul(inv(S), residual))) <NEW_LINE> self.sigma.append(sqrt(S[0, 0] ** 2 + S[1, 1] ** 2 + S[2, 2] ** 2)) <NEW_LINE> self.process_noise.append(self.Q[0, 0]) <NEW_LINE> <DEDENT> def adapt_epsilon(self, threshold, scale): <NEW_LINE> <INDENT> if self.epsilon[-1] > threshold: <NEW_LINE> <INDENT> self.Q *= scale <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> 
self.Q /= scale <NEW_LINE> <DEDENT> <DEDENT> def adapt_sigma(self, n_sigma, scale): <NEW_LINE> <INDENT> if self.residual[-1] > n_sigma * self.sigma[-1]: <NEW_LINE> <INDENT> self.Q *= scale <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.Q /= scale | Kalman filter object. | 62599043d99f1b3c44d06993 |
class SqlSaver(StateSaver): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(SqlSaver, self).__init__() <NEW_LINE> self.__writeCache = {} <NEW_LINE> self.table = None <NEW_LINE> self.db = None <NEW_LINE> self.metadata = MetaData() <NEW_LINE> self.engine = None <NEW_LINE> self.firstTime = True <NEW_LINE> <DEDENT> def setup(self, setting): <NEW_LINE> <INDENT> if 'db' in setting: <NEW_LINE> <INDENT> self.db = setting['db'] <NEW_LINE> self.engine = create_engine(setting['db'], echo = False) <NEW_LINE> <DEDENT> <DEDENT> def constructTable(self, cols): <NEW_LINE> <INDENT> if not self.firstTime or self.table is not None or not self.tableName: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> columns = [Column('time', Integer, primary_key = True)] <NEW_LINE> for col in cols: <NEW_LINE> <INDENT> columns.append(Column(col, String(200))) <NEW_LINE> <DEDENT> self.table = Table(self.tableName, self.metadata, *columns) <NEW_LINE> <DEDENT> def resetTable(self, cols): <NEW_LINE> <INDENT> self.constructTable(cols) <NEW_LINE> self.table.drop(self.engine, checkfirst = True) <NEW_LINE> LOG.info("create db %s with table %s with cols %s" % (self.db, self.tableName, cols)) <NEW_LINE> self.table.create(self.engine, checkfirst = True) <NEW_LINE> <DEDENT> def read(self, row, col): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def write(self, timestamp, col, value): <NEW_LINE> <INDENT> if (int == int(timestamp)): <NEW_LINE> <INDENT> LOG.error("timestamp %s is not integer" % timestamp) <NEW_LINE> return <NEW_LINE> <DEDENT> if timestamp not in self.__writeCache: <NEW_LINE> <INDENT> self.__writeCache[timestamp] = {} <NEW_LINE> <DEDENT> self.__writeCache[timestamp][col] = value <NEW_LINE> <DEDENT> def commit(self): <NEW_LINE> <INDENT> if not self.tableName: <NEW_LINE> <INDENT> raise UfException(Errors.TABLENAME_NOT_SET, "Table name not set") <NEW_LINE> <DEDENT> cols = set() <NEW_LINE> for colValueDict in self.__writeCache.itervalues(): <NEW_LINE> <INDENT> 
cols.update(colValueDict.iterkeys()) <NEW_LINE> <DEDENT> self.resetTable(cols) <NEW_LINE> updates = [] <NEW_LINE> for row, colValueDict in self.__writeCache.iteritems(): <NEW_LINE> <INDENT> update = {'time': row} <NEW_LINE> update.update(colValueDict) <NEW_LINE> for col in cols: <NEW_LINE> <INDENT> if col not in update: <NEW_LINE> <INDENT> update[col] = "" <NEW_LINE> <DEDENT> <DEDENT> for key in update.keys(): <NEW_LINE> <INDENT> update[key] = str(update[key]) <NEW_LINE> <DEDENT> updates.append(update) <NEW_LINE> <DEDENT> conn = self.engine.connect() <NEW_LINE> conn.execute(self.table.insert(), updates) <NEW_LINE> LOG.info("committed table %s at %s" % (self.table, self.db)) | sql saver | 6259904323e79379d538d7f3 |
class Square(Rectangle): <NEW_LINE> <INDENT> def __init__(self, size): <NEW_LINE> <INDENT> self.integer_validator("Value", size) <NEW_LINE> super().__init__(size, size) <NEW_LINE> self.__size = size <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__size**2 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "[Square] {}/{}".format(self.__size, self.__size) | Sets a Square. | 625990431d351010ab8f4e14 |
class IterableItems: <NEW_LINE> <INDENT> def __init__(self, fetch_method, id=None, mask=None, limit=10): <NEW_LINE> <INDENT> self.fetch_method = fetch_method <NEW_LINE> self.mask = mask <NEW_LINE> self.id = id <NEW_LINE> self.offset = 0 <NEW_LINE> self.limit = limit <NEW_LINE> self.fetched = [] <NEW_LINE> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> items = self.fetch_method(offset=self.offset, limit=self.limit, mask=self.mask, id=self.id) <NEW_LINE> self.offset += self.limit <NEW_LINE> return items <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if len(self.fetched) < 1: <NEW_LINE> <INDENT> self.fetched = self.fetch() <NEW_LINE> logging.debug(self.__class__.__name__ + ' has been fetched.') <NEW_LINE> if len(self.fetched) < 1: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> <DEDENT> item = self.fetched.pop() <NEW_LINE> return item | Iterator for Pagenated list | 625990431f5feb6acb163eea |
class EvacPlan: <NEW_LINE> <INDENT> def __init__(self, s): <NEW_LINE> <INDENT> class __Party: <NEW_LINE> <INDENT> def __init__(self, name, num): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.num = num <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.num > other.num <NEW_LINE> <DEDENT> <DEDENT> self.__parties = [__Party(i, s[i]) for i in s] <NEW_LINE> self.__total = sum([s[i] for i in s]) <NEW_LINE> heapify(self.__parties) <NEW_LINE> self.plan = None <NEW_LINE> self.__group = '' <NEW_LINE> self.__make_plan() <NEW_LINE> <DEDENT> def __evac_largest(self): <NEW_LINE> <INDENT> largest_party = heappop(self.__parties) <NEW_LINE> largest_party.num -= 1 <NEW_LINE> self.__total -= 1 <NEW_LINE> self.__group += largest_party.name <NEW_LINE> if len(self.__group) == 2: <NEW_LINE> <INDENT> self.plan.append(self.__group) <NEW_LINE> self.__group = '' <NEW_LINE> <DEDENT> if largest_party.num > 0: <NEW_LINE> <INDENT> heappush(self.__parties, largest_party) <NEW_LINE> <DEDENT> <DEDENT> def __make_plan(self): <NEW_LINE> <INDENT> if self.plan is not None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.plan = [] <NEW_LINE> <DEDENT> if self.__total % 2 == 1: <NEW_LINE> <INDENT> self.__evac_largest() <NEW_LINE> self.plan.append(self.__group) <NEW_LINE> self.__group = '' <NEW_LINE> <DEDENT> while self.__total > 0: <NEW_LINE> <INDENT> self.__evac_largest() <NEW_LINE> <DEDENT> if len(self.__group) == 1: <NEW_LINE> <INDENT> self.plan.append(self.__group) | If total number of people is odd, pop 1 from the largest party, make that
the first instruction (just evacuate 1 people in that round)
Then for the remaining even number of people We will always pop from the
current largest party. Then group instructions into pairs of two to minimize
rounds and avoid breaking in situations where only two parties left and their
numbers are equal. | 62599043b57a9660fecd2d72 |
class FileIOPluginTestMixin(PluginIOTestMixin): <NEW_LINE> <INDENT> def test_empty(self): <NEW_LINE> <INDENT> self.assertEqual(IOPlugin.EMPTY_DATA, self.plugin.load()) <NEW_LINE> <DEDENT> def test_save_ignore_unpersisted(self): <NEW_LINE> <INDENT> self.plugin.save(self.all_data) <NEW_LINE> self.assertEqual(self.plugin.load(), IOPlugin.Data( *(data if persist else None for persist, data in zip(self.plugin.persisted(), self.all_data)))) | Common FileIO plugins tests. | 62599043a8ecb03325872506 |
class Test(namespaceable): <NEW_LINE> <INDENT> with namespace() as namespace_: <NEW_LINE> <INDENT> with pytest.raises( NameError, message="name 'footer' is not defined"): <NEW_LINE> <INDENT> del footer | Throwaway test class, for testing failing delete. | 6259904307f4c71912bb0728 |
@view_defaults(route_name="katfud", renderer="json") <NEW_LINE> class KatFud(object): <NEW_LINE> <INDENT> def __init__(self, request): <NEW_LINE> <INDENT> self.request = request <NEW_LINE> <DEDENT> @view_config(request_method='GET') <NEW_LINE> def stats(self): <NEW_LINE> <INDENT> cur_settings = runtime.runtime <NEW_LINE> stats = {} <NEW_LINE> with open(self.request.registry.settings['katfud.host_file']) as f: <NEW_LINE> <INDENT> stats['fudUrl'] = "http://{}".format(f.readline()) <NEW_LINE> <DEDENT> stats['currentTime'] = int(time.time()) <NEW_LINE> stats['started'] = cur_settings.started <NEW_LINE> stats['lastRan'] = cur_settings.lastRan <NEW_LINE> stats['numberFeeds'] = cur_settings.numberFeeds <NEW_LINE> stats['nextRun'] = cur_settings.nextRun <NEW_LINE> stats['firstFeed'] = cur_settings.firstFeed <NEW_LINE> stats['periodSec'] = cur_settings.periodSec <NEW_LINE> stats['periodScale'] = cur_settings.periodScale <NEW_LINE> return stats <NEW_LINE> <DEDENT> @view_config(request_method='PUT') <NEW_LINE> def update_settings(self): <NEW_LINE> <INDENT> settings = self.request.json_body <NEW_LINE> if settings['code'] != self.request.registry.settings['katfud.security_code']: <NEW_LINE> <INDENT> self.request.response.status = 401 <NEW_LINE> return {"status": "failure (code)"} <NEW_LINE> <DEDENT> cur_settings = runtime.runtime <NEW_LINE> if 'firstFeed' in settings: <NEW_LINE> <INDENT> cur_settings.firstFeed = settings['firstFeed'] <NEW_LINE> <DEDENT> if 'periodSec' in settings: <NEW_LINE> <INDENT> cur_settings.periodSec = settings['periodSec'] <NEW_LINE> <DEDENT> if 'periodScale' in settings: <NEW_LINE> <INDENT> cur_settings.periodScale = settings['periodScale'] <NEW_LINE> <DEDENT> cur_settings.nextRun = cur_settings.firstFeed <NEW_LINE> cur_settings.save() <NEW_LINE> return {"status": "success"} <NEW_LINE> <DEDENT> @view_config(route_name='katfudcmd', request_method='POST') <NEW_LINE> def run_cmd(self): <NEW_LINE> <INDENT> cmd = self.request.matchdict['cmd'] <NEW_LINE> code 
= self.request.params['code'] <NEW_LINE> log.info('Command: {}, Code: {}'.format(cmd, code)) <NEW_LINE> if code != self.request.registry.settings['katfud.security_code']: <NEW_LINE> <INDENT> self.request.response.status = 401 <NEW_LINE> return {"status": "failure (code)"} <NEW_LINE> <DEDENT> if cmd == 'run_now': <NEW_LINE> <INDENT> if self.request.registry.settings['katfud.non_pi']: <NEW_LINE> <INDENT> log.warning("Not running on PI - no motor run now") <NEW_LINE> runtime.runtime.set_last_ran() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> run_now() <NEW_LINE> <DEDENT> <DEDENT> elif cmd == 'reboot': <NEW_LINE> <INDENT> if self.request.registry.settings['katfud.non_pi']: <NEW_LINE> <INDENT> log.warning("Not running on PI - not rebooting") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> command = "/sbin/shutdown -r now" <NEW_LINE> process = subprocess.Popen(command.split(), stdout=subprocess.PIPE) <NEW_LINE> output = process.communicate()[0] <NEW_LINE> <DEDENT> <DEDENT> elif cmd == 'shutdown': <NEW_LINE> <INDENT> if self.request.registry.settings['katfud.non_pi']: <NEW_LINE> <INDENT> log.warning("Not running on PI - not shutting down") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> command = "/sbin/shutdown now" <NEW_LINE> process = subprocess.Popen(command.split(), stdout=subprocess.PIPE) <NEW_LINE> output = process.communicate()[0] <NEW_LINE> <DEDENT> <DEDENT> return {"status": "success"} | KatFud REST
| 6259904330dc7b76659a0b26 |
class AccountInvoiceLine(models.Model): <NEW_LINE> <INDENT> _inherit = 'account.invoice.line' <NEW_LINE> move_id = fields.Many2one('stock.move', 'Stock Move', ondelete='set null', index=True, readonly=True) <NEW_LINE> picking_id = fields.Many2one('stock.picking', related='move_id.picking_id', string='Stock Picking', store=False, readonly=True, related_sudo=False) | Override AccountInvoice_line to add the link to the purchase order line it is related to | 62599043d164cc617582226d |
class Image(SolidShape): <NEW_LINE> <INDENT> _attrMap = AttrMap(BASE=SolidShape, x = AttrMapValue(isNumber), y = AttrMapValue(isNumber), width = AttrMapValue(isNumberOrNone,desc="width of the object in points"), height = AttrMapValue(isNumberOrNone,desc="height of the objects in points"), path = AttrMapValue(None), ) <NEW_LINE> def __init__(self, x, y, width, height, path, **kw): <NEW_LINE> <INDENT> SolidShape.__init__(self, kw) <NEW_LINE> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.path = path <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> new = self.__class__(self.x, self.y, self.width, self.height, self.path) <NEW_LINE> new.setProperties(self.getProperties()) <NEW_LINE> return new <NEW_LINE> <DEDENT> def getBounds(self): <NEW_LINE> <INDENT> return (self.x, self.y, self.x + self.width, self.y + self.height) | Bitmap image. | 6259904326068e7796d4dc3c |
class BufferSetRequest(Request): <NEW_LINE> <INDENT> request_id = RequestId.BUFFER_SET <NEW_LINE> def __init__(self, buffer_id=None, index_value_pairs=None): <NEW_LINE> <INDENT> Request.__init__(self) <NEW_LINE> self._buffer_id = int(buffer_id) <NEW_LINE> if index_value_pairs: <NEW_LINE> <INDENT> pairs = [] <NEW_LINE> for index, value in index_value_pairs: <NEW_LINE> <INDENT> index = int(index) <NEW_LINE> value = float(value) <NEW_LINE> pair = (index, value) <NEW_LINE> pairs.append(pair) <NEW_LINE> <DEDENT> pairs = tuple(pairs) <NEW_LINE> <DEDENT> self._index_value_pairs = index_value_pairs <NEW_LINE> <DEDENT> def to_osc(self, *, with_placeholders=False): <NEW_LINE> <INDENT> request_id = self.request_name <NEW_LINE> buffer_id = int(self.buffer_id) <NEW_LINE> contents = [request_id, buffer_id] <NEW_LINE> if self.index_value_pairs: <NEW_LINE> <INDENT> for index, value in self.index_value_pairs: <NEW_LINE> <INDENT> contents.append(index) <NEW_LINE> contents.append(value) <NEW_LINE> <DEDENT> <DEDENT> message = supriya.osc.OscMessage(*contents) <NEW_LINE> return message <NEW_LINE> <DEDENT> @property <NEW_LINE> def buffer_id(self): <NEW_LINE> <INDENT> return self._buffer_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def index_value_pairs(self): <NEW_LINE> <INDENT> return self._index_value_pairs | A /b_set request.
::
>>> import supriya.commands
>>> request = supriya.commands.BufferSetRequest(
... buffer_id=23,
... index_value_pairs=(
... (0, 1.0),
... (10, 13.2),
... (17, 19.3),
... ),
... )
>>> request
BufferSetRequest(
buffer_id=23,
index_value_pairs=(
(0, 1.0),
(10, 13.2),
(17, 19.3),
),
)
::
>>> request.to_osc()
OscMessage('/b_set', 23, 0, 1.0, 10, 13.2, 17, 19.3) | 625990436fece00bbacccca9 |
class UserListView(ListView): <NEW_LINE> <INDENT> model = User <NEW_LINE> context_object_name = 'users' <NEW_LINE> template_name = 'users/user_list.html' <NEW_LINE> ordering = 'id' | Вывод всех пользователей | 62599043baa26c4b54d5059f |
class SExtractorBackground(BackgroundBase): <NEW_LINE> <INDENT> def calc_background(self, data, axis=None): <NEW_LINE> <INDENT> if self.sigma_clip is not None: <NEW_LINE> <INDENT> data = self.sigma_clip(data, axis=axis) <NEW_LINE> <DEDENT> _median = np.atleast_1d(_masked_median(data, axis=axis)) <NEW_LINE> _mean = np.atleast_1d(np.ma.mean(data, axis=axis)) <NEW_LINE> _std = np.atleast_1d(np.ma.std(data, axis=axis)) <NEW_LINE> bkg = np.atleast_1d((2.5 * _median) - (1.5 * _mean)) <NEW_LINE> bkg = np.ma.where(_std == 0, _mean, bkg) <NEW_LINE> idx = np.ma.where(_std != 0) <NEW_LINE> condition = (np.abs(_mean[idx] - _median[idx]) / _std[idx]) < 0.3 <NEW_LINE> bkg[idx] = np.ma.where(condition, bkg[idx], _median[idx]) <NEW_LINE> if axis is None and np.ma.isMaskedArray(bkg): <NEW_LINE> <INDENT> bkg = bkg.item() <NEW_LINE> <DEDENT> return bkg | Class to calculate the background in an array using the
SExtractor algorithm.
The background is calculated using a mode estimator of the form
``(2.5 * median) - (1.5 * mean)``.
If ``(mean - median) / std > 0.3`` then the median is used instead.
Despite what the `SExtractor`_ User's Manual says, this is the
method it *always* uses.
.. _SExtractor: https://www.astromatic.net/software/sextractor
Parameters
----------
sigma_clip : `astropy.stats.SigmaClip` object, optional
A `~astropy.stats.SigmaClip` object that defines the sigma
clipping parameters. If `None` then no sigma clipping will be
performed. The default is to perform sigma clipping with
``sigma=3.0`` and ``maxiters=5``.
Examples
--------
>>> from astropy.stats import SigmaClip
>>> from photutils import SExtractorBackground
>>> data = np.arange(100)
>>> sigma_clip = SigmaClip(sigma=3.0)
>>> bkg = SExtractorBackground(sigma_clip)
The background value can be calculated by using the
`calc_background` method, e.g.:
>>> bkg_value = bkg.calc_background(data)
>>> print(bkg_value) # doctest: +FLOAT_CMP
49.5
Alternatively, the background value can be calculated by calling the
class instance as a function, e.g.:
>>> bkg_value = bkg(data)
>>> print(bkg_value) # doctest: +FLOAT_CMP
49.5 | 625990433eb6a72ae038b957 |
class VirtualMachineScaleSetUpdateVMProfile(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'os_profile': {'key': 'osProfile', 'type': 'VirtualMachineScaleSetUpdateOSProfile'}, 'storage_profile': {'key': 'storageProfile', 'type': 'VirtualMachineScaleSetUpdateStorageProfile'}, 'network_profile': {'key': 'networkProfile', 'type': 'VirtualMachineScaleSetUpdateNetworkProfile'}, 'diagnostics_profile': {'key': 'diagnosticsProfile', 'type': 'DiagnosticsProfile'}, 'extension_profile': {'key': 'extensionProfile', 'type': 'VirtualMachineScaleSetExtensionProfile'}, 'license_type': {'key': 'licenseType', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, os_profile: Optional["VirtualMachineScaleSetUpdateOSProfile"] = None, storage_profile: Optional["VirtualMachineScaleSetUpdateStorageProfile"] = None, network_profile: Optional["VirtualMachineScaleSetUpdateNetworkProfile"] = None, diagnostics_profile: Optional["DiagnosticsProfile"] = None, extension_profile: Optional["VirtualMachineScaleSetExtensionProfile"] = None, license_type: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(VirtualMachineScaleSetUpdateVMProfile, self).__init__(**kwargs) <NEW_LINE> self.os_profile = os_profile <NEW_LINE> self.storage_profile = storage_profile <NEW_LINE> self.network_profile = network_profile <NEW_LINE> self.diagnostics_profile = diagnostics_profile <NEW_LINE> self.extension_profile = extension_profile <NEW_LINE> self.license_type = license_type | Describes a virtual machine scale set virtual machine profile.
:ivar os_profile: The virtual machine scale set OS profile.
:vartype os_profile:
~azure.mgmt.compute.v2017_12_01.models.VirtualMachineScaleSetUpdateOSProfile
:ivar storage_profile: The virtual machine scale set storage profile.
:vartype storage_profile:
~azure.mgmt.compute.v2017_12_01.models.VirtualMachineScaleSetUpdateStorageProfile
:ivar network_profile: The virtual machine scale set network profile.
:vartype network_profile:
~azure.mgmt.compute.v2017_12_01.models.VirtualMachineScaleSetUpdateNetworkProfile
:ivar diagnostics_profile: The virtual machine scale set diagnostics profile.
:vartype diagnostics_profile: ~azure.mgmt.compute.v2017_12_01.models.DiagnosticsProfile
:ivar extension_profile: The virtual machine scale set extension profile.
:vartype extension_profile:
~azure.mgmt.compute.v2017_12_01.models.VirtualMachineScaleSetExtensionProfile
:ivar license_type: The license type, which is for bring your own license scenario.
:vartype license_type: str | 6259904323e79379d538d7f5 |
class PessoaJuridica(Pessoa):
    """Legal entity ("pessoa juridica").

    ``PessoaJuridica`` specialises the basic ``Pessoa`` class.  Legal
    entities represent organisations such as companies, associations and
    foundations, and are identified by a unique number issued by the
    Brazilian federal revenue service, the **CNPJ** (*Cadastro Nacional
    de Pessoa Juridica*).
    """

    def __init__(self, cnpj, nome, idade):
        # Delegate the common person attributes to the base class.
        super().__init__(nome, idade)
        self.cnpj = cnpj

    def get_cnpj(self):
        """Return the CNPJ identifier of this entity."""
        return self.cnpj

    def set_cnpj(self, cnpj):
        """Replace the CNPJ identifier of this entity."""
        self.cnpj = cnpj
class BaseController(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.proxy_admin_user = CONFIG.get('reddwarf_proxy_admin_user', 'admin') <NEW_LINE> self.proxy_admin_pass = CONFIG.get('reddwarf_proxy_admin_pass', '3de4922d8b6ac5a1aad9') <NEW_LINE> self.proxy_admin_tenant_name = CONFIG.get( 'reddwarf_proxy_admin_tenant_name', 'admin') <NEW_LINE> self.auth_url = CONFIG.get('reddwarf_auth_url', 'http://0.0.0.0:5000/v2.0') <NEW_LINE> <DEDENT> def get_client(self, req): <NEW_LINE> <INDENT> proxy_token = req.headers["X-Auth-Token"] <NEW_LINE> client = Client(self.proxy_admin_user, self.proxy_admin_pass, self.proxy_admin_tenant_name, self.auth_url, token=proxy_token) <NEW_LINE> client.authenticate() <NEW_LINE> return client | Base controller class. | 625990436e29344779b01948 |
class LanguageServiceServicer(object):
    """Provides text analysis operations such as sentiment analysis and entity
    recognition.
    """

    def _unimplemented(self, context):
        # Shared stub behaviour: flag the RPC as unimplemented and abort.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details("Method not implemented!")
        raise NotImplementedError("Method not implemented!")

    def AnalyzeSentiment(self, request, context):
        self._unimplemented(context)

    def AnalyzeEntities(self, request, context):
        self._unimplemented(context)

    def AnalyzeEntitySentiment(self, request, context):
        self._unimplemented(context)

    def AnalyzeSyntax(self, request, context):
        self._unimplemented(context)

    def ClassifyText(self, request, context):
        self._unimplemented(context)

    def AnnotateText(self, request, context):
        self._unimplemented(context)
class SyncClient(Base): <NEW_LINE> <INDENT> __tablename__ = 'sync_client' <NEW_LINE> __table_args__ = ( UniqueConstraint('service_provider_gateway_id', 'token'), ) <NEW_LINE> token = Column(String, nullable=False, default=gen_token) <NEW_LINE> description = Column(String) <NEW_LINE> enabled = Column(Boolean, nullable=False, default=True) <NEW_LINE> service_provider_gateway_id = Column(Integer, ForeignKey(ServiceProviderGateway.id), nullable=False) <NEW_LINE> tasks = relationship('SyncTask', backref='sync_client', lazy='dynamic', cascade='all, delete, delete-orphan') | Cliente de Sincronismo | 62599043004d5f362081f961 |
class Out:
    """Redirects ``sys.stdout`` into a file for the object's lifetime.

    On construction ``sys.stdout`` is replaced with a handle to
    *out_file*; when the instance is destroyed the original stream is
    restored and the file closed.  If the file cannot be opened, a
    message is printed and stdout is left pointing at ``sys.__stdout__``.
    """

    def __init__(self, out_file="output.txt"):
        try:
            self.out_file = open(out_file, "w")
        except Exception:
            print("could not create or write to file", out_file)
            sys.stdout = sys.__stdout__
            self.initialized = False
        else:
            sys.stdout = self.out_file
            self.initialized = True

    def __del__(self):
        # Only undo the redirection if __init__ actually succeeded.
        if self.initialized:
            sys.stdout = sys.__stdout__
            self.out_file.close()
class LmkMetric(mx.metric.EvalMetric):
    """This metric is used for lower mxnet version.

    Calculate the mean error of landmarks. The mean error is measured by
    the distances between estimated landmarks and the ground truths, and
    normalized with respect to the inter-ocular distance.
    """

    def __init__(self, lmk_count, pt_ind1, pt_ind2):
        assert pt_ind1 < lmk_count, "pt_ind1 should be less than lmk_count"
        assert pt_ind2 < lmk_count, "pt_ind2 should be less than lmk_count"
        self.lmk_count = lmk_count
        # Indices of the two reference landmarks whose distance is used to
        # normalise the error (presumably the eye centres - TODO confirm).
        self.pt_ind1 = pt_ind1
        self.pt_ind2 = pt_ind2
        self.transform_funcs = lmk_transform_func
        # One extra metric slot: index 0 is the overall mean, indices
        # 1..lmk_count hold the per-landmark means.
        super(LmkMetric, self).__init__('LmkMetric', lmk_count + 1)

    def _check_and_get_data(self, label, pred, transform_func=None):
        """Convert label/pred to (N, K, 2) numpy arrays and validate them."""
        if isinstance(label, mx.nd.NDArray):
            label = label.asnumpy()[:, :self.lmk_count * 2].copy()
        assert isinstance(label, np.ndarray), 'label should be ndarray'
        if isinstance(pred, mx.nd.NDArray):
            pred = pred.asnumpy().copy()
        assert isinstance(pred, np.ndarray), 'pred should be ndarray'
        label = label.reshape(label.shape[0], -1, 2).copy()
        pred = pred.reshape(pred.shape[0], -1, 2).copy()
        assert label.shape == pred.shape, "label and pred should be same size"
        assert label.shape[1] <= self.lmk_count, "pred landmark count should be less than lmk_count"
        if transform_func is not None:
            label = transform_func(label)
            pred = transform_func(pred)
        return (label, pred)

    def update(self, labels, preds):
        """Accumulate normalised landmark errors for one batch."""
        label, pred = self._check_and_get_data(labels[0], preds[0],
                                               self.transform_funcs)
        num_inst = label.shape[0]
        lmk_cnt = label.shape[1]
        # Per-sample Euclidean distance between the two reference points.
        ref_diff = label[:, self.pt_ind1, :] - label[:, self.pt_ind2, :]
        inter_ocular = np.sqrt(np.square(ref_diff).sum(axis=1))
        # Per-sample, per-landmark Euclidean error.
        err = np.sqrt(np.square(label - pred).sum(axis=2))
        for n in range(num_inst):
            err[n, :] /= (inter_ocular[n])
        for c in range(lmk_cnt):
            self.num_inst[c + 1] += 1
            self.sum_metric[c + 1] += np.mean(err[:, c])
        self.num_inst[0] += 1
        self.sum_metric[0] += np.mean(err)
class CreateEntity(object):
    """CreateEntity.

    :attr str entity: The name of the entity. Only Unicode alphanumeric,
        underscore, and hyphen characters; at most 64 characters. A name
        beginning with the reserved prefix `sys-` must name a system entity
        to enable (any entity content in the request is then ignored).
    :attr str description: (optional) The description of the entity; no
        carriage return, newline, or tab characters; at most 128 characters.
    :attr object metadata: (optional) Any metadata related to the value.
    :attr list[CreateValue] values: (optional) An array of objects
        describing the entity values.
    :attr bool fuzzy_match: (optional) Whether to use fuzzy matching for
        the entity.
    """

    def __init__(self, entity, description=None, metadata=None, values=None,
                 fuzzy_match=None):
        self.entity = entity
        self.description = description
        self.metadata = metadata
        self.values = values
        self.fuzzy_match = fuzzy_match

    @classmethod
    def _from_dict(cls, _dict):
        """Initialize a CreateEntity object from a json dictionary."""
        if 'entity' not in _dict:
            raise ValueError(
                'Required property \'entity\' not present in CreateEntity JSON')
        args = {'entity': _dict.get('entity')}
        for attr in ('description', 'metadata', 'fuzzy_match'):
            if attr in _dict:
                args[attr] = _dict.get(attr)
        if 'values' in _dict:
            args['values'] = [
                CreateValue._from_dict(x) for x in (_dict.get('values'))
            ]
        return cls(**args)

    def _to_dict(self):
        """Return a json dictionary representing this model."""
        _dict = {}
        # Key order mirrors the attribute declaration order.
        for attr in ('entity', 'description', 'metadata'):
            val = getattr(self, attr, None)
            if val is not None:
                _dict[attr] = val
        if getattr(self, 'values', None) is not None:
            _dict['values'] = [x._to_dict() for x in self.values]
        if getattr(self, 'fuzzy_match', None) is not None:
            _dict['fuzzy_match'] = self.fuzzy_match
        return _dict

    def __str__(self):
        """Return a pretty-printed json representation of this model."""
        return json.dumps(self._to_dict(), indent=2)

    def __eq__(self, other):
        """Two CreateEntity objects are equal when all attributes match."""
        if not isinstance(other, self.__class__):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Inverse of __eq__."""
        return not self == other
class ContactFormHandleView(FormView): <NEW_LINE> <INDENT> form_class = ContactForm <NEW_LINE> success_url = reverse_lazy('contact-success') <NEW_LINE> template_name = 'contact.html' <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> form.send_mail() <NEW_LINE> return super(ContactFormHandleView, self).form_valid(form) | View pour traiter le contenu du formulaire de contact | 6259904330c21e258be99afe |
@attr.s
class StatMapping(object):
    """Deprecated Grafana v8

    Generates json structure for the value mapping for the Stat panel:

    :param text: String that will replace input value
    :param mapValue: Value to be replaced
    :param startValue: When using a range, the start value of the range
    :param endValue: When using a range, the end value of the range
    :param id: panel id
    """

    text = attr.ib()
    mapValue = attr.ib(default="", validator=instance_of(str))
    startValue = attr.ib(default="", validator=instance_of(str))
    endValue = attr.ib(default="", validator=instance_of(str))
    id = attr.ib(default=None)

    def to_json_data(self):
        # A non-empty mapValue means a direct value->text mapping;
        # otherwise the start/end values describe a range->text mapping.
        if self.mapValue:
            mapping_kind = MAPPING_TYPE_VALUE_TO_TEXT
        else:
            mapping_kind = MAPPING_TYPE_RANGE_TO_TEXT
        return {
            'operator': '',
            'text': self.text,
            'type': mapping_kind,
            'value': self.mapValue,
            'from': self.startValue,
            'to': self.endValue,
            'id': self.id,
        }
class CustomLanguageCode(SQLBase): <NEW_LINE> <INDENT> implements(ICustomLanguageCode) <NEW_LINE> _table = 'CustomLanguageCode' <NEW_LINE> product = ForeignKey( dbName='product', foreignKey='Product', notNull=False, default=None) <NEW_LINE> distribution = ForeignKey( dbName='distribution', foreignKey='Distribution', notNull=False, default=None) <NEW_LINE> sourcepackagename = ForeignKey( dbName='sourcepackagename', foreignKey='SourcePackageName', notNull=False, default=None) <NEW_LINE> language_code = StringCol(dbName='language_code', notNull=True) <NEW_LINE> language = ForeignKey( dbName='language', foreignKey='Language', notNull=False, default=None) <NEW_LINE> @property <NEW_LINE> def translation_target(self): <NEW_LINE> <INDENT> from lp.registry.model.distributionsourcepackage import ( DistributionSourcePackage) <NEW_LINE> if self.product: <NEW_LINE> <INDENT> return self.product <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return DistributionSourcePackage( self.distribution, self.sourcepackagename) | See `ICustomLanguageCode`. | 62599043d7e4931a7ef3d36e |
class Model(Supermodel):
    """Schedules the func gates of the agent.

    Sets the number of elapsed rounds as output.
    """

    def __init__(self, uuid, name: str):
        super(Model, self).__init__(uuid, name)
        self.step = 0
        self.current_time = 0
        # Wall-clock time (epoch seconds) of the next scheduled tick.
        self.future = 0
        self.async_future = None
        self.outputs['time'] = Output('Time', unit='s', info='utc time in seconds since epoch')
        self.outputs['step'] = Output('Step', unit='-', info='step number, starts with 0')
        self.properties["mode"] = Property(name='Mode', default='live', data_type=str, unit='-', info='live or simulation')
        self.properties["time_increase"] = Property('Time increase', default=1, data_type=float, unit='s', info='Time increase with each iteration')
        self.properties["sim_speed"] = Property('Simulation speed', default=0, data_type=float, unit='s', info='Time between iteration, simulation mode only, 0 = as fast as possible')
        self.properties["sim_start"] = Property('Simulation Start (UTC)', default="2018-01-01 00:00", data_type=str, unit='YYYY-MM-DD hh:mm')

    async def func_birth(self):
        sim_start = cal.timegm(time.strptime(self.get_property("sim_start"), '%Y-%m-%d %H:%M'))
        now = time.time()
        # Align the first tick to the next multiple of the time increase.
        inc = self.get_property('time_increase')
        self.future = now + inc - (now % inc)
        self.close_gates()
        self.async_future = asyncio.ensure_future(self.loop())

    async def func_amend(self, keys=[]):
        # When the tick length changes, re-align the schedule and restart
        # the background loop.
        if "time_increase" in keys:
            inc = self.get_property('time_increase')
            self.future = self.current_time + inc - (self.current_time % inc)
            self.async_future.cancel()
            self.async_future = asyncio.ensure_future(self.loop())

    async def func_prep(self):
        self.set_output("step", self.step)
        self.step = self.step + 1
        self.current_time = self.future
        self.set_output("time", self.current_time)

    async def func_post(self, peri_to_post=None):
        pass

    async def func_in_sync(self):
        self.close_gates()

    def close_gates(self):
        # Only gate the peri step when time actually advances.
        if self.get_property('time_increase') > 0:
            print("closing peri gate")
            self.agent.peri_gate.clear()

    async def loop(self):
        # Background ticker: sleep until the next scheduled tick, then
        # open the peri gate so the agent can run one iteration.
        while True:
            await asyncio.sleep(self.future - time.time())
            self.future = self.future + self.get_property('time_increase')
            if not self.agent.peri_gate.is_set():
                print("opening peri gate")
                self.agent.peri_gate.set()
class Engine(ABC): <NEW_LINE> <INDENT> def __init__(self, game): <NEW_LINE> <INDENT> self.game = game <NEW_LINE> <DEDENT> def isTalkative(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def placeShips(self): <NEW_LINE> <INDENT> raise NotImplementedError("Override me") <NEW_LINE> <DEDENT> def playerShips(self): <NEW_LINE> <INDENT> return self.game.state[self.game.player_state, 0] <NEW_LINE> <DEDENT> def opponentsShips(self): <NEW_LINE> <INDENT> return self.game.state[self.game.player_state, 1] <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def attackShips(self): <NEW_LINE> <INDENT> raise NotImplementedError("Override me") | Base class for different implementations designed to play Battleships | 6259904323849d37ff8523b3 |
class ElVatRules(EuVatRulesMixin):
    """VAT rules for Greece."""

    def get_vat_rate(self, item_type):
        # A single flat rate applies; *item_type* does not change it.
        return Decimal(24)
class WithdrawVirtualMoneyViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> authentication_classes = (TokenAuthentication,) <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> serializer_class = TransactionSerializer <NEW_LINE> queryset = Transaction.objects.all() <NEW_LINE> def create(self, request, *args, **kwargs): <NEW_LINE> <INDENT> data = request.data.copy() <NEW_LINE> data['deposited_or_withdrawn_by'] = self.request.user.id <NEW_LINE> data['status'] = True <NEW_LINE> data['transaction_status'] = 'WT' <NEW_LINE> serializer = TransactionSerializer(data=data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> wallet = Wallet.objects.get(owned_by=self.request.user) <NEW_LINE> if wallet.status: <NEW_LINE> <INDENT> if wallet.balance - serializer.validated_data['amount'] >= 0: <NEW_LINE> <INDENT> wallet.balance = wallet.balance - serializer.validated_data['amount'] <NEW_LINE> wallet.save() <NEW_LINE> self.perform_create(serializer) <NEW_LINE> return response('success', {'withdrawal': get_data('withdrawn_by', 'withdrawn_at', serializer.data)}, HTTP_201_CREATED) <NEW_LINE> <DEDENT> return response('failed', {'message': "wallet doesn't have enough money"}, HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> return response('failed', {'message': "wallet is not yet enabled"}, HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> return response('failed', serializer.errors, HTTP_400_BAD_REQUEST) | Viewset for money withdrawal from wallet | 6259904323e79379d538d7f7 |
class BusLine(object): <NEW_LINE> <INDENT> def __init__(self, stop_list = [], bus_id = None): <NEW_LINE> <INDENT> self.stop_list = stop_list <NEW_LINE> self.bus_id = bus_id <NEW_LINE> <DEDENT> def append(self, stop_obj): <NEW_LINE> <INDENT> self.stop_list.append(stop_obj) <NEW_LINE> <DEDENT> def set_BusID(self, newid): <NEW_LINE> <INDENT> self.bus_id = newid | This BusLine class is supposed to hold basic info about a bus and all its stops | 625990431f5feb6acb163eee |
class AnonymousOrg(object): <NEW_LINE> <INDENT> def __init__(self, org): <NEW_LINE> <INDENT> self.org = org <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.org.is_anon = True <NEW_LINE> self.org.save() <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.org.is_anon = False <NEW_LINE> self.org.save() | Makes the given org temporarily anonymous | 62599043b57a9660fecd2d76 |
class QCommonStyle(QStyle):
    """QCommonStyle()"""

    def __init__(self):
        pass

    # --- QObject plumbing stubs ------------------------------------------
    def childEvent(self, *args, **kwargs):
        pass

    def connectNotify(self, *args, **kwargs):
        pass

    def customEvent(self, *args, **kwargs):
        pass

    def disconnectNotify(self, *args, **kwargs):
        pass

    def receivers(self, *args, **kwargs):
        pass

    def sender(self, *args, **kwargs):
        pass

    def senderSignalIndex(self, *args, **kwargs):
        pass

    def timerEvent(self, *args, **kwargs):
        pass

    # --- Drawing stubs ---------------------------------------------------
    def drawComplexControl(self, QStyle_ComplexControl, QStyleOptionComplex, QPainter, QWidget_widget=None):
        pass

    def drawControl(self, QStyle_ControlElement, QStyleOption, QPainter, QWidget_widget=None):
        pass

    def drawPrimitive(self, QStyle_PrimitiveElement, QStyleOption, QPainter, QWidget_widget=None):
        pass

    def generatedIconPixmap(self, QIcon_Mode, QPixmap, QStyleOption):
        return QPixmap

    # --- Geometry / metric stubs -----------------------------------------
    def hitTestComplexControl(self, QStyle_ComplexControl, QStyleOptionComplex, QPoint, QWidget_widget=None):
        pass

    def layoutSpacingImplementation(self, *args, **kwargs):
        pass

    def pixelMetric(self, QStyle_PixelMetric, QStyleOption_option=None, QWidget_widget=None):
        return 0

    def sizeFromContents(self, QStyle_ContentsType, QStyleOption, QSize, QWidget_widget=None):
        pass

    def subControlRect(self, QStyle_ComplexControl, QStyleOptionComplex, QStyle_SubControl, QWidget_widget=None):
        pass

    def subElementRect(self, QStyle_SubElement, QStyleOption, QWidget_widget=None):
        pass

    # --- Style resource stubs --------------------------------------------
    def polish(self, *__args):
        return QPalette

    def unpolish(self, *__args):
        pass

    def standardIconImplementation(self, QStyle_StandardPixmap, QStyleOption_option=None, QWidget_widget=None):
        return QIcon

    def standardPixmap(self, QStyle_StandardPixmap, QStyleOption_option=None, QWidget_widget=None):
        return QPixmap

    def styleHint(self, QStyle_StyleHint, QStyleOption_option=None, QWidget_widget=None, QStyleHintReturn_returnData=None):
        return 0
class NeuroM_SomaDiamTest_Range(sciunit.Test):
    """Tests the soma diameter for morphologies loaded via NeuroM.

    Compares against Min, Max allowed range;
    Score = True if within range, False otherwise
    """

    score_type = morphounit.scores.RangeScore
    id = "/tests/7?version=11"

    def __init__(self, observation={}, name="NeuroM soma diameter - range"):
        description = ("Tests the soma diameter for morphologies loaded via NeuroM")
        self.units = quantities.um
        required_capabilities = (cap.HandlesNeuroM,)
        # Parse the 'X um' strings into quantities before the base class
        # validates the observation.
        observation = self.format_data(observation)
        self.figures = []
        sciunit.Test.__init__(self, observation, name)
        self.directory_output = './output/'

    def format_data(self, data):
        """Convert each 'NN um' string under data['diameter'] to a Quantity."""
        for key, val in data["diameter"].items():
            try:
                quantity_parts = val.split(" ")
                number = float(quantity_parts[0])
                units_str = " ".join(quantity_parts[1:])
                assert (units_str == self.units.symbol)
                data["diameter"][key] = quantities.Quantity(number, self.units)
            except AssertionError:
                raise sciunit.Error("Values not in appropriate format. Required units: ", self.units.symbol)
            except:
                raise sciunit.Error("Values not in appropriate format.")
        return data

    def validate_observation(self, observation):
        """Reject observations whose diameter entries are not Quantity values."""
        try:
            for key0 in observation.keys():
                for key, val in observation["diameter"].items():
                    assert type(observation["diameter"][key]) is quantities.Quantity
        except Exception:
            raise sciunit.ObservationError(
                ("Observation must return a dictionary of the form:"
                 "{'diameter': {'min': 'XX um', 'max': 'YY um'}}"))

    def generate_prediction(self, model, verbose=False):
        """Ask the model for its soma diameter and normalise the units."""
        self.model_name = model.name
        prediction = model.get_soma_diameter_info()
        prediction = self.format_data(prediction)
        return prediction

    def compute_score(self, observation, prediction, verbose=False):
        """Score the prediction against the observed range and plot errors."""
        print("observation = ", observation)
        print("prediction = ", prediction)
        self.score = morphounit.scores.RangeScore.compute(
            observation["diameter"], prediction["diameter"])
        self.score.description = "score is 0.0 if within range; otherwise difference"

        # Per-model output directory for the generated figures.
        self.path_test_output = (self.directory_output + 'soma_diameter_range/'
                                 + self.model_name + '/')
        if not os.path.exists(self.path_test_output):
            os.makedirs(self.path_test_output)

        self.observation = observation
        self.prediction = prediction
        err_plot = plots.ErrorPlot(self)
        err_plot.xlabels = ["Soma"]
        err_plot.ylabel = "Diameter (um)"
        file1 = err_plot.create()
        self.figures.append(file1)
        return self.score

    def bind_score(self, score, model, observation, prediction):
        """Attach the generated figures to the score object."""
        score.related_data["figures"] = self.figures
        return score
class General404Tests(SeleniumTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.visit('/i-am-so/long-that/I-probably/dont-exist') <NEW_LINE> <DEDENT> def test_has_correct_title(self): <NEW_LINE> <INDENT> self.assert_title_equals("Page Not Found") <NEW_LINE> <DEDENT> def test_has_correct_page_heading(self): <NEW_LINE> <INDENT> self.assert_page_heading_equals("Whoops! Page Not Found") <NEW_LINE> <DEDENT> def test_has_search_box(self): <NEW_LINE> <INDENT> self.assert_element_exists( self.selenium.find_element_by_css_selector, "form#searchform") | Test General Expectations for the Custom 404 Page. | 625990438da39b475be044e9 |
class ThreadedServer:
    """Encapsulate a HTTP server running in a separate thread."""

    def __init__(
        self, port: int, scenarios_dir: pathlib.Path, stdout: TextIO, stderr: TextIO
    ) -> None:
        self.port = port
        self.scenarios_dir = scenarios_dir
        self.stdout = stdout
        self.stderr = stderr

        class Handler(http.server.SimpleHTTPRequestHandler):
            def __init__(self, *args, **kwargs):
                super().__init__(*args, directory=str(scenarios_dir), **kwargs)

            def log_message(self, format, *args):
                # Silence the default per-request logging.
                pass

            def do_GET(self):
                # Serve the ontology page for the site root.
                if self.path == "/":
                    self.path = "ontology.html"
                return http.server.SimpleHTTPRequestHandler.do_GET(self)

        self.handler = Handler
        self._httpd = http.server.HTTPServer(("", port), Handler)
        self._work_thread = None
        # Guards _server_exception, which the worker thread may set.
        self._server_exception_lock = threading.Lock()
        self._server_exception = None

    def start(self) -> None:
        def serve() -> None:
            prefix = f"In {ThreadedServer.__name__}.{serve.__name__}"
            try:
                print(
                    f"{prefix}: Starting to serve {self.scenarios_dir} forever on: "
                    f"http://localhost:{self.port}",
                    file=self.stdout,
                )
                self._httpd.serve_forever()
                print(f"{prefix}: Stopped serving forever.", file=self.stdout)
            except Exception as error:
                # Remember the failure; it is re-raised on shutdown().
                print(
                    f"{prefix}: Caught an exception in the HTTPD server "
                    f"(it will be raised at shutdown): {error}",
                    file=self.stderr,
                )
                with self._server_exception_lock:
                    self._server_exception = error

        self._work_thread = threading.Thread(target=serve)
        self._work_thread.start()

    def shutdown(self) -> None:
        prefix = f"In {ThreadedServer.__name__}.{ThreadedServer.shutdown.__name__}"
        print(f"{prefix}: Instructing the server to shut down...", file=self.stdout)
        with self._server_exception_lock:
            if self._server_exception is not None:
                raise self._server_exception
        print(f"{prefix}: Waiting for server to shut down...", file=self.stdout)
        self._httpd.shutdown()

    def __enter__(self) -> "ThreadedServer":
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        self.shutdown()
class ScraperNotFoundError(ScraperError): <NEW_LINE> <INDENT> pass | No se encuentra el scraper solicitado | 62599043379a373c97d9a324 |
class TextInput(FormControlMixin, widgets.TextInput): <NEW_LINE> <INDENT> pass | Add css class 'form-control' to input-tag | 62599043dc8b845886d548b3 |
class EditProfileView(mixins.LoggedInOnlyView, SuccessMessageMixin, UpdateView): <NEW_LINE> <INDENT> model = models.User <NEW_LINE> template_name = "users/edit-profile.html" <NEW_LINE> fields = [ "first_name", "last_name", "gender", "bio", "birthday", "language", "currency", ] <NEW_LINE> success_message = "Profile Updated" <NEW_LINE> def get_object(self, queryset=None): <NEW_LINE> <INDENT> return self.request.user <NEW_LINE> <DEDENT> def get_form(self, form_class=None): <NEW_LINE> <INDENT> form = super().get_form(form_class=form_class) <NEW_LINE> form.fields["first_name"].widget.attrs = {"placeholder": "First name"} <NEW_LINE> form.fields["last_name"].widget.attrs = {"placeholder": "Last name"} <NEW_LINE> form.fields["bio"].widget.attrs = {"placeholder": "Bio"} <NEW_LINE> form.fields["birthday"].widget.attrs = {"placeholder": "Birthday"} <NEW_LINE> return form | EditProfile View Definition | 62599043b5575c28eb713646 |
class PagerdutyTestCase(TestCase, LoaderModuleMockMixin): <NEW_LINE> <INDENT> def setup_loader_modules(self): <NEW_LINE> <INDENT> return {pagerduty: {}} <NEW_LINE> <DEDENT> def test_create_event(self): <NEW_LINE> <INDENT> name = "This is a server warning message" <NEW_LINE> details = "This is a much more detailed message" <NEW_LINE> service_key = "9abcd123456789efabcde362783cdbaf" <NEW_LINE> profile = "my-pagerduty-account" <NEW_LINE> ret = {"name": name, "result": None, "comment": "", "changes": {}} <NEW_LINE> with patch.dict(pagerduty.__opts__, {"test": True}): <NEW_LINE> <INDENT> comt = "Need to create event: {0}".format(name) <NEW_LINE> ret.update({"comment": comt}) <NEW_LINE> self.assertDictEqual( pagerduty.create_event(name, details, service_key, profile), ret ) <NEW_LINE> <DEDENT> with patch.dict(pagerduty.__opts__, {"test": False}): <NEW_LINE> <INDENT> mock_t = MagicMock(return_value=True) <NEW_LINE> with patch.dict(pagerduty.__salt__, {"pagerduty.create_event": mock_t}): <NEW_LINE> <INDENT> comt = "Created event: {0}".format(name) <NEW_LINE> ret.update({"comment": comt, "result": True}) <NEW_LINE> self.assertDictEqual( pagerduty.create_event(name, details, service_key, profile), ret ) | Test cases for salt.states.pagerduty | 62599043a79ad1619776b37a |
class Create(Command): <NEW_LINE> <INDENT> name = 'create' <NEW_LINE> def setup_parser(self, parser): <NEW_LINE> <INDENT> self.parser = parser <NEW_LINE> template_url = settings.get('project_template_url', MASTER_TEMPLATE_URL) <NEW_LINE> self.parser.add_argument( 'path', metavar='PATH', type=str, help='The path to create the new template at' ) <NEW_LINE> self.parser.add_argument( '-t', '--template', metavar='GIT_URL', type=str, help='The GIT URL of the template to use. Defaults to %s' % template_url, default=template_url ) <NEW_LINE> self.parser.add_argument( '-r', '--ref', metavar='GIT_REF', type=str, help='The GIT reference to use when archiving the template. ' 'Defaults to master.', default='master' ) <NEW_LINE> <DEDENT> def main(self, args): <NEW_LINE> <INDENT> from stackstrap.cli import StackStrapCLI <NEW_LINE> cli = StackStrapCLI() <NEW_LINE> template = Template('master-template') <NEW_LINE> if not template.exists: <NEW_LINE> <INDENT> self.log.info("You are creating a new template for the first time " "we will now setup a template named 'master-template' " "that is used to create new templates.") <NEW_LINE> cli.main(['template', 'add', '-r', args.ref, 'master-template', args.template]) <NEW_LINE> <DEDENT> cli.main(['create', args.path, 'master-template']) | Create a new template | 62599043c432627299fa427f |
class CensysTest(unittest.TestCase):
    """Testing for censys.py module."""

    def setUp(self):
        """Load the target IP from .env and build a CensysLookup for it."""
        load_dotenv(find_dotenv())
        self.ipaddr = getenv('IPADDR')
        self.ip_info = censys.CensysLookup(self.ipaddr)

    def test_get_openports(self):
        """get_openports() should return a non-empty collection of ports."""
        open_ports = self.ip_info.get_openports()
        self.assertTrue(open_ports)
# _id: 62599043d10714528d69f00a
class ApiListClientApprovalsHandler(ApiListApprovalsHandlerBase):
  """Returns list of user's clients approvals."""

  args_type = ApiListClientApprovalsArgs
  result_type = ApiListClientApprovalsResult

  def _CheckClientId(self, client_id, approval):
    # Match against the basename of the approval's subject URN.
    subject = approval.Get(approval.Schema.SUBJECT)
    return subject.Basename() == client_id

  def _CheckState(self, state, approval):
    # An approval is "valid" iff CheckAccess does not raise.
    try:
      approval.CheckAccess(approval.context)
      is_valid = True
    except access_control.UnauthorizedAccess:
      is_valid = False

    if state == ApiListClientApprovalsArgs.State.VALID:
      return is_valid
    if state == ApiListClientApprovalsArgs.State.INVALID:
      return not is_valid

  def _BuildFilter(self, args):
    """Builds a predicate combining the client-id and state checks."""
    checks = []
    if args.client_id:
      checks.append(functools.partial(self._CheckClientId, args.client_id))
    if args.state:
      checks.append(functools.partial(self._CheckState, args.state))

    # all() over an empty sequence is True, which matches "no filtering".
    return lambda approval: all(check(approval) for check in checks)

  def Handle(self, args, context=None):
    subject_id = str(args.client_id) if args.client_id else None

    requests = data_store.REL_DB.ReadApprovalRequests(
        context.username,
        rdf_objects.ApprovalRequest.ApprovalType.APPROVAL_TYPE_CLIENT,
        subject_id=subject_id,
        include_expired=True)
    # Newest approvals first.
    requests = sorted(requests, key=lambda ar: ar.timestamp, reverse=True)

    approvals = self._FilterRelationalApprovalRequests(
        requests,
        lambda ar: ApiClientApproval().InitFromDatabaseObject(ar),
        args.state)

    end = args.offset + args.count if args.count else None
    items = list(itertools.islice(approvals, args.offset, end))
    api_client.UpdateClientsFromFleetspeak([a.subject for a in items])
    return ApiListClientApprovalsResult(items=items)
# _id: 6259904382261d6c52730842
class ILDAPServer(IContained, ILDAPServerConfiguration):
    """A LDAP server.

    Marker interface: combines containment (IContained) with the LDAP
    server configuration schema (ILDAPServerConfiguration) and adds no
    members of its own.
    """
    pass
# _id: 625990438a349b6b43687545
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.