Columns:
  code       string, 4 to 4.48k characters
  docstring  string, 1 to 6.45k characters
  _id        string, 24 characters
class ContactActionsLinks(flourish.page.RefineLinksViewlet): <NEW_LINE> <INDENT> pass
Contact actions links viewlet.
62599033d53ae8145f919555
class Meta(Generic): <NEW_LINE> <INDENT> def debug(self, node): <NEW_LINE> <INDENT> nodename = node.__class__.__name__ <NEW_LINE> visitorname = self.__class__.__name__ <NEW_LINE> nodefields = list(ast.iter_fields(node)) <NEW_LINE> node_ = list(ast.iter_child_nodes(node)) <NEW_LINE> print('[warn]', nodename, nodefields, node_) <NEW_LINE> print('[warn]', '<visit_%s not implemented in %s>' % (nodename, visitorname)) <NEW_LINE> <DEDENT> def visicat(self, subs, sep='.'): <NEW_LINE> <INDENT> return sep.join([self.visit(sub) for sub in (subs or [])]) <NEW_LINE> <DEDENT> def syntax(self, name, sub, beg='<', end='>', pre='meta:'): <NEW_LINE> <INDENT> return '%s%s%s %s%s' % (beg, pre, name, sub, end) <NEW_LINE> <DEDENT> def field(self, name, node, fmt='%s=%s'): <NEW_LINE> <INDENT> return fmt % (name, node) <NEW_LINE> <DEDENT> def meta_visit(self, node): <NEW_LINE> <INDENT> def dispatch(node): <NEW_LINE> <INDENT> if type(node) is type([]): <NEW_LINE> <INDENT> return self.visicat(node, sep=' ') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.visit(node) <NEW_LINE> <DEDENT> <DEDENT> fields = ast.iter_fields(node) <NEW_LINE> vfields = ' '.join(self.field(name, dispatch(node)) for name, node in fields) <NEW_LINE> return self.syntax(node.__class__.__name__, vfields) <NEW_LINE> <DEDENT> def generic_visit(self, node): <NEW_LINE> <INDENT> if super().atomp(node) or super().nilp(node): <NEW_LINE> <INDENT> return node <NEW_LINE> <DEDENT> elif super().listp(node): <NEW_LINE> <INDENT> if len(node) == 1: <NEW_LINE> <INDENT> node = node[0] <NEW_LINE> <DEDENT> elif len(node) == 0: <NEW_LINE> <INDENT> print('[error]', 'node is an empty list') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('[warn]', 'node is a list of size > 1, ' 'elements above 1 are ignored (@TOFIX)') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return self.meta_visit(node)
Meta visitor, ugly variant of Generic, but uses the official generic_visit from the API.
62599033796e427e5384f86d
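The Meta visitor above builds its output from ast.iter_fields and ast.iter_child_nodes. A minimal, self-contained sketch of those calls (illustrative only, not part of the dataset row):

import ast

tree = ast.parse("x = 1 + 2")
for node in ast.walk(tree):
    fields = list(ast.iter_fields(node))         # (name, value) pairs for this node
    children = list(ast.iter_child_nodes(node))  # direct child AST nodes
    print(node.__class__.__name__, [name for name, _ in fields], len(children))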
@plugin_pool.register_plugin <NEW_LINE> class CMSPeoplePlugin(CMSAllinkBaseAppContentPlugin): <NEW_LINE> <INDENT> model = PeopleAppContentPlugin <NEW_LINE> name = model.data_model.get_verbose_name_plural() <NEW_LINE> data_model = PeopleAppContentPlugin.data_model
Defined here only because this model is used solely for plugin instances; there should be no need to extend this plugin with any logic, it is only registered here. model: where to store plugin instances. name: name of the plugin. data_model: where to get the data from (can also be a parent model, so all subclasses are selected automatically).
6259903394891a1f408b9f72
class DataArrayIntIterator(_object): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, DataArrayIntIterator, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, DataArrayIntIterator, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> this = _MEDCouplingRemapper.new_DataArrayIntIterator(*args) <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> __swig_destroy__ = _MEDCouplingRemapper.delete_DataArrayIntIterator <NEW_LINE> __del__ = lambda self : None; <NEW_LINE> def next(self): <NEW_LINE> <INDENT> return _MEDCouplingRemapper.DataArrayIntIterator_next(self)
1
6259903391af0d3eaad3af20
class StreamSources(object): <NEW_LINE> <INDENT> def __init__(self, base_url=WOWZA_BASE_URL + 'stream_sources/', api_key=WOWZA_API_KEY, access_key=WOWZA_ACCESS_KEY): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.base_url = base_url <NEW_LINE> self.headers = { 'wsc-api-key': WOWZA_API_KEY, 'wsc-access-key': WOWZA_ACCESS_KEY, 'content-type': 'application/json' } <NEW_LINE> <DEDENT> def info(self, source_id=None): <NEW_LINE> <INDENT> path = self.base_url <NEW_LINE> path = "{}/{}".format(path, source_id) if source_id else path <NEW_LINE> response = session.get(path, headers=self.headers) <NEW_LINE> return response.json() <NEW_LINE> <DEDENT> def source(self, source_id): <NEW_LINE> <INDENT> return self.info(source_id) <NEW_LINE> <DEDENT> def create(self, param_dict): <NEW_LINE> <INDENT> if isinstance(param_dict, dict): <NEW_LINE> <INDENT> path = self.base_url <NEW_LINE> param_dict = { 'stream_source': param_dict } <NEW_LINE> response = session.post(path, json.dumps(param_dict), headers=self.headers) <NEW_LINE> return response.json() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return InvalidParamDict({ 'message': 'The provided parameter dictionary is not valid.' }) <NEW_LINE> <DEDENT> <DEDENT> def update(self, source_id, param_dict): <NEW_LINE> <INDENT> if isinstance(param_dict, dict): <NEW_LINE> <INDENT> path = self.base_url + source_id <NEW_LINE> param_dict = { 'stream_source': param_dict } <NEW_LINE> response = session.patch(path, json.dumps(param_dict), headers=self.headers) <NEW_LINE> return response.json() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return InvalidParamDict({ 'message': 'The provided parameter dictionary is not valid.' }) <NEW_LINE> <DEDENT> <DEDENT> def delete(self, source_id): <NEW_LINE> <INDENT> path = self.base_url + source_id <NEW_LINE> response = session.delete(path, headers=self.headers) <NEW_LINE> return response
Class to interface with the following Wowza endpoints: /api/v1/stream_sources/
62599033cad5886f8bdc58f4
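A hedged sketch of the same REST pattern with the requests library. The endpoint path and header names mirror the class and docstring above; the base host and both keys are placeholders, not verified values:

import json
import requests

session = requests.Session()
base_url = "https://api.cloud.wowza.com/api/v1/stream_sources/"  # assumed host; path taken from the docstring
headers = {
    "wsc-api-key": "<API_KEY_PLACEHOLDER>",
    "wsc-access-key": "<ACCESS_KEY_PLACEHOLDER>",
    "content-type": "application/json",
}

def create_stream_source(param_dict):
    # POST {"stream_source": {...}}, as the create() method above does
    payload = json.dumps({"stream_source": param_dict})
    return session.post(base_url, payload, headers=headers).json()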
class StateCandReceived(JingleFileTransferStates): <NEW_LINE> <INDENT> def _recv_candidate(self, args): <NEW_LINE> <INDENT> if 'candError' in args: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> content = args['content'] <NEW_LINE> streamhost_cid = content.getTag('transport').getTag('candidate-used'). getAttr('cid') <NEW_LINE> streamhost_used = None <NEW_LINE> for cand in self.jft.transport.candidates: <NEW_LINE> <INDENT> if cand['candidate_id'] == streamhost_cid: <NEW_LINE> <INDENT> streamhost_used = cand <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if streamhost_used is None: <NEW_LINE> <INDENT> log.info("unknow streamhost") <NEW_LINE> return <NEW_LINE> <DEDENT> self.jft.nominated_cand['peer-cand'] = streamhost_used <NEW_LINE> <DEDENT> def action(self, args=None): <NEW_LINE> <INDENT> self._recv_candidate(args)
This state happens when we receive a candidate. It takes the argument candError, set when we receive a candidate-error.
6259903315baa7234946308c
class Error(Exception): <NEW_LINE> <INDENT> pass
Base Exception class.
62599033ac7a0e7691f735da
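A short sketch of how such a base class is typically used: subclass it for specific failures and catch the base to handle any of them (illustrative, assuming the Error class above is importable):

class NotFoundError(Error):
    pass

try:
    raise NotFoundError("missing resource")
except Error as exc:  # catches any subclass of the package's base Error
    print(type(exc).__name__, exc)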
class IPPSensor(IPPEntity, SensorEntity): <NEW_LINE> <INDENT> def __init__( self, *, coordinator: IPPDataUpdateCoordinator, enabled_default: bool = True, entry_id: str, unique_id: str, icon: str, key: str, name: str, unit_of_measurement: str | None = None, ) -> None: <NEW_LINE> <INDENT> self._unit_of_measurement = unit_of_measurement <NEW_LINE> self._key = key <NEW_LINE> self._unique_id = None <NEW_LINE> if unique_id is not None: <NEW_LINE> <INDENT> self._unique_id = f"{unique_id}_{key}" <NEW_LINE> <DEDENT> super().__init__( entry_id=entry_id, device_id=unique_id, coordinator=coordinator, name=name, icon=icon, enabled_default=enabled_default, ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self) -> str: <NEW_LINE> <INDENT> return self._unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self) -> str: <NEW_LINE> <INDENT> return self._unit_of_measurement
Defines an IPP sensor.
625990336e29344779b01743
class SessionWizardView(WizardView): <NEW_LINE> <INDENT> storage_name = 'my_django.contrib.formtools.wizard.storage.session.SessionStorage'
A WizardView with pre-configured SessionStorage backend.
6259903373bcbd0ca4bcb37a
class Struct: <NEW_LINE> <INDENT> def __init__(self, register_address, struct_format): <NEW_LINE> <INDENT> self.format = struct_format <NEW_LINE> self.buffer = bytearray(1+struct.calcsize(self.format)) <NEW_LINE> self.buffer[0] = register_address <NEW_LINE> <DEDENT> def __get__(self, obj, objtype=None): <NEW_LINE> <INDENT> with obj.i2c_device: <NEW_LINE> <INDENT> obj.i2c_device.write(self.buffer, end=1, stop=False) <NEW_LINE> obj.i2c_device.readinto(self.buffer, start=1) <NEW_LINE> <DEDENT> return struct.unpack_from(self.format, memoryview(self.buffer)[1:]) <NEW_LINE> <DEDENT> def __set__(self, obj, value): <NEW_LINE> <INDENT> struct.pack_into(self.format, self.buffer, 1, *value) <NEW_LINE> with obj.i2c_device: <NEW_LINE> <INDENT> obj.i2c_device.write(self.buffer)
Arbitrary structure register that is readable and writeable. Values are tuples that map to the values in the defined struct. See struct module documentation for struct format string and its possible value types. :param int register_address: The register address to read the bit from :param type struct_format: The struct format string for this register.
62599033be8e80087fbc0170
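A self-contained sketch of the struct packing the register descriptor above performs: one register-address byte followed by the struct-formatted payload (the format string here is an arbitrary example):

import struct

register_address = 0x0F
fmt = "<HB"  # little-endian: one uint16 followed by one uint8
buffer = bytearray(1 + struct.calcsize(fmt))
buffer[0] = register_address
struct.pack_into(fmt, buffer, 1, 513, 7)                # write values after the address byte
print(struct.unpack_from(fmt, memoryview(buffer)[1:]))  # -> (513, 7)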
class PeopleCollectionView(BaseCollectionView): <NEW_LINE> <INDENT> collection_name = 'people' <NEW_LINE> field_names: ClassVar[Tuple[str, ...]] = ( 'url', 'name', 'height', 'mass', 'hair_color', 'skin_color', 'eye_color', 'birth_year', 'gender', 'homeworld', 'edited', )
Star Wars API people collection view/iterator.
62599033d99f1b3c44d06796
class LifecycleConfig(_messages.Message): <NEW_LINE> <INDENT> autoDeleteTime = _messages.StringField(1) <NEW_LINE> autoDeleteTtl = _messages.StringField(2) <NEW_LINE> idleDeleteTtl = _messages.StringField(3) <NEW_LINE> idleStartTime = _messages.StringField(4)
Specifies the cluster auto delete related schedule configuration. Fields: autoDeleteTime: Optional. The time when cluster will be auto-deleted. autoDeleteTtl: Optional. The life duration of cluster, the cluster will be auto-deleted at the end of this duration. idleDeleteTtl: Optional. The longest duration that cluster would keep alive while staying idle; passing this threshold will cause cluster to be auto-deleted. idleStartTime: Output only. The time when cluster became idle (most recent job finished) and became eligible for deletion due to idleness.
6259903350485f2cf55dc070
class Translations(gettext.GNUTranslations, object): <NEW_LINE> <INDENT> DEFAULT_DOMAIN = 'messages' <NEW_LINE> def __init__(self, fileobj=None, domain=DEFAULT_DOMAIN): <NEW_LINE> <INDENT> self.plural = lambda n: int(n != 1) <NEW_LINE> gettext.GNUTranslations.__init__(self, fp=fileobj) <NEW_LINE> self.files = list(filter(None, [getattr(fileobj, 'name', None)])) <NEW_LINE> self.domain = domain <NEW_LINE> self._domains = {} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load(cls, dirname=None, locales=None, domain=DEFAULT_DOMAIN): <NEW_LINE> <INDENT> if locales is not None: <NEW_LINE> <INDENT> if not isinstance(locales, (list, tuple)): <NEW_LINE> <INDENT> locales = [locales] <NEW_LINE> <DEDENT> locales = [str(l) for l in locales] <NEW_LINE> <DEDENT> if not domain: <NEW_LINE> <INDENT> domain = cls.DEFAULT_DOMAIN <NEW_LINE> <DEDENT> filename = gettext.find(domain, dirname, locales) <NEW_LINE> if not filename: <NEW_LINE> <INDENT> return gettext.NullTranslations() <NEW_LINE> <DEDENT> with open(filename, 'rb') as fp: <NEW_LINE> <INDENT> return cls(fileobj=fp, domain=domain) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<%s: "%s">' % (type(self).__name__, self._info.get('project-id-version')) <NEW_LINE> <DEDENT> def add(self, translations, merge=True): <NEW_LINE> <INDENT> domain = getattr(translations, 'domain', self.DEFAULT_DOMAIN) <NEW_LINE> if merge and domain == self.domain: <NEW_LINE> <INDENT> return self.merge(translations) <NEW_LINE> <DEDENT> existing = self._domains.get(domain) <NEW_LINE> if merge and existing is not None: <NEW_LINE> <INDENT> existing.merge(translations) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> translations.add_fallback(self) <NEW_LINE> self._domains[domain] = translations <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def merge(self, translations): <NEW_LINE> <INDENT> if isinstance(translations, gettext.GNUTranslations): <NEW_LINE> <INDENT> self._catalog.update(translations._catalog) <NEW_LINE> if isinstance(translations, Translations): <NEW_LINE> <INDENT> self.files.extend(translations.files) <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def dgettext(self, domain, message): <NEW_LINE> <INDENT> return self._domains.get(domain, self).gettext(message) <NEW_LINE> <DEDENT> def ldgettext(self, domain, message): <NEW_LINE> <INDENT> return self._domains.get(domain, self).lgettext(message) <NEW_LINE> <DEDENT> def dugettext(self, domain, message): <NEW_LINE> <INDENT> if PY2: <NEW_LINE> <INDENT> return self._domains.get(domain, self).ugettext(message) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._domains.get(domain, self).gettext(message) <NEW_LINE> <DEDENT> <DEDENT> def dngettext(self, domain, singular, plural, num): <NEW_LINE> <INDENT> return self._domains.get(domain, self).ngettext(singular, plural, num) <NEW_LINE> <DEDENT> def ldngettext(self, domain, singular, plural, num): <NEW_LINE> <INDENT> return self._domains.get(domain, self).lngettext(singular, plural, num) <NEW_LINE> <DEDENT> def dungettext(self, domain, singular, plural, num): <NEW_LINE> <INDENT> if PY2: <NEW_LINE> <INDENT> return self._domains.get(domain, self).ungettext( singular, plural, num) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._domains.get(domain, self).ngettext( singular, plural, num)
An extended translation catalog class (ripped off from Babel)
625990331f5feb6acb163ce4
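A hedged usage sketch for the Translations class above. It assumes compiled .mo catalogs under ./locale; if none are found, load() returns gettext.NullTranslations, whose gettext() simply echoes the message:

translations = Translations.load(dirname="locale", locales=["de"], domain="messages")
print(translations.gettext("Hello"))  # translated if a catalog was found, otherwise "Hello"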
class EventServiceGrpcTransport(object): <NEW_LINE> <INDENT> _OAUTH_SCOPES = ( "https://www.googleapis.com/auth/cloud-platform", "https://www.googleapis.com/auth/jobs", ) <NEW_LINE> def __init__( self, channel=None, credentials=None, address="jobs.googleapis.com:443" ): <NEW_LINE> <INDENT> if channel is not None and credentials is not None: <NEW_LINE> <INDENT> raise ValueError( "The `channel` and `credentials` arguments are mutually " "exclusive." ) <NEW_LINE> <DEDENT> if channel is None: <NEW_LINE> <INDENT> channel = self.create_channel( address=address, credentials=credentials, options={ "grpc.max_send_message_length": -1, "grpc.max_receive_message_length": -1, }.items(), ) <NEW_LINE> <DEDENT> self._channel = channel <NEW_LINE> self._stubs = { "event_service_stub": event_service_pb2_grpc.EventServiceStub(channel) } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create_channel( cls, address="jobs.googleapis.com:443", credentials=None, **kwargs ): <NEW_LINE> <INDENT> return google.api_core.grpc_helpers.create_channel( address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def channel(self): <NEW_LINE> <INDENT> return self._channel <NEW_LINE> <DEDENT> @property <NEW_LINE> def create_client_event(self): <NEW_LINE> <INDENT> return self._stubs["event_service_stub"].CreateClientEvent
gRPC transport class providing stubs for google.cloud.talent.v4beta1 EventService API. The transport provides access to the raw gRPC stubs, which can be used to take advantage of advanced features of gRPC.
625990338c3a8732951f764c
class Video(): <NEW_LINE> <INDENT> def __init__(self, title, release_year, rating, runtime, genre, rt_score, review, poster_image_url, trailer_youtube_url): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.year = release_year <NEW_LINE> self.rating = rating <NEW_LINE> self.runtime = runtime <NEW_LINE> self.genre = genre <NEW_LINE> self.score = rt_score <NEW_LINE> self.review = review <NEW_LINE> self.poster_image_url = poster_image_url <NEW_LINE> self.trailer_youtube_url = trailer_youtube_url
Highest-level class; contains all universal attributes for any current or future media class (movies, TV shows, etc.). The dot operator allows future classes to instantiate categories.
625990333eb6a72ae038b75b
class Signed_Transaction(GrapheneSigned_Transaction): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Signed_Transaction, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def sign(self, wifkeys, chain="UTT"): <NEW_LINE> <INDENT> return super(Signed_Transaction, self).sign(wifkeys, chain) <NEW_LINE> <DEDENT> def verify(self, pubkeys=[], chain="UTT"): <NEW_LINE> <INDENT> return super(Signed_Transaction, self).verify(pubkeys, chain) <NEW_LINE> <DEDENT> def getOperationKlass(self): <NEW_LINE> <INDENT> return Operation <NEW_LINE> <DEDENT> def getKnownChains(self): <NEW_LINE> <INDENT> return known_chains
Create a signed transaction and offer method to create the signature :param num refNum: parameter ref_block_num (see ``getBlockParams``) :param num refPrefix: parameter ref_block_prefix (see ``getBlockParams``) :param str expiration: expiration date :param Array operations: array of operations
6259903326238365f5fadc47
class Td(Elem): <NEW_LINE> <INDENT> def __init__(self, content=None, attr=dict(), tag='td', tag_type='double'): <NEW_LINE> <INDENT> super().__init__(tag, attr, content, tag_type)
Constructor for the td table-cell element; see __init__() for the tag, attributes and content arguments.
62599033d4950a0f3b1116b8
class TableHead(Element): <NEW_LINE> <INDENT> def __init__(self, head, parent=None): <NEW_LINE> <INDENT> super(TableHead, self).__init__(head, parent=parent) <NEW_LINE> self.rows = [TableRow(tr, parent=self) for tr in head.findall('tr')]
This class maps to the `<thead>` element of the html table.
62599033cad5886f8bdc58f5
@ddt <NEW_LINE> class Test(unittest.TestCase): <NEW_LINE> <INDENT> @data( ([1, 1, 1], [1, 1, 1]), ([3, 3, 3], [1, 1, 1]), ([1, 0, 0], [1, 0, 0]), ([0, 1, 0], [0, 1, 0]), ([0, 0, 1], [0, 0, 1]), ([5, 0, 0], [1, 0, 0]), ([0, 5, 0], [0, 1, 0]), ([0, 0, 5], [0, 0, 1]), ([5, 4, 4], [1, 0, 0]), ([4, 5, 4], [0, 1, 0]), ([4, 4, 5], [0, 0, 1]), ([1, 1, 0], [0, 1, 0]), ([1, 2, 0], [0, 1, 0]), ([2, 1, 0], [0, 1, 0]), ([8, 8, 3], [0, 1, 0]), ([5, 8, 3], [0, 1, 0]), ([8, 5, 3], [0, 1, 0]), ([1, 0, 1], [1, 0, 0]), ([1, 0, 2], [1, 0, 0]), ([2, 0, 1], [1, 0, 0]), ([8, 3, 8], [1, 0, 0]), ([5, 3, 8], [1, 0, 0]), ([8, 3, 5], [1, 0, 0]), ([0, 1, 1], [0, 0, 1]), ([0, 1, 2], [0, 0, 1]), ([0, 2, 1], [0, 0, 1]), ([3, 8, 8], [0, 0, 1]), ([3, 5, 8], [0, 0, 1]), ([3, 8, 5], [0, 0, 1]) ) <NEW_LINE> @unpack <NEW_LINE> def testRule(self, mcs, result): <NEW_LINE> <INDENT> self.assertEqual(result, evaluate(mcs))
A list of move counts evaluates to a deterministic result: ([ROCKS, PAPERS, SCISSORS], [ROCK_WIN, PAPER_WIN, SCISSORS_WIN]).
6259903323e79379d538d5fe
class Path(): <NEW_LINE> <INDENT> def __init__(self, path=[]): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> <DEDENT> def append(self, coordinate): <NEW_LINE> <INDENT> self.path.append(coordinate) <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> return Path(list(self.path)) <NEW_LINE> <DEDENT> def last(self): <NEW_LINE> <INDENT> return self.path[-1] <NEW_LINE> <DEDENT> def length(self): <NEW_LINE> <INDENT> return len(self.path) <NEW_LINE> <DEDENT> def contains(self, coordinate): <NEW_LINE> <INDENT> for path_coordinate in self.path: <NEW_LINE> <INDENT> if path_coordinate == coordinate: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def count(self, coordinate): <NEW_LINE> <INDENT> count = 0 <NEW_LINE> for path_coordinate in self.path: <NEW_LINE> <INDENT> if path_coordinate == coordinate: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> <DEDENT> <DEDENT> return count <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '[{0}]'.format(' '.join([str(coordinate) for coordinate in self.path]))
An object wrapper for an array of Coordinate objects.
625990335e10d32532ce417d
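A usage sketch for the Path wrapper above, using plain (x, y) tuples as coordinates. Note the mutable default argument path=[]: passing a fresh list (or using clone()) avoids separate Path() instances sharing one underlying list.

p = Path([])
p.append((0, 0))
p.append((0, 1))
branch = p.clone()
branch.append((1, 1))
print(p.last(), p.length(), p.contains((0, 1)), branch.count((1, 1)))  # (0, 1) 2 True 1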
class LumiDataPoint(object): <NEW_LINE> <INDENT> def __init__(self, line, json_file_name=None): <NEW_LINE> <INDENT> line_split = line.split(",") <NEW_LINE> tmp = line_split[0].split(":") <NEW_LINE> self.run_number = int(tmp[0]) <NEW_LINE> self.fill_number = int(tmp[1]) <NEW_LINE> tmp = line_split[1].split(":") <NEW_LINE> self.ls = int(tmp[0]) <NEW_LINE> tmp = line_split[2] <NEW_LINE> self.timestamp = datetime.datetime.strptime(tmp, DATE_FMT_STR_LUMICALC) <NEW_LINE> scale_factor = 1.e6 <NEW_LINE> self.lum_del = scale_factor * float(line_split[5]) <NEW_LINE> self.lum_rec = scale_factor * float(line_split[6]) <NEW_LINE> if json_file_name: <NEW_LINE> <INDENT> addcertls = bool(checkCertification(self.run_number, self.ls)) <NEW_LINE> if addcertls: <NEW_LINE> <INDENT> self.lum_cert = scale_factor * float(line_split[6]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.lum_cert = 0. <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.lum_cert = 0.
Holds info from one line of lumiCalc lumibyls output.
625990338a43f66fc4bf327d
class DataSaveCommand(command.Command): <NEW_LINE> <INDENT> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(DataSaveCommand, self).get_parser(prog_name) <NEW_LINE> parser.add_argument("--file", metavar="<filename>", help="downloaded introspection data filename " "(default: stdout)") <NEW_LINE> parser.add_argument('node', help='baremetal node UUID or name') <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> client = self.app.client_manager.baremetal_introspection <NEW_LINE> data = client.get_data(parsed_args.node, raw=bool(parsed_args.file)) <NEW_LINE> if parsed_args.file: <NEW_LINE> <INDENT> with open(parsed_args.file, 'wb') as fp: <NEW_LINE> <INDENT> fp.write(data) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> json.dump(data, sys.stdout)
Save or display raw introspection data.
6259903350485f2cf55dc072
class Exercise1(object): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> if not os.path.exists(filename): <NEW_LINE> <INDENT> print >> sys.stderr, "%s doesn't exist" % filename <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.setup_display() <NEW_LINE> wait_period = int(1000.0/self.fps) <NEW_LINE> while True: <NEW_LINE> <INDENT> frame = cv.QueryFrame(self.cap) <NEW_LINE> if frame == None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> self.write_original(frame, self.pane1) <NEW_LINE> self.write_greyscale(frame, self.pane2) <NEW_LINE> self.write_canny(frame, self.pane3) <NEW_LINE> cv.ShowImage("Exercise1b", self.frame) <NEW_LINE> c = cv.WaitKey(wait_period) <NEW_LINE> if c == 27: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> del(self.cap) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> def setup_display(self): <NEW_LINE> <INDENT> self.cap = cv.CaptureFromFile(self.filename) <NEW_LINE> (self.width,self.height) = (int(cv.GetCaptureProperty(self.cap, cv.CV_CAP_PROP_FRAME_WIDTH)), int(cv.GetCaptureProperty(self.cap, cv.CV_CAP_PROP_FRAME_HEIGHT))) <NEW_LINE> self.fps = cv.GetCaptureProperty(self.cap, cv.CV_CAP_PROP_FPS) <NEW_LINE> cv.NamedWindow("Exercise1b", cv.CV_WINDOW_AUTOSIZE) <NEW_LINE> self.frame = cv.CreateImage((self.width*3,self.height), cv.IPL_DEPTH_8U, 3) <NEW_LINE> self.pane1 = cv.GetSubRect(self.frame, (0,0,self.width,self.height)) <NEW_LINE> self.pane2 = cv.GetSubRect(self.frame, (self.width,0,self.width,self.height)) <NEW_LINE> self.pane3 = cv.GetSubRect(self.frame, (self.width*2,0,self.width,self.height)) <NEW_LINE> <DEDENT> def write_original(self, frame, output): <NEW_LINE> <INDENT> cv.Copy(frame, output) <NEW_LINE> <DEDENT> def write_greyscale(self, orig, output): <NEW_LINE> <INDENT> temp1 = cv.CreateImage(cv.GetSize(orig), cv.IPL_DEPTH_8U, 1) <NEW_LINE> temp2 = cv.CreateImage(cv.GetSize(orig), cv.IPL_DEPTH_8U, 3) <NEW_LINE> cv.CvtColor(orig, temp1, cv2.COLOR_RGB2GRAY) <NEW_LINE> cv.CvtColor(temp1, temp2, cv2.COLOR_GRAY2RGB) <NEW_LINE> cv.Copy(temp2, output) <NEW_LINE> <DEDENT> def write_canny(self, orig, output): <NEW_LINE> <INDENT> temp1 = cv.CreateImage(cv.GetSize(orig), cv.IPL_DEPTH_8U, 1) <NEW_LINE> temp2 = cv.CreateImage(cv.GetSize(orig), cv.IPL_DEPTH_8U, 3) <NEW_LINE> canny = cv.CreateImage(cv.GetSize(orig), cv.IPL_DEPTH_8U, 1) <NEW_LINE> cv.CvtColor(orig, temp1, cv2.COLOR_RGB2GRAY) <NEW_LINE> cv.Canny(temp1, canny, 0.5, 1.0, 5) <NEW_LINE> cv.CvtColor(canny, temp2, cv2.COLOR_GRAY2RGB) <NEW_LINE> cv.Copy(temp2, output)
Read in a video, and perform some real-time video processing on it.
6259903321bff66bcd723d5b
class Interval(tuple): <NEW_LINE> <INDENT> security = ClassSecurityInfo() <NEW_LINE> def __new__(cls, start_date=None, end_date=None, all_day=False): <NEW_LINE> <INDENT> if start_date is not None and start_date > end_date: <NEW_LINE> <INDENT> raise ValueError('End time can not precede start time') <NEW_LINE> <DEDENT> if all_day: <NEW_LINE> <INDENT> start_date = datetime(start_date.year, start_date.month, start_date.day) <NEW_LINE> end_date = datetime(end_date.year, end_date.month, end_date.day) <NEW_LINE> <DEDENT> return tuple.__new__(cls, (start_date, end_date, all_day)) <NEW_LINE> <DEDENT> security.declarePublic('start_date') <NEW_LINE> @property <NEW_LINE> def start_date(self): <NEW_LINE> <INDENT> return self[0] <NEW_LINE> <DEDENT> security.declarePublic('end_date') <NEW_LINE> @property <NEW_LINE> def end_date(self): <NEW_LINE> <INDENT> return self[1] <NEW_LINE> <DEDENT> security.declarePublic('all_day') <NEW_LINE> @property <NEW_LINE> def all_day(self): <NEW_LINE> <INDENT> return self[2] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self == Interval(): <NEW_LINE> <INDENT> return "empty Interval()" <NEW_LINE> <DEDENT> data = { 'start_date': self.start_date.strftime("%d/%m/%Y"), 'end_date': self.end_date.strftime("%d/%m/%Y"), 'start_time': self.start_date.strftime("%H:%M"), 'end_time': self.end_date.strftime("%H:%M"), 'all_day': repr(self.all_day), } <NEW_LINE> return ("Interval:[%(start_date)s, %(start_time)s - " "%(end_date)s, %(end_time)s; All day: %(all_day)s]" % data)
Immutable type representing the interval between two datetimes, with an all_day option.
62599033287bf620b6272cde
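A usage sketch for the Interval type above (it assumes the class and its Zope ClassSecurityInfo dependency are importable):

from datetime import datetime

meeting = Interval(datetime(2024, 5, 1, 9, 30), datetime(2024, 5, 1, 11, 0))
print(meeting.start_date, meeting.end_date, meeting.all_day)

# With all_day=True both endpoints are truncated to midnight of their day.
holiday = Interval(datetime(2024, 5, 1, 9, 30), datetime(2024, 5, 2, 17, 0), all_day=True)
print(holiday.start_date)  # 2024-05-01 00:00:00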
class ManholeConnectionThread(_ORIGINAL_THREAD): <NEW_LINE> <INDENT> def __init__(self, client, locals, daemon=False): <NEW_LINE> <INDENT> super(ManholeConnectionThread, self).__init__() <NEW_LINE> self.daemon = daemon <NEW_LINE> self.client = client <NEW_LINE> self.name = "ManholeConnectionThread" <NEW_LINE> self.locals = locals <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> _LOG('Started ManholeConnectionThread thread. Checking credentials ...') <NEW_LINE> pthread_setname_np(self.ident, "Manhole ----") <NEW_LINE> pid, _, _ = self.check_credentials(self.client) <NEW_LINE> pthread_setname_np(self.ident, "Manhole %s" % pid) <NEW_LINE> self.handle(self.client, self.locals) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def check_credentials(client): <NEW_LINE> <INDENT> pid, uid, gid = get_peercred(client) <NEW_LINE> euid = os.geteuid() <NEW_LINE> client_name = "PID:%s UID:%s GID:%s" % (pid, uid, gid) <NEW_LINE> if uid not in (0, euid): <NEW_LINE> <INDENT> raise SuspiciousClient("Can't accept client with %s. It doesn't match the current EUID:%s or ROOT." % ( client_name, euid )) <NEW_LINE> <DEDENT> _LOG("Accepted connection %s from %s" % (client, client_name)) <NEW_LINE> return pid, uid, gid <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def handle(client, locals): <NEW_LINE> <INDENT> client.settimeout(None) <NEW_LINE> backup = [] <NEW_LINE> old_interval = getinterval() <NEW_LINE> patches = [('r', ('stdin', '__stdin__')), ('w', ('stdout', '__stdout__'))] <NEW_LINE> if _MANHOLE.redirect_stderr: <NEW_LINE> <INDENT> patches.append(('w', ('stderr', '__stderr__'))) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> client_fd = client.fileno() <NEW_LINE> for mode, names in patches: <NEW_LINE> <INDENT> for name in names: <NEW_LINE> <INDENT> backup.append((name, getattr(sys, name))) <NEW_LINE> setattr(sys, name, _ORIGINAL_FDOPEN(client_fd, mode, 1 if PY3 else 0)) <NEW_LINE> <DEDENT> <DEDENT> run_repl(locals) <NEW_LINE> _LOG("DONE.") <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> setinterval(2147483647) <NEW_LINE> client.close() <NEW_LINE> junk = [] <NEW_LINE> for name, fh in backup: <NEW_LINE> <INDENT> junk.append(getattr(sys, name)) <NEW_LINE> setattr(sys, name, fh) <NEW_LINE> <DEDENT> del backup <NEW_LINE> for fh in junk: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> fh.close() <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> del fh <NEW_LINE> <DEDENT> del junk <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> setinterval(old_interval) <NEW_LINE> _LOG("Cleaned up.") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> _LOG("ManholeConnectionThread thread failed:") <NEW_LINE> _LOG(traceback.format_exc())
Manhole thread that handles the connection. This thread is a normal thread (non-daemon) - it won't exit if the main thread exits.
625990333eb6a72ae038b75d
class ServicerContext(six.with_metaclass(abc.ABCMeta, RpcContext)): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def invocation_metadata(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def peer(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def peer_identities(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def peer_identity_key(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def auth_context(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def send_initial_metadata(self, initial_metadata): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def set_trailing_metadata(self, trailing_metadata): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def set_code(self, code): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def set_details(self, details): <NEW_LINE> <INDENT> raise NotImplementedError()
A context object passed to method implementations.
62599033d4950a0f3b1116b9
class _ElementNode(_Node): <NEW_LINE> <INDENT> def __init__(self, parent, tag, attributes): <NEW_LINE> <INDENT> _Node.__init__(self, attributes) <NEW_LINE> self.parent = parent <NEW_LINE> self.tag = tag <NEW_LINE> <DEDENT> def clone(self, parent=None): <NEW_LINE> <INDENT> newNode = _ElementNode(parent, self.tag, self.attributes.copy()) <NEW_LINE> self._cloneChildren(newNode) <NEW_LINE> return newNode <NEW_LINE> <DEDENT> def toText(self): <NEW_LINE> <INDENT> text = ['<%s' % self.tag] <NEW_LINE> attributes = self.attributes.items() <NEW_LINE> attributes.sort() <NEW_LINE> for attribute, value in attributes: <NEW_LINE> <INDENT> text.append(' %s="%s"' % (attribute, value)) <NEW_LINE> <DEDENT> childText = self.childrenToText() <NEW_LINE> if childText or self.tag in nonSelfClose: <NEW_LINE> <INDENT> text.append('>') <NEW_LINE> text.append(childText) <NEW_LINE> text.append('</%s>' % self.tag) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> text.append('/>') <NEW_LINE> <DEDENT> return ''.join(text)
A node representing an element in a document, with a `parent`, a `tag`, a dictionary of `attributes` and a list of `children`.
62599033be383301e025490a
@override_settings(SITEURL='http://localhost:8001/') <NEW_LINE> class GeoNodeGeoServerSync(GeoNodeLiveTestSupport): <NEW_LINE> <INDENT> port = 8001 <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(GeoNodeLiveTestSupport, self).setUp() <NEW_LINE> settings.OGC_SERVER['default']['GEOFENCE_SECURITY_ENABLED'] = True <NEW_LINE> <DEDENT> @on_ogc_backend(geoserver.BACKEND_PACKAGE) <NEW_LINE> @timeout_decorator.timeout(LOCAL_TIMEOUT) <NEW_LINE> def test_set_attributes_from_geoserver(self): <NEW_LINE> <INDENT> shp_file = os.path.join( gisdata.VECTOR_DATA, 'san_andres_y_providencia_poi.shp') <NEW_LINE> layer = file_upload(shp_file) <NEW_LINE> try: <NEW_LINE> <INDENT> for attribute in layer.attribute_set.all(): <NEW_LINE> <INDENT> attribute.attribute_label = f'{attribute.attribute}_label' <NEW_LINE> attribute.description = f'{attribute.attribute}_description' <NEW_LINE> attribute.save() <NEW_LINE> <DEDENT> set_attributes_from_geoserver(layer) <NEW_LINE> for attribute in layer.attribute_set.all(): <NEW_LINE> <INDENT> self.assertEqual( attribute.attribute_label, f'{attribute.attribute}_label' ) <NEW_LINE> self.assertEqual( attribute.description, f'{attribute.attribute}_description' ) <NEW_LINE> <DEDENT> links = Link.objects.filter(resource=layer.resourcebase_ptr) <NEW_LINE> self.assertIsNotNone(links) <NEW_LINE> self.assertTrue(len(links) > 7) <NEW_LINE> original_data_links = [ll for ll in links if 'original' == ll.link_type] <NEW_LINE> self.assertEqual(len(original_data_links), 1) <NEW_LINE> resp = self.client.get(original_data_links[0].url) <NEW_LINE> self.assertEqual(resp.status_code, 200) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> layer.delete()
Tests GeoNode/GeoServer synchronization.
625990339b70327d1c57fe7b
class CCPasswordField(CCField): <NEW_LINE> <INDENT> TYPE = 8 <NEW_LINE> password = "" <NEW_LINE> def __init__(self, password): <NEW_LINE> <INDENT> if __debug__: <NEW_LINE> <INDENT> if len(password) > 9 or len(password) < 4: <NEW_LINE> <INDENT> raise AssertionError("Password must be from 4 to 9 characters in length. Password passed is '"+password+"'") <NEW_LINE> <DEDENT> <DEDENT> self.type_val = CCPasswordField.TYPE <NEW_LINE> self.password = password <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return_str = " Password Field (type=8)\n" <NEW_LINE> return_str += " password = '"+str(self.password)+"'" <NEW_LINE> return return_str <NEW_LINE> <DEDENT> @property <NEW_LINE> def json_data(self): <NEW_LINE> <INDENT> json_field = {} <NEW_LINE> json_field["type"] = self.type_val <NEW_LINE> json_field["password"] = self.password <NEW_LINE> return json_field <NEW_LINE> <DEDENT> @property <NEW_LINE> def byte_data(self): <NEW_LINE> <INDENT> password_bytes = b"" <NEW_LINE> password_bytes += self.password.encode("ascii") <NEW_LINE> password_bytes + b'\x00' <NEW_LINE> return password_bytes
A class defining an unencoded password. Member vars: type_val (int): the type identifier of this class (set to 8); password (string): the password string, 4 to 9 characters long.
625990336e29344779b01747
class CP2K_Result(Result): <NEW_LINE> <INDENT> def __init__(self, settings, molecule, job_name, plams_dir, work_dir=None, properties=package_properties['cp2k'], status='successful', warnings=None): <NEW_LINE> <INDENT> super().__init__(settings, molecule, job_name, plams_dir, work_dir=work_dir, properties=properties, status=status, warnings=warnings) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, settings, molecule, job_name, archive, status, warnings): <NEW_LINE> <INDENT> plams_dir, work_dir = list(map(archive.get, ["plams_dir", "work_dir"])) <NEW_LINE> return CP2K_Result(settings, molecule, job_name, plams_dir.path, work_dir=work_dir, properties=package_properties['cp2k'], status=status, warnings=warnings)
Class providing access to CP2K result.
62599033ec188e330fdf998b
class HuaweiHVSFCDriver(driver.FibreChannelDriver): <NEW_LINE> <INDENT> VERSION = '1.0.0' <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(HuaweiHVSFCDriver, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def do_setup(self, context): <NEW_LINE> <INDENT> self.common = HVSCommon(configuration=self.configuration) <NEW_LINE> self.common.login() <NEW_LINE> <DEDENT> def check_for_setup_error(self): <NEW_LINE> <INDENT> self.common._check_conf_file() <NEW_LINE> <DEDENT> def create_volume(self, volume): <NEW_LINE> <INDENT> self.common.create_volume(volume) <NEW_LINE> <DEDENT> def create_volume_from_snapshot(self, volume, snapshot): <NEW_LINE> <INDENT> self.common.create_volume_from_snapshot(volume, snapshot) <NEW_LINE> <DEDENT> def create_cloned_volume(self, volume, src_vref): <NEW_LINE> <INDENT> self.common.create_cloned_volume(volume, src_vref) <NEW_LINE> <DEDENT> def extend_volume(self, volume, new_size): <NEW_LINE> <INDENT> self.common.extend_volume(volume, new_size) <NEW_LINE> <DEDENT> def delete_volume(self, volume): <NEW_LINE> <INDENT> self.common.delete_volume(volume) <NEW_LINE> <DEDENT> def create_snapshot(self, snapshot): <NEW_LINE> <INDENT> self.common.create_snapshot(snapshot) <NEW_LINE> <DEDENT> def delete_snapshot(self, snapshot): <NEW_LINE> <INDENT> self.common.delete_snapshot(snapshot) <NEW_LINE> <DEDENT> def get_volume_stats(self, refresh=False): <NEW_LINE> <INDENT> data = self.common.update_volume_stats(refresh) <NEW_LINE> backend_name = self.configuration.safe_get('volume_backend_name') <NEW_LINE> data['volume_backend_name'] = backend_name or self.__class__.__name__ <NEW_LINE> data['storage_protocol'] = 'FC' <NEW_LINE> data['driver_version'] = self.VERSION <NEW_LINE> return data <NEW_LINE> <DEDENT> @fczm_utils.AddFCZone <NEW_LINE> def initialize_connection(self, volume, connector): <NEW_LINE> <INDENT> return self.common.initialize_connection_fc(volume, connector) <NEW_LINE> <DEDENT> @fczm_utils.RemoveFCZone <NEW_LINE> def terminate_connection(self, volume, connector, **kwargs): <NEW_LINE> <INDENT> self.common.terminate_connection(volume, connector, **kwargs) <NEW_LINE> <DEDENT> def create_export(self, context, volume): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ensure_export(self, context, volume): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def remove_export(self, context, volume): <NEW_LINE> <INDENT> pass
FC driver for Huawei OceanStor HVS storage arrays.
625990338c3a8732951f764f
class Affiliations(models.Model): <NEW_LINE> <INDENT> contact = models.ForeignKey(Contact, on_delete=models.CASCADE) <NEW_LINE> organization = models.CharField(max_length=50) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.organization
This model defines the affiliations of a contact that a user owns.
625990336fece00bbacccaa5
class Settings(object): <NEW_LINE> <INDENT> def __init__(self, screen_width=1200, screen_height=800, bg_color=(230, 230, 230), ship_speed_factor=2.5, bullet_width=3, bullet_height=15, bullet_color=(60, 60, 60), bullet_speed_factor=3, bullet_num_limit=5): <NEW_LINE> <INDENT> self.screen_width = screen_width <NEW_LINE> self.screen_height = screen_height <NEW_LINE> self.bg_color = bg_color <NEW_LINE> self.ship_speed_factor = ship_speed_factor <NEW_LINE> self.bullet_width = bullet_width <NEW_LINE> self.bullet_height = bullet_height <NEW_LINE> self.bullet_color = bullet_color <NEW_LINE> self.bullet_speed_factor = bullet_speed_factor <NEW_LINE> self.bullet_num_limit = bullet_num_limit
Class to store all settings for the game
62599033be8e80087fbc0174
class User(Base): <NEW_LINE> <INDENT> __tablename__ = 'users' <NEW_LINE> username = db.Column(db.String(64), index=True) <NEW_LINE> password_hash = db.Column(db.String(128)) <NEW_LINE> LoggedIn = db.Column(db.Boolean, default=True) <NEW_LINE> bucket = db.relationship('BucketList', backref='owner', lazy='dynamic') <NEW_LINE> def to_json(self): <NEW_LINE> <INDENT> return { "id": self.id, "username": self.username, "date_created": self.date_created.strftime("%Y-%m-%d %H:%M:%S"), "LoggedIn": self.LoggedIn } <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<User(username='%s', email='%s')>" % (self.username, self.email) <NEW_LINE> <DEDENT> def hash_password(self, password): <NEW_LINE> <INDENT> self.password_hash = pwd_context.encrypt(password) <NEW_LINE> <DEDENT> def verify_password(self, password): <NEW_LINE> <INDENT> return pwd_context.verify(password, self.password_hash) <NEW_LINE> <DEDENT> def generate_auth_token(self, expiration=86400): <NEW_LINE> <INDENT> s = Serializer(app.config['SECRET_KEY'], expires_in=expiration) <NEW_LINE> return s.dumps({'id': self.id}) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def verify_auth_token(token): <NEW_LINE> <INDENT> s = Serializer(app.config['SECRET_KEY']) <NEW_LINE> try: <NEW_LINE> <INDENT> data = s.loads(token) <NEW_LINE> <DEDENT> except SignatureExpired: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> except BadSignature: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> g.user = User.query.get(data['id']) <NEW_LINE> return g.user
User table defined for user authentication.
625990338a349b6b43687336
class Stage(models.Model): <NEW_LINE> <INDENT> name = models.CharField( 128, verbose_name=_('Stage Name'), null=False, translate=True ) <NEW_LINE> sequence = models.IntegerField( verbose_name=_('Sequence'), default=1, help_text="Used to order stages. Lower is better." ) <NEW_LINE> probability = models.FloatField( verbose_name=_('Probability (%)'), null=False, default=10.0, help_text="This percentage depicts the default/average probability of the Case for this stage to be a success") <NEW_LINE> on_change = models.BooleanField( verbose_name=_('Change Probability Automatically'), help_text="Setting this stage will change the probability automatically on the opportunity.") <NEW_LINE> requirements = models.TextField( verbose_name=_('Requirements'), help_text="Enter here the internal requirements for this stage (ex: Offer sent to customer). It will appear as a tooltip over the stage's name." ) <NEW_LINE> team_id = models.ForeignKey( 'crm.team', verbose_name='Sales Team', ondelete=models.CASCADE, help_text='Specific team that uses this stage. Other teams will not be able to see or use this stage.' ) <NEW_LINE> legend_priority = models.TextField( verbose_name=_('Priority Management Explanation'), translate=True, help_text='Explanation text to help users using the star and priority mechanism on stages or issues that are in this stage.' ) <NEW_LINE> fold = models.BooleanField( verbose_name=_('Folded in Pipeline'), help_text='This stage is folded in the kanban view when there are no records in that stage to display.' ) <NEW_LINE> team_count = models.IntegerField('team_count', compute='_compute_team_count') <NEW_LINE> @api.record <NEW_LINE> def default_get(self, fields): <NEW_LINE> <INDENT> ctx = dict(self.env.context) <NEW_LINE> if ctx.get('default_team_id') and not ctx.get('crm_team_mono'): <NEW_LINE> <INDENT> ctx.pop('default_team_id') <NEW_LINE> <DEDENT> return super(Stage, self.with_context(ctx)).default_get(fields) <NEW_LINE> <DEDENT> @api.records <NEW_LINE> def _compute_team_count(self): <NEW_LINE> <INDENT> for stage in self: <NEW_LINE> <INDENT> stage.team_count = self.env['crm.team'].search_count([]) <NEW_LINE> <DEDENT> <DEDENT> class Meta: <NEW_LINE> <INDENT> name = "crm.stage" <NEW_LINE> verbose_name = "CRM Stages" <NEW_LINE> ordering = ('sequence', 'name', 'id')
Model for case stages. This models the main stages of a document management flow. Main CRM objects (leads, opportunities, project issues, ...) will now use only stages, instead of state and stages. Stages are for example used to display the kanban view of records.
62599033c432627299fa40ee
class cached_property(lazy_property): <NEW_LINE> <INDENT> resettable = True
A computed property whose value is computed once and cached, but can be reset. This is a variant of :class:`lazy_property` that has the :attr:`~custom_property.resettable` option enabled by default.
62599033796e427e5384f874
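For comparison only (this is the standard library, not the custom_property/lazy_property API above): functools.cached_property gives similar compute-once caching, with `del obj.attr` acting as the reset:

from functools import cached_property

class Circle:
    def __init__(self, radius):
        self.radius = radius

    @cached_property
    def area(self):
        print("computing ...")
        return 3.141592653589793 * self.radius ** 2

c = Circle(2.0)
print(c.area)   # computes and caches
print(c.area)   # served from the cache
del c.area      # clears the cached value; the next access recomputes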
class ModifyClusterAttributeResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ProjectId = None <NEW_LINE> self.ClusterName = None <NEW_LINE> self.ClusterDesc = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ProjectId = params.get("ProjectId") <NEW_LINE> self.ClusterName = params.get("ClusterName") <NEW_LINE> self.ClusterDesc = params.get("ClusterDesc") <NEW_LINE> self.RequestId = params.get("RequestId")
ModifyClusterAttribute response structure.
62599033d6c5a102081e321d
class JoinedRoomsError(ErrorResponse): <NEW_LINE> <INDENT> pass
A response representing an unsuccessful joined rooms query.
6259903366673b3332c314ea
class FieldsOnCorrectTypeRule(ValidationRule): <NEW_LINE> <INDENT> def enter_field(self, node: FieldNode, *_args: Any) -> None: <NEW_LINE> <INDENT> type_ = self.context.get_parent_type() <NEW_LINE> if not type_: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> field_def = self.context.get_field_def() <NEW_LINE> if field_def: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> schema = self.context.schema <NEW_LINE> field_name = node.name.value <NEW_LINE> suggestion = did_you_mean( get_suggested_type_names(schema, type_, field_name), "to use an inline fragment on", ) <NEW_LINE> if not suggestion: <NEW_LINE> <INDENT> suggestion = did_you_mean(get_suggested_field_names(type_, field_name)) <NEW_LINE> <DEDENT> self.report_error( GraphQLError( f"Cannot query field '{field_name}' on type '{type_}'." + suggestion, node, ) )
Fields on correct type A GraphQL document is only valid if all fields selected are defined by the parent type, or are an allowed meta field such as ``__typename``. See https://spec.graphql.org/draft/#sec-Field-Selections
6259903376d4e153a661daed
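A hedged sketch of exercising this rule through graphql-core's public validate() helper (assumes graphql-core 3.x):

from graphql import build_schema, parse, validate
from graphql.validation import FieldsOnCorrectTypeRule

schema = build_schema("""
type Query {
  name: String
}
""")
errors = validate(schema, parse("{ nme }"), rules=[FieldsOnCorrectTypeRule])
for error in errors:
    print(error.message)  # suggests the correctly spelled field via did_you_mean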
class Addition(BinaryNode): <NEW_LINE> <INDENT> def __init__(self, child1, child2): <NEW_LINE> <INDENT> super(Addition, self).__init__(child1, child2) <NEW_LINE> self.add_child(child1) <NEW_LINE> self.add_child(child2) <NEW_LINE> self.in_vars = child1.in_vars + child2.in_vars <NEW_LINE> self.out_vars = child1.out_vars + child2.out_vars <NEW_LINE> self.name = '(' + child1.name + ')+(' + child2.name + ')'
A class for storing STL Addition nodes. Inherits BinaryNode.
625990336e29344779b01749
class Flower: <NEW_LINE> <INDENT> def __init__(self, name, petals, price): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._petals = petals <NEW_LINE> self._price = price <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def set_name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> def get_petals(self): <NEW_LINE> <INDENT> return self._petals <NEW_LINE> <DEDENT> def set_petals(self, petals): <NEW_LINE> <INDENT> self._petals = petals <NEW_LINE> <DEDENT> def get_price(self): <NEW_LINE> <INDENT> return self._price <NEW_LINE> <DEDENT> def set_price(self, price): <NEW_LINE> <INDENT> self._price = price
Represents a flower in a flower shop
6259903373bcbd0ca4bcb37f
@with_input_types(T) <NEW_LINE> @with_output_types(List[T]) <NEW_LINE> class SampleCombineFn(core.CombineFn): <NEW_LINE> <INDENT> def __init__(self, n): <NEW_LINE> <INDENT> super(SampleCombineFn, self).__init__() <NEW_LINE> self._top_combiner = TopCombineFn(n) <NEW_LINE> <DEDENT> def create_accumulator(self): <NEW_LINE> <INDENT> return self._top_combiner.create_accumulator() <NEW_LINE> <DEDENT> def add_input(self, heap, element): <NEW_LINE> <INDENT> return self._top_combiner.add_input(heap, (random.random(), element)) <NEW_LINE> <DEDENT> def merge_accumulators(self, heaps): <NEW_LINE> <INDENT> return self._top_combiner.merge_accumulators(heaps) <NEW_LINE> <DEDENT> def compact(self, heap): <NEW_LINE> <INDENT> return self._top_combiner.compact(heap) <NEW_LINE> <DEDENT> def extract_output(self, heap): <NEW_LINE> <INDENT> return [e for _, e in self._top_combiner.extract_output(heap)]
CombineFn for all Sample transforms.
625990335e10d32532ce417f
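A hedged usage sketch of the public Sample transform that this CombineFn backs (assumes apache-beam is installed; the combiner is normally reached via beam.combiners rather than used directly):

import apache_beam as beam

with beam.Pipeline() as p:
    (p
     | beam.Create(range(100))
     | beam.combiners.Sample.FixedSizeGlobally(5)  # emits one list of 5 randomly sampled elements
     | beam.Map(print))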
class BillDetailView(DetailView): <NEW_LINE> <INDENT> model = Bill <NEW_LINE> context_object_name = "bill" <NEW_LINE> template_name = 'legislative/bill_detail.html' <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> if self.kwargs['id']: <NEW_LINE> <INDENT> return Bill.objects.get(id=self.kwargs['id']) <NEW_LINE> <DEDENT> return super(BillDetailView, self).get_object()
View showing detail information about an individual bill.
62599033ec188e330fdf998d
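A hedged wiring sketch (a hypothetical urls.py): the view reads self.kwargs['id'], so its URL pattern needs to capture an id keyword argument:

from django.urls import path

urlpatterns = [
    path("bills/<int:id>/", BillDetailView.as_view(), name="bill-detail"),
]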
class SpaceTimePointwiseStateObservation(Misfit): <NEW_LINE> <INDENT> def __init__(self, Vh, observation_times, targets, data=None, noise_variance=None): <NEW_LINE> <INDENT> self.Vh = Vh <NEW_LINE> self.observation_times = observation_times <NEW_LINE> self.B = assemblePointwiseObservation(self.Vh, targets) <NEW_LINE> self.ntargets = targets <NEW_LINE> if data is None: <NEW_LINE> <INDENT> self.data = TimeDependentVector(observation_times) <NEW_LINE> self.data.initialize(self.B, 0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> self.noise_variance = noise_variance <NEW_LINE> self.u_snapshot = dl.Vector() <NEW_LINE> self.Bu_snapshot = dl.Vector() <NEW_LINE> self.data_snapshot = dl.Vector() <NEW_LINE> self.B.init_vector(self.u_snapshot, 1) <NEW_LINE> self.B.init_vector(self.Bu_snapshot, 0) <NEW_LINE> self.B.init_vector(self.data_snapshot, 0) <NEW_LINE> <DEDENT> def observe(self, x, obs): <NEW_LINE> <INDENT> obs.zero() <NEW_LINE> for t in self.observation_times: <NEW_LINE> <INDENT> x[STATE].retrieve(self.u_snapshot, t) <NEW_LINE> self.B.mult(self.u_snapshot, self.Bu_snapshot) <NEW_LINE> obs.store(self.Bu_snapshot, t) <NEW_LINE> <DEDENT> <DEDENT> def cost(self, x): <NEW_LINE> <INDENT> c = 0 <NEW_LINE> for t in self.observation_times: <NEW_LINE> <INDENT> x[STATE].retrieve(self.u_snapshot, t) <NEW_LINE> self.B.mult(self.u_snapshot, self.Bu_snapshot) <NEW_LINE> self.data.retrieve(self.data_snapshot, t) <NEW_LINE> self.Bu_snapshot.axpy(-1., self.data_snapshot) <NEW_LINE> c += self.Bu_snapshot.inner(self.Bu_snapshot) <NEW_LINE> <DEDENT> return c/(2.*self.noise_variance) <NEW_LINE> <DEDENT> def grad(self, i, x, out): <NEW_LINE> <INDENT> out.zero() <NEW_LINE> if i == STATE: <NEW_LINE> <INDENT> for t in self.observation_times: <NEW_LINE> <INDENT> x[STATE].retrieve(self.u_snapshot, t) <NEW_LINE> self.B.mult(self.u_snapshot, self.Bu_snapshot) <NEW_LINE> self.data.retrieve(self.data_snapshot, t) <NEW_LINE> self.Bu_snapshot.axpy(-1., self.data_snapshot) <NEW_LINE> self.Bu_snapshot *= 1./self.noise_variance <NEW_LINE> self.B.transpmult(self.Bu_snapshot, self.u_snapshot) <NEW_LINE> out.store(self.u_snapshot, t) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def setLinearizationPoint(self, x, gauss_newton_approx=False): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def apply_ij(self, i, j, direction, out): <NEW_LINE> <INDENT> out.zero() <NEW_LINE> if i == STATE and j == STATE: <NEW_LINE> <INDENT> for t in self.observation_times: <NEW_LINE> <INDENT> direction.retrieve(self.u_snapshot, t) <NEW_LINE> self.B.mult(self.u_snapshot, self.Bu_snapshot) <NEW_LINE> self.Bu_snapshot *= 1./self.noise_variance <NEW_LINE> self.B.transpmult(self.Bu_snapshot, self.u_snapshot) <NEW_LINE> out.store(self.u_snapshot, t) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pass
A class that represents observations in time and space. More information regarding the base class: https://hippylib.readthedocs.io/en/2.0.0/_modules/hippylib/modeling/misfit.html Inputs: observation_times - array of times at which to make observations; targets - array of spatial coordinates representing observation points; data - input time-dependent vector representing data; noise_variance - measurement noise.
62599033d99f1b3c44d0679c
class p4_field_list_calculation (p4_object): <NEW_LINE> <INDENT> required_attributes = ["name", "input", "algorithm", "output_width"] <NEW_LINE> allowed_attributes = required_attributes + ["doc"] <NEW_LINE> def __init__ (self, hlir, name, **kwargs): <NEW_LINE> <INDENT> p4_object.__init__(self, hlir, name, **kwargs) <NEW_LINE> if not self.valid_obj: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.algorithm not in { "xor16", "csum16", "crc16", "crc32", "programmable_crc", "user_defined" }: <NEW_LINE> <INDENT> raise p4_compiler_msg ( "Reference to unsupported algorithm '"+self.algorithm+"'", self.filename, self.lineno ) <NEW_LINE> <DEDENT> hlir.p4_field_list_calculations[self.name] = self <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_from_hlir(hlir, name): <NEW_LINE> <INDENT> return hlir.p4_field_list_calculations[name] <NEW_LINE> <DEDENT> def build (self, hlir): <NEW_LINE> <INDENT> for idx, field_list_name in enumerate(self.input): <NEW_LINE> <INDENT> self.input[idx] = hlir.p4_field_lists[field_list_name]
TODO
6259903391af0d3eaad3af28
class Driver(object, metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> def __init__(self, conf, topics, transport): <NEW_LINE> <INDENT> self.conf = conf <NEW_LINE> self.topics = topics <NEW_LINE> self.transport = transport <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def notify(self, ctxt, msg, priority, retry): <NEW_LINE> <INDENT> pass
Base driver for Notifications
62599033796e427e5384f876
class BinaryAttributeTestCase(TestCase): <NEW_LINE> <INDENT> def test_binary_attribute(self): <NEW_LINE> <INDENT> attr = BinaryAttribute() <NEW_LINE> self.assertIsNotNone(attr) <NEW_LINE> self.assertEqual(attr.attr_type, BINARY) <NEW_LINE> attr = BinaryAttribute(default=b'foo') <NEW_LINE> self.assertEqual(attr.default, b'foo') <NEW_LINE> <DEDENT> def test_binary_round_trip(self): <NEW_LINE> <INDENT> attr = BinaryAttribute() <NEW_LINE> value = b'foo' <NEW_LINE> serial = attr.serialize(value) <NEW_LINE> self.assertEqual(attr.deserialize(serial), value) <NEW_LINE> <DEDENT> def test_binary_serialize(self): <NEW_LINE> <INDENT> attr = BinaryAttribute() <NEW_LINE> serial = b64encode(b'foo').decode(DEFAULT_ENCODING) <NEW_LINE> self.assertEqual(attr.serialize(b'foo'), serial) <NEW_LINE> <DEDENT> def test_binary_deserialize(self): <NEW_LINE> <INDENT> attr = BinaryAttribute() <NEW_LINE> serial = b64encode(b'foo').decode(DEFAULT_ENCODING) <NEW_LINE> self.assertEqual(attr.deserialize(serial), b'foo') <NEW_LINE> <DEDENT> def test_binary_set_serialize(self): <NEW_LINE> <INDENT> attr = BinarySetAttribute() <NEW_LINE> self.assertEqual(attr.attr_type, BINARY_SET) <NEW_LINE> self.assertEqual( attr.serialize({b'foo', b'bar'}), [b64encode(val).decode(DEFAULT_ENCODING) for val in sorted({b'foo', b'bar'})]) <NEW_LINE> self.assertEqual(attr.serialize(None), None) <NEW_LINE> <DEDENT> def test_binary_set_round_trip(self): <NEW_LINE> <INDENT> attr = BinarySetAttribute() <NEW_LINE> value = {b'foo', b'bar'} <NEW_LINE> serial = attr.serialize(value) <NEW_LINE> self.assertEqual(attr.deserialize(serial), value) <NEW_LINE> <DEDENT> def test_binary_set_deserialize(self): <NEW_LINE> <INDENT> attr = BinarySetAttribute() <NEW_LINE> value = {b'foo', b'bar'} <NEW_LINE> self.assertEqual( attr.deserialize([b64encode(val).decode(DEFAULT_ENCODING) for val in sorted(value)]), value ) <NEW_LINE> <DEDENT> def test_binary_set_attribute(self): <NEW_LINE> <INDENT> attr = BinarySetAttribute() <NEW_LINE> self.assertIsNotNone(attr) <NEW_LINE> attr = BinarySetAttribute(default={b'foo', b'bar'}) <NEW_LINE> self.assertEqual(attr.default, {b'foo', b'bar'})
Tests binary attributes
62599033d4950a0f3b1116bb
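A self-contained sketch of the base64 round trip these attribute tests exercise:

from base64 import b64encode, b64decode

serial = b64encode(b"foo").decode("utf-8")
print(serial)             # 'Zm9v'
print(b64decode(serial))  # b'foo'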
class NORDIC_BLE(Packet): <NEW_LINE> <INDENT> name = "BTLE Nordic info header" <NEW_LINE> fields_desc = [ ByteField("board", 0), LEShortField("payload_len", None), ByteField("protocol", 0), LEShortField("packet_counter", 0), ByteField("packet_id", 0), ByteField("packet_len", 10), ByteField("flags", 0), ByteField("channel", 0), ByteField("rssi", 0), LEShortField("event_counter", 0), LEIntField("delta_time", 0), ] <NEW_LINE> def post_build(self, p, pay): <NEW_LINE> <INDENT> if self.payload_len is None: <NEW_LINE> <INDENT> p = p[:1] + chb(len(pay) + 10) + p[2:] <NEW_LINE> <DEDENT> return p + pay
Cooked Nordic BTLE link-layer pseudoheader.
6259903315baa72349463094
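A hedged build/dissect sketch (it assumes the NORDIC_BLE class above is importable alongside scapy): post_build fills payload_len as the payload length plus the 10-byte packet_len default:

from scapy.packet import Raw

pkt = NORDIC_BLE(channel=38) / Raw(b"\x01\x02\x03")
raw = bytes(pkt)
print(NORDIC_BLE(raw).payload_len)  # 3 payload bytes + 10 -> 13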
class CoviseMsgLoopAction: <NEW_LINE> <INDENT> runType_=None <NEW_LINE> def __init__(self, name, type, desc=''): <NEW_LINE> <INDENT> self.__desc = desc <NEW_LINE> if not isinstance(type,tuple): <NEW_LINE> <INDENT> self.__type = (type,) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__type = type <NEW_LINE> <DEDENT> self.__name = name <NEW_LINE> <DEDENT> def run(self, param=None): <NEW_LINE> <INDENT> print("CoviseMsgLoopAction.run(): redefine for specific functionality") <NEW_LINE> <DEDENT> def type(self): return self.__type <NEW_LINE> def name(self): return self.__name <NEW_LINE> def desc(self): return self.__desc <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> ret='CoviseMsgLoopAction: type: ' <NEW_LINE> for t in self.__type: <NEW_LINE> <INDENT> ret += str(t)+' ' <NEW_LINE> <DEDENT> ret += (' name: %s, description: %s.'% (self.__name, self.__desc)) <NEW_LINE> return ret
Base class for actions triggered after receiving a covise message. You can register a CoviseMsgLoopAction in CoviseMsgLoop(); the run method of the action is then called whenever a covise message of type type occurs. Central is the type of the covise message. Find type numbers for covise messages in the file $COVISEDIR/src/kernel/covise/covise_msg.h, in the definition of enum covise_msg_type.
6259903376d4e153a661daee
class UserInfoView(LoginRequiredMixin, View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> address = Address.objects.get_default_address(user) <NEW_LINE> from django_redis import get_redis_connection <NEW_LINE> conn = get_redis_connection('default') <NEW_LINE> history_key = 'history_%d' % user.id <NEW_LINE> sku_ids = conn.lrange(history_key, 0, 4) <NEW_LINE> goods_list = [] <NEW_LINE> for id in sku_ids: <NEW_LINE> <INDENT> goods = GoodsSKU.objects.get(id=id) <NEW_LINE> goods_list.append(goods) <NEW_LINE> <DEDENT> context = {'page': 'user', 'address': address, 'goods_list': goods_list} <NEW_LINE> return render(request, 'user_center_info.html', context)
User center - information page.
625990336e29344779b0174b
class YamlLoader(Loader): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(YamlLoader, self).__init__() <NEW_LINE> <DEDENT> def parse_file(self, file_content): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return yaml.load(file_content) <NEW_LINE> <DEDENT> except yaml.scanner.ScannerError as e: <NEW_LINE> <INDENT> raise I18nFileLoadError("invalid YAML: {0}".format(e.strerror))
Class to load YAML files.
625990335e10d32532ce4180
class AbstractDrone(MSONable, metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def assimilate(self, path): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_valid_paths(self, path): <NEW_LINE> <INDENT> return
Abstract drone class that defines the various methods that must be implemented by drones. Because of the quirky nature of Python's multiprocessing, the intermediate data representations have to be in the form of Python primitives. So all objects that drones work with must be MSONable. All drones must also implement the standard MSONable as_dict() and from_dict API.
6259903330c21e258be99908
class Solution: <NEW_LINE> <INDENT> def isValidBST(self, root): <NEW_LINE> <INDENT> minNode, maxNode, isValid = self.divideConquer(root) <NEW_LINE> return isValid <NEW_LINE> <DEDENT> def divideConquer(self, root): <NEW_LINE> <INDENT> if root == None: <NEW_LINE> <INDENT> return None, None, True <NEW_LINE> <DEDENT> leftMinNode, leftMaxNode, leftValid = self.divideConquer(root.left) <NEW_LINE> rightMinNode, rightMaxNode, rightValid = self.divideConquer(root.right) <NEW_LINE> if not leftValid or not rightValid: <NEW_LINE> <INDENT> return None, None, False <NEW_LINE> <DEDENT> if leftMaxNode != None and leftMaxNode.val >= root.val: <NEW_LINE> <INDENT> return None, None, False <NEW_LINE> <DEDENT> if rightMinNode != None and rightMinNode.val <= root.val: <NEW_LINE> <INDENT> return None, None, False <NEW_LINE> <DEDENT> minNode = leftMinNode if leftMinNode != None else root <NEW_LINE> maxNode = rightMaxNode if rightMaxNode != None else root <NEW_LINE> return minNode, maxNode, True
@param root: The root of the binary tree. @return: True if the binary tree is a BST, false otherwise
62599033c432627299fa40f2
class NaturalOrderGroup(click.Group): <NEW_LINE> <INDENT> def list_commands(self, ctx): <NEW_LINE> <INDENT> return self.commands.keys()
Command group that lists subcommands in the order they were added. Make sure you initialize `self.commands` with an OrderedDict instance. With the decorator, use:: @click.group(cls=NaturalOrderGroup, commands=OrderedDict())
6259903391af0d3eaad3af2a
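A runnable sketch of the decorator usage from the docstring above, assuming the click package is installed; the group and command names below are placeholders. Running `python cli.py --help` should list `zebra` before `apple`, i.e. in definition order rather than alphabetically:

from collections import OrderedDict

import click


class NaturalOrderGroup(click.Group):
    # Same idea as the record: report commands in insertion order.
    def list_commands(self, ctx):
        return self.commands.keys()


@click.group(cls=NaturalOrderGroup, commands=OrderedDict())
def cli():
    """Subcommands are listed in the order they were added."""


@cli.command()
def zebra():
    """Defined first, listed first."""


@cli.command()
def apple():
    """Defined second, listed second."""


if __name__ == "__main__":
    cli()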
class MainViewHandler(webapp2.RequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> test_run = self.request.get('test_run') <NEW_LINE> bucket = gs_bucket.GoogleCloudStorageBucket(constants.BUCKET) <NEW_LINE> ispy = ispy_utils.ISpyUtils(bucket) <NEW_LINE> if test_run: <NEW_LINE> <INDENT> self._GetForTestRun(test_run, ispy) <NEW_LINE> return <NEW_LINE> <DEDENT> self._GetAllTestRuns(ispy) <NEW_LINE> <DEDENT> def _GetAllTestRuns(self, ispy): <NEW_LINE> <INDENT> template = JINJA.get_template('list_view.html') <NEW_LINE> data = {} <NEW_LINE> test_runs = set([path.lstrip('/').split('/')[1] for path in ispy.GetAllPaths('failures/')]) <NEW_LINE> base_url = '/?test_run=%s' <NEW_LINE> data['links'] = [(test_run, base_url % test_run) for test_run in test_runs] <NEW_LINE> self.response.write(template.render(data)) <NEW_LINE> <DEDENT> def _GetForTestRun(self, test_run, ispy): <NEW_LINE> <INDENT> paths = set([path for path in ispy.GetAllPaths('failures/' + test_run) if path.endswith('actual.png')]) <NEW_LINE> rows = [self._CreateRow(test_run, path, ispy) for path in paths] <NEW_LINE> if rows: <NEW_LINE> <INDENT> def _Sorter(a, b): <NEW_LINE> <INDENT> return cmp(b['percent_different'], a['percent_different']) <NEW_LINE> <DEDENT> template = JINJA.get_template('main_view.html') <NEW_LINE> self.response.write( template.render({'comparisons': sorted(rows, _Sorter), 'test_run': test_run})) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> template = JINJA.get_template('empty_view.html') <NEW_LINE> self.response.write(template.render()) <NEW_LINE> <DEDENT> <DEDENT> def _CreateRow(self, test_run, path, ispy): <NEW_LINE> <INDENT> res = {} <NEW_LINE> res['expectation'] = path.lstrip('/').split('/')[2] <NEW_LINE> res['test_run'] = test_run <NEW_LINE> res['info'] = json.loads(ispy.cloud_bucket.DownloadFile( ispy_utils.GetFailurePath(res['test_run'], res['expectation'], 'info.txt'))) <NEW_LINE> expected = ispy_utils.GetExpectationPath( res['expectation'], 'expected.png') <NEW_LINE> diff = ispy_utils.GetFailurePath(test_run, res['expectation'], 'diff.png') <NEW_LINE> res['percent_different'] = res['info']['fraction_different'] * 100 <NEW_LINE> res['expected_path'] = expected <NEW_LINE> res['diff_path'] = diff <NEW_LINE> res['actual_path'] = path <NEW_LINE> res['expected'] = ispy.cloud_bucket.GetImageURL(expected) <NEW_LINE> res['diff'] = ispy.cloud_bucket.GetImageURL(diff) <NEW_LINE> res['actual'] = ispy.cloud_bucket.GetImageURL(path) <NEW_LINE> return res
Request handler to serve the main_view page.
62599033d18da76e235b79cc
class PortCheck(Check): <NEW_LINE> <INDENT> def __init__(self, host, port, timeout=7, **kwargs): <NEW_LINE> <INDENT> logger.debug('Initialising with %s:%d, timeout %ds.', host, port, timeout) <NEW_LINE> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self.timeout = timeout <NEW_LINE> super().__init__(**kwargs) <NEW_LINE> <DEDENT> def check(self): <NEW_LINE> <INDENT> sock = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM) <NEW_LINE> sock.settimeout(self.timeout) <NEW_LINE> try: <NEW_LINE> <INDENT> start_time = time.perf_counter() <NEW_LINE> sock.connect((self.host, self.port)) <NEW_LINE> end_time = time.perf_counter() <NEW_LINE> sock.close() <NEW_LINE> error = None <NEW_LINE> <DEDENT> except (socket.error, socket.gaierror, OverflowError) as err: <NEW_LINE> <INDENT> end_time = time.perf_counter() <NEW_LINE> sock.close() <NEW_LINE> logger.warning('Caught exception: %s.', repr(err)) <NEW_LINE> error = err <NEW_LINE> <DEDENT> availability = False if error else True <NEW_LINE> runtime = end_time - start_time <NEW_LINE> message = str(error) if error else '' <NEW_LINE> logger.info('Tested %s in %fs w/ message "%s".', availability, runtime, message) <NEW_LINE> return Result(availability, runtime, message)
Checks if TCP ports are open.
6259903323e79379d538d606
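A standalone sketch of the same probe technique (TCP connect with a timeout, timed via perf_counter); the Check/Result base classes from the record are not shown here, so this version returns a plain tuple, and the host and port below are placeholders.

import socket
import time


def check_port(host, port, timeout=7):
    # Returns (reachable, runtime_in_seconds, error_message).
    sock = socket.socket(socket.AF_INET, type=socket.SOCK_STREAM)
    sock.settimeout(timeout)
    start = time.perf_counter()
    try:
        sock.connect((host, port))
        error = None
    except (socket.error, OverflowError) as err:
        error = err
    finally:
        runtime = time.perf_counter() - start
        sock.close()
    return error is None, runtime, str(error) if error else ''


print(check_port('localhost', 80, timeout=2))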
class Sphere3: <NEW_LINE> <INDENT> def __init__(self, p = None, r = 1.): <NEW_LINE> <INDENT> if (not p is None): <NEW_LINE> <INDENT> self.p = p <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.p = [0., 0., 0.] <NEW_LINE> <DEDENT> self.r = r <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ("Sphere3[" + str(self.p) + ", " + str(self.r) + "]") <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ("Sphere3(" + repr(self.p) + ", " + repr(self.r) + ")")
Sphere (3D). A Sphere in 3D space. p -- Position vector. r -- Radius.
625990336e29344779b0174d
class Command(BaseCommand): <NEW_LINE> <INDENT> option_list = BaseCommand.option_list + ( make_option('-s', '--set', action='store', nargs=2, dest='set', help='set value for key'), ) <NEW_LINE> usage = "Usage: manage <torrent-id> [<key1> [<key2> ...]]\n" " manage <torrent-id> --set <key> <value>" <NEW_LINE> def handle(self, *args, **options): <NEW_LINE> <INDENT> self.console = component.get("ConsoleUI") <NEW_LINE> if options['set']: <NEW_LINE> <INDENT> return self._set_option(*args, **options) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._get_option(*args, **options) <NEW_LINE> <DEDENT> <DEDENT> def _get_option(self, *args, **options): <NEW_LINE> <INDENT> def on_torrents_status(status): <NEW_LINE> <INDENT> for torrentid, data in status.items(): <NEW_LINE> <INDENT> self.console.write('') <NEW_LINE> if 'name' in data: <NEW_LINE> <INDENT> self.console.write('{!info!}Name: {!input!}%s' % data.get('name')) <NEW_LINE> <DEDENT> self.console.write('{!info!}ID: {!input!}%s' % torrentid) <NEW_LINE> for k, v in data.items(): <NEW_LINE> <INDENT> if k != 'name': <NEW_LINE> <INDENT> self.console.write('{!info!}%s: {!input!}%s' % (k, v)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def on_torrents_status_fail(reason): <NEW_LINE> <INDENT> self.console.write('{!error!}Failed to get torrent data.') <NEW_LINE> <DEDENT> torrent_ids = [] <NEW_LINE> torrent_ids.extend(self.console.match_torrent(args[0])) <NEW_LINE> request_options = [] <NEW_LINE> for opt in args[1:]: <NEW_LINE> <INDENT> if opt not in torrent_options: <NEW_LINE> <INDENT> self.console.write('{!error!}Unknown torrent option: %s' % opt) <NEW_LINE> return <NEW_LINE> <DEDENT> request_options.append(opt) <NEW_LINE> <DEDENT> if not request_options: <NEW_LINE> <INDENT> request_options = [ opt for opt in torrent_options.keys() ] <NEW_LINE> <DEDENT> request_options.append('name') <NEW_LINE> d = client.core.get_torrents_status({"id": torrent_ids}, request_options) <NEW_LINE> d.addCallback(on_torrents_status) <NEW_LINE> d.addErrback(on_torrents_status_fail) <NEW_LINE> return d <NEW_LINE> <DEDENT> def _set_option(self, *args, **options): <NEW_LINE> <INDENT> deferred = defer.Deferred() <NEW_LINE> torrent_ids = [] <NEW_LINE> torrent_ids.extend(self.console.match_torrent(args[0])) <NEW_LINE> key = options["set"][0] <NEW_LINE> val = options["set"][1] + " " .join(args[1:]) <NEW_LINE> if key not in torrent_options: <NEW_LINE> <INDENT> self.console.write("{!error!}The key '%s' is invalid!" % key) <NEW_LINE> return <NEW_LINE> <DEDENT> val = torrent_options[key](val) <NEW_LINE> def on_set_config(result): <NEW_LINE> <INDENT> self.console.write("{!success!}Torrent option successfully updated.") <NEW_LINE> deferred.callback(True) <NEW_LINE> <DEDENT> self.console.write("Setting %s to %s for torrents %s.." % (key, val, torrent_ids)) <NEW_LINE> client.core.set_torrent_options(torrent_ids, {key: val}).addCallback(on_set_config) <NEW_LINE> return deferred <NEW_LINE> <DEDENT> def complete(self, line): <NEW_LINE> <INDENT> return component.get("ConsoleUI").tab_complete_torrent(line)
Show and manage per-torrent options
6259903307d97122c4217da4
class HumanNaturalLanguage(HumanLanguage): <NEW_LINE> <INDENT> def __init__(self, hasCode=None, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self._namespace = ONTOLOGY_NS <NEW_LINE> self._project_id = PROJECT_ID <NEW_LINE> self._name = "HumanNaturalLanguage" <NEW_LINE> self.hasCode = HasCode(hasCode)
Language naturally evolved in humans. Labels: menschliche natürliche Sprache (de) / human natural language (en)
625990338a43f66fc4bf3285
class EESwedBankProvider(IPizzaProviderBase): <NEW_LINE> <INDENT> extra_fields = (('VK_CHARSET', 'UTF-8'),)
| SWEDBANK AS | https://www.swedbank.ee Protocol IPizza KeyChain :class:`~.IPizzaProviderBase.KeyChain` Supported return urls: * ``return`` Supported protocol version: * ``008``
6259903330c21e258be9990a
class MyRPC1Servicer(object): <NEW_LINE> <INDENT> def Test1(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
package my_rpc_service;
62599033b830903b9686ecf9
class Vertex(object): <NEW_LINE> <INDENT> def __init__(self, id, x, y, z): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.x = float(x) <NEW_LINE> self.y = float(y) <NEW_LINE> self.z = float(z) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str([self.x, self.y, self.z]) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.id == other.id <NEW_LINE> <DEDENT> def to_array(self): <NEW_LINE> <INDENT> return np.array([self.x, self.y, self.z]) <NEW_LINE> <DEDENT> def is_blocked(self, faces): <NEW_LINE> <INDENT> for face in faces.values(): <NEW_LINE> <INDENT> if face.is_blocking(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
The three dimensional coordinates of a vertex
6259903394891a1f408b9f78
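A trimmed usage sketch of the Vertex record, assuming numpy is installed; is_blocked is omitted because the Face type it relies on is not part of the record.

import numpy as np


class Vertex(object):
    # Three-dimensional point identified by an id.
    def __init__(self, id, x, y, z):
        self.id = id
        self.x, self.y, self.z = float(x), float(y), float(z)

    def to_array(self):
        return np.array([self.x, self.y, self.z])


a = Vertex(0, 1, 2, 3)
b = Vertex(1, 4, 6, 3)
print(np.linalg.norm(a.to_array() - b.to_array()))  # Euclidean distance: 5.0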
class MatchmakersViewSet(mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> queryset = UserModel.objects.filter(is_matchmaker=True) <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> permission_classes = [IsSuperAdmin] <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return self.list(request, *args, **kwargs) <NEW_LINE> <DEDENT> def patch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> u = request.pop('id') <NEW_LINE> user = UserModel.objects.get(id=u) <NEW_LINE> permissions_data = request.pop('user_permissions') <NEW_LINE> groups_data = request.pop('groups') <NEW_LINE> for p in permissions_data: <NEW_LINE> <INDENT> user.user_permissions.add(p) <NEW_LINE> <DEDENT> for g in groups_data: <NEW_LINE> <INDENT> user.groups.add(g) <NEW_LINE> <DEDENT> return Response(user, status=status.HTTP_200_OK)
API endpoint that allows matchmakers to be viewed or edited (groups and permissions). The viewset provides `list`, `retrieve` and `update` actions.
62599033d10714528d69ef0a
class ChoiceTextField(models.TextField): <NEW_LINE> <INDENT> def formfield(self, **kwargs): <NEW_LINE> <INDENT> if self.choices: <NEW_LINE> <INDENT> kwargs['widget'] = None <NEW_LINE> <DEDENT> return super(ChoiceTextField, self).formfield(**kwargs)
Textfield which uses a Select widget if it has choices specified.
6259903323e79379d538d608
class ResourceExhaustedError(OpError): <NEW_LINE> <INDENT> def __init__(self, node_def, op, message): <NEW_LINE> <INDENT> super(ResourceExhaustedError, self).__init__(node_def, op, message, RESOURCE_EXHAUSTED)
Some resource has been exhausted. For example, this error might be raised if a per-user quota is exhausted, or perhaps the entire file system is out of space. @@__init__
62599033be8e80087fbc017c
class Simulator(Startable, Hookable, Stateful, ExceptionRouter, FDEController): <NEW_LINE> <INDENT> __hookAlias__ = dict( NewSimulation = None , SimulationDone = None , PluginsActivated = None , NewInput = None , InputDone = None , SteadyStateDone = None , NewTimeStep = None , ProcessControlDone = None , ODESolverDone = None , PlotDataReady = None , WritePlotData = None , RestartWritten = None , SevereError = None )
Convenience class preparing use of FDEController and mixin types useful for simulator codes.
625990338a349b6b4368733d
class CodeImportMachineView(LaunchpadView): <NEW_LINE> <INDENT> label = "Import machines for Launchpad" <NEW_LINE> @property <NEW_LINE> def machines(self): <NEW_LINE> <INDENT> return getUtility(ICodeImportMachineSet).getAll()
The view for the page that shows all the import machines.
6259903391af0d3eaad3af2e
@attr.s(auto_attribs=True, frozen=True) <NEW_LINE> class AlleleCount: <NEW_LINE> <INDENT> count: int = attr.ib( validator=[attr.validators.instance_of(int), _greater_than_zero] )
Specify a *number* of copies of a mutation. :param count: Initial number of copies of a mutation. This value must be `> 0`. :type count: int
625990338c3a8732951f7658
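A standalone reproduction of the pattern; the `_greater_than_zero` helper is not shown in the record, so the version below is an assumed stand-in using the standard attrs validator signature.

import attr


def _greater_than_zero(instance, attribute, value):
    # Assumed stand-in for the validator referenced by the record.
    if value <= 0:
        raise ValueError("%s must be > 0, got %r" % (attribute.name, value))


@attr.s(auto_attribs=True, frozen=True)
class AlleleCount:
    count: int = attr.ib(
        validator=[attr.validators.instance_of(int), _greater_than_zero]
    )


print(AlleleCount(count=3))   # AlleleCount(count=3)
# AlleleCount(count=0)        # would raise ValueError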
class HitBox: <NEW_LINE> <INDENT> def __init__(self, x1, y1, x2, y2): <NEW_LINE> <INDENT> self.x1 = x1 <NEW_LINE> self.y1 = y1 <NEW_LINE> self.x2 = x2 <NEW_LINE> self.y2 = y2
Class used to draw hitboxes around objects
6259903326238365f5fadc53
class titleWindow(object): <NEW_LINE> <INDENT> def __init__(self, mh, mw, h, w, y=None, x=None): <NEW_LINE> <INDENT> height = 2 <NEW_LINE> width = 46 <NEW_LINE> y = 0 <NEW_LINE> x = mw // 2 - 23 <NEW_LINE> self.window_data = (height, width, y, x) <NEW_LINE> <DEDENT> def getWinSizePos(self): <NEW_LINE> <INDENT> return self.window_data
Title element
6259903363f4b57ef00865f3
class Function(Field): <NEW_LINE> <INDENT> def __init__(self, field, getter, setter=None, searcher=None, loading='lazy'): <NEW_LINE> <INDENT> assert isinstance(field, Field) <NEW_LINE> self._field = field <NEW_LINE> self._type = field._type <NEW_LINE> self.getter = getter <NEW_LINE> self.setter = setter <NEW_LINE> if not self.setter: <NEW_LINE> <INDENT> self._field.readonly = True <NEW_LINE> <DEDENT> self.searcher = searcher <NEW_LINE> assert loading in ('lazy', 'eager'), 'loading must be "lazy" or "eager"' <NEW_LINE> self.loading = loading <NEW_LINE> <DEDENT> __init__.__doc__ += Field.__init__.__doc__ <NEW_LINE> def __copy__(self): <NEW_LINE> <INDENT> return Function(copy.copy(self._field), self.getter, setter=self.setter, searcher=self.searcher) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self._field, name) <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> return self._field[name] <NEW_LINE> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> if name in ('_field', '_type', 'getter', 'setter', 'searcher'): <NEW_LINE> <INDENT> return object.__setattr__(self, name, value) <NEW_LINE> <DEDENT> return setattr(self._field, name, value) <NEW_LINE> <DEDENT> def search(self, model, name, clause): <NEW_LINE> <INDENT> if not self.searcher: <NEW_LINE> <INDENT> model.raise_user_error('search_function_missing', name) <NEW_LINE> <DEDENT> return getattr(model, self.searcher)(name, tuple(clause)) <NEW_LINE> <DEDENT> def get(self, ids, model, name, values=None): <NEW_LINE> <INDENT> if isinstance(name, list): <NEW_LINE> <INDENT> names = name <NEW_LINE> if 'names' in inspect.getargspec(getattr(model, self.getter))[0]: <NEW_LINE> <INDENT> return getattr(model, self.getter)(ids, names) <NEW_LINE> <DEDENT> res = {} <NEW_LINE> for name in names: <NEW_LINE> <INDENT> res[name] = getattr(model, self.getter)(ids, name) <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if 'names' in inspect.getargspec(getattr(model, self.getter))[0]: <NEW_LINE> <INDENT> name = [name] <NEW_LINE> <DEDENT> return getattr(model, self.getter)(ids, name) <NEW_LINE> <DEDENT> <DEDENT> def set(self, ids, model, name, value): <NEW_LINE> <INDENT> if self.setter: <NEW_LINE> <INDENT> getattr(model, self.setter)(ids, name, value)
Define function field (any).
625990336e29344779b01751
class Solution: <NEW_LINE> <INDENT> def numIslands(slef,grid): <NEW_LINE> <INDENT> if not grid or not grid[0]: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> result =[] <NEW_LINE> for i in range(len(grid)): <NEW_LINE> <INDENT> for j in range(len(grid[0])): <NEW_LINE> <INDENT> if grid[i][j] == 0: <NEW_LINE> <INDENT> left_top = [i,j] <NEW_LINE> x, y = i, j <NEW_LINE> while y + 1 < len(grid[0]) and grid[x][y+1] == 0: <NEW_LINE> <INDENT> grid[x][y + 1] = 1 <NEW_LINE> y += 1 <NEW_LINE> <DEDENT> while x +1 < len(grid) and grid[x+1][y] == 0: <NEW_LINE> <INDENT> grid[x + 1][y] =1 <NEW_LINE> x += 1 <NEW_LINE> <DEDENT> right_bot = [x,y] <NEW_LINE> Solution.helper(Solution, grid,left_top,right_bot) <NEW_LINE> result.append([left_top,right_bot]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def helper(self, grid, left_top,right_bot): <NEW_LINE> <INDENT> for i in range(left_top[0],right_bot[0]+1): <NEW_LINE> <INDENT> for j in range(left_top[1],right_bot[1]+1): <NEW_LINE> <INDENT> grid[i][j] =1
@param grid: a 2D matrix of 0/1 values @return: the [top-left, bottom-right] corners of each block of 0s. Every block of 0s in the grid is guaranteed to be a rectangle.
625990338e05c05ec3f6f6db
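A brief illustration of the expected input and output, assuming the Solution class above is importable as written; the method mutates the grid in place and returns the corner pairs of every rectangle of 0s.

grid = [
    [0, 1, 0, 0],
    [1, 1, 0, 0],
    [0, 0, 1, 1],
]
# Assumes the Solution class from the record above is in scope.
print(Solution().numIslands(grid))
# [[[0, 0], [0, 0]], [[0, 2], [1, 3]], [[2, 0], [2, 1]]]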
@test(groups=["test_bdd"]) <NEW_LINE> class TestBlockDevice(TestBasic): <NEW_LINE> <INDENT> @test(depends_on=[SetupEnvironment.prepare_slaves_3], groups=["deploy_bdd"]) <NEW_LINE> @log_snapshot_after_test <NEW_LINE> def bdd_ha_one_controller_compact(self): <NEW_LINE> <INDENT> self.env.revert_snapshot("ready_with_3_slaves") <NEW_LINE> self.show_step(1) <NEW_LINE> cluster_id = self.fuel_web.create_cluster( name=self.__class__.__name__, settings={ 'tenant': 'bdd', 'user': 'bdd', 'password': 'bdd', 'volumes_lvm': False, 'volumes_ceph': False, 'images_ceph': False, 'objects_ceph': False, 'ephemeral_ceph': False, 'nova_quotas': True, 'volumes_block_device': True, 'net_provider': 'neutron', 'net_segment_type': settings.NEUTRON_SEGMENT['vlan'], 'configure_ssl': False } ) <NEW_LINE> self.show_step(2) <NEW_LINE> self.show_step(3) <NEW_LINE> self.fuel_web.update_nodes( cluster_id, { 'slave-01': ['controller'], 'slave-02': ['compute', 'cinder-block-device'], } ) <NEW_LINE> self.show_step(4) <NEW_LINE> self.fuel_web.deploy_cluster_wait(cluster_id) <NEW_LINE> self.show_step(5) <NEW_LINE> self.fuel_web.verify_network(cluster_id) <NEW_LINE> self.show_step(6) <NEW_LINE> self.fuel_web.run_ostf(cluster_id=cluster_id) <NEW_LINE> self.env.make_snapshot("bdd_ha_one_controller_compact")
Tests for verifying deployment with Cinder Block Device.
6259903307d97122c4217da8
class TagsStatsTool(TagsTRTool): <NEW_LINE> <INDENT> coalesce = list <NEW_LINE> filter_methods = ("tag", "projects", "locales", "path") <NEW_LINE> _default_annotations = ( ("total_strings", Coalesce(Sum("resource__total_strings"), Value(0))), ("fuzzy_strings", Coalesce(Sum("fuzzy_strings"), Value(0))), ("strings_with_warnings", Coalesce(Sum("strings_with_warnings"), Value(0))), ("strings_with_errors", Coalesce(Sum("strings_with_errors"), Value(0))), ("approved_strings", Coalesce(Sum("approved_strings"), Value(0))), ("unreviewed_strings", Coalesce(Sum("unreviewed_strings"), Value(0))), ) <NEW_LINE> def get_data(self): <NEW_LINE> <INDENT> if self.get_groupby()[0] == "resource__tag": <NEW_LINE> <INDENT> stats = { stat["resource__tag"]: stat for stat in super(TagsStatsTool, self).get_data() } <NEW_LINE> tags = self.tag_manager.filter(pk__in=stats.keys()) <NEW_LINE> tags = tags.values("pk", "slug", "name", "priority", "project") <NEW_LINE> tags = tags.annotate(resource__tag=F("pk")) <NEW_LINE> for tag in tags: <NEW_LINE> <INDENT> tag.update(stats[tag["pk"]]) <NEW_LINE> <DEDENT> return tags <NEW_LINE> <DEDENT> elif self.get_groupby()[0] == "locale": <NEW_LINE> <INDENT> result = list(super().get_data()) <NEW_LINE> locales = { loc["pk"]: loc for loc in self.locale_manager.filter( pk__in=(r["locale"] for r in result) ).values("pk", "name", "code", "population") } <NEW_LINE> for r in result: <NEW_LINE> <INDENT> r.update(locales[r["locale"]]) <NEW_LINE> <DEDENT> return sorted(result, key=lambda r: r["name"])
Creates aggregated stat data for tags according to filters
625990335e10d32532ce4183
class Dict(dict): <NEW_LINE> <INDENT> def __init__(self,names=(),values=(),**kw): <NEW_LINE> <INDENT> super(Dict,self).__init__(**kw) <NEW_LINE> for k,v in zip(names,values): <NEW_LINE> <INDENT> self[k]=v <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self,key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise AttributeError(r"'Dict' object has no attribute '%s'" % key) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self,key,value): <NEW_LINE> <INDENT> self[key]=value
Simple dict that also supports x.y attribute-style access.
625990338a43f66fc4bf3289
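A short self-contained usage sketch of the attribute-style dict from the record above:

class Dict(dict):
    # Plain dict that also allows d.key access and assignment.
    def __init__(self, names=(), values=(), **kw):
        super(Dict, self).__init__(**kw)
        for k, v in zip(names, values):
            self[k] = v

    def __getattr__(self, key):
        try:
            return self[key]
        except KeyError:
            raise AttributeError(r"'Dict' object has no attribute '%s'" % key)

    def __setattr__(self, key, value):
        self[key] = value


d = Dict(('a', 'b'), (1, 2), c=3)
print(d.a, d['b'], d.c)   # 1 2 3
d.x = 99                  # stored as d['x']
print(d['x'])             # 99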
class GPEnvelope(CoClass): <NEW_LINE> <INDENT> _reg_clsid_ = GUID('{01A9676B-E251-4A40-8544-5E5B65E2983C}') <NEW_LINE> _idlflags_ = [] <NEW_LINE> _typelib_path_ = typelib_path <NEW_LINE> _reg_typelib_ = ('{C031A050-82C6-4F8F-8836-5692631CFFE6}', 10, 2)
Geoprocessing value object containing an envelope value.
6259903330c21e258be9990e
class engine(Engine): <NEW_LINE> <INDENT> name = "PostgreSQL" <NEW_LINE> abbreviation = "postgres" <NEW_LINE> max_int = 2147483647 <NEW_LINE> placeholder = "%s" <NEW_LINE> required_opts = [ ("user", "Enter your PostgreSQL username", "postgres"), ("password", "Enter your password", ""), ("host", "Enter your PostgreSQL host", "localhost"), ("port", "Enter your PostgreSQL port", 5432), ("database", "Enter your PostgreSQL database name", "postgres"), ("database_name", "Format of schema name", "{db}"), ("table_name", "Format of table name", "{db}.{table}"), ] <NEW_LINE> def create_db_statement(self): <NEW_LINE> <INDENT> return Engine.create_db_statement(self).replace("DATABASE", "SCHEMA") <NEW_LINE> <DEDENT> def create_db(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> Engine.create_db(self) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.connection.rollback() <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> def drop_statement(self, objecttype, objectname): <NEW_LINE> <INDENT> statement = Engine.drop_statement(self, objecttype, objectname) <NEW_LINE> statement += " CASCADE;" <NEW_LINE> return statement.replace(" DATABASE ", " SCHEMA ") <NEW_LINE> <DEDENT> def get_connection(self): <NEW_LINE> <INDENT> import psycopg2 as dbapi <NEW_LINE> self.get_input() <NEW_LINE> conn = dbapi.connect( host=self.opts["host"], port=int(self.opts["port"]), user=self.opts["user"], password=self.opts["password"], database=self.opts["database"], ) <NEW_LINE> encoding = ENCODING.lower() <NEW_LINE> if self.script.encoding: <NEW_LINE> <INDENT> encoding = self.script.encoding.lower() <NEW_LINE> <DEDENT> encoding_lookup = {"iso-8859-1": "Latin1", "latin-1": "Latin1", "utf-8": "UTF8"} <NEW_LINE> db_encoding = encoding_lookup.get(encoding) <NEW_LINE> conn.set_client_encoding(db_encoding) <NEW_LINE> return conn
Engine instance for PostgreSQL.
62599033be8e80087fbc017e
class Post(models.Model): <NEW_LINE> <INDENT> author = models.ForeignKey('auth.User') <NEW_LINE> title = models.CharField(max_length=200) <NEW_LINE> text = models.TextField() <NEW_LINE> created_date = models.DateTimeField( default=timezone.now) <NEW_LINE> published_date = models.DateTimeField( blank=True, null=True) <NEW_LINE> def publish(self): <NEW_LINE> <INDENT> self.published_date = timezone.now() <NEW_LINE> self.save() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title
Blog post model with author, title, text, and creation/publication timestamps.
62599033e76e3b2f99fd9b0d
class TestInitCommand(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> if not os.path.exists('test_workspace'): <NEW_LINE> <INDENT> os.makedirs('test_workspace') <NEW_LINE> <DEDENT> with open(os.path.join(os.getcwd(), 'test_workspace/main.cpp'), 'wt') as f: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> with open(os.path.join(os.getcwd(), 'test_workspace/header1.h'), 'wt') as f: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> with open(os.path.join(os.getcwd(), 'test_workspace/linker.ld'), 'wt') as f: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.parser = argparse.ArgumentParser() <NEW_LINE> subparsers = self.parser.add_subparsers(help='commands') <NEW_LINE> self.subparser = subparsers.add_parser('init', help=init.help) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> shutil.rmtree('test_workspace', ignore_errors=True) <NEW_LINE> os.remove('projects.yaml') <NEW_LINE> os.remove('project.yaml') <NEW_LINE> <DEDENT> def test_init_empty_project(self): <NEW_LINE> <INDENT> init.setup(self.subparser) <NEW_LINE> args = self.parser.parse_args(['init']) <NEW_LINE> result = init.run(args) <NEW_LINE> assert result == 0 <NEW_LINE> assert os.path.isfile('projects.yaml') <NEW_LINE> assert os.path.isfile('project.yaml') <NEW_LINE> <DEDENT> def test_init_small_project(self): <NEW_LINE> <INDENT> init.setup(self.subparser) <NEW_LINE> args = self.parser.parse_args(['init', '-dir', 'test_workspace']) <NEW_LINE> result = init.run(args) <NEW_LINE> assert result == 0 <NEW_LINE> assert os.path.isfile('projects.yaml') <NEW_LINE> assert os.path.isfile('project.yaml')
test init command
6259903371ff763f4b5e8899
class Die(): <NEW_LINE> <INDENT> def __init__(self, num_sides=6): <NEW_LINE> <INDENT> self.num_sides = num_sides <NEW_LINE> <DEDENT> def roll(self): <NEW_LINE> <INDENT> return randint(1, self.num_sides)
Class representing a single game die.
625990331d351010ab8f4c1a
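A short usage sketch; the record calls randint unqualified, so the snippet assumes `from random import randint` at module level.

from random import randint


class Die():
    # Six-sided die unless told otherwise.
    def __init__(self, num_sides=6):
        self.num_sides = num_sides

    def roll(self):
        return randint(1, self.num_sides)


d20 = Die(num_sides=20)
print([d20.roll() for _ in range(5)])   # five values between 1 and 20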
class LinearRegressionTester(RegressionTester): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(LinearRegressionTester, self).__init__(*args, **kwargs) <NEW_LINE> self.regression_model = LinearRegression <NEW_LINE> self.score_fcn = r2_score
used for linear regression testing
62599033b830903b9686ecfb
class WebhookAdapter: <NEW_LINE> <INDENT> BASE = 'https://discord.com/api/v7' <NEW_LINE> def _prepare(self, webhook): <NEW_LINE> <INDENT> self._webhook_id = webhook.id <NEW_LINE> self._webhook_token = webhook.token <NEW_LINE> self._request_url = '{0.BASE}/webhooks/{1}/{2}'.format(self, webhook.id, webhook.token) <NEW_LINE> self.webhook = webhook <NEW_LINE> <DEDENT> def request(self, verb, url, payload=None, multipart=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def delete_webhook(self): <NEW_LINE> <INDENT> return self.request('DELETE', self._request_url) <NEW_LINE> <DEDENT> def edit_webhook(self, **payload): <NEW_LINE> <INDENT> return self.request('PATCH', self._request_url, payload=payload) <NEW_LINE> <DEDENT> def handle_execution_response(self, data, *, wait): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> async def _wrap_coroutine_and_cleanup(self, coro, cleanup): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return await coro <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> cleanup() <NEW_LINE> <DEDENT> <DEDENT> def execute_webhook(self, *, payload, wait=False, file=None, files=None): <NEW_LINE> <INDENT> cleanup = None <NEW_LINE> if file is not None: <NEW_LINE> <INDENT> multipart = { 'file': (file.filename, file.fp, 'application/octet-stream'), 'payload_json': utils.to_json(payload) } <NEW_LINE> data = None <NEW_LINE> cleanup = file.close <NEW_LINE> files_to_pass = [file] <NEW_LINE> <DEDENT> elif files is not None: <NEW_LINE> <INDENT> multipart = { 'payload_json': utils.to_json(payload) } <NEW_LINE> for i, file in enumerate(files): <NEW_LINE> <INDENT> multipart['file%i' % i] = (file.filename, file.fp, 'application/octet-stream') <NEW_LINE> <DEDENT> data = None <NEW_LINE> def _anon(): <NEW_LINE> <INDENT> for f in files: <NEW_LINE> <INDENT> f.close() <NEW_LINE> <DEDENT> <DEDENT> cleanup = _anon <NEW_LINE> files_to_pass = files <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = payload <NEW_LINE> multipart = None <NEW_LINE> files_to_pass = None <NEW_LINE> <DEDENT> url = '%s?wait=%d' % (self._request_url, wait) <NEW_LINE> maybe_coro = None <NEW_LINE> try: <NEW_LINE> <INDENT> maybe_coro = self.request('POST', url, multipart=multipart, payload=data, files=files_to_pass) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if maybe_coro is not None and cleanup is not None: <NEW_LINE> <INDENT> if not asyncio.iscoroutine(maybe_coro): <NEW_LINE> <INDENT> cleanup() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> maybe_coro = self._wrap_coroutine_and_cleanup(maybe_coro, cleanup) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self.handle_execution_response(maybe_coro, wait=wait)
Base class for all webhook adapters. Attributes ------------ webhook: :class:`Webhook` The webhook that owns this adapter.
6259903394891a1f408b9f7a
class Delter(AccessorBase): <NEW_LINE> <INDENT> __slots__ = add_to_slots('parent_xpath', 'marshal_to') <NEW_LINE> def __call__(self): <NEW_LINE> <INDENT> parent = self.xmltreefile().find(self.parent_xpath) <NEW_LINE> if parent is None: <NEW_LINE> <INDENT> raise xcepts.LibvirtXMLNotFoundError("Parent element %s not " "found" % self.parent_xpath) <NEW_LINE> <DEDENT> todel = [] <NEW_LINE> index = 0 <NEW_LINE> for child in parent.getchildren(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = self.marshal_to(child.tag, dict(list(child.items())), index, self.libvirtxml, child.text) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> item = self.marshal_to(child.tag, dict(list(child.items())), index, self.libvirtxml) <NEW_LINE> <DEDENT> index += 1 <NEW_LINE> if item is not None: <NEW_LINE> <INDENT> todel.append(child) <NEW_LINE> <DEDENT> <DEDENT> for child in todel: <NEW_LINE> <INDENT> parent.remove(child)
Remove ALL child elements for which marshal_to does NOT return None
6259903373bcbd0ca4bcb389
@as_function <NEW_LINE> class nfr(IterOnInfs, Bin): <NEW_LINE> <INDENT> exec_name = "NFR" <NEW_LINE> names = "MNaseSeq", <NEW_LINE> def fun(self, f, genome_dir, out_dir): <NEW_LINE> <INDENT> _, base = PathHelpers.base_name(f["file_path"]) <NEW_LINE> input = PathHelpers.build_path(base, out_dir, "gff", "NR") <NEW_LINE> output = PathHelpers.build_path(base, out_dir, "gff", "NFR") <NEW_LINE> args = {"input": input, "output": output} <NEW_LINE> meta = [{"name": "NFR_gff", "file_path": output}] <NEW_LINE> return args, meta
Look for nucleosome-free regions
62599033d53ae8145f919566
class RegistroC321(Registro): <NEW_LINE> <INDENT> campos = [ CampoFixo(1, 'REG', 'C321'), Campo(2, 'COD_ITEM'), CampoNumerico(3, 'QTD'), Campo(4, 'UNID'), CampoNumerico(5, 'VL_ITEM'), CampoNumerico(6, 'VL_DESC'), CampoNumerico(7, 'VL_BC_ICMS'), CampoNumerico(8, 'VL_ICMS'), CampoNumerico(9, 'VL_PIS'), CampoNumerico(10, 'VL_COFINS'), ] <NEW_LINE> nivel = 4
ITEMS OF THE DAILY DOCUMENT SUMMARY (CODE 02)
625990336fece00bbacccaab
class TestLogging(MockHttpTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestLogging, self).setUp() <NEW_LINE> self.swiftclient_logger = logging.getLogger("swiftclient") <NEW_LINE> self.log_level = self.swiftclient_logger.getEffectiveLevel() <NEW_LINE> self.swiftclient_logger.setLevel(logging.INFO) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.swiftclient_logger.setLevel(self.log_level) <NEW_LINE> super(TestLogging, self).tearDown() <NEW_LINE> <DEDENT> def test_put_ok(self): <NEW_LINE> <INDENT> c.http_connection = self.fake_http_connection(200) <NEW_LINE> args = ('http://www.test.com', 'asdf', 'asdf', 'asdf', 'asdf') <NEW_LINE> value = c.put_object(*args) <NEW_LINE> self.assertTrue(isinstance(value, six.string_types)) <NEW_LINE> <DEDENT> def test_head_error(self): <NEW_LINE> <INDENT> c.http_connection = self.fake_http_connection(500) <NEW_LINE> self.assertRaises(c.ClientException, c.head_object, 'http://www.test.com', 'asdf', 'asdf', 'asdf') <NEW_LINE> <DEDENT> def test_get_error(self): <NEW_LINE> <INDENT> c.http_connection = self.fake_http_connection(404) <NEW_LINE> e = self.assertRaises(c.ClientException, c.get_object, 'http://www.test.com', 'asdf', 'asdf', 'asdf') <NEW_LINE> self.assertEqual(e.http_status, 404)
Make sure all the lines in http_log are covered.
625990338a43f66fc4bf328b
class AsyncBulbInterface(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, event, heartbeat_ms): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.stopped = event <NEW_LINE> self.hb_rate = heartbeat_ms <NEW_LINE> self.device_list = [] <NEW_LINE> self.color_queue = {} <NEW_LINE> self.color_cache = {} <NEW_LINE> self.power_queue = {} <NEW_LINE> self.power_cache = {} <NEW_LINE> self.logger = logging.getLogger("root") <NEW_LINE> <DEDENT> def set_device_list( self, device_list: List[lifxlan.Device], ): <NEW_LINE> <INDENT> for dev in device_list: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> label = dev.get_label() <NEW_LINE> self.color_queue[label] = queue.Queue() <NEW_LINE> try: <NEW_LINE> <INDENT> if dev.supports_multizone(): <NEW_LINE> <INDENT> dev: lifxlan.MultiZoneLight <NEW_LINE> color = dev.get_color_zones()[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> color = getattr(dev, "color", None) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.logger.error(e) <NEW_LINE> color = None <NEW_LINE> <DEDENT> self.color_cache[dev.label] = color <NEW_LINE> self.power_queue[dev.label] = queue.Queue() <NEW_LINE> try: <NEW_LINE> <INDENT> self.power_cache[dev.label] = dev.power_level or dev.get_power() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.logger.error(e) <NEW_LINE> self.power_cache[dev.label] = 0 <NEW_LINE> <DEDENT> self.device_list.append(dev) <NEW_LINE> <DEDENT> except lifxlan.WorkflowException as exc: <NEW_LINE> <INDENT> self.logger.warning( "Error when communicating with LIFX device: %s", exc ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def query_device(self, target): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pwr = target.get_power() <NEW_LINE> if pwr != self.power_cache[target.label]: <NEW_LINE> <INDENT> self.power_queue[target.label].put(pwr) <NEW_LINE> self.power_cache[target.label] = pwr <NEW_LINE> <DEDENT> clr = target.get_color() <NEW_LINE> if clr != self.color_cache[target.label]: <NEW_LINE> <INDENT> self.color_queue[target.label].put(clr) <NEW_LINE> self.color_cache[target.label] = clr <NEW_LINE> <DEDENT> <DEDENT> except lifxlan.WorkflowException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> with concurrent.futures.ThreadPoolExecutor( max_workers=max(1, len(self.device_list)) ) as executor: <NEW_LINE> <INDENT> while not self.stopped.wait(self.hb_rate / 1000): <NEW_LINE> <INDENT> executor.map(self.query_device, self.device_list)
Asynchronous networking layer between LIFX devices and the GUI.
6259903350485f2cf55dc080
class nonascii: <NEW_LINE> <INDENT> pass
Це не латиниця
625990331f5feb6acb163cf4
class Math(StdOutCommandLine): <NEW_LINE> <INDENT> input_spec = MathInputSpec <NEW_LINE> output_spec = MathOutputSpec <NEW_LINE> _cmd = "mincmath" <NEW_LINE> def _format_arg(self, name, spec, value): <NEW_LINE> <INDENT> assert value is not None <NEW_LINE> if name in self.input_spec.bool_or_const_traits: <NEW_LINE> <INDENT> if isinstance(value, bool) and value: <NEW_LINE> <INDENT> return spec.argstr <NEW_LINE> <DEDENT> elif isinstance(value, bool) and not value: <NEW_LINE> <INDENT> raise ValueError("Does not make sense to specify %s=False" % (name,)) <NEW_LINE> <DEDENT> elif isinstance(value, float): <NEW_LINE> <INDENT> return "%s -const %s" % (spec.argstr, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Invalid %s argument: %s" % (name, value)) <NEW_LINE> <DEDENT> <DEDENT> return super(Math, self)._format_arg(name, spec, value) <NEW_LINE> <DEDENT> def _parse_inputs(self): <NEW_LINE> <INDENT> nr_input_files = len(self.inputs.input_files) <NEW_LINE> for n in self.input_spec.bool_or_const_traits: <NEW_LINE> <INDENT> t = self.inputs.__getattribute__(n) <NEW_LINE> if isdefined(t): <NEW_LINE> <INDENT> if isinstance(t, bool): <NEW_LINE> <INDENT> if nr_input_files != 2: <NEW_LINE> <INDENT> raise ValueError( "Due to the %s option we expected 2 files but input_files is of length %d" % (n, nr_input_files) ) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(t, float): <NEW_LINE> <INDENT> if nr_input_files != 1: <NEW_LINE> <INDENT> raise ValueError( "Due to the %s option we expected 1 file but input_files is of length %d" % (n, nr_input_files) ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( "Argument should be a bool or const, but got: %s" % t ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for n in self.input_spec.single_volume_traits: <NEW_LINE> <INDENT> t = self.inputs.__getattribute__(n) <NEW_LINE> if isdefined(t): <NEW_LINE> <INDENT> if nr_input_files != 1: <NEW_LINE> <INDENT> raise ValueError( "Due to the %s option we expected 1 file but input_files is of length %d" % (n, nr_input_files) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for n in self.input_spec.two_volume_traits: <NEW_LINE> <INDENT> t = self.inputs.__getattribute__(n) <NEW_LINE> if isdefined(t): <NEW_LINE> <INDENT> if nr_input_files != 2: <NEW_LINE> <INDENT> raise ValueError( "Due to the %s option we expected 2 files but input_files is of length %d" % (n, nr_input_files) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for n in self.input_spec.n_volume_traits: <NEW_LINE> <INDENT> t = self.inputs.__getattribute__(n) <NEW_LINE> if isdefined(t): <NEW_LINE> <INDENT> if not nr_input_files >= 1: <NEW_LINE> <INDENT> raise ValueError( "Due to the %s option we expected at least one file but input_files is of length %d" % (n, nr_input_files) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return super(Math, self)._parse_inputs()
Various mathematical operations supplied by mincmath. Examples -------- >>> from nipype.interfaces.minc import Math >>> from nipype.interfaces.minc.testdata import minc2Dfile Scale: volume*3.0 + 2: >>> scale = Math(input_files=[minc2Dfile], scale=(3.0, 2)) >>> scale.run() # doctest: +SKIP Test if >= 1.5: >>> gt = Math(input_files=[minc2Dfile], test_gt=1.5) >>> gt.run() # doctest: +SKIP
625990338c3a8732951f765c
class CreateRouter(SimpleRouter): <NEW_LINE> <INDENT> routes = [ Route(url=r'^{prefix}{trailing_slash}$', mapping={ 'post': 'create', }, name='{basename}-detail', initkwargs={'suffix': 'Detail'}) ]
Router that exposes only the create (POST) route.
62599034cad5886f8bdc58fd
class MemoryReadSignExtender_t(unittest.TestCase): <NEW_LINE> <INDENT> def test_constructor(self): <NEW_LINE> <INDENT> x = Bus(32) <NEW_LINE> s = Bus(2) <NEW_LINE> y = Bus(32) <NEW_LINE> with self.assertRaises(TypeError): <NEW_LINE> <INDENT> mrse = MemoryReadSignExtender('x',s,y) <NEW_LINE> <DEDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> a = Bus(31) <NEW_LINE> mrse = MemoryReadSignExtender(a,s,y) <NEW_LINE> <DEDENT> with self.assertRaises(TypeError): <NEW_LINE> <INDENT> mrse = MemoryReadSignExtender(x,'s',y) <NEW_LINE> <DEDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> a = Bus(3) <NEW_LINE> mrse = MemoryReadSignExtender(x,a,y) <NEW_LINE> <DEDENT> with self.assertRaises(TypeError): <NEW_LINE> <INDENT> mrse = MemoryReadSignExtender(x,s,'y') <NEW_LINE> <DEDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> a = Bus(33) <NEW_LINE> mrse = MemoryReadSignExtender(x,s,a) <NEW_LINE> <DEDENT> mrse = MemoryReadSignExtender(x,s,y) <NEW_LINE> <DEDENT> def test_run(self): <NEW_LINE> <INDENT> x = Bus(32) <NEW_LINE> s = Bus(2) <NEW_LINE> y = Bus(32) <NEW_LINE> mrse = MemoryReadSignExtender(x,s,y) <NEW_LINE> x.write(0xFF) <NEW_LINE> s.write(0) <NEW_LINE> mrse.run() <NEW_LINE> self.assertEqual(y.read(),0xFF) <NEW_LINE> x.write(0xF2C8FF) <NEW_LINE> s.write(2) <NEW_LINE> mrse.run() <NEW_LINE> self.assertEqual(y.read(),0xF2C8FF) <NEW_LINE> x.write(0x2CAE) <NEW_LINE> s.write(1) <NEW_LINE> mrse.run() <NEW_LINE> self.assertEqual(y.read(),0xFFFFFFAE) <NEW_LINE> x.write(0x2C5E) <NEW_LINE> s.write(1) <NEW_LINE> mrse.run() <NEW_LINE> self.assertEqual(y.read(),0x5E) <NEW_LINE> x.write(0x34ACAE) <NEW_LINE> s.write(3) <NEW_LINE> mrse.run() <NEW_LINE> self.assertEqual(y.read(),0xFFFFACAE) <NEW_LINE> x.write(0xF2C5E) <NEW_LINE> s.write(3) <NEW_LINE> mrse.run() <NEW_LINE> self.assertEqual(y.read(),0x2C5E) <NEW_LINE> <DEDENT> def test_from_dict(self): <NEW_LINE> <INDENT> hooks = OrderedDict({ "i" : Bus(32), "c" : Bus(2), "o" : Bus(32) }) <NEW_LINE> config = { "input" : "i", "ctrl" : "c", "output" : "o" } <NEW_LINE> ext = MemoryReadSignExtender.from_dict(config,hooks)
Tests MemoryReadSignExtender's constructor and run functionality
62599034d53ae8145f919568
class ApiTokenView(View): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def authenticate_user(cls, request): <NEW_LINE> <INDENT> if 'HTTP_AUTHORIZATION' not in request.META: <NEW_LINE> <INDENT> data = {'message': 'Use Basic Auth and supply your username and password.'} <NEW_LINE> return JsonResponse(data, status=401) <NEW_LINE> <DEDENT> auth = request.META['HTTP_AUTHORIZATION'].split() <NEW_LINE> if len(auth) != 2 and auth[0].lower() != 'basic': <NEW_LINE> <INDENT> data = {'message': 'Use Basic Auth and supply your username and password.'} <NEW_LINE> return JsonResponse(data, status=401) <NEW_LINE> <DEDENT> username, password = b64decode(auth[1]).decode('utf-8').split(':') <NEW_LINE> user = authenticate(username=username, password=password) <NEW_LINE> if user is None: <NEW_LINE> <INDENT> data = {'message': 'Invalid Credentials.'} <NEW_LINE> return JsonResponse(data, status=401) <NEW_LINE> <DEDENT> return user <NEW_LINE> <DEDENT> def post(self, request, **kwargs): <NEW_LINE> <INDENT> if kwargs: <NEW_LINE> <INDENT> data = {'message': 'Bad Request.'} <NEW_LINE> return JsonResponse(data, status=400) <NEW_LINE> <DEDENT> response = self.authenticate_user(request) <NEW_LINE> if not isinstance(response, User): <NEW_LINE> <INDENT> return response <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> api_token, created = ApiToken.objects.get_or_create(owner=response) <NEW_LINE> <DEDENT> except ValidationError: <NEW_LINE> <INDENT> data = {'message': 'Unauthorized.'} <NEW_LINE> return JsonResponse(data, status=401) <NEW_LINE> <DEDENT> data = { 'token': api_token.token.hashid, 'username': response.username, 'first_name': response.first_name, 'last_name': response.last_name, 'is_active': response.is_active, 'id': response.id, 'email': response.email } <NEW_LINE> return JsonResponse(data, status=201 if created else 200)
View for issuing and revoking API tokens.
6259903430c21e258be99911
class BlockingOSCUDPServer(socketserver.UDPServer): <NEW_LINE> <INDENT> def __init__(self, server_address, dispatcher): <NEW_LINE> <INDENT> super().__init__(server_address, _UDPHandler) <NEW_LINE> self._dispatcher = dispatcher <NEW_LINE> <DEDENT> def verify_request(self, request, client_address): <NEW_LINE> <INDENT> return _is_valid_request(request) <NEW_LINE> <DEDENT> @property <NEW_LINE> def dispatcher(self): <NEW_LINE> <INDENT> return self._dispatcher
Blocking version of the UDP server. Each message will be handled sequentially on the same thread. Use this if you don't care about latency in your message handling or don't have a multiprocess/multithread environment (really?).
6259903491af0d3eaad3af34
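A usage sketch assuming the python-osc package, which this record appears to come from; the OSC address and handler below are placeholders.

from pythonosc.dispatcher import Dispatcher
from pythonosc.osc_server import BlockingOSCUDPServer


def print_volume(address, *args):
    # Runs synchronously, one message at a time.
    print(address, args)


dispatcher = Dispatcher()
dispatcher.map("/volume", print_volume)

server = BlockingOSCUDPServer(("127.0.0.1", 5005), dispatcher)
server.serve_forever()   # blocks until interrupted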
class _TransacitonCtx(object): <NEW_LINE> <INDENT> def __enter__(self): <NEW_LINE> <INDENT> global _db_ctx <NEW_LINE> self.should_close_conn = False <NEW_LINE> if not _db_ctx.is_init(): <NEW_LINE> <INDENT> _db_ctx.init() <NEW_LINE> self.should_close_conn = True <NEW_LINE> <DEDENT> _db_ctx.transactions = _db_ctx.transactions + 1 <NEW_LINE> logging.info('begin transaction...' if _db_ctx.transactions == 1 else 'join current transaction...') <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exctype, excvalue, traceback): <NEW_LINE> <INDENT> global _db_ctx <NEW_LINE> _db_ctx.transactions = _db_ctx.transactions - 1 <NEW_LINE> try: <NEW_LINE> <INDENT> if _db_ctx.transactions == 0: <NEW_LINE> <INDENT> if exctype is None: <NEW_LINE> <INDENT> self.commit() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.rollback() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if self.should_close_conn: <NEW_LINE> <INDENT> _db_ctx.cleanup() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def commit(self): <NEW_LINE> <INDENT> global _db_ctx <NEW_LINE> logging.info('commit transaction...') <NEW_LINE> try: <NEW_LINE> <INDENT> _db_ctx.connection.commit() <NEW_LINE> logging.info('commit ok.') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logging.warning('commit failed. try rollback...') <NEW_LINE> _db_ctx.connection.rollback() <NEW_LINE> logging.warning('rollback ok') <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> def rollback(self): <NEW_LINE> <INDENT> global _db_ctx <NEW_LINE> logging.warning('rollback transactions...') <NEW_LINE> _db_ctx.connection.rollback() <NEW_LINE> logging.info('rollback ok.')
_TransacitonCtx object that can handle transactions. Usage: with _TransacitonCtx(): pass
62599034d4950a0f3b1116c1
class User(object): <NEW_LINE> <INDENT> def __init__(self, bb, username): <NEW_LINE> <INDENT> self.bb = bb <NEW_LINE> self.username = username <NEW_LINE> <DEDENT> def followers(self): <NEW_LINE> <INDENT> url = api_base + 'users/{0}/followers/'.format(self.username) <NEW_LINE> return self.bb._loads(url) <NEW_LINE> <DEDENT> def repository(self, slug): <NEW_LINE> <INDENT> return Repository(self.bb, self.username, slug) <NEW_LINE> <DEDENT> def repositories(self): <NEW_LINE> <INDENT> user_data = self.get() <NEW_LINE> return user_data['repositories'] <NEW_LINE> <DEDENT> def events(self, start=None, limit=None): <NEW_LINE> <INDENT> query = smart_encode(start=start, limit=limit) <NEW_LINE> url = api_base + 'users/{0}/events/'.format(self.username) <NEW_LINE> if query: <NEW_LINE> <INDENT> url += '?{0}'.format(query) <NEW_LINE> <DEDENT> return self.bb._loads(url) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> url = api_base + 'users/{0}/'.format(self.username) <NEW_LINE> return self.bb._loads(url) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<User: {0}>'.format(self.username)
API encapsulation for user related bitbucket queries.
625990346e29344779b01758
class ComparisonTestFramework(bfbTestFramework): <NEW_LINE> <INDENT> def set_test_params(self): <NEW_LINE> <INDENT> self.num_nodes = 2 <NEW_LINE> self.setup_clean_chain = True <NEW_LINE> <DEDENT> def add_options(self, parser): <NEW_LINE> <INDENT> parser.add_option("--testbinary", dest="testbinary", default=os.getenv("BFBD", "bfbd"), help="bfbd binary to test") <NEW_LINE> parser.add_option("--refbinary", dest="refbinary", default=os.getenv("BFBD", "bfbd"), help="bfbd binary to use for reference nodes (if any)") <NEW_LINE> <DEDENT> def setup_network(self): <NEW_LINE> <INDENT> extra_args = [['-whitelist=127.0.0.1']] * self.num_nodes <NEW_LINE> if hasattr(self, "extra_args"): <NEW_LINE> <INDENT> extra_args = self.extra_args <NEW_LINE> <DEDENT> self.add_nodes(self.num_nodes, extra_args, binary=[self.options.testbinary] + [self.options.refbinary] * (self.num_nodes - 1)) <NEW_LINE> self.start_nodes()
Test framework for doing p2p comparison testing Sets up some bfbd binaries: - 1 binary: test binary - 2 binaries: 1 test binary, 1 ref binary - n>2 binaries: 1 test binary, n-1 ref binaries
625990345e10d32532ce4186
class SigninCard(Model): <NEW_LINE> <INDENT> _attribute_map = { 'text': {'key': 'text', 'type': 'str'}, 'buttons': {'key': 'buttons', 'type': '[CardAction]'}, } <NEW_LINE> def __init__(self, *, text: str=None, buttons=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(SigninCard, self).__init__(**kwargs) <NEW_LINE> self.text = text <NEW_LINE> self.buttons = buttons
A card representing a request to sign in. :param text: Text for signin request :type text: str :param buttons: Action to use to perform signin :type buttons: list[~botframework.connector.models.CardAction]
6259903430c21e258be99913
class MD5(Watcher): <NEW_LINE> <INDENT> def _get_value(self): <NEW_LINE> <INDENT> md5_hash = hashlib.md5() <NEW_LINE> with open(self.file_name, 'rb') as file_handle: <NEW_LINE> <INDENT> chunk = file_handle.read(4096) <NEW_LINE> while chunk: <NEW_LINE> <INDENT> md5_hash.update(chunk) <NEW_LINE> chunk = file_handle.read(4096) <NEW_LINE> <DEDENT> <DEDENT> return md5_hash.hexdigest()
MD5 hash based change detection. This class uses MD5 hashes based on the file's contents to enable change detection.
625990344e696a045264e6a5
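A standalone sketch of the same chunked hashing (the Watcher base class is not shown in the record); the file name below is a placeholder.

import hashlib


def md5_of_file(file_name, chunk_size=4096):
    # Stream the file so large files never have to fit in memory.
    md5_hash = hashlib.md5()
    with open(file_name, 'rb') as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b''):
            md5_hash.update(chunk)
    return md5_hash.hexdigest()


before = md5_of_file('watched.txt')
# ... the file may change on disk ...
has_changed = md5_of_file('watched.txt') != before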
class UserInventory(Base): <NEW_LINE> <INDENT> __tablename__ = 'user_inventories' <NEW_LINE> user_id = Column(Integer, ForeignKey('users.id'), primary_key=True) <NEW_LINE> product_id = Column(Integer, ForeignKey('products.id'), primary_key=True) <NEW_LINE> product_count = Column(Integer) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return f"<UserInventory(owner={self.user_id}, product={self.product_id}, count={self.product_count})>"
User inventory model.
6259903421bff66bcd723d6d
class SeatCategoryMapping(models.Model): <NEW_LINE> <INDENT> event = models.ForeignKey(Event, related_name='seat_category_mappings', on_delete=models.CASCADE) <NEW_LINE> subevent = models.ForeignKey(SubEvent, null=True, blank=True, related_name='seat_category_mappings', on_delete=models.CASCADE) <NEW_LINE> layout_category = models.CharField(max_length=190) <NEW_LINE> product = models.ForeignKey(Item, related_name='seat_category_mappings', on_delete=models.CASCADE)
Input seating plans have abstract "categories", such as "Balcony seat", etc. This model maps them to actual pretix products on a per-(sub)event level.
6259903494891a1f408b9f7d