code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars) |
---|---|---|
class SolutionV5(object): <NEW_LINE> <INDENT> def isValidBST(self, root): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> stack = [] <NEW_LINE> prev = None <NEW_LINE> while root or stack: <NEW_LINE> <INDENT> while root: <NEW_LINE> <INDENT> stack.append(root) <NEW_LINE> root = root.left <NEW_LINE> <DEDENT> root = stack.pop() <NEW_LINE> if prev and root.val <= prev.val: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> prev = root <NEW_LINE> root = root.right <NEW_LINE> <DEDENT> return True | Binary Tree, in-order traversal, use Stack, but O(2N) | 6259904b8a349b6b43687644 |
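For readability, here is the row above de-tokenized into a minimal runnable sketch; the `TreeNode` class is an assumption, since the sample only presumes nodes with `.val`, `.left` and `.right`:

```python
class TreeNode:
    # Hypothetical node type; the sample does not include a node definition.
    def __init__(self, val, left=None, right=None):
        self.val, self.left, self.right = val, left, right


def is_valid_bst(root):
    """Iterative in-order traversal: the tree is a BST iff visited values strictly increase."""
    stack, prev = [], None
    while root or stack:
        while root:                 # walk down the left spine
            stack.append(root)
            root = root.left
        root = stack.pop()          # visit the next in-order node
        if prev is not None and root.val <= prev.val:
            return False            # not strictly increasing, so not a valid BST
        prev = root
        root = root.right
    return True


print(is_valid_bst(TreeNode(2, TreeNode(1), TreeNode(3))))  # True
print(is_valid_bst(TreeNode(2, TreeNode(3), TreeNode(1))))  # False
```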
class CategoryFilter(): <NEW_LINE> <INDENT> def __init__(self, *, include: bool = None, filter: 'FilterTerms' = None) -> None: <NEW_LINE> <INDENT> self.include = include <NEW_LINE> self.filter = filter <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, _dict: Dict) -> 'CategoryFilter': <NEW_LINE> <INDENT> args = {} <NEW_LINE> if 'include' in _dict: <NEW_LINE> <INDENT> args['include'] = _dict.get('include') <NEW_LINE> <DEDENT> if 'filter' in _dict: <NEW_LINE> <INDENT> args['filter'] = FilterTerms.from_dict(_dict.get('filter')) <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> return cls.from_dict(_dict) <NEW_LINE> <DEDENT> def to_dict(self) -> Dict: <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'include') and self.include is not None: <NEW_LINE> <INDENT> _dict['include'] = self.include <NEW_LINE> <DEDENT> if hasattr(self, 'filter') and self.filter is not None: <NEW_LINE> <INDENT> _dict['filter'] = self.filter.to_dict() <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> return self.to_dict() <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return json.dumps(self.to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other: 'CategoryFilter') -> bool: <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other: 'CategoryFilter') -> bool: <NEW_LINE> <INDENT> return not self == other | Filter on a category. The filter will match against the values of the given category
with include or exclude.
:attr bool include: (optional) -> true - This is an include filter, false - this
is an exclude filter.
:attr FilterTerms filter: (optional) Offering filter terms. | 6259904b10dbd63aa1c71fd1 |
class TestParseCoords(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_parse_coords_whitespace_dot(self): <NEW_LINE> <INDENT> coords_txt = '55.455555 61.25847' <NEW_LINE> coords = cadastron.parse_coords(coords_txt) <NEW_LINE> self.assertTupleEqual(coords, ('55.455555', '61.25847')) <NEW_LINE> <DEDENT> def test_parse_coords_whitespace_comma(self): <NEW_LINE> <INDENT> coords_txt = '55,455555 61,25847' <NEW_LINE> coords = cadastron.parse_coords(coords_txt) <NEW_LINE> self.assertTupleEqual(coords, ('55.455555', '61.25847')) <NEW_LINE> <DEDENT> def test_parse_coords_yandex(self): <NEW_LINE> <INDENT> coords_txt = '55.455555, 61.25847' <NEW_LINE> coords = cadastron.parse_coords(coords_txt) <NEW_LINE> self.assertTupleEqual(coords, ('55.455555', '61.25847')) | Тест функции parse_coords(text) | 6259904bd6c5a102081e3512 |
class HeadersDict(collections.MutableMapping): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._dict = {} <NEW_LINE> self.update(*args, **kwargs) <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> self._dict[_HeaderKey(key)] = value <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self._dict[_HeaderKey(key)] <NEW_LINE> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> del self._dict[_HeaderKey(key)] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return (str(key) for key in self._dict) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._dict) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return repr(self._dict) <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return HeadersDict(self.items()) | A case-insensitive dictionary to represent HTTP headers. | 6259904b63b5f9789fe86563 |
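Note that `collections.MutableMapping` was removed from the top-level `collections` module in Python 3.10; the modern import is `collections.abc.MutableMapping`. The row also references a `_HeaderKey` helper that is not part of the sample; one plausible sketch of it (an assumption, not the library's actual code) is:

```python
class _HeaderKey:
    """Hashes and compares by the lower-cased header name, but keeps the original spelling."""

    def __init__(self, key):
        self._key = key
        self._lower = key.lower()

    def __hash__(self):
        return hash(self._lower)

    def __eq__(self, other):
        return isinstance(other, _HeaderKey) and self._lower == other._lower

    def __str__(self):
        return self._key

# With such a key, headers['content-type'] and headers['Content-Type']
# resolve to the same underlying dictionary slot.
```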
class GithubUserViewSet(mixins.UpdateModelMixin, mixins.RetrieveModelMixin, mixins.ListModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> queryset = GithubUser.objects.prefetch_related('organization', 'repository', 'language').all() <NEW_LINE> serializer_class = GithubUserListSerializer <NEW_LINE> pagination_class = CreatedPagination <NEW_LINE> lookup_url_kwarg = 'username' <NEW_LINE> lookup_field = 'username' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> data = self.request.GET <NEW_LINE> queryset = self.queryset <NEW_LINE> if data.get('company'): <NEW_LINE> <INDENT> queryset = queryset.filter(company__icontains=data.get('company')) <NEW_LINE> <DEDENT> if data.get('username'): <NEW_LINE> <INDENT> queryset = queryset.filter(username__icontains=data.get('username')) <NEW_LINE> <DEDENT> if data.get('tier'): <NEW_LINE> <INDENT> queryset = queryset.filter(tier=data.get('tier')) <NEW_LINE> <DEDENT> return queryset <NEW_LINE> <DEDENT> def retrieve(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.serializer_class = GithubUserSerializer <NEW_LINE> username = self.kwargs.get(self.lookup_url_kwarg) <NEW_LINE> github_user = self.get_queryset().filter(username=username).first() <NEW_LINE> if not github_user: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> github_information_service = GithubInformationService(username) <NEW_LINE> github_user = github_information_service.update() <NEW_LINE> <DEDENT> except GitHubUserDoesNotExist: <NEW_LINE> <INDENT> raise NotExistsGithubUser() <NEW_LINE> <DEDENT> except RateLimit: <NEW_LINE> <INDENT> raise RateLimitGithubAPI() <NEW_LINE> <DEDENT> <DEDENT> serializer = self.serializer_class(github_user) <NEW_LINE> return Response(serializer.data) <NEW_LINE> <DEDENT> def update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.serializer_class = GithubUserSerializer <NEW_LINE> username = self.kwargs.get(self.lookup_url_kwarg) <NEW_LINE> try: <NEW_LINE> <INDENT> github_user = GithubUser.objects.filter(username=username).get() <NEW_LINE> if self.can_update(updated_date=github_user.updated) is False: <NEW_LINE> <INDENT> response_data = self.serializer_class(github_user).data <NEW_LINE> return Response(response_data) <NEW_LINE> <DEDENT> github_information_service = GithubInformationService(username) <NEW_LINE> user = github_information_service.update() <NEW_LINE> response_data = self.serializer_class(user).data <NEW_LINE> <DEDENT> except GithubUser.DoesNotExist: <NEW_LINE> <INDENT> raise exceptions.NotFound <NEW_LINE> <DEDENT> except RateLimit: <NEW_LINE> <INDENT> raise RateLimitGithubAPI() <NEW_LINE> <DEDENT> return Response(response_data) <NEW_LINE> <DEDENT> @action(methods=['GET'], detail=True, url_path='tag', url_name='tag') <NEW_LINE> def tag(self, request, *args, **kwargs): <NEW_LINE> <INDENT> github_user = self.get_object() <NEW_LINE> theme = request.GET.get('theme', 'basic') <NEW_LINE> template = loader.get_template('tag/profile.html') <NEW_LINE> context = {'github_user': github_user, 'theme': theme} <NEW_LINE> response = HttpResponse(content=template.render(context, request)) <NEW_LINE> response['Content-Type'] = 'image/svg+xml' <NEW_LINE> response['Cache-Control'] = 'no-cache, no-store, must-revalidate' <NEW_LINE> return response <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def can_update(updated_date: datetime): <NEW_LINE> <INDENT> return updated_date + timedelta(1) <= datetime.now() | endpoint : githubs/users/:username | 6259904b7cff6e4e811b6e30 |
class AddGroupParser(BaseParser): <NEW_LINE> <INDENT> command = COMMANDS.ADD_GROUP <NEW_LINE> def _add_arguments(self): <NEW_LINE> <INDENT> self.parser.add_argument("name", action="store", help="the new group's name") | usage: td add-group [name]
td ag [name]
add group
positional arguments:
name the new group's name
optional arguments:
-h, --help show this help message and exit | 6259904b96565a6dacd2d984 |
class Floating_ips_bulk(extensions.ExtensionDescriptor): <NEW_LINE> <INDENT> name = "FloatingIpsBulk" <NEW_LINE> alias = "os-floating-ips-bulk" <NEW_LINE> namespace = ("http://docs.openstack.org/compute/ext/" "floating_ips_bulk/api/v2") <NEW_LINE> updated = "2012-10-29T13:25:27-06:00" <NEW_LINE> def get_resources(self): <NEW_LINE> <INDENT> resources = [] <NEW_LINE> resource = extensions.ResourceExtension('os-floating-ips-bulk', FloatingIPBulkController()) <NEW_LINE> resources.append(resource) <NEW_LINE> return resources | Bulk handling of Floating IPs. | 6259904b82261d6c527308c1 |
class BayesianNetwork: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.edges = defaultdict(lambda: []) <NEW_LINE> self.variables = {} <NEW_LINE> <DEDENT> def add_variable(self, variable): <NEW_LINE> <INDENT> if not isinstance(variable, Variable): <NEW_LINE> <INDENT> raise TypeError(f"Expected {Variable}; got {type(variable)}.") <NEW_LINE> <DEDENT> self.variables[variable.name] = variable <NEW_LINE> <DEDENT> def add_edge(self, from_variable, to_variable): <NEW_LINE> <INDENT> if from_variable not in self.variables.values(): <NEW_LINE> <INDENT> raise ValueError("Parent variable is not added to list of variables.") <NEW_LINE> <DEDENT> if to_variable not in self.variables.values(): <NEW_LINE> <INDENT> raise ValueError("Child variable is not added to list of variables.") <NEW_LINE> <DEDENT> self.edges[from_variable].append(to_variable) <NEW_LINE> <DEDENT> def sorted_nodes(self): <NEW_LINE> <INDENT> S = [] <NEW_LINE> L = {} <NEW_LINE> temp = [] <NEW_LINE> for key in self.variables: <NEW_LINE> <INDENT> for child in self.edges[key]: <NEW_LINE> <INDENT> temp.append(child) <NEW_LINE> <DEDENT> <DEDENT> for key in self.variables: <NEW_LINE> <INDENT> if(key not in temp): <NEW_LINE> <INDENT> S.append(key) <NEW_LINE> <DEDENT> <DEDENT> while(len(S)): <NEW_LINE> <INDENT> temp_var = S[0] <NEW_LINE> new_dictionary_element = {temp_var: self.edges[temp_var]} <NEW_LINE> L.update(new_dictionary_element) <NEW_LINE> S.remove(temp_var) <NEW_LINE> for child in self.edges[temp_var]: <NEW_LINE> <INDENT> temp.remove(child) <NEW_LINE> if(child not in temp): <NEW_LINE> <INDENT> S.append(child) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if temp: <NEW_LINE> <INDENT> raise Exception ("Graph has at least one cycle") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (L) | Class representing a Bayesian network.
Nodes can be accessed through self.variables['variable_name'].
Each node is a Variable.
Edges are stored in a dictionary. A node's children can be accessed by
self.edges[variable]. Both the key and value in this dictionary is a Variable. | 6259904ba8ecb03325872608 |
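The `sorted_nodes` method above is essentially Kahn's algorithm over the `edges` adjacency dict; the same idea on plain strings, independent of the `Variable` class, looks like this:

```python
from collections import defaultdict

def topological_order(edges, nodes):
    """Kahn's algorithm: repeatedly emit nodes that no remaining node points to."""
    indegree = {n: 0 for n in nodes}
    for parent in nodes:
        for child in edges[parent]:
            indegree[child] += 1
    ready = [n for n in nodes if indegree[n] == 0]   # nodes with no parents
    order = []
    while ready:
        node = ready.pop()
        order.append(node)
        for child in edges[node]:
            indegree[child] -= 1
            if indegree[child] == 0:
                ready.append(child)
    if len(order) != len(nodes):
        raise Exception("Graph has at least one cycle")
    return order

edges = defaultdict(list, {"Rain": ["WetGrass"], "Sprinkler": ["WetGrass"]})
print(topological_order(edges, ["Rain", "Sprinkler", "WetGrass"]))
# ['Sprinkler', 'Rain', 'WetGrass']
```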
class MultitaskDataloader: <NEW_LINE> <INDENT> def __init__(self, dataloader_dict): <NEW_LINE> <INDENT> self.dataloader_dict = dataloader_dict <NEW_LINE> self.num_batches_dict = { task_name: len(dataloader) for task_name, dataloader in self.dataloader_dict.items() } <NEW_LINE> self.task_name_list = list(self.dataloader_dict) <NEW_LINE> self.dataset = [None] * sum( len(dataloader.dataset) for dataloader in self.dataloader_dict.values() ) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return sum(self.num_batches_dict.values()) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> task_choice_list = [] <NEW_LINE> for i, task_name in enumerate(self.task_name_list): <NEW_LINE> <INDENT> task_choice_list += [i] * self.num_batches_dict[task_name] <NEW_LINE> <DEDENT> task_choice_list = np.array(task_choice_list) <NEW_LINE> np.random.shuffle(task_choice_list) <NEW_LINE> dataloader_iter_dict = { task_name: iter(dataloader) for task_name, dataloader in self.dataloader_dict.items() } <NEW_LINE> for task_choice in task_choice_list: <NEW_LINE> <INDENT> task_name = self.task_name_list[task_choice] <NEW_LINE> yield next(dataloader_iter_dict[task_name]) | Data loader that combines and samples from multiple single-task
data loaders. | 6259904bd4950a0f3b11183e |
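The `__iter__` above interleaves tasks in proportion to their number of batches; a sketch of the same scheduling idea, with plain lists standing in for the per-task dataloaders:

```python
import random

def multitask_batches(loaders):
    """Yield (task, batch) pairs in a shuffled order that gives each task
    exactly as many turns as it has batches."""
    names = list(loaders)
    schedule = [name for name in names for _ in range(len(loaders[name]))]
    random.shuffle(schedule)
    iterators = {name: iter(loaders[name]) for name in names}
    for name in schedule:
        yield name, next(iterators[name])

loaders = {"ner": ["ner_b0", "ner_b1"], "qa": ["qa_b0"]}
for task, batch in multitask_batches(loaders):
    print(task, batch)
```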
class OneLine: <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.value = value | Wrap any value in this class to print it on one line in the JSON file | 6259904b6e29344779b01a38 |
class ObjectIdReferenceAttribute(Attribute): <NEW_LINE> <INDENT> def __init__( self, possible_types, pk_getter, name, pk_setter=None, force_display=False, searcher=None, ): <NEW_LINE> <INDENT> self.possible_types = possible_types <NEW_LINE> self.pk_getter = pk_getter <NEW_LINE> self.setter = pk_setter <NEW_LINE> self.name = name <NEW_LINE> self.force_display = force_display <NEW_LINE> self.searcher = searcher <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> models = import_models() <NEW_LINE> classes = [getattr(models, cls_name) for cls_name in self.possible_types] <NEW_LINE> pk = Attribute(self.pk_getter).__get__(instance, None) <NEW_LINE> resource = None <NEW_LINE> for cls in classes: <NEW_LINE> <INDENT> if resource is None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resource = cls._get_item_from_pk(pk) <NEW_LINE> cls_name = cls.__name__ <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if resource is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if ( self.name in instance._model._contained_names ): <NEW_LINE> <INDENT> as_fhir = resource.to_fhir() <NEW_LINE> instance._model._refcount += 1 <NEW_LINE> as_fhir.id = f"ref{instance._model._refcount}" <NEW_LINE> instance._model._contained_items.append(as_fhir) <NEW_LINE> reference = {"reference": f"#ref{instance._model._refcount}"} <NEW_LINE> if hasattr(resource, "_as_display"): <NEW_LINE> <INDENT> reference["display"] = resource._as_display <NEW_LINE> <DEDENT> return reference <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> reference = { "reference": f"{cls_name}/{pk}", "identifier": {"system": f"{cls_name}", "value": str(pk)}, } <NEW_LINE> if self.force_display: <NEW_LINE> <INDENT> if hasattr(resource, "_as_display"): <NEW_LINE> <INDENT> reference["display"] = resource._as_display <NEW_LINE> <DEDENT> <DEDENT> return reference <NEW_LINE> <DEDENT> <DEDENT> def __set__(self, instance, reference): <NEW_LINE> <INDENT> if not self.setter: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> sys = value = None <NEW_LINE> if hasattr(reference, "reference"): <NEW_LINE> <INDENT> value = reference.reference <NEW_LINE> if value is None or '/' not in value: <NEW_LINE> <INDENT> raise MappingValidationError("Invalid subject") <NEW_LINE> <DEDENT> model_type, id = value.split('/') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> value = ObjectId(id) <NEW_LINE> <DEDENT> except InvalidId: <NEW_LINE> <INDENT> raise MappingValidationError(f"{id} is an invalid resource identifier") <NEW_LINE> <DEDENT> models = import_models() <NEW_LINE> model_cls = getattr(models, model_type, None) <NEW_LINE> if model_type is None: <NEW_LINE> <INDENT> raise MappingValidationError(f"Resource {model_type} does not exist.") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> resource = model_cls._get_item_from_pk(value) <NEW_LINE> <DEDENT> except DoesNotExistError as e: <NEW_LINE> <INDENT> if settings.STRICT_MODE['set_non_existent_reference']: <NEW_LINE> <INDENT> raise MappingValidationError(f"{e.resource_type}/{e.pk} was not found on the server.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(f"{e.resource_type}/{e.pk} was not found on the server.") <NEW_LINE> <DEDENT> <DEDENT> return super(ObjectIdReferenceAttribute, self).__set__(instance, value) | A Reference to some other Resource of one or
more possible types that may be contained.
Native pymodm references must explicitly specify the related model type,
which doesn't work for us since we accept several possible types. This is
why we use ObjectIds to store references. | 6259904b23e79379d538d8f5 |
class EbayScraper(BaseSpider): <NEW_LINE> <INDENT> name = "ebay" <NEW_LINE> endpoint = "http://svcs.ebay.com/services/search/FindingService/v1" <NEW_LINE> parameters = { 'OPERATION-NAME':'findItemsAdvanced', 'SERVICE-VERSION':'1.11.0', 'SECURITY-APPNAME':'Moomersda-68ea-4fc0-88f7-a53b23426ae', 'RESPONSE-DATA-FORMAT':'JSON', 'REST-PAYLOAD':'1', 'keywords':'burning man', 'categoryId':1305, 'paginationInput.entriesPerPage':100, 'paginationInput.pageNumber':1, } <NEW_LINE> start_urls = [get_query_url(endpoint, parameters)] <NEW_LINE> alreadyPaging = False <NEW_LINE> def parse(self, http_response): <NEW_LINE> <INDENT> data = json.loads(http_response.body)['findItemsAdvancedResponse'][0] <NEW_LINE> if not self.alreadyPaging: <NEW_LINE> <INDENT> self.alreadyPaging = True <NEW_LINE> pagecount = int(data['paginationOutput'][0]['totalPages'][0]) <NEW_LINE> if pagecount > 1: <NEW_LINE> <INDENT> for pagenum in xrange(2, pagecount + 1): <NEW_LINE> <INDENT> self.parameters['paginationInput.pageNumber'] = pagenum <NEW_LINE> yield Request(get_query_url(self.endpoint, self.parameters), callback=self.parse) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for item in data['searchResult'][0]['item']: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> posted = datetime.datetime.strptime( item['listingInfo'][0]['startTime'][0], "%Y-%m-%dT%H:%M:%S.%fZ") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> posted = None <NEW_LINE> <DEDENT> yield BumscrapeItem(url=item['viewItemURL'][0], title=item['title'][0], price=item['sellingStatus'][0]['currentPrice'][0]['__value__'], posted=posted ) | Scraper that finds BM tickets on eBay using the eBay API | 6259904be76e3b2f99fd9e03 |
class Config(dict): <NEW_LINE> <INDENT> def __init__(self, defaults = {}): <NEW_LINE> <INDENT> dict.__init__(self, defaults) <NEW_LINE> <DEDENT> def __getattr__(self, attrname): <NEW_LINE> <INDENT> return self[attrname] <NEW_LINE> <DEDENT> def __setattr__(self, attrname, value): <NEW_LINE> <INDENT> self[attrname] = value | Works like dict but provides methods to get attribute by '.',
such as:
config = Config({'foo': foo, 'bar': bar})
print config.foo
config.bar = bar2 | 6259904b596a897236128faa |
class SearchPackage6Test(BaseTest): <NEW_LINE> <INDENT> fixtureDB = True <NEW_LINE> runCmd = "aptly package search" | search package: no query | 6259904b07f4c71912bb082c |
class Message(models.Model): <NEW_LINE> <INDENT> uuid_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) <NEW_LINE> sender = models.ForeignKey( settings.AUTH_USER_MODEL, related_name="sent_messages", verbose_name=_("Sender"), null=True, on_delete=models.SET_NULL, ) <NEW_LINE> recipient = models.ForeignKey( settings.AUTH_USER_MODEL, related_name="received_messages", null=True, blank=True, verbose_name=_("Recipient"), on_delete=models.SET_NULL, ) <NEW_LINE> timestamp = models.DateTimeField(auto_now_add=True) <NEW_LINE> message = models.TextField(max_length=1000, blank=True) <NEW_LINE> unread = models.BooleanField(default=True, db_index=True) <NEW_LINE> objects = MessageQuerySet.as_manager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _("Message") <NEW_LINE> verbose_name_plural = _("Messages") <NEW_LINE> ordering = ("-timestamp",) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.message <NEW_LINE> <DEDENT> def mark_as_read(self): <NEW_LINE> <INDENT> if self.unread: <NEW_LINE> <INDENT> self.unread = False <NEW_LINE> self.save() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def send_message(sender, recipient, message): <NEW_LINE> <INDENT> new_message = Message.objects.create( sender=sender, recipient=recipient, message=message ) <NEW_LINE> channel_layer = get_channel_layer() <NEW_LINE> payload = { "type": "receive", "key": "message", "message_id": str(new_message.uuid_id), "sender": str(sender), "recipient": str(recipient), } <NEW_LINE> transaction.on_commit( lambda: async_to_sync(channel_layer.group_send)(recipient.username, payload) ) <NEW_LINE> return new_message | A private message sent between users. | 6259904b07f4c71912bb082d |
class ThicknessSlice(Slice, ROIManagerMixin): <NEW_LINE> <INDENT> roi_names = ['Left', 'Top', 'Right', 'Bottom'] <NEW_LINE> roi_nominal_angles = [180, 90, 0, -90] <NEW_LINE> roi_widths_mm = [8, 40, 8, 40] <NEW_LINE> roi_heights_mm = [40, 8, 40, 8] <NEW_LINE> dist2rois_mm = 38 <NEW_LINE> def __init__(self, dicom_stack, settings): <NEW_LINE> <INDENT> super().__init__(dicom_stack, settings) <NEW_LINE> self._setup_rois() <NEW_LINE> <DEDENT> def _setup_rois(self): <NEW_LINE> <INDENT> self.rois = OrderedDict() <NEW_LINE> for name, angle, height, width in zip(self.roi_names, self.roi_angles, self.roi_heights, self.roi_widths): <NEW_LINE> <INDENT> self.rois[name] = ThicknessROI(self.image, width, height, angle, self.dist2rois, self.phan_center) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def tolerance(self): <NEW_LINE> <INDENT> return self.settings.thickness_tolerance <NEW_LINE> <DEDENT> @property <NEW_LINE> def slice_num(self): <NEW_LINE> <INDENT> return self.settings.hu_slice_num <NEW_LINE> <DEDENT> @property <NEW_LINE> def avg_slice_thickness(self): <NEW_LINE> <INDENT> return np.mean(sorted(roi.wire_fwhm*self.settings.mm_per_pixel*0.42 for roi in self.rois.values())[-2:])/3 <NEW_LINE> <DEDENT> @property <NEW_LINE> def nominal_slice_thickness(self): <NEW_LINE> <INDENT> return self.settings.dicom_stack.metadata.SliceThickness <NEW_LINE> <DEDENT> @property <NEW_LINE> def passed(self): <NEW_LINE> <INDENT> return self.nominal_slice_thickness - self.tolerance < self.avg_slice_thickness < self.nominal_slice_thickness + self.tolerance <NEW_LINE> <DEDENT> @property <NEW_LINE> def roi_heights(self): <NEW_LINE> <INDENT> return [height / self.settings.mm_per_pixel for height in self.roi_heights_mm] <NEW_LINE> <DEDENT> @property <NEW_LINE> def roi_widths(self): <NEW_LINE> <INDENT> return [width / self.settings.mm_per_pixel for width in self.roi_widths_mm] | This class analyzes the angled wire on the HU slice to determine the slice thickness.
Attributes
----------
roi_widths_mm : list
The widths of the rectangular ROIs in mm. Follows the order of ``roi_names``.
roi_heights_mm : list
The heights of the rectangular ROIs in mm. Follows the order of ``roi_names``. | 6259904bb57a9660fecd2e76 |
class BaseHandler(RequestHandler): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(BaseHandler, self).__init__(*args, **kwargs) <NEW_LINE> self.json_args = {} <NEW_LINE> self.session = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def db(self): <NEW_LINE> <INDENT> return self.application.db <NEW_LINE> <DEDENT> @property <NEW_LINE> def redis(self): <NEW_LINE> <INDENT> return self.application.redis <NEW_LINE> <DEDENT> def set_default_headers(self): <NEW_LINE> <INDENT> self.set_header('Content-Type', 'application/json; charset=UTF-8') <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def prepare(self): <NEW_LINE> <INDENT> self.xsrf_token <NEW_LINE> if self.request.headers.get('Content-Type', '').startswith('application/json'): <NEW_LINE> <INDENT> self.json_args = json.loads(self.request.body) <NEW_LINE> <DEDENT> <DEDENT> def write_error(self, status_code, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_finish(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_current_user(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.session = Session(self) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error(e) <NEW_LINE> return {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.session.data | Handler基类 | 6259904b45492302aabfd8cd |
class SCN_INVALIDATE_RECT(Structure): <NEW_LINE> <INDENT> _fields_ = [ ("code", c_uint), ("hwnd", HWINDOW), ("invalidRect", RECT) ] | . | 6259904b23e79379d538d8f7 |
class BadCrsError(Exception): <NEW_LINE> <INDENT> def __init__(self, message: str = 'A required CRS was not valid for the operation'): <NEW_LINE> <INDENT> super().__init__(message) | Exception for cases where a CRS is invalid.
Thrown in cases such as building a converter with only one CoordinateReferenceSystem. | 6259904b16aa5153ce4018e6 |
class DevelopmentConfig(BaseConfig): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> WTF_CSRF_ENABLED = False <NEW_LINE> SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'dev.sqlite') <NEW_LINE> DEBUG_TB_ENABLED = False | Development configuration. | 6259904b8e05c05ec3f6f857 |
class StorageLinkBackend(backend.KindBackend): <NEW_LINE> <INDENT> def create(self, link, extras): <NEW_LINE> <INDENT> context = extras['nova_ctx'] <NEW_LINE> instance_id = link.source.attributes['occi.core.id'] <NEW_LINE> volume_id = link.target.attributes['occi.core.id'] <NEW_LINE> mount_point = link.attributes['occi.storagelink.deviceid'] <NEW_LINE> vm.attach_volume(instance_id, volume_id, mount_point, context) <NEW_LINE> link.attributes['occi.core.id'] = str(uuid.uuid4()) <NEW_LINE> link.attributes['occi.storagelink.deviceid'] = link.attributes['occi.storagelink.deviceid'] <NEW_LINE> link.attributes['occi.storagelink.mountpoint'] = '' <NEW_LINE> link.attributes['occi.storagelink.state'] = 'active' <NEW_LINE> <DEDENT> def delete(self, link, extras): <NEW_LINE> <INDENT> volume_id = link.target.attributes['occi.core.id'] <NEW_LINE> vm.detach_volume(volume_id, extras['nova_ctx']) | A backend for the storage links. | 6259904b8da39b475be045e9 |
class Close(Element) : <NEW_LINE> <INDENT> def __init__(self) : <NEW_LINE> <INDENT> super().__init__(CAIRO.PATH_CLOSE_PATH, False, (), Context.close_path) | represents a closing of the current path. | 6259904bd6c5a102081e3516 |
class Movement: <NEW_LINE> <INDENT> def __init__( self, max_velocity=100, min_velocity=1, stagnant_velocity=0, max_angle=100, min_angle=1, stagnant_angle=0, ) -> None: <NEW_LINE> <INDENT> self.max_velocity = max_velocity <NEW_LINE> self.min_velocity = min_velocity <NEW_LINE> self.stagnant_velocity = stagnant_velocity <NEW_LINE> self.max_angle = max_angle <NEW_LINE> self.min_angle = min_angle <NEW_LINE> self.stagnant_angle = stagnant_angle <NEW_LINE> <DEDENT> def _parse_value( self, value: int, max: int, min: int, negative: bool = False, negatives_allowed: bool = False, ) -> int: <NEW_LINE> <INDENT> if negatives_allowed: <NEW_LINE> <INDENT> if value > max: <NEW_LINE> <INDENT> return max <NEW_LINE> <DEDENT> if value < max * (-1): <NEW_LINE> <INDENT> return max * (-1) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> if negative: <NEW_LINE> <INDENT> if value > max: <NEW_LINE> <INDENT> return max * (-1) <NEW_LINE> <DEDENT> if value > min: <NEW_LINE> <INDENT> return value * (-1) <NEW_LINE> <DEDENT> if value > max * (-1): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> return max * (-1) <NEW_LINE> <DEDENT> if value < min: <NEW_LINE> <INDENT> return min <NEW_LINE> <DEDENT> if value > max: <NEW_LINE> <INDENT> return max <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def _parse_velocity( self, velocity: int, negative: bool = False, both: bool = False ) -> int: <NEW_LINE> <INDENT> return self._parse_value( velocity, self.max_velocity, self.min_velocity, negative=negative, negatives_allowed=both, ) <NEW_LINE> <DEDENT> def _parse_angle( self, angle: int, negative: bool = False, both: bool = False ) -> int: <NEW_LINE> <INDENT> return self._parse_value( angle, self.max_angle, self.min_angle, negative=negative, negatives_allowed=both, ) <NEW_LINE> <DEDENT> def drive_forward(self, misty: Misty, velocity: int) -> Dict: <NEW_LINE> <INDENT> forw = { "LinearVelocity": self._parse_velocity(velocity), "AngularVelocity": self.stagnant_angle, } <NEW_LINE> return misty.perform_action("drive", data=forw).parse_to_dict() <NEW_LINE> <DEDENT> def drive_backward(self, misty: Misty, velocity: int): <NEW_LINE> <INDENT> back = { "LinearVelocity": self._parse_velocity(velocity, negative=True), "AngularVelocity": self.stagnant_angle, } <NEW_LINE> return misty.perform_action("drive", data=back).parse_to_dict() <NEW_LINE> <DEDENT> def drive_left(self, misty: Misty, velocity: int, angle: int): <NEW_LINE> <INDENT> left = { "LinearVelocity": self._parse_velocity(velocity), "AngularVelocity": self._parse_angle(angle), } <NEW_LINE> return misty.perform_action("drive", data=left).parse_to_dict() <NEW_LINE> <DEDENT> def drive_right(self, misty: Misty, velocity: int, angle: int): <NEW_LINE> <INDENT> velocity = self._parse_velocity(velocity) <NEW_LINE> right = { "LinearVelocity": self._parse_velocity(velocity), "AngularVelocity": self._parse_angle(angle, negative=True), } <NEW_LINE> return misty.perform_action("drive", data=right).parse_to_dict() <NEW_LINE> <DEDENT> def stop_driving(self, misty: Misty): <NEW_LINE> <INDENT> return misty.perform_action("drive_stop").parse_to_dict() | Handles Misty's movement. All angles are in degrees as integers. | 6259904b94891a1f408ba0f2 |
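The `_parse_value` logic above boils down to clamping a requested velocity or angle into an allowed range, plus some sign handling; a simplified sketch of the positive-only case:

```python
def clamp(value, min_value=1, max_value=100):
    """Clamp a requested velocity or angle into [min_value, max_value]."""
    return max(min_value, min(max_value, value))

print(clamp(150), clamp(0), clamp(42))  # 100 1 42
```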
class NominalStem(Stem): <NEW_LINE> <INDENT> __tablename__ = None <NEW_LINE> __mapper_args__ = {'polymorphic_identity': Tag.NOMINAL} | Stem of a :class:`Nominal`. | 6259904b23849d37ff8524b7 |
class Channel(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100, default='ninguno') <NEW_LINE> api_key = models.CharField(max_length=100) <NEW_LINE> api_secret = models.CharField(max_length=100) <NEW_LINE> token = models.CharField(max_length=100, blank=True, null=True) <NEW_LINE> token_secret = models.CharField(max_length=100, blank=True, null=True) <NEW_LINE> email = models.CharField(max_length=100, blank=True, null=True) <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> return { 'api_key': self.api_key, 'api_secret': self.api_secret, 'token': self.token, 'token_secret': self.token_secret, 'email': self.email } | stores channel Api credentials | 6259904b7cff6e4e811b6e34 |
class LowerYield(object): <NEW_LINE> <INDENT> def __init__(self, lower, yield_point, live_vars): <NEW_LINE> <INDENT> self.lower = lower <NEW_LINE> self.context = lower.context <NEW_LINE> self.builder = lower.builder <NEW_LINE> self.genlower = lower.genlower <NEW_LINE> self.gentype = self.genlower.gentype <NEW_LINE> self.gen_state_ptr = self.genlower.gen_state_ptr <NEW_LINE> self.resume_index_ptr = self.genlower.resume_index_ptr <NEW_LINE> self.yp = yield_point <NEW_LINE> self.inst = self.yp.inst <NEW_LINE> self.live_vars = live_vars <NEW_LINE> self.live_var_indices = [lower.generator_info.state_vars.index(v) for v in live_vars] <NEW_LINE> <DEDENT> def lower_yield_suspend(self): <NEW_LINE> <INDENT> self.lower.debug_print("# generator suspend") <NEW_LINE> for state_index, name in zip(self.live_var_indices, self.live_vars): <NEW_LINE> <INDENT> state_slot = cgutils.gep_inbounds(self.builder, self.gen_state_ptr, 0, state_index) <NEW_LINE> ty = self.gentype.state_types[state_index] <NEW_LINE> fetype = self.lower.typeof(name) <NEW_LINE> self.lower._alloca_var(name, fetype) <NEW_LINE> val = self.lower.loadvar(name) <NEW_LINE> if self.context.enable_nrt: <NEW_LINE> <INDENT> self.context.nrt.incref(self.builder, ty, val) <NEW_LINE> <DEDENT> self.context.pack_value(self.builder, ty, val, state_slot) <NEW_LINE> <DEDENT> indexval = Constant.int(self.resume_index_ptr.type.pointee, self.inst.index) <NEW_LINE> self.builder.store(indexval, self.resume_index_ptr) <NEW_LINE> self.lower.debug_print("# generator suspend end") <NEW_LINE> <DEDENT> def lower_yield_resume(self): <NEW_LINE> <INDENT> self.genlower.create_resumption_block(self.lower, self.inst.index) <NEW_LINE> self.lower.debug_print("# generator resume") <NEW_LINE> for state_index, name in zip(self.live_var_indices, self.live_vars): <NEW_LINE> <INDENT> state_slot = cgutils.gep_inbounds(self.builder, self.gen_state_ptr, 0, state_index) <NEW_LINE> ty = self.gentype.state_types[state_index] <NEW_LINE> val = self.context.unpack_value(self.builder, ty, state_slot) <NEW_LINE> self.lower.storevar(val, name) <NEW_LINE> if self.context.enable_nrt: <NEW_LINE> <INDENT> self.context.nrt.decref(self.builder, ty, val) <NEW_LINE> <DEDENT> <DEDENT> self.lower.debug_print("# generator resume end") | Support class for lowering a particular yield point. | 6259904b097d151d1a2c2468 |
class IRuleConditionDirective(IRuleElementDirective): <NEW_LINE> <INDENT> pass | An element directive describing what is logically a condition element.
| 6259904b462c4b4f79dbcdf9 |
class NotInitialized(Exception): <NEW_LINE> <INDENT> pass | Raise when an entity is not initialized but accessed as if it were. | 6259904b1f037a2d8b9e5269 |
class DevelopmentConfig(BaseConfig): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> TESTING = True <NEW_LINE> SQLALCHEMY_DATABASE_URI = DATABASE_URL | Development configuration | 6259904b4428ac0f6e65992b |
class Cells: <NEW_LINE> <INDENT> def __init__(self, x, sim): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> return <NEW_LINE> <DEDENT> def correct_for_pbc(self, sim): <NEW_LINE> <INDENT> self.xi = np.zeros((sim.nsteps, 2, sim.ncells), dtype=np.float32) <NEW_LINE> self.xi[0, :, :] = self.x[0, :, :] <NEW_LINE> for tstep in range(1, sim.nsteps): <NEW_LINE> <INDENT> dx = self.x[tstep, 0, :] - self.x[tstep-1, 0, :] <NEW_LINE> dy = self.x[tstep, 1, :] - self.x[tstep-1, 1, :] <NEW_LINE> self.xi[tstep, 0, :] = self.xi[tstep-1, 0, :] + neigh_min_array(dx, sim.lx) <NEW_LINE> self.xi[tstep, 1, :] = self.xi[tstep-1, 1, :] + neigh_min_array(dy, sim.ly) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def calculate_inter_scattering(self, sim, qvector): <NEW_LINE> <INDENT> nks = 24 <NEW_LINE> kxs = np.zeros((nks), dtype=np.float64) <NEW_LINE> kys = np.zeros((nks), dtype=np.float64) <NEW_LINE> for j in range(nks): <NEW_LINE> <INDENT> kxs[j] = np.cos(2*np.pi*j/nks) * qvector <NEW_LINE> kys[j] = np.sin(2*np.pi*j/nks) * qvector <NEW_LINE> <DEDENT> ndelay = int(sim.nsteps/2) <NEW_LINE> delay = np.zeros((ndelay), dtype=np.int64) <NEW_LINE> Fs = np.zeros((ndelay), dtype=np.float64) <NEW_LINE> Fs[0] = 1.0 <NEW_LINE> for d in range(1, ndelay): <NEW_LINE> <INDENT> delay[d] = d*sim.dt <NEW_LINE> dx = self.xi[d:, 0, :] - self.xi[:-d, 0, :] <NEW_LINE> dy = self.xi[d:, 1, :] - self.xi[:-d, 1, :] <NEW_LINE> avg_over_qvector = 0. <NEW_LINE> for j in range(nks): <NEW_LINE> <INDENT> cost = np.sum(np.cos(kxs[j]*dx), axis=1) <NEW_LINE> sint = np.sum(np.sin(kys[j]*dy), axis=1) <NEW_LINE> avg_over_qvector += np.mean(cost**2 + sint**2)/sim.ncells <NEW_LINE> <DEDENT> Fs[d] /= avg_over_qvector/nks <NEW_LINE> <DEDENT> return delay, Fs | data structure for storing cell information | 6259904b30dc7b76659a0c2e |
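The `correct_for_pbc` method above relies on a `neigh_min_array` helper that is not shown in the sample; it presumably applies the minimum-image convention, roughly as sketched here (an assumption):

```python
import numpy as np

def neigh_min_array(d, box_length):
    """Map raw per-step displacements onto their minimum-image equivalents."""
    return d - box_length * np.round(d / box_length)

# A particle that wraps from x=9.8 to x=0.1 in a box of length 10 really moved +0.3:
print(neigh_min_array(np.array([0.1 - 9.8]), 10.0))  # approx. [0.3]
```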
class PersistentArray(object): <NEW_LINE> <INDENT> search = None <NEW_LINE> def __init__(self, ID, *url, **kw): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.ID = hash(ID) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise TypeError("Item IDs must be hashable") <NEW_LINE> <DEDENT> if kw.pop('search', False): <NEW_LINE> <INDENT> self.search = None <NEW_LINE> <DEDENT> if url: <NEW_LINE> <INDENT> self.t = Transactor(self.ID, url[0], **kw) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.t = Transactor(self.ID) <NEW_LINE> <DEDENT> <DEDENT> def shutdown(self, *null): <NEW_LINE> <INDENT> return self.t.shutdown() <NEW_LINE> <DEDENT> def write(self, funcName, *args, **kw): <NEW_LINE> <INDENT> def writeDone(noneResult, d1): <NEW_LINE> <INDENT> x, y, z = [hash(arg) for arg in args[0:3]] <NEW_LINE> document = "%d-%d" % (self.groupID, x) <NEW_LINE> section = "%d-%d" % (y, z) <NEW_LINE> d3 = self.search.index( value, document=document, section=section) <NEW_LINE> d3.addCallback(self.search.ready) <NEW_LINE> d1.callback(None) <NEW_LINE> <DEDENT> func = getattr(self.t, funcName) <NEW_LINE> kwNew = {'niceness':kw['niceness']} <NEW_LINE> if self.search is None: <NEW_LINE> <INDENT> return func(*args, **kwNew) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d1 = defer.Deferred() <NEW_LINE> self.search.busy() <NEW_LINE> d2 = func(*args, **kwNew) <NEW_LINE> d2.addCallback(writeDone, d1) <NEW_LINE> return d1 <NEW_LINE> <DEDENT> <DEDENT> def get(self, x, y, z): <NEW_LINE> <INDENT> d = self.t.dt.deferToAll() <NEW_LINE> d.addCallback(lambda _: self.t.load(x, y, z)) <NEW_LINE> return d <NEW_LINE> <DEDENT> def set(self, x, y, z, value): <NEW_LINE> <INDENT> def loaded(loadedValue): <NEW_LINE> <INDENT> if loadedValue is None: <NEW_LINE> <INDENT> return self.write( "insert", x, y, z, value, niceness=NICENESS_WRITE) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.write( "update", x, y, z, value, niceness=NICENESS_WRITE) <NEW_LINE> <DEDENT> <DEDENT> d = self.t.load(x, y, z) <NEW_LINE> d.addCallback(loaded) <NEW_LINE> self.t.dt.put(d) <NEW_LINE> return d <NEW_LINE> <DEDENT> def delete(self, x, y, z): <NEW_LINE> <INDENT> d = self.write("delete", x, y, z, niceness=NICENESS_WRITE) <NEW_LINE> self.t.dt.put(d) <NEW_LINE> return d <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> d =self.write("clear", niceness=0) <NEW_LINE> self.t.dt.put(d) <NEW_LINE> return d | I am a three-dimensional array of Python objects, addressable by any
three-way combination of hashable Python objects. You can use me as a
two-dimensional array by simply using some constant, e.g., C{None} when
supplying an address for my third dimension.
B{IMPORTANT}: Make sure you call my L{shutdown} method for an instance of
me that you're done with before allowing that instance to be deleted. | 6259904b82261d6c527308c3 |
class VideoBatchSampler(Sampler): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def __iter__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def __len__(self): <NEW_LINE> <INDENT> pass | Base class for all video samplers.
Every `VideoBatchSampler` subclass has to provide an `__iter__()` method,
providing a way to iterate over indices of dataset elements, and
a `__len__()` method that returns the length of the returned iterators. | 6259904b07d97122c421809d |
class Tester(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.B = combfuncs.createLookup(['a','b','c','d','e']) <NEW_LINE> self.k = 3 <NEW_LINE> <DEDENT> def testall(self): <NEW_LINE> <INDENT> v = (self.B if type(self.B) == int else len(self.B[1])) <NEW_LINE> rk = 0 <NEW_LINE> for K in ksubsetlex.all(self.B, self.k): <NEW_LINE> <INDENT> self.assertEqual(ksubsetlex.rank(self.B, K), rk) <NEW_LINE> self.assertEqual(ksubsetlex.unrank(self.B, self.k, rk), K) <NEW_LINE> rk += 1 <NEW_LINE> <DEDENT> self.assertEqual(rk, combfuncs.binom(v, self.k)) | Unit testing class for this module.
We perform all operations over a given base set and check their
interactions for correctness. | 6259904b07f4c71912bb082e |
class Db_gz_group(object): <NEW_LINE> <INDENT> def __init__(self, conn, cur, session): <NEW_LINE> <INDENT> self.session = session <NEW_LINE> self.member_pat = re.compile('\d+') <NEW_LINE> self.name_pat = re.compile(r'(推荐.+)') <NEW_LINE> self.conn = conn <NEW_LINE> self.cur = cur <NEW_LINE> <DEDENT> def get_all_page(self): <NEW_LINE> <INDENT> groups_url = [] <NEW_LINE> for i in range(0, 2001, 20): <NEW_LINE> <INDENT> url = r"https://www.douban.com/group/search?start=%s&cat=1019&sort=relevance&q=广州" % i <NEW_LINE> groups_url.append(url) <NEW_LINE> <DEDENT> return groups_url <NEW_LINE> <DEDENT> def get_group_info(self, page_url): <NEW_LINE> <INDENT> url = page_url <NEW_LINE> print('正在处理:%s' % url) <NEW_LINE> r = self.session.get(url) <NEW_LINE> html = r.content <NEW_LINE> try: <NEW_LINE> <INDENT> bsObj = bs(html, 'lxml') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> bsObj = bs(html, 'html.parser') <NEW_LINE> <DEDENT> groups = bsObj.find("div", {"class": "groups"}).findAll("div", {"class": "result"}) <NEW_LINE> for group in groups: <NEW_LINE> <INDENT> a_tag = group.find("a", {"class": "nbg"}) <NEW_LINE> group_name = a_tag.attrs['title'] <NEW_LINE> group_name = self.name_pat.sub('', group_name) <NEW_LINE> group_url = a_tag.attrs['href'] <NEW_LINE> group_info = group.find("div", {"class": "info"}).get_text() <NEW_LINE> group_member = self.member_pat.search(group_info).group(0) <NEW_LINE> print("%s\n%s\n%s\n***************************************" % (group_name, group_url, group_member)) <NEW_LINE> <DEDENT> <DEDENT> def insert_db(self, name, url, member): <NEW_LINE> <INDENT> print('插入数据库') <NEW_LINE> try: <NEW_LINE> <INDENT> sql_insert = "INSERT INTO douban_gz_group (group_name, group_url, group_member) values ('%s', '%s', '%s')" % (name, url, member) <NEW_LINE> self.cur.execute(sql_insert) <NEW_LINE> self.conn.commit() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.conn.rollback() | 单线程版
(single-threaded version) This program extracts all Douban groups whose names contain "广州" (Guangzhou),
retrieving each group's name, member count, and link | 6259904b0c0af96317c5775e |
class DateCoder(DeterministicCoder): <NEW_LINE> <INDENT> def _create_impl(self): <NEW_LINE> <INDENT> return coder_impl.DateCoderImpl() <NEW_LINE> <DEDENT> def to_type_hint(self): <NEW_LINE> <INDENT> return datetime.date | Coder for Date | 6259904bcad5886f8bdc5a7b |
class add_partitions_args: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.LIST, 'new_parts', (TType.STRUCT,(Partition, Partition.thrift_spec)), None, ), ) <NEW_LINE> def __init__(self, new_parts=None,): <NEW_LINE> <INDENT> self.new_parts = new_parts <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.new_parts = [] <NEW_LINE> (_etype388, _size385) = iprot.readListBegin() <NEW_LINE> for _i389 in xrange(_size385): <NEW_LINE> <INDENT> _elem390 = Partition() <NEW_LINE> _elem390.read(iprot) <NEW_LINE> self.new_parts.append(_elem390) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('add_partitions_args') <NEW_LINE> if self.new_parts is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('new_parts', TType.LIST, 1) <NEW_LINE> oprot.writeListBegin(TType.STRUCT, len(self.new_parts)) <NEW_LINE> for iter391 in self.new_parts: <NEW_LINE> <INDENT> iter391.write(oprot) <NEW_LINE> <DEDENT> oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- new_parts | 6259904b21a7993f00c67364 |
class OptimalPlaceEngine(BaseDiskPlacementEngine): <NEW_LINE> <INDENT> def __init__(self, datastore_manager, option): <NEW_LINE> <INDENT> super(OptimalPlaceEngine, self).__init__(datastore_manager, option) <NEW_LINE> <DEDENT> @log_duration <NEW_LINE> def place(self, disks_placement, constraints): <NEW_LINE> <INDENT> disks_total_size = self.disk_util.disks_capacity_gb( disks_placement.disks) <NEW_LINE> optimal_datastore = disks_placement.selector.get_datastore() <NEW_LINE> if disks_total_size > disks_placement.selector.free_space(optimal_datastore): <NEW_LINE> <INDENT> return DiskPlaceResult( result=PlaceResultCode.NOT_ENOUGH_DATASTORE_CAPACITY, disks_placement=disks_placement) <NEW_LINE> <DEDENT> disks_placement.selector.consume_datastore_space(optimal_datastore, disks_total_size) <NEW_LINE> for disk in disks_placement.disks: <NEW_LINE> <INDENT> disks_placement.placement_list.append( AgentResourcePlacement(AgentResourcePlacement.DISK, disk.id, optimal_datastore) ) <NEW_LINE> <DEDENT> return DiskPlaceResult(result=PlaceResultCode.OK, disks_placement=disks_placement) | Optimal place engine tries to put all disks into one datastore. This
engine doesn't look into constraints so far. | 6259904bec188e330fdf9c99 |
class GitCache(object): <NEW_LINE> <INDENT> def __init__(self, cache_time=900): <NEW_LINE> <INDENT> self.cache_time = int(cache_time) <NEW_LINE> self.cache = dict() <NEW_LINE> <DEDENT> def store(self, key, object, type='object'): <NEW_LINE> <INDENT> self.cache[key + '_' + type] = dict(object=object, time=datetime.now()) <NEW_LINE> <DEDENT> def read(self, key, type='object'): <NEW_LINE> <INDENT> if(not self.cache.has_key(key + '_' + type)): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if((self.cache[key + '_' + type]['time'] + timedelta(0, self.cache_time) > datetime.now())): <NEW_LINE> <INDENT> return self.cache[key + '_' + type]['object'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> del(self.cache[key + '_' + type]) <NEW_LINE> return None <NEW_LINE> <DEDENT> <DEDENT> def flush(self): <NEW_LINE> <INDENT> self.cache = dict() | Internal caching system, created to avoid parse the same files multiple
times. | 6259904bd4950a0f3b111840 |
class Screen(object): <NEW_LINE> <INDENT> def __init__(self, name, cocktails=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.cocktails = cocktails if cocktails is not None else [] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.cocktails) <NEW_LINE> <DEDENT> def add_cocktail(self, cocktail): <NEW_LINE> <INDENT> self.cocktails.append(cocktail) <NEW_LINE> <DEDENT> def print_stats(self): <NEW_LINE> <INDENT> cmap = {} <NEW_LINE> for cocktail in self.cocktails: <NEW_LINE> <INDENT> for c in cocktail.components: <NEW_LINE> <INDENT> cmap[c.name] = cmap.get(c.name, 0) + 1 <NEW_LINE> <DEDENT> <DEDENT> print("Name: %s" % self.name) <NEW_LINE> print("Wells: %s" % len(self)) <NEW_LINE> print("Distinct Compounds: %s" % len(cmap.keys())) <NEW_LINE> for k in sorted(cmap, key=cmap.get, reverse=True): <NEW_LINE> <INDENT> print("%s: %s" % (k, cmap[k])) <NEW_LINE> <DEDENT> <DEDENT> def _set_summary_stats(self, path): <NEW_LINE> <INDENT> cols = ['molecular_weight', 'density'] <NEW_LINE> data = {} <NEW_LINE> with open(path) as csvfile: <NEW_LINE> <INDENT> reader = csv.DictReader(csvfile, delimiter="\t") <NEW_LINE> for row in reader: <NEW_LINE> <INDENT> data[row['name'].lower()] = row <NEW_LINE> <DEDENT> <DEDENT> for ck in self.cocktails: <NEW_LINE> <INDENT> for cp in ck.components: <NEW_LINE> <INDENT> if cp.name not in data: <NEW_LINE> <INDENT> logger.info("Missing summary data for compound: %s" % cp.name) <NEW_LINE> continue <NEW_LINE> <DEDENT> row = data[cp.name] <NEW_LINE> for key in cols: <NEW_LINE> <INDENT> if key in row and len(row[key]) > 0: <NEW_LINE> <INDENT> setattr(cp, key, float(row[key])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(cp, key, None) <NEW_LINE> <DEDENT> <DEDENT> if 'smiles' in row and len(row['smiles']) > 0: <NEW_LINE> <INDENT> cp.smiles = row['smiles'] <NEW_LINE> try: <NEW_LINE> <INDENT> mol = smilin(cp.smiles) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logger.info("Invalid smiles format, failed to parse smiles for compound: %s" % cp.name) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> logger.info("Missing smiles data for compound: %s" % cp.name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def json(self): <NEW_LINE> <INDENT> schema = ScreenSerializer() <NEW_LINE> return schema.dumps(self).data <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "[ %s ]" % ", ".join([self.name,str(len(self))]) | This class represents a macromolecular crystallization screen.
A screen is made of of one or more cocktails. | 6259904b0a366e3fb87ddde1 |
class fields(SymbolDef): <NEW_LINE> <INDENT> os_id = 'os-id' <NEW_LINE> os_version_id = 'os-version-id' | Symbols for each field of a packaging meta-data unit. | 6259904b8e05c05ec3f6f858 |
class MetaResultFlatComplete(FlatFileExporter): <NEW_LINE> <INDENT> def _get_header_row(self): <NEW_LINE> <INDENT> header = [] <NEW_LINE> header.extend(Study.flat_complete_header_row()) <NEW_LINE> header.extend(models.MetaProtocol.flat_complete_header_row()) <NEW_LINE> header.extend(models.MetaResult.flat_complete_header_row()) <NEW_LINE> header.extend(models.SingleResult.flat_complete_header_row()) <NEW_LINE> return header <NEW_LINE> <DEDENT> def _get_data_rows(self): <NEW_LINE> <INDENT> rows = [] <NEW_LINE> for obj in self.queryset: <NEW_LINE> <INDENT> ser = obj.get_json(json_encode=False) <NEW_LINE> row = [] <NEW_LINE> row.extend(Study.flat_complete_data_row(ser["protocol"]["study"])) <NEW_LINE> row.extend(models.MetaProtocol.flat_complete_data_row(ser["protocol"])) <NEW_LINE> row.extend(models.MetaResult.flat_complete_data_row(ser)) <NEW_LINE> if len(ser["single_results"]) == 0: <NEW_LINE> <INDENT> row.extend([None] * 10) <NEW_LINE> rows.append(row) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for sr in ser["single_results"]: <NEW_LINE> <INDENT> row_copy = list(row) <NEW_LINE> row_copy.extend(models.SingleResult.flat_complete_data_row(sr)) <NEW_LINE> rows.append(row_copy) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return rows | Returns a complete export of all data required to rebuild the the
epidemiological meta-result study type from scratch. | 6259904b596a897236128fac |
class Moisture_Event(object): <NEW_LINE> <INDENT> def __init__(self, area_id, moisture, min, max, date): <NEW_LINE> <INDENT> super(Moisture_Event, self).__init__() <NEW_LINE> self.area_id = area_id <NEW_LINE> self.moisture = moisture <NEW_LINE> self.min = min <NEW_LINE> self.max = max <NEW_LINE> self.date = date <NEW_LINE> <DEDENT> def to_JSON(self): <NEW_LINE> <INDENT> return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4) | Object that represents the data acquired from the sensors. on V1 this
data comes from a CSV file with the following structure
area_id, moisture, min, max, date | 6259904bbe383301e0254c16 |
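The `to_JSON` trick above (`default=lambda o: o.__dict__`) serializes any plain object through its attribute dict; a small self-contained illustration with a hypothetical stand-in class:

```python
import json

class Event:
    # Hypothetical stand-in for Moisture_Event; only __dict__ matters here.
    def __init__(self, area_id, moisture):
        self.area_id, self.moisture = area_id, moisture

print(json.dumps(Event("bed-1", 0.42), default=lambda o: o.__dict__, sort_keys=True))
# {"area_id": "bed-1", "moisture": 0.42}
```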
class WrappedIterableDataset(torch.utils.data.IterableDataset): <NEW_LINE> <INDENT> def __init__(self, hf_iterable, verbose: bool = True): <NEW_LINE> <INDENT> self.hf_iterable = hf_iterable <NEW_LINE> self.verbose = verbose <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> started = False <NEW_LINE> logger.info("Pre-fetching training samples...") <NEW_LINE> while True: <NEW_LINE> <INDENT> for sample in self.hf_iterable: <NEW_LINE> <INDENT> if not started: <NEW_LINE> <INDENT> logger.info("Began iterating minibatches!") <NEW_LINE> started = True <NEW_LINE> <DEDENT> yield sample | Wraps huggingface IterableDataset as pytorch IterableDataset, implement default methods for DataLoader | 6259904b1f037a2d8b9e526a |
class UpdateOrderApi(object): <NEW_LINE> <INDENT> def __init__(self, api_client=None): <NEW_LINE> <INDENT> config = Configuration() <NEW_LINE> if api_client: <NEW_LINE> <INDENT> self.api_client = api_client <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not config.api_client: <NEW_LINE> <INDENT> config.api_client = ApiClient() <NEW_LINE> <DEDENT> self.api_client = config.api_client <NEW_LINE> <DEDENT> <DEDENT> def update_order(self, json, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('callback'): <NEW_LINE> <INDENT> return self.update_order_with_http_info(json, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.update_order_with_http_info(json, **kwargs) <NEW_LINE> return data <NEW_LINE> <DEDENT> <DEDENT> def update_order_with_http_info(self, json, **kwargs): <NEW_LINE> <INDENT> all_params = ['json'] <NEW_LINE> all_params.append('callback') <NEW_LINE> all_params.append('_return_http_data_only') <NEW_LINE> all_params.append('_preload_content') <NEW_LINE> all_params.append('_request_timeout') <NEW_LINE> params = locals() <NEW_LINE> for key, val in iteritems(params['kwargs']): <NEW_LINE> <INDENT> if key not in all_params: <NEW_LINE> <INDENT> raise TypeError( "Got an unexpected keyword argument '%s'" " to method update_order" % key ) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> if ('json' not in params) or (params['json'] is None): <NEW_LINE> <INDENT> raise ValueError("Missing the required parameter `json` when calling `update_order`") <NEW_LINE> <DEDENT> collection_formats = {} <NEW_LINE> resource_path = '/v1/update_order'.replace('{format}', 'json') <NEW_LINE> path_params = {} <NEW_LINE> query_params = {} <NEW_LINE> header_params = {} <NEW_LINE> form_params = [] <NEW_LINE> local_var_files = {} <NEW_LINE> body_params = None <NEW_LINE> if 'json' in params: <NEW_LINE> <INDENT> body_params = params['json'] <NEW_LINE> <DEDENT> header_params['Accept'] = self.api_client. select_header_accept(['application/json']) <NEW_LINE> header_params['Content-Type'] = self.api_client. select_header_content_type(['application/json']) <NEW_LINE> auth_settings = ['api_key'] <NEW_LINE> return self.api_client.call_api(resource_path, 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='EventResponse', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen | 6259904b4428ac0f6e65992d |
class TestEmpty: <NEW_LINE> <INDENT> def test_empty_class_is_not_none(self): <NEW_LINE> <INDENT> assert Empty is not None <NEW_LINE> assert Empty != None <NEW_LINE> <DEDENT> def test_empty_class_evaluates_to_false(self): <NEW_LINE> <INDENT> assert not Empty <NEW_LINE> <DEDENT> def test_empty_class_is_empty(self): <NEW_LINE> <INDENT> assert Empty is Empty <NEW_LINE> assert Empty == Empty <NEW_LINE> <DEDENT> def test_empty_instance_evaluates_to_false(self): <NEW_LINE> <INDENT> assert not Empty() <NEW_LINE> <DEDENT> def test_empty_instance_is_unique(self): <NEW_LINE> <INDENT> empty = Empty() <NEW_LINE> assert isinstance(empty, Empty) <NEW_LINE> assert empty is not Empty <NEW_LINE> assert empty != Empty <NEW_LINE> assert empty is not Empty() <NEW_LINE> assert empty != Empty() <NEW_LINE> assert empty is empty <NEW_LINE> assert empty == empty | Empty tests. | 6259904bf7d966606f7492b7 |
class WrapFormSpawner(FormMixin, WrapSpawner): <NEW_LINE> <INDENT> def set_class(self, data): <NEW_LINE> <INDENT> raise NotImplementedError('Must set_class based on form or saved cfg') <NEW_LINE> <DEDENT> def construct_child(self): <NEW_LINE> <INDENT> self.log.info("construct_child") <NEW_LINE> self.set_class(self.user_options) <NEW_LINE> super().construct_child() <NEW_LINE> <DEDENT> def load_child_class(self, state): <NEW_LINE> <INDENT> self.log.info("Load_child_class: state=%s", state) <NEW_LINE> self.child_state = state.get('child_state', {}) <NEW_LINE> self.user_options = state.get('child_conf', {}) <NEW_LINE> <DEDENT> def get_state(self): <NEW_LINE> <INDENT> state = super().get_state() <NEW_LINE> state.update(self.child_config) <NEW_LINE> return state | WrapSpawner modification to select the child class based on the
form information generated by a form-class above. Subclasses must
implement a set_class method that gets the formdata and must use that
to return a Spawner class to use | 6259904b07d97122c421809f |
class User(db.Model): <NEW_LINE> <INDENT> __tablename__ = "user" <NEW_LINE> user_id = db.Column(db.Integer, primary_key=True) <NEW_LINE> username = db.Column(db.String(50), unique=True) <NEW_LINE> email = db.Column(db.String(100), unique=True) <NEW_LINE> password_hash = db.Column(db.String(128)) <NEW_LINE> profile_pic = db.Column(db.String(100)) <NEW_LINE> def __init__(self, email, username, pwd): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.email = email <NEW_LINE> self.hash_password(pwd) <NEW_LINE> <DEDENT> def hash_password(self, pwd): <NEW_LINE> <INDENT> self.password_hash = pwd_context.encrypt(pwd) <NEW_LINE> <DEDENT> def verify_password(self, pwd): <NEW_LINE> <INDENT> return pwd_context.verify(pwd, self.password_hash) <NEW_LINE> <DEDENT> def generate_auth_token(self, expiration=600): <NEW_LINE> <INDENT> s = Serializer(app.config['SECRET_KEY'], expires_in=expiration) <NEW_LINE> return s.dumps({'user_id': self.user_id}) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def verify_auth_token(token): <NEW_LINE> <INDENT> s = Serializer(app.config['SECRET_KEY']) <NEW_LINE> try: <NEW_LINE> <INDENT> data = s.loads(token) <NEW_LINE> <DEDENT> except SignatureExpired: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> except BadSignature: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> user = User.query.get(data['user_id']) <NEW_LINE> return user | User db model.
Table of user information.
Args:
pass
Attributes:
user_id (int): Integer representation/primary key
username (str): Unique username
email (str): Unique email
profile_pic (str): path to profile picture | 6259904be64d504609df9dce |
class DescribeRecordDatesByChannelRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.DeviceId = None <NEW_LINE> self.ChannelId = None <NEW_LINE> self.Type = None <NEW_LINE> self.Limit = None <NEW_LINE> self.Offset = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.DeviceId = params.get("DeviceId") <NEW_LINE> self.ChannelId = params.get("ChannelId") <NEW_LINE> self.Type = params.get("Type") <NEW_LINE> self.Limit = params.get("Limit") <NEW_LINE> self.Offset = params.get("Offset") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | DescribeRecordDatesByChannel request parameter structure
| 6259904b8e71fb1e983bcec2 |
class HttpParserUpgrade(ParseError): <NEW_LINE> <INDENT> pass | Raised when a protocol upgrade is requested | 6259904b21a7993f00c67366
class Entity(): <NEW_LINE> <INDENT> _velocity = Vec2.zeros() <NEW_LINE> _acceleration = Vec2.zeros() <NEW_LINE> def __init__(self, position=Vec2.zeros(), material=Material(mass=0), color=(255, 0, 0)) -> None: <NEW_LINE> <INDENT> assert isinstance(position, Vec2) and isinstance(material, Material) <NEW_LINE> assert isinstance(color, tuple) and len(color) == 3 <NEW_LINE> self._position = position <NEW_LINE> self._prev_pos = position <NEW_LINE> self._material = material <NEW_LINE> self.color = color <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f"Entity: '{type(self).__name__}'; Pos: {self.position}); Mat: {self.material}" <NEW_LINE> <DEDENT> def accelerate(self, rate: float, dtime: float) -> None: <NEW_LINE> <INDENT> self.acceleration += rate * dtime <NEW_LINE> <DEDENT> def update_velocity(self): <NEW_LINE> <INDENT> self.velocity = self.position - self.prev_pos <NEW_LINE> <DEDENT> def update_prev_pos(self): <NEW_LINE> <INDENT> self.prev_pos = self.position <NEW_LINE> <DEDENT> def reset_forces(self) -> None: <NEW_LINE> <INDENT> self.acceleration = Vec2.zeros() <NEW_LINE> <DEDENT> @property <NEW_LINE> def position(self) -> Vec2: <NEW_LINE> <INDENT> return self._position <NEW_LINE> <DEDENT> @position.setter <NEW_LINE> def position(self, new_vec: Vec2) -> None: <NEW_LINE> <INDENT> assert isinstance(new_vec, Vec2) <NEW_LINE> self._position = new_vec <NEW_LINE> <DEDENT> @property <NEW_LINE> def prev_pos(self) -> Vec2: <NEW_LINE> <INDENT> return self._prev_pos <NEW_LINE> <DEDENT> @prev_pos.setter <NEW_LINE> def prev_pos(self, new_vec: Vec2) -> None: <NEW_LINE> <INDENT> assert isinstance(new_vec, Vec2) <NEW_LINE> self._prev_pos = new_vec <NEW_LINE> <DEDENT> @property <NEW_LINE> def velocity(self) -> Vec2: <NEW_LINE> <INDENT> return self._velocity <NEW_LINE> <DEDENT> @velocity.setter <NEW_LINE> def velocity(self, new_vec: Vec2) -> None: <NEW_LINE> <INDENT> assert isinstance(new_vec, Vec2) <NEW_LINE> self._velocity = new_vec <NEW_LINE> <DEDENT> @property <NEW_LINE> def acceleration(self) -> Vec2: <NEW_LINE> <INDENT> return self._acceleration <NEW_LINE> <DEDENT> @acceleration.setter <NEW_LINE> def acceleration(self, new_vec: Vec2) -> None: <NEW_LINE> <INDENT> assert isinstance(new_vec, Vec2) <NEW_LINE> self._acceleration = new_vec <NEW_LINE> <DEDENT> @property <NEW_LINE> def material(self) -> Material: <NEW_LINE> <INDENT> return self._material <NEW_LINE> <DEDENT> @material.setter <NEW_LINE> def material(self, new_mat: Material) -> None: <NEW_LINE> <INDENT> assert isinstance(new_mat, Material) <NEW_LINE> self._material = new_mat | Basic backbone of all game interactive elements. It stores its position or velocity or acceleration as vector (Vec2) and additional information as 'Material',
which is used for computing 'Marble' velocity, speed and bounciness. | 6259904bd7e4931a7ef3d474
class OrgJobDescription(GDataBase): <NEW_LINE> <INDENT> _tag = 'orgJobDescription' | The Google Contacts OrgJobDescription element. | 6259904b23e79379d538d8fb |
class CharDatabase(SQLObject): <NEW_LINE> <INDENT> name = StringCol(length=255, unique=True) <NEW_LINE> stat_survival = IntCol() <NEW_LINE> stat_movement = IntCol() <NEW_LINE> stat_accuracy = IntCol() <NEW_LINE> stat_strength = IntCol() <NEW_LINE> stat_evasion = IntCol() <NEW_LINE> stat_luck = IntCol() <NEW_LINE> stat_speed = IntCol() <NEW_LINE> defense_insanity = IntCol() <NEW_LINE> defense_head = IntCol() <NEW_LINE> defense_arms = IntCol() <NEW_LINE> defense_body = IntCol() <NEW_LINE> defense_waist = IntCol() <NEW_LINE> defense_legs = IntCol() <NEW_LINE> weapon_speed = IntCol() <NEW_LINE> weapon_accuracy = IntCol() <NEW_LINE> weapon_strength = IntCol() | connect to mysql database | 6259904b63d6d428bbee3bc8 |
class WorkflowTestificationError(Error): <NEW_LINE> <INDENT> pass | You provided an invalid test/stub definition. | 6259904b8e05c05ec3f6f859 |
class Meta: <NEW_LINE> <INDENT> unique_together = ('category', 'order',) | Defines composite unique key of category and order | 6259904bb830903b9686ee79 |
class Address(namedtuple("Address", "name route mailbox host")): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return formataddr(( to_unicode(self.name), to_unicode(self.mailbox) + '@' + to_unicode(self.host))) | Represents electronic mail addresses. Used to store addresses in
:py:class:`Envelope`.
:ivar name: The address "personal name".
:ivar route: SMTP source route (rarely used).
:ivar mailbox: Mailbox name (what comes just before the @ sign).
:ivar host: The host/domain name.
As an example, an address header that looks like::
Mary Smith <[email protected]>
would be represented as::
Address(name=u'Mary Smith', route=None, mailbox=u'mary', host=u'foo.com')
See :rfc:`2822` for more detail.
See also :py:class:`Envelope` for information about handling of
"group syntax". | 6259904b7cff6e4e811b6e38 |
class LNKGenerator(Generator): <NEW_LINE> <INDENT> def check(self): <NEW_LINE> <INDENT> if sys.platform != "win32": <NEW_LINE> <INDENT> logging.error(" [!] You have to run on Windows OS to build this file format.") <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def buildLnkWithWscript(self, target, targetArgs=None, iconPath=None, workingDirectory = ""): <NEW_LINE> <INDENT> shell = Dispatch("WScript.Shell") <NEW_LINE> shortcut = shell.CreateShortcut(self.outputFilePath) <NEW_LINE> shortcut.Targetpath = target <NEW_LINE> shortcut.WorkingDirectory = workingDirectory <NEW_LINE> if targetArgs: <NEW_LINE> <INDENT> shortcut.Arguments = targetArgs <NEW_LINE> <DEDENT> if iconPath: <NEW_LINE> <INDENT> shortcut.IconLocation = iconPath <NEW_LINE> <DEDENT> shortcut.save() <NEW_LINE> <DEDENT> def generate(self): <NEW_LINE> <INDENT> logging.info(" [+] Generating %s file..." % self.outputFileType) <NEW_LINE> paramDict = OrderedDict([("Shortcut_Target",None), ("Shortcut_Icon",None) ]) <NEW_LINE> self.fillInputParams(paramDict) <NEW_LINE> iconPath = paramDict["Shortcut_Icon"] <NEW_LINE> CmdLine = paramDict["Shortcut_Target"].split(' ', 1) <NEW_LINE> target = CmdLine[0] <NEW_LINE> if len(CmdLine) == 2: <NEW_LINE> <INDENT> targetArgs = CmdLine[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> targetArgs = None <NEW_LINE> <DEDENT> self.buildLnkWithWscript(target, targetArgs, iconPath) <NEW_LINE> logging.info(" [-] Generated %s file: %s" % (self.outputFileType, self.outputFilePath)) <NEW_LINE> logging.info(" [-] Test with: \nBrowse %s dir to trigger icon resolution. Click on file to trigger shortcut.\n" % self.outputFilePath) | Module used to generate malicious Explorer Command File | 6259904b3c8af77a43b6893c |
class LibSass(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.clib = None <NEW_LINE> <DEDENT> def _load(self): <NEW_LINE> <INDENT> if self.clib is None: <NEW_LINE> <INDENT> root_path = os.path.normpath(os.path.join(os.path.dirname(os.path.realpath(__file__ )), '..')) <NEW_LINE> path1 = os.path.join(root_path, 'sass.so') <NEW_LINE> path2 = os.path.join(root_path, '..', 'sass.so') <NEW_LINE> if os.path.exists(path1): <NEW_LINE> <INDENT> self.clib = cdll.LoadLibrary(path1) <NEW_LINE> <DEDENT> elif os.path.exists(path1): <NEW_LINE> <INDENT> self.clib = cdll.LoadLibrary(path2) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("Could not load library") <NEW_LINE> <DEDENT> self.clib.sass_new_context.restype = POINTER(SassContext) <NEW_LINE> self.clib.sass_new_file_context.restype = POINTER(SassFileContext) <NEW_LINE> self.clib.sass_new_folder_context.restype = POINTER(SassFolderContext) <NEW_LINE> self.clib.sass_compile.restype = c_int <NEW_LINE> self.clib.sass_compile.argtypes = [POINTER(SassContext)] <NEW_LINE> self.clib.sass_compile_file.restype = c_int <NEW_LINE> self.clib.sass_compile_file.argtypes = [POINTER(SassFileContext)] <NEW_LINE> self.clib.sass_compile_folder.restype = c_int <NEW_LINE> self.clib.sass_compile_folder.argtypes = [POINTER(SassFolderContext)] <NEW_LINE> <DEDENT> <DEDENT> def __getattribute__(self, name): <NEW_LINE> <INDENT> attr = object.__getattribute__(self, name) <NEW_LINE> if hasattr(attr, '__call__') and name != "_load": <NEW_LINE> <INDENT> def load_wrapper(*args, **kwargs): <NEW_LINE> <INDENT> self._load() <NEW_LINE> return attr(*args, **kwargs) <NEW_LINE> <DEDENT> return load_wrapper <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return attr <NEW_LINE> <DEDENT> <DEDENT> def sass_new_context(self): <NEW_LINE> <INDENT> return self.clib.sass_new_context() <NEW_LINE> <DEDENT> def sass_new_file_context(self): <NEW_LINE> <INDENT> return self.clib.sass_new_file_context() <NEW_LINE> <DEDENT> def sass_new_folder_context(self): <NEW_LINE> <INDENT> return self.clib.sass_new_folder_context() <NEW_LINE> <DEDENT> def compile(self, ctx): <NEW_LINE> <INDENT> return self.clib.sass_compile(ctx) <NEW_LINE> <DEDENT> def compile_file(self, ctx): <NEW_LINE> <INDENT> return self.clib.sass_compile_file(ctx) <NEW_LINE> <DEDENT> def compile_folder(self, ctx): <NEW_LINE> <INDENT> return self.clib.sass_compile_folder(ctx) | Wrapper class around libsass.
The class provides methods that mimic the functions in sass_interface.h | 6259904bb5575c28eb7136c8 |
class on_keyword(parser.keyword): <NEW_LINE> <INDENT> def __init__(self, sString): <NEW_LINE> <INDENT> parser.keyword.__init__(self, sString) | unique_id = sensitivity_clause : on_keyword | 6259904b30dc7b76659a0c32 |
class Libogg(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "https://www.xiph.org/ogg/" <NEW_LINE> url = "http://downloads.xiph.org/releases/ogg/libogg-1.3.2.tar.gz" <NEW_LINE> version('1.3.2', 'b72e1a1dbadff3248e4ed62a4177e937') | Ogg is a multimedia container format, and the native file and stream
format for the Xiph.org multimedia codecs. | 6259904b009cb60464d02934 |
class PomXmlTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.o = untangle.parse('tests/res/pom.xml') <NEW_LINE> <DEDENT> def test_parent(self): <NEW_LINE> <INDENT> project = self.o.project <NEW_LINE> self.assert_(project) <NEW_LINE> parent = project.parent <NEW_LINE> self.assert_(parent) <NEW_LINE> self.assertEquals( 'com.atlassian.confluence.plugin.base', parent.groupId ) <NEW_LINE> self.assertEquals('confluence-plugin-base', parent.artifactId) <NEW_LINE> self.assertEquals('17', parent.version) <NEW_LINE> self.assertEquals('4.0.0', project.modelVersion) <NEW_LINE> self.assertEquals('com.this.that.groupId', project.groupId) <NEW_LINE> self.assertEquals('', project.name) <NEW_LINE> self.assertEquals( '${pom.groupId}.${pom.artifactId}', project.properties.atlassian_plugin_key ) <NEW_LINE> self.assertEquals( '1.4.1', project.properties.atlassian_product_test_lib_version ) <NEW_LINE> self.assertEquals( '2.9', project.properties.atlassian_product_data_version ) | Tests parsing a Maven pom.xml | 6259904bcb5e8a47e493cb86 |
class MXResolver: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_mx_records(domain): <NEW_LINE> <INDENT> return [] | Gets an array of MXRecords associated with the specified domain.
:param domain:
:return: [MXRecord] | 6259904b07f4c71912bb0832 |
class MissingScopes(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def create(**kwargs): <NEW_LINE> <INDENT> return MissingScopes(**kwargs) <NEW_LINE> <DEDENT> def __init__(self, json=None, **kwargs): <NEW_LINE> <INDENT> if json is None and not kwargs: <NEW_LINE> <INDENT> raise ValueError('No data or kwargs present') <NEW_LINE> <DEDENT> class_name = 'MissingScopes' <NEW_LINE> data = json or kwargs <NEW_LINE> data_types = [string_types] <NEW_LINE> self.organization = client_support.set_property( 'organization', data, data_types, False, [], False, True, class_name) <NEW_LINE> data_types = [string_types] <NEW_LINE> self.scopes = client_support.set_property('scopes', data, data_types, False, [], True, True, class_name) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.as_json(indent=4) <NEW_LINE> <DEDENT> def as_json(self, indent=0): <NEW_LINE> <INDENT> return client_support.to_json(self, indent=indent) <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> return client_support.to_dict(self) | auto-generated. don't touch. | 6259904b0c0af96317c57760 |
@createsPerformedReactionSetBool <NEW_LINE> class WekaKNNKFTest(ModelTest): <NEW_LINE> <INDENT> modelLibrary = "weka" <NEW_LINE> modelTool = "KNN" <NEW_LINE> splitter = "KFoldSplitter" | Tests Weka KNN. | 6259904bac7a0e7691f738da |
class DbtOutput: <NEW_LINE> <INDENT> def __init__(self, result: Dict[str, Any]): <NEW_LINE> <INDENT> self._result = check.dict_param(result, "result", key_type=str) <NEW_LINE> <DEDENT> @property <NEW_LINE> def result(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> return self._result <NEW_LINE> <DEDENT> @property <NEW_LINE> def docs_url(self) -> Optional[str]: <NEW_LINE> <INDENT> return None | Base class for both DbtCliOutput and DbtRPCOutput. Contains a single field, `result`, which
represents the dbt-formatted result of the command that was run (if any).
Used internally, should not be instantiated directly by the user. | 6259904b26068e7796d4dd43 |
class TextPlugin(NitpickPlugin): <NEW_LINE> <INDENT> identify_tags = {"text"} <NEW_LINE> validation_schema = TextSchema <NEW_LINE> skip_empty_suggestion = True <NEW_LINE> violation_base_code = 350 <NEW_LINE> def _expected_lines(self): <NEW_LINE> <INDENT> return [obj.get("line") for obj in self.expected_config.get(KEY_CONTAINS, {})] <NEW_LINE> <DEDENT> @property <NEW_LINE> def initial_contents(self) -> str: <NEW_LINE> <INDENT> return "\n".join(self._expected_lines()) <NEW_LINE> <DEDENT> def enforce_rules(self) -> Iterator[Fuss]: <NEW_LINE> <INDENT> expected = OrderedSet(self._expected_lines()) <NEW_LINE> actual = OrderedSet(self.file_path.read_text().split("\n")) <NEW_LINE> missing = expected - actual <NEW_LINE> if missing: <NEW_LINE> <INDENT> yield self.reporter.make_fuss(Violations.MISSING_LINES, "\n".join(sorted(missing))) | Enforce configuration on text files.
To check if ``some.txt`` file contains the lines ``abc`` and ``def`` (in any order):
.. code-block:: toml
[["some.txt".contains]]
line = "abc"
[["some.txt".contains]]
line = "def" | 6259904b8e71fb1e983bcec5 |
@register <NEW_LINE> class ImageRecorder(Recorder): <NEW_LINE> <INDENT> _model_name = Unicode('ImageRecorderModel').tag(sync=True) <NEW_LINE> _view_name = Unicode('ImageRecorderView').tag(sync=True) <NEW_LINE> image = Instance(Image).tag(sync=True, **widget_serialization) <NEW_LINE> format = Unicode('png', help='The format of the image.').tag(sync=True) <NEW_LINE> _width = Unicode().tag(sync=True) <NEW_LINE> _height = Unicode().tag(sync=True) <NEW_LINE> @traitlets.default('image') <NEW_LINE> def _default_image(self): <NEW_LINE> <INDENT> return Image(width=self._width, height=self._height, format=self.format) <NEW_LINE> <DEDENT> @observe('_width') <NEW_LINE> def _update_image_width(self, change): <NEW_LINE> <INDENT> self.image.width = self._width <NEW_LINE> <DEDENT> @observe('_height') <NEW_LINE> def _update_image_height(self, change): <NEW_LINE> <INDENT> self.image.height = self._height <NEW_LINE> <DEDENT> @observe('format') <NEW_LINE> def _update_image_format(self, change): <NEW_LINE> <INDENT> self.image.format = self.format <NEW_LINE> <DEDENT> @observe('image') <NEW_LINE> def _bind_image(self, change): <NEW_LINE> <INDENT> if change.old: <NEW_LINE> <INDENT> change.old.unobserve(self._check_autosave, 'value') <NEW_LINE> <DEDENT> change.new.observe(self._check_autosave, 'value') <NEW_LINE> <DEDENT> def _check_autosave(self, change): <NEW_LINE> <INDENT> if len(self.image.value) and self.autosave: <NEW_LINE> <INDENT> self.save() <NEW_LINE> <DEDENT> <DEDENT> def save(self, filename=None): <NEW_LINE> <INDENT> filename = filename or self.filename <NEW_LINE> if '.' not in filename: <NEW_LINE> <INDENT> filename += '.' + self.format <NEW_LINE> <DEDENT> if len(self.image.value) == 0: <NEW_LINE> <INDENT> raise ValueError('No data, did you record anything?') <NEW_LINE> <DEDENT> with open(filename, 'wb') as f: <NEW_LINE> <INDENT> f.write(self.image.value) | Creates a recorder which allows to grab an Image from a MediaStream widget.
| 6259904b0a366e3fb87ddde5 |
class LevelFilter: <NEW_LINE> <INDENT> def __init__(self, min_level="NOTSET", max_level="CRITICAL"): <NEW_LINE> <INDENT> self.min_level = logging._checkLevel(min_level) <NEW_LINE> self.max_level = logging._checkLevel(max_level) <NEW_LINE> <DEDENT> def filter(self, record): <NEW_LINE> <INDENT> if self.min_level <= record.levelno <= self.max_level: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 | Log filter that accepts records in a certain level range | 6259904b004d5f362081f9e7 |
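A short, runnable sketch of how a level-range filter like the LevelFilter row above can be attached to a handler; the logger name and messages are hypothetical, and it reuses logging._checkLevel exactly as the class above does.

import logging

class LevelFilter:
    # Same logic as the row above: accept records whose level falls in [min, max].
    def __init__(self, min_level="NOTSET", max_level="CRITICAL"):
        self.min_level = logging._checkLevel(min_level)
        self.max_level = logging._checkLevel(max_level)

    def filter(self, record):
        return 1 if self.min_level <= record.levelno <= self.max_level else 0

handler = logging.StreamHandler()
handler.addFilter(LevelFilter("INFO", "WARNING"))
logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.propagate = False
logger.setLevel(logging.DEBUG)
logger.debug("dropped: below INFO")
logger.warning("kept: inside the INFO..WARNING range")
logger.error("dropped: above WARNING")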
class InboxView(object): <NEW_LINE> <INDENT> template_name = None <NEW_LINE> http_method_names = ('GET', 'POST') <NEW_LINE> def render_to_response(self, context, template_name=None): <NEW_LINE> <INDENT> if template_name is None: <NEW_LINE> <INDENT> template_name = self.template_name <NEW_LINE> <DEDENT> template = get_template(template_name) <NEW_LINE> html = template.render(context) <NEW_LINE> json = simplejson.dumps({'html': html, 'success': True}) <NEW_LINE> return HttpResponse(json, mimetype='application/json') <NEW_LINE> <DEDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> context = self.get_context(request, *args, **kwargs) <NEW_LINE> return self.render_to_response(context) <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if request.method not in self.http_method_names: <NEW_LINE> <INDENT> return HttpResponseNotAllowed() <NEW_LINE> <DEDENT> view_func = getattr(self, request.method.lower()) <NEW_LINE> return view_func(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_context(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> def as_view(self): <NEW_LINE> <INDENT> def view_function(request, *args, **kwargs): <NEW_LINE> <INDENT> if request.user.is_authenticated() and request.is_ajax(): <NEW_LINE> <INDENT> view_method = getattr(self, request.method.lower()) <NEW_LINE> return view_method(request, *args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponseForbidden() <NEW_LINE> <DEDENT> <DEDENT> return view_function | custom class-based view
to be used for pjax and for generation
of content in the traditional way, where
only the :method:`get_context` would be used. | 6259904be76e3b2f99fd9e0a
class ListarCircuito(ListView): <NEW_LINE> <INDENT> model = Circuito <NEW_LINE> template_name = 'circuito/listar.html' <NEW_LINE> context_object_name = "listar_circuito" | Class-based view: (`Listar`)
:param template_name: path to the template
:param model: model that this view references
:param context_object_name: name of the object that this view contains | 6259904b16aa5153ce4018ed
class ArgReal(Arg): <NEW_LINE> <INDENT> def __init__(self, key, value = None, help = '', min = -1.7976931348623157e308, max = 1.7976931348623157e308, isTemporary = 0, deprecated = False): <NEW_LINE> <INDENT> self.min = min <NEW_LINE> self.max = max <NEW_LINE> Arg.__init__(self, key, value, help, isTemporary, deprecated) <NEW_LINE> return <NEW_LINE> <DEDENT> def getEntryPrompt(self): <NEW_LINE> <INDENT> return 'Please enter floating point value for '+str(self.key)+': ' <NEW_LINE> <DEDENT> def setValue(self, value): <NEW_LINE> <INDENT> self.checkKey() <NEW_LINE> try: <NEW_LINE> <INDENT> value = float(value) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise TypeError('Invalid floating point number: '+str(value)+' for key '+str(self.key)) <NEW_LINE> <DEDENT> if value < self.min or value >= self.max: <NEW_LINE> <INDENT> raise ValueError('Number out of range: '+str(value)+' not in ['+str(self.min)+','+str(self.max)+')'+' for key '+str(self.key)) <NEW_LINE> <DEDENT> self.value = value <NEW_LINE> return | Arguments that represent floating point numbers | 6259904b7cff6e4e811b6e3a |
class NotebookTrainingTracker(NotebookProgressBar): <NEW_LINE> <INDENT> def __init__(self, num_steps, column_names=None): <NEW_LINE> <INDENT> super().__init__(num_steps) <NEW_LINE> self.inner_table = None if column_names is None else [column_names] <NEW_LINE> self.child_bar = None <NEW_LINE> <DEDENT> def display(self): <NEW_LINE> <INDENT> self.html_code = html_progress_bar(self.value, self.total, self.prefix, self.label, self.width) <NEW_LINE> if self.inner_table is not None: <NEW_LINE> <INDENT> self.html_code += text_to_html_table(self.inner_table) <NEW_LINE> <DEDENT> if self.child_bar is not None: <NEW_LINE> <INDENT> self.html_code += self.child_bar.html_code <NEW_LINE> <DEDENT> if self.output is None: <NEW_LINE> <INDENT> self.output = disp.display(disp.HTML(self.html_code), display_id=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.output.update(disp.HTML(self.html_code)) <NEW_LINE> <DEDENT> <DEDENT> def write_line(self, values): <NEW_LINE> <INDENT> if self.inner_table is None: <NEW_LINE> <INDENT> self.inner_table = [list(values.keys()), list(values.values())] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> columns = self.inner_table[0] <NEW_LINE> if len(self.inner_table) == 1: <NEW_LINE> <INDENT> for key in values.keys(): <NEW_LINE> <INDENT> if key not in columns: <NEW_LINE> <INDENT> columns.append(key) <NEW_LINE> <DEDENT> <DEDENT> self.inner_table[0] = columns <NEW_LINE> <DEDENT> self.inner_table.append([values[c] for c in columns]) <NEW_LINE> <DEDENT> <DEDENT> def add_child(self, total, prefix=None, width=300): <NEW_LINE> <INDENT> self.child_bar = NotebookProgressBar(total, prefix=prefix, parent=self, width=width) <NEW_LINE> return self.child_bar <NEW_LINE> <DEDENT> def remove_child(self): <NEW_LINE> <INDENT> self.child_bar = None <NEW_LINE> self.display() | An object tracking the updates of an ongoing training with progress bars and a nice table reporting metrics.
Args:
num_steps (:obj:`int`): The number of steps during training.
column_names (:obj:`List[str]`, `optional`):
The list of column names for the metrics table (will be inferred from the first call to
:meth:`~transformers.utils.notebook.NotebookTrainingTracker.write_line` if not set). | 6259904b3c8af77a43b6893d |
class PairwiseEMDEnergyEEDot(PairwiseEMDBaseFloat64): <NEW_LINE> <INDENT> thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag") <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _eventgeometry.PairwiseEMDEnergyEEDot_swiginit(self, _eventgeometry.new_PairwiseEMDEnergyEEDot(*args, **kwargs)) <NEW_LINE> <DEDENT> __swig_destroy__ = _eventgeometry.delete_PairwiseEMDEnergyEEDot <NEW_LINE> description = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot_description) <NEW_LINE> clear = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot_clear) <NEW_LINE> init = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot_init) <NEW_LINE> compute = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot_compute) <NEW_LINE> __repr__ = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot___repr__) <NEW_LINE> preprocess_CenterWeightedCentroid = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot_preprocess_CenterWeightedCentroid) <NEW_LINE> preprocess_CenterEScheme = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot_preprocess_CenterEScheme) <NEW_LINE> preprocess_CenterPtCentroid = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot_preprocess_CenterPtCentroid) <NEW_LINE> preprocess_MaskCircle = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot_preprocess_MaskCircle) <NEW_LINE> _add_event = _swig_new_instance_method(_eventgeometry.PairwiseEMDEnergyEEDot__add_event) | Proxy of C++ fastjet::contrib::eventgeometry::PairwiseEMD< fastjet::contrib::eventgeometry::EMD< double,fastjet::contrib::eventgeometry::Energy,fastjet::contrib::eventgeometry::EEDot >,double > class. | 6259904b596a897236128fae |
class HendrixTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def noSettingsDeploy(self, action='start', options={}): <NEW_LINE> <INDENT> options.update({'wsgi': 'hendrix.tests.wsgi'}) <NEW_LINE> return HendrixDeploy(action, options) <NEW_LINE> <DEDENT> def withSettingsDeploy(self, action='start', options={}): <NEW_LINE> <INDENT> os.environ.setdefault( "DJANGO_SETTINGS_MODULE", "hendrix.tests.testproject.settings" ) <NEW_LINE> options.update({'settings': 'hendrix.tests.testproject.settings'}) <NEW_LINE> return HendrixDeploy(action, options) | This is where we collect our helper functions to test hendrix | 6259904bdc8b845886d549bd |
class ShellcodeSearchCommand(GenericCommand): <NEW_LINE> <INDENT> _cmdline_ = "shellcode search" <NEW_LINE> _syntax_ = "%s <pattern1> <pattern2>" % _cmdline_ <NEW_LINE> api_base = "http://shell-storm.org" <NEW_LINE> search_url = api_base + "/api/?s=" <NEW_LINE> def do_invoke(self, argv): <NEW_LINE> <INDENT> if len(argv) == 0: <NEW_LINE> <INDENT> err("Missing pattern to search") <NEW_LINE> self.usage() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.search_shellcode(argv) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def search_shellcode(self, search_options): <NEW_LINE> <INDENT> args = "*".join(search_options) <NEW_LINE> http = urlopen(self.search_url + args) <NEW_LINE> ret = http.read() <NEW_LINE> if http.getcode() != 200: <NEW_LINE> <INDENT> err("Could not query search page: got %d" % http.getcode()) <NEW_LINE> return <NEW_LINE> <DEDENT> lines = ret.split("\n") <NEW_LINE> refs = [ line.split("::::") for line in lines ] <NEW_LINE> info("Showing matching shellcodes") <NEW_LINE> for ref in refs: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> auth, arch, cmd, sid, link = ref <NEW_LINE> print(("\t".join([sid, arch, cmd]))) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> info("Use `%s get <id>` to fetch shellcode" % self._cmdline_) <NEW_LINE> return | Search pattern in shellcodes database. | 6259904b097d151d1a2c246e
class FileTree: <NEW_LINE> <INDENT> def __init__(self, paths): <NEW_LINE> <INDENT> self.tree = {} <NEW_LINE> def get_parent(path): <NEW_LINE> <INDENT> parent = self.tree <NEW_LINE> while '/' in path: <NEW_LINE> <INDENT> directory, path = path.split('/', 1) <NEW_LINE> child = parent.get(directory) <NEW_LINE> if child is None: <NEW_LINE> <INDENT> parent[directory] = {} <NEW_LINE> <DEDENT> parent = parent[directory] <NEW_LINE> <DEDENT> return parent, path <NEW_LINE> <DEDENT> for path in paths: <NEW_LINE> <INDENT> if path[-1] == '/': <NEW_LINE> <INDENT> path = path[:-1] <NEW_LINE> parent, path = get_parent(path) <NEW_LINE> parent[path] = {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parent, path = get_parent(path) <NEW_LINE> parent[path] = [] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_tree(self): <NEW_LINE> <INDENT> def to_tuple(path, item): <NEW_LINE> <INDENT> if isinstance(item, dict): <NEW_LINE> <INDENT> return item <NEW_LINE> <DEDENT> return tuple(item) <NEW_LINE> <DEDENT> self.walk(to_tuple) <NEW_LINE> return self.tree <NEW_LINE> <DEDENT> def walk(self, callback): <NEW_LINE> <INDENT> def walk(directory, parent_path): <NEW_LINE> <INDENT> for path in list(directory): <NEW_LINE> <INDENT> full_path = os.path.join(parent_path, path).replace('\\', '/') <NEW_LINE> if isinstance(directory[path], dict): <NEW_LINE> <INDENT> directory[path] = ( callback(full_path, directory[path]) or directory[path] ) <NEW_LINE> walk(directory[path], full_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> directory[path] = ( callback(full_path, directory[path]) or directory[path] ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> walk(self.tree, '') <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> lines = [] <NEW_LINE> def write(path, item): <NEW_LINE> <INDENT> depth = path.count('/') <NEW_LINE> path = os.path.basename(path) <NEW_LINE> path = isinstance(item, dict) and path + '/' or path <NEW_LINE> lines.append(' ' * depth + path) <NEW_LINE> <DEDENT> self.walk(write) <NEW_LINE> return '\n'.join(lines) | Convert a list of paths in a file tree.
:param paths: The paths to be converted.
:type paths: list | 6259904b009cb60464d02936 |
class Department(object): <NEW_LINE> <INDENT> def __init__(self, name, contact_info, number_of_employees, manager): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.employees = {} <NEW_LINE> self.office_hours = "9:00-5:00" <NEW_LINE> self.contact_info = contact_info <NEW_LINE> self.number_of_employees = number_of_employees <NEW_LINE> self.__manager = manager <NEW_LINE> self.meet = "" <NEW_LINE> print(name, contact_info, number_of_employees, manager) <NEW_LINE> <DEDENT> def get_budget(self): <NEW_LINE> <INDENT> self.budget = '$110,000' <NEW_LINE> return self.budget <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> <DEDENT> @name.setter <NEW_LINE> def name(self, val): <NEW_LINE> <INDENT> if isinstance(val, int): <NEW_LINE> <INDENT> raise TypeError('Please provide a string value for the department name') <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def manager(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.__manager <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return "" | Parent class for all departments | 6259904bd99f1b3c44d06a99 |
class Config(object): <NEW_LINE> <INDENT> _instance = None <NEW_LINE> config: Any = None <NEW_LINE> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> if cls._instance is None: <NEW_LINE> <INDENT> cls._instance = super(Config, cls).__new__(cls) <NEW_LINE> <DEDENT> return cls._instance <NEW_LINE> <DEDENT> def __init__(self, config=None): <NEW_LINE> <INDENT> if self.config is None: <NEW_LINE> <INDENT> self.config = yaml.load(_default_configuration, Loader=yaml.FullLoader) <NEW_LINE> <DEDENT> if config: <NEW_LINE> <INDENT> self.reload(config) <NEW_LINE> <DEDENT> <DEDENT> def reload(self, config): <NEW_LINE> <INDENT> self.config = { **self.config, **yaml.load(open(config, 'r'), Loader=yaml.FullLoader) } <NEW_LINE> <DEDENT> def reload_default(self): <NEW_LINE> <INDENT> self.config = yaml.load(_default_configuration, Loader=yaml.FullLoader) | Singleton Config class for representing the pipeline configuration. | 6259904b07f4c71912bb0834 |
class ExternalServiceError(Exception): <NEW_LINE> <INDENT> pass | External service error. | 6259904b07f4c71912bb0835 |
class Netgear(object): <NEW_LINE> <INDENT> def __init__(self, host, username, password): <NEW_LINE> <INDENT> self.soap_url = "http://{}:5000/soap/server_sa/".format(host) <NEW_LINE> self.username = username <NEW_LINE> self.password = password <NEW_LINE> self.logged_in = False <NEW_LINE> <DEDENT> def login(self): <NEW_LINE> <INDENT> _LOGGER.info("Login") <NEW_LINE> message = SOAP_LOGIN.format(session_id=SESSION_ID, username=self.username, password=self.password) <NEW_LINE> success, _ = self._make_request( ACTION_LOGIN, message, False) <NEW_LINE> self.logged_in = success <NEW_LINE> return self.logged_in <NEW_LINE> <DEDENT> def get_attached_devices(self): <NEW_LINE> <INDENT> _LOGGER.info("Get attached devices") <NEW_LINE> message = SOAP_ATTACHED_DEVICES.format(session_id=SESSION_ID) <NEW_LINE> success, response = self._make_request( ACTION_GET_ATTACHED_DEVICES, message) <NEW_LINE> if not success: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> data = re.search(r"<NewAttachDevice>(.*)</NewAttachDevice>", response).group(1).split(";") <NEW_LINE> devices = [] <NEW_LINE> device_start = [index for index, value in enumerate(data) if '@' in value] <NEW_LINE> for index, start in enumerate(device_start): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> info = data[start:device_start[index+1]] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> info = data[start:-1] <NEW_LINE> <DEDENT> signal = convert(info[0].split("@")[0], int) <NEW_LINE> ip, name, mac, link_type = info[1:5] <NEW_LINE> link_rate = convert(info[-1], int) <NEW_LINE> devices.append(Device(signal, ip, name, mac, link_type, link_rate)) <NEW_LINE> <DEDENT> return devices <NEW_LINE> <DEDENT> def _make_request(self, action, message, try_login_after_failure=True): <NEW_LINE> <INDENT> if not self.logged_in and try_login_after_failure: <NEW_LINE> <INDENT> if not self.login(): <NEW_LINE> <INDENT> return False, "" <NEW_LINE> <DEDENT> <DEDENT> headers = _get_soap_header(action) <NEW_LINE> try: <NEW_LINE> <INDENT> req = requests.post( self.soap_url, headers=headers, data=message, timeout=10) <NEW_LINE> success = _is_valid_response(req) <NEW_LINE> if not success and try_login_after_failure: <NEW_LINE> <INDENT> self.login() <NEW_LINE> req = requests.post( self.soap_url, headers=headers, data=message, timeout=10) <NEW_LINE> success = _is_valid_response(req) <NEW_LINE> <DEDENT> return success, req.text <NEW_LINE> <DEDENT> except requests.exceptions.RequestException: <NEW_LINE> <INDENT> _LOGGER.exception("Error talking to API") <NEW_LINE> return False, "" | Represents a Netgear Router. | 6259904b21a7993f00c6736a |
class H2OAutoEncoderModel(ModelBase): <NEW_LINE> <INDENT> def __init__(self, dest_key, model_json): <NEW_LINE> <INDENT> super(H2OAutoEncoderModel, self).__init__(dest_key, model_json,H2OAutoEncoderModelMetrics) <NEW_LINE> <DEDENT> def anomaly(self,test_data,per_feature=False): <NEW_LINE> <INDENT> if not test_data: raise ValueError("Must specify test data") <NEW_LINE> j = H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data._id, reconstruction_error=True, reconstruction_error_per_feature=per_feature) <NEW_LINE> return h2o.get_frame(j["model_metrics"][0]["predictions"]["frame_id"]["name"]) | Class for AutoEncoder models. | 6259904b379a373c97d9a42b |
class ConnectDevice(Action): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(ConnectDevice, self).__init__() <NEW_LINE> self.name = "connect-device" <NEW_LINE> self.summary = "run connection command" <NEW_LINE> self.description = "use the configured command to connect serial to the device" <NEW_LINE> self.session_class = ShellSession <NEW_LINE> self.shell_class = ShellCommand <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> super(ConnectDevice, self).validate() <NEW_LINE> if 'connect' not in self.job.device['commands']: <NEW_LINE> <INDENT> self.errors = "Unable to connect to device %s - missing connect command." % self.job.device.hostname <NEW_LINE> return <NEW_LINE> <DEDENT> command = self.job.device['commands']['connect'] <NEW_LINE> exe = '' <NEW_LINE> try: <NEW_LINE> <INDENT> exe = command.split(' ')[0] <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self.errors = "Unable to parse the connection command %s" % command <NEW_LINE> <DEDENT> self.errors = infrastructure_error(exe) <NEW_LINE> <DEDENT> def run(self, connection, max_end_time, args=None): <NEW_LINE> <INDENT> connection = self.get_namespace_data(action='shared', label='shared', key='connection', deepcopy=False) <NEW_LINE> if connection: <NEW_LINE> <INDENT> self.logger.debug("Already connected") <NEW_LINE> return connection <NEW_LINE> <DEDENT> command = self.job.device['commands']['connect'][:] <NEW_LINE> self.logger.info("%s Connecting to device using '%s'", self.name, command) <NEW_LINE> shell = self.shell_class("%s\n" % command, self.timeout, logger=self.logger) <NEW_LINE> if shell.exitstatus: <NEW_LINE> <INDENT> raise JobError("%s command exited %d: %s" % (command, shell.exitstatus, shell.readlines())) <NEW_LINE> <DEDENT> connection = self.session_class(self.job, shell) <NEW_LINE> connection.connected = True <NEW_LINE> connection = super(ConnectDevice, self).run(connection, max_end_time, args) <NEW_LINE> if not connection.prompt_str: <NEW_LINE> <INDENT> connection.prompt_str = [DEFAULT_SHELL_PROMPT] <NEW_LINE> <DEDENT> self.set_namespace_data(action='shared', label='shared', key='connection', value=connection) <NEW_LINE> return connection | General purpose class to use the device commands to
make a serial connection to the device. e.g. using ser2net
Inherit from this class and change the session_class and/or shell_class for different behaviour. | 6259904b3c8af77a43b6893e |
class ExampleAlgorithm(GeoAlgorithm): <NEW_LINE> <INDENT> OUTPUT_LAYER = 'OUTPUT_LAYER' <NEW_LINE> INPUT_LAYER = 'INPUT_LAYER' <NEW_LINE> def defineCharacteristics(self): <NEW_LINE> <INDENT> self.name = 'Create copy of layer' <NEW_LINE> self.group = 'Algorithms for vector layers' <NEW_LINE> self.addParameter(ParameterVector(self.INPUT_LAYER, 'Input layer', [ParameterVector.VECTOR_TYPE_ANY], False)) <NEW_LINE> self.addOutput(OutputVector(self.OUTPUT_LAYER, 'Output layer with selected features')) <NEW_LINE> <DEDENT> def processAlgorithm(self, progress): <NEW_LINE> <INDENT> inputFilename = self.getParameterValue(self.INPUT_LAYER) <NEW_LINE> output = self.getOutputValue(self.OUTPUT_LAYER) <NEW_LINE> vectorLayer = dataobjects.getObjectFromUri(inputFilename) <NEW_LINE> settings = QSettings() <NEW_LINE> systemEncoding = settings.value('/UI/encoding', 'System') <NEW_LINE> provider = vectorLayer.dataProvider() <NEW_LINE> writer = QgsVectorFileWriter(output, systemEncoding, provider.fields(), provider.geometryType(), provider.crs()) <NEW_LINE> features = vector.features(vectorLayer) <NEW_LINE> for f in features: <NEW_LINE> <INDENT> writer.addFeature(f) | This is an example algorithm that takes a vector layer and
creates a new one with just those features of the input
layer that are selected.
It is meant to be used as an example of how to create your own
algorithms and explain methods and variables used to do it. An
algorithm like this will be available in all elements, and there
is no need for additional work.
All Processing algorithms should extend the GeoAlgorithm class. | 6259904b50485f2cf55dc38c |
class CourseListViewTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.course3 = Course.objects.create( title="Python Django Reinhardt Appreciation Course 0", description="Course 0 is pretty self explanatory." ) <NEW_LINE> for x in range(1,5): <NEW_LINE> <INDENT> Course.objects.create( title="Python Django Reinhardt Appreciation Course {}".format(x), description="Course {} is pretty self explanatory.".format(x) ) <NEW_LINE> <DEDENT> <DEDENT> def test_new_course_list_view(self): <NEW_LINE> <INDENT> resp = self.client.get(reverse('courses:list')) <NEW_LINE> self.assertIn(self.course3, resp.context['courses']) | Tests for the Course list view | 6259904b96565a6dacd2d98a |
class MySlackServer(SlackServer): <NEW_LINE> <INDENT> def bind_route(self, server): <NEW_LINE> <INDENT> @server.route(self.endpoint, methods=['GET', 'POST']) <NEW_LINE> def event(): <NEW_LINE> <INDENT> if request.method == 'GET': <NEW_LINE> <INDENT> return make_response( "These are not the slackbots you're looking for.", 404) <NEW_LINE> <DEDENT> event_data = json.loads(request.data.decode('utf-8')) <NEW_LINE> if "challenge" in event_data: <NEW_LINE> <INDENT> return make_response( event_data.get("challenge"), 200, {"content_type": "application/json"} ) <NEW_LINE> <DEDENT> request_token = event_data.get("token") <NEW_LINE> if self.verification_token != request_token: <NEW_LINE> <INDENT> self.emitter.emit('error', 'invalid verification token') <NEW_LINE> return make_response( "Request contains invalid Slack verification token", 403) <NEW_LINE> <DEDENT> if "event" in event_data: <NEW_LINE> <INDENT> event_type = event_data["event"]["type"] <NEW_LINE> from app import huey <NEW_LINE> @huey.task() <NEW_LINE> def call_event_listener_async(): <NEW_LINE> <INDENT> self.emitter.emit(event_type, event_data) <NEW_LINE> <DEDENT> call_event_listener_async() <NEW_LINE> response = make_response("", 200) <NEW_LINE> response.headers['X-Slack-Powered-By'] = self.package_info <NEW_LINE> return response | Override bind_route method to make event handling asynchronous (the Slack event is dispatched to a huey task) | 6259904b6fece00bbacccdba
class DemandeCreateView(LoginRequiredMixin, CreateView): <NEW_LINE> <INDENT> model = Demande <NEW_LINE> fields = ('description', ) <NEW_LINE> def form_valid(self, form) -> HttpResponse: <NEW_LINE> <INDENT> form.instance.demandeur = self.request.user <NEW_LINE> form.instance.cagnotte = get_object_or_404(Cagnotte, slug=self.kwargs['slug']) <NEW_LINE> messages.success(self.request, 'Votre demande a été correctement enregistrée !') <NEW_LINE> return super().form_valid(form) | A view to add a Demande. | 6259904bb5575c28eb7136ca |
class WriteVariableActionMixin(object): <NEW_LINE> <INDENT> def __init__(self, value=None, **kwargs): <NEW_LINE> <INDENT> super(WriteVariableActionMixin, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def value_type(self, diagnostics=None, context=None): <NEW_LINE> <INDENT> raise NotImplementedError( 'operation value_type(...) not yet implemented') <NEW_LINE> <DEDENT> def multiplicity(self, diagnostics=None, context=None): <NEW_LINE> <INDENT> raise NotImplementedError( 'operation multiplicity(...) not yet implemented') | User defined mixin class for WriteVariableAction. | 6259904bcb5e8a47e493cb88 |
class JWTAuthenticationMiddleware(object): <NEW_LINE> <INDENT> def __init__(self, get_response): <NEW_LINE> <INDENT> self.get_response = get_response <NEW_LINE> <DEDENT> def __call__(self, request): <NEW_LINE> <INDENT> return self.get_response(request) <NEW_LINE> <DEDENT> def process_request(self, request): <NEW_LINE> <INDENT> request.user = SimpleLazyObject(lambda : get_user_jwt(request)) | Middleware for authenticating JSON Web Tokens in the Authorization header | 6259904b0c0af96317c57762
class BasicInvalidCredentials(InvalidCredentials): <NEW_LINE> <INDENT> default_headers = {"www-authenticate": "basic"} <NEW_LINE> status = 401 | Invalid credentials provided for basic authentication. | 6259904b711fe17d825e169f |
class Executor: <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> args = Args(args) <NEW_LINE> config = SheBaoConfig(args.get_arg('-c')) <NEW_LINE> self.employee_data = EmployeeData(args.get_arg('-d')) <NEW_LINE> self.exporter = Exporter(args.get_arg('-o')) <NEW_LINE> self.calculator = Calculator(config) <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> for item in self.employee_data: <NEW_LINE> <INDENT> item = self.calculator.calculate(item) <NEW_LINE> self.exporter.export(item) <NEW_LINE> <DEDENT> self.exporter.close() | Executor implementation
This executor calls the various classes to perform the tax calculation | 6259904b498bea3a75a58f23
class TestTerminationGDDR5(unittest.TestCase): <NEW_LINE> <INDENT> vdd = 1.5 <NEW_LINE> rankcnt = 2 <NEW_LINE> resistance = energydram.TermResistance(rz_dev=40, rz_mc=40, rtt_nom=60, rtt_wr=60, rtt_mc=60, rs=15) <NEW_LINE> term = energydram.Termination(vdd, rankcnt, resistance, width=32, with_dqs=False, with_dm=False, with_dbi=True, level='high') <NEW_LINE> def test_power_target_rank(self): <NEW_LINE> <INDENT> self.assertAlmostEqual(self.term.read_power_target_rank(), 317.2e-3, places=4) <NEW_LINE> self.assertAlmostEqual(self.term.write_power_target_rank(), 140.5e-3, places=4) <NEW_LINE> <DEDENT> def test_power_other_ranks(self): <NEW_LINE> <INDENT> self.assertAlmostEqual(self.term.read_power_other_ranks(), 85.4e-3, places=4) <NEW_LINE> self.assertAlmostEqual(self.term.write_power_other_ranks(), 140.5e-3, places=4) | Termination class unit tests for GDDR5.
Compare with Kenta & Makoto Excel sheet. | 6259904bd4950a0f3b111844 |
class SubscriptionListAPI(Resource): <NEW_LINE> <INDENT> @use_kwargs(SubscriptionQuerySchema(partial=True), locations=("query",)) <NEW_LINE> def get(self, **kwargs): <NEW_LINE> <INDENT> subscriptions = Subscription.get_subscriptions(**kwargs) <NEW_LINE> result = SubscriptionSchema().dump(subscriptions, many=True) <NEW_LINE> return jsonify(result) | Resource/routes for subscriptions endpoints | 6259904b21a7993f00c6736c |
class Portgroup(Base): <NEW_LINE> <INDENT> __tablename__ = 'portgroups' <NEW_LINE> __table_args__ = ( schema.UniqueConstraint('uuid', name='uniq_portgroups0uuid'), schema.UniqueConstraint('address', name='uniq_portgroups0address'), schema.UniqueConstraint('name', name='uniq_portgroups0name'), table_args()) <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> uuid = Column(String(36)) <NEW_LINE> name = Column(String(255), nullable=True) <NEW_LINE> node_id = Column(Integer, ForeignKey('nodes.id'), nullable=True) <NEW_LINE> address = Column(String(18)) <NEW_LINE> extra = Column(db_types.JsonEncodedDict) | Represents a group of network ports of a bare metal node. | 6259904b07d97122c42180a6 |
class NotificationInstance(InstanceResource): <NEW_LINE> <INDENT> class Priority(object): <NEW_LINE> <INDENT> HIGH = "high" <NEW_LINE> LOW = "low" <NEW_LINE> <DEDENT> def __init__(self, version, payload, service_sid): <NEW_LINE> <INDENT> super(NotificationInstance, self).__init__(version) <NEW_LINE> self._properties = { 'sid': payload.get('sid'), 'account_sid': payload.get('account_sid'), 'service_sid': payload.get('service_sid'), 'date_created': deserialize.iso8601_datetime(payload.get('date_created')), 'identities': payload.get('identities'), 'tags': payload.get('tags'), 'segments': payload.get('segments'), 'priority': payload.get('priority'), 'ttl': deserialize.integer(payload.get('ttl')), 'title': payload.get('title'), 'body': payload.get('body'), 'sound': payload.get('sound'), 'action': payload.get('action'), 'data': payload.get('data'), 'apn': payload.get('apn'), 'gcm': payload.get('gcm'), 'fcm': payload.get('fcm'), 'sms': payload.get('sms'), 'facebook_messenger': payload.get('facebook_messenger'), 'alexa': payload.get('alexa'), } <NEW_LINE> self._context = None <NEW_LINE> self._solution = {'service_sid': service_sid, } <NEW_LINE> <DEDENT> @property <NEW_LINE> def sid(self): <NEW_LINE> <INDENT> return self._properties['sid'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def account_sid(self): <NEW_LINE> <INDENT> return self._properties['account_sid'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def service_sid(self): <NEW_LINE> <INDENT> return self._properties['service_sid'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def date_created(self): <NEW_LINE> <INDENT> return self._properties['date_created'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def identities(self): <NEW_LINE> <INDENT> return self._properties['identities'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def tags(self): <NEW_LINE> <INDENT> return self._properties['tags'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def segments(self): <NEW_LINE> <INDENT> return self._properties['segments'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def priority(self): <NEW_LINE> <INDENT> return self._properties['priority'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def ttl(self): <NEW_LINE> <INDENT> return self._properties['ttl'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> return self._properties['title'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def body(self): <NEW_LINE> <INDENT> return self._properties['body'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def sound(self): <NEW_LINE> <INDENT> return self._properties['sound'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def action(self): <NEW_LINE> <INDENT> return self._properties['action'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._properties['data'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def apn(self): <NEW_LINE> <INDENT> return self._properties['apn'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def gcm(self): <NEW_LINE> <INDENT> return self._properties['gcm'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def fcm(self): <NEW_LINE> <INDENT> return self._properties['fcm'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def sms(self): <NEW_LINE> <INDENT> return self._properties['sms'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def facebook_messenger(self): <NEW_LINE> <INDENT> return self._properties['facebook_messenger'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def alexa(self): <NEW_LINE> <INDENT> return self._properties['alexa'] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Twilio.Notify.V1.NotificationInstance>' | PLEASE NOTE that this 
class contains beta products that are subject to
change. Use them with caution. | 6259904b8e71fb1e983bcec9 |
class QuestionsViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> authentication_classes = [JWTAuthentication] <NEW_LINE> permission_classes = [permissions.IsAdminUser | ReadOnly] <NEW_LINE> queryset = Question.objects.all() <NEW_LINE> serializer_class = QuestionSerializer <NEW_LINE> filter_backends = (filters.DjangoFilterBackend,) <NEW_LINE> filterset_class = QuestionFilter | Returns a list of questions. | 6259904ba79ad1619776b483
class VolumeUtilization(GenericChart): <NEW_LINE> <INDENT> _model = m_volumes.VolumeUtilizationModel <NEW_LINE> _label = 'Volume Utilization panel' | Page object for the Volume Utilization panel | 6259904bb830903b9686ee7c
class Words(): <NEW_LINE> <INDENT> def __init__(self, words: List['Word']) -> None: <NEW_LINE> <INDENT> self.words = words <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, _dict: Dict) -> 'Words': <NEW_LINE> <INDENT> args = {} <NEW_LINE> if 'words' in _dict: <NEW_LINE> <INDENT> args['words'] = [Word.from_dict(x) for x in _dict.get('words')] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError( 'Required property \'words\' not present in Words JSON') <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> return cls.from_dict(_dict) <NEW_LINE> <DEDENT> def to_dict(self) -> Dict: <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'words') and self.words is not None: <NEW_LINE> <INDENT> _dict['words'] = [x.to_dict() for x in self.words] <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> return self.to_dict() <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return json.dumps(self.to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other: 'Words') -> bool: <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other: 'Words') -> bool: <NEW_LINE> <INDENT> return not self == other | Information about the words from a custom language model.
:attr List[Word] words: An array of `Word` objects that provides information
about each word in the custom model's words resource. The array is empty if the
custom model has no words. | 6259904b3cc13d1c6d466b3c |
class Clan(): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.name = data["name"] <NEW_LINE> self.rank = data["rank"] <NEW_LINE> self.type = data["type"] | The class used to represent a user's clan.
Attributes
-----------
name : [str]
The clan's name. Not to be confused with :class:`Profile.name`
rank : [int]
The clan's rank.
type : [str]
The size of the clan. | 6259904b16aa5153ce4018f1
class TypeExperience(Measure): <NEW_LINE> <INDENT> def measure(self, roundnum, women, game): <NEW_LINE> <INDENT> if self.midwife_type is not None: <NEW_LINE> <INDENT> women = filter(lambda x: x.player_type == self.midwife_type, women) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> group_log = itertools.chain(*map(lambda x: x.type_log, women)) <NEW_LINE> <DEDENT> frequencies = collections.Counter(group_log) <NEW_LINE> total_signals = sum(frequencies.values()) <NEW_LINE> if total_signals == 0: <NEW_LINE> <INDENT> return 0. <NEW_LINE> <DEDENT> return frequencies[self.player_type] / float(total_signals) | A measure that gives the frequency of a type experienced up
to that round by some (or all) types of midwife. | 6259904b596a897236128fb0 |
class CommandOptions(HelpProvider): <NEW_LINE> <INDENT> help_spec = HelpProvider.HelpSpec( help_name = 'wildcards', help_name_aliases = ['wildcard', '*', '**'], help_type = 'additional_help', help_one_line_summary = 'Wildcard Names', help_text = _detailed_help_text, subcommand_help_text = {}, ) | Additional help about wildcards. | 6259904b23e79379d538d902 |
class HueSaturationValue(ScalarField_RGB): <NEW_LINE> <INDENT> def compute(self): <NEW_LINE> <INDENT> rgb = self.rgb <NEW_LINE> MAX = np.max(rgb, -1) <NEW_LINE> MIN = np.min(rgb, -1) <NEW_LINE> MAX_MIN = np.ptp(rgb, -1) <NEW_LINE> H = np.empty_like(MAX) <NEW_LINE> idx = rgb[:, 0] == MAX <NEW_LINE> H[idx] = 60 * (rgb[idx, 1] - rgb[idx, 2]) / MAX_MIN[idx] <NEW_LINE> H[np.logical_and(idx, rgb[:, 1] < rgb[:, 2])] += 360 <NEW_LINE> idx = rgb[:, 1] == MAX <NEW_LINE> H[idx] = (60 * (rgb[idx, 2] - rgb[idx, 0]) / MAX_MIN[idx]) + 120 <NEW_LINE> idx = rgb[:, 2] == MAX <NEW_LINE> H[idx] = (60 * (rgb[idx, 0] - rgb[idx, 1]) / MAX_MIN[idx]) + 240 <NEW_LINE> self.to_be_added["H"] = np.nan_to_num(H) <NEW_LINE> self.to_be_added["S"] = np.nan_to_num( np.where(MAX == 0, 0, 1 - (MIN / MAX))) <NEW_LINE> self.to_be_added["V"] = np.nan_to_num(MAX / 255 * 100) | Hue, Saturation, Value colorspace.
| 6259904b94891a1f408ba0f7 |
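A compact sketch of the same per-pixel RGB-to-HSV formulas that compute() above applies, run on a small hypothetical (N, 3) array; it illustrates the math only and is not the ScalarField_RGB class itself.

import numpy as np

# Hypothetical sample: three RGB pixels as a float (N, 3) array.
rgb = np.array([[255.0, 0.0, 0.0], [0.0, 128.0, 128.0], [30.0, 30.0, 200.0]])
mx, mn, rng = rgb.max(-1), rgb.min(-1), np.ptp(rgb, -1)

with np.errstate(divide="ignore", invalid="ignore"):
    # Pick the hue branch by which channel holds the maximum, as in compute() above.
    h = np.select(
        [rgb[:, 0] == mx, rgb[:, 1] == mx, rgb[:, 2] == mx],
        [60 * (rgb[:, 1] - rgb[:, 2]) / rng,
         60 * (rgb[:, 2] - rgb[:, 0]) / rng + 120,
         60 * (rgb[:, 0] - rgb[:, 1]) / rng + 240])
h = np.nan_to_num(h) % 360                 # wrap negative red-branch hues into [0, 360)
s = np.nan_to_num(np.where(mx == 0, 0, 1 - mn / mx))
v = mx / 255 * 100                         # value as a percentage, as in compute()
print(h, s, v)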