code (string, lengths 4-4.48k) | docstring (string, lengths 1-6.45k) | _id (string, length 24) |
---|---|---|
class Alias(models.Model): <NEW_LINE> <INDENT> compound = models.ForeignKey('Compound', related_name="aliases") <NEW_LINE> name = models.CharField(max_length=DEFAULT_MAX_LENGTH, unique=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return '%s (%s)' % (self.name, self.compound.name) | Some shortchuts to find Compounds.
Example: 'c5' -> Pentane | 62599069f7d966606f7494a6 |
class DeleteMountTargetResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.RequestId = params.get("RequestId") | DeleteMountTarget返回参数结构体
| 62599069167d2b6e312b8179 |
class SimpleEditor(Editor): <NEW_LINE> <INDENT> def init(self, parent): <NEW_LINE> <INDENT> tctl = masked.TimeCtrl(parent, -1, name="12 hour control") <NEW_LINE> self.control = tctl <NEW_LINE> self.control.Bind(masked.EVT_TIMEUPDATE, self.time_updated) <NEW_LINE> return <NEW_LINE> <DEDENT> def time_updated(self, event): <NEW_LINE> <INDENT> time = self.control.GetValue(as_wxDateTime=True) <NEW_LINE> hour = time.GetHour() <NEW_LINE> minute = time.GetMinute() <NEW_LINE> second = time.GetSecond() <NEW_LINE> self.value = datetime.time(hour, minute, second) <NEW_LINE> return <NEW_LINE> <DEDENT> def update_editor(self): <NEW_LINE> <INDENT> if self.value: <NEW_LINE> <INDENT> time = self.control.GetValue(as_wxDateTime=True) <NEW_LINE> time.SetHour(self.value.hour) <NEW_LINE> time.SetMinute(self.value.minute) <NEW_LINE> time.SetSecond(self.value.second) <NEW_LINE> self.control.SetValue(time) <NEW_LINE> <DEDENT> return | Traits UI time editor. | 62599069097d151d1a2c2844 |
class IJVZooEvent(Interface): <NEW_LINE> <INDENT> pass | Base class for niteoweb.jvzoo events. | 62599069adb09d7d5dc0bd41 |
class ContextInheritanceTestCase(common_test.ContextTestCase): <NEW_LINE> <INDENT> def test_context_inherit(self): <NEW_LINE> <INDENT> data = {'css': {'background-color': 'blue'}} <NEW_LINE> common_test.create_plugin(hierarchy=('some', ), platforms='', data=data) <NEW_LINE> common_test.create_plugin(hierarchy=('some', 'kind', 'of', 'context'), platforms='') <NEW_LINE> context = ways.api.get_context('some/kind/of/context') <NEW_LINE> self.assertEqual(context.data['css']['background-color'], 'blue') | Test the ways the a Context is meant to inherit from Context objects.
Context objects from others through their hierarchy.
If a Context exists at a lower hierarchy | 625990693539df3088ecda76 |
class ListConverter(BaseConverter): <NEW_LINE> <INDENT> def to_python(self, value): <NEW_LINE> <INDENT> return value.split('+') <NEW_LINE> <DEDENT> def to_url(self, values): <NEW_LINE> <INDENT> return '+'.join( BaseConverter.to_url(self, item) for item in values ) | nome+nome2+nome3 | 625990691b99ca4002290121 |
class Bar(object): <NEW_LINE> <INDENT> def __init__(self, an_argument): <NEW_LINE> <INDENT> self.random_attrib = an_argument <NEW_LINE> self.name = None | Some Other Class | 6259906944b2445a339b754b |
class GPU(object): <NEW_LINE> <INDENT> def __init__(self, deviceID): <NEW_LINE> <INDENT> self.deviceID = deviceID <NEW_LINE> self.name = None <NEW_LINE> self.pcibusID = None <NEW_LINE> self.constmem = None <NEW_LINE> self.totalmem = None <NEW_LINE> <DEDENT> def get_gpu_info(self, drv): <NEW_LINE> <INDENT> self.name = drv.Device(self.deviceID).name() <NEW_LINE> self.pcibusID = drv.Device(self.deviceID).pci_bus_id() <NEW_LINE> self.constmem = drv.Device(self.deviceID).total_constant_memory <NEW_LINE> self.totalmem = drv.Device(self.deviceID).total_memory() | GPU information. | 62599069cb5e8a47e493cd6f |
class WellKnownApi(object): <NEW_LINE> <INDENT> def __init__(self, api_client=None): <NEW_LINE> <INDENT> if api_client is None: <NEW_LINE> <INDENT> api_client = ApiClient() <NEW_LINE> <DEDENT> self.api_client = api_client <NEW_LINE> <DEDENT> def get_service_account_issuer_open_id_configuration(self, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> return self.get_service_account_issuer_open_id_configuration_with_http_info(**kwargs) <NEW_LINE> <DEDENT> def get_service_account_issuer_open_id_configuration_with_http_info(self, **kwargs): <NEW_LINE> <INDENT> local_var_params = locals() <NEW_LINE> all_params = [ ] <NEW_LINE> all_params.extend( [ 'async_req', '_return_http_data_only', '_preload_content', '_request_timeout', '_request_auth', '_content_type', '_headers' ] ) <NEW_LINE> for key, val in six.iteritems(local_var_params['kwargs']): <NEW_LINE> <INDENT> if key not in all_params: <NEW_LINE> <INDENT> raise ApiTypeError( "Got an unexpected keyword argument '%s'" " to method get_service_account_issuer_open_id_configuration" % key ) <NEW_LINE> <DEDENT> local_var_params[key] = val <NEW_LINE> <DEDENT> del local_var_params['kwargs'] <NEW_LINE> collection_formats = {} <NEW_LINE> path_params = {} <NEW_LINE> query_params = [] <NEW_LINE> header_params = dict(local_var_params.get('_headers', {})) <NEW_LINE> form_params = [] <NEW_LINE> local_var_files = {} <NEW_LINE> body_params = None <NEW_LINE> header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) <NEW_LINE> auth_settings = ['BearerToken'] <NEW_LINE> response_types_map = { 200: "str", 401: None, } <NEW_LINE> return self.api_client.call_api( '/.well-known/openid-configuration/', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_types_map=response_types_map, auth_settings=auth_settings, async_req=local_var_params.get('async_req'), _return_http_data_only=local_var_params.get('_return_http_data_only'), _preload_content=local_var_params.get('_preload_content', True), _request_timeout=local_var_params.get('_request_timeout'), collection_formats=collection_formats, _request_auth=local_var_params.get('_request_auth')) | NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually. | 6259906926068e7796d4e111 |
class BoolConst(Const): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass | A boolean const | 62599069be8e80087fbc0863 |
class H(OneLineTag): <NEW_LINE> <INDENT> def __init__(self, level, content, **kwargs): <NEW_LINE> <INDENT> super().__init__(content, **kwargs) <NEW_LINE> self.level = level <NEW_LINE> self.tag = f'h{level}' | Header element. Overrides onelinetag by taking one int arg for the header
level. | 62599069f548e778e596cd63 |
class CollectionItemsCreate(generics.ListCreateAPIView): <NEW_LINE> <INDENT> serializer_class = ItemSerialiser <NEW_LINE> permission_classes = [IsAuthenticated, IsOwnerCollectionOrHasPermission, ] <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return Item.objects.filter(collection=self.kwargs['collection']) <NEW_LINE> <DEDENT> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(user=self.request.user) | Create items in a collection; needs to be collection owner or have owner's permission
url: collection/<int:collection>/items/ | 625990693cc13d1c6d466f1d |
class SimpleCurveWriter(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.curves = [] <NEW_LINE> self.imt = None <NEW_LINE> <DEDENT> def __exit__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def add_data(self, location, poes): <NEW_LINE> <INDENT> self.curves.append(dict(wkb=location, poes=poes)) | Simple imt-agnostic Curve Writer that stores curves in a list of
dictionaries. | 625990693539df3088ecda77 |
class UserProfile(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> objects = UserProfileManager() <NEW_LINE> USERNAME_FIELD = 'email' <NEW_LINE> REQUIRED_FIELDS = ['name'] <NEW_LINE> def get_full_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.email | Represents a "user profile" inside our system. | 625990695166f23b2e244baa |
class Meta: <NEW_LINE> <INDENT> model = ProductSpecification | Contain definition of ProductSpecificationModelSerializer | 62599069a8370b77170f1b9d |
class BlastProtDb(_BlastDb, Data): <NEW_LINE> <INDENT> file_ext = "blastdbp" <NEW_LINE> allow_datatype_change = False <NEW_LINE> composite_type = "basic" <NEW_LINE> def __init__(self, **kwd): <NEW_LINE> <INDENT> Data.__init__(self, **kwd) <NEW_LINE> self.add_composite_file("blastdb.phr", is_binary=True) <NEW_LINE> self.add_composite_file("blastdb.pin", is_binary=True) <NEW_LINE> self.add_composite_file("blastdb.psq", is_binary=True) <NEW_LINE> self.add_composite_file("blastdb.phd", is_binary=True, optional=True) <NEW_LINE> self.add_composite_file("blastdb.phi", is_binary=True, optional=True) <NEW_LINE> self.add_composite_file("blastdb.pnd", is_binary=True, optional=True) <NEW_LINE> self.add_composite_file("blastdb.pni", is_binary=True, optional=True) <NEW_LINE> self.add_composite_file("blastdb.pog", is_binary=True, optional=True) <NEW_LINE> self.add_composite_file("blastdb.psd", is_binary=True, optional=True) <NEW_LINE> self.add_composite_file("blastdb.psi", is_binary=True, optional=True) | Class for protein BLAST database files. | 625990693317a56b869bf12f |
class _WFastaTabularWriter(_WFastaWriter): <NEW_LINE> <INDENT> def __init__(self, fp, min_frequency, repeat): <NEW_LINE> <INDENT> super(_WFastaTabularWriter, self).__init__(fp, min_frequency, repeat) <NEW_LINE> self.writer = csv.writer(self.fp, delimiter='\t', lineterminator='\n') <NEW_LINE> self.writer.writerow(('id', 'index', 'frequency', 'sequence')) <NEW_LINE> <DEDENT> def write(self, sequence): <NEW_LINE> <INDENT> parsed_header = _parse_wfasta_header(sequence.id) <NEW_LINE> self.writer.writerow((parsed_header.id, parsed_header.index, parsed_header.frequency, sequence.seq)) | Writer for tab-delimited text, with a header | 6259906991f36d47f2231a7b |
class XlsxFilesProcessor: <NEW_LINE> <INDENT> def __init__(self,rString = r'(?P<string>QCM_MF_S1_(?P<number>1[012]|[1-9]))', workingDir='.'): <NEW_LINE> <INDENT> self.__regexMCQ = re.compile(rString) <NEW_LINE> os.chdir(workingDir) <NEW_LINE> print('Working directory:\n {0}'.format(os.getcwd())) <NEW_LINE> self.__xlsx_file_list = glob.glob('*.xlsx') <NEW_LINE> print('Found {} .xlsx files:'.format(len(self.xlsx_file_list))) <NEW_LINE> printList(self.xlsx_file_list) <NEW_LINE> self.__MCQ_file_list = XlsxFilesProcessor.__extract_matching_files(self.xlsx_file_list, self.__regexMCQ) <NEW_LINE> print('Found {} MCQ files:'.format(len(self.MCQ_file_list))) <NEW_LINE> printList(self.MCQ_file_list) <NEW_LINE> <DEDENT> @property <NEW_LINE> def xlsx_file_list(self): <NEW_LINE> <INDENT> return self.__xlsx_file_list <NEW_LINE> <DEDENT> @property <NEW_LINE> def MCQ_file_list(self): <NEW_LINE> <INDENT> return self.__MCQ_file_list <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __extract_matching_files(file_list: List[str], regex) -> List[str]: <NEW_LINE> <INDENT> res = [] <NEW_LINE> for f in file_list: <NEW_LINE> <INDENT> print('Processing file: {}'.format(f)) <NEW_LINE> match = regex.search(f) <NEW_LINE> if match: <NEW_LINE> <INDENT> gr = match.group <NEW_LINE> print('Mcq regex found: {} {}'.format(gr('string'),gr('number'))) <NEW_LINE> res.append((f,int(gr('number')))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Mcq regex not found. File ignored.') <NEW_LINE> <DEDENT> <DEDENT> return sorted(res, key=lambda x:x[1]) | Class for processing .xlsx files from Microsoft Forms.
Regex must have this two following groups:
- <string>
- <number> | 625990694a966d76dd5f06cd |
class SourceManager: <NEW_LINE> <INDENT> sources = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.sources = list() <NEW_LINE> <DEDENT> def identify_sources(self, spec): <NEW_LINE> <INDENT> if not spec: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for source in spec.pkg_source: <NEW_LINE> <INDENT> if not isinstance(source, dict): <NEW_LINE> <INDENT> console_ui.emit_error("SOURCE", "Source lines must be of 'key : value' " "mapping type") <NEW_LINE> print("Erronous line: {}".format(str(source))) <NEW_LINE> return False <NEW_LINE> <DEDENT> if len(list(source.keys())) != 1: <NEW_LINE> <INDENT> console_ui.emit_error("SOURCE", "Encountered too many keys in source") <NEW_LINE> print("Erronous source: {}".format(str(source))) <NEW_LINE> return False <NEW_LINE> <DEDENT> uri = list(source.keys())[0] <NEW_LINE> hash = source[uri] <NEW_LINE> if "|" in uri: <NEW_LINE> <INDENT> brk = uri.split("|") <NEW_LINE> if brk[0] == 'git': <NEW_LINE> <INDENT> uri = "|".join(brk[1:]) <NEW_LINE> self.sources.append(GitSource(uri, hash)) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> self.sources.append(TarSource(uri, hash)) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def _get_working_dir(self, context): <NEW_LINE> <INDENT> build_dir = context.get_build_dir() <NEW_LINE> source0 = self.sources[0].filename <NEW_LINE> if os.path.exists(build_dir): <NEW_LINE> <INDENT> items = os.listdir(build_dir) <NEW_LINE> if len(items) == 1: <NEW_LINE> <INDENT> return os.path.join(build_dir, items[0]) <NEW_LINE> <DEDENT> for item in items: <NEW_LINE> <INDENT> if source0.startswith(item): <NEW_LINE> <INDENT> return os.path.join(build_dir, item) <NEW_LINE> <DEDENT> <DEDENT> return build_dir <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return build_dir <NEW_LINE> <DEDENT> <DEDENT> def get_working_dir(self, context): <NEW_LINE> <INDENT> potential = self._get_working_dir(context) <NEW_LINE> if not os.path.exists(potential) or not os.path.isdir(potential): <NEW_LINE> <INDENT> return context.get_build_dir() <NEW_LINE> <DEDENT> return potential | Responsible for identifying, fetching, and verifying sources as listed
within a YpkgSpec. | 6259906932920d7e50bc781f |
class SeenURLs: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._urls = set() <NEW_LINE> <DEDENT> def mark_seen(self, crawler_url): <NEW_LINE> <INDENT> self._urls.add(crawler_url.url) <NEW_LINE> <DEDENT> def seen(self, crawler_url): <NEW_LINE> <INDENT> return crawler_url.url in self._urls | Keeps track of URLs seen by the crawler. It keeps everything in
memory, which works fine for small sites. A different approach
would be required to be able to crawl bigger sites (e.g. using
persistent storage). | 6259906955399d3f05627cf9 |
class OverrideAuditEventListener(sublime_plugin.EventListener): <NEW_LINE> <INDENT> def on_post_save_async(self, view): <NEW_LINE> <INDENT> setup_override_minidiff(view) <NEW_LINE> <DEDENT> def on_load_async(self, view): <NEW_LINE> <INDENT> setup_override_minidiff(view) <NEW_LINE> <DEDENT> def on_close(self, view): <NEW_LINE> <INDENT> tmp_base = view.settings().get("_oa_ext_diff_base", None) <NEW_LINE> if tmp_base is not None: <NEW_LINE> <INDENT> delete_packed_override(tmp_base) <NEW_LINE> <DEDENT> <DEDENT> def on_hover(self, view, point, hover_zone): <NEW_LINE> <INDENT> if hover_zone != sublime.HOVER_TEXT: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not view.match_selector(point, "text.override-audit entity.name.package"): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> report_type = view.settings().get("override_audit_report_type", "??") <NEW_LINE> pkg_name = view.substr(view.extract_scope(point)) <NEW_LINE> show_pkg_popup(view, point, "pkg:" + pkg_name, report_type != ":packages") | Check on file load and save to see if the new file is potentially an
override for a package, and set the variables that allow for our context
menus to let you edit/diff the override. | 625990698e71fb1e983bd29f |
class WebIDLFile(SandboxDerived): <NEW_LINE> <INDENT> __slots__ = ( 'basename', ) <NEW_LINE> def __init__(self, sandbox, path): <NEW_LINE> <INDENT> SandboxDerived.__init__(self, sandbox) <NEW_LINE> self.basename = path | Describes an individual .webidl source file. | 625990695fcc89381b266d43 |
class Player(Entity): <NEW_LINE> <INDENT> name = "Player" <NEW_LINE> def __init__(self, username="", **kwargs): <NEW_LINE> <INDENT> super(Player, self).__init__(**kwargs) <NEW_LINE> self.username = username <NEW_LINE> self.inventory = Inventory() <NEW_LINE> self.equipped = 0 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ("%s(eid=%d, location=%s, username=%s)" % (self.name, self.eid, self.location, self.username)) <NEW_LINE> <DEDENT> __str__ = __repr__ <NEW_LINE> def save_to_packet(self): <NEW_LINE> <INDENT> yaw, pitch = self.location.ori.to_fracs() <NEW_LINE> x, y, z = self.location.pos <NEW_LINE> item = self.inventory.holdables[self.equipped] <NEW_LINE> if item is None: <NEW_LINE> <INDENT> item = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item = item[0] <NEW_LINE> <DEDENT> packet = make_packet("player", eid=self.eid, username=self.username, x=x, y=y, z=z, yaw=yaw, pitch=pitch, item=item, metadata={}) <NEW_LINE> return packet <NEW_LINE> <DEDENT> def save_equipment_to_packet(self): <NEW_LINE> <INDENT> packet = "" <NEW_LINE> slots = (self.inventory.holdables[self.equipped], self.inventory.armor[3], self.inventory.armor[2], self.inventory.armor[1], self.inventory.armor[0]) <NEW_LINE> for slot, item in enumerate(slots): <NEW_LINE> <INDENT> if item is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> primary, secondary, count = item <NEW_LINE> packet += make_packet("entity-equipment", eid=self.eid, slot=slot, primary=primary, secondary=secondary, count=1) <NEW_LINE> <DEDENT> return packet | A player entity. | 625990697d43ff2487427ffd |
class OverReferendumListView(ListView): <NEW_LINE> <INDENT> model = Referendum <NEW_LINE> template_name = 'referendum/referendum_list.html' <NEW_LINE> def get_context_data(self, *, object_list=None, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(object_list=object_list, **kwargs) <NEW_LINE> context['categories'] = Category.objects.all() <NEW_LINE> context['title'] = "Liste des référendums terminés" <NEW_LINE> return context <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> return [referendum for referendum in Referendum.objects.all() if referendum.is_over] | Over referendum list view | 62599069435de62698e9d5e3 |
class ActionPermission(BasePermission): <NEW_LINE> <INDENT> def perm_non_superuser(self, request, view): <NEW_LINE> <INDENT> queryset = view.get_queryset() <NEW_LINE> content_type = ContentType.objects.get_for_model(queryset.model) <NEW_LINE> codename = '%s_%s' % (view.action, content_type.model, ) <NEW_LINE> perm_exists = Permission.objects .filter(content_type=content_type) .filter(codename=codename) .exists() <NEW_LINE> if perm_exists: <NEW_LINE> <INDENT> perm_name = '%s.%s' % (content_type.app_label, codename) <NEW_LINE> return request.user.has_perm(perm_name) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> if request.user and not request.user.is_authenticated: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if request.user.is_superuser: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.perm_non_superuser(request, view) | Allows access only to has perm users. | 6259906901c39578d7f14321 |
@method_decorator(login_required, name='dispatch') <NEW_LINE> class EmployeeUpdateProfileSettings(SuccessMessageMixin, UpdateView): <NEW_LINE> <INDENT> template_name = 'schedulingcalendar/employeeProfile.html' <NEW_LINE> success_message = 'Schedule display settings successfully updated' <NEW_LINE> form_class = EmployeeDisplaySettingsForm <NEW_LINE> success_url = reverse_lazy('schedulingcalendar:employee_profile_settings') <NEW_LINE> def get(self, request, **kwargs): <NEW_LINE> <INDENT> self.object = Employee.objects.get(employee_user=self.request.user) <NEW_LINE> form_class = self.get_form_class() <NEW_LINE> form = self.get_form(form_class) <NEW_LINE> context = self.get_context_data(object=self.object, form=form) <NEW_LINE> return self.render_to_response(context) <NEW_LINE> <DEDENT> def get_object(self, queryset=None): <NEW_LINE> <INDENT> obj = Employee.objects.get(employee_user=self.request.user) <NEW_LINE> return obj | Display employee settings and form to update these settings. | 6259906916aa5153ce401cb2 |
class HeroInfo(models.Model): <NEW_LINE> <INDENT> hname = models.CharField(max_length=20) <NEW_LINE> hgender = models.BooleanField(default=False) <NEW_LINE> hcomment = models.CharField(max_length=128) <NEW_LINE> hbook = models.ForeignKey('BookInfo', on_delete=models.CASCADE) | 英雄人物模型类 | 62599069d486a94d0ba2d797 |
class Cupping(CuppingServiceBaseMixin, Base): <NEW_LINE> <INDENT> __tablename__ = 'cuppings' <NEW_LINE> session_id = Column(Integer, ForeignKey('sessions.id'), nullable=False) <NEW_LINE> name = Column(String(length=127)) <NEW_LINE> session = relationship('Session', back_populates='cuppings') <NEW_LINE> scores = Column(JSONB, nullable=False) <NEW_LINE> overall_score = Column(Numeric(precision=4, scale=1)) <NEW_LINE> descriptors = Column(JSONB, nullable=True) <NEW_LINE> defects = Column(JSONB, nullable=True) <NEW_LINE> notes = Column(String(length=255)) <NEW_LINE> is_sample = Column(Boolean, default=False) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<Cupping(id=%s, name=%s, session_id=%s, overall_score=%s)>' % ( self.id or 'unsaved', self.name, self.session_id, self.overall_score) <NEW_LINE> <DEDENT> def _validate_list_or_tuple(self, key, value): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not isinstance(value, (list, tuple)): <NEW_LINE> <INDENT> raise ValueError('%s must be a list of strings' % (key, )) <NEW_LINE> <DEDENT> for _string in value: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> assert isinstance(_string, str) <NEW_LINE> <DEDENT> except AssertionError: <NEW_LINE> <INDENT> raise ValueError('%s must be a list of strings' % (key, )) <NEW_LINE> <DEDENT> <DEDENT> return [str(v).strip() for v in value] <NEW_LINE> <DEDENT> @validates('scores') <NEW_LINE> def validate_scores(self, key, value): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not isinstance(value, dict): <NEW_LINE> <INDENT> raise ValueError('Scores must be a mapping of name to numeric value') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return {k: float(v) for k, v in value.items()} <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ValueError('Scores must be a mapping of name to numeric value') <NEW_LINE> <DEDENT> <DEDENT> @validates('descriptors') <NEW_LINE> def validate_descriptors(self, key, value): <NEW_LINE> <INDENT> return self._validate_list_or_tuple(key, value) <NEW_LINE> <DEDENT> @validates('defects') <NEW_LINE> def validate_defects(self, key, value): <NEW_LINE> <INDENT> return self._validate_list_or_tuple(key, value) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_model(cls, model, session_id=None): <NEW_LINE> <INDENT> session_id = session_id or model.session_id <NEW_LINE> cupping = cls( session_id=session_id, name=model.name, scores=model.scores, overall_score=model.overall_score, descriptors=model.descriptors, defects=model.defects, notes=model.notes, is_sample=model.is_sample, ) <NEW_LINE> cupping.save() <NEW_LINE> return cupping | An individual cupping for one object (roast). | 62599069435de62698e9d5e4 |
class FeatureBase(metaclass=ABCMeta): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._last_submission = None <NEW_LINE> self._submission = None <NEW_LINE> self._values = [] <NEW_LINE> <DEDENT> @property <NEW_LINE> @abstractmethod <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> @property <NEW_LINE> @abstractmethod <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def new_submission(self, submission): <NEW_LINE> <INDENT> self._last_submission = self._submission <NEW_LINE> self._submission = submission <NEW_LINE> self._values.append(self._submission_value()) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def _submission_value(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def values(self): <NEW_LINE> <INDENT> return self._values <NEW_LINE> <DEDENT> def clear_submissions(self): <NEW_LINE> <INDENT> self._last_submission = None <NEW_LINE> self._submission = None <NEW_LINE> self._after_clear() <NEW_LINE> <DEDENT> def _after_clear(self): <NEW_LINE> <INDENT> pass | Base class for features. | 625990697d847024c075dbb3 |
class StringDouble(_object): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, StringDouble, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, StringDouble, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> this = _structural.new_StringDouble(*args) <NEW_LINE> try: <NEW_LINE> <INDENT> self.this.append(this) <NEW_LINE> <DEDENT> except __builtin__.Exception: <NEW_LINE> <INDENT> self.this = this <NEW_LINE> <DEDENT> <DEDENT> __swig_setmethods__["first"] = _structural.StringDouble_first_set <NEW_LINE> __swig_getmethods__["first"] = _structural.StringDouble_first_get <NEW_LINE> if _newclass: <NEW_LINE> <INDENT> first = _swig_property(_structural.StringDouble_first_get, _structural.StringDouble_first_set) <NEW_LINE> <DEDENT> __swig_setmethods__["second"] = _structural.StringDouble_second_set <NEW_LINE> __swig_getmethods__["second"] = _structural.StringDouble_second_get <NEW_LINE> if _newclass: <NEW_LINE> <INDENT> second = _swig_property(_structural.StringDouble_second_get, _structural.StringDouble_second_set) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str((self.first, self.second)) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> if not (index % 2): <NEW_LINE> <INDENT> return self.first <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.second <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, index, val): <NEW_LINE> <INDENT> if not (index % 2): <NEW_LINE> <INDENT> self.first = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.second = val <NEW_LINE> <DEDENT> <DEDENT> __swig_destroy__ = _structural.delete_StringDouble <NEW_LINE> __del__ = lambda self: None | Proxy of C++ std::pair<(std::string,double)> class. | 62599069627d3e7fe0e08661 |
class Tag(object): <NEW_LINE> <INDENT> def __init__(self, name, output): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.url = '/{0}/'.format( output.format(tag=name).lstrip('/').rstrip('/')) | Simple wrapper to provide useful methods for manipulating with tag. | 62599069dd821e528d6da56d |
class Client(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey('auth.User', related_name="client") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.user.username | Model for Clients who buy yachts | 62599069fff4ab517ebceff4 |
class Command(BaseCommand): <NEW_LINE> <INDENT> help = "Sync prices (and plans) from stripe." <NEW_LINE> def handle(self, *args, **options): <NEW_LINE> <INDENT> for price_data in Price.api_list(): <NEW_LINE> <INDENT> price = Price.sync_from_stripe_data(price_data) <NEW_LINE> self.stdout.write(f"Synchronized price {price.id}") <NEW_LINE> <DEDENT> for plan_data in Plan.api_list(): <NEW_LINE> <INDENT> plan = Plan.sync_from_stripe_data(plan_data) <NEW_LINE> self.stdout.write(f"Synchronized plan {plan.id}") | Sync prices (and plans) from stripe. | 62599069a8370b77170f1b9e |
class Book: <NEW_LINE> <INDENT> def display_book(self): <NEW_LINE> <INDENT> print("%s by %s" %(self.title, self.author)) <NEW_LINE> <DEDENT> def display_book_status(self): <NEW_LINE> <INDENT> print("%s is checked out: %s" %(self.title, self.checked_out)) <NEW_LINE> <DEDENT> def return_book(self): <NEW_LINE> <INDENT> self.checked_out = False | 62599069baa26c4b54d50a80 |
|
class Solution: <NEW_LINE> <INDENT> def mergeTwoLists(self, l1, l2): <NEW_LINE> <INDENT> if not l1: <NEW_LINE> <INDENT> return l2 <NEW_LINE> <DEDENT> if not l2: <NEW_LINE> <INDENT> return l1 <NEW_LINE> <DEDENT> if l1.val<l2.val: <NEW_LINE> <INDENT> head, l1_ind, l2_ind = l1, l1.next, l2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> head, l1_ind, l2_ind = l2, l1, l2.next <NEW_LINE> <DEDENT> result = head <NEW_LINE> while l1_ind and l2_ind: <NEW_LINE> <INDENT> if l1_ind.val < l2_ind.val: <NEW_LINE> <INDENT> head.next = l1_ind <NEW_LINE> head = head.next <NEW_LINE> l1_ind = l1_ind.next <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> head.next = l2_ind <NEW_LINE> head = head.next <NEW_LINE> l2_ind = l2_ind.next <NEW_LINE> <DEDENT> <DEDENT> if l1_ind: <NEW_LINE> <INDENT> head.next = l1_ind <NEW_LINE> <DEDENT> if l2_ind: <NEW_LINE> <INDENT> head.next = l2_ind <NEW_LINE> <DEDENT> return result | @param two ListNodes
@return a ListNode | 6259906a1f037a2d8b9e5457 |
class BasicUserCreationForm(forms.ModelForm): <NEW_LINE> <INDENT> username = forms.RegexField(label=_("Username"), max_length=30, regex=r'^[\w.@+-]+$', help_text = _("Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only."), error_messages = {'invalid': _("This value may contain only letters, numbers and @/./+/-/_ characters.")}) <NEW_LINE> password1 = forms.CharField(label=_("Password"), widget=forms.PasswordInput) <NEW_LINE> password2 = forms.CharField(label=_("Password confirmation"), widget=forms.PasswordInput, help_text = _("Enter the same password as above, for verification.")) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(BasicUserCreationForm, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def clean_username(self): <NEW_LINE> <INDENT> username = self.cleaned_data["username"] <NEW_LINE> try: <NEW_LINE> <INDENT> self._meta.model.objects.get(username=username) <NEW_LINE> <DEDENT> except self._meta.model.DoesNotExist: <NEW_LINE> <INDENT> return username <NEW_LINE> <DEDENT> raise forms.ValidationError(_("A user with that username already exists.")) <NEW_LINE> <DEDENT> def clean_password2(self): <NEW_LINE> <INDENT> password1 = self.cleaned_data.get("password1", "") <NEW_LINE> password2 = self.cleaned_data["password2"] <NEW_LINE> if password1 != password2: <NEW_LINE> <INDENT> raise forms.ValidationError(_("The two password fields didn't match.")) <NEW_LINE> <DEDENT> return password2 <NEW_LINE> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> user = super(BasicUserCreationForm, self).save(commit=False) <NEW_LINE> user.set_password(self.cleaned_data["password1"]) <NEW_LINE> if commit: <NEW_LINE> <INDENT> user.save() <NEW_LINE> <DEDENT> return user | A form that creates a user, with no privileges, from the given username and password. | 6259906abe8e80087fbc0865 |
class Node: <NEW_LINE> <INDENT> def __init__(self,char=None,weight=0,left=None,right=None): <NEW_LINE> <INDENT> self.char = char <NEW_LINE> self.weight = weight <NEW_LINE> self.left = left <NEW_LINE> self.right = right | Class for holding all node objects: branches and leaves. For leaves,
the left and right attributes are set to the value None while the
char and weight attributes are set accordingly. Branches are given
a left and right attribute, which contain another node object
recursively until a leaf is reached. Branches' char and weight
attributes are set to None, as our prefix coding system requires
that all our values be at the leaf (end) nodes. | 6259906aaad79263cf42ff8f |
class Persister(Base.Persister): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _get_table_name(): <NEW_LINE> <INDENT> return 'People' | Persists Guardian objects | 6259906a7d847024c075dbb4 |
class UpdateJobRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.JobId = None <NEW_LINE> self.JobAction = None <NEW_LINE> self.Description = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.JobId = params.get("JobId") <NEW_LINE> self.JobAction = params.get("JobAction") <NEW_LINE> self.Description = params.get("Description") | UpdateJob请求参数结构体
| 6259906ad6c5a102081e3902 |
class NULBC(DataItemBase): <NEW_LINE> <INDENT> __type__ = variables.Dynamic <NEW_LINE> __allowedtypes__ = [variables.U1, variables.String] | Column count in dies.
:Types:
- :class:`String <secsgem.secs.variables.String>`
- :class:`U1 <secsgem.secs.variables.U1>`
**Used In Function**
- :class:`SecsS12F01 <secsgem.secs.functions.SecsS12F01>`
- :class:`SecsS12F03 <secsgem.secs.functions.SecsS12F03>`
- :class:`SecsS12F04 <secsgem.secs.functions.SecsS12F04>` | 6259906a56ac1b37e63038cf |
class ReleasesFeatured(DataAPIService): <NEW_LINE> <INDENT> service_name = "releases" <NEW_LINE> uri = "/releases/featured/(.*)" <NEW_LINE> def __init__(self, config): <NEW_LINE> <INDENT> super(ReleasesFeatured, self).__init__(config) <NEW_LINE> logger.debug('Releases featured service __init__') <NEW_LINE> <DEDENT> def get(self, *args): <NEW_LINE> <INDENT> params = self.parse_query_string(args[0]) <NEW_LINE> module = self.get_module(params) <NEW_LINE> impl = module.Releases(config=self.context) <NEW_LINE> return impl.get_featured(**params) <NEW_LINE> <DEDENT> def put(self, *args): <NEW_LINE> <INDENT> params = self.parse_query_string(args[0]) <NEW_LINE> put_input = web.input() <NEW_LINE> for i in put_input: <NEW_LINE> <INDENT> put_input[i] = put_input[i].split(',') <NEW_LINE> <DEDENT> params.update(put_input) <NEW_LINE> module = self.get_module(params) <NEW_LINE> impl = module.Releases(config=self.context) <NEW_LINE> return impl.update_featured(**params) | Handle featured versions of a given product.
| 6259906af548e778e596cd66 |
class BasicEventFormatter(EventFormatter): <NEW_LINE> <INDENT> def __init__( self, data_type='basic', format_string=None, format_string_short=None): <NEW_LINE> <INDENT> super(BasicEventFormatter, self).__init__(data_type=data_type) <NEW_LINE> self._format_string_attribute_names = None <NEW_LINE> self._format_string = format_string <NEW_LINE> self._format_string_short = format_string_short <NEW_LINE> <DEDENT> def GetFormatStringAttributeNames(self): <NEW_LINE> <INDENT> if self._format_string_attribute_names is None: <NEW_LINE> <INDENT> self._format_string_attribute_names = ( self._FORMAT_STRING_ATTRIBUTE_NAME_RE.findall( self._format_string)) <NEW_LINE> <DEDENT> return set(self._format_string_attribute_names) <NEW_LINE> <DEDENT> def GetMessage(self, event_values): <NEW_LINE> <INDENT> return self._FormatMessage(self._format_string, event_values) <NEW_LINE> <DEDENT> def GetMessageShort(self, event_values): <NEW_LINE> <INDENT> if self._format_string_short: <NEW_LINE> <INDENT> format_string = self._format_string_short <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> format_string = self._format_string <NEW_LINE> <DEDENT> short_message_string = self._FormatMessage(format_string, event_values) <NEW_LINE> if len(short_message_string) > 80: <NEW_LINE> <INDENT> short_message_string = '{0:s}...'.format(short_message_string[:77]) <NEW_LINE> <DEDENT> return short_message_string | Format event values using a message format string.
Attributes:
custom_helpers (list[str]): identifiers of custom event formatter helpers.
helpers (list[EventFormatterHelper]): event formatter helpers. | 6259906a76e4537e8c3f0d5d |
class Box(Mesh): <NEW_LINE> <INDENT> def __init__(self, size, material=None): <NEW_LINE> <INDENT> self._size = np.array(size) <NEW_LINE> mesh = trimesh.creation.box(size) <NEW_LINE> super(Box, self).__init__(mesh, material=material) <NEW_LINE> <DEDENT> def is_on_surface(self, point): <NEW_LINE> <INDENT> on_surf, _ = on_aabb_surface(self._size, point, atol=2 * EPS_ZERO) <NEW_LINE> return on_surf <NEW_LINE> <DEDENT> def normal(self, surface_point: tuple) -> tuple: <NEW_LINE> <INDENT> on_surf, surf_indexes = on_aabb_surface( self._size, surface_point, atol=2 * EPS_ZERO ) <NEW_LINE> if not on_surf: <NEW_LINE> <INDENT> raise GeometryError( "Point is not on surface.", {"point": surface_point, "geometry": self} ) <NEW_LINE> <DEDENT> if len(surf_indexes) != 1: <NEW_LINE> <INDENT> raise GeometryError( "Point is on multiple surfaces.", {"point": surface_point, "geometry": self}, ) <NEW_LINE> <DEDENT> idx = surf_indexes[0] <NEW_LINE> return NORMALS[idx] | Defines an axis-aligned box with centre (0, 0, 0) and side length.
Notes
-----
This is currently implemented using trimesh, it could be the case that this is
a little overkill of such a simple class. Consider re-writing, but add timing
tests to test efficiencies of the changes.
For TIR rays it would seem possible to have a huge optimisation because the
total path length and number of TIR bounces can be calculated in advance. | 6259906a4e4d562566373be1 |
class TestPickle: <NEW_LINE> <INDENT> def test_pickle_unit(self): <NEW_LINE> <INDENT> x = unit.kelvin <NEW_LINE> y = pickle.loads(pickle.dumps(x)) <NEW_LINE> assert x == y <NEW_LINE> <DEDENT> def test_pick_quantity(self): <NEW_LINE> <INDENT> x = 1.0 * unit.kelvin <NEW_LINE> y = pickle.loads(pickle.dumps(x)) <NEW_LINE> assert x == y <NEW_LINE> <DEDENT> @skip_if_missing("uncertainties") <NEW_LINE> def test_pickle_quantity(self): <NEW_LINE> <INDENT> x = (1.0 * unit.kelvin).plus_minus(0.05) <NEW_LINE> y = pickle.loads(pickle.dumps(x)) <NEW_LINE> assert x.value == y.value and x.error == y.error | Test pickle-based serialization of Quantity, Unit, and Measurement objects
See:
* https://github.com/hgrecco/pint/issues/1017
* https://github.com/openforcefield/openff-evaluator/pull/341 | 6259906a2ae34c7f260ac8c2 |
class ShowSchedule(View): <NEW_LINE> <INDENT> template_name = "schedule/show.html" <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> check_schedule_view(request) <NEW_LINE> schedule = Schedule.objects.filter(published=True, hidden=False).first() <NEW_LINE> if not schedule: <NEW_LINE> <INDENT> raise Http404() <NEW_LINE> <DEDENT> data = {"days": []} <NEW_LINE> for day in schedule.day_set.all(): <NEW_LINE> <INDENT> data["days"].append({ "tracks": day.track_set.order_by("order"), "date": day.date, "slots": day.slot_set.all().select_related(), "slot_groups": day.slot_groups(), }) <NEW_LINE> <DEDENT> return render(request, self.template_name, data) | Shows the schedule of the event. | 6259906a4f88993c371f110c |
class ImageSlidingWindowDataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, full_example, image_patch_size=128, window_step_size=32): <NEW_LINE> <INDENT> self.full_example = CrowdExample(image=full_example.image) <NEW_LINE> self.window_step_size = window_step_size <NEW_LINE> self.image_patch_size = image_patch_size <NEW_LINE> half_patch_size = int(self.image_patch_size // 2) <NEW_LINE> self.y_positions = list(range(half_patch_size, self.full_example.image.shape[0] - half_patch_size + 1, self.window_step_size)) <NEW_LINE> if self.full_example.image.shape[0] - half_patch_size > 0: <NEW_LINE> <INDENT> self.y_positions = list(set(self.y_positions + [self.full_example.image.shape[0] - half_patch_size])) <NEW_LINE> <DEDENT> self.x_positions = list(range(half_patch_size, self.full_example.image.shape[1] - half_patch_size + 1, self.window_step_size)) <NEW_LINE> if self.full_example.image.shape[1] - half_patch_size > 0: <NEW_LINE> <INDENT> self.x_positions = list(set(self.x_positions + [self.full_example.image.shape[1] - half_patch_size])) <NEW_LINE> <DEDENT> self.positions_shape = np.array([len(self.y_positions), len(self.x_positions)]) <NEW_LINE> self.length = self.positions_shape.prod() <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> extract_patch_transform = ExtractPatchForPosition(self.image_patch_size, allow_padded=True) <NEW_LINE> test_transform = torchvision.transforms.Compose([NegativeOneToOneNormalizeImage(), NumpyArraysToTorchTensors()]) <NEW_LINE> y_index, x_index = np.unravel_index(index, self.positions_shape) <NEW_LINE> y = self.y_positions[y_index] <NEW_LINE> x = self.x_positions[x_index] <NEW_LINE> patch = extract_patch_transform(self.full_example, y, x) <NEW_LINE> example = test_transform(patch) <NEW_LINE> return example.image, x, y <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.length | Creates a database for a sliding window extraction of 1 full example (i.e. each of the patches of the full example). | 6259906a009cb60464d02d14 |
class Solution: <NEW_LINE> <INDENT> @timeit <NEW_LINE> def tictactoe(self, board: List[str]) -> str: <NEW_LINE> <INDENT> n = len(board) <NEW_LINE> def win(s): <NEW_LINE> <INDENT> rlist = board[:] <NEW_LINE> rlist.extend([''.join([board[i][j] for i in range(n)]) for j in range(n)]) <NEW_LINE> rlist.append(''.join([board[i][i] for i in range(n)])) <NEW_LINE> rlist.append(''.join([board[i][n-1-i] for i in range(n)])) <NEW_LINE> return s*n in rlist <NEW_LINE> <DEDENT> if win('X'): return 'X' <NEW_LINE> if win('O'): return 'O' <NEW_LINE> if sum(row.count(' ') for row in board) == 0: <NEW_LINE> <INDENT> return "Draw" <NEW_LINE> <DEDENT> return "Pending" | [面试题 16.04. 井字游戏](https://leetcode-cn.com/problems/tic-tac-toe-lcci/) | 6259906afff4ab517ebceff5 |
class TestSyntaxErrorHandling(unittest.TestCase): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> with self.assertRaises(SyntaxError): <NEW_LINE> <INDENT> Renderer(env).load_callback('syntaxerror', None, None) | Propagates a SyntaxError | 6259906a92d797404e389749 |
class FirstLastAccum(object): <NEW_LINE> <INDENT> default_init = (None, None, None, None) <NEW_LINE> def __init__(self, stats_tuple=None): <NEW_LINE> <INDENT> self.first = None <NEW_LINE> self.firsttime = None <NEW_LINE> self.last = None <NEW_LINE> self.lasttime = None <NEW_LINE> <DEDENT> def setStats(self, stats_tuple=None): <NEW_LINE> <INDENT> self.first, self.firsttime, self.last, self.lasttime = stats_tuple if stats_tuple else FirstLastAccum.default_init <NEW_LINE> <DEDENT> def getStatsTuple(self): <NEW_LINE> <INDENT> return self.first, self.firsttime, self.last, self.lasttime <NEW_LINE> <DEDENT> def mergeHiLo(self, x_stats): <NEW_LINE> <INDENT> if x_stats.firsttime is not None: <NEW_LINE> <INDENT> if self.firsttime is None or x_stats.firsttime < self.firsttime: <NEW_LINE> <INDENT> self.firsttime = x_stats.firsttime <NEW_LINE> self.first = x_stats.first <NEW_LINE> <DEDENT> <DEDENT> if x_stats.lasttime is not None: <NEW_LINE> <INDENT> if self.lasttime is None or x_stats.lasttime >= self.lasttime: <NEW_LINE> <INDENT> self.lasttime = x_stats.lasttime <NEW_LINE> self.last = x_stats.last <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def mergeSum(self, x_stats): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def addHiLo(self, val, ts): <NEW_LINE> <INDENT> if val is not None: <NEW_LINE> <INDENT> string_val = str(val) <NEW_LINE> if self.firsttime is None or ts < self.firsttime: <NEW_LINE> <INDENT> self.first = string_val <NEW_LINE> self.firsttime = ts <NEW_LINE> <DEDENT> if self.lasttime is None or ts >= self.lasttime: <NEW_LINE> <INDENT> self.last = string_val <NEW_LINE> self.lasttime = ts <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def addSum(self, val, weight=1): <NEW_LINE> <INDENT> pass | Minimal accumulator, suitable for strings.
It can only return the first and last strings it has seen, along with their timestamps. | 6259906a4428ac0f6e659d0c |
class Node: <NEW_LINE> <INDENT> def __init__(self,data=None,next=None): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.next = next <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> return self.data <NEW_LINE> <DEDENT> def get_next(self): <NEW_LINE> <INDENT> return self.next <NEW_LINE> <DEDENT> def set_next(self,newNode): <NEW_LINE> <INDENT> self.next = newNode | Implementing a Queue using a linked list | 6259906a435de62698e9d5e5 |
class Search(Kiwicom): <NEW_LINE> <INDENT> def search_places(self, headers=None, request_args=None, **params): <NEW_LINE> <INDENT> service_url = urljoin(self.API_HOST['search'], 'places') <NEW_LINE> return self.make_request(service_url, headers=headers, request_args=request_args, **params) <NEW_LINE> <DEDENT> def search_flights(self, headers=None, request_args=None, **params): <NEW_LINE> <INDENT> self._reformat_date(params) <NEW_LINE> service_url = urljoin(self.API_HOST['search'], 'flights') <NEW_LINE> return self.make_request(service_url, headers=headers, request_args=request_args, **params) <NEW_LINE> <DEDENT> def search_flights_multi(self, json_data=None, data=None, headers=None, request_args=None, **params): <NEW_LINE> <INDENT> if json_data: <NEW_LINE> <INDENT> for item in json_data['requests']: <NEW_LINE> <INDENT> json_data.update(self._reformat_date(item)) <NEW_LINE> <DEDENT> <DEDENT> if data: <NEW_LINE> <INDENT> for item in data['requests']: <NEW_LINE> <INDENT> data.update(self._reformat_date(item)) <NEW_LINE> <DEDENT> <DEDENT> service_url = urljoin(self.API_HOST['search'], 'flights_multi') <NEW_LINE> return self.make_request(service_url, method='post', json_data=json_data, data=data, headers=headers, request_args=request_args, **params) | Search Class | 6259906ad486a94d0ba2d799 |
class ArrayOverallState(PLNOverallState): <NEW_LINE> <INDENT> def __init__( self, annotated_interactions, active_gene_threshold, transition_ratio, seed_links_indices=None, link_false_pos=None, link_false_neg=None, link_prior=None, parameters_state_class=PLNParametersState, links_state_class=ArrayLinksState ): <NEW_LINE> <INDENT> num_process_links = annotated_interactions.calc_num_links() <NEW_LINE> logger.info("Determining active interactions.") <NEW_LINE> active_interactions = ( annotated_interactions.get_active_coannotated_interactions( active_gene_threshold) ) <NEW_LINE> self.links_state = links_state_class( annotated_interactions, active_interactions, seed_links_indices ) <NEW_LINE> self.links_state.report_interactions() <NEW_LINE> self.parameters_state = parameters_state_class( num_process_links, link_false_pos, link_false_neg, link_prior ) <NEW_LINE> self.transition_ratio = transition_ratio <NEW_LINE> self._delta = None <NEW_LINE> <DEDENT> def serialize_state(self): <NEW_LINE> <INDENT> return self.links_state.serialize_state() | Similar to `PLNOverallState`, but using `ArrayLinksState` as the
links state class, and `AnnotatedInteractionsArray` as the annotated
interactions class. | 6259906a097d151d1a2c2848 |
class SaltclassPillarTestCase(TestCase, LoaderModuleMockMixin): <NEW_LINE> <INDENT> def setup_loader_modules(self): <NEW_LINE> <INDENT> return {saltclass: {}} <NEW_LINE> <DEDENT> def _runner(self, expected_ret): <NEW_LINE> <INDENT> fake_args = { "path": os.path.abspath( os.path.join(RUNTIME_VARS.FILES, "saltclass", "examples") ) } <NEW_LINE> fake_pillar = {} <NEW_LINE> fake_minion_id = "fake_id" <NEW_LINE> try: <NEW_LINE> <INDENT> full_ret = saltclass.ext_pillar(fake_minion_id, fake_pillar, fake_args) <NEW_LINE> parsed_ret = full_ret["__saltclass__"]["classes"] <NEW_LINE> <DEDENT> except TypeError as err: <NEW_LINE> <INDENT> self.fail(err) <NEW_LINE> <DEDENT> self.assertListEqual(expected_ret, parsed_ret) <NEW_LINE> <DEDENT> def test_succeeds(self): <NEW_LINE> <INDENT> ret = [ "default.users", "default.motd", "default.empty", "default", "roles.app", "roles.nginx", ] <NEW_LINE> self._runner(ret) | Tests for salt.pillar.saltclass | 6259906a2ae34c7f260ac8c3 |
class Matcher: <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> self.not_ = InverseMatcher(value, self) <NEW_LINE> <DEDENT> def toBe(self, value): <NEW_LINE> <INDENT> if self.value != value: <NEW_LINE> <INDENT> msg = "expected " + str(self.value) + " to be " + str(value) <NEW_LINE> raise AssertionError(msg) <NEW_LINE> <DEDENT> <DEDENT> def toEqual(self, value): <NEW_LINE> <INDENT> self.toBe(value) | Matcher raises assertion errors when its expectations fail | 6259906ad486a94d0ba2d79a |
class ListenerTests(test_santiago.SantiagoTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.listener = santiago.SantiagoListener(santiago.Santiago()) <NEW_LINE> self.listener.santiago.incoming_request = self.acall <NEW_LINE> self.listener.santiago.get_client_locations = self.acall <NEW_LINE> self.listener.santiago.query = self.acall <NEW_LINE> self.listener.santiago.create_hosting_location = self.acall <NEW_LINE> self.item_one, self.item_two, self.item_three = (1, 2, 3) <NEW_LINE> <DEDENT> def acall(self, *args, **kwargs): <NEW_LINE> <INDENT> self.args = args <NEW_LINE> self.kwargs = kwargs <NEW_LINE> <DEDENT> def test_pass_incoming_request(self): <NEW_LINE> <INDENT> self.listener.incoming_request(self.item_one) <NEW_LINE> self.assertEqual(self.args, (self.item_one,)) | Tests the ``SantiagoListener`` class.
Mostly making sure entire requests are successfully passed down to the
underlying Santiago. | 6259906a44b2445a339b754d |
class ManyToManyWidget(Widget): <NEW_LINE> <INDENT> def __init__(self, model, separator=None, field=None, *args, **kwargs): <NEW_LINE> <INDENT> if separator is None: <NEW_LINE> <INDENT> separator = ',' <NEW_LINE> <DEDENT> if field is None: <NEW_LINE> <INDENT> field = 'pk' <NEW_LINE> <DEDENT> self.model = model <NEW_LINE> self.separator = separator <NEW_LINE> self.field = field <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def clean(self, value, row=None, *args, **kwargs): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return self.model.objects.none() <NEW_LINE> <DEDENT> if isinstance(value, (float, int)): <NEW_LINE> <INDENT> ids = [int(value)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ids = value.split(self.separator) <NEW_LINE> ids = filter(None, [i.strip() for i in ids]) <NEW_LINE> <DEDENT> return self.model.objects.filter(**{ '%s__in' % self.field: ids }) <NEW_LINE> <DEDENT> def render(self, value, obj=None): <NEW_LINE> <INDENT> ids = [smart_str(getattr(obj, self.field)) for obj in value.all()] <NEW_LINE> return self.separator.join(ids) | Widget that converts between representations of a ManyToMany relationships
as a list and an actual ManyToMany field.
:param model: The model the ManyToMany field refers to (required).
:param separator: Defaults to ``','``.
:param field: A field on the related model. Default is ``pk``. | 6259906a3cc13d1c6d466f21 |
class Program: <NEW_LINE> <INDENT> def create_user(self, user_name: str): <NEW_LINE> <INDENT> user: User = User(UserName(user_name)) <NEW_LINE> user_service: UserService = UserService() <NEW_LINE> if user_service.exists(user): <NEW_LINE> <INDENT> raise Exception(f"{user_name}はすでに存在しています") | ユーザ作成処理の実装 | 6259906a8da39b475be049c7 |
class WebUsbTransport(ProtocolBasedTransport): <NEW_LINE> <INDENT> PATH_PREFIX = "webusb" <NEW_LINE> ENABLED = usb1 is not None <NEW_LINE> context = None <NEW_LINE> def __init__( self, device: str, handle: WebUsbHandle = None, debug: bool = False ) -> None: <NEW_LINE> <INDENT> if handle is None: <NEW_LINE> <INDENT> handle = WebUsbHandle(device, debug) <NEW_LINE> <DEDENT> self.device = device <NEW_LINE> self.handle = handle <NEW_LINE> self.debug = debug <NEW_LINE> super().__init__(protocol=ProtocolV1(handle)) <NEW_LINE> <DEDENT> def get_path(self) -> str: <NEW_LINE> <INDENT> return "%s:%s" % (self.PATH_PREFIX, dev_to_str(self.device)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def enumerate(cls) -> Iterable["WebUsbTransport"]: <NEW_LINE> <INDENT> if cls.context is None: <NEW_LINE> <INDENT> cls.context = usb1.USBContext() <NEW_LINE> cls.context.open() <NEW_LINE> atexit.register(cls.context.close) <NEW_LINE> <DEDENT> devices = [] <NEW_LINE> for dev in cls.context.getDeviceIterator(skip_on_error=True): <NEW_LINE> <INDENT> usb_id = (dev.getVendorID(), dev.getProductID()) <NEW_LINE> if usb_id not in TREZORS: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not is_vendor_class(dev): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> dev.getProduct() <NEW_LINE> devices.append(WebUsbTransport(dev)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return devices <NEW_LINE> <DEDENT> def find_debug(self) -> "WebUsbTransport": <NEW_LINE> <INDENT> if self.protocol.VERSION >= 2: <NEW_LINE> <INDENT> return WebUsbTransport(self.device, self.handle) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return WebUsbTransport(self.device, debug=True) | WebUsbTransport implements transport over WebUSB interface. | 6259906a76e4537e8c3f0d5e |
class Header3_Locators_Base_2(object): <NEW_LINE> <INDENT> locators = { 'base' : "css=#header", 'login' : "css=#login a", 'register' : "css=#register a", 'logout' : "css=#logout a", 'myaccount' : "css=#myaccount a", 'username' : "css=#usersname a", 'search' : "css=#searchform", } | locators for Header object | 6259906a442bda511e95d946 |
class Cluster(mb.SavannaBase): <NEW_LINE> <INDENT> __tablename__ = 'clusters' <NEW_LINE> __table_args__ = ( sa.UniqueConstraint('name', 'tenant_id'), ) <NEW_LINE> id = _id_column() <NEW_LINE> name = sa.Column(sa.String(80), nullable=False) <NEW_LINE> description = sa.Column(sa.Text) <NEW_LINE> tenant_id = sa.Column(sa.String(36)) <NEW_LINE> trust_id = sa.Column(sa.String(36)) <NEW_LINE> is_transient = sa.Column(sa.Boolean, default=False) <NEW_LINE> plugin_name = sa.Column(sa.String(80), nullable=False) <NEW_LINE> hadoop_version = sa.Column(sa.String(80), nullable=False) <NEW_LINE> cluster_configs = sa.Column(st.JsonDictType()) <NEW_LINE> default_image_id = sa.Column(sa.String(36)) <NEW_LINE> neutron_management_network = sa.Column(sa.String(36)) <NEW_LINE> anti_affinity = sa.Column(st.JsonListType()) <NEW_LINE> management_private_key = sa.Column(sa.Text, nullable=False) <NEW_LINE> management_public_key = sa.Column(sa.Text, nullable=False) <NEW_LINE> user_keypair_id = sa.Column(sa.String(80)) <NEW_LINE> status = sa.Column(sa.String(80)) <NEW_LINE> status_description = sa.Column(sa.String(200)) <NEW_LINE> info = sa.Column(st.JsonDictType()) <NEW_LINE> node_groups = relationship('NodeGroup', cascade="all,delete", backref='cluster', lazy='joined') <NEW_LINE> cluster_template_id = sa.Column(sa.String(36), sa.ForeignKey('cluster_templates.id')) <NEW_LINE> cluster_template = relationship('ClusterTemplate', backref="clusters", lazy='joined') <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> d = super(Cluster, self).to_dict() <NEW_LINE> d['node_groups'] = [ng.to_dict() for ng in self.node_groups] <NEW_LINE> return d | Contains all info about cluster. | 6259906a796e427e5384ff53 |
class BaseConfig: <NEW_LINE> <INDENT> DEBUG = False <NEW_LINE> SECRET_KEY = os.getenv('SECRET_KEY', 'my_strong_key') <NEW_LINE> BCRYPT_HASH_PREFIX = 14 <NEW_LINE> SQLALCHEMY_TRACK_MODIFICATIONS = False | Base application configuration | 6259906a3317a56b869bf131 |
class geni_union: <NEW_LINE> <INDENT> def __init__(self, union_dict, union_id): <NEW_LINE> <INDENT> self.union_id = union_id <NEW_LINE> self.union_url = union_dict["url"] <NEW_LINE> self.union_status = union_dict["status"] <NEW_LINE> self.parents = [] <NEW_LINE> self.children = [] <NEW_LINE> for tmp_profile in union_dict["edges"]: <NEW_LINE> <INDENT> if (union_dict["edges"][tmp_profile]['rel'] == "child"): <NEW_LINE> <INDENT> self.children.append(tmp_profile) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.parents.append(tmp_profile) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def is_profile_child(self, profile2check): <NEW_LINE> <INDENT> return profile2check in self.children <NEW_LINE> <DEDENT> def is_profile_parent(self, profile2check): <NEW_LINE> <INDENT> return profile2check in self.parents <NEW_LINE> <DEDENT> def get_id(self): <NEW_LINE> <INDENT> return self.union_id | This function deals with geni unions model, it is a data handler, no itended
to be used as a caller. | 6259906a8e7ae83300eea86a |
class Reshape(Transform): <NEW_LINE> <INDENT> def __init__(self, shape): <NEW_LINE> <INDENT> self.shape = shape <NEW_LINE> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> if isinstance(x, np.ndarray): <NEW_LINE> <INDENT> return x.reshape(self.shape) <NEW_LINE> <DEDENT> elif torch.is_tensor(x): <NEW_LINE> <INDENT> return x.view(*self.shape) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> format_string = self.__class__.__name__ + '(' <NEW_LINE> format_string += 'shape={0}'.format(self.shape) <NEW_LINE> format_string += ')' <NEW_LINE> return format_string | Reshapes PyTorch tensors and NumPy arrays as required
Parameters
----------
shape: tuple of integers
Shape to reshape all input to | 6259906a32920d7e50bc7822 |
class BoardLabels(object): <NEW_LINE> <INDENT> def __init__(self, board): <NEW_LINE> <INDENT> self.board = board <NEW_LINE> self.labels = [] <NEW_LINE> for data in board.labels: <NEW_LINE> <INDENT> t = component.Component(label.LabelTitle(data)) <NEW_LINE> l = component.Component(data, model='edit-color') <NEW_LINE> self.labels.append((t, l)) | Board configuration component for board labels | 6259906a55399d3f05627cfd |
class StorageLayout: <NEW_LINE> <INDENT> COMPRESSED_SUFFIX = ".lzma" <NEW_LINE> if os.path.isfile("/usr/bin/lzma"): <NEW_LINE> <INDENT> COMPRESS_PATH = "/usr/bin/lzma" <NEW_LINE> <DEDENT> elif os.path.isfile("/usr/local/bin/lzma"): <NEW_LINE> <INDENT> COMPRESS_PATH = "/usr/local/bin/lzma" <NEW_LINE> <DEDENT> COMPRESSION_ARGS = ["-0"] <NEW_LINE> DECOMPRESSION_ARGS = ["--decompress", "--stdout"] <NEW_LINE> PENDING_COMPRESSION_SUFFIX = ".compressme" <NEW_LINE> def __init__(self, schema, basedir, max_log_size): <NEW_LINE> <INDENT> self._max_log_size = max_log_size <NEW_LINE> self._schema = schema <NEW_LINE> self._basedir = basedir <NEW_LINE> <DEDENT> def write(self, uuid, obj, dimensions, version=1): <NEW_LINE> <INDENT> filename = self._schema.get_filename(self._basedir, dimensions, version) <NEW_LINE> return self.write_filename(uuid, obj, filename) <NEW_LINE> <DEDENT> def clean_newlines(self, value, tag="value"): <NEW_LINE> <INDENT> for eol in ["\r", "\n"]: <NEW_LINE> <INDENT> if eol in value: <NEW_LINE> <INDENT> logging.warn("Found an unexpected EOL in %s" % (tag)) <NEW_LINE> value = value.replace(eol, " ") <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> def write_filename(self, uuid, obj, filename): <NEW_LINE> <INDENT> if isinstance(obj, basestring): <NEW_LINE> <INDENT> jsonstr = self.clean_newlines(unicode(obj), obj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> jsonstr = unicode(json.dumps(obj, separators=(',', ':'))) <NEW_LINE> <DEDENT> output_line = u"%s\t%s\n" % (uuid, jsonstr) <NEW_LINE> dirname = os.path.dirname(filename) <NEW_LINE> if dirname != '' and not os.path.exists(dirname): <NEW_LINE> <INDENT> fileutil.makedirs_concurrent(dirname) <NEW_LINE> <DEDENT> with io.open(filename, "a") as fout: <NEW_LINE> <INDENT> fout.write(output_line) <NEW_LINE> filesize = fout.tell() <NEW_LINE> <DEDENT> logging.debug("Wrote to %s: new size is %d" % (filename, filesize)) <NEW_LINE> if filesize >= self._max_log_size: <NEW_LINE> <INDENT> return self.rotate(filename) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return filename <NEW_LINE> <DEDENT> <DEDENT> def rotate(self, filename): <NEW_LINE> <INDENT> logging.debug("Rotating %s" % (filename)) <NEW_LINE> tmp_name = "%s.%d.%f%s" % (filename, os.getpid(), time.time(), self.PENDING_COMPRESSION_SUFFIX) <NEW_LINE> os.rename(filename, tmp_name) <NEW_LINE> return tmp_name | A class for encapsulating the on-disk data layout for Telemetry | 6259906a5fcc89381b266d45 |
class EquipmentLocatorLedFsmTask(ManagedObject): <NEW_LINE> <INDENT> consts = EquipmentLocatorLedFsmTaskConsts() <NEW_LINE> naming_props = set([u'item']) <NEW_LINE> mo_meta = MoMeta("EquipmentLocatorLedFsmTask", "equipmentLocatorLedFsmTask", "task-[item]", VersionMeta.Version111j, "OutputOnly", 0xf, [], [""], [u'equipmentLocatorLed'], [], [None]) <NEW_LINE> prop_meta = { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version111j, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []), "completion": MoPropertyMeta("completion", "completion", "string", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, None, None, None, None, ["cancelled", "completed", "processing", "scheduled"], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, 0x2, 0, 256, None, [], []), "flags": MoPropertyMeta("flags", "flags", "string", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, None, None, None, r"""(defaultValue){0,1}""", [], []), "item": MoPropertyMeta("item", "item", "string", VersionMeta.Version111j, MoPropertyMeta.NAMING, None, None, None, None, ["SetFeLocatorLed", "SetFiLocatorLed", "SetLocatorLed", "nop"], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []), "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []), "seq_id": MoPropertyMeta("seq_id", "seqId", "uint", VersionMeta.Version111j, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version111j, MoPropertyMeta.READ_WRITE, 0x8, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), } <NEW_LINE> prop_map = { "childAction": "child_action", "completion": "completion", "dn": "dn", "flags": "flags", "item": "item", "rn": "rn", "sacl": "sacl", "seqId": "seq_id", "status": "status", } <NEW_LINE> def __init__(self, parent_mo_or_dn, item, **kwargs): <NEW_LINE> <INDENT> self._dirty_mask = 0 <NEW_LINE> self.item = item <NEW_LINE> self.child_action = None <NEW_LINE> self.completion = None <NEW_LINE> self.flags = None <NEW_LINE> self.sacl = None <NEW_LINE> self.seq_id = None <NEW_LINE> self.status = None <NEW_LINE> ManagedObject.__init__(self, "EquipmentLocatorLedFsmTask", parent_mo_or_dn, **kwargs) | This is EquipmentLocatorLedFsmTask class. | 6259906ae5267d203ee6cfac |
class CreatePostView(LoginRequiredMixin, CreateView): <NEW_LINE> <INDENT> template_name = 'posts/new.html' <NEW_LINE> form_class = PostForm <NEW_LINE> success_url = reverse_lazy('posts:feed') <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> context['user'] = self.request.user <NEW_LINE> context['profile'] = self.request.user.profile <NEW_LINE> return context | Create a new post | 6259906a0c0af96317c5794d |
class Runner: <NEW_LINE> <INDENT> rmdir_count = 0 <NEW_LINE> rmdir_failed = 0 <NEW_LINE> unlink_count = 0 <NEW_LINE> unlink_failed = 0 | Module-level configuration and value store. | 6259906a435de62698e9d5e8 |
class Post(models.Model): <NEW_LINE> <INDENT> description = models.TextField() | Model for post resource; only needs 'description' field | 6259906a2c8b7c6e89bd4fc3 |
class faq_publisher(webdav_publisher.webdav_publisher): <NEW_LINE> <INDENT> meta_type = 'FAQ publisher' <NEW_LINE> publisher = 1 <NEW_LINE> manage_options = (OFS.Folder.Folder.manage_options[0],) + ( {'label': 'View', 'action': ''}, {'label': 'Security', 'action': 'manage_access'}, ) <NEW_LINE> security = AccessControl.ClassSecurityInfo() <NEW_LINE> security.setDefaultAccess('allow') <NEW_LINE> security.declareProtected(permissions.edit_publishers, 'edit', 'index_html') <NEW_LINE> publish = PageTemplateFile('publish.pt', globals()) <NEW_LINE> security.declareProtected(permissions.publish_issues, 'publish_issue') <NEW_LINE> def publish_issue(self, issue): <NEW_LINE> <INDENT> self.REQUEST['issue'] = issue <NEW_LINE> return self.publish() <NEW_LINE> <DEDENT> security.declareProtected(permissions.edit_publishers, 'manage_edit') <NEW_LINE> def manage_edit(self, title='', REQUEST=None): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> if REQUEST: <NEW_LINE> <INDENT> REQUEST.RESPONSE.redirect(self.absolute_url()) | FAQ publisher class.
An FAQ publisher publishes issues (questions, and optionally solutions) to a FAQ | 6259906afff4ab517ebceff8 |
class CHSAlumniSkateDateStaffListView(LoginRequiredMixin, ListView): <NEW_LINE> <INDENT> model = CHSAlumniDate <NEW_LINE> context_object_name = 'skate_dates' <NEW_LINE> template_name = 'chs_alumni_skate_sessions_list.html' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = super().get_queryset() <NEW_LINE> queryset = queryset.filter(skate_date__gte=date.today()).order_by('skate_date') <NEW_LINE> return queryset | Displays a page listing upcoming CHS Alumni Skate dates, with buttons for viewing registered skaters. | 6259906a45492302aabfdcb5
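As a hedged illustration, the view above would typically be wired into a URL configuration like this; the route path and name are assumptions, not part of the original project.

```python
# Assumes CHSAlumniSkateDateStaffListView above is importable from the app's views module.
from django.urls import path

urlpatterns = [
    path('alumni-skates/', CHSAlumniSkateDateStaffListView.as_view(), name='chs-alumni-skates'),
]
```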
class TutorFactory(factory.django.DjangoModelFactory): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = models.Tutor <NEW_LINE> <DEDENT> user = factory.SubFactory(UserFactory) <NEW_LINE> promotion = factory.Iterator([_this_year, _this_year + 1, _this_year + 2]) <NEW_LINE> address = factory.SubFactory(AddressFactory) | Tutor object factory. | 6259906ad6c5a102081e3906 |
class HtmlFormatter(logging.Formatter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> logging.Formatter.__init__( self, "%(asctime)s %(levelname)s %(name)s %(message)s", "%Y-%m-%d %H:%M:%S", ) <NEW_LINE> <DEDENT> def formatException(self, exc_info): <NEW_LINE> <INDENT> exc = logging.Formatter.formatException(self, exc_info) <NEW_LINE> return """%s""" % exc | Formatter for the logging class | 6259906a091ae35668706410 |
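A short usage sketch for the formatter above, assuming the class is in scope; the timestamp in the comment is illustrative.

```python
import logging

# Assumes the HtmlFormatter class above is in scope.
handler = logging.StreamHandler()
handler.setFormatter(HtmlFormatter())

logger = logging.getLogger("demo")
logger.addHandler(handler)
logger.warning("something happened")
# e.g. 2024-01-01 12:00:00 WARNING demo something happened
```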
class BGrid_GFDL(object): <NEW_LINE> <INDENT> def __init__(self, lon_t, lat_t, lon_uv, lat_uv, mask_t, mask_uv, h, z_t, z_t_edges, z_uv, z_uv_edges, f, name, xrange, yrange): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.xrange = xrange <NEW_LINE> self.yrange = yrange <NEW_LINE> self.lon_t = lon_t[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] <NEW_LINE> self.lat_t = lat_t[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] <NEW_LINE> self.lon_uv = lon_uv[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] <NEW_LINE> self.lat_uv = lat_uv[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] <NEW_LINE> self.mask_t = mask_t[:,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] <NEW_LINE> self.mask_uv = mask_uv[:,yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] <NEW_LINE> self.h = h[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] <NEW_LINE> self.z_t = z_t <NEW_LINE> self.z_t_edges = z_t_edges <NEW_LINE> self.z_uv = z_uv <NEW_LINE> self.z_uv_edges = z_uv_edges <NEW_LINE> self.f = f[yrange[0]:yrange[1]+1, xrange[0]:xrange[1]+1] <NEW_LINE> self.lon_t_vert = lon_uv[yrange[0]-1:yrange[1]+1, xrange[0]-1:xrange[1]+1] <NEW_LINE> self.lat_t_vert = lat_uv[yrange[0]-1:yrange[1]+1, xrange[0]-1:xrange[1]+1] <NEW_LINE> self.lon_uv_vert = lon_t[yrange[0]:yrange[1]+2, xrange[0]:xrange[1]+2] <NEW_LINE> self.lat_uv_vert = lat_t[yrange[0]:yrange[1]+2, xrange[0]:xrange[1]+2] <NEW_LINE> self._calculate_grid_angle() <NEW_LINE> <DEDENT> def _calculate_grid_angle(self): <NEW_LINE> <INDENT> geod = pyproj.Geod(ellps='WGS84') <NEW_LINE> sizey, sizex = self.lon_t_vert.shape <NEW_LINE> angle = np.zeros(self.h.shape) <NEW_LINE> for i in range(sizex-1): <NEW_LINE> <INDENT> az_forward, az_back, dx = geod.inv(self.lon_t_vert[:,i], self.lat_t_vert[:,i], self.lon_t_vert[:,i+1], self.lat_t_vert[:,i+1]) <NEW_LINE> angle[:,i] = 0.5 * (az_forward[1:] + az_forward[:-1]) <NEW_LINE> <DEDENT> self.angle = (90 - angle) * np.pi/180. | Arakawa B-Grid for GFDL CM2.1 | 6259906aa8370b77170f1ba3 |
class DatabaseLock(object): <NEW_LINE> <INDENT> def __init__(self, key, timeout=86400, grace=None): <NEW_LINE> <INDENT> self.key = "lock:%s" % key <NEW_LINE> self.timeout = timeout <NEW_LINE> self.grace = grace <NEW_LINE> self.instance_id = uuid.uuid1().hex <NEW_LINE> <DEDENT> def acquire(self, blocking=True): <NEW_LINE> <INDENT> from .models import Lock <NEW_LINE> try: <NEW_LINE> <INDENT> lock = Lock.objects.create(key=self.key) <NEW_LINE> <DEDENT> except IntegrityError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> lock.value = self.instance_id <NEW_LINE> lock.save() <NEW_LINE> return True <NEW_LINE> <DEDENT> def release(self): <NEW_LINE> <INDENT> from .models import Lock <NEW_LINE> lock = Lock.objects.get(key=self.key, value=self.instance_id) <NEW_LINE> if lock: <NEW_LINE> <INDENT> lock.delete() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.warning("I've no lock in DB to release. Increase TIMEOUT of lock operations") | Tries to behave like threading.Lock, but stores the lock in a database row
so it works as a simple distributed lock across processes | 6259906aa219f33f346c7fe5
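A hedged usage sketch for DatabaseLock above; it assumes the surrounding Django app defines the Lock model the class imports, and `run_report` is a hypothetical stand-in for the critical section.

```python
# Assumes DatabaseLock above is importable from a Django app that defines the Lock model.
lock = DatabaseLock("nightly-report", timeout=3600)

if lock.acquire():        # False if another process already created the "lock:nightly-report" row
    try:
        run_report()      # hypothetical work that must not run concurrently
    finally:
        lock.release()    # deletes the row tagged with this instance_id
```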
class TestCrowdMap(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self._map = CrowdMap('sandyaiddev.crowdmap.com/api') <NEW_LINE> self.categories = {u'1': u'Category 1', u'3': u'Category 3', u'2': u'Category 2', u'4': u'Trusted Reports'} <NEW_LINE> <DEDENT> def test_categories(self): <NEW_LINE> <INDENT> self.assertEqual(self._map.categories, self.categories) <NEW_LINE> <DEDENT> def test_get_categories(self): <NEW_LINE> <INDENT> self.assertEqual(self._map.get_categories(), self.categories) <NEW_LINE> <DEDENT> def test_add_tweet(self): <NEW_LINE> <INDENT> tweet = TestTwitter.get_test_tweet() <NEW_LINE> response = self._map.add_tweet(tweet, ['1', '3'], '520 clinton ave, ny') <NEW_LINE> self.assertTrue('payload' in response and 'success' in response['payload'] and response['payload']['success'] == 'true') | Simple tests for the CrowdMap class.
You can run them with: python -m unittest crowdmap_bot | 6259906afff4ab517ebceff9 |
class Dimecoin(Bitcoin): <NEW_LINE> <INDENT> name = 'dimecoin' <NEW_LINE> symbols = ('DIME', ) <NEW_LINE> nodes = ('217.175.119.125', '184.164.129.202', '200.123.47.184', '13.81.2.56', '189.27.221.173', '45.116.233.61', '200.123.47.184') <NEW_LINE> port = 11931 <NEW_LINE> message_start = b'\xfe\xa5\x03\xdd' <NEW_LINE> base58_prefixes = { 'PUBKEY_ADDR': 15, 'SCRIPT_ADDR': 9, 'SECRET_KEY': 143 } | Class with all the necessary Dimecoin (DIME) network information based on
https://github.com/dime-coin/dimecoin/blob/master/src/net.cpp
(date of access: 02/16/2018) | 6259906a4527f215b58eb58f |
class RegIvaDialog(ga._AnagDialog): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if not kwargs.has_key('title') and len(args) < 3: <NEW_LINE> <INDENT> kwargs['title'] = FRAME_TITLE <NEW_LINE> <DEDENT> ga._AnagDialog.__init__(self, *args, **kwargs) <NEW_LINE> self.LoadAnagPanel(RegIvaPanel(self, -1)) | Dialog for managing the Registri IVA (VAT registers) table. | 6259906a8a43f66fc4bf3971
class MessageFilter(object): <NEW_LINE> <INDENT> default_filter_type = 'json' <NEW_LINE> filter_class_by_type = { "json": BasicDjangoFilter, } <NEW_LINE> def __init__(self, type, data=None, query_params=None, *args, **kwargs): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> filter_class = self.filter_class_by_type[type] <NEW_LINE> self._filter = filter_class(data=data, query_params=query_params, *args, **kwargs) <NEW_LINE> <DEDENT> def get_filter_data(self): <NEW_LINE> <INDENT> return self._filter.data <NEW_LINE> <DEDENT> def __call__(self, queryset=None): <NEW_LINE> <INDENT> if self._filter is not None: <NEW_LINE> <INDENT> return self._filter.filter(queryset=queryset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return queryset | A generic message filter object that looks
up the proper filter implementation based on
the given filter type. | 6259906a8e71fb1e983bd2a5 |
class RawOutstreamFile: <NEW_LINE> <INDENT> def __init__(self, outfile=''): <NEW_LINE> <INDENT> self.buffer = StringIO() <NEW_LINE> self.outfile = outfile <NEW_LINE> <DEDENT> def writeSlice(self, str_slice): <NEW_LINE> <INDENT> self.buffer.write(str_slice) <NEW_LINE> <DEDENT> def writeBew(self, value, length=1): <NEW_LINE> <INDENT> self.writeSlice(writeBew(value, length)) <NEW_LINE> <DEDENT> def writeVarLen(self, value): <NEW_LINE> <INDENT> var = self.writeSlice(writeVar(value)) <NEW_LINE> <DEDENT> def write(self): <NEW_LINE> <INDENT> if self.outfile: <NEW_LINE> <INDENT> if isinstance(self.outfile, StringType): <NEW_LINE> <INDENT> outfile = open(self.outfile, 'wb') <NEW_LINE> outfile.write(self.getvalue()) <NEW_LINE> outfile.close() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.outfile.write(self.getvalue()) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> sys.stdout.write(self.getvalue()) <NEW_LINE> <DEDENT> <DEDENT> def getvalue(self): <NEW_LINE> <INDENT> return self.buffer.getvalue() | Writes a midi file to disk.
Parameters
----------
outfile : str, optional. Default: ''
name of the file to write; may include a directory path. | 6259906a097d151d1a2c284c
class _DLPolyParser(object): <NEW_LINE> <INDENT> def tearDown(self): <NEW_LINE> <INDENT> del self.p <NEW_LINE> del self.f <NEW_LINE> <DEDENT> def test_usage(self): <NEW_LINE> <INDENT> with self.p(self.f) as parser: <NEW_LINE> <INDENT> struc = parser.parse() <NEW_LINE> <DEDENT> assert_equal('atoms' in struc, True) <NEW_LINE> assert_equal(len(struc['atoms']), 216) <NEW_LINE> <DEDENT> def test_names(self): <NEW_LINE> <INDENT> with self.p(self.f) as parser: <NEW_LINE> <INDENT> struc = parser.parse() <NEW_LINE> <DEDENT> atoms = struc['atoms'] <NEW_LINE> assert_equal(atoms[0].name, 'K+') <NEW_LINE> assert_equal(atoms[4].name, 'Cl-') | Test of real data | 6259906a56b00c62f0fb40ae |
class ReadWriteMutex(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.async_ = 0 <NEW_LINE> self.current_sync_operation = None <NEW_LINE> self.condition = threading.Condition(threading.Lock()) <NEW_LINE> <DEDENT> def acquire_read_lock(self, wait=True): <NEW_LINE> <INDENT> self.condition.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> if wait: <NEW_LINE> <INDENT> while self.current_sync_operation is not None: <NEW_LINE> <INDENT> self.condition.wait() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.current_sync_operation is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> self.async_ += 1 <NEW_LINE> log.debug("%s acquired read lock", self) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.condition.release() <NEW_LINE> <DEDENT> if not wait: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def release_read_lock(self): <NEW_LINE> <INDENT> self.condition.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> self.async_ -= 1 <NEW_LINE> if self.async_ == 0: <NEW_LINE> <INDENT> if self.current_sync_operation is not None: <NEW_LINE> <INDENT> self.condition.notifyAll() <NEW_LINE> <DEDENT> <DEDENT> elif self.async_ < 0: <NEW_LINE> <INDENT> raise LockError("Synchronizer error - too many " "release_read_locks called") <NEW_LINE> <DEDENT> log.debug("%s released read lock", self) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.condition.release() <NEW_LINE> <DEDENT> <DEDENT> def acquire_write_lock(self, wait=True): <NEW_LINE> <INDENT> self.condition.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> if wait: <NEW_LINE> <INDENT> while self.current_sync_operation is not None: <NEW_LINE> <INDENT> self.condition.wait() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.current_sync_operation is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> self.current_sync_operation = threading.currentThread() <NEW_LINE> if self.async_ > 0: <NEW_LINE> <INDENT> if wait: <NEW_LINE> <INDENT> self.condition.wait() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.current_sync_operation = None <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> log.debug("%s acquired write lock", self) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.condition.release() <NEW_LINE> <DEDENT> if not wait: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def release_write_lock(self): <NEW_LINE> <INDENT> self.condition.acquire() <NEW_LINE> try: <NEW_LINE> <INDENT> if self.current_sync_operation is not threading.currentThread(): <NEW_LINE> <INDENT> raise LockError("Synchronizer error - current thread doesn't " "have the write lock") <NEW_LINE> <DEDENT> self.current_sync_operation = None <NEW_LINE> self.condition.notifyAll() <NEW_LINE> log.debug("%s released write lock", self) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.condition.release() | A mutex which allows multiple readers, single writer.
:class:`.ReadWriteMutex` uses a Python ``threading.Condition``
to provide this functionality across threads within a process.
The Beaker package also contained a file-lock based version
of this concept, so that readers/writers could be synchronized
across processes with a common filesystem. A future Dogpile
release may include this additional class at some point. | 6259906a0c0af96317c5794e |
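A short usage sketch for ReadWriteMutex above, assuming the class (and the module-level `log` it writes to) is in scope; several readers may hold the lock at once, while a writer waits for exclusive access.

```python
import threading

# Assumes ReadWriteMutex above is in scope with its module-level `log` configured.
mutex = ReadWriteMutex()
shared = []

def reader():
    mutex.acquire_read_lock()
    try:
        _ = list(shared)           # many readers can be in here concurrently
    finally:
        mutex.release_read_lock()

def writer(value):
    mutex.acquire_write_lock()
    try:
        shared.append(value)       # exclusive: no readers or other writers active
    finally:
        mutex.release_write_lock()

threads = [threading.Thread(target=reader) for _ in range(3)]
threads.append(threading.Thread(target=writer, args=(1,)))
for t in threads:
    t.start()
for t in threads:
    t.join()
```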
class SpeechRecognitionResult(RecognitionResult): <NEW_LINE> <INDENT> def __init__(self, impl_result): <NEW_LINE> <INDENT> super().__init__(impl_result) | Base class for speech recognition results. | 6259906a4f6381625f19a096 |
class KGDatasetWN18(KGDataset): <NEW_LINE> <INDENT> def __init__(self, path, name='wn18'): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> url = 'https://data.dgl.ai/dataset/{}.zip'.format(name) <NEW_LINE> if not os.path.exists(os.path.join(path, name)): <NEW_LINE> <INDENT> print('File not found. Downloading from', url) <NEW_LINE> _download_and_extract(url, path, name + '.zip') <NEW_LINE> <DEDENT> self.path = os.path.join(path, name) <NEW_LINE> super(KGDatasetWN18, self).__init__(os.path.join(self.path, 'entities.dict'), os.path.join(self.path, 'relations.dict'), os.path.join(self.path, 'train.txt'), os.path.join(self.path, 'valid.txt'), os.path.join(self.path, 'test.txt')) | Load a knowledge graph wn18
The wn18 dataset has five files:
* entities.dict stores the mapping between entity Id and entity name.
* relations.dict stores the mapping between relation Id and relation name.
* train.txt stores the triples in the training set.
* valid.txt stores the triples in the validation set.
* test.txt stores the triples in the test set.
The mapping between entity (relation) name and entity (relation) Id is stored as 'name id'.
The triples are stored as 'head_nid relation_id tail_nid'. | 6259906aa8370b77170f1ba4 |
class Object_dectector: <NEW_LINE> <INDENT> def __init__(self,cfg_path,threshold=0.5): <NEW_LINE> <INDENT> self.cfg = get_cfg() <NEW_LINE> self.cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = threshold <NEW_LINE> self.cfg.merge_from_file(model_zoo.get_config_file(cfg_path)) <NEW_LINE> self.cfg.MODEL.WEIGHTS = model_zoo.get_checkpoint_url(cfg_path) <NEW_LINE> self.predictor = DefaultPredictor(self.cfg) <NEW_LINE> <DEDENT> def detect(self,image_path,out_dir=None): <NEW_LINE> <INDENT> img = cv2.imread(image_path) <NEW_LINE> outputs = self.predictor(img) <NEW_LINE> v = Visualizer(img[:, :, ::-1], MetadataCatalog.get(self.cfg.DATASETS.TRAIN[0]), scale=1.2) <NEW_LINE> out = v.draw_instance_predictions(outputs["instances"].to("cpu")) <NEW_LINE> if out_dir: <NEW_LINE> <INDENT> cv2.imwrite(out_dir,out.get_image()[:, :, ::-1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cv2.imwrite("img", out.get_image()[:, :, ::-1]) <NEW_LINE> <DEDENT> <DEDENT> def detect_imgs(self,folder_path,out_dir): <NEW_LINE> <INDENT> if not os.path.exists(folder_path): <NEW_LINE> <INDENT> raise ValueError("Folder path doesn't exist") <NEW_LINE> <DEDENT> if not os.path.exists(out_dir): <NEW_LINE> <INDENT> os.makedirs(out_dir) <NEW_LINE> <DEDENT> for file in tqdm(os.listdir(folder_path)): <NEW_LINE> <INDENT> img = folder_path+file <NEW_LINE> self.detect(img,out_dir+file) | Detect objects in an image using models supported by detectron2 | 6259906a45492302aabfdcb7 |
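A hedged usage sketch for the detector class above; the config path is a standard detectron2 model-zoo entry, the image paths are illustrative, and a working detectron2 install (with network access for the weights) is assumed.

```python
# Assumes the Object_dectector class above is importable and detectron2 is installed.
detector = Object_dectector("COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml", threshold=0.5)

# Annotate a single image (paths are illustrative).
detector.detect("input.jpg", out_dir="annotated.jpg")

# Annotate every image in a folder, writing results into another folder.
detector.detect_imgs("images/", "annotated/")
```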
class PEP3101FormattingWrapperTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.observer = mock.Mock() <NEW_LINE> self.wrapper = PEP3101FormattingWrapper(self.observer) <NEW_LINE> <DEDENT> def test_why_is_None(self): <NEW_LINE> <INDENT> self.wrapper({'why': None, 'key': 'value'}) <NEW_LINE> self.observer.assert_called_once_with({'why': None, 'key': 'value'}) <NEW_LINE> <DEDENT> def test_format_why(self): <NEW_LINE> <INDENT> self.wrapper({'why': 'Hello {name}', 'name': 'World'}) <NEW_LINE> self.observer.assert_called_once_with({'why': 'Hello World', 'name': 'World'}) <NEW_LINE> <DEDENT> def test_format_message(self): <NEW_LINE> <INDENT> self.wrapper({'message': ('foo {bar}',), 'bar': 'bar'}) <NEW_LINE> self.observer.assert_called_once_with( {'message': ('foo bar',), 'bar': 'bar'}) <NEW_LINE> <DEDENT> def test_format_message_tuple(self): <NEW_LINE> <INDENT> self.wrapper({'message': ('foo', 'bar', 'baz', '{bax}'), 'bax': 'bax'}) <NEW_LINE> self.observer.assert_called_once_with( {'message': ('foo bar baz bax',), 'bax': 'bax'}) <NEW_LINE> <DEDENT> def test_formatting_failure(self): <NEW_LINE> <INDENT> self.wrapper({'message': ('{u"Hello": "There"}',)}) <NEW_LINE> self.observer.assert_called_once_with({ 'message': '{u"Hello": "There"}', 'message_formatting_error': matches(Contains('KeyError')) }) | Test the PEP3101 Formatting. | 6259906abe8e80087fbc086b |
class ConnectionContext(rpc_common.Connection): <NEW_LINE> <INDENT> def __init__(self, connection_pool, pooled=True): <NEW_LINE> <INDENT> self.connection = None <NEW_LINE> self.connection_pool = connection_pool <NEW_LINE> if pooled: <NEW_LINE> <INDENT> self.connection = connection_pool.get() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.connection = connection_pool.create() <NEW_LINE> <DEDENT> self.pooled = pooled <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def _done(self): <NEW_LINE> <INDENT> if self.connection: <NEW_LINE> <INDENT> if self.pooled: <NEW_LINE> <INDENT> self.connection.reset() <NEW_LINE> self.connection_pool.put(self.connection) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.connection.close() <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.connection = None <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, exc_type, exc_value, tb): <NEW_LINE> <INDENT> self._done() <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self._done() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._done() <NEW_LINE> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> if self.connection: <NEW_LINE> <INDENT> return getattr(self.connection, key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise rpc_common.InvalidRPCConnectionReuse() | The class that is actually returned to the create_connection() caller.
This is essentially a wrapper around Connection that supports 'with'.
It can also return a new Connection, or one from a pool.
The function will also catch when an instance of this class is to be
deleted. With that we can return Connections to the pool on exceptions
and so forth without making the caller be responsible for catching them.
If possible the function makes sure to return a connection to the pool. | 6259906a1f037a2d8b9e545a |
class DPeptideChemComp(PeptideChemComp): <NEW_LINE> <INDENT> type = 'D-peptide linking' | A single peptide component with (unusual) D- chirality.
See :class:`ChemComp` for a description of the parameters. | 6259906a091ae35668706412 |
class wgssplit(dsl.ContainerOp): <NEW_LINE> <INDENT> def __init__(self, validate=None): <NEW_LINE> <INDENT> super(wgssplit, self).__init__( name='wgs-split-first', image='10.18.101.90:80/library/wgs-split:latest', command=['./root/app/split.sh'], arguments=[ '--validate', validate, ], file_outputs={ 'split': '/output.txt', }) | test images-wgs-nfs:v1 | 6259906a7cff6e4e811b7229 |
class UsersViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Users.objects.all().order_by('-date_joined') <NEW_LINE> serializer_class = UsersSerializer | API endpoint that allows users to be viewed or edited. | 6259906aaad79263cf42ff95 |
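As a hedged example, the viewset above would normally be exposed through a Django REST Framework router; the route prefix is an assumption.

```python
# Assumes UsersViewSet above is importable; a typical DRF router hookup.
from rest_framework import routers

router = routers.DefaultRouter()
router.register(r'users', UsersViewSet)

urlpatterns = router.urls
```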
class CompressorFileAltStorage(CompressorFileStorage): <NEW_LINE> <INDENT> def __init__(self, location=None, base_url=None, *args, **kwargs): <NEW_LINE> <INDENT> if location is None: <NEW_LINE> <INDENT> location = settings.COMPRESS_SOURCE_ROOT <NEW_LINE> <DEDENT> base_url = None <NEW_LINE> super(CompressorFileAltStorage, self).__init__(location, base_url, *args, **kwargs) | This alternative django-compressor storage class is utilised
specifically for CompressorAltFinder which allows an independent
find path.
The default for ``location`` is ``COMPRESS_SOURCE_ROOT``. | 6259906a442bda511e95d948 |
class Document(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Document, self).__init__() <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def add_style_as_used(self, name): <NEW_LINE> <INDENT> if name not in self.used_styles: <NEW_LINE> <INDENT> self.used_styles.append(name) <NEW_LINE> <DEDENT> <DEDENT> def add_font_as_used(self, sz): <NEW_LINE> <INDENT> fsz = int(sz) / 2 <NEW_LINE> self.used_font_size[fsz] += 1 <NEW_LINE> <DEDENT> def get_styles(self, name): <NEW_LINE> <INDENT> styles = [] <NEW_LINE> while True: <NEW_LINE> <INDENT> style = self.styles.get_by_id(name) <NEW_LINE> styles.append(style) <NEW_LINE> if style.based_on == '': <NEW_LINE> <INDENT> return styles <NEW_LINE> <DEDENT> name = style.based_on <NEW_LINE> <DEDENT> <DEDENT> def _calculate_possible_headers(self): <NEW_LINE> <INDENT> _headers = [] <NEW_LINE> _text = [] <NEW_LINE> max_count = sum(six.itervalues(self.usage_font_size)) <NEW_LINE> from .serialize import _get_font_size <NEW_LINE> for name in self.used_styles: <NEW_LINE> <INDENT> _style = self.styles.get_by_id(name) <NEW_LINE> font_size = _get_font_size(self, _style) <NEW_LINE> if font_size != -1 and font_size not in _headers: <NEW_LINE> <INDENT> _headers.append(font_size) <NEW_LINE> <DEDENT> <DEDENT> self.possible_headers_style = [x for x in reversed(sorted(_headers))] <NEW_LINE> _text_list = collections.Counter() <NEW_LINE> for font_size, amount in six.iteritems(self.usage_font_size): <NEW_LINE> <INDENT> if float(amount) / max_count <= 0.1: <NEW_LINE> <INDENT> if font_size not in _headers: <NEW_LINE> <INDENT> _headers.append(font_size) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> _text.append(font_size) <NEW_LINE> _text_list[font_size] = amount <NEW_LINE> <DEDENT> <DEDENT> self.possible_headers = [x for x in reversed(sorted(_headers))] <NEW_LINE> self.possible_text = [x for x in reversed(sorted(_text))] <NEW_LINE> if len(self.possible_text) > 0: <NEW_LINE> <INDENT> for value in self.possible_headers[:]: <NEW_LINE> <INDENT> if self.possible_text[0] >= value: <NEW_LINE> <INDENT> self.possible_headers.remove(value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> _mc = _text_list.most_common(1) <NEW_LINE> if len(_mc) > 0: <NEW_LINE> <INDENT> self.base_font_size = _mc[0][0] <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.elements = [] <NEW_LINE> self.relationships = {'document': {}, 'endnotes': {}, 'footnotes': {}} <NEW_LINE> self.footnotes = {} <NEW_LINE> self.endnotes = {} <NEW_LINE> self.comments = {} <NEW_LINE> self.numbering = {} <NEW_LINE> self.abstruct_numbering = {} <NEW_LINE> self.styles = StylesCollection() <NEW_LINE> self.default_style = None <NEW_LINE> self.used_styles = [] <NEW_LINE> self.used_font_size = collections.Counter() <NEW_LINE> self.usage_font_size = collections.Counter() <NEW_LINE> self.possible_headers_style = [] <NEW_LINE> self.possible_headers = [] <NEW_LINE> self.possible_text = [] <NEW_LINE> self.base_font_size = -1 | Represents OOXML document. | 6259906a8e7ae83300eea86e |
class ParameterStructure(dict): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def flatten_dict(dict_: dict): <NEW_LINE> <INDENT> new_dict = {} <NEW_LINE> for key, value in dict_.items(): <NEW_LINE> <INDENT> if isinstance(value, dict): <NEW_LINE> <INDENT> flattened = ParameterStructure.flatten_dict(value) <NEW_LINE> for key_flat, value_flat in flattened.items(): <NEW_LINE> <INDENT> new_dict.update({str(key) + "." + key_flat: value_flat}) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> new_dict.update({key: value}) <NEW_LINE> <DEDENT> <DEDENT> return new_dict <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if len(args) > 0 and len(kwargs) > 0: <NEW_LINE> <INDENT> raise Exception("Only keyword or dictionary allowed") <NEW_LINE> <DEDENT> if len(args) > 0: <NEW_LINE> <INDENT> flattened = ParameterStructure.flatten_dict(args[0]) <NEW_LINE> <DEDENT> elif len(kwargs) > 0: <NEW_LINE> <INDENT> flattened = ParameterStructure.flatten_dict(kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flattened = {} <NEW_LINE> <DEDENT> super().__init__(flattened) | Basic functionality of a structure containing parameters. | 6259906a32920d7e50bc7826 |
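A quick sketch of the dot-flattening behaviour above, assuming ParameterStructure is in scope (it subclasses dict, so the flattened keys can be inspected directly).

```python
# Assumes the ParameterStructure class above is in scope.
params = ParameterStructure({"optimizer": {"lr": 0.01, "momentum": 0.9}, "epochs": 10})
print(dict(params))
# {'optimizer.lr': 0.01, 'optimizer.momentum': 0.9, 'epochs': 10}

# The keyword form is equivalent; passing both a dict and keywords raises an Exception.
same = ParameterStructure(optimizer={"lr": 0.01, "momentum": 0.9}, epochs=10)
print(params == same)  # True
```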
class MinimaxAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def maxValue(self, gameState, depth, agent): <NEW_LINE> <INDENT> if gameState.isWin() or gameState.isLose() or depth == self.depth: <NEW_LINE> <INDENT> return self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> v = float("-inf") <NEW_LINE> actions = gameState.getLegalActions(agent) <NEW_LINE> for each in actions: <NEW_LINE> <INDENT> succState = gameState.generateSuccessor(agent, each) <NEW_LINE> v = max(v, self.minValue(succState, depth, 1)) <NEW_LINE> <DEDENT> return v <NEW_LINE> <DEDENT> def minValue(self, gameState, depth, agent): <NEW_LINE> <INDENT> if gameState.isWin() or gameState.isLose() or depth == self.depth: <NEW_LINE> <INDENT> return self.evaluationFunction(gameState) <NEW_LINE> <DEDENT> returntoPacman = False <NEW_LINE> if agent == gameState.getNumAgents() - 1: <NEW_LINE> <INDENT> returntoPacman = True <NEW_LINE> <DEDENT> v = float("inf") <NEW_LINE> actions = gameState.getLegalActions(agent) <NEW_LINE> for each in actions: <NEW_LINE> <INDENT> succState = gameState.generateSuccessor(agent, each) <NEW_LINE> if not returntoPacman: <NEW_LINE> <INDENT> v = min(v, self.minValue(succState, depth, agent + 1)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> v = min(v, self.maxValue(succState, depth + 1, 0)) <NEW_LINE> <DEDENT> <DEDENT> return v <NEW_LINE> <DEDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> def minimax(gameState): <NEW_LINE> <INDENT> actions = gameState.getLegalActions(0) <NEW_LINE> maxVal = float("-inf") <NEW_LINE> action = None <NEW_LINE> for each in actions: <NEW_LINE> <INDENT> succState = gameState.generateSuccessor(0, each) <NEW_LINE> v = self.minValue(succState, 0, 1) <NEW_LINE> if v > maxVal: <NEW_LINE> <INDENT> maxVal = v <NEW_LINE> action = each <NEW_LINE> <DEDENT> <DEDENT> return action <NEW_LINE> <DEDENT> return minimax(gameState) | Your minimax agent (question 2) | 6259906a2c8b7c6e89bd4fc6 |
@resources.register('rds-snapshot') <NEW_LINE> class RDSSnapshot(QueryResourceManager): <NEW_LINE> <INDENT> class Meta(object): <NEW_LINE> <INDENT> service = 'rds' <NEW_LINE> type = 'rds-snapshot' <NEW_LINE> enum_spec = ('describe_db_snapshots', 'DBSnapshots', None) <NEW_LINE> name = id = 'DBSnapshotIdentifier' <NEW_LINE> filter_name = None <NEW_LINE> filter_type = None <NEW_LINE> dimension = None <NEW_LINE> date = 'SnapshotCreateTime' <NEW_LINE> <DEDENT> resource_type = Meta <NEW_LINE> filter_registry = FilterRegistry('rds-snapshot.filters') <NEW_LINE> action_registry = ActionRegistry('rds-snapshot.actions') | Resource manager for RDS DB snapshots.
| 6259906a92d797404e38974c |
class DMS_Feed(_CHART): <NEW_LINE> <INDENT> service_url = 'https://chart.maryland.gov/rss/ProduceRss.aspx?Type=DMSXML' <NEW_LINE> @classmethod <NEW_LINE> def get_msg_boards(cls): <NEW_LINE> <INDENT> return cls.get_geojson(cls.service_url) | Access DMS XML from CHART. | 6259906a5fcc89381b266d47 |
class QueueScaleCountView(GenericAPIView): <NEW_LINE> <INDENT> parser_classes = (JSONParser,) <NEW_LINE> queryset = Queue.objects.all() <NEW_LINE> serializer_class = QueueStatusSerializerV6 <NEW_LINE> def post(self, request): <NEW_LINE> <INDENT> if request.version == 'v6' or request.version == 'v7': <NEW_LINE> <INDENT> return self.post_v6(request) <NEW_LINE> <DEDENT> raise Http404() <NEW_LINE> <DEDENT> def post_v6(self, request): <NEW_LINE> <INDENT> return self.queue_count_jobs(request) <NEW_LINE> <DEDENT> def queue_count_jobs(self, request): <NEW_LINE> <INDENT> num = rest_util.parse_int(request, 'num') <NEW_LINE> if num < 1: <NEW_LINE> <INDENT> raise BadParameter('num must be at least 1') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> recipe_type = RecipeType.objects.get(name='scale-count', revision_num='1') <NEW_LINE> for _ in xrange(num): <NEW_LINE> <INDENT> Queue.objects.queue_new_recipe_for_user_v6(recipe_type, Data()) <NEW_LINE> <DEDENT> <DEDENT> except (InvalidData, InvalidRecipeData, InactiveRecipeType) as ex: <NEW_LINE> <INDENT> message = 'Unable to create new recipe' <NEW_LINE> logger.exception(message) <NEW_LINE> raise BadParameter('%s: %s' % (message, unicode(ex))) <NEW_LINE> <DEDENT> return Response(status=status.HTTP_202_ACCEPTED) | This view is the endpoint for queuing new Scale Count jobs. | 6259906a7d43ff2487428001 |
class Droid(Character): <NEW_LINE> <INDENT> def __init__(self, name='C-3PO', species='Droid', eye_color= 'yellow', birth_year='112BBY', json_dict=None): <NEW_LINE> <INDENT> super().__init__(name, species, json_dict) <NEW_LINE> if json_dict is not None: <NEW_LINE> <INDENT> self.birth_year = json_dict['birth_year'] <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return super().__str__() + ', birth_year: ' + self.birth_year | Droid character (defaults to C-3PO) whose birth_year is read from json_dict when provided. | 6259906a2c8b7c6e89bd4fc7
class KNXClimate(ClimateDevice): <NEW_LINE> <INDENT> def __init__(self, device): <NEW_LINE> <INDENT> self.device = device <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_features(self): <NEW_LINE> <INDENT> support = SUPPORT_TARGET_TEMPERATURE <NEW_LINE> if self.device.supports_operation_mode: <NEW_LINE> <INDENT> support |= SUPPORT_OPERATION_MODE <NEW_LINE> <DEDENT> return support <NEW_LINE> <DEDENT> def async_register_callbacks(self): <NEW_LINE> <INDENT> async def after_update_callback(device): <NEW_LINE> <INDENT> await self.async_update_ha_state() <NEW_LINE> <DEDENT> self.device.register_device_updated_cb(after_update_callback) <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> self.async_register_callbacks() <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.device.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def available(self): <NEW_LINE> <INDENT> return self.hass.data[DATA_KNX].connected <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def temperature_unit(self): <NEW_LINE> <INDENT> return TEMP_CELSIUS <NEW_LINE> <DEDENT> @property <NEW_LINE> def current_temperature(self): <NEW_LINE> <INDENT> return self.device.temperature.value <NEW_LINE> <DEDENT> @property <NEW_LINE> def target_temperature_step(self): <NEW_LINE> <INDENT> return self.device.setpoint_shift_step <NEW_LINE> <DEDENT> @property <NEW_LINE> def target_temperature(self): <NEW_LINE> <INDENT> return self.device.target_temperature.value <NEW_LINE> <DEDENT> @property <NEW_LINE> def min_temp(self): <NEW_LINE> <INDENT> return self.device.target_temperature_min <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_temp(self): <NEW_LINE> <INDENT> return self.device.target_temperature_max <NEW_LINE> <DEDENT> async def async_set_temperature(self, **kwargs): <NEW_LINE> <INDENT> temperature = kwargs.get(ATTR_TEMPERATURE) <NEW_LINE> if temperature is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> await self.device.set_target_temperature(temperature) <NEW_LINE> await self.async_update_ha_state() <NEW_LINE> <DEDENT> @property <NEW_LINE> def current_operation(self): <NEW_LINE> <INDENT> if self.device.supports_operation_mode: <NEW_LINE> <INDENT> return self.device.operation_mode.value <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> @property <NEW_LINE> def operation_list(self): <NEW_LINE> <INDENT> return [operation_mode.value for operation_mode in self.device.get_supported_operation_modes()] <NEW_LINE> <DEDENT> async def async_set_operation_mode(self, operation_mode): <NEW_LINE> <INDENT> if self.device.supports_operation_mode: <NEW_LINE> <INDENT> from xknx.knx import HVACOperationMode <NEW_LINE> knx_operation_mode = HVACOperationMode(operation_mode) <NEW_LINE> await self.device.set_operation_mode(knx_operation_mode) | Representation of a KNX climate device. | 6259906aa8370b77170f1ba6 |
class Permission(DeclarativeBase): <NEW_LINE> <INDENT> __tablename__ = 'tg_permission' <NEW_LINE> permission_id = Column(Integer, autoincrement=True, primary_key=True) <NEW_LINE> permission_name = Column(Unicode(63), unique=True, nullable=False) <NEW_LINE> description = Column(Unicode(255)) <NEW_LINE> groups = relation(Group, secondary=group_permission_table, backref='permissions') <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return ('<Permission: name=%r>' % self.permission_name).encode('utf-8') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.permission_name | Permission definition for :mod:`repoze.what`.
Only the ``permission_name`` column is required by :mod:`repoze.what`. | 6259906abaa26c4b54d50a88 |