code (stringlengths 4 to 4.48k) | docstring (stringlengths 1 to 6.45k) | _id (stringlengths 24) |
---|---|---|
class BaseMasker(BaseEstimator, TransformerMixin, CacheMixin): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def transform_single_imgs(self, imgs, confounds=None, copy=True): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def transform(self, imgs, confounds=None): <NEW_LINE> <INDENT> self._check_fitted() <NEW_LINE> return self.transform_single_imgs(imgs, confounds) <NEW_LINE> <DEDENT> def fit_transform(self, X, y=None, confounds=None, **fit_params): <NEW_LINE> <INDENT> if y is None: <NEW_LINE> <INDENT> if self.mask_img is None: <NEW_LINE> <INDENT> return self.fit(X, **fit_params ).transform(X, confounds=confounds) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.fit(**fit_params).transform(X, confounds=confounds) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.mask_img is None: <NEW_LINE> <INDENT> return self.fit(X, y, **fit_params ).transform(X, confounds=confounds) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> warnings.warn('[%s.fit] Generation of a mask has been' ' requested (y != None) while a mask has' ' been provided at masker creation. Given mask' ' will be used.' % self.__class__.__name__) <NEW_LINE> return self.fit(**fit_params).transform(X, confounds=confounds) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def inverse_transform(self, X): <NEW_LINE> <INDENT> img = self._cache(masking.unmask)(X, self.mask_img_) <NEW_LINE> try: <NEW_LINE> <INDENT> img._header._structarr = np.array(img._header._structarr).copy() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return img <NEW_LINE> <DEDENT> def _check_fitted(self): <NEW_LINE> <INDENT> if not hasattr(self, "mask_img_"): <NEW_LINE> <INDENT> raise ValueError('It seems that %s has not been fitted. ' 'You must call fit() before calling transform().' % self.__class__.__name__) | Base class for NiftiMaskers
| 6259903c26238365f5fadd58 |
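A minimal usage sketch of the fit/transform flow in the row above, assuming nilearn's NiftiMasker as the concrete subclass of BaseMasker; the tiny random image, identity affine, and all-ones mask are placeholders, not part of the dataset row.

```python
import numpy as np
import nibabel as nib
from nilearn.input_data import NiftiMasker  # newer nilearn releases expose this as nilearn.maskers

# Tiny synthetic 4D image and an all-ones mask so the example is self-contained.
img = nib.Nifti1Image(np.random.rand(4, 4, 4, 10), affine=np.eye(4))
mask = nib.Nifti1Image(np.ones((4, 4, 4), dtype=np.int8), affine=np.eye(4))

masker = NiftiMasker(mask_img=mask)
signals = masker.fit_transform(img)       # mask_img given, so fit() only prepares the mask
print(signals.shape)                      # (10, 64): time points x voxels inside the mask
back = masker.inverse_transform(signals)  # unmask back to a Nifti image
```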
class SimpleDroneSpout(Spout): <NEW_LINE> <INDENT> outputs = ['uid', 'dronetime', 'region', 'altitude', 'latitude', 'longitude'] <NEW_LINE> def initialize(self, stormconf, context): <NEW_LINE> <INDENT> with open("/opt/internal.json") as f: <NEW_LINE> <INDENT> config = json.load(f) <NEW_LINE> <DEDENT> self.internal_config = config <NEW_LINE> self.logger.warn("Internal Config: " + str(config)) <NEW_LINE> self.logger.warn("Storm Config: " + str(stormconf)) <NEW_LINE> self.logger.warn("Storm Context: "+ str(context)) <NEW_LINE> self.timeout = float(config['timeout']) <NEW_LINE> self.ckc = Consumer({'bootstrap.servers': config['kafka'], 'group.id': config['kfgroupid'], 'default.topic.config': {'auto.offset.reset': "smallest"}}) <NEW_LINE> self.ckc.subscribe([config['raw_topic']]) <NEW_LINE> <DEDENT> def next_tuple(self): <NEW_LINE> <INDENT> msg = self.ckc.poll(self.timeout) <NEW_LINE> if msg is not None: <NEW_LINE> <INDENT> text = msg.value().decode("utf-8") <NEW_LINE> if "uid" in text: <NEW_LINE> <INDENT> data = json.loads(text) <NEW_LINE> self.emit([data['uid'], data['dronetime'], data['region'], data['altitude'], data['latitude'], data['longitude']]) | A Storm spout that receives messages from a Kafka broker, sends them for proximity
monitoring, and records its actions.
Tip:
The configuration file, internal.json (see config/internal.json.example) must
be present in the root of this repository | 6259903c26068e7796d4db49 |
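The keys the spout reads from /opt/internal.json can be inferred from initialize(); the sketch below lists them with placeholder values (the host name, group id, and topic name are illustrative assumptions, not taken from the source).

```python
import json

# Placeholder contents for /opt/internal.json, covering the keys read in initialize().
example_internal_config = {
    "timeout": "1.0",            # poll timeout in seconds; the spout casts it to float
    "kafka": "localhost:9092",   # used as bootstrap.servers for the confluent Consumer
    "kfgroupid": "drone-spout",  # consumer group id
    "raw_topic": "drone-raw",    # topic the spout subscribes to
}
print(json.dumps(example_internal_config, indent=2))
```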
class ListCollectionsResponse(): <NEW_LINE> <INDENT> def __init__(self, *, collections=None): <NEW_LINE> <INDENT> self.collections = collections <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> args = {} <NEW_LINE> valid_keys = ['collections'] <NEW_LINE> bad_keys = set(_dict.keys()) - set(valid_keys) <NEW_LINE> if bad_keys: <NEW_LINE> <INDENT> raise ValueError( 'Unrecognized keys detected in dictionary for class ListCollectionsResponse: ' + ', '.join(bad_keys)) <NEW_LINE> <DEDENT> if 'collections' in _dict: <NEW_LINE> <INDENT> args['collections'] = [ Collection._from_dict(x) for x in (_dict.get('collections')) ] <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'collections') and self.collections is not None: <NEW_LINE> <INDENT> _dict['collections'] = [x._to_dict() for x in self.collections] <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | Response object containing an array of collection details.
:attr list[Collection] collections: (optional) An array containing information
about each collection in the project. | 6259903cb5575c28eb7135ca |
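A quick round-trip sketch of the _from_dict/_to_dict pair in the row above; it keeps the list empty so the external Collection helper class is not needed.

```python
# Serialize and deserialize without touching the Collection helper class.
resp = ListCollectionsResponse(collections=[])
print(resp._to_dict())                          # {'collections': []}

empty = ListCollectionsResponse._from_dict({})  # no 'collections' key, so it stays None
print(empty == ListCollectionsResponse())       # True: __eq__ compares the instances' __dict__
```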
class BaseInboxView(OrgPermsMixin, SmartTemplateView): <NEW_LINE> <INDENT> title = None <NEW_LINE> folder = None <NEW_LINE> folder_icon = None <NEW_LINE> template_name = None <NEW_LINE> permission = "orgs.org_inbox" <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(BaseInboxView, self).get_context_data(**kwargs) <NEW_LINE> org = self.request.org <NEW_LINE> user = self.request.user <NEW_LINE> partner = user.get_partner(org) <NEW_LINE> labels = list(Label.get_all(org, user).order_by("name")) <NEW_LINE> Label.bulk_cache_initialize(labels) <NEW_LINE> fields = Field.get_all(org, visible=True).order_by("label") <NEW_LINE> context["context_data_json"] = { "user": {"id": user.pk, "partner": partner.as_json() if partner else None}, "labels": [l.as_json() for l in labels], "fields": [f.as_json() for f in fields], } <NEW_LINE> context["banner_text"] = org.get_banner_text() <NEW_LINE> context["folder"] = self.folder.name <NEW_LINE> context["folder_icon"] = self.folder_icon <NEW_LINE> context["open_case_count"] = Case.get_open(org, user).count() <NEW_LINE> context["closed_case_count"] = Case.get_closed(org, user).count() <NEW_LINE> context["allow_case_without_message"] = getattr(settings, "SITE_ALLOW_CASE_WITHOUT_MESSAGE", False) <NEW_LINE> context["user_must_reply_with_faq"] = org and not user.is_anonymous and user.must_use_faq() <NEW_LINE> context["site_contact_display"] = getattr(settings, "SITE_CONTACT_DISPLAY", "name") <NEW_LINE> context["search_text_days"] = Message.SEARCH_BY_TEXT_DAYS <NEW_LINE> return context | Mixin to add site metadata to the context in JSON format which can then be used | 6259903c3c8af77a43b6883d |
class FacebookBackend(_OAuthBackend): <NEW_LINE> <INDENT> DEFAULT_GRAPH_ENDPOINT = "https://graph.facebook.com/v2.5/me" <NEW_LINE> def __init__(self, outgoing, internal_attributes, config, base_url, name): <NEW_LINE> <INDENT> config.setdefault("response_type", "code") <NEW_LINE> config["verify_accesstoken_state"] = False <NEW_LINE> super().__init__(outgoing, internal_attributes, config, base_url, name, "facebook", "id") <NEW_LINE> <DEDENT> def get_request_args(self, get_state=stateID): <NEW_LINE> <INDENT> request_args = super().get_request_args(get_state=get_state) <NEW_LINE> client_id = self.config["client_config"]["client_id"] <NEW_LINE> extra_args = { arg_name: arg_val for arg_name in ["auth_type", "scope"] for arg_val in [self.config.get(arg_name, [])] if arg_val } <NEW_LINE> extra_args.update({"client_id": client_id}) <NEW_LINE> request_args.update(extra_args) <NEW_LINE> return request_args <NEW_LINE> <DEDENT> def auth_info(self, request): <NEW_LINE> <INDENT> auth_info = AuthenticationInformation(UNSPECIFIED, None, self.config["server_info"]["authorization_endpoint"]) <NEW_LINE> return auth_info <NEW_LINE> <DEDENT> def user_information(self, access_token): <NEW_LINE> <INDENT> payload = {"access_token": access_token} <NEW_LINE> url = self.config["server_info"].get("graph_endpoint", self.DEFAULT_GRAPH_ENDPOINT) <NEW_LINE> if self.config["fields"]: <NEW_LINE> <INDENT> payload["fields"] = ",".join(self.config["fields"]) <NEW_LINE> <DEDENT> resp = requests.get(url, params=payload) <NEW_LINE> data = json.loads(resp.text) <NEW_LINE> try: <NEW_LINE> <INDENT> picture_url = data["picture"]["data"]["url"] <NEW_LINE> data["picture"] = picture_url <NEW_LINE> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return data | Backend module for facebook. | 6259903c15baa72349463193 |
class ModelTrainingParams(base_orm.BaseORM): <NEW_LINE> <INDENT> DB_FIELDS = OrderedDict( [ ("model_hash", str), ("label_rows", list), ("epochs", int), ("batch_size", int), ("weights", ModelTrainingWeights), ("device", str), ] ) | Model training parameters.
ORM:
model_hash,
label_rows,
epochs,
batch_size,
weights,
device | 6259903c287bf620b6272ded |
class UsersViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = User.objects.all().order_by('-date_joined') <NEW_LINE> serializer_class = UserSerializer | API endpoint that allows users to be viewed or edited. | 6259903cd4950a0f3b111741 |
class NiceOD(OrderedDict): <NEW_LINE> <INDENT> def __repr__(self): <NEW_LINE> <INDENT> return dict.__repr__(self) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(str(self)) | A nice ordered dict. aka hashable and prints like a dict. | 6259903c287bf620b6272dee |
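The behaviour described in the docstring is easiest to see directly; the sketch below restates the class unflattened and exercises both the dict-style repr and the hashability.

```python
from collections import OrderedDict

class NiceOD(OrderedDict):           # same class as in the row above, unflattened
    def __repr__(self):
        return dict.__repr__(self)
    def __hash__(self):
        return hash(str(self))

d = NiceOD([("a", 1), ("b", 2)])
print(d)                                      # {'a': 1, 'b': 2}: prints like a plain dict
cache = {d: "cached"}                         # hashable, so it can be used as a dict key
print(cache[NiceOD([("a", 1), ("b", 2)])])    # 'cached'
```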
class Suffix_Feature_Template(Feature_Template): <NEW_LINE> <INDENT> def __init__(self,s): <NEW_LINE> <INDENT> assert(s > 0), "Suffix length must be a positive integer greater than 0" <NEW_LINE> Feature_Template.__init__(self) <NEW_LINE> self.s = s <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.s) + "-suffix" <NEW_LINE> <DEDENT> def hash(self, sentence, context): <NEW_LINE> <INDENT> i, ngram = context <NEW_LINE> tag = ngram[-1] <NEW_LINE> if len(tag) < self.s: <NEW_LINE> <INDENT> suffix = ngram[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> suffix = ngram[-1][-self.s:] <NEW_LINE> <DEDENT> f_key = (suffix,tag) <NEW_LINE> return (str(self),f_key) | A Suffix_Feature_Template class
This class extends Feature_Template and provides the necessary functionality to compare suffixes of length n with the contextual tag.
For example:
Sentence = [The quick brown fox jumped over the lazy dog]
Context = [5, (VERB)]
Suffix_Feature_Template(2)'s dictionary would consist of a mapping of the key ('ed',VERB) which associates the last 2 letters of the
word at position 5 in the sentence with the tag VERB. | 6259903c21a7993f00c67171 |
class ComplexNode(SingleValueTreeNodeObject): <NEW_LINE> <INDENT> pass | A tree node for complex number values.
| 6259903c507cdc57c63a5f9d |
class UserCenterPageTestCase(BaseTestCase): <NEW_LINE> <INDENT> def test_add_addr(self): <NEW_LINE> <INDENT> file_name = DATA_PATH + r'\data_login.csv' <NEW_LINE> data = ReadCsv().read_login_data(file_name) <NEW_LINE> username = data[-1][0] <NEW_LINE> password = data[-1][1] <NEW_LINE> login = LoginPage(self.driver) <NEW_LINE> actual = login.login(username,password) <NEW_LINE> self.assertEqual(username, actual) <NEW_LINE> file_name = DATA_PATH + r'\data_addr.csv' <NEW_LINE> data = ReadCsv().read_addr_data(file_name) <NEW_LINE> addr = UserCenterPage(self.driver) <NEW_LINE> addr.add_addr(*data[1]) | User center test case class | 6259903c1f5feb6acb163df6 |
class TagHMM(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.train, self.test = self.split_sents() <NEW_LINE> self.hmm = HMM() <NEW_LINE> self.hmm.train(self.train) <NEW_LINE> <DEDENT> def split_sents(self, train=0.95, total=3500, document_class=TaggedSentence): <NEW_LINE> <INDENT> sents = tagged_corpus.tagged_sents()[:total] <NEW_LINE> total = len(sents) if total is None else total <NEW_LINE> i = int(round(train * total)) <NEW_LINE> j = i + int(round(total - train * total)) <NEW_LINE> return (map(document_class, sents[0:i]), map(document_class, sents[i:j])) <NEW_LINE> <DEDENT> def accuracy(self, test_sents, verbose=sys.stderr): <NEW_LINE> <INDENT> total = correct = 0 <NEW_LINE> for sent in test_sents: <NEW_LINE> <INDENT> tags = self.hmm.classify(sent) <NEW_LINE> total += len(tags) <NEW_LINE> for guess, tag in zip(tags, sent.label): <NEW_LINE> <INDENT> correct += (guess == tag) <NEW_LINE> <DEDENT> <DEDENT> if verbose: <NEW_LINE> <INDENT> print >> verbose, "%.2d%% " % (100 * correct / total), <NEW_LINE> <DEDENT> return correct / total <NEW_LINE> <DEDENT> @skip("too slow") <NEW_LINE> def test_tag_train(self): <NEW_LINE> <INDENT> self.assertGreater(self.accuracy(self.train), 0.85) <NEW_LINE> <DEDENT> def test_tag(self): <NEW_LINE> <INDENT> self.assertGreater(self.accuracy(self.test), 0.85) | Train and test an HMM POS tagger. | 6259903cd53ae8145f919669 |
class Message(JSONSerializable): <NEW_LINE> <INDENT> def __init__(self, mtype, data): <NEW_LINE> <INDENT> self.timestamp = utcts(datetime.datetime.utcnow()) <NEW_LINE> self.mtype = mtype <NEW_LINE> self.data = data <NEW_LINE> self.checksum = double_sha256(data) | A Message is a container for messages sent within the P2P network. | 6259903c15baa72349463194 |
class InlineResponse2007(object): <NEW_LINE> <INDENT> swagger_types = { 'payload': 'AnimalType', 'meta': 'ResponseMeta' } <NEW_LINE> if hasattr(getResponse(), "swagger_types"): <NEW_LINE> <INDENT> swagger_types.update(getResponse().swagger_types) <NEW_LINE> <DEDENT> attribute_map = { 'payload': 'payload', 'meta': 'meta' } <NEW_LINE> if hasattr(getResponse(), "attribute_map"): <NEW_LINE> <INDENT> attribute_map.update(getResponse().attribute_map) <NEW_LINE> <DEDENT> def __init__(self, payload=None, meta=None, *args, **kwargs): <NEW_LINE> <INDENT> self._payload = None <NEW_LINE> self._meta = None <NEW_LINE> self.discriminator = None <NEW_LINE> if payload is not None: <NEW_LINE> <INDENT> self.payload = payload <NEW_LINE> <DEDENT> if meta is not None: <NEW_LINE> <INDENT> self.meta = meta <NEW_LINE> <DEDENT> Response.__init__(self, *args, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def payload(self): <NEW_LINE> <INDENT> return self._payload <NEW_LINE> <DEDENT> @payload.setter <NEW_LINE> def payload(self, payload): <NEW_LINE> <INDENT> self._payload = payload <NEW_LINE> <DEDENT> @property <NEW_LINE> def meta(self): <NEW_LINE> <INDENT> return self._meta <NEW_LINE> <DEDENT> @meta.setter <NEW_LINE> def meta(self, meta): <NEW_LINE> <INDENT> self._meta = meta <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(InlineResponse2007, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, InlineResponse2007): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259903c73bcbd0ca4bcb48d |
class MyghtyJavascriptLexer(DelegatingLexer): <NEW_LINE> <INDENT> name = 'JavaScript+Myghty' <NEW_LINE> aliases = ['javascript+myghty', 'js+myghty'] <NEW_LINE> mimetypes = ['application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy'] <NEW_LINE> def __init__(self, **options): <NEW_LINE> <INDENT> super().__init__(JavascriptLexer, MyghtyLexer, **options) | Subclass of the `MyghtyLexer` that highlights unlexed data
with the `JavascriptLexer`.
.. versionadded:: 0.6 | 6259903cbaa26c4b54d504ac |
class DummyProfileFactory(factory.DjangoModelFactory): <NEW_LINE> <INDENT> FACTORY_FOR = DummyProfile <NEW_LINE> user = factory.SubFactory(UserFactory) <NEW_LINE> dummy_field = factory.Sequence(lambda n: 'dummyfield{}'.format(n)) | Factory for the ``DummyProfile`` model. | 6259903c94891a1f408b9ff9 |
class Error(Exception): <NEW_LINE> <INDENT> pass | Base class for exceptions in this module. | 6259903c6e29344779b01856 |
class TestTelephonyOutgoingHangupAlerting(TestCase, TelephonyTestCommon): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.addCleanup(self.clean_up) <NEW_LINE> super(TestTelephonyOutgoingHangupAlerting, self).setUp() <NEW_LINE> self.wait_for_obj("window.navigator.mozTelephony") <NEW_LINE> self.disable_dialer() <NEW_LINE> <DEDENT> def test_telephony_outgoing_hangup_alerting(self): <NEW_LINE> <INDENT> self.user_guided_outgoing_call() <NEW_LINE> time.sleep(1) <NEW_LINE> self.hangup_call(call_type="Outgoing") <NEW_LINE> self.calls = self.marionette.execute_script("return window.wrappedJSObject.calls") <NEW_LINE> self.assertEqual(self.calls['length'], 0, "There should be 0 calls") <NEW_LINE> <DEDENT> def clean_up(self): <NEW_LINE> <INDENT> self.enable_dialer() | This is a test for the `WebTelephony API`_ which will:
- Disable the default gaia dialer, so that the test app can handle calls
- Ask the test user to specify a destination phone number for the test call
- Setup mozTelephonyCall event listeners for the outgoing call
- Use the API to initiate the outgoing call
- Hang up the call via the API after dialing but before call is connected
- Verify that the corresponding mozTelephonyCall events were triggered
- Re-enable the default gaia dialer
.. _`WebTelephony API`: https://developer.mozilla.org/en-US/docs/Web/Guide/API/Telephony | 6259903c26068e7796d4db4b |
class AnalysisReasons(CheckForm): <NEW_LINE> <INDENT> comprehensive_2 = BooleanField(lazy_gettext('I value it for comprehending the experiment')) <NEW_LINE> reproducibility_2 = BooleanField(lazy_gettext('I value it for reproducing the experiment')) <NEW_LINE> no_time_1 = BooleanField(lazy_gettext('I value it, but I do not have time for analyses')) <NEW_LINE> no_tools_1 = BooleanField(lazy_gettext('I value it, but I do not have the appropriate tools')) <NEW_LINE> no_knowledge_1 = BooleanField(lazy_gettext('I value it, but I do not have enough knowledge')) <NEW_LINE> no_utility_0 = BooleanField(lazy_gettext('I do not see any utility in provenance analysis')) <NEW_LINE> other_e = BooleanField(lazy_gettext('Other(s)')) <NEW_LINE> other = TextField(lazy_gettext('Specify')) <NEW_LINE> submit = SubmitField(lazy_gettext('Next')) | Q12/A2 | 6259903cd4950a0f3b111742 |
class PokemonForm(TableBase): <NEW_LINE> <INDENT> __tablename__ = 'pokemon_forms' <NEW_LINE> __singlename__ = 'pokemon_form' <NEW_LINE> id = Column(Integer, primary_key=True, nullable=False, info=dict(description=u'A unique ID for this form.')) <NEW_LINE> form_identifier = Column(Unicode(16), nullable=True, info=dict(description=u"An identifier of the form, uniue among a species. May be None for the default form of the species.", format='identifier')) <NEW_LINE> pokemon_id = Column(Integer, ForeignKey('pokemon.id'), nullable=False, autoincrement=False, info=dict(description=u'The ID of the base Pokémon for this form.')) <NEW_LINE> introduced_in_version_group_id = Column(Integer, ForeignKey('version_groups.id'), autoincrement=False, info=dict(description=u'The ID of the version group in which this form first appeared.')) <NEW_LINE> is_default = Column(Boolean, nullable=False, info=dict(description=u'Set for exactly one form used as the default for each pokemon (not necessarily species).')) <NEW_LINE> is_battle_only = Column(Boolean, nullable=False, info=dict(description=u'Set iff the form can only appear in battle.')) <NEW_LINE> form_order = Column(Integer, nullable=False, autoincrement=False, info=dict(description=u"The order in which forms should be sorted within a species' forms. Multiple forms may have equal order, in which case they should fall back on sorting by name. " u"Used in generating `pokemon_forms.order` and `pokemon.order`.")) <NEW_LINE> order = Column(Integer, nullable=False, autoincrement=False, info=dict(description=u'The order in which forms should be sorted within all forms. Multiple forms may have equal order, in which case they should fall back on sorting by name.')) <NEW_LINE> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.pokemon_name or self.species.name | An individual form of a Pokémon. This includes *every* variant (except
color differences) of every Pokémon, regardless of how the games treat
them. Even Pokémon with no alternate forms have one row in this table, to
represent their lone "normal" form. | 6259903c8e05c05ec3f6f75d |
class PlayerCreateForm(FlaskForm): <NEW_LINE> <INDENT> first_name = StringField('first_name', validators=[DataRequired()]) <NEW_LINE> last_name = StringField('last_name', validators=[DataRequired()]) <NEW_LINE> aga_id = IntegerField( 'aga_id', validators=[NumberRange(0, 50000)]) <NEW_LINE> aga_rank = IntegerField( 'aga_rank', validators=[NumberRange(-30, 9)]) <NEW_LINE> @staticmethod <NEW_LINE> def validate_aga_rank(form, field): <NEW_LINE> <INDENT> if field.data < -30 or field.data > 9 or field.data == 0: <NEW_LINE> <INDENT> raise ValidationError('Rank must be -30 to -1 or 1 to 9') | Player creation form. | 6259903c21a7993f00c67173 |
@base.ReleaseTracks(base.ReleaseTrack.ALPHA) <NEW_LINE> class Source(base.Group): <NEW_LINE> <INDENT> def Filter(self, context, args): <NEW_LINE> <INDENT> resources.SetParamDefault( api='source', collection=None, param='projectId', resolver=resolvers.FromProperty(properties.VALUES.core.project)) <NEW_LINE> source.Source.SetResourceParser(resources.REGISTRY) <NEW_LINE> source.Source.SetApiEndpoint( self.Http(), properties.VALUES.api_endpoint_overrides.source.Get()) | Cloud git repository commands. | 6259903cb57a9660fecd2c80 |
class Size(Shape): <NEW_LINE> <INDENT> def __new__(cls: Type[Size], width: Real = 0, height: Real = 0) -> Size: <NEW_LINE> <INDENT> return super().__new__(cls, (width, height)) <NEW_LINE> <DEDENT> def __add__(self, other: Union[Size, Real]) -> Union[Size, Real]: <NEW_LINE> <INDENT> if isinstance(other, Size): <NEW_LINE> <INDENT> return self.area + other.area <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Size(self.width + other, self.height + other) <NEW_LINE> <DEDENT> <DEDENT> def __sub__(self, other: Union[Size, Real]) -> Union[Size, Real]: <NEW_LINE> <INDENT> if isinstance(other, Size): <NEW_LINE> <INDENT> return self.area - other.area <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Size(self.width - other, self.height - other) <NEW_LINE> <DEDENT> <DEDENT> def __floordiv__(self, n) -> Size: <NEW_LINE> <INDENT> if not isinstance(n, Real): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return Size(self[0] // n, self[1] // n) <NEW_LINE> <DEDENT> @property <NEW_LINE> def area(self) -> Real: <NEW_LINE> <INDENT> return self.width * self.height <NEW_LINE> <DEDENT> @property <NEW_LINE> def floored(self) -> Size: <NEW_LINE> <INDENT> return Size(floor(self.width), floor(self.height)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self) -> Real: <NEW_LINE> <INDENT> return self[1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self) -> Real: <NEW_LINE> <INDENT> return self[0] | Base class for defining a 2D area.
Supports floor division. | 6259903cbe383301e0254a1c |
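A small sketch of the arithmetic defined in the row above, assuming the Shape base class behaves like a tuple subclass so a Size is just a (width, height) pair.

```python
s = Size(10, 4)
print(s.width, s.height, s.area)     # 10 4 40
print(tuple(s // 3))                 # (3, 1): floor division applies per component
print(s + Size(2, 5))                # 50: adding two Sizes sums their areas
print(tuple(s + 2))                  # (12, 6): adding a number grows both sides
```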
class SocketOptionsAdapter(adapters.HTTPAdapter): <NEW_LINE> <INDENT> if connection is not None: <NEW_LINE> <INDENT> default_options = getattr( connection.HTTPConnection, 'default_socket_options', [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)] ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> default_options = [] <NEW_LINE> warnings.warn(exc.RequestsVersionTooOld, "This version of Requests is only compatible with a " "version of urllib3 which is too old to support " "setting options on a socket. This adapter is " "functionally useless.") <NEW_LINE> <DEDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.socket_options = kwargs.pop('socket_options', self.default_options) <NEW_LINE> super(SocketOptionsAdapter, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def init_poolmanager(self, connections, maxsize, block=False): <NEW_LINE> <INDENT> if requests.__build__ >= 0x020400: <NEW_LINE> <INDENT> self.poolmanager = poolmanager.PoolManager( num_pools=connections, maxsize=maxsize, block=block, socket_options=self.socket_options ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(SocketOptionsAdapter, self).init_poolmanager( connections, maxsize, block ) | An adapter for requests that allows users to specify socket options.
Since version 2.4.0 of requests, it is possible to specify a custom list
of socket options that need to be set before establishing the connection.
Example usage::
>>> import socket
>>> import requests
>>> from requests_toolbelt.adapters import socket_options
>>> s = requests.Session()
>>> opts = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 0)]
>>> adapter = socket_options.SocketOptionsAdapter(socket_options=opts)
>>> s.mount('http://', adapter)
You can also take advantage of the list of default options on this class
to keep using the original options in addition to your custom options. In
that case, ``opts`` might look like::
>>> opts = socket_options.SocketOptionsAdapter.default_options + opts | 6259903c07d97122c4217ea3 |
class Sketch(object): <NEW_LINE> <INDENT> def __init__(self, source): <NEW_LINE> <INDENT> if isinstance(source, Tilt): <NEW_LINE> <INDENT> with source.subfile_reader('data.sketch') as inf: <NEW_LINE> <INDENT> self.filename = None <NEW_LINE> self._parse(binfile(inf)) <NEW_LINE> <DEDENT> <DEDENT> elif hasattr(source, 'read'): <NEW_LINE> <INDENT> self.filename = None <NEW_LINE> self._parse(binfile(source)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.filename = source <NEW_LINE> with file(source, 'rb') as inf: <NEW_LINE> <INDENT> self._parse(binfile(inf)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def write(self, destination): <NEW_LINE> <INDENT> tmpf = io.StringIO() <NEW_LINE> self._write(binfile(tmpf)) <NEW_LINE> data = tmpf.getvalue() <NEW_LINE> if isinstance(destination, Tilt): <NEW_LINE> <INDENT> with destination.subfile_writer('data.sketch') as outf: <NEW_LINE> <INDENT> outf.write(data) <NEW_LINE> <DEDENT> <DEDENT> elif hasattr(destination, 'write'): <NEW_LINE> <INDENT> destination.write(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with file(destination, 'wb') as outf: <NEW_LINE> <INDENT> outf.write(data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _parse(self, b): <NEW_LINE> <INDENT> self.header = list(b.unpack("<3I")) <NEW_LINE> self.additional_header = b.read_length_prefixed() <NEW_LINE> (num_strokes, ) = b.unpack("<i") <NEW_LINE> assert 0 <= num_strokes < 300000, num_strokes <NEW_LINE> self.strokes = [Stroke.from_file(b) for i in range(num_strokes)] <NEW_LINE> <DEDENT> def _write(self, b): <NEW_LINE> <INDENT> b.pack("<3I", *self.header) <NEW_LINE> b.write_length_prefixed(self.additional_header) <NEW_LINE> b.pack("<i", len(self.strokes)) <NEW_LINE> for stroke in self.strokes: <NEW_LINE> <INDENT> stroke._write(b) | Stroke data from a .tilt file. Attributes:
.strokes List of tilt.Stroke instances
.filename Filename if loaded from file, but usually None
.header Opaque header data | 6259903cd53ae8145f91966b |
class TestCiscoIPsecDriver(testlib_api.SqlTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestCiscoIPsecDriver, self).setUp() <NEW_LINE> mock.patch('neutron.common.rpc.create_connection').start() <NEW_LINE> service_plugin = mock.Mock() <NEW_LINE> service_plugin._get_vpnservice.return_value = { 'router_id': _uuid() } <NEW_LINE> l3_plugin = mock.Mock() <NEW_LINE> mock.patch( 'neutron.manager.NeutronManager.get_service_plugins', return_value={constants.L3_ROUTER_NAT: l3_plugin}).start() <NEW_LINE> l3_plugin.get_host_for_router.return_value = FAKE_HOST <NEW_LINE> l3_agent = mock.Mock() <NEW_LINE> l3_agent.host = 'some-host' <NEW_LINE> l3_plugin.get_l3_agents_hosting_routers.return_value = [l3_agent] <NEW_LINE> self.driver = ipsec_driver.CiscoCsrIPsecVPNDriver(service_plugin) <NEW_LINE> mock.patch.object(csr_db, 'create_tunnel_mapping').start() <NEW_LINE> self.context = n_ctx.Context('some_user', 'some_tenant') <NEW_LINE> <DEDENT> def _test_update(self, func, args, additional_info=None): <NEW_LINE> <INDENT> with mock.patch.object(self.driver.agent_rpc, 'cast') as cast: <NEW_LINE> <INDENT> func(self.context, *args) <NEW_LINE> cast.assert_called_once_with( self.context, {'args': additional_info, 'namespace': None, 'method': 'vpnservice_updated'}, version='1.0', topic='cisco_csr_ipsec_agent.fake_host') <NEW_LINE> <DEDENT> <DEDENT> def test_create_ipsec_site_connection(self): <NEW_LINE> <INDENT> self._test_update(self.driver.create_ipsec_site_connection, [FAKE_VPN_CONNECTION], {'reason': 'ipsec-conn-create'}) <NEW_LINE> <DEDENT> def test_update_ipsec_site_connection(self): <NEW_LINE> <INDENT> self._test_update(self.driver.update_ipsec_site_connection, [FAKE_VPN_CONNECTION, FAKE_VPN_CONNECTION], {'reason': 'ipsec-conn-update'}) <NEW_LINE> <DEDENT> def test_delete_ipsec_site_connection(self): <NEW_LINE> <INDENT> self._test_update(self.driver.delete_ipsec_site_connection, [FAKE_VPN_CONNECTION], {'reason': 'ipsec-conn-delete'}) <NEW_LINE> <DEDENT> def test_update_vpnservice(self): <NEW_LINE> <INDENT> self._test_update(self.driver.update_vpnservice, [FAKE_VPN_SERVICE, FAKE_VPN_SERVICE], {'reason': 'vpn-service-update'}) <NEW_LINE> <DEDENT> def test_delete_vpnservice(self): <NEW_LINE> <INDENT> self._test_update(self.driver.delete_vpnservice, [FAKE_VPN_SERVICE], {'reason': 'vpn-service-delete'}) | Test that various incoming requests are sent to device driver. | 6259903c004d5f362081f8e7 |
class HistoryPreprocessor(Preprocessor): <NEW_LINE> <INDENT> def __init__(self, history_length=1): <NEW_LINE> <INDENT> self.history_length = history_length <NEW_LINE> self.past_states = None <NEW_LINE> self.past_states_ori = None <NEW_LINE> <DEDENT> def process_state_for_network(self, state): <NEW_LINE> <INDENT> row, col = state.shape <NEW_LINE> if self.past_states is None: <NEW_LINE> <INDENT> self.past_states = np.zeros((row, col, self.history_length)) <NEW_LINE> <DEDENT> history = np.dstack((self.past_states, state)) <NEW_LINE> self.past_states = history[:, :, 1:] <NEW_LINE> return history <NEW_LINE> <DEDENT> def process_state_for_network_ori(self, state): <NEW_LINE> <INDENT> row, col, channel = state.shape <NEW_LINE> if self.past_states_ori is None: <NEW_LINE> <INDENT> self.past_states_ori = np.zeros((row, col, channel, self.history_length)) <NEW_LINE> <DEDENT> history = np.concatenate((self.past_states_ori, np.expand_dims(state, -1)), axis=3) <NEW_LINE> self.past_states_ori = history[:, :, :, 1:] <NEW_LINE> return history <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.past_states = None <NEW_LINE> self.past_states_ori = None <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> return {'history_length': self.history_length} | Keeps the last k states.
Useful for domains where you need velocities, but the state
contains only positions.
When the environment starts, this will just fill the initial
sequence values with zeros k times.
Parameters
----------
history_length: int
Number of previous states to prepend to state being processed. | 6259903ca4f1c619b294f78a |
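A shape-check sketch of the history stacking described above; the HistoryPreprocessor class from the row is assumed importable, and the 4x4 frames are placeholders.

```python
import numpy as np

prep = HistoryPreprocessor(history_length=3)
frame = np.ones((4, 4))
for step in range(5):
    stacked = prep.process_state_for_network(frame * step)

print(stacked.shape)      # (4, 4, 4): the current frame plus the 3 most recent ones
prep.reset()              # clear the stored history between episodes
```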
class EnvironmentModules(Package): <NEW_LINE> <INDENT> homepage = "https://sourceforge.net/p/modules/wiki/Home/" <NEW_LINE> url = "http://prdownloads.sourceforge.net/modules/modules-3.2.10.tar.gz" <NEW_LINE> version('3.2.10', '8b097fdcb90c514d7540bb55a3cb90fb') <NEW_LINE> depends_on('tcl', type=('build', 'link', 'run')) <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> tcl_spec = spec['tcl'] <NEW_LINE> tcl_config_name = 'tclConfig.sh' <NEW_LINE> tcl_config_dir_options = [tcl_spec.prefix.lib, tcl_spec.prefix.lib64] <NEW_LINE> tcl_config_found = False <NEW_LINE> for tcl_config_dir in tcl_config_dir_options: <NEW_LINE> <INDENT> tcl_config_found = os.path.exists( join_path(tcl_config_dir, tcl_config_name)) <NEW_LINE> if tcl_config_found: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if not tcl_config_found: <NEW_LINE> <INDENT> raise InstallError('Failed to locate ' + tcl_config_name) <NEW_LINE> <DEDENT> cpp_flags = ['-DUSE_INTERP_ERRORLINE'] <NEW_LINE> config_args = [ "--without-tclx", "--with-tclx-ver=0.0", "--prefix=" + prefix, "--with-tcl=" + tcl_config_dir, "--with-tcl-ver=%d.%d" % ( tcl_spec.version.version[0], tcl_spec.version.version[1]), '--disable-debug', '--disable-dependency-tracking', '--disable-silent-rules', '--disable-versioning', '--datarootdir=' + prefix.share, 'CPPFLAGS=' + ' '.join(cpp_flags) ] <NEW_LINE> configure(*config_args) <NEW_LINE> make() <NEW_LINE> make('install') | The Environment Modules package provides for the dynamic
modification of a user's environment via modulefiles. | 6259903c16aa5153ce4016f3 |
class Reviews(BaseAbstractModel): <NEW_LINE> <INDENT> profile=models.ForeignKey(to='Auth.Profile', on_delete=models.DO_NOTHING, null=True,related_name='reviews') <NEW_LINE> rating = models.IntegerField(blank=True, null=True) <NEW_LINE> reviews= models.CharField(max_length=255, blank=True, null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{}".format(self.reviews) | This is the model for both user and job reviews | 6259903cc432627299fa41ff |
class ForumTrack(models.Model): <NEW_LINE> <INDENT> id = models.PositiveIntegerField(primary_key=True, db_column="user_id", default=0, help_text="primary key" ) <NEW_LINE> id = models.PositiveIntegerField(primary_key=True, db_column="forum_id", default=0, help_text="primary key" ) <NEW_LINE> mark_time = models.PositiveIntegerField( default=0, ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = u"%sforums_track" % settings.PHPBB_TABLE_PREFIX | Unread post information is stored here | 6259903c26238365f5fadd5c |
class PoswiseFeedForwardNet(nn.Module): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> super(PoswiseFeedForwardNet, self).__init__() <NEW_LINE> self.w_1 = nn.Conv1d(config['d_model'], config['d_ff'], kernel_size=1) <NEW_LINE> self.w_2 = nn.Conv1d(config['d_ff'], config['d_model'], kernel_size=1) <NEW_LINE> self.layer_norm = nn.LayerNorm(config['d_model'], eps=1e-6) <NEW_LINE> self.dropout = nn.Dropout(config['dropout']) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> residual = x <NEW_LINE> output = F.relu(self.w_1(x.transpose(1, 2))) <NEW_LINE> output = self.w_2(output).transpose(1, 2) <NEW_LINE> output = self.dropout(output) <NEW_LINE> output = self.layer_norm(output + residual) <NEW_LINE> return output | two feed forward layers | 6259903c07d97122c4217ea4 |
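A shape check for the position-wise feed-forward block above; the config keys mirror those read in __init__, and the batch and sequence sizes are arbitrary.

```python
import torch

config = {"d_model": 16, "d_ff": 64, "dropout": 0.1}
ffn = PoswiseFeedForwardNet(config)   # class from the row above
x = torch.randn(2, 5, 16)             # (batch, sequence, d_model)
print(ffn(x).shape)                   # torch.Size([2, 5, 16]): residual add and layer norm keep the shape
```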
class UserSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = get_user_model() <NEW_LINE> fields = ('email', 'password', 'name') <NEW_LINE> extra_kwargs = {'password': {'write_only': True, 'min_length': 5}} <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> return get_user_model().objects.create_user(**validated_data) <NEW_LINE> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> password = validated_data.pop('password', None) <NEW_LINE> user = super().update(instance, validated_data) <NEW_LINE> if password: <NEW_LINE> <INDENT> user.set_password(password) <NEW_LINE> user.save() <NEW_LINE> <DEDENT> return user | Serializer for the user object. | 6259903c96565a6dacd2d88e |
class App(object): <NEW_LINE> <INDENT> def __init__( self, appName, shortcut='<Control><Q>', a11yAppName=None, forceKill=True, parameters='', recordVideo=False): <NEW_LINE> <INDENT> self.appCommand = appName <NEW_LINE> self.shortcut = shortcut <NEW_LINE> self.forceKill = forceKill <NEW_LINE> self.parameters = parameters <NEW_LINE> self.internCommand = self.appCommand.lower() <NEW_LINE> self.a11yAppName = a11yAppName <NEW_LINE> self.recordVideo = recordVideo <NEW_LINE> self.pid = None <NEW_LINE> pressKey('Esc') <NEW_LINE> absoluteMotion(100, 100, 2) <NEW_LINE> if self.recordVideo: <NEW_LINE> <INDENT> cmd = "gsettings set org.gnome.settings-daemon.plugins.media-keys max-screencast-length 600" <NEW_LINE> Popen(cmd, shell=True, stdout=PIPE).wait() <NEW_LINE> keyCombo('<Control><Alt><Shift>R') <NEW_LINE> <DEDENT> <DEDENT> def isRunning(self): <NEW_LINE> <INDENT> if self.a11yAppName is None: <NEW_LINE> <INDENT> self.a11yAppName = self.internCommand <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if self.a11yAppName in [x.name for x in root.applications()]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> sleep(10) <NEW_LINE> if self.a11yAppName in [x.name for x in root.applications()]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def quit(self): <NEW_LINE> <INDENT> if self.recordVideo: <NEW_LINE> <INDENT> keyCombo('<Control><Alt><Shift>R') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> keyCombo('<Ctrl><Q>') <NEW_LINE> if self.isRunning(): <NEW_LINE> <INDENT> self.kill() <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def kill(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.system("pkill -9 gnome-boxes") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> Popen("killall " + self.appCommand, shell=True).wait() <NEW_LINE> <DEDENT> <DEDENT> def startViaCommand(self): <NEW_LINE> <INDENT> if self.forceKill and self.isRunning(): <NEW_LINE> <INDENT> self.kill() <NEW_LINE> assert not self.isRunning(), "Application cannot be stopped" <NEW_LINE> <DEDENT> self.process = Popen(self.appCommand.split() + self.parameters.split(), stdout=PIPE, stderr=PIPE, bufsize=0) <NEW_LINE> self.pid = self.process.pid <NEW_LINE> sleep(1) <NEW_LINE> return root.application(self.a11yAppName) <NEW_LINE> <DEDENT> def closeViaShortcut(self): <NEW_LINE> <INDENT> if not self.isRunning(): <NEW_LINE> <INDENT> raise Exception("App is not running") <NEW_LINE> <DEDENT> keyCombo(self.shortcut) <NEW_LINE> assert not self.isRunning(), "Application cannot be stopped" | This class does all basic events with the app | 6259903c63f4b57ef0086677 |
class ObjectiveFunction(object): <NEW_LINE> <INDENT> def __call__(self, theta): <NEW_LINE> <INDENT> return self.evaluate(theta) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def evaluate(self, theta): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fit(self, x0, optimizer='gd', n=1000, xtol=1e-6, ftol=1e-9): <NEW_LINE> <INDENT> opts = {'gd': GradientDescent, 'cd': CoordinateDescent} <NEW_LINE> optimizer = opts[optimizer] <NEW_LINE> self.opt = optimizer(self.evaluate, self.gradient) <NEW_LINE> self.opt.compute(x0=x0, n=n, xtol=xtol, ftol=ftol) <NEW_LINE> return self.opt | An abstract class for a generic objective function. | 6259903c1f5feb6acb163df9 |
class Spindle(object): <NEW_LINE> <INDENT> CW = -1 <NEW_LINE> CCW = 1 <NEW_LINE> def __init__(self, speed=1.0): <NEW_LINE> <INDENT> self.speed = speed <NEW_LINE> self.running = False <NEW_LINE> <DEDENT> def rotate(self, direction, speed=None): <NEW_LINE> <INDENT> if speed is None: <NEW_LINE> <INDENT> speed = self.speed <NEW_LINE> <DEDENT> self.running = True <NEW_LINE> logging.info("Turn Spindle in Direction %s with speed %s", direction, speed) <NEW_LINE> <DEDENT> def get_state(self): <NEW_LINE> <INDENT> if self.running: <NEW_LINE> <INDENT> return("%s@%s" % (self.running, self.speed)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return("not running") <NEW_LINE> <DEDENT> <DEDENT> def unhold(self): <NEW_LINE> <INDENT> logging.info("Power Off Spindle") <NEW_LINE> self.running = False | Abstract Class for Spindle
Spindle can rotate clockwise or counterclockwise
at a given speed | 6259903c004d5f362081f8e8 |
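A usage sketch of the abstract spindle above; rotate() only logs and flips the running flag, so the example just inspects get_state().

```python
spindle = Spindle(speed=0.5)
spindle.rotate(Spindle.CW)        # logs direction/speed and marks the spindle as running
print(spindle.get_state())        # 'True@0.5'
spindle.unhold()                  # powers the spindle off
print(spindle.get_state())        # 'not running'
```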
class fib(object): <NEW_LINE> <INDENT> def __init__(self, n=20): <NEW_LINE> <INDENT> self.a = 0 <NEW_LINE> self.b = 1 <NEW_LINE> self.n = n <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> self.a, self.b = self.b, self.a + self.b <NEW_LINE> if self.a > self.n: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> return self.a | docstring for fib | 6259903c1d351010ab8f4d23 |
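The iterator above stops once the next Fibonacci number would exceed n, so a full pass is easy to show:

```python
print(list(fib(20)))   # [1, 1, 2, 3, 5, 8, 13]
print(list(fib(5)))    # [1, 1, 2, 3, 5]
```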
class InnerModel(BaseModel): <NEW_LINE> <INDENT> inner_int_field = IntegerField() <NEW_LINE> inner_str_field = StringField() | Inner model. | 6259903c30c21e258be99a13 |
class EndProgramException(LarvException): <NEW_LINE> <INDENT> pass | To be called by World when there aren't any more Engines in the stack. | 6259903c91af0d3eaad3b03c |
class CUHK_PEDES(Dataset): <NEW_LINE> <INDENT> def __init__(self, conf, dataset, is_train=False, image_caption='image'): <NEW_LINE> <INDENT> self.split = dataset[0]["split"] <NEW_LINE> self.image_caption = image_caption <NEW_LINE> self.dataset = dataset <NEW_LINE> self.config = conf <NEW_LINE> self.positive_samples = conf.positive_samples <NEW_LINE> self.negative_samples = conf.negative_samples <NEW_LINE> self.n_original_captions = conf.n_original_captions <NEW_LINE> self.transform = transforms.Compose([ transforms.Resize(conf.image_size), transforms.ToTensor() ]) <NEW_LINE> conf.logger.info(f'init {self.split} {image_caption} ,length:{len(self)}, dataset length:{len(self.dataset)}') <NEW_LINE> if self.image_caption == 'caption': <NEW_LINE> <INDENT> self.num_classes = len(self) / self.n_original_captions <NEW_LINE> <DEDENT> elif self.image_caption == 'image': <NEW_LINE> <INDENT> self.num_classes = len(self) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> if self.image_caption == 'caption': <NEW_LINE> <INDENT> img_index = index // self.n_original_captions <NEW_LINE> <DEDENT> elif self.image_caption == 'image': <NEW_LINE> <INDENT> img_index = index <NEW_LINE> <DEDENT> data = self.dataset[img_index] <NEW_LINE> image_path = os.path.join(self.config.images_dir, data['file_path']) <NEW_LINE> cap_index = index % self.n_original_captions <NEW_LINE> caption_indexes = data['index_captions'][cap_index] <NEW_LINE> p_id = int(data['id']) <NEW_LINE> image = Image.open(image_path) <NEW_LINE> image = self.transform(image) <NEW_LINE> caption = np.zeros(self.config.max_length) <NEW_LINE> for i, word_i in enumerate(caption_indexes): <NEW_LINE> <INDENT> if i < self.config.max_length: <NEW_LINE> <INDENT> caption[i] = word_i <NEW_LINE> <DEDENT> <DEDENT> caption = torch.LongTensor(caption) <NEW_LINE> return index, image, caption, img_index, p_id <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> if self.image_caption == 'caption': <NEW_LINE> <INDENT> return len(self.dataset) * self.n_original_captions <NEW_LINE> <DEDENT> elif self.image_caption == 'image': <NEW_LINE> <INDENT> return len(self.dataset) | the class for CUHK_PEDES dataset
Attributes: | 6259903c26238365f5fadd5e |
class Unit(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=30, unique=True) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return 'Unit(name=%r)' % self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | A unit used on a data point, such as "ug/m3", or "kWh" | 6259903cec188e330fdf9aa1 |
class SecurityGovernanceItem(Model): <NEW_LINE> <INDENT> S1 = "s1" <NEW_LINE> S2 = "s2" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { } <NEW_LINE> self.attribute_map = { } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> 'SecurityGovernanceItem': <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259903c71ff763f4b5e89a5 |
class TLSError(Exception): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self) | Base class for all TLS Lite exceptions. | 6259903c63f4b57ef0086678 |
class MutableCoordinate(_Coordinate): <NEW_LINE> <INDENT> def __init__(self, coord): <NEW_LINE> <INDENT> super().__init__(coord) <NEW_LINE> <DEDENT> def bring_within(self, lat): <NEW_LINE> <INDENT> self.coord = _xtal.bring_within_lattice(self.coord, lat) <NEW_LINE> return <NEW_LINE> <DEDENT> def __iadd__(self, other): <NEW_LINE> <INDENT> self.coord = np.add(self.coord, other.cart()) <NEW_LINE> return self | Mutable Coordinate class. Defined as the Cartesian
coordinates, can handle operations related to lattice
periodicity. | 6259903c596a897236128ea8 |
class FdMsInterface(CommandLineArguments): <NEW_LINE> <INDENT> EXEC_PATH = os.path.join(_base_dir_, "matching_stats.x") <NEW_LINE> params = OrderedDict([ CommonArgumentSpecs.s_path, CommonArgumentSpecs.t_path, CommonArgumentSpecs.out_path, ('double_rank', (False, bool, False, "Use double instead of single rank.")), ('rank_fail', (False, bool, False, "Use the rank-and-fail strategy.")), ('lazy_wl', (False, bool, False, 'Use lazy Weiner links')), ('use_maxrep_rc', (False, bool, False, 'maxrep vector for Weiner links with rank&check')), ('use_maxrep_vanilla', (False, bool, False, 'maxrep vector for Weiner links')), ('lca_parents', (False, bool, False, 'Use lca instead of consecutive parent sequence')), CommonArgumentSpecs.load_maxrep, ('time_usage', (False, bool, False, 'Report time usage.')), ('answer', (False, bool, False, 'Print answer.')), ('avg', (False, bool, False, 'Print average MS.')), CommonArgumentSpecs.load_cst, ('nthreads', (False, int, 1, 'nr. of threads')), ('nslices', (False, int, 1, 'nr. of blocks')) ]) | Interface to the fd_ms binary | 6259903c8c3a8732951f7760 |
class ProductionConfig(Config): <NEW_LINE> <INDENT> SQLALCHEMY_DATABASE_URI = 'sqlite:///'+os.path.join(basedir,'data/data.sqlite') | Production environment configuration | 6259903cb57a9660fecd2c84 |
class BingoCage: <NEW_LINE> <INDENT> def __init__(self, items): <NEW_LINE> <INDENT> self._items = list(items) <NEW_LINE> random.shuffle(self._items) <NEW_LINE> <DEDENT> def pick(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._items.pop() <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise LookupError('pick from empty BingoCage') <NEW_LINE> <DEDENT> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> return self.pick() | a callable demo | 6259903c21a7993f00c67177 |
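A usage sketch of the callable demo above: each call delegates to pick(), and an empty cage raises LookupError.

```python
cage = BingoCage(range(3))
picks = [cage() for _ in range(3)]     # calling the instance pops items in shuffled order
print(sorted(picks))                   # [0, 1, 2]: every item is drawn exactly once
try:
    cage()
except LookupError as exc:
    print(exc)                         # pick from empty BingoCage
```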
class MostFrequentVisitor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.visitors = {} <NEW_LINE> self.most_frequent_visitor = None <NEW_LINE> <DEDENT> def process(self, matches): <NEW_LINE> <INDENT> visitor = matches['ip'] <NEW_LINE> if visitor not in self.visitors: <NEW_LINE> <INDENT> self.visitors[visitor] = 0 <NEW_LINE> <DEDENT> self.visitors[visitor] += 1 <NEW_LINE> if self.most_frequent_visitor is None or (self.visitors[self.most_frequent_visitor] < self.visitors[visitor]): <NEW_LINE> <INDENT> self.most_frequent_visitor = visitor <NEW_LINE> <DEDENT> <DEDENT> def print_result(self): <NEW_LINE> <INDENT> print("Most frequent visitor: {}".format(self.most_frequent_visitor)) | Processor to find the most frequent visitor based on IP address from a log file | 6259903c63b5f9789fe86375 |
class QuietServerHandler(ServerHandler): <NEW_LINE> <INDENT> def log_exception(self, exc_info): <NEW_LINE> <INDENT> import errno <NEW_LINE> if exc_info[1].args[0] == errno.EPIPE: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> ServerHandler.log_exception(self, exc_info) | Used to suppress tracebacks of broken pipes. | 6259903c0a366e3fb87ddbee |
class Command(BaseCommand): <NEW_LINE> <INDENT> help = "Run the queue consumer" <NEW_LINE> option_list = BaseCommand.option_list + ( make_option('--periodic', '-p', dest='periodic', action='store_true', help='Enqueue periodic commands' ), make_option('--no-periodic', '-n', dest='periodic', action='store_false', help='Do not enqueue periodic commands' ), make_option('--workers', '-w', dest='workers', type='int', help='Number of worker threads' ), make_option('--delay', '-d', dest='initial_delay', type='float', help='Delay between polling requests' ), make_option('--max_delay', '-m', dest='max_delay', type='float', help='Maximum delay between polling requests' ), ) <NEW_LINE> def autodiscover_appconfigs(self): <NEW_LINE> <INDENT> module_name = 'tasks' <NEW_LINE> for config in django_apps.get_app_configs(): <NEW_LINE> <INDENT> app_path = config.module.__path__ <NEW_LINE> try: <NEW_LINE> <INDENT> fp, path, description = imp.find_module(module_name, app_path) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> import_path = '%s.%s' % (config.name, module_name) <NEW_LINE> imp.load_module(import_path, fp, path, description) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def autodiscover_old(self): <NEW_LINE> <INDENT> module_name = 'tasks' <NEW_LINE> for app in settings.INSTALLED_APPS: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import_module(app) <NEW_LINE> app_path = sys.modules[app].__path__ <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> imp.find_module(module_name, app_path) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> import_module('%s.%s' % (app, module_name)) <NEW_LINE> app_path = sys.modules['%s.%s' % (app, module_name)] <NEW_LINE> <DEDENT> <DEDENT> def autodiscover(self): <NEW_LINE> <INDENT> if HAS_DJANGO_APPS: <NEW_LINE> <INDENT> self.autodiscover_appconfigs() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.autodiscover_old() <NEW_LINE> <DEDENT> <DEDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> from huey.djhuey import HUEY <NEW_LINE> try: <NEW_LINE> <INDENT> consumer_options = settings.HUEY['consumer_options'] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> consumer_options = {} <NEW_LINE> <DEDENT> if options['workers'] is not None: <NEW_LINE> <INDENT> consumer_options['workers'] = options['workers'] <NEW_LINE> <DEDENT> if options['periodic'] is not None: <NEW_LINE> <INDENT> consumer_options['periodic'] = options['periodic'] <NEW_LINE> <DEDENT> if options['initial_delay'] is not None: <NEW_LINE> <INDENT> consumer_options['initial_delay'] = options['initial_delay'] <NEW_LINE> <DEDENT> if options['max_delay'] is not None: <NEW_LINE> <INDENT> consumer_options['max_delay'] = options['max_delay'] <NEW_LINE> <DEDENT> self.autodiscover() <NEW_LINE> loglevel = get_loglevel(consumer_options.pop('loglevel', None)) <NEW_LINE> logfile = consumer_options.pop('logfile', None) <NEW_LINE> setup_logger(loglevel, logfile) <NEW_LINE> consumer = Consumer(HUEY, **consumer_options) <NEW_LINE> consumer.run() | Queue consumer. Example usage::
To start the consumer (note you must export the settings module):
django-admin.py run_huey | 6259903cd53ae8145f91966f |
class chain(Command): <NEW_LINE> <INDENT> def execute(self): <NEW_LINE> <INDENT> for command in self.rest(1).split(";"): <NEW_LINE> <INDENT> self.fm.execute_console(command) | :chain <command1>; <command2>; ...
Calls multiple commands at once, separated by semicolons. | 6259903cd10714528d69ef90 |
class PreSurvey(models.Model): <NEW_LINE> <INDENT> student_id = models.IntegerField(default=0) <NEW_LINE> fill_time = models.DateTimeField(auto_now_add=True) <NEW_LINE> p = models.IntegerField(default=0) <NEW_LINE> p_t = models.TextField() <NEW_LINE> p1 = models.IntegerField(default=0) <NEW_LINE> p2 = models.IntegerField(default=0) <NEW_LINE> p3 = models.IntegerField(default=0) <NEW_LINE> p4 = models.IntegerField(default=0) <NEW_LINE> p5 = models.IntegerField(default=0) <NEW_LINE> p6 = models.IntegerField(default=0) <NEW_LINE> p7 = models.IntegerField(default=0) <NEW_LINE> p8 = models.IntegerField(default=0) <NEW_LINE> p9 = models.IntegerField(default=0) <NEW_LINE> p10 = models.IntegerField(default=0) <NEW_LINE> @property <NEW_LINE> def student(self): <NEW_LINE> <INDENT> return User.objects.get(id=self.student_id) | student_id: student id | 6259903c1d351010ab8f4d25 |
class UaiWriter(): <NEW_LINE> <INDENT> def write_model(self, model: GenericGraphModel, path: str): <NEW_LINE> <INDENT> assert path.endswith('.uai') <NEW_LINE> with open(path, 'w') as f: <NEW_LINE> <INDENT> f.write('MARKOV\n') <NEW_LINE> f.write(str(model.num_variables) + '\n') <NEW_LINE> for var_id in range(model.num_variables): <NEW_LINE> <INDENT> card = model.get_variable(var_id).domain.size() <NEW_LINE> assert 1 <= card <= 1000 <NEW_LINE> f.write(str(card) + ' ') <NEW_LINE> <DEDENT> f.write('\n') <NEW_LINE> num_factors = len(model.factors) <NEW_LINE> f.write(str(num_factors) + '\n') <NEW_LINE> for factor_id in range(num_factors): <NEW_LINE> <INDENT> var_idx = model.factors[factor_id].var_idx <NEW_LINE> f.write(str(len(var_idx)) + ' ') <NEW_LINE> f.write(' '.join(str(var_id) for var_id in var_idx)) <NEW_LINE> f.write('\n') <NEW_LINE> <DEDENT> for factor_id in range(num_factors): <NEW_LINE> <INDENT> factor = model.factors[factor_id] <NEW_LINE> assert factor.is_discrete() <NEW_LINE> vals_flat = factor.values.reshape(-1) <NEW_LINE> f.write(str(len(vals_flat))) <NEW_LINE> f.write('\n') <NEW_LINE> f.write(' '.join(['%.10f' % x for x in vals_flat])) <NEW_LINE> f.write('\n') <NEW_LINE> <DEDENT> return model | Writes Graphical Model in UAI format. | 6259903c6fece00bbacccbb7 |
class TestPostSQLClass(SQLTestCase): <NEW_LINE> <INDENT> sql_dir = 'sql/' <NEW_LINE> ans_dir = 'expected/' <NEW_LINE> out_dir = 'output/' | @gucs gp_create_table_random_default_distribution=off | 6259903c76d4e153a661db78 |
class LinodeTest(CloudTest): <NEW_LINE> <INDENT> PROVIDER = 'linode' <NEW_LINE> REQUIRED_PROVIDER_CONFIG_ITEMS = ('apikey', 'password') <NEW_LINE> def test_instance(self): <NEW_LINE> <INDENT> ret_str = self.run_cloud('-p linode-test {0}'.format(self.instance_name), timeout=TIMEOUT) <NEW_LINE> self.assertInstanceExists(ret_str) <NEW_LINE> self.assertDestroyInstance() | Integration tests for the Linode cloud provider in Salt-Cloud | 6259903c26238365f5fadd60 |
class UnsatisfiableRequest(OperationFailed): <NEW_LINE> <INDENT> pass | Exception raised if Tor was unable to process our request. | 6259903c3eb6a72ae038b874 |
class NewSourceForm(Schema): <NEW_LINE> <INDENT> allow_extra_fields = True <NEW_LINE> filter_extra_fields = True <NEW_LINE> authorFirstName = UnicodeString(not_empty=True) <NEW_LINE> authorLastName = UnicodeString(not_empty=True) <NEW_LINE> title = UnicodeString(not_empty=True) <NEW_LINE> year = Regex('[0-9]{4}', not_empty=True) <NEW_LINE> fullReference = UnicodeString() <NEW_LINE> file_id = Regex('[0-9]+') | NewSourceForm is a Schema for validating the
data entered at the Add Source page. | 6259903c21bff66bcd723e73 |
class S3OrgMenuLayout(S3NavigationItem): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def layout(item): <NEW_LINE> <INDENT> name = "IFRC" <NEW_LINE> logo = None <NEW_LINE> root_org = current.auth.root_org() <NEW_LINE> if root_org: <NEW_LINE> <INDENT> db = current.db <NEW_LINE> s3db = current.s3db <NEW_LINE> language = current.session.s3.language <NEW_LINE> if language == current.deployment_settings.get_L10n_default_language(): <NEW_LINE> <INDENT> l10n = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ltable = s3db.org_organisation_name <NEW_LINE> query = (ltable.organisation_id == root_org) & (ltable.language == language) <NEW_LINE> l10n = db(query).select(ltable.name_l10n, ltable.acronym_l10n, limitby = (0, 1), cache = s3db.cache, ).first() <NEW_LINE> <DEDENT> table = s3db.org_organisation <NEW_LINE> record = db(table.id == root_org).select(table.name, table.logo, limitby = (0, 1), cache = s3db.cache, ).first() <NEW_LINE> if l10n: <NEW_LINE> <INDENT> name = l10n.name_l10n <NEW_LINE> <DEDENT> if record: <NEW_LINE> <INDENT> if not l10n: <NEW_LINE> <INDENT> name = record.name <NEW_LINE> <DEDENT> if record.logo: <NEW_LINE> <INDENT> size = (60, None) <NEW_LINE> image = s3db.pr_image_library_represent(record.logo, size=size) <NEW_LINE> url_small = URL(c="default", f="download", args=image) <NEW_LINE> alt = "%s logo" % name <NEW_LINE> logo = IMG(_src = url_small, _alt = alt, _width = 60, ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not logo: <NEW_LINE> <INDENT> logo = IMG(_src = "/%s/static/themes/RMSAmericas/img/logo_small.png" % current.request.application, _alt = current.T("Red Cross/Red Crescent"), _width = 60, ) <NEW_LINE> <DEDENT> return (name, logo) | Layout for the organisation-specific menu
- used by the custom PDF Form for REQ
- replace with s3db.org_organisation_logo()? | 6259903c94891a1f408b9ffc |
class UserSerializer(ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ( "id", "username", "email", "password", "last_login", "date_joined", "profile", ) <NEW_LINE> read_only_fields = ("id", "date_joined", "profile") <NEW_LINE> extra_kwargs = {"password": {"write_only": True}} <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> user = User.objects.create( email=validated_data["email"], username=validated_data["username"], password=make_password(validated_data["password"]), ) <NEW_LINE> user.save() <NEW_LINE> return user | A serializer for the default auth.User model | 6259903c63f4b57ef0086679 |
class SourceEditor(ElementEditor): <NEW_LINE> <INDENT> def __init__(self, parent = None, source = None, *args, **kwargs): <NEW_LINE> <INDENT> if source is None: <NEW_LINE> <INDENT> source = Source() <NEW_LINE> <DEDENT> ElementEditor.__init__(self, parent, source, *args, **kwargs) <NEW_LINE> <DEDENT> def _makeWidgets(self): <NEW_LINE> <INDENT> ElementEditor._makeWidgets(self) <NEW_LINE> entryField = Pmw.EntryField(self, label_text = 'Source Name:', labelpos = 'w') <NEW_LINE> entryField.grid(row = 0, column = 0, sticky = 'w') <NEW_LINE> self._nameEntryField = entryField <NEW_LINE> self._balloon.bind(self._nameEntryField, 'Enter the source name.') <NEW_LINE> label = tkinter.Label(self, text = 'Source Type:') <NEW_LINE> label.grid(row = 0, column = 1, sticky = 'e') <NEW_LINE> self._typeLabel = label <NEW_LINE> spectrumEditor = SpectrumEditor(self, borderwidth = 2, relief = 'ridge') <NEW_LINE> spectrumEditor.grid(row = 1, column = 0, columnspan = 2, sticky = 'ew') <NEW_LINE> self._spectrumEditor = spectrumEditor <NEW_LINE> spatialModelEditor = SpatialModelEditor(self, borderwidth = 2, relief = 'ridge') <NEW_LINE> spatialModelEditor.grid(row = 2, column = 0, columnspan = 2, sticky = 'ew') <NEW_LINE> self._spatialModelEditor = spatialModelEditor <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> name = self._nameEntryField.getvalue() <NEW_LINE> type = self._typeLabel['text'] <NEW_LINE> type = type.replace('Source Type:', '') <NEW_LINE> spectrum = self._spectrumEditor.get() <NEW_LINE> spatialModel = self._spatialModelEditor.get() <NEW_LINE> source = Source(name, type, spectrum, spatialModel) <NEW_LINE> return source <NEW_LINE> <DEDENT> def set(self, source): <NEW_LINE> <INDENT> name = source.getName() <NEW_LINE> self._nameEntryField.setvalue(name) <NEW_LINE> type = source.getType() <NEW_LINE> self._typeLabel.configure(text = 'Source Type:' + type) <NEW_LINE> spectrum = source.getSpectrum() <NEW_LINE> self._spectrumEditor.set(spectrum) <NEW_LINE> spatialModel = source.getSpatialModel() <NEW_LINE> self._spatialModelEditor.set(spatialModel) <NEW_LINE> <DEDENT> def enable(self): <NEW_LINE> <INDENT> self._nameEntryField.configure(entry_state = 'normal') <NEW_LINE> self._typeLabel.configure(state = 'normal') <NEW_LINE> self._spectrumEditor.enable() <NEW_LINE> self._spatialModelEditor.enable() <NEW_LINE> <DEDENT> def disable(self): <NEW_LINE> <INDENT> self._nameEntryField.configure(entry_state = 'disabled') <NEW_LINE> self._typeLabel.configure(state = 'disabled') <NEW_LINE> self._spectrumEditor.disable() <NEW_LINE> self._spatialModelEditor.disable() <NEW_LINE> <DEDENT> def setDS9Connector(self,connector): <NEW_LINE> <INDENT> self._ds9Connector = connector <NEW_LINE> self._spatialModelEditor.setDS9Connector(connector) | Class to edit ModelEditor Source objects.
Python implementation of the SourceEditor class which allows the
user to edit the fields of a <source> element. This compound
widget is designed to be embedded in other widgets.
Attributes:
_nameEntryField: (Pmw.EntryField) Controls editing of the Source
name.
_typeLabel: (tkinter.Label) Displays the Source type. Note that
this value cannot be changed by the user.
_spectrumEditor: (SpectrumEditor) For editing the Spectrum.
_spatialModelEditor: (SpatialModelEditor) For editing the
SpatialModel. | 6259903c8c3a8732951f7762 |
class Hand(db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> thema_id = db.Column(db.String(TITLE_SIZE), db.ForeignKey('thema.id')) <NEW_LINE> keyword = db.Column(db.String(KEYWORD_SIZE)) <NEW_LINE> words = db.Column(db.PickleType) <NEW_LINE> def __init__(self, id, thema_id, keyword, words): <NEW_LINE> <INDENT> super(Hand, self).__init__() <NEW_LINE> self.id = id <NEW_LINE> self.thema_id = thema_id <NEW_LINE> self.keyword = keyword <NEW_LINE> self.words = words | Hand contains a set of words | 6259903c23e79379d538d70a
class ProjectDelete(DeleteView): <NEW_LINE> <INDENT> model = models.Project <NEW_LINE> def delete(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> self.object.date_deleted = datetime.datetime.now() <NEW_LINE> self.object.save() <NEW_LINE> messages.add_message(request, messages.WARNING, 'Project {} Successfully Deleted'.format(self.object)) <NEW_LINE> return HttpResponseRedirect(reverse('projects_project_list')) | Deletes a project by setting the Project's date_deleted. We save projects for historical tracking. | 6259903c0a366e3fb87ddbf0 |
class NotFoundException(HttpErrorException): <NEW_LINE> <INDENT> pass | Used for HTTP Not Found (404) errors | 6259903c15baa7234946319c
class userchangerole_args(object): <NEW_LINE> <INDENT> def __init__(self, info=None,): <NEW_LINE> <INDENT> self.info = info <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('userchangerole_args') <NEW_LINE> if self.info is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('info', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- info | 6259903c10dbd63aa1c71de2 |
class Classifier : <NEW_LINE> <INDENT> def __init__(self, kernel): <NEW_LINE> <INDENT> self.lamb = 1 <NEW_LINE> if isinstance(kernel, str): <NEW_LINE> <INDENT> self.kernel = kernels.Kernel(kernel) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.kernel = kernel <NEW_LINE> <DEDENT> self.coef = 0 <NEW_LINE> self.Xtrain = None <NEW_LINE> self.solver = "cvxopt" <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> if self.Xtrain is None: <NEW_LINE> <INDENT> print("Error, the classifier has not been trained") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.kernel.predict(X,self.Xtrain,self.coef) | Abstract class from which every classifier must inherit | 6259903c711fe17d825e15a2 |
class Nesterov(object): <NEW_LINE> <INDENT> def __init__(self, lr=0.01, momentum=0.9): <NEW_LINE> <INDENT> self.lr = lr <NEW_LINE> self.momentum = momentum <NEW_LINE> self.v = None <NEW_LINE> <DEDENT> def update(self, params, grads): <NEW_LINE> <INDENT> if self.v is None: <NEW_LINE> <INDENT> self.v = {} <NEW_LINE> for key, val in params.items(): <NEW_LINE> <INDENT> self.v[key] = np.zeros_like(val) <NEW_LINE> <DEDENT> <DEDENT> for key in params.keys(): <NEW_LINE> <INDENT> self.v[key] *= self.momentum <NEW_LINE> self.v[key] -= self.lr * grads[key] <NEW_LINE> params[key] += self.momentum * self.momentum * self.v[key] <NEW_LINE> params[key] -= (1 + self.momentum) * self.lr * grads[key] | Nesterov's Accelerated Gradient (http://arxiv.org/abs/1212.0901) | 6259903cb57a9660fecd2c88 |
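The Nesterov row above applies a momentum-corrected update to every entry of a params/grads dict. Below is a minimal standalone sketch of the same update rule; it mirrors the row's formula, but the quadratic objective f(x) = x**2 and the chosen learning rate are illustrative assumptions, not part of the original code.

```python
import numpy as np

# Sketch of the Nesterov-style update from the row above,
# applied to a single parameter minimizing f(x) = x**2 (assumed objective).
lr, momentum = 0.1, 0.9
params = {"x": np.array(3.0)}
v = {"x": np.zeros_like(params["x"])}

for step in range(50):
    grads = {"x": 2 * params["x"]}  # gradient of x**2
    for key in params:
        v[key] = momentum * v[key] - lr * grads[key]
        params[key] = params[key] + momentum ** 2 * v[key] - (1 + momentum) * lr * grads[key]

print(params["x"])  # close to 0 after 50 steps
```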
class Prover: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.proved_rules = [] <NEW_LINE> self._rules_seen = set() <NEW_LINE> <DEDENT> def split_alpha_beta(self): <NEW_LINE> <INDENT> rules_alpha = [] <NEW_LINE> rules_beta = [] <NEW_LINE> for a, b in self.proved_rules: <NEW_LINE> <INDENT> if isinstance(a, And): <NEW_LINE> <INDENT> rules_beta.append((a, b)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rules_alpha.append((a, b)) <NEW_LINE> <DEDENT> <DEDENT> return rules_alpha, rules_beta <NEW_LINE> <DEDENT> @property <NEW_LINE> def rules_alpha(self): <NEW_LINE> <INDENT> return self.split_alpha_beta()[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def rules_beta(self): <NEW_LINE> <INDENT> return self.split_alpha_beta()[1] <NEW_LINE> <DEDENT> def process_rule(self, a, b): <NEW_LINE> <INDENT> if (not a) or isinstance(b, bool): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if isinstance(a, bool): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if (a, b) in self._rules_seen: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._rules_seen.add((a, b)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._process_rule(a, b) <NEW_LINE> <DEDENT> except TautologyDetected: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def _process_rule(self, a, b): <NEW_LINE> <INDENT> if isinstance(b, And): <NEW_LINE> <INDENT> for barg in b.args: <NEW_LINE> <INDENT> self.process_rule(a, barg) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(b, Or): <NEW_LINE> <INDENT> if not isinstance(a, Logic): <NEW_LINE> <INDENT> if a in b.args: <NEW_LINE> <INDENT> raise TautologyDetected(a, b, 'a -> a|c|...') <NEW_LINE> <DEDENT> <DEDENT> self.process_rule(And(*[Not(barg) for barg in b.args]), Not(a)) <NEW_LINE> for bidx in range(len(b.args)): <NEW_LINE> <INDENT> barg = b.args[bidx] <NEW_LINE> brest = b.args[:bidx] + b.args[bidx + 1:] <NEW_LINE> self.process_rule(And(a, Not(barg)), Or(*brest)) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(a, And): <NEW_LINE> <INDENT> if b in a.args: <NEW_LINE> <INDENT> raise TautologyDetected(a, b, 'a & b -> a') <NEW_LINE> <DEDENT> self.proved_rules.append((a, b)) <NEW_LINE> <DEDENT> elif isinstance(a, Or): <NEW_LINE> <INDENT> if b in a.args: <NEW_LINE> <INDENT> raise TautologyDetected(a, b, 'a | b -> a') <NEW_LINE> <DEDENT> for aarg in a.args: <NEW_LINE> <INDENT> self.process_rule(aarg, b) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.proved_rules.append((a, b)) <NEW_LINE> self.proved_rules.append((Not(b), Not(a))) | ai - prover of logic rules
Given a set of initial rules, Prover tries to prove all possible rules
that follow from the given premises.
As a result, proved_rules are always in one of two forms: alpha or
beta:
Alpha rules
-----------
These are rules of the form::
a -> b & c & d & ...
Beta rules
----------
These are rules of the form::
&(a,b,...) -> c & d & ...
i.e. beta rules are join conditions that say that something follows when
*several* facts are true at the same time. | 6259903c0a366e3fb87ddbf2 |
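The Prover row classifies a proved rule as beta when its premise is an `And`, otherwise as alpha. A small sketch of that split using SymPy's public boolalg classes (the row itself works with SymPy-internal Logic objects, and the concrete rules here are made up for illustration):

```python
from sympy.abc import a, b, c, d
from sympy.logic.boolalg import And

# Hypothetical (premise, conclusion) pairs.
rules = [
    (a, b),           # alpha rule:  a -> b
    (And(a, b), c),   # beta rule:   a & b -> c
    (c, d),           # alpha rule:  c -> d
]

# Same split as Prover.split_alpha_beta(): beta rules have an And premise.
rules_alpha = [(p, q) for p, q in rules if not isinstance(p, And)]
rules_beta = [(p, q) for p, q in rules if isinstance(p, And)]

print(rules_alpha)  # [(a, b), (c, d)]
print(rules_beta)   # [(a & b, c)]
```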
class NoOpFixture(EnvLoadableFixture): <NEW_LINE> <INDENT> def attach_storage_medium(self, ds): <NEW_LINE> <INDENT> ds.meta.storage_medium = NoOpMedium(None, ds) <NEW_LINE> <DEDENT> def rollback(self): pass <NEW_LINE> def commit(self): pass | a fixture that pretends to load stuff but doesn't really. | 6259903cd53ae8145f919673 |
class IType: <NEW_LINE> <INDENT> IMM_START = 20 <NEW_LINE> IMM_END = 32 <NEW_LINE> FUNCT_START = 12 <NEW_LINE> FUNCT_END = 15 <NEW_LINE> def __init__(self, instruction): <NEW_LINE> <INDENT> self.instr = instruction <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def encode(cls, imm_val, rs1_val, funct_val, rd_val, opcode_val): <NEW_LINE> <INDENT> return ((imm_val << cls.IMM_START) | (rs1_val << RS1_START) | (funct_val << cls.FUNCT_START) | (rd_val << RD_START) | (opcode_val << OPCODE_START)) <NEW_LINE> <DEDENT> def immediate(self): <NEW_LINE> <INDENT> return sext(self.instr[self.IMM_START:self.IMM_END]) <NEW_LINE> <DEDENT> def funct(self): <NEW_LINE> <INDENT> return self.instr[self.FUNCT_START:self.FUNCT_END] <NEW_LINE> <DEDENT> def shift_amount(self): <NEW_LINE> <INDENT> return self.instr[self.IMM_START:self.IMM_START + 5] <NEW_LINE> <DEDENT> def right_shift_type(self): <NEW_LINE> <INDENT> return self.instr[self.IMM_START + 5:self.IMM_END] | I-type instruction format | 6259903c07d97122c4217eab |
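The IType row packs an immediate, source register, funct3 and destination register around an opcode to form a 32-bit RISC-V I-type word. The sketch below reproduces that encode logic standalone; RS1_START, RD_START and OPCODE_START are not defined in the row, so the standard RISC-V bit positions are assumed here.

```python
# Standard RISC-V I-type field positions (assumed; the row only defines IMM/FUNCT starts).
IMM_START, RS1_START, FUNCT_START, RD_START, OPCODE_START = 20, 15, 12, 7, 0

def encode_itype(imm, rs1, funct3, rd, opcode):
    """Pack the fields into a 32-bit I-type instruction word."""
    return ((imm << IMM_START) | (rs1 << RS1_START) | (funct3 << FUNCT_START)
            | (rd << RD_START) | (opcode << OPCODE_START))

# addi x1, x2, 5  ->  opcode 0x13, funct3 0, rd = x1, rs1 = x2, imm = 5
word = encode_itype(imm=5, rs1=2, funct3=0, rd=1, opcode=0x13)
print(hex(word))  # 0x510093
```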
@final <NEW_LINE> class NegatedConditionsViolation(ASTViolation): <NEW_LINE> <INDENT> error_template = 'Found negated condition' <NEW_LINE> code = 504 <NEW_LINE> previous_codes = {463} | Forbid negated conditions together with ``else`` clause.
Reasoning:
It easier to read and name regular conditions. Not negated ones.
Solution:
Move actions from the negated ``if`` condition to the ``else``
condition.
Example::
# Correct:
if some == 1:
...
else:
...
if not some:
...
if not some:
...
elif other:
...
# Wrong:
if not some:
...
else:
...
.. versionadded:: 0.8.0
.. versionchanged:: 0.11.0 | 6259903c91af0d3eaad3b042 |
class TargetAddress(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "Ip": (str, True), "Port": (str, False), } | `TargetAddress <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-route53resolver-resolverrule-targetaddress.html>`__ | 6259903c76d4e153a661db7a |
class Node: <NEW_LINE> <INDENT> def __init__(self,x,y,father,g_value,h_value,f_value): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.father = father <NEW_LINE> self.g_value = g_value <NEW_LINE> self.h_value = h_value <NEW_LINE> self.f_value = f_value <NEW_LINE> <DEDENT> def equal(self,other): <NEW_LINE> <INDENT> if(self.x == other.x) and (self.y == other.y): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Each position in matrix-map is a node | 6259903cd99f1b3c44d068b4 |
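The Node row stores a grid position, a parent link and the g/h/f values used by A*-style path finding over a matrix map. A tiny sketch of how such nodes are typically chained with f = g + h; the Manhattan-distance heuristic is an assumption for illustration, not something the row specifies.

```python
# Minimal stand-in for the Node class in the row above.
class Node:
    def __init__(self, x, y, father, g_value, h_value, f_value):
        self.x, self.y = x, y
        self.father = father
        self.g_value, self.h_value, self.f_value = g_value, h_value, f_value

def manhattan(x, y, gx, gy):
    # Assumed heuristic: grid distance to the goal cell.
    return abs(x - gx) + abs(y - gy)

goal = (4, 4)
h0 = manhattan(0, 0, *goal)
start = Node(0, 0, None, 0, h0, h0)

# Expanding the neighbour one step to the right costs g + 1.
g = start.g_value + 1
h = manhattan(1, 0, *goal)
child = Node(1, 0, start, g, h, g + h)
print(child.f_value)  # 8
```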
class Credential(BASE, L2NetworkBase): <NEW_LINE> <INDENT> __tablename__ = 'credentials' <NEW_LINE> credential_id = Column(String(255)) <NEW_LINE> tenant_id = Column(String(255), primary_key=True) <NEW_LINE> credential_name = Column(String(255), primary_key=True) <NEW_LINE> user_name = Column(String(255)) <NEW_LINE> password = Column(String(255)) <NEW_LINE> def __init__(self, tenant_id, credential_name, user_name, password): <NEW_LINE> <INDENT> self.credential_id = uuidutils.generate_uuid() <NEW_LINE> self.tenant_id = tenant_id <NEW_LINE> self.credential_name = credential_name <NEW_LINE> self.user_name = user_name <NEW_LINE> self.password = password <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Credentials(%s,%s,%s,%s,%s)>" % (self.credential_id, self.tenant_id, self.credential_name, self.user_name, self.password) | Represents credentials for a tenant | 6259903ccad5886f8bdc5983 |
class InvalidPageException(Exception): <NEW_LINE> <INDENT> pass | If page is not found | 6259903c596a897236128eae |
@gin.configurable <NEW_LINE> class ValueNetwork(network.Network): <NEW_LINE> <INDENT> def __init__(self, input_tensor_spec, preprocessing_layers=None, preprocessing_combiner=None, conv_layer_params=None, fc_layer_params=(75, 40), dropout_layer_params=None, activation_fn=tf.keras.activations.relu, kernel_initializer=None, batch_squash=True, dtype=tf.float32, name='ValueNetwork'): <NEW_LINE> <INDENT> super(ValueNetwork, self).__init__( input_tensor_spec=input_tensor_spec, state_spec=(), name=name) <NEW_LINE> if not kernel_initializer: <NEW_LINE> <INDENT> kernel_initializer = tf.compat.v1.keras.initializers.glorot_uniform() <NEW_LINE> <DEDENT> self._encoder = encoding_network.EncodingNetwork( input_tensor_spec, preprocessing_layers=preprocessing_layers, preprocessing_combiner=preprocessing_combiner, conv_layer_params=conv_layer_params, fc_layer_params=fc_layer_params, dropout_layer_params=dropout_layer_params, activation_fn=activation_fn, kernel_initializer=kernel_initializer, batch_squash=batch_squash, dtype=dtype) <NEW_LINE> self._postprocessing_layers = tf.keras.layers.Dense( 1, activation=None, kernel_initializer=tf.random_uniform_initializer( minval=-0.03, maxval=0.03)) <NEW_LINE> <DEDENT> def call(self, observation, step_type=None, network_state=(), training=False): <NEW_LINE> <INDENT> state, network_state = self._encoder( observation, step_type=step_type, network_state=network_state, training=training) <NEW_LINE> value = self._postprocessing_layers(state, training=training) <NEW_LINE> return tf.squeeze(value, -1), network_state | Feed Forward value network. Reduces to 1 value output per batch item. | 6259903c287bf620b6272dfa |
class SkipExtensionException(Exception): <NEW_LINE> <INDENT> pass | Exception to signal that an extension should be skipped.
It should be raised in the constructor of an extension. | 6259903c82261d6c527307cb |
class Brinkman3DGaussianStokesletsAndDipolesSphericalBCs(Brinkman3DGaussianStokesletsAndDipoles): <NEW_LINE> <INDENT> def __init__(self,eps,mu=1.0,alph=100*np.sqrt(1j),sphererad=0): <NEW_LINE> <INDENT> Brinkman3DGaussianStokesletsAndDipoles.__init__(self, eps, mu, alph) <NEW_LINE> self.dim = 3 <NEW_LINE> if sphererad <= 0: <NEW_LINE> <INDENT> raise ValueError('Need a positive value for the radius of the sphere.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cst1 = -self._H2func(np.array([sphererad])) / self._D2func(np.array([sphererad])) <NEW_LINE> self.BCcst = cst1[0] <NEW_LINE> cst2 = self._H1func(np.array([sphererad])) + self.BCcst*self._D1func(np.array([sphererad])) <NEW_LINE> self.fcst = self.mu/cst2[0] | Summation of 3D regularized Brinkmanlets and dipoles with the Gaussian blob.
BCs are steady oscillations of a sphere of radius sphererad.
Assigned constants:
self.BCcst is the relative dipole strength for spherical boundary conditions.
self.fcst is the coefficient that multiplies the boundary condition velocity
to get the Stokeslet force: f = self.fcst*v, where v is a three element vector
representing the velocity at the sphere surface. | 6259903cd53ae8145f919675 |
class FDWConvBlock(HybridBlock): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, kernel_size, strides, padding, dilation=1, use_bias=False, use_bn=True, bn_epsilon=1e-5, activation=(lambda: nn.Activation("relu")), **kwargs): <NEW_LINE> <INDENT> super(FDWConvBlock, self).__init__(**kwargs) <NEW_LINE> assert use_bn <NEW_LINE> self.activate = (activation is not None) <NEW_LINE> with self.name_scope(): <NEW_LINE> <INDENT> self.v_conv = dwconv_block( in_channels=in_channels, out_channels=out_channels, kernel_size=(kernel_size, 1), strides=strides, padding=(padding, 0), dilation=dilation, use_bias=use_bias, use_bn=use_bn, bn_epsilon=bn_epsilon, activation=None) <NEW_LINE> self.h_conv = dwconv_block( in_channels=in_channels, out_channels=out_channels, kernel_size=(1, kernel_size), strides=strides, padding=(0, padding), dilation=dilation, use_bias=use_bias, use_bn=use_bn, bn_epsilon=bn_epsilon, activation=None) <NEW_LINE> if self.activate: <NEW_LINE> <INDENT> self.act = get_activation_layer(activation) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def hybrid_forward(self, F, x): <NEW_LINE> <INDENT> x = self.v_conv(x) + self.h_conv(x) <NEW_LINE> if self.activate: <NEW_LINE> <INDENT> x = self.act(x) <NEW_LINE> <DEDENT> return x | Factorized depthwise separable convolution block with BatchNorms and activations at each convolution layers.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels.
kernel_size : int
Convolution window size.
strides : int or tuple/list of 2 int
Strides of the convolution.
padding : int
Padding value for convolution layer.
dilation : int or tuple/list of 2 int, default 1
Dilation value for convolution layer.
use_bias : bool, default False
Whether the layer uses a bias vector.
use_bn : bool, default True
Whether to use BatchNorm layer.
bn_epsilon : float, default 1e-5
Small float added to variance in Batch norm.
activation : function or str or None, default nn.ReLU(inplace=True)
Activation function after each convolution block. | 6259903c91af0d3eaad3b044
class ParseElementEnhance(ParserElement): <NEW_LINE> <INDENT> def __init__( self, expr, savelist=False ): <NEW_LINE> <INDENT> super(ParseElementEnhance,self).__init__(savelist) <NEW_LINE> if isinstance( expr, basestring ): <NEW_LINE> <INDENT> if issubclass(ParserElement._literalStringClass, Token): <NEW_LINE> <INDENT> expr = ParserElement._literalStringClass(expr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> expr = ParserElement._literalStringClass(Literal(expr)) <NEW_LINE> <DEDENT> <DEDENT> self.expr = expr <NEW_LINE> self.strRepr = None <NEW_LINE> if expr is not None: <NEW_LINE> <INDENT> self.mayIndexError = expr.mayIndexError <NEW_LINE> self.mayReturnEmpty = expr.mayReturnEmpty <NEW_LINE> self.setWhitespaceChars( expr.whiteChars ) <NEW_LINE> self.skipWhitespace = expr.skipWhitespace <NEW_LINE> self.saveAsList = expr.saveAsList <NEW_LINE> self.callPreparse = expr.callPreparse <NEW_LINE> self.ignoreExprs.extend(expr.ignoreExprs) <NEW_LINE> <DEDENT> <DEDENT> def parseImpl( self, instring, loc, doActions=True ): <NEW_LINE> <INDENT> if self.expr is not None: <NEW_LINE> <INDENT> return self.expr._parse( instring, loc, doActions, callPreParse=False ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ParseException("",loc,self.errmsg,self) <NEW_LINE> <DEDENT> <DEDENT> def leaveWhitespace( self ): <NEW_LINE> <INDENT> self.skipWhitespace = False <NEW_LINE> self.expr = self.expr.copy() <NEW_LINE> if self.expr is not None: <NEW_LINE> <INDENT> self.expr.leaveWhitespace() <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def ignore( self, other ): <NEW_LINE> <INDENT> if isinstance( other, Suppress ): <NEW_LINE> <INDENT> if other not in self.ignoreExprs: <NEW_LINE> <INDENT> super( ParseElementEnhance, self).ignore( other ) <NEW_LINE> if self.expr is not None: <NEW_LINE> <INDENT> self.expr.ignore( self.ignoreExprs[-1] ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> super( ParseElementEnhance, self).ignore( other ) <NEW_LINE> if self.expr is not None: <NEW_LINE> <INDENT> self.expr.ignore( self.ignoreExprs[-1] ) <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def streamline( self ): <NEW_LINE> <INDENT> super(ParseElementEnhance,self).streamline() <NEW_LINE> if self.expr is not None: <NEW_LINE> <INDENT> self.expr.streamline() <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def checkRecursion( self, parseElementList ): <NEW_LINE> <INDENT> if self in parseElementList: <NEW_LINE> <INDENT> raise RecursiveGrammarException( parseElementList+[self] ) <NEW_LINE> <DEDENT> subRecCheckList = parseElementList[:] + [ self ] <NEW_LINE> if self.expr is not None: <NEW_LINE> <INDENT> self.expr.checkRecursion( subRecCheckList ) <NEW_LINE> <DEDENT> <DEDENT> def validate( self, validateTrace=[] ): <NEW_LINE> <INDENT> tmp = validateTrace[:]+[self] <NEW_LINE> if self.expr is not None: <NEW_LINE> <INDENT> self.expr.validate(tmp) <NEW_LINE> <DEDENT> self.checkRecursion( [] ) <NEW_LINE> <DEDENT> def __str__( self ): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return super(ParseElementEnhance,self).__str__() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if self.strRepr is None and self.expr is not None: <NEW_LINE> <INDENT> self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) <NEW_LINE> <DEDENT> return self.strRepr | Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens. | 6259903c30c21e258be99a1b |
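ParseElementEnhance is pyparsing's base class for wrapper expressions such as Optional, Suppress and Group, which delegate matching to a contained expression. A short usage sketch of two of those subclasses; the grammar itself is invented for illustration.

```python
from pyparsing import Word, nums, Suppress, Optional

# Suppress and Optional are ParseElementEnhance subclasses wrapping an inner expression.
integer = Word(nums)
parenthesized = Suppress("(") + integer + Suppress(")")  # parentheses are dropped from results
expr = Optional(parenthesized, default="none")

print(expr.parseString("(42)"))  # ['42']
print(expr.parseString(""))      # ['none']
```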
class JNTTCertification(JNTTServer): <NEW_LINE> <INDENT> ip = '127.0.0.1' <NEW_LINE> hadds = ['1111/0000'] <NEW_LINE> client_hadd = "9999/0000" <NEW_LINE> conf = {'broker_ip': '127.0.0.1', 'broker_port': '1883'} <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> JNTTServer.setUp(self) <NEW_LINE> self.startClient(self.conf) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.stopClient() <NEW_LINE> JNTTServer.tearDown(self) | Certification base test
| 6259903cd164cc6175822185 |
class Pruner(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, scorer: torch.nn.Module) -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._scorer = scorer <NEW_LINE> <DEDENT> @overrides <NEW_LINE> def forward(self, embeddings: torch.FloatTensor, mask: torch.LongTensor, num_items_to_keep: int) -> Tuple[torch.FloatTensor, torch.LongTensor, torch.LongTensor, torch.FloatTensor]: <NEW_LINE> <INDENT> mask = mask.unsqueeze(-1) <NEW_LINE> num_items = embeddings.size(1) <NEW_LINE> scores = self._scorer(embeddings) <NEW_LINE> if scores.size(-1) != 1 or scores.dim() != 3: <NEW_LINE> <INDENT> raise ValueError(f"The scorer passed to Pruner must produce a tensor of shape" f"(batch_size, num_items, 1), but found shape {scores.size()}") <NEW_LINE> <DEDENT> scores = util.replace_masked_values(scores, mask, -1e20) <NEW_LINE> _, top_indices = scores.topk(num_items_to_keep, 1) <NEW_LINE> top_indices, _ = torch.sort(top_indices, 1) <NEW_LINE> top_indices = top_indices.squeeze(-1) <NEW_LINE> flat_top_indices = util.flatten_and_batch_shift_indices(top_indices, num_items) <NEW_LINE> top_embeddings = util.batched_index_select(embeddings, top_indices, flat_top_indices) <NEW_LINE> top_mask = util.batched_index_select(mask, top_indices, flat_top_indices) <NEW_LINE> top_scores = util.batched_index_select(scores, top_indices, flat_top_indices) <NEW_LINE> return top_embeddings, top_mask.squeeze(-1), top_indices, top_scores | This module scores and prunes items in a list using a parameterised scoring function and a
threshold.
Parameters
----------
scorer : ``torch.nn.Module``, required.
A module which, given a tensor of shape (batch_size, num_items, embedding_size),
produces a tensor of shape (batch_size, num_items, 1), representing a scalar score
per item in the tensor. | 6259903cc432627299fa4208 |
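The Pruner row keeps the top-k items per batch entry by masking out padded positions, taking topk, and re-sorting the kept indices so they stay in their original order. A PyTorch sketch of just that index-selection step, on a made-up score tensor:

```python
import torch

# Scores for 4 candidate items in one batch entry; the last item is padding.
scores = torch.tensor([[0.2, 0.9, 0.1, 0.7]])
mask = torch.tensor([[1, 1, 1, 0]])

# Same trick as the row: push masked positions to -1e20 before topk.
masked_scores = scores.masked_fill(mask == 0, -1e20)
top_scores, top_indices = masked_scores.topk(2, dim=1)
top_indices, _ = torch.sort(top_indices, dim=1)  # restore document order

print(top_indices)                    # tensor([[1, 3]])
print(scores.gather(1, top_indices))  # tensor([[0.9000, 0.7000]])
```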
class Header: <NEW_LINE> <INDENT> def __init__(self, fields): <NEW_LINE> <INDENT> self.set_fields(fields) <NEW_LINE> <DEDENT> def set_fields(self, fields): <NEW_LINE> <INDENT> self.fields = fields <NEW_LINE> self.field_to_column = dict(zip(fields, count())) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if isinstance(key, int): <NEW_LINE> <INDENT> return self.fields[key] <NEW_LINE> <DEDENT> elif isinstance(key, str): <NEW_LINE> <INDENT> if key in self.field_to_column: <NEW_LINE> <INDENT> return key <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("field indices must be integers or strings") <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "#" + "\t".join(self.fields) | Header of a table -- contains column names and a mapping from them
to column indexes | 6259903cd99f1b3c44d068b6 |
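The Header row builds its field-to-column lookup with `dict(zip(fields, count()))`. A quick sketch of that mapping on a hypothetical set of column names:

```python
from itertools import count

# Hypothetical table columns.
fields = ["chrom", "start", "end", "strand"]

# Same construction as Header.set_fields(): name -> column index.
field_to_column = dict(zip(fields, count()))

print(field_to_column["start"])            # 1
print(fields[field_to_column["strand"]])   # strand
print("#" + "\t".join(fields))             # header line, as in Header.__str__
```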
class WorkerServiceServicer(object): <NEW_LINE> <INDENT> def GetStatus(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def CreateWorkerSession(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def DeleteWorkerSession(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def RegisterGraph(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def DeregisterGraph(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def RunGraph(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def CleanupGraph(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def CleanupAll(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def RecvTensor(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def Logging(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def Tracing(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') | //////////////////////////////////////////////////////////////////////////////
WorkerService defines a TensorFlow service that executes dataflow
graphs on a set of local devices, on behalf of a MasterService.
A worker service keeps track of multiple "registered graphs". Each
registered graph is a subgraph of a client's graph, corresponding to
only the nodes that should execute on this worker (and any
additional nodes necessary for inter-process communication using
the `RecvTensor` method).
////////////////////////////////////////////////////////////////////////////// | 6259903c0fa83653e46f60ea |
class IDECommunicatorV1(_IDECommunicatorBase): <NEW_LINE> <INDENT> def send_script(self, script): <NEW_LINE> <INDENT> self._write(script.encode('utf-8') + b'\n') | IDE Communicator (protocol version 1). | 6259903c1d351010ab8f4d2c |
class TestJSONJobsDatabase(CrontabberTestCaseBase): <NEW_LINE> <INDENT> def test_loading_existing_file(self): <NEW_LINE> <INDENT> db = crontabber.JSONJobDatabase() <NEW_LINE> file1 = os.path.join(self.tempdir, 'file1.json') <NEW_LINE> stuff = { 'foo': 1, 'more': { 'bar': u'Bar' } } <NEW_LINE> json.dump(stuff, open(file1, 'w')) <NEW_LINE> db.load(file1) <NEW_LINE> self.assertEqual(db['foo'], 1) <NEW_LINE> self.assertEqual(db['more']['bar'], u"Bar") <NEW_LINE> <DEDENT> def test_saving_new_file(self): <NEW_LINE> <INDENT> db = crontabber.JSONJobDatabase() <NEW_LINE> file1 = os.path.join(self.tempdir, 'file1.json') <NEW_LINE> db.load(file1) <NEW_LINE> self.assertEqual(db, {}) <NEW_LINE> db['foo'] = 1 <NEW_LINE> db['more'] = {'bar': u'Bar'} <NEW_LINE> db.save(file1) <NEW_LINE> structure = json.load(open(file1)) <NEW_LINE> self.assertEqual( structure, {u'foo': 1, u'more': {u'bar': u'Bar'}} ) <NEW_LINE> self.assertEqual(db['foo'], 1) <NEW_LINE> self.assertEqual(db['more']['bar'], u"Bar") <NEW_LINE> <DEDENT> def test_saving_dates(self): <NEW_LINE> <INDENT> db = crontabber.JSONJobDatabase() <NEW_LINE> file1 = os.path.join(self.tempdir, 'file1.json') <NEW_LINE> db.load(file1) <NEW_LINE> self.assertEqual(db, {}) <NEW_LINE> now = datetime.datetime.now() <NEW_LINE> today = datetime.date.today() <NEW_LINE> db['here'] = now <NEW_LINE> db['there'] = {'now': today} <NEW_LINE> db.save(file1) <NEW_LINE> structure = json.load(open(file1)) <NEW_LINE> self.assertTrue(now.strftime('%H:%M') in structure['here']) <NEW_LINE> self.assertTrue(now.strftime('%Y') in structure['here']) <NEW_LINE> self.assertTrue(now.strftime('%Y') in structure['there']['now']) <NEW_LINE> self.assertTrue(now.strftime('%m') in structure['there']['now']) <NEW_LINE> self.assertTrue(now.strftime('%d') in structure['there']['now']) <NEW_LINE> db2 = crontabber.JSONJobDatabase() <NEW_LINE> db2.load(file1) <NEW_LINE> self.assertTrue(isinstance(db2['here'], datetime.datetime)) <NEW_LINE> self.assertTrue(isinstance(db2['there']['now'], datetime.date)) <NEW_LINE> <DEDENT> def test_loading_broken_json(self): <NEW_LINE> <INDENT> file1 = os.path.join(self.tempdir, 'file1.json') <NEW_LINE> with open(file1, 'w') as f: <NEW_LINE> <INDENT> f.write('{Junk\n') <NEW_LINE> <DEDENT> db = crontabber.JSONJobDatabase() <NEW_LINE> self.assertRaises(crontabber.BrokenJSONError, db.load, file1) | This has nothing to do with Socorro actually. It's just tests for the
underlying JSON database. | 6259903c10dbd63aa1c71de6 |
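The tests above exercise a JSON-backed job database that round-trips datetime and date values through strings. A generic standard-library sketch of that pattern; the ISO 8601 encoder/decoder used here is an assumption, not crontabber's exact on-disk format.

```python
import datetime
import json

def encode(obj):
    # Serialize dates/datetimes as ISO 8601 strings (assumed format).
    if isinstance(obj, (datetime.datetime, datetime.date)):
        return obj.isoformat()
    raise TypeError(f"not serializable: {obj!r}")

state = {"here": datetime.datetime(2024, 1, 2, 3, 4, 5),
         "there": {"now": datetime.date(2024, 1, 2)}}

text = json.dumps(state, default=encode)
loaded = json.loads(text)

# Strings come back; parse them to recover the original types.
restored = datetime.datetime.fromisoformat(loaded["here"])
print(text)
print(restored == state["here"])  # True
```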
class BiosVfDRAMClockThrottling(ManagedObject): <NEW_LINE> <INDENT> consts = BiosVfDRAMClockThrottlingConsts() <NEW_LINE> naming_props = set([]) <NEW_LINE> mo_meta = MoMeta("BiosVfDRAMClockThrottling", "biosVfDRAMClockThrottling", "DRAM-Clock-Throttling", VersionMeta.Version222c, "InputOutput", 0x3f, [], ["admin", "ls-compute", "ls-config", "ls-server", "ls-server-policy", "pn-policy"], ['biosSettings', 'biosVProfile'], [], ["Get", "Set"]) <NEW_LINE> prop_meta = { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version222c, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version222c, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []), "prop_acl": MoPropertyMeta("prop_acl", "propAcl", "ulong", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version222c, MoPropertyMeta.READ_ONLY, 0x8, 0, 256, None, [], []), "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version222c, MoPropertyMeta.READ_WRITE, 0x10, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "supported_by_default": MoPropertyMeta("supported_by_default", "supportedByDefault", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, None, ["no", "yes"], []), "vp_dram_clock_throttling": MoPropertyMeta("vp_dram_clock_throttling", "vpDRAMClockThrottling", "string", VersionMeta.Version222c, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["auto", "balanced", "energy-efficient", "performance", "platform-default", "platform-recommended"], []), } <NEW_LINE> prop_map = { "childAction": "child_action", "dn": "dn", "propAcl": "prop_acl", "rn": "rn", "sacl": "sacl", "status": "status", "supportedByDefault": "supported_by_default", "vpDRAMClockThrottling": "vp_dram_clock_throttling", } <NEW_LINE> def __init__(self, parent_mo_or_dn, **kwargs): <NEW_LINE> <INDENT> self._dirty_mask = 0 <NEW_LINE> self.child_action = None <NEW_LINE> self.prop_acl = None <NEW_LINE> self.sacl = None <NEW_LINE> self.status = None <NEW_LINE> self.supported_by_default = None <NEW_LINE> self.vp_dram_clock_throttling = None <NEW_LINE> ManagedObject.__init__(self, "BiosVfDRAMClockThrottling", parent_mo_or_dn, **kwargs) | This is BiosVfDRAMClockThrottling class. | 6259903c8c3a8732951f7768 |
class CitiesLightListModelView(ListModelView): <NEW_LINE> <INDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> limit = request.GET.get('limit', None) <NEW_LINE> queryset = super(CitiesLightListModelView, self).get( request, *args, **kwargs) <NEW_LINE> if limit: <NEW_LINE> <INDENT> return queryset[:limit] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return queryset <NEW_LINE> <DEDENT> <DEDENT> def get_query_kwargs(self, request, *args, **kwargs): <NEW_LINE> <INDENT> kwargs = super(ListModelView, self).get_query_kwargs(request, *args, **kwargs) <NEW_LINE> if 'q' in request.GET.keys(): <NEW_LINE> <INDENT> kwargs['name_ascii__icontains'] = request.GET['q'] <NEW_LINE> <DEDENT> return kwargs | ListModelView that supports a limit GET request argument. | 6259903c287bf620b6272dfc |
class ReadOnlySegment(Segment): <NEW_LINE> <INDENT> def __init__(self, bytes, segment_id, block_size=4096, max_block_count=512): <NEW_LINE> <INDENT> super().__init__(segment_id, block_size, max_block_count) <NEW_LINE> memview = memoryview(bytes) <NEW_LINE> self._block_bytes = memview[block_size:] <NEW_LINE> summary_bytes = memview[:block_size] <NEW_LINE> self._inode_block_numbers = loads(summary_bytes) | Since this class is only for reading from a segment we can store the data in
a memoryview, which allows us to take slices of the data without copying. | 6259903c23e79379d538d710 |
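The key point in the ReadOnlySegment row is that slicing a memoryview shares the underlying buffer instead of copying it. A tiny demonstration on made-up segment bytes:

```python
data = bytes(range(16))

view = memoryview(data)
summary = view[:4]   # first "block": no bytes are copied
blocks = view[4:]    # remaining bytes, still backed by the same buffer

print(bytes(summary))           # b'\x00\x01\x02\x03'
print(blocks[0], len(blocks))   # 4 12
print(summary.obj is data)      # True: the slice references the original object
```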
class LoadTrajectoryError(TrajectoryError): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> self.message = f"File: '{self.path}' is not a valid trajectory" <NEW_LINE> super().__init__(self.message) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.message | Error while loading a trajectory | 6259903c8a349b6b43687456 |
class Lighting(GameSprite): <NEW_LINE> <INDENT> def __init__(self, position, size, *containers): <NEW_LINE> <INDENT> GameSprite.__init__(self, *containers) <NEW_LINE> self.rect = pygame.Rect(position, size) <NEW_LINE> self.x, self.y = position <NEW_LINE> self.rect.midbottom = position <NEW_LINE> self._image = None <NEW_LINE> self._timeCollected = 0 <NEW_LINE> self._nextLightChange = randomTime() <NEW_LINE> self._phase = 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def image(self): <NEW_LINE> <INDENT> if self._image: <NEW_LINE> <INDENT> return self._image <NEW_LINE> <DEDENT> phase = self._phase <NEW_LINE> if phase==1: <NEW_LINE> <INDENT> self._image = utils.load_image('lighting1.png', directory='miscellaneous') <NEW_LINE> return self._image <NEW_LINE> <DEDENT> if phase==2: <NEW_LINE> <INDENT> self._image = utils.load_image('lighting2.png', directory='miscellaneous') <NEW_LINE> return self._image <NEW_LINE> <DEDENT> <DEDENT> def update(self, time_passed): <NEW_LINE> <INDENT> GameSprite.update(self, time_passed) <NEW_LINE> self._timeCollected+= time_passed <NEW_LINE> if self._timeCollected>=self._nextLightChange: <NEW_LINE> <INDENT> self._timeCollected = 0 <NEW_LINE> self._nextLightChange = randomTime() <NEW_LINE> self._phase+=1 <NEW_LINE> self._image = None <NEW_LINE> if self._phase==3: <NEW_LINE> <INDENT> self.kill() | Animation for a lightning bolt that strikes the ground
| 6259903cc432627299fa4209 |
class KeyboardCapture(threading.Thread): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._suppressed_keys = set() <NEW_LINE> self.key_down = lambda key: None <NEW_LINE> self.key_up = lambda key: None <NEW_LINE> self._proc = KeyboardCaptureProcess() <NEW_LINE> self._finished = threading.Event() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self._proc.start() <NEW_LINE> self._proc.suppress_keyboard(self._suppressed_keys) <NEW_LINE> super().start() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> error, key, pressed = self._proc.get() <NEW_LINE> if error is not None: <NEW_LINE> <INDENT> log.error(*error) <NEW_LINE> <DEDENT> if self._finished.is_set(): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if key is not None: <NEW_LINE> <INDENT> (self.key_down if pressed else self.key_up)(key) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def cancel(self): <NEW_LINE> <INDENT> self._finished.set() <NEW_LINE> self._proc.stop() <NEW_LINE> if self.is_alive(): <NEW_LINE> <INDENT> self.join() <NEW_LINE> <DEDENT> <DEDENT> def suppress_keyboard(self, suppressed_keys=()): <NEW_LINE> <INDENT> self._suppressed_keys = set(suppressed_keys) <NEW_LINE> self._proc.suppress_keyboard(self._suppressed_keys) | Listen to all keyboard events. | 6259903c8a43f66fc4bf33a0 |
class DataFrame(wtypes.Base): <NEW_LINE> <INDENT> begin = datetime.datetime <NEW_LINE> end = datetime.datetime <NEW_LINE> tenant_id = wtypes.text <NEW_LINE> resources = [RatedResource] <NEW_LINE> def to_json(self): <NEW_LINE> <INDENT> return {'begin': self.begin, 'end': self.end, 'tenant_id': self.tenant_id, 'resources': self.resources} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def sample(cls): <NEW_LINE> <INDENT> res_sample = RatedResource.sample() <NEW_LINE> sample = cls(tenant_id='69d12143688f413cbf5c3cfe03ed0a12', begin=datetime.datetime(2015, 4, 22, 7), end=datetime.datetime(2015, 4, 22, 8), resources=[res_sample]) <NEW_LINE> return sample | Type describing a stored data frame. | 6259903c26238365f5fadd68 |
class CThostFtdcQryTradingAccountField: <NEW_LINE> <INDENT> def __init__(self,**fields): <NEW_LINE> <INDENT> """Broker company code""" <NEW_LINE> self.BrokerID = None <NEW_LINE> """Investor code""" <NEW_LINE> self.InvestorID = None <NEW_LINE> """Currency code""" <NEW_LINE> self.CurrencyID = None <NEW_LINE> self.__dict__.update(fields) <NEW_LINE> <DEDENT> def toDict(self): <NEW_LINE> <INDENT> return {k:v for k,v in self.__dict__.iteritems() if v != None} | Query trading account
BrokerID Broker company code char[11]
InvestorID Investor code char[13]
CurrencyID Currency code char[4] | 6259903cec188e330fdf9aab
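toDict in the row above drops unset (None) fields with a dict comprehension, using Python 2's iteritems(). A Python 3 sketch of the same filtering on a hypothetical stand-in class (names and sample values are illustrative):

```python
class QryTradingAccount:
    """Stand-in for the query-field object above (illustrative only)."""
    def __init__(self, **fields):
        self.BrokerID = None
        self.InvestorID = None
        self.CurrencyID = None
        self.__dict__.update(fields)

    def to_dict(self):
        # Python 3 equivalent of the row's iteritems()-based filter.
        return {k: v for k, v in self.__dict__.items() if v is not None}

req = QryTradingAccount(BrokerID="9999", InvestorID="000001")
print(req.to_dict())  # {'BrokerID': '9999', 'InvestorID': '000001'}
```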
class OWLPropertyRange(OWLObject, metaclass=ABCMeta): <NEW_LINE> <INDENT> pass | OWL Objects that can be the ranges of properties | 6259903c66673b3332c31609 |
class DetailsTenantView(ConfigurationView): <NEW_LINE> <INDENT> entities = View.nested(DetailsTenantEntities) <NEW_LINE> toolbar = View.nested(AccessControlToolbar) <NEW_LINE> name = Text('Name') <NEW_LINE> description = Text('Description') <NEW_LINE> parent = Text('Parent') <NEW_LINE> table = VersionPick({ Version.lowest(): Table('//*[self::fieldset or @id="fieldset"]/table'), '5.10': Table('//div[contains(@id,"react-")]/table')}) <NEW_LINE> @property <NEW_LINE> def is_displayed(self): <NEW_LINE> <INDENT> return ( self.accordions.accesscontrol.is_opened and self.title.text == '{} "{}"'.format(self.context['object'].obj_type, self.context['object'].name) ) | Details Tenant View | 6259903ccad5886f8bdc5985 |
class SteamykitchenMixin(object): <NEW_LINE> <INDENT> source = 'steamykitchen' <NEW_LINE> def parse_item(self, response): <NEW_LINE> <INDENT> hxs = HtmlXPathSelector(response) <NEW_LINE> base_path = """//blockquote[@class="recipe"]""" <NEW_LINE> recipes_scopes = hxs.select(base_path) <NEW_LINE> name_path = '//meta[@property="og:title"]/@content' <NEW_LINE> url_path = '//meta[@property="og:url"]/@content' <NEW_LINE> description_path = '//meta[@property="og:description"]/@content' <NEW_LINE> image_path = '//meta[@property="og:image"][1]/@content' <NEW_LINE> prepTime_path = '*//*[@itemprop="prepTime"]/@content' <NEW_LINE> cookTime_path = '*//*[@itemprop="cookTime"]/@content' <NEW_LINE> recipeYield_path = '*//*[@itemprop="recipeYield"]/text()' <NEW_LINE> ingredients_path = '*//*[@itemprop="ingredients"]' <NEW_LINE> datePublished = '//p[@class="date"]/text()' <NEW_LINE> recipes = [] <NEW_LINE> for r_scope in recipes_scopes: <NEW_LINE> <INDENT> il = RecipeItemLoader(item=RecipeItem()) <NEW_LINE> il.add_value('source', self.source) <NEW_LINE> il.add_value('name', r_scope.select(name_path).extract()) <NEW_LINE> il.add_value('image', r_scope.select(image_path).extract()) <NEW_LINE> il.add_value('url', r_scope.select(url_path).extract()) <NEW_LINE> il.add_value('description', r_scope.select(description_path).extract()) <NEW_LINE> il.add_value('prepTime', r_scope.select(prepTime_path).extract()) <NEW_LINE> il.add_value('cookTime', r_scope.select(cookTime_path).extract()) <NEW_LINE> il.add_value('recipeYield', r_scope.select(recipeYield_path).extract()) <NEW_LINE> ingredient_scopes = r_scope.select(ingredients_path) <NEW_LINE> ingredients = [] <NEW_LINE> for i_scope in ingredient_scopes: <NEW_LINE> <INDENT> ind = i_scope.extract() <NEW_LINE> ind = ind.strip() <NEW_LINE> ingredients.append("%s " % (ind)) <NEW_LINE> <DEDENT> il.add_value('ingredients', ingredients) <NEW_LINE> il.add_value('datePublished', r_scope.select(datePublished).extract()) <NEW_LINE> recipes.append(il.load_item()) <NEW_LINE> <DEDENT> return recipes | Using this as a mixin lets us reuse the parse_item method more easily | 6259903c26068e7796d4db59 |
class BrokenLinks(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.broken_links = {} <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pp = pprint.PrettyPrinter(depth=3, indent=1) <NEW_LINE> err_msg = "Broken links found: \n" + pp.pformat(self.broken_links) <NEW_LINE> self.assertEqual({}, self.broken_links, msg=err_msg) <NEW_LINE> <DEDENT> def test_no_broken_links(self): <NEW_LINE> <INDENT> link_file = open(SETTINGS['LINK_DATA'], 'r') <NEW_LINE> for data in link_file.readlines(): <NEW_LINE> <INDENT> link_data = json.loads(data) <NEW_LINE> page = link_data['page'] <NEW_LINE> for link in link_data['links']: <NEW_LINE> <INDENT> err_msg = 'Broken link: ' + str(link) <NEW_LINE> try: <NEW_LINE> <INDENT> self.assertLess(link['status'], 400, msg=err_msg) <NEW_LINE> <DEDENT> except AssertionError as err: <NEW_LINE> <INDENT> if isinstance(self.broken_links.get(page), list): <NEW_LINE> <INDENT> self.broken_links[page].append(err) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.broken_links[page] = [err] | Test for broken links on all pages of cnmipss.org | 6259903c596a897236128eb2 |
class MsgWaitForEndRead(Msg): <NEW_LINE> <INDENT> interaction = Interactions.WAIT_FOR_END_READ <NEW_LINE> requires_request = False <NEW_LINE> opcode = 0x00 <NEW_LINE> protocol = ProtocolVersion.ANY <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(timeout=5, *args, **kwargs) <NEW_LINE> <DEDENT> def _handle_reply(self, reply): <NEW_LINE> <INDENT> if reply.opcode == 0xc8: <NEW_LINE> <INDENT> if reply[0] != 0xed: <NEW_LINE> <INDENT> raise UnexpectedDataError(reply, 'Expected c8 ed') <NEW_LINE> <DEDENT> pass <NEW_LINE> <DEDENT> elif reply.opcode == 0xc9: <NEW_LINE> <INDENT> self.crc = int(binascii.hexlify(bytes(reply)), 16) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise UnexpectedReply(reply) <NEW_LINE> <DEDENT> <DEDENT> def execute(self): <NEW_LINE> <INDENT> super().execute() <NEW_LINE> super().execute() <NEW_LINE> return self | .. attribute:: crc
The checksum provided for the (out of band) pen data. | 6259903cb57a9660fecd2c8d |
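The reply handler above turns the raw reply bytes into an integer checksum via binascii.hexlify. A one-liner sketch of that conversion on made-up bytes; int.from_bytes is shown alongside as an equivalent, more direct spelling.

```python
import binascii

payload = bytes([0x12, 0x34, 0xab, 0xcd])

# Same conversion as in _handle_reply: hex string -> int.
crc = int(binascii.hexlify(payload), 16)
print(hex(crc))                               # 0x1234abcd
print(crc == int.from_bytes(payload, "big"))  # True
```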
class StatusBar(gtk.Statusbar): <NEW_LINE> <INDENT> def __init__(self, initmsg=None, others=[]): <NEW_LINE> <INDENT> super(StatusBar,self).__init__() <NEW_LINE> self._context = self.get_context_id("unique_sb") <NEW_LINE> self._timer = None <NEW_LINE> for oth in others[::-1]: <NEW_LINE> <INDENT> self.pack_end(oth, False) <NEW_LINE> self.pack_end(gtk.VSeparator(), False) <NEW_LINE> <DEDENT> if initmsg is not None: <NEW_LINE> <INDENT> self.__call__(initmsg) <NEW_LINE> <DEDENT> self.show_all() <NEW_LINE> <DEDENT> def __call__(self, msg, timeout=5): <NEW_LINE> <INDENT> if self._timer is not None: <NEW_LINE> <INDENT> self._timer.cancel() <NEW_LINE> <DEDENT> self.push(self._context, msg) <NEW_LINE> self._timer = threading.Timer(timeout, self.clear, ()) <NEW_LINE> self._timer.start() <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.push(self._context, "") <NEW_LINE> if self._timer is not None: <NEW_LINE> <INDENT> self._timer.cancel() <NEW_LINE> self._timer = None | All status bar functionality.
@param initmsg: An optional initial message.
@param others : Other widgets to add at the right of the text.
@author: Facundo Batista <facundobatista =at= taniquetil.com.ar> | 6259903cd4950a0f3b111749 |
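The StatusBar row clears each message with a threading.Timer that is cancelled and recreated whenever a new message arrives. A GUI-free sketch of that timeout pattern; the plain module-level message variable stands in for the GTK status bar.

```python
import threading
import time

message = "ready"
_timer = None

def show(msg, timeout=1.0):
    """Display msg and schedule it to be cleared after timeout seconds."""
    global message, _timer
    if _timer is not None:
        _timer.cancel()  # a newer message resets the countdown
    message = msg
    _timer = threading.Timer(timeout, clear)
    _timer.start()

def clear():
    global message
    message = ""

show("saved file", timeout=0.2)
print(message)        # saved file
time.sleep(0.4)
print(repr(message))  # ''
```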
class GenericGlobalPerm(models.Model): <NEW_LINE> <INDENT> content_type = models.ForeignKey( ContentType, related_name='global_perms', verbose_name=_('content type'), on_delete=models.CASCADE, null=True ) <NEW_LINE> roles = models.IntegerField(verbose_name=_('roles'), default=DEFAULT_ROLE) <NEW_LINE> permission = models.ForeignKey( to=Permission, verbose_name=_('permission'), on_delete=models.CASCADE ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('global group permission') <NEW_LINE> verbose_name_plural = _('global group permissions') <NEW_LINE> unique_together = ('content_type', 'permission') | This model is for defining template-like permissions
e.g. Every blog moderator could edit his blog | 6259903c1f5feb6acb163e05 |
class Question(models.Model): <NEW_LINE> <INDENT> QUESTION_TYPES = [ ('1', 'Text'), ('2', 'Choose one'), ('3', 'Choose many'), ] <NEW_LINE> poll = models.ForeignKey(Poll, on_delete=models.CASCADE, related_name='questions') <NEW_LINE> text = models.CharField("Question text", max_length=255) <NEW_LINE> question_type = models.CharField(max_length=15, choices=QUESTION_TYPES, default='1') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.text | Questions Model | 6259903c23e79379d538d712 |