code (string, lengths 4–4.48k) | docstring (string, lengths 1–6.45k) | _id (string, length 24) |
---|---|---|
class EqualityExpr(BinaryOperatorExpr): <NEW_LINE> <INDENT> operators = { '=' : operator.eq, '!=' : operator.ne, '<=' : operator.le, '<' : operator.lt, '>=' : operator.ge, '>' : operator.gt, } <NEW_LINE> def operate(self, a, b): <NEW_LINE> <INDENT> if nodesetp(a): <NEW_LINE> <INDENT> for node in a: <NEW_LINE> <INDENT> if self.operate(string_value(node), b): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> if nodesetp(b): <NEW_LINE> <INDENT> for node in b: <NEW_LINE> <INDENT> if self.operate(a, string_value(node)): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> if self.op in ('=', '!='): <NEW_LINE> <INDENT> if booleanp(a) or booleanp(b): <NEW_LINE> <INDENT> convert = boolean <NEW_LINE> <DEDENT> elif numberp(a) or numberp(b): <NEW_LINE> <INDENT> convert = number <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> convert = string <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> convert = number <NEW_LINE> <DEDENT> a, b = convert(a), convert(b) <NEW_LINE> return self.operators[self.op](a, b) | <x> = <y>, <x> != <y>, etc. | 625990538e71fb1e983bcfbc |
class Permission(Entity): <NEW_LINE> <INDENT> using_options(tablename="tg_permission", auto_primarykey="permission_id") <NEW_LINE> permission_name = Field(Unicode(16), unique=True) <NEW_LINE> description = Field(Unicode(255)) <NEW_LINE> groups = ManyToMany("Group") | A relationship that determines what each Group can do
| 62599053435de62698e9d2f4 |
class ProdConf(BaseConf): <NEW_LINE> <INDENT> DEBUG = False <NEW_LINE> SQLALCHEMY_ECHO = False | Production configuration | 62599053462c4b4f79dbcef7 |
class CredentialStore(object): <NEW_LINE> <INDENT> def __init__(self, credentials): <NEW_LINE> <INDENT> self._credentials = credentials <NEW_LINE> <DEDENT> def get_auth_for(self, url, credential_id=None): <NEW_LINE> <INDENT> for credential in self._credentials: <NEW_LINE> <INDENT> if re.search(credential["url"], url): <NEW_LINE> <INDENT> if credential_id and credential.get("id") != credential_id: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> auth = credential["auth"].copy() <NEW_LINE> return get_auth(auth.pop("provider"), **auth) <NEW_LINE> <DEDENT> <DEDENT> message = f"No credentials found for a given URL: '{url}'" <NEW_LINE> if credential_id: <NEW_LINE> <INDENT> message += f" (id='{credential_id}')" <NEW_LINE> <DEDENT> raise LookupError(message) | Credential store, manages your credentials. | 625990536e29344779b01b3b |
class CertificateIssuerSetParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'provider': {'required': True}, } <NEW_LINE> _attribute_map = { 'provider': {'key': 'provider', 'type': 'str'}, 'credentials': {'key': 'credentials', 'type': 'IssuerCredentials'}, 'organization_details': {'key': 'org_details', 'type': 'OrganizationDetails'}, 'attributes': {'key': 'attributes', 'type': 'IssuerAttributes'}, } <NEW_LINE> def __init__( self, *, provider: str, credentials: Optional["IssuerCredentials"] = None, organization_details: Optional["OrganizationDetails"] = None, attributes: Optional["IssuerAttributes"] = None, **kwargs ): <NEW_LINE> <INDENT> super(CertificateIssuerSetParameters, self).__init__(**kwargs) <NEW_LINE> self.provider = provider <NEW_LINE> self.credentials = credentials <NEW_LINE> self.organization_details = organization_details <NEW_LINE> self.attributes = attributes | The certificate issuer set parameters.
All required parameters must be populated in order to send to Azure.
:param provider: Required. The issuer provider.
:type provider: str
:param credentials: The credentials to be used for the issuer.
:type credentials: ~azure.keyvault.v7_0.models.IssuerCredentials
:param organization_details: Details of the organization as provided to the issuer.
:type organization_details: ~azure.keyvault.v7_0.models.OrganizationDetails
:param attributes: Attributes of the issuer object.
:type attributes: ~azure.keyvault.v7_0.models.IssuerAttributes | 6259905376d4e153a661dcf4 |
class DecoratingQuerySet(QuerySet): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(DecoratingQuerySet, self).__init__(*args, **kwargs) <NEW_LINE> self._decorate_funcs = [] <NEW_LINE> <DEDENT> def _clone(self, klass=None, setup=False, **kw): <NEW_LINE> <INDENT> c = super(DecoratingQuerySet, self)._clone(klass, setup, **kw) <NEW_LINE> c._decorate_funcs = self._decorate_funcs <NEW_LINE> return c <NEW_LINE> <DEDENT> def decorate(self, fn): <NEW_LINE> <INDENT> if fn not in self._decorate_funcs: <NEW_LINE> <INDENT> self._decorate_funcs.append(fn) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def iterator(self): <NEW_LINE> <INDENT> base_iterator = super(DecoratingQuerySet, self).iterator() <NEW_LINE> for obj in base_iterator: <NEW_LINE> <INDENT> for fn in self._decorate_funcs: <NEW_LINE> <INDENT> fn(obj) <NEW_LINE> <DEDENT> yield obj | An enhancement of the QuerySet which allows objects to be decorated
with extra properties before they are returned.
When using this method with *django-polymorphic* or *django-hvad*, make sure this
class is first in the chain of inherited classes. | 625990537cff6e4e811b6f34 |
class RedgifsImageExtractor(RedgifsExtractor): <NEW_LINE> <INDENT> subcategory = "image" <NEW_LINE> pattern = (r"(?:https?://)?(?:" r"(?:www\.)?redgifs\.com/(?:watch|ifr)|" r"(?:www\.)?gifdeliverynetwork\.com|" r"i\.redgifs\.com/i)/([A-Za-z]+)") <NEW_LINE> test = ( ("https://redgifs.com/watch/foolishforkedabyssiniancat", { "pattern": r"https://\w+\.redgifs\.com" r"/FoolishForkedAbyssiniancat\.mp4", "content": "f6e03f1df9a2ff2a74092f53ee7580d2fb943533", }), ("https://redgifs.com/ifr/FoolishForkedAbyssiniancat"), ("https://i.redgifs.com/i/FoolishForkedAbyssiniancat"), ("https://www.gifdeliverynetwork.com/foolishforkedabyssiniancat"), ) <NEW_LINE> def gifs(self): <NEW_LINE> <INDENT> return (RedgifsAPI(self).gif(self.key),) | Extractor for individual gifs from redgifs.com | 6259905382261d6c52730943 |
class UpdateCourseAPIView(generics.UpdateAPIView): <NEW_LINE> <INDENT> lookup_url_kwarg = "unique_id" <NEW_LINE> lookup_field = COUR_UNIQUE_ID <NEW_LINE> queryset = Course.objects.all() <NEW_LINE> serializer_class = CourseSerializer | This API endpoint is for updating a Course object. | 6259905307f4c71912bb092c |
class Sequence(object): <NEW_LINE> <INDENT> def __init__(self, sequence, state, logprob): <NEW_LINE> <INDENT> self.sequence = sequence <NEW_LINE> self.state = state <NEW_LINE> self.logprob = logprob <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Sequence: {}, logprob: {:.3f}>'.format(self.sequence, self.logprob) <NEW_LINE> <DEDENT> def __lt__(self, other_sequence): <NEW_LINE> <INDENT> assert isinstance(other_sequence, Sequence) <NEW_LINE> return self.logprob < other_sequence.logprob | Represents a potential sequence. | 62599053b5575c28eb713745 |
class BakeAction(Operator): <NEW_LINE> <INDENT> bl_idname = "nla.bake" <NEW_LINE> bl_label = "Bake Action" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> frame_start = IntProperty( name="Start Frame", description="Start frame for baking", min=0, max=300000, default=1, ) <NEW_LINE> frame_end = IntProperty( name="End Frame", description="End frame for baking", min=1, max=300000, default=250, ) <NEW_LINE> step = IntProperty( name="Frame Step", description="Frame Step", min=1, max=120, default=1, ) <NEW_LINE> only_selected = BoolProperty( name="Only Selected", default=True, ) <NEW_LINE> clear_consraints = BoolProperty( name="Clear Constraints", default=False, ) <NEW_LINE> bake_types = EnumProperty( name="Bake Data", options={'ENUM_FLAG'}, items=(('POSE', "Pose", ""), ('OBJECT', "Object", ""), ), default={'POSE'}, ) <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> from bpy_extras import anim_utils <NEW_LINE> action = anim_utils.bake_action(self.frame_start, self.frame_end, self.step, self.only_selected, 'POSE' in self.bake_types, 'OBJECT' in self.bake_types, self.clear_consraints, True, ) <NEW_LINE> if action is None: <NEW_LINE> <INDENT> self.report({'INFO'}, "Nothing to bake") <NEW_LINE> return {'CANCELLED'} <NEW_LINE> <DEDENT> return {'FINISHED'} <NEW_LINE> <DEDENT> def invoke(self, context, event): <NEW_LINE> <INDENT> wm = context.window_manager <NEW_LINE> return wm.invoke_props_dialog(self) | Bake object/pose loc/scale/rotation animation to a new action | 625990534428ac0f6e659a2c |
class jsonable: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self = self <NEW_LINE> <DEDENT> def to_JSON(self): <NEW_LINE> <INDENT> return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4) | This is a parent class for all classes that I want to serialize in json | 62599053ac7a0e7691f739d4 |
class WuaTestCase(object): <NEW_LINE> <INDENT> __shared_session__ = True <NEW_LINE> desired_capabilities = config.DESIRED_CAPABILITIES <NEW_LINE> @staticmethod <NEW_LINE> def _create_session(o): <NEW_LINE> <INDENT> o.driver = Remote(command_executor="http://localhost:9999", desired_capabilities=o.desired_capabilities) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _destroy_session(o): <NEW_LINE> <INDENT> o.driver.quit() <NEW_LINE> <DEDENT> def setup_method(self, _): <NEW_LINE> <INDENT> if not self.__shared_session__: <NEW_LINE> <INDENT> WuaTestCase._create_session(self) <NEW_LINE> <DEDENT> <DEDENT> def teardown_method(self, _): <NEW_LINE> <INDENT> if not self.__shared_session__: <NEW_LINE> <INDENT> WuaTestCase._destroy_session(self) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def setup_class(cls): <NEW_LINE> <INDENT> if cls.__shared_session__: <NEW_LINE> <INDENT> WuaTestCase._create_session(cls) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def teardown_class(cls): <NEW_LINE> <INDENT> if cls.__shared_session__: <NEW_LINE> <INDENT> WuaTestCase._destroy_session(cls) <NEW_LINE> <DEDENT> <DEDENT> @pytest.fixture <NEW_LINE> def waiter(self): <NEW_LINE> <INDENT> return WebDriverWait(self.driver, timeout=5) | If True, then new session is created when test class is being setup,
i.e. one session is used for all test methods in class.
If False, then new session is created when test method is being setup,
i.e. each test method will run in new session. | 62599053b830903b9686eef7 |
class GraphAPI(object): <NEW_LINE> <INDENT> def __init__(self, access_token=None): <NEW_LINE> <INDENT> self.access_token = access_token <NEW_LINE> <DEDENT> def batch_request(self,requests): <NEW_LINE> <INDENT> assert self.access_token, "Batch operations require an access token" <NEW_LINE> return self.request("/", { 'batch': _encode_json(requests), 'method': 'post'}) <NEW_LINE> <DEDENT> def get_object(self, id, **args): <NEW_LINE> <INDENT> return self.request(id, args) <NEW_LINE> <DEDENT> def get_objects(self, ids, **args): <NEW_LINE> <INDENT> args["ids"] = ",".join(ids) <NEW_LINE> return self.request("", args) <NEW_LINE> <DEDENT> def get_connections(self, id, connection_name, **args): <NEW_LINE> <INDENT> return self.request(id + "/" + connection_name, args) <NEW_LINE> <DEDENT> def put_object(self, parent_object, connection_name, **data): <NEW_LINE> <INDENT> assert self.access_token, "Write operations require an access token" <NEW_LINE> return self.request(parent_object + "/" + connection_name, post_args=data) <NEW_LINE> <DEDENT> def put_wall_post(self, message, attachment={}, profile_id="me"): <NEW_LINE> <INDENT> return self.put_object(profile_id, "feed", message=message, **attachment) <NEW_LINE> <DEDENT> def put_comment(self, object_id, message): <NEW_LINE> <INDENT> return self.put_object(object_id, "comments", message=message) <NEW_LINE> <DEDENT> def put_like(self, object_id): <NEW_LINE> <INDENT> return self.put_object(object_id, "likes") <NEW_LINE> <DEDENT> def delete_object(self, id): <NEW_LINE> <INDENT> self.request(id, post_args={"method": "delete"}) <NEW_LINE> <DEDENT> def request(self, path, args=None, post_args=None): <NEW_LINE> <INDENT> if not args: args = {} <NEW_LINE> if self.access_token: <NEW_LINE> <INDENT> if post_args is not None: <NEW_LINE> <INDENT> post_args["access_token"] = self.access_token <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> args["access_token"] = self.access_token <NEW_LINE> <DEDENT> <DEDENT> post_data = None if post_args is None else urllib.urlencode(post_args) <NEW_LINE> file = urllib.urlopen("https://graph.facebook.com/" + path + "?" + urllib.urlencode(args), post_data) <NEW_LINE> try: <NEW_LINE> <INDENT> response = _parse_json(file.read()) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> file.close() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if response.get("error"): <NEW_LINE> <INDENT> raise GraphAPIError(response["error"]["type"], response["error"]["message"]) <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return response | A client for the Facebook Graph API.
See http://developers.facebook.com/docs/api for complete documentation
for the API.
The Graph API is made up of the objects in Facebook (e.g., people, pages,
events, photos) and the connections between them (e.g., friends,
photo tags, and event RSVPs). This client provides access to those
primitive types in a generic way. For example, given an OAuth access
token, this will fetch the profile of the active user and the list
of the user's friends:
graph = facebook.GraphAPI(access_token)
user = graph.get_object("me")
friends = graph.get_connections(user["id"], "friends")
You can see a list of all of the objects and connections supported
by the API at http://developers.facebook.com/docs/reference/api/.
You can obtain an access token via OAuth or by using the Facebook
JavaScript SDK. See http://developers.facebook.com/docs/authentication/
for details.
If you are using the JavaScript SDK, you can use the
get_user_from_cookie() method below to get the OAuth access token
for the active user from the cookie saved by the SDK. | 625990532ae34c7f260ac5da |
class Args(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.path = 'Data/' <NEW_LINE> self.dataset = 'ml-100k' <NEW_LINE> self.epochs = 50 <NEW_LINE> self.batch_size = 256 <NEW_LINE> self.num_tasks = 18 <NEW_LINE> self.e_dim = 8 <NEW_LINE> self.f_dim = 8 <NEW_LINE> self.reg = 0 <NEW_LINE> self.num_neg = 4 <NEW_LINE> self.lr = 0.001 <NEW_LINE> self.loss_weights = [1, 0.05] <NEW_LINE> self.K = 10 | Used to generate different sets of arguments | 6259905355399d3f05627a12 |
class AAR_estimator_LDA(BaseEstimator, ClassifierMixin): <NEW_LINE> <INDENT> def __init__(self,in_chan,classes,maxlag=3,fs=250,feature_idx='all'): <NEW_LINE> <INDENT> self.in_chan = in_chan <NEW_LINE> self.classes = classes <NEW_LINE> self.fs = fs <NEW_LINE> self.maxlag = maxlag <NEW_LINE> self.clf = LinearDiscriminantAnalysis() <NEW_LINE> <DEDENT> def find_aar_para(self,x): <NEW_LINE> <INDENT> features_x = np.zeros((len(x), self.in_chan * (self.maxlag + 1))) <NEW_LINE> for j, each_trial in enumerate(x): <NEW_LINE> <INDENT> features_trial = np.zeros((self.in_chan, self.maxlag + 1)) <NEW_LINE> for i in range(self.in_chan): <NEW_LINE> <INDENT> x_chan = each_trial[i] <NEW_LINE> para_estimator = AR(x_chan) <NEW_LINE> para_estimator = para_estimator.fit(maxlag=self.maxlag) <NEW_LINE> features_trial[i] = para_estimator.params <NEW_LINE> <DEDENT> features_trial = features_trial.flatten() <NEW_LINE> features_x[j] = features_trial <NEW_LINE> <DEDENT> return features_x <NEW_LINE> <DEDENT> def fit(self,x,y): <NEW_LINE> <INDENT> features_x = self.find_aar_para(x) <NEW_LINE> self.clf.fit(features_x,y) <NEW_LINE> <DEDENT> def predict(self,x): <NEW_LINE> <INDENT> features_x = self.find_aar_para(x) <NEW_LINE> pred = self.clf.predict(features_x) <NEW_LINE> return pred <NEW_LINE> <DEDENT> def transform(self,x): <NEW_LINE> <INDENT> features_x = self.find_aar_para(x) <NEW_LINE> tran = self.clf.transform(features_x) <NEW_LINE> return tran <NEW_LINE> <DEDENT> def score(self, X, y, sample_weight=None): <NEW_LINE> <INDENT> pred = self.predict(X) <NEW_LINE> return accuracy_score(y,pred) | feature:AAR
clf:LDA | 6259905363d6d428bbee3cc6 |
class RiemannProtocol(Int32StringReceiver, RiemannProtobufMixin): <NEW_LINE> <INDENT> implements(ITensorProtocol) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.pressure = 0 <NEW_LINE> <DEDENT> def stringReceived(self, string): <NEW_LINE> <INDENT> self.pressure -= 1 | Riemann protobuf protocol
| 6259905394891a1f408ba170 |
class Application(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Command = None <NEW_LINE> self.DeliveryForm = None <NEW_LINE> self.PackagePath = None <NEW_LINE> self.Docker = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Command = params.get("Command") <NEW_LINE> self.DeliveryForm = params.get("DeliveryForm") <NEW_LINE> self.PackagePath = params.get("PackagePath") <NEW_LINE> if params.get("Docker") is not None: <NEW_LINE> <INDENT> self.Docker = Docker() <NEW_LINE> self.Docker._deserialize(params.get("Docker")) <NEW_LINE> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | Application information
| 625990537b25080760ed8759 |
class CallableNameTests(TestCase): <NEW_LINE> <INDENT> def test_method(self): <NEW_LINE> <INDENT> self.assertThat( [callable_name(partial(self.test_method)), callable_name(self.test_method)], AllMatch(Equals(self.test_method.func_name))) <NEW_LINE> <DEDENT> def test_function(self): <NEW_LINE> <INDENT> def _function(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.assertThat( [callable_name(partial(_function)), callable_name(_function)], AllMatch(Equals(_function.func_name))) <NEW_LINE> <DEDENT> def test_lambda(self): <NEW_LINE> <INDENT> lam = lambda: None <NEW_LINE> self.assertThat( [callable_name(partial(lam)), callable_name(lam)], AllMatch(Equals(lam.func_name))) <NEW_LINE> <DEDENT> def test_callable(self): <NEW_LINE> <INDENT> class call(object): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> c = call() <NEW_LINE> self.assertThat( [callable_name(partial(call)), callable_name(call), callable_name(partial(c)), callable_name(c)], AllMatch(Equals(call.__name__))) <NEW_LINE> <DEDENT> def test_not_callable(self): <NEW_LINE> <INDENT> f = 42 <NEW_LINE> matcher = MatchesStructure( args=Equals(('Not a callable', f))) <NEW_LINE> with ExpectedException(TypeError, matcher): <NEW_LINE> <INDENT> callable_name(f) | Tests for `callable_name`. | 625990536e29344779b01b3d |
class DTClassifier(Classifier): <NEW_LINE> <INDENT> def __init__(self, X, y, num, classifier_name): <NEW_LINE> <INDENT> super().__init__(X, y, num, classifier_name) <NEW_LINE> self.classifier = DecisionTreeClassifier() | Decision tree classifier | 62599053498bea3a75a5901a |
class AvailableServiceSkuSku(Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'family': {'key': 'family', 'type': 'str'}, 'size': {'key': 'size', 'type': 'str'}, 'tier': {'key': 'tier', 'type': 'str'}, } <NEW_LINE> def __init__(self, *, name: str=None, family: str=None, size: str=None, tier: str=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(AvailableServiceSkuSku, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.family = family <NEW_LINE> self.size = size <NEW_LINE> self.tier = tier | SKU name, tier, etc.
:param name: The name of the SKU
:type name: str
:param family: SKU family
:type family: str
:param size: SKU size
:type size: str
:param tier: The tier of the SKU, such as "Free", "Basic", "Standard", or
"Premium"
:type tier: str | 6259905345492302aabfd9cc |
class SillyClass(object): <NEW_LINE> <INDENT> pass | docstring for SillyClass | 625990538da39b475be046df |
class VmdProc(object): <NEW_LINE> <INDENT> def __init__(self, experiment, directory=None): <NEW_LINE> <INDENT> self.experiment = experiment <NEW_LINE> self.directory = directory <NEW_LINE> self.has_run = False <NEW_LINE> <DEDENT> def make_command(self, output=True): <NEW_LINE> <INDENT> command = [standards.VmdCommand, standards.VmdDisp, standards.VmdNoDisp, standards.VmdExec, os.path.join(standards.SourceDirectory, self.script)] <NEW_LINE> try: <NEW_LINE> <INDENT> command.append("{} {}".format(standards.VmdMolecules, " ".join(map(str, self.molecules)))) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> command.append("{} {}".format(standards.VmdFrames, " ".join(map(str, self.frames)))) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> command.append("{} {}".format(standards.VmdArgs, " ".join(map(str, self.args)))) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if output: <NEW_LINE> <INDENT> outfile = os.path.join(self.directory, "vmd.out") <NEW_LINE> errfile = os.path.join(self.directory, "vmd.err") <NEW_LINE> command.extend([">", outfile, "2>", errfile]) <NEW_LINE> <DEDENT> return command <NEW_LINE> <DEDENT> def vmd(self): <NEW_LINE> <INDENT> arguments = self.make_command() <NEW_LINE> command = " ".join(arguments) <NEW_LINE> status = os.system(command) <NEW_LINE> if status != 0: <NEW_LINE> <INDENT> raise RuntimeError("Running VMD with the following command has failed:\n{}".format(command)) <NEW_LINE> <DEDENT> <DEDENT> def collect_garbage(self): <NEW_LINE> <INDENT> if not self.experiment.args.keeptemp: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.remove(self.vmd_functions_file) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.experiment.safe_rmtree(self.directory) <NEW_LINE> <DEDENT> <DEDENT> def write_epitope_file(self): <NEW_LINE> <INDENT> self.epitope_file = os.path.join(self.directory, "epitope.txt") <NEW_LINE> with open(self.epitope_file, "w") as f: <NEW_LINE> <INDENT> f.write(atomselect_residues(self.experiment.epitope_residue_ids)) <NEW_LINE> <DEDENT> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.directory is None: <NEW_LINE> <INDENT> self.directory = tempfile.mkdtemp(prefix="vmd_", dir=self.experiment.temp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> os.mkdir(self.directory) <NEW_LINE> <DEDENT> self.vmd_functions_file = os.path.join(self.directory, os.path.basename(standards.VmdFunctions)) <NEW_LINE> os.symlink(standards.VmdFunctions, self.vmd_functions_file) <NEW_LINE> self.previous_directory = os.getcwd() <NEW_LINE> os.chdir(self.directory) <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> os.chdir(self.previous_directory) <NEW_LINE> self.collect_garbage() | This class is the base class for managing all VMD procedures. | 6259905338b623060ffaa2c9 |
class Decoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, dim_neck, dim_emb, dim_pre): <NEW_LINE> <INDENT> super(Decoder, self).__init__() <NEW_LINE> self.lstm1 = nn.LSTM(dim_neck * 2 + dim_emb, dim_pre, 1, batch_first=True) <NEW_LINE> layers = [] <NEW_LINE> for _ in range(3): <NEW_LINE> <INDENT> layers += [ConvBlock(dim_pre, dim_pre, kernel_size=5, nonlinearity='relu')] <NEW_LINE> <DEDENT> self.conv = nn.Sequential(*layers) <NEW_LINE> self.lstm2 = nn.LSTM(dim_pre, 1024, 2, batch_first=True) <NEW_LINE> self.linear_projection = nn.Linear(1024, 80) <NEW_LINE> nn.init.xavier_uniform_(self.linear_projection.weight, gain=nn.init.calculate_gain('linear')) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> self.lstm1.flatten_parameters() <NEW_LINE> x, _ = self.lstm1(x) <NEW_LINE> x = x.transpose(1, 2) <NEW_LINE> x = self.conv(x) <NEW_LINE> x = x.transpose(1, 2) <NEW_LINE> self.lstm2.flatten_parameters() <NEW_LINE> outputs, _ = self.lstm2(x) <NEW_LINE> decoder_output = self.linear_projection(outputs) <NEW_LINE> return decoder_output | Decoder module | 62599053507cdc57c63a6299 |
class TrafficAnalyticsProperties(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'network_watcher_flow_analytics_configuration': {'required': True}, } <NEW_LINE> _attribute_map = { 'network_watcher_flow_analytics_configuration': {'key': 'networkWatcherFlowAnalyticsConfiguration', 'type': 'TrafficAnalyticsConfigurationProperties'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(TrafficAnalyticsProperties, self).__init__(**kwargs) <NEW_LINE> self.network_watcher_flow_analytics_configuration = kwargs['network_watcher_flow_analytics_configuration'] | Parameters that define the configuration of traffic analytics.
All required parameters must be populated in order to send to Azure.
:param network_watcher_flow_analytics_configuration: Required. Parameters that define the
configuration of traffic analytics.
:type network_watcher_flow_analytics_configuration:
~azure.mgmt.network.v2018_04_01.models.TrafficAnalyticsConfigurationProperties | 62599053435de62698e9d2f7 |
class CPPImplementationsProcessor(object): <NEW_LINE> <INDENT> def do_process(self, block_holder, processor_changes, biiout): <NEW_LINE> <INDENT> assert biiout is not None <NEW_LINE> assert processor_changes is not None <NEW_LINE> simple_resources = [r for r in block_holder.simple_resources if r.cell.type == CPP] <NEW_LINE> simple_names = defaultdict(set) <NEW_LINE> defs_dict = self._set_default_definitions(simple_resources, simple_names) <NEW_LINE> for cell, content in simple_resources: <NEW_LINE> <INDENT> implicits = set() <NEW_LINE> self.check_implementations_by_name(cell, simple_names, implicits) <NEW_LINE> self.check_declarations(cell, content, defs_dict, implicits) <NEW_LINE> implicits = self.resolve_virtuals(block_holder, implicits) <NEW_LINE> cell.dependencies.implicit = implicits <NEW_LINE> <DEDENT> <DEDENT> def _set_default_definitions(self, simple_resources, simple_names): <NEW_LINE> <INDENT> defs_dict = {} <NEW_LINE> for r in simple_resources: <NEW_LINE> <INDENT> name = r.cell.name <NEW_LINE> if name.extension not in BiiType.cpp_src_exts: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> simple_names[os.path.splitext(name)[0]].add(name) <NEW_LINE> for definition in r.content.parser.definitions: <NEW_LINE> <INDENT> assert definition.type == CPPItem.METHOD or definition.type == CPPItem.VAR <NEW_LINE> defs_dict.setdefault(name.block_name, {}). setdefault(definition.scope, {}). setdefault(definition.name, set()).add(name) <NEW_LINE> <DEDENT> <DEDENT> return defs_dict <NEW_LINE> <DEDENT> def check_implementations_by_name(self, cell, simple_names, implicits): <NEW_LINE> <INDENT> if cell.name.cell_name.extension not in BiiType.cpp_src_exts: <NEW_LINE> <INDENT> sources = simple_names.get(os.path.splitext(cell.name)[0]) <NEW_LINE> if sources: <NEW_LINE> <INDENT> implicits.update(sources) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def check_declarations(self, cell, content, defs_dict, implicits): <NEW_LINE> <INDENT> for declaration in content.parser.declarations: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if declaration.type in [CPPItem.CLASS, CPPItem.STRUCT]: <NEW_LINE> <INDENT> scope = CPPItem.extend_scope(declaration.scope, declaration.name) <NEW_LINE> ds = defs_dict[cell.name.block_name][scope].values() <NEW_LINE> for d in ds: <NEW_LINE> <INDENT> d.discard(cell.name) <NEW_LINE> implicits.update(d) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> ds = defs_dict[cell.name.block_name][declaration.scope][declaration.name] <NEW_LINE> ds.discard(cell.name) <NEW_LINE> implicits.update(ds) <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def resolve_virtuals(block_holder, implicits): <NEW_LINE> <INDENT> new_implicits = set() <NEW_LINE> for implicit in implicits: <NEW_LINE> <INDENT> assert implicit.block_name == block_holder.block_name <NEW_LINE> cell2 = block_holder[implicit.cell_name].cell <NEW_LINE> target = cell2.container or cell2.name <NEW_LINE> new_implicits.add(target) <NEW_LINE> <DEDENT> return new_implicits | processor to detect implementations in C++, a very difficult task, so this processor is not
perfect. | 6259905363b5f9789fe86666 |
class Level_SynthSeq(LevelGen): <NEW_LINE> <INDENT> def __init__(self, seed=None): <NEW_LINE> <INDENT> super().__init__( seed=seed, locations=True, unblocking=True, implicit_unlock=False ) | Like SynthLoc, but now with multiple commands, combined just like in GoToSeq.
No implicit unlocking.
Competencies: Maze, Unblock, Unlock, GoTo, PickUp, PutNext, Open, Loc, Seq | 625990534e4d5625663738fb |
class DBConfig(object): <NEW_LINE> <INDENT> def __init__(self, db_path): <NEW_LINE> <INDENT> self.db_kvp_helper = DBKVPHelper('sqlite') <NEW_LINE> self.connect(db_path) <NEW_LINE> <DEDENT> def connect(self, db_path): <NEW_LINE> <INDENT> self.db_kvp_helper.connect(db_path) <NEW_LINE> self.db_kvp_helper.use_table('config') <NEW_LINE> <DEDENT> def attach(self, db_conn): <NEW_LINE> <INDENT> self.db_kvp_helper.attach(db_conn) <NEW_LINE> self.db_kvp_helper.use_table('config') <NEW_LINE> <DEDENT> def get(self, section, key, default=None): <NEW_LINE> <INDENT> db_key_name = section + '|' + key <NEW_LINE> result = self.db_kvp_helper.read(db_key_name, default) <NEW_LINE> if result: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> <DEDENT> def getint(self, section, key, default=None): <NEW_LINE> <INDENT> return int(self.get(self, section, key, default)) <NEW_LINE> <DEDENT> def getfloat(self, section, key, default=None): <NEW_LINE> <INDENT> return float(self.get(self, section, key, default)) <NEW_LINE> <DEDENT> def getboolean(self, section, key, default=None): <NEW_LINE> <INDENT> return bool(self.get(self, section, key, default)) <NEW_LINE> <DEDENT> def set(self, section, key, value): <NEW_LINE> <INDENT> db_key_name = section + '|' + key <NEW_LINE> self.db_kvp_helper.write(db_key_name, value) <NEW_LINE> <DEDENT> def delete(self, section, key): <NEW_LINE> <INDENT> db_key_name = section + '|' + key <NEW_LINE> self.db_kvp_helper.delete(db_key_name) | A configuration class using database to persist. | 625990538e71fb1e983bcfbe |
class BedrockRouter(object): <NEW_LINE> <INDENT> db_for_read = db_for_write = lambda *a, **kw: 'bedrock' <NEW_LINE> allow_relation = allow_migrate = lambda *a, **kw: True | A database router to use a single non-default db | 6259905371ff763f4b5e8ca3 |
class Cursor(_object): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, Cursor, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, Cursor, name) <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> __swig_destroy__ = _fife.delete_Cursor <NEW_LINE> __del__ = lambda self : None; <NEW_LINE> def draw(self): <NEW_LINE> <INDENT> return _fife.Cursor_draw(self) <NEW_LINE> <DEDENT> def set(self, *args): <NEW_LINE> <INDENT> return _fife.Cursor_set(self, *args) <NEW_LINE> <DEDENT> def setDrag(self, *args): <NEW_LINE> <INDENT> return _fife.Cursor_setDrag(self, *args) <NEW_LINE> <DEDENT> def resetDrag(self): <NEW_LINE> <INDENT> return _fife.Cursor_resetDrag(self) <NEW_LINE> <DEDENT> def getType(self): <NEW_LINE> <INDENT> return _fife.Cursor_getType(self) <NEW_LINE> <DEDENT> def getId(self): <NEW_LINE> <INDENT> return _fife.Cursor_getId(self) <NEW_LINE> <DEDENT> def getImage(self): <NEW_LINE> <INDENT> return _fife.Cursor_getImage(self) <NEW_LINE> <DEDENT> def getAnimation(self): <NEW_LINE> <INDENT> return _fife.Cursor_getAnimation(self) <NEW_LINE> <DEDENT> def getDragType(self): <NEW_LINE> <INDENT> return _fife.Cursor_getDragType(self) <NEW_LINE> <DEDENT> def getDragImage(self): <NEW_LINE> <INDENT> return _fife.Cursor_getDragImage(self) <NEW_LINE> <DEDENT> def getDragAnimation(self): <NEW_LINE> <INDENT> return _fife.Cursor_getDragAnimation(self) <NEW_LINE> <DEDENT> def getX(self): <NEW_LINE> <INDENT> return _fife.Cursor_getX(self) <NEW_LINE> <DEDENT> def getY(self): <NEW_LINE> <INDENT> return _fife.Cursor_getY(self) <NEW_LINE> <DEDENT> def setPosition(self, *args): <NEW_LINE> <INDENT> return _fife.Cursor_setPosition(self, *args) <NEW_LINE> <DEDENT> def getPosition(self): <NEW_LINE> <INDENT> return _fife.Cursor_getPosition(self) | Proxy of C++ FIFE::Cursor class | 6259905310dbd63aa1c720d6 |
class ServiceState(object): <NEW_LINE> <INDENT> DISABLED = "DISABLED" <NEW_LINE> STARTING = "STARTING" <NEW_LINE> UP = "UP" <NEW_LINE> DEGRADED = "DEGRADED" <NEW_LINE> FAILED = "FAILED" <NEW_LINE> STOPPING = "STOPPING" <NEW_LINE> UNKNOWN = "UNKNOWN" <NEW_LINE> FAILURE_STATES = set([DEGRADED, FAILED]) <NEW_LINE> @classmethod <NEW_LINE> def from_service(cls, service): <NEW_LINE> <INDENT> if not service.enabled: <NEW_LINE> <INDENT> return cls.disabled_states(service) <NEW_LINE> <DEDENT> if service.instances.all(ServiceInstance.STATE_UP): <NEW_LINE> <INDENT> return cls.UP <NEW_LINE> <DEDENT> if service.instances.is_starting(): <NEW_LINE> <INDENT> return cls.STARTING <NEW_LINE> <DEDENT> if service.instances.all(ServiceInstance.STATE_FAILED): <NEW_LINE> <INDENT> return cls.FAILED <NEW_LINE> <DEDENT> return cls.DEGRADED <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def disabled_states(cls, service): <NEW_LINE> <INDENT> if not len(service.instances): <NEW_LINE> <INDENT> return cls.DISABLED <NEW_LINE> <DEDENT> if service.instances.all(ServiceInstance.STATE_STOPPING): <NEW_LINE> <INDENT> return cls.STOPPING <NEW_LINE> <DEDENT> return cls.UNKNOWN | Determine the state of a Service. | 6259905382261d6c52730944 |
class TestMonthlySummary(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testMonthlySummary(self): <NEW_LINE> <INDENT> pass | MonthlySummary unit test stubs | 6259905307f4c71912bb092e |
class UserSignupForm(forms.ModelForm): <NEW_LINE> <INDENT> username = forms.CharField( required=False, ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = MyUser <NEW_LINE> fields = [ "email", "first_name", "last_name", "password", ] <NEW_LINE> <DEDENT> def clean_password(self): <NEW_LINE> <INDENT> password = self.cleaned_data.get("password") <NEW_LINE> if len(password) < 6: <NEW_LINE> <INDENT> raise forms.ValidationError("Password min length is 6") <NEW_LINE> <DEDENT> elif len(password) > 255: <NEW_LINE> <INDENT> raise forms.ValidationError("Password max length is 255") <NEW_LINE> <DEDENT> return password <NEW_LINE> <DEDENT> def clean_first_name(self): <NEW_LINE> <INDENT> first_name = self.cleaned_data.get("first_name") <NEW_LINE> first_name = first_name.split()[0].title() <NEW_LINE> return first_name <NEW_LINE> <DEDENT> def clean_last_name(self): <NEW_LINE> <INDENT> last_name = self.cleaned_data.get("last_name") <NEW_LINE> last_name = last_name.title() <NEW_LINE> return last_name <NEW_LINE> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> user = super(UserSignupForm, self).save(commit=False) <NEW_LINE> user.set_password(self.cleaned_data["password"]) <NEW_LINE> user.email = self.cleaned_data["email"] <NEW_LINE> user.first_name = self.cleaned_data["first_name"] <NEW_LINE> user.last_name = self.cleaned_data["last_name"] <NEW_LINE> if commit: <NEW_LINE> <INDENT> user.save() <NEW_LINE> <DEDENT> return user | A form for creating new users. | 625990532ae34c7f260ac5db |
class RequestError(Exception): <NEW_LINE> <INDENT> pass | Error in API request | 62599053f7d966606f749332 |
class RegisterForm(FlaskForm): <NEW_LINE> <INDENT> first_name = StringField('First Name', validators=[ InputRequired(message='First name is required.')]) <NEW_LINE> last_name = StringField('Last Name', validators=[ InputRequired(message='Last name is required.')]) <NEW_LINE> email = StringField('Email', validators=[InputRequired( message='Enter a valid email.'), Email(message='Enter a valid email.')]) <NEW_LINE> password = PasswordField('Password', validators=[ InputRequired(message='Password is required.'), Regexp('^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[@$!%*\#\?&])[A-Za-z\d@$!#%*?&]{8,40}$', message="Password must be at least 8 characters long, contain upper and lowercase characters, at least one special character, and at least one number.")]) | Registration form for new user | 62599053ac7a0e7691f739d6 |
class LinearLayout(AndroidLayout): <NEW_LINE> <INDENT> @property <NEW_LINE> def takenWidth(self): <NEW_LINE> <INDENT> return sum([ child.width for child in self.children ]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def fromSoup(cls, parent, soup, resourcesPaths: [pathlib.Path], *, device=None): <NEW_LINE> <INDENT> new = cls() <NEW_LINE> new.id = soup.get("android:id", None) <NEW_LINE> new.height = soup("android:layout_height", None) <NEW_LINE> new.width = soup("android:layout_width", None) <NEW_LINE> new.parent = parent <NEW_LINE> new.children = findChildren(new, soup.children, resourcesPaths, device=device) <NEW_LINE> return new <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.height * self.width <NEW_LINE> <DEDENT> def buttonRatio(self): <NEW_LINE> <INDENT> buttonArea = sum(( kid.area() for kid in self.children if type(kid) == Button )) <NEW_LINE> return buttonArea / self.area() | An AndroidLayout which displays its children in-line. The simplest AndroidLayout. | 6259905363d6d428bbee3cc8 |
class RealTimeDataEndpoint: <NEW_LINE> <INDENT> def __init__(self, hass, api): <NEW_LINE> <INDENT> self.hass = hass <NEW_LINE> self.api = api <NEW_LINE> self.ready = asyncio.Event() <NEW_LINE> self.sensors = [] <NEW_LINE> <DEDENT> async def async_refresh(self, now=None): <NEW_LINE> <INDENT> from solax import SolaxRequestError <NEW_LINE> try: <NEW_LINE> <INDENT> api_response = await self.api.get_data() <NEW_LINE> self.ready.set() <NEW_LINE> <DEDENT> except SolaxRequestError: <NEW_LINE> <INDENT> if now is not None: <NEW_LINE> <INDENT> self.ready.clear() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise PlatformNotReady <NEW_LINE> <DEDENT> <DEDENT> data = api_response.data <NEW_LINE> for sensor in self.sensors: <NEW_LINE> <INDENT> if sensor.key in data: <NEW_LINE> <INDENT> sensor.value = data[sensor.key] <NEW_LINE> sensor.async_schedule_update_ha_state() | Representation of a Sensor. | 62599053e5267d203ee6cde4 |
class Model: <NEW_LINE> <INDENT> def __init__(self, coeffs, coeffs_normed, terms, algo): <NEW_LINE> <INDENT> self.algodict = {'FoBa': 3., 'STRidge': 7., 'SR3': 18., 'Lasso': 40.} <NEW_LINE> self.name = algo <NEW_LINE> self.coefficients = coeffs <NEW_LINE> self.normed_coeffs = coeffs_normed <NEW_LINE> self.terms = terms <NEW_LINE> self.mean_rel_err = self.algodict[algo] <NEW_LINE> self.mean_abs_err = 2. <NEW_LINE> self.no_incorrect_terms = 1 | Data structure to store all necessary data of each model obtained from hyperparameter tuning | 62599053596a89723612902a |
class Volume(AStorage): <NEW_LINE> <INDENT> def exists(self, key: str) -> bool: <NEW_LINE> <INDENT> return os.path.isfile(key) <NEW_LINE> <DEDENT> def load(self, key: str) -> str: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> fp = open(key, 'r+') <NEW_LINE> content = fp.read(10000) <NEW_LINE> if not content: <NEW_LINE> <INDENT> raise UploadException(self._lang.drive_file_cannot_read()) <NEW_LINE> <DEDENT> return str(content) <NEW_LINE> <DEDENT> except IsADirectoryError as err: <NEW_LINE> <INDENT> raise UploadException(self._lang.drive_file_cannot_read()) from err <NEW_LINE> <DEDENT> except PermissionError as err: <NEW_LINE> <INDENT> raise UploadException(self._lang.drive_file_cannot_read()) from err <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if 'fp' in locals(): <NEW_LINE> <INDENT> fp.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def save(self, key: str, data: str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> fp = open(key, 'w') <NEW_LINE> if 'fp' not in locals(): <NEW_LINE> <INDENT> raise UploadException(self._lang.drive_file_cannot_write()) <NEW_LINE> <DEDENT> if not fp.write(data): <NEW_LINE> <INDENT> raise UploadException(self._lang.drive_file_cannot_write()) <NEW_LINE> <DEDENT> <DEDENT> except IsADirectoryError as err: <NEW_LINE> <INDENT> raise UploadException(self._lang.drive_file_cannot_write()) from err <NEW_LINE> <DEDENT> except PermissionError as err: <NEW_LINE> <INDENT> raise UploadException(self._lang.drive_file_cannot_write()) from err <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if 'fp' in locals(): <NEW_LINE> <INDENT> fp.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def remove(self, key: str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.unlink(key) <NEW_LINE> <DEDENT> except OSError as err: <NEW_LINE> <INDENT> raise UploadException(self._lang.drive_file_cannot_remove()) from err | * Class Volume
* Processing info file on disk volume | 62599053cb5e8a47e493cc02 |
class AlphaBetaAgent(MultiAgentSearchAgent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> numAgents = gameState.getNumAgents() <NEW_LINE> def helper(gameState, depth, agent, alpha, beta): <NEW_LINE> <INDENT> def overallValue(gameState, depth, agent, alpha, beta): <NEW_LINE> <INDENT> if gameState.isWin() or gameState.isLose() or depth == 0: <NEW_LINE> <INDENT> return self.evaluationFunction(gameState), None <NEW_LINE> <DEDENT> if agent == 0: <NEW_LINE> <INDENT> return maximum(gameState, depth, agent, alpha, beta) <NEW_LINE> <DEDENT> return minimum(gameState, depth, agent, alpha, beta) <NEW_LINE> <DEDENT> def maximum(gameState, depth, agent, alpha, beta): <NEW_LINE> <INDENT> action = None <NEW_LINE> count = -sys.maxint - 1 <NEW_LINE> for agentAction in gameState.getLegalActions(agent): <NEW_LINE> <INDENT> finalValue = overallValue(gameState.generateSuccessor(agent, agentAction), depth, (agent + 1) % numAgents, alpha, beta)[0] <NEW_LINE> if finalValue > beta: <NEW_LINE> <INDENT> return finalValue, agentAction <NEW_LINE> <DEDENT> if finalValue > count: <NEW_LINE> <INDENT> count = finalValue <NEW_LINE> action = agentAction <NEW_LINE> <DEDENT> alpha = max(alpha, count) <NEW_LINE> <DEDENT> return count, action <NEW_LINE> <DEDENT> def minimum(gameState, depth, agent, alpha, beta): <NEW_LINE> <INDENT> action = None <NEW_LINE> count = sys.maxint <NEW_LINE> for agentAction in gameState.getLegalActions(agent): <NEW_LINE> <INDENT> if (agent + 1) % numAgents == 0: <NEW_LINE> <INDENT> finalValue = overallValue(gameState.generateSuccessor(agent, agentAction), depth - 1, (agent + 1) % numAgents, alpha, beta)[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> finalValue = overallValue(gameState.generateSuccessor(agent, agentAction), depth, (agent + 1) % numAgents, alpha, beta)[0] <NEW_LINE> <DEDENT> if finalValue < alpha: <NEW_LINE> <INDENT> return finalValue, agentAction <NEW_LINE> <DEDENT> if finalValue < count: <NEW_LINE> <INDENT> count = finalValue <NEW_LINE> action = agentAction <NEW_LINE> <DEDENT> beta = min(beta, count) <NEW_LINE> <DEDENT> return count, action <NEW_LINE> <DEDENT> return overallValue(gameState, depth, 0, alpha, beta) <NEW_LINE> <DEDENT> return helper(gameState, self.depth, 0, -sys.maxint - 1, sys.maxint)[1] | Your minimax agent with alpha-beta pruning (question 3) | 62599053e64d504609df9e4b |
class Meta: <NEW_LINE> <INDENT> verbose_name_plural = "Acte" | The model title that is displayed in the admin interface | 625990536e29344779b01b3f |
class GeoMultiPolygon(geo_field.Geom): <NEW_LINE> <INDENT> _type = 'geo_multi_polygon' <NEW_LINE> def __init__(self, string, dim=2, srid=900913, gist_index=True, **args): <NEW_LINE> <INDENT> super(GeoMultiPolygon, self).__init__( string, "MULTIPOLYGON", dim=dim, srid=srid, gist_index=gist_index, **args) | New type of column in the ORM for POSTGIS geometry MultiPolygon type | 62599053baa26c4b54d5079a |
class TestGitHubMostStarred(object): <NEW_LINE> <INDENT> def setup_method(self, method): <NEW_LINE> <INDENT> assert method <NEW_LINE> <DEDENT> def teardown_method(self, method): <NEW_LINE> <INDENT> assert method | Tests for GitHubMostStarred class. | 62599053507cdc57c63a629b |
class InvalidDodoFile(Exception): <NEW_LINE> <INDENT> pass | Invalid dodo file | 62599053dc8b845886d54abb |
class OverrideFeeds(_MWS): <NEW_LINE> <INDENT> ACCOUNT_TYPE = "Merchant" <NEW_LINE> def submit_feed(self, feed, feed_type, marketplaceids=None, content_type="text/xml", purge=False): <NEW_LINE> <INDENT> purge = 'true' if purge else 'false' <NEW_LINE> data = dict(Action='SubmitFeed', FeedType=feed_type, PurgeAndReplace=purge) <NEW_LINE> data.update(self.enumerate_param('MarketplaceIdList.Id.', marketplaceids)) <NEW_LINE> md = calc_md5(feed) <NEW_LINE> return self.request(data, method="POST", body=feed, extra_headers={'Content-MD5': md, 'Content-Type': content_type}) <NEW_LINE> <DEDENT> def get_feed_submission_list(self, feedids=None, max_count=None, feedtypes=None, processingstatuses=None, fromdate=None, todate=None): <NEW_LINE> <INDENT> data = dict(Action='GetFeedSubmissionList', MaxCount=max_count, SubmittedFromDate=fromdate, SubmittedToDate=todate, ) <NEW_LINE> data.update(self.enumerate_param('FeedSubmissionIdList.Id', feedids)) <NEW_LINE> data.update(self.enumerate_param('FeedTypeList.Type.', feedtypes)) <NEW_LINE> data.update(self.enumerate_param('FeedProcessingStatusList.Status.', processingstatuses)) <NEW_LINE> return self.request(data) <NEW_LINE> <DEDENT> def get_submission_list_by_next_token(self, token): <NEW_LINE> <INDENT> data = dict(Action='GetFeedSubmissionListByNextToken', NextToken=token) <NEW_LINE> return self.request(data) <NEW_LINE> <DEDENT> def get_feed_submission_count(self, feedtypes=None, processingstatuses=None, fromdate=None, todate=None): <NEW_LINE> <INDENT> data = dict(Action='GetFeedSubmissionCount', SubmittedFromDate=fromdate, SubmittedToDate=todate) <NEW_LINE> data.update(self.enumerate_param('FeedTypeList.Type.', feedtypes)) <NEW_LINE> data.update(self.enumerate_param('FeedProcessingStatusList.Status.', processingstatuses)) <NEW_LINE> return self.request(data) <NEW_LINE> <DEDENT> def cancel_feed_submissions(self, feedids=None, feedtypes=None, fromdate=None, todate=None): <NEW_LINE> <INDENT> data = dict(Action='CancelFeedSubmissions', SubmittedFromDate=fromdate, SubmittedToDate=todate) <NEW_LINE> data.update(self.enumerate_param('FeedSubmissionIdList.Id.', feedids)) <NEW_LINE> data.update(self.enumerate_param('FeedTypeList.Type.', feedtypes)) <NEW_LINE> return self.request(data) <NEW_LINE> <DEDENT> def get_feed_submission_result(self, feedid): <NEW_LINE> <INDENT> data = dict(Action='GetFeedSubmissionResult', FeedSubmissionId=feedid) <NEW_LINE> return self.request(data) | Amazon MWS Feeds API | 62599053462c4b4f79dbcefb |
class CollabServerFactory(ServerFactory): <NEW_LINE> <INDENT> def __init__(self, protocol_hooks=None): <NEW_LINE> <INDENT> print('Starting collaboration server factory.') <NEW_LINE> self.protocols = {} <NEW_LINE> self.available_docs = {} <NEW_LINE> if protocol_hooks is not None: <NEW_LINE> <INDENT> self.hooks = protocol_hooks <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.hooks = {} <NEW_LINE> <DEDENT> <DEDENT> def startFactory(self): <NEW_LINE> <INDENT> print('Factory started') <NEW_LINE> <DEDENT> def stopFactory(self): <NEW_LINE> <INDENT> print('Factory stopped') <NEW_LINE> <DEDENT> def broadcast(self, packet): <NEW_LINE> <INDENT> for protocol in self.protocols.itervalues(): <NEW_LINE> <INDENT> protocol.transport.write(packet) <NEW_LINE> <DEDENT> <DEDENT> def buildProtocol(self, addr): <NEW_LINE> <INDENT> print('{0} is connecting...'.format(str(addr))) <NEW_LINE> protocol = CollabServerProtocol( factory=self, address=addr, **self.hooks) <NEW_LINE> protocol.available_docs = self.available_docs <NEW_LINE> self.protocols[addr] = protocol <NEW_LINE> return protocol | A factory for server protocols
Creates and manages connections to collaboration clients | 6259905307d97122c42181a0 |
class ResponseHeadersPolicyConfig(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "Comment": (str, False), "CorsConfig": (CorsConfig, False), "CustomHeadersConfig": (CustomHeadersConfig, False), "Name": (str, True), "SecurityHeadersConfig": (SecurityHeadersConfig, False), } | `ResponseHeadersPolicyConfig <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudfront-responseheaderspolicy-responseheaderspolicyconfig.html>`__ | 625990538da39b475be046e1 |
class BaseSchemaItemField(BaseField): <NEW_LINE> <INDENT> _schema_class = None <NEW_LINE> _schema_kwarg_prefix = '' <NEW_LINE> _schema_valid_kwargs = () <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if not hasattr(self, '_kwargs_backup'): <NEW_LINE> <INDENT> self._kwargs_backup = kwargs.copy() <NEW_LINE> <DEDENT> type_args, type_kw, cleaned_kw = self.process_type_args(kwargs) <NEW_LINE> if not args: <NEW_LINE> <INDENT> schema_item, cleaned_kw = self._generate_schema_item(cleaned_kw) <NEW_LINE> <DEDENT> column_kw = self.process_column_args(cleaned_kw) <NEW_LINE> if args: <NEW_LINE> <INDENT> column_kw['name'], column_kw['type_'], schema_item = args <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> column_kw['type_'] = self._sqla_type_cls(*type_args, **type_kw) <NEW_LINE> if 'type_' not in kwargs: <NEW_LINE> <INDENT> self._init_kwargs = self._kwargs_backup.copy() <NEW_LINE> <DEDENT> <DEDENT> column_args = (schema_item,) <NEW_LINE> return Column.__init__(self, *column_args, **column_kw) <NEW_LINE> <DEDENT> def _generate_schema_item(self, cleaned_kw): <NEW_LINE> <INDENT> schema_kwargs = {} <NEW_LINE> for key in self._schema_valid_kwargs: <NEW_LINE> <INDENT> prefixed_key = self._schema_kwarg_prefix + key <NEW_LINE> if prefixed_key in cleaned_kw: <NEW_LINE> <INDENT> schema_kwargs[key] = cleaned_kw.pop(prefixed_key) <NEW_LINE> <DEDENT> <DEDENT> schema_item = self._schema_class(**schema_kwargs) <NEW_LINE> return schema_item, cleaned_kw | Base class for fields/columns that accept a schema item/constraint
on column init. E.g. Column(Integer, ForeignKey('user.id'))
It differs from regular columns in that an item/constraint passed to the
Column on init has to be passed as a positional argument and should
also receive arguments. Thus 3 objects need to be created on init:
Column, Type, and SchemaItem/Constraint.
Attributes:
_schema_class: Class to be instantiated to create a schema item.
_schema_kwarg_prefix: Prefix schema item's kwargs should have. This
is used to avoid making a mess, as both column, type and schemaitem
kwargs may be passed at once.
_schema_valid_kwargs: Sequence of strings that represent names of
kwargs `_schema_class` may receive. Should not include prefix. | 6259905321a7993f00c67464 |
class MultiHeadSelfAttention(_BaseMultiHeadAttention): <NEW_LINE> <INDENT> def build(self, input_shape): <NEW_LINE> <INDENT> if not isinstance(input_shape, list): <NEW_LINE> <INDENT> raise ValueError('Invalid input') <NEW_LINE> <DEDENT> d_model = input_shape[0][-1] <NEW_LINE> self.validate_model_dimensionality(d_model) <NEW_LINE> self.qkv_weights = self.add_weight( name='qkv_weights', shape=(d_model, d_model * 3), initializer='glorot_normal', trainable=True) <NEW_LINE> self.build_output_params(d_model) <NEW_LINE> return super().build(input_shape) <NEW_LINE> <DEDENT> def call(self, inputs, **kwargs): <NEW_LINE> <INDENT> if not (K.is_tensor(inputs[0]) and K.is_tensor(inputs[1])): <NEW_LINE> <INDENT> raise ValueError( 'The layer can be called only with one tensor as an argument') <NEW_LINE> <DEDENT> query_input, self_attn_mask = inputs <NEW_LINE> query_shape = K.shape(query_input) <NEW_LINE> seq_len, d_model = query_shape[-2], query_shape[-1] <NEW_LINE> qkv = K.dot(K.reshape(query_input, [-1, d_model]), self.qkv_weights) <NEW_LINE> pre_q, pre_k, pre_v = [ K.reshape( qkv[:, i * d_model:(i + 1) * d_model], (-1, seq_len, self.num_heads, d_model // self.num_heads)) for i in range(3)] <NEW_LINE> attention_out = self.attention(pre_q, pre_v, pre_k, seq_len, d_model, self_attn_mask, training=kwargs.get('training')) <NEW_LINE> return attention_out | Multi-head self-attention for both encoders and decoders.
Uses only one input and has an implementation better suited to that use case than the more general MultiHeadAttention class.
class RandomGray: <NEW_LINE> <INDENT> def __init__(self, consistent=True, p=0.5): <NEW_LINE> <INDENT> self.consistent = consistent <NEW_LINE> self.p = p <NEW_LINE> <DEDENT> def __call__(self, imgmap): <NEW_LINE> <INDENT> if self.consistent: <NEW_LINE> <INDENT> if random.random() < self.p: <NEW_LINE> <INDENT> return [self.grayscale(i) for i in imgmap] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return imgmap <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> result = [] <NEW_LINE> for i in imgmap: <NEW_LINE> <INDENT> if random.random() < self.p: <NEW_LINE> <INDENT> result.append(self.grayscale(i)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result.append(i) <NEW_LINE> <DEDENT> <DEDENT> assert len(result) == len(imgmap) <NEW_LINE> return result <NEW_LINE> <DEDENT> <DEDENT> def grayscale(self, img): <NEW_LINE> <INDENT> channel = np.random.choice(3) <NEW_LINE> np_img = np.array(img)[:,:,channel] <NEW_LINE> np_img = np.dstack([np_img, np_img, np_img]) <NEW_LINE> img = Image.fromarray(np_img, 'RGB') <NEW_LINE> return img | Actually it is a channel splitting, not strictly grayscale images | 6259905307f4c71912bb0931 |
@dataclass(frozen=True) <NEW_LINE> class SBool(Value): <NEW_LINE> <INDENT> value: bool <NEW_LINE> def type_name(self) -> SSym: <NEW_LINE> <INDENT> return SSym('bool') <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return str(self.value) <NEW_LINE> <DEDENT> def address(self) -> int: <NEW_LINE> <INDENT> return id(self.value) <NEW_LINE> <DEDENT> def to_param(self) -> bytecode.BoolLit: <NEW_LINE> <INDENT> return bytecode.BoolLit(self) | A lisp boolean | 62599053097d151d1a2c256e |
class TestIP6Input(VppTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestIP6Input, self).setUp() <NEW_LINE> self.create_pg_interfaces(range(2)) <NEW_LINE> for i in self.pg_interfaces: <NEW_LINE> <INDENT> i.admin_up() <NEW_LINE> i.config_ip6() <NEW_LINE> i.resolve_ndp() <NEW_LINE> <DEDENT> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> super(TestIP6Input, self).tearDown() <NEW_LINE> for i in self.pg_interfaces: <NEW_LINE> <INDENT> i.unconfig_ip6() <NEW_LINE> i.admin_down() <NEW_LINE> <DEDENT> <DEDENT> def send_and_expect(self, input, pkts, output): <NEW_LINE> <INDENT> self.vapi.cli("clear trace") <NEW_LINE> input.add_stream(pkts) <NEW_LINE> self.pg_enable_capture(self.pg_interfaces) <NEW_LINE> self.pg_start() <NEW_LINE> rx = output.get_capture(len(pkts)) <NEW_LINE> return rx <NEW_LINE> <DEDENT> def send_and_assert_no_replies(self, intf, pkts, remark): <NEW_LINE> <INDENT> self.vapi.cli("clear trace") <NEW_LINE> intf.add_stream(pkts) <NEW_LINE> self.pg_enable_capture(self.pg_interfaces) <NEW_LINE> self.pg_start() <NEW_LINE> for i in self.pg_interfaces: <NEW_LINE> <INDENT> i.get_capture(0) <NEW_LINE> i.assert_nothing_captured(remark=remark) <NEW_LINE> <DEDENT> <DEDENT> def test_ip_input(self): <NEW_LINE> <INDENT> p_version = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) / IPv6(src=self.pg0.remote_ip6, dst=self.pg1.remote_ip6, version=3) / UDP(sport=1234, dport=1234) / Raw('\xa5' * 100)) <NEW_LINE> self.send_and_assert_no_replies(self.pg0, p_version * 65, "funky version") <NEW_LINE> p_version = (Ether(src=self.pg0.remote_mac, dst=self.pg0.local_mac) / IPv6(src=self.pg0.remote_ip6, dst=self.pg1.remote_ip6, hlim=1) / UDP(sport=1234, dport=1234) / Raw('\xa5' * 100)) <NEW_LINE> rx = self.send_and_expect(self.pg0, p_version * 65, self.pg0) <NEW_LINE> rx = rx[0] <NEW_LINE> icmp = rx[ICMPv6TimeExceeded] <NEW_LINE> self.assertEqual(icmp.type, 3) <NEW_LINE> self.assertEqual(icmp.code, 0) <NEW_LINE> self.logger.error(self.vapi.cli("sh error")) | IPv6 Input Exceptions | 625990533539df3088ecd79e |
class TestImageController(BaseTestCase): <NEW_LINE> <INDENT> def test_get_image_by_hostcode_and_product_no(self): <NEW_LINE> <INDENT> response = self.client.open( '//images/hosts/{hostCode}/images/{productNo}'.format(hostCode='hostCode_example', productNo='productNo_example'), method='GET') <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) <NEW_LINE> <DEDENT> def test_get_image_by_id(self): <NEW_LINE> <INDENT> response = self.client.open( '//images/{imageId}'.format(imageId='imageId_example'), method='GET') <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) <NEW_LINE> <DEDENT> def test_get_images(self): <NEW_LINE> <INDENT> query_string = [('imageId', 'imageId_example'), ('offset', 56), ('limit', 56)] <NEW_LINE> response = self.client.open( '//images', method='GET', query_string=query_string) <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) <NEW_LINE> <DEDENT> def test_get_images_by_object_id(self): <NEW_LINE> <INDENT> query_string = [('offset', 56), ('limit', 56)] <NEW_LINE> response = self.client.open( '//images/objects/{objectId}'.format(objectId='objectId_example'), method='GET', query_string=query_string) <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) <NEW_LINE> <DEDENT> def test_get_images_by_user_image_file(self): <NEW_LINE> <INDENT> query_string = [('offset', 56), ('limit', 56)] <NEW_LINE> data = dict(file=(BytesIO(b'some file data'), 'file.txt')) <NEW_LINE> response = self.client.open( '//images/userImages', method='POST', data=data, content_type='multipart/form-data', query_string=query_string) <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) <NEW_LINE> <DEDENT> def test_get_images_by_user_image_id_and_object_index(self): <NEW_LINE> <INDENT> response = self.client.open( '//images/userImages/{userImageId}/objects/{objectIndex}'.format(userImageId='userImageId_example', objectIndex=56), method='GET') <NEW_LINE> self.assert200(response, 'Response body is : ' + response.data.decode('utf-8')) | ImageController integration test stubs | 62599053a79ad1619776b539 |
class FlowLog(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.VpcId = None <NEW_LINE> self.FlowLogId = None <NEW_LINE> self.FlowLogName = None <NEW_LINE> self.ResourceType = None <NEW_LINE> self.ResourceId = None <NEW_LINE> self.TrafficType = None <NEW_LINE> self.CloudLogId = None <NEW_LINE> self.CloudLogState = None <NEW_LINE> self.FlowLogDescription = None <NEW_LINE> self.CreatedTime = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.VpcId = params.get("VpcId") <NEW_LINE> self.FlowLogId = params.get("FlowLogId") <NEW_LINE> self.FlowLogName = params.get("FlowLogName") <NEW_LINE> self.ResourceType = params.get("ResourceType") <NEW_LINE> self.ResourceId = params.get("ResourceId") <NEW_LINE> self.TrafficType = params.get("TrafficType") <NEW_LINE> self.CloudLogId = params.get("CloudLogId") <NEW_LINE> self.CloudLogState = params.get("CloudLogState") <NEW_LINE> self.FlowLogDescription = params.get("FlowLogDescription") <NEW_LINE> self.CreatedTime = params.get("CreatedTime") | Flow log
| 6259905316aa5153ce4019dc |
class TestModulesJinja(ModuleCase): <NEW_LINE> <INDENT> def _path(self, name, absolute=False): <NEW_LINE> <INDENT> path = os.path.join("modules", "jinja", name) <NEW_LINE> if absolute: <NEW_LINE> <INDENT> return os.path.join(RUNTIME_VARS.BASE_FILES, path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> <DEDENT> def test_import_json(self): <NEW_LINE> <INDENT> json_file = "osarchmap.json" <NEW_LINE> ret = self.run_function("jinja.import_json", [self._path(json_file)]) <NEW_LINE> with salt.utils.files.fopen(self._path(json_file, absolute=True)) as fh_: <NEW_LINE> <INDENT> self.assertDictEqual(salt.utils.json.load(fh_), ret) <NEW_LINE> <DEDENT> <DEDENT> def test_import_yaml(self): <NEW_LINE> <INDENT> yaml_file = "defaults.yaml" <NEW_LINE> ret = self.run_function("jinja.import_yaml", [self._path(yaml_file)]) <NEW_LINE> with salt.utils.files.fopen(self._path(yaml_file, absolute=True)) as fh_: <NEW_LINE> <INDENT> self.assertDictEqual(salt.utils.yaml.safe_load(fh_), ret) <NEW_LINE> <DEDENT> <DEDENT> @requires_system_grains <NEW_LINE> def test_load_map(self, grains): <NEW_LINE> <INDENT> ret = self.run_function("jinja.load_map", [self._path("map.jinja"), "template"]) <NEW_LINE> assert isinstance( ret, dict ), "failed to return dictionary from jinja.load_map: {}".format(ret) <NEW_LINE> with salt.utils.files.fopen(self._path("defaults.yaml", absolute=True)) as fh_: <NEW_LINE> <INDENT> defaults = salt.utils.yaml.safe_load(fh_) <NEW_LINE> <DEDENT> with salt.utils.files.fopen(self._path("osarchmap.json", absolute=True)) as fh_: <NEW_LINE> <INDENT> osarchmap = salt.utils.json.load(fh_) <NEW_LINE> <DEDENT> with salt.utils.files.fopen( self._path("osfamilymap.yaml", absolute=True) ) as fh_: <NEW_LINE> <INDENT> osfamilymap = salt.utils.yaml.safe_load(fh_) <NEW_LINE> <DEDENT> with salt.utils.files.fopen(self._path("osmap.yaml", absolute=True)) as fh_: <NEW_LINE> <INDENT> osmap = salt.utils.yaml.safe_load(fh_) <NEW_LINE> <DEDENT> with salt.utils.files.fopen( self._path("osfingermap.yaml", absolute=True) ) as fh_: <NEW_LINE> <INDENT> osfingermap = salt.utils.yaml.safe_load(fh_) <NEW_LINE> <DEDENT> self.assertEqual( ret.get("arch"), osarchmap.get(grains["osarch"], {}).get("arch") ) <NEW_LINE> self.assertEqual( ret.get("config"), osfingermap.get(grains["osfinger"], {}).get( "config", osmap.get(grains["os"], {}).get( "config", osfamilymap.get(grains["os_family"], {}).get( "config", defaults.get("template").get("config") ), ), ), ) | Test the jinja map module | 625990537b25080760ed875b |
class Notification(models.Model): <NEW_LINE> <INDENT> visitor = models.ForeignKey(Visitor) <NEW_LINE> notification_text = models.CharField('Notification Message', max_length=200) <NEW_LINE> date = models.DateTimeField('Notification Date') <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.notification_text + ' ' + self.date.__str__() | Notification class represents a notification or message left by the owner for a visitor | 625990531f037a2d8b9e52e9 |
class StopAgentNode(ArmLogicTreeNode): <NEW_LINE> <INDENT> bl_idname = 'LNStopAgentNode' <NEW_LINE> bl_label = 'Stop Agent' <NEW_LINE> arm_version = 1 <NEW_LINE> def arm_init(self, context): <NEW_LINE> <INDENT> self.add_input('ArmNodeSocketAction', 'In') <NEW_LINE> self.add_input('ArmNodeSocketObject', 'Object') <NEW_LINE> self.add_output('ArmNodeSocketAction', 'Out') | Stops the given NavMesh agent. | 625990534a966d76dd5f03e7 |
class GetDetectInfoEnhancedResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Text = None <NEW_LINE> self.IdCardData = None <NEW_LINE> self.BestFrame = None <NEW_LINE> self.VideoData = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("Text") is not None: <NEW_LINE> <INDENT> self.Text = DetectInfoText() <NEW_LINE> self.Text._deserialize(params.get("Text")) <NEW_LINE> <DEDENT> if params.get("IdCardData") is not None: <NEW_LINE> <INDENT> self.IdCardData = DetectInfoIdCardData() <NEW_LINE> self.IdCardData._deserialize(params.get("IdCardData")) <NEW_LINE> <DEDENT> if params.get("BestFrame") is not None: <NEW_LINE> <INDENT> self.BestFrame = DetectInfoBestFrame() <NEW_LINE> self.BestFrame._deserialize(params.get("BestFrame")) <NEW_LINE> <DEDENT> if params.get("VideoData") is not None: <NEW_LINE> <INDENT> self.VideoData = DetectInfoVideoData() <NEW_LINE> self.VideoData._deserialize(params.get("VideoData")) <NEW_LINE> <DEDENT> self.RequestId = params.get("RequestId") | GetDetectInfoEnhanced response parameter structure
| 625990537d847024c075d8d3 |
class NoChallengeCredentialsPlugin(object): <NEW_LINE> <INDENT> implements(interfaces.ICredentialsPlugin) <NEW_LINE> def extractCredentials(self, request): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def challenge(self, request): <NEW_LINE> <INDENT> if not IUnauthenticatedPrincipal.providedBy(request.principal): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def logout(self, request): <NEW_LINE> <INDENT> return False | A plugin that doesn't challenge if the principal is authenticated.
There are two reasonable ways to handle an unauthorized error for an
authenticated principal:
- Inform the user of the unauthorized error
- Let the user login with a different set of credentials
Since either approach is reasonable, we need to give the site manager
some way of specifying one of the two policies.
By default, a user will be challenged for a new set of credentials if
unauthorized. A site manager can insert this plugin in the front of the
plugin list to prevent that challenge from occurring. This will
typically result in an 'Unauthorized' message to the user.
The 'challenge' behavior of the plugin is simple. To illustrate, we'll
create a plugin:
>>> challenger = NoChallengeCredentialsPlugin()
and a test request with an authenticated principal:
>>> from zope.publisher.browser import TestRequest
>>> request = TestRequest()
>>> IUnauthenticatedPrincipal.providedBy(request.principal)
False
When we challenge using the plugin:
>>> challenger.challenge(request)
True
we get a value that signals the PAU that this plugin successfully
challenged the user (even though it actually did nothing). The PAU
will stop trying to challenge and the user will not get a chance to
provide different credentials. The result is typically an error message.
On the other hand, if the user is unauthenticated:
>>> class Principal(object):
... implements(IUnauthenticatedPrincipal)
>>> request.setPrincipal(Principal())
>>> IUnauthenticatedPrincipal.providedBy(request.principal)
True
the plugin challenge will return None:
>>> print challenger.challenge(request)
None
signaling the PAU that it should try the next plugin for a challenge. If
the PAU is configured properly, the user will receive a challenge and be
allowed to provide different credentials. | 62599053baa26c4b54d5079c |
class ConsumerGroupListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[ConsumerGroup]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ConsumerGroupListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None) | The result to the List Consumer Group operation.
:param value: Result of the List Consumer Group operation.
:type value: list[~azure.mgmt.eventhub.v2021_11_01.models.ConsumerGroup]
:param next_link: Link to the next set of results. Not empty if Value contains incomplete list
of Consumer Group.
:type next_link: str | 62599053d99f1b3c44d06b98 |
class SetPartitions_set(SetPartitions): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def __classcall_private__(cls, s): <NEW_LINE> <INDENT> return super(SetPartitions_set, cls).__classcall__(cls, frozenset(s)) <NEW_LINE> <DEDENT> def __init__(self, s): <NEW_LINE> <INDENT> self._set = s <NEW_LINE> SetPartitions.__init__(self, category=FiniteEnumeratedSets()) <NEW_LINE> <DEDENT> def _repr_(self): <NEW_LINE> <INDENT> return "Set partitions of %s"%(Set(self._set)) <NEW_LINE> <DEDENT> def __contains__(self, x): <NEW_LINE> <INDENT> if not SetPartitions.__contains__(self, x): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if sum(map(len, x)) != len(self._set): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if reduce(lambda u, s: u.union(Set(s)), x, Set([])) != Set(self._set): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def cardinality(self): <NEW_LINE> <INDENT> return bell_number(len(self._set)) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for p in Partitions(len(self._set)): <NEW_LINE> <INDENT> for sp in self._iterator_part(p): <NEW_LINE> <INDENT> yield self.element_class(self, sp) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def base_set(self): <NEW_LINE> <INDENT> return Set(self._set) <NEW_LINE> <DEDENT> def base_set_cardinality(self): <NEW_LINE> <INDENT> return len(self._set) | Set partitions of a fixed set `S`. | 62599053462c4b4f79dbcefd |
class HillmanGrasslTableaux(Tableaux): <NEW_LINE> <INDENT> Element = HillmanGrasslTableau <NEW_LINE> @staticmethod <NEW_LINE> def __classcall_private__(cls, shape, size=None): <NEW_LINE> <INDENT> if shape not in Partitions(): <NEW_LINE> <INDENT> raise ValueError("Shape must be a partition") <NEW_LINE> <DEDENT> shape_part = Partition(shape) <NEW_LINE> if size is None: <NEW_LINE> <INDENT> return HillmanGrasslTableaux_all(shape_part) <NEW_LINE> <DEDENT> elif size not in NN: <NEW_LINE> <INDENT> raise ValueError("Size must be a non-negative integer") <NEW_LINE> <DEDENT> return HillmanGrasslTableaux_size(shape_part, size) <NEW_LINE> <DEDENT> def _from_integer_vector(self, vec): <NEW_LINE> <INDENT> tableau = [] <NEW_LINE> pos = 0 <NEW_LINE> for rowlen in self._shape: <NEW_LINE> <INDENT> tableau += [vec[pos:(pos + rowlen)]] <NEW_LINE> pos += rowlen <NEW_LINE> <DEDENT> return self.element_class(self, tableau) | A factory class for the various classes of Hillman-Grassl tableaux.
INPUT:
- ``shape`` -- the shape of the tableaux
- ``size`` -- the size of the tableaux
OUTPUT:
- The appropriate class, after checking basic consistency tests.
A Hillman-Grassl tableau is a tableau whose entries are non-negative
integers and whose size is a weighted sum over hook lengths.
EXAMPLES::
sage: from ptdt_package import *
sage: HG = HillmanGrasslTableaux([2,1]);HG.cardinality()
+Infinity
sage: HG = HillmanGrasslTableaux([4,2], 2);HG.cardinality()
5
sage: HG = HillmanGrasslTableaux([4,3],3)
sage: HG.random_element() #random
[[0, 0, 0, 1], [1, 0, 1]]
sage: HG = HillmanGrasslTableaux([5,2],4);HG
Hillman-Grassl tableaux of shape [5, 2] and size 4
sage: HG = HillmanGrasslTableaux([3,2],4);HG.list()
[[[1, 0, 0], [0, 0]],
[[0, 1, 0], [0, 1]],
[[0, 1, 1], [0, 0]],
[[0, 0, 0], [2, 0]],
[[0, 0, 0], [1, 2]],
[[0, 0, 1], [1, 1]],
[[0, 0, 2], [1, 0]],
[[0, 0, 0], [0, 4]],
[[0, 0, 1], [0, 3]],
[[0, 0, 2], [0, 2]],
[[0, 0, 3], [0, 1]],
[[0, 0, 4], [0, 0]]]
sage: ([[0,0,1],[0,1]]) in HillmanGrasslTableaux([3,2], 2)
True
sage: Tableau([[0,0,1],[0,1]]) in HillmanGrasslTableaux([3,2], 2)
True
sage: ([[0,0,-1],[0,1]]) in HillmanGrasslTableaux([3,2], 2)
False
sage: ([[0,0,1],[0,1]]) in HillmanGrasslTableaux([4,2], 2)
False
sage: HG = HillmanGrasslTableaux([5,1]);HG.subset()
Hillman-Grassl tableaux of shape [5, 1]
sage: HG = HillmanGrasslTableaux([5,1]);HG.subset(2)
Hillman-Grassl tableaux of shape [5, 1] and size 2
TESTS:: | 6259905371ff763f4b5e8ca7 |
class TagsTopping(Topping): <NEW_LINE> <INDENT> PROVIDES = [ "tags" ] <NEW_LINE> DEPENDS = [] <NEW_LINE> @staticmethod <NEW_LINE> def act(aggregate, classloader, verbose=False): <NEW_LINE> <INDENT> tags = aggregate.setdefault("tags", {}) <NEW_LINE> prefix = "data/minecraft/tags/" <NEW_LINE> suffix = ".json" <NEW_LINE> for path in classloader.path_map: <NEW_LINE> <INDENT> if not path.startswith(prefix) or not path.endswith(suffix): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> key = path[len(prefix):-len(suffix)] <NEW_LINE> type, name = key.split("/", 2) <NEW_LINE> with classloader.open(path) as fin: <NEW_LINE> <INDENT> data = json.load(fin) <NEW_LINE> <DEDENT> data["type"] = type <NEW_LINE> data["name"] = name <NEW_LINE> tags[key] = data | Provides a list of all block and item tags | 6259905321a7993f00c67466 |
class ShellPlugin(AbsPlugin): <NEW_LINE> <INDENT> def open(self, args): <NEW_LINE> <INDENT> return EXIT_OPEN_SHELL | Interactive shell plugin | 625990530c0af96317c577dc |
class itkImageDuplicatorICF2(ITKCommonBasePython.itkObject): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> ImageDimension = _itkImageDuplicatorPython.itkImageDuplicatorICF2_ImageDimension <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkImageDuplicatorPython.itkImageDuplicatorICF2___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def SetInputImage(self, *args): <NEW_LINE> <INDENT> return _itkImageDuplicatorPython.itkImageDuplicatorICF2_SetInputImage(self, *args) <NEW_LINE> <DEDENT> def GetOutput(self): <NEW_LINE> <INDENT> return _itkImageDuplicatorPython.itkImageDuplicatorICF2_GetOutput(self) <NEW_LINE> <DEDENT> def Update(self): <NEW_LINE> <INDENT> return _itkImageDuplicatorPython.itkImageDuplicatorICF2_Update(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkImageDuplicatorPython.delete_itkImageDuplicatorICF2 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkImageDuplicatorPython.itkImageDuplicatorICF2_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkImageDuplicatorPython.itkImageDuplicatorICF2_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkImageDuplicatorICF2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New) | Proxy of C++ itkImageDuplicatorICF2 class | 6259905373bcbd0ca4bcb789 |
class BaseBackend(object): <NEW_LINE> <INDENT> def get(self, key): <NEW_LINE> <INDENT> raise NotImplementedError("Subclasses should implement this method") <NEW_LINE> <DEDENT> def set(self, key, value, timeout=None): <NEW_LINE> <INDENT> raise NotImplementedError("Subclasses should implement this method") <NEW_LINE> <DEDENT> def delete(self, key): <NEW_LINE> <INDENT> raise NotImplementedError("Subclasses should implement this method") | Abstract class that acts like every Cache Backend Interface. Extend it
to implement your own Cache Backend. | 625990530a50d4780f70683b |
class Jiggling( SteeringBehaviour ): <NEW_LINE> <INDENT> def __init__( self, robot ): <NEW_LINE> <INDENT> SteeringBehaviour.__init__( self, robot ) <NEW_LINE> self.numMovesRemaining = 0 <NEW_LINE> self.numMoveTicksRemaining = 0 <NEW_LINE> self.moveSteeringResult = ( 0.0, 0.0 ) <NEW_LINE> <DEDENT> def startJiggling( self ): <NEW_LINE> <INDENT> self.numMovesRemaining = NUM_MOVES <NEW_LINE> self.goBackwards = True <NEW_LINE> self.pickNewMove() <NEW_LINE> <DEDENT> def isJigglingComplete( self ): <NEW_LINE> <INDENT> return self.numMovesRemaining <= 0 and self.numMoveTicksRemaining <= 0 <NEW_LINE> <DEDENT> def update( self ): <NEW_LINE> <INDENT> moveAvailable = False <NEW_LINE> if self.numMoveTicksRemaining > 0: <NEW_LINE> <INDENT> moveAvailable = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.numMovesRemaining > 0: <NEW_LINE> <INDENT> self.pickNewMove() <NEW_LINE> self.numMovesRemaining = self.numMovesRemaining - 1 <NEW_LINE> moveAvailable = True <NEW_LINE> <DEDENT> <DEDENT> steeringResult = ( 0.0, 0.0 ) <NEW_LINE> if moveAvailable: <NEW_LINE> <INDENT> self.numMoveTicksRemaining = self.numMoveTicksRemaining - 1 <NEW_LINE> steeringResult = self.moveSteeringResult <NEW_LINE> <DEDENT> return steeringResult <NEW_LINE> <DEDENT> def pickNewMove( self ): <NEW_LINE> <INDENT> MIN_FORWARD_SPEED = 0.2 <NEW_LINE> MAX_FORWARD_SPEED = 0.5 <NEW_LINE> MIN_TURNING_SPEED = 0.1 <NEW_LINE> MAX_TURNING_SPEED = 0.3 <NEW_LINE> forwardSpeed = random.uniform( MIN_FORWARD_SPEED, MAX_FORWARD_SPEED ) <NEW_LINE> if self.goBackwards: <NEW_LINE> <INDENT> forwardSpeed = -forwardSpeed <NEW_LINE> <DEDENT> self.goBackwards = not self.goBackwards <NEW_LINE> turningSpeed = random.uniform( MIN_TURNING_SPEED, MAX_TURNING_SPEED ) <NEW_LINE> if random.random() < 0.5: <NEW_LINE> <INDENT> turningSpeed = -turningSpeed <NEW_LINE> <DEDENT> self.numMoveTicksRemaining = NUM_MOVE_TICKS <NEW_LINE> self.moveSteeringResult = ( forwardSpeed, turningSpeed ) | A steering behaviour that attempts to recover from stalls by jiggling
around... | 62599053f7d966606f749334 |
class NeighborhoodEncoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, features, features_global): <NEW_LINE> <INDENT> super(NeighborhoodEncoder, self).__init__() <NEW_LINE> assert(len(features) == 3) <NEW_LINE> assert(len(features_global) == 3) <NEW_LINE> self.fc1 = nn.Linear(3, features[0]) <NEW_LINE> self.fc2 = nn.Linear(features[0], features[1]) <NEW_LINE> self.fc3 = nn.Linear(features[1], features[2]) <NEW_LINE> self.fc1_global = nn.Linear(features[2], features_global[0]) <NEW_LINE> self.fc2_global = nn.Linear(features_global[0], features_global[1]) <NEW_LINE> self.fc3_global = nn.Linear(features_global[1], features_global[2]) <NEW_LINE> <DEDENT> def forward(self, relative_points, cluster): <NEW_LINE> <INDENT> fc1_features = F.relu(self.fc1(relative_points)) <NEW_LINE> fc2_features = F.relu(self.fc2(fc1_features)) <NEW_LINE> fc3_features = F.relu(self.fc3(fc2_features)) <NEW_LINE> max_features = gnn.global_max_pool( x=fc3_features, batch=cluster ) <NEW_LINE> fc1_global_features = F.relu(self.fc1_global(max_features)) <NEW_LINE> fc2_global_features = F.relu(self.fc2_global(fc1_global_features)) <NEW_LINE> fc3_global_features = F.relu(self.fc3_global(fc2_global_features)) <NEW_LINE> return fc3_global_features | This encoder takes in relative points and the cluster to which they belong and outputs
a single feature vector for each cluster. | 6259905324f1403a9268634c |
class UnsupportedImageTypeError(Exception): <NEW_LINE> <INDENT> pass | This image is formatted in a way pikepdf does not support. | 6259905326068e7796d4de41
class TrainConfigModel(ConfigModel): <NEW_LINE> <INDENT> def __init__(self, config={}): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.optType = "" <NEW_LINE> confkeys = list(self.config.keys()) <NEW_LINE> if ('train_config' not in confkeys and 'trainConfig' not in confkeys): <NEW_LINE> <INDENT> self.config['train_config'] = {} <NEW_LINE> <DEDENT> <DEDENT> def setTrainConfigGradientClipping(self, val: float): <NEW_LINE> <INDENT> self.setTrainConfigParamVal(keyNameTree=['gradient_clipping_by_norm'], val=val, valType=float) <NEW_LINE> <DEDENT> def setTrainConfigBatchSize(self, val: int): <NEW_LINE> <INDENT> self.setTrainConfigParamVal(keyNameTree=['batch_size'], val=val, valType=int) <NEW_LINE> <DEDENT> def setTrainConfigOptimizerKey(self): <NEW_LINE> <INDENT> self.setTrainConfigParamVal(keyNameTree=['optimizer'], val={}, valType=dict) <NEW_LINE> <DEDENT> def setTrainConfigOptUseMovingAverage(self, val: bool): <NEW_LINE> <INDENT> assertCond(val, isinstance(val, bool)) <NEW_LINE> self.setTrainConfigParamVal(keyNameTree=['optimizer', 'use_moving_average'], val=val, valType=bool) <NEW_LINE> <DEDENT> def setTrainConfigOptType(self, optType: str): <NEW_LINE> <INDENT> assertCond(optType, isinstance(optType, str)) <NEW_LINE> self.setTrainConfigParamVal(keyNameTree=['optimizer', optType], val={}, valType=dict) <NEW_LINE> self.optType = optType <NEW_LINE> <DEDENT> def setTrainConfigParamVal(self, keyNameTree: [str], val, valType, contentType=None ): <NEW_LINE> <INDENT> keyNameTree.insert(0, 'train_config') <NEW_LINE> self.setConfigParamVal(keyNames=keyNameTree, val=val, valType=valType, contentType=contentType) <NEW_LINE> <DEDENT> def setTrainConfigParamsVal(self, keyNameVals): <NEW_LINE> <INDENT> for keyVal in keyNameVals: <NEW_LINE> <INDENT> keyNameTree = keyVal[0] <NEW_LINE> val = keyVal[1] <NEW_LINE> valType = keyVal[2] <NEW_LINE> contentType = keyVal[3] <NEW_LINE> self.setTrainConfigParamVal(keyNameTree=keyNameTree, val=val, valType=valType, contentType=contentType) | Handles model specific train config | 62599053be8e80087fbc057b |
class Movie(): <NEW_LINE> <INDENT> def __init__(self, title, poster_image_url, trailer_youtube_url): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.poster_image_url = poster_image_url <NEW_LINE> self.trailer_youtube_url = trailer_youtube_url | A Movie class with variables title, poster_image_url
and trailer_youtube_url. | 6259905316aa5153ce4019df |
class KNearestNeighbors: <NEW_LINE> <INDENT> def __init__(self, X_train: NPArray, y_train: NPIntArray, k: int = 3) -> None: <NEW_LINE> <INDENT> self.k = k <NEW_LINE> self.X_train = X_train <NEW_LINE> self.y_train = y_train <NEW_LINE> <DEDENT> def predict(self, X: NPArray) -> NPArray: <NEW_LINE> <INDENT> dists = self.compute_distances(X) <NEW_LINE> return self.predict_labels(dists) <NEW_LINE> <DEDENT> def compute_distances(self, X: NPArray, num_loops: int = 0) -> NPArray: <NEW_LINE> <INDENT> if num_loops == 0: <NEW_LINE> <INDENT> return np.sqrt( np.sum(X**2, axis=1).reshape(-1, 1) - 2 * np.dot(X, self.X_train.T) + np.sum(self.X_train**2, axis=1) ) <NEW_LINE> <DEDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> if num_loops == 1: <NEW_LINE> <INDENT> for i in range(num_test): <NEW_LINE> <INDENT> dists[i, :] = np.sqrt(np.sum(np.square(X[i] - self.X_train), axis=1)) <NEW_LINE> <DEDENT> <DEDENT> elif num_loops == 2: <NEW_LINE> <INDENT> for i in range(num_test): <NEW_LINE> <INDENT> for j in range(num_train): <NEW_LINE> <INDENT> dists[i, j] = np.sqrt(np.sum(np.square(X[i] - self.X_train[j]))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dists <NEW_LINE> <DEDENT> def predict_labels(self, dists: NPArray) -> NPArray: <NEW_LINE> <INDENT> num_test = dists.shape[0] <NEW_LINE> y_pred = np.zeros(num_test) <NEW_LINE> for i in range(num_test): <NEW_LINE> <INDENT> inds = np.argsort(dists[i, :])[: self.k] <NEW_LINE> closest_y = self.y_train[inds] <NEW_LINE> y_pred[i] = np.bincount(closest_y).argmax() <NEW_LINE> <DEDENT> return y_pred | A k-NN classifier with L2 distance | 6259905323849d37ff8525bd |
class PublicTagsApiTests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> <DEDENT> def test_login_required(self): <NEW_LINE> <INDENT> res = self.client.get(TAGS_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED) | Test the publicly available tags API | 625990536e29344779b01b43
class StudentSpikeSlabPrior(RegressionSpikeSlabPrior): <NEW_LINE> <INDENT> def __init__( self, x, y=None, expected_r2=.5, prior_df=.01, expected_model_size=1, prior_information_weight=.01, diagonal_shrinkage=.5, optional_coefficient_estimate=None, max_flips=-1, mean_y=None, sdy=None, prior_inclusion_probabilities=None, sigma_upper_limit=np.Inf, tail_thickness_prior=R.UniformPrior(0.1, 100) ): <NEW_LINE> <INDENT> super().__init__( x=x, y=y, expected_r2=expected_r2, prior_df=prior_df, expected_model_size=expected_model_size, prior_information_weight=prior_information_weight, diagonal_shrinkage=diagonal_shrinkage, optional_coefficient_estimate=optional_coefficient_estimate, max_flips=max_flips, mean_y=mean_y, sdy=sdy, prior_inclusion_probabilities=prior_inclusion_probabilities, sigma_upper_limit=sigma_upper_limit) <NEW_LINE> self._nu_prior = tail_thickness_prior <NEW_LINE> <DEDENT> @property <NEW_LINE> def tail_thickness(self): <NEW_LINE> <INDENT> return self._nu_prior.boom() <NEW_LINE> <DEDENT> def create_sampler(self, model, assign=False): <NEW_LINE> <INDENT> pass | A SpikeSlabPrior appropriate for regression models with Student T errors. | 62599053d6c5a102081e3618 |
class HmacAuthV2Handler(AuthHandler, HmacKeys): <NEW_LINE> <INDENT> capability = ['hmac-v2', 'cloudfront'] <NEW_LINE> def __init__(self, host, config, provider): <NEW_LINE> <INDENT> AuthHandler.__init__(self, host, config, provider) <NEW_LINE> HmacKeys.__init__(self, host, config, provider) <NEW_LINE> self._hmac_256 = None <NEW_LINE> <DEDENT> def update_provider(self, provider): <NEW_LINE> <INDENT> super(HmacAuthV2Handler, self).update_provider(provider) <NEW_LINE> self._hmac_256 = None <NEW_LINE> <DEDENT> def add_auth(self, http_request, **kwargs): <NEW_LINE> <INDENT> headers = http_request.headers <NEW_LINE> if 'Date' not in headers: <NEW_LINE> <INDENT> headers['Date'] = formatdate(usegmt=True) <NEW_LINE> <DEDENT> if self._provider.security_token: <NEW_LINE> <INDENT> key = self._provider.security_token_header <NEW_LINE> headers[key] = self._provider.security_token <NEW_LINE> <DEDENT> b64_hmac = self.sign_string(headers['Date']) <NEW_LINE> auth_hdr = self._provider.auth_header <NEW_LINE> headers['Authorization'] = ("%s %s:%s" % (auth_hdr, self._provider.access_key, b64_hmac)) | Implements the simplified HMAC authorization used by CloudFront. | 62599053498bea3a75a59020 |
class AWSElasticBlockStoreVolumeSource(_kuber_definitions.Definition): <NEW_LINE> <INDENT> def __init__( self, fs_type: str = None, partition: int = None, read_only: bool = None, volume_id: str = None, ): <NEW_LINE> <INDENT> super(AWSElasticBlockStoreVolumeSource, self).__init__( api_version="core/v1", kind="AWSElasticBlockStoreVolumeSource" ) <NEW_LINE> self._properties = { "fsType": fs_type if fs_type is not None else "", "partition": partition if partition is not None else None, "readOnly": read_only if read_only is not None else None, "volumeID": volume_id if volume_id is not None else "", } <NEW_LINE> self._types = { "fsType": (str, None), "partition": (int, None), "readOnly": (bool, None), "volumeID": (str, None), } <NEW_LINE> <DEDENT> @property <NEW_LINE> def fs_type(self) -> str: <NEW_LINE> <INDENT> return typing.cast( str, self._properties.get("fsType"), ) <NEW_LINE> <DEDENT> @fs_type.setter <NEW_LINE> def fs_type(self, value: str): <NEW_LINE> <INDENT> self._properties["fsType"] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def partition(self) -> int: <NEW_LINE> <INDENT> return typing.cast( int, self._properties.get("partition"), ) <NEW_LINE> <DEDENT> @partition.setter <NEW_LINE> def partition(self, value: int): <NEW_LINE> <INDENT> self._properties["partition"] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def read_only(self) -> bool: <NEW_LINE> <INDENT> return typing.cast( bool, self._properties.get("readOnly"), ) <NEW_LINE> <DEDENT> @read_only.setter <NEW_LINE> def read_only(self, value: bool): <NEW_LINE> <INDENT> self._properties["readOnly"] = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def volume_id(self) -> str: <NEW_LINE> <INDENT> return typing.cast( str, self._properties.get("volumeID"), ) <NEW_LINE> <DEDENT> @volume_id.setter <NEW_LINE> def volume_id(self, value: str): <NEW_LINE> <INDENT> self._properties["volumeID"] = value <NEW_LINE> <DEDENT> def __enter__(self) -> "AWSElasticBlockStoreVolumeSource": <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> return False | Represents a Persistent Disk resource in AWS.
An AWS EBS disk must exist before mounting to a container.
The disk must also be in the same AWS zone as the kubelet.
An AWS EBS disk can only be mounted as read/write once. AWS
EBS volumes support ownership management and SELinux
relabeling. | 62599053435de62698e9d2fc |
class TargetMap(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_group_by_key(cls, key): <NEW_LINE> <INDENT> for group, keys in dimensions.hierarchy.iteritems(): <NEW_LINE> <INDENT> if key in keys: <NEW_LINE> <INDENT> return group <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def unpack(cls, value): <NEW_LINE> <INDENT> if not isinstance(value, basestring): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> return map(lambda x: x or None, value.split(separator)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def family_line(cls, value): <NEW_LINE> <INDENT> values = cls.unpack(value) <NEW_LINE> if not values: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> values = map(lambda val: val or 'Unknown', values) <NEW_LINE> return u' - '.join(values) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def pack(cls, values): <NEW_LINE> <INDENT> if not cls.is_valid(values): <NEW_LINE> <INDENT> raise InvalidTargetValues(values) <NEW_LINE> <DEDENT> stripped_strings = map(lambda x: unicode(x or '').strip(), values) <NEW_LINE> return separator.join(stripped_strings) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def is_valid(self, values): <NEW_LINE> <INDENT> return not(all(map(lambda x: x is None, values))) if values else False | Defined interface of an object which contains data about targeting,
possible keys, etc. | 62599053d99f1b3c44d06b9a |
class MatrixButtons(QtWidgets.QWidget): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.layout = QtWidgets.QGridLayout(self) <NEW_LINE> self.matrix00 = QtWidgets.QLineEdit() <NEW_LINE> self.matrix10 = QtWidgets.QLineEdit() <NEW_LINE> self.matrix20 = QtWidgets.QLineEdit() <NEW_LINE> self.addToLayout(self.matrix00,[0,0],[1,1]) <NEW_LINE> self.addToLayout(self.matrix10,[1,0],[1,1]) <NEW_LINE> self.addToLayout(self.matrix20,[2,0],[1,1]) <NEW_LINE> self.matrix01 = QtWidgets.QLineEdit() <NEW_LINE> self.matrix11 = QtWidgets.QLineEdit() <NEW_LINE> self.matrix21 = QtWidgets.QLineEdit() <NEW_LINE> self.addToLayout(self.matrix01,[0,1],[1,1]) <NEW_LINE> self.addToLayout(self.matrix11,[1,1],[1,1]) <NEW_LINE> self.addToLayout(self.matrix21,[2,1],[1,1]) <NEW_LINE> self.matrix02 = QtWidgets.QLineEdit() <NEW_LINE> self.matrix12 = QtWidgets.QLineEdit() <NEW_LINE> self.matrix22 = QtWidgets.QLineEdit() <NEW_LINE> self.addToLayout(self.matrix02,[0,2],[1,1]) <NEW_LINE> self.addToLayout(self.matrix12,[1,2],[1,1]) <NEW_LINE> self.addToLayout(self.matrix22,[2,2],[1,1]) <NEW_LINE> self.plotButton = QtWidgets.QPushButton('Plot') <NEW_LINE> self.addToLayout(self.plotButton,[1,3],[1,1]) <NEW_LINE> <DEDENT> def addToLayout(self, widget, position:list, size:list): <NEW_LINE> <INDENT> self.layout.addWidget(widget,position[1],position[0],size[1],size[0]) <NEW_LINE> <DEDENT> def setMatrix(self,matrix): <NEW_LINE> <INDENT> self.matrix00.setText(str(matrix[0,0])) <NEW_LINE> self.matrix10.setText(str(matrix[0,1])) <NEW_LINE> self.matrix20.setText(str(matrix[0,2])) <NEW_LINE> self.matrix01.setText(str(matrix[1,0])) <NEW_LINE> self.matrix11.setText(str(matrix[1,1])) <NEW_LINE> self.matrix21.setText(str(matrix[1,2])) <NEW_LINE> self.matrix02.setText(str(matrix[2,0])) <NEW_LINE> self.matrix12.setText(str(matrix[2,1])) <NEW_LINE> self.matrix22.setText(str(matrix[2,2])) | Widget for PyQt5 with the Matrix input and the buttons
...
Attributes
----------
layout: QtWidgets.QGridLayout | 62599053462c4b4f79dbceff |
class SXDAnalysis(systemtesting.MantidSystemTest): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> ws = Load(Filename='SXD23767.raw', LoadMonitors='Exclude') <NEW_LINE> from time import clock <NEW_LINE> start = clock() <NEW_LINE> QLab = ConvertToDiffractionMDWorkspace(InputWorkspace=ws, OutputDimensions='Q (lab frame)', SplitThreshold=50, LorentzCorrection='1',MaxRecursionDepth='13', Extents='-15,15,-15,15,-15,15',OneEventPerBin='0') <NEW_LINE> print(" ConvertToMD runs for: ",clock()-start,' sec') <NEW_LINE> peaks_qLab = FindPeaksMD(InputWorkspace=QLab, MaxPeaks=300, DensityThresholdFactor=10, PeakDistanceThreshold=1.0) <NEW_LINE> FindUBUsingFFT(PeaksWorkspace=peaks_qLab, MinD='3', MaxD='5',Tolerance=0.08) <NEW_LINE> out_params = IndexPeaks(PeaksWorkspace=peaks_qLab,Tolerance=0.12,RoundHKLs=1) <NEW_LINE> number_peaks_indexed = out_params[0] <NEW_LINE> ratio_indexed = float(number_peaks_indexed)/peaks_qLab.getNumberPeaks() <NEW_LINE> self.assertTrue(ratio_indexed >= 0.8, "Not enough peaks indexed. Ratio indexed : " + str(ratio_indexed)) <NEW_LINE> ShowPossibleCells(PeaksWorkspace=peaks_qLab,MaxScalarError='0.5') <NEW_LINE> SelectCellOfType(PeaksWorkspace=peaks_qLab, CellType='Cubic', Centering='F', Apply=True) <NEW_LINE> unitcell_length = 5.64 <NEW_LINE> unitcell_angle = 90 <NEW_LINE> length_tolerance = 0.1 <NEW_LINE> angle_tolerance = 0.25 <NEW_LINE> latt = peaks_qLab.sample().getOrientedLattice() <NEW_LINE> self.assertDelta( latt.a(), unitcell_length, length_tolerance, "a length is different from expected") <NEW_LINE> self.assertDelta( latt.b(), unitcell_length, length_tolerance, "b length is different from expected") <NEW_LINE> self.assertDelta( latt.c(), unitcell_length, length_tolerance, "c length is different from expected") <NEW_LINE> self.assertDelta( latt.alpha(), unitcell_angle, angle_tolerance, "alpha angle is different from expected") <NEW_LINE> self.assertDelta( latt.beta(), unitcell_angle, angle_tolerance, "beta angle is different from expected") <NEW_LINE> self.assertDelta( latt.gamma(), unitcell_angle, angle_tolerance, "gamma angle length is different from expected") <NEW_LINE> <DEDENT> def doValidation(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def requiredMemoryMB(self): <NEW_LINE> <INDENT> return 1000 | Start of a system test for SXD data analyiss | 62599053d53ae8145f91995d |
class Asteroid(Wrapper): <NEW_LINE> <INDENT> SMALL = 1 <NEW_LINE> MEDIUM = 2 <NEW_LINE> LARGE = 3 <NEW_LINE> images = {SMALL : games.load_image("asteroid_small.bmp"), MEDIUM : games.load_image("asteroid_med.bmp"), LARGE : games.load_image("asteroid_big.bmp") } <NEW_LINE> SPEED = 2 <NEW_LINE> POINTS = 30 <NEW_LINE> total = 0 <NEW_LINE> def __init__(self, game, x, y, size): <NEW_LINE> <INDENT> Asteroid.total += 1 <NEW_LINE> super(Asteroid, self).__init__( image = Asteroid.images[size], x = x, y = y, dx = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size, dy = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size) <NEW_LINE> self.game = game <NEW_LINE> self.size = size <NEW_LINE> self.hits = random.randint(0,1); <NEW_LINE> <DEDENT> def die(self): <NEW_LINE> <INDENT> self.game.score.value += int(Asteroid.POINTS / self.size) <NEW_LINE> self.game.score.right = games.screen.width - 10 <NEW_LINE> if self.hits == 0: <NEW_LINE> <INDENT> Asteroid.total -= 1 <NEW_LINE> if self.size != Asteroid.SMALL: <NEW_LINE> <INDENT> for i in range(self.size): <NEW_LINE> <INDENT> new_asteroid = Asteroid(game = self.game, x = self.x, y = self.y, size = self.size - 1) <NEW_LINE> games.screen.add(new_asteroid) <NEW_LINE> <DEDENT> <DEDENT> if Asteroid.total == 0: <NEW_LINE> <INDENT> self.game.advance() <NEW_LINE> <DEDENT> super(Asteroid, self).die() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.hits = self.hits - 1; | An asteroid which floats across the screen. | 6259905307f4c71912bb0935 |
class Solution: <NEW_LINE> <INDENT> def removeDuplicates(self, A): <NEW_LINE> <INDENT> length = 0 <NEW_LINE> for i in range(len(A)): <NEW_LINE> <INDENT> if i == 0 or A[i] != A[i - 1]: <NEW_LINE> <INDENT> A[length] = A[i] <NEW_LINE> length += 1 <NEW_LINE> <DEDENT> <DEDENT> return length | @param A: a list of integers
@return an integer | 62599053a79ad1619776b53b |
class FormattedExpansion(SageObject): <NEW_LINE> <INDENT> r <NEW_LINE> def __init__(self, tensor): <NEW_LINE> <INDENT> self.tensor = tensor <NEW_LINE> self.txt = None <NEW_LINE> self.latex = None <NEW_LINE> <DEDENT> def _repr_(self): <NEW_LINE> <INDENT> return self.txt <NEW_LINE> <DEDENT> def _latex_(self): <NEW_LINE> <INDENT> return self.latex | Helper class for displaying tensor expansions. | 62599053379a373c97d9a520 |
@encoding_stage.tf_style_encoding_stage <NEW_LINE> class PlusOneEncodingStage(encoding_stage.EncodingStageInterface): <NEW_LINE> <INDENT> ENCODED_VALUES_KEY = 'p1_values' <NEW_LINE> ADD_PARAM_KEY = 'p1_add' <NEW_LINE> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return 'plus_one' <NEW_LINE> <DEDENT> @property <NEW_LINE> def compressible_tensors_keys(self): <NEW_LINE> <INDENT> return [self.ENCODED_VALUES_KEY] <NEW_LINE> <DEDENT> @property <NEW_LINE> def commutes_with_sum(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def decode_needs_input_shape(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def get_params(self): <NEW_LINE> <INDENT> params = {self.ADD_PARAM_KEY: tf.constant(1.0)} <NEW_LINE> return params, params <NEW_LINE> <DEDENT> def encode(self, x, encode_params): <NEW_LINE> <INDENT> return {self.ENCODED_VALUES_KEY: x + encode_params[self.ADD_PARAM_KEY]} <NEW_LINE> <DEDENT> def decode(self, encoded_tensors, decode_params, num_summands=None, shape=None): <NEW_LINE> <INDENT> del num_summands, shape <NEW_LINE> decoded_x = ( encoded_tensors[self.ENCODED_VALUES_KEY] - decode_params[self.ADD_PARAM_KEY]) <NEW_LINE> return decoded_x | [Example] encoding stage, adding 1.
This is the simplest example implementation of an `EncodingStageInterface` -
no state, no constructor arguments, no shape information needed for decoding,
no commutativity with sum. | 62599053be8e80087fbc057c |
class InfoboxException(Exception): <NEW_LINE> <INDENT> pass | Raised if important data is missing
in an Infobox Staat | 625990537b25080760ed875d
class LookupResultSet(ResultSet): <NEW_LINE> <INDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) | Retrieve the value for the "Response" output from this choreography execution. ((xml) The response from Twitter in XML format) | 62599053e64d504609df9e4e |
class TestAttributesApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = AttributesApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_add_attribute(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_add_attributes(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_change_attribute(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_change_attributes(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_find_attributes(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_attribute(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_remove_attribute(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_remove_attributes(self): <NEW_LINE> <INDENT> pass | AttributesApi unit test stubs | 62599053507cdc57c63a62a1 |
class UseFlag(models.Model): <NEW_LINE> <INDENT> name = models.CharField( primary_key = True , max_length = 63 , validators = [use_flag_validator] ) <NEW_LINE> added_on = models.DateTimeField(auto_now_add=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> @models.permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return ('stats:use_details_url', (), {'useflag': self.name}) <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_submissions(self): <NEW_LINE> <INDENT> return Submission.objects.filter(global_use__name=self.name).count() <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_all_hosts(self): <NEW_LINE> <INDENT> return Submission.objects.filter(global_use__name=self.name).order_by() .aggregate(Count('host', distinct=True)).values()[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_hosts(self): <NEW_LINE> <INDENT> return Submission.objects.latest_submissions .filter(global_use__name=self.name).count() <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_previous_hosts(self): <NEW_LINE> <INDENT> return self.num_all_hosts - self.num_hosts | A USE flag. | 62599053435de62698e9d2fe |
class Crawl_Baidu_Hotkey(Base): <NEW_LINE> <INDENT> __tablename__ = 'crawl_baidu_hotkey' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> time = Column(DateTime, nullable=True) <NEW_LINE> keyword = Column(String(64), nullable=True) <NEW_LINE> order = Column(SmallInteger) <NEW_LINE> state = Column(SmallInteger, default=1) <NEW_LINE> source_id = Column(String(32)) <NEW_LINE> created_time = Column(DateTime, default=func.now()) <NEW_LINE> updated_time = Column(DateTime, default=func.now(), onupdate=func.now()) | WeChat news class | 62599053b7558d58954649a8
class Token(namedtuple('Token', ('value', 'type', 'loc'))): <NEW_LINE> <INDENT> def __new__(cls, value=None, type=None, loc=None): <NEW_LINE> <INDENT> return super(Token, cls).__new__(cls, value, type, loc) | Override __new__ method to create default (None) values for namedtuple
>>> Token(value="foobar")
Token(value='foobar', type=None, loc=None)
>>> Token("foo", "bar")
Token(value='foo', type='bar', loc=None) | 625990533cc13d1c6d466c3a |
class Client(sleekxmpp.ClientXMPP): <NEW_LINE> <INDENT> def __init__(self, lp, jid, password): <NEW_LINE> <INDENT> self.lp = lp <NEW_LINE> sleekxmpp.ClientXMPP.__init__(self, jid, password) <NEW_LINE> self.add_event_handler("session_start", self.session_start) <NEW_LINE> self.add_event_handler("message", self.message) <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> if sleekxmpp.ClientXMPP.connect(self): <NEW_LINE> <INDENT> self.process(threaded=True) <NEW_LINE> <DEDENT> <DEDENT> def session_start(self, event): <NEW_LINE> <INDENT> self.lp.connected(self) <NEW_LINE> self.get_roster() <NEW_LINE> self.send_presence() <NEW_LINE> <DEDENT> def message(self, msg): <NEW_LINE> <INDENT> self.lp.message_received(self, msg['from'].bare, msg['body']) | A layer of abstraction over SleekXMPP
| 6259905382261d6c52730948 |
class SqlOrder(WorkOrderBase): <NEW_LINE> <INDENT> ordertype = models.ForeignKey(SqlOrderType,blank=True,null=True,on_delete=models.SET_NULL,related_name="sqlorder_type") <NEW_LINE> approver_group = models.ForeignKey(ApprovalGroup,blank=True,null=True,on_delete=models.SET_NULL,related_name="sqlorder_approvergroup") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = u"SQL工单表" <NEW_LINE> verbose_name_plural = verbose_name <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.id | SQL work order table | 625990538e71fb1e983bcfc7
class ScholarArticleParser120201(ScholarArticleParser): <NEW_LINE> <INDENT> def _parse_article(self, div): <NEW_LINE> <INDENT> self.article = ScholarArticle() <NEW_LINE> for tag in div: <NEW_LINE> <INDENT> if not hasattr(tag, 'name'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if tag.name == 'h3' and self._tag_has_class(tag, 'gs_rt') and tag.a: <NEW_LINE> <INDENT> self.article['title'] = ''.join(tag.a.findAll(text=True)) <NEW_LINE> self.article['url'] = self._path2url(tag.a['href']) <NEW_LINE> if self.article['url'].endswith('.pdf'): <NEW_LINE> <INDENT> self.article['url_pdf'] = self.article['url'] <NEW_LINE> <DEDENT> <DEDENT> if tag.name == 'div' and self._tag_has_class(tag, 'gs_a'): <NEW_LINE> <INDENT> gs_split = tag.text.split("-") <NEW_LINE> year = self.year_re.findall(tag.text) <NEW_LINE> self.article['year'] = year[0] if len(year) > 0 else None <NEW_LINE> self.article['author'] = gs_split[0] if len(gs_split) > 0 else None <NEW_LINE> journ_N = gs_split[1].split(",") <NEW_LINE> self.article['journal_name'] = journ_N[0] if len(gs_split) > 0 else None <NEW_LINE> <DEDENT> if tag.name == 'div' and self._tag_has_class(tag, 'gs_fl'): <NEW_LINE> <INDENT> self._parse_links(tag) | This class reflects update to the Scholar results page layout that
Google made recently. | 62599053097d151d1a2c2574
class LdbResultPrinter(StringPrinter): <NEW_LINE> <INDENT> def as_string(self, indent=0): <NEW_LINE> <INDENT> ret = "count = %(count)s, extended = %(extended)s, " "controls = %(controls)s, refs = %(refs)s" % self.val <NEW_LINE> try: <NEW_LINE> <INDENT> count = int(self.val['count']) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> ret += 'Count is not numeric value?' <NEW_LINE> return ret <NEW_LINE> <DEDENT> for i in range(count): <NEW_LINE> <INDENT> msg = LdbMessagePrinter(self.val['msgs'][i]) <NEW_LINE> ret += "\n%s" % (msg.as_string(indent+1)) <NEW_LINE> <DEDENT> return indent_string("{ <%s>\t%s }" % (self.val.type, ret), indent) | print a ldb message element | 6259905376e4537e8c3f0a88 |
class SamplesForm(forms.Form): <NEW_LINE> <INDENT> sample_list = utils.MultipleSamplesField(label=capfirst(_("samples"))) <NEW_LINE> def __init__(self, user, preset_sample, task, data=None, **kwargs): <NEW_LINE> <INDENT> samples = user.my_samples.all() <NEW_LINE> important_samples = set() <NEW_LINE> if task: <NEW_LINE> <INDENT> kwargs["initial"] = {"sample_list": list(task.samples.values_list("pk", flat=True))} <NEW_LINE> if user != task.customer or task.status != "1 new": <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.fields["sample_list"].disabled = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super().__init__(data, **kwargs) <NEW_LINE> <DEDENT> important_samples.update(task.samples.all()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super().__init__(data, **kwargs) <NEW_LINE> self.fields["sample_list"].initial = [] <NEW_LINE> if preset_sample: <NEW_LINE> <INDENT> important_samples.add(preset_sample) <NEW_LINE> self.fields["sample_list"].initial.append(preset_sample.pk) <NEW_LINE> <DEDENT> <DEDENT> self.fields["sample_list"].set_samples(user, samples, important_samples) <NEW_LINE> self.fields["sample_list"].widget.attrs.update({"size": "17", "style": "vertical-align: top"}) | Form for the list selection of samples.
| 6259905326068e7796d4de45 |
class DispatchViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Dispatch.objects.all() <NEW_LINE> serializer_class = DispatchSerializer | API endpoint that allows users to be viewed or edited. | 6259905316aa5153ce4019e2 |
class KVStorageMixin(object): <NEW_LINE> <INDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> raise NotImplementedError() | A very simple key/value interface to be implemented by storage controllers | 625990532ae34c7f260ac5e4 |
class UserEditForm(FlaskForm): <NEW_LINE> <INDENT> username = StringField('Username', validators=[DataRequired(), Unique(User, User.username, message = 'This username already exsits, please use a different name')]) <NEW_LINE> email = StringField('E-mail', validators=[DataRequired(), Unique(User, User.email, message = 'This email already exists'), Email()]) <NEW_LINE> password = PasswordField('Password', validators=[DataRequired(), Length(min=6)]) <NEW_LINE> confirmpassword = PasswordField('Password', validators=[DataRequired(), Length(min=6)]) <NEW_LINE> image_url = StringField('(Optional) User profile') <NEW_LINE> height = FloatField('Height') <NEW_LINE> weight = FloatField('Weight') <NEW_LINE> gender = SelectField('Gender', choices = [('Male','M'),('Female','M')]) <NEW_LINE> age = IntegerField('Age', validators = [NumberRange(min=0, max=100, message="Age is not valid")]) | Form for editing users. | 62599053dc8b845886d54ac3 |
class ApplicationMetaclass(ABCMeta): <NEW_LINE> <INDENT> def __new__(mcs, name, bases, attrs): <NEW_LINE> <INDENT> definitions = {} <NEW_LINE> for base in bases: <NEW_LINE> <INDENT> if hasattr(base, "definitions") and base.definitions: <NEW_LINE> <INDENT> definitions.update(base.definitions) <NEW_LINE> <DEDENT> <DEDENT> collections = tuple() <NEW_LINE> for base in bases: <NEW_LINE> <INDENT> if hasattr(base, "collections") and base.collections: <NEW_LINE> <INDENT> collections = collections + base.collections <NEW_LINE> <DEDENT> <DEDENT> if "definitions" in attrs and attrs["definitions"] is not None: <NEW_LINE> <INDENT> attrs['definitions'].update(definitions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> attrs['definitions'] = definitions <NEW_LINE> <DEDENT> if "collections" in attrs and attrs["collections"] is not None: <NEW_LINE> <INDENT> attrs['collections'] = collections + attrs['collections'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> attrs['collections'] = collections <NEW_LINE> <DEDENT> return super().__new__(mcs, name, bases, attrs) <NEW_LINE> <DEDENT> def __init__(cls, name, bases, attrs): <NEW_LINE> <INDENT> super(ApplicationMetaclass, cls).__init__(name, bases, attrs) <NEW_LINE> cls.exposed_methods = {} <NEW_LINE> for base in bases: <NEW_LINE> <INDENT> if hasattr(base, 'exposed_methods'): <NEW_LINE> <INDENT> cls.exposed_methods.update(base.exposed_methods) <NEW_LINE> <DEDENT> <DEDENT> for method in (n for n in attrs.values() if hasattr(n, 'exposed')): <NEW_LINE> <INDENT> cls.exposed_methods[method.slug] = method | Inherit definitions from base classes, and let the subclass override any definitions from the base classes. | 62599053435de62698e9d301 |
class IntManipulator(object): <NEW_LINE> <INDENT> def __init__(self, description): <NEW_LINE> <INDENT> self.__description = description <NEW_LINE> <DEDENT> def description(self): <NEW_LINE> <INDENT> return self.__description <NEW_LINE> <DEDENT> def to_string(self, int_value): <NEW_LINE> <INDENT> return str(int_value) <NEW_LINE> <DEDENT> def type(self): <NEW_LINE> <INDENT> return VARIABLE_TYPES.INTEGER <NEW_LINE> <DEDENT> def dimension(self): <NEW_LINE> <INDENT> return 1 | Manipulator for simple integers. Since integers are so simple this does
almost nothing but it has the same API as other manipulators thus making it
easy to blindly do the same thing (e.g. convert to string) to all values in a
record. | 625990533617ad0b5ee07645 |
class BaseTestWithDB(TestCase): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.language = None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def setUpTestData(cls): <NEW_LINE> <INDENT> super(BaseTestWithDB, cls).setUpTestData() <NEW_LINE> cls.username = "test" <NEW_LINE> cls.email = "[email protected]" <NEW_LINE> cls.password = "password" <NEW_LINE> cls.test_user = User.objects.create_user(cls.username, cls.email, cls.password) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super(BaseTestWithDB, cls).setUpClass() <NEW_LINE> cls.client = Client() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> super(BaseTestWithDB, cls).tearDownClass() <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> if self.language is not None: <NEW_LINE> <INDENT> activate(self.language) <NEW_LINE> <DEDENT> login = self.client.login(username=self.username, password=self.password) <NEW_LINE> self.assertEqual(login, True) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass | Base test class with methods implemented for Django testing. | 625990533cc13d1c6d466c3c |
class V1beta1IngressList(object): <NEW_LINE> <INDENT> def __init__(self, kind=None, apiVersion=None, metadata=None, items=None): <NEW_LINE> <INDENT> self.swagger_types = { 'kind': 'str', 'apiVersion': 'str', 'metadata': 'UnversionedListMeta', 'items': 'list[V1beta1Ingress]' } <NEW_LINE> self.attribute_map = { 'kind': 'kind', 'apiVersion': 'apiVersion', 'metadata': 'metadata', 'items': 'items' } <NEW_LINE> self._kind = kind <NEW_LINE> self._apiVersion = apiVersion <NEW_LINE> self._metadata = metadata <NEW_LINE> self._items = items <NEW_LINE> <DEDENT> @property <NEW_LINE> def kind(self): <NEW_LINE> <INDENT> return self._kind <NEW_LINE> <DEDENT> @kind.setter <NEW_LINE> def kind(self, kind): <NEW_LINE> <INDENT> self._kind = kind <NEW_LINE> <DEDENT> @property <NEW_LINE> def apiVersion(self): <NEW_LINE> <INDENT> return self._apiVersion <NEW_LINE> <DEDENT> @apiVersion.setter <NEW_LINE> def apiVersion(self, apiVersion): <NEW_LINE> <INDENT> self._apiVersion = apiVersion <NEW_LINE> <DEDENT> @property <NEW_LINE> def metadata(self): <NEW_LINE> <INDENT> return self._metadata <NEW_LINE> <DEDENT> @metadata.setter <NEW_LINE> def metadata(self, metadata): <NEW_LINE> <INDENT> self._metadata = metadata <NEW_LINE> <DEDENT> @property <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return self._items <NEW_LINE> <DEDENT> @items.setter <NEW_LINE> def items(self, items): <NEW_LINE> <INDENT> self._items = items <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259905382261d6c52730949 |