code (stringlengths 4–4.48k) | docstring (stringlengths 1–6.45k) | _id (stringlengths 24) |
---|---|---|
class FrameStackWrapperTfAgents(wrappers.PyEnvironmentBaseWrapper): <NEW_LINE> <INDENT> def __init__(self, env, k): <NEW_LINE> <INDENT> super(FrameStackWrapperTfAgents, self).__init__(env) <NEW_LINE> obs_spec: array_spec.ArraySpec = self._env.observation_spec() <NEW_LINE> if not isinstance(obs_spec, array_spec.ArraySpec): <NEW_LINE> <INDENT> raise ValueError('Unsupported observation_spec %s' % str(obs_spec)) <NEW_LINE> <DEDENT> if len(obs_spec.shape) != 1 and len(obs_spec.shape) != 3: <NEW_LINE> <INDENT> raise ValueError( 'Only 1D or 3D observations supported (found shape %s)' % ( str(obs_spec.shape))) <NEW_LINE> <DEDENT> if len(obs_spec.shape) == 1: <NEW_LINE> <INDENT> self._stacked_observation_spec = array_spec.ArraySpec( shape=(obs_spec.shape[0] * k,), dtype=obs_spec.dtype, name=obs_spec.name + '_stacked') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._stacked_observation_spec = array_spec.ArraySpec( shape=(obs_spec.shape[0:2] + (obs_spec.shape[2] * k,)), dtype=obs_spec.dtype, name=obs_spec.name + '_stacked') <NEW_LINE> <DEDENT> self._k: int = k <NEW_LINE> self._timesteps = collections.deque([], maxlen=k) <NEW_LINE> if hasattr(env, '_max_episode_steps'): <NEW_LINE> <INDENT> self._max_episode_steps = env._max_episode_steps <NEW_LINE> <DEDENT> <DEDENT> def _reset(self): <NEW_LINE> <INDENT> timestep = self._env.reset() <NEW_LINE> assert isinstance(timestep, ts.TimeStep), ( 'Expected TimeStep, got %s' % type(timestep)) <NEW_LINE> for _ in range(self._k): <NEW_LINE> <INDENT> self._timesteps.append(timestep) <NEW_LINE> <DEDENT> return self._get_timestep(timestep) <NEW_LINE> <DEDENT> def _step(self, action): <NEW_LINE> <INDENT> timestep = self._env.step(action) <NEW_LINE> assert isinstance(timestep, ts.TimeStep), ( 'Expected TimeStep, got %s' % type(timestep)) <NEW_LINE> self._timesteps.append(timestep) <NEW_LINE> return self._get_timestep(timestep) <NEW_LINE> <DEDENT> def _get_timestep(self, time_step): <NEW_LINE> <INDENT> assert len(self._timesteps) == self._k <NEW_LINE> time_step = time_step._asdict() <NEW_LINE> time_step['observation'] = np.concatenate([ frame.observation for frame in self._timesteps], axis=-1) <NEW_LINE> return ts.TimeStep(**time_step) <NEW_LINE> <DEDENT> def observation_spec(self): <NEW_LINE> <INDENT> return self._stacked_observation_spec | Env wrapper to stack k last frames.
Maintains a circular buffer of the last k frame observations and returns
TimeStep including a concatenated state vector (with the last frame's action,
reward, etc). Used to train models with multi-state context.
Note, the first frame's state is replicated k times to produce a state vector
sequence of [s_0, s_0, ..., s_0], [s_0, s_0, ..., s_1], etc. | 6259906d283ffb24f3cf50fa |
class _AsyncExecution: <NEW_LINE> <INDENT> def __init__(self, max_workers: Optional[int] = None): <NEW_LINE> <INDENT> self._max_workers = ( max_workers or multiprocessing.cpu_count() ) <NEW_LINE> self._pool = ThreadPoolExecutor(max_workers=self._max_workers) <NEW_LINE> <DEDENT> def set_max_workers(self, count: int): <NEW_LINE> <INDENT> if self._pool: <NEW_LINE> <INDENT> self._pool.shutdown(wait=True) <NEW_LINE> <DEDENT> self._max_workers = count <NEW_LINE> self._pool = ThreadPoolExecutor(max_workers=self._max_workers) <NEW_LINE> <DEDENT> @run_on_executor(executor='_pool') <NEW_LINE> def as_future(self, query: Callable, *args: Any, **kwargs: Any) -> Future: <NEW_LINE> <INDENT> if not self._pool: <NEW_LINE> <INDENT> self._pool = ThreadPoolExecutor(max_workers=self._max_workers) <NEW_LINE> <DEDENT> return query | Tiny wrapper around ThreadPoolExecutor. This class is not meant to be
instantiated externally, but internally we just use it as a wrapper around
ThreadPoolExecutor so we can control the pool size and make the
`as_future` function public. | 6259906de1aae11d1e7cf434 |
class Money: <NEW_LINE> <INDENT> def __init__(self, amount, currency): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.currency.symbol is not None: <NEW_LINE> <INDENT> return f"{self.currency.symbol}{self.amount:.{self.currency.digits}f}" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return f"{self.currency.code} {self.amount:.{self.currency.digits}f}" <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<Money {str(self)}>" <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (type(self) == type(other) and self.amount == other.amount and self.currency == other.currency) <NEW_LINE> <DEDENT> def add(self, other): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def sub(self, other): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def mul(self, multiplier): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def div(self, divisor): <NEW_LINE> <INDENT> pass | Represents an amount of money. Requires an amount and a currency. | 6259906d4f6381625f19a0d1 |
class DefaultProject(registry_forms.FormDefaults): <NEW_LINE> <INDENT> def set_defaults(self, state, create): <NEW_LINE> <INDENT> if not create: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> default_project = models.project_default(state.get_request()) <NEW_LINE> project_config = state.lookup_item(models.ProjectConfig) <NEW_LINE> if not project_config: <NEW_LINE> <INDENT> state.append_item(models.ProjectConfig, project=default_project) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if project_config.project: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> except models.Project.DoesNotExist: <NEW_LINE> <INDENT> state.update_item(project_config, project=default_project) | Default project configuration. | 6259906d76e4537e8c3f0dd6 |
class CustomSignupForm(SignupForm): <NEW_LINE> <INDENT> first_name = forms.CharField(max_length=50, label="First name") <NEW_LINE> last_name = forms.CharField(max_length=50, label="Last name") <NEW_LINE> def signup(self, request, user): <NEW_LINE> <INDENT> user.first_name = self.cleaned_data["first_name"] <NEW_LINE> user.last_name = self.cleaned_data["last_name"] <NEW_LINE> user.save() | Custom registration form that includes first and last names. | 6259906ddd821e528d6da5aa |
class TSV(DelimitedFormat): <NEW_LINE> <INDENT> delimiter = "\t" <NEW_LINE> @staticmethod <NEW_LINE> def detect(stream): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> csv.Sniffer().sniff(stream, delimiters="\t") <NEW_LINE> return True <NEW_LINE> <DEDENT> except (csv.Error, TypeError): <NEW_LINE> <INDENT> return False | TSV format. Assumes each row is of the form ``text label``.
| 6259906d23849d37ff852908 |
class CaseInsensitiveMultiDict(MultiDict[str, 'VT']): <NEW_LINE> <INDENT> def wrap_key(self, key: str) -> str: <NEW_LINE> <INDENT> return key.casefold() | A case-insensitive multi-dict. | 6259906d2ae34c7f260ac93b |
class CombinedLengthAndGirthCriterion(Criterion): <NEW_LINE> <INDENT> value = models.FloatField(_(u"CLAG"), default=0.0) <NEW_LINE> def get_operators(self): <NEW_LINE> <INDENT> return self.NUMBER_OPERATORS <NEW_LINE> <DEDENT> def is_valid(self): <NEW_LINE> <INDENT> if self.product: <NEW_LINE> <INDENT> clag = (2 * self.product.get_width()) + (2 * self.product.get_height()) + self.product.get_length() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.cart is None: <NEW_LINE> <INDENT> clag = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> max_width = 0 <NEW_LINE> max_length = 0 <NEW_LINE> total_height = 0 <NEW_LINE> for item in self.cart.get_items(): <NEW_LINE> <INDENT> if max_length < item.product.get_length(): <NEW_LINE> <INDENT> max_length = item.product.get_length() <NEW_LINE> <DEDENT> if max_width < item.product.get_width(): <NEW_LINE> <INDENT> max_width = item.product.get_width() <NEW_LINE> <DEDENT> total_height += item.product.get_height() <NEW_LINE> <DEDENT> clag = (2 * max_width) + (2 * total_height) + max_length <NEW_LINE> <DEDENT> <DEDENT> if (self.operator == self.EQUAL) and (clag == self.value): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif (self.operator == self.LESS_THAN) and (clag < self.value): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif (self.operator == self.LESS_THAN_EQUAL) and (clag <= self.value): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif (self.operator == self.GREATER_THAN) and (clag > self.value): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif (self.operator == self.GREATER_THAN_EQUAL) and (clag >= self.value): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Criterion to check against combined length and girth. | 6259906d2c8b7c6e89bd5039 |
class EnvRegistry(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.env_specs = {} <NEW_LINE> <DEDENT> def make(self, id): <NEW_LINE> <INDENT> logger.info('Making new env: %s', id) <NEW_LINE> spec = self.spec(id) <NEW_LINE> return spec.make() <NEW_LINE> <DEDENT> def all(self): <NEW_LINE> <INDENT> return self.env_specs.values() <NEW_LINE> <DEDENT> def spec(self, id): <NEW_LINE> <INDENT> match = env_id_re.search(id) <NEW_LINE> if not match: <NEW_LINE> <INDENT> raise error.Error('Attempted to look up malformed environment ID: {}. (Currently all IDs must be of the form {}.)'.format(id.encode('utf-8'), env_id_re.pattern)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.env_specs[id] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> env_name = match.group(1) <NEW_LINE> matching_envs = [valid_env_name for valid_env_name, valid_env_spec in self.env_specs.items() if env_name == valid_env_spec._env_name] <NEW_LINE> if matching_envs: <NEW_LINE> <INDENT> raise error.DeprecatedEnv('Env {} not found (valid versions include {})'.format(id, matching_envs)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise error.UnregisteredEnv('No registered env with id: {}'.format(id)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def register(self, id, **kwargs): <NEW_LINE> <INDENT> if id in self.env_specs: <NEW_LINE> <INDENT> raise error.Error('Cannot re-register id: {}'.format(id)) <NEW_LINE> <DEDENT> self.env_specs[id] = EnvSpec(id, **kwargs) | Register an env by ID. IDs remain stable over time and are
guaranteed to resolve to the same environment dynamics (or be
desupported). The goal is that results on a particular environment
should always be comparable, and not depend on the version of the
code that was running. | 6259906d8e71fb1e983bd31a |
class LitModel(pl.LightningModule): <NEW_LINE> <INDENT> def __init__(self, model, padding_index, learning_rate, batch_size): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.save_hyperparameters() <NEW_LINE> self.model = model <NEW_LINE> self.loss = nn.CrossEntropyLoss(ignore_index=padding_index) <NEW_LINE> <DEDENT> def training_step(self, batch, batch_ind): <NEW_LINE> <INDENT> x, y = batch.src, batch.trg <NEW_LINE> logits = self.model(x, y[:, :-1]) <NEW_LINE> loss = self.loss(logits, y[:, 1:]) <NEW_LINE> self.log("train_loss", loss) <NEW_LINE> return loss <NEW_LINE> <DEDENT> def validation_step(self, batch, batch_ind): <NEW_LINE> <INDENT> x, y = batch.src, batch.trg <NEW_LINE> logits = self.model(x, y[:, :-1]) <NEW_LINE> loss = self.loss(logits, y[:, 1:]) <NEW_LINE> self.log("val_loss", loss, prog_bar=True, sync_dist=True) <NEW_LINE> <DEDENT> def configure_optimizers(self): <NEW_LINE> <INDENT> return torch.optim.Adam(self.parameters(), self.hparams.learning_rate) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def add_model_specific_args(parent_parser): <NEW_LINE> <INDENT> parser = ArgumentParser(parents=[parent_parser], add_help=False) <NEW_LINE> parser.add_argument('--batch_size', type=float, default=32) <NEW_LINE> parser.add_argument('--learning_rate', type=float, default=0.0001) <NEW_LINE> return parser | Simple PyTorch-Lightning model to train our Transformer. | 6259906df7d966606f7494e5 |
class TableFactory: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_table(table): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if table == "BigTable": <NEW_LINE> <INDENT> return BigTable() <NEW_LINE> <DEDENT> if table == "MediumTable": <NEW_LINE> <INDENT> return MediumTable() <NEW_LINE> <DEDENT> if table == "SmallTable": <NEW_LINE> <INDENT> return SmallTable() <NEW_LINE> <DEDENT> raise AssertionError("Table Not Found") <NEW_LINE> <DEDENT> except AssertionError as _e: <NEW_LINE> <INDENT> print(_e) <NEW_LINE> <DEDENT> return None | The Factory Class | 6259906df548e778e596cddf |
class userOp(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def UserLogin(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): <NEW_LINE> <INDENT> return grpc.experimental.unary_unary(request, target, '/userOp/UserLogin', userOp__pb2.LoginRequest.SerializeToString, userOp__pb2.LoginReply.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def UserRegister(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): <NEW_LINE> <INDENT> return grpc.experimental.unary_unary(request, target, '/userOp/UserRegister', userOp__pb2.UserRegisterRequest.SerializeToString, userOp__pb2.UserRegisterReply.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) | Missing associated documentation comment in .proto file. | 6259906df548e778e596cde0 |
class Types(Enum): <NEW_LINE> <INDENT> TIME = "time" <NEW_LINE> DURATION = "duration" <NEW_LINE> SIZE = "size" <NEW_LINE> @classmethod <NEW_LINE> def is_valid(cls, value): <NEW_LINE> <INDENT> if isinstance(value, AoiEstimator.Types): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return any(value.lower() == _item.value.lower() for _item in cls) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return False | Simple enum representing available estimate types. | 6259906d56b00c62f0fb4122 |
class Solution: <NEW_LINE> <INDENT> def singleNumber(self, A): <NEW_LINE> <INDENT> s = set() <NEW_LINE> for i in A: <NEW_LINE> <INDENT> if i in s: <NEW_LINE> <INDENT> s.remove(i) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s.add(i) <NEW_LINE> <DEDENT> <DEDENT> return s.pop() | @param A: An integer array
@return: An integer | 6259906d9c8ee82313040db1 |
class Register(MutableOperand): <NEW_LINE> <INDENT> def __init__(self, regnum): <NEW_LINE> <INDENT> self.rnum = regnum <NEW_LINE> <DEDENT> def get(self, thread): <NEW_LINE> <INDENT> return thread.r[self.rnum] <NEW_LINE> <DEDENT> def set(self, thread, value): <NEW_LINE> <INDENT> thread.r[self.rnum] = value <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> rnum = self.rnum <NEW_LINE> if rnum in _register_names: <NEW_LINE> <INDENT> return _register_names[rnum] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'r{0}'.format(rnum) <NEW_LINE> <DEDENT> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, type(self)) and self.rnum == other.rnum <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.rnum) | This class encapsulates a generic register (r0 to r15).
.. attribute:: rnum
The register number. | 6259906da8370b77170f1c1a |
class SDEntry_Service(_SDEntry): <NEW_LINE> <INDENT> _defaults = {"type": _SDEntry.TYPE_SRV_FINDSERVICE} <NEW_LINE> name = "Service Entry" <NEW_LINE> fields_desc = [ _SDEntry, IntField("minor_ver", 0)] | Service Entry. | 6259906d3317a56b869bf16d |
class __FRedans : <NEW_LINE> <INDENT> def __init__(self) : <NEW_LINE> <INDENT> self.MIB = set() <NEW_LINE> self.SKB = set() <NEW_LINE> <DEDENT> def update(self, ans) : <NEW_LINE> <INDENT> self.MIB.update(ans.MIB) <NEW_LINE> self.SKB.update(ans.SKB) <NEW_LINE> <DEDENT> def add(self, MIBerc, SKBerc) : <NEW_LINE> <INDENT> self.MIB.add(MIBerc) <NEW_LINE> self.SKB.add(SKBerc) <NEW_LINE> <DEDENT> def __str__(self) : <NEW_LINE> <INDENT> return ('Most Informative Basis (MIB):\n' + '\n'.join(str(e) for e in self.MIB) + '\nSkeletal Basis (SKB):\n' + '\n'.join(str(e) for e in self.SKB) ) | for the return value of FRed(...) | 6259906d56ac1b37e630390c |
class TestCase(TestCase): <NEW_LINE> <INDENT> fixtures = ['tests.json'] <NEW_LINE> counter = 1 <NEW_LINE> def get_new_page_data(self, draft=False): <NEW_LINE> <INDENT> page_data = {'title':'test page %d' % self.counter, 'slug':'test-page-%d' % self.counter, 'language':'en-us', 'sites':[2], 'status': Page.DRAFT if draft else Page.PUBLISHED, 'document_set-TOTAL_FORMS':0, 'document_set-INITIAL_FORMS':0, } <NEW_LINE> self.counter = self.counter + 1 <NEW_LINE> return page_data <NEW_LINE> <DEDENT> def create_new_page(self, client=None, draft=False): <NEW_LINE> <INDENT> if not client: <NEW_LINE> <INDENT> client = Client() <NEW_LINE> client.login(username= 'batiste', password='b') <NEW_LINE> <DEDENT> page_data = self.get_new_page_data(draft=draft) <NEW_LINE> response = client.post('/admin/pages/page/add/', page_data) <NEW_LINE> self.assertRedirects(response, '/admin/pages/page/') <NEW_LINE> slug_content = Content.objects.get_content_slug_by_slug( page_data['slug']) <NEW_LINE> return slug_content.page | Django page CMS test suite class | 6259906d2ae34c7f260ac93c |
class TestTransformations(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.X_, self.y_, headers = parse_csv(etalonroot + "/input.csv") <NEW_LINE> self.data = CData((self.X_, self.y_), cross_val=0) <NEW_LINE> <DEDENT> def test_standardization_on_etalon(self): <NEW_LINE> <INDENT> self.data.reset_data(shuff=False) <NEW_LINE> calcme = parse_csv(etalonroot + "std.csv", dtype="float64")[0] <NEW_LINE> calcme = np.sort(calcme.ravel()) <NEW_LINE> self.data.transformation = "std" <NEW_LINE> X = np.round(self.data.learning.astype("float64"), 3) <NEW_LINE> X = np.sort(X.ravel()) <NEW_LINE> self.assertEqual(self.data.transformation, "std", "The transformation property is faulty!") <NEW_LINE> self.assertTrue(np.all(np.equal(X, calcme)), "Standardization is faulty!") <NEW_LINE> <DEDENT> def test_pca_on_etalon(self): <NEW_LINE> <INDENT> self.data.reset_data(shuff=False) <NEW_LINE> calcme = parse_csv(etalonroot + "pca.csv", dtype="float64")[0] <NEW_LINE> calcme = np.round(np.sort(np.abs(calcme.ravel())), 1) <NEW_LINE> self.data.transformation = "pca" <NEW_LINE> X = self.data.learning.astype("float64") <NEW_LINE> X = np.round(np.sort(np.abs(X.ravel())), 1) <NEW_LINE> eq = np.isclose(X, calcme) <NEW_LINE> self.assertEqual(self.data.transformation, "pca", "The transformation property is faulty!") <NEW_LINE> self.assertTrue(np.all(eq), "PCA is faulty!") <NEW_LINE> <DEDENT> def test_lda_on_etalon(self): <NEW_LINE> <INDENT> self.data.reset_data(shuff=False) <NEW_LINE> calcme = parse_csv(etalonroot + "lda.csv", dtype="float64")[0] <NEW_LINE> calcme = np.round(np.sort(np.abs(calcme.ravel())), 1) <NEW_LINE> self.data.transformation = "lda" <NEW_LINE> X = self.data.learning.astype("float64") <NEW_LINE> X = np.round(np.sort(np.abs(X.ravel())), 1) <NEW_LINE> eq = np.isclose(X, calcme) <NEW_LINE> self.assertEqual(self.data.transformation, "lda", "The transformation property is faulty!") <NEW_LINE> self.assertTrue(np.all(eq), "LDA is faulty!") <NEW_LINE> <DEDENT> def test_ica_on_etalon(self): <NEW_LINE> <INDENT> self.data.reset_data(shuff=False) <NEW_LINE> calcme = parse_csv(etalonroot + "ica.csv", dtype="float64")[0] <NEW_LINE> calcme = np.round(np.sort(np.abs(calcme.ravel())), 1) <NEW_LINE> self.data.transformation = "ica" <NEW_LINE> X = self.data.learning.astype("float64") <NEW_LINE> X = np.round(np.sort(np.abs(X.ravel())), 1) <NEW_LINE> self.assertEqual(self.data.transformation, "ica", "The transformation property is faulty!") <NEW_LINE> self.assertTrue(np.allclose(X, calcme, rtol=1.e-4, atol=1.e-7), "ICA is faulty!") <NEW_LINE> <DEDENT> def test_autoencoding_on_etalon(self): <NEW_LINE> <INDENT> self.data.reset_data(shuff=False) <NEW_LINE> self.data.transformation = ("ae", 10) <NEW_LINE> self.assertEqual(self.data.transformation, "autoencoding", "Autoencoding failed on the <transformation> property assertion!") <NEW_LINE> self.assertEqual(self.data.learning.shape, (10, 10), "Autoencoding failed on the output shape test!") | Dear Transformation Wrapper Classes,
I would like you to: | 6259906de1aae11d1e7cf435 |
class PlanePan(ClickOrDrag): <NEW_LINE> <INDENT> def __init__(self, viewport, plane, button=LEFT_BTN): <NEW_LINE> <INDENT> self._plane = plane <NEW_LINE> self._viewport = viewport <NEW_LINE> self._beginPlanePoint = None <NEW_LINE> self._beginPos = None <NEW_LINE> self._dragNdcZ = 0. <NEW_LINE> super(PlanePan, self).__init__(button) <NEW_LINE> <DEDENT> def click(self, x, y): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def beginDrag(self, x, y): <NEW_LINE> <INDENT> ndc = self._viewport.windowToNdc(x, y) <NEW_LINE> ndcZ = self._viewport._pickNdcZGL(x, y) <NEW_LINE> if ndc is not None and ndcZ is not None: <NEW_LINE> <INDENT> ndcPos = numpy.array((ndc[0], ndc[1], ndcZ, 1.), dtype=numpy.float32) <NEW_LINE> scenePos = self._viewport.camera.transformPoint( ndcPos, direct=False, perspectiveDivide=True) <NEW_LINE> self._beginPos = self._plane.objectToSceneTransform.transformPoint( scenePos, direct=False) <NEW_LINE> self._dragNdcZ = ndcZ <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._beginPos = None <NEW_LINE> self._dragNdcZ = 0. <NEW_LINE> <DEDENT> self._beginPlanePoint = self._plane.plane.point <NEW_LINE> <DEDENT> def drag(self, x, y): <NEW_LINE> <INDENT> if self._beginPos is not None: <NEW_LINE> <INDENT> ndc = self._viewport.windowToNdc(x, y) <NEW_LINE> if ndc is not None: <NEW_LINE> <INDENT> ndcPos = numpy.array((ndc[0], ndc[1], self._dragNdcZ, 1.), dtype=numpy.float32) <NEW_LINE> scenePos = self._viewport.camera.transformPoint( ndcPos, direct=False, perspectiveDivide=True) <NEW_LINE> curPos = self._plane.objectToSceneTransform.transformPoint( scenePos, direct=False) <NEW_LINE> translation = curPos[:3] - self._beginPos[:3] <NEW_LINE> newPoint = self._beginPlanePoint + translation <NEW_LINE> bounds = self._plane.parent.bounds(dataBounds=True) <NEW_LINE> if bounds is not None: <NEW_LINE> <INDENT> newPoint = numpy.clip( newPoint, a_min=bounds[0], a_max=bounds[1]) <NEW_LINE> self._plane.plane.point = newPoint <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def endDrag(self, x, y): <NEW_LINE> <INDENT> self._beginPlanePoint = None | Pan a plane along its normal on drag. | 6259906d63b5f9789fe869b7 |
class UserProfileSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model=models.UserProfile <NEW_LINE> fields=('id','email','name','password') <NEW_LINE> extra_kwargs={'password':{'write_only':True}} <NEW_LINE> def create(self,validated_data): <NEW_LINE> <INDENT> user=models.UserProfile(email=validated_data['email'],name=validated_data['name']) <NEW_LINE> user.set_password(validated_data['password']) <NEW_LINE> user.save() <NEW_LINE> return user | A serializer for our user profile objects | 6259906d63d6d428bbee3eb4 |
class OrderDetails(BaseModel): <NEW_LINE> <INDENT> order = models.ForeignKey(Order) <NEW_LINE> item = models.ForeignKey(Item) <NEW_LINE> item_quantity = models.IntegerField(_(u'物品数量'), default=1) <NEW_LINE> comment = models.TextField(default="") | User order details. | 6259906d3346ee7daa338288 |
class Dive(object): <NEW_LINE> <INDENT> def __init__(self, url): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.password = None <NEW_LINE> self.username = None <NEW_LINE> return <NEW_LINE> <DEDENT> def call(self, timeout=1800): <NEW_LINE> <INDENT> if self.password is None or self.username is None: <NEW_LINE> <INDENT> self.retrieve_authentication() <NEW_LINE> <DEDENT> header = {'Accept': 'application/json'} <NEW_LINE> credentials = HTTPBasicAuth(self.username, self.password) <NEW_LINE> response = requests.get(self.url, headers=header, auth=credentials, timeout=timeout) <NEW_LINE> if response.status_code == requests.codes.server_error: <NEW_LINE> <INDENT> if 'content-type' in response.headers and response.headers['content-type'] == 'application/json': <NEW_LINE> <INDENT> raise ValueError(response.json()['detail']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError(response.text) <NEW_LINE> <DEDENT> <DEDENT> if response.status_code != requests.codes.OK: <NEW_LINE> <INDENT> response.raise_for_status() <NEW_LINE> <DEDENT> return response.json() <NEW_LINE> <DEDENT> def retrieve_authentication(self): <NEW_LINE> <INDENT> config = configparser.ConfigParser() <NEW_LINE> config.read(path.join(environ['HOME'], '.DSMZ_config')) <NEW_LINE> try: <NEW_LINE> <INDENT> self.username = config.get('authentication', 'user_id') <NEW_LINE> self.password = config.get('authentication', 'password') <NEW_LINE> <DEDENT> except (configparser.NoSectionError, configparser.NoOptionError): <NEW_LINE> <INDENT> self.password = None <NEW_LINE> raise AuthenticationError('Call DSMZ_login() to login to the API before proceeding') <NEW_LINE> <DEDENT> return | Client for DSMZ BacDive web services | 6259906d435de62698e9d65a |
class ExistDB(BaseExistDB): <NEW_LINE> <INDENT> def __init__(self, resultType=None, timeout=None): <NEW_LINE> <INDENT> if timeout is None: <NEW_LINE> <INDENT> timeout = getattr(settings, 'EXISTDB_TIMEOUT', None) <NEW_LINE> <DEDENT> BaseExistDB.__init__(self, resultType=resultType, server_url=self._get_exist_url(), timeout=timeout) <NEW_LINE> <DEDENT> def _get_exist_url(self): <NEW_LINE> <INDENT> exist_url = settings.EXISTDB_SERVER_URL <NEW_LINE> if '@' in exist_url: <NEW_LINE> <INDENT> warnings.warn("""EXISTDB_SERVER_URL should not include eXist user or password information. You should update your django settings to use EXISTDB_SERVER_USER and EXISTDB_SERVER_PASSWORD.""") <NEW_LINE> <DEDENT> username = getattr(settings, 'EXISTDB_SERVER_USER', None) <NEW_LINE> password = getattr(settings, 'EXISTDB_SERVER_PASSWORD', None) <NEW_LINE> if username or password: <NEW_LINE> <INDENT> urlparts = urlparse.urlsplit(exist_url) <NEW_LINE> if username and password: <NEW_LINE> <INDENT> prefix = '%s:%s' % (username, password) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prefix = username <NEW_LINE> <DEDENT> netloc = '%s@%s' % (prefix, urlparts.netloc) <NEW_LINE> exist_url = urlparse.urlunsplit((urlparts.scheme, netloc, urlparts.path, urlparts.query, urlparts.fragment)) <NEW_LINE> <DEDENT> return exist_url | Connect to an eXist database configured by ``settings.py``.
:param resultType: The class to use for returning :meth:`query` results;
defaults to :class:`eulcore.existdb.QueryResult`.
:param timeout: Connection timeout setting, if any. If none is
specified, this class will look for a ``EXISTDB_TIMEOUT``
configuration in django settings.
This class is a simple wrapper for :class:`eulcore.existdb.db.ExistDB`,
getting the server_url from the Django settings file instead of in an
argument. | 6259906d4f88993c371f114a |
class CaptureInfo(AlpacaBase): <NEW_LINE> <INDENT> first_key = "first" <NEW_LINE> last_key = "last" <NEW_LINE> def __init__(self, path, command=GetDefaults.info_command, *args, **kwargs): <NEW_LINE> <INDENT> super(CaptureInfo, self).__init__(*args, **kwargs) <NEW_LINE> self.path = path <NEW_LINE> self.command = command <NEW_LINE> self._first = None <NEW_LINE> self._last = None <NEW_LINE> self._first_regex = None <NEW_LINE> self._last_regex = None <NEW_LINE> self._output = None <NEW_LINE> return <NEW_LINE> <DEDENT> @property <NEW_LINE> def output(self): <NEW_LINE> <INDENT> if self._output is None: <NEW_LINE> <INDENT> command = "{} {}".format(self.command, self.path) <NEW_LINE> self.logger.debug("Running: '%s'", command) <NEW_LINE> outcome = subprocess.run(shlex.split(command), stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True, ) <NEW_LINE> self._output = outcome.stdout <NEW_LINE> self.logger.debug(self._output) <NEW_LINE> <DEDENT> return self._output <NEW_LINE> <DEDENT> @property <NEW_LINE> def first_regex(self): <NEW_LINE> <INDENT> if self._first_regex is None: <NEW_LINE> <INDENT> self._first_regex = re.compile(Info.first_regex) <NEW_LINE> <DEDENT> return self._first_regex <NEW_LINE> <DEDENT> @property <NEW_LINE> def last_regex(self): <NEW_LINE> <INDENT> if self._last_regex is None: <NEW_LINE> <INDENT> self._last_regex = re.compile(Info.last_regex) <NEW_LINE> <DEDENT> return self._last_regex <NEW_LINE> <DEDENT> @property <NEW_LINE> def first(self): <NEW_LINE> <INDENT> if self._first is None: <NEW_LINE> <INDENT> match = self.first_regex.search(self.output) <NEW_LINE> if match is None: <NEW_LINE> <INDENT> raise RuntimeError( "{} didn't match the first timestamp".format(self.command)) <NEW_LINE> <DEDENT> self._first = dateparser.parse(match.groupdict()[Info.first_key]) <NEW_LINE> <DEDENT> return self._first <NEW_LINE> <DEDENT> @property <NEW_LINE> def last(self): <NEW_LINE> <INDENT> if self._last is None: <NEW_LINE> <INDENT> match = self.last_regex.search(self.output) <NEW_LINE> if match is None: <NEW_LINE> <INDENT> raise RuntimeError( "{} didn't match the last timestamp".format(self.command)) <NEW_LINE> <DEDENT> self._last = dateparser.parse(match.groupdict()[Info.last_key]) <NEW_LINE> <DEDENT> return self._last <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.last < other <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> return self.last <= other <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self.first > other <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> return self.first >= other <NEW_LINE> <DEDENT> def check_rep(self): <NEW_LINE> <INDENT> return | Holds the basic info for a PCAP file
Args:
path (str): path to file
command (str): command to get the info | 6259906d4e4d562566373c5b |
class SphericalStokesSolutionDelta(SphericalStokesSolution): <NEW_LINE> <INDENT> def __init__(self, ABCD, l, m, Rp=2.22, Rm=1.22, nu=1.0, g=1.0): <NEW_LINE> <INDENT> super(SphericalStokesSolutionDelta, self).__init__(l, m, Rp=Rp, Rm=Rm, nu=nu, g=g) <NEW_LINE> self.ABCD = ABCD <NEW_LINE> A, B, C, D = self.ABCD <NEW_LINE> self.G = -2*nu*(l+1)*(2*l+3)*C <NEW_LINE> self.H = -2*nu*l*(2*l-1)*D <NEW_LINE> <DEDENT> def Pl(self, r): <NEW_LINE> <INDENT> A, B, C, D = self.ABCD <NEW_LINE> l = self.l <NEW_LINE> return A*r**l + B*r**(-l-1) + C*r**(l+2) + D*r**(-l+1) <NEW_LINE> <DEDENT> def dPldr(self, r): <NEW_LINE> <INDENT> A, B, C, D = self.ABCD <NEW_LINE> l = self.l <NEW_LINE> return l*A*r**(l-1) + (-l-1)*B*r**(-l-2) + (l+2)*C*r**(l+1) + (-l+1)*D*r**-l <NEW_LINE> <DEDENT> def dPldr2(self, r): <NEW_LINE> <INDENT> A, B, C, D = self.ABCD <NEW_LINE> l = self.l <NEW_LINE> return l*(l-1)*A*r**(l-2) + (-l-1)*(-l-2)*B*r**(-l-3) + (l+2)*(l+1)*C*r**l + (-l+1)*(-l)*D*r**(-l-1) <NEW_LINE> <DEDENT> def p(self, r, theta, phi): <NEW_LINE> <INDENT> l, m = self.l, self.m <NEW_LINE> return (self.G*r**l + self.H*r**(-l-1))*Y(l, m, theta, phi) | Base class for solutions in spherical shell domains with delta(r-r') forcing
This implements the analytical solution in one half (above or below r')
of the domain which is based on a poloidal function
.. math ::
\mathcal{P}(r,\theta,\varphi) = \mathcal{P}_l(r)Y_{lm}(\theta, \varphi)
and velocity
.. math ::
\mathbf{u} = \nabla\times\left(\mathbf{r}\times\nabla\mathcal{P}\right)
where for biharmonic solutions, :math:`\mathcal{P}_l(r)` is determined by four
coefficients, A, B, C, and D. | 6259906d23849d37ff85290a |
class IPage(Interface): <NEW_LINE> <INDENT> pass | The page type
| 6259906d32920d7e50bc789b |
class FileOpenFrame(ttk.Frame): <NEW_LINE> <INDENT> def __init__(self, master,file_entry_width=100): <NEW_LINE> <INDENT> super().__init__(master) <NEW_LINE> self.filePath = StringVar() <NEW_LINE> self.createWidget(file_entry_width) <NEW_LINE> self.pack() <NEW_LINE> <DEDENT> def createWidget(self,entry_width): <NEW_LINE> <INDENT> filePathLabel = ttk.Label(self,text=FILE_PATH_LABEL) <NEW_LINE> filePathLabel.grid(column=0,row=0) <NEW_LINE> filepathEntry = ttk.Entry(self,textvariable=self.filePath,width=entry_width) <NEW_LINE> filepathEntry.grid(column=1,row=0) <NEW_LINE> filepathButton = ttk.Button(self,text=FILE_OPEN_BUTTON_LABEL,command=self.openFileDialog) <NEW_LINE> filepathButton.grid(column=2,row=0) <NEW_LINE> self.readButton = ttk.Button(self,text=FILE_READ_BUTTON_LABEL) <NEW_LINE> self.readButton.grid(column=3,row=0) <NEW_LINE> <DEDENT> def openFileDialog(self): <NEW_LINE> <INDENT> file = filedialog.askopenfilename(filetypes=[("sqliteファイル", "*.*")]); <NEW_LINE> self.filePath.set(file) <NEW_LINE> <DEDENT> def getFilePath(self): <NEW_LINE> <INDENT> return self.filePath.get() <NEW_LINE> <DEDENT> def setReadButtonCommand(self,func): <NEW_LINE> <INDENT> self.readButton["command"] = func | Frame for loading a file. | 6259906d92d797404e389785 |
class dev_report(Event): <NEW_LINE> <INDENT> pass | Trigger the reporting of the status for each known device. | 6259906dd486a94d0ba2d813 |
class RtspBaseClass: <NEW_LINE> <INDENT> def createEmptyPipeline(self): <NEW_LINE> <INDENT> self.pipeline = gst.Pipeline('mypipeline') <NEW_LINE> <DEDENT> def createRtspsrcElement(self): <NEW_LINE> <INDENT> self.source = gst.element_factory_make('rtspsrc', 'source') <NEW_LINE> self.source.set_property('latency', 0) <NEW_LINE> self.formRtspUri() <NEW_LINE> self.source.set_property('location', self.rtspUri) <NEW_LINE> <DEDENT> def formRtspUri(self): <NEW_LINE> <INDENT> self.rtspUri = 'rtsp://%s:554/axis-media/media.amp?videocodec=jpeg&audio=0' % (self.ipAddress) <NEW_LINE> <DEDENT> def createDepayElement(self): <NEW_LINE> <INDENT> self.depay = gst.element_factory_make('rtpjpegdepay','mydepay') <NEW_LINE> <DEDENT> def createDecodeElement(self): <NEW_LINE> <INDENT> self.decode = gst.element_factory_make('ffdec_mjpeg','mydecode') <NEW_LINE> <DEDENT> def createXvimagesinkElement(self): <NEW_LINE> <INDENT> self.xvimagesink = gst.element_factory_make('xvimagesink', 'xvimagesink') <NEW_LINE> self.xvimagesink.set_xwindow_id(self.xid) <NEW_LINE> <DEDENT> def createPipelineCallbacks(self): <NEW_LINE> <INDENT> self.source.connect('pad-added', self.onPadAddedToRtspsrc) <NEW_LINE> self.source.connect('pad-removed', self.onPadRemovedFromRtspsrc) <NEW_LINE> <DEDENT> def onPadAddedToRtspsrc(self, rtspsrc, pad): <NEW_LINE> <INDENT> print('pad added to rtspsrc element.') <NEW_LINE> self.xvimagesink.set_xwindow_id(self.xid) <NEW_LINE> depaySinkPad = self.depay.get_pad('sink') <NEW_LINE> pad.link(depaySinkPad) <NEW_LINE> <DEDENT> def onPadRemovedFromRtspsrc(self, rtspsrc, pad): <NEW_LINE> <INDENT> print('pad removed from rtspsrc element.') <NEW_LINE> depaySinkPad = self.depay.get_pad('sink') <NEW_LINE> pad.unlink(depaySinkPad) <NEW_LINE> <DEDENT> def pauseOrUnpauseVideo(self): <NEW_LINE> <INDENT> if (self.pipeline.get_state()[1]==gst.STATE_PAUSED): <NEW_LINE> <INDENT> self.setPipelineStateToPlaying() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.setPipelineStateToPaused() <NEW_LINE> <DEDENT> <DEDENT> def setPipelineStateToPlaying(self): <NEW_LINE> <INDENT> self.pipeline.set_state(gst.STATE_PLAYING) <NEW_LINE> <DEDENT> def setPipelineStateToPaused(self): <NEW_LINE> <INDENT> self.pipeline.set_state(gst.STATE_PAUSED) <NEW_LINE> <DEDENT> def setPipelineStateToNull(self): <NEW_LINE> <INDENT> self.pipeline.set_state(gst.STATE_NULL) | RtspBaseClass is a base class that provides the building blocks for other
classes that create rtsp pipelines. Commonly used gstreamer pipeline
elements and callback methods are defined within. | 6259906d55399d3f05627d77 |
class CustomerTaxForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = CustomerTax <NEW_LINE> exclude = () | Form to add and edit a customer tax. | 6259906dd268445f2663a787 |
class DeletePatternRepeat(QUndoCommand): <NEW_LINE> <INDENT> def __init__(self, canvas, patternRepeat, parent = None): <NEW_LINE> <INDENT> super(DeletePatternRepeat, self).__init__(parent) <NEW_LINE> self.canvas = canvas <NEW_LINE> self.patternRepeat = patternRepeat <NEW_LINE> <DEDENT> def redo(self): <NEW_LINE> <INDENT> self.canvas.removeItem(self.patternRepeat) <NEW_LINE> self.canvas.patternRepeats.remove(self.patternRepeat) <NEW_LINE> <DEDENT> def undo(self): <NEW_LINE> <INDENT> self.canvas.addItem(self.patternRepeat) <NEW_LINE> self.canvas.patternRepeats.add(self.patternRepeat) | This class encapsulates the deletion of a pattern repeat
item on the canvas. | 6259906d99cbb53fe683273c |
class Webcam_detection_thread_worker(threading.Thread): <NEW_LINE> <INDENT> def __init__(self,on_finish) : <NEW_LINE> <INDENT> super(Webcam_detection_thread_worker, self).__init__() <NEW_LINE> self._on_finish = on_finish <NEW_LINE> <DEDENT> def run (self) : <NEW_LINE> <INDENT> WebCamObj = M_WEBCAM.luciole_webcam_detection() <NEW_LINE> nb_webcam = WebCamObj.detect_webcam() <NEW_LINE> self._on_finish(nb_webcam, WebCamObj ) | Thread in charge of calling the webcam detection | 6259906d8e7ae83300eea8e4 |
class Pvnrt(QtGui.QMainWindow): <NEW_LINE> <INDENT> def __init__(self, parent = None): <NEW_LINE> <INDENT> QtGui.QWidget.__init__(self, parent) <NEW_LINE> self.ui = bouncingballs.BouncingBalls() | pv = nrt class
| 6259906d56ac1b37e630390d |
class Scenario_repository_upstream_authorization_check(APITestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.upstream_username = 'rTtest123' <NEW_LINE> <DEDENT> @pre_upgrade <NEW_LINE> def test_pre_repository_scenario_upstream_authorization(self): <NEW_LINE> <INDENT> org = entities.Organization().create() <NEW_LINE> custom_repo = create_sync_custom_repo(org_id=org.id) <NEW_LINE> rake_repo = 'repo = Katello::Repository.find_by_id({0})'.format(custom_repo) <NEW_LINE> rake_username = '; repo.root.upstream_username = "{0}"'.format(self.upstream_username) <NEW_LINE> rake_repo_save = '; repo.save!(validate: false)' <NEW_LINE> result = run( "echo '{0}{1}{2}'|foreman-rake console".format( rake_repo, rake_username, rake_repo_save ) ) <NEW_LINE> self.assertIn('true', result) <NEW_LINE> global_dict = {self.__class__.__name__: {'repo_id': custom_repo}} <NEW_LINE> create_dict(global_dict) <NEW_LINE> <DEDENT> @post_upgrade(depend_on=test_pre_repository_scenario_upstream_authorization) <NEW_LINE> def test_post_repository_scenario_upstream_authorization(self): <NEW_LINE> <INDENT> repo_id = get_entity_data(self.__class__.__name__)['repo_id'] <NEW_LINE> rake_repo = 'repo = Katello::RootRepository.find_by_id({0})'.format(repo_id) <NEW_LINE> rake_username = '; repo.root.upstream_username' <NEW_LINE> result = run("echo '{0}{1}'|foreman-rake console".format(rake_repo, rake_username)) <NEW_LINE> self.assertNotIn(self.upstream_username, result) | This test scenario is to verify the upstream username in post-upgrade for a custom
repository which does have a upstream username but not password set on it in pre-upgrade.
Test Steps:
1. Before Satellite upgrade, Create a custom repository and sync it.
2. Set the upstream username on same repository using foreman-rake.
3. Upgrade Satellite.
4. Check if the upstream username value is removed for same repository. | 6259906d1b99ca4002290160 |
class ToolCursorPosition(ToolBase): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._idDrag = None <NEW_LINE> ToolBase.__init__(self, *args, **kwargs) <NEW_LINE> <DEDENT> def set_figure(self, figure): <NEW_LINE> <INDENT> if self._idDrag: <NEW_LINE> <INDENT> self.canvas.mpl_disconnect(self._idDrag) <NEW_LINE> <DEDENT> ToolBase.set_figure(self, figure) <NEW_LINE> if figure: <NEW_LINE> <INDENT> self._idDrag = self.canvas.mpl_connect( 'motion_notify_event', self.send_message) <NEW_LINE> <DEDENT> <DEDENT> def send_message(self, event): <NEW_LINE> <INDENT> if self.toolmanager.messagelock.locked(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> message = ' ' <NEW_LINE> if event.inaxes and event.inaxes.get_navigate(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> s = event.inaxes.format_coord(event.xdata, event.ydata) <NEW_LINE> <DEDENT> except (ValueError, OverflowError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> artists = [a for a in event.inaxes.mouseover_set if a.contains(event) and a.get_visible()] <NEW_LINE> if artists: <NEW_LINE> <INDENT> a = cbook._topmost_artist(artists) <NEW_LINE> if a is not event.inaxes.patch: <NEW_LINE> <INDENT> data = a.get_cursor_data(event) <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> s += ' [%s]' % a.format_cursor_data(data) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> message = s <NEW_LINE> <DEDENT> <DEDENT> self.toolmanager.message_event(message, self) | Send message with the current pointer position
This tool runs in the background reporting the position of the cursor | 6259906d76e4537e8c3f0dd9 |
class TestSetUpIof(): <NEW_LINE> <INDENT> def __init__(self, test_info=None, log_base_path=None): <NEW_LINE> <INDENT> self.test_info = test_info <NEW_LINE> self.log_dir_base = log_base_path <NEW_LINE> self.logger = logging.getLogger("TestRunnerLogger") <NEW_LINE> <DEDENT> def useLogDir(self, log_path): <NEW_LINE> <INDENT> self.log_dir_base = log_path <NEW_LINE> <DEDENT> def test_iof_started(self): <NEW_LINE> <INDENT> start_dir = self.test_info.get_defaultENV("CNSS_PREFIX") <NEW_LINE> self.logger.info("start_dir: %s", str(os.listdir(start_dir))) <NEW_LINE> ctrl_dir = os.path.join(start_dir, ".ctrl") <NEW_LINE> assert os.path.isdir(start_dir), "prefix is not a directory %s" % start_dir <NEW_LINE> filename = os.path.join(ctrl_dir, 'active') <NEW_LINE> i = 10 <NEW_LINE> while i > 0: <NEW_LINE> <INDENT> i = i - 1 <NEW_LINE> time.sleep(1) <NEW_LINE> if not os.path.exists(filename): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.logger.info("Found active file: %s", filename) <NEW_LINE> stat_obj = os.stat(filename) <NEW_LINE> assert S_ISREG(stat_obj.st_mode), "File type is not a regular file" <NEW_LINE> self.logger.info(stat_obj) <NEW_LINE> fd = open(filename) <NEW_LINE> data = fd.read() <NEW_LINE> fd.close() <NEW_LINE> if data.strip() == '1': <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> self.logger.info("start_dir: %s", str(os.listdir(start_dir))) <NEW_LINE> self.logger.info("Unable to detect file: %s", filename) <NEW_LINE> return 1 | Set up and start ctrl fs | 6259906d32920d7e50bc789c |
class TestBodhi4ComposeSyncWait(Base): <NEW_LINE> <INDENT> expected_title = "bodhi.compose.sync.wait" <NEW_LINE> expected_subti = "bodhi composer is waiting for dist-6E-epel-testing " + "to hit the master mirror" <NEW_LINE> expected_icon = "https://apps.fedoraproject.org/img/icons/bodhi.png" <NEW_LINE> expected_secondary_icon = "https://seccdn.libravatar.org/avatar/" + "05b5fce36707d3f962a8dc03094e41028ac3e765c8c2e182eab96228013ec9c9" + "?s=64&d=retro" <NEW_LINE> expected_usernames = set(['releng']) <NEW_LINE> expected_packages = set([]) <NEW_LINE> expected_objects = set(['repos/dist-6E-epel-testing']) <NEW_LINE> msg = { "username": "amqp-bridge", "source_name": "datanommer", "i": 185683, "timestamp": 1559871586.0, "msg_id": "2019-39734cc4-d3bd-4961-8303-502e45e6777b", "crypto": "x509", "topic": "org.fedoraproject.prod.bodhi.compose.sync.wait", "headers": {}, "source_version": "0.9.0", "msg": { "repo": "dist-6E-epel-testing", "agent": "releng" } } | `Bodhi <https://bodhi.fedoraproject.org>`_ publishes messages
on this topic when it begins waiting for a completed repo to sync. | 6259906daad79263cf43000b |
class Accuracy(EvalMetric): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Accuracy, self).__init__('accuracy') <NEW_LINE> <DEDENT> def update(self, labels, preds): <NEW_LINE> <INDENT> check_label_shapes(labels, preds) <NEW_LINE> for label, pred_label in zip(labels, preds): <NEW_LINE> <INDENT> if pred_label.shape != label.shape: <NEW_LINE> <INDENT> pred_label = ndarray.argmax_channel(pred_label) <NEW_LINE> <DEDENT> pred_label = pred_label.asnumpy().astype('int32') <NEW_LINE> label = label.asnumpy().astype('int32') <NEW_LINE> check_label_shapes(label, pred_label) <NEW_LINE> self.sum_metric += (pred_label.flat == label.flat).sum() <NEW_LINE> self.num_inst += len(pred_label.flat) | Computes accuracy classification score.
Examples
--------
>>> predicts = [mx.nd.array([[0.3, 0.7], [0, 1.], [0.4, 0.6]])]
>>> labels = [mx.nd.array([0, 1, 1])]
>>> acc = mx.metric.Accuracy()
>>> acc.update(preds = predicts, labels = labels)
>>> print acc.get()
('accuracy', 0.6666666666666666) | 6259906d3539df3088ecdaf2 |
class whitespace(Facet): <NEW_LINE> <INDENT> ENUM = DataDict( preserve='preserve', replace='replace', colapse='colapse', ) <NEW_LINE> name = 'whiteSpace' <NEW_LINE> def __init__(self, restriction): <NEW_LINE> <INDENT> assert restriction in self.ENUM <NEW_LINE> self.restriction = restriction <NEW_LINE> <DEDENT> def __call__(self, value): <NEW_LINE> <INDENT> if self.restriction == self.ENUM.preserve: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> value = re.compile('(\t|\r|\n)').sub(' ', value) <NEW_LINE> if self.restriction == self.ENUM.colapse: <NEW_LINE> <INDENT> value = re.compile('\s+').sub(' ', value) <NEW_LINE> <DEDENT> return value | The whitespace facet constrains the value of types derived from string
whitespace.restriction must be one of: preserve, replace, collapse
preserve
No normalization is done, the value is not changed
replace
All occurrences of #x9 (tab), #xA (line feed) and #xD (carriage
return) are replaced with #x20 (space)
collapse
After the processing implied by replace, contiguous sequences of
#x20's are collapsed to a single #x20, and leading and trailing
#x20's are removed.
Note: The notation #xA used here (and elsewhere in this
specification) represents the Universal Character Set (UCS) code
point hexadecimal A (line feed), which is denoted by U+000A.
This notation is to be distinguished from 
, which is the
XML character reference to that same UCS code point. | 6259906d4f6381625f19a0d3 |
class DistributionMeta(ABCMeta): <NEW_LINE> <INDENT> def __new__(cls, name, bases, clsdict): <NEW_LINE> <INDENT> if "random" in clsdict: <NEW_LINE> <INDENT> def _random(*args, **kwargs): <NEW_LINE> <INDENT> warnings.warn( "The old `Distribution.random` interface is deprecated.", FutureWarning, stacklevel=2, ) <NEW_LINE> return clsdict["random"](*args, **kwargs) <NEW_LINE> <DEDENT> clsdict["random"] = _random <NEW_LINE> <DEDENT> rv_op = clsdict.setdefault("rv_op", None) <NEW_LINE> rv_type = None <NEW_LINE> if isinstance(rv_op, RandomVariable): <NEW_LINE> <INDENT> rv_type = type(rv_op) <NEW_LINE> <DEDENT> new_cls = super().__new__(cls, name, bases, clsdict) <NEW_LINE> if rv_type is not None: <NEW_LINE> <INDENT> class_logp = clsdict.get("logp") <NEW_LINE> if class_logp: <NEW_LINE> <INDENT> @_logprob.register(rv_type) <NEW_LINE> def logp(op, values, *dist_params, **kwargs): <NEW_LINE> <INDENT> dist_params = dist_params[3:] <NEW_LINE> (value,) = values <NEW_LINE> return class_logp(value, *dist_params) <NEW_LINE> <DEDENT> <DEDENT> class_logcdf = clsdict.get("logcdf") <NEW_LINE> if class_logcdf: <NEW_LINE> <INDENT> @_logcdf.register(rv_type) <NEW_LINE> def logcdf(op, value, *dist_params, **kwargs): <NEW_LINE> <INDENT> dist_params = dist_params[3:] <NEW_LINE> return class_logcdf(value, *dist_params) <NEW_LINE> <DEDENT> <DEDENT> class_initval = clsdict.get("get_moment") <NEW_LINE> if class_initval: <NEW_LINE> <INDENT> @_get_moment.register(rv_type) <NEW_LINE> def get_moment(op, rv, rng, size, dtype, *dist_params): <NEW_LINE> <INDENT> return class_initval(rv, size, *dist_params) <NEW_LINE> <DEDENT> <DEDENT> new_cls.register(rv_type) <NEW_LINE> <DEDENT> return new_cls | DistributionMeta class
Notes
-----
DistributionMeta currently performs many functions, and will likely be refactored soon.
See issue below for more details
https://github.com/pymc-devs/pymc/issues/5308 | 6259906da17c0f6771d5d7d4 |
class TokenDescriptionSection(object): <NEW_LINE> <INDENT> def __init__(self, mf, api=None, persona_id=None): <NEW_LINE> <INDENT> self.app_manifest = mf <NEW_LINE> self.entries = [] <NEW_LINE> self._api = api <NEW_LINE> self._persona_id = persona_id <NEW_LINE> self._all_permissions = None <NEW_LINE> self._permissions = set() <NEW_LINE> self._dynamic_permissions = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def all_permissions(self): <NEW_LINE> <INDENT> if self._all_permissions is None: <NEW_LINE> <INDENT> with open(os.path.join(self.app_manifest.nix_closure, "permissions.json")) as perms: <NEW_LINE> <INDENT> self._all_permissions = json.loads(perms) <NEW_LINE> <DEDENT> <DEDENT> return self._all_permissions <NEW_LINE> <DEDENT> def add_entry(self, short_or_desc): <NEW_LINE> <INDENT> entry = TokenDescriptionEntry(short_or_desc) <NEW_LINE> self.entries.append(entry) <NEW_LINE> return entry <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return { 'domain': self.app_manifest.domain, 'name': self.app_manifest.name, 'run-as-admin': self.app_manifest.run_as_admin, 'singleton': self.app_manifest.singleton, 'version': self.app_manifest.version, 'icon': self.app_manifest.icon, 'entries': [ e.to_json() for e in self.entries ] } | Description of permissions associated with a particular application
| 6259906d8a43f66fc4bf39ea |
class JujuServiceDeploymentResource(CommonResource): <NEW_LINE> <INDENT> pipeline = ForeignKey( PipelineResource, 'pipeline', null=False, full_list=True) <NEW_LINE> jujuservice = ForeignKey( JujuServiceResource, 'jujuservice', full_list=True) <NEW_LINE> charm = ForeignKey(CharmResource, 'charm') <NEW_LINE> productundertest = ForeignKey( ProductUnderTestResource, 'productundertest') <NEW_LINE> units = ReverseManyField( 'oilserver.api.resources.UnitResource', 'units') <NEW_LINE> class Meta(CommonMeta): <NEW_LINE> <INDENT> queryset = models.JujuServiceDeployment.objects.select_related( 'pipeline', 'jujuservice', 'charm', 'productundertest').all() <NEW_LINE> filtering = { 'uuid': ('exact',), 'success': ALL, 'jujuservice': ALL_WITH_RELATIONS, 'charm': ALL_WITH_RELATIONS, 'pipeline': ALL_WITH_RELATIONS, 'productundertest': ALL_WITH_RELATIONS, 'units': ALL_WITH_RELATIONS, } | API Resource for 'JujuServiceDeployment' model. | 6259906d460517430c432c81 |
class List(Trackable): <NEW_LINE> <INDENT> name = models.CharField(max_length=70) <NEW_LINE> slug = models.SlugField() <NEW_LINE> project = models.ForeignKey(Project, on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f"<TodoList: {self.name}>" | Todo list for investigation. | 6259906daad79263cf43000c |
class Technicien(db.Model): <NEW_LINE> <INDENT> __tablename__ = "technicien" <NEW_LINE> __table_args__ = {'extend_existing': True} <NEW_LINE> code = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> nom = db.Column(db.String(50)) <NEW_LINE> prenom = db.Column(db.String(50)) <NEW_LINE> intervention = db.Column(db.String(200)) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<Intervention '{}'>".format(self.code) | Technicien Model for storing technicien related details | 6259906d63d6d428bbee3eb5 |
class RagelDLexer(DelegatingLexer): <NEW_LINE> <INDENT> name = 'Ragel in D Host' <NEW_LINE> aliases = ['ragel-d'] <NEW_LINE> filenames = ['*.rl'] <NEW_LINE> def __init__(self, **options): <NEW_LINE> <INDENT> super(RagelDLexer, self).__init__(DLexer, RagelEmbeddedLexer, **options) <NEW_LINE> <DEDENT> def analyse_text(text): <NEW_LINE> <INDENT> return '@LANG: d' in text | A lexer for `Ragel`_ in a D host file.
*New in Pygments 1.1* | 6259906d7b180e01f3e49c8f |
class Level(IntEnum): <NEW_LINE> <INDENT> DEBUG = 0 <NEW_LINE> INFO = 1 <NEW_LINE> WARN = 2 <NEW_LINE> ERROR = 3 | An enumerator representing the logging level.
Not valid if you override with your own loggers. | 6259906d4f88993c371f114b |
class _StreamRequestHandler(socketserver.StreamRequestHandler, object): <NEW_LINE> <INDENT> pass | Converted to newstyle class. | 6259906d99cbb53fe683273e |
class Cutout(object): <NEW_LINE> <INDENT> def __init__(self, n_holes, length): <NEW_LINE> <INDENT> self.n_holes = n_holes <NEW_LINE> self.length = length <NEW_LINE> <DEDENT> def __call__(self, img): <NEW_LINE> <INDENT> h = img.size(1) <NEW_LINE> w = img.size(2) <NEW_LINE> mask = np.ones((h, w), np.float32) <NEW_LINE> for n in range(self.n_holes): <NEW_LINE> <INDENT> y = np.random.randint(h) <NEW_LINE> x = np.random.randint(w) <NEW_LINE> y1 = np.clip(y - self.length // 2, 0, h) <NEW_LINE> y2 = np.clip(y + self.length // 2, 0, h) <NEW_LINE> x1 = np.clip(x - self.length // 2, 0, w) <NEW_LINE> x2 = np.clip(x + self.length // 2, 0, w) <NEW_LINE> mask[y1: y2, x1: x2] = 0. <NEW_LINE> <DEDENT> mask = torch.from_numpy(mask) <NEW_LINE> mask = mask.expand_as(img) <NEW_LINE> img = img * mask <NEW_LINE> return img | Taken primarily from: https://github.com/uoguelph-mlrg/Cutout
Randomly mask out one or more patches from an image. | 6259906db7558d5895464b5d |
class CourseOutlineView(APIView): <NEW_LINE> <INDENT> def get(self,request,*args,**kwargs): <NEW_LINE> <INDENT> res = {'code': 1000, 'data': None} <NEW_LINE> try: <NEW_LINE> <INDENT> course_obj = models.Course.objects.filter(pk=1) <NEW_LINE> ser_obj = serializer.CourseOutlineSerializer(course_obj,many=True) <NEW_LINE> res['data'] = ser_obj.data <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> res['code'] = 1001 <NEW_LINE> res['error'] = '数据查询失败' <NEW_LINE> <DEDENT> return Response(res) | Full course outline for the special-topic course with id=1. | 6259906d9c8ee82313040db3 |
class I_ior_w_l5_wp(Instruction_w_l5_wp_B): <NEW_LINE> <INDENT> name = 'ior' <NEW_LINE> mask = 0xF80060 <NEW_LINE> code = 0x700060 | IOR{.B} Wb, #lit5, [Wd] | 6259906d44b2445a339b758a |
class ExpectedErrorMiddleware: <NEW_LINE> <INDENT> def __init__(self, get_response): <NEW_LINE> <INDENT> self.get_response = get_response <NEW_LINE> <DEDENT> def __call__(self, request): <NEW_LINE> <INDENT> response = self.get_response(request) <NEW_LINE> return response <NEW_LINE> <DEDENT> def process_exception(self, request, exception): <NEW_LINE> <INDENT> _log_and_monitor_expected_errors(request, exception, 'middleware') | Middleware to add logging and monitoring for expected errors. | 6259906d8e7ae83300eea8e7 |
class SimpleOvalSquareFuncLayer_v2: <NEW_LINE> <INDENT> def __init__(self, x1, x2): <NEW_LINE> <INDENT> self.x1 = x1 <NEW_LINE> self.x2 = x2 <NEW_LINE> self.addX1_minus_three_layer = AddLayer() <NEW_LINE> self.X1_minus_three_square_layer = MulLayer() <NEW_LINE> self.mulX2_by_two_layer = MulLayer() <NEW_LINE> self.addTwoX2_minus_one_layer = AddLayer() <NEW_LINE> self.twoX2_minus_one_squaer_layer = MulLayer() <NEW_LINE> self.lastAdd_layer = AddLayer() <NEW_LINE> <DEDENT> def forward(self): <NEW_LINE> <INDENT> x1_minus_three = self.addX1_minus_three_layer.forward(self.x1, -3) <NEW_LINE> x1_minus_three_square = self.X1_minus_three_square_layer.forward(x1_minus_three, x1_minus_three) <NEW_LINE> x2_mul_by_two = self.mulX2_by_two_layer.forward(self.x2, 2) <NEW_LINE> two_x2_minus_one = self.addTwoX2_minus_one_layer.forward(x2_mul_by_two, -1) <NEW_LINE> two_x2_minus_one_square = self.twoX2_minus_one_squaer_layer.forward(two_x2_minus_one, two_x2_minus_one) <NEW_LINE> out = self.lastAdd_layer.forward(x1_minus_three_square, two_x2_minus_one_square) <NEW_LINE> return out <NEW_LINE> <DEDENT> def backward(self, dout): <NEW_LINE> <INDENT> dx1_minus_three_square, dtwo_x2_minus_one_square = self.lastAdd_layer.backward(dout) <NEW_LINE> dtwo_x2_minus_one, dtwo_x2_minus_one = self.twoX2_minus_one_squaer_layer.backward(dtwo_x2_minus_one_square) <NEW_LINE> dx2_mul_by_two, d_minus_one = self.addTwoX2_minus_one_layer.backward(dtwo_x2_minus_one) <NEW_LINE> dx2, d_two = self.mulX2_by_two_layer.backward(dx2_mul_by_two) <NEW_LINE> dx2 = 2 * dx2 <NEW_LINE> dx1_minus_three, dx1_minus_three = self.X1_minus_three_square_layer.backward(dx1_minus_three_square) <NEW_LINE> dx1, d_minus_three = self.addX1_minus_three_layer.backward(dx1_minus_three) <NEW_LINE> dx1 = 2 * dx1 <NEW_LINE> return dx1, dx2 | This is simple layer that uses loss function of (x-3)^2+(2y-1)^2 | 6259906d97e22403b383c75b |
class Member(models.Model): <NEW_LINE> <INDENT> ROLE_CHOICES = ( ('admin', 'admin'), ('regular', 'regular') ) <NEW_LINE> firstname = models.CharField(max_length=100) <NEW_LINE> lastname = models.CharField(max_length=100) <NEW_LINE> phone = models.CharField(max_length=13) <NEW_LINE> email = models.EmailField() <NEW_LINE> role = models.CharField(max_length=20, choices=ROLE_CHOICES) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<{}-{}>".format(self.firstname, self.id) | Stores details of a team member | 6259906d8da39b475be04a44 |
class FidelityPromo(Promotion): <NEW_LINE> <INDENT> def discount(self, order): <NEW_LINE> <INDENT> if order.customer.fidelity >= 1000: <NEW_LINE> <INDENT> return order.total() * 0.05 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 | 5% discount for customers with 1000 or more fidelity points | 6259906dcb5e8a47e493cdae |
class TraceServiceServicer(object): <NEW_LINE> <INDENT> def BatchWriteSpans(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def CreateSpan(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') | This file describes an API for collecting and viewing traces and spans
within a trace. A Trace is a collection of spans corresponding to a single
operation or set of operations for an application. A span is an individual
timed event which forms a node of the trace tree. A single trace may
contain span(s) from multiple services. | 6259906d5166f23b2e244c2a |
class Clip: <NEW_LINE> <INDENT> def __init__(self, filepath, activation_string, name=None, thumbnail_path=None, params={}, tags=[]): <NEW_LINE> <INDENT> self.f_name = filepath <NEW_LINE> self.t_names = thumbnail_path <NEW_LINE> self.command = activation_string <NEW_LINE> self.params = params <NEW_LINE> self.tags = [] <NEW_LINE> if name is None: <NEW_LINE> <INDENT> self.name = os.path.split(os.path.splitext(filepath)[0])[1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> for tag in tags: <NEW_LINE> <INDENT> self.add_tag(tag) <NEW_LINE> <DEDENT> <DEDENT> def add_tag(self, tag): <NEW_LINE> <INDENT> if tag not in self.tags: <NEW_LINE> <INDENT> self.tags.append(tag) <NEW_LINE> <DEDENT> <DEDENT> def str_tags(self): <NEW_LINE> <INDENT> temp_tags = self.tags[:] <NEW_LINE> temp_tags.sort() <NEW_LINE> return ','.join(temp_tags) <NEW_LINE> <DEDENT> def remove_tag(self, tag): <NEW_LINE> <INDENT> if tag in self.tags: <NEW_LINE> <INDENT> self.tags.remove(tag) <NEW_LINE> <DEDENT> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.f_name < other <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self.f_name > other <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name + "\n\t@ " + self.f_name | used to store "video clips"
with associated information
most importantly, filepath and how to activate them
but also params and tags
example params (inspired by previous sol version)
cue_points - point in time to jump to
loop_points - collection of pairs of points between which can loop
-> loop_type - default (d) or bounce (b)
loop_selection - which loop points are chosen
loop_on - boolean
playback_speed - self explanatory
play_direction - forward (f), backward (b), pause (p), random (r)
control_speed - multiplication factor for controlling clip | 6259906d16aa5153ce401d31 |
class DriverManager(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.driver = None <NEW_LINE> <DEDENT> def get_driver(self): <NEW_LINE> <INDENT> return self.driver <NEW_LINE> <DEDENT> def initialize_driver(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def pause(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> pass | Base class for all driver managers. | 6259906da8370b77170f1c1f |
@dataclass(init=True, repr=True, eq=False, order=False, unsafe_hash=False, frozen=True) <NEW_LINE> class CurvePool: <NEW_LINE> <INDENT> lp: ChecksumEthAddress <NEW_LINE> assets: List[ChecksumEthAddress] <NEW_LINE> pool_address: ChecksumEthAddress | Represent a curve pool contract with the position token and the assets in the pool | 6259906dadb09d7d5dc0bdc2 |
class PyParso(PythonPackage): <NEW_LINE> <INDENT> pypi = "parso/parso-0.6.1.tar.gz" <NEW_LINE> version('0.8.1', sha256='8519430ad07087d4c997fda3a7918f7cfa27cb58972a8c89c2a0295a1c940e9e') <NEW_LINE> version('0.7.1', sha256='caba44724b994a8a5e086460bb212abc5a8bc46951bf4a9a1210745953622eb9') <NEW_LINE> version('0.6.1', sha256='56b2105a80e9c4df49de85e125feb6be69f49920e121406f15e7acde6c9dfc57') <NEW_LINE> version('0.4.0', sha256='2e9574cb12e7112a87253e14e2c380ce312060269d04bd018478a3c92ea9a376') <NEW_LINE> depends_on('[email protected]:', type=('build', 'run'), when='@0.8.1:') <NEW_LINE> depends_on('[email protected]:2.8,3.4:', type=('build', 'run'), when='@0.6.1:') <NEW_LINE> depends_on('[email protected]:2.8,3.3:', type=('build', 'run'), when='@0.4.0:') <NEW_LINE> depends_on('py-setuptools', type='build') | Parso is a Python parser that supports error recovery and round-trip parsing
for different Python versions (in multiple Python versions).
Parso is also able to list multiple syntax errors
in your python file. | 6259906de76e3b2f99fda259 |
class InvalidEventError(Exception): <NEW_LINE> <INDENT> pass | Raised when a non-event is passed to the notifyListeners method of a
Notifier | 6259906d63b5f9789fe869bb |
class Tests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.root = os.path.dirname(__import__(settings.ROOT_URLCONF).__file__) <NEW_LINE> sys.path.append(self.root) <NEW_LINE> self.unique_id = str(uuid4()).replace("-", "") <NEW_LINE> self.test_apps = ["app%s%s" % (i, self.unique_id) for i in range(10)] <NEW_LINE> for test_app in self.test_apps: <NEW_LINE> <INDENT> app_dir = os.path.join(self.root, test_app) <NEW_LINE> os.mkdir(app_dir) <NEW_LINE> app_templates = os.path.join(app_dir, "templates") <NEW_LINE> os.mkdir(app_templates) <NEW_LINE> with open(os.path.join(app_dir, "__init__.py"), "w") as f: <NEW_LINE> <INDENT> f.write("") <NEW_LINE> <DEDENT> extends = test_app != self.test_apps[-1] <NEW_LINE> self._create_template(app_templates, test_app, extends) <NEW_LINE> <DEDENT> project_templates = os.path.join(self.root, "templates") <NEW_LINE> self.project_templates_exist = os.path.exists(project_templates) <NEW_LINE> if not self.project_templates_exist: <NEW_LINE> <INDENT> os.mkdir(project_templates) <NEW_LINE> <DEDENT> self._create_template(project_templates, "project", True) <NEW_LINE> <DEDENT> def _create_template(self, template_dir, test_string, extends): <NEW_LINE> <INDENT> with open(os.path.join(template_dir, self.unique_id), "w") as f: <NEW_LINE> <INDENT> template_vars = { "test_string": test_string, "extends_string": "", "super_string": "", } <NEW_LINE> if extends: <NEW_LINE> <INDENT> extends_string = "{%% overextends \"%s\" %%}" % self.unique_id <NEW_LINE> template_vars["extends_string"] = extends_string <NEW_LINE> template_vars["super_string"] = "{{ block.super }}" <NEW_LINE> <DEDENT> f.write((TEST_TEMPLATE % template_vars).strip()) <NEW_LINE> <DEDENT> <DEDENT> def test_overextends(self): <NEW_LINE> <INDENT> with self.modify_settings(INSTALLED_APPS={ 'prepend': self.test_apps }): <NEW_LINE> <INDENT> html = get_template(self.unique_id).render({}) <NEW_LINE> previous = "" <NEW_LINE> for test_string in ["project"] + self.test_apps: <NEW_LINE> <INDENT> self.assertTrue(test_string in html) <NEW_LINE> if previous: <NEW_LINE> <INDENT> self.assertTrue(html.index(test_string) < html.index(previous)) <NEW_LINE> <DEDENT> previous = test_string <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> for test_app in self.test_apps: <NEW_LINE> <INDENT> rmtree(os.path.join(self.root, test_app)) <NEW_LINE> <DEDENT> if self.project_templates_exist: <NEW_LINE> <INDENT> os.remove(os.path.join(self.root, "templates", self.unique_id)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rmtree(os.path.join(self.root, "templates")) | Test that ``overextends`` triggers across multiple project and
app templates with the same relative path. To achieve this, we
need the same template name to exist in multiple apps, as well as
at the project level, so we create some fake apps and a project
template. These all used a unique ID to ensure they don't
collide with anything in the project. When we're done, we clean
up our mess. | 6259906d3d592f4c4edbc739 |
class Solution: <NEW_LINE> <INDENT> def maxNode(self, root): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> left_max = self.maxNode(root.left) <NEW_LINE> right_max = self.maxNode(root.right) <NEW_LINE> if left_max is None and right_max is None: <NEW_LINE> <INDENT> return root <NEW_LINE> <DEDENT> elif left_max is None: <NEW_LINE> <INDENT> return max(right_max, root, key=lambda x: x.val) <NEW_LINE> <DEDENT> elif right_max is None: <NEW_LINE> <INDENT> return max(left_max, root, key=lambda x: x.val) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return max(left_max, right_max, root, key=lambda x: x.val) | @param: root: the root of tree
@return: the max node | 6259906daad79263cf43000e |
class ListeningThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, network, listen2): <NEW_LINE> <INDENT> threading.Thread.__init__(self, name="moteinopy.ListeningThread") <NEW_LINE> self.Network = network <NEW_LINE> self.Listen2 = listen2 <NEW_LINE> self.Stop = False <NEW_LINE> <DEDENT> def stop(self, sig=None, frame=None): <NEW_LINE> <INDENT> logger.debug("Listening thread attempting to stop itself") <NEW_LINE> self.Stop = True <NEW_LINE> self.Listen2.cancel_read() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> logger.debug("Serial listening thread started") <NEW_LINE> incoming = '' <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> incoming = self.Listen2.readline().rstrip() <NEW_LINE> <DEDENT> except serial.SerialException as e: <NEW_LINE> <INDENT> logger.debug("Serial exception occured: " + str(e)) <NEW_LINE> if not self.Stop: <NEW_LINE> <INDENT> logger.warning("serial exception ocurred: " + str(e)) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if self.Stop: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.debug("Serial port said: " + str(incoming)) <NEW_LINE> if is_hex_string(incoming): <NEW_LINE> <INDENT> Send2ParentThread(self.Network, incoming).start() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.error("Serial port said: " + str(incoming)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> logger.info("Serial listening thread shutting down") <NEW_LINE> self.Listen2.close() | A thread that listens to the Serial port. When something (that ends with a newline) is recieved
the thread will start up a Send2Parent thread and go back to listening to the Serial port | 6259906d38b623060ffaa47f |
class QtAbstractMeta(ABCMeta, type(QObject)): <NEW_LINE> <INDENT> pass | Metaclass that allows implementing ABC and QObject simultaneously | 6259906d1f5feb6acb164449 |
class TestWriteSheetView(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.fh = StringIO() <NEW_LINE> self.worksheet = Worksheet() <NEW_LINE> self.worksheet._set_filehandle(self.fh) <NEW_LINE> <DEDENT> def test_write_sheet_view_tab_not_selected(self): <NEW_LINE> <INDENT> self.worksheet._write_sheet_view() <NEW_LINE> exp = """<sheetView workbookViewId="0"/>""" <NEW_LINE> got = self.fh.getvalue() <NEW_LINE> self.assertEqual(got, exp) <NEW_LINE> <DEDENT> def test_write_sheet_view_tab_selected(self): <NEW_LINE> <INDENT> self.worksheet.select() <NEW_LINE> self.worksheet._write_sheet_view() <NEW_LINE> exp = """<sheetView tabSelected="1" workbookViewId="0"/>""" <NEW_LINE> got = self.fh.getvalue() <NEW_LINE> self.assertEqual(got, exp) <NEW_LINE> <DEDENT> def test_write_sheet_view_hide_gridlines(self): <NEW_LINE> <INDENT> self.worksheet.select() <NEW_LINE> self.worksheet.hide_gridlines() <NEW_LINE> self.worksheet._write_sheet_view() <NEW_LINE> exp = """<sheetView tabSelected="1" workbookViewId="0"/>""" <NEW_LINE> got = self.fh.getvalue() <NEW_LINE> self.assertEqual(got, exp) <NEW_LINE> <DEDENT> def test_write_sheet_view_hide_gridlines_0(self): <NEW_LINE> <INDENT> self.worksheet.select() <NEW_LINE> self.worksheet.hide_gridlines(0) <NEW_LINE> self.worksheet._write_sheet_view() <NEW_LINE> exp = """<sheetView tabSelected="1" workbookViewId="0"/>""" <NEW_LINE> got = self.fh.getvalue() <NEW_LINE> self.assertEqual(got, exp) <NEW_LINE> <DEDENT> def test_write_sheet_view_hide_gridlines_1(self): <NEW_LINE> <INDENT> self.worksheet.select() <NEW_LINE> self.worksheet.hide_gridlines(1) <NEW_LINE> self.worksheet._write_sheet_view() <NEW_LINE> exp = """<sheetView tabSelected="1" workbookViewId="0"/>""" <NEW_LINE> got = self.fh.getvalue() <NEW_LINE> self.assertEqual(got, exp) <NEW_LINE> <DEDENT> def test_write_sheet_view_hide_gridlines_2(self): <NEW_LINE> <INDENT> self.worksheet.select() <NEW_LINE> self.worksheet.hide_gridlines(2) <NEW_LINE> self.worksheet._write_sheet_view() <NEW_LINE> exp = """<sheetView showGridLines="0" tabSelected="1" workbookViewId="0"/>""" <NEW_LINE> got = self.fh.getvalue() <NEW_LINE> self.assertEqual(got, exp) <NEW_LINE> <DEDENT> def test_write_sheet_view_hide_row_col_headers(self): <NEW_LINE> <INDENT> self.worksheet.select() <NEW_LINE> self.worksheet.hide_row_col_headers() <NEW_LINE> self.worksheet._write_sheet_view() <NEW_LINE> exp = """<sheetView showRowColHeaders="0" tabSelected="1" workbookViewId="0"/>""" <NEW_LINE> got = self.fh.getvalue() <NEW_LINE> self.assertEqual(got, exp) | Test the Worksheet _write_sheet_view() method. | 6259906d4c3428357761bb0b |
class SeekingToRange(RangeXYHold): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def transitions(): <NEW_LINE> <INDENT> return RangeXYHold.transitions(SeekingToRange, { RangeXYHold.IN_RANGE : SeekingToAligned }, lostState = FindAttemptRange) | Heads toward the target until it reaches the desired range | 6259906d4e4d562566373c5f |
class Building(object): <NEW_LINE> <INDENT> def __init__(self, name, level, cost, increment): <NEW_LINE> <INDENT> if name in BUILDING_NAMES: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if type(name) != str: <NEW_LINE> <INDENT> raise TypeError("Building name should be a string") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Unknown building name {}".format(name)) <NEW_LINE> <DEDENT> <DEDENT> if type(level) == int: <NEW_LINE> <INDENT> if level > -1: <NEW_LINE> <INDENT> self.level = level <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Building level should be a non-negative int") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Building level should be of type int") <NEW_LINE> <DEDENT> if type(cost) == dict: <NEW_LINE> <INDENT> if len(cost) != len(RESOURCES): <NEW_LINE> <INDENT> raise ValueError("Cost dict of wrong size", len(cost), "should be", len(RESOURCES)) <NEW_LINE> <DEDENT> for r in RESOURCES: <NEW_LINE> <INDENT> val = cost.get(r, None) <NEW_LINE> if type(val) != int: <NEW_LINE> <INDENT> if val is None: <NEW_LINE> <INDENT> raise KeyError(r, "cost missing") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Cost value should be an int") <NEW_LINE> <DEDENT> <DEDENT> elif val < 0: <NEW_LINE> <INDENT> raise ValueError("Cost value should not be negative") <NEW_LINE> <DEDENT> <DEDENT> self.cost = cost <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("Resource costs should be a dictionary") <NEW_LINE> <DEDENT> if type(increment) == float and 1 <= increment <= 2: <NEW_LINE> <INDENT> self.increment = increment <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if type(increment) != float: <NEW_LINE> <INDENT> raise TypeError("Increment value should be a float") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Increment value should be bounded by 1 and 2") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def level_up(self): <NEW_LINE> <INDENT> self.level += 1 <NEW_LINE> for resource in self.cost: <NEW_LINE> <INDENT> self.cost[resource] = int(self.cost[resource]*increment) | Building class | 6259906d7d847024c075dc35 |
class BaseField(object): <NEW_LINE> <INDENT> perm_getter = FieldPerm() <NEW_LINE> conv = convs.Char <NEW_LINE> widget = widgets.TextInput() <NEW_LINE> label = None <NEW_LINE> media = FormMedia() <NEW_LINE> def __init__(self, name, conv=None, parent=None, **kwargs): <NEW_LINE> <INDENT> kwargs.update(dict( parent=parent, name=name, conv=(conv or self.conv)(field=self), widget=(kwargs.get('widget') or self.widget)(field=self), )) <NEW_LINE> self._init_kwargs = kwargs <NEW_LINE> self.__dict__.update(kwargs) <NEW_LINE> <DEDENT> def __call__(self, **kwargs): <NEW_LINE> <INDENT> params = dict(self._init_kwargs, **kwargs) <NEW_LINE> return self.__class__(**params) <NEW_LINE> <DEDENT> @property <NEW_LINE> def multiple(self): <NEW_LINE> <INDENT> return self.conv.multiple <NEW_LINE> <DEDENT> @property <NEW_LINE> def env(self): <NEW_LINE> <INDENT> return self.parent.env <NEW_LINE> <DEDENT> @property <NEW_LINE> def form(self): <NEW_LINE> <INDENT> return self.parent.form <NEW_LINE> <DEDENT> @property <NEW_LINE> def input_name(self): <NEW_LINE> <INDENT> if self.name is None: <NEW_LINE> <INDENT> return self.parent.input_name <NEW_LINE> <DEDENT> return self.parent.prefix + self.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def error(self): <NEW_LINE> <INDENT> return self.form.errors.get(self.input_name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def clean_value(self): <NEW_LINE> <INDENT> return self.parent.python_data[self.name] <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def _relative_id(self): <NEW_LINE> <INDENT> return self.form.get_field_id(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return '%s-%s' % (self.form.id, self.input_name) <NEW_LINE> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> return self.conv.to_python(value) <NEW_LINE> <DEDENT> def from_python(self, value): <NEW_LINE> <INDENT> return self.conv.from_python(value) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def permissions(self): <NEW_LINE> <INDENT> return self.perm_getter.get_perms(self) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def writable(self): <NEW_LINE> <INDENT> return 'w' in self.permissions <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def readable(self): <NEW_LINE> <INDENT> return 'r' in self.permissions <NEW_LINE> <DEDENT> @property <NEW_LINE> def render_type(self): <NEW_LINE> <INDENT> return self.widget.render_type <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> return self.widget.render(self.raw_value) <NEW_LINE> <DEDENT> def get_media(self): <NEW_LINE> <INDENT> media = FormMedia(self.media) <NEW_LINE> media += self.widget.get_media() <NEW_LINE> return media | Simple container class which ancestors represents various parts of Form.
Encapsulates converter, various fields attributes, methods for data
access control | 6259906d99cbb53fe6832740 |
class BlockStatement: <NEW_LINE> <INDENT> def __init__(self, parallel=False, statements=None): <NEW_LINE> <INDENT> self._parallel = bool(parallel) <NEW_LINE> if statements is None: <NEW_LINE> <INDENT> self._statements = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._statements = statements <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"BlockStatement(parallel={self.parallel}, {self.statements})" <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return ( self.parallel == other.parallel and self.statements == other.statements ) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def parallel(self): <NEW_LINE> <INDENT> return self._parallel <NEW_LINE> <DEDENT> @property <NEW_LINE> def statements(self): <NEW_LINE> <INDENT> return self._statements <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.statements[key] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.statements) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.statements) | Represents a Jaqal block statement; either sequential or parallel. Can contain other
blocks, loop statements, and gate statements.
:param bool parallel: Set to False (default) for a sequential block or True for a parallel block.
:param statements: The contents of the block; defaults to an empty block.
:type statements: list(GateStatement, LoopStatement, BlockStatement) | 6259906db7558d5895464b5e |
class Convert_To_Mirror_Class(): <NEW_LINE> <INDENT> def GetResources(self): <NEW_LINE> <INDENT> iconpath = FreeCAD.getUserAppDataDir().encode("utf-8") + 'Mod/ehtecoptics/resources/mirror.svg.png' <NEW_LINE> return {'Pixmap': iconpath, 'Accel': "Shift+A", 'MenuText': "Convert To Absorber", 'ToolTip': "Converts a solid to a mirror"} <NEW_LINE> <DEDENT> def Activated(self): <NEW_LINE> <INDENT> theobj = App.ActiveDocument.ActiveObject <NEW_LINE> amirror = mirror.makeMirror() <NEW_LINE> theshape = theobj.Shape <NEW_LINE> amirror.Shape = theshape <NEW_LINE> theobj.ViewObject.Visibility = False <NEW_LINE> amirror.ViewObject.ShapeColor = (131.0 / 255, 137.0 / 255, 150.0 / 255) <NEW_LINE> return <NEW_LINE> <DEDENT> def IsActive(self): <NEW_LINE> <INDENT> return True | My new command | 6259906d4a966d76dd5f0742 |
class Submission(StatusModel, TimeStampedModel): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> app_label = "djspikeval" <NEW_LINE> <DEDENT> STATUS = Choices("private", "public") <NEW_LINE> description = models.TextField( blank=True, null=True) <NEW_LINE> user = models.ForeignKey( settings.AUTH_USER_MODEL, blank=True, help_text="The user associated with this submission.") <NEW_LINE> algorithm = models.ForeignKey( Algorithm, default=1, help_text="The Algorithm associated with this submission.") <NEW_LINE> dataset = models.ForeignKey( Dataset, help_text="The Dataset associated with this submission.", related_name="submission_set") <NEW_LINE> asset_set = GenericRelation("base.Asset") <NEW_LINE> @property <NEW_LINE> def attachment_set(self): <NEW_LINE> <INDENT> return self.asset_set.filter(kind="attachment") <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return "{} @ {}".format(self.algorithm, self.dataset) <NEW_LINE> <DEDENT> @models.permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return "analysis:detail", (self.pk,), {} <NEW_LINE> <DEDENT> @models.permalink <NEW_LINE> def get_delete_url(self): <NEW_LINE> <INDENT> return "analysis:delete", (self.pk,), {} <NEW_LINE> <DEDENT> def toggle(self): <NEW_LINE> <INDENT> if self.status == self.STATUS.public: <NEW_LINE> <INDENT> self.status = self.STATUS.private <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.status = self.STATUS.public <NEW_LINE> <DEDENT> self.save() <NEW_LINE> <DEDENT> def is_public(self): <NEW_LINE> <INDENT> return self.status == Submission.STATUS.public and self.dataset.is_public() <NEW_LINE> <DEDENT> def is_editable(self, user): <NEW_LINE> <INDENT> return self.user == user or getattr(user, "is_superuser", False) is True <NEW_LINE> <DEDENT> def is_accessible(self, user): <NEW_LINE> <INDENT> return self.is_public() or self.is_editable(user) | container for a set of evaluations submitted by a user for one dataset | 6259906dac7a0e7691f73d41 |
class ImageTask(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.State = None <NEW_LINE> self.Message = None <NEW_LINE> self.ImageName = None <NEW_LINE> self.CreateTime = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.State = params.get("State") <NEW_LINE> self.Message = params.get("Message") <NEW_LINE> self.ImageName = params.get("ImageName") <NEW_LINE> self.CreateTime = params.get("CreateTime") | Image task
| 6259906dd268445f2663a789 |
class Meta: <NEW_LINE> <INDENT> db_table = 'form_gen_dates' | Override some params. | 6259906d3539df3088ecdaf5 |
class QuitHandler(IRCHandler): <NEW_LINE> <INDENT> def receive_msg(self, msg): <NEW_LINE> <INDENT> if self.is_authenticated(msg): <NEW_LINE> <INDENT> if msg.find(self.bot.bang_name + ' quit') != -1: <NEW_LINE> <INDENT> self.reply_to(msg, "Screw you guys, I'm going home.") <NEW_LINE> self.bot.stop() | quit: Makes the bot quit the server on with '!<botname> quit' command | 6259906d44b2445a339b758b |
class DictIter(object): <NEW_LINE> <INDENT> __slots__ = 'data', 'keys', 'index' <NEW_LINE> def __init__(self, data): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.keys = sorted(data.keys()) <NEW_LINE> self.index = -1 <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> self.index += 1 <NEW_LINE> if self.index >= len(self.keys): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> return self.data[self.keys[self.index]] | Iterator through the tree | 6259906da8370b77170f1c21 |
class HttpResponseMixin(object): <NEW_LINE> <INDENT> def render_to_http_response(self, json_data): <NEW_LINE> <INDENT> return HttpResponse(json_data,content_type='application/json') | doc string for HttpResponseMixin. | 6259906d2ae34c7f260ac942 |
class Meta: <NEW_LINE> <INDENT> model = Organisation <NEW_LINE> include_fk = True <NEW_LINE> exclude = ('passcode',) | . | 6259906d1b99ca4002290162 |
class CacheConfSchema(schema.ResponseSchema): <NEW_LINE> <INDENT> fields = { "CacheBehavior": fields.Bool(required=True, load_from="CacheBehavior"), "CacheTTL": fields.Int(required=True, load_from="CacheTTL"), "CacheUnit": fields.Str(required=True, load_from="CacheUnit"), "Description": fields.Str(required=False, load_from="Description"), "FollowOriginRule": fields.Bool( required=False, load_from="FollowOriginRule" ), "HttpCodePattern": fields.Str( required=False, load_from="HttpCodePattern" ), "PathPattern": fields.Str(required=True, load_from="PathPattern"), } | CacheConf - 缓存配置 | 6259906dfff4ab517ebcf074 |
@vaping.plugin.register("fping") <NEW_LINE> class FPing(FPingBase): <NEW_LINE> <INDENT> def init(self): <NEW_LINE> <INDENT> self.hosts = [] <NEW_LINE> for name, group_config in list(self.groups.items()): <NEW_LINE> <INDENT> self.hosts.extend(group_config.get("hosts", [])) <NEW_LINE> <DEDENT> <DEDENT> def probe(self): <NEW_LINE> <INDENT> msg = self.new_message() <NEW_LINE> msg["data"] = self._run_proc() <NEW_LINE> return msg | Run fping on configured hosts
# Config
- command (`str=fping`): command to run
- interval (`str=1m`): time between pings
- count (`int=5`): number of pings to send
- period (`int=20`): time in milliseconds that fping waits
between successive packets to an individual target | 6259906d67a9b606de5476cf |
class OutputProxy(StringIO): <NEW_LINE> <INDENT> def write(self, str_): <NEW_LINE> <INDENT> sys.stdout.write(str_) <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> sys.stdout.flush() | A simple interface to replace sys.stdout so
doctest can capture it. | 6259906d4e4d562566373c60 |
@dataclass <NEW_LINE> class BankConflict(): <NEW_LINE> <INDENT> is_conflict: bool <NEW_LINE> t: int <NEW_LINE> s: int <NEW_LINE> new_bank: int | Whether there is a conflict.
If there is a conflict, the coordinates of one of the conflicting values and its new bank. | 6259906d097d151d1a2c28ca
class TopicType(models.Model): <NEW_LINE> <INDENT> forum = models.ForeignKey(Forum) <NEW_LINE> name = models.CharField("类型名", max_length=32) <NEW_LINE> slug = models.CharField(u"别名", max_length=32, unique=True, null=True, validators=[ validators.RegexValidator(re.compile('^([a-zA-Z0-9_-]*[a-zA-Z][a-zA-Z0-9_-]*)$'), u'字母,数字,下划线,不能纯数字.'), validators.MinLengthValidator(3), ]) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_slug(self): <NEW_LINE> <INDENT> if self.slug: <NEW_LINE> <INDENT> return self.slug <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return str(self.pk) <NEW_LINE> <DEDENT> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> slug = self.get_slug() <NEW_LINE> return reverse('type_topic_list', args=(slug, )) <NEW_LINE> <DEDENT> def get_topic_count(self): <NEW_LINE> <INDENT> return Topic.objects.filter(forum=self.forum, topic_type=self).count() | Topic type | 6259906d63b5f9789fe869bd
class BalloonConfig(Config): <NEW_LINE> <INDENT> NAME = "TreeRingCracksComb2_OnlyRing" <NEW_LINE> IMAGES_PER_GPU = 4 <NEW_LINE> NUM_CLASSES = 1 + 1 <NEW_LINE> STEPS_PER_EPOCH = 650 <NEW_LINE> VALIDATION_STEPS = 1 <NEW_LINE> BACKBONE = "resnet101" <NEW_LINE> USE_MINI_MASK = True <NEW_LINE> MINI_MASK_SHAPE = (56, 56) <NEW_LINE> IMAGE_RESIZE_MODE = "square" <NEW_LINE> IMAGE_MIN_DIM = 800 <NEW_LINE> IMAGE_MAX_DIM = 1024 <NEW_LINE> RPN_ANCHOR_SCALES = (32, 64, 128, 256, 512) <NEW_LINE> RPN_NMS_THRESHOLD = 0.9 <NEW_LINE> TRAIN_ROIS_PER_IMAGE = 200 <NEW_LINE> DETECTION_MIN_CONFIDENCE = 1.0 <NEW_LINE> LEARNING_RATE = 0.001 <NEW_LINE> LEARNING_MOMENTUM = 0.9 <NEW_LINE> WEIGHT_DECAY = 0.0001 <NEW_LINE> LOSS_WEIGHTS = { "rpn_class_loss": 1., "rpn_bbox_loss": 1., "mrcnn_class_loss": 1., "mrcnn_bbox_loss": 1., "mrcnn_mask_loss": 2. } <NEW_LINE> GRADIENT_CLIP_NORM = 5.0 | Configuration for training on the toy dataset.
Derives from the base Config class and overrides some values. | 6259906daad79263cf430010 |
class ScanError(ScheduleError): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> ScheduleError.__init__(self, *args, **kwargs) | Error in receivers specifications | 6259906d7047854f46340c11 |
class ValidateCloudConfigFileTest(CiTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(ValidateCloudConfigFileTest, self).setUp() <NEW_LINE> self.config_file = self.tmp_path('cloudcfg.yaml') <NEW_LINE> <DEDENT> def test_validateconfig_file_error_on_absent_file(self): <NEW_LINE> <INDENT> with self.assertRaises(RuntimeError) as context_mgr: <NEW_LINE> <INDENT> validate_cloudconfig_file('/not/here', {}) <NEW_LINE> <DEDENT> self.assertEqual( 'Configfile /not/here does not exist', str(context_mgr.exception)) <NEW_LINE> <DEDENT> def test_validateconfig_file_error_on_invalid_header(self): <NEW_LINE> <INDENT> write_file(self.config_file, '#junk') <NEW_LINE> with self.assertRaises(SchemaValidationError) as context_mgr: <NEW_LINE> <INDENT> validate_cloudconfig_file(self.config_file, {}) <NEW_LINE> <DEDENT> self.assertEqual( 'Cloud config schema errors: format-l1.c1: File {0} needs to begin' ' with "{1}"'.format( self.config_file, CLOUD_CONFIG_HEADER.decode()), str(context_mgr.exception)) <NEW_LINE> <DEDENT> def test_validateconfig_file_error_on_non_yaml_scanner_error(self): <NEW_LINE> <INDENT> write_file(self.config_file, '#cloud-config\nasdf:\nasdf') <NEW_LINE> with self.assertRaises(SchemaValidationError) as context_mgr: <NEW_LINE> <INDENT> validate_cloudconfig_file(self.config_file, {}) <NEW_LINE> <DEDENT> self.assertIn( 'schema errors: format-l3.c1: File {0} is not valid yaml.'.format( self.config_file), str(context_mgr.exception)) <NEW_LINE> <DEDENT> def test_validateconfig_file_error_on_non_yaml_parser_error(self): <NEW_LINE> <INDENT> write_file(self.config_file, '#cloud-config\n{}}') <NEW_LINE> with self.assertRaises(SchemaValidationError) as context_mgr: <NEW_LINE> <INDENT> validate_cloudconfig_file(self.config_file, {}) <NEW_LINE> <DEDENT> self.assertIn( 'schema errors: format-l2.c3: File {0} is not valid yaml.'.format( self.config_file), str(context_mgr.exception)) <NEW_LINE> <DEDENT> @skipUnlessJsonSchema() <NEW_LINE> def test_validateconfig_file_sctrictly_validates_schema(self): <NEW_LINE> <INDENT> schema = { 'properties': {'p1': {'type': 'string', 'format': 'hostname'}}} <NEW_LINE> write_file(self.config_file, '#cloud-config\np1: "-1"') <NEW_LINE> with self.assertRaises(SchemaValidationError) as context_mgr: <NEW_LINE> <INDENT> validate_cloudconfig_file(self.config_file, schema) <NEW_LINE> <DEDENT> self.assertEqual( "Cloud config schema errors: p1: '-1' is not a 'hostname'", str(context_mgr.exception)) | Tests for validate_cloudconfig_file. | 6259906d3346ee7daa33828b |
class NodeTest(unittest.TestCase): <NEW_LINE> <INDENT> def testCreationOcc(self): <NEW_LINE> <INDENT> node = Node("Salut") <NEW_LINE> self.assertEqual(1,node.getOcc()) | Test the Node class | 6259906d76e4537e8c3f0dde |
class Collections(enum.Enum): <NEW_LINE> <INDENT> PROJECTS = ( 'projects', 'projects/{projectsId}', {}, [u'projectsId'], True ) <NEW_LINE> PROJECTS_JOBS = ( 'projects.jobs', '{+name}', { '': 'projects/{projectsId}/jobs/{jobsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_LOCATIONS = ( 'projects.locations', '{+name}', { '': 'projects/{projectsId}/locations/{locationsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_MODELS = ( 'projects.models', '{+name}', { '': 'projects/{projectsId}/models/{modelsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_MODELS_VERSIONS = ( 'projects.models.versions', '{+name}', { '': 'projects/{projectsId}/models/{modelsId}/versions/{versionsId}', }, [u'name'], True ) <NEW_LINE> PROJECTS_OPERATIONS = ( 'projects.operations', '{+name}', { '': 'projects/{projectsId}/operations/{operationsId}', }, [u'name'], True ) <NEW_LINE> def __init__(self, collection_name, path, flat_paths, params, enable_uri_parsing): <NEW_LINE> <INDENT> self.collection_name = collection_name <NEW_LINE> self.path = path <NEW_LINE> self.flat_paths = flat_paths <NEW_LINE> self.params = params <NEW_LINE> self.enable_uri_parsing = enable_uri_parsing | Collections for all supported apis. | 6259906d8e71fb1e983bd323 |
class EB_EPD(Binary): <NEW_LINE> <INDENT> def install_step(self): <NEW_LINE> <INDENT> os.chdir(self.builddir) <NEW_LINE> if self.cfg['install_cmd'] is None: <NEW_LINE> <INDENT> self.cfg['install_cmd'] = "./epd_free-%s-x86_64.sh -b -p %s" % (self.version, self.installdir) <NEW_LINE> <DEDENT> super(EB_EPD, self).install_step() | Easyblock implementing the build step for EPD,
this is just running the installer script, with an argument to the installdir | 6259906d99cbb53fe6832742 |
@command(server_cmds) <NEW_LINE> class server_attach(_CycladesInit, OptionalOutput): <NEW_LINE> <INDENT> arguments = dict( volume_id=ValueArgument('The volume to be attached', '--volume-id') ) <NEW_LINE> required = ('volume_id', ) <NEW_LINE> @errors.Generic.all <NEW_LINE> @errors.Cyclades.connection <NEW_LINE> @errors.Cyclades.server_id <NEW_LINE> @errors.Cyclades.endpoint <NEW_LINE> def _run(self, server_id): <NEW_LINE> <INDENT> r = self.client.attach_volume(server_id, self['volume_id']) <NEW_LINE> self.print_(r, self.print_dict) <NEW_LINE> <DEDENT> def main(self, server_id): <NEW_LINE> <INDENT> super(self.__class__, self)._run() <NEW_LINE> self._run(server_id=server_id) | Attach a volume on a VM | 6259906d2c8b7c6e89bd5041 |
class GameStats(): <NEW_LINE> <INDENT> def __init__(self, ai_settings): <NEW_LINE> <INDENT> self.ai_settings = ai_settings <NEW_LINE> self.reset_stats() <NEW_LINE> self.game_active = False <NEW_LINE> self.high_score = 0 <NEW_LINE> self.max_level = 1 <NEW_LINE> self.load_progress() <NEW_LINE> <DEDENT> def reset_stats(self): <NEW_LINE> <INDENT> self.ship_left = self.ai_settings.ship_limit <NEW_LINE> self.score = 0 <NEW_LINE> self.level = 1 <NEW_LINE> <DEDENT> def load_progress(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open('data/saved_progress.json', 'r') as fp: <NEW_LINE> <INDENT> progress = json.load(fp) <NEW_LINE> self.high_score = progress['high_score'] <NEW_LINE> self.max_level = progress['max_level'] <NEW_LINE> <DEDENT> <DEDENT> except FileNotFoundError: <NEW_LINE> <INDENT> print('No saved progress') <NEW_LINE> pass <NEW_LINE> <DEDENT> <DEDENT> def save_progress(self): <NEW_LINE> <INDENT> progress = { 'high_score': self.high_score , 'max_level': self.max_level } <NEW_LINE> if not os.path.exists('data/'): <NEW_LINE> <INDENT> os.mkdir('data/') <NEW_LINE> <DEDENT> with open('data/saved_progress.json', 'w') as fp: <NEW_LINE> <INDENT> json.dump(progress, fp) <NEW_LINE> print('Progress saved') | Track all game statistics | 6259906d3539df3088ecdaf7 |
class Notification(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey("Profile", verbose_name="User to be notified") <NEW_LINE> text = models.CharField(max_length=500, verbose_name="Notification message") <NEW_LINE> url = models.URLField(null=True, verbose_name="Notification URL") <NEW_LINE> time = models.DateTimeField(auto_now_add=True, verbose_name="Time when notification raised") <NEW_LINE> read = models.BooleanField(default=False, verbose_name="Indicates whether a notification has been read") <NEW_LINE> def mark_read(self): <NEW_LINE> <INDENT> self.read = True <NEW_LINE> self.save() <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return (("'%s' to %s on %s") % (self.text, self.user.username, self.time)) | Represents a notification message for a user | 6259906d32920d7e50bc78a1 |
class CreateLocation(messages.Message): <NEW_LINE> <INDENT> loc = messages.StringField(1, required=True) <NEW_LINE> observador = messages.StringField(2, required=True) | Message containing the information of a Location
loc: coordinates
observador: email | 6259906d66673b3332c31c58 |
class mytimr: <NEW_LINE> <INDENT> def start(self): <NEW_LINE> <INDENT> self.timeNow = time.time() <NEW_LINE> <DEDENT> def timeRep(self): <NEW_LINE> <INDENT> print("({0:.4f} sec.)".format(time.time() - self.timeNow)) | Rudimentary, measures real, not process time. | 6259906dbf627c535bcb2d25 |
class OfflineReq: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.I32, 'onsale_group_id', None, None, ), ) <NEW_LINE> def __init__(self, onsale_group_id=None,): <NEW_LINE> <INDENT> self.onsale_group_id = onsale_group_id <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.onsale_group_id = iprot.readI32(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('OfflineReq') <NEW_LINE> if self.onsale_group_id is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('onsale_group_id', TType.I32, 1) <NEW_LINE> oprot.writeI32(self.onsale_group_id) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.onsale_group_id is None: <NEW_LINE> <INDENT> raise TProtocol.TProtocolException(message='Required field onsale_group_id is unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.onsale_group_id) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- onsale_group_id | 6259906da8370b77170f1c23 |
@SkypeUtils.initAttrs <NEW_LINE> @SkypeUtils.convertIds("user") <NEW_LINE> class SkypeEndpointEvent(SkypeEvent): <NEW_LINE> <INDENT> attrs = SkypeEvent.attrs + ("userId", "name", "capabilities") <NEW_LINE> @classmethod <NEW_LINE> def rawToFields(cls, raw={}): <NEW_LINE> <INDENT> fields = super(SkypeEndpointEvent, cls).rawToFields(raw) <NEW_LINE> res = raw.get("resource", {}) <NEW_LINE> fields.update({"userId": SkypeUtils.userToId(res.get("selfLink")), "name": res.get("privateInfo", {}).get("epname"), "capabilities": list(filter(None, res.get("publicInfo", {}) .get("capabilities", "").split(" | ")))}) <NEW_LINE> return fields | An event for changes to individual contact endpoints.
Attributes:
user (:class:`.SkypeUser`):
User whose endpoint emitted an event.
name (str):
Name of the device connected with this endpoint.
capabilities (str list):
Features available on the device. | 6259906d379a373c97d9a87b |
class Application(): <NEW_LINE> <INDENT> def __init__(self, browser, base_url): <NEW_LINE> <INDENT> if browser == "firefox": <NEW_LINE> <INDENT> self.wd = webdriver.Firefox() <NEW_LINE> <DEDENT> elif browser == "chrome": <NEW_LINE> <INDENT> self.wd = webdriver.Chrome() <NEW_LINE> <DEDENT> elif browser == "ie": <NEW_LINE> <INDENT> self.wd = webdriver.Ie() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Unrecognized browser {}".format(browser)) <NEW_LINE> <DEDENT> self.session = SessionHelper(self) <NEW_LINE> self.group = GroupHelper(self) <NEW_LINE> self.contact = ContactHelper(self) <NEW_LINE> self.open = OpenHelper(self) <NEW_LINE> self.url = URL(base_url) <NEW_LINE> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> self.wd.quit() <NEW_LINE> <DEDENT> def is_valid(self): <NEW_LINE> <INDENT> try: self.wd.current_url; return True <NEW_LINE> except: return False | Class for represent Application. | 6259906d97e22403b383c75f |
class DimmableLedController(CustomDevice): <NEW_LINE> <INDENT> signature = { MODELS_INFO: [("_TZ3210_9q49basr", "TS0501B")], ENDPOINTS: { 1: { PROFILE_ID: zha.PROFILE_ID, DEVICE_TYPE: zha.DeviceType.DIMMABLE_LIGHT, INPUT_CLUSTERS: [ Basic.cluster_id, Identify.cluster_id, Groups.cluster_id, Scenes.cluster_id, OnOff.cluster_id, LevelControl.cluster_id, Color.cluster_id, LightLink.cluster_id, TuyaManufCluster.cluster_id, ], OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id], }, 242: { PROFILE_ID: 41440, DEVICE_TYPE: 97, INPUT_CLUSTERS: [], OUTPUT_CLUSTERS: [GreenPowerProxy.cluster_id], }, }, } <NEW_LINE> replacement = { ENDPOINTS: { 1: { PROFILE_ID: zha.PROFILE_ID, DEVICE_TYPE: zha.DeviceType.DIMMABLE_LIGHT, INPUT_CLUSTERS: [ Basic.cluster_id, Identify.cluster_id, Groups.cluster_id, Scenes.cluster_id, OnOff.cluster_id, LevelControl.cluster_id, LightLink.cluster_id, ], OUTPUT_CLUSTERS: [Time.cluster_id, Ota.cluster_id], }, 242: { PROFILE_ID: 41440, DEVICE_TYPE: 97, INPUT_CLUSTERS: [], OUTPUT_CLUSTERS: [GreenPowerProxy.cluster_id], }, }, } | Tuya dimmable led controller single channel. | 6259906d283ffb24f3cf5104 |
class Node: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.r = None <NEW_LINE> <DEDENT> def make_il(self, il_code, symbol_table, c): <NEW_LINE> <INDENT> raise NotImplementedError | Base class for representing a single node in the AST.
All AST nodes inherit from this class. | 6259906d7b25080760ed8910 |
class LearningAgent(Agent): <NEW_LINE> <INDENT> def __init__(self, env, learning=False, epsilon=1.0, alpha=0.5): <NEW_LINE> <INDENT> super(LearningAgent, self).__init__(env) <NEW_LINE> self.planner = RoutePlanner(self.env, self) <NEW_LINE> self.valid_actions = self.env.valid_actions <NEW_LINE> self.learning = learning <NEW_LINE> self.Q = dict() <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.alpha = alpha <NEW_LINE> self.a = 0.0 <NEW_LINE> <DEDENT> def reset(self, destination=None, testing=False): <NEW_LINE> <INDENT> self.planner.route_to(destination) <NEW_LINE> if testing: <NEW_LINE> <INDENT> self.epsilon = 0 <NEW_LINE> self.alpha = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.a += 1 <NEW_LINE> self.epsilon = math.exp(-self.alpha * self.a) <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def build_state(self): <NEW_LINE> <INDENT> waypoint = self.planner.next_waypoint() <NEW_LINE> inputs = self.env.sense(self) <NEW_LINE> deadline = self.env.get_deadline(self) <NEW_LINE> state = (inputs['light'],inputs['left'],waypoint,inputs['oncoming']) <NEW_LINE> return state <NEW_LINE> <DEDENT> def get_maxQ(self, state): <NEW_LINE> <INDENT> return max(self.Q[state].values()) <NEW_LINE> <DEDENT> def createQ(self, state): <NEW_LINE> <INDENT> if self.learning and state not in self.Q: <NEW_LINE> <INDENT> self.Q[state] = {} <NEW_LINE> for i in self.valid_actions: <NEW_LINE> <INDENT> self.Q[state][i] = 0.0 <NEW_LINE> <DEDENT> <DEDENT> return <NEW_LINE> <DEDENT> def choose_action(self, state): <NEW_LINE> <INDENT> self.state = state <NEW_LINE> self.next_waypoint = self.planner.next_waypoint() <NEW_LINE> action = None <NEW_LINE> if not self.learning: <NEW_LINE> <INDENT> action = random.choice(self.valid_actions) <NEW_LINE> <DEDENT> elif self.epsilon > random.random(): <NEW_LINE> <INDENT> action = random.choice(self.valid_actions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> max_q = self.get_maxQ(state) <NEW_LINE> list_maxq = [] <NEW_LINE> for i in self.Q[state]: <NEW_LINE> <INDENT> if self.Q[state][i] == max_q: <NEW_LINE> <INDENT> list_maxq.append(i) <NEW_LINE> <DEDENT> <DEDENT> action = random.choice(list_maxq) <NEW_LINE> <DEDENT> return action <NEW_LINE> <DEDENT> def learn(self, state, action, reward): <NEW_LINE> <INDENT> if self.learning: <NEW_LINE> <INDENT> k = reward - self.Q[state][action] <NEW_LINE> self.Q[state][action] += (self.alpha * (k)) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> state = self.build_state() <NEW_LINE> self.createQ(state) <NEW_LINE> action = self.choose_action(state) <NEW_LINE> reward = self.env.act(self, action) <NEW_LINE> self.learn(state, action, reward) <NEW_LINE> return | An agent that learns to drive in the Smartcab world.
This is the object you will be modifying. | 6259906e99fddb7c1ca639ff |