Columns: code (string, 4 to 4.48k chars), docstring (string, 1 to 6.45k chars), _id (string, 24 chars)
class IdentityWebContextAdapter(metaclass=ABCMeta): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def __init__(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def _on_request_init(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def _on_request_end(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def attach_identity_web_util(self, identity_web: 'IdentityWebPython') -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def has_context(self) -> bool: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> @require_request_context <NEW_LINE> def identity_context_data(self) -> 'IdentityContextData': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> @require_request_context <NEW_LINE> def session(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> @require_request_context <NEW_LINE> def clear_session(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @require_request_context <NEW_LINE> def get_value_from_session(self, key: str, default: Any = None) -> Any: <NEW_LINE> <INDENT> return self.session.get(key, default) <NEW_LINE> <DEDENT> @require_request_context <NEW_LINE> def get_request_param(self, key: str, default: Any = None) -> Any: <NEW_LINE> <INDENT> return self._get_request_params_as_dict(key, default) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> @require_request_context <NEW_LINE> def redirect_to_absolute_url(self, absolute_url: str) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> @require_request_context <NEW_LINE> def get_request_params_as_dict(self) -> dict: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> @require_request_context <NEW_LINE> def _deserialize_identity_context_data_from_session(self) -> 'IdentityContextData': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> @require_request_context <NEW_LINE> def _serialize_identity_context_data_to_session(self) -> None: <NEW_LINE> <INDENT> pass
Context Adapter abstract base class. Extend this to enable IdentityWebPython to work within any environment (e.g. Flask, Django, a Windows desktop app, etc.).
62599075627d3e7fe0e087cd
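The docstring above says this abstract adapter is meant to be subclassed per environment. Below is a minimal, hypothetical sketch of the shape such a subclass could take, backed by a plain in-memory dict; it does not import the real base class, the require_request_context decorator, or any framework, and every name in it is illustrative only.

from typing import Any

class InMemoryContextAdapter:
    """Illustrative stand-in: a dict-backed adapter for tests, not a real Flask/Django adapter."""

    def __init__(self) -> None:
        self._session: dict = {}
        self._request_params: dict = {}
        self._identity_web = None

    def attach_identity_web_util(self, identity_web: Any) -> None:
        # keep a back-reference to the IdentityWebPython instance, as the ABC expects
        self._identity_web = identity_web

    def has_context(self) -> bool:
        return True  # an in-memory adapter is always "inside" a request context

    @property
    def session(self) -> dict:
        return self._session

    def clear_session(self) -> None:
        self._session.clear()

    def get_value_from_session(self, key: str, default: Any = None) -> Any:
        return self.session.get(key, default)

    def get_request_params_as_dict(self) -> dict:
        return dict(self._request_params)


adapter = InMemoryContextAdapter()
adapter.session["auth_state"] = "abc123"
print(adapter.get_value_from_session("auth_state"))  # -> abc123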
class DofIndex( ArrayFunc ): <NEW_LINE> <INDENT> def __init__( self, array, iax, index ): <NEW_LINE> <INDENT> assert index.ndim >= 1 <NEW_LINE> assert isinstance( array, numpy.ndarray ) <NEW_LINE> self.array = array <NEW_LINE> assert 0 <= iax < self.array.ndim <NEW_LINE> self.iax = iax <NEW_LINE> self.index = index <NEW_LINE> shape = self.array.shape[:iax] + index.shape + self.array.shape[iax+1:] <NEW_LINE> ArrayFunc.__init__( self, args=[index], shape=shape ) <NEW_LINE> <DEDENT> def evalf( self, index ): <NEW_LINE> <INDENT> item = [ slice(None) ] * self.array.ndim <NEW_LINE> item[self.iax] = index <NEW_LINE> return self.array[ tuple(item) ][_] <NEW_LINE> <DEDENT> def _get( self, i, item ): <NEW_LINE> <INDENT> if self.iax <= i < self.iax + self.index.ndim: <NEW_LINE> <INDENT> index = get( self.index, i - self.iax, item ) <NEW_LINE> return take( self.array, index, self.iax ) <NEW_LINE> <DEDENT> return take( get( self.array, i, item ), self.index, self.iax if i > self.iax else self.iax-1 ) <NEW_LINE> <DEDENT> def _add( self, other ): <NEW_LINE> <INDENT> if isinstance( other, DofIndex ) and self.iax == other.iax and self.index == other.index: <NEW_LINE> <INDENT> return take( self.array + other.array, self.index, self.iax ) <NEW_LINE> <DEDENT> <DEDENT> def _multiply( self, other ): <NEW_LINE> <INDENT> if not _isfunc(other) and other.ndim == 0: <NEW_LINE> <INDENT> return take( self.array * other, self.index, self.iax ) <NEW_LINE> <DEDENT> <DEDENT> def _localgradient( self, ndims ): <NEW_LINE> <INDENT> return _zeros( self.shape + (ndims,) ) <NEW_LINE> <DEDENT> def _concatenate( self, other, axis ): <NEW_LINE> <INDENT> if isinstance( other, DofIndex ) and self.iax == other.iax and self.index == other.index: <NEW_LINE> <INDENT> array = numpy.concatenate( [ self.array, other.array ], axis ) <NEW_LINE> return take( array, self.index, self.iax ) <NEW_LINE> <DEDENT> <DEDENT> def _edit( self, op ): <NEW_LINE> <INDENT> return take( self.array, op(self.index), self.iax )
element-based indexing
625990753d592f4c4edbc7ff
class FaceAPI(PartialAPI): <NEW_LINE> <INDENT> def __init__(self, api): <NEW_LINE> <INDENT> super().__init__(api) <NEW_LINE> self.saved_faces = set() <NEW_LINE> <DEDENT> async def list(self, pretty=False) -> Set[str]: <NEW_LINE> <INDENT> res = self.saved_faces = set(await self._get_j('faces')) <NEW_LINE> if pretty: <NEW_LINE> <INDENT> print_pretty(res) <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> async def delete(self, *, name: Optional[str] = None, delete_all: bool = False): <NEW_LINE> <INDENT> if bool(delete_all) + bool(name) != 1: <NEW_LINE> <INDENT> raise ValueError('set exactly one of `name` or `delete_all`') <NEW_LINE> <DEDENT> await self._delete('faces', **(dict(FaceId=name) if name else {})) <NEW_LINE> <DEDENT> async def start_detection(self): <NEW_LINE> <INDENT> await self._post('faces/detection/start') <NEW_LINE> <DEDENT> async def stop_detection(self): <NEW_LINE> <INDENT> await self._post('faces/detection/stop') <NEW_LINE> <DEDENT> async def start_training(self, face_id: str): <NEW_LINE> <INDENT> return await self._post('faces/training/start', dict(FaceId=face_id)) <NEW_LINE> <DEDENT> async def stop_training(self): <NEW_LINE> <INDENT> return await self._post('faces/training/stop') <NEW_LINE> <DEDENT> async def wait_for_training(self, face_id: str): <NEW_LINE> <INDENT> async def _wait(sp: SubPayload): <NEW_LINE> <INDENT> m = sp.data.message.message <NEW_LINE> log.info(m) <NEW_LINE> return m == FTMsgs.complete.value <NEW_LINE> <DEDENT> ecb = EventCallback(_wait) <NEW_LINE> async with self.api.ws.sub_unsub(SubType.face_training, ecb): <NEW_LINE> <INDENT> await asyncio.gather( self.start_training(face_id), ecb ) <NEW_LINE> <DEDENT> <DEDENT> async def cancel_training(self): <NEW_LINE> <INDENT> return await self._post('faces/training/cancel') <NEW_LINE> <DEDENT> async def start_recognition(self): <NEW_LINE> <INDENT> return await self._post('faces/recognition/start') <NEW_LINE> <DEDENT> async def stop_recognition(self): <NEW_LINE> <INDENT> return await self._post('faces/recognition/stop') <NEW_LINE> <DEDENT> async def stop_all(self): <NEW_LINE> <INDENT> return await asyncio.gather(self.stop_training(), self.cancel_training(), self.stop_recognition())
perform face detection, training, recognition; delete faces
625990757d43ff24874280b6
@implementer(IRecord) <NEW_LINE> class Record(Source, _Convertable): <NEW_LINE> <INDENT> def __init__(self, genre, id_, *args, **kw): <NEW_LINE> <INDENT> super(Record, self).__init__(genre, 'a', args, **kw) <NEW_LINE> self.id = id_ <NEW_LINE> if isinstance(self.genre, str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.genre = EntryType.from_string(genre.lower()) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.genre = EntryType.misc <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def from_object(cls, obj, **kw): <NEW_LINE> <INDENT> data = {} <NEW_LINE> for field in FIELDS: <NEW_LINE> <INDENT> value = getattr(obj, field, None) <NEW_LINE> if value: <NEW_LINE> <INDENT> data[field] = value <NEW_LINE> <DEDENT> <DEDENT> data.update(kw) <NEW_LINE> data.setdefault('title', obj.description) <NEW_LINE> data = sorted(data.items()) <NEW_LINE> return cls(obj.bibtex_type, obj.id, *data) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_string(cls, bibtexString, lowercase=False): <NEW_LINE> <INDENT> return cls.from_bibtex(bibtexString, lowercase=lowercase) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def sep(key): <NEW_LINE> <INDENT> return ' and ' if key in ('author', 'editor') else '; ' <NEW_LINE> <DEDENT> def getall(self, key): <NEW_LINE> <INDENT> res = self.get(key, []) <NEW_LINE> if isinstance(res, str): <NEW_LINE> <INDENT> res = res.split(Record.sep(key)) <NEW_LINE> <DEDENT> return [_f for _f in res if _f] <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> value = collections.OrderedDict.__getitem__(self, key) <NEW_LINE> if not isinstance(value, (tuple, list)): <NEW_LINE> <INDENT> value = [value] <NEW_LINE> <DEDENT> return Record.sep(key).join(filter(None, value)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> keys = sorted(self, key=lambda k: (k not in FIELDS_SET, k)) <NEW_LINE> m = max([0] + list(map(len, keys))) <NEW_LINE> fields = (" %s = {%s}" % (k.ljust(m), self[k]) for k in keys) <NEW_LINE> return "@%s{%s,\n%s\n}" % ( getattr(self.genre, 'value', self.genre), self.id, ",\n".join(fields))
A BibTeX record is an ordered dict with two special properties - id and genre. To overcome BibTeX's limitation of a single value per field, we allow fields, i.e. values of the dict, to be iterables of strings as well. Note that to support this use case comprehensively, the various methods of retrieving values behave differently. I.e. values will be - joined to a string in __getitem__, - retrievable as assigned with get (i.e. only use get if you know how a value was assigned), - retrievable as a list with getall .. note:: Unknown genres are converted to "misc".
6259907555399d3f05627e5e
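The Record docstring above notes that multi-valued fields come back joined from __getitem__, as assigned from get, and as a list from getall. The toy class below is a self-contained sketch of just that retrieval behaviour on top of OrderedDict; it is not the real Record class and omits everything else (genre handling, BibTeX parsing, serialization).

import collections

class MiniRecord(collections.OrderedDict):
    """Toy illustration of the joined-vs-list retrieval described above."""

    @staticmethod
    def sep(key):
        return ' and ' if key in ('author', 'editor') else '; '

    def __getitem__(self, key):
        value = collections.OrderedDict.__getitem__(self, key)
        if not isinstance(value, (tuple, list)):
            value = [value]
        return MiniRecord.sep(key).join(filter(None, value))

    def getall(self, key):
        res = self.get(key, [])
        if isinstance(res, str):
            res = res.split(MiniRecord.sep(key))
        return [v for v in res if v]


rec = MiniRecord(author=['Doe, Jane', 'Roe, Richard'], title='An Example')
print(rec['author'])         # 'Doe, Jane and Roe, Richard'  (joined string)
print(rec.getall('author'))  # ['Doe, Jane', 'Roe, Richard'] (list)
print(rec.get('author'))     # the raw list, exactly as assigned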
class DecoderStack(tf.compat.v1.layers.Layer): <NEW_LINE> <INDENT> def __init__(self, params): <NEW_LINE> <INDENT> if params["couple_encoder_decoder"]: <NEW_LINE> <INDENT> name = "encoder" <NEW_LINE> with tf.compat.v1.variable_scope( name, reuse=tf.compat.v1.AUTO_REUSE) as scope: <NEW_LINE> <INDENT> super(DecoderStack, self).__init__(name=name, _scope=scope) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> name = "decoder" <NEW_LINE> super(DecoderStack, self).__init__(name=name) <NEW_LINE> <DEDENT> self.params = params <NEW_LINE> if params["norm_type"] == "prenorm": <NEW_LINE> <INDENT> decoder_class = PrenormDecoderLayer <NEW_LINE> <DEDENT> elif params["norm_type"] == "postnorm": <NEW_LINE> <INDENT> decoder_class = PostnormDecoderLayer <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError( "Norm type {} is not implemented".format(params["norm_type"])) <NEW_LINE> <DEDENT> if self.params.get("num_decoder_layers", None) is not None: <NEW_LINE> <INDENT> num_hidden_layers = self.params["num_decoder_layers"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> num_hidden_layers = self.params["num_hidden_layers"] <NEW_LINE> <DEDENT> self.decoder_layers = [ decoder_class( self.params["hidden_size"], self.params["intermediate_size"], utils.get_activation(self.params["hidden_act"]), self.params["attention_probs_dropout_prob"], self.params["hidden_dropout_prob"], self.params["initializer_range"], self.params["num_attention_heads"], self.params["use_bias"], name="layer_%d" % layer_idx) for layer_idx in range(num_hidden_layers) ] <NEW_LINE> self.layer_norm = utils.NormLayer() <NEW_LINE> <DEDENT> @property <NEW_LINE> def trainable_weights(self): <NEW_LINE> <INDENT> tvar_list = sum( [layer.trainable_weights for layer in self.decoder_layers], []) + self.layer_norm.trainable_weights <NEW_LINE> self._trainable_weights = list({v.name: v for v in tvar_list}.values()) <NEW_LINE> return self._trainable_weights <NEW_LINE> <DEDENT> def operation(self, decoder_inputs, self_attention_mask, encoder_outputs, encoder_mask, cache=None, decode_i=None, training=None): <NEW_LINE> <INDENT> attention_mask = tf.expand_dims(tf.expand_dims(encoder_mask, 1), 1) <NEW_LINE> if self.params["norm_type"] == "postnorm": <NEW_LINE> <INDENT> decoder_inputs = self.layer_norm.operation(decoder_inputs) <NEW_LINE> <DEDENT> layer_output = decoder_inputs <NEW_LINE> for layer in self.decoder_layers: <NEW_LINE> <INDENT> layer_cache = cache[layer.name] if cache is not None else None <NEW_LINE> layer_output = layer.operation( layer_output, encoder_outputs, self_attention_mask, attention_mask, layer_cache, decode_i, training) <NEW_LINE> <DEDENT> if self.params["norm_type"] == "prenorm": <NEW_LINE> <INDENT> layer_output = self.layer_norm(layer_output) <NEW_LINE> <DEDENT> return layer_output
Transformer decoder stack.
62599075fff4ab517ebcf15c
class Meta: <NEW_LINE> <INDENT> model = Flight <NEW_LINE> fields = ('id', 'pickup', 'destination', 'departure_date', 'return_date', 'no_travellers', 'passport_number', )
Meta class to map the serializer's fields to the model fields.
625990757d847024c075dd1f
class Config(object): <NEW_LINE> <INDENT> DEBUG = False <NEW_LINE> CSRF_ENABLED = True <NEW_LINE> SECRET_KEY = getenv('SECRET', 'dodo@N9shiv:)()') <NEW_LINE> SQLALCHEMY_DATABASE_URI = getenv('DATABASE_URL', DEFAULT_DB_URL) <NEW_LINE> SQLALCHEMY_TRACK_MODIFICATIONS = False <NEW_LINE> JWT_AUTH_URL_RULE = '/api/v1/login' <NEW_LINE> JWT_AUTH_USERNAME_KEY = 'email' <NEW_LINE> JWT_EXPIRATION_DELTA = timedelta(hours=24)
Parent Babe configuration class.
62599075460517430c432cfb
class TestReportItem(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testReportItem(self): <NEW_LINE> <INDENT> model = thirdwatch_api.models.report_item.ReportItem()
ReportItem unit test stubs
625990751f5feb6acb16453a
class SimpleSingleFieldHandler(SingleFieldHandler): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _get(cls, event): <NEW_LINE> <INDENT> return event.get(cls.fieldnames[0], '') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _patch(cls, event, value): <NEW_LINE> <INDENT> event[cls.fieldnames[0]] = value
Handler for single-string details that require no special processing.
6259907563b5f9789fe86aaa
class SUSAN(FSLCommand): <NEW_LINE> <INDENT> _cmd = 'susan' <NEW_LINE> input_spec = SUSANInputSpec <NEW_LINE> output_spec = SUSANOutputSpec <NEW_LINE> def _format_arg(self, name, spec, value): <NEW_LINE> <INDENT> if name == 'fwhm': <NEW_LINE> <INDENT> return spec.argstr % (float(value) / np.sqrt(8 * np.log(2))) <NEW_LINE> <DEDENT> if name == 'usans': <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return '0' <NEW_LINE> <DEDENT> arglist = [str(len(value))] <NEW_LINE> for filename, thresh in value: <NEW_LINE> <INDENT> arglist.extend([filename, '%.10f' % thresh]) <NEW_LINE> <DEDENT> return ' '.join(arglist) <NEW_LINE> <DEDENT> return super(SUSAN, self)._format_arg(name, spec, value) <NEW_LINE> <DEDENT> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = self._outputs().get() <NEW_LINE> out_file = self.inputs.out_file <NEW_LINE> if not isdefined(out_file): <NEW_LINE> <INDENT> out_file = self._gen_fname(self.inputs.in_file, suffix='_smooth') <NEW_LINE> <DEDENT> outputs['smoothed_file'] = out_file <NEW_LINE> return outputs <NEW_LINE> <DEDENT> def _gen_filename(self, name): <NEW_LINE> <INDENT> if name == 'out_file': <NEW_LINE> <INDENT> return self._list_outputs()['smoothed_file'] <NEW_LINE> <DEDENT> return None
use FSL SUSAN to perform smoothing Examples -------- >>> from nipype.interfaces import fsl >>> from nipype.testing import example_data >>> print anatfile #doctest: +SKIP anatomical.nii #doctest: +SKIP >>> sus = fsl.SUSAN() >>> sus.inputs.in_file = example_data('structural.nii') >>> sus.inputs.brightness_threshold = 2000.0 >>> sus.inputs.fwhm = 8.0 >>> result = sus.run() #doctest: +SKIP
62599075e1aae11d1e7cf4b1
class KalmanFilter(object): <NEW_LINE> <INDENT> def __init__(self, dof=6): <NEW_LINE> <INDENT> self.dof = dof <NEW_LINE> self.A = np.eye(dof) <NEW_LINE> self.H = np.eye(dof) <NEW_LINE> self.B = 0 <NEW_LINE> self.Q = np.zeros(shape=(dof, dof)) <NEW_LINE> self.R = np.eye(dof) / 50 <NEW_LINE> self.P = np.eye(dof) <NEW_LINE> self.x = np.zeros((dof, 1)) <NEW_LINE> <DEDENT> def predict(self, u=0): <NEW_LINE> <INDENT> self.x = np.dot(self.A, self.x) + np.dot(self.B, u) <NEW_LINE> self.P = np.dot(np.dot(self.A, self.P), self.A.T) + self.Q <NEW_LINE> return self.x <NEW_LINE> <DEDENT> def update(self, z): <NEW_LINE> <INDENT> y = z - np.dot(self.H, self.x) <NEW_LINE> intermediate_mat = np.dot(self.P, self.H.T) <NEW_LINE> S = self.R + np.dot(self.H, intermediate_mat) <NEW_LINE> K = np.dot(intermediate_mat, np.linalg.inv(S)) <NEW_LINE> self.x = self.x + np.dot(K, y) <NEW_LINE> self.P = self.P - np.dot(np.dot(K, self.H), self.P)
Simple Multi-variate Kalman Filter.
62599075a17c0f6771d5d84f
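The KalmanFilter above fixes A = H = I, B = 0 and small diagonal noise, so predict/update reduce to x = A x, P = A P A^T + Q, then K = P H^T (H P H^T + R)^-1, x = x + K (z - H x), P = P - K H P. Below is a self-contained restatement of those two steps, run on a few fabricated measurements for a 2-DOF state; variable names mirror the class fields.

import numpy as np

dof = 2
A = np.eye(dof); H = np.eye(dof)
Q = np.zeros((dof, dof)); R = np.eye(dof) / 50
P = np.eye(dof); x = np.zeros((dof, 1))

for z in ([1.0, 0.5], [1.1, 0.4], [0.9, 0.6]):       # made-up noisy measurements
    z = np.array(z).reshape(dof, 1)
    # predict (B = 0 and u = 0, so the control term vanishes)
    x = A @ x
    P = A @ P @ A.T + Q
    # update
    y = z - H @ x
    S = R + H @ P @ H.T
    K = P @ H.T @ np.linalg.inv(S)
    x = x + K @ y
    P = P - K @ H @ P
    print(x.ravel())   # the state estimate moves toward the measurements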
class Error(): <NEW_LINE> <INDENT> def __init__(self, code, message): <NEW_LINE> <INDENT> self.code = code <NEW_LINE> self.message = message
Just a class for describing errors, so that it is more convenient to hand them back to the receiver.
625990754527f215b58eb643
class SlotsAndDict(object): <NEW_LINE> <INDENT> __slots__ = ('__dict__', 'attribute') <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.attribute = "hi"
To allow dynamic assignment, add __dict__ to __slots__. This kills a lot of the time and memory benefits of __slots__, but is still faster than a pure __dict__.
6259907592d797404e3897fe
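The SlotsAndDict docstring describes the trade-off of listing __dict__ inside __slots__. Below is a tiny runnable check of the behaviour it claims: dynamic assignment works once __dict__ is slotted and fails without it.

class SlotsOnly:
    __slots__ = ('attribute',)

class SlotsPlusDict:
    __slots__ = ('__dict__', 'attribute')

a = SlotsPlusDict()
a.attribute = "hi"
a.anything_else = 42          # allowed: lands in the per-instance __dict__
print(a.__dict__)             # {'anything_else': 42}

b = SlotsOnly()
b.attribute = "hi"
try:
    b.anything_else = 42      # rejected: no __dict__ slot to hold it
except AttributeError as exc:
    print(exc)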
class LoreDeprecationTests(TestCase): <NEW_LINE> <INDENT> if _PY3: <NEW_LINE> <INDENT> skip = "Lore is not being ported to Python 3." <NEW_LINE> <DEDENT> def test_loreDeprecation(self): <NEW_LINE> <INDENT> reflect.namedAny("twisted.lore") <NEW_LINE> warningsShown = self.flushWarnings() <NEW_LINE> self.assertEqual(1, len(warningsShown)) <NEW_LINE> self.assertEqual( "twisted.lore was deprecated in Twisted 14.0.0: " "Use Sphinx instead.", warningsShown[0]['message'])
Contains tests to make sure Lore is marked as deprecated.
62599075f548e778e596ced5
@inherit_doc <NEW_LINE> class DefaultParamsWriter(MLWriter): <NEW_LINE> <INDENT> def __init__(self, instance: "Params"): <NEW_LINE> <INDENT> super(DefaultParamsWriter, self).__init__() <NEW_LINE> self.instance = instance <NEW_LINE> <DEDENT> def saveImpl(self, path: str) -> None: <NEW_LINE> <INDENT> DefaultParamsWriter.saveMetadata(self.instance, path, self.sc) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def extractJsonParams(instance: "Params", skipParams: Sequence[str]) -> Dict[str, Any]: <NEW_LINE> <INDENT> paramMap = instance.extractParamMap() <NEW_LINE> jsonParams = { param.name: value for param, value in paramMap.items() if param.name not in skipParams } <NEW_LINE> return jsonParams <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def saveMetadata( instance: "Params", path: str, sc: SparkContext, extraMetadata: Optional[Dict[str, Any]] = None, paramMap: Optional[Dict[str, Any]] = None, ) -> None: <NEW_LINE> <INDENT> metadataPath = os.path.join(path, "metadata") <NEW_LINE> metadataJson = DefaultParamsWriter._get_metadata_to_save( instance, sc, extraMetadata, paramMap ) <NEW_LINE> sc.parallelize([metadataJson], 1).saveAsTextFile(metadataPath) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _get_metadata_to_save( instance: "Params", sc: SparkContext, extraMetadata: Optional[Dict[str, Any]] = None, paramMap: Optional[Dict[str, Any]] = None, ) -> str: <NEW_LINE> <INDENT> uid = instance.uid <NEW_LINE> cls = instance.__module__ + "." + instance.__class__.__name__ <NEW_LINE> params = instance._paramMap <NEW_LINE> jsonParams = {} <NEW_LINE> if paramMap is not None: <NEW_LINE> <INDENT> jsonParams = paramMap <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for p in params: <NEW_LINE> <INDENT> jsonParams[p.name] = params[p] <NEW_LINE> <DEDENT> <DEDENT> jsonDefaultParams = {} <NEW_LINE> for p in instance._defaultParamMap: <NEW_LINE> <INDENT> jsonDefaultParams[p.name] = instance._defaultParamMap[p] <NEW_LINE> <DEDENT> basicMetadata = { "class": cls, "timestamp": int(round(time.time() * 1000)), "sparkVersion": sc.version, "uid": uid, "paramMap": jsonParams, "defaultParamMap": jsonDefaultParams, } <NEW_LINE> if extraMetadata is not None: <NEW_LINE> <INDENT> basicMetadata.update(extraMetadata) <NEW_LINE> <DEDENT> return json.dumps(basicMetadata, separators=(",", ":"))
Specialization of :py:class:`MLWriter` for :py:class:`Params` types. Class for writing Estimators and Transformers whose parameters are JSON-serializable. .. versionadded:: 2.3.0
625990752c8b7c6e89bd512e
class _ConstantVacuumPermittivity(CommandManager): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(_ConstantVacuumPermittivity, self).__init__() <NEW_LINE> self.pixmap = "fem-solver-analysis-thermomechanical.svg" <NEW_LINE> self.menutext = Qt.QT_TRANSLATE_NOOP( "FEM_ConstantVacuumPermittivity", "Constant vacuum permittivity" ) <NEW_LINE> self.tooltip = Qt.QT_TRANSLATE_NOOP( "FEM_ConstantVacuumPermittivity", "Creates a FEM constant vacuum permittivity to overwrite standard value" ) <NEW_LINE> self.is_active = "with_document" <NEW_LINE> self.is_active = "with_analysis" <NEW_LINE> self.do_activated = "add_obj_on_gui_noset_edit"
The FEM_ConstantVacuumPermittivity command definition
62599075dc8b845886d54f01
class Normalize(object): <NEW_LINE> <INDENT> def __init__(self, mean, std): <NEW_LINE> <INDENT> self.mean = mean <NEW_LINE> self.std = std <NEW_LINE> <DEDENT> def __call__(self, image): <NEW_LINE> <INDENT> image = (image - self.mean) / self.std <NEW_LINE> return image
Normalize ndarray images with the given channel statistics. Args: mean: Mean of RGB channels std: Standard deviation of RGB channels
62599075091ae3566870657f
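The Normalize transform above is plain per-channel standardisation: subtract the mean, divide by the std. Below is a minimal usage sketch with fabricated statistics; the class body is repeated so the snippet runs on its own.

import numpy as np

class Normalize:
    """Same arithmetic as above: subtract the channel mean, divide by the channel std."""
    def __init__(self, mean, std):
        self.mean = mean
        self.std = std
    def __call__(self, image):
        return (image - self.mean) / self.std

# Fabricated RGB statistics, broadcast over an H x W x 3 image of uniform [0, 1) noise.
mean = np.array([0.5, 0.5, 0.5])
std = np.array([0.25, 0.25, 0.25])
image = np.random.rand(4, 4, 3)

normalized = Normalize(mean, std)(image)
print(normalized.mean(axis=(0, 1)))   # per-channel means, roughly 0 for this input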
class KubeBackend(BaseBackend, KubeMixIn): <NEW_LINE> <INDENT> kubectl_command = "kubectl" <NEW_LINE> env_key = "_env" <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.secret_exists = None <NEW_LINE> self.workflow = kwargs.get("workflow") <NEW_LINE> self.switch_kube_context() <NEW_LINE> self._check_kube_context() <NEW_LINE> self._check_kube_namespace() <NEW_LINE> <DEDENT> @property <NEW_LINE> def namespace(self): <NEW_LINE> <INDENT> return f"compose-flow-{self.workflow.args.profile}" <NEW_LINE> <DEDENT> def execute(self, command: str, **kwargs): <NEW_LINE> <INDENT> env = os.environ <NEW_LINE> return shell.execute(command, env, **kwargs) <NEW_LINE> <DEDENT> def ls(self) -> list: <NEW_LINE> <INDENT> return self._list_secrets() <NEW_LINE> <DEDENT> def read(self, name: str) -> str: <NEW_LINE> <INDENT> return self._read_secret_env(name) <NEW_LINE> <DEDENT> def rm(self, name: str) -> None: <NEW_LINE> <INDENT> self._remove_secret(name) <NEW_LINE> <DEDENT> def write(self, name: str, path) -> None: <NEW_LINE> <INDENT> return self._write_secret_env(name, path)
Manages native `kubectl secret` storage
62599075796e427e538500c0
class Lambda(pm.Deterministic): <NEW_LINE> <INDENT> def __init__(self, name, lam_fun, doc='A Deterministic made from an anonymous function', *args, **kwds): <NEW_LINE> <INDENT> (parent_names, parent_values) = get_signature(lam_fun) <NEW_LINE> if parent_values is None: <NEW_LINE> <INDENT> raise ValueError( '%s: All arguments to lam_fun must have default values.' % name) <NEW_LINE> <DEDENT> if not len(parent_names) == len(parent_values): <NEW_LINE> <INDENT> raise ValueError( '%s: All arguments to lam_fun must have default values.' % name) <NEW_LINE> <DEDENT> parents = dict(zip(parent_names[-len(parent_values):], parent_values)) <NEW_LINE> pm.Deterministic.__init__( self, eval=lam_fun, name=name, parents=parents, doc=doc, *args, **kwds)
L = Lambda(name, lambda p1=p1, p2=p2: f(p1, p2)[, doc, dtype=None, trace=True, cache_depth=2, plot=None]) Converts second argument, an anonymous function, into a Deterministic object with specified name. :Parameters: name : string The name of the deteriministic object to be created. lambda : function The function from which the deterministic object should be created. All arguments must be given default values! p1, p2, ... : any The parameters of lambda. other parameters : See docstring of Deterministic. :Note: Will work even if argument 'lambda' is a named function (defined using def) :SeeAlso: Deterministic, Logit, StukelLogit, StukelInvLogit, Logit, InvLogit, LinearCombination, Index
625990752c8b7c6e89bd512f
class DgmTypeEffect(models.Model): <NEW_LINE> <INDENT> type = models.ForeignKey(InvType) <NEW_LINE> effect = models.ForeignKey(DgmEffect) <NEW_LINE> is_default = models.BooleanField(default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'eve_db' <NEW_LINE> ordering = ['id'] <NEW_LINE> verbose_name = 'Inventory Type Effect' <NEW_LINE> verbose_name_plural = 'Inventory Type Effect' <NEW_LINE> unique_together = ('type', 'effect') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.type <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__unicode__()
Effects related to items. Effects are like boolean flags - if an item has an effect listed, it's subject to this effect with the specified parameters, listed as per the DgmEffect. CCP Table: dgmTypeEffects CCP Primary key: ("typeID" smallint(6), "effectID" smallint(6))
625990755fcc89381b266dfc
class EventListener(object): <NEW_LINE> <INDENT> def __init__(self, state=state): <NEW_LINE> <INDENT> self.state = state <NEW_LINE> self.connection = establish_connection() <NEW_LINE> self.receiver = EventReceiver(self.connection, handlers={"*": self.state.event}) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.receiver.capture()
Capture events sent by messages and store them in memory.
625990758a43f66fc4bf3adc
class TestFileSignature(unittest.TestCase): <NEW_LINE> <INDENT> def test_form(self): <NEW_LINE> <INDENT> self.assertEqual(len(store.MAGIC), 8) <NEW_LINE> self.assertEqual(b"\211KAS\r\n\032\n", store.MAGIC)
Checks the file signature is what we think it should be.
625990758a349b6b43687ba1
class InvalidWorkpieceDimensions(PolyshaperError): <NEW_LINE> <INDENT> def __init__(self, machine_width, machine_height): <NEW_LINE> <INDENT> PolyshaperError.__init__(self, 4) <NEW_LINE> self.machine_width = machine_width <NEW_LINE> self.machine_height = machine_height <NEW_LINE> <DEDENT> def to_string(self): <NEW_LINE> <INDENT> return (_("Piece too big: maximum allowed dimensions for the selected machine is ") + "{:.1f}X{:.1f}".format(self.machine_width, self.machine_height))
The exception generated when the workpiece does not fit the machine
6259907532920d7e50bc798f
class ResponseData(BaseResponseData): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(DiagnosticSessionControl) <NEW_LINE> self.session_echo = None <NEW_LINE> self.session_param_records = None <NEW_LINE> self.p2_server_max = None <NEW_LINE> self.p2_star_server_max = None
.. data:: session_echo Request subfunction echoed back by the server .. data:: session_param_records Raw session parameter records. Data given by the server. For 2006 configurations, this data is manufacturer specific. For the 2013 version and above, this data corresponds to the P2 and P2* timing requirements. .. data:: p2_server_max Default P2 max timing supported by the server for the activated diagnostic session. Applicable for the 2013 version and above. Value in seconds. .. data:: p2_star_server_max Default P2* (NRC 0x78) max timing supported by the server for the activated diagnostic session. Applicable for the 2013 version and above. Value in seconds.
62599075435de62698e9d750
class ApiResponseForexPairs(object): <NEW_LINE> <INDENT> swagger_types = { 'pairs': 'list[ForexPair]' } <NEW_LINE> attribute_map = { 'pairs': 'pairs' } <NEW_LINE> def __init__(self, pairs=None): <NEW_LINE> <INDENT> self._pairs = None <NEW_LINE> self.discriminator = None <NEW_LINE> if pairs is not None: <NEW_LINE> <INDENT> self.pairs = pairs <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def pairs(self): <NEW_LINE> <INDENT> return self._pairs <NEW_LINE> <DEDENT> @property <NEW_LINE> def pairs_dict(self): <NEW_LINE> <INDENT> result = None <NEW_LINE> value = self.pairs <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = { 'pairs': value } <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> @pairs.setter <NEW_LINE> def pairs(self, pairs): <NEW_LINE> <INDENT> self._pairs = pairs <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ApiResponseForexPairs): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
625990754428ac0f6e659e77
class Logger: <NEW_LINE> <INDENT> def __init__(self,logfilename, debugfilename): <NEW_LINE> <INDENT> self.logfilename = logfilename <NEW_LINE> self.logfile = open(logfilename,'w') <NEW_LINE> self.logfile.write('<?xml version="1.0" encoding="UTF-8"?>\n') <NEW_LINE> self.logfile.write('<cli-logger machine="%s">\n\n' % socket.gethostname()) <NEW_LINE> self.buffer = '' <NEW_LINE> self.cwd = os.getcwd() <NEW_LINE> self.state = BeginState(self) <NEW_LINE> self.debugfilename = debugfilename <NEW_LINE> self.isLinux = False <NEW_LINE> if self.debugfilename is not None: <NEW_LINE> <INDENT> self.debugfile = codecs.open(debugfilename, encoding="utf-8", mode="w") <NEW_LINE> self.debugfile.write('<?xml version="1.0" encoding="UTF-8"?>\n') <NEW_LINE> self.debugfile.write("<cli-debug>\n") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.debugfile = None <NEW_LINE> <DEDENT> <DEDENT> def done(self): <NEW_LINE> <INDENT> self.logfile.write("]]></result>\n</cli-logger-entry>\n</cli-logger>\n") <NEW_LINE> self.logfile.close() <NEW_LINE> if self.debugfilename is not None: <NEW_LINE> <INDENT> self.debugfile.write("</cli-debug>") <NEW_LINE> <DEDENT> return self.raw_to_xml() <NEW_LINE> <DEDENT> def raw_to_xml(self): <NEW_LINE> <INDENT> xmlfilename = self.logfilename.replace('.raw','.xml') <NEW_LINE> fout = codecs.open(xmlfilename, encoding="utf-8", mode="w") <NEW_LINE> for line in codecs.open(self.logfilename,encoding="utf-8"): <NEW_LINE> <INDENT> fout.write(util.sanitize(line)) <NEW_LINE> <DEDENT> fout.close() <NEW_LINE> return xmlfilename <NEW_LINE> <DEDENT> def input_from_shell(self,buf): <NEW_LINE> <INDENT> if self.debugfile: <NEW_LINE> <INDENT> self.debug_log(buf,True) <NEW_LINE> <DEDENT> self.state.input_from_shell(buf) <NEW_LINE> self.state = self.state.next_state() <NEW_LINE> <DEDENT> def input_from_user(self,buf): <NEW_LINE> <INDENT> if self.debugfile: <NEW_LINE> <INDENT> self.debug_log(buf,False) <NEW_LINE> <DEDENT> self.state.input_from_user(buf) <NEW_LINE> self.state = self.state.next_state() <NEW_LINE> <DEDENT> def write(self,buf): <NEW_LINE> <INDENT> self.logfile.write(buf) <NEW_LINE> <DEDENT> def debug_log(self, buf, shell): <NEW_LINE> <INDENT> if shell == True: <NEW_LINE> <INDENT> self.debugfile.write("<shell><![CDATA[%s]]></shell>\n" % buf) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.debugfile.write("<user><![CDATA[%s]]></user>\n" % buf)
This class is responsible for writing the XML log file
625990752ae34c7f260aca2d
class Oscillator: <NEW_LINE> <INDENT> PATTERN = [NEUTRAL, PLUS, NEUTRAL, MINUS] <NEW_LINE> def __init__(self, frequency=3**9, timer=Timer, debug=False): <NEW_LINE> <INDENT> self._period = 1 / frequency / len(Oscillator.PATTERN) <NEW_LINE> self._output = ConnectionPoint(ConnectionPoint.WRITER) <NEW_LINE> self._idx = 0 <NEW_LINE> self._debug = debug <NEW_LINE> self._timer_class = timer <NEW_LINE> self.Update() <NEW_LINE> <DEDENT> def Update(self): <NEW_LINE> <INDENT> self._timer_class(self._period, self.Update).start() <NEW_LINE> self._output.SetStateWrite(Oscillator.PATTERN[self._idx]) <NEW_LINE> self._idx = (self._idx + 1) % len(Oscillator.PATTERN) <NEW_LINE> if (self._debug): <NEW_LINE> <INDENT> print(STATE_NAME[self._output.GetState()]) <NEW_LINE> <DEDENT> <DEDENT> def SetOutputWire(self, wire): <NEW_LINE> <INDENT> wire.Connect(self._output) <NEW_LINE> <DEDENT> def ReadOutput(self): <NEW_LINE> <INDENT> return self._output.GetState() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '%s<period: %d, state: %s>' % (type(self).__name__, self._period, STATE_NAME[self.ReadOutput()])
A clock that oscillates between (-), (0), and (+) such that every other tick is (0), and the remaining values alternate between (-) and (+). Ex. (0), (+), (0), (-), (0), (+), (0), (-), (0), ... Default frequency is 19683 (3 ** 9) Hz.
62599075cc0a2c111447c775
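The Oscillator docstring spells out the repeating tick pattern (0), (+), (0), (-) and the default 19683 Hz frequency. The snippet below is a dependency-free sketch of just that cycling and the derived per-step period, leaving out the timers, wires, and connection points of the class above; NEUTRAL, PLUS, and MINUS are stand-in constants.

from itertools import islice, cycle

# Stand-ins for the NEUTRAL / PLUS / MINUS states used by the class above.
NEUTRAL, PLUS, MINUS = '0', '+', '-'
PATTERN = [NEUTRAL, PLUS, NEUTRAL, MINUS]

frequency = 3 ** 9                         # 19683 Hz, the documented default
period = 1 / frequency / len(PATTERN)      # seconds between successive state changes

ticks = list(islice(cycle(PATTERN), 9))
print(ticks)    # ['0', '+', '0', '-', '0', '+', '0', '-', '0']
print(period)   # ~1.27e-05 s per pattern step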
class ImageTestClass(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.new_user = User(username = "beryl", email = "[email protected]", password = "show001") <NEW_LINE> self.new_user.save() <NEW_LINE> self.new_profile = Profile(profile_pic = '/posts', bio = "hello world", contacts = "[email protected]", user = self.new_user) <NEW_LINE> self.new_profile.save() <NEW_LINE> self.new_image = Image(name = 'chicken', ingredient = 'onions', country = 'Kenya', image = '/posts', recipe = 'hello new world', posted = '05/30/2020', profile = self.new_profile) <NEW_LINE> self.new_image.save_image() <NEW_LINE> <DEDENT> def test_instance(self): <NEW_LINE> <INDENT> self.assertTrue(isinstance(self.new_image, Image)) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> Image.objects.all().delete() <NEW_LINE> <DEDENT> def test_save_image(self): <NEW_LINE> <INDENT> self.new_image.save_image() <NEW_LINE> images = Image.objects.all() <NEW_LINE> self.assertTrue(len(images) > 0) <NEW_LINE> <DEDENT> def test_update_recipe(self): <NEW_LINE> <INDENT> self.new_image.save_image() <NEW_LINE> kwargs = {'image':'/posts', 'recipe':'hello new world'} <NEW_LINE> Image.update_recipe(self.image.id, **kwargs) <NEW_LINE> self.assertEqual("just a recipe", self.image.recipe) <NEW_LINE> <DEDENT> def test_delete_image(self): <NEW_LINE> <INDENT> self.new_image.save_image() <NEW_LINE> self.new_image.delete_image() <NEW_LINE> images = Image.objects.all() <NEW_LINE> self.assertTrue(len(images) == 0)
Test case for the Image class and its behaviours.
625990752c8b7c6e89bd5130
class Gestures: <NEW_LINE> <INDENT> def __init__(self, device): <NEW_LINE> <INDENT> self.device = device <NEW_LINE> self.gestures = {} <NEW_LINE> self.params = {} <NEW_LINE> self.specs = {} <NEW_LINE> index = 0 <NEW_LINE> next_gesture_index = 0 <NEW_LINE> field_high = 0x00 <NEW_LINE> while field_high != 0x01: <NEW_LINE> <INDENT> fields = feature_request(device, FEATURE.GESTURE_2, 0x00, index >> 8, index & 0xFF) <NEW_LINE> if not fields: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> for offset in range(8): <NEW_LINE> <INDENT> field_high = fields[offset * 2] <NEW_LINE> field_low = fields[offset * 2 + 1] <NEW_LINE> if field_high == 0x1: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> elif field_high & 0x80: <NEW_LINE> <INDENT> gesture = Gesture(device, field_low, field_high, next_gesture_index) <NEW_LINE> next_gesture_index = next_gesture_index if gesture.index is None else next_gesture_index + 1 <NEW_LINE> self.gestures[gesture.gesture] = gesture <NEW_LINE> <DEDENT> elif field_high & 0xF0 == 0x30 or field_high & 0xF0 == 0x20: <NEW_LINE> <INDENT> param = Param(device, field_low, field_high) <NEW_LINE> self.params[param.param] = param <NEW_LINE> <DEDENT> elif field_high == 0x04: <NEW_LINE> <INDENT> if field_low != 0x00: <NEW_LINE> <INDENT> _log.error(f'Unimplemented GESTURE_2 grouping {field_low} {field_high} found.') <NEW_LINE> <DEDENT> <DEDENT> elif field_high & 0xF0 == 0x40: <NEW_LINE> <INDENT> spec = Spec(device, field_low, field_high) <NEW_LINE> self.specs[spec.spec] = spec <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _log.warn(f'Unimplemented GESTURE_2 field {field_low} {field_high} found.') <NEW_LINE> <DEDENT> index += 1 <NEW_LINE> <DEDENT> <DEDENT> device._gestures = self <NEW_LINE> <DEDENT> def gesture(self, gesture): <NEW_LINE> <INDENT> return self.gestures.get(gesture, None) <NEW_LINE> <DEDENT> def gesture_enabled(self, gesture): <NEW_LINE> <INDENT> g = self.gestures.get(gesture, None) <NEW_LINE> return g.enabled(self.device) if g else None <NEW_LINE> <DEDENT> def enable_gesture(self, gesture): <NEW_LINE> <INDENT> g = self.gestures.get(gesture, None) <NEW_LINE> return g.set(self.device, True) if g else None <NEW_LINE> <DEDENT> def disable_gesture(self, gesture): <NEW_LINE> <INDENT> g = self.gestures.get(gesture, None) <NEW_LINE> return g.set(self.device, False) if g else None <NEW_LINE> <DEDENT> def param(self, param): <NEW_LINE> <INDENT> return self.params.get(param, None) <NEW_LINE> <DEDENT> def get_param(self, param): <NEW_LINE> <INDENT> g = self.params.get(param, None) <NEW_LINE> return g.get(self.device) if g else None <NEW_LINE> <DEDENT> def set_param(self, param, value): <NEW_LINE> <INDENT> g = self.params.get(param, None) <NEW_LINE> return g.set(self.device, value) if g else None
Information about the gestures that a device supports. Right now only some information fields are supported. WARNING: Assumes that parameters are always global, which is not the case.
62599075dc8b845886d54f03
class ProblemaNreinas(blocales.Problema): <NEW_LINE> <INDENT> def __init__(self, n=8): <NEW_LINE> <INDENT> self.n = n <NEW_LINE> <DEDENT> def estado_aleatorio(self): <NEW_LINE> <INDENT> estado = list(range(self.n)) <NEW_LINE> shuffle(estado) <NEW_LINE> return tuple(estado) <NEW_LINE> <DEDENT> def vecinos(self, estado): <NEW_LINE> <INDENT> edo_lista = list(estado) <NEW_LINE> for i, j in permutations(range(self.n), 2): <NEW_LINE> <INDENT> edo_lista[i], edo_lista[j] = edo_lista[j], edo_lista[i] <NEW_LINE> yield tuple(edo_lista) <NEW_LINE> edo_lista[i], edo_lista[j] = edo_lista[j], edo_lista[i] <NEW_LINE> <DEDENT> <DEDENT> def vecino_aleatorio(self, estado): <NEW_LINE> <INDENT> vecino = list(estado) <NEW_LINE> i, j = sample(range(self.n), 2) <NEW_LINE> vecino[i], vecino[j] = vecino[j], vecino[i] <NEW_LINE> return tuple(vecino) <NEW_LINE> <DEDENT> def costo(self, estado): <NEW_LINE> <INDENT> c = 0 <NEW_LINE> for i, j in combinations(range(self.n), 2): <NEW_LINE> <INDENT> if estado[i] == estado[j] or abs(estado[i] - estado[j]) == abs(j-i): <NEW_LINE> <INDENT> c += 1 <NEW_LINE> <DEDENT> <DEDENT> return c
The N-queens problem as a local search; it is initialized as entorno = ProblemaNreinas(n), where n is the number of queens to place. By default it is the classic 8 queens.
62599075aad79263cf430100
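The ProblemaNreinas docstring above describes the N-queens local-search environment; its cost counts pairs of queens that attack each other along rows or diagonals, with columns fixed by the permutation encoding. Below is a standalone restatement of that cost function, checked on an obviously bad and a known-good 4-queens state.

from itertools import combinations

def costo(estado):
    """Number of attacking queen pairs; same check as ProblemaNreinas.costo above."""
    return sum(
        1
        for i, j in combinations(range(len(estado)), 2)
        if estado[i] == estado[j] or abs(estado[i] - estado[j]) == abs(j - i)
    )

print(costo((0, 1, 2, 3)))   # 6: all queens on one diagonal attack pairwise
print(costo((1, 3, 0, 2)))   # 0: a known solution for 4 queens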
class UserModelTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> User.query.delete() <NEW_LINE> Message.query.delete() <NEW_LINE> Follows.query.delete() <NEW_LINE> self.client = app.test_client() <NEW_LINE> <DEDENT> def test_user_model(self): <NEW_LINE> <INDENT> u = User(email="[email protected]", username="testuser", password="HASHED_PASSWORD") <NEW_LINE> db.session.add(u) <NEW_LINE> db.session.commit() <NEW_LINE> self.assertEqual(len(u.messages), 0) <NEW_LINE> self.assertEqual(len(u.followers), 0) <NEW_LINE> self.assertEqual(len(u.following), 0) <NEW_LINE> self.assertEqual(len(u.likes), 0) <NEW_LINE> self.assertEqual(repr(u), f"<User #{u.id}: testuser, [email protected]>") <NEW_LINE> u2 = User(email="[email protected]", username="testuser2", password="HASHED_PASSWORD") <NEW_LINE> db.session.add(u2) <NEW_LINE> db.session.commit() <NEW_LINE> self.assertEqual(u.is_following(u2), False) <NEW_LINE> self.assertEqual(u.is_followed_by(u2), False) <NEW_LINE> follow = Follows(user_being_followed_id=u.id, user_following_id=u2.id) <NEW_LINE> db.session.add(follow) <NEW_LINE> db.session.commit() <NEW_LINE> follow = Follows(user_being_followed_id=u2.id, user_following_id=u.id) <NEW_LINE> db.session.add(follow) <NEW_LINE> db.session.commit() <NEW_LINE> self.assertEqual(u.is_followed_by(u2), True) <NEW_LINE> self.assertEqual(u2.is_followed_by(u), True) <NEW_LINE> u3 = User.signup("testuser3", "[email protected]", "HASHED_PASSWORD", "") <NEW_LINE> db.session.commit() <NEW_LINE> good_user = User.authenticate(u3.username, "HASHED_PASSWORD") <NEW_LINE> self.assertTrue(good_user) <NEW_LINE> good_user = User.authenticate("bad-user-name", "HASHED_PASSWORD") <NEW_LINE> self.assertFalse(good_user) <NEW_LINE> good_user = User.authenticate(u3.username, "bad-password") <NEW_LINE> self.assertFalse(good_user)
Test model for users.
6259907521bff66bcd7245b1
class MacroGetter(object): <NEW_LINE> <INDENT> def __call__(self, context, request, view, name): <NEW_LINE> <INDENT> return zope.component.getMultiAdapter( (context, view, request), interface=interfaces.IMacroTemplate, name=name)
Collect named IMacroTemplate via TAL namespace called ``macro``.
62599075009cb60464d02e84
class Testcase(object): <NEW_LINE> <INDENT> def __init__(self, _tc_name=None, _tc_start_time=None, _tc_end_time=None, _tc_status=None, _tc_log=None): <NEW_LINE> <INDENT> self._name = _tc_name <NEW_LINE> self._start_time = _tc_start_time <NEW_LINE> self._end_time = _tc_end_time <NEW_LINE> self._status = _tc_status <NEW_LINE> self._log = _tc_log
This object contains all attributes of a testcase.
625990753539df3088ecdbe0
class Segment_MEA(models.Model): <NEW_LINE> <INDENT> segment = models.ForeignKey( X12Segment ) <NEW_LINE> MEA01 = models.CharField( max_length=2 ) <NEW_LINE> MEA02 = models.CharField( max_length=3 ) <NEW_LINE> MEA03 = models.CharField( max_length=20 ) <NEW_LINE> MEA05 = models.CharField( max_length=20, null=True, blank=True ) <NEW_LINE> MEA06 = models.CharField( max_length=20, null=True, blank=True ) <NEW_LINE> MEA07 = models.CharField( max_length=2, null=True, blank=True ) <NEW_LINE> MEA08 = models.CharField( max_length=2, null=True, blank=True ) <NEW_LINE> MEA09 = models.CharField( max_length=2, null=True, blank=True ) <NEW_LINE> MEA10 = models.CharField( max_length=4, null=True, blank=True ) <NEW_LINE> def unmarshall( self, segmentToken, cs=':' ): <NEW_LINE> <INDENT> self.MEA01 = segmentToken.elt(1) <NEW_LINE> self.MEA02 = segmentToken.elt(2) <NEW_LINE> self.MEA03 = segmentToken.elt(3) <NEW_LINE> self.C001 = segmentToken.elt(4) <NEW_LINE> self.MEA05 = segmentToken.elt(5) <NEW_LINE> self.MEA06 = segmentToken.elt(6) <NEW_LINE> self.MEA07 = segmentToken.elt(7) <NEW_LINE> self.MEA08 = segmentToken.elt(8) <NEW_LINE> self.MEA09 = segmentToken.elt(9) <NEW_LINE> self.MEA10 = segmentToken.elt(10)
Properties(syntax=u'R03050608 C0504 C0604 L07030506 E0803',req_sit=u'S',repeat=u'20',pos=u'462',desc=u'Test Result')
625990757d43ff24874280b8
class GkukanMusiumdbDialogTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_icon_png(self): <NEW_LINE> <INDENT> path = ':/plugins/GkukanMusiumdb/icon.png' <NEW_LINE> icon = QIcon(path) <NEW_LINE> self.assertFalse(icon.isNull())
Test resources work.
6259907516aa5153ce401e23
class EnterHold(Event): <NEW_LINE> <INDENT> pass
Wait for the player to hit start.
625990754f6381625f19a14f
class DenseSliceCOO(sparse.COO): <NEW_LINE> <INDENT> def __getitem__(self, *args, **kwargs): <NEW_LINE> <INDENT> obj = super().__getitem__(*args, **kwargs) <NEW_LINE> try: <NEW_LINE> <INDENT> return obj.todense() <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return obj
Just like sparse.COO, but returning a dense array on indexing/slicing
625990757d847024c075dd23
class IBlogEntry(Interface): <NEW_LINE> <INDENT> pass
Marker interface for SimpleBlog blog entry object.
625990758a43f66fc4bf3ade
class TestVP9Speed(unittest.TestCase): <NEW_LINE> <INDENT> def test_speed(self): <NEW_LINE> <INDENT> vp9 = VP9() <NEW_LINE> self._test_speed_normal_values(vp9) <NEW_LINE> self._test_speed_abnormal_values(vp9) <NEW_LINE> <DEDENT> def _test_speed_normal_values(self, vp9): <NEW_LINE> <INDENT> vp9.speed = 0 <NEW_LINE> self.assertEqual(vp9.speed, 0) <NEW_LINE> vp9.speed = 1 <NEW_LINE> self.assertEqual(vp9.speed, 1) <NEW_LINE> vp9.speed = 2 <NEW_LINE> self.assertEqual(vp9.speed, 2) <NEW_LINE> vp9.speed = 3 <NEW_LINE> self.assertEqual(vp9.speed, 3) <NEW_LINE> vp9.speed = 4 <NEW_LINE> self.assertEqual(vp9.speed, 4) <NEW_LINE> vp9.speed = 5 <NEW_LINE> self.assertEqual(vp9.speed, 5) <NEW_LINE> vp9.speed = 6 <NEW_LINE> self.assertEqual(vp9.speed, 6) <NEW_LINE> <DEDENT> def _test_speed_abnormal_values(self, vp9): <NEW_LINE> <INDENT> vp9.speed = -1 <NEW_LINE> self.assertEqual(vp9.speed, 0) <NEW_LINE> vp9.speed = -10 <NEW_LINE> self.assertEqual(vp9.speed, 0) <NEW_LINE> vp9.speed = 7 <NEW_LINE> self.assertEqual(vp9.speed, 0) <NEW_LINE> vp9.speed = 10 <NEW_LINE> self.assertEqual(vp9.speed, 0) <NEW_LINE> vp9.speed = None <NEW_LINE> self.assertEqual(vp9.speed, 0)
Tests all Speed option values for the VP9 object.
625990754527f215b58eb645
class Historian: <NEW_LINE> <INDENT> def __init__(self, config, dxl, recorder, register_monitor): <NEW_LINE> <INDENT> self.__config = config <NEW_LINE> self.__dxl = dxl <NEW_LINE> self.__recorder = recorder <NEW_LINE> self.__register_monitor = register_monitor <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self.__dxl.connect() <NEW_LINE> logger.info('connected dxlhistorian to service fabric') <NEW_LINE> for topic in self.__config.subscribe_to: <NEW_LINE> <INDENT> callback = RecordingCallback(self.__recorder, self.__register_monitor(RecordingMonitor('recording.{}'.format(topic)))) <NEW_LINE> self.__dxl.add_event_callback(topic, callback) <NEW_LINE> logger.info("subscribed dxlhistorian to topic %s", topic) <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> logger.info("disconnecting dxlhistorian from service fabric") <NEW_LINE> self.__dxl.disconnect()
Historian is the class encapsulating a single instance of the Historian listening for events being sent on the OpenDXL fabric
62599075f548e778e596ced9
class Curve: <NEW_LINE> <INDENT> def __init__ (self): <NEW_LINE> <INDENT> self.numPoints = -1 <NEW_LINE> self.order = -1 <NEW_LINE> self.points = [] <NEW_LINE> self.knots = [] <NEW_LINE> pass
Curve object
625990754428ac0f6e659e79
class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ("last_login", "last_request")
Last login and last request
625990752ae34c7f260aca2f
class GlobalBear(Bear): <NEW_LINE> <INDENT> def __init__(self, file_dict, section, message_queue, TIMEOUT=0): <NEW_LINE> <INDENT> Bear.__init__(self, section, message_queue, TIMEOUT) <NEW_LINE> self.file_dict = file_dict <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def kind(): <NEW_LINE> <INDENT> return BEAR_KIND.GLOBAL <NEW_LINE> <DEDENT> def run_bear(self, *args): <NEW_LINE> <INDENT> raise NotImplementedError("This function has to be implemented for a runnable bear.")
A GlobalBear is able to analyze semantic facts across several files. The results of a GlobalBear will be presented grouped by the originating Bear. Therefore, results spanning multiple files are allowed and will be handled correctly. If you only look at one file at a time anyway, a LocalBear is better for your needs. (And better for performance and usability, for both user and developer.)
625990752c8b7c6e89bd5132
class PaintingWithList(BasePainting): <NEW_LINE> <INDENT> def __init__(self, fig_support=111, name=""): <NEW_LINE> <INDENT> super(PaintingWithList, self).__init__(fig_support, name) <NEW_LINE> <DEDENT> def painting_mul_list(self, feature: list, label: list, index: int): <NEW_LINE> <INDENT> x_scatter = np.array([_data[index] for _data in feature]) <NEW_LINE> y_scatter = np.array(label) <NEW_LINE> self.ax1.scatter(x_scatter, y_scatter, c=self.all_color[1], marker=self.all_marker[1]) <NEW_LINE> <DEDENT> def painting_simple_list(self, feature: list, label: list): <NEW_LINE> <INDENT> x_scatter = np.array(feature) <NEW_LINE> y_scatter = np.array(label) <NEW_LINE> self.ax1.scatter(x_scatter, y_scatter, c=self.all_color[1], marker=self.all_marker[1]) <NEW_LINE> <DEDENT> def painting_list_with_label(self, x: list, y: list, label: list): <NEW_LINE> <INDENT> for i in range(len(x)): <NEW_LINE> <INDENT> x_point = x[i: i + 1] <NEW_LINE> y_point = y[i: i + 1] <NEW_LINE> label_point = int(label[i: i + 1][0]) <NEW_LINE> self.ax1.scatter(x_point, y_point, c=self.all_color[label_point], marker=self.all_marker[label_point]) <NEW_LINE> <DEDENT> <DEDENT> def painting_with_offset(self, data: list, label: list, mul_simple: bool = False): <NEW_LINE> <INDENT> if not mul_simple: <NEW_LINE> <INDENT> self.painting_mul_list(data, label, 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.painting_simple_list(data, label) <NEW_LINE> <DEDENT> <DEDENT> def painting_no_offset(self, data: list, label: list, mul_simple: bool = False): <NEW_LINE> <INDENT> if not mul_simple: <NEW_LINE> <INDENT> self.painting_mul_list(data, label, 0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.painting_simple_list(data, label)
Plotting (for regression).
625990754a966d76dd5f0834
class BaseView: <NEW_LINE> <INDENT> _host = None <NEW_LINE> @property <NEW_LINE> def host(self): <NEW_LINE> <INDENT> return self._host <NEW_LINE> <DEDENT> @host.setter <NEW_LINE> def host(self, host): <NEW_LINE> <INDENT> self._host = host
base class for views
625990752c8b7c6e89bd5133
class Int32RectValueSerializer(ValueSerializer): <NEW_LINE> <INDENT> def CanConvertFromString(self,value,context): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def CanConvertToString(self,value,context): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ConvertFromString(self,value,context): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ConvertToString(self,value,context): <NEW_LINE> <INDENT> pass
Converts instances of System.String to and from instances of System.Windows.Int32Rect. Int32RectValueSerializer()
625990755fcc89381b266dfe
class Executor(base_example_gen_executor.BaseExampleGenExecutor): <NEW_LINE> <INDENT> def GetInputSourceToExamplePTransform(self): <NEW_LINE> <INDENT> return _BigQueryToExample
Generic TFX BigQueryExampleGen executor.
6259907521bff66bcd7245b3
class CircuitBlock(object): <NEW_LINE> <INDENT> def __init__(self, num_bit): <NEW_LINE> <INDENT> self.num_bit = num_bit <NEW_LINE> <DEDENT> def __call__(self, qureg, theta_list): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_param(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tocsr(self, theta_list): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> def tocsr_seq(self, theta_list): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> def dagger(self): <NEW_LINE> <INDENT> res = copy.copy(self) <NEW_LINE> res._dagger = True <NEW_LINE> return res
the building block of a circuit. This is an abstract class.
6259907532920d7e50bc7992
class KeyState(UGen): <NEW_LINE> <INDENT> __documentation_section__ = None <NEW_LINE> __slots__ = () <NEW_LINE> _ordered_input_names = ( 'keycode', 'minval', 'maxval', 'lag', ) <NEW_LINE> _valid_calculation_rates = None <NEW_LINE> def __init__( self, calculation_rate=None, keycode=0, lag=0.2, maxval=1, minval=0, ): <NEW_LINE> <INDENT> UGen.__init__( self, calculation_rate=calculation_rate, keycode=keycode, lag=lag, maxval=maxval, minval=minval, ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def kr( cls, keycode=0, lag=0.2, maxval=1, minval=0, ): <NEW_LINE> <INDENT> from supriya.tools import synthdeftools <NEW_LINE> calculation_rate = synthdeftools.CalculationRate.CONTROL <NEW_LINE> ugen = cls._new_expanded( calculation_rate=calculation_rate, keycode=keycode, lag=lag, maxval=maxval, minval=minval, ) <NEW_LINE> return ugen <NEW_LINE> <DEDENT> @property <NEW_LINE> def keycode(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('keycode') <NEW_LINE> return self._inputs[index] <NEW_LINE> <DEDENT> @property <NEW_LINE> def lag(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('lag') <NEW_LINE> return self._inputs[index] <NEW_LINE> <DEDENT> @property <NEW_LINE> def maxval(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('maxval') <NEW_LINE> return self._inputs[index] <NEW_LINE> <DEDENT> @property <NEW_LINE> def minval(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('minval') <NEW_LINE> return self._inputs[index]
:: >>> key_state = ugentools.KeyState.ar( ... keycode=0, ... lag=0.2, ... maxval=1, ... minval=0, ... ) >>> key_state KeyState.ar()
62599075009cb60464d02e86
class TableCreationError(HTTPException): <NEW_LINE> <INDENT> pass
handle table creation error
6259907516aa5153ce401e25
class game_state(): <NEW_LINE> <INDENT> def __init__(self, num_players, starting_coins, low_card, high_card, discard): <NEW_LINE> <INDENT> self.num_players = num_players <NEW_LINE> self.starting_coins = starting_coins <NEW_LINE> self.low_card = low_card <NEW_LINE> self.high_card = high_card <NEW_LINE> self.discard = discard <NEW_LINE> self.card_in_play = None <NEW_LINE> self.pot = 0 <NEW_LINE> self.players = [player_state(starting_coins) for _ in range(num_players)] <NEW_LINE> <DEDENT> def prehash(self): <NEW_LINE> <INDENT> return (self.card_in_play, self.pot, *(p.prehash() for p in self.players)) <NEW_LINE> <DEDENT> def deal(self, card): <NEW_LINE> <INDENT> assert self.card_in_play is None, 'Cannot deal a new card; one is already in play!' <NEW_LINE> assert self.pot == 0, 'Cannot deal a new card; pot should be zero!' <NEW_LINE> self.card_in_play = card <NEW_LINE> <DEDENT> def take(self): <NEW_LINE> <INDENT> self.players[0].take(self.card_in_play, self.pot) <NEW_LINE> self.card_in_play = None <NEW_LINE> self.pot = 0 <NEW_LINE> <DEDENT> def pass_turn(self): <NEW_LINE> <INDENT> self.players[0].pass_turn() <NEW_LINE> self.pot += 1 <NEW_LINE> self.players.append(self.players.pop(0)) <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.card_in_play = None <NEW_LINE> self.pot = 0 <NEW_LINE> self.players = [player_state(self.starting_coins) for _ in range(self.num_players)] <NEW_LINE> <DEDENT> def possible_next_cards(self): <NEW_LINE> <INDENT> already_delt = SortedSet <NEW_LINE> if self.card_in_play: <NEW_LINE> <INDENT> already_delt.add(self.card_in_play) <NEW_LINE> <DEDENT> for player in self.players: <NEW_LINE> <INDENT> already_delt = already_delt.union(player.cards) <NEW_LINE> <DEDENT> return [card for card in range(self.low_card, self.high_card + 1) if card not in already_delt] <NEW_LINE> <DEDENT> def get_results(self): <NEW_LINE> <INDENT> scores = [p.score() for p in self.players] <NEW_LINE> winning_score = min(scores) <NEW_LINE> won = [s == winning_score for s in scores] <NEW_LINE> num_winners = sum(won) <NEW_LINE> lose_pays = -1 / len(self.players) <NEW_LINE> win_pays = 1/num_winners + lose_pays <NEW_LINE> payoffs = [win_pays if w else lose_pays for w in won] <NEW_LINE> return scores, payoffs
Define a class for tracking the state of the No Thanks game.
62599075aad79263cf430103
class SuperMatchData(DataModel): <NEW_LINE> <INDENT> def __init__(self, d=None): <NEW_LINE> <INDENT> DataModel.__init__(self) <NEW_LINE> self.match_number = -1 <NEW_LINE> self.scout_name = "" <NEW_LINE> self.blue_speed = {} <NEW_LINE> self.red_speed = {} <NEW_LINE> self.blue_torque = {} <NEW_LINE> self.red_torque = {} <NEW_LINE> self.blue_control = {} <NEW_LINE> self.red_control = {} <NEW_LINE> self.blue_defense = {} <NEW_LINE> self.red_defense = {} <NEW_LINE> self.notes = "" <NEW_LINE> if d is not None: <NEW_LINE> <INDENT> self.set(d) <NEW_LINE> temp = self.blue_speed <NEW_LINE> self.blue_speed = {} <NEW_LINE> for key, value in temp.__dict__.items(): <NEW_LINE> <INDENT> self.blue_speed[key] = value <NEW_LINE> <DEDENT> temp = self.red_speed <NEW_LINE> self.red_speed = {} <NEW_LINE> for key, value in temp.__dict__.items(): <NEW_LINE> <INDENT> self.red_speed[key] = value <NEW_LINE> <DEDENT> temp = self.blue_torque <NEW_LINE> self.blue_torque = {} <NEW_LINE> for key, value in temp.__dict__.items(): <NEW_LINE> <INDENT> self.blue_torque[key] = value <NEW_LINE> <DEDENT> temp = self.red_torque <NEW_LINE> self.red_torque = {} <NEW_LINE> for key, value in temp.__dict__.items(): <NEW_LINE> <INDENT> self.red_torque[key] = value <NEW_LINE> <DEDENT> temp = self.blue_control <NEW_LINE> self.blue_control = {} <NEW_LINE> for key, value in temp.__dict__.items(): <NEW_LINE> <INDENT> self.blue_control[key] = value <NEW_LINE> <DEDENT> temp = self.red_control <NEW_LINE> self.red_control = {} <NEW_LINE> for key, value in temp.__dict__.items(): <NEW_LINE> <INDENT> self.red_control[key] = value <NEW_LINE> <DEDENT> temp = self.blue_defense <NEW_LINE> self.blue_defense = {} <NEW_LINE> for key, value in temp.__dict__.items(): <NEW_LINE> <INDENT> self.blue_defense[key] = value <NEW_LINE> <DEDENT> temp = self.red_defense <NEW_LINE> self.red_defense = {} <NEW_LINE> for key, value in temp.__dict__.items(): <NEW_LINE> <INDENT> self.red_defense[key] = value
Data model that contains the data collected by a Super Scout for a match
62599075460517430c432cfe
class GetMonthsOfYearTestCase(TestCase): <NEW_LINE> <INDENT> longMessage = True <NEW_LINE> def test_function(self): <NEW_LINE> <INDENT> self.assertEqual( utils.get_months_of_year(datetime.datetime.now().year - 1), 12) <NEW_LINE> self.assertEqual( utils.get_months_of_year(datetime.datetime.now().year + 1), 1) <NEW_LINE> self.assertEqual( utils.get_months_of_year(datetime.datetime.now().year), datetime.datetime.now().month)
Tests for the ``get_months_of_year`` function.
62599075d268445f2663a803
class TFModel(Model, TFParams, HasInputMapping, HasOutputMapping, HasBatchSize, HasModelDir, HasExportDir, HasSignatureDefKey, HasTagSet): <NEW_LINE> <INDENT> def __init__(self, tf_args): <NEW_LINE> <INDENT> super(TFModel, self).__init__() <NEW_LINE> self.args = Namespace(tf_args) <NEW_LINE> self._setDefault(input_mapping={}, output_mapping={}, batch_size=100, model_dir=None, export_dir=None, signature_def_key=None, tag_set=None) <NEW_LINE> <DEDENT> def _transform(self, dataset): <NEW_LINE> <INDENT> spark = SparkSession.builder.getOrCreate() <NEW_LINE> input_cols = [col for col, tensor in sorted(self.getInputMapping().items())] <NEW_LINE> output_cols = [col for tensor, col in sorted(self.getOutputMapping().items())] <NEW_LINE> logger.info("input_cols: {}".format(input_cols)) <NEW_LINE> logger.info("output_cols: {}".format(output_cols)) <NEW_LINE> logger.info("===== 1. inference args: {0}".format(self.args)) <NEW_LINE> logger.info("===== 2. inference params: {0}".format(self._paramMap)) <NEW_LINE> local_args = self.merge_args_params() <NEW_LINE> logger.info("===== 3. inference args + params: {0}".format(local_args)) <NEW_LINE> tf_args = self.args.argv if self.args.argv else local_args <NEW_LINE> _run_model = _run_model_tf1 if version.parse(TF_VERSION) < version.parse("2.0.0") else _run_model_tf2 <NEW_LINE> rdd_out = dataset.select(input_cols).rdd.mapPartitions(lambda it: _run_model(it, local_args, tf_args)) <NEW_LINE> rows_out = rdd_out.map(lambda x: Row(*x)) <NEW_LINE> return spark.createDataFrame(rows_out, output_cols)
Spark ML Model backed by a TensorFlow model checkpoint/saved_model on disk. During ``transform()``, each executor will run an independent, single-node instance of TensorFlow in parallel, so the model must fit in memory. The model/session will be loaded/initialized just once for each Spark Python worker, and the session will be cached for subsequent tasks/partitions to avoid re-loading the model for each partition. Args: :tf_args: Dictionary of arguments specific to TensorFlow "main" function.
62599075fff4ab517ebcf162
class TestInlineResponse2022ConfigIdByConfigTypeId(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return InlineResponse2022ConfigIdByConfigTypeId( aca7f705_9f00_4ff6_8b86_63a4d44bde8a = '0', _6cd51ae0_cfe4_499d_88d8_d02a9e18b25f = '0', d28725c0_9771_47d7_a9da_e408bd0adf4e = '0', _888d6565_4359_4d91_b38d_0a24124e4456 = '0', feb075cd_dd2b_47c5_922c_86ef3b06fb16 = '0' ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return InlineResponse2022ConfigIdByConfigTypeId( ) <NEW_LINE> <DEDENT> <DEDENT> def testInlineResponse2022ConfigIdByConfigTypeId(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True)
InlineResponse2022ConfigIdByConfigTypeId unit test stubs
625990758a43f66fc4bf3ae0
class SkuAvailabilityListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[SkuAvailability]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(SkuAvailabilityListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None)
Check SKU availability result list. :param value: Check SKU availability result list. :type value: list[~azure.mgmt.cognitiveservices.models.SkuAvailability]
625990758a349b6b43687ba5
class ChecksumForm(forms.Form): <NEW_LINE> <INDENT> checksum = ChecksumField(required=True) <NEW_LINE> def __init__(self, translation, *args, **kwargs): <NEW_LINE> <INDENT> self.translation = translation <NEW_LINE> super(ChecksumForm, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def clean_checksum(self): <NEW_LINE> <INDENT> if 'checksum' not in self.cleaned_data: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> unit_set = self.translation.unit_set <NEW_LINE> try: <NEW_LINE> <INDENT> self.cleaned_data['unit'] = unit_set.filter( id_hash=self.cleaned_data['checksum'] )[0] <NEW_LINE> <DEDENT> except (Unit.DoesNotExist, IndexError): <NEW_LINE> <INDENT> self.translation.log_error( 'string %s disappeared!', self.cleaned_data['checksum'] ) <NEW_LINE> raise ValidationError(_( 'The string you wanted to translate is no longer available!' ))
Form for handling checksum IDs for translation.
6259907563b5f9789fe86ab0
class TestGitReceiveOldModified(Base): <NEW_LINE> <INDENT> expected_title = "git.receive" <NEW_LINE> expected_subti = ('[email protected] pushed to datanommer (master). "Try ' 'removing requirement on python-bunch."') <NEW_LINE> expected_secondary_icon = ( 'https://seccdn.libravatar.org/avatar/' '1a0d2acfddb1911ecf55da42cfa34710' '?s=64&d=retro') <NEW_LINE> expected_packages = set(['datanommer']) <NEW_LINE> expected_usernames = set() <NEW_LINE> expected_objects = set(['datanommer/datanommer.spec']) <NEW_LINE> expected_link = ("http://pkgs.fedoraproject.org/cgit/datanommer.git/commit" "/?h=master&id=66abdea4014eb2f0745fc38f86e20c7d7009237e") <NEW_LINE> msg = { "i": 1, "msg": { "commit": { "branch": "master", "email": "[email protected]", "message": "Try removing requirement on python-bunch.\n", "name": "Ralph Bean", "repo": "datanommer", "rev": "66abdea4014eb2f0745fc38f86e20c7d7009237e", "stats": { "files": { "datanommer.spec": { "deletions": 6, "insertions": 4, "lines": 10 } }, "total": { "deletions": 6, "files": 1, "insertions": 4, "lines": 10 } }, "summary": "Try removing requirement on python-bunch." } }, "timestamp": 1349735155.0, "topic": "org.fedoraproject.prod.git.receive" }
Sample message from the first generation of git-category messages that have been modified in datanommer to match the new topics.
62599075a17c0f6771d5d852
class TestSystemApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = ibmwex.apis.system_api.SystemApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_download_logs(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_download_service_logs(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_export_config(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_logs(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_usage(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_import_config(self): <NEW_LINE> <INDENT> pass
SystemApi unit test stubs
6259907523849d37ff852a03
class SymmetricCDPSolver(CDPSolver): <NEW_LINE> <INDENT> def __init__(self, rounding_digits) -> None: <NEW_LINE> <INDENT> super().__init__(rounding_digits) <NEW_LINE> self.__rounding_digits = rounding_digits <NEW_LINE> <DEDENT> def solve_partial_charges( self, graph: nx.Graph, charge_dists_collector: Dict[Atom, Tuple[ChargeList, WeightList, str]], total_charge: int, total_charge_diff: float = DEFAULT_TOTAL_CHARGE_DIFF, **kwargs ) -> None: <NEW_LINE> <INDENT> atom_idx = dict() <NEW_LINE> for k, (atom, _) in enumerate(charge_dists_collector.items()): <NEW_LINE> <INDENT> atom_idx[k] = atom <NEW_LINE> <DEDENT> neighborhoodclasses = self.compute_atom_neighborhood_classes(atom_idx, charge_dists_collector) <NEW_LINE> charge_dists_reduced = self.reduce_charge_distributions(charge_dists_collector, atom_idx, neighborhoodclasses) <NEW_LINE> solution, solutionTime, num_items, scaled_capacity = self.solve_dp_c(charge_dists_reduced, total_charge, total_charge_diff) <NEW_LINE> charge = 0 <NEW_LINE> profit = 0 <NEW_LINE> for (i, j) in solution: <NEW_LINE> <INDENT> for k in neighborhoodclasses[i]: <NEW_LINE> <INDENT> graph.node[atom_idx[k]]['partial_charge'] = charge_dists_collector[atom_idx[k]][0][j] <NEW_LINE> graph.node[atom_idx[k]]['score'] = charge_dists_collector[atom_idx[k]][1][j] <NEW_LINE> charge += graph.node[atom_idx[k]]['partial_charge'] <NEW_LINE> profit += graph.node[atom_idx[k]]['score'] <NEW_LINE> <DEDENT> <DEDENT> graph.graph['total_charge'] = round(charge, self.__rounding_digits) <NEW_LINE> graph.graph['score'] = profit <NEW_LINE> graph.graph['time'] = solutionTime <NEW_LINE> graph.graph['items'] = num_items <NEW_LINE> graph.graph['scaled_capacity'] = scaled_capacity
An optimizing solver using Dynamic Programming, C version. Use the HistogramCollector to produce appropriate charge distributions.
6259907592d797404e389801
class HTTPMethodNotAllowed(OptionalRepresentation, HTTPError): <NEW_LINE> <INDENT> def __init__(self, allowed_methods, **kwargs): <NEW_LINE> <INDENT> new_headers = {'Allow': ', '.join(allowed_methods)} <NEW_LINE> super(HTTPMethodNotAllowed, self).__init__(status.HTTP_405, **kwargs) <NEW_LINE> if not self.headers: <NEW_LINE> <INDENT> self.headers = {} <NEW_LINE> <DEDENT> self.headers.update(new_headers)
405 Method Not Allowed. The method received in the request-line is known by the origin server but not supported by the target resource. The origin server MUST generate an Allow header field in a 405 response containing a list of the target resource's currently supported methods. A 405 response is cacheable by default; i.e., unless otherwise indicated by the method definition or explicit cache controls. (See also: RFC 7231, Section 6.5.5) Args: allowed_methods (list of str): Allowed HTTP methods for this resource (e.g., ``['GET', 'POST', 'HEAD']``). Keyword Args: title (str): Human-friendly error title. If not provided, and `description` is also not provided, no body will be included in the response. description (str): Human-friendly description of the error, along with a helpful suggestion or two (default ``None``). headers (dict or list): A ``dict`` of header names and values to set, or a ``list`` of (*name*, *value*) tuples. Both *name* and *value* must be of type ``str`` or ``StringType``, and only character values 0x00 through 0xFF may be used on platforms that use wide characters. Note: The Content-Type header, if present, will be overridden. If you wish to return custom error messages, you can create your own HTTP error class, and install an error handler to convert it into an appropriate HTTP response for the client Note: Falcon can process a list of ``tuple`` slightly faster than a ``dict``. href (str): A URL someone can visit to find out more information (default ``None``). Unicode characters are percent-encoded. href_text (str): If href is given, use this as the friendly title/description for the link (default 'API documentation for this error'). code (int): An internal code that customers can reference in their support request or to help them when searching for knowledge base articles related to this error (default ``None``).
6259907566673b3332c31d4a
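A short sketch of raising this error from a Falcon-style responder, using only the constructor shown above; the resource and the description text are invented for illustration.

class ReadOnlyResource:
    # Hypothetical resource that only supports reads.
    def on_put(self, req, resp):
        raise HTTPMethodNotAllowed(
            ['GET', 'HEAD'],
            description='This resource is read-only; use GET or HEAD.',
        )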
class TestAggregationSystem(VumiTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.aggregator_workers = [] <NEW_LINE> self.now = 0 <NEW_LINE> self.worker_helper = self.add_helper(WorkerHelper()) <NEW_LINE> self.broker = BrokerWrapper(self.worker_helper.broker) <NEW_LINE> <DEDENT> def fake_time(self): <NEW_LINE> <INDENT> return self.now <NEW_LINE> <DEDENT> def send(self, datapoints): <NEW_LINE> <INDENT> self.broker.send_datapoints("vumi.metrics", "vumi.metrics", datapoints) <NEW_LINE> <DEDENT> def recv(self): <NEW_LINE> <INDENT> return self.broker.recv_datapoints("vumi.metrics.aggregates", "vumi.metrics.aggregates") <NEW_LINE> <DEDENT> @inlineCallbacks <NEW_LINE> def _setup_workers(self, bucketters, aggregators, bucket_size): <NEW_LINE> <INDENT> bucket_config = { 'buckets': aggregators, 'bucket_size': bucket_size, } <NEW_LINE> for _i in range(bucketters): <NEW_LINE> <INDENT> worker = yield self.worker_helper.get_worker( metrics_workers.MetricTimeBucket, bucket_config) <NEW_LINE> <DEDENT> aggregator_config = { 'bucket_size': bucket_size, } <NEW_LINE> for i in range(aggregators): <NEW_LINE> <INDENT> config = aggregator_config.copy() <NEW_LINE> config['bucket'] = i <NEW_LINE> worker = yield self.worker_helper.get_worker( metrics_workers.MetricAggregator, config=config, start=False) <NEW_LINE> worker._time = self.fake_time <NEW_LINE> yield worker.startWorker() <NEW_LINE> self.aggregator_workers.append(worker) <NEW_LINE> <DEDENT> <DEDENT> @inlineCallbacks <NEW_LINE> def test_aggregating_one_metric(self): <NEW_LINE> <INDENT> yield self._setup_workers(1, 1, 5) <NEW_LINE> datapoints = [("vumi.test.foo", ["sum"], [(12345, 1.0), (12346, 2.0)])] <NEW_LINE> self.send(datapoints) <NEW_LINE> self.send(datapoints) <NEW_LINE> yield self.broker.kick_delivery() <NEW_LINE> yield self.broker.kick_delivery() <NEW_LINE> self.now = 12355 <NEW_LINE> for worker in self.aggregator_workers: <NEW_LINE> <INDENT> worker.check_buckets() <NEW_LINE> <DEDENT> datapoints, = self.recv() <NEW_LINE> self.assertEqual(datapoints, [ ["vumi.test.foo.sum", [], [[12345, 6.0]]] ])
Tests tying MetricTimeBucket and MetricAggregator together.
62599075d486a94d0ba2d905
class TestAlmTask(AlmTask): <NEW_LINE> <INDENT> def __init__(self, task_id, alm_id, priority, status, timestamp): <NEW_LINE> <INDENT> self.task_id = task_id <NEW_LINE> self.alm_id = alm_id <NEW_LINE> self.priority = priority <NEW_LINE> self.status = status <NEW_LINE> self.timestamp = timestamp <NEW_LINE> <DEDENT> def get_task_id(self): <NEW_LINE> <INDENT> return self.task_id <NEW_LINE> <DEDENT> def get_alm_id(self): <NEW_LINE> <INDENT> return self.alm_id <NEW_LINE> <DEDENT> def get_priority(self): <NEW_LINE> <INDENT> return self.priority <NEW_LINE> <DEDENT> def get_status(self): <NEW_LINE> <INDENT> return self.status <NEW_LINE> <DEDENT> def get_timestamp(self): <NEW_LINE> <INDENT> return self.timestamp
Simple test ALM Task
625990752c8b7c6e89bd5134
class FileDialog ( MFileDialog, Dialog ): <NEW_LINE> <INDENT> implements( IFileDialog ) <NEW_LINE> action = Enum( 'open', 'open files', 'save as' ) <NEW_LINE> default_directory = Unicode <NEW_LINE> default_filename = Unicode <NEW_LINE> default_path = Unicode <NEW_LINE> directory = Unicode <NEW_LINE> filename = Unicode <NEW_LINE> path = Unicode <NEW_LINE> paths = List( Unicode ) <NEW_LINE> wildcard = Unicode <NEW_LINE> wildcard_index = Int( 0 ) <NEW_LINE> def _create_contents ( self, parent ): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def close ( self ): <NEW_LINE> <INDENT> files = self.control.selectedFiles() <NEW_LINE> if files: <NEW_LINE> <INDENT> self.path = unicode( files[0] ) <NEW_LINE> self.paths = [ unicode( file ) for file in files ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.path = '' <NEW_LINE> self.paths = [ '' ] <NEW_LINE> <DEDENT> self.directory, self.filename = os.path.split( self.path ) <NEW_LINE> self.wildcard_index = self.control.filters().indexOf( self.control.selectedFilter() ) <NEW_LINE> super( FileDialog, self ).close() <NEW_LINE> <DEDENT> def _create_control ( self, parent ): <NEW_LINE> <INDENT> if ((len( self.default_path ) != 0) and (len( self.default_directory ) == 0) and (len( self.default_filename ) == 0)): <NEW_LINE> <INDENT> default_directory, default_filename = os.path.split( self.default_path ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> default_directory = self.default_directory <NEW_LINE> default_filename = self.default_filename <NEW_LINE> <DEDENT> keep = True <NEW_LINE> filters = QStringList() <NEW_LINE> for f in self.wildcard.split( '|' ): <NEW_LINE> <INDENT> if keep and f: <NEW_LINE> <INDENT> filters << f <NEW_LINE> <DEDENT> keep = not keep <NEW_LINE> <DEDENT> if not default_directory: <NEW_LINE> <INDENT> default_directory = QDir.currentPath() <NEW_LINE> <DEDENT> dlg = QFileDialog( parent, self.title, default_directory ) <NEW_LINE> dlg.setViewMode( QFileDialog.Detail ) <NEW_LINE> dlg.selectFile( default_filename ) <NEW_LINE> dlg.setFilters( filters ) <NEW_LINE> if self.wildcard_index < filters.count(): <NEW_LINE> <INDENT> dlg.selectFilter( filters[ self.wildcard_index ] ) <NEW_LINE> <DEDENT> if self.action == 'open': <NEW_LINE> <INDENT> dlg.setAcceptMode( QFileDialog.AcceptOpen ) <NEW_LINE> dlg.setFileMode( QFileDialog.ExistingFile ) <NEW_LINE> <DEDENT> elif self.action == 'open files': <NEW_LINE> <INDENT> dlg.setAcceptMode( QFileDialog.AcceptOpen ) <NEW_LINE> dlg.setFileMode( QFileDialog.ExistingFiles ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dlg.setAcceptMode( QFileDialog.AcceptSave ) <NEW_LINE> dlg.setFileMode( QFileDialog.AnyFile ) <NEW_LINE> <DEDENT> return dlg <NEW_LINE> <DEDENT> def _wildcard_default ( self ): <NEW_LINE> <INDENT> return self.WILDCARD_ALL
The toolkit specific implementation of a FileDialog. See the IFileDialog interface for the API documentation.
62599075091ae35668706585
class RetriableHTTPError(Exception): <NEW_LINE> <INDENT> pass
Raised when we get an HTTP error code that's worth retrying
625990755fcc89381b266dff
class StateResource(object): <NEW_LINE> <INDENT> swagger_types = { 'code': 'str', 'country_code_iso3': 'str', 'id': 'int', 'name': 'str' } <NEW_LINE> attribute_map = { 'code': 'code', 'country_code_iso3': 'country_code_iso3', 'id': 'id', 'name': 'name' } <NEW_LINE> def __init__(self, code=None, country_code_iso3=None, id=None, name=None): <NEW_LINE> <INDENT> self._code = None <NEW_LINE> self._country_code_iso3 = None <NEW_LINE> self._id = None <NEW_LINE> self._name = None <NEW_LINE> self.discriminator = None <NEW_LINE> if code is not None: <NEW_LINE> <INDENT> self.code = code <NEW_LINE> <DEDENT> if country_code_iso3 is not None: <NEW_LINE> <INDENT> self.country_code_iso3 = country_code_iso3 <NEW_LINE> <DEDENT> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def code(self): <NEW_LINE> <INDENT> return self._code <NEW_LINE> <DEDENT> @code.setter <NEW_LINE> def code(self, code): <NEW_LINE> <INDENT> self._code = code <NEW_LINE> <DEDENT> @property <NEW_LINE> def country_code_iso3(self): <NEW_LINE> <INDENT> return self._country_code_iso3 <NEW_LINE> <DEDENT> @country_code_iso3.setter <NEW_LINE> def country_code_iso3(self, country_code_iso3): <NEW_LINE> <INDENT> self._country_code_iso3 = country_code_iso3 <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @id.setter <NEW_LINE> def id(self, id): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, StateResource): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
625990755166f23b2e244d22
class DetectLogos(base.Command): <NEW_LINE> <INDENT> detailed_help = {'auth_hints': vision_command_util.VISION_AUTH_HELP} <NEW_LINE> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> flags.AddVisionFlags(parser) <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> return vision_command_util.RunVisionCommand( 'LOGO_DETECTION', args.image_path, max_results=args.max_results ) <NEW_LINE> <DEDENT> def DeprecatedFormat(self, args): <NEW_LINE> <INDENT> return 'json'
Detect popular product logos within an image. Detect popular product logos within an image. {auth_hints}
625990754f6381625f19a151
class MeanAbsoluteError(function.Function): <NEW_LINE> <INDENT> def check_type_forward(self, in_types): <NEW_LINE> <INDENT> type_check.expect(in_types.size() == 2) <NEW_LINE> type_check.expect( in_types[0].dtype == numpy.float32, in_types[1].dtype == numpy.float32, in_types[0].shape == in_types[1].shape ) <NEW_LINE> <DEDENT> def forward_cpu(self, inputs): <NEW_LINE> <INDENT> x0, x1 = inputs <NEW_LINE> self.diff = x0 - x1 <NEW_LINE> diff = self.diff.ravel() <NEW_LINE> return numpy.array(abs(diff).sum() / diff.size, dtype=diff.dtype), <NEW_LINE> <DEDENT> def forward_gpu(self, inputs): <NEW_LINE> <INDENT> x0, x1 = inputs <NEW_LINE> self.diff = x0 - x1 <NEW_LINE> diff = self.diff.ravel() <NEW_LINE> return abs(diff).sum() / diff.dtype.type(diff.size), <NEW_LINE> <DEDENT> def backward(self, inputs, gy): <NEW_LINE> <INDENT> xp = cuda.get_array_module(*inputs) <NEW_LINE> coeff = gy[0] * gy[0].dtype.type(1. / self.diff.size) <NEW_LINE> gx0 = coeff * xp.sign(self.diff) <NEW_LINE> return gx0, -gx0
Mean absolute error function.
625990751b99ca40022901dc
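A small sketch that drives the CPU path of the Function above directly with NumPy arrays; in a real Chainer setup the class would normally be invoked through a wrapper function, which is not shown here.

import numpy as np

x0 = np.array([1.0, 2.0, 3.0], dtype=np.float32)
x1 = np.array([1.5, 2.0, 2.0], dtype=np.float32)

f = MeanAbsoluteError()
loss, = f.forward_cpu((x0, x1))   # mean(|x0 - x1|) = (0.5 + 0.0 + 1.0) / 3
print(float(loss))                # 0.5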
class NormalRelation(CrossSectionRelation): <NEW_LINE> <INDENT> def __init__(self, cross_section, slope, datum=0): <NEW_LINE> <INDENT> super().__init__(cross_section, datum) <NEW_LINE> self._slope = slope <NEW_LINE> <DEDENT> def discharge(self, stage): <NEW_LINE> <INDENT> depth = stage - self._datum <NEW_LINE> return self._xs.normal_flow(depth, self._slope) <NEW_LINE> <DEDENT> def stage(self, discharge, y0=None): <NEW_LINE> <INDENT> y0 = self._get_y0(y0) <NEW_LINE> h0 = y0 - self._datum <NEW_LINE> depth = self._xs.normal_depth(discharge, self._slope, h0) <NEW_LINE> y = depth + self._datum <NEW_LINE> if not np.isnan(depth): <NEW_LINE> <INDENT> self._last_depth = y <NEW_LINE> <DEDENT> return y
Normal stage-discharge relation Parameters ---------- cross_section : CrossSection Cross section to base relation on slope : float Bed slope datum : float, optional Stage datum, optional (the default is 0)
6259907516aa5153ce401e27
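A construction sketch for NormalRelation; the cross-section object is a placeholder standing in for a CrossSection from the surrounding library, which must provide the normal_flow and normal_depth methods the class calls.

xs = make_cross_section()   # placeholder: a CrossSection built elsewhere in the library

relation = NormalRelation(xs, slope=0.001, datum=100.0)
q = relation.discharge(stage=102.5)        # flow for a stage 2.5 above the datum
y = relation.stage(discharge=q, y0=102.0)  # invert: stage that carries that flow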
class getJobStatus_args(object): <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRING, 'jobName', 'UTF8', None, ), ) <NEW_LINE> def __init__(self, jobName=None,): <NEW_LINE> <INDENT> self.jobName = jobName <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.jobName = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('getJobStatus_args') <NEW_LINE> if self.jobName is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('jobName', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.jobName.encode('utf-8') if sys.version_info[0] == 2 else self.jobName) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - jobName
625990757d847024c075dd27
class DenseJacobian(AssembledJacobian): <NEW_LINE> <INDENT> def __init__(self, system): <NEW_LINE> <INDENT> super().__init__(DenseMatrix, system=system)
Assemble dense global <Jacobian>.
62599075baa26c4b54d50bfc
class StockPickingRefundDocumentWizard(models.TransientModel): <NEW_LINE> <INDENT> _name = 'stock.picking.refund.wizard' <NEW_LINE> @api.multi <NEW_LINE> def create_refund(self): <NEW_LINE> <INDENT> picking_pool = self.env['stock.picking'] <NEW_LINE> move_pool = self.env['stock.move'] <NEW_LINE> quant_pool = self.env['stock.quant'] <NEW_LINE> company_pool = self.env['res.company'] <NEW_LINE> company = company_pool.search([])[0] <NEW_LINE> logistic_pick_refund_type = company.logistic_pick_out_type_id <NEW_LINE> location_to = logistic_pick_refund_type.default_location_src_id.id <NEW_LINE> location_from = logistic_pick_refund_type.default_location_dest_id.id <NEW_LINE> location_id = company.logistic_location_id.id <NEW_LINE> from_picking = self.picking_id <NEW_LINE> now = fields.Datetime.now() <NEW_LINE> partner = from_picking.partner_id <NEW_LINE> order = from_picking.sale_order_id <NEW_LINE> origin = from_picking.invoice_number or from_picking.ddt_number or from_picking.name <NEW_LINE> to_picking = picking_pool.create({ 'refund_origin_id': from_picking.id, 'stock_mode': 'in', 'sale_order_id': order.id, 'partner_id': partner.id, 'scheduled_date': now, 'origin': origin, 'picking_type_id': logistic_pick_refund_type.id, 'group_id': False, 'location_id': location_from, 'location_dest_id': location_to, 'state': 'draft', }) <NEW_LINE> to_picking_id = to_picking.id <NEW_LINE> for line in self.line_ids: <NEW_LINE> <INDENT> product = line.product_id <NEW_LINE> product_qty = line.refund_qty <NEW_LINE> quant_qty = line.stock_qty <NEW_LINE> if product_qty <= 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> move_id = move_pool.create({ 'picking_id': to_picking_id, 'company_id': company.id, 'partner_id': partner.id, 'picking_id': to_picking_id, 'product_id': product.id, 'name': product.name or ' ', 'date': now, 'date_expected': now, 'location_id': location_from, 'location_dest_id': location_to, 'product_uom_qty': product_qty, 'product_uom': product.uom_id.id, 'state': 'done', 'origin': origin, 'price_unit': product.standard_price, 'logistic_refund_id': line.line_id.id, }).id <NEW_LINE> if quant_qty <= 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> quant_id = quant_pool.create({ 'company_id': company.id, 'in_date': now, 'location_id': location_id, 'product_id': product.id, 'quantity': quant_qty, }) <NEW_LINE> <DEDENT> to_picking.refund_confirm_state_event() <NEW_LINE> return { 'type': 'ir.actions.act_window', 'name': _('Refund document'), 'view_type': 'form', 'view_mode': 'form,tree', 'res_id': to_picking_id, 'res_model': 'stock.picking', 'view_id': False, 'views': [(False, 'form'), (False, 'tree')], 'domain': [('id', '=', to_picking_id)], 'context': self.env.context, 'target': 'current', 'nodestroy': False, } <NEW_LINE> <DEDENT> picking_id = fields.Many2one( 'stock.picking', 'From document', required=True)
Wizard for generating a refund document
62599075442bda511e95d9fe
class HostTestPluginCopyMethod_JN51xx(HostTestPluginBase): <NEW_LINE> <INDENT> name = "HostTestPluginCopyMethod_JN51xx" <NEW_LINE> type = "CopyMethod" <NEW_LINE> capabilities = ["jn51xx"] <NEW_LINE> required_parameters = ["image_path", "serial"] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> HostTestPluginBase.__init__(self) <NEW_LINE> <DEDENT> def is_os_supported(self, os_name=None): <NEW_LINE> <INDENT> if not os_name: <NEW_LINE> <INDENT> os_name = self.host_os_support() <NEW_LINE> <DEDENT> if os_name and os_name.startswith("Windows"): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def setup(self, *args, **kwargs): <NEW_LINE> <INDENT> self.JN51XX_PROGRAMMER = "JN51xxProgrammer.exe" <NEW_LINE> return True <NEW_LINE> <DEDENT> def execute(self, capability, *args, **kwargs): <NEW_LINE> <INDENT> if not kwargs["image_path"]: <NEW_LINE> <INDENT> self.print_plugin_error("Error: image path not specified") <NEW_LINE> return False <NEW_LINE> <DEDENT> if not kwargs["serial"]: <NEW_LINE> <INDENT> self.print_plugin_error("Error: serial port not set (not opened?)") <NEW_LINE> return False <NEW_LINE> <DEDENT> result = False <NEW_LINE> if self.check_parameters(capability, *args, **kwargs): <NEW_LINE> <INDENT> if kwargs["image_path"] and kwargs["serial"]: <NEW_LINE> <INDENT> image_path = os.path.normpath(kwargs["image_path"]) <NEW_LINE> serial_port = kwargs["serial"] <NEW_LINE> if capability == "jn51xx": <NEW_LINE> <INDENT> cmd = [ self.JN51XX_PROGRAMMER, "-s", serial_port, "-f", image_path, "-V0", ] <NEW_LINE> result = self.run_command(cmd) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return result
Plugin interface adaptor for the JN51xxProgrammer tool.
625990753317a56b869bf1ec
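A hedged sketch of exercising the plugin's 'jn51xx' capability; the image path and serial port are placeholders, and the JN51xxProgrammer.exe tool is expected to be reachable on the Windows host.

plugin = HostTestPluginCopyMethod_JN51xx()
plugin.setup()

if plugin.is_os_supported():
    # Placeholders: point at a real firmware image and the board's COM port.
    ok = plugin.execute('jn51xx', image_path='build/app.bin', serial='COM3')
    print('flashed' if ok else 'copy failed')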
class AbstractModel(ABC): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def generate_competency(self, min_competency): <NEW_LINE> <INDENT> return random.uniform(min_competency, 1.0) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def generate_ambition(self, min_ambition): <NEW_LINE> <INDENT> return random.uniform(min_ambition, 1.0) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def generate_position(self, environment): <NEW_LINE> <INDENT> nrows, ncols = environment.shape <NEW_LINE> x_pos, y_pos = random.randint(0, ncols-1), random.randint(0, nrows-1) <NEW_LINE> return (x_pos, y_pos) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def choose_claim_field(self, knowledge_radius, current_position, environment): <NEW_LINE> <INDENT> nrow, ncol = environment.shape <NEW_LINE> x_pos, y_pos = current_position <NEW_LINE> x_field = x_pos + int(random.uniform(0, knowledge_radius)) <NEW_LINE> y_field = y_pos + int(random.uniform(0, knowledge_radius)) <NEW_LINE> if x_field < 0 or x_field > ncol-1 or y_field < 0 or y_field > nrow-1: <NEW_LINE> <INDENT> return (x_pos, y_pos) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (x_field, y_field) <NEW_LINE> <DEDENT> <DEDENT> @abstractmethod <NEW_LINE> def relocate(self, knowledge_radius, current_position, environment): <NEW_LINE> <INDENT> nrow, ncol = environment.shape <NEW_LINE> x_pos, y_pos = current_position <NEW_LINE> new_x = x_pos + int(random.uniform(-knowledge_radius, knowledge_radius)) <NEW_LINE> new_y = y_pos + int(random.uniform(-knowledge_radius, knowledge_radius)) <NEW_LINE> if new_x < 0 or new_x > ncol-1 or new_y < 0 or new_y > nrow-1: <NEW_LINE> <INDENT> return (x_pos, y_pos) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (new_x, new_y) <NEW_LINE> <DEDENT> <DEDENT> @abstractmethod <NEW_LINE> def strategy(self, household_id): <NEW_LINE> <INDENT> return random.randint(-1, 1)
Implements a simple model for household decision-making. AbstractModel inherits ABC (Abstract Base Class), which provides the @abstractmethod decorator. The AbstractModel must be inherited by the AgentModel, which must override all abstract methods of the AbstractModel class.
6259907555399d3f05627e67
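The docstring above says a concrete AgentModel must inherit AbstractModel and override every abstract method; a minimal sketch of such a subclass, which simply falls back to the random defaults defined in the base class, might look like this (only the AgentModel name comes from the docstring, the rest is illustrative).

class AgentModel(AbstractModel):
    # Thin concrete model that reuses the random behaviour of the base class.
    def generate_competency(self, min_competency):
        return super().generate_competency(min_competency)

    def generate_ambition(self, min_ambition):
        return super().generate_ambition(min_ambition)

    def generate_position(self, environment):
        return super().generate_position(environment)

    def choose_claim_field(self, knowledge_radius, current_position, environment):
        return super().choose_claim_field(knowledge_radius, current_position, environment)

    def relocate(self, knowledge_radius, current_position, environment):
        return super().relocate(knowledge_radius, current_position, environment)

    def strategy(self, household_id):
        return super().strategy(household_id)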
class BaseTestData(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.app = create_app('testing') <NEW_LINE> with self.app.app_context(): <NEW_LINE> <INDENT> self.client = self.app.test_client() <NEW_LINE> <DEDENT> self.admin_signin = self.client.post( "/api/v2/auth/signin", json=admin_login ) <NEW_LINE> self.admin_token = self.admin_signin.get_json()["data"][0]["token"] <NEW_LINE> self.post_data = self.client.post( "/api/v2/parties", json=party_holder, headers={ "Authorization": "Bearer {}".format(self.admin_token) } ) <NEW_LINE> self.office_data = self.client.post( '/api/v2/offices', json=office_holder, headers={ "Authorization": "Bearer {}".format(self.admin_token) } ) <NEW_LINE> self.user_data = self.client.post( '/api/v2/auth/signup', json=user ) <NEW_LINE> self.login_data = self.client.post( '/api/v2/auth/signin', json=user_logins ) <NEW_LINE> self.user_token = self.login_data.get_json()["data"][0]["token"] <NEW_LINE> self.office_application = self.client.post( '/api/v2/office/application', json=application, headers={ "Authorization": "Bearer {}".format(self.user_token) } ) <NEW_LINE> self.register_candidate = self.client.post( '/api/v2/office/1/register', json=candidate, headers={ "Authorization": "Bearer {}".format(self.admin_token) } ) <NEW_LINE> self.cast_vote = self.client.post( '/api/v2/votes', json=vote, headers={ "Authorization": "Bearer {}".format(self.user_token) } ) <NEW_LINE> self.office_results = self.client.get( '/api/v2/office/1/result', headers={ "Authorization": "Bearer {}".format(self.user_token) } ) <NEW_LINE> self.petition = self.client.post( '/api/v2/petitions', json=petition, headers={ "Authorization": "Bearer {}".format(self.user_token) } ) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> with self.app.app_context(): <NEW_LINE> <INDENT> db = AppDatabase() <NEW_LINE> db.drop_all()
Base test data shared by the test cases: signs in an admin and a regular user and creates sample parties, offices, candidate registrations, votes and petitions
625990755fdd1c0f98e5f8cb
class AsyncWiredDimmer3(WiredDimmer3, AsyncDimmer): <NEW_LINE> <INDENT> pass
HMIPW-DRD3 (Homematic IP Wired Dimming Actuator – 3x channels)
625990757d43ff24874280bb
class TestAuthenticationApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = payoneer_mobile_api.apis.authentication_api.AuthenticationApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_authentication_challenge_authenticate_post(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_authentication_challenge_cancel_post(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_authentication_challenge_get_post(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_authentication_challenge_refuse_sms_post(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_authentication_challenge_sms_post(self): <NEW_LINE> <INDENT> pass
AuthenticationApi unit test stubs
6259907560cbc95b06365a15
class JNTTDockerServerCommon(Common): <NEW_LINE> <INDENT> longdelay = 50 <NEW_LINE> shortdelay = 30
Common tests for servers on docker
6259907597e22403b383c852
class ProxyTypeMtproto(Object): <NEW_LINE> <INDENT> ID = "proxyTypeMtproto" <NEW_LINE> def __init__(self, secret, **kwargs): <NEW_LINE> <INDENT> self.secret = secret <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(q: dict, *args) -> "ProxyTypeMtproto": <NEW_LINE> <INDENT> secret = q.get('secret') <NEW_LINE> return ProxyTypeMtproto(secret)
An MTProto proxy server Attributes: ID (:obj:`str`): ``ProxyTypeMtproto`` Args: secret (:obj:`str`): The proxy's secret in hexadecimal encoding Returns: ProxyType Raises: :class:`telegram.Error`
625990758a349b6b43687ba9
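A tiny sketch of the two construction paths shown above, direct instantiation and read() from a dict; the secret value is made up.

proxy = ProxyTypeMtproto(secret='dd000102030405060708090a0b0c0d0e0f')
same = ProxyTypeMtproto.read({'secret': proxy.secret})
assert same.secret == proxy.secret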
class simplePDBatom: <NEW_LINE> <INDENT> def __init__(self,line=""): <NEW_LINE> <INDENT> if (line): <NEW_LINE> <INDENT> self.atnum = int(line[6:6+5]) <NEW_LINE> self.atname = line[12:12+4].strip() <NEW_LINE> self.alt = line[16:16+1].strip() <NEW_LINE> self.resname = line[17:17+3].strip().replace('+','') <NEW_LINE> self.chain = line[21:21+1].strip() <NEW_LINE> self.resnum = int(line[22:22+4]) <NEW_LINE> self.insert = line[26:26+1].strip() <NEW_LINE> self.x = float(line[30:30+8]) <NEW_LINE> self.y = float(line[38:38+8]) <NEW_LINE> self.z = float(line[46:46+8]) <NEW_LINE> self.occ = float(line[54:54+6]) <NEW_LINE> self.temp = float(line[60:60+6]) <NEW_LINE> self.segid = line[72:72+4].strip() <NEW_LINE> self.elem = line[76:76+2].strip() <NEW_LINE> self.charge = line[78:78+2].strip() <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> s = "ATOM %5d %-4s%s%3s %s%4d%s %8.3f%8.3f%8.3f%6.2f%6.2f %4s%-2s%2s"% (self.atnum, self.atname, self.alt, self.resname, self.chain, self.resnum, self.insert, self.x, self.y, self.z, self.occ, self.temp, self.segid, self.elem, self.charge); <NEW_LINE> return(s) <NEW_LINE> <DEDENT> def res3to1(self,resname_3letters): <NEW_LINE> <INDENT> map = {"ALA":"A", "CYS":"C", "ASP":"D", "GLU":"E", "PHE":"F", "GLY":"G", "HIS":"H", "ILE":"I", "LYS":"K", "LEU":"L", "MET":"M", "ASN":"N", "PRO":"P", "GLN":"Q", "ARG":"R", "SER":"S", "THR":"T", "VAL":"V", "TRP":"W", "TYR":"Y"} <NEW_LINE> if (map.has_key(resname_3letters)): <NEW_LINE> <INDENT> return(map[resname_3letters]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return(None)
Trivial atom type that knows how to parse an ATOM line from a pdb file, and how to print itself out again
6259907532920d7e50bc7997
class Settings(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 800 <NEW_LINE> self.screen_height = 600 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> self.ship_speed_factor = 1.5 <NEW_LINE> self.bullet_speed_factor = 1 <NEW_LINE> self.bullet_width = 3 <NEW_LINE> self.bullet_height = 15 <NEW_LINE> self.bullet_color = 60, 60, 60
Store the game's settings: screen size, background color, ship speed, and bullet size, speed and color
625990753346ee7daa338308
class HardwareManagerNotFound(RESTError): <NEW_LINE> <INDENT> message = 'No valid HardwareManager found.' <NEW_LINE> def __init__(self, details=None): <NEW_LINE> <INDENT> if details is not None: <NEW_LINE> <INDENT> details = details <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> details = self.message <NEW_LINE> <DEDENT> super(HardwareManagerNotFound, self).__init__(details)
Error raised when no valid HardwareManager can be found.
62599075ad47b63b2c5a919e
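A short sketch of raising the error with custom details; the lookup function and its argument are hypothetical.

def pick_hardware_manager(managers):
    # Hypothetical lookup: fail loudly when nothing usable was discovered.
    if not managers:
        raise HardwareManagerNotFound('no usable hardware managers were discovered')
    return managers[0]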
class Event(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=100) <NEW_LINE> created_date = models.DateTimeField(default=datetime.datetime.now, blank=True, null=True) <NEW_LINE> description = models.TextField(max_length=5000, blank=True, null=True) <NEW_LINE> location = models.CharField(max_length=100) <NEW_LINE> creator = models.ForeignKey(User, blank=True, null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title
Represents an event with a title, description, location and creator
6259907556b00c62f0fb4221
class EditMediaStreamInfo(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.StreamId = None <NEW_LINE> self.StartTime = None <NEW_LINE> self.EndTime = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.StreamId = params.get("StreamId") <NEW_LINE> self.StartTime = params.get("StartTime") <NEW_LINE> self.EndTime = params.get("EndTime")
Video stream editing information
62599075f548e778e596cedf
class rcase(object): <NEW_LINE> <INDENT> def __init__(self, chan): <NEW_LINE> <INDENT> self.chan = chan <NEW_LINE> <DEDENT> def ready(self): <NEW_LINE> <INDENT> return self.chan.recv_ready() <NEW_LINE> <DEDENT> def exec_(self): <NEW_LINE> <INDENT> return self.chan.recv()
A case that will ``chan.recv()`` when the channel is able to receive.
62599075dc8b845886d54f0a
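A sketch of the select-style polling loop this case type appears intended for, assuming a paramiko-like channel object that provides recv_ready() and recv().

def select_ready(cases):
    # Return the payload of the first case that is ready to receive, else None.
    for case in cases:
        if case.ready():
            return case.exec_()
    return None

# data = select_ready([rcase(chan)])   # chan: an already-open paramiko channel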
class Image(BaseModel): <NEW_LINE> <INDENT> img_type = models.CharField( '图片类型', null=True, max_length=30, default='', choices=IMAGE_TYPE) <NEW_LINE> order = models.IntegerField('排序位置', default=0) <NEW_LINE> active = models.BooleanField('生效', default=True) <NEW_LINE> name = models.CharField('名称', max_length=255, default='') <NEW_LINE> key = models.CharField("图片ID KEY", max_length=50, default="") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'comic_image' <NEW_LINE> verbose_name_plural = '图片' <NEW_LINE> ordering = ['order']
Image
62599075379a373c97d9a972
class Person(object): <NEW_LINE> <INDENT> def __init__(self, name, age, gender, convert, years_baptized, hobby): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.age = age <NEW_LINE> self.gender = gender <NEW_LINE> self.convert = convert <NEW_LINE> self.years_baptized = years_baptized <NEW_LINE> self.hobby = hobby <NEW_LINE> <DEDENT> def stats(self, faith, hope, charity, obedience, patience, knowledge, humility, spirit, endurance, charisma ): <NEW_LINE> <INDENT> self.FTH = faith <NEW_LINE> self.HOP = hope <NEW_LINE> self.CHA = charity <NEW_LINE> self.OBD = obedience <NEW_LINE> self.PAT = patience <NEW_LINE> self.KNW = knowledge <NEW_LINE> self.HUM= humility <NEW_LINE> self.SPR = spirit <NEW_LINE> self.END= endurance <NEW_LINE> self.CHR= charisma <NEW_LINE> <DEDENT> def skills(self, scripture_mastery, testify_topics, chat): <NEW_LINE> <INDENT> self.scripture_mastery = scripture_mastery <NEW_LINE> self.testify_topics = testify_topics <NEW_LINE> self.chat = chat <NEW_LINE> <DEDENT> def print_stats(self): <NEW_LINE> <INDENT> stats_list = {"Faith" : self.FTH, "Hope" : self.HOP, "Charity" : self.CHA, "Obedience" : self.OBD, "Patience" : self.PAT, "Knowledge" : self.KNW, "Humility" : self.HUM, "Spirit" : self.SPR, "Endurance" : self.END, "Charisma" : self.CHR} <NEW_LINE> return stats_list
Has the attributes of a missionary
625990757c178a314d78e893
class ExclusionOfLiability(Base): <NEW_LINE> <INDENT> __table_args__ = {'schema': app_schema_name} <NEW_LINE> __tablename__ = 'exclusion_of_liability' <NEW_LINE> id = sa.Column(sa.Integer, primary_key=True, autoincrement=False) <NEW_LINE> title = sa.Column(JSONType, nullable=False) <NEW_LINE> content = sa.Column(JSONType, nullable=False)
The bucket into which you can throw all addresses that the application should be able to use for the getegrid webservice. This is a bypass for the moment. In the end it seems far more flexible to bind a service here, but if you like you can use it. Attributes: id (int): The identifier. This is used in the database only and must not be set manually. If you don't like it, don't worry about it. title (str): The title which the exclusion of liability item has. content (str): The content which the exclusion of liability item has.
62599075167d2b6e312b8239
class DataModelV2: <NEW_LINE> <INDENT> def __init__(self, relatedness_path, unigrams_path, trigrams_path): <NEW_LINE> <INDENT> self.unigram = {} <NEW_LINE> self.trigram = {} <NEW_LINE> self.relatedness = {} <NEW_LINE> self.word_vector = {} <NEW_LINE> self._parse_relatedness(relatedness_path) <NEW_LINE> self._parse_unigrams(unigrams_path) <NEW_LINE> self._parse_trigrams(trigrams_path) <NEW_LINE> print('loaded justine data model') <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.trigram) <NEW_LINE> <DEDENT> def _parse_relatedness(self, path): <NEW_LINE> <INDENT> print('parsing relatedness data...') <NEW_LINE> data = data2numpy(path) <NEW_LINE> for row in data: <NEW_LINE> <INDENT> words = [row[0].lower(), row[1].lower()] <NEW_LINE> words.sort() <NEW_LINE> words = tuple(words) <NEW_LINE> relatedness = float(row[2]) <NEW_LINE> self.relatedness[words] = relatedness <NEW_LINE> <DEDENT> print('done') <NEW_LINE> return <NEW_LINE> <DEDENT> def _parse_unigrams(self, path): <NEW_LINE> <INDENT> print('parsing unigram data...') <NEW_LINE> data = data2numpy(path) <NEW_LINE> for row in data: <NEW_LINE> <INDENT> word = row[0].lower() <NEW_LINE> unigram = float(row[1]) <NEW_LINE> self.unigram[word] = unigram <NEW_LINE> <DEDENT> print('done') <NEW_LINE> return <NEW_LINE> <DEDENT> def _parse_trigrams(self, path): <NEW_LINE> <INDENT> print('parsing trigram data...') <NEW_LINE> data = data2numpy(path) <NEW_LINE> for row in data: <NEW_LINE> <INDENT> idx = int(row[0]) <NEW_LINE> word = row[1].lower() <NEW_LINE> m1_trigram = float(row[2]) <NEW_LINE> m2_trigram = float(row[3]) <NEW_LINE> if idx not in self.trigram: <NEW_LINE> <INDENT> self.trigram[idx] = {} <NEW_LINE> self.word_vector[idx] = [] <NEW_LINE> self.trigram[idx]['m1'] = [] <NEW_LINE> self.trigram[idx]['m2'] = [] <NEW_LINE> <DEDENT> self.trigram[idx]['m1'].append(m1_trigram) <NEW_LINE> self.trigram[idx]['m2'].append(m2_trigram) <NEW_LINE> self.word_vector[idx].append(word) <NEW_LINE> <DEDENT> print('done') <NEW_LINE> return
Class for parsing the cleaned relatedness, unigram and trigram data. For each sentence in the puns dataset [data-agg.csv], we have precomputed relatedness and trigram data. We just pass this data for a sentence to a probabilistic model which computes ambiguity and distinctiveness for a given pun/nonpun. Note: Word vector for each sentence is extracted from relatedness data.
625990757cff6e4e811b7390
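A construction sketch for DataModelV2; the CSV paths are placeholders for the cleaned exports the docstring describes, and the data2numpy helper used by the parsers is assumed to be importable alongside the class.

model = DataModelV2(
    relatedness_path='relatedness.csv',   # placeholder paths
    unigrams_path='unigrams.csv',
    trigrams_path='trigrams.csv',
)
print(len(model))                        # number of sentences with trigram data
print(model.unigram.get('pun', 0.0))     # unigram probability lookup, 0.0 if unseen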
class UnknownUser(User): <NEW_LINE> <INDENT> @property <NEW_LINE> def name(self) -> Optional[str]: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def hasPrivilege(self, priv: str) -> bool: <NEW_LINE> <INDENT> return False
Anonymous user who has no privileges.
625990758e7ae83300eea9e3
class WorkQueueTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> os.mkdir(TMP) <NEW_LINE> self.queuename = os.path.join(TMP, "wq") <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> shutil.rmtree(TMP) <NEW_LINE> <DEDENT> def test_constructor(self): <NEW_LINE> <INDENT> wqueue = obsticket.WorkQueue(self.queuename) <NEW_LINE> self.assertTrue(os.path.exists(self.queuename + ".curr")) <NEW_LINE> self.assertTrue(os.path.exists(self.queuename + ".tail")) <NEW_LINE> open(self.queuename + ".curr", "w").write("1") <NEW_LINE> open(self.queuename + ".tail", "w").write("2") <NEW_LINE> wqueue = obsticket.WorkQueue(self.queuename) <NEW_LINE> self.assertEqual(wqueue.curr, 1) <NEW_LINE> self.assertEqual(wqueue.tail, 2) <NEW_LINE> <DEDENT> def test_operations(self): <NEW_LINE> <INDENT> wqueue = obsticket.WorkQueue(self.queuename) <NEW_LINE> self.assertRaises(obsticket.QueueEmpty, wqueue.head) <NEW_LINE> self.assertRaises(obsticket.QueueNoNext, wqueue.next) <NEW_LINE> self.assertTrue(wqueue.add("work 1")) <NEW_LINE> self.assertFalse(wqueue.add("work 2")) <NEW_LINE> self.assertEqual(wqueue.head(), "work 1") <NEW_LINE> self.assertEqual(wqueue.next(), "work 2") <NEW_LINE> self.assertEqual(wqueue.head(), "work 2") <NEW_LINE> self.assertRaises(obsticket.QueueEmpty, wqueue.next)
Test case for obsticket.WorkQueue.
6259907591f36d47f2231b37
class CondorConfigValException(Exception): <NEW_LINE> <INDENT> def __init__(self,src): <NEW_LINE> <INDENT> self.source = src
Raised if we couldn't read a condor_config_val
62599075460517430c432d01
class PreprocessorIfCondition: <NEW_LINE> <INDENT> file = None <NEW_LINE> linenr = None <NEW_LINE> column = None <NEW_LINE> E = None <NEW_LINE> result = None <NEW_LINE> def __init__(self, element): <NEW_LINE> <INDENT> _load_location(self, element) <NEW_LINE> self.E = element.get('E') <NEW_LINE> self.result = int(element.get('result')) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> attrs = ["file", "linenr", "column", "E", "result"] <NEW_LINE> return "{}({})".format( "PreprocessorIfCondition", ", ".join(("{}={}".format(a, repr(getattr(self, a))) for a in attrs)) )
Information about #if/#elif conditions
62599075aad79263cf430109
class YoutubeVideoRegistrationError(enum.IntEnum): <NEW_LINE> <INDENT> UNSPECIFIED = 0 <NEW_LINE> UNKNOWN = 1 <NEW_LINE> VIDEO_NOT_FOUND = 2 <NEW_LINE> VIDEO_NOT_ACCESSIBLE = 3
Enum describing YouTube video registration errors. Attributes: UNSPECIFIED (int): Enum unspecified. UNKNOWN (int): The received error code is not known in this version. VIDEO_NOT_FOUND (int): Video to be registered wasn't found. VIDEO_NOT_ACCESSIBLE (int): Video to be registered is not accessible (e.g. private).
6259907555399d3f05627e6a
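A tiny sketch of mapping a raw numeric error code onto the enum above.

error = YoutubeVideoRegistrationError(2)
print(error.name)                                              # VIDEO_NOT_FOUND
print(error is YoutubeVideoRegistrationError.VIDEO_NOT_FOUND)  # True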
class WidgetSerializer(DynamicFieldsMixin, serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Widget <NEW_LINE> fields = ('__all__') <NEW_LINE> <DEDENT> def validate_analysis_framework(self, analysis_framework): <NEW_LINE> <INDENT> if not analysis_framework.can_modify(self.context['request'].user): <NEW_LINE> <INDENT> raise serializers.ValidationError('Invalid Analysis Framework') <NEW_LINE> <DEDENT> return analysis_framework
Widget Model Serializer
625990758a349b6b43687bab
class PyAutopep8(Package): <NEW_LINE> <INDENT> homepage = "https://github.com/hhatto/autopep8" <NEW_LINE> url = "https://github.com/hhatto/autopep8/archive/ver1.2.2.tar.gz" <NEW_LINE> version('1.2.2', 'def3d023fc9dfd1b7113602e965ad8e1') <NEW_LINE> extends('python') <NEW_LINE> depends_on('py-setuptools', type='build') <NEW_LINE> depends_on('py-pep8', type=nolink) <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> python('setup.py', 'install', '--prefix=%s' % prefix)
Automatic pep8 formatter
62599075ad47b63b2c5a91a0
class AvailableProvidersListCountry(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'country_name': {'key': 'countryName', 'type': 'str'}, 'providers': {'key': 'providers', 'type': '[str]'}, 'states': {'key': 'states', 'type': '[AvailableProvidersListState]'}, } <NEW_LINE> def __init__( self, *, country_name: Optional[str] = None, providers: Optional[List[str]] = None, states: Optional[List["AvailableProvidersListState"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(AvailableProvidersListCountry, self).__init__(**kwargs) <NEW_LINE> self.country_name = country_name <NEW_LINE> self.providers = providers <NEW_LINE> self.states = states
Country details. :param country_name: The country name. :type country_name: str :param providers: A list of Internet service providers. :type providers: list[str] :param states: List of available states in the country. :type states: list[~azure.mgmt.network.v2020_07_01.models.AvailableProvidersListState]
6259907523849d37ff852a09
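A small construction sketch with made-up values; the nested AvailableProvidersListState type comes from the same generated models module and is left out here.

country = AvailableProvidersListCountry(
    country_name='United States',
    providers=['Hypothetical ISP A', 'Hypothetical ISP B'],
    states=None,   # optionally a list of AvailableProvidersListState
)
print(country.country_name, country.providers)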
class FitFailedError(RuntimeError, TypeError): <NEW_LINE> <INDENT> pass
Error for failed estimator 'fit' call. Inherits type error to accommodate Scikit-learn expectation of a ``TypeError`` on failed array checks in estimators.
625990752ae34c7f260aca37
class clsUser(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'user' <NEW_LINE> U_fullname = db.Column(db.String(50)) <NEW_LINE> U_username = db.Column(db.String(16), primary_key = True, index = True) <NEW_LINE> U_password = db.Column(db.String(200)) <NEW_LINE> U_email = db.Column(db.String(30), unique = True) <NEW_LINE> U_idActor = db.Column(db.Integer, db.ForeignKey('actors.A_idActor')) <NEW_LINE> def __init__(self, fullname, username, password, email, idActor): <NEW_LINE> <INDENT> self.U_fullname = fullname <NEW_LINE> self.U_username = username <NEW_LINE> self.U_password = password <NEW_LINE> self.U_email = email <NEW_LINE> self.U_idActor = idActor <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<fullname %r, username %r, email %r>' % (self.U_fullname, self.U_username, self.U_email)
Class that defines the User model
625990755fdd1c0f98e5f8cf
class IterationTwoTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.lst_of_obs = [("A", "B", "B", "A")] * 10 + [("B", "A", "B")] * 20 <NEW_LINE> self.pi = hashdict([("s", 0.846,), ("t", 0.154)]) <NEW_LINE> self.A = LMatrix(rlabels = ["s", "t"], data = np.array([ [0.298, 0.702], [0.106, 0.894] ])) <NEW_LINE> self.B = LMatrix(rlabels = ["s", "t"], clabels = ["A", "B"], data = np.array([ [0.357, 0.643], [0.4292, 0.5708], ]) ) <NEW_LINE> <DEDENT> def test_pi(self): <NEW_LINE> <INDENT> _, _, pi = one_iter(self.lst_of_obs, self.A, self.B, self.pi) <NEW_LINE> for actual, expected in zip( (pi[s] for s in self.A.rlabels), [0.841, 0.159]): <NEW_LINE> <INDENT> self.assertAlmostEqual(actual, expected, places = 1) <NEW_LINE> <DEDENT> <DEDENT> def test_A(self): <NEW_LINE> <INDENT> A, _, _ = one_iter(self.lst_of_obs, self.A, self.B, self.pi) <NEW_LINE> for actual, expected in zip(A.flatten(), [0.292, 0.708, 0.109, 0.891]): <NEW_LINE> <INDENT> self.assertAlmostEqual(actual, expected, places = 2) <NEW_LINE> <DEDENT> <DEDENT> def test_B(self): <NEW_LINE> <INDENT> _, B, _ = one_iter(self.lst_of_obs, self.A, self.B, self.pi) <NEW_LINE> for actual, expected in zip(B.flatten(), [0.3624, 0.6376, 0.4252, 0.5748]): <NEW_LINE> <INDENT> self.assertAlmostEqual(actual, expected, places = 2) <NEW_LINE> <DEDENT> <DEDENT> def test_labels(self): <NEW_LINE> <INDENT> A, B, _ = one_iter(self.lst_of_obs, self.A, self.B, self.pi) <NEW_LINE> self.assertEqual(A.rlabels, self.A.rlabels) <NEW_LINE> self.assertEqual(B.rlabels, self.B.rlabels) <NEW_LINE> self.assertEqual(A.clabels, self.A.clabels) <NEW_LINE> self.assertEqual(B.clabels, self.B.clabels)
Continuing from the first iteration, these tests cover the second iteration
62599075009cb60464d02e8e