code (string, lengths 4–4.48k) | docstring (string, lengths 1–6.45k) | _id (string, length 24) |
---|---|---|
class Franc(Money): <NEW_LINE> <INDENT> def times(self, multiplier: int) -> Money: <NEW_LINE> <INDENT> return Franc(self._amount * multiplier) | Represents the Franc currency. | 62599081442bda511e95dabd |
class Climb(FlightPhaseNode): <NEW_LINE> <INDENT> def derive(self, toc=KTI('Top Of Climb'), eot=KTI('Climb Start')): <NEW_LINE> <INDENT> toc_list = [] <NEW_LINE> for this_toc in toc: <NEW_LINE> <INDENT> toc_list.append(this_toc.index) <NEW_LINE> <DEDENT> for this_eot in eot: <NEW_LINE> <INDENT> eot = this_eot.index <NEW_LINE> closest_toc = None <NEW_LINE> for this_toc in toc_list: <NEW_LINE> <INDENT> if (eot < this_toc and (this_toc < closest_toc or closest_toc is None)): <NEW_LINE> <INDENT> closest_toc = this_toc <NEW_LINE> <DEDENT> <DEDENT> self.create_phase(slice(eot, closest_toc)) | This phase goes from 1000 feet (top of Initial Climb) in the climb to the
top of climb | 6259908126068e7796d4e40e |
class _Options(object): <NEW_LINE> <INDENT> host = 'localhost' <NEW_LINE> port = 27017 <NEW_LINE> indices = () <NEW_LINE> database = None <NEW_LINE> collection = None <NEW_LINE> username = None <NEW_LINE> password = None <NEW_LINE> auto_index = True <NEW_LINE> collection_class = Collection <NEW_LINE> field_map = () <NEW_LINE> interface = False <NEW_LINE> def __init__(self, meta): <NEW_LINE> <INDENT> if meta is not None: <NEW_LINE> <INDENT> self.__dict__.update(meta.__dict__) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def _configure(cls, **defaults): <NEW_LINE> <INDENT> for attr in defaults: <NEW_LINE> <INDENT> setattr(cls, attr, defaults[attr]) | Container class for model metadata.
You shouldn't modify this class directly; :func:`_configure` should
be used instead. | 625990813346ee7daa3383c9 |
class Phdr64(Struct): <NEW_LINE> <INDENT> type = enums.PType <NEW_LINE> flags = Int2 <NEW_LINE> offset, vaddr, paddr, filesz, memsz, align = 6*(Int3,) | 64 bit program segment header | 6259908197e22403b383c9ce |
class PersonNotFoundException(Exception): <NEW_LINE> <INDENT> pass | Raised when the given person was not found on the endpoint | 62599081bf627c535bcb2fa0 |
class ExcessStruct(Struct, size=10): <NEW_LINE> <INDENT> _layout_ = dict( a=dict( offset=0, width=32 ) ) <NEW_LINE> a: int | Specified size is larger than fields | 625990813617ad0b5ee07c1e |
class Profile(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User) <NEW_LINE> activation_key = models.CharField(max_length=255, help_text="E-mail activation key.") <NEW_LINE> key_expires = models.DateTimeField(null=True, help_text="Expiration date of activation key.") <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> verbose_name = "User Profile" <NEW_LINE> verbose_name_plural = "User Profiles" | Holds additional profile fields of every User like the API keys.
Can be retrieved by the method get_profile() of the User class. | 62599081be7bc26dc9252bbd |
class itkRGBToLuminanceImageFilterIRGBUS3IUS3_Superclass(itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBUS3IUS3): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkRGBToLuminanceImageFilterPython.itkRGBToLuminanceImageFilterIRGBUS3IUS3_Superclass___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def GetFunctor(self, *args): <NEW_LINE> <INDENT> return _itkRGBToLuminanceImageFilterPython.itkRGBToLuminanceImageFilterIRGBUS3IUS3_Superclass_GetFunctor(self, *args) <NEW_LINE> <DEDENT> def SetFunctor(self, *args): <NEW_LINE> <INDENT> return _itkRGBToLuminanceImageFilterPython.itkRGBToLuminanceImageFilterIRGBUS3IUS3_Superclass_SetFunctor(self, *args) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkRGBToLuminanceImageFilterPython.delete_itkRGBToLuminanceImageFilterIRGBUS3IUS3_Superclass <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkRGBToLuminanceImageFilterPython.itkRGBToLuminanceImageFilterIRGBUS3IUS3_Superclass_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkRGBToLuminanceImageFilterPython.itkRGBToLuminanceImageFilterIRGBUS3IUS3_Superclass_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkRGBToLuminanceImageFilterIRGBUS3IUS3_Superclass.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New) | Proxy of C++ itkRGBToLuminanceImageFilterIRGBUS3IUS3_Superclass class | 625990817cff6e4e811b7510 |
class EntryForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Entry <NEW_LINE> fields = ['text'] <NEW_LINE> labels = {'text': ''} <NEW_LINE> widgets = {'text': forms.Textarea(attrs={'cols': 80})} | Class for entry forms. | 6259908197e22403b383c9cf |
class CipherTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_one_to_one(self): <NEW_LINE> <INDENT> self.assertTrue(sub_cipher('toot', 'peep')) <NEW_LINE> <DEDENT> def test_one_to_two_correspondence(self): <NEW_LINE> <INDENT> self.assertFalse(sub_cipher('lambda', 'school')) <NEW_LINE> <DEDENT> def test_two_to_one_correspondence(self): <NEW_LINE> <INDENT> self.assertFalse(sub_cipher('school', 'lambda')) <NEW_LINE> <DEDENT> def test_unequal_length(self): <NEW_LINE> <INDENT> self.assertFalse(sub_cipher('o', 'lambda')) <NEW_LINE> <DEDENT> def test_empty_strings(self): <NEW_LINE> <INDENT> self.assertTrue(sub_cipher('', '')) | Run several error tests | 625990811f5feb6acb1646c9 |
class IStompServerProtocolFactory(IStompProtocolFactory): <NEW_LINE> <INDENT> pass | Marker interface for a stomp server protocol factory | 625990815166f23b2e244ea8 |
class TestSimpleChanges: <NEW_LINE> <INDENT> def test1(self, spiketrain, weights): <NEW_LINE> <INDENT> neurons, timesteps = spiketrain.shape <NEW_LINE> weights_before = weights.copy() <NEW_LINE> last_spike = spiketrain[:,-1].reshape((neurons, 1)) <NEW_LINE> suggested_weights = np.where(weights_before != 0, weights_before + last_spike.T, weights_before) <NEW_LINE> epsilon = np.finfo(float).eps <NEW_LINE> model = learning.STDP(eta=1., w_in=0., w_out=1., tau=epsilon, window_size=5, verbose=True) <NEW_LINE> model.weight_change(spiketrain, weights, timesteps) <NEW_LINE> assert np.array_equal(suggested_weights, weights) <NEW_LINE> <DEDENT> def test2(self, spiketrain, weights): <NEW_LINE> <INDENT> neurons, timesteps = spiketrain.shape <NEW_LINE> weights_before = weights.copy() <NEW_LINE> last_spike = spiketrain[:,-1].reshape((neurons, 1)) <NEW_LINE> suggested_weights = np.where(weights_before != 0, weights_before + last_spike, weights_before) <NEW_LINE> epsilon = np.finfo(float).eps <NEW_LINE> model = learning.STDP(eta=1., w_in=1., w_out=0., tau=epsilon, window_size=5) <NEW_LINE> model.weight_change(spiketrain, weights, timesteps) <NEW_LINE> assert np.array_equal(suggested_weights, weights) | Test simple changes | 6259908166673b3332c31ecf |
class MenuBar(Frame): <NEW_LINE> <INDENT> def __init__(self, boss=None): <NEW_LINE> <INDENT> Frame.__init__(self, borderwidth=2, relief=GROOVE) <NEW_LINE> file_menu = Menubutton(self, text='File') <NEW_LINE> file_menu.pack(side=LEFT, padx=5) <NEW_LINE> me1 = Menu(file_menu) <NEW_LINE> me1.add_command(label='Restart', underline=0, command=boss.reset) <NEW_LINE> me1.add_command(label='Quit', underline=0, command=boss.quit) <NEW_LINE> me1.add_command(label='Undo', underline=0, command=boss.undo) <NEW_LINE> file_menu.configure(menu=me1) <NEW_LINE> help_menu = Menubutton(self, text='Help') <NEW_LINE> help_menu.pack(side=LEFT, padx=5) <NEW_LINE> me1 = Menu(help_menu) <NEW_LINE> me1.add_command(label='Principe of the game', underline=0, command=boss.principe) <NEW_LINE> me1.add_command(label='By the way ...', underline=0, command=boss.by_the_way) <NEW_LINE> help_menu.configure(menu=me1) <NEW_LINE> option_menu = Menubutton(self, text='Option') <NEW_LINE> option_menu.pack(side=LEFT, padx=5) <NEW_LINE> me1 = Menu(option_menu) <NEW_LINE> me1.add_command(label='Normal', underline=0, command=boss.normal) <NEW_LINE> me1.add_command(label='Split', underline=0, command=boss.split) <NEW_LINE> option_menu.configure(menu=me1) | bar of menu rolling | 62599081d268445f2663a8c6 |
class AlgorithmResultDatabase: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.results = {} <NEW_LINE> self.results_dataset = {} <NEW_LINE> <DEDENT> def get_result(self, algorithm_name: str, test_case: str): <NEW_LINE> <INDENT> return self.results[algorithm_name][test_case] <NEW_LINE> <DEDENT> def has_result(self, algorithm_name: str, test_case: str): <NEW_LINE> <INDENT> return test_case in self.results[algorithm_name] <NEW_LINE> <DEDENT> def list_results(self, algorithm_name: str) -> Sequence[str]: <NEW_LINE> <INDENT> return self.results[algorithm_name].keys() <NEW_LINE> <DEDENT> def set_result(self, algorithm_name: str, test_case: str, value: Any, dataset=None): <NEW_LINE> <INDENT> self.results.setdefault(algorithm_name, {}) <NEW_LINE> self.results_dataset.setdefault(algorithm_name, {}) <NEW_LINE> self.results[algorithm_name][test_case] = value <NEW_LINE> self.results_dataset[algorithm_name][test_case] = dataset <NEW_LINE> <DEDENT> def representation(self, object_name="results_database"): <NEW_LINE> <INDENT> for algorithm in self.results: <NEW_LINE> <INDENT> for test_case in self.results[algorithm]: <NEW_LINE> <INDENT> value = self.get_result(algorithm, test_case) <NEW_LINE> vv = repr(value) <NEW_LINE> print(f"{object_name}.set_result('{algorithm}','{test_case}',{vv})") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def investigate_case(self, algorithm_name, test_case): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def investigate_algorithm(self, algorithm_name): <NEW_LINE> <INDENT> raise NotImplementedError() | Class to store and retrieve results from algorithms | 62599081656771135c48ad98 |
class TestPatchTodoItem(APITestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> response = createItem(self.client) <NEW_LINE> self.assertEqual(TodoItem.objects.get().completed, False) <NEW_LINE> url = response['Location'] <NEW_LINE> data = {'title': 'perform unit testing', 'completed': True} <NEW_LINE> self.response = self.client.patch(url, data, format='json') <NEW_LINE> <DEDENT> def test_received_200_ok_status_code(self): <NEW_LINE> <INDENT> self.assertEqual(self.response.status_code, status.HTTP_200_OK) <NEW_LINE> <DEDENT> def test_item_was_updated(self): <NEW_LINE> <INDENT> self.assertEqual(TodoItem.objects.get().completed, True) | Ensure that we can update an existing todo item using PATCH | 625990815fdd1c0f98e5fa50 |
class StatusResponse: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.invalid_data = {"Error": "Invalid data accepted"} <NEW_LINE> self.user_not_exist = {"Info": "The user does not exist"} <NEW_LINE> self.no_orders = {"Info": "The user has no orders"} <NEW_LINE> self.no_books = {"Info": "There are no books in this store"} <NEW_LINE> self.shop_not_exist = {"Info": "The store does not exist"} <NEW_LINE> self.order_added = {"Ok": "The order added successfully"} <NEW_LINE> self.order_no_added = {"Error": "The order is not added"} | The class contains some statuses
that the server will return
to users. | 62599081aad79263cf43028c |
class SNLinear(Linear): <NEW_LINE> <INDENT> def __init__(self, in_size, out_size, use_gamma=False, nobias=False, initialW=None, initial_bias=None, Ip=1): <NEW_LINE> <INDENT> self.Ip = Ip <NEW_LINE> self.u = None <NEW_LINE> self.use_gamma = use_gamma <NEW_LINE> super(SNLinear, self).__init__( in_size, out_size, nobias, initialW, initial_bias ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def W_bar(self): <NEW_LINE> <INDENT> sigma, _u, _ = max_sv.max_singular_value(self.W, self.u, self.Ip) <NEW_LINE> sigma = broadcast_to(sigma.reshape((1, 1)), self.W.shape) <NEW_LINE> self.u = _u <NEW_LINE> if hasattr(self, 'gamma'): <NEW_LINE> <INDENT> return broadcast_to(self.gamma, self.W.shape) * self.W / sigma <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.W / sigma <NEW_LINE> <DEDENT> <DEDENT> def _initialize_params(self, in_size): <NEW_LINE> <INDENT> super(SNLinear, self)._initialize_params(in_size) <NEW_LINE> if self.use_gamma: <NEW_LINE> <INDENT> _, s, _ = np.linalg.svd(self.W.data) <NEW_LINE> with self.init_scope(): <NEW_LINE> <INDENT> self.gamma = chainer.Parameter(s[0], (1, 1)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> if self.W.data is None: <NEW_LINE> <INDENT> self._initialize_params(x.size // x.shape[0]) <NEW_LINE> <DEDENT> return linear.linear(x, self.W_bar, self.b) | Linear layer with Spectral Normalization.
Args:
in_size (int): Dimension of input vectors. If ``None``, parameter
initialization will be deferred until the first forward data pass
at which time the size will be determined.
out_size (int): Dimension of output vectors.
wscale (float): Scaling factor of the weight matrix.
bias (float): Initial bias value.
nobias (bool): If ``True``, then this function does not use the bias.
initialW (2-D array): Initial weight value. If ``None``, then this
function uses ``wscale`` to initialize it.
May also be a callable that takes ``numpy.ndarray`` or
``cupy.ndarray`` and edits its value.
initial_bias (1-D array): Initial bias value. If ``None``, then this
function uses ``bias`` to initialize it.
May also be a callable that takes ``numpy.ndarray`` or
``cupy.ndarray`` and edits its value.
use_gamma (bool): If true, apply scalar multiplication to the
normalized weight (i.e. reparameterize).
Ip (int): The number of power iterations for calculating the spectral
norm of the weights.
.. seealso:: :func:`~chainer.functions.linear`
Attributes:
W (~chainer.Variable): Weight parameter.
W_bar (~chainer.Variable): Normalized (Reparametrized) weight parameter.
b (~chainer.Variable): Bias parameter.
u (~array): Current estimation of the right largest singular vector of W.
(optional) gamma (~chainer.Variable): the multiplier parameter. | 625990817cff6e4e811b7512 |
class NotFound(Exception): <NEW_LINE> <INDENT> def __init__(self, details): <NEW_LINE> <INDENT> self.details = details <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.details | A resource was not found (yields a 404 response). | 6259908197e22403b383c9d1 |
class QActionEvent(__PyQt4_QtCore.QEvent): <NEW_LINE> <INDENT> def action(self): <NEW_LINE> <INDENT> return QAction <NEW_LINE> <DEDENT> def before(self): <NEW_LINE> <INDENT> return QAction <NEW_LINE> <DEDENT> def __init__(self, *__args): <NEW_LINE> <INDENT> pass | QActionEvent(int, QAction, QAction before=None)
QActionEvent(QActionEvent) | 625990815166f23b2e244eaa |
class PusherMissingInstanceError(PusherError, KeyError): <NEW_LINE> <INDENT> pass | Error thrown when the instance id used does not exist | 625990817c178a314d78e953 |
@dataclass(frozen=True) <NEW_LINE> class PCMFormat: <NEW_LINE> <INDENT> rate: int <NEW_LINE> sample_fmt: PCMSampleFormat <NEW_LINE> channels: int <NEW_LINE> @property <NEW_LINE> def sample_duration(self) -> float: <NEW_LINE> <INDENT> return 1.0 / self.rate <NEW_LINE> <DEDENT> @property <NEW_LINE> def pyaudio_args(self) -> PyAudioStreamFormatArgs: <NEW_LINE> <INDENT> return PyAudioStreamFormatArgs( rate=self.rate, format=self.sample_fmt.portaudio, channels=self.channels ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def ffmpeg_args(self) -> FFmpegFormatArgs: <NEW_LINE> <INDENT> return FFmpegFormatArgs( ar=self.rate, f=self.sample_fmt.ffmpeg, ac=self.channels ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def ffmpeg_args_nofmt(self) -> MutableMapping[str, Any]: <NEW_LINE> <INDENT> return dict(ar=self.rate, ac=self.channels) <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self) -> int: <NEW_LINE> <INDENT> return self.sample_fmt.width * self.channels <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return f"{self.rate}Hz {self.sample_fmt} {self.channels}ch" | A dataclass to raw PCM format parameters | 62599081091ae35668706711 |
class LayerAdaptor(nn.Module): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> if config.adaptor_type == 'bottleneck': <NEW_LINE> <INDENT> self.self = EfficientAdaptorLayer(config) <NEW_LINE> <DEDENT> if 'film' in config.adaptor_type: <NEW_LINE> <INDENT> self.self = ApplyFiLM(config) <NEW_LINE> <DEDENT> if config.adaptor_type is None: <NEW_LINE> <INDENT> assert False, "should not be initializing adaptor layers if adaptor_type is None" <NEW_LINE> <DEDENT> <DEDENT> def forward(self, x, adaptation_params): <NEW_LINE> <INDENT> gamma, beta, z = [None] * 3 <NEW_LINE> if isinstance(adaptation_params, tuple): <NEW_LINE> <INDENT> gamma, beta = adaptation_params <NEW_LINE> <DEDENT> elif isinstance(adaptation_params, torch.Tensor): <NEW_LINE> <INDENT> z = adaptation_params <NEW_LINE> <DEDENT> return self.self(x, gamma=gamma, beta=beta, z=z) | this can be three things:
- a parametrized MLP on hidden states
- FiLM, where the parameters are amortized (given by a function of a task_embedding)
- low rank transform on hidden states, where parameters amortized as above | 625990817d847024c075deaf |
class MuscleDeleteView(WgerDeleteMixin, DeleteView, WgerPermissionMixin): <NEW_LINE> <INDENT> model = Muscle <NEW_LINE> success_url = reverse_lazy('muscle-overview') <NEW_LINE> permission_required = 'exercises.delete_muscle' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(MuscleDeleteView, self).get_context_data(**kwargs) <NEW_LINE> context['title'] = _(u'Delete muscle %s?') % self.object.name <NEW_LINE> context['form_action'] = reverse('muscle-delete', kwargs={'pk': self.kwargs['pk']}) <NEW_LINE> return context | Generic view to delete an existing muscle | 625990812c8b7c6e89bd52b7 |
class StackedBiLSTMDenseHParams(TrainHParams): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(StackedBiLSTMDenseHParams, self).__init__() <NEW_LINE> self.bilstm_retseq_layer_num = 2 <NEW_LINE> self.state_dim = 300 <NEW_LINE> self.lstm_p_dropout = 0.5 <NEW_LINE> self.kernel_l2_lambda = 1e-5 <NEW_LINE> self.recurrent_l2_lambda = 1e-5 <NEW_LINE> self.bias_l2_lambda = 1e-5 <NEW_LINE> self.activity_l2_lambda = 0 <NEW_LINE> self.unit_reduce = False <NEW_LINE> self.dense_layer_num = 1 <NEW_LINE> self.linear_unit_num = self.state_dim <NEW_LINE> self.dense_p_dropout = 0.4 <NEW_LINE> self.optimizer = RMSprop() <NEW_LINE> self.lr_scheduler = LRSchedulerDoNothing() <NEW_LINE> self.early_stop_monitor = 'val_acc' <NEW_LINE> self.batch_size = 128 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> ret_info = list() <NEW_LINE> ret_info.append('\n================== '+self.current_classname+' ==================\n') <NEW_LINE> ret_info.append('bi-lstm retseq layer num: ' + str(self.bilstm_retseq_layer_num) + '\n') <NEW_LINE> ret_info.append('state dim: ' + str(self.state_dim) + '\n') <NEW_LINE> ret_info.append('lstm dropout proba: ' + str(self.lstm_p_dropout) + '\n\n') <NEW_LINE> ret_info.append('unit reduce: ' + str(self.unit_reduce) + '\n') <NEW_LINE> ret_info.append('dense layer num: ' + str(self.dense_layer_num) + '\n') <NEW_LINE> ret_info.append('linear unit num: ' + str(self.linear_unit_num) + '\n') <NEW_LINE> ret_info.append('dense dropout proba: ' + str(self.dense_p_dropout) + '\n\n') <NEW_LINE> super_str = super(StackedBiLSTMDenseHParams, self).__str__() <NEW_LINE> return ''.join(ret_info) + super_str | The best result is a val_accuracy of about 90.12%. | 6259908123849d37ff852b8b |
class BlogAuthor(models.Model): <NEW_LINE> <INDENT> name = models.ForeignKey(User,on_delete=models.PROTECT) <NEW_LINE> biography = models.TextField() <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name.username <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('blog-author-detail', args=[str(self.id)]) | Model representing a blog author. | 62599081a05bb46b3848be91 |
class Config(object): <NEW_LINE> <INDENT> address = '0.0.0.0' <NEW_LINE> port = 8088 <NEW_LINE> incl_access_control_allow_origin = False <NEW_LINE> incl_access_control_allow_credentials = False <NEW_LINE> validate_callback = None <NEW_LINE> validate_exclude_paths = None <NEW_LINE> mapper_name = None | Config to be passed to the Server
Attributes:
address (str): The address to be used by the server.
port (int): The port to be used by the server.
incl_access_control_allow_origin (bool): Determines if
'Access-Control-Allow-Origin' should be included in the server's
response header or not.
incl_access_control_allow_credentials (bool): Determines if
'Access-Control-Allow-Credentials' should be set to 'true' or
'false' in the server's response header.
validate_callback (function): A callback which will be called for
EVERY request (GET, POST, PUT, DELETE) BEFORE the actual resolved
function will be called.
This callback HAS to return either True (if the request is
allowed), or False (if the request is NOT allowed)
Useful to implement e.g. authentication
validate_exclude_paths (list): A list of url-paths (without host:port)
to be excluded from validation.
e.g. ['/login', '/register']
mapper_name (str): Name of the mapper instance to use. Leave it unchanged
to use the default. | 625990813617ad0b5ee07c22 |
class Integer(int): <NEW_LINE> <INDENT> MAX_VALUE = sys.maxsize <NEW_LINE> @staticmethod <NEW_LINE> def parseInt(text): <NEW_LINE> <INDENT> return int(text or 0) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def parseLong(text): <NEW_LINE> <INDENT> try: return int(text or 0) <NEW_LINE> except: return int(text or 0) | Partial implementation of Java Integer type.
| 6259908171ff763f4b5e927f |
class HttpClientTestFailureReason(Base): <NEW_LINE> <INDENT> __tablename__ = 'HttpClientTestFailureReason' <NEW_LINE> __table_args__ = {'useexisting' : True} <NEW_LINE> id = Column("Id", Integer, primary_key=True) <NEW_LINE> reason = Column("Reason", String(5000), nullable=False) | For the Failed tests on the client side
we store the reason for the failure in this table | 625990814428ac0f6e65a002 |
class Segment(object): <NEW_LINE> <INDENT> def __init__(self, begin, end, label=None, signal=None): <NEW_LINE> <INDENT> self._begin = begin <NEW_LINE> self._end = end <NEW_LINE> self._label = label <NEW_LINE> self._signal = signal <NEW_LINE> <DEDENT> def get_begin_time(self): <NEW_LINE> <INDENT> return self._begin <NEW_LINE> <DEDENT> def get_end_time(self): <NEW_LINE> <INDENT> return self._end <NEW_LINE> <DEDENT> def get_begin(self): <NEW_LINE> <INDENT> return self._signal.get_idx(self.get_begin_time()) <NEW_LINE> <DEDENT> def get_end(self): <NEW_LINE> <INDENT> return self._signal.get_idx(self.get_end_time()) if self.get_end_time() is not None else None <NEW_LINE> <DEDENT> def get_duration(self): <NEW_LINE> <INDENT> return (self.get_end_time() - self.get_begin_time()) if self.get_end_time() is not None else None <NEW_LINE> <DEDENT> def get_label(self): <NEW_LINE> <INDENT> return self._label <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> return self._signal is None or self.get_begin_time() >= len(self._signal.get_end_time()) <NEW_LINE> <DEDENT> def __call__(self, data=None): <NEW_LINE> <INDENT> if data is None: <NEW_LINE> <INDENT> data = self._signal <NEW_LINE> <DEDENT> return data.segment_time(self.get_begin_time(), self.get_end_time()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '[%s:%s' % (str(self.get_begin_time()), str(self.get_end_time())) + ( ":%s]" % self._label if self._label is not None else "]") | Base Segment, a time begin-end pair with a reference to the base signal and a name. | 62599081ec188e330fdfa37e |
class Conversation(core_models.TimeStampedModel): <NEW_LINE> <INDENT> participants = models.ManyToManyField( "users.User", related_name="conversation", blank=True ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> usernames = [] <NEW_LINE> for user in self.participants.all(): <NEW_LINE> <INDENT> usernames.append(user) <NEW_LINE> <DEDENT> return " / ".join(str(v) for v in usernames) <NEW_LINE> <DEDENT> def count_messages(self): <NEW_LINE> <INDENT> return self.messages.count() <NEW_LINE> <DEDENT> count_messages.short_description = "# of Messages" <NEW_LINE> def count_participants(self): <NEW_LINE> <INDENT> return self.participants.count() <NEW_LINE> <DEDENT> count_participants.short_description = "# of Participants" | Conversation Model Definition | 625990811f5feb6acb1646cd |
class LGeoControl(tornado.web.UIModule): <NEW_LINE> <INDENT> def render(self, host="localhost:8080/mapapi", earth="earth",map="lmap"): <NEW_LINE> <INDENT> return self.render_string('template/ui_templates/plugin-geo-control.html', host=host, map=map, earth=earth, ) | This is a controller to control geo. | 625990815fdd1c0f98e5fa53 |
class UnionFind: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.weights = {} <NEW_LINE> self.parents = {} <NEW_LINE> <DEDENT> def __getitem__(self, object): <NEW_LINE> <INDENT> if object not in self.parents: <NEW_LINE> <INDENT> self.parents[object] = object <NEW_LINE> self.weights[object] = 1 <NEW_LINE> return object <NEW_LINE> <DEDENT> path = [object] <NEW_LINE> root = self.parents[object] <NEW_LINE> while root != path[-1]: <NEW_LINE> <INDENT> path.append(root) <NEW_LINE> root = self.parents[root] <NEW_LINE> <DEDENT> for ancestor in path: <NEW_LINE> <INDENT> self.parents[ancestor] = root <NEW_LINE> <DEDENT> return root <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.parents) <NEW_LINE> <DEDENT> def union(self, *objects): <NEW_LINE> <INDENT> roots = [self[x] for x in objects] <NEW_LINE> heaviest = max([(self.weights[r], r) for r in roots])[1] <NEW_LINE> for r in roots: <NEW_LINE> <INDENT> if r != heaviest: <NEW_LINE> <INDENT> self.weights[heaviest] += self.weights[r] <NEW_LINE> self.parents[r] = heaviest <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_clusters(self): <NEW_LINE> <INDENT> inv_map = {} <NEW_LINE> for k, v in self.parents.iteritems(): <NEW_LINE> <INDENT> inv_map[v] = inv_map.get(v, []) <NEW_LINE> inv_map[v].append(k) <NEW_LINE> <DEDENT> return inv_map | Union-find data structure.
Each unionFind instance X maintains a family of disjoint sets of
hashable objects, supporting the following two methods:
- X[item] returns a name for the set containing the given item.
Each set is named by an arbitrarily-chosen one of its members; as
long as the set remains unchanged it will keep the same name. If
the item is not yet part of a set in X, a new singleton set is
created for it.
- X.union(item1, item2, ...) merges the sets containing each item
into a single larger set. If any item is not yet part of a set
in X, it is added to X as one of the members of the merged set. | 625990814f88993c371f128c |
class pet: <NEW_LINE> <INDENT> max_energy = 100 <NEW_LINE> max_toilet = 100 <NEW_LINE> max_health = 100 <NEW_LINE> def __init__(self, pet_name = 'Peto'): <NEW_LINE> <INDENT> self.hunger_level = 0 <NEW_LINE> self.happiness_level = 50 <NEW_LINE> self.name = pet_name <NEW_LINE> self.energy_level = 100 <NEW_LINE> self.toilet_need = 20 <NEW_LINE> self.health_level = 100 <NEW_LINE> self.body = turtle.Turtle() <NEW_LINE> <DEDENT> def increase_hunger(self, increase_by = 15): <NEW_LINE> <INDENT> self.hunger_level += increase_by <NEW_LINE> if self.hunger_level > 100: <NEW_LINE> <INDENT> self.hunger_level = 100 <NEW_LINE> <DEDENT> elif self.hunger_level < 0: <NEW_LINE> <INDENT> self.hunger_level = 0 <NEW_LINE> <DEDENT> <DEDENT> def increase_energy(self, increase_by = 0): <NEW_LINE> <INDENT> self.energy_level += increase_by <NEW_LINE> if self.energy_level > 100: <NEW_LINE> <INDENT> self.energy_level = 100 <NEW_LINE> <DEDENT> elif self.energy_level < 0: <NEW_LINE> <INDENT> self.increase_hunger(20) <NEW_LINE> self.energy_level = 0 <NEW_LINE> <DEDENT> <DEDENT> def increase_health(self, increase_by = -10): <NEW_LINE> <INDENT> self.health_level += increase_by <NEW_LINE> if self.health_level > 100: <NEW_LINE> <INDENT> self.health_level = 100 <NEW_LINE> <DEDENT> elif self.health_level < 0: <NEW_LINE> <INDENT> self.health_level = 0 | Encompases the data and methods that are possible with the pet | 625990813346ee7daa3383cc |
class ModelSelector(object): <NEW_LINE> <INDENT> def __init__(self, words: dict, hwords: dict, this_word: str, n_constant=3, min_n_components=2, max_n_components=10, random_state=None, verbose=False): <NEW_LINE> <INDENT> self.words = words <NEW_LINE> self.hwords = hwords <NEW_LINE> self.sequences = words[this_word] <NEW_LINE> self.X, self.lengths = hwords[this_word] <NEW_LINE> self.this_word = this_word <NEW_LINE> self.n_constant = n_constant <NEW_LINE> self.min_n_components = min_n_components <NEW_LINE> self.max_n_components = max_n_components <NEW_LINE> self.random_state = random_state <NEW_LINE> self.verbose = verbose <NEW_LINE> <DEDENT> def select(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def base_model(self, num_states): <NEW_LINE> <INDENT> warnings.filterwarnings("ignore", category=DeprecationWarning) <NEW_LINE> try: <NEW_LINE> <INDENT> hmm_model = GaussianHMM(n_components=num_states, covariance_type="diag", n_iter=1000, random_state=self.random_state, verbose=False).fit(self.X, self.lengths) <NEW_LINE> if self.verbose: <NEW_LINE> <INDENT> print("model created for {} with {} states".format(self.this_word, num_states)) <NEW_LINE> <DEDENT> return hmm_model <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print("failure on {} with {} states".format(self.this_word, num_states)) <NEW_LINE> <DEDENT> return None | base class for model selection (strategy design pattern) | 625990814f6381625f19a219 |
class GPImportCache(CoClass): <NEW_LINE> <INDENT> _reg_clsid_ = GUID('{111FE3A4-4E66-4AF2-A95A-50DD7A7960D1}') <NEW_LINE> _idlflags_ = [] <NEW_LINE> _typelib_path_ = typelib_path <NEW_LINE> _reg_typelib_ = ('{C031A050-82C6-4F8F-8836-5692631CFFE6}', 10, 2) | Import pre-rendered tile cache. | 62599081099cdd3c63676164 |
class DensityMatrix: <NEW_LINE> <INDENT> def __init__(self, header, origin, density, pdbid): <NEW_LINE> <INDENT> self.pdbid = pdbid <NEW_LINE> self.header = header <NEW_LINE> self.origin = origin <NEW_LINE> self.densityArray = density <NEW_LINE> self.density = np.array(density).reshape(header.ncrs[2], header.ncrs[1], header.ncrs[0]) <NEW_LINE> self._meanDensity = None <NEW_LINE> self._stdDensity = None <NEW_LINE> self._totalAbsDensity = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def meanDensity(self): <NEW_LINE> <INDENT> if self._meanDensity == None: <NEW_LINE> <INDENT> self._meanDensity = np.mean(self.densityArray) <NEW_LINE> <DEDENT> return self._meanDensity <NEW_LINE> <DEDENT> @property <NEW_LINE> def stdDensity(self): <NEW_LINE> <INDENT> if self._stdDensity == None: <NEW_LINE> <INDENT> self._stdDensity = np.std(self.densityArray) <NEW_LINE> <DEDENT> return self._stdDensity <NEW_LINE> <DEDENT> def getTotalAbsDensity(self, densityCutoff): <NEW_LINE> <INDENT> if densityCutoff not in self._totalAbsDensity: <NEW_LINE> <INDENT> self._totalAbsDensity[densityCutoff] = utils.sumOfAbs(self.densityArray, densityCutoff) <NEW_LINE> <DEDENT> return self._totalAbsDensity[densityCutoff] <NEW_LINE> <DEDENT> def getPointDensityFromCrs(self, crsCoord): <NEW_LINE> <INDENT> return utils.getPointDensityFromCrs(self,crsCoord) <NEW_LINE> <DEDENT> def getPointDensityFromXyz(self, xyzCoord): <NEW_LINE> <INDENT> return utils.getPointDensityFromCrs(self, self.header.xyz2crsCoord(xyzCoord)) <NEW_LINE> <DEDENT> def getSphereCrsFromXyz(self, xyzCoord, radius, densityCutoff=0): <NEW_LINE> <INDENT> return utils.getSphereCrsFromXyz(self,xyzCoord,radius,densityCutoff) <NEW_LINE> <DEDENT> def getTotalDensityFromXyz(self, xyzCoord, radius, densityCutoff=0): <NEW_LINE> <INDENT> crsCoordList = utils.getSphereCrsFromXyz(self, xyzCoord, radius, densityCutoff) <NEW_LINE> return sum(utils.getPointDensityFromCrs(self, crs) for crs in crsCoordList) <NEW_LINE> <DEDENT> def findAberrantBlobs(self, xyzCoords, radius, densityCutoff=0): <NEW_LINE> <INDENT> if not isinstance(xyzCoords[0], (np.floating, float)): <NEW_LINE> <INDENT> if len(xyzCoords) > 1: <NEW_LINE> <INDENT> crsCoordList = list(utils.getSphereCrsFromXyzList(self, xyzCoords, radius, densityCutoff)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> crsCoordList = utils.getSphereCrsFromXyz(self, xyzCoords[0], radius, densityCutoff) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> crsCoordList = utils.getSphereCrsFromXyz(self, xyzCoords, radius, densityCutoff) <NEW_LINE> <DEDENT> return self.createBlobList(crsCoordList) <NEW_LINE> <DEDENT> def createFullBlobList(self, cutoff): <NEW_LINE> <INDENT> crsList = utils.createFullCrsList(self, cutoff) <NEW_LINE> return self.createBlobList(crsList) if crsList != None else None <NEW_LINE> <DEDENT> def createBlobList(self, crsList): <NEW_LINE> <INDENT> crsLists = utils.createCrsLists(crsList) <NEW_LINE> return [ DensityBlob.fromCrsList(crs_list, self) for crs_list in crsLists ] | :class:`pdb_eda.ccp4.DensityMatrix` that stores data and methods of a ccp4 file. | 625990818a349b6b43687d32 |
@dataclass <NEW_LINE> class SensorSISO(SensorAttributeBase): <NEW_LINE> <INDENT> source: Optional[BaseElement] = None <NEW_LINE> def __post_init__(self) -> None: <NEW_LINE> <INDENT> super().__post_init__() <NEW_LINE> assert self.source <NEW_LINE> assert isinstance(self.source, BaseElement) <NEW_LINE> assert self.attribute is None or hasattr(self.source, self.attribute) <NEW_LINE> <DEDENT> def get_dependencies(self) -> List[BaseElement]: <NEW_LINE> <INDENT> if self.source is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> if not isinstance(self.source, BaseElement): <NEW_LINE> <INDENT> raise ValueError( "source is not a subclass of BaseElement. " "Have references been resolved?" ) <NEW_LINE> <DEDENT> return [self.source] <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self) -> Any: <NEW_LINE> <INDENT> arg = self.source <NEW_LINE> if self.attribute is not None: <NEW_LINE> <INDENT> arg = getattr(arg, self.attribute) <NEW_LINE> <DEDENT> return arg if self.function is None else _FUNCTION_MAP[self.function](arg) | A sensor for extracting a single element attribute. | 6259908167a9b606de547811 |
class ApplicationGatewaySku(Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'tier': {'key': 'tier', 'type': 'str'}, 'capacity': {'key': 'capacity', 'type': 'int'}, } <NEW_LINE> def __init__(self, name=None, tier=None, capacity=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.tier = tier <NEW_LINE> self.capacity = capacity | SKU of an application gateway.
:param name: Name of an application gateway SKU. Possible values are:
'Standard_Small', 'Standard_Medium', 'Standard_Large', 'WAF_Medium', and
'WAF_Large'. Possible values include: 'Standard_Small', 'Standard_Medium',
'Standard_Large', 'WAF_Medium', 'WAF_Large'
:type name: str or :class:`ApplicationGatewaySkuName
<azure.mgmt.network.v2016_09_01.models.ApplicationGatewaySkuName>`
:param tier: Tier of an application gateway. Possible values are:
'Standard' and 'WAF'. Possible values include: 'Standard', 'WAF'
:type tier: str or :class:`ApplicationGatewayTier
<azure.mgmt.network.v2016_09_01.models.ApplicationGatewayTier>`
:param capacity: Capacity (instance count) of an application gateway.
:type capacity: int | 62599081be7bc26dc9252bc0 |
class Migration(migrations.Migration): <NEW_LINE> <INDENT> operations = ops | Used for gis-specific migration tests. | 625990815fcc89381b266ec7 |
class RefTable: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._table = None <NEW_LINE> <DEDENT> def initialize(self, content): <NEW_LINE> <INDENT> cols = ['idx', 'param', 'sumstat', 'distance'] <NEW_LINE> self._table = pd.DataFrame(content, columns=cols) <NEW_LINE> <DEDENT> def getRefTable(self): <NEW_LINE> <INDENT> return self._table <NEW_LINE> <DEDENT> def fillColumn(self, data, columnName): <NEW_LINE> <INDENT> if columnName == 'sumstat': <NEW_LINE> <INDENT> data = [np.array(row) for row in data] <NEW_LINE> <DEDENT> self._table[columnName] = data <NEW_LINE> <DEDENT> def getColumn(self, columnName): <NEW_LINE> <INDENT> return toArray(self._table, columnName) <NEW_LINE> <DEDENT> def fillRow(self, row, idx, param, sumstat): <NEW_LINE> <INDENT> self._table.loc[row, 'idx'] = idx <NEW_LINE> self._table.set_value(row, 'param', param) <NEW_LINE> self._table.set_value(row, 'sumstat', sumstat) | Holds the final ABC Table where each row corresponds to one simulation.
Contains information about:
- model index
- summary statistics
- drawn parameters
- distance to observed data | 625990813346ee7daa3383cd |
class Include(Element): <NEW_LINE> <INDENT> @capture_kwargs <NEW_LINE> def __init__( self, file, ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.file = file <NEW_LINE> self._attribute_names = ['file'] | This element does not strictly speaking belong to MJCF. Instead it is
a meta-element, used to assemble multiple XML files in a single document
object model (DOM) before parsing. The included file must be a valid XML
file with a unique top-level element. This top-level element is removed by
the parser, and the elements below it are inserted at the location of the
include element. At least one element must be inserted as a result of this
procedure. The include element can be used where ever an XML element is
expected in the MJFC file. Nested includes are allowed, however a given
XML file can be included at most once in the entire model. After all the
included XML files have been assembled into a single DOM, it must
correspond to a valid MJCF model. Other than that, it is up to the user to
decide how to use includes and how to modularize large files if desired.
:param file:
The name of the XML file to be included. The file location is relative
to the directory of the main MJCF file. If the file is not in the same
directory, it should be prefixed with a relative path. | 62599081167d2b6e312b8300 |
class FixedWidthReaderTest(_TabularReaderTest): <NEW_LINE> <INDENT> TEST_CLASS = FixedWidthReader <NEW_LINE> @pytest.fixture <NEW_LINE> def kwargs(self): <NEW_LINE> <INDENT> fields = ( IntField("int", (0, 4), "3d"), ListField("arr", (4, None), ( StringField("x", (0, 4)), StringField("y", (4, 8))))) <NEW_LINE> return {"fields": fields, "endl": "\n"} <NEW_LINE> <DEDENT> @pytest.fixture <NEW_LINE> def stream(self): <NEW_LINE> <INDENT> return StringIO(" 123 abc def\n 456 ghi jkl\n 789 mno pqr\n") | Unit testing for the FixedWidthReader class.
| 6259908160cbc95b06365ad7 |
class UserViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = User.objects.all().order_by('-date_joined') <NEW_LINE> serializer_class = UserSerializer | API endpoint that allows users to be viewed/edited. | 6259908197e22403b383c9d6 |
class State(Printable): <NEW_LINE> <INDENT> _INTERNAL_KEY = 0 <NEW_LINE> def __init__(self, age=0): <NEW_LINE> <INDENT> self.val = { State._INTERNAL_KEY: 0 } <NEW_LINE> self.next_key = State._INTERNAL_KEY + 1 <NEW_LINE> self.age = age <NEW_LINE> <DEDENT> def add_remote(self, age=0): <NEW_LINE> <INDENT> res = self.next_key <NEW_LINE> self.val[res] = age <NEW_LINE> self.next_key += 1 <NEW_LINE> return res <NEW_LINE> <DEDENT> def apply_local_change(self): <NEW_LINE> <INDENT> self.val[State._INTERNAL_KEY] += 1 <NEW_LINE> self.age += 1 <NEW_LINE> <DEDENT> def apply_remote_change(self, remote): <NEW_LINE> <INDENT> self.val[remote] += 1 <NEW_LINE> self.age += 1 <NEW_LINE> <DEDENT> def get_snapshot(self): <NEW_LINE> <INDENT> return self.val.copy() <NEW_LINE> <DEDENT> def get_relative_to_remote(self, remote_key): <NEW_LINE> <INDENT> remote_state = self.val[remote_key] <NEW_LINE> return (self.age - remote_state, remote_state) | Represents the local displacement from the "initial value".
The "initial value" is what the local server was initilzied with, ie empty for
a central server of the value given to a remote server.
The displacement distance is tracked in terms of the number of changes made,
in the positive "axis" of the remote that made the changes.
Remote tracking is not recursive; if the central server reports a change made by
another client to me, it still looks like the central server made the change.
Attributes:
val -- Mapping of remote keys to distance on that axis
next_key -- the next enumerated key to use as an axis direction
age -- the total number of changes made by any servers to reach this state | 625990814f6381625f19a21a |
class linear_regression_2: <NEW_LINE> <INDENT> def __init__(self, train_data, theta = 0): <NEW_LINE> <INDENT> self.theta = theta <NEW_LINE> self.train_dat = train_data <NEW_LINE> <DEDENT> def train(self ,variable_name, poly_order): <NEW_LINE> <INDENT> y = self.train_dat.mpg.values <NEW_LINE> self.var = variable_name <NEW_LINE> self.order = poly_order <NEW_LINE> X = np.ones(len(self.train_dat)) <NEW_LINE> if self.order == 0: <NEW_LINE> <INDENT> self.theta = 1/ (X.T @ X) * X.T @ y <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(1, self.order + 1): <NEW_LINE> <INDENT> for v in self.var: <NEW_LINE> <INDENT> x = self.train_dat[v] <NEW_LINE> X = np.c_[X, x ** (i)] <NEW_LINE> <DEDENT> <DEDENT> if np.linalg.det(X.T @ X) > 1e-10: <NEW_LINE> <INDENT> self.theta = np.linalg.inv(X.T @ X) @ X.T @ y <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.theta = np.linalg.pinv(X.T @ X) @ X.T @ y <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def predict(self, test_dat): <NEW_LINE> <INDENT> X_te = np.ones(len(test_dat)) <NEW_LINE> for i in range(1, self.order + 1): <NEW_LINE> <INDENT> for v in self.var: <NEW_LINE> <INDENT> x_te = test_dat[v] <NEW_LINE> X_te = np.c_[X_te, x_te ** (i)] <NEW_LINE> <DEDENT> <DEDENT> return (np.dot(X_te, self.theta)) | modify the linear regression solver to train all variable | 625990813617ad0b5ee07c26 |
class EnumField(TextField): <NEW_LINE> <INDENT> enum_types_list = dict() <NEW_LINE> def __init__(self, choices, enum_type = None, **kwargs): <NEW_LINE> <INDENT> self.enum_type = enum_type <NEW_LINE> self.choices = {} <NEW_LINE> self.localized = {} <NEW_LINE> for key, value in choices.iteritems(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value, localized = value <NEW_LINE> self.localized[value] = localized <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.choices[value] = key <NEW_LINE> <DEDENT> super(TextField, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def prepare(self): <NEW_LINE> <INDENT> class EnumType: <NEW_LINE> <INDENT> localized_choices = {} <NEW_LINE> <DEDENT> for key, value in self.choices.iteritems(): <NEW_LINE> <INDENT> setattr(EnumType, value, key) <NEW_LINE> <DEDENT> if self.enum_type is None: <NEW_LINE> <INDENT> enum_type = self.name.capitalize() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> enum_type = self.enum_type <NEW_LINE> <DEDENT> setattr(self.cls, enum_type, EnumType) <NEW_LINE> EnumType.localized_choices.update(self.localized) <NEW_LINE> EnumField.enum_types_list['{0}.{1}.{2}'.format(str(self.cls.__module__), self.cls.__name__, enum_type)] = EnumType.localized_choices <NEW_LINE> <DEDENT> def validate(self, value, document): <NEW_LINE> <INDENT> if value not in self.choices: <NEW_LINE> <INDENT> raise ValidationError("Invalid value '{1}' for enumeration field '{0}'!".format(self.name, value)) <NEW_LINE> <DEDENT> <DEDENT> def get_search_mapping(self): <NEW_LINE> <INDENT> mapping = super(EnumField, self).get_search_mapping() <NEW_LINE> mapping.update(dict( index = "not_analyzed", )) <NEW_LINE> return mapping | Field that may contain one of the many predefined values. | 6259908155399d3f05627feb |
class InDir(object): <NEW_LINE> <INDENT> def __init__(self, path): <NEW_LINE> <INDENT> self.new_path = path <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.old_path = os.getcwd() <NEW_LINE> os.chdir(self.new_path) <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> os.chdir(self.old_path) | A Context Manager that changes directories temporarily and safely. | 625990811f5feb6acb1646d1 |
class IngredientViewSet(BaseRecipeAttrViewSet): <NEW_LINE> <INDENT> queryset = Ingredient.objects.all() <NEW_LINE> serializer_class = serializers.IngredientSerializer | Manage ingredients in the database | 62599081e1aae11d1e7cf57e |
class AdminAjaxUsersHandler(BaseHandler): <NEW_LINE> <INDENT> @restrict_ip_address <NEW_LINE> @authenticated <NEW_LINE> @authorized(ADMIN_PERMISSION) <NEW_LINE> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> uuid = self.get_argument('uuid', '') <NEW_LINE> user = User.by_uuid(uuid) <NEW_LINE> if user is not None: <NEW_LINE> <INDENT> self.write({ 'username': user.username, }) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.write({'Error': 'User does not exist.'}) <NEW_LINE> <DEDENT> self.finish() | Handles AJAX data for admin handlers | 6259908150812a4eaa621931 |
class PowerControlDeviceOutletResource(DeleteablePowerObjectResource): <NEW_LINE> <INDENT> device = fields.ToOneField('chroma_api.power_control.PowerControlDeviceResource', 'device') <NEW_LINE> host = fields.ToOneField('chroma_api.host.HostResource', 'host', null = True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> queryset = PowerControlDeviceOutlet.objects.all() <NEW_LINE> resource_name = 'power_control_device_outlet' <NEW_LINE> authorization = DjangoAuthorization() <NEW_LINE> authentication = AnonymousAuthentication() <NEW_LINE> validation = ResolvingFormValidation(form_class=PowerControlDeviceOutletForm) <NEW_LINE> list_allowed_methods = ['get', 'post'] <NEW_LINE> detail_allowed_methods = ['get', 'put', 'delete', 'patch'] <NEW_LINE> readonly = ['id'] <NEW_LINE> excludes = ['not_deleted'] <NEW_LINE> always_return_data = True | An outlet (individual host power control entity) associated with a
Power Control Device. | 62599081adb09d7d5dc0c031 |
class ValidateAvaRigFormat(pyblish.api.InstancePlugin): <NEW_LINE> <INDENT> label = "Rig Format" <NEW_LINE> order = pyblish.api.ValidatorOrder <NEW_LINE> hosts = ["maya"] <NEW_LINE> families = ["ava.rig"] <NEW_LINE> def process(self, instance): <NEW_LINE> <INDENT> from maya import cmds <NEW_LINE> missing = list() <NEW_LINE> for member in ("controls_SET", "out_SET"): <NEW_LINE> <INDENT> if member not in instance: <NEW_LINE> <INDENT> missing.append(member) <NEW_LINE> <DEDENT> <DEDENT> assert not missing, "\"%s\" is missing members: %s" % ( instance, ", ".join("\"" + member + "\"" for member in missing)) <NEW_LINE> missing = list() <NEW_LINE> for node in cmds.sets("out_SET", query=True) or list(): <NEW_LINE> <INDENT> shapes = cmds.listRelatives(node, shapes=True) or list() <NEW_LINE> meshes = cmds.ls(shapes, type="mesh") <NEW_LINE> if not meshes: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.log.info("Checking '%s'" % node) <NEW_LINE> cmds.getAttr(node + ".mbID") <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> missing.append(node) <NEW_LINE> <DEDENT> <DEDENT> assert not missing, ("Missing ID attribute on: %s" % ", ".join(missing)) | A rig must have a certain hierarchy and members
- Must reside within `rig_GRP` transform
- out_SET
- controls_SET
- in_SET (optional)
- resources_SET (optional) | 62599081f9cc0f698b1c6038 |
class UnicodeInput(IOBase): <NEW_LINE> <INDENT> def __init__(self, hConsole, name, bufsize=1024): <NEW_LINE> <INDENT> self._hConsole = hConsole <NEW_LINE> self.bufsize = bufsize <NEW_LINE> self.buffer = create_unicode_buffer(bufsize) <NEW_LINE> self.name = name <NEW_LINE> self.encoding = 'utf-8' <NEW_LINE> <DEDENT> def readline(self): <NEW_LINE> <INDENT> maxnum = DWORD(self.bufsize - 1) <NEW_LINE> numrecv = DWORD(0) <NEW_LINE> result = ReadConsoleW(self._hConsole, self.buffer, maxnum, byref(numrecv), None) <NEW_LINE> if not result: <NEW_LINE> <INDENT> raise Exception("stdin failure") <NEW_LINE> <DEDENT> data = self.buffer.value[:numrecv.value] <NEW_LINE> if not PY3: <NEW_LINE> <INDENT> return data.encode(self.encoding) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return data | Unicode terminal input class. | 62599081bf627c535bcb2faa |
class Solver (object): <NEW_LINE> <INDENT> def solve (self, tripcode, *args): <NEW_LINE> <INDENT> raise NotImplementedError ( 'Solver derivatives must implement this method!' ) | Base class for solvers. | 625990817047854f46340e8d |
class IPMR2AboveContentBodyPortlets(IPortletManager): <NEW_LINE> <INDENT> pass | PMR2 Portlets that sit above the content body.
| 62599081283ffb24f3cf5379 |
class VivadoSessionContextAdapter(object): <NEW_LINE> <INDENT> def __init__(self, aManager, aCloseOnExit, aSId): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._console = None <NEW_LINE> self._mgr = aManager <NEW_LINE> self._sid = aSId <NEW_LINE> self._closeonexit = aCloseOnExit <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self._console = self._mgr._getconsole(sid=self._sid) <NEW_LINE> return self._console <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> if self._closeonexit: <NEW_LINE> <INDENT> self._console.close() <NEW_LINE> self._console = None | Summary
| 6259908155399d3f05627fed |
class DuplicationError(Exception): <NEW_LINE> <INDENT> implements(IDuplicationError) | A duplicate registration was attempted | 6259908150812a4eaa621932 |
class requestObjMsg( negGuiObjMsg ): <NEW_LINE> <INDENT> def __init__(self, requestNr, typeNr, parentKey, params =None): <NEW_LINE> <INDENT> negGuiMsg.__init__(self, REQUEST_OBJ_SIGNAL, requestNr, parentKey) <NEW_LINE> self.typeNr = typeNr <NEW_LINE> self.params = params <NEW_LINE> <DEDENT> def send(self): <NEW_LINE> <INDENT> self.sendObj( self ) | msg from gui to negotiator to request a new object | 6259908197e22403b383c9d9 |
class ConsoleValidationException(Exception): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> Exception.__init__(self, *args, **kwargs) | Error class for the console | 625990814a966d76dd5f09c0 |
class Notebook(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.notes = [] <NEW_LINE> <DEDENT> def find_note(self, note_id): <NEW_LINE> <INDENT> for note in self.notes: <NEW_LINE> <INDENT> if note.id == note: <NEW_LINE> <INDENT> return note <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def new_note(self, memo, tags=""): <NEW_LINE> <INDENT> self.notes.append(Note(memo, tags)) <NEW_LINE> <DEDENT> def modify_memo(self, note_id, memo): <NEW_LINE> <INDENT> self.find_note(note_id).memo = memo <NEW_LINE> <DEDENT> def modify_tags(self, note_id, tags): <NEW_LINE> <INDENT> self.find_note(note_id).tags = tags <NEW_LINE> <DEDENT> def search(self, filter): <NEW_LINE> <INDENT> return [note for note in self.notes if note.match(filter)] | Represent a collection of notes that can be tagged,
modified, and searched. | 6259908144b2445a339b76ca |
class AlgorithmHandler(object): <NEW_LINE> <INDENT> def start_algorithm(self): <NEW_LINE> <INDENT> config = configparser.ConfigParser() <NEW_LINE> config.read(os.path.dirname(os.path.abspath(__file__)) + "/config.ini") <NEW_LINE> dataset = os.path.join(os.path.dirname(os.path.abspath(__file__)) + r"/tweets.json") <NEW_LINE> output = os.path.join(os.path.dirname(os.path.abspath(__file__)) + r"/result.json") <NEW_LINE> location = os.path.join(os.path.dirname(os.path.abspath(__file__)) + "/", config['algorithm']['location'].replace("\"", "")) <NEW_LINE> subprocess.Popen("python --version", stdout=subprocess.PIPE, shell=True) <NEW_LINE> cmd = "python " + location + "main.py -in \"" + dataset + "\" -l 0 -out " + output + " -tp enrichment" <NEW_LINE> subprocess.Popen(cmd, stdout=subprocess.PIPE, shell=True) | class for starting the algorithm | 6259908155399d3f05627fef |
class FluentAppIndexDashboard(AppIndexDashboard): <NEW_LINE> <INDENT> title = '' <NEW_LINE> def __init__(self, app_title, models, **kwargs): <NEW_LINE> <INDENT> super(FluentAppIndexDashboard, self).__init__(app_title, models, **kwargs) <NEW_LINE> self.children += ( self.get_model_list_module(), self.get_recent_actions_module(), ) <NEW_LINE> <DEDENT> def get_model_list_module(self): <NEW_LINE> <INDENT> return modules.ModelList(self.app_title, self.models) <NEW_LINE> <DEDENT> def get_recent_actions_module(self): <NEW_LINE> <INDENT> return modules.RecentActions( _('Recent Actions'), include_list=self.get_app_content_types(), limit=5, enabled=False, collapsible=False ) | A custom application index page for the Django admin interface.
This dashboard is displayed when one specific application is opened via the breadcrumb.
It displays the models and recent actions of the specific application.
To activate the dashboards add the following to your settings.py::
ADMIN_TOOLS_APP_INDEX_DASHBOARD = 'fluent_dashboard.dashboard.FluentAppIndexDashboard' | 625990814428ac0f6e65a00a |
class command_grep(HoneyPotCommand): <NEW_LINE> <INDENT> def grep_get_contents(self, filename, match): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> contents = self.fs.file_contents(filename) <NEW_LINE> self.grep_application(contents, match) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.errorWrite("grep: {}: No such file or directory\n".format(filename)) <NEW_LINE> <DEDENT> <DEDENT> def grep_application(self, contents, match): <NEW_LINE> <INDENT> match = path.basename(match) <NEW_LINE> match = match.replace("\"","") <NEW_LINE> contentsplit = contents.split('\n') <NEW_LINE> matches = re.compile(".*" + match + ".*") <NEW_LINE> for line in contentsplit: <NEW_LINE> <INDENT> if matches.match(line): <NEW_LINE> <INDENT> self.write(line + '\n') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def help(self): <NEW_LINE> <INDENT> self.errorWrite( 'usage: grep [-abcDEFGHhIiJLlmnOoPqRSsUVvwxZ] [-A num] [-B num] [-C[num]]\n') <NEW_LINE> self.errorWrite( '\t[-e pattern] [-f file] [--binary-files=value] [--color=when]\n') <NEW_LINE> self.errorWrite( '\t[--context[=num]] [--directories=action] [--label] [--line-buffered]\n') <NEW_LINE> self.errorWrite( '\t[--null] [pattern] [file ...]\n') <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> if not self.args: <NEW_LINE> <INDENT> self.help() <NEW_LINE> self.exit() <NEW_LINE> return <NEW_LINE> <DEDENT> self.n = 10 <NEW_LINE> if self.args[0] == '>': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> optlist, args = getopt.getopt(self.args, 'abcDEFGHhIiJLlmnOoPqRSsUVvwxZA:B:C:e:f:') <NEW_LINE> <DEDENT> except getopt.GetoptError as err: <NEW_LINE> <INDENT> self.errorWrite("grep: invalid option -- {}\n".format(err.opt)) <NEW_LINE> self.help() <NEW_LINE> self.exit() <NEW_LINE> return <NEW_LINE> <DEDENT> for opt in optlist: <NEW_LINE> <INDENT> if opt == '-h': <NEW_LINE> <INDENT> self.help() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not self.input_data: <NEW_LINE> <INDENT> files = self.check_arguments("grep", args[1:]) <NEW_LINE> for pname in files: <NEW_LINE> <INDENT> self.grep_get_contents(pname, args[0]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.grep_application(self.input_data, args[0]) <NEW_LINE> <DEDENT> self.exit() <NEW_LINE> <DEDENT> def lineReceived(self, line): <NEW_LINE> <INDENT> log.msg(eventid='qrassh.command.input', realm='grep', input=line, format='INPUT (%(realm)s): %(input)s') <NEW_LINE> <DEDENT> def handle_CTRL_D(self): <NEW_LINE> <INDENT> self.exit() | grep command | 625990813d592f4c4edbc8cd |
class DataContainer(VtkWidget): <NEW_LINE> <INDENT> _model_name = Unicode('DataContainerModel').tag(sync=True) <NEW_LINE> kind = Unicode().tag(sync=True) <NEW_LINE> attributes = Dict().tag(sync=True) <NEW_LINE> data_arrays = VarTuple(Instance(DataArray)).tag(sync=True, **widget_serialization) <NEW_LINE> def __init__(self, kind, data_arrays=(), attributes=Undefined, **kwargs): <NEW_LINE> <INDENT> if attributes is Undefined: <NEW_LINE> <INDENT> attributes = {} <NEW_LINE> <DEDENT> super(DataContainer, self).__init__( kind=kind, data_arrays=data_arrays, attributes=attributes, **kwargs ) | A structure that holds a sequence of DataArrays.
Represents things like Cells, Points, Verts, Lines, Strips, Polys,
CellData, and PointData. | 62599081fff4ab517ebcf2f2 |
class BlockWithTimestampMixin(object): <NEW_LINE> <INDENT> __slots__ = [] <NEW_LINE> @property <NEW_LINE> def timestamp(self): <NEW_LINE> <INDENT> return ( (self.timestamp_high << 32) + self.timestamp_low ) * self.timestamp_resolution <NEW_LINE> <DEDENT> @property <NEW_LINE> def timestamp_resolution(self): <NEW_LINE> <INDENT> return self.interface.timestamp_resolution | Block mixin adding properties to better access timestamps
of blocks that provide one. | 62599081ad47b63b2c5a932d |
class Square: <NEW_LINE> <INDENT> pass | empty Square class | 6259908160cbc95b06365ada |
@py2to3 <NEW_LINE> class Message(ModelObject): <NEW_LINE> <INDENT> __slots__ = ['message', 'level', 'html', 'timestamp', '_sort_key'] <NEW_LINE> def __init__(self, message='', level='INFO', html=False, timestamp=None, parent=None): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> self.level = level <NEW_LINE> self.html = html <NEW_LINE> self.timestamp = timestamp <NEW_LINE> self._sort_key = -1 <NEW_LINE> self.parent = parent <NEW_LINE> <DEDENT> @setter <NEW_LINE> def parent(self, parent): <NEW_LINE> <INDENT> if parent and parent is not getattr(self, 'parent', None): <NEW_LINE> <INDENT> self._sort_key = getattr(parent, '_child_sort_key', -1) <NEW_LINE> <DEDENT> return parent <NEW_LINE> <DEDENT> @property <NEW_LINE> def html_message(self): <NEW_LINE> <INDENT> return self.message if self.html else html_escape(self.message) <NEW_LINE> <DEDENT> def visit(self, visitor): <NEW_LINE> <INDENT> visitor.visit_message(self) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.message | A message outputted during the test execution.
The message can be a log message triggered by a keyword, or a warning
or an error that occurred during the test execution. | 62599081adb09d7d5dc0c035
class TestGMT(unittest.TestCase): <NEW_LINE> <INDENT> def test_parse_standard(self): <NEW_LINE> <INDENT> x = list(parse_gmt_file(TEST_GMT_PATH)) <NEW_LINE> self.assertEqual(3, len(x)) <NEW_LINE> self.assertEqual("HALLMARK_TNFA_SIGNALING_VIA_NFKB", x[0][0]) <NEW_LINE> self.assertEqual( "http://www.gsea-msigdb.org/gsea/msigdb/cards/HALLMARK_TNFA_SIGNALING_VIA_NFKB", x[0][1] ) <NEW_LINE> self.assertEqual({"3726", "2920"}, x[0][2]) <NEW_LINE> self.assertEqual("HALLMARK_HYPOXIA", x[1][0]) <NEW_LINE> self.assertEqual("http://www.gsea-msigdb.org/gsea/msigdb/cards/HALLMARK_HYPOXIA", x[1][1]) <NEW_LINE> self.assertEqual({"5230", "5163", "2632"}, x[1][2]) <NEW_LINE> self.assertEqual("HALLMARK_CHOLESTEROL_HOMEOSTASIS", x[2][0]) <NEW_LINE> self.assertEqual( "http://www.gsea-msigdb.org/gsea/msigdb/cards/HALLMARK_CHOLESTEROL_HOMEOSTASIS", x[2][1] ) <NEW_LINE> self.assertEqual({"2224", "1595"}, x[2][2]) <NEW_LINE> <DEDENT> def test_parse_wikipathways(self): <NEW_LINE> <INDENT> x = list(parse_wikipathways_gmt(TEST_WP_GMT_PATH)) <NEW_LINE> self.assertEqual(3, len(x)) <NEW_LINE> self.assertEqual("WP4400", x[0][0]) <NEW_LINE> self.assertEqual("20200310", x[0][1]) <NEW_LINE> self.assertEqual("108112", x[0][2]) <NEW_LINE> self.assertEqual("FABP4 in ovarian cancer", x[0][3]) <NEW_LINE> self.assertEqual("Homo sapiens", x[0][4]) <NEW_LINE> self.assertEqual({"574413", "2167"}, x[0][5]) <NEW_LINE> self.assertEqual("WP23", x[1][0]) <NEW_LINE> self.assertEqual("20200310", x[1][1]) <NEW_LINE> self.assertEqual("108321", x[1][2]) <NEW_LINE> self.assertEqual("B Cell Receptor Signaling Pathway", x[1][3]) <NEW_LINE> self.assertEqual("Homo sapiens", x[1][4]) <NEW_LINE> self.assertEqual({"4690", "5781", "11184", "6195"}, x[1][5]) <NEW_LINE> self.assertEqual("WP2333", x[2][0]) <NEW_LINE> self.assertEqual("20200310", x[2][1]) <NEW_LINE> self.assertEqual("72015", x[2][2]) <NEW_LINE> self.assertEqual("Trans-sulfuration pathway", x[2][3]) <NEW_LINE> self.assertEqual("Homo sapiens", x[2][4]) <NEW_LINE> self.assertEqual({"1786", "2730", "27430"}, x[2][5]) | Test parsing GMT files. | 625990814f6381625f19a21d |
class UserProFileAdmin(object): <NEW_LINE> <INDENT> list_display = ['id', "name", "nickName", "mobile", "gender", 'language', 'country', 'province', 'city'] <NEW_LINE> search_fields = ["name", "nickName", "mobile", "gender", 'language', 'country', 'province', 'city'] <NEW_LINE> list_filter = ["name", "nickName", "mobile", "gender", 'language', 'country', 'province', 'city'] | User table display | 62599081d8ef3951e32c8bce
class ContentRichTextPlugin(IntegrationFormElementPlugin, DRFSubmitPluginFormDataMixin): <NEW_LINE> <INDENT> uid = UID <NEW_LINE> integrate_with = INTEGRATE_WITH_UID <NEW_LINE> name = _("Content rich text") <NEW_LINE> group = _("Content") <NEW_LINE> def get_custom_field_instances(self, form_element_plugin, request=None, form_entry=None, form_element_entries=None, has_value=None, **kwargs): <NEW_LINE> <INDENT> rendered_text = form_element_plugin.get_rendered_text() <NEW_LINE> raw_data = form_element_plugin.get_raw_data() <NEW_LINE> field_kwargs = { 'initial': rendered_text, 'default': rendered_text, 'required': False, 'label': '', 'read_only': True, 'raw_data': raw_data, } <NEW_LINE> field_metadata = { 'type': 'content', 'contenttype': 'text', 'content': rendered_text, 'raw_data': raw_data } <NEW_LINE> return [ DRFIntegrationFormElementPluginProcessor( field_class=ContentRichTextField, field_kwargs=field_kwargs, field_metadata=field_metadata ) ] | Content rich text (CharField) plugin. | 625990815fcc89381b266ecb |
class Stacked(Network): <NEW_LINE> <INDENT> def __init__(self,nets): <NEW_LINE> <INDENT> self.nets = nets <NEW_LINE> <DEDENT> def forward(self,xs): <NEW_LINE> <INDENT> for i,net in enumerate(self.nets): <NEW_LINE> <INDENT> xs = net.forward(xs) <NEW_LINE> <DEDENT> return xs | Stack two networks on top of each other. | 6259908199fddb7c1ca63b48 |
class EncodeOrReplaceWriter(object): <NEW_LINE> <INDENT> def __init__(self, out): <NEW_LINE> <INDENT> self._encoding = getattr(out, 'encoding', None) or 'ascii' <NEW_LINE> self._write = out.write <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._write(data) <NEW_LINE> <DEDENT> except UnicodeEncodeError: <NEW_LINE> <INDENT> self._write( data.encode( self._encoding, errors='replace', ).decode(self._encoding), ) | Write-only file-ish object which replaces unsupported chars when
underlying file rejects them. | 625990812c8b7c6e89bd52c3 |
class HPUXNetwork(Network): <NEW_LINE> <INDENT> platform = 'HP-UX' <NEW_LINE> def populate(self): <NEW_LINE> <INDENT> netstat_path = self.module.get_bin_path('netstat') <NEW_LINE> if netstat_path is None: <NEW_LINE> <INDENT> return self.facts <NEW_LINE> <DEDENT> self.get_default_interfaces() <NEW_LINE> interfaces = self.get_interfaces_info() <NEW_LINE> self.facts['interfaces'] = interfaces.keys() <NEW_LINE> for iface in interfaces: <NEW_LINE> <INDENT> self.facts[iface] = interfaces[iface] <NEW_LINE> <DEDENT> return self.facts <NEW_LINE> <DEDENT> def get_default_interfaces(self): <NEW_LINE> <INDENT> rc, out, err = self.module.run_command("/usr/bin/netstat -nr") <NEW_LINE> lines = out.split('\n') <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> words = line.split() <NEW_LINE> if len(words) > 1: <NEW_LINE> <INDENT> if words[0] == 'default': <NEW_LINE> <INDENT> self.facts['default_interface'] = words[4] <NEW_LINE> self.facts['default_gateway'] = words[1] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def get_interfaces_info(self): <NEW_LINE> <INDENT> interfaces = {} <NEW_LINE> rc, out, err = self.module.run_command("/usr/bin/netstat -ni") <NEW_LINE> lines = out.split('\n') <NEW_LINE> for line in lines: <NEW_LINE> <INDENT> words = line.split() <NEW_LINE> for i in range(len(words) - 1): <NEW_LINE> <INDENT> if words[i][:3] == 'lan': <NEW_LINE> <INDENT> device = words[i] <NEW_LINE> interfaces[device] = { 'device': device } <NEW_LINE> address = words[i+3] <NEW_LINE> interfaces[device]['ipv4'] = { 'address': address } <NEW_LINE> network = words[i+2] <NEW_LINE> interfaces[device]['ipv4'] = { 'network': network, 'interface': device, 'address': address } <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return interfaces | HP-UX-specific subclass of Network. Defines networking facts:
- default_interface
- interfaces (a list of interface names)
- interface_<name> dictionary of ipv4 address information. | 62599081442bda511e95dac5 |
class Solution: <NEW_LINE> <INDENT> def fullJustify(self, words, maxWidth): <NEW_LINE> <INDENT> if not words: <NEW_LINE> <INDENT> return [""] <NEW_LINE> <DEDENT> line, length = [], 0 <NEW_LINE> results = [] <NEW_LINE> for w in words: <NEW_LINE> <INDENT> if length + len(w) + len(line) <= maxWidth: <NEW_LINE> <INDENT> length += len(w) <NEW_LINE> line.append(w) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> results.append(self._format_line(line, maxWidth)) <NEW_LINE> length = len(w) <NEW_LINE> line = [w] <NEW_LINE> <DEDENT> <DEDENT> if len(line): <NEW_LINE> <INDENT> results.append(self._format_last_line(line, maxWidth)) <NEW_LINE> <DEDENT> return results <NEW_LINE> <DEDENT> def _format_last_line(self, line, maxWidth): <NEW_LINE> <INDENT> s = " ".join(line) <NEW_LINE> return s + " " * (maxWidth - len(s)) <NEW_LINE> <DEDENT> def _format_line(self, line, maxWidth): <NEW_LINE> <INDENT> if len(line) == 1: <NEW_LINE> <INDENT> return line[0] + " " * (maxWidth - len(line[0])) <NEW_LINE> <DEDENT> length = sum([len(w) for w in line]) <NEW_LINE> space_length = maxWidth - length <NEW_LINE> gaps_count = len(line) - 1 <NEW_LINE> base_length = space_length // gaps_count <NEW_LINE> remaining = space_length % gaps_count <NEW_LINE> s = "" <NEW_LINE> for i in range(len(line) - 1): <NEW_LINE> <INDENT> if i < remaining: <NEW_LINE> <INDENT> s += line[i] + " " * base_length + " " <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s += line[i] + " " * base_length <NEW_LINE> <DEDENT> <DEDENT> s += line[-1] <NEW_LINE> return s | @param words: an array of string
@param maxWidth: an integer
@return: format the text such that each line has exactly maxWidth characters and is fully justified | 625990813d592f4c4edbc8ce
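For reference, a quick usage sketch of the fullJustify solution in the row above, assuming the class is copied out of its tokenized form into a plain Python module:

    sol = Solution()
    rows = sol.fullJustify(["This", "is", "an", "example", "of", "text", "justification."], 16)
    for row in rows:
        print(repr(row))
    # 'This    is    an'
    # 'example  of text'
    # 'justification.  '

Every returned line is padded to exactly 16 characters; extra spaces go to the left-most gaps, and the last line is left-justified.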
class NoHandler(AbstractRequestHandler): <NEW_LINE> <INDENT> def can_handle(self, handler_input): <NEW_LINE> <INDENT> playback_info = util.get_playback_info(handler_input) <NEW_LINE> return (not playback_info.get("in_playback_session") and is_intent_name("AMAZON.NoIntent")( handler_input)) <NEW_LINE> <DEDENT> def handle(self, handler_input): <NEW_LINE> <INDENT> logger.info("In NoHandler") <NEW_LINE> playback_info = util.get_playback_info(handler_input) <NEW_LINE> playback_info["index"] = 0 <NEW_LINE> playback_info["offset_in_ms"] = 0 <NEW_LINE> playback_info["playback_index_changed"] = True <NEW_LINE> playback_info["has_previous_playback_session"] = False <NEW_LINE> return util.Controller.play(handler_input) | Handler for No intent when audio is not playing. | 625990817cff6e4e811b751e |
class StdLibProbe(Probe): <NEW_LINE> <INDENT> def get_distro(self): <NEW_LINE> <INDENT> name = None <NEW_LINE> version = UNKNOWN_DISTRO_VERSION <NEW_LINE> release = UNKNOWN_DISTRO_RELEASE <NEW_LINE> arch = UNKNOWN_DISTRO_ARCH <NEW_LINE> d_name, d_version_release, d_codename = platform.dist() <NEW_LINE> if d_name: <NEW_LINE> <INDENT> name = d_name <NEW_LINE> <DEDENT> if '.' in d_version_release: <NEW_LINE> <INDENT> d_version, d_release = d_version_release.split('.', 1) <NEW_LINE> version = d_version <NEW_LINE> release = d_release <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> version = d_version_release <NEW_LINE> <DEDENT> arch = os.uname()[4] <NEW_LINE> if name is not None: <NEW_LINE> <INDENT> distro = LinuxDistro(name, version, release, arch) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> distro = UNKNOWN_DISTRO <NEW_LINE> <DEDENT> return distro | Probe that uses the Python standard library builtin detection
This Probe has a lower score on purpose, serving as a fallback
if no explicit (and hopefully more accurate) probe exists. | 6259908166673b3332c31edd |
class ArchiveToStageTransformer(Transformer): <NEW_LINE> <INDENT> @log_aware(log) <NEW_LINE> def __init__(self, origin_structure): <NEW_LINE> <INDENT> log_init_attempt(self, log, locals()) <NEW_LINE> self.origin_structure = origin_structure <NEW_LINE> self.destination_structure = None <NEW_LINE> log_init_success(self, log) <NEW_LINE> <DEDENT> @log_aware(log) <NEW_LINE> def transform(self, stage_identifier=None): <NEW_LINE> <INDENT> log.info("Transforming an Archive into a Stage") <NEW_LINE> if self.destination_structure is not None: <NEW_LINE> <INDENT> raise TypeError("a transformation already occured.") <NEW_LINE> <DEDENT> if stage_identifier is None: <NEW_LINE> <INDENT> log.debug("No stage identifier provided, setting to a uuid") <NEW_LINE> stage_identifier = uuid4().hex <NEW_LINE> <DEDENT> self.destination_structure = Stage(stage_identifier) <NEW_LINE> log.debug("Moving materialsuites into the Stage") <NEW_LINE> for n_materialsuite in self.origin_structure.materialsuite_list: <NEW_LINE> <INDENT> self.destination_structure.add_materialsuite( n_materialsuite ) <NEW_LINE> <DEDENT> log.debug("Moving accession records into the Stage") <NEW_LINE> for n_accessionrecord in self.origin_structure.accessionrecord_list: <NEW_LINE> <INDENT> self.destination_structure.add_accessionrecord( n_accessionrecord ) <NEW_LINE> <DEDENT> log.debug("Moving legalnotes into the Stage") <NEW_LINE> for n_legalnote in self.origin_structure.legalnote_list: <NEW_LINE> <INDENT> self.destination_structure.add_legalnote( n_legalnote ) <NEW_LINE> <DEDENT> log.debug("Moving adminnotes into the Stage") <NEW_LINE> for n_adminnote in self.origin_structure.adminnote_list: <NEW_LINE> <INDENT> self.destination_structure.add_adminnote( n_adminnote ) <NEW_LINE> <DEDENT> log.debug("Transformation complete, returning result") <NEW_LINE> return self.destination_structure <NEW_LINE> <DEDENT> @log_aware(log) <NEW_LINE> def get_origin_structure(self): <NEW_LINE> <INDENT> return self._origin_structure <NEW_LINE> <DEDENT> @log_aware(log) <NEW_LINE> def set_origin_structure(self, value): <NEW_LINE> <INDENT> if isinstance(value, Archive): <NEW_LINE> <INDENT> self._origin_structure = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("ArchiveToStageTransformerr must have an " + "instace of an Archive in origin_structure") <NEW_LINE> <DEDENT> <DEDENT> @log_aware(log) <NEW_LINE> def get_destination_structure(self): <NEW_LINE> <INDENT> return self._destination_structure <NEW_LINE> <DEDENT> @log_aware(log) <NEW_LINE> def set_destination_structure(self, value): <NEW_LINE> <INDENT> self._destination_structure = value <NEW_LINE> <DEDENT> @log_aware(log) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "< transform from archive {} to stage {}". format(id(self.origin_structure), id(self.destination_structure)) <NEW_LINE> <DEDENT> destination_structure = property(get_destination_structure, set_destination_structure) <NEW_LINE> origin_structure = property(get_origin_structure, set_origin_structure) | The ArchiveToStageTransformer takes an instance of an Archive structure
and copies its contents into an instance of a Stage structure | 6259908197e22403b383c9dd
class Calle(models.Model): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Calle, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> ciudad = models.ForeignKey(Ciudad) <NEW_LINE> calle = models.CharField(max_length=100) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.calle <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Calle" <NEW_LINE> verbose_name_plural = "Calles" <NEW_LINE> ordering = ['ciudad', 'calle'] <NEW_LINE> unique_together = (("calle", "ciudad"),) | docstring for Calle | 625990815fdd1c0f98e5fa5e |
class WM_OT_blenderplayer_start(Operator): <NEW_LINE> <INDENT> bl_idname = "wm.blenderplayer_start" <NEW_LINE> bl_label = "Start Game In Player" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> import os <NEW_LINE> import sys <NEW_LINE> import subprocess <NEW_LINE> gs = context.scene.game_settings <NEW_LINE> blender_bin_path = bpy.app.binary_path <NEW_LINE> blender_bin_dir = os.path.dirname(blender_bin_path) <NEW_LINE> ext = os.path.splitext(blender_bin_path)[-1] <NEW_LINE> player_path = os.path.join(blender_bin_dir, "blenderplayer" + ext) <NEW_LINE> if sys.platform == "darwin": <NEW_LINE> <INDENT> player_path = os.path.join(blender_bin_dir, "../../../blenderplayer.app/Contents/MacOS/blenderplayer") <NEW_LINE> <DEDENT> if not os.path.exists(player_path): <NEW_LINE> <INDENT> self.report({'ERROR'}, "Player path: %r not found" % player_path) <NEW_LINE> return {'CANCELLED'} <NEW_LINE> <DEDENT> filepath = bpy.data.filepath + '~' if bpy.data.is_saved else os.path.join(bpy.app.tempdir, "game.blend") <NEW_LINE> bpy.ops.wm.save_as_mainfile('EXEC_DEFAULT', filepath=filepath, copy=True) <NEW_LINE> args = [player_path] <NEW_LINE> args.extend([ "-g", "show_framerate", "=", "%d" % gs.show_framerate_profile, "-g", "show_profile", "=", "%d" % gs.show_framerate_profile, "-g", "show_properties", "=", "%d" % gs.show_debug_properties, "-g", "ignore_deprecation_warnings", "=", "%d" % (not gs.use_deprecation_warnings), ]) <NEW_LINE> args.append(filepath) <NEW_LINE> subprocess.call(args) <NEW_LINE> os.remove(filepath) <NEW_LINE> return {'FINISHED'} | Launch the blender-player with the current blend-file | 62599081f9cc0f698b1c603b |
class ReplaceVocabUnslicer(LeafUnslicer): <NEW_LINE> <INDENT> opentype = ('set-vocab',) <NEW_LINE> unslicerRegistry = BananaUnslicerRegistry <NEW_LINE> maxKeys = None <NEW_LINE> valueConstraint = ByteStringConstraint(100) <NEW_LINE> def setConstraint(self, constraint): <NEW_LINE> <INDENT> if isinstance(constraint, Any): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> assert isinstance(constraint, ByteStringConstraint) <NEW_LINE> self.valueConstraint = constraint <NEW_LINE> <DEDENT> def start(self, count): <NEW_LINE> <INDENT> self.d = {} <NEW_LINE> self.key = None <NEW_LINE> <DEDENT> def checkToken(self, typebyte, size): <NEW_LINE> <INDENT> if self.maxKeys is not None and len(self.d) >= self.maxKeys: <NEW_LINE> <INDENT> raise Violation("the table is full") <NEW_LINE> <DEDENT> if self.key is None: <NEW_LINE> <INDENT> if typebyte != INT: <NEW_LINE> <INDENT> raise BananaError("VocabUnslicer only accepts INT keys") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if typebyte != STRING: <NEW_LINE> <INDENT> raise BananaError("VocabUnslicer only accepts STRING values") <NEW_LINE> <DEDENT> if self.valueConstraint: <NEW_LINE> <INDENT> self.valueConstraint.checkToken(typebyte, size) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def receiveChild(self, token, ready_deferred=None): <NEW_LINE> <INDENT> assert not isinstance(token, Deferred) <NEW_LINE> assert ready_deferred is None <NEW_LINE> if self.key is None: <NEW_LINE> <INDENT> if token in self.d: <NEW_LINE> <INDENT> raise BananaError("duplicate key '%s'" % token) <NEW_LINE> <DEDENT> self.key = token <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.d[self.key] = token <NEW_LINE> self.key = None <NEW_LINE> <DEDENT> <DEDENT> def receiveClose(self): <NEW_LINE> <INDENT> if self.key is not None: <NEW_LINE> <INDENT> raise BananaError("sequence ended early: got key but not value") <NEW_LINE> <DEDENT> self.protocol.replaceIncomingVocabulary(self.d) <NEW_LINE> return ReplaceVocabularyTable, None <NEW_LINE> <DEDENT> def describe(self): <NEW_LINE> <INDENT> if self.key is not None: <NEW_LINE> <INDENT> return "<vocabdict>[%s]" % self.key <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "<vocabdict>" | Much like DictUnslicer, but keys must be numbers, and values must be
strings. This is used to set the entire vocab table at once. To add
individual tokens, use AddVocabUnslicer by sending an (add-vocab num
string) sequence. | 62599081099cdd3c63676169 |
class MethodValidateMiddleware: <NEW_LINE> <INDENT> def __init__(self, get_response): <NEW_LINE> <INDENT> self.get_response = get_response <NEW_LINE> <DEDENT> def __call__(self, request: HttpRequest): <NEW_LINE> <INDENT> response = self.get_response(request) <NEW_LINE> return response <NEW_LINE> <DEDENT> def process_view(self, request: HttpRequest, view_func, *view_args, **view_kwargs): <NEW_LINE> <INDENT> method = view_args[1].get('method', 'POST') <NEW_LINE> if isinstance(method, str): <NEW_LINE> <INDENT> if method == 'ALL': <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif method == request.method: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return JsonResponse(create_error_json_obj(400, '方法错误'), status=400) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(method, list): <NEW_LINE> <INDENT> if 'ALL' in method or request.method in method: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return JsonResponse(create_error_json_obj(400, '方法错误'), status=400) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('参数类型错误') | Middleware that handles the request Method | 625990813617ad0b5ee07c2e
class Grammar(object): <NEW_LINE> <INDENT> instance = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.variables = dict() <NEW_LINE> self.declarations = dict() <NEW_LINE> self.constants = dict() <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> for key in JavaToPyConfig.declarations: <NEW_LINE> <INDENT> self.variables[key] = JavaToPyConfig.declarations[key] <NEW_LINE> <DEDENT> for key in self.variables: <NEW_LINE> <INDENT> self.declarations[key] = Declaration(key) <NEW_LINE> <DEDENT> for key in self.variables: <NEW_LINE> <INDENT> Trace.debug('Interpreting ' + self.variables[key]) <NEW_LINE> pos = TextPosition(self.variables[key]) <NEW_LINE> self.declarations[key].parse(pos) <NEW_LINE> <DEDENT> <DEDENT> def parse(self, tok): <NEW_LINE> <INDENT> tok.next() <NEW_LINE> filedecl = self.declarations['$file'] <NEW_LINE> result = filedecl.match(tok) <NEW_LINE> if not result: <NEW_LINE> <INDENT> Trace.error('Actual file does not match $file.') <NEW_LINE> return | Read a complete grammar into memory. | 6259908144b2445a339b76cc |
class ImportJobs: <NEW_LINE> <INDENT> def __init__(self, lime_client): <NEW_LINE> <INDENT> self.lime_client = lime_client <NEW_LINE> <DEDENT> def create(self, import_config): <NEW_LINE> <INDENT> url = '/importjobs/' <NEW_LINE> job = ImportJob.create(import_config, self.lime_client) <NEW_LINE> r = self.lime_client.post(url, data=json.dumps(job.hal)) <NEW_LINE> if r.status_code != http.client.CREATED: <NEW_LINE> <INDENT> raise LimeClientError('Failed to create import job', r.status_code, r.text) <NEW_LINE> <DEDENT> return ImportJob(json.loads(r.text), self.lime_client) <NEW_LINE> <DEDENT> def get(self, url): <NEW_LINE> <INDENT> r = self.lime_client.get(url) <NEW_LINE> if r.status_code != http.client.OK: <NEW_LINE> <INDENT> raise LimeClientError('Failed to fetch import job', r.status_code, r.text) <NEW_LINE> <DEDENT> return ImportJob(json.loads(r.text), self.lime_client) | Handles the creation of a new import job.
:param lime_client: a logged in :class:`LimeClient` instance | 62599081283ffb24f3cf537f |
class MinimumSectionTimeValidator(Validator): <NEW_LINE> <INDENT> def rule_name(self): <NEW_LINE> <INDENT> return "Minimum section time" <NEW_LINE> <DEDENT> def validate(self, problem, solution): <NEW_LINE> <INDENT> errors = [] <NEW_LINE> return errors | Planning rule #103
Minimum section time
For each *train_run_section* the following holds:
t_exit - t_entry >= minimum_running_time + min_stopping_time,
where t_entry, t_exit are the entry and exit times into this *train_run_section*,
*minimum_running_time* is given by the *route_section* corresponding to this *train_run_section*
and *min_stopping_time* is given by the *section_requirement*
corresponding to this *train_run_section* or equal to 0 (zero) if no *section_requirement* with a *min_stopping_time* is associated to this *train_run_section*. | 62599081d8ef3951e32c8bcf |
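The validate method in the row above is a stub that always returns an empty error list. A minimal sketch of how the rule could be checked, assuming a solution whose train runs expose train_run_sections with numeric entry_time, exit_time, minimum_running_time and min_stopping_time fields (all hypothetical names, since the problem/solution data model is not shown here):

    def check_minimum_section_time(solution):
        errors = []
        for train_run in solution.train_runs:  # hypothetical container of train runs
            for section in train_run.train_run_sections:
                min_stop = section.min_stopping_time or 0  # 0 if no section_requirement applies
                required = section.minimum_running_time + min_stop
                scheduled = section.exit_time - section.entry_time
                if scheduled < required:
                    errors.append("train run section scheduled for %s, needs at least %s"
                                  % (scheduled, required))
        return errors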
class RunSubCommand(SubCommand): <NEW_LINE> <INDENT> NAME = None <NEW_LINE> DESCRIPTION = None <NEW_LINE> def add_specific_arguments(self, parser): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def add_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument( "topology_jar", metavar="TOPOLOGY_JAR", help="Path to a Pyleus topology jar.") <NEW_LINE> self.add_specific_arguments(parser) <NEW_LINE> <DEDENT> def run_topology(jar_path, configs): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def run(self, configs): <NEW_LINE> <INDENT> jar_path = configs.topology_jar <NEW_LINE> if not is_jar(jar_path): <NEW_LINE> <INDENT> self.error("Invalid jar: {0}".format(jar_path)) <NEW_LINE> <DEDENT> self.run_topology(jar_path, configs) | Run subcommand class. | 625990812c8b7c6e89bd52c5 |
class TestRecomVirtualCategory(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testRecomVirtualCategory(self): <NEW_LINE> <INDENT> pass | RecomVirtualCategory unit test stubs | 62599081091ae3566870671f |
class LoginPage(Page): <NEW_LINE> <INDENT> url = '/index' <NEW_LINE> login_username_loc = (By.NAME, "username") <NEW_LINE> login_password_loc = (By.ID, "pass") <NEW_LINE> login_button_loc = (By.CLASS_NAME, "but") <NEW_LINE> def login_username(self, username): <NEW_LINE> <INDENT> self.find_element(*self.login_username_loc).send_keys(username) <NEW_LINE> <DEDENT> def login_password(self, password): <NEW_LINE> <INDENT> self.find_element(*self.login_password_loc).send_keys(password) <NEW_LINE> <DEDENT> def login_button(self): <NEW_LINE> <INDENT> self.find_element(*self.login_button_loc).click() <NEW_LINE> <DEDENT> def user_login(self, username='13916725407', password='111111'): <NEW_LINE> <INDENT> self.open(self.url) <NEW_LINE> self.login_username(username) <NEW_LINE> self.login_password(password) <NEW_LINE> self.login_button() <NEW_LINE> time.sleep(5) <NEW_LINE> <DEDENT> login_error_hint_loc = (By.XPATH, "//div[@class='error-text']/span") <NEW_LINE> login_success_loc = (By.CLASS_NAME, "userName") <NEW_LINE> def login_error_hint(self): <NEW_LINE> <INDENT> return self.find_element(*self.login_error_hint_loc).text <NEW_LINE> <DEDENT> def login_success(self): <NEW_LINE> <INDENT> return self.find_element(*self.login_success_loc).text | User login page | 625990814f88993c371f1292
class InputSchema(object): <NEW_LINE> <INDENT> def __init__(self, schema): <NEW_LINE> <INDENT> self.__context = msgs.INPUT_EV_CONTEXT_BROWSER <NEW_LINE> self.__event = None <NEW_LINE> self.__mapping = {} <NEW_LINE> try: <NEW_LINE> <INDENT> logging.debug("parsing schema") <NEW_LINE> self.__parse_schema(schema) <NEW_LINE> logging.debug("parsing schema done") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logging.error("syntax error in schema:\n%s\n%s", schema, logging.stacktrace()) <NEW_LINE> <DEDENT> <DEDENT> def __parse_schema(self, schema): <NEW_LINE> <INDENT> context = None <NEW_LINE> for line in schema.splitlines(): <NEW_LINE> <INDENT> if (line.startswith("#") or not line.strip()): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if (line[0] == "["): <NEW_LINE> <INDENT> name = line.strip()[1:-1] <NEW_LINE> context = getattr(msgs, "INPUT_EV_CONTEXT_" + name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parts = line.strip().split() <NEW_LINE> name = parts[0] <NEW_LINE> key = getattr(msgs, "HWKEY_EV_" + name) <NEW_LINE> name = parts[1] <NEW_LINE> if (name == "-"): <NEW_LINE> <INDENT> event = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> event = getattr(msgs, "INPUT_EV_" + name) <NEW_LINE> <DEDENT> name = parts[2] <NEW_LINE> if (name == "-"): <NEW_LINE> <INDENT> new_context = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_context = getattr(msgs, "INPUT_EV_CONTEXT_" + name) <NEW_LINE> <DEDENT> self.__mapping[(key, context)] = (event, new_context) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def send_key(self, key): <NEW_LINE> <INDENT> if ((key, self.__context) in self.__mapping): <NEW_LINE> <INDENT> self.__event, new_context = self.__mapping[(key, self.__context)] <NEW_LINE> <DEDENT> elif ((key, None) in self.__mapping): <NEW_LINE> <INDENT> self.__event, new_context = self.__mapping[(key, None)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__event = None <NEW_LINE> new_context = None <NEW_LINE> <DEDENT> if (new_context): <NEW_LINE> <INDENT> self.set_context(new_context) <NEW_LINE> <DEDENT> <DEDENT> def get_context(self): <NEW_LINE> <INDENT> return self.__context <NEW_LINE> <DEDENT> def set_context(self, context): <NEW_LINE> <INDENT> self.__context = context <NEW_LINE> if (logging.is_level(logging.DEBUG)): <NEW_LINE> <INDENT> logging.debug("input context changed: %s", msgs._id_to_name(context)) <NEW_LINE> <DEDENT> <DEDENT> def get_event(self): <NEW_LINE> <INDENT> return self.__event | Class for parsing and representing an input schema. The input schema is
a state machine. | 62599081ad47b63b2c5a9331 |
@content( 'workinggroup', icon='glyphicon glyphicon-align-left', ) <NEW_LINE> @implementer(IWorkingGroup) <NEW_LINE> class WorkingGroup(VisualisableElement, Entity): <NEW_LINE> <INDENT> name = renamer() <NEW_LINE> template = 'pontus:templates/visualisable_templates/object.pt' <NEW_LINE> proposal = SharedUniqueProperty('proposal', 'working_group') <NEW_LINE> members = SharedMultipleProperty('members', 'working_groups') <NEW_LINE> wating_list = SharedMultipleProperty('wating_list') <NEW_LINE> ballots = CompositeMultipleProperty('ballots') | Working group class | 62599081656771135c48ada0 |
class FastqSampleExtractAgent(Agent): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> super(FastqSampleExtractAgent, self).__init__(parent) <NEW_LINE> options = [ {"name": "in_fastq", "type": "infile", "format": "sequence.fastq"}, {"name": "file_sample_list", "type": "outfile", "format": "sequence.info_txt"}, {"name": "out_fa", "type": "outfile", "format": "sequence.fasta_dir"}, {"name": "length_dir", "type": "outfile", "format": "sequence.length_dir"} ] <NEW_LINE> self.add_option(options) <NEW_LINE> self.step.add_steps("sample_extract") <NEW_LINE> self.on('start', self.start_sample_extract) <NEW_LINE> self.on("end", self.end_sample_extract) <NEW_LINE> <DEDENT> def start_sample_extract(self): <NEW_LINE> <INDENT> self.step.sample_extract.start() <NEW_LINE> self.step.update() <NEW_LINE> <DEDENT> def end_sample_extract(self): <NEW_LINE> <INDENT> self.step.sample_extract.finish() <NEW_LINE> self.step.update() <NEW_LINE> <DEDENT> def check_options(self): <NEW_LINE> <INDENT> if not self.option("in_fastq").is_set: <NEW_LINE> <INDENT> raise OptionError("参数in_fastq不能为空") <NEW_LINE> <DEDENT> <DEDENT> def set_resource(self): <NEW_LINE> <INDENT> self._cpu = 4 <NEW_LINE> self._memory = "4G" | Extract sample information from a fastq file or fastq folder | 625990814c3428357761bd9c
class InputFile(base.TelegramObject): <NEW_LINE> <INDENT> def __init__(self, path_or_bytesio: Union[str, io.IOBase, Path, '_WebPipe'], filename=None, conf=None): <NEW_LINE> <INDENT> super(InputFile, self).__init__(conf=conf) <NEW_LINE> if isinstance(path_or_bytesio, str): <NEW_LINE> <INDENT> self._file = open(path_or_bytesio, 'rb') <NEW_LINE> self._path = path_or_bytesio <NEW_LINE> if filename is None: <NEW_LINE> <INDENT> filename = os.path.split(path_or_bytesio)[-1] <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(path_or_bytesio, (io.IOBase, _WebPipe)): <NEW_LINE> <INDENT> self._path = None <NEW_LINE> self._file = path_or_bytesio <NEW_LINE> <DEDENT> elif isinstance(path_or_bytesio, Path): <NEW_LINE> <INDENT> self._file = path_or_bytesio.open("rb") <NEW_LINE> self._path = path_or_bytesio.resolve() <NEW_LINE> if filename is None: <NEW_LINE> <INDENT> filename = path_or_bytesio.name <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('Not supported file type.') <NEW_LINE> <DEDENT> self._filename = filename <NEW_LINE> self.attachment_key = secrets.token_urlsafe(16) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if not hasattr(self, '_file'): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if inspect.iscoroutinefunction(self._file.close): <NEW_LINE> <INDENT> return asyncio.ensure_future(self._file.close()) <NEW_LINE> <DEDENT> self._file.close() <NEW_LINE> <DEDENT> @property <NEW_LINE> def filename(self): <NEW_LINE> <INDENT> if self._filename is None: <NEW_LINE> <INDENT> self._filename = api.guess_filename(self._file) <NEW_LINE> <DEDENT> return self._filename <NEW_LINE> <DEDENT> @filename.setter <NEW_LINE> def filename(self, value): <NEW_LINE> <INDENT> self._filename = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def attach(self): <NEW_LINE> <INDENT> return f"attach://{self.attachment_key}" <NEW_LINE> <DEDENT> def get_filename(self) -> str: <NEW_LINE> <INDENT> return self.filename <NEW_LINE> <DEDENT> @property <NEW_LINE> def file(self): <NEW_LINE> <INDENT> return self._file <NEW_LINE> <DEDENT> def get_file(self): <NEW_LINE> <INDENT> return self.file <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_url(cls, url, filename=None, chunk_size=CHUNK_SIZE): <NEW_LINE> <INDENT> pipe = _WebPipe(url, chunk_size=chunk_size) <NEW_LINE> if filename is None: <NEW_LINE> <INDENT> filename = pipe.name <NEW_LINE> <DEDENT> return cls(pipe, filename) <NEW_LINE> <DEDENT> def save(self, filename, chunk_size=CHUNK_SIZE): <NEW_LINE> <INDENT> with open(filename, 'wb') as fp: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> data = self.file.read(chunk_size) <NEW_LINE> if not data: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> fp.write(data) <NEW_LINE> <DEDENT> fp.flush() <NEW_LINE> <DEDENT> if self.file.seekable(): <NEW_LINE> <INDENT> self.file.seek(0) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"<InputFile 'attach://{self.attachment_key}' with file='{self.file}'>" <NEW_LINE> <DEDENT> __repr__ = __str__ <NEW_LINE> def to_python(self): <NEW_LINE> <INDENT> raise TypeError('Object of this type is not exportable!') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def to_object(cls, data): <NEW_LINE> <INDENT> raise TypeError('Object of this type is not importable!') | This object represents the contents of a file to be uploaded.
Must be posted using multipart/form-data in the usual way that files are uploaded via the browser.
Note that this is not a typical TelegramObject!
https://core.telegram.org/bots/api#inputfile | 6259908144b2445a339b76cd |
class Alias(Base, Email): <NEW_LINE> <INDENT> __tablename__ = "alias" <NEW_LINE> domain = db.relationship(Domain, backref=db.backref('aliases', cascade='all, delete-orphan')) <NEW_LINE> wildcard = db.Column(db.Boolean(), nullable=False, default=False) <NEW_LINE> destination = db.Column(CommaSeparatedList, nullable=False, default=[]) | An alias is an email address that redirects to some destination.
| 62599081d8ef3951e32c8bd0 |
class CallValidation(GlobalValidationToken, Generic[T]): <NEW_LINE> <INDENT> def __init__(self, func: Callable[..., T], *args, **kwargs): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> self.args = args <NEW_LINE> self.kwargs = kwargs <NEW_LINE> <DEDENT> def inner(self, v): <NEW_LINE> <INDENT> return self.func(v, *self.args, **self.kwargs) <NEW_LINE> <DEDENT> def __call__(self, *_): <NEW_LINE> <INDENT> return self.inner | A validation token to call arbitrary functions | 625990811f5feb6acb1646db
class BaseGeometry: <NEW_LINE> <INDENT> def area(self): <NEW_LINE> <INDENT> raise Exception('area() is not implemented') <NEW_LINE> <DEDENT> def integer_validator(self, name, value): <NEW_LINE> <INDENT> if type(value) != int: <NEW_LINE> <INDENT> raise TypeError("{} must be an integer".format(name)) <NEW_LINE> <DEDENT> elif value <= 0: <NEW_LINE> <INDENT> raise ValueError("{} must be greater than 0".format(name)) | Defines BaseGeometry. | 62599081ec188e330fdfa38c |
class GeoIndex(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.index = {} <NEW_LINE> self.search_service = lookup("SearchService") <NEW_LINE> self.id_service = lookup("IdLookup") <NEW_LINE> self.kdindex = kdtree.create(dimensions=2) <NEW_LINE> <DEDENT> def add(self, record): <NEW_LINE> <INDENT> self.kdindex.add(CityKD(PROJECT_XY(record.lon, record.lat), record.geonameid)) <NEW_LINE> <DEDENT> def nearest(self, geonameid, k=7): <NEW_LINE> <INDENT> record = self.id_service.get(geonameid) <NEW_LINE> results = [] <NEW_LINE> for node, distance in self.kdindex.search_knn(PROJECT_XY(record.lon, record.lat), k): <NEW_LINE> <INDENT> results.append((self.id_service.get(node.data.geonameid), distance)) <NEW_LINE> <DEDENT> return results | Geo index service using lon, lat | 625990814f88993c371f1293
class NestedDict(dict): <NEW_LINE> <INDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.setdefault(key, self.__class__()) | Automated nested dictionary.
>>> nd = NestedDict()
>>> nd['a']['b']['c'] = 1
>>> nd
{'a': {'b': {'c': 1}}} | 62599081656771135c48ada1 |
class PluginNotFound(Exception): <NEW_LINE> <INDENT> pass | Raised when the plugin could not be found in the rendering process. | 62599081f548e778e596d074 |
class TaskList(models.Model): <NEW_LINE> <INDENT> title = models.CharField( max_length=50, unique=True, ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> def is_complete(self): <NEW_LINE> <INDENT> return all(task.is_done for task in self.task_set.all()) | List of tasks. | 6259908150812a4eaa621936 |
class LinearWarmup(LambdaLR): <NEW_LINE> <INDENT> def __init__(self, optimizer, warmup_steps, last_epoch=-1): <NEW_LINE> <INDENT> self.warmup_steps = warmup_steps <NEW_LINE> self.complete = False <NEW_LINE> super(LinearWarmup, self).__init__(optimizer, self.lr_lambda, last_epoch=last_epoch) <NEW_LINE> <DEDENT> def lr_lambda(self, step): <NEW_LINE> <INDENT> if step < self.warmup_steps: <NEW_LINE> <INDENT> return float(step) / float(max(1.0, self.warmup_steps)) <NEW_LINE> <DEDENT> self.complete = True <NEW_LINE> return 1. | Linear warmup and then constant.
Linearly increases learning rate schedule from 0 to 1 over `warmup_steps` training steps.
Keeps learning rate schedule equal to 1. after warmup_steps.
From https://bit.ly/39o2W1f | 625990811f5feb6acb1646dd |
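A minimal usage sketch for the LinearWarmup scheduler in the row above, assuming it is defined next to a standard PyTorch optimizer (the torch import and the LambdaLR base-class import are assumptions; they are not shown in the row):

    import torch
    from torch.optim.lr_scheduler import LambdaLR  # base class the row above inherits from

    model = torch.nn.Linear(10, 2)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    scheduler = LinearWarmup(optimizer, warmup_steps=100)

    for step in range(200):
        optimizer.step()   # learning rate ramps linearly from 0 to 0.1 over the first 100 steps
        scheduler.step()   # and stays at 0.1 afterwards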
class JavaObject(object): <NEW_LINE> <INDENT> def __init__(self, full_class_name=None, args_list=None,): <NEW_LINE> <INDENT> self.full_class_name = full_class_name <NEW_LINE> self.args_list = args_list <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.full_class_name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.args_list = [] <NEW_LINE> (_etype3, _size0) = iprot.readListBegin() <NEW_LINE> for _i4 in range(_size0): <NEW_LINE> <INDENT> _elem5 = JavaObjectArg() <NEW_LINE> _elem5.read(iprot) <NEW_LINE> self.args_list.append(_elem5) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('JavaObject') <NEW_LINE> if self.full_class_name is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('full_class_name', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.full_class_name.encode('utf-8') if sys.version_info[0] == 2 else self.full_class_name) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.args_list is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('args_list', TType.LIST, 2) <NEW_LINE> oprot.writeListBegin(TType.STRUCT, len(self.args_list)) <NEW_LINE> for iter6 in self.args_list: <NEW_LINE> <INDENT> iter6.write(oprot) <NEW_LINE> <DEDENT> oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.full_class_name is None: <NEW_LINE> <INDENT> raise TProtocolException(message='Required field full_class_name is unset!') <NEW_LINE> <DEDENT> if self.args_list is None: <NEW_LINE> <INDENT> raise TProtocolException(message='Required field args_list is unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- full_class_name
- args_list | 6259908155399d3f05627ff7 |
class TrainModel: <NEW_LINE> <INDENT> def __init__(self, run_id, data_path): <NEW_LINE> <INDENT> self.run_id = run_id <NEW_LINE> self.data_path = data_path <NEW_LINE> self.logger = Logger(self.run_id, 'TrainModel', 'training') <NEW_LINE> self.loadValidate = LoadValidate(self.run_id, self.data_path, 'training') <NEW_LINE> self.preProcess = Preprocessor(self.run_id, self.data_path, 'training') <NEW_LINE> self.modelTuner = ModelTuner(self.run_id, self.data_path, 'training') <NEW_LINE> self.fileOperation = FileOperation(self.run_id, self.data_path, 'training') <NEW_LINE> <DEDENT> def training_model(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.logger.info('Start of Training') <NEW_LINE> self.logger.info('Run_id:' + str(self.run_id)) <NEW_LINE> self.loadValidate.validate_trainset() <NEW_LINE> self.X, self.y = self.preProcess.preprocess_trainset() <NEW_LINE> columns = {"data_columns": [col for col in self.X.columns]} <NEW_LINE> with open('apps/database/columns.json', 'w') as f: <NEW_LINE> <INDENT> f.write(json.dumps(columns)) <NEW_LINE> x_train, x_test, y_train, y_test = train_test_split(self.X, self.y, test_size=0.2, random_state=0) <NEW_LINE> best_model_name, best_model = self.modelTuner.get_best_model(x_train, y_train, x_test, y_test) <NEW_LINE> save_model = self.fileOperation.save_model(best_model, best_model_name) <NEW_LINE> <DEDENT> self.logger.info('End of Training') <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.logger.exception('Unsuccessful End of Training') <NEW_LINE> raise Exception | *****************************************************************************
*
* filename: TrainModel.py
* version: 1.0
* author:
* creation date:
*
*
*
*
* description: Class to train the models
*
**************************************************************************** | 625990817cff6e4e811b7525 |