code (string, 4–4.48k chars) | docstring (string, 1–6.45k chars) | _id (string, 24 chars) |
---|---|---|
class BasisSelectionDefinition(IUserDefinition): <NEW_LINE> <INDENT> type = "basis_selection" <NEW_LINE> _keywords = ['molecule','origin','x_axis','y_axis'] | The user definable object used for storing basis selection. | 6259906f097d151d1a2c28f0 |
class ValidateCodeForm(forms.Form): <NEW_LINE> <INDENT> promo_code = forms.CharField(max_length=10, required=True) | Form to validate a coupon code; this form is not related to any object | 6259906f460517430c432c96 |
class KytosLinkCreationError(Exception): <NEW_LINE> <INDENT> pass | Exception thrown when the link has an empty endpoint. | 6259906f4a966d76dd5f0769 |
class Employee: <NEW_LINE> <INDENT> num_of_emps = 0 <NEW_LINE> raise_amt = 1.04 <NEW_LINE> def __init__(self, first, last, pay): <NEW_LINE> <INDENT> self.first = first <NEW_LINE> self.last = last <NEW_LINE> self.pay = pay <NEW_LINE> Employee.num_of_emps += 1 <NEW_LINE> <DEDENT> def fullname(self): <NEW_LINE> <INDENT> return '{} {}'.format (self.first, self.last) <NEW_LINE> <DEDENT> def apply_raise(self): <NEW_LINE> <INDENT> self.pay = int(self.pay * self.raise_amt) | description of class | 6259906f7047854f46340c36 |
class Depthcharge(Boot): <NEW_LINE> <INDENT> def __init__(self, parent, parameters): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self.action = DepthchargeAction() <NEW_LINE> self.action.section = self.action_type <NEW_LINE> self.action.job = self.job <NEW_LINE> parent.add_action(self.action, parameters) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def accepts(cls, device, parameters): <NEW_LINE> <INDENT> if parameters['method'] != 'depthcharge': <NEW_LINE> <INDENT> return False, '"method" was not "depthcharge"' <NEW_LINE> <DEDENT> if 'commands' not in parameters: <NEW_LINE> <INDENT> raise ConfigurationError( "commands not specified in boot parameters") <NEW_LINE> <DEDENT> if 'depthcharge' not in device['actions']['boot']['methods']: <NEW_LINE> <INDENT> return ( False, '"depthcharge" was not in the device configuration boot methods' ) <NEW_LINE> <DEDENT> return True, 'accepted' | Depthcharge is a payload used by Coreboot in recent ChromeOS machines.
This boot strategy works with the "netboot" build variant of Depthcharge,
which just downloads files via TFTP from hardcoded locations, with the IP
address of the server also hardcoded in the firmware image. One of the
downloaded files is a FIT image that contains the kernel, ramdisk and
device tree blob, and the other contains the kernel arguments. | 6259906fa05bb46b3848bd6c |
class MainHandler(BlogHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> self.redirect('/blog') | Redirects to the HomePage of the website | 6259906f2c8b7c6e89bd5066 |
class Translation(models.Model): <NEW_LINE> <INDENT> language = models.ForeignKey('Language') <NEW_LINE> original = models.ForeignKey('Entry') <NEW_LINE> translated = models.ForeignKey('Entry', related_name="translated") <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u"Translation of %s into %s" % (self.original, self.language,) <NEW_LINE> <DEDENT> def get_link(self): <NEW_LINE> <INDENT> url = self.translated.get_absolute_url() <NEW_LINE> return u'<a href="%s">%s</a>' % (url, self.language,) <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return self.translated.get_absolute_url() | Link together two entries, where @translated is a translation of
@original in the language @language. | 6259906fbf627c535bcb2d4b |
class index(object): <NEW_LINE> <INDENT> def GET(self): <NEW_LINE> <INDENT> web.header('Content-Type', 'text/html; charset=utf-8', unique=True) <NEW_LINE> return "<a href='/dnspodlogin'>使用DNSPod登陆</a>" | Display the login link | 6259906f3d592f4c4edbc761 |
class DEMACrossoverSignal(BaseSignalGenerator): <NEW_LINE> <INDENT> def __init__(self, market, interval, ema_short, ema_long, strategy): <NEW_LINE> <INDENT> super().__init__(market, interval, strategy) <NEW_LINE> self.fma = ExponentialMovingAverage(self.market, interval, ema_short) <NEW_LINE> self.sma = ExponentialMovingAverage(self.market, interval, ema_long) <NEW_LINE> self.threshold = .025 <NEW_LINE> <DEDENT> def check_condition(self, new_candle): <NEW_LINE> <INDENT> self.strategy.print_message("GETTING DEMA CROSSOVER SIGNAL") <NEW_LINE> if (self.sma.value is not None) & (self.fma.value is not None): <NEW_LINE> <INDENT> self.strategy.print_message("SMA: " + str(self.sma.value)) <NEW_LINE> self.strategy.print_message("FMA: " + str(self.fma.value)) <NEW_LINE> if (self.fma.value - self.sma.value) > self.threshold: <NEW_LINE> <INDENT> self.strategy.print_message("Currently in up-trend. Buy signal TRUE") <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> return False | "This signal generator is a copy of the DEMA strategy example used in gekko
This strategy is similar to the sma_crossover_signal except it is simpler in that it does not worry about caching candles
...it simply signals true when FMA > SMA and false when SMA > FMA by an amount greater than the threshold | 6259906f7d43ff2487428052 |
class BuildMagicException(Exception): <NEW_LINE> <INDENT> msg = 'build-magic error' <NEW_LINE> def __init__(self, exception=None, message=''): <NEW_LINE> <INDENT> if exception: <NEW_LINE> <INDENT> super().__init__(f'{self.msg}: {str(exception)}') <NEW_LINE> <DEDENT> elif message: <NEW_LINE> <INDENT> super().__init__(f'{self.msg}: {message}') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super().__init__(f'{self.msg}') | build-magic base exception class. | 6259906f2ae34c7f260ac969 |
class _HapStatusCodes(object): <NEW_LINE> <INDENT> SUCCESS = 0 <NEW_LINE> INSUFFICIENT_PRIVILEGES = -70401 <NEW_LINE> UNABLE_TO_COMMUNICATE = -70402 <NEW_LINE> RESOURCE_BUSY = -70403 <NEW_LINE> CANT_WRITE_READ_ONLY = -70404 <NEW_LINE> CANT_READ_WRITE_ONLY = -70405 <NEW_LINE> NOTIFICATION_NOT_SUPPORTED = -70406 <NEW_LINE> OUT_OF_RESOURCES = -70407 <NEW_LINE> TIMED_OUT = -70408 <NEW_LINE> RESOURCE_NOT_EXIST = -70409 <NEW_LINE> INVALID_VALUE = -70410 <NEW_LINE> INSUFFICIENT_AUTH = -70411 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._codes = { _HapStatusCodes.SUCCESS: 'This specifies a success for the request.', _HapStatusCodes.INSUFFICIENT_PRIVILEGES: 'Request denied due to insufficient privileges.', _HapStatusCodes.UNABLE_TO_COMMUNICATE: 'Unable to communicate with requested service, e.g. the power to the accessory was turned off.', _HapStatusCodes.RESOURCE_BUSY: 'Resource is busy, try again.', _HapStatusCodes.CANT_WRITE_READ_ONLY: 'Cannot write to read only characteristic.', _HapStatusCodes.CANT_READ_WRITE_ONLY: 'Cannot read from a write only characteristic.', _HapStatusCodes.NOTIFICATION_NOT_SUPPORTED: 'Notification is not supported for characteristic.', _HapStatusCodes.OUT_OF_RESOURCES: 'Out of resources to process request.', _HapStatusCodes.TIMED_OUT: 'Operation timed out.', _HapStatusCodes.RESOURCE_NOT_EXIST: 'Resource does not exist.', _HapStatusCodes.INVALID_VALUE: 'Accessory received an invalid value in a write request.', _HapStatusCodes.INSUFFICIENT_AUTH: 'Insufficient Authorization.' } <NEW_LINE> self._categories_rev = {self._codes[k]: k for k in self._codes.keys()} <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> if item in self._codes: <NEW_LINE> <INDENT> return self._codes[item] <NEW_LINE> <DEDENT> raise KeyError('Item {item} not found'.format(item=item)) | This data is taken from Table 5-12 HAP Satus Codes on page 80. | 6259906ff548e778e596ce0d |
@enum.unique <NEW_LINE> @functools.total_ordering <NEW_LINE> class VerdictClassification(enum.Enum): <NEW_LINE> <INDENT> Benign = "benign" <NEW_LINE> Indeterminate = "indeterminate" <NEW_LINE> Threat = "threat" <NEW_LINE> def __lt__(self, other): <NEW_LINE> <INDENT> members = list(self.__class__) <NEW_LINE> return members.index(self) < members.index(other) | An enumeration of classification markers for malware verdicts.
Note that the order of declaration is important: it provides
the appropriate ordering behavior when finding the minimum
and maximum classifications for a set of verdicts. | 6259906f92d797404e38979b |
class SricErrorBroadcast(Exception): <NEW_LINE> <INDENT> pass | Cannot listen on broadcast address | 6259906f16aa5153ce401d5b |
class CollectUser(pyblish.api.ContextPlugin): <NEW_LINE> <INDENT> order = pyblish.api.CollectorOrder <NEW_LINE> label = '获取当前用户' <NEW_LINE> def process(self, context): <NEW_LINE> <INDENT> assert isinstance(context, pyblish.api.Context) <NEW_LINE> name = cgtwq.ACCOUNT.select( cgtwq.get_account_id()).to_entry()['name'] <NEW_LINE> context.data['artist'] = name <NEW_LINE> context.data['accountID'] = cgtwq.get_account_id() <NEW_LINE> context.create_instance( '制作者: {}'.format(name), family='制作者' ) | Get the account of the currently logged-in user. | 6259906f91f36d47f2231acf |
class ComponentEditMonoTest(ComponentEditTest): <NEW_LINE> <INDENT> def create_component(self): <NEW_LINE> <INDENT> return self.create_ts_mono() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def remove_units(store): <NEW_LINE> <INDENT> store.parse(store.XMLskeleton.replace("\n", "").encode('utf-8')) <NEW_LINE> store.save() <NEW_LINE> <DEDENT> def test_unit_add(self): <NEW_LINE> <INDENT> translation = self.component.translation_set.get(language_code='cs') <NEW_LINE> self.remove_units(translation.store.store) <NEW_LINE> del translation.__dict__['store'] <NEW_LINE> unit = translation.unit_set.all()[0] <NEW_LINE> request = self.get_request('/') <NEW_LINE> self.assertTrue( unit.translate(request, ['Empty'], STATE_TRANSLATED) ) | Test for error handling | 6259906fcc0a2c111447c711 |
class PortugalExt(Portugal): <NEW_LINE> <INDENT> def _populate(self, year): <NEW_LINE> <INDENT> super(PortugalExt, self)._populate(year) <NEW_LINE> e = easter(year) <NEW_LINE> self[e - rd(days=47)] = "Carnaval" <NEW_LINE> self[date(year, 12, 24)] = "Vespera de Natal" <NEW_LINE> self[date(year, 12, 26)] = "26 de Dezembro" <NEW_LINE> self[date(year, 12, 31)] = "Vespera de Ano novo" <NEW_LINE> self[date(year, 6, 13)] = "Dia de Santo António" | Adds extended days that most people have as a bonus from their companies:
- Carnival
- the day before and after xmas
- the day before the new year
- Lisbon's city holiday | 6259906f66673b3332c31c7f |
class Event(object): <NEW_LINE> <INDENT> def execute(self, state, *args): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def end(self, state, *args): <NEW_LINE> <INDENT> raise NotImplementedError | Interface for random events.
Events are functions that modify the state of the board when called, but do not cause state transitions. | 6259906f379a373c97d9a8a1 |
class AllowReadWriteTask(task.Task): <NEW_LINE> <INDENT> def execute(self, client, snap_name, *args, **kwargs): <NEW_LINE> <INDENT> LOG.debug('%s.execute', self.__class__.__name__) <NEW_LINE> client.modify_snapshot(snap_name, allow_rw=True) <NEW_LINE> <DEDENT> def revert(self, result, client, snap_name, *args, **kwargs): <NEW_LINE> <INDENT> method_name = '%s.revert' % self.__class__.__name__ <NEW_LINE> LOG.warning(_LW('%(method_name)s: ' 'setting snapshot %(snap_name)s to read-only.'), {'method_name': method_name, 'snap_name': snap_name}) <NEW_LINE> client.modify_snapshot(snap_name, allow_rw=False) | Task to modify a Snapshot to allow ReadWrite on it. | 6259906f01c39578d7f14375 |
class UnNormalize(object): <NEW_LINE> <INDENT> def __init__(self, mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)): <NEW_LINE> <INDENT> self.mean = mean <NEW_LINE> self.std = std <NEW_LINE> <DEDENT> def __call__(self, tensor): <NEW_LINE> <INDENT> for t, m, s in zip(tensor, self.mean, self.std): <NEW_LINE> <INDENT> t.mul_(s).add_(m) <NEW_LINE> <DEDENT> return tensor | Inverse normalize the image for visualize | 6259906fe76e3b2f99fda283 |
class SeatAccessPermissions(BaseAccessPermissions): <NEW_LINE> <INDENT> def can_retrieve(self, user): <NEW_LINE> <INDENT> return user.has_perm('openslides_votecollector.can_manage_votecollector') <NEW_LINE> <DEDENT> def get_serializer_class(self, user=None): <NEW_LINE> <INDENT> from .serializers import SeatSerializer <NEW_LINE> return SeatSerializer | Access permissions container for Seat and SeatViewSet. | 6259906f63b5f9789fe869e4 |
class TransfiniteError(RuntimeError): <NEW_LINE> <INDENT> pass | Raised when an operation is attempted that may take an infinite number of
steps, such as comparing two infinite sets. | 6259906fadb09d7d5dc0bdec |
class ThiopeptideLayer(RegionLayer): <NEW_LINE> <INDENT> def __init__(self, record: RecordLayer, results: ThioResults, region_feature: Region) -> None: <NEW_LINE> <INDENT> RegionLayer.__init__(self, record, region_feature) <NEW_LINE> self.motifs = [] <NEW_LINE> for motif in results.motifs: <NEW_LINE> <INDENT> if motif.is_contained_by(self.region_feature) and isinstance(motif, Prepeptide): <NEW_LINE> <INDENT> self.motifs.append(motif) | A wrapper of RegionLayer to allow for tracking the ThiopeptideMotifs | 6259906f1f037a2d8b9e54ab |
class Rectangle: <NEW_LINE> <INDENT> def __init__(self, width=0, height=0): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, value): <NEW_LINE> <INDENT> if type(value) != int: <NEW_LINE> <INDENT> raise TypeError("width must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("width must be >= 0") <NEW_LINE> <DEDENT> self.__width = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value): <NEW_LINE> <INDENT> if type(value) != int: <NEW_LINE> <INDENT> raise TypeError("height must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("height must be >= 0") <NEW_LINE> <DEDENT> self.__height = value <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return (self.width * self.height) <NEW_LINE> <DEDENT> def perimeter(self): <NEW_LINE> <INDENT> if self.width == 0 or self.height == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return ((self.width + self.height) * 2) | Class for rectangles
| 6259906fec188e330fdfa123 |
@DeviceStateMachine.state_class <NEW_LINE> class android_extracting(statemachine.State): <NEW_LINE> <INDENT> TIMEOUT = 600 <NEW_LINE> PERMANENT_FAILURE_COUNT = 10 <NEW_LINE> def on_android_rebooting(self, args): <NEW_LINE> <INDENT> self.machine.clear_counter(self.state_name) <NEW_LINE> self.machine.goto_state(android_rebooting) <NEW_LINE> <DEDENT> def on_timeout(self): <NEW_LINE> <INDENT> if self.machine.increment_counter(self.state_name) > self.PERMANENT_FAILURE_COUNT: <NEW_LINE> <INDENT> self.machine.goto_state(failed_android_extracting) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.machine.goto_state(pxe_power_cycling) | The second-stage script is extracting the Android artifacts onto the
sdcard. When this is complete, the script will send an event and go into
the 'android_rebooting' state. | 6259906f56b00c62f0fb414f |
class CorrectionFactorSetIterator(APIObject,IDisposable,IEnumerator): <NEW_LINE> <INDENT> def Dispose(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def MoveNext(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def next(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ReleaseManagedResources(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ReleaseUnmanagedResources(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Reset(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __enter__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __exit__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __iter__(self,*args): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Current=property(lambda self: object(),lambda self,v: None,lambda self: None) | An iterator to a correction factor set.
CorrectionFactorSetIterator() | 6259906f26068e7796d4e1bd |
class Atomindices(TopologyAttr): <NEW_LINE> <INDENT> attrname = 'indices' <NEW_LINE> singular = 'index' <NEW_LINE> target_classes = [Atom] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._guessed = False <NEW_LINE> <DEDENT> def set_atoms(self, ag, values): <NEW_LINE> <INDENT> raise AttributeError("Atom indices are fixed; they cannot be reset") <NEW_LINE> <DEDENT> def get_atoms(self, ag): <NEW_LINE> <INDENT> return ag._ix <NEW_LINE> <DEDENT> def get_residues(self, rg): <NEW_LINE> <INDENT> return list(self.top.tt.residues2atoms_2d(rg._ix)) <NEW_LINE> <DEDENT> def get_segments(self, sg): <NEW_LINE> <INDENT> return list(self.top.tt.segments2atoms_2d(sg._ix)) | Globally unique indices for each atom in the group.
If the group is an AtomGroup, then this gives the index for each atom in
the group. This is the unambiguous identifier for each atom in the
topology, and it is not alterable.
If the group is a ResidueGroup or SegmentGroup, then this gives the indices
of each atom represented in the group in a 1-D array, in the order of the
elements in that group. | 6259906fb7558d5895464b73 |
class SkipTest(Exception): <NEW_LINE> <INDENT> def __init__(self, reason="Test skipped"): <NEW_LINE> <INDENT> super(SkipTest, self).__init__(reason) <NEW_LINE> self.reason = reason | This exception should be raised in order to interrupt the execution of the currently running test, marking
it as skipped | 6259906fa8370b77170f1c4a |
class Province(models.Model): <NEW_LINE> <INDENT> code = models.IntegerField(verbose_name="省代码", unique=True) <NEW_LINE> name = models.CharField(max_length=64, verbose_name="省名称", unique=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{} - {}".format(self.code, self.name) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = "24. 省" | Province table | 6259906f3d592f4c4edbc763 |
class COMListener: <NEW_LINE> <INDENT> def __init__(self, options): <NEW_LINE> <INDENT> self.options = options <NEW_LINE> self.listener_input = None <NEW_LINE> self.com_input = None <NEW_LINE> <DEDENT> def com_listener_main(self, address, listener_input, com_input): <NEW_LINE> <INDENT> self.listener_input = listener_input <NEW_LINE> self.com_input = com_input <NEW_LINE> self.com_input.put(Message(address, 'MAIN_LEVEL', 'info', { 'message': 'COM_LISTENER started on port ' + address })) <NEW_LINE> try: <NEW_LINE> <INDENT> with Serial(address, self.options["BAUD"], timeout=self.options["COM_PORT_TIMEOUT"]) as port: <NEW_LINE> <INDENT> port.write(bytes("99 0", "utf-8")) <NEW_LINE> response = port.readline().strip().decode() <NEW_LINE> if not response == '': <NEW_LINE> <INDENT> while port.inWaiting() > 0: <NEW_LINE> <INDENT> response = response + port.read(port.inWaiting()).strip().decode() <NEW_LINE> <DEDENT> self.com_input.put(Message(address, 'COM_LEVEL', 'command', { 'directive': 'add', 'message': 'Added a robot on port ' + address })) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.com_input.put(Message(address, 'COM_LEVEL', 'command', { 'directive': 'failure', 'message': 'Could not add robot on port ' + address })) <NEW_LINE> <DEDENT> port.close() <NEW_LINE> <DEDENT> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> self.com_input.put(Message(address, 'COM_LEVEL', 'command', { 'directive': 'failure', 'message': 'Failed with the following error: ' + str(err) })) <NEW_LINE> if self.options["RAISE_ERRORS_AFTER_CATCH"]: <NEW_LINE> <INDENT> raise | Checks a COM port to see if there is a robot available. If a robot is available, it
sends an "add" message to the communication level.
Args:
options (dict): The dictionary containing the program settings.
Attributes:
options (dict): The dictionary containing the program settings.
listener_input (Queue): The queue for receiving messages in the listener level.
com_input (Queue): The queue for sending messages to the communication level. | 6259906f2ae34c7f260ac96c |
class LoginView(FormView): <NEW_LINE> <INDENT> success_url = '/' <NEW_LINE> form_class = LoginForm <NEW_LINE> template_name = 'users/login.html' <NEW_LINE> redirect_field_name = REDIRECT_FIELD_NAME <NEW_LINE> @method_decorator(sensitive_post_parameters('password')) <NEW_LINE> @method_decorator(csrf_protect) <NEW_LINE> @method_decorator(never_cache) <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if User.objects.count() == 0: <NEW_LINE> <INDENT> return redirect(reverse_lazy('setup-manager')) <NEW_LINE> <DEDENT> request.session.set_test_cookie() <NEW_LINE> return super(LoginView, self).dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> username = self.request.POST["username"] <NEW_LINE> password = self.request.POST["password"] <NEW_LINE> user = authenticate(request=self.request, username=username, password=password) <NEW_LINE> if user is None or not user.is_authenticated: <NEW_LINE> <INDENT> return redirect('/users/login/?next=/') <NEW_LINE> <DEDENT> login(self.request, user) <NEW_LINE> if self.request.session.test_cookie_worked(): <NEW_LINE> <INDENT> self.request.session.delete_test_cookie() <NEW_LINE> <DEDENT> return super(LoginView, self).form_valid(form) <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> redirect_to = self.request.GET[self.redirect_field_name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> redirect_to = self.success_url <NEW_LINE> <DEDENT> if not is_safe_url(url=redirect_to, host=self.request.get_host()): <NEW_LINE> <INDENT> redirect_to = self.success_url <NEW_LINE> <DEDENT> return redirect_to | Provides the ability to login as a user with a username and password | 6259906ff548e778e596ce0f |
class BoundedFloatTextAndSlider(widgets.widget_float._Float, widgets.Box): <NEW_LINE> <INDENT> def __init__(self, description="", **args): <NEW_LINE> <INDENT> text = widgets.BoundedFloatText(description=description,**args) <NEW_LINE> slider = widgets.FloatSlider(**args) <NEW_LINE> traitlets.link( (text, 'value'), (slider, 'value') ) <NEW_LINE> widgets.Box.__init__(self, [text, slider], description=description) <NEW_LINE> self.layout.display = 'flex' <NEW_LINE> self.layout.align_items = 'stretch' <NEW_LINE> traitlets.link( (text, 'value'), (slider, 'value') ) <NEW_LINE> traitlets.link( (text, 'value'), (self, 'value') ) | An input widget to pick a float, either by moving a slider or entering an explicit value | 6259906f91f36d47f2231ad0 |
class Feature(ClientObject): <NEW_LINE> <INDENT> def explain(self): <NEW_LINE> <INDENT> result = self.obj.explain() <NEW_LINE> if result.status != 0: <NEW_LINE> <INDENT> raise ClientError(result.status, result.text) <NEW_LINE> <DEDENT> return result.outArgs['explanation'] <NEW_LINE> <DEDENT> def clearParams(self): <NEW_LINE> <INDENT> result = self.obj.clearParams() <NEW_LINE> if result.status != 0: <NEW_LINE> <INDENT> raise ClientError(result.status, result.text) <NEW_LINE> <DEDENT> return result.outArgs['ret'] <NEW_LINE> <DEDENT> def modifyParams(self, command, params, options={}): <NEW_LINE> <INDENT> result = self.obj.modifyParams(command, params, options) <NEW_LINE> if result.status != 0: <NEW_LINE> <INDENT> raise ClientError(result.status, result.text) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def setName(self, name): <NEW_LINE> <INDENT> result = self.obj.setName(name) <NEW_LINE> if result.status != 0: <NEW_LINE> <INDENT> raise ClientError(result.status, result.text) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def modifyIncludedFeatures(self, command, features, options={}): <NEW_LINE> <INDENT> result = self.obj.modifyIncludedFeatures(command, features, options) <NEW_LINE> if result.status != 0: <NEW_LINE> <INDENT> raise ClientError(result.status, result.text) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def modifyDepends(self, command, depends, options={}): <NEW_LINE> <INDENT> result = self.obj.modifyDepends(command, depends, options) <NEW_LINE> if result.status != 0: <NEW_LINE> <INDENT> raise ClientError(result.status, result.text) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def modifyConflicts(self, command, conflicts, options={}): <NEW_LINE> <INDENT> result = self.obj.modifyConflicts(command, conflicts, options) <NEW_LINE> if result.status != 0: <NEW_LINE> <INDENT> raise ClientError(result.status, result.text) <NEW_LINE> <DEDENT> return | com.redhat.grid.config:Feature | 6259906f442bda511e95d999 |
class SpeechContext(object): <NEW_LINE> <INDENT> swagger_types = { 'phrases': 'list[str]' } <NEW_LINE> attribute_map = { 'phrases': 'phrases' } <NEW_LINE> def __init__(self, phrases=None): <NEW_LINE> <INDENT> self._phrases = None <NEW_LINE> self.discriminator = None <NEW_LINE> if phrases is not None: <NEW_LINE> <INDENT> self.phrases = phrases <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def phrases(self): <NEW_LINE> <INDENT> return self._phrases <NEW_LINE> <DEDENT> @phrases.setter <NEW_LINE> def phrases(self, phrases): <NEW_LINE> <INDENT> self._phrases = phrases <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(SpeechContext, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, SpeechContext): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259906faad79263cf430038 |
class ParsingContext(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.type_stack = [] <NEW_LINE> self.id_stack = [] <NEW_LINE> self.args = {'context': self} <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> zipped = zip(self.type_stack, self.id_stack) <NEW_LINE> strs = ["%s=%s" % (t, i) for (t, i) in zipped] <NEW_LINE> return "ParsingContext(stack=%r)" % strs <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> return not self.type_stack and not self.id_stack <NEW_LINE> <DEDENT> def push(self, obj_type, json, id_field): <NEW_LINE> <INDENT> if id_field not in json.get_field_names(): <NEW_LINE> <INDENT> raise SwaggerError("Missing id_field: %s" % id_field, self) <NEW_LINE> <DEDENT> self.push_str(obj_type, json, str(json[id_field])) <NEW_LINE> <DEDENT> def push_str(self, obj_type, json, id_string): <NEW_LINE> <INDENT> self.type_stack.append(obj_type) <NEW_LINE> self.id_stack.append(id_string) <NEW_LINE> self.args[obj_type] = json <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> del self.args[self.type_stack.pop()] <NEW_LINE> self.id_stack.pop() | Context information for parsing.
This object is immutable. To change contexts (like adding an item to the
stack), use the next() and next_stack() functions to build a new one. | 6259906f283ffb24f3cf512c |
class GreedyEpsilonPolicy(Policy): <NEW_LINE> <INDENT> def __init__(self, num_actions, epsilon=0.05): <NEW_LINE> <INDENT> assert num_actions >= 1 <NEW_LINE> self.num_actions = num_actions <NEW_LINE> self.epsilon = epsilon <NEW_LINE> <DEDENT> def select_action(self, q_values, **kwargs): <NEW_LINE> <INDENT> assert self.num_actions == q_values.shape[1] <NEW_LINE> greedy_action = np.argmax(q_values) <NEW_LINE> if np.random.random() >= self.epsilon: <NEW_LINE> <INDENT> return greedy_action <NEW_LINE> <DEDENT> action = np.random.randint(0, self.num_actions) <NEW_LINE> return action | Selects greedy action or with some probability a random action.
With probability epsilon choose a random action. Otherwise choose the greedy action. | 6259906f55399d3f05627da5 |
class DynamicKeyBindings(_Proxy): <NEW_LINE> <INDENT> def __init__( self, get_key_bindings: Callable[[], Optional[KeyBindingsBase]] ) -> None: <NEW_LINE> <INDENT> self.get_key_bindings = get_key_bindings <NEW_LINE> self.__version = 0 <NEW_LINE> self._last_child_version = None <NEW_LINE> self._dummy = Bind() <NEW_LINE> <DEDENT> def _update_cache(self) -> None: <NEW_LINE> <INDENT> bind = self.get_key_bindings() or self._dummy <NEW_LINE> assert isinstance(bind, KeyBindingsBase) <NEW_LINE> version = id(bind), bind._version <NEW_LINE> self._bindings2 = bind <NEW_LINE> self._last_version = version | KeyBinder class that can dynamically return any KeyBinder.
:param get_key_bindings: Callable that returns a :class:`.KeyBinder` instance. | 6259906f4e4d562566373c89 |
class BetaReportErrorsServiceStub(object): <NEW_LINE> <INDENT> def ReportErrorEvent(self, request, timeout, metadata=None, with_call=False, protocol_options=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> ReportErrorEvent.future = None | The Beta API is deprecated for 0.15.0 and later.
It is recommended to use the GA API (classes and functions in this
file not marked beta) for all further purposes. This class was generated
only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0. | 6259906f4e4d562566373c8a |
class BaseErrorHandler(ABC): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def __call__(self, errors: Iterable[ValidationError]) -> Any: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def __iter__(self) -> Iterator[Any]: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def add(self, error: ValidationError) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def emit(self, error: ValidationError) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def end(self, validator: "UnconcernedValidator") -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def extend(self, errors: Iterable[ValidationError]) -> None: <NEW_LINE> <INDENT> for error in errors: <NEW_LINE> <INDENT> self.add(error) <NEW_LINE> <DEDENT> <DEDENT> def start(self, validator: "UnconcernedValidator") -> None: <NEW_LINE> <INDENT> pass | Base class for all error handlers. | 6259906f66673b3332c31c81 |
class Point(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.x = 0 <NEW_LINE> self.y = 0 <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.x = 0 <NEW_LINE> self.y = 0 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Point:({}, {})'.format(self.x, self.y) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Point({}, {})'.format(self.x, self.y) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self.x == other.x and self.y == other.y: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Represents a 2d point | 6259906f379a373c97d9a8a3 |
class ListServicesResponse(_messages.Message): <NEW_LINE> <INDENT> nextPageToken = _messages.StringField(1) <NEW_LINE> services = _messages.MessageField('GoogleApiServiceusageV1Service', 2, repeated=True) | Response message for the `ListServices` method.
Fields:
nextPageToken: Token that can be passed to `ListServices` to resume a
paginated query.
services: The available services for the requested project. | 6259906ff548e778e596ce10 |
class TaskLoader(): <NEW_LINE> <INDENT> cmd_options = () <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.cmd_names = [] <NEW_LINE> self.config = None <NEW_LINE> <DEDENT> def load_tasks(self, cmd, opt_values, pos_args): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _load_from(cmd, namespace, cmd_list): <NEW_LINE> <INDENT> if inspect.ismodule(namespace): <NEW_LINE> <INDENT> members = dict(inspect.getmembers(namespace)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> members = namespace <NEW_LINE> <DEDENT> task_list = loader.load_tasks(members, cmd_list, cmd.execute_tasks) <NEW_LINE> doit_config = loader.load_doit_config(members) <NEW_LINE> return task_list, doit_config | DEPRECATED: task-loader interface responsible for creating Task objects
:cvar cmd_options:
(list of dict) see cmdparse.CmdOption for dict format
Subclasses must implement the method `load_tasks`. | 6259906f627d3e7fe0e0870b |
class Group(object): <NEW_LINE> <INDENT> group_file = '%s/etc/group' % (CowrieConfig().get('honeypot', 'contents_path'),) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.load() <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> self.group = [] <NEW_LINE> with open(self.group_file, 'r') as f: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> rawline = f.readline() <NEW_LINE> if not rawline: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> line = rawline.strip() <NEW_LINE> if not line: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if line.startswith('#'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> (gr_name, gr_passwd, gr_gid, gr_mem) = line.split(':') <NEW_LINE> e = {} <NEW_LINE> e["gr_name"] = gr_name <NEW_LINE> try: <NEW_LINE> <INDENT> e["gr_gid"] = int(gr_gid) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> e["gr_gid"] = 1001 <NEW_LINE> <DEDENT> e["gr_mem"] = gr_mem <NEW_LINE> self.group.append(e) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def save(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getgrnam(self, name): <NEW_LINE> <INDENT> for _ in self.group: <NEW_LINE> <INDENT> if name == _["gr_name"]: <NEW_LINE> <INDENT> return _ <NEW_LINE> <DEDENT> <DEDENT> raise KeyError("getgrnam(): name not found in group file: " + name) <NEW_LINE> <DEDENT> def getgrgid(self, uid): <NEW_LINE> <INDENT> for _ in self.group: <NEW_LINE> <INDENT> if uid == _["gr_gid"]: <NEW_LINE> <INDENT> return _ <NEW_LINE> <DEDENT> <DEDENT> raise KeyError("getgruid(): uid not found in group file: " + str(uid)) | This class contains code to handle the groups and their properties in
/etc/group. | 6259906f4a966d76dd5f076d |
class MockSIBConnection(object): <NEW_LINE> <INDENT> NAMESPACES = { 'rdf': 'http://www.w3.org/1999/02/22-rdf-syntax-ns#', 'rdfs': 'http://www.w3.org/2000/01/rdf-schema#', 'owl': 'http://www.w3.org/2002/07/owl#', 'xsd': 'http://www.w3.org/2001/XMLSchema#', 'dc': 'http://purl.org/dc/elements/1.1/', 'sib': 'http://www.nokia.com/NRC/M3/sib#', 'daml': 'http://www.daml.org/2000/12/daml+oil#', } <NEW_LINE> def __init__(self, node_name='Node', method='Manual'): <NEW_LINE> <INDENT> self.triple_store = set() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, type_, value, traceback): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def insert(self, triples, **kwargs): <NEW_LINE> <INDENT> for triple in triples: <NEW_LINE> <INDENT> self.triple_store.add(triple) <NEW_LINE> <DEDENT> <DEDENT> def __expand_namespace(self, node): <NEW_LINE> <INDENT> if isinstance(node, uri) and ':' in node: <NEW_LINE> <INDENT> namespace, value = node.split(':') <NEW_LINE> try: <NEW_LINE> <INDENT> return uri(self.NAMESPACES[namespace] + value) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return node <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return node <NEW_LINE> <DEDENT> <DEDENT> def __node_matches(self, pattern, node): <NEW_LINE> <INDENT> if not pattern: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.__expand_namespace(pattern) == node <NEW_LINE> <DEDENT> def __matches(self, pattern, triple): <NEW_LINE> <INDENT> return self.__node_matches(pattern.subject, triple.subject) and self.__node_matches(pattern.predicate, triple.predicate) and self.__node_matches(pattern.object, triple.object) <NEW_LINE> <DEDENT> def query(self, query): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for triple in self.triple_store: <NEW_LINE> <INDENT> if self.__matches(query, triple): <NEW_LINE> <INDENT> result.append(triple) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def update(self, r_triples, i_triples): <NEW_LINE> <INDENT> self.remove(r_triples) <NEW_LINE> self.insert(i_triples) <NEW_LINE> <DEDENT> def remove(self, triples): <NEW_LINE> <INDENT> for triple in triples: <NEW_LINE> <INDENT> self.triple_store.remove(triple) <NEW_LINE> <DEDENT> return True | A fake SIBConnection class that implement the basic API of SIBConnection
and does not connect to SIB at all. | 6259906f38b623060ffaa495 |
class TestLogger(): <NEW_LINE> <INDENT> def __init__(self, store_log=True): <NEW_LINE> <INDENT> self.log = [] <NEW_LINE> self.log_to_file = False <NEW_LINE> self.log_file_name = None <NEW_LINE> self.store_log = store_log <NEW_LINE> self.LogType = construct_enum(INFO='Info', WARN='Warning', NOTIF='Notification', ERROR='Error', EXCEPT='Exception') <NEW_LINE> self.LogToFileAttr = construct_enum(CREATE=1, APPEND=2) <NEW_LINE> <DEDENT> def log_line(self, LogType, log_line, timestamp=True, line_delim='\n'): <NEW_LINE> <INDENT> log_timestamp = time() <NEW_LINE> log_entry = {'log_type' : LogType, 'log_timestamp' : log_timestamp, 'log_line' : log_line, '_future' : None } <NEW_LINE> if self.store_log: <NEW_LINE> <INDENT> self.log.append(log_entry) <NEW_LINE> <DEDENT> return log_entry | Super-class for logging and printing ongoing events for test suite pass
| 6259906fa05bb46b3848bd6e |
class ScRelationSet: <NEW_LINE> <INDENT> def __init__(self, ctx, addr, relAddr): <NEW_LINE> <INDENT> self.ctx = ctx <NEW_LINE> self.addr = addr <NEW_LINE> self.relAddr = relAddr <NEW_LINE> <DEDENT> def Has(self, elAddr: ScAddr) -> bool: <NEW_LINE> <INDENT> it = self.ctx.Iterator5( self.addr, ScType.EdgeDCommonConst, elAddr, ScType.EdgeAccessConstPosPerm, self.relAddr) <NEW_LINE> return it.Next() <NEW_LINE> <DEDENT> def Add(self, elAddr: ScAddr) -> bool: <NEW_LINE> <INDENT> if not self.Has(elAddr): <NEW_LINE> <INDENT> edge = self.ctx.CreateEdge(ScType.EdgeDCommonConst, self.addr, elAddr) <NEW_LINE> edge = self.ctx.CreateEdge( ScType.EdgeAccessConstPosPerm, self.relAddr, edge) <NEW_LINE> return edge.IsValid() <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def Remove(self, elAddr: ScAddr) -> bool: <NEW_LINE> <INDENT> it = self.ctx.Iterator5( self.addr, ScType.EdgeDCommonConst, elAddr, ScType.EdgeAccessConstPosPerm, self.relAddr) <NEW_LINE> if it.Next(): <NEW_LINE> <INDENT> self.ctx.DeleteElement(it.Get(1)) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def Clear(self): <NEW_LINE> <INDENT> it = self.ctx.Iterator5( self.addr, ScType.EdgeDCommonConst, ScType.Unknown, ScType.EdgeAccessConstPosPerm, self.relAddr) <NEW_LINE> while it.Next(): <NEW_LINE> <INDENT> self.ctx.DeleteElement(it.Get(1)) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> it = self.ctx.Iterator5( self.addr, ScType.EdgeDCommonConst, ScType.Unknown, ScType.EdgeAccessConstPosPerm, self.relAddr) <NEW_LINE> return Iterator(it) | Set of elements that are linked with an element by a specified relation.
This set class controls instances of:
el1
=> nrel_relation: el2;
=> nrel_relation: el3;
=> nrel_relation: el4;; | 6259906f76e4537e8c3f0e08 |
class AttachVolume(task.Task): <NEW_LINE> <INDENT> def __init__(self, vol_drv): <NEW_LINE> <INDENT> self.vol_drv = vol_drv <NEW_LINE> self.vol_id = block_device.get_volume_id(self.vol_drv.connection_info) <NEW_LINE> super(AttachVolume, self).__init__(name='attach_vol_%s' % self.vol_id) <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> LOG.info('Attaching volume %(vol)s.', {'vol': self.vol_id}, instance=self.vol_drv.instance) <NEW_LINE> self.vol_drv.attach_volume() <NEW_LINE> <DEDENT> def revert(self, result, flow_failures): <NEW_LINE> <INDENT> LOG.warning('Rolling back attachment for volume %(vol)s.', {'vol': self.vol_id}, instance=self.vol_drv.instance) <NEW_LINE> self.vol_drv.reset_stg_ftsk() <NEW_LINE> try: <NEW_LINE> <INDENT> self.vol_drv.detach_volume() <NEW_LINE> <DEDENT> except exception.VolumeDetachFailed: <NEW_LINE> <INDENT> LOG.exception("Unable to detach volume %s during rollback.", self.vol_id, instance=self.vol_drv.instance) | The task to attach a volume to an instance. | 6259906f7047854f46340c3b |
class LoginRequiredMiddleware(object): <NEW_LINE> <INDENT> def process_view(self, request, view_func, view_args, view_kwargs): <NEW_LINE> <INDENT> login_required = view_kwargs.pop('login_required', False) <NEW_LINE> if login_required: <NEW_LINE> <INDENT> if not request.user.is_authenticated(): <NEW_LINE> <INDENT> url = '{0}?next={1}'.format( reverse('accounts_login'), request.META['PATH_INFO'], ) <NEW_LINE> return HttpResponseRedirect(url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return | Middleware to look for ``login_required`` keyword argument on URLs
and present anonymous users with a login sequence. | 6259906f7d847024c075dc62 |
class GetSpecificOffice(Resource): <NEW_LINE> <INDENT> parser = reqparse.RequestParser() <NEW_LINE> parser.add_argument('Type', type=str, required=True, help='Please fill in this field') <NEW_LINE> parser.add_argument('name', type=str, required=True, help='Please fill in this field') <NEW_LINE> @jwt_required <NEW_LINE> def get(self, office_id): <NEW_LINE> <INDENT> office = CreatePoliticalOffice().get_office_by_id(office_id) <NEW_LINE> if office: <NEW_LINE> <INDENT> return {"status": 200,"Office": office.serializer()}, 200 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {"Status":400, "Message": "This office does not exist"}, 400 <NEW_LINE> <DEDENT> <DEDENT> @jwt_required <NEW_LINE> @admin_access <NEW_LINE> def delete(self,office_id): <NEW_LINE> <INDENT> office = CreatePoliticalOffice().get_office_by_id(office_id) <NEW_LINE> if not office: <NEW_LINE> <INDENT> return {"status": 404,"message": "this office does not exist"},404 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> CreatePoliticalOffice().delete_office(office_id) <NEW_LINE> return {"status": 200,"Message": "office deleted successfully"} <NEW_LINE> <DEDENT> <DEDENT> @jwt_required <NEW_LINE> @admin_access <NEW_LINE> def patch(self, office_id): <NEW_LINE> <INDENT> update_office = GetSpecificOffice.parser.parse_args() <NEW_LINE> name = update_office['Type'] <NEW_LINE> Type = update_office['name'] <NEW_LINE> validate_office_data = validations.Validations() <NEW_LINE> if not validate_office_data.validate_input_fields(name): <NEW_LINE> <INDENT> return {"status":400,"Message": "Please enter." "valid office name"}, 400 <NEW_LINE> <DEDENT> if not validate_office_data.validate_input_fields(Type): <NEW_LINE> <INDENT> return {"status":400,"Message": "Please enter valid office type"}, 400 <NEW_LINE> <DEDENT> if CreatePoliticalOffice().get_office_by_id(office_id): <NEW_LINE> <INDENT> office =CreatePoliticalOffice(Type,name) <NEW_LINE> office.update_office(office_id) <NEW_LINE> return{"Message": "party details updated", "status": 200}, 200 <NEW_LINE> <DEDENT> return { "status": 404, "Message": "Office does not exist" }, 404 | get a specific political office by id. | 6259906f7c178a314d78e82d |
class TrainingCenterCreateView( LoginRequiredMixin, TrainingCenterMixin, CreateView): <NEW_LINE> <INDENT> context_object_name = 'trainingcenter' <NEW_LINE> template_name = 'training_center/create.html' <NEW_LINE> def get_success_url(self): <NEW_LINE> <INDENT> return reverse('certifyingorganisation-detail', kwargs={ 'project_slug': self.object.certifying_organisation.project.slug, 'slug': self.object.certifying_organisation.slug, }) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super( TrainingCenterCreateView, self).get_context_data(**kwargs) <NEW_LINE> context['trainingcenters'] = self.get_queryset() .filter(certifying_organisation=self.certifying_organisation) <NEW_LINE> return context <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> super(TrainingCenterCreateView, self).form_valid(form) <NEW_LINE> return HttpResponseRedirect(self.get_success_url()) <NEW_LINE> <DEDENT> except IntegrityError: <NEW_LINE> <INDENT> return ValidationError( 'ERROR: Training Center by this name is already exists!') <NEW_LINE> <DEDENT> <DEDENT> def get_form_kwargs(self): <NEW_LINE> <INDENT> kwargs = super(TrainingCenterCreateView, self).get_form_kwargs() <NEW_LINE> self.organisation_slug = self.kwargs.get('organisation_slug', None) <NEW_LINE> self.certifying_organisation = CertifyingOrganisation.objects.get(slug=self.organisation_slug) <NEW_LINE> kwargs.update({ 'user': self.request.user, 'certifying_organisation': self.certifying_organisation }) <NEW_LINE> return kwargs | Create view for Training Center. | 6259906f7b180e01f3e49ca6 |
class KerasLayer(Layer): <NEW_LINE> <INDENT> def __init__( self, layer = None, keras_layer = None, keras_args = {}, name ='keras_layer', ): <NEW_LINE> <INDENT> Layer.__init__(self, name=name) <NEW_LINE> assert layer is not None <NEW_LINE> assert keras_layer is not None <NEW_LINE> self.inputs = layer.outputs <NEW_LINE> print(" [TL] KerasLayer %s: %s" % (self.name, keras_layer)) <NEW_LINE> print(" This API will be removed, please use LambdaLayer instead.") <NEW_LINE> with tf.variable_scope(name) as vs: <NEW_LINE> <INDENT> self.outputs = keras_layer(self.inputs, **keras_args) <NEW_LINE> variables = tf.get_collection(TF_GRAPHKEYS_VARIABLES, scope=vs.name) <NEW_LINE> <DEDENT> self.all_layers = list(layer.all_layers) <NEW_LINE> self.all_params = list(layer.all_params) <NEW_LINE> self.all_drop = dict(layer.all_drop) <NEW_LINE> self.all_layers.extend( [self.outputs] ) <NEW_LINE> self.all_params.extend( variables ) | The :class:`KerasLayer` class can be used to merge all Keras layers into
TensorLayer. Example can be found here `tutorial_keras.py <https://github.com/zsdonghao/tensorlayer/blob/master/example/tutorial_keras.py>`_.
This layer will be deprecated soon as :class:`LambdaLayer` can do the same thing.
Parameters
----------
layer : a :class:`Layer` instance
The `Layer` class feeding into this layer.
keras_layer : a keras network function
keras_args : dictionary
The arguments for the keras model.
name : a string or None
An optional name to attach to this layer. | 6259906f67a9b606de5476e5 |
class TestXModuleHandler(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.module = XModule(descriptor=Mock(), field_data=Mock(), runtime=Mock(), scope_ids=Mock()) <NEW_LINE> self.module.handle_ajax = Mock(return_value='{}') <NEW_LINE> self.request = webob.Request({}) <NEW_LINE> <DEDENT> def test_xmodule_handler_passed_data(self): <NEW_LINE> <INDENT> self.module.xmodule_handler(self.request) <NEW_LINE> self.module.handle_ajax.assert_called_with(None, self.request.POST) <NEW_LINE> <DEDENT> def test_xmodule_handler_dispatch(self): <NEW_LINE> <INDENT> self.module.xmodule_handler(self.request, 'dispatch') <NEW_LINE> self.module.handle_ajax.assert_called_with('dispatch', self.request.POST) <NEW_LINE> <DEDENT> def test_xmodule_handler_return_value(self): <NEW_LINE> <INDENT> response = self.module.xmodule_handler(self.request) <NEW_LINE> self.assertIsInstance(response, webob.Response) <NEW_LINE> self.assertEqual(response.body, '{}') | Tests that the xmodule_handler function correctly wraps handle_ajax | 6259906f8e7ae83300eea914 |
class CXERXCalculator: <NEW_LINE> <INDENT> def __init__(self, individual1, individual2): <NEW_LINE> <INDENT> self.individual2 = individual2 <NEW_LINE> self.individual1 = individual1 <NEW_LINE> <DEDENT> def crossover(self): <NEW_LINE> <INDENT> child1 = self._createChild(self.individual1, self.individual2) <NEW_LINE> child2 = self._createChild(self.individual2, self.individual1) <NEW_LINE> self.individual1[:] = child1[:] <NEW_LINE> self.individual2[:] = child2[:] <NEW_LINE> return self.individual1, self.individual2 <NEW_LINE> <DEDENT> def _createChild(self, individual1, individual2): <NEW_LINE> <INDENT> self._buildEdgeMap(individual1, individual2) <NEW_LINE> child = creator.Individual() <NEW_LINE> currentNode = individual1[random.randint(0, len(individual1) - 1)] <NEW_LINE> while True: <NEW_LINE> <INDENT> child.append(currentNode) <NEW_LINE> neighborgs = self.edgeMap.pop(currentNode) <NEW_LINE> neighborgs = list(neighborgs) <NEW_LINE> self._cleanupNodeFromEdgeMap(currentNode) <NEW_LINE> if neighborgs != []: <NEW_LINE> <INDENT> currentNeighborg = neighborgs[0] <NEW_LINE> currentLen = len(self.edgeMap[currentNeighborg]) <NEW_LINE> for neighborg in neighborgs[1:]: <NEW_LINE> <INDENT> if len(self.edgeMap[neighborg]) < currentLen: <NEW_LINE> <INDENT> currentLen = len(self.edgeMap[neighborg]) <NEW_LINE> currentNeighborg = neighborg <NEW_LINE> <DEDENT> elif len(self.edgeMap[neighborg]) == currentLen: <NEW_LINE> <INDENT> if random.randint(0, 1) > 0.5: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> currentLen = len(self.edgeMap[neighborg]) <NEW_LINE> currentNeighborg = neighborg <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> currentNode = currentNeighborg <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.edgeMap == {}: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> currentNode = self.edgeMap.keys()[random.randint(0, len(self.edgeMap.keys()) - 1)] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return child <NEW_LINE> <DEDENT> def _buildEdgeMap(self,individual1,individual2): <NEW_LINE> <INDENT> self.edgeMap = {} <NEW_LINE> for node in individual1: <NEW_LINE> <INDENT> pos1 = individual1.index(node) <NEW_LINE> pos2 = individual2.index(node) <NEW_LINE> neighborg1 = individual1[(pos1 - 1) % len(individual1)] <NEW_LINE> neighborg2 = individual1[(pos1 + 1) % len(individual1)] <NEW_LINE> neighborg3 = individual2[(pos2 - 1) % len(individual2)] <NEW_LINE> neighborg4 = individual2[(pos2 + 1) % len(individual2)] <NEW_LINE> self.edgeMap[node] = set([neighborg1, neighborg2, neighborg3, neighborg4]) <NEW_LINE> <DEDENT> <DEDENT> def _cleanupNodeFromEdgeMap(self, referenceNode): <NEW_LINE> <INDENT> edgeMapEntriesToBeRemoved = [] <NEW_LINE> for node in self.edgeMap: <NEW_LINE> <INDENT> if referenceNode in self.edgeMap[node]: <NEW_LINE> <INDENT> self.edgeMap[node].remove(referenceNode) <NEW_LINE> <DEDENT> if self.edgeMap[node] == set(): <NEW_LINE> <INDENT> edgeMapEntriesToBeRemoved.append(node) <NEW_LINE> <DEDENT> <DEDENT> for node in edgeMapEntriesToBeRemoved: <NEW_LINE> <INDENT> edgeMapEntriesToBeRemoved.remove(node) | Contains the logic to perform the ERX crossover. ERX crossover is based on:
1. creation of an edgeMap
2. creation of the child by adding to it a random node at first,
then the least connected nodes | 6259906f4f88993c371f1162 |
class SourcePathFrame(BasePAFrame): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> BasePAFrame.__init__(self, *args, **kwargs) <NEW_LINE> self.w_label_source_path_label = Label( self, text=u'Папка источник с фотографиями ') <NEW_LINE> self.w_label_base_path = Label( self, text=settings.PHOTO_FINDER_LAST_DIR) <NEW_LINE> self.w_button_select_base_path = Button( self, text=u'Изменить', command=self.click_button_select_base_path) <NEW_LINE> <DEDENT> def _pa_layout(self): <NEW_LINE> <INDENT> label_height = 0.25 <NEW_LINE> self.w_label_source_path_label.place( relx=0, rely=0 ) <NEW_LINE> self.w_label_base_path.place( relx=0, rely=label_height ) <NEW_LINE> self.w_button_select_base_path.place( relx=0, rely=label_height * 2 ) <NEW_LINE> <DEDENT> def click_button_select_base_path(self): <NEW_LINE> <INDENT> path = askdirectory( title=u'Выберите папку с фотографиями', initialdir=settings.PHOTO_FINDER_LAST_SIR) <NEW_LINE> if not path: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.w_label_base_path['text'] = path <NEW_LINE> settings.PHOTO_FINDER_LAST_SIR = path | Frame for selecting the source | 6259906f8e7ae83300eea915 |
class StagedActivation(BaseScheduler): <NEW_LINE> <INDENT> stage_list = [] <NEW_LINE> shuffle = False <NEW_LINE> shuffle_between_stages = False <NEW_LINE> stage_time = 1 <NEW_LINE> def __init__(self, model, stage_list=["step"], shuffle=False, shuffle_between_stages=False): <NEW_LINE> <INDENT> super().__init__(model) <NEW_LINE> self.stage_list = stage_list <NEW_LINE> self.shuffle = shuffle <NEW_LINE> self.shuffle_between_stages = shuffle_between_stages <NEW_LINE> self.stage_time = 1 / len(self.stage_list) <NEW_LINE> <DEDENT> def step(self): <NEW_LINE> <INDENT> if self.shuffle: <NEW_LINE> <INDENT> random.shuffle(self.agents) <NEW_LINE> <DEDENT> for stage in self.stage_list: <NEW_LINE> <INDENT> for agent in self.agents: <NEW_LINE> <INDENT> getattr(agent, stage)(self.model) <NEW_LINE> <DEDENT> if self.shuffle_between_stages: <NEW_LINE> <INDENT> random.shuffle(self.agents) <NEW_LINE> <DEDENT> self.time += self.stage_time <NEW_LINE> <DEDENT> self.steps += 1 | A scheduler which allows agent activation to be divided into several stages
instead of a single `step` method. All agents execute one stage before
moving on to the next.
Agents must have all the stage methods implemented. Stage methods take a
model object as their only argument.
This schedule tracks steps and time separately. Time advances in fractional
increments of 1 / (# of stages), meaning that 1 step = 1 unit of time. | 6259906f92d797404e38979d |
class Message(six.text_type): <NEW_LINE> <INDENT> def __new__(cls, msgid, msgtext=None, params=None, domain='heatclient', *args): <NEW_LINE> <INDENT> if not msgtext: <NEW_LINE> <INDENT> msgtext = Message._translate_msgid(msgid, domain) <NEW_LINE> <DEDENT> msg = super(Message, cls).__new__(cls, msgtext) <NEW_LINE> msg.msgid = msgid <NEW_LINE> msg.domain = domain <NEW_LINE> msg.params = params <NEW_LINE> return msg <NEW_LINE> <DEDENT> def translate(self, desired_locale=None): <NEW_LINE> <INDENT> translated_message = Message._translate_msgid(self.msgid, self.domain, desired_locale) <NEW_LINE> if self.params is None: <NEW_LINE> <INDENT> return translated_message <NEW_LINE> <DEDENT> translated_params = _translate_args(self.params, desired_locale) <NEW_LINE> translated_message = translated_message % translated_params <NEW_LINE> return translated_message <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _translate_msgid(msgid, domain, desired_locale=None): <NEW_LINE> <INDENT> if not desired_locale: <NEW_LINE> <INDENT> system_locale = locale.getdefaultlocale() <NEW_LINE> if not system_locale[0]: <NEW_LINE> <INDENT> desired_locale = 'en_US' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> desired_locale = system_locale[0] <NEW_LINE> <DEDENT> <DEDENT> locale_dir = os.environ.get(domain.upper() + '_LOCALEDIR') <NEW_LINE> lang = gettext.translation(domain, localedir=locale_dir, languages=[desired_locale], fallback=True) <NEW_LINE> if six.PY3: <NEW_LINE> <INDENT> translator = lang.gettext <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> translator = lang.ugettext <NEW_LINE> <DEDENT> translated_message = translator(msgid) <NEW_LINE> return translated_message <NEW_LINE> <DEDENT> def __mod__(self, other): <NEW_LINE> <INDENT> params = self._sanitize_mod_params(other) <NEW_LINE> unicode_mod = super(Message, self).__mod__(params) <NEW_LINE> modded = Message(self.msgid, msgtext=unicode_mod, params=params, domain=self.domain) <NEW_LINE> return modded <NEW_LINE> <DEDENT> def _sanitize_mod_params(self, other): <NEW_LINE> <INDENT> if other is None: <NEW_LINE> <INDENT> params = (other,) <NEW_LINE> <DEDENT> elif isinstance(other, dict): <NEW_LINE> <INDENT> params = {} <NEW_LINE> if isinstance(self.params, dict): <NEW_LINE> <INDENT> for key, val in self.params.items(): <NEW_LINE> <INDENT> params[key] = self._copy_param(val) <NEW_LINE> <DEDENT> <DEDENT> for key, val in other.items(): <NEW_LINE> <INDENT> params[key] = self._copy_param(val) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> params = self._copy_param(other) <NEW_LINE> <DEDENT> return params <NEW_LINE> <DEDENT> def _copy_param(self, param): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return copy.deepcopy(param) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return six.text_type(param) <NEW_LINE> <DEDENT> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> msg = _('Message objects do not support addition.') <NEW_LINE> raise TypeError(msg) <NEW_LINE> <DEDENT> def __radd__(self, other): <NEW_LINE> <INDENT> return self.__add__(other) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> msg = _('Message objects do not support str() because they may ' 'contain non-ascii characters. ' 'Please use unicode() or translate() instead.') <NEW_LINE> raise UnicodeError(msg) | A Message object is a unicode object that can be translated.
Translation of Message is done explicitly using the translate() method.
For all non-translation intents and purposes, a Message is simply unicode,
and can be treated as such. | 6259906f9c8ee82313040dca |
class TestDownloadTranscripts(BaseTranscripts): <NEW_LINE> <INDENT> def update_video_component(self, sub=None, youtube_id=None): <NEW_LINE> <INDENT> sjson_transcript = json.loads(SJSON_TRANSCRIPT_CONTENT) <NEW_LINE> self.item.sub = sub <NEW_LINE> if sub: <NEW_LINE> <INDENT> self.save_subs_to_store(sjson_transcript, sub) <NEW_LINE> <DEDENT> self.item.youtube_id_1_0 = youtube_id <NEW_LINE> if youtube_id: <NEW_LINE> <INDENT> self.save_subs_to_store(sjson_transcript, youtube_id) <NEW_LINE> <DEDENT> modulestore().update_item(self.item, self.user.id) <NEW_LINE> <DEDENT> def download_transcript(self, locator): <NEW_LINE> <INDENT> payload = {} <NEW_LINE> if locator: <NEW_LINE> <INDENT> payload.update({'locator': str(locator)}) <NEW_LINE> <DEDENT> download_transcript_url = reverse('download_transcripts') <NEW_LINE> response = self.client.get(download_transcript_url, payload) <NEW_LINE> return response <NEW_LINE> <DEDENT> def assert_download_response(self, response, expected_status_code, expected_content=None): <NEW_LINE> <INDENT> self.assertEqual(response.status_code, expected_status_code) <NEW_LINE> if expected_content: <NEW_LINE> <INDENT> assert response.content.decode('utf-8') == expected_content <NEW_LINE> <DEDENT> <DEDENT> def test_download_youtube_transcript_success(self): <NEW_LINE> <INDENT> self.update_video_component(youtube_id='JMD_ifUUfsU') <NEW_LINE> response = self.download_transcript(locator=self.video_usage_key) <NEW_LINE> self.assert_download_response(response, expected_content=SRT_TRANSCRIPT_CONTENT, expected_status_code=200) <NEW_LINE> <DEDENT> def test_download_non_youtube_transcript_success(self): <NEW_LINE> <INDENT> self.update_video_component(sub='test_subs') <NEW_LINE> response = self.download_transcript(locator=self.video_usage_key) <NEW_LINE> self.assert_download_response(response, expected_content=SRT_TRANSCRIPT_CONTENT, expected_status_code=200) <NEW_LINE> <DEDENT> def test_download_transcript_404_without_locator(self): <NEW_LINE> <INDENT> response = self.download_transcript(locator=None) <NEW_LINE> self.assert_download_response(response, expected_status_code=404) <NEW_LINE> <DEDENT> def test_download_transcript_404_with_bad_locator(self): <NEW_LINE> <INDENT> response = self.download_transcript(locator='invalid-locator') <NEW_LINE> self.assert_download_response(response, expected_status_code=404) <NEW_LINE> <DEDENT> def test_download_transcript_404_for_non_video_module(self): <NEW_LINE> <INDENT> usage_key = self.create_non_video_module() <NEW_LINE> response = self.download_transcript(locator=usage_key) <NEW_LINE> self.assert_download_response(response, expected_status_code=404) <NEW_LINE> <DEDENT> def test_download_transcript_404_for_no_yt_and_no_sub(self): <NEW_LINE> <INDENT> self.update_video_component(sub=None, youtube_id=None) <NEW_LINE> response = self.download_transcript(locator=self.video_usage_key) <NEW_LINE> self.assert_download_response(response, expected_status_code=404) | Tests for '/transcripts/download' url. | 6259906f4f6381625f19a0eb |
class PerfReporter(object): <NEW_LINE> <INDENT> __PLOT_SIZE_X = 30 <NEW_LINE> __PLOT_SIZE_Y = 15 <NEW_LINE> __QUARTILE_HIGH = 80 <NEW_LINE> __QUARTILE_LOW = 20 <NEW_LINE> __TRIM_SIGMA = 2 <NEW_LINE> @staticmethod <NEW_LINE> def trim_outliers(distribution, nsigma, threshold=10): <NEW_LINE> <INDENT> if len(distribution) < threshold: <NEW_LINE> <INDENT> return distribution <NEW_LINE> <DEDENT> miu = mean(distribution) <NEW_LINE> sigma = stdev(distribution) <NEW_LINE> qhigh = miu + nsigma * sigma <NEW_LINE> qlow = miu - nsigma * sigma <NEW_LINE> return [element for element in distribution if element < qhigh and element > qlow] <NEW_LINE> <DEDENT> def __process_data(self, data): <NEW_LINE> <INDENT> return [self.trim_outliers(dist, self.__TRIM_SIGMA) for dist in data] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def create_table(stream, data, header, title='Runtime table'): <NEW_LINE> <INDENT> stream.write('\n' + '=' * 50 + '\n') <NEW_LINE> stream.write(title + '\n') <NEW_LINE> stream.write(tabulate(data, headers=header, showindex=True, tablefmt='grid') + '\n') <NEW_LINE> <DEDENT> def plot(self, filename, labels, runtimes, title='Runtime plot'): <NEW_LINE> <INDENT> fig, ax1 = mplot.subplots(figsize=(self.__PLOT_SIZE_X, self.__PLOT_SIZE_Y)) <NEW_LINE> fig.canvas.set_window_title('Runtime plot') <NEW_LINE> boxplot = mplot.boxplot(self.__process_data(runtimes), showmeans=True, meanline=True, showcaps=True, showfliers=True) <NEW_LINE> mplot.setp(boxplot['boxes'], color='black') <NEW_LINE> mplot.setp(boxplot['whiskers'], color='black') <NEW_LINE> mplot.setp(boxplot['fliers'], color='red', marker='+') <NEW_LINE> mplot.setp(mplot.setp(ax1, xticklabels=labels), rotation=0, fontsize=10) <NEW_LINE> mplot.title(title) <NEW_LINE> mplot.minorticks_on() <NEW_LINE> ax1.yaxis.grid(b=True, which='major', linestyle='-', color='black') <NEW_LINE> ax1.yaxis.grid(b=True, which='minor', linestyle='-.', color='grey') <NEW_LINE> ax1.yaxis.set_major_formatter(FormatStrFormatter('%.3e')) <NEW_LINE> ax1.set_xlabel('Size(n), Scenario') <NEW_LINE> ax1.set_ylabel('T (seconds)') <NEW_LINE> mplot.savefig(filename) <NEW_LINE> mplot.close() | Simple class to process and report test data. | 6259906f91f36d47f2231ad1 |
class time_(html_tag): <NEW_LINE> <INDENT> pass | The time element represents either a time on a 24 hour clock, or a precise
date in the proleptic Gregorian calendar, optionally with a time and a
time-zone offset. | 6259906f97e22403b383c788 |
class ModelConverter(object): <NEW_LINE> <INDENT> def from_client_to_store(self, client_resource, store_resource): <NEW_LINE> <INDENT> raise NotImplementedError("Should be implemented by a sub-class") <NEW_LINE> <DEDENT> def from_store_to_client(self, store_resource, client_resource): <NEW_LINE> <INDENT> raise NotImplementedError("Should be implemented by a sub-class") | TODO: find a better name and explain | 6259906ff548e778e596ce12 |
class PluginFraction(PrependerPlugin): <NEW_LINE> <INDENT> def __init__(self, config, section=None): <NEW_LINE> <INDENT> PrependerPlugin.__init__(self, config, section) <NEW_LINE> self.filter = None <NEW_LINE> self.requiredvars = { 'filterfile': { 'default': '/etc/fuglu/pluginfraction.regex', } } <NEW_LINE> self.logger = self._logger() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Plugin Fraction" <NEW_LINE> <DEDENT> def pluginlist(self, suspect, pluginlist): <NEW_LINE> <INDENT> if not self._initfilter(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> args = self.filter.get_args(suspect) <NEW_LINE> if len(args) == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> includepluginlist = [] <NEW_LINE> for arg in args: <NEW_LINE> <INDENT> includepluginlist.extend(arg.split(',')) <NEW_LINE> <DEDENT> listcopy = pluginlist[:] <NEW_LINE> for plug in pluginlist: <NEW_LINE> <INDENT> name = plug.__class__.__name__ <NEW_LINE> if name not in includepluginlist: <NEW_LINE> <INDENT> listcopy.remove(plug) <NEW_LINE> <DEDENT> <DEDENT> return listcopy <NEW_LINE> <DEDENT> def _initfilter(self): <NEW_LINE> <INDENT> if self.filter != None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> filename = self.config.get(self.section, 'filterfile') <NEW_LINE> if filename == None or filename == "": <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not os.path.exists(filename): <NEW_LINE> <INDENT> self.logger.error( 'Filterfile not found for pluginfraction: %s' % filename) <NEW_LINE> return False <NEW_LINE> <DEDENT> self.filter = SuspectFilter(filename) <NEW_LINE> return True <NEW_LINE> <DEDENT> def lint(self): <NEW_LINE> <INDENT> return (self.checkConfig() and self.lint_filter()) <NEW_LINE> <DEDENT> def lint_filter(self): <NEW_LINE> <INDENT> filterfile = self.config.get(self.section, 'filterfile') <NEW_LINE> filter = SuspectFilter(filterfile) <NEW_LINE> return filter.lint() | Runs only a fraction of loaded scanner plugins based on standard filter file
Use this if you only want to run a fraction of the standard plugins on a specific port for example
eg. put this in /etc/fuglu/pluginfraction.regex:
@incomingport 1100 SAPlugin,AttachmentPlugin | 6259906faad79263cf43003b |
class SeqRef(ExprRef): <NEW_LINE> <INDENT> def sort(self): <NEW_LINE> <INDENT> return SeqSortRef(Z3_get_sort(self.ctx_ref(), self.as_ast()), self.ctx) <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return Concat(self, other) <NEW_LINE> <DEDENT> def __radd__(self, other): <NEW_LINE> <INDENT> return Concat(other, self) <NEW_LINE> <DEDENT> def __getitem__(self, i): <NEW_LINE> <INDENT> if _is_int(i): <NEW_LINE> <INDENT> i = IntVal(i, self.ctx) <NEW_LINE> <DEDENT> return SeqRef(Z3_mk_seq_nth(self.ctx_ref(), self.as_ast(), i.as_ast()), self.ctx) <NEW_LINE> <DEDENT> def at(self, i): <NEW_LINE> <INDENT> if _is_int(i): <NEW_LINE> <INDENT> i = IntVal(i, self.ctx) <NEW_LINE> <DEDENT> return SeqRef(Z3_mk_seq_at(self.ctx_ref(), self.as_ast(), i.as_ast()), self.ctx) <NEW_LINE> <DEDENT> def is_string(self): <NEW_LINE> <INDENT> return Z3_is_string_sort(self.ctx_ref(), Z3_get_sort(self.ctx_ref(), self.as_ast())) <NEW_LINE> <DEDENT> def is_string_value(self): <NEW_LINE> <INDENT> return Z3_is_string(self.ctx_ref(), self.as_ast()) <NEW_LINE> <DEDENT> def as_string(self): <NEW_LINE> <INDENT> if self.is_string_value(): <NEW_LINE> <INDENT> return Z3_get_string(self.ctx_ref(), self.as_ast()) <NEW_LINE> <DEDENT> return Z3_ast_to_string(self.ctx_ref(), self.as_ast()) | Sequence expression. | 6259906f627d3e7fe0e0870d |
class WelcomeAPI(MethodView): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> responseObject = { 'status': 'success', 'message': 'API is currently up. Check out https://github.com/Tehsurfer/Blackfynn-Backend-Connector/ for more details.' } <NEW_LINE> return make_response(jsonify(responseObject)), 200 | Welcome Resource | 6259906f1f5feb6acb164476 |
class GroupServiceProvider(providers.Singleton): <NEW_LINE> <INDENT> provided_type = GroupService | Service provider for group plugins. | 6259906fadb09d7d5dc0bdf0 |
class _EnumerantsInitializer(object): <NEW_LINE> <INDENT> def __get__(self, oself, cls): <NEW_LINE> <INDENT> cls._initializeEnumerants() <NEW_LINE> return cls._enumerants | L{_EnumerantsInitializer} is a descriptor used to initialize a cache of
objects representing named constants for a particular L{_ConstantsContainer}
subclass.
@since: Twisted 12.0.0. | 6259906f3346ee7daa3382a1 |
class TestBoggle(unittest.TestCase): <NEW_LINE> <INDENT> def test_can_create_an_empty_grid(self): <NEW_LINE> <INDENT> grid = boggle.make_grid(0,0) <NEW_LINE> self.assertEqual(len(grid),0) <NEW_LINE> <DEDENT> def test_grid_size_is_width_times_height(self): <NEW_LINE> <INDENT> grid = boggle.make_grid(2,3) <NEW_LINE> self.assertEqual(len(grid),6) <NEW_LINE> <DEDENT> def test_grid_coordinates(self): <NEW_LINE> <INDENT> grid = boggle.make_grid(2,3) <NEW_LINE> self.assertIn((0,0),grid) <NEW_LINE> self.assertIn((0,1),grid) <NEW_LINE> self.assertIn((1,0),grid) <NEW_LINE> self.assertIn((1,1),grid) <NEW_LINE> self.assertNotIn((2,2),grid) | Our test suite for boggle solver
class ListAllDepartmentView(ListAPIView): <NEW_LINE> <INDENT> pagination_class = CustomPagination <NEW_LINE> permission_classes = () <NEW_LINE> parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,) <NEW_LINE> renderer_classes = (renderers.JSONRenderer,) <NEW_LINE> serializer_class = DepartmentSerializers <NEW_LINE> def set_extra_data_for_paginator(self, extra_data): <NEW_LINE> <INDENT> if self.paginator and hasattr(self.paginator, 'set_extra_attributes'): <NEW_LINE> <INDENT> self.paginator.set_extra_attributes(extra_data) <NEW_LINE> <DEDENT> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> apt_id = self.kwargs.get('apt_id') <NEW_LINE> extra_data = {} <NEW_LINE> list_apt = Apartment.objects.filter(id=apt_id) <NEW_LINE> if len(list_apt) == 1: <NEW_LINE> <INDENT> extra_data['apt_name'] = list_apt[0].name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> queryset = [] <NEW_LINE> self.set_extra_data_for_paginator(extra_data) <NEW_LINE> return queryset <NEW_LINE> <DEDENT> list_department = Department.objects.filter(apartment_id=apt_id).order_by('id') <NEW_LINE> queryset = list_department <NEW_LINE> self.set_extra_data_for_paginator(extra_data) <NEW_LINE> return queryset | list all department.
if user is superuser -> apt_id not NULL
if user is ceo(superadmin, admin) -> apt_id = apt of request.user | 6259906fa05bb46b3848bd6f |
class SyntheticData(Dataset): <NEW_LINE> <INDENT> def __init__(self, unused_data_dir): <NEW_LINE> <INDENT> super(SyntheticData, self).__init__('synthetic') <NEW_LINE> <DEDENT> def get_image_preprocessor(self): <NEW_LINE> <INDENT> return preprocessing.SyntheticImagePreprocessor <NEW_LINE> <DEDENT> def use_synthetic_gpu_images(self): <NEW_LINE> <INDENT> return True | Configuration for synthetic dataset. | 6259906ff9cc0f698b1c5f0d |
class Spout(Component): <NEW_LINE> <INDENT> def ack(self, tup_id): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fail(self, tup_id): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def next_tuple(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def emit( self, tup, tup_id=None, stream=None, direct_task=None, need_task_ids=False ): <NEW_LINE> <INDENT> return super(Spout, self).emit( tup, tup_id=tup_id, stream=stream, direct_task=direct_task, need_task_ids=need_task_ids, ) <NEW_LINE> <DEDENT> def activate(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def deactivate(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _run(self): <NEW_LINE> <INDENT> cmd = self.read_command() <NEW_LINE> if cmd["command"] == "next": <NEW_LINE> <INDENT> self.next_tuple() <NEW_LINE> <DEDENT> elif cmd["command"] == "ack": <NEW_LINE> <INDENT> self.ack(cmd["id"]) <NEW_LINE> <DEDENT> elif cmd["command"] == "fail": <NEW_LINE> <INDENT> self.fail(cmd["id"]) <NEW_LINE> <DEDENT> elif cmd["command"] == "activate": <NEW_LINE> <INDENT> self.activate() <NEW_LINE> <DEDENT> elif cmd["command"] == "deactivate": <NEW_LINE> <INDENT> self.deactivate() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.error("Received invalid command from Storm: %r", cmd) <NEW_LINE> <DEDENT> self.send_message({"command": "sync"}) | Base class for all pystorm spouts.
For more information on spouts, consult Storm's
`Concepts documentation <http://storm.apache.org/documentation/Concepts.html>`_. | 6259906f796e427e5384fffd |
class TestStrucDataCase10(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls._data = fs.open_sif(os.path.join(FILES, 'struc', 'single_super_elem', 'test01_2ndord_linstat_R1.SIU')) <NEW_LINE> cls._f_verified = h5py.File(os.path.join(FILES, 'verified_testdata.h5'), 'r') <NEW_LINE> cls._gr_verified = cls._f_verified['test01_2ndord_linstat_R1/MD_plates/noderes'] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> cls._data.close() <NEW_LINE> cls._f_verified.close() <NEW_LINE> <DEDENT> def test_get_nodes(self): <NEW_LINE> <INDENT> nodes = self._data.get_nodes(sets='MD_plates', kind='shell', disconnected=False) <NEW_LINE> nodes_verified = self._gr_verified['nodes'] <NEW_LINE> self.assertTrue(np.allclose(nodes, nodes_verified)) <NEW_LINE> <DEDENT> def test_get_elements(self): <NEW_LINE> <INDENT> elems = self._data.get_elements(sets='MD_plates', kind='shell', disconnected=False) <NEW_LINE> connectivity = self._gr_verified['connectivity'] <NEW_LINE> offset = self._gr_verified['offset'] <NEW_LINE> eltyp = self._gr_verified['eltyp'] <NEW_LINE> self.assertTrue(np.allclose(elems[0], connectivity)) <NEW_LINE> self.assertTrue(np.allclose(elems[1], offset)) <NEW_LINE> self.assertTrue(np.allclose(elems[2], eltyp)) <NEW_LINE> <DEDENT> def test_get_noderesults_displacement(self): <NEW_LINE> <INDENT> res = self._data.get_noderesults('displacement', rescases=1, sets='MD_plates', disconnected=False) <NEW_LINE> res_verified = self._gr_verified['displacement'] <NEW_LINE> self.assertTrue(np.allclose(res, res_verified)) | 2nd order 8-node quadrilateral shell elements, node results
| 6259906fbe8e80087fbc0915 |
class GwFileTypeEnum: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> enumValue = 0 <NEW_LINE> fileBuffer = None | A result from Glasswall containing the determined file type value. | 6259906fdd821e528d6da5c4 |
class WhoisGetterBasic(object): <NEW_LINE> <INDENT> def get_whois_content(self, domain): <NEW_LINE> <INDENT> result = subprocess.run(['whois', domain], stdout=subprocess.PIPE) <NEW_LINE> return result.stdout.decode(sys.getdefaultencoding()) | Simple basic whois content getter from the command line
running whois abc.com. It will grab the whole text
and return it back. | 6259906f7d43ff2487428055 |
class Step(Benchmark): <NEW_LINE> <INDENT> def __init__(self, dimensions=2): <NEW_LINE> <INDENT> Benchmark.__init__(self, dimensions) <NEW_LINE> self.bounds = list(zip([-100.0] * self.dimensions, [100.0] * self.dimensions)) <NEW_LINE> self.custom_bounds = ([-5, 5], [-5, 5]) <NEW_LINE> self.global_optimum = [0.5 for _ in range(self.dimensions)] <NEW_LINE> self.fglob = 0.5 <NEW_LINE> self.change_dimensionality = True <NEW_LINE> <DEDENT> def evaluator(self, x, *args): <NEW_LINE> <INDENT> self.fun_evals += 1 <NEW_LINE> return sum((floor(x) + 0.5)**2.0) | Step test objective function.
This class defines the Step global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{Step}}(\mathbf{x}) = \sum_{i=1}^{n} \left ( \lfloor x_i \rfloor + 0.5 \right )^2
Here, :math:`n` represents the number of dimensions and :math:`x_i \in [-100, 100]` for :math:`i=1,...,n`.
.. figure:: figures/Step.png
:alt: Step function
:align: center
**Two-dimensional Step function**
*Global optimum*: :math:`f(x_i) = 0` for :math:`x_i = 0.5` for :math:`i=1,...,n` | 6259906f4428ac0f6e659db9 |
class LBActiveConnectionsPollster(_LBStatsPollster): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def _get_sample(pool, data): <NEW_LINE> <INDENT> return make_sample_from_pool( pool, name='network.services.lb.active.connections', type=sample.TYPE_GAUGE, unit='connection', volume=data.active_connections, ) | Pollster to capture Active Load Balancer connections. | 6259906f99cbb53fe683276f |
class Placeholder(Composable): <NEW_LINE> <INDENT> def __init__(self, name=None): <NEW_LINE> <INDENT> if isinstance(name, str): <NEW_LINE> <INDENT> if ')' in name: <NEW_LINE> <INDENT> raise ValueError("invalid name: %r" % name) <NEW_LINE> <DEDENT> <DEDENT> elif name is not None: <NEW_LINE> <INDENT> raise TypeError("expected string or None as name, got %r" % name) <NEW_LINE> <DEDENT> super(Placeholder, self).__init__(name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._wrapped <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Placeholder(%r)" % ( self._wrapped if self._wrapped is not None else '',) <NEW_LINE> <DEDENT> def as_string(self, context): <NEW_LINE> <INDENT> if self._wrapped is not None: <NEW_LINE> <INDENT> return "%%(%s)s" % self._wrapped <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "%s" | A `Composable` representing a placeholder for query parameters.
If the name is specified, generate a named placeholder (e.g. ``%(name)s``),
otherwise generate a positional placeholder (e.g. ``%s``).
The object is useful to generate SQL queries with a variable number of
arguments.
Examples::
>>> names = ['foo', 'bar', 'baz']
>>> q1 = sql.SQL("insert into table ({}) values ({})").format(
... sql.SQL(', ').join(map(sql.Identifier, names)),
... sql.SQL(', ').join(sql.Placeholder() * len(names)))
>>> print(q1.as_string(conn))
insert into table ("foo", "bar", "baz") values (%s, %s, %s)
>>> q2 = sql.SQL("insert into table ({}) values ({})").format(
... sql.SQL(', ').join(map(sql.Identifier, names)),
... sql.SQL(', ').join(map(sql.Placeholder, names)))
>>> print(q2.as_string(conn))
insert into table ("foo", "bar", "baz") values (%(foo)s, %(bar)s, %(baz)s) | 6259906f4a966d76dd5f0770 |
class Section(SupportsBytes): <NEW_LINE> <INDENT> def __init__(self, payload_type: 'OpMsg.PayloadType'): <NEW_LINE> <INDENT> self.payload_type = payload_type <NEW_LINE> <DEDENT> def __bytes__(self): <NEW_LINE> <INDENT> raise NotImplementedError() | Generic section | 6259906f8e7ae83300eea916 |
class BaseClass(QtGui.QWidget): <NEW_LINE> <INDENT> def __init__(self, parent = None): <NEW_LINE> <INDENT> QtGui.QWidget.__init__(self, parent) <NEW_LINE> self.resetAuthor() <NEW_LINE> <DEDENT> def getAuthor(self): <NEW_LINE> <INDENT> return self._author <NEW_LINE> <DEDENT> def setAuthor(self, name): <NEW_LINE> <INDENT> self._author = name <NEW_LINE> <DEDENT> def resetAuthor(self): <NEW_LINE> <INDENT> self._author = "David Boddie" <NEW_LINE> <DEDENT> author = QtCore.pyqtProperty("QString", getAuthor, setAuthor, resetAuthor) | BaseClass(QtGui.QWidget)
Provides a base custom widget class to show that properties implemented
in Python can be inherited and shown as belonging to distinct classes
in Qt Designer's Property Editor. | 6259906f32920d7e50bc78cd |
class MockLibatutility(MockSharedLib): <NEW_LINE> <INDENT> libs = ['atutility.so', 'atutility'] <NEW_LINE> functions = [ 'AT_ConvertBuffer', 'AT_ConvertBufferUsingMetadata', 'AT_FinaliseUtilityLibrary', 'AT_InitialiseUtilityLibrary', ] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(MockLibatutility, self).__init__() <NEW_LINE> self.AT_InitialiseUtilityLibrary._call = lambda : 0 | Mock Andor's atutility (SDK3) for microscope.cameras.SDK3.
| 6259906f8e7ae83300eea917 |
class Benchmarker: <NEW_LINE> <INDENT> def __init__(self, config, ds, classifier): <NEW_LINE> <INDENT> self.ds = ds <NEW_LINE> self.clf = classifier <NEW_LINE> self.logger_factory = logger_factory <NEW_LINE> self.log = logger_factory.logger_for(config, self.__class__.__name__) <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> test_size = self.ds.get_test_size() <NEW_LINE> num_of_chunks = int(math.ceil(test_size/70000.0)) <NEW_LINE> test_subsets = self.slices(test_size, wanted_parts=num_of_chunks) <NEW_LINE> print("Benchmarking classifier over %d examples in %d chunks." % (test_size, num_of_chunks)) <NEW_LINE> cms = Parallel(n_jobs=self.config.j, verbose=1)(delayed(_benchmark)(self, i) for i in test_subsets) <NEW_LINE> self.log_final_confusion_matrix(cms) <NEW_LINE> <DEDENT> def slices(self, length, wanted_parts=1): <NEW_LINE> <INDENT> return [(i*length // wanted_parts, (i+1)*length // wanted_parts) for i in range(wanted_parts)] <NEW_LINE> <DEDENT> def log_final_confusion_matrix(self, cms): <NEW_LINE> <INDENT> final_confusion_matrix = [[0,0], [0,0]] <NEW_LINE> for cm in cms: <NEW_LINE> <INDENT> final_confusion_matrix[0][0] += cm[0][0] <NEW_LINE> final_confusion_matrix[0][1] += cm[0][1] <NEW_LINE> final_confusion_matrix[1][0] += cm[1][0] <NEW_LINE> final_confusion_matrix[1][1] += cm[1][1] <NEW_LINE> <DEDENT> log_confusion_matrix(self.log, final_confusion_matrix) | Slices the test set and generates the final confusion matrix | 6259906f91f36d47f2231ad2 |
class MaxServingsLowerThanLargestRecipeError(StaticSiteError): <NEW_LINE> <INDENT> pass | Thrown when max_servings set to less than the largest recipes' native serving count. | 6259906f32920d7e50bc78ce |
class SettingsView(zoom.mvc.View): <NEW_LINE> <INDENT> def about(self): <NEW_LINE> <INDENT> return zoom.page(zoom.tools.load_content('about.md')) | Settings View | 6259906f8a43f66fc4bf3a1b |
class PandasHDFStoreSingleNode(FramewiseData): <NEW_LINE> <INDENT> def __init__(self, filename, key='FrameData', mode='a', t_column='frame', use_tabular_copy=False, **kwargs): <NEW_LINE> <INDENT> self.filename = os.path.abspath(filename) <NEW_LINE> self.key = key <NEW_LINE> self._t_column = t_column <NEW_LINE> self.store = pd.HDFStore(self.filename, mode, **kwargs) <NEW_LINE> with pd.get_store(self.filename) as store: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> store[self.key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._validate_node(use_tabular_copy) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def t_column(self): <NEW_LINE> <INDENT> return self._t_column <NEW_LINE> <DEDENT> def put(self, df): <NEW_LINE> <INDENT> self._validate(df) <NEW_LINE> self.store.append(self.key, df, data_columns=True) <NEW_LINE> <DEDENT> def get(self, frame_no): <NEW_LINE> <INDENT> frame = self.store.select(self.key, '{0} == {1}'.format( self._t_column, frame_no)) <NEW_LINE> return frame <NEW_LINE> <DEDENT> def dump(self, N=None): <NEW_LINE> <INDENT> if N is None: <NEW_LINE> <INDENT> return self.store.select(self.key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Nth_frame = self.frames[N - 1] <NEW_LINE> return self.store.select(self.key, '{0} <= {1}'.format( self._t_column, Nth_frame)) <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.store.close() <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if hasattr(self, 'store'): <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def frames(self): <NEW_LINE> <INDENT> frame_nos = self.store.select_column(self.key, self.t_column).unique() <NEW_LINE> frame_nos.sort() <NEW_LINE> return frame_nos <NEW_LINE> <DEDENT> def _validate_node(self, use_tabular_copy): <NEW_LINE> <INDENT> if use_tabular_copy: <NEW_LINE> <INDENT> self.key = _make_tabular_copy(self.filename, self.key) <NEW_LINE> <DEDENT> pandas_type = getattr(getattr(getattr( self.store._handle.root, self.key, None), '_v_attrs', None), 'pandas_type', None) <NEW_LINE> if not pandas_type == 'frame_table': <NEW_LINE> <INDENT> raise ValueError("This node is not tabular. Call with " "use_tabular_copy=True to proceed.") | Save all frames into one large node.
This implementation is more complex than PandasHDFStore,
but it simplifies (speeds up?) cross-frame queries,
like queries for a single probe's entire trajectory.
Any additional keyword arguments to the constructor are passed to
pandas.HDFStore(). | 6259906faad79263cf43003d |
class hr_employee(osv.osv): <NEW_LINE> <INDENT> _inherit = 'hr.employee' <NEW_LINE> _description = 'Employee' <NEW_LINE> def _calculate_total_wage(self, cr, uid, ids, name, args, context): <NEW_LINE> <INDENT> if not ids: return {} <NEW_LINE> res = {} <NEW_LINE> current_date = datetime.now().strftime('%Y-%m-%d') <NEW_LINE> for employee in self.browse(cr, uid, ids, context=context): <NEW_LINE> <INDENT> if not employee.contract_ids: <NEW_LINE> <INDENT> res[employee.id] = {'basic': 0.0} <NEW_LINE> continue <NEW_LINE> <DEDENT> cr.execute( 'SELECT SUM(wage) ' 'FROM hr_contract ' 'WHERE employee_id = %s ' 'AND date_start <= %s ' 'AND (date_end > %s OR date_end is NULL)', (employee.id, current_date, current_date)) <NEW_LINE> result = dict(cr.dictfetchone()) <NEW_LINE> res[employee.id] = {'basic': result['sum']} <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def _payslip_count(self, cr, uid, ids, field_name, arg, context=None): <NEW_LINE> <INDENT> Payslip = self.pool['hr.payslip'] <NEW_LINE> return { employee_id: Payslip.search_count(cr,uid, [('employee_id', '=', employee_id)], context=context) for employee_id in ids } <NEW_LINE> <DEDENT> _columns = { 'slip_ids':fields.one2many('hr.payslip', 'employee_id', 'Payslips', required=False, readonly=True), 'total_wage': fields.function(_calculate_total_wage, method=True, type='float', string='Total Basic Salary', digits_compute=dp.get_precision('Payroll'), help="Sum of all current contract's wage of employee."), 'payslip_count': fields.function(_payslip_count, type='integer', string='Payslips'), } | Employee | 6259906ff548e778e596ce14 |
class HitMarker(entity.Entity): <NEW_LINE> <INDENT> def __init__(self, x, y): <NEW_LINE> <INDENT> entity.Entity.__init__(self) <NEW_LINE> self.class_id = 4 <NEW_LINE> self.netxpos = NetworkVar(self, x, 1) <NEW_LINE> self.netypos = NetworkVar(self, y, 2) <NEW_LINE> self.frames = 0 <NEW_LINE> <DEDENT> def update(self, world): <NEW_LINE> <INDENT> self.frames += 1 <NEW_LINE> if self.frames > 32: <NEW_LINE> <INDENT> self.destroy(world) | Displays a hit circle for the client | 6259906f4527f215b58eb5e3 |
class Layer(MultyvacModel): <NEW_LINE> <INDENT> def __init__(self, name, **kwargs): <NEW_LINE> <INDENT> MultyvacModel.__init__(self, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.size = kwargs.get('size') <NEW_LINE> self.created_at = kwargs.get('created_at') <NEW_LINE> <DEDENT> def mkdir(self, path): <NEW_LINE> <INDENT> r = self.multyvac._ask(Multyvac._ASK_PUT, '/layer/%s/mkdir' % self.name, params={'path': path}, ) <NEW_LINE> return MultyvacModule.check_success(r) <NEW_LINE> <DEDENT> def put_contents(self, contents, target_path, target_mode=None): <NEW_LINE> <INDENT> files = {'file': (target_path, contents)} <NEW_LINE> data = {'file_mode': target_mode} <NEW_LINE> r = self.multyvac._ask(Multyvac._ASK_PUT, '/layer/%s' % self.name, files=files, data=data, ) <NEW_LINE> return MultyvacModule.check_success(r) <NEW_LINE> <DEDENT> def get_contents(self, path): <NEW_LINE> <INDENT> r = self.multyvac._ask(Multyvac._ASK_GET, '/layer/%s' % self.name, params={'path': [path]}, ) <NEW_LINE> f = r['files'][0] <NEW_LINE> f['contents'] = base64.b64decode(f['contents']) <NEW_LINE> return f <NEW_LINE> <DEDENT> def get_file(self, remote_path, local_path): <NEW_LINE> <INDENT> f = self.get_contents(remote_path) <NEW_LINE> with open(local_path, 'wb') as target_f: <NEW_LINE> <INDENT> target_f.write(f['contents']) <NEW_LINE> <DEDENT> <DEDENT> def put_file(self, local_path, remote_path, target_mode=None): <NEW_LINE> <INDENT> with open(local_path, 'rb') as f: <NEW_LINE> <INDENT> self.put_contents(f.read(), remote_path, target_mode) <NEW_LINE> <DEDENT> <DEDENT> def ls(self, path): <NEW_LINE> <INDENT> r = self.multyvac._ask(Multyvac._ASK_GET, '/layer/%s/ls' % self.name, params={'path': path}, ) <NEW_LINE> return r['ls'] <NEW_LINE> <DEDENT> def rm(self, path): <NEW_LINE> <INDENT> r = self.multyvac._ask(Multyvac._ASK_POST, '/layer/%s/rm' % self.name, params={'path': path}, ) <NEW_LINE> return MultyvacModule.check_success(r) <NEW_LINE> <DEDENT> def modify(self, vol=None, max_runtime=3600): <NEW_LINE> <INDENT> jid = self.multyvac.job.shell_submit( 'sleep %s' % max_runtime, _name='layer modify %s' % self.name, _vol=vol, _layer={'name': self.name, 'mount_rw': True}, _tags={'system': 'true'}, ) <NEW_LINE> return self.get_modify_layer_job(jid) <NEW_LINE> <DEDENT> def get_modify_layer_job(self, jid): <NEW_LINE> <INDENT> job = ModifyLayerJob(jid, multyvac=self.multyvac) <NEW_LINE> job.update() <NEW_LINE> return job <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Layer(%s)" % repr(self.name) | Represents a Multyvac Layer and its associated operations. | 6259906f66673b3332c31c85 |
class VariableSets(APIClassTemplate): <NEW_LINE> <INDENT> VALID_JSON_DATA = ["id", "name", "type", "description"] <NEW_LINE> VALID_FOR_KWARGS = VALID_JSON_DATA + [] <NEW_LINE> URL_SUFFIX = "/object/variablesets" <NEW_LINE> def __init__(self, fmc, **kwargs): <NEW_LINE> <INDENT> super().__init__(fmc, **kwargs) <NEW_LINE> logging.debug("In __init__() for VariableSets class.") <NEW_LINE> self.parse_kwargs(**kwargs) <NEW_LINE> <DEDENT> def post(self): <NEW_LINE> <INDENT> logging.info("POST method for API for VariableSets not supported.") <NEW_LINE> pass <NEW_LINE> <DEDENT> def put(self): <NEW_LINE> <INDENT> logging.info("PUT method for API for VariableSets not supported.") <NEW_LINE> pass <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> logging.info("DELETE method for API for VariableSets not supported.") <NEW_LINE> pass | The VariableSets Object in the FMC. | 6259906f460517430c432c9a |
class Category(TrackableUpdateCreateModel): <NEW_LINE> <INDENT> parent_category = models.ForeignKey( 'shop.Category', verbose_name='Parent category', blank=True, null=True, db_index=True, on_delete=models.SET_NULL, related_name='child_categories', related_query_name='child_category', ) <NEW_LINE> name = models.CharField('Name', max_length=100) <NEW_LINE> is_active = models.BooleanField('Active', default=True) <NEW_LINE> sort_order = models.PositiveIntegerField('Sort order', default=1) <NEW_LINE> objects = CategoryManager() <NEW_LINE> def __str__(self) -> str: <NEW_LINE> <INDENT> if self.parent_category_id is not None: <NEW_LINE> <INDENT> return '{parent_category} -> {category}'.format(parent_category=self.parent_category, category=self.name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Category' <NEW_LINE> verbose_name_plural = 'Categories' <NEW_LINE> app_label = 'shop' <NEW_LINE> ordering = ['sort_order', 'name'] | Model for describing a product category.
Contains fields for the name and the parent category index,
as well as activity status and sort order. | 6259906f2c8b7c6e89bd506d
class KickRejoin(BaseExtension): <NEW_LINE> <INDENT> requires = ["BasicRFC", "ISupport"] <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.rejoin_delay = kwargs.pop('rejoin_delay', 5) <NEW_LINE> self.rejoin_on_remove = kwargs.pop('rejoin_on_remove', True) <NEW_LINE> self.scheduled = {} <NEW_LINE> if self.rejoin_on_remove: <NEW_LINE> <INDENT> self.parts = set() <NEW_LINE> <DEDENT> <DEDENT> @event("commands_out", "PART") <NEW_LINE> def on_part_out(self, _, line): <NEW_LINE> <INDENT> if not self.rejoin_on_remove: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> isupport = self.base.isupport <NEW_LINE> chantypes = isupport.get("CHANTYPES") <NEW_LINE> for channel in line.params[0].split(","): <NEW_LINE> <INDENT> if not channel.startswith(*chantypes): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> channel = self.casefold(channel) <NEW_LINE> self.parts.add(channel) <NEW_LINE> <DEDENT> <DEDENT> @event("commands", "KICK") <NEW_LINE> @event("commands", "PART") <NEW_LINE> def on_kick(self, _, line): <NEW_LINE> <INDENT> basicrfc = self.base.basic_rfc <NEW_LINE> params = line.params <NEW_LINE> channel = self.casefold(params[0]) <NEW_LINE> if self.casefold(params[1]) != self.casefold(basicrfc.nick): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if line.command == 'PART': <NEW_LINE> <INDENT> if not self.rejoin_on_remove: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> elif channel in self.parts: <NEW_LINE> <INDENT> self.parts.discard(channel) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> if self.rejoin_on_remove: <NEW_LINE> <INDENT> self.parts.discard(channel) <NEW_LINE> <DEDENT> if channel in self.scheduled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> future = self.schedule(self.rejoin_delay, partial(self.join, channel)) <NEW_LINE> self.scheduled[channel] = future <NEW_LINE> <DEDENT> def join(self, channel): <NEW_LINE> <INDENT> self.send("JOIN", [channel]) <NEW_LINE> self.parts.discard(channel) <NEW_LINE> del self.scheduled[channel] <NEW_LINE> <DEDENT> @event("link", "disconnected") <NEW_LINE> def on_disconnected(self, _): <NEW_LINE> <INDENT> for future in self.scheduled.values(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.unschedule(future) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.scheduled.clear() | Rejoin a channel automatically after being kicked or removed. | 6259906f38b623060ffaa497 |
class TestProfileService(OVSBaseTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> test_user_info = ('[email protected]', 'Bob', 'Smith', 'RESIDENT') <NEW_LINE> UserService.create_user(*test_user_info) <NEW_LINE> self.test_user = UserService.get_user_by_email('[email protected]') <NEW_LINE> <DEDENT> def test_update_profile(self): <NEW_LINE> <INDENT> profile = self.test_user.profile <NEW_LINE> self.assertEqual(profile.preferred_name, "Bob") <NEW_LINE> self.assertEqual(profile.phone_number, None) <NEW_LINE> self.assertEqual(profile.preferred_email, '[email protected]') <NEW_LINE> self.assertEqual(profile.race, None) <NEW_LINE> self.assertEqual(profile.gender, 'Unspecified') <NEW_LINE> ProfileService.update_profile(self.test_user.id, preferred_name='Jenny', phone_number='867-5309', preferred_email='[email protected]', race='Black', gender=Gender.FEMALE) <NEW_LINE> self.assertEqual(profile.preferred_name, "Jenny") <NEW_LINE> self.assertEqual(profile.phone_number, "867-5309") <NEW_LINE> self.assertEqual(profile.preferred_email, '[email protected]') <NEW_LINE> self.assertEqual(profile.race, 'Black') <NEW_LINE> self.assertEqual(profile.gender, Gender.FEMALE) <NEW_LINE> <DEDENT> def test_get_all_profiles(self): <NEW_LINE> <INDENT> expected = self.db.session.query(Profile).count() <NEW_LINE> self.assertEqual(len(ProfileService.get_all_profiles()), expected) | Tests for profile services | 6259906f1f037a2d8b9e54ae |
class HyperParameters(object): <NEW_LINE> <INDENT> device = None <NEW_LINE> gpu_no = None <NEW_LINE> data_set = None <NEW_LINE> lr = 0.0 <NEW_LINE> epoch = 0 <NEW_LINE> batch_size = 0 <NEW_LINE> clip_norm = 0 <NEW_LINE> decay_steps = 0 <NEW_LINE> decay_rate = 0.0 <NEW_LINE> embedding_size = 0 <NEW_LINE> hidden_units = 0 <NEW_LINE> keep_prob = 0.0 <NEW_LINE> layer_num = 0 <NEW_LINE> num_tags = 4 | Hyperparameter class
| 6259906f3346ee7daa3382a2 |
class V1beta1APIVersion(object): <NEW_LINE> <INDENT> def __init__(self, name=None): <NEW_LINE> <INDENT> self.swagger_types = { 'name': 'str' } <NEW_LINE> self.attribute_map = { 'name': 'name' } <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 6259906f76e4537e8c3f0e0c |
class DocumentIndex(object): <NEW_LINE> <INDENT> def __init__(self, documents): <NEW_LINE> <INDENT> self._documents = documents <NEW_LINE> self._inverted_index = defaultdict(set) <NEW_LINE> self._document_frequencies = self._rollup_frequencies(documents) <NEW_LINE> <DEDENT> def doc_freq(self, w, default=1): <NEW_LINE> <INDENT> return self._document_frequencies.get(stem(w.lower()), default) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._documents) <NEW_LINE> <DEDENT> def _rollup_frequencies(self, documents): <NEW_LINE> <INDENT> f = defaultdict(lambda: 0) <NEW_LINE> for d in documents: <NEW_LINE> <INDENT> for w in d._frequencies: <NEW_LINE> <INDENT> f[w] += 1 <NEW_LINE> self._inverted_index[w].add(d) <NEW_LINE> <DEDENT> <DEDENT> return f <NEW_LINE> <DEDENT> def _candidate_documents(self, doc, keys=None): <NEW_LINE> <INDENT> docs = set() <NEW_LINE> for w in doc._frequencies: <NEW_LINE> <INDENT> if keys: <NEW_LINE> <INDENT> for d in self._inverted_index[w]: <NEW_LINE> <INDENT> if d.key in keys: <NEW_LINE> <INDENT> docs.add(d) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> docs.update(self._inverted_index[w]) <NEW_LINE> <DEDENT> <DEDENT> return docs <NEW_LINE> <DEDENT> def query(self, doc, max_results=10, distance=fdot_product, keys=None): <NEW_LINE> <INDENT> results = [] <NEW_LINE> for d in self._candidate_documents(doc, keys=keys): <NEW_LINE> <INDENT> dist = distance(doc, d) <NEW_LINE> if dist == 0: <NEW_LINE> <INDENT> dist = inf <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dist = 1 / dist <NEW_LINE> <DEDENT> heapq.heappush(results, (dist, d.key)) <NEW_LINE> results = heapq.nsmallest(max_results, results, key=lambda x: x[0]) <NEW_LINE> <DEDENT> return results | A box of documents, which can be queried.
| 6259906fa05bb46b3848bd70 |
class DomainStruct (object): <NEW_LINE> <INDENT> def __init__ (self, domain): <NEW_LINE> <INDENT> eT2LD = get_public_suffix(domain) <NEW_LINE> et2s = eT2LD.split('.') <NEW_LINE> eTLD = '.'.join(et2s[1:]) <NEW_LINE> if eTLD == '': <NEW_LINE> <INDENT> self.isFQDN = False <NEW_LINE> self.eTkLD = [] <NEW_LINE> self.sub = self.eSLD = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.isFQDN = True <NEW_LINE> self.eTkLD = [eTLD, eT2LD] <NEW_LINE> self.eSLD = et2s[0] <NEW_LINE> self.sub = domain[:-len(eT2LD)-1] <NEW_LINE> _subs = self.sub.split('.') if len(self.sub) > 0 else [] <NEW_LINE> for i in xrange(len(_subs)): <NEW_LINE> <INDENT> _eTkLD = '.'.join(_subs[-i-1:]+[eT2LD]) <NEW_LINE> self.eTkLD.append(_eTkLD) <NEW_LINE> <DEDENT> <DEDENT> self.domain = domain <NEW_LINE> self.nowww = domain[4:] if domain.startswith('www.') else domain | Return a DomStruct, representing the structure of the input domain
Example 1 (valid eTLD, and len(domain)>len(eT2LD)):
domain = 'www.5.4.3.google.co.uk'
nowww = '5.4.3.google.co.uk'
et2s = ['google', 'co', 'uk']
eSLD = 'google'
sub = 'www.5.4.3'
subs = ['www', '5', '4', '3']
eTkLD = [
'co.uk',
'google.co.uk',
'3.google.co.uk',
'4.3.google.co.uk',
'5.4.3.google.co.uk',
'www.5.4.3.google.co.uk',
]
Example 2 (valid eTLD, and len(domain)==len(eT2LD)):
domain = 'google.co.uk'
nowww = 'google.co.uk'
et2s = ['google', 'co', 'uk']
eSLD = 'google'
sub = ''
eTkLD = [
'co.uk',
'google.co.uk',
]
Example 3 (invalid eTLD case):
domain = 'www.bar.local'
nowww = 'bar.local'
et2s = ['local']
eTkLD = [] | 6259906f7d847024c075dc66 |
class ExportStatistics: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def export_to_csv(file_path, samples, statistics): <NEW_LINE> <INDENT> header_row = [ '', samples.qualifiers[1], samples.qualifiers[2], samples.qualifiers[3], samples.qualifiers[4]] <NEW_LINE> matrix = [] <NEW_LINE> for key, value in samples.names.items(): <NEW_LINE> <INDENT> temp = [value, statistics.stats[key][1], statistics.stats[key][2], statistics.stats[key][3], statistics.stats[key][4]] <NEW_LINE> matrix.append(temp) <NEW_LINE> <DEDENT> with codecs.open(file_path, 'w', 'utf-8') as csv_file: <NEW_LINE> <INDENT> file_writer = csv.writer(csv_file, delimiter=';') <NEW_LINE> file_writer.writerow(header_row) <NEW_LINE> for element in matrix: <NEW_LINE> <INDENT> file_writer.writerow(element) | Class used for exporting the statistics
created in the software. | 6259906fb7558d5895464b76
class AdaptiveInterpolator(object): <NEW_LINE> <INDENT> def adaptive_init(self, f, interp_class): <NEW_LINE> <INDENT> if f is not None: <NEW_LINE> <INDENT> self.f = f <NEW_LINE> <DEDENT> self.interp_class = interp_class <NEW_LINE> self.n = 3 <NEW_LINE> Xs = self.get_nodes(self.n) <NEW_LINE> Ys = self.f(Xs) <NEW_LINE> interp_class.__init__(self, Xs, Ys) <NEW_LINE> <DEDENT> def adaptive_interp(self, par=None): <NEW_LINE> <INDENT> if par is None: <NEW_LINE> <INDENT> par = params.interpolation <NEW_LINE> <DEDENT> maxn = par.maxn <NEW_LINE> n = self.n <NEW_LINE> old_err = None <NEW_LINE> cm = convergence_monitor(par=par.convergence) <NEW_LINE> while n <= maxn: <NEW_LINE> <INDENT> new_n = 2 * n - 1 <NEW_LINE> new_Xs = self.get_incremental_nodes(new_n) <NEW_LINE> new_Ys = self.f(new_Xs) <NEW_LINE> err = self.test_accuracy(new_Xs, new_Ys) <NEW_LINE> maxy = max(abs(new_Ys).max(), abs(self.Ys).max()) <NEW_LINE> if par.debug_info: <NEW_LINE> <INDENT> print("interp. err", err, old_err, new_n) <NEW_LINE> <DEDENT> cm.add(err, maxy) <NEW_LINE> if cm.test_convergence()[0]: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> old_err = err <NEW_LINE> n = new_n <NEW_LINE> self.add_nodes(new_Xs, new_Ys) <NEW_LINE> <DEDENT> self.n = n <NEW_LINE> if par.debug_plot and n >= maxn: <NEW_LINE> <INDENT> debug_plot(self.a, self.b, self.Xs, self.Ys, None) <NEW_LINE> <DEDENT> if par.debug_info: <NEW_LINE> <INDENT> print("interp. err = ", err, "nodes=", n) <NEW_LINE> <DEDENT> self.err = err <NEW_LINE> <DEDENT> def test_accuracy(self, new_Xs, new_Ys): <NEW_LINE> <INDENT> errs = abs(self.interp_class.interp_at(self, new_Xs) - new_Ys) <NEW_LINE> err = errs.max() <NEW_LINE> return err | Mix-in class for adaptive interpolators.
Increase number of nodes until error is small. | 6259906f796e427e5384ffff |
class TestDieSize(unittest.TestCase): <NEW_LINE> <INDENT> def test_die_size(self): <NEW_LINE> <INDENT> with open(os.path.join('test', 'testfiles_for_unittests', 'trailing_null_dies.elf'), 'rb') as f: <NEW_LINE> <INDENT> elffile = ELFFile(f) <NEW_LINE> self.assertTrue(elffile.has_dwarf_info()) <NEW_LINE> dwarfinfo = elffile.get_dwarf_info() <NEW_LINE> for CU in dwarfinfo.iter_CUs(): <NEW_LINE> <INDENT> for child in CU.get_top_DIE().iter_children(): <NEW_LINE> <INDENT> self.assertEqual(child.size, 3) | This test verifies that null DIEs are treated correctly - i.e.
removed when we 'unflatten' the linear list and build a tree.
The test file contains a CU with two non-null DIEs (both three bytes big),
where the second one is followed by three null DIEs.
We verify that the null DIEs are discarded and that the length of the second DIE
does not include the null entries that follow it. | 6259906f4c3428357761bb3b |
class Element(View): <NEW_LINE> <INDENT> template = "menu/element.html" <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> return render(request, self.template, context) | Top songs.
TODO: Show songs by their popularity. | 6259906fbf627c535bcb2d53
class DummyRing(StorageRing): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(DummyRing, self).__init__() <NEW_LINE> self._lifetime = 10 * q.hour <NEW_LINE> self._current = 100 * q.mA <NEW_LINE> self._energy = 5 * q.MeV <NEW_LINE> self._current_decay = 0.05 * q.mA / q.hour <NEW_LINE> self._energy_decay = 0.05 * q.MeV / q.hour <NEW_LINE> def update(): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> self._lifetime += 1 * q.hour <NEW_LINE> time.sleep(5.0) <NEW_LINE> self['energy'].notify() <NEW_LINE> self['current'].notify() <NEW_LINE> <DEDENT> <DEDENT> self.monitor = threading.Thread(target=update) <NEW_LINE> self.monitor.daemon = True <NEW_LINE> self.monitor.start() <NEW_LINE> <DEDENT> def _get_current(self): <NEW_LINE> <INDENT> return self._current - self._lifetime * self._current_decay <NEW_LINE> <DEDENT> def _get_energy(self): <NEW_LINE> <INDENT> return self._energy - self._lifetime * self._energy_decay <NEW_LINE> <DEDENT> def _get_lifetime(self): <NEW_LINE> <INDENT> return self._lifetime | Create a Dummy Ring. | 6259906fa8370b77170f1c50 |
class JerboaTokenReader(object): <NEW_LINE> <INDENT> def __init__(self, tokenizedfile): <NEW_LINE> <INDENT> self.file = tokenizedfile <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> orig_text = self.file.next() <NEW_LINE> tokens = self.file.next().split() <NEW_LINE> tags = self.file.next().split() <NEW_LINE> indices = self.file.next().split() <NEW_LINE> return (zip(tokens, tags, indices), orig_text) <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> raise | Reads from the full output of the Jerboa tokenizer. | 6259906f7b180e01f3e49ca8 |
@unique <NEW_LINE> class CUBAExtension(Enum): <NEW_LINE> <INDENT> BOX_FACES = "BOX_FACES" <NEW_LINE> BOX_VECTORS = "BOX_VECTORS" <NEW_LINE> BOX_ORIGIN = "BOX_ORIGIN" <NEW_LINE> PAIR_POTENTIALS = "PAIR_POTENTIALS" <NEW_LINE> FIXED_GROUP = "FIXED_GROUP" | Provisional CUBA keywords specific for Liggghts-Md
These are additional CUBA-Keywords that are not included
in simphony-common yet. The proposed description for
these new CUBA keywords is:
- description: Simulation box faces
domain: [MD]
key: BOX_FACES
name: BoxFaces
number: 100
shape: [1]
type: double
- description: Simulation box vectors
domain: [MD]
key: BOX_VECTORS
name: BoxVectors
number: 101
shape: [3,3]
type: double
- description: Simulation box origin
domain: [MD]
key: BOX_ORIGIN
name: BoxOrigin
number: 102
shape: [3]
type: double
- description: Thermodynamic ensemble
domain: [MD]
key: THERMODYNAMIC_ENSEMBLE
name: ThermodynamicEnsemble
number: 103
shape: [20]
type: string
- description: Pair potentials
domain: [MD]
key: PAIR_POTENTIALS
name: PairPotentials
number: 104
shape: [20]
type: string
- description: Fixed group of particles
domain: [MD]
key: FIXED_GROUP
name: FixedGroup
number: 105
shape: [20]
type: string | 6259906fa8370b77170f1c51 |
class MtgjsonDeckObject: <NEW_LINE> <INDENT> code: str <NEW_LINE> commander: List[Dict[str, Any]] <NEW_LINE> main_board: List[Dict[str, Any]] <NEW_LINE> name: str <NEW_LINE> side_board: List[Dict[str, Any]] <NEW_LINE> release_date: str <NEW_LINE> type: str <NEW_LINE> file_name: str <NEW_LINE> def set_sanitized_name(self, name: str) -> None: <NEW_LINE> <INDENT> word_characters_only_regex = re.compile(r"[^\w]") <NEW_LINE> capital_case = "".join(x for x in name.title() if not x.isspace()) <NEW_LINE> deck_name_sanitized = word_characters_only_regex.sub("", capital_case) <NEW_LINE> self.file_name = f"{deck_name_sanitized}_{self.code}" <NEW_LINE> <DEDENT> def to_json(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> skip_keys = {"file_name"} <NEW_LINE> return { to_camel_case(key): value for key, value in self.__dict__.items() if "__" not in key and not callable(value) and key not in skip_keys } | MTGJSON Singular Card Object | 6259906f8e7ae83300eea918 |