code (string, 4-4.48k chars) | docstring (string, 1-6.45k chars) | _id (string, 24 chars) |
---|---|---|
class CommentDetailsSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> article = ArticleSerializer(read_only=True) <NEW_LINE> created_by = UserSerializer(read_only=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Comment <NEW_LINE> fields = ['id', 'article', 'created_by', 'edited', 'comment'] <NEW_LINE> read_only_fields = ('article', 'edited') | Comment Details serializer. | 6259905d63b5f9789fe867a7 |
class applyPolicyToTarget_IDL_result(object): <NEW_LINE> <INDENT> thrift_spec = ((0, TType.I32, 'success', None, None), (1, TType.STRUCT, 'e', (Shared.ttypes.ExceptionIDL, Shared.ttypes.ExceptionIDL.thrift_spec), None)) <NEW_LINE> def __init__(self, success = None, e = None): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.e = e <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid,) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.success = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e = Shared.ttypes.ExceptionIDL() <NEW_LINE> self.e.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('applyPolicyToTarget_IDL_result') <NEW_LINE> if self.success != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.I32, 0) <NEW_LINE> oprot.writeI32(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e', TType.STRUCT, 1) <NEW_LINE> self.e.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> def validate(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = [ '%s=%r' % (key, value) for (key, value,) in self.__dict__.iteritems() ] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | Attributes:
- success
- e | 6259905d442bda511e95d874 |
class PatternMatchingPaths: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_inputs(cls): <NEW_LINE> <INDENT> num_patterns = int(input()) <NEW_LINE> pattern_group = PatternGroup() <NEW_LINE> for i in range(num_patterns): <NEW_LINE> <INDENT> pattern = cls.get_input_object(object_type=Pattern) <NEW_LINE> pattern_group.add_pattern(pattern) <NEW_LINE> <DEDENT> num_paths = int(input()) <NEW_LINE> paths = [] <NEW_LINE> for i in range(num_paths): <NEW_LINE> <INDENT> paths.append(cls.get_input_object(object_type=Path)) <NEW_LINE> <DEDENT> return num_patterns, pattern_group, num_paths, paths <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_input_object(cls, object_type): <NEW_LINE> <INDENT> return object_type(input().strip()) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def run(cls): <NEW_LINE> <INDENT> num_patterns, pattern_group, num_paths, paths = cls.get_inputs() <NEW_LINE> for path in paths: <NEW_LINE> <INDENT> closest_match = Matcher.get_closest_matching_pattern_for_path(path=path, pattern_group=pattern_group) <NEW_LINE> print(closest_match.input_pattern if closest_match else 'NO MATCH') | Driver class for this project | 6259905d3617ad0b5ee07781 |
class Location(object): <NEW_LINE> <INDENT> def is_equivalent(self, location): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @property <NEW_LINE> def prefix(self): <NEW_LINE> <INDENT> return None | Base class for ARIA locations.
Locations are used by :class:`~aria.parser.loading.LoaderSource` to delegate to
an appropriate :class:`~aria.parser.loading.Loader`. | 6259905d0a50d4780f7068d9 |
class StatsDiscipline(models.Model): <NEW_LINE> <INDENT> name = models.ForeignKey(Discipline, verbose_name=_('discipline')) <NEW_LINE> value = models.PositiveIntegerField(_('value'), default=0) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return '%s %s' % (self.name, self.value) | Represents the stats of disciplines. | 6259905d3d592f4c4edbc511 |
class CheckFailure(CommandError): <NEW_LINE> <INDENT> pass | Exception raised when the predicates in :attr:`.Command.checks` have failed. | 6259905d6e29344779b01c83 |
class Revision(models.Model): <NEW_LINE> <INDENT> doc = models.ForeignKey('Document', verbose_name="Document") <NEW_LINE> author = models.ForeignKey(settings.AUTH_USER_MODEL) <NEW_LINE> content = models.TextField() <NEW_LINE> created_on = models.DateTimeField(auto_now_add=True) <NEW_LINE> objects = RevisionQuerySet() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "created on %s by %s" % (self.created_on, self.author.username) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.full_clean() <NEW_LINE> latestContent = self.doc.latest <NEW_LINE> if latestContent and latestContent.content == self.content: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.id = None <NEW_LINE> self.created_on = None <NEW_LINE> <DEDENT> super(Revision, self).save(*args, **kwargs) | A revision for a document.
Every time a document is edited, a new revision is created. | 6259905d2ae34c7f260ac71c |
class Point: <NEW_LINE> <INDENT> def __init__(self, initX, initY): <NEW_LINE> <INDENT> self.x = initX <NEW_LINE> self.y = initY <NEW_LINE> <DEDENT> def getX(self): <NEW_LINE> <INDENT> return self.x <NEW_LINE> <DEDENT> def getY(self): <NEW_LINE> <INDENT> return self.y <NEW_LINE> <DEDENT> def get_line_to(self, other): <NEW_LINE> <INDENT> if self.x == other.x or self.y == other.y: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> a = (self.y-other.y)//(self.x-other.x) <NEW_LINE> b = other.y - a*other.x <NEW_LINE> return (a,b) | Point class for representing and manipulating x,y coordinates. | 6259905dd268445f2663a677 |
class Third(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> chrome_driver = os.path.abspath(r"C:\Program Files (x86)\\Google\Chrome\\Application\chromedriver.exe") <NEW_LINE> os.environ["webdriver.chrome.driver"] = chrome_driver <NEW_LINE> self.driver = webdriver.Chrome(chrome_driver) <NEW_LINE> self.driver.implicitly_wait(20) <NEW_LINE> self.driver.maximize_window() <NEW_LINE> self.base_url = "http://www.kuaiqiangche.com/" <NEW_LINE> <DEDENT> def test_third3(self): <NEW_LINE> <INDENT> driver = self.driver <NEW_LINE> driver.get(self.base_url) <NEW_LINE> sleep(0.1) <NEW_LINE> driver.refresh() <NEW_LINE> now_handle = driver.current_window_handle <NEW_LINE> ele = driver.find_element_by_xpath(".//*[@id='auto-insurance']/div[3]/a") <NEW_LINE> ActionChains(driver).move_to_element(ele).click().perform() <NEW_LINE> sleep(0.1) <NEW_LINE> all_handles =driver.window_handles <NEW_LINE> for handle in all_handles: <NEW_LINE> <INDENT> if handle != now_handle: <NEW_LINE> <INDENT> driver.switch_to.window(handle) <NEW_LINE> judge = driver.find_element_by_xpath(".//*[@id='footer']/div[2]/div/div[1]/div[1]/div[3]/h3").text <NEW_LINE> self.assertEqual(judge,u"关于我们",msg="this jump is failed") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.driver.quit() | The third link | 6259905d30dc7b76659a0d9b |
@dependency.requires('federation_api') <NEW_LINE> class IdentityProvider(_ControllerBase): <NEW_LINE> <INDENT> collection_name = 'identity_providers' <NEW_LINE> member_name = 'identity_provider' <NEW_LINE> _mutable_parameters = frozenset(['description', 'enabled']) <NEW_LINE> _public_parameters = frozenset(['id', 'enabled', 'description', 'links']) <NEW_LINE> @classmethod <NEW_LINE> def _add_related_links(cls, context, ref): <NEW_LINE> <INDENT> ref.setdefault('links', {}) <NEW_LINE> base_path = ref['links'].get('self') <NEW_LINE> if base_path is None: <NEW_LINE> <INDENT> base_path = '/'.join([IdentityProvider.base_url(context), ref['id']]) <NEW_LINE> <DEDENT> for name in ['protocols']: <NEW_LINE> <INDENT> ref['links'][name] = '/'.join([base_path, name]) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def _add_self_referential_link(cls, context, ref): <NEW_LINE> <INDENT> id = ref.get('id') <NEW_LINE> self_path = '/'.join([cls.base_url(context), id]) <NEW_LINE> ref.setdefault('links', {}) <NEW_LINE> ref['links']['self'] = self_path <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def wrap_member(cls, context, ref): <NEW_LINE> <INDENT> cls._add_self_referential_link(context, ref) <NEW_LINE> cls._add_related_links(context, ref) <NEW_LINE> ref = cls.filter_params(ref) <NEW_LINE> return {cls.member_name: ref} <NEW_LINE> <DEDENT> @controller.protected() <NEW_LINE> def create_identity_provider(self, context, idp_id, identity_provider): <NEW_LINE> <INDENT> identity_provider = self._normalize_dict(identity_provider) <NEW_LINE> identity_provider.setdefault('enabled', False) <NEW_LINE> IdentityProvider.check_immutable_params(identity_provider) <NEW_LINE> idp_ref = self.federation_api.create_idp(idp_id, identity_provider) <NEW_LINE> response = IdentityProvider.wrap_member(context, idp_ref) <NEW_LINE> return wsgi.render_response(body=response, status=('201', 'Created')) <NEW_LINE> <DEDENT> @controller.protected() <NEW_LINE> def list_identity_providers(self, context): <NEW_LINE> <INDENT> ref = self.federation_api.list_idps() <NEW_LINE> ref = [self.filter_params(x) for x in ref] <NEW_LINE> return IdentityProvider.wrap_collection(context, ref) <NEW_LINE> <DEDENT> @controller.protected() <NEW_LINE> def get_identity_provider(self, context, idp_id): <NEW_LINE> <INDENT> ref = self.federation_api.get_idp(idp_id) <NEW_LINE> return IdentityProvider.wrap_member(context, ref) <NEW_LINE> <DEDENT> @controller.protected() <NEW_LINE> def delete_identity_provider(self, context, idp_id): <NEW_LINE> <INDENT> self.federation_api.delete_idp(idp_id) <NEW_LINE> <DEDENT> @controller.protected() <NEW_LINE> def update_identity_provider(self, context, idp_id, identity_provider): <NEW_LINE> <INDENT> identity_provider = self._normalize_dict(identity_provider) <NEW_LINE> IdentityProvider.check_immutable_params(identity_provider) <NEW_LINE> idp_ref = self.federation_api.update_idp(idp_id, identity_provider) <NEW_LINE> return IdentityProvider.wrap_member(context, idp_ref) | Identity Provider representation. | 6259905d7d847024c075da07 |
class EnactScheduledChangeView(AdminView): <NEW_LINE> <INDENT> def __init__(self, namespace, table): <NEW_LINE> <INDENT> self.namespace = namespace <NEW_LINE> self.path = "/scheduled_changes/%s/:sc_id/enact" % namespace <NEW_LINE> self.table = table <NEW_LINE> self.sc_table = table.scheduled_changes <NEW_LINE> super(EnactScheduledChangeView, self).__init__() <NEW_LINE> <DEDENT> def _post(self, sc_id, transaction, changed_by): <NEW_LINE> <INDENT> self.sc_table.enactChange(sc_id, changed_by, transaction) <NEW_LINE> return jsonify({}) | /scheduled_changes/:namespace/:sc_id/enact | 6259905db7558d5895464a47 |
class FtdiMpsseError(FtdiFeatureError): <NEW_LINE> <INDENT> pass | MPSSE mode not supported on FTDI device | 6259905d379a373c97d9a65a |
class ToTensorNormalize(object): <NEW_LINE> <INDENT> def __call__(self, sample): <NEW_LINE> <INDENT> image_tensor = sample['tensor'] <NEW_LINE> minX = image_tensor.min() <NEW_LINE> maxX = image_tensor.max() <NEW_LINE> image_tensor = (image_tensor - minX) / (maxX - minX) <NEW_LINE> image_tensor = image_tensor.max(axis=0) <NEW_LINE> image_tensor = cv2.resize(image_tensor, dsize=(64, 64), interpolation=cv2.INTER_CUBIC) <NEW_LINE> image_tensor = np.clip(image_tensor, 0, 1) <NEW_LINE> return torch.from_numpy(image_tensor).view(1, 64, 64) | Convert ndarrays in sample to Tensors. | 6259905dbe8e80087fbc06ba |
class Cmd(BaseCmd): <NEW_LINE> <INDENT> name = 'provides' <NEW_LINE> help_text = ("find what upkg provides what file") <NEW_LINE> def build(self): <NEW_LINE> <INDENT> return super(Cmd, self).build() | Docstring for Search | 6259905d8da39b475be0481c |
class EmailMessageComponent(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.email_account_messages_id = None <NEW_LINE> self.component_type = None <NEW_LINE> self.component_value = None | @summary: Entity class to store the individual email message component such as from,to,body of the email message. | 6259905d4e4d562566373a3d |
class AIDemand(object): <NEW_LINE> <INDENT> def __init__(self, aiDemandType, aiTargetType, aiTargetID, amount): <NEW_LINE> <INDENT> aiTarget = AITarget(aiTargetType, aiTargetID) <NEW_LINE> self.__aiTarget = aiTarget <NEW_LINE> self.__aiDemandType = aiDemandType <NEW_LINE> self.__amount = amount <NEW_LINE> <DEDENT> def getAIDemandType(self): <NEW_LINE> <INDENT> return self.__aiDemandType <NEW_LINE> <DEDENT> def getAITarget(self): <NEW_LINE> <INDENT> return self.__aiTarget <NEW_LINE> <DEDENT> def getAmount(self): <NEW_LINE> <INDENT> return self.__amount <NEW_LINE> <DEDENT> def setAmount(self, amount): <NEW_LINE> <INDENT> self.__amount = amount | encapsulate AI demand | 6259905dd53ae8145f919a97 |
class Joypad(iMemory): <NEW_LINE> <INDENT> def __init__(self, joypad_driver, interrupt): <NEW_LINE> <INDENT> assert isinstance(joypad_driver, JoypadDriver) <NEW_LINE> assert isinstance(interrupt, Interrupt ) <NEW_LINE> self.driver = joypad_driver <NEW_LINE> self.interrupt = interrupt <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.joyp = 0xF <NEW_LINE> self.button_code = 0xF <NEW_LINE> self.cycles = constants.JOYPAD_CLOCK <NEW_LINE> <DEDENT> def get_cycles(self): <NEW_LINE> <INDENT> return self.cycles <NEW_LINE> <DEDENT> def emulate(self, ticks): <NEW_LINE> <INDENT> ticks = int(ticks) <NEW_LINE> self.cycles -= ticks <NEW_LINE> if self.cycles <= 0: <NEW_LINE> <INDENT> if self.driver.is_raised(): <NEW_LINE> <INDENT> self.update() <NEW_LINE> <DEDENT> self.cycles = constants.JOYPAD_CLOCK <NEW_LINE> <DEDENT> <DEDENT> def write(self, address, data): <NEW_LINE> <INDENT> address = int(address) <NEW_LINE> if address == constants.JOYP: <NEW_LINE> <INDENT> self.joyp = (self.joyp & 0xC) + (data & 0x3) <NEW_LINE> self.update() <NEW_LINE> <DEDENT> <DEDENT> def read(self, address): <NEW_LINE> <INDENT> address = int(address) <NEW_LINE> if address == constants.JOYP: <NEW_LINE> <INDENT> return (self.joyp << 4) + self.button_code <NEW_LINE> <DEDENT> return 0xFF <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> oldButtons = self.button_code <NEW_LINE> if self.joyp == 0x1: <NEW_LINE> <INDENT> self.button_code = self.driver.get_button_code() <NEW_LINE> <DEDENT> elif self.joyp == 0x2: <NEW_LINE> <INDENT> self.button_code = self.driver.get_direction_code() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.button_code = 0xF <NEW_LINE> <DEDENT> if oldButtons != self.button_code: <NEW_LINE> <INDENT> self.interrupt.raise_interrupt(constants.JOYPAD) | PyBoy GameBoy (TM) Emulator
Joypad Input | 6259905da79ad1619776b5d8 |
class DotAttention(BaseAttention): <NEW_LINE> <INDENT> def __init__(self, key_emb_size: int, ctxt_emb_size: int, num_labels: int): <NEW_LINE> <INDENT> super(DotAttention, self).__init__( input_emb_size=ctxt_emb_size, key_emb_size=key_emb_size, output_emb_size=key_emb_size ) <NEW_LINE> self.ctxt_emb_size = ctxt_emb_size <NEW_LINE> self.key_emb_size = key_emb_size <NEW_LINE> self.num_labels = num_labels <NEW_LINE> self.proj_to_label_space = nn.Linear(key_emb_size, num_labels) <NEW_LINE> self.ctxt_proj = nn.Linear(ctxt_emb_size, key_emb_size) <NEW_LINE> <DEDENT> @overrides <NEW_LINE> def forward(self, context: torch.Tensor, mask: torch.LongTensor) -> Tuple[torch.Tensor, torch.Tensor]: <NEW_LINE> <INDENT> encoded_ctxt = self.ctxt_proj(context) <NEW_LINE> batch, max_seq_len, hdim = encoded_ctxt.size() <NEW_LINE> logits = self.proj_to_label_space(encoded_ctxt) <NEW_LINE> negval = -10e5 <NEW_LINE> float_mask = mask.unsqueeze(-1).expand(-1, -1, logits.size(-1)).float() <NEW_LINE> logits = (float_mask * logits) + (negval * (1. - float_mask)) <NEW_LINE> alpha = F.softmax(logits, 1).transpose(1, 2) <NEW_LINE> batch, num_labels, max_seq_len = alpha.size() <NEW_LINE> expanded_ctxt = encoded_ctxt.unsqueeze(1).expand( -1, num_labels, -1, -1).contiguous().view( -1, max_seq_len, hdim) <NEW_LINE> alpha_flat = alpha.contiguous().view( -1, max_seq_len).unsqueeze(1) <NEW_LINE> weighted_ctxt = torch.bmm( alpha_flat, expanded_ctxt).squeeze(1) <NEW_LINE> outputs = weighted_ctxt.view(batch, num_labels, -1) <NEW_LINE> return outputs, alpha <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_params(cls, params: Params) -> 'DotAttention': <NEW_LINE> <INDENT> key_emb_size = params.pop("key_emb_size") <NEW_LINE> ctxt_emb_size = params.pop("ctxt_emb_size") <NEW_LINE> num_labels = params.pop("num_labels") <NEW_LINE> params.assert_empty(cls.__name__) <NEW_LINE> return DotAttention( key_emb_size=key_emb_size, ctxt_emb_size=ctxt_emb_size, num_labels=num_labels ) | This computes attention values based on
dot products for scores.
This class computes num_label attention distributions,
one for each label, however the projection network share
parameters. Note that the weight in proj_to_label_namespace
corresponds to the key for each label.
Arguments:
key_emb_size (int): The vector size of the key
ctxt_emb_size (int): The embedding size of the ctxt
num_labels (int): The number of labels | 6259905d7d847024c075da08 |
class NodesNotAvailable(NodeStateViolation): <NEW_LINE> <INDENT> api_error = httplib.CONFLICT | Requested node(s) are not available to be acquired. | 6259905d4f6381625f199fbd |
class Editor: <NEW_LINE> <INDENT> document: document.Document <NEW_LINE> drawing: drawing.Drawing <NEW_LINE> _clients: Dict[str, Tuple[str, str]] <NEW_LINE> _colors: List[str] <NEW_LINE> _color_index: int <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.document = document.Document() <NEW_LINE> self.drawing = drawing.Drawing() <NEW_LINE> self._clients = {} <NEW_LINE> self._colors = ['#AAFF00', '#FFAA00', '#FF00AA', '#AA00FF', '#00AAFF'] <NEW_LINE> self._color_index = 0 <NEW_LINE> <DEDENT> def get_clients_state(self) -> Iterator[List[str]]: <NEW_LINE> <INDENT> for session_id in self._clients: <NEW_LINE> <INDENT> alias = self._clients[session_id][0] <NEW_LINE> color = self._clients[session_id][1] <NEW_LINE> yield [alias, color] <NEW_LINE> <DEDENT> <DEDENT> def does_client_exist(self, session_id: str) -> bool: <NEW_LINE> <INDENT> return session_id in self._clients <NEW_LINE> <DEDENT> def add_client(self, session_id: str, alias: str): <NEW_LINE> <INDENT> color = self.get_next_color() <NEW_LINE> self._clients[session_id] = (alias, color) <NEW_LINE> return color <NEW_LINE> <DEDENT> def get_next_color(self): <NEW_LINE> <INDENT> new_color = self._colors[self._color_index] <NEW_LINE> self._color_index = (self._color_index + 1) % len(self._colors) <NEW_LINE> return new_color | Class representing a collaborate-code editor.
Instance Attributes:
- document: the text document of the editor
- drawing: the drawing of the editor | 6259905d460517430c432b6d |
class Symbol(Expr): <NEW_LINE> <INDENT> __slots__ = '_hash', '_name', 'dshape', '_token' <NEW_LINE> __inputs__ = () <NEW_LINE> def __init__(self, name, dshape, token=None): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> if isinstance(dshape, _strtypes): <NEW_LINE> <INDENT> dshape = datashape.dshape(dshape) <NEW_LINE> <DEDENT> if isinstance(dshape, Mono) and not isinstance(dshape, DataShape): <NEW_LINE> <INDENT> dshape = DataShape(dshape) <NEW_LINE> <DEDENT> self.dshape = dshape <NEW_LINE> self._token = token <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def _resources(self): <NEW_LINE> <INDENT> return dict() | Symbolic data. The leaf of a Blaze expression
Example
-------
>>> points = symbol('points', '5 * 3 * {x: int, y: int}') | 6259905d32920d7e50bc767d |
class SpecMachine(CarBase): <NEW_LINE> <INDENT> def __init__(self, car_type, brand, photo_file_name, carrying, extra): <NEW_LINE> <INDENT> super().__init__(car_type, brand, photo_file_name, carrying) <NEW_LINE> self.extra = extra | Special machine class | 6259905d07f4c71912bb0a73 |
class Validator(GenericValidator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.regexp = re.compile(r'^\d{2,10}$') <NEW_LINE> <DEDENT> def validate(self, vat_number): <NEW_LINE> <INDENT> if super(Validator, self).validate(vat_number) is False: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> vat_number = str(vat_number) <NEW_LINE> if vat_number[0] == '0': <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> vat_number = vat_number.rjust(10,str('0')) <NEW_LINE> checksum = int (vat_number[9]) <NEW_LINE> weights = [7, 5, 3, 2, 1, 7, 5, 3, 2] <NEW_LINE> checkval = self.sum_weights(weights, vat_number) <NEW_LINE> checkval = (checkval * 10) % 11 <NEW_LINE> if checkval==10: <NEW_LINE> <INDENT> checkval=0 <NEW_LINE> <DEDENT> return checkval == checksum | For rules see /docs/VIES-VAT Validation Routines-v15.0.doc | 6259905d442bda511e95d875 |
class Complex(FSLCommand): <NEW_LINE> <INDENT> _cmd = 'fslcomplex' <NEW_LINE> input_spec = ComplexInputSpec <NEW_LINE> output_spec = ComplexOuputSpec <NEW_LINE> def _parse_inputs(self, skip=None): <NEW_LINE> <INDENT> if skip == None: <NEW_LINE> <INDENT> skip = [] <NEW_LINE> <DEDENT> if self.inputs.real_cartesian: <NEW_LINE> <INDENT> skip += self.inputs._ofs[:3] <NEW_LINE> <DEDENT> elif self.inputs.real_polar: <NEW_LINE> <INDENT> skip += self.inputs._ofs[:1]+self.inputs._ofs[3:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> skip += self.inputs._ofs[1:] <NEW_LINE> <DEDENT> return super(Complex,self)._parse_inputs(skip) <NEW_LINE> <DEDENT> def _gen_filename(self, name): <NEW_LINE> <INDENT> if name == 'complex_out_file': <NEW_LINE> <INDENT> if self.inputs.complex_cartesian: <NEW_LINE> <INDENT> in_file = self.inputs.real_in_file <NEW_LINE> <DEDENT> elif self.inputs.complex_polar: <NEW_LINE> <INDENT> in_file = self.inputs.magnitude_in_file <NEW_LINE> <DEDENT> elif self.inputs.complex_split or self.inputs.complex_merge: <NEW_LINE> <INDENT> in_file = self.inputs.complex_in_file <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self._gen_fname(in_file, suffix="_cplx") <NEW_LINE> <DEDENT> elif name =='magnitude_out_file': <NEW_LINE> <INDENT> return self._gen_fname(self.inputs.complex_in_file, suffix="_mag") <NEW_LINE> <DEDENT> elif name =='phase_out_file': <NEW_LINE> <INDENT> return self._gen_fname(self.inputs.complex_in_file,suffix="_phase") <NEW_LINE> <DEDENT> elif name =='real_out_file': <NEW_LINE> <INDENT> return self._gen_fname(self.inputs.complex_in_file, suffix="_real") <NEW_LINE> <DEDENT> elif name =='imaginary_out_file': <NEW_LINE> <INDENT> return self._gen_fname(self.inputs.complex_in_file, suffix="_imag") <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def _get_output(self,name): <NEW_LINE> <INDENT> output = getattr(self.inputs,name) <NEW_LINE> if not isdefined(output): <NEW_LINE> <INDENT> output = self._gen_filename(name) <NEW_LINE> <DEDENT> return os.path.abspath(output) <NEW_LINE> <DEDENT> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = self.output_spec().get() <NEW_LINE> if self.inputs.complex_cartesian or self.inputs.complex_polar or self.inputs.complex_split or self.inputs.complex_merge: <NEW_LINE> <INDENT> outputs['complex_out_file'] = self._get_output('complex_out_file') <NEW_LINE> <DEDENT> elif self.inputs.real_cartesian: <NEW_LINE> <INDENT> outputs['real_out_file'] = self._get_output('real_out_file') <NEW_LINE> outputs['imaginary_out_file'] = self._get_output('imaginary_out_file') <NEW_LINE> <DEDENT> elif self.inputs.real_polar: <NEW_LINE> <INDENT> outputs['magnitude_out_file'] = self._get_output('magnitude_out_file') <NEW_LINE> outputs['phase_out_file'] = self._get_output('phase_out_file') <NEW_LINE> <DEDENT> return outputs | fslcomplex is a tool for converting complex data
Examples
--------
>>> cplx = Complex()
>>> cplx.inputs.complex_in_file = "complex.nii"
>>> cplx.real_polar = True
>>> res = cplx.run() # doctest: +SKIP | 6259905d627d3e7fe0e084c2 |
class Attachment: <NEW_LINE> <INDENT> __name__ = "ir.attachment" <NEW_LINE> def _json(self): <NEW_LINE> <INDENT> rv = { 'create_date': self.create_date.isoformat(), "objectType": self.__name__, "id": self.id, "updatedBy": self.uploaded_by._json(), "displayName": self.name, "description": self.description, } <NEW_LINE> if has_request_context(): <NEW_LINE> <INDENT> rv['downloadUrl'] = url_for( 'project.work.download_file', attachment_id=self.id, task=Transaction().context.get('task'), ) <NEW_LINE> <DEDENT> return rv | Ir Attachment | 6259905d99cbb53fe6832517 |
class TestFeature3(BaseFeature): <NEW_LINE> <INDENT> LABELS = None <NEW_LINE> ATTRIBUTES = None | Some example doc string.
References
----------
Doe, J. Nature. (2016).
Smith, J. Science. (2010).
Other
-----
Something else. | 6259905de64d504609df9eea |
class MaxSizeStringIO(StringIO): <NEW_LINE> <INDENT> def __init__(self, max_size=None, buffer=None): <NEW_LINE> <INDENT> args = [] <NEW_LINE> if buffer is not None: <NEW_LINE> <INDENT> args.append(buffer) <NEW_LINE> <DEDENT> StringIO.__init__(self, *args) <NEW_LINE> self.__max_size = max_size <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> if self.tell() + len(data) > self.__max_size: <NEW_LINE> <INDENT> raise MemoryError <NEW_LINE> <DEDENT> StringIO.write(self, data) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> self.close() | raises a MemoryError when trying to write more than max size | 6259905d2ae34c7f260ac71e |
class BaseModel(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_json(cls, json_str): <NEW_LINE> <INDENT> return cls.from_dict(json.loads(json_str)) <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return json.dumps(self.to_dict()) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, data): <NEW_LINE> <INDENT> model = cls() <NEW_LINE> for key in data: <NEW_LINE> <INDENT> setattr(model, key, data.get(key)) <NEW_LINE> <DEDENT> return model <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for key in self.__dict__: <NEW_LINE> <INDENT> result[key] = getattr(self, key) <NEW_LINE> if isinstance(result[key], BaseModel): <NEW_LINE> <INDENT> result[key] = result[key].to_dict() <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s" % self.to_dict() | Superclass responsible for converting json data to/from model | 6259905d99cbb53fe6832518 |
class ConstructFlowEcho(Flow): <NEW_LINE> <INDENT> def constructor(self, data: dict) -> list: <NEW_LINE> <INDENT> arg_string = " ".join(f"-{key} {val}" for (key, val) in data.items()) <NEW_LINE> final_string = [f"ls {arg_string}"] <NEW_LINE> return final_string | This will construct bash script to run Echo command | 6259905d7d847024c075da09 |
class ObjectGroupRight(APIBaseID): <NEW_LINE> <INDENT> relatedObject = fields.EmbeddedField(APIBase) <NEW_LINE> relatedGroup = fields.EmbeddedField(APIBase) <NEW_LINE> relatedRights= fields.ListField(APIBase) | relations between object, group and right (permission) | 6259905db7558d5895464a48 |
class FunctionInstance(AcquisitionFunction.FunctionInstance): <NEW_LINE> <INDENT> def __init__(self, model, desired_extremum, incumbent_cost, xi): <NEW_LINE> <INDENT> super().__init__(model, desired_extremum) <NEW_LINE> self.incumbent_cost = incumbent_cost <NEW_LINE> self.xi = xi <NEW_LINE> self.scale_factor = 1 if desired_extremum == 'max' else -1 <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return 'EI' <NEW_LINE> <DEDENT> def __call__(self, X): <NEW_LINE> <INDENT> mus, sigmas = self.model.predict(X, return_std_dev=True) <NEW_LINE> mask = sigmas != 0 <NEW_LINE> sigmas = sigmas[mask] <NEW_LINE> diff = self.scale_factor * (mus[mask] - self.incumbent_cost) - self.xi <NEW_LINE> Zs = diff / sigmas <NEW_LINE> EIs = np.zeros_like(mus) <NEW_LINE> EIs[mask] = (diff * norm.cdf(Zs)) + (sigmas * norm.pdf(Zs)) <NEW_LINE> return EIs | Expected Improvement Acquisition function
Expected Improvement is related to probability of improvement in that they
both compare points with the incumbent (best) trial, making them both
improvement based acquisition functions. However EI is vastly superior to PI
(which unlike EI is prone to over exploitation and not enough exploration)
because EI takes the value/amount of improvement over the incumbent into
account in addition to the probability of that improvement, whereas PI
treats all amounts of improvement the same.
Note:
the formulas shown here are for the case of maximisation.
.. math::
EI(\mathbf x)=\mathbb E\left[max(0,\; f(\mathbf x)-f(\mathbf x^+))\right]
Where :math:`f` is the *surrogate* objective function and :math:`\mathbf x^+` is the
parameter values for the best known (incumbent) trial.
If :math:`f` is a Gaussian Process then EI can be calculated analytically:
.. math::
EI(\mathbf x)=\begin{cases}
\left(\mu(\mathbf x)-f(\mathbf x^+)\right)\mathbf\Phi(Z) \;+\; \sigma(\mathbf x)\phi(Z) & \text{if } \sigma(\mathbf x)>0\\
0 & \text{if } \sigma(\mathbf x) = 0
\end{cases}
.. math::
Z=\frac{\mu(\mathbf x)-f(\mathbf x^+)}{\sigma(\mathbf x)}
Where
- :math:`\phi(\cdot)=` standard multivariate normal distribution PDF (ie :math:`\boldsymbol\mu=\mathbf 0,\;\Sigma=I`)
- :math:`\Phi(\cdot)=` standard multivariate normal distribution CDF
a parameter :math:`\xi` (sometimes called 'jitter') can be introduced to
control the exploitation-exploration trade-off (:math:`\xi=0.01` works well
in 'almost all cases' (Lizotte, 2008))
.. math::
EI(\mathbf x)=\begin{cases}
\left(\mu(\mathbf x)-f(\mathbf x^+)-\xi\right)\mathbf\Phi(Z) \;+\; \sigma(\mathbf x)\phi(Z) & \text{if } \sigma(\mathbf x)>0\\
0 & \text{if } \sigma(\mathbf x) = 0
\end{cases}
.. math::
Z=\frac{\mu(\mathbf x)-f(\mathbf x^+)-\xi}{\sigma(\mathbf x)} | 6259905d55399d3f05627b57 |
class TestRecombinationError(TestLowLevelSimulate): <NEW_LINE> <INDENT> def test_bad_types(self): <NEW_LINE> <INDENT> self._recombination_probabilities = {} <NEW_LINE> self.assertRaises(TypeError, self.simulate) <NEW_LINE> self._recombination_probabilities = None <NEW_LINE> self.assertRaises(TypeError, self.simulate) <NEW_LINE> <DEDENT> def test_bad_values(self): <NEW_LINE> <INDENT> errors = ["0.1", {}, [], None, -1, 1000, 1.01] <NEW_LINE> for error in errors: <NEW_LINE> <INDENT> self._recombination_probabilities = [error] <NEW_LINE> self.assertRaises(_ercs.InputError, self.simulate) <NEW_LINE> <DEDENT> good_values = [0.1, 0.2, 0.3] <NEW_LINE> self._recombination_probabilities = good_values <NEW_LINE> self.simulate() <NEW_LINE> for error in errors: <NEW_LINE> <INDENT> self._recombination_probabilities = good_values + [error] <NEW_LINE> random.shuffle(self._recombination_probabilities) <NEW_LINE> self.assertRaises(_ercs.InputError, self.simulate) | Test the list of recombination probabilities to see if bad arguments
are caught correctly. | 6259905dadb09d7d5dc0bba2 |
class SlotRegistrationNo(AbsParking): <NEW_LINE> <INDENT> name = "slot_number_for_registration_number" <NEW_LINE> def __init__(self, args): <NEW_LINE> <INDENT> self.registration_no = args[1] <NEW_LINE> <DEDENT> def execute(self, parking_lot=None): <NEW_LINE> <INDENT> for index, slot in enumerate(parking_lot.slots): <NEW_LINE> <INDENT> if slot and slot['registration_no'] == self.registration_no: <NEW_LINE> <INDENT> print(index+1) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> print("Not found") | class to leave a particular parking slot | 6259905d4e4d562566373a3f |
class Gravatar(object): <NEW_LINE> <INDENT> def __init__(self, app=None, size=100, rating='g', default='retro', force_default=False, force_lower=False, use_ssl=False, base_url=None, **kwargs): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> self.rating = rating <NEW_LINE> self.default = default <NEW_LINE> self.force_default = force_default <NEW_LINE> self.force_lower = force_lower <NEW_LINE> self.use_ssl = use_ssl <NEW_LINE> self.base_url = base_url <NEW_LINE> self.app = None <NEW_LINE> if app is not None: <NEW_LINE> <INDENT> self.init_app(app, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def get_app(self, reference_app=None): <NEW_LINE> <INDENT> if reference_app is not None: <NEW_LINE> <INDENT> return reference_app <NEW_LINE> <DEDENT> if self.app is not None: <NEW_LINE> <INDENT> return self.app <NEW_LINE> <DEDENT> ctx = connection_stack.top <NEW_LINE> if ctx is not None: <NEW_LINE> <INDENT> return ctx.app <NEW_LINE> <DEDENT> raise RuntimeError('Application not registered on Gravatar' ' instance and no application bound' ' to current context') <NEW_LINE> <DEDENT> def init_app(self, app): <NEW_LINE> <INDENT> if not hasattr(app, 'extensions'): <NEW_LINE> <INDENT> app.extensions = {} <NEW_LINE> <DEDENT> app.jinja_env.filters.setdefault('gravatar', self) <NEW_LINE> app.extensions['gravatar'] = self <NEW_LINE> <DEDENT> def __call__(self, email, size=None, rating=None, default=None, force_default=None, force_lower=False, use_ssl=None, base_url=None): <NEW_LINE> <INDENT> if size is None: <NEW_LINE> <INDENT> size = self.size <NEW_LINE> <DEDENT> if rating is None: <NEW_LINE> <INDENT> rating = self.rating <NEW_LINE> <DEDENT> if default is None: <NEW_LINE> <INDENT> default = self.default <NEW_LINE> <DEDENT> if force_default is None: <NEW_LINE> <INDENT> force_default = self.force_default <NEW_LINE> <DEDENT> if force_lower is None: <NEW_LINE> <INDENT> force_lower = self.force_lower <NEW_LINE> <DEDENT> if force_lower: <NEW_LINE> <INDENT> email = email.lower() <NEW_LINE> <DEDENT> if use_ssl is None: <NEW_LINE> <INDENT> use_ssl = self.use_ssl <NEW_LINE> <DEDENT> if base_url is None: <NEW_LINE> <INDENT> base_url = self.base_url <NEW_LINE> <DEDENT> if base_url is not None: <NEW_LINE> <INDENT> url = base_url + 'avatar/' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if use_ssl: <NEW_LINE> <INDENT> url = 'https://secure.gravatar.com/avatar/' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = 'http://www.gravatar.com/avatar/' <NEW_LINE> <DEDENT> <DEDENT> hash = hashlib.md5(email).hexdigest() <NEW_LINE> link = '{url}{hash}' '?s={size}&d={default}&r={rating}'.format(**locals()) <NEW_LINE> if force_default: <NEW_LINE> <INDENT> link = link + '&f=y' <NEW_LINE> <DEDENT> return link | Simple object for create gravatar link.
gravatar = Gravatar(app,
size=100,
rating='g',
default='retro',
force_default=False,
force_lower=False,
use_ssl=False,
base_url=None
)
:param app: Your Flask app instance
:param size: Default size for avatar
:param rating: Default rating
:param default: Default type for unregistred emails
:param force_default: Build only default avatars
:param force_lower: Make email.lower() before build link
:param use_ssl: Use https rather than http
:param base_url: Use custom base url for build link | 6259905da79ad1619776b5d9 |
class CSVModel: <NEW_LINE> <INDENT> fields = {'ID':None,'Password':None,'Text':None} <NEW_LINE> def __init__(self, saved_data): <NEW_LINE> <INDENT> self.saved_data = saved_data <NEW_LINE> self.load_data = {} | CSV Storage | 6259905d460517430c432b6e |
class Greeter(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def SayHello(request, target, options=(), channel_credentials=None, call_credentials=None, compression=None, wait_for_ready=None, timeout=None, metadata=None): <NEW_LINE> <INDENT> return grpc.experimental.unary_unary(request, target, '/proto.Greeter/SayHello', hellogrpc__pb2.HelloRequest.SerializeToString, hellogrpc__pb2.HelloReply.FromString, options, channel_credentials, call_credentials, compression, wait_for_ready, timeout, metadata) | The greeter service definition.
| 6259905dfff4ab517ebcee5d |
class HazardDetectionLight(CloudMsg): <NEW_LINE> <INDENT> class Request(CloudRequest): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.imageFilepath = '' <NEW_LINE> super(HazardDetectionLight.Request, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def make_payload(self): <NEW_LINE> <INDENT> return Payload() <NEW_LINE> <DEDENT> def make_files(self): <NEW_LINE> <INDENT> return [File(self.imageFilepath, postfield='file')] <NEW_LINE> <DEDENT> <DEDENT> class Response(CloudResponse): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.error = '' <NEW_LINE> self.light_level = 0.0 <NEW_LINE> super(HazardDetectionLight.Response, self).__init__(**kwargs) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.req = HazardDetectionLight.Request() <NEW_LINE> self.resp = HazardDetectionLight.Response() <NEW_LINE> super(HazardDetectionLight, self).__init__( svcname='hazard_detection_light_check', **kwargs) | Hazard Detection Light Check Cloud Message object | 6259905dbaa26c4b54d508de |
class MarketingViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> api_name = 'marketing' <NEW_LINE> queryset = Marketing.objects.all() <NEW_LINE> serializer_class = MarketingSerializer <NEW_LINE> permission_classes = (AllowAny,) <NEW_LINE> renderer_classes = [r.CSVRenderer, ] + api_settings.DEFAULT_RENDERER_CLASSES | the marketing view | 6259905d1b99ca4002290053 |
class SrcImplicit(TestSystem): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> TestSystem.__init__(self, **kwargs) <NEW_LINE> pdb_filename = get_data_filename("data/src-implicit/1yi6-minimized.pdb") <NEW_LINE> pdbfile = app.PDBFile(pdb_filename) <NEW_LINE> forcefields_to_use = ['amber99sbildn.xml', 'amber99_obc.xml'] <NEW_LINE> forcefield = app.ForceField(*forcefields_to_use) <NEW_LINE> system = forcefield.createSystem(pdbfile.topology, nonbondedMethod=app.NoCutoff, constraints=app.HBonds) <NEW_LINE> positions = pdbfile.getPositions() <NEW_LINE> self.system, self.positions, self.topology = system, positions, pdbfile.topology | Src kinase in implicit AMBER 99sb-ildn with OBC GBSA solvent.
Examples
--------
>>> src = SrcImplicit()
>>> system, positions = src.system, src.positions | 6259905d07f4c71912bb0a74 |
class UpdateModelMixin(object): <NEW_LINE> <INDENT> def update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> partial = kwargs.pop('partial', False) <NEW_LINE> self.object = self.get_object_or_none() <NEW_LINE> if self.object is None: <NEW_LINE> <INDENT> created = True <NEW_LINE> save_kwargs = {'force_insert': True} <NEW_LINE> success_status_code = status.HTTP_201_CREATED <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> created = False <NEW_LINE> save_kwargs = {'force_update': True} <NEW_LINE> success_status_code = status.HTTP_200_OK <NEW_LINE> <DEDENT> serializer = self.get_serializer(self.object, data=request.DATA, files=request.FILES, partial=partial) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> self.pre_save(serializer.object) <NEW_LINE> self.object = serializer.save(**save_kwargs) <NEW_LINE> self.post_save(self.object, created=created) <NEW_LINE> return Response(serializer.data, status=success_status_code) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def partial_update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> kwargs['partial'] = True <NEW_LINE> return self.update(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_object_or_none(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.get_object() <NEW_LINE> <DEDENT> except Http404: <NEW_LINE> <INDENT> if self.request.method == 'PUT': <NEW_LINE> <INDENT> self.check_permissions(clone_request(self.request, 'POST')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def pre_save(self, obj): <NEW_LINE> <INDENT> lookup = self.kwargs.get(self.lookup_field, None) <NEW_LINE> pk = self.kwargs.get(self.pk_url_kwarg, None) <NEW_LINE> slug = self.kwargs.get(self.slug_url_kwarg, None) <NEW_LINE> slug_field = slug and self.slug_field or None <NEW_LINE> if lookup: <NEW_LINE> <INDENT> setattr(obj, self.lookup_field, lookup) <NEW_LINE> <DEDENT> if pk: <NEW_LINE> <INDENT> setattr(obj, 'pk', pk) <NEW_LINE> <DEDENT> if slug: <NEW_LINE> <INDENT> setattr(obj, slug_field, slug) <NEW_LINE> <DEDENT> if hasattr(obj, 'full_clean'): <NEW_LINE> <INDENT> exclude = _get_validation_exclusions(obj, pk, slug_field, self.lookup_field) <NEW_LINE> obj.full_clean(exclude) | Update a model instance. | 6259905da8370b77170f1a06 |
class PresenceAnalyzerUtilsTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> main.app.config.update({'DATA_CSV': TEST_DATA_CSV}) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_data(self): <NEW_LINE> <INDENT> data = utils.get_data() <NEW_LINE> self.assertIsInstance(data, dict) <NEW_LINE> self.assertItemsEqual(data.keys(), [10, 11]) <NEW_LINE> sample_date = datetime.date(2013, 9, 10) <NEW_LINE> self.assertIn(sample_date, data[10]) <NEW_LINE> self.assertItemsEqual(data[10][sample_date].keys(), ['start', 'end']) <NEW_LINE> self.assertEqual( data[10][sample_date]['start'], datetime.time(9, 39, 5) ) <NEW_LINE> <DEDENT> def test_seconds_since_midnight(self): <NEW_LINE> <INDENT> data = utils.seconds_since_midnight(datetime.time(1, 2, 3)) <NEW_LINE> self.assertEqual(data, 3723) <NEW_LINE> <DEDENT> def test_interval(self): <NEW_LINE> <INDENT> start = datetime.time(1) <NEW_LINE> end = datetime.time(1, 2, 3) <NEW_LINE> data = utils.interval(start, end) <NEW_LINE> self.assertEqual(data, 123) <NEW_LINE> <DEDENT> def test_mean(self): <NEW_LINE> <INDENT> self.assertEqual(utils.mean([0]), 0) <NEW_LINE> self.assertEqual(utils.mean(range(1, 10)), 5.) <NEW_LINE> self.assertEqual(utils.mean(range(1, 5)), 2.5) <NEW_LINE> self.assertIsInstance(utils.mean([0]), float) <NEW_LINE> <DEDENT> def test_group_by_weekday(self): <NEW_LINE> <INDENT> data = utils.get_data() <NEW_LINE> li = [[], [30047], [24465], [23705], [], [], []] <NEW_LINE> self.assertEqual(utils.group_by_weekday(data[10]), li) | Utility functions tests. | 6259905d8e7ae83300eea6c6 |
class TwitterScraper(twitter.Api): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> super().__init__(consumer_key=config['consumer_key'], consumer_secret=config['consumer_secret_key'], access_token_key=config['access_token'], access_token_secret=config['access_token_secret']) <NEW_LINE> self._sconfig = config <NEW_LINE> <DEDENT> def search_keywords(self): <NEW_LINE> <INDENT> for keyword in self._sconfig['keywords']: <NEW_LINE> <INDENT> geocode=self._sconfig['geocode']+','+ self._sconfig['radius'] <NEW_LINE> results = self.GetSearch(term=keyword,count=100,geocode=geocode) <NEW_LINE> print(len(results)) <NEW_LINE> for x in results: <NEW_LINE> <INDENT> if not x.coordinates: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> print(x.coordinates) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def parse_status(self,status,keywords): <NEW_LINE> <INDENT> pass | Scraps twitter for disaster information. Stores any information in a database. | 6259905d4428ac0f6e659b76 |
class IFeaturedListingsTile(IBaseCollectionTile): <NEW_LINE> <INDENT> directives.order_before(content_uid='*') <NEW_LINE> content_uid = schema.Choice( required=True, source=CatalogSource( object_provides=IFeaturedListings.__identifier__, path={ 'query': [''], 'depth': -1, }, ), title=_(u'Select an existing featured listings collection'), ) | Configuration schema for a featured listings collection. | 6259905d01c39578d7f14253 |
class SqlCreateOptions(usage.Options): <NEW_LINE> <INDENT> synopsis = '[options] <file>' <NEW_LINE> optFlags = [ ['dump', 'd', 'dump SQL scripts to standrad output'], ['live', 'l', 'live changes to the databse using database config file'] ] <NEW_LINE> def parseArgs(self, file=None): <NEW_LINE> <INDENT> if file is None: <NEW_LINE> <INDENT> self['file'] = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self['file'] = file if file.endswith('.sql') else '{}.sql'.format( file) <NEW_LINE> <DEDENT> <DEDENT> def opt_version(self): <NEW_LINE> <INDENT> show_version() <NEW_LINE> sys.exit(0) <NEW_LINE> <DEDENT> def postOptions(self): <NEW_LINE> <INDENT> if self['file'] is None and not self['dump'] and not self['live']: <NEW_LINE> <INDENT> print(self) <NEW_LINE> <DEDENT> if self['live'] and (self['dump'] or self['file'] is not None): <NEW_LINE> <INDENT> if commons.Interaction.userchoice( 'What do you want to do. Dump the script or execute it?', ('1', '2'), ('Dump it', 'Execute it') ) == '1': <NEW_LINE> <INDENT> self['live'] = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self['dump'] = 0 <NEW_LINE> self['file'] = None | Sql Create options for mamba-admin tool
| 6259905d3c8af77a43b68a5d |
class IOOptions(_object): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, IOOptions, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, IOOptions, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> this = _ilwisobjects.new_IOOptions(*args) <NEW_LINE> try: <NEW_LINE> <INDENT> self.this.append(this) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.this = this <NEW_LINE> <DEDENT> <DEDENT> def contains(self, option): <NEW_LINE> <INDENT> return _ilwisobjects.IOOptions_contains(self, option) <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return _ilwisobjects.IOOptions_size(self) <NEW_LINE> <DEDENT> def __getitem__(self, option): <NEW_LINE> <INDENT> return _ilwisobjects.IOOptions___getitem__(self, option) <NEW_LINE> <DEDENT> def addOption(self, key, value): <NEW_LINE> <INDENT> return _ilwisobjects.IOOptions_addOption(self, key, value) <NEW_LINE> <DEDENT> __swig_destroy__ = _ilwisobjects.delete_IOOptions <NEW_LINE> __del__ = lambda self: None | Proxy of C++ pythonapi::IOOptions class. | 6259905d2c8b7c6e89bd4e27 |
class VBRAINSDemonWarp(SlicerCommandLine): <NEW_LINE> <INDENT> input_spec = VBRAINSDemonWarpInputSpec <NEW_LINE> output_spec = VBRAINSDemonWarpOutputSpec <NEW_LINE> _cmd = " VBRAINSDemonWarp " <NEW_LINE> _outputs_filenames = {'outputVolume':'outputVolume.nii','outputCheckerboardVolume':'outputCheckerboardVolume.nii','outputDeformationFieldVolume':'outputDeformationFieldVolume.nrrd'} | title: Vector Demon Registration (BRAINS)
category: Registration
description:
This program finds a deformation field to warp a moving image onto a fixed image. The images must be of the same signal kind, and contain an image of the same kind of object. This program uses the Thirion Demons warp software in ITK, the Insight Toolkit. Additional information is available at: http://www.nitrc.org/projects/brainsdemonwarp.
version: 3.0.0
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Modules:BRAINSDemonWarp
license: https://www.nitrc.org/svn/brains/BuildScripts/trunk/License.txt
contributor: This tool was developed by Hans J. Johnson and Greg Harris.
acknowledgements: The development of this tool was supported by funding from grants NS050568 and NS40068 from the National Institute of Neurological Disorders and Stroke and grants MH31593, MH40856, from the National Institute of Mental Health. | 6259905d462c4b4f79dbd03f |
class BayesianLogisticRegression(Variational_Loss): <NEW_LINE> <INDENT> def __init__(self, D, hdim_mean, hdim_var, b): <NEW_LINE> <INDENT> super().__init__(D, hdim_mean, hdim_var, b) <NEW_LINE> self.optimizer_mean_0 = optim.Adam(self.nn_mean_0.parameters(),lr=1e-2) <NEW_LINE> self.optimizer_mean_1 = optim.Adam(self.nn_mean_1.parameters(),lr=1e-2) <NEW_LINE> self.optimizer_cov_0 = optim.Adam(self.nn_cov_0.parameters(),lr=1e-2) <NEW_LINE> self.optimizer_cov_1 = optim.Adam(self.nn_cov_1.parameters(),lr=1e-2) <NEW_LINE> <DEDENT> def SGD_step(self, x, y, mc=1, verbose=True, train_mean=True, train_cov=True): <NEW_LINE> <INDENT> self.optimizer_mean_0.zero_grad() <NEW_LINE> self.optimizer_mean_1.zero_grad() <NEW_LINE> self.optimizer_cov_0.zero_grad() <NEW_LINE> self.optimizer_cov_1.zero_grad() <NEW_LINE> self.compute_mean_cov(x, y) <NEW_LINE> self.sample_from_q(x.shape[1], mc) <NEW_LINE> self.ELBO(x, y, mc) <NEW_LINE> self.ELBO_loss.backward() <NEW_LINE> if train_mean: <NEW_LINE> <INDENT> self.optimizer_mean_0.step() <NEW_LINE> self.optimizer_mean_1.step() <NEW_LINE> <DEDENT> if train_cov: <NEW_LINE> <INDENT> self.optimizer_cov_0.step() <NEW_LINE> self.optimizer_cov_1.step() <NEW_LINE> <DEDENT> if verbose: <NEW_LINE> <INDENT> print('\nELBO loss: ', self.ELBO_loss) <NEW_LINE> <DEDENT> <DEDENT> def predict(self, x_star, mc=200): <NEW_LINE> <INDENT> self.sample_from_q(x_star.shape[1], n_samples=mc) <NEW_LINE> prob = torch.mean(torch.sigmoid([email protected]), dim=1) <NEW_LINE> return prob | Bayesian Logistic Regressor.
Parameters:
D (int) - number of features (dimension of the input data), which
corresponds to the input and output dimension of the Neural Networks.
hdim_mean (int) - dimension of the hidden layer for the Neural
Networks that compute the mean vector (weights).
hdim_var (int) - dimension of the hidden layer for the Neural
Networks that compute de diagonal of the covariance matrices.
b (float) - diversity parameter of the prior Laplace ditribution.
Attributes:
ELBO_loss (float) - last computed value of the minus Evidence Lower Bound.
mean (tensor) - tensor of shape (D,1), being D the number of features,
containing the estimated weights.
cov (tensor) - tensor of shape (D,D), being D the number of features,
containing the estimated diagonal covariance matrix. | 6259905d004d5f362081fb0b |
class WANTestConfigParser(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.config = ConfigParser.RawConfigParser() <NEW_LINE> self.config.read('../tmp/wantest.ini') <NEW_LINE> self.ip_addresses = self.config.get('ping', 'ip_addresses') <NEW_LINE> self.domain_names = self.config.get('nslookup', 'domain_names') <NEW_LINE> self.portals = self.config.get('portal', 'portals') <NEW_LINE> self.web_urls = self.config.get('signin', 'web_urls') <NEW_LINE> self.usernames = self.config.get('signin', 'usernames') <NEW_LINE> self.passwords = self.config.get('signin', 'passwords') <NEW_LINE> self.smtp_host = self.config.get('email', 'smtp_host') <NEW_LINE> self.mail_port = self.config.get('email', 'mail_port') <NEW_LINE> self.use_ssl = self.config.get('email', 'use_ssl') <NEW_LINE> self.mail_user = self.config.get('email', 'mail_user') <NEW_LINE> self.mail_pass = self.config.get('email', 'mail_pass') <NEW_LINE> self.mail_postfix = self.config.get('email', 'mail_postfix') <NEW_LINE> self.mail_to_list = self.config.get('email', 'mail_to_list') <NEW_LINE> self.behind_proxy = self.config.getint('email', 'behind_proxy') <NEW_LINE> self.proxy_host = self.config.get('email', 'proxy_host') <NEW_LINE> self.proxy_port = self.config.getint('email', 'proxy_port') <NEW_LINE> <DEDENT> def show_info(self): <NEW_LINE> <INDENT> print("--- ping ---") <NEW_LINE> print("ip_addresses: {0}".format(self.ip_addresses)) <NEW_LINE> print("\n--- nslookup ---") <NEW_LINE> print("domain_names: {0}".format(self.domain_names)) <NEW_LINE> print("portals: {0}".format(self.portals)) <NEW_LINE> print("\n--- email ---") <NEW_LINE> print("smtp_host: {0}".format(self.smtp_host)) <NEW_LINE> print("use_ssl: {0}".format(self.use_ssl)) <NEW_LINE> print("mail_port: {0}".format(self.mail_port)) <NEW_LINE> print("mail_user: {0}".format(self.mail_user)) <NEW_LINE> print("mail_pass: {0}".format(self.mail_pass)) <NEW_LINE> print("mail_postfix: {0}".format(self.mail_postfix)) <NEW_LINE> print("mail_to_list: {0}".format(self.mail_to_list)) <NEW_LINE> print("behind_proxy: {0}".format(self.behind_proxy)) <NEW_LINE> print("proxy_host: {0}".format(self.proxy_host)) <NEW_LINE> print("proxy_port: {0}".format(self.proxy_port)) <NEW_LINE> print("\n--- signin ---") <NEW_LINE> print("web_urls: {0}".format(self.web_urls)) <NEW_LINE> print("usernames: {0}".format(self.usernames)) <NEW_LINE> print("passwords: {0}".format(self.passwords)) | Parses the WANTest configuration file wantest.ini. | 6259905d7d847024c075da0b |
class BaseEstimator(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _get_param_names(cls): <NEW_LINE> <INDENT> from inspect import signature <NEW_LINE> init = getattr(cls.__init__, 'deprecated_original', cls.__init__) <NEW_LINE> if init is object.__init__: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> init_signature = signature(init) <NEW_LINE> parameters = [p for p in init_signature.parameters.values() if p.name != 'self' and p.kind != p.VAR_KEYWORD] <NEW_LINE> for p in parameters: <NEW_LINE> <INDENT> if p.kind == p.VAR_POSITIONAL: <NEW_LINE> <INDENT> raise RuntimeError("scikit-learn estimators should always " "specify their parameters in the signature" " of their __init__ (no varargs)." " %s with constructor %s doesn't " " follow this convention." % (cls, init_signature)) <NEW_LINE> <DEDENT> <DEDENT> return sorted([p.name for p in parameters]) <NEW_LINE> <DEDENT> def get_params(self, deep=True): <NEW_LINE> <INDENT> out = dict() <NEW_LINE> for key in self._get_param_names(): <NEW_LINE> <INDENT> warnings.simplefilter("always", DeprecationWarning) <NEW_LINE> try: <NEW_LINE> <INDENT> with warnings.catch_warnings(record=True) as w: <NEW_LINE> <INDENT> value = getattr(self, key, None) <NEW_LINE> <DEDENT> if len(w) and w[0].category == DeprecationWarning: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> warnings.filters.pop(0) <NEW_LINE> <DEDENT> if deep and hasattr(value, 'get_params'): <NEW_LINE> <INDENT> deep_items = value.get_params().items() <NEW_LINE> out.update((key + '__' + k, val) for k, val in deep_items) <NEW_LINE> <DEDENT> out[key] = value <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> def set_params(self, **params): <NEW_LINE> <INDENT> if not params: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> valid_params = self.get_params(deep=True) <NEW_LINE> for key, value in params.items(): <NEW_LINE> <INDENT> split = key.split('__', 1) <NEW_LINE> if len(split) > 1: <NEW_LINE> <INDENT> name, sub_name = split <NEW_LINE> if name not in valid_params: <NEW_LINE> <INDENT> raise ValueError('Invalid parameter %s for estimator %s. ' 'Check the list of available parameters ' 'with `estimator.get_params().keys()`.' % (name, self)) <NEW_LINE> <DEDENT> sub_object = valid_params[name] <NEW_LINE> sub_object.set_params(**{sub_name: value}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if key not in valid_params: <NEW_LINE> <INDENT> raise ValueError('Invalid parameter %s for estimator %s. ' 'Check the list of available parameters ' 'with `estimator.get_params().keys()`.' % (key, self.__class__.__name__)) <NEW_LINE> <DEDENT> setattr(self, key, value) <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> from sklearn.base import _pprint <NEW_LINE> class_name = self.__class__.__name__ <NEW_LINE> return '%s(%s)' % (class_name, _pprint(self.get_params(deep=False), offset=len(class_name),),) | Base class for all estimators in scikit-learn
Notes
-----
All estimators should specify all the parameters that can be set
at the class level in their ``__init__`` as explicit keyword
arguments (no ``*args`` or ``**kwargs``). | 6259905d16aa5153ce401b1d |
class ProvisionSchedule(Base, ProvisionFormButtonMixin): <NEW_LINE> <INDENT> _when_to_provision_radio_locator = ( By.CSS_SELECTOR, "input[name='schedule__schedule_type']") <NEW_LINE> _power_on_checkbox_locator = (By.ID, "schedule__vm_auto_start") <NEW_LINE> _retirement_select_locator = (By.ID, "schedule__retirement") <NEW_LINE> @property <NEW_LINE> def when_to_provision(self): <NEW_LINE> <INDENT> return self.selenium.find_elements( *self._when_to_provision_radio_locator) <NEW_LINE> <DEDENT> @property <NEW_LINE> def power_on_after_creation(self): <NEW_LINE> <INDENT> return self.get_element(*self._power_on_checkbox_locator) <NEW_LINE> <DEDENT> @property <NEW_LINE> def retirement(self): <NEW_LINE> <INDENT> return Select(self.get_element(*self._retirement_select_locator)) <NEW_LINE> <DEDENT> def fill_fields( self, when_to_provision_selection, power_on_after_creation_check, retirement_selection): <NEW_LINE> <INDENT> self.when_to_provision[0].click() <NEW_LINE> self._wait_for_results_refresh() <NEW_LINE> self.when_to_provision[when_to_provision_selection].click() <NEW_LINE> self._wait_for_results_refresh() <NEW_LINE> if power_on_after_creation_check is not None: <NEW_LINE> <INDENT> if power_on_after_creation_check and not self.power_on_after_creation.is_selected(): <NEW_LINE> <INDENT> self.power_on_after_creation.click() <NEW_LINE> self._wait_for_results_refresh() <NEW_LINE> <DEDENT> <DEDENT> self.retirement.select_by_visible_text(retirement_selection) <NEW_LINE> self._wait_for_results_refresh() <NEW_LINE> return ProvisionSchedule(self.testsetup) | Provision wizard - Schedule tab | 6259905d3cc13d1c6d466d7a |
class ForeignDataWrapper(DbObjectWithOptions): <NEW_LINE> <INDENT> objtype = "FOREIGN DATA WRAPPER" <NEW_LINE> single_extern_file = True <NEW_LINE> @property <NEW_LINE> def allprivs(self): <NEW_LINE> <INDENT> return 'U' <NEW_LINE> <DEDENT> def to_map(self, no_owner, no_privs): <NEW_LINE> <INDENT> wrapper = self._base_map(no_owner, no_privs) <NEW_LINE> if hasattr(self, 'servers'): <NEW_LINE> <INDENT> srvs = {} <NEW_LINE> for srv in self.servers: <NEW_LINE> <INDENT> srvs.update(self.servers[srv].to_map(no_owner, no_privs)) <NEW_LINE> <DEDENT> wrapper.update(srvs) <NEW_LINE> del wrapper['servers'] <NEW_LINE> <DEDENT> return wrapper <NEW_LINE> <DEDENT> @commentable <NEW_LINE> @grantable <NEW_LINE> @ownable <NEW_LINE> def create(self): <NEW_LINE> <INDENT> clauses = [] <NEW_LINE> for fnc in ['validator', 'handler']: <NEW_LINE> <INDENT> if hasattr(self, fnc): <NEW_LINE> <INDENT> clauses.append("%s %s" % (fnc.upper(), getattr(self, fnc))) <NEW_LINE> <DEDENT> <DEDENT> if hasattr(self, 'options'): <NEW_LINE> <INDENT> clauses.append(self.options_clause()) <NEW_LINE> <DEDENT> return ["CREATE FOREIGN DATA WRAPPER %s%s" % ( quote_id(self.name), clauses and '\n ' + ',\n '.join(clauses) or '')] <NEW_LINE> <DEDENT> def diff_map(self, inwrapper): <NEW_LINE> <INDENT> stmts = super(ForeignDataWrapper, self).diff_map(inwrapper) <NEW_LINE> if hasattr(inwrapper, 'owner'): <NEW_LINE> <INDENT> if inwrapper.owner != self.owner: <NEW_LINE> <INDENT> stmts.append(self.alter_owner(inwrapper.owner)) <NEW_LINE> <DEDENT> <DEDENT> stmts.append(self.diff_description(inwrapper)) <NEW_LINE> return stmts | A foreign data wrapper definition | 6259905d7d847024c075da0c |
class MPISlave(MPIBatchWorker): <NEW_LINE> <INDENT> def __init__(self, estimator, scorer, fit_params): <NEW_LINE> <INDENT> super(MPISlave, self).__init__(estimator, scorer, fit_params) <NEW_LINE> <DEDENT> def _run_grid_search(self): <NEW_LINE> <INDENT> self._data_X, self._data_y = comm.bcast(None, root=0) <NEW_LINE> work_batch = comm.scatter(None, root=0) <NEW_LINE> results = self.process_batch(work_batch) <NEW_LINE> comm.gather(results, root=0) <NEW_LINE> <DEDENT> def _run_train_test(self): <NEW_LINE> <INDENT> self._data_X, self._data_y = comm.bcast(None, root=0) <NEW_LINE> work_item = comm.recv(None, source=0, tag=MPI_TAG_TRAIN_TEST_DATA) <NEW_LINE> fold_id = work_item[0] <NEW_LINE> if fold_id == MPI_MSG_TERMINATE: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> LOG.debug("Node %d is running testing for fold %d", comm_rank, fold_id) <NEW_LINE> test_results = self.process_batch([work_item]) <NEW_LINE> comm.send((fold_id, test_results[0]['score']), dest=0, tag=MPI_TAG_RESULT) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> task_desc = self._task_desc <NEW_LINE> while True: <NEW_LINE> <INDENT> comm.Bcast([task_desc, MPI.INT], root=0) <NEW_LINE> if task_desc[1] == MPI_MSG_TERMINATE: <NEW_LINE> <INDENT> LOG.debug("Node %d received terminate message", comm_rank) <NEW_LINE> return <NEW_LINE> <DEDENT> if task_desc[1] == MPI_MSG_CV: <NEW_LINE> <INDENT> self._run_grid_search() <NEW_LINE> <DEDENT> elif task_desc[1] == MPI_MSG_TEST: <NEW_LINE> <INDENT> self._run_train_test() <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('unknown task with id %d' % task_desc[1]) <NEW_LINE> <DEDENT> <DEDENT> LOG.debug("Node %d is terminating", comm_rank) | Receives task from root node and sends results back | 6259905d9c8ee82313040ca7 |
class CT_HdrFtr(BaseOxmlElement): <NEW_LINE> <INDENT> p = ZeroOrMore('w:p', successors=()) <NEW_LINE> tbl = ZeroOrMore('w:tbl', successors=()) | `w:hdr` and `w:ftr`, the root element for header and footer part respectively | 6259905dac7a0e7691f73b1c |
class UserProfileManager(BaseUserManager): <NEW_LINE> <INDENT> def create_user(self, email, name, password=None): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError('user must have an email address') <NEW_LINE> <DEDENT> email = self.normalize_email(email) <NEW_LINE> user = self.model(email=email, name=name) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_superuser(self, email, name, password): <NEW_LINE> <INDENT> user = self.create_user(email, name, password) <NEW_LINE> user.is_superuser = True <NEW_LINE> user.is_staff = True <NEW_LINE> user.save(using = self._db) <NEW_LINE> return user | Manager for USER PROFILES | 6259905d4f6381625f199fbf |
class SettingsTestCase(ModoTestCase): <NEW_LINE> <INDENT> def test_get_settings(self): <NEW_LINE> <INDENT> url = reverse("core:parameters") <NEW_LINE> response = self.ajax_get(url) <NEW_LINE> for app in ["core", "admin", "limits"]: <NEW_LINE> <INDENT> self.assertIn('data-app="{}"'.format(app), response["content"]) <NEW_LINE> <DEDENT> <DEDENT> def test_save_settings(self): <NEW_LINE> <INDENT> url = reverse("core:parameters") <NEW_LINE> settings = SETTINGS_SAMPLE.copy() <NEW_LINE> response = self.client.post(url, settings, format="json") <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> settings["core-rounds_number"] = "" <NEW_LINE> response = self.client.post(url, settings, format="json") <NEW_LINE> self.assertEqual(response.status_code, 400) <NEW_LINE> compare(response.json(), { "form_errors": {"rounds_number": ["This field is required."]}, "prefix": "core" }) | Settings tests. | 6259905d460517430c432b6f |
class Position: <NEW_LINE> <INDENT> _CHRONO = 0 <NEW_LINE> @classmethod <NEW_LINE> def _Get_id(cls): <NEW_LINE> <INDENT> cls._CHRONO += 1 <NEW_LINE> return cls._CHRONO <NEW_LINE> <DEDENT> def __init__(self, x, y): <NEW_LINE> <INDENT> self._id = self._Get_id() <NEW_LINE> self._position = (x,y) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<Position #{self.id} ({self.x},{self.y})>" <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @property <NEW_LINE> def position(self)->tuple: <NEW_LINE> <INDENT> return self._position <NEW_LINE> <DEDENT> @property <NEW_LINE> def x(self)->float: <NEW_LINE> <INDENT> return self._position[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def y(self)->float: <NEW_LINE> <INDENT> return self._position[1] <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, Position): <NEW_LINE> <INDENT> return self.id == other.id | Class defining a position for a RoadItem object | 6259905d32920d7e50bc7680
class ListProject(CommandResource): <NEW_LINE> <INDENT> cmd_columns = _COMMAND_COLUMNS <NEW_LINE> http_resource = _HTTP_RESOURCE <NEW_LINE> pk_column = _PK_COLUMN <NEW_LINE> @staticmethod <NEW_LINE> def add_known_arguments(parser): <NEW_LINE> <INDENT> return ListCommand.add_args(parser) | List all the RBAC Projects | 6259905d45492302aabfdb13 |
class RawPropertiesEndpoints(BasePropertiesEndpoints): <NEW_LINE> <INDENT> def __init__(self, host, port, account_id, auth_token, version=DEFAULT_API_VERSION, secure=SECURE, **kwargs): <NEW_LINE> <INDENT> super(RawPropertiesEndpoints, self).__init__(host, port, account_id, auth_token, version, secure, **kwargs) <NEW_LINE> self.name = 'raw-properties' <NEW_LINE> <DEDENT> def delete(self, id_): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def update(self, id_, modifier): <NEW_LINE> <INDENT> raise NotImplementedError | RawProperties endpoints.
Args:
host (str): Exabyte API hostname.
port (int): Exabyte API port number.
account_id (str): account ID.
auth_token (str): authentication token.
version (str): Exabyte API version.
secure (bool): whether to use secure http protocol (https vs http).
kwargs (dict): a dictionary of HTTP session options.
timeout (int): session timeout in seconds.
Attributes:
name (str): endpoint name.
user_id (str): user ID.
auth_token (str): authentication token.
headers (dict): default HTTP headers. | 6259905d7d43ff2487427f2d |
class NodeType(type): <NEW_LINE> <INDENT> def __new__(cls, name, bases, attrs): <NEW_LINE> <INDENT> newslots = [] <NEW_LINE> assert len(bases) == 1, 'multiple inheritance not allowed' <NEW_LINE> for attr in 'fields', 'attributes': <NEW_LINE> <INDENT> names = attrs.get(attr, ()) <NEW_LINE> storage = [] <NEW_LINE> storage.extend(getattr(bases[0], attr, ())) <NEW_LINE> storage.extend(names) <NEW_LINE> attrs[attr] = tuple(storage) <NEW_LINE> newslots.extend(names) <NEW_LINE> <DEDENT> attrs['__slots__'] = newslots <NEW_LINE> attrs.setdefault('abstract', False) <NEW_LINE> return type.__new__(cls, name, bases, attrs) | A metaclass for nodes that handles field and attribute inheritance. | 6259905d63d6d428bbee3da5 |
class TagAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ['id', 'name', 'status_google', 'status_yandex', 'status_instagram'] | Class to display the tags in the admin panel.
Attributes:
list_display: Set which fields are displayed on the change list page
of the admin. | 6259905d4428ac0f6e659b78 |
class TensorDataset(DatasetSource): <NEW_LINE> <INDENT> def __init__(self, tensors): <NEW_LINE> <INDENT> super(TensorDataset, self).__init__() <NEW_LINE> with ops.name_scope("tensors"): <NEW_LINE> <INDENT> tensors = nest.pack_sequence_as(tensors, [ sparse_tensor_lib.SparseTensor.from_value(t) if sparse_tensor_lib.is_sparse(t) else ops.convert_to_tensor( t, name="component_%d" % i) for i, t in enumerate(nest.flatten(tensors)) ]) <NEW_LINE> <DEDENT> self._tensors = sparse.serialize_sparse_tensors(tensors) <NEW_LINE> output_classes = sparse.get_classes(tensors) <NEW_LINE> output_shapes = nest.pack_sequence_as( tensors, [t.get_shape() for t in nest.flatten(tensors)]) <NEW_LINE> output_types = nest.pack_sequence_as( tensors, [t.dtype for t in nest.flatten(tensors)]) <NEW_LINE> self._structure = structure_lib.convert_legacy_structure( output_types, output_shapes, output_classes) <NEW_LINE> <DEDENT> def _as_variant_tensor(self): <NEW_LINE> <INDENT> return gen_dataset_ops.tensor_dataset( nest.flatten(self._tensors), output_shapes=self._structure._flat_shapes) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _element_structure(self): <NEW_LINE> <INDENT> return self._structure | A `Dataset` with a single element, viz. a nested structure of tensors. | 6259905dcb5e8a47e493cca3 |
class PurchaseLineTax(ModelSQL): <NEW_LINE> <INDENT> __name__ = 'purchase.line-account.tax' <NEW_LINE> _table = 'purchase_line_account_tax' <NEW_LINE> line = fields.Many2One('purchase.line', 'Purchase Line', ondelete='CASCADE', select=True, required=True, domain=[('type', '=', 'line')]) <NEW_LINE> tax = fields.Many2One('account.tax', 'Tax', ondelete='RESTRICT', select=True, required=True, domain=[('parent', '=', None)]) | Purchase Line - Tax | 6259905d7047854f463409fb |
class BasePostProcessor(object): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> def __init__(self, dedisperser): <NEW_LINE> <INDENT> self._dedisperser = dedisperser <NEW_LINE> <DEDENT> @property <NEW_LINE> def dedisperser(self): <NEW_LINE> <INDENT> return self._dedisperser <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def __call__(self, itree, trigger_set): <NEW_LINE> <INDENT> return [] | Abstract base class for post processors.
All post processors must inherit from this class.
When sub-classing, only keyword arguments may be added to the constructor. | 6259905d004d5f362081fb0c |
class LOOT_353: <NEW_LINE> <INDENT> pass | Psionic Probe | 6259905d8e7ae83300eea6c9 |
class DocumentTokenizer(object): <NEW_LINE> <INDENT> def __init__(self, sent_tokenizer=None, word_tokenizer=None): <NEW_LINE> <INDENT> if not sent_tokenizer: <NEW_LINE> <INDENT> self.sent_tokenizer = DefaultSentenceTokenizer() <NEW_LINE> <DEDENT> if not word_tokenizer: <NEW_LINE> <INDENT> self.word_tokenizer = TreebankWordTokenizer() <NEW_LINE> <DEDENT> <DEDENT> def tokenize_doc(self, doc): <NEW_LINE> <INDENT> tokenized_sents_and_spans = [] <NEW_LINE> try: <NEW_LINE> <INDENT> sentence_spans = self.sent_tokenizer.tokenize_sents(doc) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise e <NEW_LINE> return [] <NEW_LINE> <DEDENT> for start, end in sentence_spans: <NEW_LINE> <INDENT> sentence = doc[start: end] <NEW_LINE> tokenized_sents_and_spans.append(self.tokenize_sent(sentence, start)) <NEW_LINE> <DEDENT> return tokenized_sents_and_spans <NEW_LINE> <DEDENT> def tokenize_sent(self, sentence, offset): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> tokens = self.word_tokenizer.tokenize(sentence) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Word tokenizing failed") <NEW_LINE> print(sentence) <NEW_LINE> raise e <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> spans = self.word_tokenizer.span_tokenize(sentence) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Span tokenizing failed") <NEW_LINE> print(sentence) <NEW_LINE> raise e <NEW_LINE> <DEDENT> tokens_and_spans = [] <NEW_LINE> for token, span in zip(tokens, spans): <NEW_LINE> <INDENT> start, end = span <NEW_LINE> true_start = start + offset <NEW_LINE> true_end = end + offset <NEW_LINE> tokens_and_spans.append((token, (true_start, true_end))) <NEW_LINE> <DEDENT> return tokens_and_spans | Used to split a document into sentences and tokens.
Returns a list of lists TODO | 6259905d16aa5153ce401b1f |
class AROrderSelectionResults(object): <NEW_LINE> <INDENT> def __init__(self, model, ics, trend, seasonal, period): <NEW_LINE> <INDENT> self._model = model <NEW_LINE> self._ics = ics <NEW_LINE> self._trend = trend <NEW_LINE> self._seasonal = seasonal <NEW_LINE> self._period = period <NEW_LINE> aic = sorted(ics, key=lambda r: r[1][0]) <NEW_LINE> self._aic = dict([(key, val[0]) for key, val in aic]) <NEW_LINE> bic = sorted(ics, key=lambda r: r[1][1]) <NEW_LINE> self._bic = dict([(key, val[1]) for key, val in bic]) <NEW_LINE> hqic = sorted(ics, key=lambda r: r[1][2]) <NEW_LINE> self._hqic = dict([(key, val[2]) for key, val in hqic]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def model(self): <NEW_LINE> <INDENT> return self._model <NEW_LINE> <DEDENT> @property <NEW_LINE> def seasonal(self): <NEW_LINE> <INDENT> return self._seasonal <NEW_LINE> <DEDENT> @property <NEW_LINE> def trend(self): <NEW_LINE> <INDENT> return self._trend <NEW_LINE> <DEDENT> @property <NEW_LINE> def period(self): <NEW_LINE> <INDENT> return self._period <NEW_LINE> <DEDENT> @property <NEW_LINE> def aic(self): <NEW_LINE> <INDENT> return self._aic <NEW_LINE> <DEDENT> @property <NEW_LINE> def bic(self): <NEW_LINE> <INDENT> return self._bic <NEW_LINE> <DEDENT> @property <NEW_LINE> def hqic(self): <NEW_LINE> <INDENT> return self._hqic <NEW_LINE> <DEDENT> @property <NEW_LINE> def ar_lags(self): <NEW_LINE> <INDENT> return self._model.ar_lags | Results from an AR order selection
Contains the information criteria for all fitted model orders. | 6259905d55399d3f05627b5b |
class GUITestResult(unittest.TestResult): <NEW_LINE> <INDENT> def __init__(self, callback): <NEW_LINE> <INDENT> unittest.TestResult.__init__(self) <NEW_LINE> self.callback = callback <NEW_LINE> <DEDENT> def addError(self, test, err): <NEW_LINE> <INDENT> unittest.TestResult.addError(self, test, err) <NEW_LINE> self.callback.notifyTestErrored(test, err) <NEW_LINE> <DEDENT> def addFailure(self, test, err): <NEW_LINE> <INDENT> unittest.TestResult.addFailure(self, test, err) <NEW_LINE> self.callback.notifyTestFailed(test, err) <NEW_LINE> <DEDENT> def stopTest(self, test): <NEW_LINE> <INDENT> unittest.TestResult.stopTest(self, test) <NEW_LINE> self.callback.notifyTestFinished(test) <NEW_LINE> <DEDENT> def startTest(self, test): <NEW_LINE> <INDENT> unittest.TestResult.startTest(self, test) <NEW_LINE> self.callback.notifyTestStarted(test) | A TestResult that makes callbacks to its associated GUI TestRunner.
Used by BaseGUITestRunner. Need not be created directly. | 6259905d498bea3a75a5911b |
class RawSql(Sql): <NEW_LINE> <INDENT> def __init__(self, text, data = {}): <NEW_LINE> <INDENT> self.text = text <NEW_LINE> Sql.__init__(self, data) <NEW_LINE> <DEDENT> def to_raw(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return RawSql(self.text, copy.copy(self.data)) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.text | Simply saves a string, and also some data. This is in contrast with `Sql`, which may save
the query in a more abstract way. | 6259905d8a43f66fc4bf37c9 |
class AsynRecord(EpicsRecordDeviceCommonAll): <NEW_LINE> <INDENT> ascii_output = Component(EpicsSignal, ".AOUT", kind="hinted") <NEW_LINE> binary_output = Component(EpicsSignal, ".BOUT", kind="normal") <NEW_LINE> end_of_message_reason = Component(EpicsSignalRO, ".EOMR", kind="config") <NEW_LINE> input_format = Component(EpicsSignalRO, ".IFMT", kind="config") <NEW_LINE> input_maxlength = Component(EpicsSignal, ".IMAX", kind="config") <NEW_LINE> interface = Component(EpicsSignal, ".IFACE", kind="config") <NEW_LINE> number_bytes_actually_read = Component(EpicsSignalRO, ".NRRD", kind="normal") <NEW_LINE> number_bytes_actually_written = Component(EpicsSignalRO, ".NAWT", kind="normal") <NEW_LINE> number_bytes_to_read = Component(EpicsSignal, ".NORD", kind="config") <NEW_LINE> number_bytes_to_write = Component(EpicsSignal, ".NOWT", kind="config") <NEW_LINE> octet_is_valid = Component(EpicsSignalRO, ".OCTETIV", kind="normal") <NEW_LINE> output_format = Component(EpicsSignalRO, ".OFMT", kind="config") <NEW_LINE> output_maxlength = Component(EpicsSignal, ".OMAX", kind="config") <NEW_LINE> terminator_input = Component(EpicsSignal, ".IEOS", kind="config") <NEW_LINE> terminator_output = Component(EpicsSignal, ".OEOS", kind="config") <NEW_LINE> timeout = Component(EpicsSignal, ".TMOT", kind="config") <NEW_LINE> transaction_mode = Component(EpicsSignal, ".TMOD", kind="config") <NEW_LINE> translated_input = Component(EpicsSignal, ".TINP", kind="config") | EPICS asyn record support in ophyd
.. index:: Ophyd Device; synApps AsynRecord
:see: https://epics.anl.gov/modules/soft/asyn/R4-36/asynRecord.html
:see: https://github.com/epics-modules/asyn
:see: https://epics.anl.gov/modules/soft/asyn/ | 6259905d3cc13d1c6d466d7c |
class Text: <NEW_LINE> <INDENT> def __init__(self, content, config: Config = DefaultConfig()): <NEW_LINE> <INDENT> self.tokenizer: TokenizerBase = utils.load_class(config.tokenizer_class)(config) <NEW_LINE> self.spellcheck: BoSpell = BoSpell(config=config) <NEW_LINE> self.content: str = content <NEW_LINE> self.tokens: List = self.tokenizer.tokenize(content) <NEW_LINE> self._suggestions = {} <NEW_LINE> self._corrected = "" <NEW_LINE> <DEDENT> @property <NEW_LINE> def suggestions(self): <NEW_LINE> <INDENT> if self._suggestions: <NEW_LINE> <INDENT> return self._suggestions <NEW_LINE> <DEDENT> for idx, token in enumerate(self.tokens): <NEW_LINE> <INDENT> if self.spellcheck.is_error(token): <NEW_LINE> <INDENT> suggestions = self.spellcheck.candidates(token.text, 5) <NEW_LINE> span = Span(start=token.start, end=token.start + token.len) <NEW_LINE> self._suggestions[idx] = Suggestions(candidates=suggestions, span=span) <NEW_LINE> <DEDENT> <DEDENT> return self._suggestions <NEW_LINE> <DEDENT> @property <NEW_LINE> def corrected(self): <NEW_LINE> <INDENT> if self._corrected: <NEW_LINE> <INDENT> return self._corrected <NEW_LINE> <DEDENT> for idx, token in enumerate(self.tokens): <NEW_LINE> <INDENT> if idx in self.suggestions: <NEW_LINE> <INDENT> self._corrected += self.suggestions[idx].candidates[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._corrected += token.text <NEW_LINE> <DEDENT> <DEDENT> return self._corrected | Class Text represents the corrected text. | 6259905d379a373c97d9a660 |
class PhotoSizeEmpty(TLObject): <NEW_LINE> <INDENT> __slots__: List[str] = ["type"] <NEW_LINE> ID = 0xe17e23c <NEW_LINE> QUALNAME = "types.PhotoSizeEmpty" <NEW_LINE> def __init__(self, *, type: str) -> None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(data: BytesIO, *args: Any) -> "PhotoSizeEmpty": <NEW_LINE> <INDENT> type = String.read(data) <NEW_LINE> return PhotoSizeEmpty(type=type) <NEW_LINE> <DEDENT> def write(self) -> bytes: <NEW_LINE> <INDENT> data = BytesIO() <NEW_LINE> data.write(Int(self.ID, False)) <NEW_LINE> data.write(String(self.type)) <NEW_LINE> return data.getvalue() | This object is a constructor of the base type :obj:`~pyrogram.raw.base.PhotoSize`.
Details:
- Layer: ``122``
- ID: ``0xe17e23c``
Parameters:
type: ``str`` | 6259905d7d847024c075da0e |
class GroupsTable(groups_tables.GroupsTable): <NEW_LINE> <INDENT> roles = tables.Column( lambda obj: ", ".join(getattr(obj, 'roles', [])), verbose_name=_('Roles'), form_field=forms.CharField( widget=forms.Textarea(attrs={'rows': 4}), required=False)) <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> name = "groupstable" <NEW_LINE> verbose_name = _("Groups") | Display groups of the project. | 6259905db57a9660fecd30b7 |
class NumberedListItem(elements._BlockElementContainingBlock_PrefixGrouped): <NEW_LINE> <INDENT> HTML_OUTER_TAGS = ('<ol>', '</ol>') <NEW_LINE> HTML_TAGS = ('<li>', '</li>') | A numbered list item::
#. List item
1. List item | 6259905da219f33f346c7e41 |
class LoadFeedError(TweetFeederError): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> super(LoadFeedError, self).__init__(type(self).__name__, msg) | Raised when you fail to load from the tweet feed. | 6259905dd7e4931a7ef3d6a2 |
class DDeltaStar(metric.Metric): <NEW_LINE> <INDENT> def _compute(self, tolerance, verbose=False): <NEW_LINE> <INDENT> del tolerance <NEW_LINE> assert self.env.q_values is not None, 'Q-Values have not been computed.' <NEW_LINE> self.metric = np.max(np.abs(self.env.q_values[:, None, :] - self.env.q_values[None, :, :]), axis=-1) <NEW_LINE> self.statistics = metric.Statistics(0., 0., 0, 0.) | Implementation of the d_{\Delta^*} metric. | 6259905d009cb60464d02b71 |
class CurrencyConverter: <NEW_LINE> <INDENT> ACCURACY = 2 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._usd_rate = requests.get( 'https://www.cbr-xml-daily.ru/daily_json.js' ).json()['Valute']['USD']['Value'] <NEW_LINE> <DEDENT> def calculate(self, usd_amount: float) -> float: <NEW_LINE> <INDENT> return round(usd_amount*self._usd_rate, self.ACCURACY) | USD -> RUB currency converter
The current Central Bank of Russia (CBR) exchange rate is loaded at initialization | 6259905d38b623060ffaa36d
class Bet: <NEW_LINE> <INDENT> def __init__(self, outcome, amount): <NEW_LINE> <INDENT> self.outcome = outcome <NEW_LINE> self.amount = amount <NEW_LINE> <DEDENT> def win_amount(self): <NEW_LINE> <INDENT> return self.outcome.win_amount(self.amount) + self.amount <NEW_LINE> <DEDENT> def lose_amount(self): <NEW_LINE> <INDENT> return self.amount <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.outcome) | A Bet is an amount that a player has wagered on an outcome
Args:
outcome (str): outcome the bet is made on
amount (str): how much money is bet on the outcome | 6259905d435de62698e9d441 |
class struct_stream(bin_methods): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def parse(cls, f: BinaryIO) -> Any: <NEW_LINE> <INDENT> return cls(*struct.unpack(cls.PACK, f.read(struct.calcsize(cls.PACK)))) <NEW_LINE> <DEDENT> def stream(self, f): <NEW_LINE> <INDENT> f.write(struct.pack(self.PACK, self)) | Create a class that can parse and stream itself based on a struct.pack template string. | 6259905d4e4d562566373a44 |
class bugenhagenAorticBR(AorticBaroreceptor): <NEW_LINE> <INDENT> solutionMemoryFields = ["HR_p","HR_s","delta"] <NEW_LINE> solutionMemoryFieldsToSave = ["HR_p","HR_s","delta"] <NEW_LINE> solutionMemoryFields.extend(AorticBaroreceptor.solutionMemoryFields) <NEW_LINE> solutionMemoryFieldsToSave.extend(AorticBaroreceptor.solutionMemoryFieldsToSave) <NEW_LINE> def __init__(self, BaroDict): <NEW_LINE> <INDENT> super(bugenhagenAorticBR, self).__init__(BaroDict) <NEW_LINE> self.modelName = 'bugenhagenAorticBR' <NEW_LINE> self.baroreceptorCellML = cellMLBaroreflexModels.bugenhagenAorticBR <NEW_LINE> self.HR_p = np.zeros(0) <NEW_LINE> self.HR_s = np.zeros(0) <NEW_LINE> self.delta = np.zeros(0) <NEW_LINE> self.update(BaroDict) <NEW_LINE> <DEDENT> def initializeForSimulation(self, vascularNetwork): <NEW_LINE> <INDENT> super(bugenhagenAorticBR, self).initializeForSimulation(vascularNetwork) <NEW_LINE> self.n[0] = self.algebraic[0][3] <NEW_LINE> self.Tsym[0] = self.algebraic[0][7] <NEW_LINE> self.Tparasym[0] = self.algebraic[0][8] <NEW_LINE> self.c_nor[0] = self.states[0][3] <NEW_LINE> self.c_ach[0] = self.states[0][4] <NEW_LINE> self.delta[0] = self.algebraic[0][0] <NEW_LINE> self.HR_p[0] = self.algebraic[0][10] <NEW_LINE> self.HR_s[0] = self.algebraic[0][9] <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> super(bugenhagenAorticBR, self).__call__() <NEW_LINE> n = self.currentMemoryIndex[0] <NEW_LINE> self.n[n + 1] = self.algebraic[-1][3] <NEW_LINE> self.Tsym[n + 1] = self.algebraic[-1][7] <NEW_LINE> self.Tparasym[n + 1] = self.algebraic[-1][8] <NEW_LINE> self.c_nor[n + 1] = self.states[-1][3] <NEW_LINE> self.c_ach[n + 1] = self.states[-1][4] <NEW_LINE> self.delta[n + 1] = self.algebraic[-1][0] <NEW_LINE> self.HR_p[n + 1] = self.algebraic[-1][10] <NEW_LINE> self.HR_s[n + 1] = self.algebraic[-1][9] | for models of the AorticBaroreceptors
Aortic Baroreceptor models with strain input and period of the heart cycle as output | 6259905d67a9b606de5475bf |
class Heuristic3Bot(MinimaxBot): <NEW_LINE> <INDENT> def __init__(self, number, time_limit=4, name=None): <NEW_LINE> <INDENT> if name is None: <NEW_LINE> <INDENT> name = "Heuristic3 Minimax" <NEW_LINE> <DEDENT> MinimaxBot.__init__(self, number, time_limit, name=name) <NEW_LINE> self.player_type = 'h2 minimax' <NEW_LINE> <DEDENT> def compute_score(self, board): <NEW_LINE> <INDENT> G_CENTER = 10 <NEW_LINE> G_CORNER = 8 <NEW_LINE> G_EDGE = 6 <NEW_LINE> L_CENTER = 3 <NEW_LINE> L_CORNER = 2 <NEW_LINE> L_EDGE = 1 <NEW_LINE> GLOBAL_ATTACKING_SEQUENCE_WEIGHT = 5 <NEW_LINE> LOCAL_ATTACKING_SEQUENCE_WEIGHT = 0.5 <NEW_LINE> global_capture_weight = numpy.array([G_CORNER, G_EDGE, G_CORNER, G_EDGE, G_CENTER, G_EDGE, G_CORNER, G_EDGE, G_CORNER]) <NEW_LINE> global_adjustment_weights = numpy.array([1.2, 1.0, 1.2, 1.0, 1.4, 1.0, 1.2, 1.0, 1.2]) <NEW_LINE> local_capture_weight = numpy.array([L_CORNER, L_EDGE, L_CORNER, L_EDGE, L_CENTER, L_EDGE, L_CORNER, L_EDGE, L_CORNER]) <NEW_LINE> our_capture_vector = board.get_capture_vector(Board.X) <NEW_LINE> opponent_capture_vector = board.get_capture_vector(Board.O) <NEW_LINE> score = 0 <NEW_LINE> score += numpy.dot(our_capture_vector, global_capture_weight) - numpy.dot(opponent_capture_vector, global_capture_weight) <NEW_LINE> attacking_sequences = board.count_attacking_sequences(Board.X) <NEW_LINE> opponent_attacking_sequences = board.count_attacking_sequences(Board.O) <NEW_LINE> score += GLOBAL_ATTACKING_SEQUENCE_WEIGHT*(attacking_sequences - opponent_attacking_sequences) <NEW_LINE> for row in [0, 1, 2]: <NEW_LINE> <INDENT> for col in [0, 1, 2]: <NEW_LINE> <INDENT> local_board = board.board[row][col] <NEW_LINE> if not local_board.board_completed: <NEW_LINE> <INDENT> our_capture_vector = local_board.get_capture_vector(Board.X) <NEW_LINE> opponent_capture_vector = local_board.get_capture_vector(Board.O) <NEW_LINE> index = row*3 + col <NEW_LINE> adjusted_weight = local_capture_weight * global_adjustment_weights[index] <NEW_LINE> score += numpy.dot(our_capture_vector, adjusted_weight) - numpy.dot(opponent_capture_vector, adjusted_weight) <NEW_LINE> l_attacking_sequences = local_board.count_attacking_sequences(Board.X) <NEW_LINE> l_opponent_attacking_sequences = local_board.count_attacking_sequences(Board.O) <NEW_LINE> score += LOCAL_ATTACKING_SEQUENCE_WEIGHT * (l_attacking_sequences - l_opponent_attacking_sequences) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return score | Minimax bot that plays using the H3 Heuristic
| 6259905d0a50d4780f7068dd |
class Solution2: <NEW_LINE> <INDENT> def combinationSum2(self, candidates: List[int], target: int) -> List[List[int]]: <NEW_LINE> <INDENT> ans = [] <NEW_LINE> cand_counter = Counter(candidates) <NEW_LINE> candidates = list(set(candidates)) <NEW_LINE> n = len(candidates) - 1 <NEW_LINE> def comb_sum2(sol: List[int], idx, same_cnt, current, target): <NEW_LINE> <INDENT> if current == target: <NEW_LINE> <INDENT> ans.append(sol) <NEW_LINE> return <NEW_LINE> <DEDENT> if idx > n or current > target: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if same_cnt < cand_counter[candidates[idx]]: <NEW_LINE> <INDENT> comb_sum2(sol + [candidates[idx]], idx, same_cnt + 1, current + candidates[idx], target) <NEW_LINE> <DEDENT> comb_sum2(sol, idx + 1, 0, current, target) <NEW_LINE> <DEDENT> comb_sum2([], 0, 0, 0, target) <NEW_LINE> return ans | Run time: 108ms
Slightly Faster than Solution1
Time Complexity: O(n**2) | 6259905d63d6d428bbee3da6 |
class PairwiseScore(models.Model): <NEW_LINE> <INDENT> holder = models.ForeignKey(Person, related_name='holder') <NEW_LINE> partner = models.ForeignKey(Person, related_name='partner') <NEW_LINE> score = models.DecimalField(max_digits=16, decimal_places=15) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return 'Score(%s, %s) = %f' % (self.holder.name, self.partner.name, self.score) | Score between a pair of Person objects
The score is higher the more recently the pair have had lunch | 6259905d63b5f9789fe867af |
class StateDeclined(base.State): <NEW_LINE> <INDENT> name = "Declined" <NEW_LINE> def enter_state(self, request, application): <NEW_LINE> <INDENT> application.decline() <NEW_LINE> <DEDENT> def view(self, request, application, label, roles, actions): <NEW_LINE> <INDENT> if label is None and 'is_applicant' in roles and 'is_admin' not in roles: <NEW_LINE> <INDENT> if 'reopen' in request.POST: <NEW_LINE> <INDENT> return 'reopen' <NEW_LINE> <DEDENT> return render( template_name='kgapplications/common_declined.html', context={ 'application': application, 'actions': actions, 'roles': roles }, request=request) <NEW_LINE> <DEDENT> return super(StateDeclined, self).view( request, application, label, roles, actions) | This application was declined. | 6259905d442bda511e95d878
class DeleteWordListInputSet(InputSet): <NEW_LINE> <INDENT> def set_APIKey(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'APIKey', value) <NEW_LINE> <DEDENT> def set_Password(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Password', value) <NEW_LINE> <DEDENT> def set_Username(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Username', value) <NEW_LINE> <DEDENT> def set_WordList(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'WordList', value) | An InputSet with methods appropriate for specifying the inputs to the DeleteWordList
Choreo. The InputSet object is used to specify input parameters when executing this Choreo. | 6259905dbaa26c4b54d508e3 |
class Character(DeviceType): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return "char" | Type of Character devices. | 6259905d627d3e7fe0e084c8 |
class GoogleBPEVocab(Vocab): <NEW_LINE> <INDENT> def __init__(self, max_size, vocab_file=None): <NEW_LINE> <INDENT> import sentencepiece as spm <NEW_LINE> self.spm = spm <NEW_LINE> self.max_size = max_size <NEW_LINE> self.vocab_file = vocab_file <NEW_LINE> self.sp = spm.SentencePieceProcessor() <NEW_LINE> <DEDENT> def count_file(self, path, verbose=False, add_eos=False): <NEW_LINE> <INDENT> self.spm.SentencePieceTrainer.Train( f'--input={self.vocab_file} --model_prefix=m --vocab_size={self.max_size} --model_type=bpe') <NEW_LINE> <DEDENT> def build_vocab(self): <NEW_LINE> <INDENT> if self.vocab_file: <NEW_LINE> <INDENT> self.sp.Load(self.vocab_file) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def encode_file(self, path, ordered=False, verbose=False, add_eos=True, add_double_eos=False) -> torch.LongTensor: <NEW_LINE> <INDENT> with open(path, encoding='utf-8') as f: <NEW_LINE> <INDENT> return torch.LongTensor(self.sp.EncodeAsIds(f.read())) | Don't use this until this issue is fixed.
https://github.com/google/sentencepiece/issues/318 | 6259905da17c0f6771d5d6c1 |
class DefensiveAgent(RAgent): <NEW_LINE> <INDENT> def registerInitialState(self, gameState): <NEW_LINE> <INDENT> RAgent.registerInitialState(self, gameState) <NEW_LINE> self.percepter=JointParticleFilter() <NEW_LINE> self.percepter.initialize(gameState,self) <NEW_LINE> <DEDENT> def getFeatures(self, gameState, action): <NEW_LINE> <INDENT> features = util.Counter() <NEW_LINE> successor = self.getSuccessor(gameState, action) <NEW_LINE> foodList = self.getFood(successor).asList() <NEW_LINE> myFoodList = self.getFoodYouAreDefending(gameState).asList() <NEW_LINE> features['successorScore'] = len(myFoodList) <NEW_LINE> myState = successor.getAgentState(self.index) <NEW_LINE> myPos = myState.getPosition() <NEW_LINE> observation = self.getCurrentObservation() <NEW_LINE> posOfOtherTeam = [observation.getAgentPosition(i) for i in range(observation.getNumAgents()) if i not in gameState.getBlueTeamIndices()] <NEW_LINE> features['score']= self.getScore(gameState) <NEW_LINE> features['onDefense'] = 1 <NEW_LINE> if myState.isPacman: features['onDefense'] = 0 <NEW_LINE> enemies = [successor.getAgentState(i) for i in self.getOpponents(successor)] <NEW_LINE> invaders = [a for a in enemies if a.isPacman and a.getPosition() != None] <NEW_LINE> features['numInvaders'] = len(invaders) <NEW_LINE> dists=[] <NEW_LINE> for a in enemies: <NEW_LINE> <INDENT> if(a.isPacman and a.getPosition() != None): <NEW_LINE> <INDENT> dists.append(self.distancer.getDistance(myPos, a.getPosition())) <NEW_LINE> <DEDENT> <DEDENT> self.percepter.observeState(gameState) <NEW_LINE> expectedPosPair=self.percepter.bestChoice() <NEW_LINE> for i in expectedPosPair: <NEW_LINE> <INDENT> self.debugDraw([i],[1,0,0],clear=True) <NEW_LINE> dists.append(self.distancer.getDistance(myPos,i)) <NEW_LINE> <DEDENT> if(len(dists)==0): <NEW_LINE> <INDENT> dists.append(1) <NEW_LINE> <DEDENT> features['invaderDistance'] = min(dists) <NEW_LINE> features['enemyOneDistance']=dists[0] <NEW_LINE> features['enemyTwoDistance']=dists[1] <NEW_LINE> return features <NEW_LINE> <DEDENT> def getWeights(self, gameState, action): <NEW_LINE> <INDENT> return {'numInvaders': -100, 'onDefense': 300, 'invaderDistance': -100, 'stop': -100, 'reverse': -2} | A reflex agent that keeps its side Pacman-free. Again,
this is to give you an idea of what a defensive agent
could be like. It is not the best or only way to make
such an agent. | 6259905d45492302aabfdb16 |
class ScriptSettings(object): <NEW_LINE> <INDENT> def __init__(self, func_name: str): <NEW_LINE> <INDENT> self.func_name = func_name <NEW_LINE> self.env = os.environ['env'] <NEW_LINE> self.env_variables = self.create_env_vairables() <NEW_LINE> <DEDENT> def create_env_vairables(self) -> Dict[str, str]: <NEW_LINE> <INDENT> return { 'IOT_PREFIX': os.environ['IOT_PREFIX'], 'S3_PREFIX': os.environ['S3_PREFIX'], } | Settings for the files in scripts folder. | 6259905dcb5e8a47e493cca4 |
@tf_export("RandomShuffleQueue") <NEW_LINE> class RandomShuffleQueue(QueueBase): <NEW_LINE> <INDENT> def __init__(self, capacity, min_after_dequeue, dtypes, shapes=None, names=None, seed=None, shared_name=None, name="random_shuffle_queue"): <NEW_LINE> <INDENT> dtypes = _as_type_list(dtypes) <NEW_LINE> shapes = _as_shape_list(shapes, dtypes) <NEW_LINE> names = _as_name_list(names, dtypes) <NEW_LINE> seed1, seed2 = random_seed.get_seed(seed) <NEW_LINE> if seed1 is None and seed2 is None: <NEW_LINE> <INDENT> seed1, seed2 = 0, 0 <NEW_LINE> <DEDENT> elif seed is None and shared_name is not None: <NEW_LINE> <INDENT> string = (str(seed1) + shared_name).encode("utf-8") <NEW_LINE> seed2 = int(hashlib.md5(string).hexdigest()[:8], 16) & 0x7FFFFFFF <NEW_LINE> <DEDENT> queue_ref = gen_data_flow_ops._random_shuffle_queue_v2( component_types=dtypes, shapes=shapes, capacity=capacity, min_after_dequeue=min_after_dequeue, seed=seed1, seed2=seed2, shared_name=shared_name, name=name) <NEW_LINE> super(RandomShuffleQueue, self).__init__(dtypes, shapes, names, queue_ref) | A queue implementation that dequeues elements in a random order.
See @{tf.QueueBase} for a description of the methods on
this class.
@compatibility(eager)
Queues are not compatible with eager execution. Instead, please
use `tf.data` to get data into your model.
@end_compatibility | 6259905d21a7993f00c675aa |
@keys.assign(seq=seqs.LEFT_SQUARE_BRACKET, modes=_MODES_MOTION) <NEW_LINE> class ViGotoOpeningBracket(ViMotionDef): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> ViMotionDef.__init__(self, *args, **kwargs) <NEW_LINE> self.scroll_into_view = True <NEW_LINE> self.updates_xpos = True <NEW_LINE> self.input_parser = parser_def(command=inputs.vi_left_square_bracket, interactive_command=None, input_param=None, on_done=None, type=input_types.INMEDIATE) <NEW_LINE> <DEDENT> @property <NEW_LINE> def accept_input(self): <NEW_LINE> <INDENT> return self.inp == '' <NEW_LINE> <DEDENT> def accept(self, key): <NEW_LINE> <INDENT> translated = utils.translate_char(key) <NEW_LINE> assert len(translated) == 1, '`[` only accepts a single char' <NEW_LINE> self._inp = translated <NEW_LINE> return True <NEW_LINE> <DEDENT> def translate(self, state): <NEW_LINE> <INDENT> cmd = {} <NEW_LINE> cmd['motion'] = '_vi_left_square_bracket' <NEW_LINE> cmd['motion_args'] = {'char': self.inp, 'mode': state.mode, 'count': state.count, } <NEW_LINE> return cmd | Vim: `[` | 6259905d004d5f362081fb0d |
class TstClient(object): <NEW_LINE> <INDENT> def __init__(self, client, db): <NEW_LINE> <INDENT> self.client = client <NEW_LINE> self.db = db <NEW_LINE> <DEDENT> def get_valid_page(self, url): <NEW_LINE> <INDENT> response = self.client.get(url, follow_redirects=True) <NEW_LINE> assert response.status_code == 200, "GET %s returned %d" % (url, response.status_code) <NEW_LINE> assert response_has_no_errors(response), "GET %s returned an error" % url <NEW_LINE> return response <NEW_LINE> <DEDENT> def get_invalid_page(self, url, expected_error): <NEW_LINE> <INDENT> response = self.client.get(url, follow_redirects=True) <NEW_LINE> assert response.status_code == 200, "POST %s returned %d" % (url, response.status_code) <NEW_LINE> response_has_error = response_has_string(response, expected_error) <NEW_LINE> if not response_has_error: <NEW_LINE> <INDENT> print(response.data) <NEW_LINE> <DEDENT> assert response_has_error, "POST %s did not contain '%s' error" % (url, expected_error) <NEW_LINE> return response <NEW_LINE> <DEDENT> def post_valid_form(self, url, **kwargs): <NEW_LINE> <INDENT> response = self.client.post(url, data=kwargs, follow_redirects=True) <NEW_LINE> assert response.status_code == 200, "POST %s returned %d" % (url, response.status_code) <NEW_LINE> assert response_has_no_errors(response), "post_valid_form(%s) returned an error" % url <NEW_LINE> return response <NEW_LINE> <DEDENT> def post_invalid_form(self, url, expected_error, **kwargs): <NEW_LINE> <INDENT> response = self.client.post(url, data=kwargs, follow_redirects=True) <NEW_LINE> assert response.status_code == 200, "POST %s returned %d" % (url, response.status_code) <NEW_LINE> response_has_error = response_has_string(response, expected_error) <NEW_LINE> if not response_has_error: <NEW_LINE> <INDENT> print(response.data) <NEW_LINE> <DEDENT> assert response_has_error, "POST %s did not contain '%s' error" % (url, expected_error) <NEW_LINE> return response <NEW_LINE> <DEDENT> def login(self, **kwargs): <NEW_LINE> <INDENT> url = url_for('user.login') <NEW_LINE> return self.post_valid_form(url, **kwargs) <NEW_LINE> <DEDENT> def logout(self, **kwargs): <NEW_LINE> <INDENT> url = url_for('user.logout') <NEW_LINE> response = self.client.get(url, follow_redirects=True) <NEW_LINE> assert response.status_code == 200 | Utility class for tests | 6259905dd486a94d0ba2d605 |
class PasswordProperty(object): <NEW_LINE> <INDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> if instance: <NEW_LINE> <INDENT> return Password(instance) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> instance.salt = ''.join((chr(random.getrandbits(8)) for i in range(64))) <NEW_LINE> instance.password_sha512 = hashlib.sha512(value.encode('utf-8')+instance.salt).digest() | This is the password attribute of User.
When set, it writes to user.password_sha512 and user.salt.
When get, it returns a Password object. | 6259905d097d151d1a2c26ab |
class Mirror(Object): <NEW_LINE> <INDENT> def __init__(self, hash=None, mirrors=None, status=None): <NEW_LINE> <INDENT> self.hash = hash <NEW_LINE> self.mirrors = mirrors <NEW_LINE> self.status = status | Mirror or file replica settings.
Attributes:
hash (str):
mirrors (int): number of file replicas.
status (str): current file replica status. | 6259905d16aa5153ce401b21 |
class posture(object): <NEW_LINE> <INDENT> implements(IVocabularyFactory) <NEW_LINE> def __call__(self, context=None): <NEW_LINE> <INDENT> items = ( SimpleTerm(value='standing', title=_(u'standing')), SimpleTerm(value='crouching', title=_(u'crouching')), SimpleTerm(value='lying', title=_(u'lying')), SimpleTerm(value='sitting', title=_(u'sitting')), SimpleTerm(value='reclining', title=_(u'reclining')), ) <NEW_LINE> return SimpleVocabulary(items) | posture
| 6259905df548e778e596cbc8 |
class AttributesTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_version(self): <NEW_LINE> <INDENT> self.assertTrue( steenzout.object.version() == steenzout.object.__version__ ) | steenzout.object package test cases. | 6259905d4f6381625f199fc1 |
class AssessmentPartSearch(abc_assessment_authoring_searches.AssessmentPartSearch, osid_searches.OsidSearch): <NEW_LINE> <INDENT> @utilities.arguments_not_none <NEW_LINE> def search_among_assessment_parts(self, bank_ids): <NEW_LINE> <INDENT> raise errors.Unimplemented() <NEW_LINE> <DEDENT> @utilities.arguments_not_none <NEW_LINE> def order_assessment_part_results(self, assessment_part_search_order): <NEW_LINE> <INDENT> raise errors.Unimplemented() <NEW_LINE> <DEDENT> @utilities.arguments_not_none <NEW_LINE> def get_assessment_part_search_record(self, assessment_part_search_record_type): <NEW_LINE> <INDENT> raise errors.Unimplemented() | The search interface for governing assessment part searches. | 6259905d8da39b475be04824 |
class ListBudgetsAsyncPager: <NEW_LINE> <INDENT> def __init__( self, method: Callable[..., Awaitable[budget_service.ListBudgetsResponse]], request: budget_service.ListBudgetsRequest, response: budget_service.ListBudgetsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): <NEW_LINE> <INDENT> self._method = method <NEW_LINE> self._request = budget_service.ListBudgetsRequest(request) <NEW_LINE> self._response = response <NEW_LINE> self._metadata = metadata <NEW_LINE> <DEDENT> def __getattr__(self, name: str) -> Any: <NEW_LINE> <INDENT> return getattr(self._response, name) <NEW_LINE> <DEDENT> @property <NEW_LINE> async def pages(self) -> AsyncIterator[budget_service.ListBudgetsResponse]: <NEW_LINE> <INDENT> yield self._response <NEW_LINE> while self._response.next_page_token: <NEW_LINE> <INDENT> self._request.page_token = self._response.next_page_token <NEW_LINE> self._response = await self._method(self._request, metadata=self._metadata) <NEW_LINE> yield self._response <NEW_LINE> <DEDENT> <DEDENT> def __aiter__(self) -> AsyncIterator[budget_model.Budget]: <NEW_LINE> <INDENT> async def async_generator(): <NEW_LINE> <INDENT> async for page in self.pages: <NEW_LINE> <INDENT> for response in page.budgets: <NEW_LINE> <INDENT> yield response <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return async_generator() <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return "{0}<{1!r}>".format(self.__class__.__name__, self._response) | A pager for iterating through ``list_budgets`` requests.
This class thinly wraps an initial
:class:`google.cloud.billing.budgets_v1beta1.types.ListBudgetsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``budgets`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListBudgets`` requests and continue to iterate
through the ``budgets`` field on the
corresponding responses.
All the usual :class:`google.cloud.billing.budgets_v1beta1.types.ListBudgetsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup. | 6259905d460517430c432b71 |
class Alien(Sprite): <NEW_LINE> <INDENT> def __init__( self, input_ai_settings, input_screen): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.ai_settings = input_ai_settings <NEW_LINE> self.screen = input_screen <NEW_LINE> self.image = pygame.image.load('images/alien.bmp') <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.screen_rect = input_screen.get_rect() <NEW_LINE> self.rect.x = self.rect.width <NEW_LINE> self.rect.y = self.rect.height <NEW_LINE> self.x = float(self.rect.x) <NEW_LINE> <DEDENT> def check_edges(self): <NEW_LINE> <INDENT> if self.rect.right >= self.screen_rect.right: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif self.rect.left <= 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.x += (self.ai_settings.alien_speed_factor * self.ai_settings.fleet_direction) <NEW_LINE> self.rect.x = self.x <NEW_LINE> <DEDENT> def blitme(self): <NEW_LINE> <INDENT> self.screen.blit(self.image, self.rect) | Create an alien ship at the top of the screen | 6259905dbaa26c4b54d508e4 |
class ExtractFeaturesTestCase(unittest.TestCase): <NEW_LINE> <INDENT> pass | Class for unit tests of different methods in the extract_features module | 6259905d379a373c97d9a663
class ApiLogInTests(TestCase): <NEW_LINE> <INDENT> fixtures = ['test_data'] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.api = APIMiddleware() <NEW_LINE> <DEDENT> def testBasicAuthentication(self): <NEW_LINE> <INDENT> request = HttpRequest() <NEW_LINE> credentials = 'johndoe:foobar'.encode('base64') <NEW_LINE> request.META['Authorization'] = 'Basic %s' % credentials <NEW_LINE> self.assertTrue(self.api._perform_basic_auth(request)) <NEW_LINE> self.assertTrue(request.user.is_authenticated()) <NEW_LINE> self.assertEquals(request.user, User.objects.get(id=1)) | Test log in of Django users via Basic auth. | 6259905d24f1403a926863ed |