Dataset columns:
code: string, 4 to 4.48k characters
docstring: string, 1 to 6.45k characters
_id: string, 24 characters
class UserLogPageViewMixin(object): <NEW_LINE> <INDENT> @method_decorator(user_log_page_view) <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return super(UserLogPageViewMixin, self).dispatch(request, *args, **kwargs)
A simple mix-in class to write an event on every request to the view. Events are written using the `user_log_page_view` decorator.
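A minimal hedged usage sketch of the mixin; it assumes a standard Django class-based view, and the view name and template are hypothetical:

from django.views.generic import TemplateView

class DashboardView(UserLogPageViewMixin, TemplateView):
    # Every request dispatched to this view is logged via user_log_page_view.
    template_name = "dashboard.html"  # hypothetical template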
62599077bf627c535bcb2e62
class RiskDecomposition(object): <NEW_LINE> <INDENT> def __init__(self, utility): <NEW_LINE> <INDENT> self.utility = utility <NEW_LINE> self.sdf_tree = BigStorageTree(utility.period_len, utility.decision_times) <NEW_LINE> self.sdf_tree.set_value(0, np.array([1.0])) <NEW_LINE> n = len(self.sdf_tree) <NEW_LINE> self.expected_damages = np.zeros(n) <NEW_LINE> self.risk_premiums = np.zeros(n) <NEW_LINE> self.expected_sdf = np.zeros(n) <NEW_LINE> self.cross_sdf_damages = np.zeros(n) <NEW_LINE> self.discounted_expected_damages = np.zeros(n) <NEW_LINE> self.net_discount_damages = np.zeros(n) <NEW_LINE> self.cov_term = np.zeros(n) <NEW_LINE> self.expected_sdf[0] = 1.0 <NEW_LINE> <DEDENT> def save_output(self, m, prefix=None): <NEW_LINE> <INDENT> end_price = find_term_structure(m, self.utility, 0.01) <NEW_LINE> perp_yield = perpetuity_yield(end_price, self.sdf_tree.periods[-2]) <NEW_LINE> damage_scale = self.utility.cost.price(0, m[0], 0) / (self.net_discount_damages.sum()+self.risk_premiums.sum()) <NEW_LINE> scaled_discounted_ed = self.net_discount_damages * damage_scale <NEW_LINE> scaled_risk_premiums = self.risk_premiums * damage_scale <NEW_LINE> if prefix is not None: <NEW_LINE> <INDENT> prefix += "_" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prefix = "" <NEW_LINE> <DEDENT> write_columns_csv([self.expected_sdf, self.net_discount_damages, self.expected_damages, self.risk_premiums, self.cross_sdf_damages, self.discounted_expected_damages, self.cov_term, scaled_discounted_ed, scaled_risk_premiums], prefix + "sensitivity_output", ["Year", "Discount Prices", "Net Expected Damages", "Expected Damages", "Risk Premium", "Cross SDF & Damages", "Discounted Expected Damages", "Cov Term", "Scaled Net Expected Damages", "Scaled Risk Premiums"], [self.sdf_tree.periods.astype(int)+2015]) <NEW_LINE> append_to_existing([[end_price], [perp_yield], [scaled_discounted_ed.sum()], [scaled_risk_premiums.sum()], [self.utility.cost.price(0, m[0], 0)]], prefix+"sensitivity_output", header=["Zero Bound Price", "Perp Yield", "Expected Damages", "Risk Premium", "SCC"], start_char='\n') <NEW_LINE> store_trees(prefix=prefix, tree_dict={'SDF':self.sdf_tree, 'DeltaConsumption':self.delta_cons_tree})
Calculate and save analysis of output from the EZ-Climate model. Parameters ---------- utility : `Utility` object object of utility class Attributes ---------- utility : `Utility` object object of utility class sdf_tree : `BaseStorageTree` object SDF for each node expected_damages : ndarray expected damages in each period risk_premiums : ndarray risk premium in each period expected_sdf : ndarray expected SDF in each period cross_sdf_damages : ndarray cross term between the SDF and damages discounted_expected_damages : ndarray expected discounted damages for each period net_discount_damages : ndarray net discount damage, i.e. when cost is also accounted for cov_term : ndarray covariance between SDF and damages
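A hedged usage sketch; the `utility` object and the mitigation vector `m` are assumed to come from the surrounding EZ-Climate setup, and attributes such as `delta_cons_tree` are assumed to be filled in by other methods of the full class before saving:

rd = RiskDecomposition(utility)        # utility: a configured Utility object
# ... run the decomposition so expected_damages, risk_premiums, etc. are populated ...
rd.save_output(m, prefix="base_case")  # writes base_case_sensitivity_output CSV files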
62599077aad79263cf43014d
class Normalizer: <NEW_LINE> <INDENT> def __init__( self, key, center_axis=None, scale_axis=None, storage_dir=None, name=None ): <NEW_LINE> <INDENT> self.key = key <NEW_LINE> self.center_axis = None if center_axis is None else tuple(center_axis) <NEW_LINE> self.scale_axis = None if scale_axis is None else tuple(scale_axis) <NEW_LINE> self.storage_dir = None if storage_dir is None else Path(storage_dir) <NEW_LINE> self.name = name <NEW_LINE> self.moments = None <NEW_LINE> <DEDENT> def normalize(self, x): <NEW_LINE> <INDENT> assert self.moments is not None <NEW_LINE> mean, scale = self.moments <NEW_LINE> x -= mean <NEW_LINE> x /= (scale + 1e-18) <NEW_LINE> return x <NEW_LINE> <DEDENT> def __call__(self, example): <NEW_LINE> <INDENT> example[self.key] = self.normalize(example[self.key]) <NEW_LINE> return example <NEW_LINE> <DEDENT> def initialize_moments(self, dataset=None, verbose=False): <NEW_LINE> <INDENT> filepath = None if self.storage_dir is None else self.storage_dir / f"{self.key}_moments_{self.name}.json" if self.name else self.storage_dir / f"{self.key}_moments.json" <NEW_LINE> if filepath is not None and Path(filepath).exists(): <NEW_LINE> <INDENT> with filepath.open() as fid: <NEW_LINE> <INDENT> mean, scale = json.load(fid) <NEW_LINE> <DEDENT> if verbose: <NEW_LINE> <INDENT> print(f'Restored moments from {filepath}') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> assert dataset is not None <NEW_LINE> mean = 0. <NEW_LINE> mean_count = 0 <NEW_LINE> energy = 0. <NEW_LINE> energy_count = 0 <NEW_LINE> for example in tqdm(dataset, disable=not verbose): <NEW_LINE> <INDENT> x = example[self.key] <NEW_LINE> if self.center_axis is not None: <NEW_LINE> <INDENT> if not mean_count: <NEW_LINE> <INDENT> mean = np.sum(x, axis=self.center_axis, keepdims=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mean += np.sum(x, axis=self.center_axis, keepdims=True) <NEW_LINE> <DEDENT> mean_count += np.prod( np.array(x.shape)[np.array(self.center_axis)] ) <NEW_LINE> <DEDENT> if self.scale_axis is not None: <NEW_LINE> <INDENT> if not energy_count: <NEW_LINE> <INDENT> energy = np.sum(x**2, axis=self.scale_axis, keepdims=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> energy += np.sum(x**2, axis=self.scale_axis, keepdims=True) <NEW_LINE> <DEDENT> energy_count += np.prod( np.array(x.shape)[np.array(self.scale_axis)] ) <NEW_LINE> <DEDENT> <DEDENT> if self.center_axis is not None: <NEW_LINE> <INDENT> mean /= mean_count <NEW_LINE> <DEDENT> if self.scale_axis is not None: <NEW_LINE> <INDENT> energy /= energy_count <NEW_LINE> scale = np.sqrt(np.mean( energy - mean ** 2, axis=self.scale_axis, keepdims=True )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> scale = np.array(1.) <NEW_LINE> <DEDENT> if filepath is not None: <NEW_LINE> <INDENT> with filepath.open('w') as fid: <NEW_LINE> <INDENT> json.dump( (mean.tolist(), scale.tolist()), fid, sort_keys=True, indent=4 ) <NEW_LINE> <DEDENT> if verbose: <NEW_LINE> <INDENT> print(f'Saved moments to {filepath}') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.moments = np.array(mean), np.array(scale)
Performs global mean and scale normalization.
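A minimal hedged sketch of the intended flow, using invented data; it assumes each example is a dict holding a numpy array under the configured key and that no storage_dir is set, so moments are estimated from the dataset:

import numpy as np

normalizer = Normalizer(key="features", center_axis=(0,), scale_axis=(0,))
dataset = [{"features": np.random.randn(100, 8)} for _ in range(4)]
normalizer.initialize_moments(dataset, verbose=True)  # estimate mean and scale
example = normalizer(dataset[0])  # returns the example with "features" normalized in place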
625990773346ee7daa33832a
class NotFound(EsStatsException): <NEW_LINE> <INDENT> pass
Exception raised when expected information is not found.
62599077091ae356687065cf
class FLMResidual(Layer): <NEW_LINE> <INDENT> def __init__(self, units, activation=None, kernel_initializer=None, batch_normalization=False, depth=None, feature_linear_modulation=False, unmatched_dimensions=False): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> assert( activation is not None ) <NEW_LINE> assert( kernel_initializer is not None ) <NEW_LINE> flm = feature_linear_modulation <NEW_LINE> batch_norm = batch_normalization <NEW_LINE> use_bias = not batch_normalization <NEW_LINE> dense_linear_layer = partial(Dense, activation=None, use_bias=use_bias, kernel_initializer=kernel_initializer) <NEW_LINE> batch_norm_layer = BatchNormalization <NEW_LINE> self.dl_layer_1 = dense_linear_layer(units) <NEW_LINE> self.dl_layer_2 = dense_linear_layer(units) <NEW_LINE> self.bn_layer_1 = batch_norm_layer() if batch_norm else None <NEW_LINE> self.bn_layer_2 = batch_norm_layer() if batch_norm else None <NEW_LINE> self.flm_layer_1 = EFLM(units, depth) if flm else None <NEW_LINE> self.flm_layer_2 = EFLM(units, depth) if flm else None <NEW_LINE> self.flm = flm <NEW_LINE> self.activation = activations.get(activation) <NEW_LINE> self.batch_norm = batch_norm <NEW_LINE> self.unmatched_dims = unmatched_dimensions <NEW_LINE> self.initializer = kernel_initializer <NEW_LINE> self.units = units <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> assert isinstance( input_shape, (list, tuple) ) <NEW_LINE> main_shape = input_shape[0] <NEW_LINE> if self.unmatched_dims: <NEW_LINE> <INDENT> assert( main_shape[-1] != self.units ) <NEW_LINE> self.w = self.add_weight(name='W', shape=(input_shape[-1], self.units), initializer=self.initializer, trainable=True) <NEW_LINE> self.transform = lambda x : tf.matmul( x, self.w ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert( main_shape[-1] == self.units ) <NEW_LINE> self.transform = lambda x : x <NEW_LINE> <DEDENT> <DEDENT> def call(self, inputs, training=False): <NEW_LINE> <INDENT> batch_norm = self.batch_norm <NEW_LINE> flm = self.flm <NEW_LINE> x = inputs[0] <NEW_LINE> s = inputs[1] <NEW_LINE> x = self.dl_layer_1(x) <NEW_LINE> if batch_norm: x = self.bn_layer_1(x, training=training) <NEW_LINE> if flm: x = self.flm_layer_1([x, s]) <NEW_LINE> x = self.activation(x) <NEW_LINE> x = self.dl_layer_2(x) <NEW_LINE> if batch_norm: x = self.bn_layer_2(x, training=training) <NEW_LINE> if flm: x = self.flm_layer_2([x, s]) <NEW_LINE> x = x + self.transform(inputs[0]) <NEW_LINE> return self.activation(x) <NEW_LINE> <DEDENT> def num_layers(self): <NEW_LINE> <INDENT> return 2
Implements a residual layer (Kaiming He et al., 2015), with an optional batch normalization step and optional feature-wise linear modulation. NOTE: if the input and output dimensions are not equal, specify unmatched_dimensions=True; the residual layer then becomes: f(x) + A*x instead of: f(x) + x
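A hedged usage sketch; it assumes a TensorFlow/Keras environment in which Layer, Dense, BatchNormalization, activations, and partial are imported as the class above expects, and the EFLM layer is only required when feature_linear_modulation=True:

import tensorflow as tf

layer = FLMResidual(units=64, activation="relu",
                    kernel_initializer="glorot_uniform",
                    batch_normalization=True)
x = tf.random.normal([8, 64])     # main input; last dim must match `units`
s = tf.random.normal([8, 16])     # conditioning input (unused when FLM is off)
y = layer([x, s], training=True)  # y = relu(f(x) + x)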
625990775166f23b2e244d6c
class ResPartner(geo_model.GeoModel): <NEW_LINE> <INDENT> _inherit = "res.partner" <NEW_LINE> geo_point = fields.GeoPoint('Addresses coordinate')
Add geo_point to partner using a function field
62599077a8370b77170f1d63
class ClosedPosition(Position): <NEW_LINE> <INDENT> def add_transaction(self, transaction): <NEW_LINE> <INDENT> self.basis += transaction.shares * transaction.open_price <NEW_LINE> self.mktval += transaction.shares * transaction.close_price <NEW_LINE> self.transactions.append(transaction)
Position subclass specifically for closed positions.
625990779c8ee82313040e52
class Query(ObjectType): <NEW_LINE> <INDENT> info = String() <NEW_LINE> devices = List(Device, pattern=String()) <NEW_LINE> device = Field(Device, name=String(required=True)) <NEW_LINE> domains = List(Domain, pattern=String()) <NEW_LINE> families = List(Family, domain=String(), pattern=String()) <NEW_LINE> members = List(Member, domain=String(), family=String(), pattern=String()) <NEW_LINE> user_actions = List(UserAction, pattern=String(),skip=Int(),first=Int()) <NEW_LINE> servers = List(Server, pattern=String()) <NEW_LINE> instances = List(ServerInstance, server=String(), pattern=String()) <NEW_LINE> classes = List(DeviceClass, pattern=String()) <NEW_LINE> async def resolve_info(self, info): <NEW_LINE> <INDENT> db = PyTango.Database() <NEW_LINE> return db.get_info() <NEW_LINE> <DEDENT> async def resolve_device(self, info, name=None): <NEW_LINE> <INDENT> device_names = db.get_device_exported(name) <NEW_LINE> if len(device_names) == 1: <NEW_LINE> <INDENT> return Device(name=device_names[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> async def resolve_devices(self, info, pattern="*"): <NEW_LINE> <INDENT> device_names = db.get_device_exported(pattern) <NEW_LINE> return [Device(name=name) for name in device_names] <NEW_LINE> <DEDENT> def resolve_domains(self, info, pattern="*"): <NEW_LINE> <INDENT> domains = db.get_device_domain("%s/*" % pattern) <NEW_LINE> return [Domain(name=d) for d in sorted(domains)] <NEW_LINE> <DEDENT> def resolve_families(self, info, domain="*", pattern="*"): <NEW_LINE> <INDENT> families = db.get_device_family(f"{domain}/{pattern}/*") <NEW_LINE> return [Family(domain=domain, name=d) for d in sorted(families)] <NEW_LINE> <DEDENT> def resolve_members(self, info, domain="*", family="*", pattern="*"): <NEW_LINE> <INDENT> members = db.get_device_member(f"{domain}/{family}/{pattern}") <NEW_LINE> return [Member(domain=domain, family=family, name=member) for member in sorted(members)] <NEW_LINE> <DEDENT> def resolve_servers(self, info, pattern="*"): <NEW_LINE> <INDENT> servers = db.get_server_name_list() <NEW_LINE> rule = re.compile(fnmatch.translate(pattern), re.IGNORECASE) <NEW_LINE> return [Server(name=srv) for srv in sorted(servers) if rule.match(srv)] <NEW_LINE> <DEDENT> def resolve_user_actions(self, info, pattern="*", first = None, skip = None): <NEW_LINE> <INDENT> result = user_actions.get(pattern) <NEW_LINE> if skip: <NEW_LINE> <INDENT> result = result[skip:] <NEW_LINE> <DEDENT> if first: <NEW_LINE> <INDENT> result = result[:first] <NEW_LINE> <DEDENT> return result
This class contains all the queries.
625990774428ac0f6e659ec5
class DescribeTimerScalingPoliciesResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TimerScalingPolicies = None <NEW_LINE> self.TotalCount = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("TimerScalingPolicies") is not None: <NEW_LINE> <INDENT> self.TimerScalingPolicies = [] <NEW_LINE> for item in params.get("TimerScalingPolicies"): <NEW_LINE> <INDENT> obj = TimerScalingPolicy() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.TimerScalingPolicies.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.TotalCount = params.get("TotalCount") <NEW_LINE> self.RequestId = params.get("RequestId")
DescribeTimerScalingPolicies response structure.
625990774e4d562566373d99
class OfficialDocument(models.Model): <NEW_LINE> <INDENT> _name = 'school.official_document' <NEW_LINE> _inherit = ['mail.activity.mixin'] <NEW_LINE> name = fields.Char('Name',compute='compute_name') <NEW_LINE> @api.depends('student_id.name','type_id.name') <NEW_LINE> def compute_name(self): <NEW_LINE> <INDENT> for doc in self: <NEW_LINE> <INDENT> doc.name = "%s - %s" % (doc.student_id.name, doc.type_id.name) <NEW_LINE> <DEDENT> <DEDENT> student_id = fields.Many2one('res.partner', string='Student', domain="[('student', '=', '1')]",required = True, readonly=True) <NEW_LINE> type_id = fields.Many2one('school.official_document_type',string="Type",required = True) <NEW_LINE> is_available = fields.Boolean('Is Available',default = False) <NEW_LINE> attachment_ids = fields.Many2many('ir.attachment','official_document_ir_attachment_rel', 'official_document_id','ir_attachment_id', 'Attachments', domain="[('res_model','=','res.partner'),('res_id','=',student_id)]") <NEW_LINE> attachment_count = fields.Integer(compute='_compute_attachment_count', string='# Attachments') <NEW_LINE> def _compute_attachment_count(self): <NEW_LINE> <INDENT> for doc in self: <NEW_LINE> <INDENT> doc.attachment_count = len(doc.attachment_ids) <NEW_LINE> <DEDENT> <DEDENT> note = fields.Text('Notes') <NEW_LINE> expiry_date = fields.Date('Expiry Date') <NEW_LINE> has_expiry_date = fields.Boolean(related='type_id.has_expiry_date') <NEW_LINE> @api.model <NEW_LINE> def _needaction_domain_get(self): <NEW_LINE> <INDENT> return ['|',('is_available', '=', False),('expiry_date', '<', date.today())]
Official Document
6259907723849d37ff852a4d
class ExampleParser(transform.Transform): <NEW_LINE> <INDENT> def __init__(self, features): <NEW_LINE> <INDENT> super(ExampleParser, self).__init__() <NEW_LINE> if isinstance(features, dict): <NEW_LINE> <INDENT> self._ordered_features = collections.OrderedDict(sorted(features.items( ), key=lambda f: f[0])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._ordered_features = collections.OrderedDict(features) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "ExampleParser" <NEW_LINE> <DEDENT> @property <NEW_LINE> def input_valency(self): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def _output_names(self): <NEW_LINE> <INDENT> return list(self._ordered_features.keys()) <NEW_LINE> <DEDENT> @transform.parameter <NEW_LINE> def feature_definitions(self): <NEW_LINE> <INDENT> return self._ordered_features <NEW_LINE> <DEDENT> def _apply_transform(self, input_tensors, **kwargs): <NEW_LINE> <INDENT> parsed_values = parsing_ops.parse_example(input_tensors[0], features=self._ordered_features) <NEW_LINE> return self.return_type(**parsed_values)
A Transform that parses serialized `tensorflow.Example` protos.
62599077627d3e7fe0e0881e
class InfoLoggingEventSubscriber(LoggingEventSubscriber): <NEW_LINE> <INDENT> event_id = LOGGING_INFO_EVENT_ID
Event Notification Subscriber for informational logging events. The "origin" parameter in this class' initializer should be the process' exchange name (TODO: correct?)
62599077adb09d7d5dc0bf00
class Tag(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=20, unique=True) <NEW_LINE> used_count = models.IntegerField(default=1) <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['id'] <NEW_LINE> app_label = 'glow'
Data model for tags.
625990778a349b6b43687bf1
class Softsign(OnnxOpConverter): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _impl_v1(cls, inputs, attr, params): <NEW_LINE> <INDENT> return inputs[0] / (_expr.const(1.) + Absolute.get_converter(1)(inputs, attr, params))
Operator converter for Softsign.
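For reference, the Softsign activation implemented by this converter is softsign(x) = x / (1 + |x|): the input divided by one plus its absolute value, which is exactly the expression built above from the Absolute converter and an element-wise divide.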
62599077f9cc0f698b1c5f97
class Regular(_BaseTile): <NEW_LINE> <INDENT> pass
Regular tile. Does nothing.
6259907744b2445a339b7629
class SecurityError(Exception): <NEW_LINE> <INDENT> pass
Raised when an action is denied due to security settings.
625990772c8b7c6e89bd5180
class GeniusTestHub(GeniusHubBase): <NEW_LINE> <INDENT> def __init__(self, zones_json, device_json, debug=None) -> None: <NEW_LINE> <INDENT> super().__init__("test_hub", username="test", debug=debug) <NEW_LINE> _LOGGER.info("Using GeniusTestHub()") <NEW_LINE> self._test_json["zones"] = zones_json <NEW_LINE> self._test_json["devices"] = device_json <NEW_LINE> <DEDENT> async def update(self) -> None: <NEW_LINE> <INDENT> self._zones = self._test_json["zones"] <NEW_LINE> self._devices = self._test_json["devices"] <NEW_LINE> self._issues = self._issues_via_v3_zones({"data": self._zones}) <NEW_LINE> self._version = self._version_via_v3_zones({"data": self._zones}) <NEW_LINE> super().update()
The test class for a Genius Hub - uses a test file.
625990775fdd1c0f98e5f914
class Solution2: <NEW_LINE> <INDENT> def countConsistentStrings(self, allowed: str, words: List[str]) -> int: <NEW_LINE> <INDENT> consistent = 0 <NEW_LINE> for word in words: <NEW_LINE> <INDENT> if len(set(word).difference(allowed)) == 0: <NEW_LINE> <INDENT> consistent += 1 <NEW_LINE> <DEDENT> <DEDENT> return consistent
Using a hash set: for each word, compare the set difference between the word's letters and the allowed letters. Runtime: 288 ms, faster than 18.49% of Python3. Memory Usage: 16 MB, less than 92.44% of Python3. Time complexity: O(n*k). Space complexity: O(1).
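A small worked example of the set-difference check (input values chosen for illustration; it assumes `List` from typing is imported where the class is defined):

sol = Solution2()
# "aaab" and "baa" use only letters from "ab", so the count is 2.
print(sol.countConsistentStrings("ab", ["ad", "bd", "aaab", "baa", "badab"]))  # 2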
6259907701c39578d7f14400
class D2(D1): <NEW_LINE> <INDENT> pass
Absorbs both verticals
6259907732920d7e50bc79df
class TileCache: <NEW_LINE> <INDENT> def __init__(self, width=32, height=None): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height or width <NEW_LINE> self.cache = {} <NEW_LINE> <DEDENT> def __getitem__(self, filename): <NEW_LINE> <INDENT> key = (filename, self.width, self.height) <NEW_LINE> try: <NEW_LINE> <INDENT> return self.cache[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> tile_table = self._load_tile_table(filename, self.width, self.height) <NEW_LINE> self.cache[key] = tile_table <NEW_LINE> return tile_table <NEW_LINE> <DEDENT> <DEDENT> def _load_tile_table(self, filename, width, height): <NEW_LINE> <INDENT> image = pygame.image.load(filename).convert() <NEW_LINE> image_width, image_height = image.get_size() <NEW_LINE> tile_table = [] <NEW_LINE> for tile_x in range(0, image_width//width): <NEW_LINE> <INDENT> line = [] <NEW_LINE> tile_table.append(line) <NEW_LINE> for tile_y in range(0, image_height//height): <NEW_LINE> <INDENT> rect = (tile_x*width, tile_y*height, width, height) <NEW_LINE> line.append(image.subsurface(rect)) <NEW_LINE> <DEDENT> <DEDENT> return tile_table
Load the tilesets lazily into a global cache
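A hedged usage sketch; "ground.png" is a hypothetical tileset image, and a pygame display must already exist before convert() is called:

import pygame

pygame.init()
pygame.display.set_mode((320, 240))       # convert() needs an active display
cache = TileCache(width=32)
table = cache["ground.png"]               # sliced into 32x32 tiles on first access
tile = table[0][1]                        # column 0, row 1 of the tileset
assert table is cache["ground.png"]       # second lookup hits the cache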
62599077be8e80087fbc0a2b
class ImageDQ: <NEW_LINE> <INDENT> def __init__(self, data, dqparser=None): <NEW_LINE> <INDENT> ndim = 2 <NEW_LINE> data = np.asarray(data) <NEW_LINE> if data.ndim != ndim: <NEW_LINE> <INDENT> raise ValueError( f'Expected ndim={ndim} but data has ndim={data.ndim}') <NEW_LINE> <DEDENT> if 'int' not in data.dtype.name: <NEW_LINE> <INDENT> warnings.warn( f'Data has dtype={data.dtype}, will be converted to int...', AstropyUserWarning) <NEW_LINE> <DEDENT> if dqparser is None or not isinstance(dqparser, DQParser): <NEW_LINE> <INDENT> dqparser = DQParser.from_instrument(None) <NEW_LINE> <DEDENT> self.data = data <NEW_LINE> self.parser = dqparser <NEW_LINE> self._dqs_by_flag = None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_fits(cls, filename, ext=('DQ', 1), inskey='INSTRUME'): <NEW_LINE> <INDENT> with fits.open(filename) as pf: <NEW_LINE> <INDENT> data = pf[ext].data <NEW_LINE> try: <NEW_LINE> <INDENT> instrument = pf['PRIMARY'].header[inskey] <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> warnings.warn( f'Failed to read {inskey} from PRIMARY header, using ' f'default: {repr(e)}', AstropyUserWarning) <NEW_LINE> dqparser = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dqparser = DQParser.from_instrument(instrument) <NEW_LINE> <DEDENT> <DEDENT> return cls(data, dqparser=dqparser) <NEW_LINE> <DEDENT> def interpret_pixel(self, x, y, origin=1): <NEW_LINE> <INDENT> if origin not in (0, 1): <NEW_LINE> <INDENT> raise ValueError('origin must be 0 or 1') <NEW_LINE> <DEDENT> return self.parser.interpret_dqval(self.data[y - origin, x - origin]) <NEW_LINE> <DEDENT> def interpret_all(self, verbose=True): <NEW_LINE> <INDENT> self._dqs_by_flag = self.parser.interpret_array( self.data, verbose=verbose) <NEW_LINE> <DEDENT> def _check_cache(self): <NEW_LINE> <INDENT> if self._dqs_by_flag is None: <NEW_LINE> <INDENT> raise ValueError('Run interpret_all() method first!') <NEW_LINE> <DEDENT> <DEDENT> def dq_mask(self, dqval): <NEW_LINE> <INDENT> self._check_cache() <NEW_LINE> if dqval not in self._dqs_by_flag: <NEW_LINE> <INDENT> raise ValueError( f'DQ={dqval} not found in {sorted(self._dqs_by_flag)}') <NEW_LINE> <DEDENT> mask = np.zeros_like(self.data, dtype=bool) <NEW_LINE> mask[self._dqs_by_flag[dqval]] = True <NEW_LINE> return mask <NEW_LINE> <DEDENT> def pixlist(self, origin=1): <NEW_LINE> <INDENT> if origin not in (0, 1): <NEW_LINE> <INDENT> raise ValueError('origin must be 0 or 1') <NEW_LINE> <DEDENT> self._check_cache() <NEW_LINE> pixlist_by_flag = {} <NEW_LINE> for key, idx in self._dqs_by_flag.items(): <NEW_LINE> <INDENT> pixlist_by_flag[key] = list(zip(idx[1] + origin, idx[0] + origin)) <NEW_LINE> <DEDENT> return pixlist_by_flag
Class to handle DQ flags in an image. Parameters ---------- data : ndarray DQ data array to interpret. dqparser : `DQParser` or `None` DQ parser for interpretation. If not given, default is used. Attributes ---------- data : ndarray Same as input. parser : `DQParser` DQ parser for interpretation. Raises ------ ValueError Invalid image dimension.
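A hedged usage sketch; the file name "dq.fits" and the flag value 16 are invented for illustration:

imdq = ImageDQ.from_fits("dq.fits", ext=("DQ", 1))
imdq.interpret_all(verbose=False)   # cache pixel indices grouped by flag
mask = imdq.dq_mask(16)             # boolean mask of pixels carrying flag 16 (raises if absent)
pixlist = imdq.pixlist(origin=1)    # {flag: [(x, y), ...]} in FITS convention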
62599077aad79263cf430150
class IRequestHandlerStream(IFileStream): <NEW_LINE> <INDENT> def __init__(self, handler): <NEW_LINE> <INDENT> self.handler = handler <NEW_LINE> super(IRequestHandlerStream, self).__init__(handler.rfile)
SocketServer request handler input stream
62599077a17c0f6771d5d879
class Portfolio(Base): <NEW_LINE> <INDENT> __tablename__ = 'portfolio' <NEW_LINE> username = Column(Text, primary_key=True) <NEW_LINE> stocks = Column(Unicode)
Create DB for unique user stock portfolio.
625990775fdd1c0f98e5f915
class DirtyStrike(VariableStrike): <NEW_LINE> <INDENT> def __init__(self, T, K, A): <NEW_LINE> <INDENT> super(DirtyStrike, self).__init__(T, K) <NEW_LINE> self._A = A <NEW_LINE> <DEDENT> def strike(self, t): <NEW_LINE> <INDENT> ti = 0 <NEW_LINE> accC = 0 <NEW_LINE> for i in self._A.times: <NEW_LINE> <INDENT> if i > t: <NEW_LINE> <INDENT> accC = self._A.C * (t - ti) / (i - ti) <NEW_LINE> break <NEW_LINE> <DEDENT> ti = i <NEW_LINE> <DEDENT> return self.K + accC
Create a strike that is adjusted for outstanding interest.
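A hedged worked example of the accrued-coupon adjustment; the accrual stub and numbers are invented, and it assumes the VariableStrike base class stores K on self.K:

class _Accrual:              # hypothetical stand-in for the A argument
    times = [1.0, 2.0]       # coupon payment times
    C = 4.0                  # coupon amount per period

ds = DirtyStrike(T=2.0, K=100.0, A=_Accrual())
ds.strike(1.5)   # 100 + 4.0 * (1.5 - 1.0) / (2.0 - 1.0) = 102.0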
6259907755399d3f05627eab
class TopologySpec(object): <NEW_LINE> <INDENT> def __init__(self, specs): <NEW_LINE> <INDENT> if not _as_set(specs).issuperset(set(["name", "topology"])): <NEW_LINE> <INDENT> raise InvalidTopologyError( "Each topology must specify tags 'name' and 'topology'" " Found: {0}".format(_as_list(specs))) <NEW_LINE> <DEDENT> self.name = specs["name"] <NEW_LINE> if "workers" in specs: <NEW_LINE> <INDENT> self.workers = specs["workers"] <NEW_LINE> <DEDENT> if "ackers" in specs: <NEW_LINE> <INDENT> self.ackers = specs["ackers"] <NEW_LINE> <DEDENT> if "max_spout_pending" in specs: <NEW_LINE> <INDENT> self.max_spout_pending = specs["max_spout_pending"] <NEW_LINE> <DEDENT> if "max_shellbolt_pending" in specs: <NEW_LINE> <INDENT> self.max_shellbolt_pending = specs["max_shellbolt_pending"] <NEW_LINE> <DEDENT> if "message_timeout_secs" in specs: <NEW_LINE> <INDENT> self.message_timeout_secs = specs["message_timeout_secs"] <NEW_LINE> <DEDENT> if "logging_config" in specs: <NEW_LINE> <INDENT> self.logging_config = specs["logging_config"] <NEW_LINE> <DEDENT> if "serializer" in specs: <NEW_LINE> <INDENT> if specs["serializer"] in SERIALIZERS: <NEW_LINE> <INDENT> self.serializer = specs["serializer"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InvalidTopologyError( "Unknown serializer. Allowed: {0}. Found: {1}" .format(SERIALIZERS, specs["serializer"])) <NEW_LINE> <DEDENT> <DEDENT> if "requirements_filename" in specs: <NEW_LINE> <INDENT> self.requirements_filename = specs["requirements_filename"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.requirements_filename = None <NEW_LINE> <DEDENT> self.topology = [] <NEW_LINE> for component in specs["topology"]: <NEW_LINE> <INDENT> if "spout" in component: <NEW_LINE> <INDENT> self.topology.append(SpoutSpec(component["spout"])) <NEW_LINE> <DEDENT> elif "bolt" in component: <NEW_LINE> <INDENT> self.topology.append(BoltSpec(component["bolt"])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InvalidTopologyError( "Unknown tag. Allowed:'bolt' and 'spout'. Found: {0}" .format(_as_list(specs["topology"]))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def verify_groupings(self): <NEW_LINE> <INDENT> topology_out_fields = {} <NEW_LINE> for component in self.topology: <NEW_LINE> <INDENT> topology_out_fields[component.name] = component.output_fields <NEW_LINE> <DEDENT> for component in self.topology: <NEW_LINE> <INDENT> if (isinstance(component, BoltSpec) and component.groupings is not None): <NEW_LINE> <INDENT> component.verify_groupings(topology_out_fields) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def asdict(self): <NEW_LINE> <INDENT> dict_object = copy.deepcopy(self.__dict__) <NEW_LINE> dict_object["topology"] = [ component.asdict() for component in self.topology] <NEW_LINE> return dict_object
Topology level specification class.
62599077a05bb46b3848bdf6
class RegUsers(Resource): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> @register_user_api.expect(user_register) <NEW_LINE> def post(): <NEW_LINE> <INDENT> parser = reqparse.RequestParser() <NEW_LINE> parser.add_argument('name', type=str, help='Rate cannot be converted', location=['json']) <NEW_LINE> parser.add_argument('types', type=str, help='Rate cannot be converted', location=['json']) <NEW_LINE> parser.add_argument('age', type=int, help='Rate cannot be converted', location=['json']) <NEW_LINE> args = parser.parse_args() <NEW_LINE> res = reg_users.post_a_user(name=args['name'], types=args['types'], age=args['age']) <NEW_LINE> return res
User Registration
62599077796e427e53850111
class AggregatingRootSubclass(AggregatingRoot): <NEW_LINE> <INDENT> pass
Subclass of a class with aggregates.
62599077627d3e7fe0e08820
class constrainOR(Constrain): <NEW_LINE> <INDENT> def __init__(self, list_constrains): <NEW_LINE> <INDENT> self._c = list_constrains <NEW_LINE> <DEDENT> def evaluate(self, x): <NEW_LINE> <INDENT> out = -np.inf <NEW_LINE> for constrain in self._c: <NEW_LINE> <INDENT> out = np.max((out, constrain.evaluate(x))) <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> def to_scipy(self): <NEW_LINE> <INDENT> return {'type': 'ineq', 'fun': self.evaluate}
Logical OR of inequality constraints, evaluated as the maximum over the member constraints.
6259907767a9b606de547771
class TokenStateModification: <NEW_LINE> <INDENT> def __init__(self, modification: Callable[[RunningToken], None]) -> None: <NEW_LINE> <INDENT> self.modification = modification <NEW_LINE> <DEDENT> @pedantic <NEW_LINE> def change_token(self, token: RunningToken) -> None: <NEW_LINE> <INDENT> token_before = token.copy() <NEW_LINE> try: <NEW_LINE> <INDENT> self.modification(token) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> msg = f'Cannot run the modification: {self.modification}.' <NEW_LINE> logging.error(msg) <NEW_LINE> traceback.print_exc() <NEW_LINE> <DEDENT> if not token_before.keys() == token.keys(): <NEW_LINE> <INDENT> difference = set(token.keys()).difference(set(token_before.keys())) <NEW_LINE> logging.error(f'attribute {difference} not in token') <NEW_LINE> raise MissingAttributeInTokenError(token=token_before, attribute=list(difference)[0]) <NEW_LINE> <DEDENT> diff = {} <NEW_LINE> for key in token: <NEW_LINE> <INDENT> if token[key] != token_before[key]: <NEW_LINE> <INDENT> logging.debug(f'Token changed: {key}: {token_before[key]} -> {token[key]}') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> src = inspect.getsource(self.modification) <NEW_LINE> src = format_code_string(text=src) <NEW_LINE> return f'TokenStateCondition: {src}' <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return self.__str__()
This class defines a modification on a RunningToken and changes it via a defined callable function.
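A hedged usage sketch; RunningToken is assumed to behave like a mutable mapping of attributes, and the modification must only change existing keys, not add new ones:

def mark_done(token):
    token['state'] = 'done'    # mutate an existing attribute in place

mod = TokenStateModification(modification=mark_done)
mod.change_token(token=running_token)  # running_token: an existing RunningToken instance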
6259907744b2445a339b762a
class Session(ndb.Model): <NEW_LINE> <INDENT> name = ndb.StringProperty(required=True) <NEW_LINE> highlights = ndb.StringProperty() <NEW_LINE> speaker = ndb.StringProperty() <NEW_LINE> duration = ndb.IntegerProperty() <NEW_LINE> typeOfSession = ndb.StringProperty() <NEW_LINE> date = ndb.DateProperty() <NEW_LINE> startTime = ndb.TimeProperty() <NEW_LINE> conferenceKey = ndb.StringProperty(required=True)
Session -- Session object
62599077460517430c432d25
class VenueTest(BaseTest, unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.location = telegram.Location(longitude=1., latitude=0.) <NEW_LINE> self.title = 'title' <NEW_LINE> self._address = '_address' <NEW_LINE> self.foursquare_id = 'foursquare id' <NEW_LINE> self.json_dict = { 'location': self.location.to_dict(), 'title': self.title, 'address': self._address, 'foursquare_id': self.foursquare_id } <NEW_LINE> <DEDENT> def test_sticker_de_json(self): <NEW_LINE> <INDENT> sticker = telegram.Venue.de_json(self.json_dict, self._bot) <NEW_LINE> self.assertTrue(isinstance(sticker.location, telegram.Location)) <NEW_LINE> self.assertEqual(sticker.title, self.title) <NEW_LINE> self.assertEqual(sticker.address, self._address) <NEW_LINE> self.assertEqual(sticker.foursquare_id, self.foursquare_id) <NEW_LINE> <DEDENT> def test_sticker_to_json(self): <NEW_LINE> <INDENT> sticker = telegram.Venue.de_json(self.json_dict, self._bot) <NEW_LINE> self.assertTrue(self.is_json(sticker.to_json())) <NEW_LINE> <DEDENT> def test_sticker_to_dict(self): <NEW_LINE> <INDENT> sticker = telegram.Venue.de_json(self.json_dict, self._bot).to_dict() <NEW_LINE> self.assertTrue(self.is_dict(sticker)) <NEW_LINE> self.assertDictEqual(self.json_dict, sticker) <NEW_LINE> <DEDENT> @flaky(3, 1) <NEW_LINE> def test_reply_venue(self): <NEW_LINE> <INDENT> message = self._bot.sendMessage(self._chat_id, '.') <NEW_LINE> message = message.reply_venue(self.location.latitude, self.location.longitude, self.title, self._address) <NEW_LINE> self.assertAlmostEqual(message.venue.location.latitude, self.location.latitude, 2) <NEW_LINE> self.assertAlmostEqual(message.venue.location.longitude, self.location.longitude, 2)
This object represents Tests for Telegram Venue.
6259907799fddb7c1ca63aa2
class NoSuchMatcherException(Exception): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> super(NoSuchMatcherException, self).__init__("No such matcher: %s" % name)
Raised when no matcher with the given name is found.
6259907799cbb53fe683287e
class MapsNoVsync(MapsBenchmark): <NEW_LINE> <INDENT> tag = 'novsync' <NEW_LINE> @classmethod <NEW_LINE> def Name(cls): <NEW_LINE> <INDENT> return 'maps.novsync' <NEW_LINE> <DEDENT> def SetExtraBrowserOptions(self, options): <NEW_LINE> <INDENT> options.AppendExtraBrowserArgs('--disable-gpu-vsync')
Runs the Google Maps benchmark with Vsync disabled
6259907732920d7e50bc79e1
class TestViewtopicPhpbbB6T2259706ffset15(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.board_id = 6 <NEW_LINE> self.topic_id = 2259706 <NEW_LINE> self.offset = 15 <NEW_LINE> self.html_path = os.path.join('tests', 'phpbb.b6.t2259706.offset15.htm') <NEW_LINE> with open(self.html_path, 'r') as f: <NEW_LINE> <INDENT> self.page_html = f.read() <NEW_LINE> <DEDENT> self.posts = parse_viewtopic.parse_thread_page( page_html=self.page_html, board_id=self.board_id, topic_id=self.topic_id, offset=self.offset ) <NEW_LINE> return <NEW_LINE> <DEDENT> def test_thread_level(self): <NEW_LINE> <INDENT> self.assertEqual(len(self.posts), 15) <NEW_LINE> return <NEW_LINE> <DEDENT> def test_post_userids(self): <NEW_LINE> <INDENT> self.assertEqual(self.posts[0]['userid'], 182473) <NEW_LINE> self.assertEqual(self.posts[11]['userid'], 1136425) <NEW_LINE> self.assertEqual(self.posts[13]['userid'], 1136425) <NEW_LINE> return <NEW_LINE> <DEDENT> def test_post_usernames(self): <NEW_LINE> <INDENT> self.assertEqual(self.posts[0]['username'], 'Lumpy Burgertushie') <NEW_LINE> self.assertEqual(self.posts[11]['username'], 'Danielx64') <NEW_LINE> self.assertEqual(self.posts[13]['username'], 'Danielx64') <NEW_LINE> return <NEW_LINE> <DEDENT> def test_post_titles(self): <NEW_LINE> <INDENT> self.assertEqual(self.posts[0]['title'], 'Re: Speedtest') <NEW_LINE> self.assertEqual(self.posts[11]['title'], 'Re: Speedtest') <NEW_LINE> self.assertEqual(self.posts[13]['title'], 'Re: Speedtest') <NEW_LINE> return <NEW_LINE> <DEDENT> def test_thread_attachments_alt_text(self): <NEW_LINE> <INDENT> self.assertEqual(self.posts[11]['attachments'][0]['DOWNLOAD_NAME'], None) <NEW_LINE> self.assertEqual(self.posts[13]['attachments'][0]['DOWNLOAD_NAME'], u'3775542717.png') <NEW_LINE> return <NEW_LINE> <DEDENT> def test_thread_attachments_count(self): <NEW_LINE> <INDENT> print("self.posts[13]['attachments']: {0!r}".format(self.posts[13]['attachments'])) <NEW_LINE> self.assertEqual(self.posts[0]['attachments'], []) <NEW_LINE> self.assertEqual(self.posts[1]['attachments'], []) <NEW_LINE> self.assertEqual(self.posts[2]['attachments'], []) <NEW_LINE> self.assertEqual(self.posts[3]['attachments'], []) <NEW_LINE> self.assertEqual(len(self.posts[11]['attachments']), 1) <NEW_LINE> self.assertEqual(len(self.posts[13]['attachments']), 3) <NEW_LINE> return <NEW_LINE> <DEDENT> def test_thread_attachments_dl_url(self): <NEW_LINE> <INDENT> self.assertEqual(self.posts[11]['attachments'][0]['dl_url'], None) <NEW_LINE> self.assertEqual(self.posts[13]['attachments'][0]['dl_url'], u'./download/file.php?id=159886&amp;sid=5f585129d9f3e20dde9a82ebe4facd8d') <NEW_LINE> return <NEW_LINE> <DEDENT> def test_thread_attachments_title(self): <NEW_LINE> <INDENT> self.assertEqual(self.posts[11]['attachments'][0]['title'], None) <NEW_LINE> self.assertEqual(self.posts[13]['attachments'][0]['title'], None) <NEW_LINE> return
phpBB v3 https://www.phpbb.com/community/viewtopic.php?f=6&t=2259706&start=15 Has an attachment class without any file/link/image
62599077ad47b63b2c5a91e8
class Wheat(Crop): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(1,3,6) <NEW_LINE> self._type = "Wheat" <NEW_LINE> <DEDENT> def grow(self,light,water): <NEW_LINE> <INDENT> if light >= self._light_need and water >= self._water_need: <NEW_LINE> <INDENT> if self._status == "Seedling" and water > self._water_need: <NEW_LINE> <INDENT> self._growth += self._growth_rate * 1.5 <NEW_LINE> <DEDENT> elif self._status == "Young" and water > self._water_need: <NEW_LINE> <INDENT> self._growth += self._growth_rate * 1.25 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._growth += self._growth_rate <NEW_LINE> <DEDENT> <DEDENT> self._days_growing += 1 <NEW_LINE> self._update_status()
a wheat crop
62599077aad79263cf430152
class _Attribute(object): <NEW_LINE> <INDENT> def __init__(self, name, help_text=None, required=False, fallthroughs=None, completer=None, value_type=None): <NEW_LINE> <INDENT> if re.search(r'[A-Z]', name) and re.search('r[a-z]', name): <NEW_LINE> <INDENT> raise ValueError( 'Invalid attribute name [{}]: Attribute names should be in lower ' 'snake case (foo_bar) so they can be transformed to flag names.' .format(name)) <NEW_LINE> <DEDENT> self.name = name <NEW_LINE> self.help_text = help_text <NEW_LINE> self.required = required <NEW_LINE> self.fallthroughs = fallthroughs or [] <NEW_LINE> self.completer = completer <NEW_LINE> self.value_type = value_type or six.text_type <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, type(self)): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return (self.name == other.name and self.help_text == other.help_text and self.required == other.required and self.completer == other.completer and self.fallthroughs == other.fallthroughs and self.value_type == other.value_type) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return sum(map(hash, [ self.name, self.help_text, self.required, self.completer, self.value_type])) + sum(map(hash, self.fallthroughs))
A base class for concept attributes. Attributes: name: The name of the attribute. Used primarily to control the arg or flag name corresponding to the attribute. Must be in all lower case. help_text: String describing the attribute's relationship to the concept, used to generate help for an attribute flag. required: True if the attribute is required. fallthroughs: [googlecloudsdk.calliope.concepts.deps_lib.Fallthrough], the list of sources of data, in priority order, that can provide a value for the attribute if not given on the command line. These should only be sources inherent to the attribute, such as associated properties, not command-specific sources. completer: core.cache.completion_cache.Completer, the completer associated with the attribute. value_type: the type to be accepted by the attribute arg. Defaults to str.
625990772ae34c7f260aca80
class StartUp: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.font = 'big' <NEW_LINE> <DEDENT> def on_start_up(self): <NEW_LINE> <INDENT> custom_fig = Figlet(font=self.font) <NEW_LINE> application_name = custom_fig.renderText("Tweeting Mill") <NEW_LINE> iteration = "v.0.1.0 - A Twitter CLI Tool" <NEW_LINE> introductions = [application_name + '\n' + iteration] <NEW_LINE> for introduction in introductions: <NEW_LINE> <INDENT> print(introduction) <NEW_LINE> <DEDENT> <DEDENT> def options(self): <NEW_LINE> <INDENT> first_option = "0 - Return to the options page." <NEW_LINE> second_option = "1 - Search up a twitter user and user's recent post." <NEW_LINE> third_option = "2 - Create a data log to form a word cloud of recent tweets of a user." <NEW_LINE> fourth_option = "3 - Exit program" <NEW_LINE> option_lists = [first_option + "\n" + second_option + "\n" + third_option + "\n" + fourth_option] <NEW_LINE> for option in option_lists: <NEW_LINE> <INDENT> print(option)
On start-up, this displays the options page of the CLI tool with an ASCII terminal banner
625990772c8b7c6e89bd5183
@api_rest.route('/totalProducts/') <NEW_LINE> class DeleteProduct(SecureResource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return total_products()
Unsecured resource class; inherits from Resource
625990777d43ff24874280e1
class NotProvided: <NEW_LINE> <INDENT> pass
Simple class to be used as constant for default parameters. The goal is to be able to pass explicitly ``None`` Examples -------- >>> def foo(param=NotProvided): ... if param is NotProvided: ... print('Param not provided') ... elif param is None: ... print('Param is None') ... else: ... print('Param is set to %s' % param)
625990774e4d562566373d9d
class TestDB(unittest.TestCase): <NEW_LINE> <INDENT> def test_insertar(self): <NEW_LINE> <INDENT> client = MongoClient('localhost',27017) <NEW_LINE> dbUsuario = client.usuarios.usuarios <NEW_LINE> DAO.insertarNuevoUsuario(555) <NEW_LINE> cursor = dbUsuario.find_one({'_id':555}) <NEW_LINE> self.assertTrue(cursor != None) <NEW_LINE> <DEDENT> def test_existeUsuario(self): <NEW_LINE> <INDENT> client = MongoClient('localhost',27017) <NEW_LINE> dbUsuario = client.usuarios.usuarios <NEW_LINE> self.assertTrue(DAO.existe_Usuario) <NEW_LINE> <DEDENT> def test_getidioma(self): <NEW_LINE> <INDENT> client = MongoClient('localhost',27017) <NEW_LINE> dbUsuario = client.usuarios.usuarios <NEW_LINE> idioma = DAO.get_idioma(555) <NEW_LINE> print(idioma) <NEW_LINE> self.assertTrue(idioma =='Cast') <NEW_LINE> dbUsuario.delete_one({'_id':555}) <NEW_LINE> <DEDENT> def test_query(self): <NEW_LINE> <INDENT> self.assertEqual(apiai.query("salario consejal",555,'Cast'),(1,'Un Concejal gana 66.495,37€ de retribución anual bruta (sin antigüedad).')) <NEW_LINE> self.assertEqual(apiai.query("salario Sergi",555,'Cast'),(1,'Sergi Campillo Fernández gana 74.342,66€ de retribución anual bruta (sin antigüedad) por su cargo de Teniente de Alcalde.')) <NEW_LINE> self.assertEqual(apiai.query("impuesto benimaclet",555,'Cast'),(1, 'El barrio Benimaclet pago en total 6.428.288,48 en impuestos el año 2016.')) <NEW_LINE> self.assertEqual(apiai.query("ibi benimaclet 2018",555,'Cast'),(0, 'No tenemos los impuesto del 2018 año. de momento solo contamos con los impuesto del año 2016.')) <NEW_LINE> self.assertEqual(apiai.query("presupuesto Valencia 2012",555,'Cast'),(1, 'El presupuesto de Valencia, en el año 2012, es: 715.845.394,90.')) <NEW_LINE> <DEDENT> def test_addComa(self): <NEW_LINE> <INDENT> self.assertEqual(DAO.addComa('78551.36'),'78.551,36') <NEW_LINE> self.assertEqual(DAO.addComa(75585),'75.585')
def setUp(self): client = MongoClient('localhost',27017) dbUsuario = client.usuarios.usuarios #print(dbUsuario.remove({'_id':555})) return
62599077796e427e53850113
class NestedValidationError(ValidationError): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> if isinstance(message, dict): <NEW_LINE> <INDENT> self._messages = [message] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._messages = message <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def messages(self): <NEW_LINE> <INDENT> return self._messages
The default ValidationError behavior is to stringify each item in the list if the messages are a list of error messages. In the case of nested serializers, where the parent has many children, then the child's `serializer.errors` will be a list of dicts. In the case of a single child, the `serializer.errors` will be a dict. We need to override the default behavior to get properly nested error dicts.
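A short illustration of the wrapping behavior, based only on the code above (it assumes the framework's ValidationError base class is importable in context):

single = NestedValidationError({'name': ['This field is required.']})
single.messages   # [{'name': ['This field is required.']}]

many = NestedValidationError([{'name': ['Required.']}, {}])
many.messages     # [{'name': ['Required.']}, {}]  (lists pass through unchanged)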
62599077d486a94d0ba2d952
class MightySummoner(Feature): <NEW_LINE> <INDENT> name = "Mighty Summoner" <NEW_LINE> source = "Druid (Circle of the Shepherd)"
Starting at 6th level, beasts and fey that you conjure are more resilient than normal. Any beast or fey summoned or created by a spell that you cast gains the following benefits: -- The creature appears with more hit points than normal: 2 extra hit points per Hit Die it has. -- The damage from its natural weapons is considered magical for the purpose of overcoming immunity and resistance to nonmagical attacks and damage.
625990774f88993c371f11ee
@irc_message_representation <NEW_LINE> class InviteMessage(MessageType("invite", ["user", "channel"])): <NEW_LINE> <INDENT> pass
Invites the user of the given nickname to join the specified channel.
625990778a349b6b43687bf5
class CodeRegionType: <NEW_LINE> <INDENT> VALID_CODE_REGIONS_COUNT = 4 <NEW_LINE> ANNOTATIONS, BUG_REPRODUCER_ASSISTANT, FUNCTION, CLASS = range(VALID_CODE_REGIONS_COUNT)
Enum constants to classify "code regions", for instance: class, function, etc.
62599077aad79263cf430153
class AIThreadEvent(Event): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.connected=False
Helper class so I can tell the calling engine that we connected okay
62599077d268445f2663a82b
class DanaEquivalence(unittest.TestCase): <NEW_LINE> <INDENT> def test_connections_random(self): <NEW_LINE> <INDENT> for pycon, ccon, src_shape, tgt_shape, wgt_shape in [ (pydana.OneToOne, cdana.OneToOne, 4, 4, 4), (pydana.OneToAll, cdana.OneToAll, 4, 4, 4), (pydana.AssToMot, cdana.AssToMot, 16, 4, 4), (pydana.AssToCog, cdana.AssToCog, 16, 4, 4), (pydana.MotToAss, cdana.MotToAss, 4, 16, 4), (pydana.CogToAss, cdana.CogToAss, 4, 16, 4), (pydana.AllToAll, cdana.AllToAll, 4, 4, 16)]: <NEW_LINE> <INDENT> np.random.seed(0) <NEW_LINE> for _ in range(1000): <NEW_LINE> <INDENT> source = np.random.rand(src_shape) <NEW_LINE> target = np.random.rand(tgt_shape) <NEW_LINE> weights = np.random.rand(wgt_shape) <NEW_LINE> gain = np.random.random() <NEW_LINE> source_copy, weights_copy = np.copy(source), np.copy(weights) <NEW_LINE> py = pycon(source, np.copy(target), weights, gain) <NEW_LINE> c = ccon(source, np.copy(target), weights, gain) <NEW_LINE> for _ in range(10): <NEW_LINE> <INDENT> py.propagate() <NEW_LINE> c.propagate() <NEW_LINE> self.assertTrue(np.all(source == source_copy)) <NEW_LINE> self.assertTrue(np.all(weights == weights_copy)) <NEW_LINE> self.assertTrue(np.allclose(c.target, py.target, rtol=1e-05, atol=1e-08)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def random_inputs(cls, group): <NEW_LINE> <INDENT> group['U'] = np.random.random(group['U'].shape) <NEW_LINE> group['Iext'] = np.random.random(group['Iext'].shape) <NEW_LINE> group['Isyn'] = np.random.random(group['Isyn'].shape) <NEW_LINE> <DEDENT> def test_group_random(self): <NEW_LINE> <INDENT> total, error = 0, 0 <NEW_LINE> N, M = 1000, 10 <NEW_LINE> dt = np.random.uniform(low=-0.1, high=0.1) <NEW_LINE> for seed in range(N): <NEW_LINE> <INDENT> cg = cdana.Group(4) <NEW_LINE> pyg = pydana.Group(4) <NEW_LINE> Us, deltas = [[], []], [[], []] <NEW_LINE> for j, g in enumerate([cg, pyg]): <NEW_LINE> <INDENT> Us.append([]) <NEW_LINE> deltas.append([]) <NEW_LINE> np.random.seed(seed) <NEW_LINE> self.random_inputs(g) <NEW_LINE> for i in range(M): <NEW_LINE> <INDENT> g.evaluate(dt) <NEW_LINE> deltas[j].append(g.delta) <NEW_LINE> Us[j].append(g['U']) <NEW_LINE> <DEDENT> <DEDENT> for k in range(M): <NEW_LINE> <INDENT> total += 1 <NEW_LINE> if deltas[0][k] != deltas[1][k]: <NEW_LINE> <INDENT> error += 1 <NEW_LINE> <DEDENT> self.assertTrue(np.all(Us[0][k] == Us[1][k]))
Tests aimed at verifying that dana and cdana produce the exact same outputs.
6259907744b2445a339b762b
class MSC(EECert): <NEW_LINE> <INDENT> def __init__(self, pem): <NEW_LINE> <INDENT> self.policy_binding = None <NEW_LINE> super().__init__(pem) <NEW_LINE> assert self.validate() <NEW_LINE> <DEDENT> def parse(self, pem): <NEW_LINE> <INDENT> certs = pem_to_certs(pem) <NEW_LINE> if not certs: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.domain_name = get_cn(certs[0]) <NEW_LINE> self.policy_binding = certs[-1] <NEW_LINE> assert get_cn(self.policy_binding) == self.domain_name <NEW_LINE> chain = [] <NEW_LINE> for cert in reversed(certs[:-1]): <NEW_LINE> <INDENT> chain.insert(0, cert) <NEW_LINE> if get_cn(cert) == self.domain_name: <NEW_LINE> <INDENT> self.chains.insert(0, chain) <NEW_LINE> chain = [] <NEW_LINE> <DEDENT> <DEDENT> assert not chain <NEW_LINE> <DEDENT> def pack(self): <NEW_LINE> <INDENT> chain_pem = super().pack() <NEW_LINE> policy_pem = certs_to_pem([self.policy_binding]) <NEW_LINE> return b"".join([chain_pem, policy_pem]) <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> pem = CERT_SEP.join(self.pem.split(CERT_SEP)[:-1]) <NEW_LINE> pi, _ = binding_from_pem(pem) <NEW_LINE> try: <NEW_LINE> <INDENT> exts = self.policy_binding.extensions.get_extension_for_class(CertificatePolicies) <NEW_LINE> return pi == exts.value <NEW_LINE> <DEDENT> except ExtensionNotFound: <NEW_LINE> <INDENT> logging.error("Certificate binding not found.") <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> tmp = ["MSC\n"] <NEW_LINE> tmp.append(super().__repr__()) <NEW_LINE> tmp.append("Policy Binding: %s\n" % self.policy_binding) <NEW_LINE> return "".join(tmp)
Multi-signature certificate.
625990774c3428357761bc52
class FileListView(generic.ListView): <NEW_LINE> <INDENT> model = UploadFile
List page for uploaded files.
6259907799cbb53fe6832880
class FrameAnnotation(Annotation): <NEW_LINE> <INDENT> def __init__(self, nbsock): <NEW_LINE> <INDENT> self.nbsock = nbsock <NEW_LINE> self.buf = None <NEW_LINE> self.lnum = 0 <NEW_LINE> self.disabled = False <NEW_LINE> self.is_set = False <NEW_LINE> self.sernum = nbsock.sernum.last <NEW_LINE> <DEDENT> def set_buf_lnum(self, buf, lnum): <NEW_LINE> <INDENT> self.buf = buf <NEW_LINE> self.lnum = lnum <NEW_LINE> self.is_set = False <NEW_LINE> <DEDENT> def update(self, disabled=False): <NEW_LINE> <INDENT> if not self.is_set: <NEW_LINE> <INDENT> self.buf.define_frameanno() <NEW_LINE> self.nbsock.send_cmd(self.buf, 'addAnno', '%d %d %d/0 -1' % (self.sernum, self.buf.frame_typeNum, self.lnum)) <NEW_LINE> self.nbsock.last_buf = self.buf <NEW_LINE> self.nbsock.last_buf.lnum = self.lnum <NEW_LINE> self.nbsock.last_buf.col = 0 <NEW_LINE> self.nbsock.send_cmd(self.buf, 'setDot', '%d/0' % self.lnum) <NEW_LINE> self.is_set = True <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'frame at line %d' % self.lnum
The frame annotation is the sign set in the current frame.
6259907763b5f9789fe86b00
@alias <NEW_LINE> class Type(BlogCollaboratorType): <NEW_LINE> <INDENT> pass
Short blog type alias
6259907756b00c62f0fb426d
class IpAddress(Model): <NEW_LINE> <INDENT> _validation = { 'ports': {'required': True}, 'type': {'required': True, 'constant': True}, 'fqdn': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'ports': {'key': 'ports', 'type': '[Port]'}, 'type': {'key': 'type', 'type': 'str'}, 'ip': {'key': 'ip', 'type': 'str'}, 'dns_name_label': {'key': 'dnsNameLabel', 'type': 'str'}, 'fqdn': {'key': 'fqdn', 'type': 'str'}, } <NEW_LINE> type = "Public" <NEW_LINE> def __init__(self, ports, ip=None, dns_name_label=None): <NEW_LINE> <INDENT> super(IpAddress, self).__init__() <NEW_LINE> self.ports = ports <NEW_LINE> self.ip = ip <NEW_LINE> self.dns_name_label = dns_name_label <NEW_LINE> self.fqdn = None
IP address for the container group. Variables are only populated by the server, and will be ignored when sending a request. :param ports: The list of ports exposed on the container group. :type ports: list[~azure.mgmt.containerinstance.models.Port] :ivar type: Specifies if the IP is exposed to the public internet. Default value: "Public" . :vartype type: str :param ip: The IP exposed to the public internet. :type ip: str :param dns_name_label: The Dns name label for the IP. :type dns_name_label: str :ivar fqdn: The FQDN for the IP. :vartype fqdn: str
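A hedged construction sketch; Port is assumed to be the SDK's Port model accepting a `port` keyword, and the DNS label is invented:

ip = IpAddress(ports=[Port(port=80)], dns_name_label="my-container-group")
ip.type   # "Public" (constant); ip.fqdn stays None until populated by the service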
6259907732920d7e50bc79e3
class Group(Statement): <NEW_LINE> <INDENT> def __init__(self, identifier, group_statements, validate_identifier = True): <NEW_LINE> <INDENT> if not isinstance(group_statements, GroupStatements): <NEW_LINE> <INDENT> raise TypeError("group_statements is not an instance of GroupStatements") <NEW_LINE> <DEDENT> self.statements = group_statements <NEW_LINE> super().__init__(identifier, self.statements, validate_identifier) <NEW_LINE> <DEDENT> def _format(self, indent, width = "9"): <NEW_LINE> <INDENT> sub_width = str(self.statements._max_identifier_width) <NEW_LINE> sub_indent = indent + " " <NEW_LINE> return "{}{} = {}{}{}{} = {}".format( indent, format("GROUP", width), self.identifier, "\r\n{}\r\n".format("\r\n".join( stmt._format(sub_indent, sub_width) for stmt in iter(self.statements) )) if len(self.statements) else "\r\n", indent, format("END_GROUP", width), self.identifier )
Represents a PDS group statement. Parameters - `identifier` (:obj:`str`) Identifier of the group. - `group_statements` (:class:`GroupStatements`) Nested statements of the group. - `validate_identifier` (:obj:`True` or :obj:`False`) Whether `identifier` should be checked to see if it's a valid identifer for a PDS group statement. Default is :obj:`True`. Raises - :exc:`TypeError` If `group_statements` is not a instance of :class:`GroupStatements`. - :exc:`ValueError` If `validate_identifier` is :obj:`True` and `identifier` is not a valid identifier for a group statement. Attributes .. attribute:: identifier Identifier of the group statement. A :obj:`str` instance. Read-only. .. attribute:: statements Nested statements of the group statement. A :class:`GroupStatements` instance. Read-only. .. attribute:: value Same as :attr:`statements`.
62599077e1aae11d1e7cf4dd
class WekaBayes(object): <NEW_LINE> <INDENT> def __init__(self, labels, algorithm='K2'): <NEW_LINE> <INDENT> self.algorithm = algorithm <NEW_LINE> self.labels = deepcopy(labels) <NEW_LINE> self.model = None <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "WekaBayes(algorithm='{}')".format(self.algorithm) <NEW_LINE> <DEDENT> def fit(self, train_set): <NEW_LINE> <INDENT> self.model = Classifier(classname="weka.classifiers.bayes.net.BayesNetGenerator") <NEW_LINE> self.model.options = ['-Q', 'weka.classifiers.bayes.net.search.local.{}'.format(self.algorithm)] <NEW_LINE> self.model.build_classifier(train_set) <NEW_LINE> <DEDENT> def predict(self, test_set): <NEW_LINE> <INDENT> results = [] <NEW_LINE> for _, inst in enumerate(test_set): <NEW_LINE> <INDENT> results.append(self.model.classify_instance(inst)) <NEW_LINE> <DEDENT> return results <NEW_LINE> <DEDENT> def predict_with_class_names(self, test_set): <NEW_LINE> <INDENT> results = [] <NEW_LINE> for _, inst in enumerate(test_set): <NEW_LINE> <INDENT> results.append(self.labels[int(self.model.classify_instance(inst))]) <NEW_LINE> <DEDENT> return results
I don't care anymore - Phil Collins We don't need no water let the motherfucker burn.
62599077ec188e330fdfa242
class ProductImage(models.Model): <NEW_LINE> <INDENT> image = models.ImageField(upload_to='products') <NEW_LINE> product = models.ForeignKey(Product, related_name='images', on_delete=models.CASCADE)
Images for the product.
625990771f5feb6acb164591
class DetectorDevice(models.Model): <NEW_LINE> <INDENT> externalId = models.CharField(max_length=32, unique=True) <NEW_LINE> def getDict(self): <NEW_LINE> <INDENT> dict = {} <NEW_LINE> dict['deviceId'] = self.externalId <NEW_LINE> return dict <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.externalId
Device which detects beacons; currently only cellphones
62599077ad47b63b2c5a91ea
class CompareReport: <NEW_LINE> <INDENT> file = None <NEW_LINE> def __init__(self, index, reason): <NEW_LINE> <INDENT> self.index = index <NEW_LINE> self.reason = reason
When two files don't match, this tells you how they don't match. This is necessary because the system that is doing the actual comparing may not be the one printing out the reports. For speed, the compare information can be pipelined back to the client connection as an iter of CompareReports.
62599077a17c0f6771d5d87b
class Verbosity(Enum): <NEW_LINE> <INDENT> minimal = 0 <NEW_LINE> normal = 1 <NEW_LINE> verbose = 2 <NEW_LINE> very_verbose = 3 <NEW_LINE> def __ge__(self, other): <NEW_LINE> <INDENT> if self.__class__ is other.__class__: <NEW_LINE> <INDENT> return self.value >= other.value <NEW_LINE> <DEDENT> return NotImplemented <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> if self.__class__ is other.__class__: <NEW_LINE> <INDENT> return self.value > other.value <NEW_LINE> <DEDENT> return NotImplemented <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> if self.__class__ is other.__class__: <NEW_LINE> <INDENT> return self.value <= other.value <NEW_LINE> <DEDENT> return NotImplemented <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if self.__class__ is other.__class__: <NEW_LINE> <INDENT> return self.value < other.value <NEW_LINE> <DEDENT> return NotImplemented
Verbosity enum.
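A short usage example of the ordered comparisons defined above:

level = Verbosity.verbose
level >= Verbosity.normal             # True: verbose (2) >= normal (1)
level > Verbosity.very_verbose        # False
Verbosity.minimal < Verbosity.normal  # True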
625990779c8ee82313040e55
class DBScraper(DatedScraper): <NEW_LINE> <INDENT> options_form = DBScraperForm <NEW_LINE> def _login(self, username, password): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _initialize(self): <NEW_LINE> <INDENT> self._login(self.options['username'], self.options['password'])
Base class for (dated) scrapers that require a login
625990774428ac0f6e659ecb
class ACLToken(rdfvalue.RDFProtoStruct): <NEW_LINE> <INDENT> protobuf = flows_pb2.ACLToken <NEW_LINE> supervisor = False <NEW_LINE> def Copy(self): <NEW_LINE> <INDENT> result = super(ACLToken, self).Copy() <NEW_LINE> result.supervisor = False <NEW_LINE> return result <NEW_LINE> <DEDENT> def CheckExpiry(self): <NEW_LINE> <INDENT> if self.expiry and time.time() > self.expiry: <NEW_LINE> <INDENT> raise ExpiryError("Token expired.") <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> result = "" <NEW_LINE> if self.supervisor: <NEW_LINE> <INDENT> result = "******* SUID *******\n" <NEW_LINE> <DEDENT> return result + super(ACLToken, self).__str__() <NEW_LINE> <DEDENT> def SetUID(self): <NEW_LINE> <INDENT> result = self.Copy() <NEW_LINE> result.supervisor = True <NEW_LINE> return result <NEW_LINE> <DEDENT> def RealUID(self): <NEW_LINE> <INDENT> result = self.Copy() <NEW_LINE> result.supervisor = False <NEW_LINE> return result
The access control token.
6259907760cbc95b06365a3b
class IBanner(form.Schema, IImageScaleTraversable): <NEW_LINE> <INDENT> image = NamedBlobImage( title=_(u"Banner Image"), required=True, ) <NEW_LINE> text = RichText( title=_(u"Formated Banner Text"), description=_(u"Optional banner text with html formatting. If you " u"leave this field empty the description will be used " u"instead."), required=False, ) <NEW_LINE> position = schema.Choice( title=_(u"Teaser Text Position"), vocabulary=positions, required=False, default=u"caption-bottom-left", )
A single banner containing images and captions
6259907776e4537e8c3f0f1b
class IndexPage: <NEW_LINE> <INDENT> allowed_methods = {'GET', 'POST'} <NEW_LINE> @faucets.produces('text/html', 'application/json') <NEW_LINE> @faucets.consumes('text/html') <NEW_LINE> @faucets.template('index.html') <NEW_LINE> def get(self, data, name, **query): <NEW_LINE> <INDENT> return {'name': name} <NEW_LINE> <DEDENT> @faucets.produces('text/html', 'application/json') <NEW_LINE> @faucets.consumes('application/x-www-form-urlencoded') <NEW_LINE> @faucets.template('index.html') <NEW_LINE> def post(self, data, name, **query): <NEW_LINE> <INDENT> if data is not None and 'name' in data: <NEW_LINE> <INDENT> return {'name': data['name'][0]} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {}
The IndexPage class is the handler for two of the routes below. See the app.routes.add calls down below for more details on how this class is used by the application.
625990778a349b6b43687bf7
class seriousAdverseOutcomeProp(SchemaProperty): <NEW_LINE> <INDENT> _prop_schema = 'seriousAdverseOutcome' <NEW_LINE> _expected_schema = 'MedicalEntity' <NEW_LINE> _enum = False <NEW_LINE> _format_as = "ForeignKey"
SchemaField for seriousAdverseOutcome Usage: Include in SchemaObject SchemaFields as your_django_field = seriousAdverseOutcomeProp() schema.org description:A possible serious complication and/or serious side effect of this therapy. Serious adverse outcomes include those that are life-threatening; result in death, disability, or permanent damage; require hospitalization or prolong existing hospitalization; cause congenital anomalies or birth defects; or jeopardize the patient and may require medical or surgical intervention to prevent one of the outcomes in this definition. prop_schema returns just the property without url# format_as is used by app templatetags based upon schema.org datatype used to reference MedicalEntity
62599077f9cc0f698b1c5f9a
class ProjectedWaitResource(Resource): <NEW_LINE> <INDENT> def __init__(self, population): <NEW_LINE> <INDENT> Resource.__init__(self) <NEW_LINE> self.population = population <NEW_LINE> <DEDENT> def render_GET(self, request): <NEW_LINE> <INDENT> start = time.time() - 24 * 60 * 60 <NEW_LINE> end = start + 1 * 60 *60 <NEW_LINE> hails = self.population.hails.values() <NEW_LINE> times = [h.waittime() for h in hails if start < h.hail_time < end] <NEW_LINE> if len(times) == 0: <NEW_LINE> <INDENT> response = { 'error': 'INSUFFICIENT DATA FOR MEANINGFUL ANSWER' } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response = { 'wait mins': sum(times) / float(len(times)) / 60.0 } <NEW_LINE> <DEDENT> return json.dumps(response)
I estimate the average wait time over the next hour by calculating the average wait time over this same hour yesterday.
625990777c178a314d78e8b9
class Divide(Node): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> Node.__init__(self, name=name) <NEW_LINE> self.numerator = core.Port(name=self.__own__("numerator")) <NEW_LINE> self.denominator = core.Port(name=self.__own__("denominator")) <NEW_LINE> self._inputs.extend([self.numerator, self.denominator]) <NEW_LINE> self.output = core.Port(name=self.__own__('output')) <NEW_LINE> self._outputs.append(self.output) <NEW_LINE> <DEDENT> def transform(self): <NEW_LINE> <INDENT> self.validate_ports() <NEW_LINE> denom = (self.denominator.variable == 0) + self.denominator.variable <NEW_LINE> self.output.variable = self.numerator.variable / denom
Compute the ratio of two inputs.
625990774c3428357761bc54
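The `(x == 0) + x` expression in `transform` adds 1 only where the denominator is zero, so the division never raises; a small numpy sketch of the same guard (array values are hypothetical):
import numpy as np

num = np.array([6.0, 5.0, 8.0])
den = np.array([2.0, 0.0, 4.0])
safe_den = (den == 0) + den     # booleans cast to 0/1, giving [2.0, 1.0, 4.0]
print(num / safe_den)           # [3. 5. 2.] -- the zero-denominator entry passes through undivided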
class JobFailure(TowerCLIError): <NEW_LINE> <INDENT> exit_code = 99
An exception class for job failures that require error codes within the Tower CLI.
6259907799fddb7c1ca63aa4
class ContactLogDetail(generics.RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> serializer_class = ContactLogSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticated, ) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return ContactLog.objects.filter( family__organizations__in=Organization.objects.get_read_orgs(self.request.user) ).order_by('family', '-contact_date') <NEW_LINE> <DEDENT> def delete(self, request, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> obj = ContactLog.objects.get(pk=kwargs.get('pk', None)) <NEW_LINE> <DEDENT> except ContactLog.DoesNotExist: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> if obj.family.organizations.filter( organization_id__in=Organization.objects.get_readwrite_orgs(self.request.user) ).count() == 0: <NEW_LINE> <INDENT> error_msg = "Write permissions for this family are required to delete this contact log record." <NEW_LINE> raise PermissionDenied(detail=error_msg) <NEW_LINE> <DEDENT> return super(ContactLogDetail, self).delete(request, *args, **kwargs)
The contact log describes an in-person or telephone contact between an employee and a client, other than a home visit. Read access is based on the user's read access to the family, write access requires write access to the family, read access to the employee, and that the family_member be linked to the family either as a child or an adult. Read access to the contact type is also required for create/update operations. DELETE requests require write access to the family. If a query parameter of ?family_id=123 is passed to a GET request, only those contacts related to family 123 are returned.
6259907716aa5153ce401e77
class FQ12(FQP): <NEW_LINE> <INDENT> degree = 12 <NEW_LINE> FQ12_MODULUS_COEFFS = None <NEW_LINE> def __init__(self, coeffs: Sequence[IntOrFQ]) -> None: <NEW_LINE> <INDENT> if self.FQ12_MODULUS_COEFFS is None: <NEW_LINE> <INDENT> raise AttributeError("FQ12 Modulus Coeffs haven't been specified") <NEW_LINE> <DEDENT> self.mc_tuples = [(i, c) for i, c in enumerate(self.FQ12_MODULUS_COEFFS) if c] <NEW_LINE> super().__init__(coeffs, self.FQ12_MODULUS_COEFFS)
The 12th-degree extension field
6259907726068e7796d4e2da
class GoogleVisionAPIFaceExtractor(GoogleVisionAPIExtractor): <NEW_LINE> <INDENT> request_type = 'FACE_DETECTION' <NEW_LINE> response_object = 'faceAnnotations' <NEW_LINE> def _to_df(self, result, handle_annotations=None): <NEW_LINE> <INDENT> annotations = result._data <NEW_LINE> if handle_annotations == 'first': <NEW_LINE> <INDENT> annotations = [annotations[0]] <NEW_LINE> <DEDENT> face_results = [] <NEW_LINE> for i, annotation in enumerate(annotations): <NEW_LINE> <INDENT> data_dict = {} <NEW_LINE> for field, val in annotation.items(): <NEW_LINE> <INDENT> if 'Confidence' in field: <NEW_LINE> <INDENT> data_dict['face_' + field] = val <NEW_LINE> <DEDENT> elif 'oundingPoly' in field: <NEW_LINE> <INDENT> for j, vertex in enumerate(val['vertices']): <NEW_LINE> <INDENT> for dim in ['x', 'y']: <NEW_LINE> <INDENT> name = '%s_vertex%d_%s' % (field, j+1, dim) <NEW_LINE> val = vertex[dim] if dim in vertex else np.nan <NEW_LINE> data_dict[name] = val <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif field == 'landmarks': <NEW_LINE> <INDENT> for lm in val: <NEW_LINE> <INDENT> if 'type' in lm: <NEW_LINE> <INDENT> name = 'landmark_' + lm['type'] + '_%s' <NEW_LINE> lm_pos = {name % k: v for (k, v) in lm['position'].items()} <NEW_LINE> data_dict.update(lm_pos) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> data_dict[field] = val <NEW_LINE> <DEDENT> <DEDENT> face_results.append(data_dict) <NEW_LINE> <DEDENT> return pd.DataFrame(face_results)
Identifies faces in images using the Google Cloud Vision API.
62599077aad79263cf430156
class RandomLengthListFactory(Factory): <NEW_LINE> <INDENT> def __init__(self, factory=None, min_items=0, max_items=1): <NEW_LINE> <INDENT> super(RandomLengthListFactory, self).__init__() <NEW_LINE> self._factory = factory <NEW_LINE> self._min_items = min_items <NEW_LINE> self._max_items = max_items <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> self._factory = iter(self._factory) <NEW_LINE> return super(RandomLengthListFactory, self).__iter__() <NEW_LINE> <DEDENT> def set_element_amount(self, element_amount): <NEW_LINE> <INDENT> super(RandomLengthListFactory, self).set_element_amount(element_amount) <NEW_LINE> self._factory.set_element_amount(element_amount * self._max_items) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> return [self._factory.next() for i in xrange(random.randint(self._min_items, self._max_items))]
A factory that, on each iteration, returns a list of between `min_items` and `max_items` items obtained from calls to the given factory. Example, >>> import testdata >>> f = RandomLengthListFactory(testdata.CountingFactory(1), 3, 8).generate(5) >>> list(f) [[1, 2, 3], [4, 5, 6, 7], [8, 9, 10], [11, 12, 13, 14, 15]]
62599077097d151d1a2c2a13
class ZombieBot_RandomCoinFlip(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def turn(self, gameState): <NEW_LINE> <INDENT> results = roll() <NEW_LINE> while results and random.randint(0, 1) == 0: <NEW_LINE> <INDENT> results = roll()
After the first roll, this bot always has a fifty-fifty chance of deciding to roll again or stopping.
62599077f548e778e596cf2e
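The re-roll loop is a sequence of independent fifty-fifty decisions, so the number of rolls per turn follows a geometric distribution with mean 2; a quick simulation sketch (it assumes every roll returns a truthy result, a simplification of the real `roll()`):
import random

def rolls_per_turn(trials=100000):
    total = 0
    for _ in range(trials):
        rolls = 1                            # the first roll always happens
        while random.randint(0, 1) == 0:     # fifty-fifty chance to roll again
            rolls += 1
        total += rolls
    return total / trials

print(rolls_per_turn())   # approximately 2.0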
class EsProcessEvents(BaseModel): <NEW_LINE> <INDENT> version = IntegerField(help_text="Version of EndpointSecurity event") <NEW_LINE> seq_num = BigIntegerField(help_text="Per event sequence number") <NEW_LINE> global_seq_num = BigIntegerField(help_text="Global sequence number") <NEW_LINE> pid = BigIntegerField(help_text="Process (or thread) ID") <NEW_LINE> path = TextField(help_text="Path of executed file") <NEW_LINE> parent = BigIntegerField(help_text="Parent process ID") <NEW_LINE> original_parent = BigIntegerField(help_text="Original parent process ID in case of reparenting") <NEW_LINE> cmdline = TextField(help_text="Command line arguments (argv)") <NEW_LINE> cmdline_count = BigIntegerField(help_text="Number of command line arguments") <NEW_LINE> env = TextField(help_text="Environment variables delimited by spaces") <NEW_LINE> env_count = BigIntegerField(help_text="Number of environment variables") <NEW_LINE> cwd = TextField(help_text="The process current working directory") <NEW_LINE> uid = BigIntegerField(help_text="User ID of the process") <NEW_LINE> euid = BigIntegerField(help_text="Effective User ID of the process") <NEW_LINE> gid = BigIntegerField(help_text="Group ID of the process") <NEW_LINE> egid = BigIntegerField(help_text="Effective Group ID of the process") <NEW_LINE> username = TextField(help_text="Username") <NEW_LINE> signing_id = TextField(help_text="Signature identifier of the process") <NEW_LINE> team_id = TextField(help_text="Team identifier of thd process") <NEW_LINE> cdhash = TextField(help_text="Codesigning hash of the process") <NEW_LINE> platform_binary = IntegerField(help_text="Indicates if the binary is Apple signed binary (1) or not (0)") <NEW_LINE> exit_code = IntegerField(help_text="Exit code of a process in case of an exit event") <NEW_LINE> child_pid = BigIntegerField(help_text="Process ID of a child process in case of a fork event") <NEW_LINE> time = BigIntegerField(help_text="Time of execution in UNIX time") <NEW_LINE> event_type = TextField(help_text="Type of EndpointSecurity event") <NEW_LINE> eid = TextField(help_text="Event ID") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> table_name = "es_process_events"
Process execution events from EndpointSecurity.
625990772ae34c7f260aca84
class rule_021(Rule): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Rule.__init__(self, 'generic', '021', oToken) <NEW_LINE> self.bInsertWhitespace = False
This rule checks that the semicolon is not on its own line. **Violation** .. code-block:: vhdl U_FIFO : FIFO generic ( G_WIDTH : integer ) ; **Fix** .. code-block:: vhdl U_FIFO : FIFO generic ( G_WIDTH : integer );
625990779c8ee82313040e56
class HumanPlayer(Player): <NEW_LINE> <INDENT> def get_move(self, game): <NEW_LINE> <INDENT> game.display_game_board() <NEW_LINE> while True: <NEW_LINE> <INDENT> print("") <NEW_LINE> move = input("Choose your spot: ") <NEW_LINE> try: <NEW_LINE> <INDENT> move = int(move) - 1 <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print("You must enter an integer, 1-9.") <NEW_LINE> continue <NEW_LINE> <DEDENT> if move < 0 or move > 8: <NEW_LINE> <INDENT> print("That move is not even on the board.") <NEW_LINE> <DEDENT> elif game.board[move] != "": <NEW_LINE> <INDENT> print("That spot is already taken.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return move <NEW_LINE> <DEDENT> def game_over_callback(self, game): <NEW_LINE> <INDENT> if isinstance(game.playerX, HumanPlayer) and isinstance(game.playerO, HumanPlayer) and game.playerO == self: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> game.display_game_board() <NEW_LINE> if game.winner == None: <NEW_LINE> <INDENT> print(" _____ _ ") <NEW_LINE> print(" / ____| | | _ ") <NEW_LINE> print(" | | __ _ | |_ | )___ ") <NEW_LINE> print(" | | / _` || __||// __| ") <NEW_LINE> print(" | |____| (_| || |_ \__ \ ") <NEW_LINE> print(" \_____|\__,_| \__| |___/ ") <NEW_LINE> print(" __ _ __ _ _ __ ___ ___ ") <NEW_LINE> print(" / _` | / _` || '_ ` _ \ / _ \ ") <NEW_LINE> print(" | (_| || (_| || | | | | || __/ ") <NEW_LINE> print(" \__, | \__,_||_| |_| |_| \___| ") <NEW_LINE> print(" __/ | ") <NEW_LINE> print(" |___/ ") <NEW_LINE> print("") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if game.winner == game.playerX: <NEW_LINE> <INDENT> print(" __ __ _ _ ") <NEW_LINE> print(" \ \ / / (_) | |") <NEW_LINE> print(" \ V / __ __ _ _ __ ___ | |") <NEW_LINE> print(" > < \ \ /\ / /| || '_ \ / __|| |") <NEW_LINE> print(" / . \ \ V V / | || | | |\__ \|_|") <NEW_LINE> print(" /_/ \_\ \_/\_/ |_||_| |_||___/(_)") <NEW_LINE> print("") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(" ____ _ _ ") <NEW_LINE> print(" / __ \ (_) | |") <NEW_LINE> print(" | | | | __ __ _ _ __ ___ | |") <NEW_LINE> print(" | | | | \ \ /\ / /| || '_ \ / __|| |") <NEW_LINE> print(" | |__| | \ V V / | || | | |\__ \|_|") <NEW_LINE> print(" \____/ \_/\_/ |_||_| |_||___/(_)") <NEW_LINE> print("")
A type of player that will ask for human input to indicate a move
625990774a966d76dd5f0889
class ROSPkgException(roslib_electric.exceptions.ROSLibException): <NEW_LINE> <INDENT> pass
Base class of package-related errors.
625990774f88993c371f11f0
class Solution(object): <NEW_LINE> <INDENT> def countAndSay(self, n): <NEW_LINE> <INDENT> if n <= 1: <NEW_LINE> <INDENT> return "1" <NEW_LINE> <DEDENT> pre_str = '1' <NEW_LINE> for i in range(2, n+1): <NEW_LINE> <INDENT> index = 0 <NEW_LINE> current_str = '' <NEW_LINE> while index < len(pre_str): <NEW_LINE> <INDENT> pos = index + 1 <NEW_LINE> repeat = 0 <NEW_LINE> while pos < len(pre_str) and pre_str[index] == pre_str[pos]: <NEW_LINE> <INDENT> pos += 1 <NEW_LINE> repeat += 1 <NEW_LINE> <DEDENT> current_str += str(repeat+1) + pre_str[index] <NEW_LINE> index = pos <NEW_LINE> <DEDENT> pre_str = current_str <NEW_LINE> <DEDENT> return pre_str
A fairly straightforward iterative solution: we generate the k-th string, and from the k-th string we generate the (k+1)-th string, until we reach the n-th string.
6259907760cbc95b06365a3c
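A short worked run of the method above shows how each term describes the previous one (e.g. "1211" is read as one 1, one 2, two 1s):
s = Solution()
for n in range(1, 6):
    print(n, s.countAndSay(n))
# 1 1
# 2 11
# 3 21
# 4 1211
# 5 111221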
class Plotter: <NEW_LINE> <INDENT> def __init__(self, opts): <NEW_LINE> <INDENT> self.xrange = copy.copy(opts.xrange) <NEW_LINE> self.yrange = copy.copy(opts.yrange) <NEW_LINE> self.bgirange = copy.copy(opts.intparams.background) <NEW_LINE> self.pkrange = copy.copy(opts.intparams.peak) <NEW_LINE> self.gp = Gnuplot.Gnuplot() <NEW_LINE> self.gp("set term x11 size %d,%d" % (opts.gpwidth, opts.gpheight)) <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.gp("reset") <NEW_LINE> self.gp("unset arrow") <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.gp("clear") <NEW_LINE> <DEDENT> def set_xrange(self, minx, maxx): <NEW_LINE> <INDENT> if minx >= maxx: <NEW_LINE> <INDENT> raise Plotter_error("Invalid X display range %d,%d" % (opts.gpwidth, opts.gpheight)) <NEW_LINE> <DEDENT> self.xrange = datarange.DataRange(minx, maxx) <NEW_LINE> self.gp("set xrange [%.3f:%.3f]" % (minx, maxx)) <NEW_LINE> <DEDENT> def set_yrange(self, miny, maxy): <NEW_LINE> <INDENT> if miny >= maxy: <NEW_LINE> <INDENT> raise Plotter_error("Invalid Y display range") <NEW_LINE> <DEDENT> self.yrange = datarange.DataRange(miny, maxy) <NEW_LINE> self.gp("set yrange [%.1f:%.1f]" % (miny, maxy)) <NEW_LINE> <DEDENT> def set_bgirange(self, minr, maxr): <NEW_LINE> <INDENT> if minr >= maxr: <NEW_LINE> <INDENT> raise Plotter_error("Invalid Background integration range") <NEW_LINE> <DEDENT> self.bgirange = datarange.DataRange(minr, maxr) <NEW_LINE> self.gp("set arrow from %.3f,%.1f to %.3f,%.1f nohead ls 6" % (minr, self.yrange.lower, minr, self.yrange.upper)) <NEW_LINE> self.gp("set arrow from %.3f,%.1f to %.3f,%.1f nohead ls 6" % (maxr, self.yrange.lower, maxr, self.yrange.upper)) <NEW_LINE> <DEDENT> def set_pkrange(self, minr, maxr): <NEW_LINE> <INDENT> if minr >= maxr: <NEW_LINE> <INDENT> raise Plotter_error("Invalid Peak integration range") <NEW_LINE> <DEDENT> self.pkrange = datarange.DataRange(minr, maxr) <NEW_LINE> self.gp("set arrow from %.3f,%.1f to %.3f,%.1f nohead ls 6" % (minr, self.yrange.lower, minr, self.yrange.upper)) <NEW_LINE> self.gp("set arrow from %.3f,%.1f to %.3f,%.1f nohead ls 6" % (maxr, self.yrange.lower, maxr, self.yrange.upper)) <NEW_LINE> <DEDENT> def set_plot(self, plotfiles): <NEW_LINE> <INDENT> if len(plotfiles) == 0: <NEW_LINE> <INDENT> self.clear() <NEW_LINE> return <NEW_LINE> <DEDENT> plotcmds = [ "'%s' w l notitle" % p for p in plotfiles] <NEW_LINE> self.gp("plot " + string.join(plotcmds, ','))
Class to run GNUplot
62599077379a373c97d9a9c1
class JobScheduleEnableOptions(Model): <NEW_LINE> <INDENT> _attribute_map = { 'timeout': {'key': '', 'type': 'int'}, 'client_request_id': {'key': '', 'type': 'str'}, 'return_client_request_id': {'key': '', 'type': 'bool'}, 'ocp_date': {'key': '', 'type': 'rfc-1123'}, 'if_match': {'key': '', 'type': 'str'}, 'if_none_match': {'key': '', 'type': 'str'}, 'if_modified_since': {'key': '', 'type': 'rfc-1123'}, 'if_unmodified_since': {'key': '', 'type': 'rfc-1123'}, } <NEW_LINE> def __init__(self, *, timeout: int=30, client_request_id: str=None, return_client_request_id: bool=False, ocp_date=None, if_match: str=None, if_none_match: str=None, if_modified_since=None, if_unmodified_since=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(JobScheduleEnableOptions, self).__init__(**kwargs) <NEW_LINE> self.timeout = timeout <NEW_LINE> self.client_request_id = client_request_id <NEW_LINE> self.return_client_request_id = return_client_request_id <NEW_LINE> self.ocp_date = ocp_date <NEW_LINE> self.if_match = if_match <NEW_LINE> self.if_none_match = if_none_match <NEW_LINE> self.if_modified_since = if_modified_since <NEW_LINE> self.if_unmodified_since = if_unmodified_since
Additional parameters for enable operation. :param timeout: The maximum time that the server can spend processing the request, in seconds. The default is 30 seconds. Default value: 30 . :type timeout: int :param client_request_id: The caller-generated request identity, in the form of a GUID with no decoration such as curly braces, e.g. 9C4D50EE-2D56-4CD3-8152-34347DC9F2B0. :type client_request_id: str :param return_client_request_id: Whether the server should return the client-request-id in the response. Default value: False . :type return_client_request_id: bool :param ocp_date: The time the request was issued. Client libraries typically set this to the current system clock time; set it explicitly if you are calling the REST API directly. :type ocp_date: datetime :param if_match: An ETag value associated with the version of the resource known to the client. The operation will be performed only if the resource's current ETag on the service exactly matches the value specified by the client. :type if_match: str :param if_none_match: An ETag value associated with the version of the resource known to the client. The operation will be performed only if the resource's current ETag on the service does not match the value specified by the client. :type if_none_match: str :param if_modified_since: A timestamp indicating the last modified time of the resource known to the client. The operation will be performed only if the resource on the service has been modified since the specified time. :type if_modified_since: datetime :param if_unmodified_since: A timestamp indicating the last modified time of the resource known to the client. The operation will be performed only if the resource on the service has not been modified since the specified time. :type if_unmodified_since: datetime
6259907766673b3332c31d9d
class StatusTestCase(BasicTestCase): <NEW_LINE> <INDENT> def test_status_creation(self): <NEW_LINE> <INDENT> rv = self.client.post('/set/up/', data=dict(value="True")) <NEW_LINE> self.assertEqual(201, rv.status_code) <NEW_LINE> <DEDENT> def test_status_existence(self): <NEW_LINE> <INDENT> rv = self.client.post('/set/up/', data=dict(value="True")) <NEW_LINE> self.assertEqual(201, rv.status_code) <NEW_LINE> rv = self.client.get('/get/up/') <NEW_LINE> self.assertEqual(200, rv.status_code) <NEW_LINE> self.assertEqual("True", loads(rv.data)['status']) <NEW_LINE> <DEDENT> def test_status_nonexistence(self): <NEW_LINE> <INDENT> rv = self.client.get('/get/notup/') <NEW_LINE> self.assertEqual(404, rv.status_code) <NEW_LINE> <DEDENT> def test_status_update(self): <NEW_LINE> <INDENT> rv = self.client.post('/set/up/', data=dict(value="True")) <NEW_LINE> self.assertEqual(201, rv.status_code) <NEW_LINE> rv = self.client.get('/get/up/') <NEW_LINE> self.assertEqual(200, rv.status_code) <NEW_LINE> self.assertEqual("True", loads(rv.data)['status']) <NEW_LINE> rv = self.client.post('/set/up/', data=dict(value="False")) <NEW_LINE> self.assertEqual(201, rv.status_code) <NEW_LINE> rv = self.client.get('/get/up/') <NEW_LINE> self.assertEqual(200, rv.status_code) <NEW_LINE> self.assertEqual("False", loads(rv.data)['status']) <NEW_LINE> <DEDENT> def test_status_deletion(self): <NEW_LINE> <INDENT> rv = self.client.post('/set/up/', data=dict(value="True")) <NEW_LINE> self.assertEqual(201, rv.status_code) <NEW_LINE> rv = self.client.get('/get/up/') <NEW_LINE> self.assertEqual(200, rv.status_code) <NEW_LINE> self.assertEqual("True", loads(rv.data)['status']) <NEW_LINE> rv = self.client.get('/del/up/') <NEW_LINE> self.assertEqual(204, rv.status_code) <NEW_LINE> rv = self.client.get('/get/up/') <NEW_LINE> self.assertEqual(404, rv.status_code) <NEW_LINE> rv = self.client.get('/del/up/') <NEW_LINE> self.assertEqual(404, rv.status_code) <NEW_LINE> <DEDENT> def test_status_timestamp(self): <NEW_LINE> <INDENT> self.client.post('/set/up/', data=dict(value="True")) <NEW_LINE> old = loads(self.client.get('/get/up/').data)['update_time'] <NEW_LINE> time.sleep(1) <NEW_LINE> self.client.post('/set/up/', data=dict(value="True")) <NEW_LINE> new = loads(self.client.get('/get/up/').data)['update_time'] <NEW_LINE> self.assertNotEqual(old, new)
Test basic status operations
6259907744b2445a339b762d
class InstancesView(object): <NEW_LINE> <INDENT> def __init__(self, instances, req=None): <NEW_LINE> <INDENT> self.instances = instances <NEW_LINE> self.req = req <NEW_LINE> <DEDENT> def data(self): <NEW_LINE> <INDENT> data = [] <NEW_LINE> for instance in self.instances: <NEW_LINE> <INDENT> data.append(self.data_for_instance(instance)) <NEW_LINE> <DEDENT> return {'instances': data} <NEW_LINE> <DEDENT> def data_for_instance(self, instance): <NEW_LINE> <INDENT> view = InstanceView(instance, req=self.req) <NEW_LINE> return view.data()['instance']
Shows a list of SimpleInstance objects.
6259907856b00c62f0fb4271
class ItemsAppsConfig(AppConfig): <NEW_LINE> <INDENT> name = 'pmanagement.items' <NEW_LINE> verbose_name = 'items'
Items app config
625990787d847024c075dd7a
class SSSBApartmentLoader(ItemLoader): <NEW_LINE> <INDENT> default_input_processor = MapCompose(str.strip, is_empty) <NEW_LINE> apt_name_in = MapCompose(remove_extra_middle_spaces) <NEW_LINE> apt_name_out = TakeFirst() <NEW_LINE> apt_price_in = MapCompose(get_first_space) <NEW_LINE> apt_price_out = TakeFirst() <NEW_LINE> furnitured_in = MapCompose(set_boolean) <NEW_LINE> furnitured_out = TakeFirst() <NEW_LINE> electricity_in = MapCompose(set_boolean) <NEW_LINE> electricity_out = TakeFirst() <NEW_LINE> _10_month_in = MapCompose(set_boolean) <NEW_LINE> _10_month_out = TakeFirst() <NEW_LINE> default_output_processor = Join()
Class for loading items for SSSBApartmentSpider.
625990785166f23b2e244d76
class Base(Message): <NEW_LINE> <INDENT> pass
Base class for locator messages.
62599078ec188e330fdfa246
class IPortletTemplate(Interface): <NEW_LINE> <INDENT> pass
Portlet Template
62599078ad47b63b2c5a91ee
class TransformablePortAspects(PortAspects, Transformable): <NEW_LINE> <INDENT> def __create_ports__(self, ports): <NEW_LINE> <INDENT> ports = self.create_ports(ports) <NEW_LINE> ports = ports.transform_copy(self.transformation) <NEW_LINE> return ports
Factory class that automatically transforms ports.
6259907826068e7796d4e2dc
class CVSBinaryFileEOLStyleSetter(SVNPropertySetter): <NEW_LINE> <INDENT> propname = 'svn:eol-style' <NEW_LINE> def set_properties(self, s_item): <NEW_LINE> <INDENT> if self.propname in s_item.svn_props: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if s_item.cvs_rev.cvs_file.mode == 'b': <NEW_LINE> <INDENT> s_item.svn_props[self.propname] = None
Set the eol-style to None for files with CVS mode '-kb'.
62599078f548e778e596cf2f
class IOpenIDPersistentIdentity(Interface): <NEW_LINE> <INDENT> account = Attribute('The `IAccount` for the user.') <NEW_LINE> openid_identity_url = Attribute( 'The OpenID identity URL for the user.') <NEW_LINE> openid_identifier = Attribute( 'The OpenID identifier used with the request.')
An object that represents a persistent user identity URL. This interface is generally needed by the UI.
62599078097d151d1a2c2a15
class NumberLocale(VegaLiteSchema): <NEW_LINE> <INDENT> _schema = {'$ref': '#/definitions/NumberLocale'} <NEW_LINE> def __init__(self, currency=Undefined, decimal=Undefined, grouping=Undefined, thousands=Undefined, minus=Undefined, nan=Undefined, numerals=Undefined, percent=Undefined, **kwds): <NEW_LINE> <INDENT> super(NumberLocale, self).__init__(currency=currency, decimal=decimal, grouping=grouping, thousands=thousands, minus=minus, nan=nan, numerals=numerals, percent=percent, **kwds)
NumberLocale schema wrapper Mapping(required=[decimal, thousands, grouping, currency]) Locale definition for formatting numbers. Attributes ---------- currency : :class:`Vector2string` The currency prefix and suffix (e.g., ["$", ""]). decimal : string The decimal point (e.g., "."). grouping : List(float) The array of group sizes (e.g., [3]), cycled as needed. thousands : string The group separator (e.g., ","). minus : string The minus sign (defaults to hyphen-minus, "-"). nan : string The not-a-number value (defaults to "NaN"). numerals : :class:`Vector10string` An array of ten strings to replace the numerals 0-9. percent : string The percent sign (defaults to "%").
62599078f548e778e596cf30
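As a small sketch of the constructor above, a German-style number locale could be built like this; the keyword names come straight from the signature shown, and attaching the object to a chart configuration is out of scope here:
de_locale = NumberLocale(
    decimal=",",               # comma as the decimal point
    thousands=".",             # dot as the group separator
    grouping=[3],              # digits grouped in threes
    currency=["", "\u20ac"],   # no prefix, euro sign as suffix
)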
class SurgeryHistory(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.SurgeryName = None <NEW_LINE> self.SurgeryDate = None <NEW_LINE> self.PreoperativePathology = None <NEW_LINE> self.IntraoperativePathology = None <NEW_LINE> self.PostoperativePathology = None <NEW_LINE> self.DischargeDiagnosis = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("SurgeryName") is not None: <NEW_LINE> <INDENT> self.SurgeryName = SurgeryAttr() <NEW_LINE> self.SurgeryName._deserialize(params.get("SurgeryName")) <NEW_LINE> <DEDENT> if params.get("SurgeryDate") is not None: <NEW_LINE> <INDENT> self.SurgeryDate = SurgeryAttr() <NEW_LINE> self.SurgeryDate._deserialize(params.get("SurgeryDate")) <NEW_LINE> <DEDENT> if params.get("PreoperativePathology") is not None: <NEW_LINE> <INDENT> self.PreoperativePathology = SurgeryAttr() <NEW_LINE> self.PreoperativePathology._deserialize(params.get("PreoperativePathology")) <NEW_LINE> <DEDENT> if params.get("IntraoperativePathology") is not None: <NEW_LINE> <INDENT> self.IntraoperativePathology = SurgeryAttr() <NEW_LINE> self.IntraoperativePathology._deserialize(params.get("IntraoperativePathology")) <NEW_LINE> <DEDENT> if params.get("PostoperativePathology") is not None: <NEW_LINE> <INDENT> self.PostoperativePathology = SurgeryAttr() <NEW_LINE> self.PostoperativePathology._deserialize(params.get("PostoperativePathology")) <NEW_LINE> <DEDENT> if params.get("DischargeDiagnosis") is not None: <NEW_LINE> <INDENT> self.DischargeDiagnosis = SurgeryAttr() <NEW_LINE> self.DischargeDiagnosis._deserialize(params.get("DischargeDiagnosis")) <NEW_LINE> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
Surgery history
625990785fdd1c0f98e5f91d
class HttxPassManager(HttxObject): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> HttxObject.__init__(self) <NEW_LINE> self.passmanager = HTTPPasswordMgrWithDefaultRealm() <NEW_LINE> <DEDENT> def __deepcopy__(self, memo): <NEW_LINE> <INDENT> clone = self.__class__() <NEW_LINE> with self.lock: <NEW_LINE> <INDENT> clone.passmanager = deepcopy(self.passmanager) <NEW_LINE> <DEDENT> return clone <NEW_LINE> <DEDENT> def add_password(self, realm, url, user, passwd): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> self.passmanager.add_password(realm, url, user, passwd) <NEW_LINE> <DEDENT> <DEDENT> def find_user_password(self, realm, url): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> return self.passmanager.find_user_password(realm, url)
An object that manages usernames and passwords for URLs and realms, with locking semantics, to be used in L{HttxOptions} @ivar passmanager: storage for username, password credentials @type passmanager: HTTPPasswordMgrWithDefaultRealm
62599078a8370b77170f1d6e
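A minimal usage sketch relying only on the two methods defined above; the URL and credentials are hypothetical, and `None` selects the default realm:
mgr = HttxPassManager()
mgr.add_password(None, 'http://example.com/api', 'alice', 's3cret')
print(mgr.find_user_password(None, 'http://example.com/api'))   # ('alice', 's3cret')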
class TestConfig(Config): <NEW_LINE> <INDENT> ENV = "test" <NEW_LINE> TESTING = True <NEW_LINE> DEBUG = True <NEW_LINE> SQLALCHEMY_DATABASE_URI = 'sqlite://' <NEW_LINE> BCRYPT_LOG_ROUNDS = 4 <NEW_LINE> WTF_CSRF_ENABLED = False
Test configuration
62599078283ffb24f3cf5248
class _PartialDict(dict): <NEW_LINE> <INDENT> def __missing__(self, key): <NEW_LINE> <INDENT> return _AsIsFormat(key)
Simple derived dict that returns an as-is formatter for missing keys. To partially format a string use, e.g.: `string.Formatter().vformat('{name} {job} {bye}',(),_PartialDict(name="me", job="you"))` which gives `me you {bye}`
625990784428ac0f6e659ecf
class text_record(resource_record): <NEW_LINE> <INDENT> def __init__(self, api, soap_entity, soap_client): <NEW_LINE> <INDENT> super(text_record, self).__init__(api, soap_entity, soap_client)
Instantiate Text TXT record. :param api: API instance used by the entity to communicate with BAM. :param soap_entity: the SOAP (suds) entity returned by the BAM API. :param soap_client: the suds client instance.
6259907855399d3f05627eb3
class MqCtx(context.changectx): <NEW_LINE> <INDENT> def __init__(self, repo, patch_name): <NEW_LINE> <INDENT> self.name = patch_name <NEW_LINE> self._rev = self.name <NEW_LINE> self._repo = repo <NEW_LINE> self._queue = self._repo.mq <NEW_LINE> self.path = self._queue.join(self.name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def applied(self): <NEW_LINE> <INDENT> return bool(self._queue.isapplied(self.name)) <NEW_LINE> <DEDENT> def __contains__(self, filename): <NEW_LINE> <INDENT> return filename in self.files() <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for filename in self.files(): <NEW_LINE> <INDENT> yield filename <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, filename): <NEW_LINE> <INDENT> return self.filectx(filename) <NEW_LINE> <DEDENT> def files(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def filectx(self, path, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError
Base class of mq patch context (changectx, filectx, etc.)
625990783d592f4c4edbc82d
class AnonymousSurvey(): <NEW_LINE> <INDENT> def __init__(self, question): <NEW_LINE> <INDENT> self.question = question <NEW_LINE> self.responses = [] <NEW_LINE> <DEDENT> def show_question(self): <NEW_LINE> <INDENT> print(self.question) <NEW_LINE> <DEDENT> def store_response(self, new_response): <NEW_LINE> <INDENT> self.responses.append(new_response) <NEW_LINE> <DEDENT> def show_results(self): <NEW_LINE> <INDENT> print('Survey results:') <NEW_LINE> for response in self.responses: <NEW_LINE> <INDENT> print('- ' + response)
Collect the answers to an anonymous survey
62599078cc0a2c111447c7a1
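A short usage sketch of the class above; the question and answers are made up:
survey = AnonymousSurvey("What language did you first learn to speak?")
survey.show_question()
for answer in ['English', 'Spanish', 'Mandarin']:
    survey.store_response(answer)
survey.show_results()
# Survey results:
# - English
# - Spanish
# - Mandarin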
class LZlswIndex(indexes.SearchIndex, indexes.Indexable): <NEW_LINE> <INDENT> text = indexes.CharField(document=True, use_template=True) <NEW_LINE> company_name = indexes.CharField(model_attr='company_name',null=True) <NEW_LINE> industry_involved = indexes.CharField(model_attr='industry_involved', null=True) <NEW_LINE> province = indexes.CharField(model_attr='province', null=True) <NEW_LINE> phone = indexes.CharField(model_attr='phone', null=True) <NEW_LINE> company_id = indexes.CharField(model_attr='company_id') <NEW_LINE> id = indexes.IntegerField(model_attr='id') <NEW_LINE> industriesid = indexes.CharField(model_attr='industriesid', null=True) <NEW_LINE> def get_model(self): <NEW_LINE> <INDENT> return LZlsw <NEW_LINE> <DEDENT> def index_queryset(self, using=None): <NEW_LINE> <INDENT> return self.get_model().objects.filter()
ANlmy index data model class
625990784527f215b58eb670
@skipIf(NO_MOCK, NO_MOCK_REASON) <NEW_LINE> class SensorTestCase(TestCase): <NEW_LINE> <INDENT> def test_sense(self): <NEW_LINE> <INDENT> with patch.dict(sensors.__salt__, {'cmd.run': MagicMock(return_value='A:a B:b C:c D:d')}): <NEW_LINE> <INDENT> self.assertDictEqual(sensors.sense('chip'), {'A': 'a B'})
Test cases for salt.modules.sensors
6259907876e4537e8c3f0f1f
class Level1Design(SPMCommand): <NEW_LINE> <INDENT> input_spec = Level1DesignInputSpec <NEW_LINE> output_spec = Level1DesignOutputSpec <NEW_LINE> _jobtype = 'stats' <NEW_LINE> _jobname = 'fmri_spec' <NEW_LINE> def _format_arg(self, opt, spec, val): <NEW_LINE> <INDENT> if opt in ['spm_mat_dir', 'mask_image']: <NEW_LINE> <INDENT> return np.array([str(val)], dtype=object) <NEW_LINE> <DEDENT> if opt in ['session_info']: <NEW_LINE> <INDENT> if isinstance(val, dict): <NEW_LINE> <INDENT> return [val] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return val <NEW_LINE> <DEDENT> <DEDENT> return val <NEW_LINE> <DEDENT> def _parse_inputs(self): <NEW_LINE> <INDENT> einputs = super(Level1Design, self)._parse_inputs(skip=('mask_threshold')) <NEW_LINE> for sessinfo in einputs[0]['sess']: <NEW_LINE> <INDENT> sessinfo['scans'] = scans_for_fnames(filename_to_list(sessinfo['scans']), keep4d=False) <NEW_LINE> <DEDENT> if not isdefined(self.inputs.spm_mat_dir): <NEW_LINE> <INDENT> einputs[0]['dir'] = np.array([str(os.getcwd())], dtype=object) <NEW_LINE> <DEDENT> return einputs <NEW_LINE> <DEDENT> def _make_matlab_command(self, content): <NEW_LINE> <INDENT> if isdefined(self.inputs.mask_image): <NEW_LINE> <INDENT> postscript = "load SPM;\n" <NEW_LINE> postscript += "SPM.xM.VM = spm_vol('%s');\n" % list_to_filename(self.inputs.mask_image) <NEW_LINE> postscript += "SPM.xM.I = 0;\n" <NEW_LINE> postscript += "SPM.xM.T = [];\n" <NEW_LINE> postscript += "SPM.xM.TH = ones(size(SPM.xM.TH))*(%s);\n" % self.inputs.mask_threshold <NEW_LINE> postscript += "SPM.xM.xs = struct('Masking', 'explicit masking only');\n" <NEW_LINE> postscript += "save SPM SPM;\n" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> postscript = None <NEW_LINE> <DEDENT> return super(Level1Design, self)._make_matlab_command(content, postscript=postscript) <NEW_LINE> <DEDENT> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = self._outputs().get() <NEW_LINE> spm = os.path.join(os.getcwd(), 'SPM.mat') <NEW_LINE> outputs['spm_mat_file'] = spm <NEW_LINE> return outputs
Generate an SPM design matrix http://www.fil.ion.ucl.ac.uk/spm/doc/manual.pdf#page=61 Examples -------- >>> level1design = Level1Design() >>> level1design.inputs.timing_units = 'secs' >>> level1design.inputs.interscan_interval = 2.5 >>> level1design.inputs.bases = {'hrf':{'derivs': [0,0]}} >>> level1design.inputs.session_info = 'session_info.npz' >>> level1design.run() # doctest: +SKIP
62599078aad79263cf430159
class ReadConfig: <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self.config = configparser.ConfigParser() <NEW_LINE> self.config['Basic'] = { 'CHANGE_DATA': True, 'CHANGE_ADDR': True, 'IP_ADDR': '223.90.40.4' } <NEW_LINE> self.config.read(filename, encoding='utf-8') <NEW_LINE> <DEDENT> def get_basic(self, param): <NEW_LINE> <INDENT> value = self.config.get('Basic', param) <NEW_LINE> return value <NEW_LINE> <DEDENT> def get_api(self, param): <NEW_LINE> <INDENT> value = self.config.get('API', param) <NEW_LINE> return value <NEW_LINE> <DEDENT> def get_user(self): <NEW_LINE> <INDENT> value = self.config.items('User') <NEW_LINE> return value
Class for reading the configuration file
62599078460517430c432d29
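A hedged usage sketch; the file name and the [API] and [User] contents are hypothetical, while the 'Basic' defaults come from __init__ above:
# settings.ini (hypothetical):
#   [API]
#   token = abc123
#   [User]
#   name = demo
cfg = ReadConfig('settings.ini')
print(cfg.get_basic('IP_ADDR'))   # '223.90.40.4' unless the file overrides the default
print(cfg.get_api('token'))       # 'abc123'
print(cfg.get_user())             # [('name', 'demo')]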
class RuleValueList(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self, in_str_list, usage_type, usage_contained_type): <NEW_LINE> <INDENT> self.in_str = in_str_list <NEW_LINE> self.usage_type = usage_type <NEW_LINE> self.usage_contained_type = usage_contained_type <NEW_LINE> return <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_list(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def is_set(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def resolve_and_validate(self, ruleset, rule, rule_part=GRUL_PART_CONDITION, required=False): <NEW_LINE> <INDENT> pass
Represent a list of values in a rule.
6259907871ff763f4b5e914c
class HTMLField(models.TextField): <NEW_LINE> <INDENT> def formfield(self, **kwargs): <NEW_LINE> <INDENT> defaults = { 'widget': HtmlInput } <NEW_LINE> defaults.update(kwargs) <NEW_LINE> defaults['widget'] = HtmlInput(attrs={'placeholder': self.verbose_name}) <NEW_LINE> return super(HTMLField, self).formfield(**defaults)
A large string field for HTML content. It uses the TinyMCE widget in forms.
625990787cff6e4e811b73e0
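A hedged sketch of declaring the field on a model; the Article model is hypothetical, and HtmlInput is assumed to be importable alongside HTMLField:
from django.db import models

class Article(models.Model):
    body = HTMLField(verbose_name='article body')

# In a ModelForm the `body` field renders with the HtmlInput widget and a
# placeholder equal to the verbose_name, as set in formfield() above.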