code (stringlengths 4 to 4.48k) | docstring (stringlengths 1 to 6.45k) | _id (stringlengths 24) |
---|---|---|
class Linkage(core.Node): <NEW_LINE> <INDENT> @property <NEW_LINE> def relation(self): <NEW_LINE> <INDENT> return _single_child_by_tag(self, EdgeTags.LinkRelation) <NEW_LINE> <DEDENT> @property <NEW_LINE> def arguments(self): <NEW_LINE> <INDENT> return _multiple_children_by_tag(self, EdgeTags.LinkArgument) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{}-->{}".format(str(self.relation.ID), ','.join(x.ID for x in self.arguments)) | A Linkage between parallel scenes.
A Linkage object represents a connection between two parallel scenes.
The semantic type of the link is not determined in this object, but the
:class:`FoundationalNode` of linkage is referred as the link relation,
and the linked scenes are referred to as the arguments.
Most cases will have two arguments, but some constructions have 1 or 3+
arguments, depending on the semantic connection.
Attributes:
relation: FoundationalNode of the relation words.
arguments: list of FoundationalNodes of the relation participants. | 62599066097d151d1a2c27de |
class CondorJob(Job): <NEW_LINE> <INDENT> def __init__(self, basedir): <NEW_LINE> <INDENT> super(self.__class__, self).__init__(basedir) <NEW_LINE> <DEDENT> def prepareDescription(self): <NEW_LINE> <INDENT> transfer_files = [item for sublist in [t.transfer_files for t in self.tasks] for item in sublist] <NEW_LINE> OpSys = "WINDOWS" if platform.system() == "Windows" else "LINUX" <NEW_LINE> fslFilename = '' <NEW_LINE> for f in transfer_files: <NEW_LINE> <INDENT> b, ext = os.path.splitext(f) <NEW_LINE> if ext == '.fsl': <NEW_LINE> <INDENT> fslFilename = f <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> numRuns = len(self.tasks) <NEW_LINE> submit = [ 'InitialDir = {}/{}$(Process)'.format( self.basedir, self.runDirPrefix), 'Universe = vanilla', 'Executable = {}'.format(cfg.get_executable()), 'Arguments = -b {}'.format(fslFilename), 'Output = femag.out', 'Log = femag.log', 'Error = femag.err', 'Notification = never', 'input = /dev/null', 'transfer_input_files = {}'.format(','.join(set(transfer_files))), 'requirements = (OpSys=="{}") && Arch=="x86_64"'.format(OpSys), 'Queue {}'.format(numRuns), ''] <NEW_LINE> filename = os.path.join(self.basedir, "femag.submit") <NEW_LINE> with open(os.path.join(self.basedir, "femag.submit"), 'w') as submitFile: <NEW_LINE> <INDENT> submitFile.writelines('\n'.join(submit)) <NEW_LINE> <DEDENT> return filename | represents a femag job that is to be run in HT Condor | 625990669c8ee82313040d41 |
class StartRoundMessage(LeapP2PMessage): <NEW_LINE> <INDENT> def __init__(self, isSpeaker, image): <NEW_LINE> <INDENT> self.data = RoundData(isSpeaker, image) <NEW_LINE> self.instruction = constants.START_ROUND | Contains whether or not the receiver is a speaker, and also
the topic image. | 62599066e5267d203ee6cf77 |
class DataBase: <NEW_LINE> <INDENT> def write_data(self, geo_data): <NEW_LINE> <INDENT> with open(dummy_file, 'a') as f: <NEW_LINE> <INDENT> f.write(geo_data) | Dummy object for persistence simulation | 62599066cc0a2c111447c689 |
class LabeledItems: <NEW_LINE> <INDENT> def __init__(self, items: Items, valid_labels: Dict[str, int]) -> None: <NEW_LINE> <INDENT> _check_constructor_arguments(items, valid_labels) <NEW_LINE> self.items = items <NEW_LINE> self.valid_labels = tuple(valid_labels) <NEW_LINE> self.num_classes = len(valid_labels) <NEW_LINE> self.labels_to_numbers = {label: i for i, label in enumerate(self.valid_labels)} <NEW_LINE> self.numbers_to_labels = {v: k for k, v in self.labels_to_numbers.items()} <NEW_LINE> <DEDENT> def labels(self, predictions: List[float]) -> Dict[str, float]: <NEW_LINE> <INDENT> return { self.numbers_to_labels[index]: probability for index, probability in enumerate(predictions) if probability > 0 } <NEW_LINE> <DEDENT> def labels_sorted_by_probability(self, predictions: List[float]) -> Dict[str, float]: <NEW_LINE> <INDENT> pairs = sorted( [(label, probability) for label, probability in self.labels(predictions).items()], key=itemgetter(1), reverse=True ) <NEW_LINE> return OrderedDict(pairs) <NEW_LINE> <DEDENT> def _dense_to_one_hot(self, labels: Set[str]) -> numpy.ndarray: <NEW_LINE> <INDENT> raise NotImplementedError() | Common properties for data used as input for neural network. These include items and labels for these
items. | 625990660c0af96317c57918 |
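A minimal usage sketch for the `LabeledItems` row above. It assumes the unshown `_check_constructor_arguments` helper accepts these inputs and that `Items` can be a plain list; the label names, counts, and probabilities are fabricated for illustration.

```python
# Hypothetical inputs; LabeledItems comes from the row above.
items = ["doc-1", "doc-2"]                      # assumed to satisfy the Items type
valid_labels = {"sports": 120, "politics": 80}  # label -> count (interpretation assumed)

labeled = LabeledItems(items, valid_labels)

# Map raw network outputs back to label names, dropping zero probabilities.
print(labeled.labels([0.7, 0.0]))                        # {'sports': 0.7}
print(labeled.labels_sorted_by_probability([0.2, 0.8]))  # OrderedDict, 'politics' first
```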
class LoginWidget(QWidget): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.setWindowTitle("Login") <NEW_LINE> self.login_label = QLabel("Please enter the Password to access:") <NEW_LINE> self.password_entry = QLineEdit() <NEW_LINE> self.password_entry.setEchoMode(QLineEdit.Password) <NEW_LINE> self.login_button = QPushButton("Login") <NEW_LINE> self.quit_button = QPushButton("Quit") <NEW_LINE> self.buttons_widget = QWidget() <NEW_LINE> self.login_layout = QVBoxLayout() <NEW_LINE> self.login_buttons_layout = QHBoxLayout() <NEW_LINE> self.login_buttons_layout.addWidget(self.quit_button) <NEW_LINE> self.login_buttons_layout.addWidget(self.login_button) <NEW_LINE> self.buttons_widget.setLayout(self.login_buttons_layout) <NEW_LINE> self.login_layout.addWidget(self.login_label) <NEW_LINE> self.login_layout.addWidget(self.password_entry) <NEW_LINE> self.login_layout.addWidget(self.buttons_widget) | Error displayed when login password is incorrect | 625990663d592f4c4edbc651 |
class TestOptionData(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return OptionData( key = '0', name = '0', value = home_connect_sdk.models.value.value(), unit = '0' ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return OptionData( key = '0', value = home_connect_sdk.models.value.value(), ) <NEW_LINE> <DEDENT> <DEDENT> def testOptionData(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True) | OptionData unit test stubs | 625990663539df3088ecda12 |
class Vault: <NEW_LINE> <INDENT> VAULT_ENGINES = { 'kv_v2': VaultKV2, } <NEW_LINE> def __init__(self, name, client): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self._client = client <NEW_LINE> self._mounted = None <NEW_LINE> self._engines = {} <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[attr] <NEW_LINE> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> raise AttributeError(e) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, engine_name): <NEW_LINE> <INDENT> if self._mounted is None: <NEW_LINE> <INDENT> log.info( 'Loading mounted secrets engines for Vault configuration ' f'{self._name!r} ...' ) <NEW_LINE> self._mounted = self._client.sys.list_mounted_secrets_engines()['data'] <NEW_LINE> <DEDENT> if engine_name not in self._engines: <NEW_LINE> <INDENT> path = f'{engine_name}/' <NEW_LINE> if path not in self._mounted: <NEW_LINE> <INDENT> raise RuntimeError( f'Unknown mounted secret engine {engine_name!r}' ) <NEW_LINE> <DEDENT> mountconf = self._mounted[path] <NEW_LINE> type_ = mountconf['type'] <NEW_LINE> if type_ == 'kv': <NEW_LINE> <INDENT> version = mountconf['options']['version'] <NEW_LINE> type_ = f'{type_}_v{version}' <NEW_LINE> <DEDENT> if type_ not in self.VAULT_ENGINES: <NEW_LINE> <INDENT> supported = ', '.join(sorted(self.VAULT_ENGINES.keys())) <NEW_LINE> raise RuntimeError( f'Mount {engine_name!r} is an unsupported engine ' f'type {type_!r}. Supported engines are: {supported}.' ) <NEW_LINE> <DEDENT> engine_class = self.VAULT_ENGINES[type_] <NEW_LINE> self._engines[engine_name] = engine_class( self._client, engine_name, ) <NEW_LINE> <DEDENT> return self._engines[engine_name] | High level Vault accessor holding and client instance.
:param str name: Name of the Vault configuration.
:param client: Client connected to a Vault instance. | 6259906671ff763f4b5e8f18 |
class CreateSecurityGroupsRuleRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(CreateSecurityGroupsRuleRequest, self).__init__( '/regions/{regionId}/vpc_securityGroups/{id}/rule', 'POST', header, version) <NEW_LINE> self.parameters = parameters | Create a new security group rule | 625990664e4d562566373b7b |
class TestImportFile(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testImportFile(self): <NEW_LINE> <INDENT> pass | ImportFile unit test stubs | 62599066cb5e8a47e493cd3e |
class OptimizerNetwork: <NEW_LINE> <INDENT> def __init__(self, circuit_parameters: CircuitParameters, spike_times: Dict[int, np.ndarray]): <NEW_LINE> <INDENT> self._circuit_parameters = circuit_parameters <NEW_LINE> self._subnets = [] <NEW_LINE> for neuron in self._circuit_parameters.training_nodes(): <NEW_LINE> <INDENT> subnet = SubNetwork( self._circuit_parameters.network, neuron, self._circuit_parameters.neuron_parameters[neuron], self._circuit_parameters.synaptic_parameters, self._circuit_parameters.noise_parameters, self._circuit_parameters.neuron_model, self._circuit_parameters.global_parameters['weight_scale'] ) <NEW_LINE> subnet.set_inputs([spike_times[node] for node in subnet.presynaptic_nodes]) <NEW_LINE> self._subnets.append(subnet) <NEW_LINE> <DEDENT> <DEDENT> def run(self, duration: float): <NEW_LINE> <INDENT> nest.Simulate(duration) <NEW_LINE> <DEDENT> def get_spike_trains(self) -> Dict[int, np.ndarray]: <NEW_LINE> <INDENT> return {subnet.neuron_id: subnet.get_spike_output() for subnet in self._subnets} | The OptimizerNetwork builds a series of subnetworks that can be executed
and run with the NEST kernel. Each subnetwork represents a target neuron
with inputs provided by its pre-synaptic neurons. Only the target neuron
is simulated. | 6259906644b2445a339b751a |
class Alarm(Base): <NEW_LINE> <INDENT> __tablename__ = 'alarm' <NEW_LINE> __table_args__ = ( Index('ix_alarm_user_id', 'user_id'), Index('ix_alarm_project_id', 'project_id'), ) <NEW_LINE> id = Column(String(255), primary_key=True) <NEW_LINE> enabled = Column(Boolean) <NEW_LINE> name = Column(Text) <NEW_LINE> type = Column(String(50)) <NEW_LINE> description = Column(Text) <NEW_LINE> timestamp = Column(DateTime, default=timeutils.utcnow) <NEW_LINE> user_id = Column(String(255), ForeignKey('user.id')) <NEW_LINE> project_id = Column(String(255), ForeignKey('project.id')) <NEW_LINE> state = Column(String(255)) <NEW_LINE> state_timestamp = Column(DateTime, default=timeutils.utcnow) <NEW_LINE> ok_actions = Column(JSONEncodedDict) <NEW_LINE> alarm_actions = Column(JSONEncodedDict) <NEW_LINE> insufficient_data_actions = Column(JSONEncodedDict) <NEW_LINE> repeat_actions = Column(Boolean) <NEW_LINE> rule = Column(JSONEncodedDict) | Define Alarm data. | 62599066462c4b4f79dbd17b |
class EncodeError(ValueError): <NEW_LINE> <INDENT> def __init__( self, msg: str, doc_dec: DecodedDict, doc_enc: EncodedDict, field: str ): <NEW_LINE> <INDENT> errmsg = f"{msg}: field {field}" <NEW_LINE> ValueError.__init__(self, errmsg) <NEW_LINE> self.msg = msg <NEW_LINE> self.doc_dec = doc_dec <NEW_LINE> self.doc_enc = doc_enc <NEW_LINE> self.field = field <NEW_LINE> <DEDENT> def __reduce__( self, ) -> Tuple[Type["EncodeError"], Tuple[str, DecodedDict, EncodedDict, str]]: <NEW_LINE> <INDENT> return self.__class__, (self.msg, self.doc_dec, self.doc_enc, self.field) | Subclass of ValueError that describes ISO8583 encoding error.
Attributes
----------
msg : str
The unformatted error message
doc_dec : dict
Dict containing decoded ISO8583 data being encoded
doc_enc : dict
Dict containing partially encoded ISO8583 data
field : str
The ISO8583 field where parsing failed | 625990668e7ae83300eea803 |
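A short sketch of how an encoder might raise and report the `EncodeError` above. The field number and dictionaries are illustrative only, assuming `DecodedDict` and `EncodedDict` are plain dict aliases.

```python
# Hypothetical ISO8583 documents; only their shapes matter for the example.
doc_dec = {"t": "0200", "2": "4000001234567899"}
doc_enc = {"t": {"len": b"", "data": b"0200"}}

try:
    # An encoder would raise this when it cannot encode a specific field.
    raise EncodeError("Failed to encode field", doc_dec, doc_enc, field="2")
except EncodeError as err:
    print(err)          # Failed to encode field: field 2
    print(err.doc_enc)  # partially encoded data, useful for debugging
```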
@tag(0xC7) <NEW_LINE> class DataContentDescriptor(Descriptor): <NEW_LINE> <INDENT> descriptor_tag = uimsbf(8) <NEW_LINE> descriptor_length = uimsbf(8) <NEW_LINE> data_component_id = uimsbf(16) <NEW_LINE> entry_component = uimsbf(8) <NEW_LINE> selector_length = uimsbf(8) <NEW_LINE> @case(lambda self: self.data_component_id == 0x08) <NEW_LINE> class arib_caption_info(Syntax): <NEW_LINE> <INDENT> num_languages = uimsbf(8) <NEW_LINE> @times(num_languages) <NEW_LINE> class languages(Syntax): <NEW_LINE> <INDENT> language_tag = bslbf(3) <NEW_LINE> reserved = bslbf(1) <NEW_LINE> DMF = bslbf(4) <NEW_LINE> ISO_639_language_code = char(24) <NEW_LINE> <DEDENT> <DEDENT> @case(lambda self: self.data_component_id != 0x08) <NEW_LINE> class other(Syntax): <NEW_LINE> <INDENT> selector_byte = uimsbf(lambda self: self.selector_length) <NEW_LINE> <DEDENT> num_of_component_ref = uimsbf(8) <NEW_LINE> @times(num_of_component_ref) <NEW_LINE> class component_refs(Syntax): <NEW_LINE> <INDENT> component_ref = uimsbf(8) <NEW_LINE> <DEDENT> ISO_639_language_code = char(24) <NEW_LINE> text_length = uimsbf(8) <NEW_LINE> data_text = aribstr(text_length) | Data content descriptor (ARIB-STD-B10-2-6.2.28)
Entries whose data_component_id is 0x0008 carry, in selector_byte,
identification information for captions and superimposed text (ARIB-STD-B24-1-3-9.6.2) | 6259906632920d7e50bc77ba |
class AzureFirewall(Resource): <NEW_LINE> <INDENT> _validation = { 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'etag': {'key': 'etag', 'type': 'str'}, 'application_rule_collections': {'key': 'properties.applicationRuleCollections', 'type': '[AzureFirewallApplicationRuleCollection]'}, 'network_rule_collections': {'key': 'properties.networkRuleCollections', 'type': '[AzureFirewallNetworkRuleCollection]'}, 'ip_configurations': {'key': 'properties.ipConfigurations', 'type': '[AzureFirewallIPConfiguration]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(AzureFirewall, self).__init__(**kwargs) <NEW_LINE> self.etag = None <NEW_LINE> self.application_rule_collections = kwargs.get('application_rule_collections', None) <NEW_LINE> self.network_rule_collections = kwargs.get('network_rule_collections', None) <NEW_LINE> self.ip_configurations = kwargs.get('ip_configurations', None) <NEW_LINE> self.provisioning_state = kwargs.get('provisioning_state', None) | Azure Firewall resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: Gets a unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param application_rule_collections: Collection of application rule collections used by a Azure
Firewall.
:type application_rule_collections:
list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallApplicationRuleCollection]
:param network_rule_collections: Collection of network rule collections used by a Azure
Firewall.
:type network_rule_collections:
list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallNetworkRuleCollection]
:param ip_configurations: IP configuration of the Azure Firewall resource.
:type ip_configurations:
list[~azure.mgmt.network.v2018_07_01.models.AzureFirewallIPConfiguration]
:param provisioning_state: The provisioning state of the resource. Possible values include:
"Succeeded", "Updating", "Deleting", "Failed".
:type provisioning_state: str or ~azure.mgmt.network.v2018_07_01.models.ProvisioningState | 62599066cc0a2c111447c68a |
class Default(object): <NEW_LINE> <INDENT> pass | GeoJSON default. | 6259906676e4537e8c3f0cf7 |
class Layer2(object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if "layer1" in kwargs: <NEW_LINE> <INDENT> self.layer1 = kwargs["layer1"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.layer1 = Layer1(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def create_vault(self, name): <NEW_LINE> <INDENT> self.layer1.create_vault(name) <NEW_LINE> return self.get_vault(name) <NEW_LINE> <DEDENT> def delete_vault(self, name): <NEW_LINE> <INDENT> return self.layer1.delete_vault(name) <NEW_LINE> <DEDENT> def get_vault(self, name): <NEW_LINE> <INDENT> response_data = self.layer1.describe_vault(name) <NEW_LINE> return vault.Vault(self.layer1, response_data) <NEW_LINE> <DEDENT> def list_vaults(self): <NEW_LINE> <INDENT> response_data = self.layer1.list_vaults() <NEW_LINE> return [vault.Vault(self.layer1, rd) for rd in response_data['VaultList']] | Provides a more pythonic and friendly interface to Glacier based on Layer1 | 625990667c178a314d78e7a6 |
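A hedged usage sketch for the Glacier `Layer2` wrapper above, assuming the underlying `Layer1` constructor accepts standard AWS credential keyword arguments (not shown in the row); the credentials and vault name are placeholders.

```python
# Placeholder credentials; Layer2 forwards them to Layer1 (assumption).
layer2 = Layer2(aws_access_key_id="AKIA...", aws_secret_access_key="...")

vault = layer2.create_vault("my-backups")    # creates, then describes the vault
same_vault = layer2.get_vault("my-backups")  # fetches an existing vault
all_vaults = layer2.list_vaults()            # list of vault.Vault objects

layer2.delete_vault("my-backups")
```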
class EvolutionAlgorithm(metaclass=ABCMeta): <NEW_LINE> <INDENT> def __init__(self, problem: Problem, population_size: int = 100, max_generations: int = 1000, callbacks: Sequence[Callback] = None, plot_progress: bool = False, verbosity: int = 1, **_): <NEW_LINE> <INDENT> self.best: Individual = None <NEW_LINE> self.current_best: Individual = None <NEW_LINE> self.stop_evolving = False <NEW_LINE> self.generation = 0 <NEW_LINE> self.problem = problem <NEW_LINE> self.max_generations = max_generations <NEW_LINE> self.population_size = population_size <NEW_LINE> if self.population_size == 0: <NEW_LINE> <INDENT> raise ValueError("Initial population is empty") <NEW_LINE> <DEDENT> self._population = None <NEW_LINE> self.population = [self.problem.create_individual() for _ in range(self.population_size)] <NEW_LINE> self.best_scores = [] <NEW_LINE> self.current_best_scores = [] <NEW_LINE> self.callbacks = callbacks or [] <NEW_LINE> for callback in self.callbacks: <NEW_LINE> <INDENT> callback.subscribe(self) <NEW_LINE> <DEDENT> self.plot_progress = plot_progress <NEW_LINE> self.verbosity = verbosity <NEW_LINE> <DEDENT> @property <NEW_LINE> def population(self) -> List[Individual]: <NEW_LINE> <INDENT> return self._population <NEW_LINE> <DEDENT> @population.setter <NEW_LINE> def population(self, population: List[Individual]) -> None: <NEW_LINE> <INDENT> self._population = population <NEW_LINE> for individual in self.population: <NEW_LINE> <INDENT> self.problem.score_individual(individual) <NEW_LINE> <DEDENT> self.current_best = min(self.population) <NEW_LINE> if self.best is None or self.current_best < self.best: <NEW_LINE> <INDENT> self.best = self.current_best <NEW_LINE> <DEDENT> <DEDENT> def evolve(self) -> None: <NEW_LINE> <INDENT> for callback in self.callbacks: <NEW_LINE> <INDENT> callback.on_train_start(self.verbosity) <NEW_LINE> <DEDENT> if self.verbosity > 0: <NEW_LINE> <INDENT> print(f"{type(self).__name__} starts solving problem {self.problem}") <NEW_LINE> if self.problem.best_known is not None: <NEW_LINE> <INDENT> print(f"Best known solution so far is {self.problem.best_known.score}") <NEW_LINE> <DEDENT> <DEDENT> self.stop_evolving = False <NEW_LINE> if self.plot_progress: <NEW_LINE> <INDENT> data_queue, plot_process = self.problem.get_plot_process(self.max_generations) <NEW_LINE> try: <NEW_LINE> <INDENT> plot_process.start() <NEW_LINE> time.sleep(2) <NEW_LINE> while not self.stop_evolving: <NEW_LINE> <INDENT> self.evolve_once() <NEW_LINE> data_queue.put((self.current_best.score, self.best)) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> for callback in self.callbacks: <NEW_LINE> <INDENT> callback.on_train_end() <NEW_LINE> <DEDENT> plot_process.join() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> while not self.stop_evolving: <NEW_LINE> <INDENT> self.evolve_once() <NEW_LINE> <DEDENT> for callback in self.callbacks: <NEW_LINE> <INDENT> callback.on_train_end(self.verbosity) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @abstractmethod <NEW_LINE> def evolve_once(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_args(cls, **kwargs): <NEW_LINE> <INDENT> cls.from_args(**kwargs) | Base class for evolutionary algorithms.
:param problem: Problem that can create and score individuals
:param population_size: The 'mu' parameter. Number of parents for each generation
:param max_generations: Stops after that many generations
:param callbacks: Optional callbacks
:param plot_progress: Whether to plot the progress while running. This has only been
tested with the matplotlib backend "TkAgg" and is not guaranteed to work with others.
:param verbosity: Controls verbosity of output. | 62599066f548e778e596ccff |
class Student(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=255, unique=True) <NEW_LINE> marks = models.IntegerField() <NEW_LINE> school = models.ForeignKey(School,null=True) <NEW_LINE> uuid = models.CharField(max_length=255, null=True) <NEW_LINE> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.uuid = uuid.uuid4() <NEW_LINE> super().save(*args, **kwargs) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name.capitalize() | Student Model | 62599066be8e80087fbc07fe |
@tools.register(config.DebugCommands.EXIT, Help.commands) <NEW_LINE> class Exit(Quit): <NEW_LINE> <INDENT> inp = config.DebugCommands.EXIT | Quits the game back to the main screen. | 6259906699cbb53fe6832659 |
class TemporaryDirectory: <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.name = None <NEW_LINE> self.name = tempfile.mkdtemp(**kwargs) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __exit__(self, exctype, excvalue, exctrace): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> shutil.rmtree(self.name, True) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.name = None | Very simple temporary directory context manager.
Will try to delete afterward, but will also ignore OS and similar
errors on deletion. | 625990668a43f66fc4bf3906 |
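A small usage example for the `TemporaryDirectory` context manager above; the prefix and file name written inside the directory are arbitrary.

```python
import os

# The directory exists inside the with-block and is removed (best effort) on exit.
with TemporaryDirectory(prefix="demo-") as tmpdir:
    path = os.path.join(tmpdir, "scratch.txt")
    with open(path, "w") as fh:
        fh.write("temporary data")
    print(os.path.exists(path))  # True

print(os.path.exists(path))      # False after the block, unless removal failed
```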
class OptionAttributeTests(CityDataTest): <NEW_LINE> <INDENT> def testSetForAllColumns(self): <NEW_LINE> <INDENT> self.x.field_names = sorted(self.x.field_names) <NEW_LINE> self.x.align = "l" <NEW_LINE> self.x.max_width = 10 <NEW_LINE> self.x.start = 2 <NEW_LINE> self.x.end = 4 <NEW_LINE> self.x.sortby = "Area" <NEW_LINE> self.x.reversesort = True <NEW_LINE> self.x.header = True <NEW_LINE> self.x.border = False <NEW_LINE> self.x.hrule = True <NEW_LINE> self.x.int_format = "4" <NEW_LINE> self.x.float_format = "2.2" <NEW_LINE> self.x.padding_width = 2 <NEW_LINE> self.x.left_padding_width = 2 <NEW_LINE> self.x.right_padding_width = 2 <NEW_LINE> self.x.vertical_char = "!" <NEW_LINE> self.x.horizontal_char = "~" <NEW_LINE> self.x.junction_char = "*" <NEW_LINE> self.x.format = True <NEW_LINE> self.x.attributes = {"class" : "prettytable"} <NEW_LINE> assert self.x.get_string() == self.x[:].get_string() <NEW_LINE> <DEDENT> def testSetForOneColumn(self): <NEW_LINE> <INDENT> self.x.align["Rainfall"] = "l" <NEW_LINE> self.x.max_width["Name"] = 10 <NEW_LINE> self.x.int_format["Population"] = "4" <NEW_LINE> self.x.float_format["Area"] = "2.2" <NEW_LINE> assert self.x.get_string() == self.x[:].get_string() | Make sure all options which have an attribute interface work as they should.
Also make sure option settings are copied correctly when a table is cloned by
slicing. | 625990667047854f46340b29 |
class Pipeline(object): <NEW_LINE> <INDENT> def __init__(self, steps): <NEW_LINE> <INDENT> self._validate_steps(steps) <NEW_LINE> self.steps = steps <NEW_LINE> self.n_steps = len(steps) <NEW_LINE> <DEDENT> def _validate_steps(self, steps): <NEW_LINE> <INDENT> assert isinstance(steps, list) <NEW_LINE> assert len(steps) > 0 <NEW_LINE> for step in steps: <NEW_LINE> <INDENT> assert isinstance(step, Step) <NEW_LINE> <DEDENT> <DEDENT> def fit(self, **inputs): <NEW_LINE> <INDENT> for step in self.steps[:-1]: <NEW_LINE> <INDENT> inputs = step.fit_forward(**inputs) <NEW_LINE> <DEDENT> self.steps[-1].fit(**inputs) <NEW_LINE> return self <NEW_LINE> <DEDENT> def forward(self, **inputs): <NEW_LINE> <INDENT> for step in self.steps: <NEW_LINE> <INDENT> inputs = step.forward(**inputs) <NEW_LINE> <DEDENT> return inputs | This is similar to sklearn.Pipeline but much more minimalistic and flexible. | 6259906671ff763f4b5e8f1a |
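A minimal sketch of running the `Pipeline` above, assuming a `Step` base class (not shown in the row) that the pipeline validates against and that exposes `fit`, `fit_forward`, and `forward`; the `AddOne` step is hypothetical.

```python
class AddOne(Step):  # hypothetical Step subclass
    def fit(self, **inputs):
        pass  # nothing to learn in this toy step

    def fit_forward(self, **inputs):
        self.fit(**inputs)
        return self.forward(**inputs)

    def forward(self, **inputs):
        return {"x": inputs["x"] + 1}

pipeline = Pipeline([AddOne(), AddOne()])
pipeline.fit(x=1)                # fit_forward on all but the last step, fit on the last
print(pipeline.forward(x=1))     # {'x': 3}
```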
class IAMServerCertificate: <NEW_LINE> <INDENT> def __init__(self, metadata: Dict[str, Union[str, datetime]], certificate_body: str, certificate_chain: str): <NEW_LINE> <INDENT> self.metadata = metadata <NEW_LINE> self.certificate_body = certificate_body <NEW_LINE> self.certificate_chain = certificate_chain <NEW_LINE> self.name = metadata['ServerCertificateName'] <NEW_LINE> self.arn = metadata['Arn'] <NEW_LINE> self.expiration = metadata['Expiration'] <NEW_LINE> self.path = metadata['Path'] <NEW_LINE> self.certificate_id = metadata['ServerCertificateId'] <NEW_LINE> self.upload_date = metadata['UploadDate'] <NEW_LINE> <DEDENT> def __lt__(self, other: "IAMServerCertificate"): <NEW_LINE> <INDENT> return self.upload_date < other.upload_date <NEW_LINE> <DEDENT> def __eq__(self, other: "IAMServerCertificate"): <NEW_LINE> <INDENT> return self.arn == other.arn <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<IAMServerCertificate: {name}>".format_map(vars(self)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_boto_dict(cls, server_certificate: Dict[str, Any]) -> "IAMServerCertificate": <NEW_LINE> <INDENT> metadata = server_certificate['ServerCertificateMetadata'] <NEW_LINE> certificate_body = server_certificate['CertificateBody'] <NEW_LINE> certificate_chain = server_certificate['CertificateChain'] <NEW_LINE> return cls(metadata, certificate_body, certificate_chain) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_boto_server_certificate(cls, server_certificate) -> "IAMServerCertificate": <NEW_LINE> <INDENT> metadata = server_certificate.server_certificate_metadata <NEW_LINE> certificate_body = server_certificate.certificate_body <NEW_LINE> certificate_chain = server_certificate.certificate_chain <NEW_LINE> return cls(metadata, certificate_body, certificate_chain) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_by_name(cls, region: str, name: str) -> "IAMServerCertificate": <NEW_LINE> <INDENT> client = BotoClientProxy('iam', region) <NEW_LINE> iam = IAM(region) <NEW_LINE> try: <NEW_LINE> <INDENT> response = client.get_server_certificate(ServerCertificateName=name) <NEW_LINE> server_certificate = response['ServerCertificate'] <NEW_LINE> certificate = cls.from_boto_dict(server_certificate) <NEW_LINE> <DEDENT> except ClientError as error: <NEW_LINE> <INDENT> certificates = sorted(iam.get_certificates(name=name), reverse=True) <NEW_LINE> try: <NEW_LINE> <INDENT> certificate = certificates[0] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise error <NEW_LINE> <DEDENT> <DEDENT> return certificate <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def arn_is_server_certificate(arn: Optional[str]=None): <NEW_LINE> <INDENT> if arn is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (arn.startswith("arn:aws:iam:") and 'server-certificate' in arn) <NEW_LINE> <DEDENT> <DEDENT> def is_valid(self, when: Optional[datetime]=None) -> bool: <NEW_LINE> <INDENT> when = when if when is not None else datetime.now(timezone.utc) <NEW_LINE> return when < self.expiration | Server certificate stored in IAM.
See:
http://boto3.readthedocs.io/en/latest/reference/services/iam.html#IAM.Client.get_server_certificate | 625990665166f23b2e244b46 |
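A sketch of building an `IAMServerCertificate` from a boto-style dictionary and checking expiry; every value below is a fabricated placeholder.

```python
from datetime import datetime, timezone

server_certificate = {
    "ServerCertificateMetadata": {
        "ServerCertificateName": "example-cert",  # placeholder values throughout
        "Arn": "arn:aws:iam::123456789012:server-certificate/example-cert",
        "Expiration": datetime(2030, 1, 1, tzinfo=timezone.utc),
        "Path": "/",
        "ServerCertificateId": "ASCAEXAMPLEID",
        "UploadDate": datetime(2020, 1, 1, tzinfo=timezone.utc),
    },
    "CertificateBody": "-----BEGIN CERTIFICATE-----...",
    "CertificateChain": "-----BEGIN CERTIFICATE-----...",
}

cert = IAMServerCertificate.from_boto_dict(server_certificate)
print(cert)                    # <IAMServerCertificate: example-cert>
print(cert.is_valid())         # True until the Expiration date passes
print(IAMServerCertificate.arn_is_server_certificate(cert.arn))  # True
```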
class SecondaryInternalDNSServer(DNSServerBase): <NEW_LINE> <INDENT> def _setup(self): <NEW_LINE> <INDENT> self._xpaths.add_profile(value="/dns-servers/secondary") <NEW_LINE> self.add_dns_params(False) | A Secondary Internal DNS Server for remote networks
Args:
dns_server(str): IP of DNS Server
use_cloud_default(bool): Use cloud default DNS | 6259906616aa5153ce401c4f |
class StarElmView(LoginRequiredMixin, View): <NEW_LINE> <INDENT> template_name = 'happiness_star/elm-star.html' <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> user = self.request.user <NEW_LINE> token = utils.user_jwt(user) <NEW_LINE> return render( request, self.template_name, {'token': token}, status=200) | Display the Elm + GQL interface | 62599066627d3e7fe0e08600 |
class ExecutionFailed(Exception): <NEW_LINE> <INDENT> pass | Execution failed. | 62599066f548e778e596cd00 |
class EEAFixture(PloneSandboxLayer): <NEW_LINE> <INDENT> defaultBases = (PLONE_FIXTURE,) <NEW_LINE> def setUpZope(self, app, configurationContext): <NEW_LINE> <INDENT> import eea.controlpanel <NEW_LINE> self.loadZCML(package=eea.controlpanel) <NEW_LINE> z2.installProduct(app, 'eea.controlpanel') <NEW_LINE> <DEDENT> def tearDownZope(self, app): <NEW_LINE> <INDENT> z2.uninstallProduct(app, 'eea.controlpanel') <NEW_LINE> <DEDENT> def setUpPloneSite(self, portal): <NEW_LINE> <INDENT> self.applyProfile(portal, 'eea.controlpanel:default') <NEW_LINE> setRoles(portal, TEST_USER_ID, ['Manager']) <NEW_LINE> portal.invokeFactory("Folder", "sandbox", title="Sandbox") | EEA Testing Policy
| 625990668e7ae83300eea804 |
class Movie(): <NEW_LINE> <INDENT> def __init__(self, movie_title, movie_storyline, movie_release_date, movie_rated, poster_image, trailer_youtube): <NEW_LINE> <INDENT> self.title = movie_title <NEW_LINE> self.storyline = movie_storyline <NEW_LINE> self.release_date = movie_release_date <NEW_LINE> self.rated = movie_rated <NEW_LINE> self.poster_image_url = poster_image <NEW_LINE> self.trailer_youtube_url = trailer_youtube | A movie class that stores its description and details
Attributes:
title: A string contains movie title name
storyline: A string describing the main story line of the movie
release_date: A string indicating the movie's release date
rated: A string indicating the movie's rating, e.g. G, PG, PG-13, etc.
poster_image_url: A link to the movie's poster image
youtube_url: A link to the movie's trailer | 625990668da39b475be04960 |
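A simple instantiation example for the `Movie` class above; the movie details and URLs are placeholders.

```python
toy_story = Movie(
    movie_title="Toy Story",
    movie_storyline="A story of a boy and his toys that come to life",
    movie_release_date="1995-11-22",
    movie_rated="G",
    poster_image="https://example.com/toy_story_poster.jpg",    # placeholder URL
    trailer_youtube="https://www.youtube.com/watch?v=example",  # placeholder URL
)

print(toy_story.title, toy_story.rated)  # Toy Story G
```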
class TestTransactionsFind(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.card_product = CardProducts().create() <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = get_client() <NEW_LINE> self.fake = Faker() <NEW_LINE> self.sim = Simulate() <NEW_LINE> <DEDENT> def verify_transaction_model(self, response, verify): <NEW_LINE> <INDENT> actual = response.__class__.__name__ <NEW_LINE> expected = 'TransactionModel' <NEW_LINE> self.assertEqual(actual, expected, 'Unexpected response found') <NEW_LINE> expected_attributes = [ 'type', 'state', 'token', 'user_token', 'acting_user_token', 'card_token', 'gpa', 'duration', 'created_time', 'user_transaction_time', 'settlement_date', 'request_amount', 'amount', 'currency_code', 'response', 'network', 'acquirer_fee_amount', 'acquirer', 'user', 'card', 'card_acceptor' ] <NEW_LINE> for attribute in expected_attributes: <NEW_LINE> <INDENT> with self.subTest(f'{attribute} is not defined'): <NEW_LINE> <INDENT> self.assertIsNotNone(getattr(response, attribute)) <NEW_LINE> <DEDENT> <DEDENT> match_attributes = list(verify.keys()) <NEW_LINE> for attribute in match_attributes: <NEW_LINE> <INDENT> with self.subTest(f'{attribute} does not match the expected value'): <NEW_LINE> <INDENT> self.assertEqual(getattr(response, attribute), verify[attribute]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_merchant(self): <NEW_LINE> <INDENT> merchant_model = { "name": self.fake.company() } <NEW_LINE> return self.client.merchants.create(merchant_model) <NEW_LINE> <DEDENT> def get_card_product(self): <NEW_LINE> <INDENT> return self.card_product <NEW_LINE> <DEDENT> def test_transactions_find(self): <NEW_LINE> <INDENT> merchant = self.get_merchant() <NEW_LINE> user = self.client.users.create({}) <NEW_LINE> card_product = self.get_card_product() <NEW_LINE> card_request = { "card_product_token": card_product.token, "user_token": user.token } <NEW_LINE> card = self.client.cards.create(card_request) <NEW_LINE> auth_request_model = { "card_token": card.token, "amount": 100.0, "mid": merchant.token } <NEW_LINE> transaction = self.sim.authorization(auth_request_model) <NEW_LINE> transaction_token = transaction['transaction']['token'] <NEW_LINE> found = self.client.transactions.find(transaction_token) <NEW_LINE> verify = { "type": "authorization", "user_token": user.token, "acting_user_token": user.token, "card_token": card.token, "request_amount": auth_request_model['amount'], "amount": auth_request_model['amount'] } <NEW_LINE> self.verify_transaction_model(found, verify) | Tests for the transactions.find endpoint. | 62599066009cb60464d02caf |
class InvoiceNote(TimeStampedSafeDeleteModel): <NEW_LINE> <INDENT> lease = models.ForeignKey( "leasing.Lease", verbose_name=_("Lease"), related_name="invoice_notes", on_delete=models.PROTECT, ) <NEW_LINE> billing_period_start_date = models.DateField( verbose_name=_("Billing period start date"), null=True, blank=True ) <NEW_LINE> billing_period_end_date = models.DateField( verbose_name=_("Billing period end date"), null=True, blank=True ) <NEW_LINE> notes = models.TextField(verbose_name=_("Notes"), blank=True) <NEW_LINE> recursive_get_related_skip_relations = ["lease"] <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = pgettext_lazy("Model name", "Invoice note") <NEW_LINE> verbose_name_plural = pgettext_lazy("Model name", "Invoice notes") | In Finnish: Laskun tiedote | 625990667c178a314d78e7a7 |
class SearchAttributes(Model): <NEW_LINE> <INDENT> def __init__(self, id: str=None, name: str=None, info: str=None, attributes: List[IndexSearchAttributes]=None): <NEW_LINE> <INDENT> self.swagger_types = { 'id': str, 'name': str, 'info': str, 'attributes': List[IndexSearchAttributes] } <NEW_LINE> self.attribute_map = { 'id': 'id', 'name': 'name', 'info': 'info', 'attributes': 'attributes' } <NEW_LINE> self._id = id <NEW_LINE> self._name = name <NEW_LINE> self._info = info <NEW_LINE> self._attributes = attributes <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> 'SearchAttributes': <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self) -> str: <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @id.setter <NEW_LINE> def id(self, id: str): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self) -> str: <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name: str): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def info(self) -> str: <NEW_LINE> <INDENT> return self._info <NEW_LINE> <DEDENT> @info.setter <NEW_LINE> def info(self, info: str): <NEW_LINE> <INDENT> self._info = info <NEW_LINE> <DEDENT> @property <NEW_LINE> def attributes(self) -> List[IndexSearchAttributes]: <NEW_LINE> <INDENT> return self._attributes <NEW_LINE> <DEDENT> @attributes.setter <NEW_LINE> def attributes(self, attributes: List[IndexSearchAttributes]): <NEW_LINE> <INDENT> self._attributes = attributes | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62599066796e427e5384feed |
class UserAdmin(_UserAdmin): <NEW_LINE> <INDENT> form = UserChangeForm <NEW_LINE> add_form = UserCreationForm <NEW_LINE> add_form_template = "admin/authentication/user/add_form.html" <NEW_LINE> add_fieldsets = ( (_("Primary fields"), { "fields": ( "email", "first_name", "last_name", "password1", "password2", ), }), ) <NEW_LINE> fieldsets = ( (_("Primary fields"), { "fields": ( "email", "first_name", "last_name", "password", "force_password_change", ) }), (_("Permissions"), { "fields": ( "is_active", "is_staff", "is_superuser", "groups", "user_permissions", ) }), (_("Additional information"), { "fields": ( "last_login", "date_joined", ) }), ) <NEW_LINE> ordering = ( "email", ) <NEW_LINE> list_display = ( "pk", "email", "first_name", "last_name", "is_active", "is_staff", "is_superuser", ) <NEW_LINE> list_filter = ( "is_active", "is_staff", "is_superuser", "date_joined", ) <NEW_LINE> search_fields = ( "id", "email", "first_name", "last_name", ) <NEW_LINE> readonly_fields = ( "date_joined", "last_login", ) <NEW_LINE> filter_horizontal = ( "groups", "user_permissions", ) <NEW_LINE> def hijack(self, request, queryset): <NEW_LINE> <INDENT> if queryset.count() > 1: <NEW_LINE> <INDENT> messages.error(request, _("You can only log-in as one user!")) <NEW_LINE> return HttpResponseRedirect(request.get_full_path()) <NEW_LINE> <DEDENT> user = queryset[0] <NEW_LINE> user.backend = "django.contrib.auth.backends.ModelBackend" <NEW_LINE> login(request, user) <NEW_LINE> return HttpResponseRedirect("/") <NEW_LINE> <DEDENT> hijack.short_description = _("Log in as selected user") <NEW_LINE> actions = [ "hijack" ] | Admin for the User model of the simple_authentication app. | 62599066a8370b77170f1b46 |
class Config: <NEW_LINE> <INDENT> MOVIE_API_BASE_URL ='https://api.themoviedb.org/3/movie/{}?api_key={}' <NEW_LINE> MOVIE_API_KEY = os.environ.get('MOVIE_API_KEY') <NEW_LINE> SECRET_KEY = os.environ.get('SECRET_KEY') | General configuration parent class | 62599066a17c0f6771d5d762 |
class PrivateEndpoint(Resource): <NEW_LINE> <INDENT> _validation = { 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, 'network_interfaces': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'etag': {'key': 'etag', 'type': 'str'}, 'subnet': {'key': 'properties.subnet', 'type': 'Subnet'}, 'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'private_link_service_connections': {'key': 'properties.privateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'}, 'manual_private_link_service_connections': {'key': 'properties.manualPrivateLinkServiceConnections', 'type': '[PrivateLinkServiceConnection]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(PrivateEndpoint, self).__init__(**kwargs) <NEW_LINE> self.etag = None <NEW_LINE> self.subnet = kwargs.get('subnet', None) <NEW_LINE> self.network_interfaces = None <NEW_LINE> self.provisioning_state = None <NEW_LINE> self.private_link_service_connections = kwargs.get('private_link_service_connections', None) <NEW_LINE> self.manual_private_link_service_connections = kwargs.get('manual_private_link_service_connections', None) | Private endpoint resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:param subnet: The ID of the subnet from which the private IP will be allocated.
:type subnet: ~azure.mgmt.network.v2019_12_01.models.Subnet
:ivar network_interfaces: An array of references to the network interfaces created for this
private endpoint.
:vartype network_interfaces: list[~azure.mgmt.network.v2019_12_01.models.NetworkInterface]
:ivar provisioning_state: The provisioning state of the private endpoint resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_12_01.models.ProvisioningState
:param private_link_service_connections: A grouping of information about the connection to the
remote resource.
:type private_link_service_connections:
list[~azure.mgmt.network.v2019_12_01.models.PrivateLinkServiceConnection]
:param manual_private_link_service_connections: A grouping of information about the connection
to the remote resource. Used when the network admin does not have access to approve connections
to the remote resource.
:type manual_private_link_service_connections:
list[~azure.mgmt.network.v2019_12_01.models.PrivateLinkServiceConnection] | 6259906676e4537e8c3f0cfa |
class TpmAttestationFormat(object): <NEW_LINE> <INDENT> TPM_GENERATED_VALUE = b"\xffTCG" <NEW_LINE> TPM_ST_ATTEST_CERTIFY = b"\x80\x17" <NEW_LINE> @classmethod <NEW_LINE> def parse(cls, data): <NEW_LINE> <INDENT> reader = ByteBuffer(data) <NEW_LINE> generated_value = reader.read(4) <NEW_LINE> if generated_value != cls.TPM_GENERATED_VALUE: <NEW_LINE> <INDENT> raise ValueError("generated value field is invalid") <NEW_LINE> <DEDENT> tpmi_st_attest = reader.read(2) <NEW_LINE> if tpmi_st_attest != cls.TPM_ST_ATTEST_CERTIFY: <NEW_LINE> <INDENT> raise ValueError("tpmi_st_attest field is invalid") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> name = reader.read(reader.unpack("!H")) <NEW_LINE> data = reader.read(reader.unpack("!H")) <NEW_LINE> clock = reader.unpack("!Q") <NEW_LINE> reset_count = reader.unpack("!L") <NEW_LINE> restart_count = reader.unpack("!L") <NEW_LINE> safe_value = reader.unpack("B") <NEW_LINE> if safe_value not in (0, 1): <NEW_LINE> <INDENT> raise ValueError("invalid value 0x{0:x} for boolean".format(safe_value)) <NEW_LINE> <DEDENT> safe = safe_value == 1 <NEW_LINE> firmware_version = reader.unpack("!Q") <NEW_LINE> attested_name = reader.read(reader.unpack("!H")) <NEW_LINE> attested_qualified_name = reader.read(reader.unpack("!H")) <NEW_LINE> <DEDENT> except struct.error as e: <NEW_LINE> <INDENT> raise ValueError(e) <NEW_LINE> <DEDENT> return cls( name=name, data=data, clock_info=(clock, reset_count, restart_count, safe), firmware_version=firmware_version, attested=TpmsCertifyInfo( name=attested_name, qualified_name=attested_qualified_name ), ) <NEW_LINE> <DEDENT> def __init__(self, name, data, clock_info, firmware_version, attested): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.data = data <NEW_LINE> self.clock_info = clock_info <NEW_LINE> self.firmware_version = firmware_version <NEW_LINE> assert attested.__class__ == TpmsCertifyInfo <NEW_LINE> self.attested = attested <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ( "<TpmAttestationFormat" " data={self.data}" " name={self.name}" " clock_info={self.clock_info}" " firmware_version=0x{self.firmware_version:x}" " attested={self.attested}" ">".format(self=self) ) | the signature data is defined by [TPMv2-Part2] Section 10.12.8 (TPMS_ATTEST)
as:
TPM_GENERATED_VALUE (0xff544347 aka "ÿTCG")
TPMI_ST_ATTEST - always TPM_ST_ATTEST_CERTIFY (0x8017)
because signing procedure defines it should call TPM_Certify
[TPMv2-Part3] Section 18.2
TPM2B_NAME
size (uint16)
name (size long)
TPM2B_DATA
size (uint16)
name (size long)
TPMS_CLOCK_INFO
clock (uint64)
resetCount (uint32)
restartCount (uint32)
safe (byte) 1 yes, 0 no
firmwareVersion uint64
attested TPMS_CERTIFY_INFO (because TPM_ST_ATTEST_CERTIFY)
name TPM2B_NAME
qualified_name TPM2B_NAME
See:
https://www.trustedcomputinggroup.org/wp-content/uploads/TPM-Rev-2.0-Part-2-Structures-01.38.pdf
https://www.trustedcomputinggroup.org/wp-content/uploads/TPM-Rev-2.0-Part-3-Commands-01.38.pdf | 625990663617ad0b5ee078c8 |
class TitleScene(BaseScene): <NEW_LINE> <INDENT> def __init__(self, game, test=False): <NEW_LINE> <INDENT> self.test = test <NEW_LINE> if self.test: <NEW_LINE> <INDENT> self.render_count = 0 <NEW_LINE> <DEDENT> super().__init__(game) <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> print("Setting up TitleScene...") <NEW_LINE> self.game.display.background.setImage("assets/title_screen.jpg") <NEW_LINE> <DEDENT> def process_input(self, events, pressed_keys): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def render(self, display): <NEW_LINE> <INDENT> if self.test: <NEW_LINE> <INDENT> self.render_count += 1 <NEW_LINE> if self.render_count > 60: <NEW_LINE> <INDENT> super().terminate() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> print("Cleaning up TitleScene...") | This class is a minimal implementation example.
Feel free to use as a starting point for your scenes! | 625990662c8b7c6e89bd4f5d |
class JSONHelper(simplejson.JSONEncoder): <NEW_LINE> <INDENT> def default(self, obj): <NEW_LINE> <INDENT> if hasattr(obj, '__json__'): <NEW_LINE> <INDENT> return getattr(obj, '__json__')() <NEW_LINE> <DEDENT> if isinstance(obj, db.GqlQuery): <NEW_LINE> <INDENT> return list(obj) <NEW_LINE> <DEDENT> elif isinstance(obj, db.Model): <NEW_LINE> <INDENT> properties = obj.properties().items() <NEW_LINE> output = {} <NEW_LINE> for field, value in properties: <NEW_LINE> <INDENT> output[field] = getattr(obj, field) <NEW_LINE> <DEDENT> return output <NEW_LINE> <DEDENT> elif isinstance(obj, datetime.datetime): <NEW_LINE> <INDENT> output = {} <NEW_LINE> fields = ['day', 'hour', 'microsecond', 'minute', 'month', 'second', 'year'] <NEW_LINE> methods = ['ctime', 'isocalendar', 'isoformat', 'isoweekday', 'timetuple'] <NEW_LINE> for field in fields: <NEW_LINE> <INDENT> output[field] = getattr(obj, field) <NEW_LINE> <DEDENT> for method in methods: <NEW_LINE> <INDENT> output[method] = getattr(obj, method)() <NEW_LINE> <DEDENT> output['epoch'] = time.mktime(obj.timetuple()) <NEW_LINE> return output <NEW_LINE> <DEDENT> elif isinstance(obj, time.struct_time): <NEW_LINE> <INDENT> return list(obj) <NEW_LINE> <DEDENT> elif isinstance(obj, users.User): <NEW_LINE> <INDENT> output = {} <NEW_LINE> methods = ['nickname', 'email', 'auth_domain'] <NEW_LINE> for method in methods: <NEW_LINE> <INDENT> output[method] = getattr(obj, method)() <NEW_LINE> <DEDENT> return output <NEW_LINE> <DEDENT> return simplejson.JSONEncoder.default(self, obj) | Extends JSONEncoder to add support for GQL results and properties.
Adds support to simplejson JSONEncoders for GQL results and properties by
overriding JSONEncoder's default method. | 62599066435de62698e9d581 |
class Config(ABC): <NEW_LINE> <INDENT> def __init__(self, config_path: str = None, default_config: dict = None): <NEW_LINE> <INDENT> self._config: dict = {} if default_config is None else default_config.copy() <NEW_LINE> if config_path is not None: <NEW_LINE> <INDENT> self.load(config_path) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, item: str) -> Any: <NEW_LINE> <INDENT> return self._config.get(item, None) <NEW_LINE> <DEDENT> def __setitem__(self, item: str, value: Any): <NEW_LINE> <INDENT> self._config[item] = value <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return json.dumps(self._config, sort_keys=False, indent=4) <NEW_LINE> <DEDENT> def load(self, config_path: str): <NEW_LINE> <INDENT> config_path = os.path.abspath(config_path) <NEW_LINE> if os.path.isfile(config_path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(config_path, 'r') as f: <NEW_LINE> <INDENT> self._config: dict = self.load_dict(f) <NEW_LINE> <DEDENT> logger.info(f"Config file '{config_path}' was successfully loaded.\nConfiguration:\n{self}.") <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error(f"Can not load config file '{config_path}. {e}.") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> logger.warning(f"Can not load config file '{config_path}'. File does not exist. " f"Creating config file with default configuration.") <NEW_LINE> self.save(config_path) <NEW_LINE> <DEDENT> <DEDENT> def save(self, config_path: str): <NEW_LINE> <INDENT> config_path = os.path.abspath(config_path) <NEW_LINE> config_dir, config_name = os.path.split(config_path) <NEW_LINE> if not os.path.isdir(config_dir): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.makedirs(config_dir) <NEW_LINE> <DEDENT> except OSError as e: <NEW_LINE> <INDENT> logger.error(f"Can not create config file directory '{config_dir}. {e}.") <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> with open(config_path, 'w') as f: <NEW_LINE> <INDENT> self.dump_dict(self._config, f) <NEW_LINE> <DEDENT> logger.info(f"Config file '{config_path}' was successfully saved.\nConfiguration:\n{self}.") <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error(f"Can not save config file '{config_path}. {e}.") <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> @abstractmethod <NEW_LINE> def dump_dict(d: dict, stream: Any): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @abstractmethod <NEW_LINE> def load_dict(stream: Any) -> dict: <NEW_LINE> <INDENT> pass | Config base class
| 6259906699cbb53fe683265c |
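A minimal sketch of a concrete subclass of the abstract `Config` above, assuming JSON is an acceptable on-disk format; the file path and default settings are illustrative.

```python
import json
from typing import Any

class JSONConfig(Config):
    """Concrete Config that serializes the settings dict as JSON."""

    @staticmethod
    def dump_dict(d: dict, stream: Any):
        json.dump(d, stream, indent=4)

    @staticmethod
    def load_dict(stream: Any) -> dict:
        return json.load(stream)

# Loads ./settings.json if present, otherwise writes the defaults to it.
config = JSONConfig("settings.json", default_config={"debug": False})
config["debug"] = True
config.save("settings.json")
```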
class IdentityV3toV2MethodsTestCase(unit.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(IdentityV3toV2MethodsTestCase, self).setUp() <NEW_LINE> self.load_backends() <NEW_LINE> self.user_id = uuid.uuid4().hex <NEW_LINE> self.default_project_id = uuid.uuid4().hex <NEW_LINE> self.tenant_id = uuid.uuid4().hex <NEW_LINE> self.user1 = {'id': self.user_id, 'name': self.user_id, 'default_project_id': self.default_project_id, 'domain_id': CONF.identity.default_domain_id} <NEW_LINE> self.user2 = {'id': self.user_id, 'name': self.user_id, 'domain_id': CONF.identity.default_domain_id} <NEW_LINE> self.user3 = {'id': self.user_id, 'name': self.user_id, 'default_project_id': self.default_project_id, 'tenantId': self.tenant_id, 'domain_id': CONF.identity.default_domain_id} <NEW_LINE> self.user4 = {'id': self.user_id, 'name': self.user_id, 'tenantId': self.tenant_id, 'domain_id': CONF.identity.default_domain_id} <NEW_LINE> self.expected_user = {'id': self.user_id, 'name': self.user_id, 'username': self.user_id, 'tenantId': self.default_project_id} <NEW_LINE> self.expected_user_no_tenant_id = {'id': self.user_id, 'name': self.user_id, 'username': self.user_id} <NEW_LINE> <DEDENT> def test_v3_to_v2_user_method(self): <NEW_LINE> <INDENT> updated_user1 = controller.V2Controller.v3_to_v2_user(self.user1) <NEW_LINE> self.assertIs(self.user1, updated_user1) <NEW_LINE> self.assertDictEqual(self.expected_user, self.user1) <NEW_LINE> updated_user2 = controller.V2Controller.v3_to_v2_user(self.user2) <NEW_LINE> self.assertIs(self.user2, updated_user2) <NEW_LINE> self.assertDictEqual(self.expected_user_no_tenant_id, self.user2) <NEW_LINE> updated_user3 = controller.V2Controller.v3_to_v2_user(self.user3) <NEW_LINE> self.assertIs(self.user3, updated_user3) <NEW_LINE> self.assertDictEqual(self.expected_user, self.user3) <NEW_LINE> updated_user4 = controller.V2Controller.v3_to_v2_user(self.user4) <NEW_LINE> self.assertIs(self.user4, updated_user4) <NEW_LINE> self.assertDictEqual(self.expected_user_no_tenant_id, self.user4) <NEW_LINE> <DEDENT> def test_v3_to_v2_user_method_list(self): <NEW_LINE> <INDENT> user_list = [self.user1, self.user2, self.user3, self.user4] <NEW_LINE> updated_list = controller.V2Controller.v3_to_v2_user(user_list) <NEW_LINE> self.assertEqual(len(user_list), len(updated_list)) <NEW_LINE> for i, ref in enumerate(updated_list): <NEW_LINE> <INDENT> self.assertIs(ref, user_list[i]) <NEW_LINE> <DEDENT> self.assertDictEqual(self.expected_user, self.user1) <NEW_LINE> self.assertDictEqual(self.expected_user_no_tenant_id, self.user2) <NEW_LINE> self.assertDictEqual(self.expected_user, self.user3) <NEW_LINE> self.assertDictEqual(self.expected_user_no_tenant_id, self.user4) | Test users V3 to V2 conversion methods. | 625990666e29344779b01dc8 |
class PSQLConn(object): <NEW_LINE> <INDENT> def __init__(self, db, user, password, host): <NEW_LINE> <INDENT> self.db = db <NEW_LINE> self.user = user <NEW_LINE> self.password = password <NEW_LINE> self.host = host <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> connection = pg.connect( host=self.host, database=self.db, user=self.user, password=self.password) <NEW_LINE> connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT) <NEW_LINE> return connection | Stores the connection to psql. | 625990667d847024c075db50 |
class CommentListCreate(generics.ListCreateAPIView): <NEW_LINE> <INDENT> queryset = Comment.objects.all() <NEW_LINE> serializer_class = CommentSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticated, ) | List or create a movie | 62599066d486a94d0ba2d736 |
class RatingsBody(db.Model): <NEW_LINE> <INDENT> review_id = db.Column(db.Integer, db.ForeignKey('ratings.id'), primary_key=True) <NEW_LINE> review_body = db.Column(db.String(1024), nullable=False) | Object for the Ratings_Body table. | 625990664e4d562566373b7f |
class DBModel(Model): <NEW_LINE> <INDENT> validation_regexp = {} <NEW_LINE> def update_from(self, form, field, formfield=None, cb=False): <NEW_LINE> <INDENT> formfield = formfield if formfield else field <NEW_LINE> try: <NEW_LINE> <INDENT> value = form[formfield] <NEW_LINE> if value == re.match(self.validation_regexp.get(field, '.*'), value).group(): <NEW_LINE> <INDENT> fieldtype = type(getattr(self, field)) <NEW_LINE> if fieldtype is bool and type(value) is str: <NEW_LINE> <INDENT> setattr(self, field, value.lower() in ('true', 'yes', 'on')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setattr(self, field, value) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError('Does not match regexp!') <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> err = '"%s" is not a valid %s' % (value, field) <NEW_LINE> if cb: <NEW_LINE> <INDENT> cb(err) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise InvalidValue(err) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> class Meta(object): <NEW_LINE> <INDENT> database = DB | base class for other database models | 6259906645492302aabfdc54 |
class TestCommandInfo(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testCommandInfo(self): <NEW_LINE> <INDENT> model = iengage_client.models.command_info.CommandInfo() | CommandInfo unit test stubs | 625990667cff6e4e811b71c0 |
class Role(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'roles' <NEW_LINE> id = db.Column(db.String(45), primary_key=True) <NEW_LINE> name = db.Column(db.String(255), unique=True) <NEW_LINE> description = db.Column(db.String(255)) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.id = str(uuid4()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Model Role `{}`>".format(self.name) | Represents Proected roles. | 625990664f6381625f19a061 |
class TestPaymentsTransactionsApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = knetik_cloud.apis.payments_transactions_api.PaymentsTransactionsApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_transaction(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_get_transactions(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_refund_transaction(self): <NEW_LINE> <INDENT> pass | PaymentsTransactionsApi unit test stubs | 625990660a50d4780f70697c |
class NXSDialog(QDialog): <NEW_LINE> <INDENT> def __init__(self, parent=None, active_file=None): <NEW_LINE> <INDENT> QDialog.__init__(self, parent) <NEW_LINE> vbox=QVBoxLayout(self) <NEW_LINE> vbox.setMargin(0) <NEW_LINE> if active_file is not None: <NEW_LINE> <INDENT> self.setWindowTitle(u'NXS Browser - %s'%active_file) <NEW_LINE> <DEDENT> self.nxs_widget=NXSWidget(self, active_file) <NEW_LINE> vbox.addWidget(self.nxs_widget) <NEW_LINE> self.resize(700, 700) <NEW_LINE> self.nxs_widget.ui.splitter.setSizes([400, 260]) <NEW_LINE> self.nxs_widget.ui.splitter_2.setSizes([200, 460]) | A QDialog with a NXSWidget in it. | 62599066498bea3a75a591bd |
class RepeatedBracket(Bracket): <NEW_LINE> <INDENT> def match(self, tok): <NEW_LINE> <INDENT> decl = Declaration('*') <NEW_LINE> result = self.declaration.match(tok) <NEW_LINE> if not result: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> while result != None: <NEW_LINE> <INDENT> decl.pieces.append(result) <NEW_LINE> result = self.declaration.match(tok) <NEW_LINE> <DEDENT> return decl | A bracket present one or more times (quantifier +). | 62599067fff4ab517ebcef94 |
@tf_export('estimator.LinearClassifier') <NEW_LINE> class LinearClassifier(estimator.Estimator): <NEW_LINE> <INDENT> def __init__(self, feature_columns, model_dir=None, n_classes=2, weight_column=None, label_vocabulary=None, optimizer='Ftrl', config=None, partitioner=None, warm_start_from=None, loss_reduction=losses.Reduction.SUM): <NEW_LINE> <INDENT> if n_classes == 2: <NEW_LINE> <INDENT> head = head_lib._binary_logistic_head_with_sigmoid_cross_entropy_loss( weight_column=weight_column, label_vocabulary=label_vocabulary, loss_reduction=loss_reduction) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> head = head_lib._multi_class_head_with_softmax_cross_entropy_loss( n_classes, weight_column=weight_column, label_vocabulary=label_vocabulary, loss_reduction=loss_reduction) <NEW_LINE> <DEDENT> def _model_fn(features, labels, mode, config): <NEW_LINE> <INDENT> return _linear_model_fn( features=features, labels=labels, mode=mode, head=head, feature_columns=tuple(feature_columns or []), optimizer=optimizer, partitioner=partitioner, config=config) <NEW_LINE> <DEDENT> super(LinearClassifier, self).__init__( model_fn=_model_fn, model_dir=model_dir, config=config, warm_start_from=warm_start_from) | Linear classifier model.
Train a linear model to classify instances into one of multiple possible
classes. When number of possible classes is 2, this is binary classification.
Example:
```python
categorical_column_a = categorical_column_with_hash_bucket(...)
categorical_column_b = categorical_column_with_hash_bucket(...)
categorical_feature_a_x_categorical_feature_b = crossed_column(...)
# Estimator using the default optimizer.
estimator = LinearClassifier(
feature_columns=[categorical_column_a,
categorical_feature_a_x_categorical_feature_b])
# Or estimator using the FTRL optimizer with regularization.
estimator = LinearClassifier(
feature_columns=[categorical_column_a,
categorical_feature_a_x_categorical_feature_b],
optimizer=tf.train.FtrlOptimizer(
learning_rate=0.1,
l1_regularization_strength=0.001
))
# Or estimator with warm-starting from a previous checkpoint.
estimator = LinearClassifier(
feature_columns=[categorical_column_a,
categorical_feature_a_x_categorical_feature_b],
warm_start_from="/path/to/checkpoint/dir")
# Input builders
def input_fn_train: # returns x, y (where y represents label's class index).
...
def input_fn_eval: # returns x, y (where y represents label's class index).
...
estimator.train(input_fn=input_fn_train)
estimator.evaluate(input_fn=input_fn_eval)
estimator.predict(input_fn=input_fn_predict)
```
Input of `train` and `evaluate` should have following features,
otherwise there will be a `KeyError`:
* if `weight_column` is not `None`, a feature with
`key=weight_column` whose value is a `Tensor`.
* for each `column` in `feature_columns`:
- if `column` is a `SparseColumn`, a feature with `key=column.name`
whose `value` is a `SparseTensor`.
- if `column` is a `WeightedSparseColumn`, two features: the first with
`key` the id column name, the second with `key` the weight column name.
Both features' `value` must be a `SparseTensor`.
- if `column` is a `RealValuedColumn`, a feature with `key=column.name`
whose `value` is a `Tensor`.
Loss is calculated by using softmax cross entropy.
@compatibility(eager)
Estimators are not compatible with eager execution.
@end_compatibility | 62599067e1aae11d1e7cf3c8 |
class OBDTransport: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._connected = False <NEW_LINE> self._error = "" <NEW_LINE> <DEDENT> def _OnConnected(self): <NEW_LINE> <INDENT> self._connected = True <NEW_LINE> <DEDENT> def _OnDisconnected(self): <NEW_LINE> <INDENT> self._connected = False <NEW_LINE> <DEDENT> def IsConnected(self): <NEW_LINE> <INDENT> return self._connected <NEW_LINE> <DEDENT> def GetErrorString(self): <NEW_LINE> <INDENT> return self._error <NEW_LINE> <DEDENT> def Discover(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def Connect(self, address, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def Close(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def Recv(self, len): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def Send(self, data): <NEW_LINE> <INDENT> raise NotImplementedError() | OBDTransport abstracts the underlying communication layer (bluetooth/COM) | 625990673539df3088ecda17 |
class JSONLDObject(SimpleNamespace, object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(JSONLDObject, self).__init__(**kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def register_class_for_property(cls , property, newCls): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def context(self): <NEW_LINE> <INDENT> return self.__dict__["@context"] <NEW_LINE> <DEDENT> @context.setter <NEW_LINE> def context(self, value): <NEW_LINE> <INDENT> self.__dict__["@context"] = value <NEW_LINE> <DEDENT> def __getattribute__(self, attr): <NEW_LINE> <INDENT> map = {"annotations": Annotation,"aggregates": Aggregate, "authoredBy": Agent, "createdBy": Agent, "curatedBy": Agent, "contributedBy": Agent, "retrievedBy": Agent} <NEW_LINE> if(attr in map.keys()): <NEW_LINE> <INDENT> cls = map[attr] <NEW_LINE> value = super(JSONLDObject, self).__getattribute__(attr) <NEW_LINE> if value is not None: <NEW_LINE> <INDENT> if isinstance(value,list): <NEW_LINE> <INDENT> for i in list(value): <NEW_LINE> <INDENT> if not isinstance(i,cls): <NEW_LINE> <INDENT> object = cls(**i) <NEW_LINE> value.remove(i) <NEW_LINE> value.append(object) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif not isinstance(value,cls): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = cls(**value) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> value = cls(value) <NEW_LINE> <DEDENT> self.__setattr__(attr,value) <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(JSONLDObject, self).__getattribute__(attr) <NEW_LINE> <DEDENT> <DEDENT> def populated(self): <NEW_LINE> <INDENT> return {key: value for (key,value) in self.__dict__.items() if value is not None} | A class that provides attribute-based access to an instance's __dict__ | 62599067f548e778e596cd03
class GetRoleNumber(APIView): <NEW_LINE> <INDENT> def get(self, request, format=None): <NEW_LINE> <INDENT> return get_role_number(request) <NEW_LINE> <DEDENT> def post(self, request, format=None): <NEW_LINE> <INDENT> return get_role_number(request) | Get the number of roles | 625990671b99ca40022900f2
class Tree(dict): <NEW_LINE> <INDENT> def __init__(self, entries=None): <NEW_LINE> <INDENT> dict.__init__(self) <NEW_LINE> self.get("", insert=True) <NEW_LINE> if entries: <NEW_LINE> <INDENT> account_balances = collections.defaultdict(CounterInventory) <NEW_LINE> for entry in entries: <NEW_LINE> <INDENT> if isinstance(entry, Open): <NEW_LINE> <INDENT> self.get(entry.account, insert=True) <NEW_LINE> <DEDENT> for posting in getattr(entry, "postings", []): <NEW_LINE> <INDENT> account_balances[posting.account].add_position(posting) <NEW_LINE> <DEDENT> <DEDENT> for name, balance in sorted(account_balances.items()): <NEW_LINE> <INDENT> self.insert(name, balance) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def ancestors(self, name): <NEW_LINE> <INDENT> while name: <NEW_LINE> <INDENT> name = account.parent(name) <NEW_LINE> yield self.get(name) <NEW_LINE> <DEDENT> <DEDENT> def insert(self, name, balance): <NEW_LINE> <INDENT> node = self.get(name, insert=True) <NEW_LINE> node.balance.add_inventory(balance) <NEW_LINE> node.balance_children.add_inventory(balance) <NEW_LINE> node.has_txns = True <NEW_LINE> for parent_node in self.ancestors(name): <NEW_LINE> <INDENT> parent_node.balance_children.add_inventory(balance) <NEW_LINE> <DEDENT> <DEDENT> def get(self, name, insert=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> node = TreeNode(name) <NEW_LINE> if insert: <NEW_LINE> <INDENT> if name: <NEW_LINE> <INDENT> parent = self.get(account.parent(name), insert=True) <NEW_LINE> parent.children.append(node) <NEW_LINE> <DEDENT> self[name] = node <NEW_LINE> <DEDENT> return node <NEW_LINE> <DEDENT> <DEDENT> def net_profit(self, options, account_name): <NEW_LINE> <INDENT> income = self.get(options["name_income"]) <NEW_LINE> expenses = self.get(options["name_expenses"]) <NEW_LINE> net_profit = Tree() <NEW_LINE> net_profit.insert( account_name, income.balance_children + expenses.balance_children ) <NEW_LINE> return net_profit.get(account_name) <NEW_LINE> <DEDENT> def cap(self, options, unrealized_account): <NEW_LINE> <INDENT> equity = options["name_equity"] <NEW_LINE> conversions = CounterInventory( { (currency, None): -number for currency, number in self.get("") .balance_children.reduce(convert.get_cost) .items() } ) <NEW_LINE> self.insert( equity + ":" + options["account_current_conversions"], conversions ) <NEW_LINE> self.insert( equity + ":" + unrealized_account, -self.get("").balance_children ) <NEW_LINE> self.insert( equity + ":" + options["account_current_earnings"], self.get(options["name_income"]).balance_children, ) <NEW_LINE> self.insert( equity + ":" + options["account_current_earnings"], self.get(options["name_expenses"]).balance_children, ) | Account tree.
Args:
entries: A list of entries to compute balances from. | 62599067d486a94d0ba2d737 |
class CustomProduct(models.Model): <NEW_LINE> <INDENT> product = models.OneToOneField(Product, verbose_name=_('Product'), primary_key=True) <NEW_LINE> downpayment = models.IntegerField(_("Percent Downpayment"), default=20) <NEW_LINE> deferred_shipping = models.BooleanField(_('Deferred Shipping'), help_text=_('Do not charge shipping at checkout for this item.'), default=False) <NEW_LINE> option_group = models.ManyToManyField(OptionGroup, verbose_name=_('Option Group'), blank=True,) <NEW_LINE> def _is_shippable(self): <NEW_LINE> <INDENT> return not self.deferred_shipping <NEW_LINE> <DEDENT> is_shippable = property(fget=_is_shippable) <NEW_LINE> def _get_fullPrice(self): <NEW_LINE> <INDENT> return self.get_qty_price(1) <NEW_LINE> <DEDENT> unit_price = property(_get_fullPrice) <NEW_LINE> def add_template_context(self, context, selected_options, **kwargs): <NEW_LINE> <INDENT> from satchmo.product.utils import serialize_options <NEW_LINE> context['options'] = serialize_options(self, selected_options) <NEW_LINE> return context <NEW_LINE> <DEDENT> def get_qty_price(self, qty): <NEW_LINE> <INDENT> price = get_product_quantity_price(self.product, qty) <NEW_LINE> if not price and qty == 1: <NEW_LINE> <INDENT> price = Decimal("0.00") <NEW_LINE> <DEDENT> elif not price: <NEW_LINE> <INDENT> price = self.product._get_fullPrice() <NEW_LINE> <DEDENT> return price * self.downpayment / 100 <NEW_LINE> <DEDENT> def get_full_price(self, qty=1): <NEW_LINE> <INDENT> price = get_product_quantity_price(self.product, qty) <NEW_LINE> if not price: <NEW_LINE> <INDENT> price = self.product.unit_price <NEW_LINE> <DEDENT> return price <NEW_LINE> <DEDENT> full_price = property(fget=get_full_price) <NEW_LINE> def _get_subtype(self): <NEW_LINE> <INDENT> return 'CustomProduct' <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u"CustomProduct: %s" % self.product.name <NEW_LINE> <DEDENT> def get_valid_options(self): <NEW_LINE> <INDENT> return get_all_options(self, ids_only=True) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('Custom Product') <NEW_LINE> verbose_name_plural = _('Custom Products') | Product which must be custom-made or ordered. | 6259906792d797404e38971a |
class HTTP401(HTTPException): <NEW_LINE> <INDENT> status = __doc__ <NEW_LINE> def headers(self): <NEW_LINE> <INDENT> return [('WWW-Authenticate', '%s' % self)] <NEW_LINE> <DEDENT> def body(self): <NEW_LINE> <INDENT> return [] | 401 Unauthorized | 6259906745492302aabfdc56 |
class AvailablePhoneNumbersConnector(BaseConnector): <NEW_LINE> <INDENT> def listAvailablePhoneNumbers( self, country, type_, page=None, pageSize=None, contains=None, areaCode=None, inRegion=None, inPostalCode=None,): <NEW_LINE> <INDENT> queryParams = { 'Country': country, 'Type': type_, 'Page': page, 'PageSize': pageSize, 'Contains': contains, 'AreaCode': areaCode, 'InRegion': inRegion, 'InPostalCode': inPostalCode, } <NEW_LINE> if isinstance(type_, Enum): <NEW_LINE> <INDENT> type_ = type_.value <NEW_LINE> <DEDENT> params = flatDict(queryParams) <NEW_LINE> availablePhoneNumbers = self._executor.read( ('AvailablePhoneNumbers', country, type_), AvailablePhoneNumbers, params) <NEW_LINE> return availablePhoneNumbers | Used for all forms of communication with the `AvailablePhoneNumbers`
endpoint of the Zang REST API.
.. seealso:: zang.connectors.connector_factory.ConnectorFactory | 625990673cc13d1c6d466ebe |
class CacheKey(object): <NEW_LINE> <INDENT> def __init__(self, key): <NEW_LINE> <INDENT> self._key = key <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self._key == other <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__unicode__() <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return smart_str(self._key) | A stub string class that we can use to check if a key was created already. | 62599067442bda511e95d916 |
class ReactTutorialHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): <NEW_LINE> <INDENT> def do_POST(self): <NEW_LINE> <INDENT> content_length = int(self.headers['Content-Length']) <NEW_LINE> post_data = dict(urlparse.parse_qsl(self.rfile.read(content_length).decode('utf-8'))) <NEW_LINE> try: <NEW_LINE> <INDENT> f = open(self.translate_path(self.path), 'rb+') <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> self.send_error(404, "File not found") <NEW_LINE> return None <NEW_LINE> <DEDENT> current_list = json.load(f) <NEW_LINE> current_list.append(post_data) <NEW_LINE> f.seek(0) <NEW_LINE> json.dump(current_list, f) <NEW_LINE> f.close() <NEW_LINE> return self.do_GET() | Extension of SimpleHTTPRequestHandler that works with the react.js tutorial.
In addition to standard SimpleHTTPServer file-webserver functionality, adds
POST-ability.
USAGE: python server.py to serve files from the cwd
(works the same as running python -m SimpleHTTPServer in the directory) | 62599067d6c5a102081e38a0 |
class HTMLGenerator: <NEW_LINE> <INDENT> def __init__(self, entries): <NEW_LINE> <INDENT> self._entries = entries <NEW_LINE> <DEDENT> def get_html(self): <NEW_LINE> <INDENT> html_root = etree.Element("html") <NEW_LINE> css = etree.SubElement(html_root, "link") <NEW_LINE> css.attrib["rel"] = "stylesheet" <NEW_LINE> css.attrib["href"] = "card.css" <NEW_LINE> for entry in self._entries: <NEW_LINE> <INDENT> html_root.append(self._get_html_for_entry(entry)) <NEW_LINE> <DEDENT> return html_root <NEW_LINE> <DEDENT> def _get_html_for_entry(self, entry): <NEW_LINE> <INDENT> div_root = etree.Element("div") <NEW_LINE> body = etree.SubElement(div_root, "body") <NEW_LINE> title = etree.SubElement(body, "h1") <NEW_LINE> title.text = entry.get_name() <NEW_LINE> card_type_element = etree.SubElement(body, "h2") <NEW_LINE> card_type = entry.get_entry_type() <NEW_LINE> if card_type == Entry.VIRUS: <NEW_LINE> <INDENT> card_type_element.text = "VIRUS" <NEW_LINE> <DEDENT> elif card_type == Entry.GENOME: <NEW_LINE> <INDENT> card_type_element.text = "GENOME" <NEW_LINE> <DEDENT> image_results = entry.get_picture_url() <NEW_LINE> if type(image_results) == type([]): <NEW_LINE> <INDENT> for img_url in image_results: <NEW_LINE> <INDENT> img = etree.SubElement(body, "img") <NEW_LINE> img.attrib["src"] = img_url <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> img = etree.SubElement(body, "img") <NEW_LINE> img.attrib["src"] = image_results <NEW_LINE> <DEDENT> paragraph = etree.SubElement(body, "p") <NEW_LINE> if entry.get_entry_type() == Entry.VIRUS: <NEW_LINE> <INDENT> paragraph.text = "Genome Modifier (Kilobase Count):{0}".format(entry.get_base_count()) <NEW_LINE> <DEDENT> elif entry.get_entry_type() == Entry.GENOME: <NEW_LINE> <INDENT> paragraph.text = "Genome Score (Chromosome Count):{0}".format(entry.get_chromosome_count()) <NEW_LINE> <DEDENT> description = etree.SubElement(body, "p") <NEW_LINE> description.text = entry.get_description() <NEW_LINE> return div_root | Generates an HTML document styled with CSS for easy printout of the trading cards. | 62599067379a373c97d9a799 |
class LinksParser(HTMLParser): <NEW_LINE> <INDENT> def handle_starttag(self, tag, attrs): <NEW_LINE> <INDENT> if tag == 'a': <NEW_LINE> <INDENT> for item in attrs: <NEW_LINE> <INDENT> if item[0] == 'href': <NEW_LINE> <INDENT> print(item[1]) | print the links in a page | 62599067460517430c432c12 |
class SummaryListSum(ListView): <NEW_LINE> <INDENT> template_name = 'summary_sum_list.html' <NEW_LINE> context_object_name = 'sites' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return Site.objects.all().annotate(sum_val_a=Sum('entry__val_a'), sum_val_b=Sum('entry__val_b')) | Shows an Aggregated Sum of A And B Values From Each Site | 62599067796e427e5384fef1 |
class Token(object): <NEW_LINE> <INDENT> UNKNOWN = 0 <NEW_LINE> LPAR = 1 <NEW_LINE> RPAR = 2 <NEW_LINE> INT = 4 <NEW_LINE> FLOAT = 5 <NEW_LINE> VAR = 6 <NEW_LINE> MINUS = 7 <NEW_LINE> PLUS = 8 <NEW_LINE> MUL = 9 <NEW_LINE> DIV = 10 <NEW_LINE> MOD = 11 <NEW_LINE> POWER = 12 <NEW_LINE> FIRST_OP = 7 <NEW_LINE> def __init__(self, value): <NEW_LINE> <INDENT> if type(value) == int: <NEW_LINE> <INDENT> self._type = Token.INT <NEW_LINE> <DEDENT> if type(value) == float: <NEW_LINE> <INDENT> self._type = Token.FLOAT <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._type = self._makeType(value) <NEW_LINE> <DEDENT> self._value = value <NEW_LINE> <DEDENT> def isOperator(self): <NEW_LINE> <INDENT> return self._type >= Token.FIRST_OP <NEW_LINE> <DEDENT> def getPrecedence(self): <NEW_LINE> <INDENT> if self._type == Token.POWER: <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> elif self._type in (Token.MUL, Token.DIV, Token.MOD): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> elif self._type in (Token.PLUS, Token.MINUS): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self._value) <NEW_LINE> <DEDENT> def getType(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> def getValue(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> def _makeType(self, string): <NEW_LINE> <INDENT> if string == '*': return Token.MUL <NEW_LINE> elif string == '/': return Token.DIV <NEW_LINE> elif string == '+': return Token.PLUS <NEW_LINE> elif string == '-': return Token.MINUS <NEW_LINE> elif string == "^": return Token.POWER <NEW_LINE> elif string == "%": return Token.MOD <NEW_LINE> elif string == "(": return Token.LPAR <NEW_LINE> elif string == ")": return Token.RPAR <NEW_LINE> elif str(string) in "abcdefghigjklmnopqrstuvwxyz": return Token.VAR <NEW_LINE> else: return Token.UNKNOWN; | Represents a word in the language. | 62599067d268445f2663a71a |
class Sitemap(object): <NEW_LINE> <INDENT> def __init__(self, xmltext): <NEW_LINE> <INDENT> tree = ElementTree() <NEW_LINE> tree.parse(StringIO(xmltext)) <NEW_LINE> self._root = tree.getroot() <NEW_LINE> rt = self._root.tag <NEW_LINE> self.type = self._root.tag.split('}', 1)[1] if '}' in rt else rt <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for elem in self._root.getchildren(): <NEW_LINE> <INDENT> d = {} <NEW_LINE> for el in elem.getchildren(): <NEW_LINE> <INDENT> tag = el.tag <NEW_LINE> name = tag.split('}', 1)[1] if '}' in tag else tag <NEW_LINE> d[name] = el.text.strip() if el.text else '' <NEW_LINE> <DEDENT> yield d | Class to parse Sitemap (type=urlset) and Sitemap Index
(type=sitemapindex) files | 62599067f548e778e596cd05 |
class TxBook(Book): <NEW_LINE> <INDENT> def __init__(self, **kwa): <NEW_LINE> <INDENT> super(TxBook, self).__init__(**kwa) <NEW_LINE> self.pages = [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def index(self): <NEW_LINE> <INDENT> return (self.data['sn'], self.data['dn'], self.data['si'], self.data['bi'],) <NEW_LINE> <DEDENT> def pack(self, data=None, body=None): <NEW_LINE> <INDENT> if data: <NEW_LINE> <INDENT> self.data.update(data) <NEW_LINE> <DEDENT> if body is not None: <NEW_LINE> <INDENT> self.body = body <NEW_LINE> <DEDENT> self.pages = [] <NEW_LINE> page = TxPage( stack=self.stack, data=self.data, embody=self.body) <NEW_LINE> page.prepack() <NEW_LINE> self.packed = page.body.packed <NEW_LINE> if page.size <= raeting.UXD_MAX_PACKET_SIZE: <NEW_LINE> <INDENT> self.pages.append(page) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.paginate(headsize=len(page.head.packed)) <NEW_LINE> <DEDENT> <DEDENT> def paginate(self, headsize): <NEW_LINE> <INDENT> extrasize = 2 <NEW_LINE> hotelsize = headsize + extrasize <NEW_LINE> secsize = raeting.UXD_MAX_PACKET_SIZE - hotelsize <NEW_LINE> seccount = (self.size // secsize) + (1 if self.size % secsize else 0) <NEW_LINE> for i in range(seccount): <NEW_LINE> <INDENT> if i == seccount - 1: <NEW_LINE> <INDENT> section = self.packed[i * secsize:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> section = self.packed[i * secsize: (i+1) * secsize] <NEW_LINE> <DEDENT> data = odict(self.data) <NEW_LINE> data['pn'] = i <NEW_LINE> data['pc'] = seccount <NEW_LINE> page = TxPage( stack=self.stack, data=data) <NEW_LINE> page.body.packed = section <NEW_LINE> page.head.pack() <NEW_LINE> page.packed = page.head.packed + page.body.packed <NEW_LINE> self.pages.append(page) | Manages an outgoing message and its associated pages(s) | 6259906776e4537e8c3f0cfe |
class RosterForm(forms.ModelForm): <NEW_LINE> <INDENT> user = forms.ModelChoiceField(required=True, label="Instructor", queryset=User.objects.all()) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Roster <NEW_LINE> fields = ( 'user', ) <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.user = kwargs.pop('user') <NEW_LINE> super(RosterForm, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> cleaned_data = super(RosterForm, self).clean() <NEW_LINE> user = cleaned_data.get('user') <NEW_LINE> return cleaned_data <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> Roster.objects.create(user=self.cleaned_data['user'], group=self.instance, role=UserRole.ADMIN) <NEW_LINE> super(RosterForm, self).save(*args, **kwargs) | A form for creating a new group with a new teacher | 625990674428ac0f6e659cad |
@dataclass(eq=False, repr=False) <NEW_LINE> class AbstractWaveform(Message): <NEW_LINE> <INDENT> frame: str | A waveform envelope defined for a specific frame. This abstract class is made concrete by either a `Waveform` or a templated waveform such as `GaussianWaveform` | 6259906766673b3332c31b78 |
class InRowRepeat: <NEW_LINE> <INDENT> def __init__(self, components, until=None): <NEW_LINE> <INDENT> if type(components) is not list: <NEW_LINE> <INDENT> raise TypeError('Components must be a list.') <NEW_LINE> <DEDENT> if len(components) == 0: <NEW_LINE> <INDENT> raise ValueError('Components must not be empty.') <NEW_LINE> <DEDENT> for component in components: <NEW_LINE> <INDENT> if not is_valid_row_component(component): <NEW_LINE> <INDENT> raise TypeError('Each component of an InRowRepeat must be an Annotation or InRowRepeat.') <NEW_LINE> <DEDENT> <DEDENT> if until is not None and type(until) != str: <NEW_LINE> <INDENT> raise TypeError('Until must be a non-empty string.') <NEW_LINE> <DEDENT> if until is not None and not until.strip(): <NEW_LINE> <INDENT> raise ValueError('Until must be a non-empty string.') <NEW_LINE> <DEDENT> self.components = components <NEW_LINE> self.until = until <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> repeat_str = '<repeat' <NEW_LINE> if self.until: <NEW_LINE> <INDENT> repeat_str += ' until="' + self.until + '"' <NEW_LINE> <DEDENT> repeat_str += '>' + ''.join([component.__str__() for component in self.components]) + '</repeat>' <NEW_LINE> return repeat_str <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if other.__class__ is not InRowRepeat: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.until != other.until: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return is_equal_pairwise(self.components, other.components) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) | Represents a series of stitches that are repeated within a single row. | 625990676e29344779b01dcc |
class FadesError(Exception): <NEW_LINE> <INDENT> pass | Provides a Fades exception. | 62599067ac7a0e7691f73c61 |
class BaseStat(object): <NEW_LINE> <INDENT> def __init__(self, agent, stat_conf, logger): <NEW_LINE> <INDENT> self.agent = agent <NEW_LINE> self.stat_conf = stat_conf <NEW_LINE> self.logger = logger <NEW_LINE> self.configure() <NEW_LINE> <DEDENT> def configure(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def stat(self): <NEW_LINE> <INDENT> return {} | Base class for all service stats | 6259906799cbb53fe6832660
class ResourceLibrary(cc_targets.CcTarget): <NEW_LINE> <INDENT> def __init__(self, name, srcs, deps, optimize, extra_cppflags, kwargs): <NEW_LINE> <INDENT> super(ResourceLibrary, self).__init__( name=name, type='resource_library', srcs=srcs, deps=deps, visibility=None, warning='', defs=[], incs=[], export_incs=[], optimize=optimize, extra_cppflags=extra_cppflags, extra_linkflags=[], kwargs=kwargs) <NEW_LINE> hdrs = [self._target_file_path('%s.h' % self.name)] <NEW_LINE> self.attr['generated_hdrs'] = hdrs <NEW_LINE> cc_targets._declare_hdrs(self, hdrs) <NEW_LINE> <DEDENT> def ninja_rules(self): <NEW_LINE> <INDENT> self._check_deprecated_deps() <NEW_LINE> if not self.srcs: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> resources = [self._source_file_path(s) for s in self.srcs] <NEW_LINE> index = [self._target_file_path('%s.h' % self.name), self._target_file_path('%s.c' % self.name)] <NEW_LINE> self.ninja_build('resource_index', index, inputs=resources, variables={ 'name': regular_variable_name(self.name), 'path': self.path }) <NEW_LINE> sources = ['%s.c' % self.name] <NEW_LINE> for resource in self.srcs: <NEW_LINE> <INDENT> generated_source = '%s.c' % resource <NEW_LINE> self.ninja_build('resource', self._target_file_path(generated_source), inputs=self._source_file_path(resource)) <NEW_LINE> sources.append(generated_source) <NEW_LINE> <DEDENT> objs = self._generated_cc_objects(sources) <NEW_LINE> self._cc_library(objs) | This class is used to generate C/C++ resource library rules. | 625990677d847024c075db54 |
class BetaP2PNodeServicer(object): <NEW_LINE> <INDENT> def GetNodeState(self, request, context): <NEW_LINE> <INDENT> context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) <NEW_LINE> <DEDENT> def GetKnownPeers(self, request, context): <NEW_LINE> <INDENT> context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) <NEW_LINE> <DEDENT> def GetBlock(self, request, context): <NEW_LINE> <INDENT> context.code(beta_interfaces.StatusCode.UNIMPLEMENTED) | The Beta API is deprecated for 0.15.0 and later.
It is recommended to use the GA API (classes and functions in this
file not marked beta) for all further purposes. This class was generated
only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0. | 625990677d43ff2487427fce |
class Event(Base): <NEW_LINE> <INDENT> __tablename__ = 'event' <NEW_LINE> event_id = Column(Integer, nullable=False, primary_key=True, autoincrement=True) <NEW_LINE> time_stamp = Column(DateTime, nullable=False, server_default=sa.func.now()) <NEW_LINE> severity = Column(String(50), nullable=False, server_default='') <NEW_LINE> type = Column(String(50), nullable=False, server_default='') <NEW_LINE> version = Column(String(250), nullable=False, server_default='') <NEW_LINE> vendor = Column(String(250), nullable=False, server_default='') <NEW_LINE> description = Column(Text, nullable=False, server_default='') | The event table is to store required attributes of an event | 62599067aad79263cf42ff34 |
class Comment(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'comments' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> body = db.Column(db.Text) <NEW_LINE> body_html = db.Column(db.Text) <NEW_LINE> timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow) <NEW_LINE> disabled = db.Column(db.Boolean) <NEW_LINE> author_id = db.Column(db.Integer, db.ForeignKey('users.id')) <NEW_LINE> post_id = db.Column(db.Integer, db.ForeignKey('posts.id')) <NEW_LINE> @staticmethod <NEW_LINE> def on_changed_body(target, value, oldvalue, initiator): <NEW_LINE> <INDENT> allowed_tags = [ 'a', 'abbr', 'acronym', 'b', 'code', 'em', 'i', 'strong' ] <NEW_LINE> target.body_html = bleach.linkify( bleach.clean(markdown(value, output_format='html'), tags=allowed_tags, strip=True) ) <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> json_comment = { 'url': url_for('api.get_comment', id=self.id, _external=True), 'post': url_for('api.get_post', id=self.post_id, _external=True), 'body': self.body, 'body_html': self.body_html, 'timestamp': self.timestamp, 'author': url_for('api.get_user', id=self.author_id, _external=True), } <NEW_LINE> return json_comment <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_json(json_comment): <NEW_LINE> <INDENT> body = json_comment.get('body') <NEW_LINE> if body is None or body == '': <NEW_LINE> <INDENT> raise ValidationError('comment does not have a body') <NEW_LINE> <DEDENT> return Comment(body=body) | Comments model | 625990673cc13d1c6d466ec0
class DataLogger(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def log_training_progress(self, model, epoch, i, set_size, batch_time, data_time, classerr, losses, print_freq, collectors): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def log_activation_statsitic(self, phase, stat_name, activation_stats, epoch): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def log_weights_sparsity(self, model, epoch): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def log_quantization_error(self, model): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def log_weights_distribution(self, named_params, steps_completed): <NEW_LINE> <INDENT> pass | This is an abstract interface for data loggers
Data loggers log the progress of the training process to some backend.
This backend can be a file, a web service, or some other means to collect and/or
display the training | 6259906744b2445a339b751e |
class TurboActivateInetTimeoutError(TurboActivateInetError): <NEW_LINE> <INDENT> pass | The connection to the server timed out because a long period of time
elapsed since the last data was sent or received. | 625990671f5feb6acb164368 |
class CustomConfigParser(metaclass=Singleton): <NEW_LINE> <INDENT> def __init__(self, config_override_filename=None): <NEW_LINE> <INDENT> if config_override_filename and not os.path.isfile(config_override_filename): <NEW_LINE> <INDENT> raise ValueError("Unable to find config file {}".format(config_override_filename)) <NEW_LINE> <DEDENT> main_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.ini') <NEW_LINE> self.config = ConfigParser(interpolation=ExtendedInterpolation()) <NEW_LINE> self.config.optionxform = str <NEW_LINE> if config_override_filename: <NEW_LINE> <INDENT> self.config.read([main_file, config_override_filename]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.config.read(main_file) | Singleton class that wraps the ConfigParser to make sure it's only loaded once.
The first time, a config filename override will be considered. After that
the parameter is irrelevant as the same config object will be returned. | 62599067d6c5a102081e38a2 |
class pyNoIdChainSilhouetteIterator: <NEW_LINE> <INDENT> def __init__(self, stayInSelection=True): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def init(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def traverse(self, iter): <NEW_LINE> <INDENT> pass | Natural chaining iterator that follows the edges of the same nature following the topology of objects, with decreasing priority for silhouettes, then borders, then suggestive contours, then all other edge types. It won’t chain the same ViewEdge twice. | 62599067aad79263cf42ff35 |
class ShareTypeManager(base.ManagerWithFind): <NEW_LINE> <INDENT> resource_class = ShareType <NEW_LINE> def list(self, search_opts=None, show_all=True): <NEW_LINE> <INDENT> query_string = '' <NEW_LINE> if show_all: <NEW_LINE> <INDENT> query_string = '?is_public=all' <NEW_LINE> <DEDENT> return self._list("/types%s" % query_string, "share_types") <NEW_LINE> <DEDENT> def get(self, share_type="default"): <NEW_LINE> <INDENT> return self._get("/types/%s" % common_base.getid(share_type), "share_type") <NEW_LINE> <DEDENT> def delete(self, share_type): <NEW_LINE> <INDENT> self._delete("/types/%s" % common_base.getid(share_type)) <NEW_LINE> <DEDENT> def _do_create(self, name, spec_driver_handles_share_servers, spec_snapshot_support=True, is_public=True, is_public_keyname="os-share-type-access:is_public"): <NEW_LINE> <INDENT> body = { "share_type": { "name": name, is_public_keyname: is_public, "extra_specs": { "driver_handles_share_servers": spec_driver_handles_share_servers, "snapshot_support": spec_snapshot_support, }, } } <NEW_LINE> return self._create("/types", body, "share_type") <NEW_LINE> <DEDENT> @api_versions.wraps("1.0", "2.6") <NEW_LINE> def create(self, name, spec_driver_handles_share_servers, spec_snapshot_support=True, is_public=True): <NEW_LINE> <INDENT> return self._do_create( name, spec_driver_handles_share_servers, spec_snapshot_support, is_public, "os-share-type-access:is_public") <NEW_LINE> <DEDENT> @api_versions.wraps("2.7") <NEW_LINE> def create(self, name, spec_driver_handles_share_servers, spec_snapshot_support=True, is_public=True): <NEW_LINE> <INDENT> return self._do_create( name, spec_driver_handles_share_servers, spec_snapshot_support, is_public, "share_type_access:is_public") | Manage :class:`ShareType` resources. | 6259906791f36d47f2231a4d |
class sequenceTransform(VegaSchema): <NEW_LINE> <INDENT> _schema = {'$ref': '#/defs/sequenceTransform'} <NEW_LINE> def __init__(self, start=Undefined, stop=Undefined, type=Undefined, signal=Undefined, step=Undefined, **kwds): <NEW_LINE> <INDENT> super(sequenceTransform, self).__init__(start=start, stop=stop, type=type, signal=signal, step=step, **kwds) | sequenceTransform schema wrapper
Mapping(required=[type, start, stop])
Attributes
----------
start : anyOf(float, :class:`signal`)
stop : anyOf(float, :class:`signal`)
type : enum('sequence')
signal : string
step : anyOf(float, :class:`signal`)
as : anyOf(string, :class:`signal`) | 62599067a8370b77170f1b4c |
class DistInfo(IndexReference): <NEW_LINE> <INDENT> def __init__(self, release, dist_type=None, url=None, hashname=None, hashval=None, is_external=True, python_version=None, index=None): <NEW_LINE> <INDENT> self.set_index(index) <NEW_LINE> self.release = release <NEW_LINE> self.dist_type = dist_type <NEW_LINE> self.python_version = python_version <NEW_LINE> self._unpacked_dir = None <NEW_LINE> self.downloaded_location = None <NEW_LINE> self.urls = [] <NEW_LINE> self._url = None <NEW_LINE> self.add_url(url, hashname, hashval, is_external) <NEW_LINE> <DEDENT> def add_url(self, url=None, hashname=None, hashval=None, is_external=True): <NEW_LINE> <INDENT> if hashname is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> hashlib.new(hashname) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise UnsupportedHashName(hashname) <NEW_LINE> <DEDENT> <DEDENT> if url not in [u['url'] for u in self.urls]: <NEW_LINE> <INDENT> self.urls.append({ 'url': url, 'hashname': hashname, 'hashval': hashval, 'is_external': is_external, }) <NEW_LINE> self._url = None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> if self._url is None: <NEW_LINE> <INDENT> if len(self.urls) > 1: <NEW_LINE> <INDENT> internals_urls = [u for u in self.urls if u['is_external'] == False] <NEW_LINE> if len(internals_urls) >= 1: <NEW_LINE> <INDENT> self._url = internals_urls[0] <NEW_LINE> <DEDENT> <DEDENT> if self._url is None: <NEW_LINE> <INDENT> self._url = self.urls[0] <NEW_LINE> <DEDENT> <DEDENT> return self._url <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_source(self): <NEW_LINE> <INDENT> return self.dist_type == 'sdist' <NEW_LINE> <DEDENT> def download(self, path=None): <NEW_LINE> <INDENT> if path is None: <NEW_LINE> <INDENT> path = tempfile.mkdtemp() <NEW_LINE> <DEDENT> if self.downloaded_location is None: <NEW_LINE> <INDENT> url = self.url['url'] <NEW_LINE> archive_name = urlparse.urlparse(url)[2].split('/')[-1] <NEW_LINE> filename, headers = urllib.urlretrieve(url, path + "/" + archive_name) <NEW_LINE> self.downloaded_location = filename <NEW_LINE> self._check_md5(filename) <NEW_LINE> <DEDENT> return self.downloaded_location <NEW_LINE> <DEDENT> def unpack(self, path=None): <NEW_LINE> <INDENT> if not self._unpacked_dir: <NEW_LINE> <INDENT> if path is None: <NEW_LINE> <INDENT> path = tempfile.mkdtemp() <NEW_LINE> <DEDENT> filename = self.download(path) <NEW_LINE> unpack_archive(filename, path) <NEW_LINE> self._unpacked_dir = path <NEW_LINE> <DEDENT> return path <NEW_LINE> <DEDENT> def _check_md5(self, filename): <NEW_LINE> <INDENT> hashname = self.url['hashname'] <NEW_LINE> expected_hashval = self.url['hashval'] <NEW_LINE> if None not in (expected_hashval, hashname): <NEW_LINE> <INDENT> f = open(filename, 'rb') <NEW_LINE> try: <NEW_LINE> <INDENT> hashval = hashlib.new(hashname) <NEW_LINE> hashval.update(f.read()) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> f.close() <NEW_LINE> <DEDENT> if hashval.hexdigest() != expected_hashval: <NEW_LINE> <INDENT> raise HashDoesNotMatch("got %s instead of %s" % (hashval.hexdigest(), expected_hashval)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self.release is None: <NEW_LINE> <INDENT> return "<? ? %s>" % self.dist_type <NEW_LINE> <DEDENT> return "<%s %s %s>" % ( self.release.name, self.release.version, self.dist_type or "") | Represents a distribution retrieved from an index (sdist, bdist, ...)
| 6259906776e4537e8c3f0cff |
class Tuple(Funsor): <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> assert isinstance(args, tuple) <NEW_LINE> assert all(isinstance(arg, Funsor) for arg in args) <NEW_LINE> inputs = OrderedDict() <NEW_LINE> for arg in args: <NEW_LINE> <INDENT> inputs.update(arg.inputs) <NEW_LINE> <DEDENT> output = Product[tuple(arg.output for arg in args)] <NEW_LINE> super().__init__(inputs, output) <NEW_LINE> self.args = args <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for i in range(len(self.args)): <NEW_LINE> <INDENT> yield self[i] | Funsor term representing tuples of other terms of possibly heterogeneous type. | 62599067e76e3b2f99fda17d |
class Naive(SupervisedTemplate): <NEW_LINE> <INDENT> def __init__( self, model: Module, optimizer: Optimizer, criterion=CrossEntropyLoss(), train_mb_size: int = 1, train_epochs: int = 1, eval_mb_size: int = None, device=None, plugins: Optional[List[SupervisedPlugin]] = None, evaluator: EvaluationPlugin = default_evaluator, eval_every=-1, **base_kwargs ): <NEW_LINE> <INDENT> super().__init__( model, optimizer, criterion, train_mb_size=train_mb_size, train_epochs=train_epochs, eval_mb_size=eval_mb_size, device=device, plugins=plugins, evaluator=evaluator, eval_every=eval_every, **base_kwargs ) | Naive finetuning.
The simplest (and least effective) Continual Learning strategy. Naive just
incrementally fine tunes a single model without employing any method
to contrast the catastrophic forgetting of previous knowledge.
This strategy does not use task identities.
Naive is easy to set up and its results are commonly used to show the worst
performing baseline. | 625990673617ad0b5ee078ce |
class DenseNet121(object): <NEW_LINE> <INDENT> def __init__(self, input_shape, output_dim, k=32, theta=0.5): <NEW_LINE> <INDENT> self.k = k <NEW_LINE> self.theta = theta <NEW_LINE> x = Input(shape=input_shape) <NEW_LINE> h = Conv2D(64, kernel_size=(7, 7), strides=(2, 2), padding='same')(x) <NEW_LINE> h = BatchNormalization()(h) <NEW_LINE> h = Activation('relu')(h) <NEW_LINE> h = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same')(h) <NEW_LINE> h, n_channel = self._dense_block(h, 64, 6) <NEW_LINE> h, n_channel = self._transition(h, n_channel) <NEW_LINE> h, n_channel = self._dense_block(h, n_channel, 12) <NEW_LINE> h, n_channel = self._transition(h, n_channel) <NEW_LINE> h, n_channel = self._dense_block(h, n_channel, 24) <NEW_LINE> h, n_channel = self._transition(h, n_channel) <NEW_LINE> h, _ = self._dense_block(h, n_channel, 16) <NEW_LINE> h = GlobalAveragePooling2D()(h) <NEW_LINE> h = Dense(1000, activation='relu')(h) <NEW_LINE> y = Dense(output_dim, activation='softmax')(h) <NEW_LINE> self.model = Model(x, y) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> return self.model <NEW_LINE> <DEDENT> def _dense_block(self, x, n_channel, nb_blocks): <NEW_LINE> <INDENT> h = x <NEW_LINE> for _ in range(nb_blocks): <NEW_LINE> <INDENT> stream = h <NEW_LINE> h = BatchNormalization()(h) <NEW_LINE> h = Activation('relu')(h) <NEW_LINE> h = Conv2D(128, kernel_size=(1, 1), padding='same')(h) <NEW_LINE> h = BatchNormalization()(h) <NEW_LINE> h = Activation('relu')(h) <NEW_LINE> h = Conv2D(self.k, kernel_size=(3, 3), padding='same')(h) <NEW_LINE> h = Concatenate()([stream, h]) <NEW_LINE> n_channel += self.k <NEW_LINE> <DEDENT> return h, n_channel <NEW_LINE> <DEDENT> def _transition(self, x, n_channel): <NEW_LINE> <INDENT> n_channel = int(n_channel * self.theta) <NEW_LINE> h = BatchNormalization()(x) <NEW_LINE> h = Activation('relu')(h) <NEW_LINE> h = Conv2D(n_channel, kernel_size=(1, 1), padding='same')(h) <NEW_LINE> return AveragePooling2D()(h), n_channel | Reference:
"Densely Connected Convolutional Networks"
https://arxiv.org/abs/1608.06993 | 6259906767a9b606de547660 |
class DateTime(DataElementValue): <NEW_LINE> <INDENT> value = models.DateTimeField() <NEW_LINE> raw = models.CharField( max_length=26, help_text="YYYYMMDDHHMMSS.FFFFFF&ZZXX" ) | A :class:`~django.db.models.Model` representing a single *DateTime* data
element value. | 625990674428ac0f6e659caf |
class MemcacheStatus(request_handler.RequestHandler): <NEW_LINE> <INDENT> @user_util.open_access <NEW_LINE> def get(self): <NEW_LINE> <INDENT> now = datetime.datetime.now() <NEW_LINE> now_time_t = int(time.mktime(now.timetuple())) <NEW_LINE> memcache_stats = memcache.get_stats() <NEW_LINE> if self.request.get('output') in ('text', 'txt'): <NEW_LINE> <INDENT> self.response.out.write(now_time_t) <NEW_LINE> self.response.out.write(' h:%(hits)s' ' m:%(misses)s' ' bh:%(byte_hits)s' ' i:%(items)s' ' b:%(bytes)s' ' oia:%(oldest_item_age)s' '\n' % memcache_stats) <NEW_LINE> self.response.headers['Content-Type'] = "text/text" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> template_values = { 'now': now.ctime(), 'now_time_t': now_time_t, 'memcache_stats': memcache_stats, } <NEW_LINE> self.render_jinja2_template("memcache_stats.html", template_values) | Handle requests to show information about the current state of memcache.
Gives raw data, suitable for plotting.
This is open-access so it's easy to write a script to download this
data and store it. Nothing here is sensitive.
TODO(csilvers): save the data and show a pretty graph. | 625990674e4d562566373b84
class Plugin(): <NEW_LINE> <INDENT> def __init__(self, plugin, name, commands): <NEW_LINE> <INDENT> self.plugin = plugin <NEW_LINE> self.name = name <NEW_LINE> self.commands = commands | Store information about a plugin | 625990677047854f46340b31 |
class NoConsumerMiddleware(ViewMiddleware): <NEW_LINE> <INDENT> def process_view(self, request, view_func, view_args, view_kwargs): <NEW_LINE> <INDENT> request.can_view_consumer = ( request.user.is_authenticated() and request.user.get_profile().can_view_consumer() ) <NEW_LINE> name = self.get_name(view_func) <NEW_LINE> if (name.startswith(settings.NO_ADDONS_MODULES) or not request.can_view_consumer and name.startswith(settings.NO_CONSUMER_MODULES)): <NEW_LINE> <INDENT> return jingo.render(request, 'site/no_consumer.html') | Surprisingly similar to the other middleware, except on finding a match
it renders a page and has a bigger list of things we don't like.
Even more temporary. Maybe even more dragons. | 625990673317a56b869bf101 |
class SimpleIterator(DBIterator): <NEW_LINE> <INDENT> def __init__(self, triples, pattern, offset=0): <NEW_LINE> <INDENT> super(SimpleIterator, self).__init__(pattern) <NEW_LINE> self._triples = triples <NEW_LINE> self._popped = offset <NEW_LINE> <DEDENT> def has_next(self): <NEW_LINE> <INDENT> return len(self._triples) > 0 <NEW_LINE> <DEDENT> def last_read(self): <NEW_LINE> <INDENT> return "{}".format(self._popped) <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> self._popped += 1 <NEW_LINE> return self._triples.pop() | A DBIterator that iterates over a set of triples | 6259906799fddb7c1ca6398e |
class FanMap(SnmpPlugin): <NEW_LINE> <INDENT> maptype = "FanMap" <NEW_LINE> modname = "Products.ZenModel.Fan" <NEW_LINE> relname = "fans" <NEW_LINE> compname = "hw" <NEW_LINE> snmpGetTableMaps = ( GetTableMap('sensorTable', '.1.3.6.1.4.1.30155.2.1.2.1', { '.2': 'id', '.3': '_type', '.7': 'state', } ), ) <NEW_LINE> states = {0:'unspecified', 1:'ok', 2:'warn', 3:'critical', 4:'unknown'} <NEW_LINE> def process(self, device, results, log): <NEW_LINE> <INDENT> log.info('processing %s for device %s', self.name(), device.id) <NEW_LINE> getdata, tabledata = results <NEW_LINE> rm = self.relMap() <NEW_LINE> for oid, sensor in tabledata.get("sensorTable",{}).iteritems(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> om = self.objectMap(sensor) <NEW_LINE> if int(om._type) != 1: continue <NEW_LINE> om.snmpindex = oid.strip('.') <NEW_LINE> om.id = self.prepId(om.id) <NEW_LINE> om.state = self.states.get(int(om.state), 'unknown') <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> rm.append(om) <NEW_LINE> <DEDENT> return rm | Map OpenBSD sensor table to model. | 62599067b7558d5895464aee |
class TooManySpecsForKey(Exception): <NEW_LINE> <INDENT> pass | Thrown when a mapping contains multiple specs when a singular spec is expected. | 625990677cff6e4e811b71c5 |
class OverrunBufferException(ParseException): <NEW_LINE> <INDENT> def __init__(self, readOffs, bufLen): <NEW_LINE> <INDENT> tvalue = "read: %s, buffer length: %s" % (hex(readOffs), hex(bufLen)) <NEW_LINE> super(ParseException, self).__init__(tvalue) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(unicode(self)) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u"Tried to parse beyond the end of the file (%s)" % (self._value) | An exception to be thrown during parsing when something is unpacked into
or from a location beyond the boundaries of a buffer. | 625990674428ac0f6e659cb0 |
class SouthTexasCity(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=30) <NEW_LINE> point = models.PointField(srid=32140) <NEW_LINE> objects = models.GeoManager() <NEW_LINE> def __unicode__(self): return self.name | City model on projected coordinate system for South Texas. | 62599067f7d966606f749479 |
class PserverZODBUsersTestCase(unittest.TestCase): <NEW_LINE> <INDENT> layer = PloneBaseLayer <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.layer.requester('POST', '/plone/plone/@addons', data=json.dumps({ "id": "zodbusers" })) <NEW_LINE> <DEDENT> def get_portal(self): <NEW_LINE> <INDENT> root = self.layer.new_root() <NEW_LINE> return root['plone'] | Adding the OAuth utility | 62599067cb5e8a47e493cd43 |
class ChoicesTask(object): <NEW_LINE> <INDENT> def __iter__(self): <NEW_LINE> <INDENT> from project.models import Task <NEW_LINE> for i in Task.query.order_by(Task.task_no.asc()).all(): <NEW_LINE> <INDENT> yield (i.id, i.task_no) | This method ensures a dynamic choice_list for selectField | 625990672ae34c7f260ac866
class AirVisualOptionsFlowHandler(config_entries.OptionsFlow): <NEW_LINE> <INDENT> def __init__(self, config_entry): <NEW_LINE> <INDENT> self.config_entry = config_entry <NEW_LINE> <DEDENT> async def async_step_init(self, user_input=None): <NEW_LINE> <INDENT> if user_input is not None: <NEW_LINE> <INDENT> return self.async_create_entry(title="", data=user_input) <NEW_LINE> <DEDENT> return self.async_show_form( step_id="init", data_schema=vol.Schema( { vol.Required( CONF_SHOW_ON_MAP, default=self.config_entry.options.get(CONF_SHOW_ON_MAP), ): bool } ), ) | Handle an AirVisual options flow. | 625990670a50d4780f70697f |
class DeleteLayerVersionRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.LayerName = None <NEW_LINE> self.LayerVersion = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.LayerName = params.get("LayerName") <NEW_LINE> self.LayerVersion = params.get("LayerVersion") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | DeleteLayerVersion request parameter structure
| 625990678e7ae83300eea80d |
class Interaction: <NEW_LINE> <INDENT> name = "Particle interaction" <NEW_LINE> integral_type = None <NEW_LINE> integrals = None <NEW_LINE> def __init__(self, name=None, integrals=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.integrals = IntegralDeduplicator(self.integrals).integrals <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name + "\n\t" + "\n\t".join([str(integral) for integral in self.integrals]) <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> for integral in self.integrals: <NEW_LINE> <INDENT> integral.initialize() | ## Interaction
Class that holds collision integrals explicitly defined for a set of particles | 625990670c0af96317c5791e |
class Style: <NEW_LINE> <INDENT> component_margin = 24 <NEW_LINE> body_text_size = 24 <NEW_LINE> header_text_size = 48 <NEW_LINE> header_font = QtGui.QFont("Segoe UI", header_text_size) <NEW_LINE> body_font = QtGui.QFont("Segoe UI", body_text_size) <NEW_LINE> ui_text_color = Qt.white <NEW_LINE> background_style = "background-color: rgba(26,26,26)" <NEW_LINE> animation_time = 400 | Style constants | 6259906755399d3f05627c9f |
class Stretch(ReversibleTransform): <NEW_LINE> <INDENT> stretch_center = CoordParameter(default=(0,0)) <NEW_LINE> def set_stretch_factor(self, value): <NEW_LINE> <INDENT> if isinstance(value, Coord): <NEW_LINE> <INDENT> self.__stretch_factor__ = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__stretch_factor__ = Coord(value[0], value[1]) <NEW_LINE> <DEDENT> if self.__stretch_factor__[0] == 0.0 or self.__stretch_factor__[1] == 0.0: <NEW_LINE> <INDENT> raise ValueError("Error: Stretch factor cannot be zero in Stretch transform") <NEW_LINE> <DEDENT> <DEDENT> stretch_factor = SetFunctionParameter('__stretch_factor__', set_stretch_factor) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "[SPiRA: Stretch] (factor {}, center {})".format(self.stretch_factor, self.stretch_center) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__repr__() <NEW_LINE> <DEDENT> def apply_to_coord(self, coord): <NEW_LINE> <INDENT> x1 = self.__stretch_factor__[0] * coord[0] <NEW_LINE> x2 = (1 - self.__stretch_factor__[0]) * self.stretch_center[0] <NEW_LINE> y1 = self.__stretch_factor__[1] * coord[1] <NEW_LINE> y2 = (1 - self.__stretch_factor__[1]) * self.stretch_center[1] <NEW_LINE> return Coord(x1+x2, y1+y2) <NEW_LINE> <DEDENT> def reverse_on_coord(self, coord): <NEW_LINE> <INDENT> x1 = 1.0 / self.__stretch_factor__[0] * coord[0] <NEW_LINE> x2 = (1 - 1.0 / self.__stretch_factor__[0]) * self.stretch_center[0] <NEW_LINE> y1 = 1.0 / self.__stretch_factor__[1] * coord[1] <NEW_LINE> y2 = (1 - 1.0 / self.__stretch_factor__[1]) * self.stretch_center[1] <NEW_LINE> return Coord(x1+x2, y1+y2) <NEW_LINE> <DEDENT> def apply_to_array(self, coords): <NEW_LINE> <INDENT> coords *= np.array([self.stretch_factor.x, self.stretch_factor.y]) <NEW_LINE> x = (1 - self.__stretch_factor__.x) * self.stretch_center.x <NEW_LINE> y = (1 - self.__stretch_factor__.y) * self.stretch_center.y <NEW_LINE> coords += np.array([x, y]) <NEW_LINE> return coords <NEW_LINE> <DEDENT> def reverse_on_array(self, coords): <NEW_LINE> <INDENT> coords *= np.array([1.0 / self.stretch_factor.x, 1.0 / self.stretch_factor.y]) <NEW_LINE> x = (1 - 1.0 / self.__stretch_factor__.x) * self.stretch_center.x <NEW_LINE> y = (1 - 1.0 / self.__stretch_factor__.y) * self.stretch_center.y <NEW_LINE> coords += np.array([x, y]) <NEW_LINE> return coords <NEW_LINE> <DEDENT> def apply_to_angle(self, angle): <NEW_LINE> <INDENT> return angle <NEW_LINE> <DEDENT> def is_identity(self): <NEW_LINE> <INDENT> return ((self.stretch_factor.x == 1.0) and (self.stretch_factor.y == 1.0)) <NEW_LINE> <DEDENT> def id_string(self): <NEW_LINE> <INDENT> return self.__repr__() | Stretch an object using.
Example
-------
>>> s = Stretch()(shape) | 62599067d268445f2663a71c |