Columns: code (string, lengths 4–4.48k) · docstring (string, lengths 1–6.45k) · _id (string, lengths 24–24)
class prototype: <NEW_LINE> <INDENT> def types(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def label(self, type): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def html(self, type): <NEW_LINE> <INDENT> raise NotImplementedError()
Prototype for a marking object that defines the rules for marking artifacts.
6259903d596a897236128eca
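A minimal sketch of a concrete marking object satisfying the interface above; the severity type names and the HTML snippet are hypothetical, not taken from the original project:

class SeverityMarking(prototype):
    """Hypothetical marking object with two severity levels."""

    def types(self):
        return ["low", "high"]

    def label(self, type):
        return {"low": "Low severity", "high": "High severity"}[type]

    def html(self, type):
        # wrap the label in a span whose CSS class encodes the type
        return '<span class="marking-{0}">{1}</span>'.format(type, self.label(type))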
class SwatInjection(ManagedObject): <NEW_LINE> <INDENT> consts = SwatInjectionConsts() <NEW_LINE> naming_props = set(['name']) <NEW_LINE> mo_meta = MoMeta("SwatInjection", "swatInjection", "inject-[name]", VersionMeta.Version101e, "InputOutput", 0x3f, [], ["admin"], ['topSystem'], ['swatAction'], ["Get"]) <NEW_LINE> prop_meta = { "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101e, MoPropertyMeta.INTERNAL, 0x2, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x4, 0, 256, None, [], []), "model": MoPropertyMeta("model", "model", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version101e, MoPropertyMeta.NAMING, 0x8, None, None, r"""[\-\.:_a-zA-Z0-9]{1,16}""", [], []), "pool_dn": MoPropertyMeta("pool_dn", "poolDn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, 0, 256, None, [], []), "revision": MoPropertyMeta("revision", "revision", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []), "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []), "serial": MoPropertyMeta("serial", "serial", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "vendor": MoPropertyMeta("vendor", "vendor", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []), } <NEW_LINE> prop_map = { "childAction": "child_action", "dn": "dn", "model": "model", "name": "name", "poolDn": "pool_dn", "revision": "revision", "rn": "rn", "sacl": "sacl", "serial": "serial", "status": "status", "vendor": "vendor", } <NEW_LINE> def __init__(self, parent_mo_or_dn, name, **kwargs): <NEW_LINE> <INDENT> self._dirty_mask = 0 <NEW_LINE> self.name = name <NEW_LINE> self.child_action = None <NEW_LINE> self.model = None <NEW_LINE> self.pool_dn = None <NEW_LINE> self.revision = None <NEW_LINE> self.sacl = None <NEW_LINE> self.serial = None <NEW_LINE> self.status = None <NEW_LINE> self.vendor = None <NEW_LINE> ManagedObject.__init__(self, "SwatInjection", parent_mo_or_dn, **kwargs)
This is the SwatInjection class.
6259903de76e3b2f99fd9c44
class CopyGenerator(ReportGenerator): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> copy_dict = self.skin_dict['CopyGenerator'] <NEW_LINE> log_success = to_bool(weeutil.config.search_up(copy_dict, 'log_success', True)) <NEW_LINE> copy_list = [] <NEW_LINE> if self.first_run: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> copy_list += weeutil.weeutil.option_as_list(copy_dict['copy_once']) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> copy_list += weeutil.weeutil.option_as_list(copy_dict['copy_always']) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> os.chdir(os.path.join(self.config_dict['WEEWX_ROOT'], self.skin_dict['SKIN_ROOT'], self.skin_dict['skin'])) <NEW_LINE> html_dest_dir = os.path.join(self.config_dict['WEEWX_ROOT'], self.skin_dict['HTML_ROOT']) <NEW_LINE> ncopy = 0 <NEW_LINE> for pattern in copy_list: <NEW_LINE> <INDENT> for path in glob.glob(pattern): <NEW_LINE> <INDENT> ncopy += weeutil.weeutil.deep_copy_path(path, html_dest_dir) <NEW_LINE> <DEDENT> <DEDENT> if log_success: <NEW_LINE> <INDENT> log.info("copygenerator: Copied %d files to %s", ncopy, html_dest_dir)
Class for managing the 'copy generator.' This will copy files from the skin subdirectory to the public_html subdirectory.
6259903d711fe17d825e15b8
class _Variable(object): <NEW_LINE> <INDENT> def __init__(self, var_name, var_type, initial_value=None, length=None): <NEW_LINE> <INDENT> if var_type not in VAR_TYPES: <NEW_LINE> <INDENT> raise Exception("{0}: unknown variable type".format(var_type)) <NEW_LINE> <DEDENT> if var_type == VAR_ARRAY: <NEW_LINE> <INDENT> if length is None: <NEW_LINE> <INDENT> raise Exception("array variables must define a length") <NEW_LINE> <DEDENT> keys = [catstr(var_name, CMP_ARRAY(i)) for i in range(length)] <NEW_LINE> <DEDENT> elif length is not None: <NEW_LINE> <INDENT> raise Exception("{0}: attempting to assign length".format(var_name)) <NEW_LINE> <DEDENT> elif var_type == VAR_SCALAR: <NEW_LINE> <INDENT> length = DIM_SCALAR <NEW_LINE> keys = [var_name] <NEW_LINE> <DEDENT> elif var_type == VAR_VECTOR: <NEW_LINE> <INDENT> length = DIM_VECTOR <NEW_LINE> keys = [catstr(var_name, CMP_VECTOR(i)) for i in range(length)] <NEW_LINE> <DEDENT> elif var_type == VAR_TENSOR: <NEW_LINE> <INDENT> length = DIM_TENSOR <NEW_LINE> keys = [catstr(var_name, CMP_TENSOR(i)) for i in range(length)] <NEW_LINE> <DEDENT> elif var_type == VAR_SYMTENSOR: <NEW_LINE> <INDENT> length = DIM_SYMTENSOR <NEW_LINE> keys = [catstr(var_name, CMP_SYMTENSOR(i)) for i in range(length)] <NEW_LINE> <DEDENT> elif var_type == VAR_SKEWTENSOR: <NEW_LINE> <INDENT> length = DIM_SKEWTENSOR <NEW_LINE> keys = [catstr(var_name, CMP_SKEWTENSOR(i)) for i in range(length)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("{0}: unexpected variable type".format(var_type)) <NEW_LINE> <DEDENT> if initial_value is None: <NEW_LINE> <INDENT> initial_value = np.zeros(length) <NEW_LINE> <DEDENT> elif isscalar(initial_value): <NEW_LINE> <INDENT> initial_value = np.ones(length) * initial_value <NEW_LINE> <DEDENT> elif len(initial_value) != length: <NEW_LINE> <INDENT> raise Exception("{0}: initial_value must have " "length {1}".format(var_name, length)) <NEW_LINE> <DEDENT> self.name = var_name <NEW_LINE> self.vtype = var_type <NEW_LINE> self.length = length <NEW_LINE> self.initial_value = initial_value <NEW_LINE> self.keys = keys <NEW_LINE> return
Variable class
6259903d15baa723494631c9
class ReachabilityObserver(ABC): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def reachability_update(self, is_online: bool) -> None: <NEW_LINE> <INDENT> pass
The Observer interface declares the reachability_update method, used by subjects.
6259903d94891a1f408ba013
class BetaLoadReportingServiceStub(object): <NEW_LINE> <INDENT> def StreamLoadStats(self, request_iterator, timeout, metadata=None, with_call=False, protocol_options=None): <NEW_LINE> <INDENT> raise NotImplementedError()
The Beta API is deprecated for 0.15.0 and later. It is recommended to use the GA API (classes and functions in this file not marked beta) for all further purposes. This class was generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0.
6259903d15baa723494631ca
@base.ReleaseTracks(base.ReleaseTrack.BETA) <NEW_LINE> class Delete(base.DeleteCommand): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> flags.GetTPUNameArg().AddToParser(parser) <NEW_LINE> compute_flags.AddZoneFlag( parser, resource_type='tpu', operation_type='delete', explanation=( 'Zone in which TPU lives. ' 'If not specified, will use `default` compute/zone.')) <NEW_LINE> parser.display_info.AddCacheUpdater(None) <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> tpu = args.tpu_id <NEW_LINE> console_io.PromptContinue( 'You are about to delete tpu [{}]'.format(tpu), default=True, cancel_on_no=True, cancel_string='Aborted by user.') <NEW_LINE> result = cli_util.Delete(tpu, args.zone) <NEW_LINE> log.DeletedResource(args.tpu_id, kind='tpu') <NEW_LINE> return result
Deletes a Cloud TPU.
6259903d73bcbd0ca4bcb4c3
class TRMMMetaDataTransformer(object): <NEW_LINE> <INDENT> def __init__(self, meta_transformer_config=None): <NEW_LINE> <INDENT> if not meta_transformer_config: <NEW_LINE> <INDENT> meta_transformer_config = {} <NEW_LINE> <DEDENT> self.debug_logger = meta_transformer_config.get('debug_logger',lambda*a,**kwa:None) <NEW_LINE> <DEDENT> def transform(self, trmm_data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> header_string = trmm_data.getMetaDataToTransform() <NEW_LINE> meta_data_dict = dict(d.split("=") for d in header_string.split(" ") if ("=" in d) and (d is not '')) <NEW_LINE> self.debug_logger("len(meta_data_dict)",len(meta_data_dict)) <NEW_LINE> meta_data_dict['ftp_file_name'] = str(trmm_data.getDataToExtract()) <NEW_LINE> self._addCustomFields(meta_data_dict) <NEW_LINE> trmm_data.setMetaDataToLoad(meta_data_dict) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.debug_logger("transformMetaData Exception:",str(e),str(arcpy.GetMessages(2))) <NEW_LINE> trmm_data.handleException(exception=("transformMetaData:",str(e)),messages=arcpy.GetMessages(2)) <NEW_LINE> <DEDENT> <DEDENT> def _addCustomFields(self, meta_data_dict): <NEW_LINE> <INDENT> strptime = datetime.strptime <NEW_LINE> datetime_format = '%Y%m%d%H%M%S' <NEW_LINE> string_datetime_format = '%Y/%m/%d %H:%M:%S' <NEW_LINE> timestamp_string = "%s%s" % (meta_data_dict['nominal_YYYYMMDD'], meta_data_dict['nominal_HHMMSS']) <NEW_LINE> meta_data_dict['datetime'] = strptime(timestamp_string, datetime_format) <NEW_LINE> timestamp_string = "%s%s" % (meta_data_dict['nominal_YYYYMMDD'], meta_data_dict['nominal_HHMMSS']) <NEW_LINE> meta_data_dict['datetime_string'] = strptime(timestamp_string, datetime_format).strftime(string_datetime_format) <NEW_LINE> start_time_string = "%s%s" % (meta_data_dict['begin_YYYYMMDD'], meta_data_dict['begin_HHMMSS']) <NEW_LINE> meta_data_dict['start_datetime'] = strptime(start_time_string, datetime_format) <NEW_LINE> start_time_string = "%s%s" % (meta_data_dict['begin_YYYYMMDD'], meta_data_dict['begin_HHMMSS']) <NEW_LINE> meta_data_dict['start_datetime_string'] = strptime(start_time_string, datetime_format).strftime(string_datetime_format) <NEW_LINE> end_time_string = "%s%s" % (meta_data_dict['end_YYYYMMDD'], meta_data_dict['end_HHMMSS']) <NEW_LINE> meta_data_dict['end_datetime'] = strptime(end_time_string, datetime_format) <NEW_LINE> end_time_string = "%s%s" % (meta_data_dict['end_YYYYMMDD'], meta_data_dict['end_HHMMSS']) <NEW_LINE> meta_data_dict['end_datetime_string'] = strptime(end_time_string, datetime_format).strftime(string_datetime_format)
Class TRMMMetaDataTransformer: 1) converts the header string inside a given bin file into a dictionary 2) adds custom field values to the dictionary (values for fields not part of the bin's meta-data)
6259903d07d97122c4217ed7
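The heart of transform() is the header-string parsing; this standalone sketch uses a made-up header in the key=value form the class expects:

# Hypothetical TRMM-style header; only the parsing mirrors the class above.
header_string = "nominal_YYYYMMDD=20140101 nominal_HHMMSS=030000 algorithm_version=7"
meta_data_dict = dict(
    d.split("=") for d in header_string.split(" ") if "=" in d and d != ""
)
print(meta_data_dict["nominal_YYYYMMDD"])  # 20140101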
@dataclasses.dataclass <NEW_LINE> class Operation: <NEW_LINE> <INDENT> path: str <NEW_LINE> summary: str <NEW_LINE> description: str <NEW_LINE> operation_id: str <NEW_LINE> tags: List[str] <NEW_LINE> security: Security <NEW_LINE> verb: str <NEW_LINE> path_parameters: List[StringParameter] <NEW_LINE> query_parameters: List[StringParameter] <NEW_LINE> json_request_body_type: str <NEW_LINE> responses: List[Response] <NEW_LINE> @property <NEW_LINE> def interface_name(self) -> str: <NEW_LINE> <INDENT> if self.operation_id: <NEW_LINE> <INDENT> return formatting.capitalize_first_letter(self.operation_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return formatting.capitalize_first_letter( self.verb.lower()) + formatting.snake_case_to_pascal_case( self.path.replace('{', '').replace('}', '').replace('/', '_')) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def response_type_name(self) -> str: <NEW_LINE> <INDENT> return self.interface_name + 'ResponseSet' <NEW_LINE> <DEDENT> @property <NEW_LINE> def request_type_name(self) -> str: <NEW_LINE> <INDENT> return self.interface_name + 'Request'
An operation uniquely identified by a path and HTTP verb.
6259903d23e79379d538d738
class LoudnessSensor(AnalogSensor): <NEW_LINE> <INDENT> def __init__(self, port="AD1", gpg=None, use_mutex=False): <NEW_LINE> <INDENT> debug("Loudness Sensor on port " + port) <NEW_LINE> self.set_descriptor("Loudness sensor") <NEW_LINE> try: <NEW_LINE> <INDENT> AnalogSensor.__init__(self, port, "INPUT", gpg, use_mutex) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> self.set_pin(1) <NEW_LINE> self._max_value = 1024
| Class for the `Grove Loudness Sensor`_. | This class derives from :py:class:`~easygopigo3.AnalogSensor` class, so all of their attributes and methods are inherited. | For creating a :py:class:`~easygopigo3.LoudnessSensor` object we need to call :py:meth:`~easygopigo3.EasyGoPiGo3.init_loudness_sensor` method like in the following examples. .. code-block:: python # create an EasyGoPiGo3 object gpg3_obj = EasyGoPiGo3() # and now instantiate a LoudnessSensor object through the gpg3_obj object loudness_sensor = gpg3_obj.init_loudness_sensor() # do the usual stuff, like read the data of the sensor value = loudness_sensor.read() value_percentage = loudness_sensor.percent_read() # take a look at AnalogSensor class and Sensor class for more methods and attributes | Or if we need to specify the port we want to use, we might do it like in the following example. .. code-block:: python # create an EasyGoPiGo3 object gpg3_obj = EasyGoPiGo3() # variable for holding the port to which we have the sound sensor connected to port = "AD1" loudness_sensor = gpg3_obj.init_loudness_sensor(port) # read the sensor the same way as in the previous example .. seealso:: For more sensors, please see our Dexter Industries `shop`_.
6259903d07f4c71912bb066b
class TestDFA(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> class TRecognizer(antlr3.BaseRecognizer): <NEW_LINE> <INDENT> api_version = 'HEAD' <NEW_LINE> <DEDENT> self.recog = TRecognizer() <NEW_LINE> <DEDENT> def testInit(self): <NEW_LINE> <INDENT> dfa = antlr3.DFA( self.recog, 1, eot=[], eof=[], min=[], max=[], accept=[], special=[], transition=[] ) <NEW_LINE> <DEDENT> def testUnpack(self): <NEW_LINE> <INDENT> self.assertEqual( antlr3.DFA.unpack( "\1\3\1\4\2\uffff\1\5\22\uffff\1\2\31\uffff\1\6\6\uffff" "\32\6\4\uffff\1\6\1\uffff\32\6" ), [ 3, 4, -1, -1, 5, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 2, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 6, -1, -1, -1, -1, -1, -1, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, -1, -1, -1, -1, 6, -1, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6 ] )
Test case for the DFA class.
6259903d4e696a045264e73e
class KubernetesTests(TestCase): <NEW_LINE> <INDENT> def test_01_service_manager_install(self): <NEW_LINE> <INDENT> subprocess.check_call( ['vagrant', 'plugin', 'install', 'vagrant-service-manager'] ) <NEW_LINE> <DEDENT> def test_02_VagrantUp(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> exit_code = subprocess.check_call( ['vagrant', 'up'], cwd='/root/adb/components/centos/centos-k8s-singlenode-setup' ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.assertEqual(exit_code, 0) <NEW_LINE> <DEDENT> def test_03_kubectl_output(self): <NEW_LINE> <INDENT> subprocess.call(["sleep", "5"]) <NEW_LINE> try: <NEW_LINE> <INDENT> output = subprocess.check_output( ['vagrant', 'ssh', '-c', '%s' % "kubectl get nodes"], cwd="/root/adb/components/centos/centos-k8s-singlenode-setup" ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.assertIn('127.0.0.1', output.split()) <NEW_LINE> self.assertIn('Ready', output.split()) <NEW_LINE> <DEDENT> def test_04_atomic_app(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> subprocess.call([ "vagrant", "ssh", "-c", "%s" % "sudo yum -y install epel-release && " "sudo yum -y install ansible" ]) <NEW_LINE> subprocess.call([ "vagrant", "ssh", "-c", "%s" % "git clone %s " % ANSIBLE_REPO ]) <NEW_LINE> subprocess.call([ "vagrant", "ssh", "-c", "%s" % "cd ci-ansible && ansible-playbook install-atomicapp.yaml" ]) <NEW_LINE> subprocess.call([ "vagrant", "ssh", "-c", "%s" % "atomic run projectatomic/helloapache" ]) <NEW_LINE> subprocess.call(["sleep", "60"]) <NEW_LINE> output = subprocess.check_output([ "vagrant", "ssh", "-c", "%s" % "kubectl get pods| grep helloapache" ]) <NEW_LINE> self.assertIn("helloapache", output) <NEW_LINE> self.assertIn("Running", output) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def test_05_VagrantDestroy(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> exit_code = subprocess.check_call( ['vagrant', 'destroy', '-f'], cwd="/root/adb/components/centos/" "centos-k8s-singlenode-setup" ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.assertEqual(exit_code, 0)
This class tests Kubernetes on the ADB box.
6259903dcad5886f8bdc5999
class OffRLAlgo(RLAlgo): <NEW_LINE> <INDENT> def __init__( self, pretrain_epochs=0, min_pool=0, target_hard_update_period=1000, use_soft_update=True, tau=0.001, opt_times=1, **kwargs): <NEW_LINE> <INDENT> super(OffRLAlgo, self).__init__(**kwargs) <NEW_LINE> self.pretrain_epochs = pretrain_epochs <NEW_LINE> self.target_hard_update_period = target_hard_update_period <NEW_LINE> self.use_soft_update = use_soft_update <NEW_LINE> self.tau = tau <NEW_LINE> self.opt_times = opt_times <NEW_LINE> self.min_pool = min_pool <NEW_LINE> self.sample_key = ["obs", "next_obs", "acts", "rewards", "terminals"] <NEW_LINE> <DEDENT> def update_per_timestep(self): <NEW_LINE> <INDENT> if self.replay_buffer.num_steps_can_sample() > max( self.min_pool, self.batch_size): <NEW_LINE> <INDENT> for _ in range(self.opt_times): <NEW_LINE> <INDENT> batch = self.replay_buffer.random_batch( self.batch_size, self.sample_key) <NEW_LINE> infos = self.update(batch) <NEW_LINE> self.logger.add_update_info(infos) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update_per_epoch(self): <NEW_LINE> <INDENT> for _ in range(self.opt_times): <NEW_LINE> <INDENT> batch = self.replay_buffer.random_batch( self.batch_size, self.sample_key) <NEW_LINE> infos = self.update(batch) <NEW_LINE> self.logger.add_update_info(infos) <NEW_LINE> <DEDENT> <DEDENT> def pretrain(self): <NEW_LINE> <INDENT> total_frames = 0 <NEW_LINE> self.pretrain_frames = self.pretrain_epochs * self.epoch_frames <NEW_LINE> for pretrain_epoch in range(self.pretrain_epochs): <NEW_LINE> <INDENT> start = time.time() <NEW_LINE> self.start_epoch() <NEW_LINE> training_epoch_info = self.collector.train_one_epoch() <NEW_LINE> for reward in training_epoch_info["train_rewards"]: <NEW_LINE> <INDENT> self.training_episode_rewards.append(reward) <NEW_LINE> <DEDENT> finish_epoch_info = self.finish_epoch() <NEW_LINE> total_frames += self.epoch_frames <NEW_LINE> infos = {} <NEW_LINE> infos["Train_Epoch_Reward"] = training_epoch_info["train_epoch_reward"] <NEW_LINE> infos["Running_Training_Average_Rewards"] = np.mean( self.training_episode_rewards) <NEW_LINE> infos.update(finish_epoch_info) <NEW_LINE> self.logger.add_epoch_info( pretrain_epoch, total_frames, time.time() - start, infos, csv_write=False) <NEW_LINE> <DEDENT> self.logger.log("Finished Pretrain")
Base RL Algorithm Framework
6259903d8da39b475be04428
class ModelGeotagManager(models.Manager): <NEW_LINE> <INDENT> pass
A manager for retrieving tags for a particular model.
6259903dec188e330fdf9ad3
class ConfigNotFoundException(SelfDefinedException): <NEW_LINE> <INDENT> pass
Exception class representing a configuration-not-found error.
6259903dd53ae8145f919696
class CumMax(Stream[float]): <NEW_LINE> <INDENT> def __init__(self, skip_na: bool = True) -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.skip_na = skip_na <NEW_LINE> self.c_max = -np.inf <NEW_LINE> <DEDENT> def forward(self) -> float: <NEW_LINE> <INDENT> node = self.inputs[0] <NEW_LINE> if self.skip_na: <NEW_LINE> <INDENT> if np.isnan(node.value): <NEW_LINE> <INDENT> return np.nan <NEW_LINE> <DEDENT> if not np.isnan(node.value) and node.value > self.c_max: <NEW_LINE> <INDENT> self.c_max = node.value <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.c_max is None: <NEW_LINE> <INDENT> self.c_max = node.value <NEW_LINE> <DEDENT> elif np.isnan(node.value): <NEW_LINE> <INDENT> self.c_max = np.nan <NEW_LINE> <DEDENT> elif node.value > self.c_max: <NEW_LINE> <INDENT> self.c_max = node.value <NEW_LINE> <DEDENT> <DEDENT> return self.c_max <NEW_LINE> <DEDENT> def has_next(self) -> bool: <NEW_LINE> <INDENT> return True
A stream operator that creates a cumulative maximum of values. Parameters ---------- skip_na : bool, default True Exclude NA/null values. If a value is NA, the result will be NA. References ---------- [1] https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.Series.cummax.html
6259903d1f5feb6acb163e2d
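The skip_na behaviour described above can be traced with a plain loop; this sketch reproduces the running maximum outside the Stream framework:

import numpy as np

values = [1.0, 3.0, np.nan, 2.0, 5.0]
c_max, out = -np.inf, []
for v in values:
    if np.isnan(v):          # skip_na=True: emit NaN but keep the running max
        out.append(np.nan)
        continue
    c_max = max(c_max, v)
    out.append(c_max)
print(out)  # [1.0, 3.0, nan, 3.0, 5.0]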
class Dataset(NameDescriptionMixin): <NEW_LINE> <INDENT> created_at = CreatedAtField() <NEW_LINE> owner = models.ForeignKey(settings.AUTH_USER_MODEL) <NEW_LINE> projects = models.ManyToManyField('project.Project', related_name='datasets')
Defines a dataset imported into a project by a user.
6259903db5575c28eb7135e6
class Solution: <NEW_LINE> <INDENT> def subarraySum(self, nums): <NEW_LINE> <INDENT> prefix_sum = {0: -1} <NEW_LINE> sum = 0 <NEW_LINE> for i in xrange(len(nums)): <NEW_LINE> <INDENT> sum += nums[i] <NEW_LINE> if sum in prefix_sum: <NEW_LINE> <INDENT> return [prefix_sum[sum] + 1, i] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prefix_sum[sum] = i
@param nums: A list of integers @return: A list of two integers: the index of the first number and the index of the last number
6259903d379a373c97d9a263
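A worked run of the prefix-sum idea from the snippet above (plain Python 3, no xrange needed): the first repeated prefix sum brackets a zero-sum subarray.

nums = [-3, 1, 2, -3, 4]          # illustrative input
prefix_sum, total = {0: -1}, 0
for i, n in enumerate(nums):
    total += n
    if total in prefix_sum:       # same prefix sum seen before -> zero-sum span
        print([prefix_sum[total] + 1, i])  # [0, 2]; nums[0:3] sums to 0
        break
    prefix_sum[total] = i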
class EntryDiscussions(DiscussionFeed): <NEW_LINE> <INDENT> def get_object(self, request, year, month, day, slug): <NEW_LINE> <INDENT> return get_object_or_404(Entry, slug=slug, creation_date__year=year, creation_date__month=month, creation_date__day=day) <NEW_LINE> <DEDENT> def items(self, obj): <NEW_LINE> <INDENT> return obj.discussions[:FEEDS_MAX_ITEMS] <NEW_LINE> <DEDENT> def link(self, obj): <NEW_LINE> <INDENT> return obj.get_absolute_url() <NEW_LINE> <DEDENT> def get_title(self, obj): <NEW_LINE> <INDENT> return _('Discussions on %(object)s') % {'object': obj.title} <NEW_LINE> <DEDENT> def description(self, obj): <NEW_LINE> <INDENT> return _('The latest discussions on the entry %(object)s') % { 'object': obj.title}
Feed for discussions on an entry.
6259903ddc8b845886d547f0
class _RegexHook(_Hook): <NEW_LINE> <INDENT> type = HookType.regex <NEW_LINE> def __init__(self, function): <NEW_LINE> <INDENT> _Hook.__init__(self, function) <NEW_LINE> self.regexes = [] <NEW_LINE> <DEDENT> def add_hook(self, *regexes, **kwargs): <NEW_LINE> <INDENT> self._add_hook(**kwargs) <NEW_LINE> if len(regexes) == 1 and not (isinstance(regexes[0], str) or hasattr(regexes[0], "search")): <NEW_LINE> <INDENT> regexes = regexes[0] <NEW_LINE> <DEDENT> assert isinstance(regexes, collections.Iterable) <NEW_LINE> for re_to_match in regexes: <NEW_LINE> <INDENT> if isinstance(re_to_match, str): <NEW_LINE> <INDENT> re_to_match = re.compile(re_to_match) <NEW_LINE> <DEDENT> assert hasattr(re_to_match, "search") <NEW_LINE> self.regexes.append(re_to_match)
:type regexes: list[re.__Regex]
6259903d8a349b6b43687480
class WindowsCodepagePlugin( interface.WindowsRegistryValueArtifactPreprocessorPlugin): <NEW_LINE> <INDENT> ARTIFACT_DEFINITION_NAME = 'WindowsCodePage' <NEW_LINE> def _ParseValueData(self, mediator, value_data): <NEW_LINE> <INDENT> if not isinstance(value_data, str): <NEW_LINE> <INDENT> raise errors.PreProcessFail( 'Unsupported Windows Registry value type: {0!s} for ' 'artifact: {1:s}.'.format( type(value_data), self.ARTIFACT_DEFINITION_NAME)) <NEW_LINE> <DEDENT> codepage = 'cp{0:s}'.format(value_data) <NEW_LINE> try: <NEW_LINE> <INDENT> mediator.SetCodepage(codepage) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> mediator.ProducePreprocessingWarning( self.ARTIFACT_DEFINITION_NAME, 'Unable to set codepage.')
The Windows codepage plugin.
6259903d004d5f362081f901
class MonNotification(monasca_setup.detection.Plugin): <NEW_LINE> <INDENT> def _detect(self): <NEW_LINE> <INDENT> if find_process_cmdline('monasca-notification') is not None: <NEW_LINE> <INDENT> self.available = True <NEW_LINE> <DEDENT> <DEDENT> def build_config(self): <NEW_LINE> <INDENT> log.info("\tEnabling the Monasca Notification healthcheck") <NEW_LINE> notification_process = find_process_cmdline('monasca-notification') <NEW_LINE> notification_user = notification_process.as_dict(['username'])['username'] <NEW_LINE> return watch_process_by_username(notification_user, 'monasca-notification', 'monitoring', 'monasca-notification') <NEW_LINE> <DEDENT> def dependencies_installed(self): <NEW_LINE> <INDENT> return True
Detect the Monasca notification engine and set up some simple checks.
6259903dd164cc61758221b1
class AddColumnPipe(Pipe): <NEW_LINE> <INDENT> def __init__(self, engineer_func: Callable[[pd.DataFrame], pd.Series], name: str): <NEW_LINE> <INDENT> self.__engineer_func = engineer_func <NEW_LINE> self.__name = name <NEW_LINE> <DEDENT> def flush(self, data: pd.DataFrame) -> pd.DataFrame: <NEW_LINE> <INDENT> column = self.__engineer_func(data) <NEW_LINE> assert data.shape[0] == column.shape[0], 'Engineered column should have same number of rows as input data %i - %i' % (column.shape[0], data.shape[0]) <NEW_LINE> data[self.__name] = column <NEW_LINE> return data
Perform feature engineering to add a column to data. :param engineer_func: Callable to engineer a new column :param name: Name for the new column
6259903d15baa723494631cc
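A sketch of the engineer_func contract, assuming only pandas; the DataFrame and column names are invented for illustration:

import pandas as pd

def add_total(df: pd.DataFrame) -> pd.Series:
    # must return one value per input row, as the assertion in flush() requires
    return df["price"] * df["quantity"]

data = pd.DataFrame({"price": [1.5, 2.0], "quantity": [4, 3]})
data["total"] = add_total(data)   # what AddColumnPipe(add_total, "total").flush(data) would add
print(data)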
class GaussianProcessLogMarginalLikelihood(GaussianProcessLogLikelihood): <NEW_LINE> <INDENT> def __init__(self, covariance_function, historical_data): <NEW_LINE> <INDENT> super(GaussianProcessLogMarginalLikelihood, self).__init__( covariance_function, historical_data, log_likelihood_type=C_GP.LogLikelihoodTypes.log_marginal_likelihood, )
Class for computing the Log Marginal Likelihood, ``log(p(y | X, \theta))``. That is, the probability of observing the training values, y, given the training points, X, and hyperparameters (of the covariance function), ``\theta``. This is a measure of how likely it is that the observed values came from our Gaussian Process Prior. .. Note:: This is a copy of LogMarginalLikelihoodEvaluator's class comments in gpp_model_selection.hpp. See this file's comments and :mod:`moe.optimal_learning.python.interfaces.log_likelihood_interface` for more details as well as the hpp and corresponding .cpp file. Given a particular covariance function (including hyperparameters) and training data ((point, function value, measurement noise) tuples), the log marginal likelihood is the log probability that the data were observed from a Gaussian Process would have generated the observed function values at the given measurement points. So log marginal likelihood tells us "the probability of the observations given the assumptions of the model." Log marginal sits well with the Bayesian Inference camp. (Rasmussen & Williams p118) This quantity primarily deals with the trade-off between model fit and model complexity. Handling this trade-off is automatic in the log marginal likelihood calculation. See Rasmussen & Williams 5.2 and 5.4.1 for more details. We can use the log marginal likelihood to determine how good our model is. Additionally, we can maximize it by varying hyperparameters (or even changing covariance functions) to improve our model quality. Hence this class provides access to functions for computing log marginal likelihood and its hyperparameter gradients.
6259903d30dc7b76659a0a6c
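For reference, with a zero-mean prior the quantity being computed has the standard closed form (Rasmussen & Williams, eq. 2.30), where K_y is the covariance over the training points plus observation noise and n is the number of training points:

\log p(y \mid X, \theta) = -\tfrac{1}{2}\, y^{\top} K_y^{-1} y - \tfrac{1}{2} \log \lvert K_y \rvert - \tfrac{n}{2} \log 2\pi, \qquad K_y = K(X, X) + \sigma_n^2 I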
class ShowVlanRemoteSpanSchema(MetaParser): <NEW_LINE> <INDENT> schema = {'vlan_id': {Any(): {'vlan_is_remote_span':bool} }, }
Schema for show vlan remote-span
6259903d50485f2cf55dc1be
class HuffyPenguin(GenericTumblrV1): <NEW_LINE> <INDENT> name = "huffypenguin" <NEW_LINE> long_name = "Huffy Penguin" <NEW_LINE> url = "https://huffy-penguin.tumblr.com"
Class to retrieve Huffy Penguin comics.
6259903dd10714528d69efa9
class Button: <NEW_LINE> <INDENT> def __init__(self, ai_game, msg): <NEW_LINE> <INDENT> self.screen = ai_game.screen <NEW_LINE> self.screen_rect = self.screen.get_rect() <NEW_LINE> self.width, self.height = 200, 50 <NEW_LINE> self.button_color = (0, 255, 0) <NEW_LINE> self.text_color = (255, 255, 255) <NEW_LINE> self.font = pygame.font.SysFont(None, 48) <NEW_LINE> self.rect = pygame.Rect(0, 0, self.width, self.height) <NEW_LINE> self.rect.center = self.screen_rect.center <NEW_LINE> self._prep_msg(msg) <NEW_LINE> <DEDENT> def _prep_msg(self, msg): <NEW_LINE> <INDENT> self.msg_image = self.font.render( msg, True, self.text_color, self.button_color) <NEW_LINE> self.msg_image_rect = self.msg_image.get_rect() <NEW_LINE> self.msg_image_rect.center = self.rect.center <NEW_LINE> <DEDENT> def draw_button(self): <NEW_LINE> <INDENT> self.screen.fill(self.button_color, self.rect) <NEW_LINE> self.screen.blit(self.msg_image, self.msg_image_rect)
Simple model of a button.
6259903dc432627299fa421e
class ColumnCollisionTest(fixtures.MappedTest): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def define_tables(cls, metadata): <NEW_LINE> <INDENT> Table( "book", metadata, Column( "id", Integer, primary_key=True, test_needs_autoincrement=True ), Column("book_id", String(50)), Column("title", String(50)), ) <NEW_LINE> <DEDENT> def test_naming(self): <NEW_LINE> <INDENT> book = self.tables.book <NEW_LINE> class Book(fixtures.ComparableEntity): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.mapper_registry.map_imperatively(Book, book) <NEW_LINE> with fixture_session() as sess: <NEW_LINE> <INDENT> b1 = Book(book_id="abc", title="def") <NEW_LINE> sess.add(b1) <NEW_LINE> sess.flush() <NEW_LINE> b1.title = "ghi" <NEW_LINE> sess.flush() <NEW_LINE> sess.commit() <NEW_LINE> <DEDENT> with fixture_session() as sess: <NEW_LINE> <INDENT> eq_(sess.query(Book).first(), Book(book_id="abc", title="ghi"))
Ensure the mapper doesn't break bind param naming rules on flush.
6259903d3c8af77a43b68859
class Net(resnet.Net): <NEW_LINE> <INDENT> def __init__(self, opt, ds, metaparams=None, masks=None): <NEW_LINE> <INDENT> super(Net, self).__init__(opt, ds) <NEW_LINE> self.meta = [metaparams] <NEW_LINE> if self.meta[0] is None: <NEW_LINE> <INDENT> self.meta = [partition.Metaparam(opt)] <NEW_LINE> <DEDENT> share = self.meta[0].partition <NEW_LINE> self.num_unique = share.num_unique <NEW_LINE> self.bw_ref = [] <NEW_LINE> layer_ref = self.resnet.get_layer_ref() <NEW_LINE> num_feats = self.resnet.num_feat_ref <NEW_LINE> if opt.bottleneck_ratio != 1: <NEW_LINE> <INDENT> num_feats += [int(f * opt.bottleneck_ratio) for f in num_feats] <NEW_LINE> <DEDENT> if masks is None: <NEW_LINE> <INDENT> self.resnet.mask_ref, self.grad_mask_ref = partition.prepare_masks( share, num_feats) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.resnet.mask_ref, self.grad_mask_ref = masks <NEW_LINE> <DEDENT> repeat_rate = int(np.ceil(len(layer_ref) / self.num_unique)) <NEW_LINE> unq_idx_ref = [i // repeat_rate for i in range(len(layer_ref))] <NEW_LINE> for l_idx, l in enumerate(layer_ref): <NEW_LINE> <INDENT> unq_idx = unq_idx_ref[l_idx] <NEW_LINE> tmp_m = self.resnet.get_module(l) <NEW_LINE> l_name = l[-1] <NEW_LINE> cnv = tmp_m._modules[l_name] <NEW_LINE> if 'conv1' in l and (not opt.last_third_only or 'layer3' in l): <NEW_LINE> <INDENT> tmp_m.conv = partition.masked_cnv(self.resnet, cnv, unq_idx) <NEW_LINE> bw_masks = [ self.grad_mask_ref[i][unq_idx][cnv.out_channels] for i in range(self.num_tasks) ] <NEW_LINE> self.bw_ref += [[cnv, bw_masks]]
ResNet that supports masking for feature partitioning.
6259903dbe383301e0254a53
class JobStoreExistsException(Exception): <NEW_LINE> <INDENT> def __init__(self, locator): <NEW_LINE> <INDENT> super(JobStoreExistsException, self).__init__( "The job store '%s' already exists. Use --restart to resume the workflow, or remove " "the job store with 'toil clean' to start the workflow from scratch" % locator)
Indicates that the specified job store already exists.
6259903d66673b3332c31633
class BandwidthSchedule(ARMBaseModel): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'start': {'required': True}, 'stop': {'required': True}, 'rate_in_mbps': {'required': True}, 'days': {'required': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'start': {'key': 'properties.start', 'type': 'str'}, 'stop': {'key': 'properties.stop', 'type': 'str'}, 'rate_in_mbps': {'key': 'properties.rateInMbps', 'type': 'int'}, 'days': {'key': 'properties.days', 'type': '[str]'}, } <NEW_LINE> def __init__(self, *, start: str, stop: str, rate_in_mbps: int, days, **kwargs) -> None: <NEW_LINE> <INDENT> super(BandwidthSchedule, self).__init__(**kwargs) <NEW_LINE> self.start = start <NEW_LINE> self.stop = stop <NEW_LINE> self.rate_in_mbps = rate_in_mbps <NEW_LINE> self.days = days
The bandwidth schedule details. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: The path ID that uniquely identifies the object. :vartype id: str :ivar name: The object name. :vartype name: str :ivar type: The hierarchical type of the object. :vartype type: str :param start: Required. The start time of the schedule in UTC. :type start: str :param stop: Required. The stop time of the schedule in UTC. :type stop: str :param rate_in_mbps: Required. The bandwidth rate in Mbps. :type rate_in_mbps: int :param days: Required. The days of the week when this schedule is applicable. :type days: list[str or ~azure.mgmt.edgegateway.models.DayOfWeek]
6259903d21a7993f00c671a9
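A hypothetical construction showing the required keyword arguments from the validation map above; the time strings and day names are placeholders, and real day values come from the SDK's DayOfWeek enumeration:

schedule = BandwidthSchedule(
    start="08:00:00",
    stop="18:00:00",
    rate_in_mbps=100,
    days=["Monday", "Tuesday", "Wednesday"],
)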
class TimeSeriesBase(orm.StdModel): <NEW_LINE> <INDENT> converter = DateTimeConverter <NEW_LINE> def todate(self, v): <NEW_LINE> <INDENT> return todatetime(v) <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return self.data.size() <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> def intervals(self, startdate, enddate, parseinterval = None): <NEW_LINE> <INDENT> parseinterval = parseinterval or default_parse_interval <NEW_LINE> start = self.data_start <NEW_LINE> end = self.data_end <NEW_LINE> todate = self.todate <NEW_LINE> startdate = todate(parseinterval(startdate,0)) <NEW_LINE> enddate = max(startdate,todate(parseinterval(enddate,0))) <NEW_LINE> calc_intervals = [] <NEW_LINE> if start: <NEW_LINE> <INDENT> if startdate < start: <NEW_LINE> <INDENT> calc_start = startdate <NEW_LINE> calc_end = parseinterval(start, -1) <NEW_LINE> if calc_end >= calc_start: <NEW_LINE> <INDENT> calc_intervals.append((calc_start, calc_end)) <NEW_LINE> <DEDENT> <DEDENT> if enddate > end: <NEW_LINE> <INDENT> calc_start = parseinterval(end, 1) <NEW_LINE> calc_end = enddate <NEW_LINE> if calc_end >= calc_start: <NEW_LINE> <INDENT> calc_intervals.append((calc_start, calc_end)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> start = startdate <NEW_LINE> end = enddate <NEW_LINE> calc_intervals.append((startdate, enddate)) <NEW_LINE> <DEDENT> if calc_intervals: <NEW_LINE> <INDENT> N = len(calc_intervals) <NEW_LINE> start1 = calc_intervals[0][0] <NEW_LINE> end1 = calc_intervals[N - 1][1] <NEW_LINE> if start: <NEW_LINE> <INDENT> start = min(start, start1) <NEW_LINE> end = max(end, end1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = start1 <NEW_LINE> end = end1 <NEW_LINE> <DEDENT> <DEDENT> return calc_intervals
Timeseries base model class
6259903d287bf620b6272e27
class PiggyBankSingle(object): <NEW_LINE> <INDENT> openapi_types = { 'data': 'PiggyBankRead' } <NEW_LINE> attribute_map = { 'data': 'data' } <NEW_LINE> def __init__(self, data=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._data = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, data): <NEW_LINE> <INDENT> if self.local_vars_configuration.client_side_validation and data is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `data`, must not be `None`") <NEW_LINE> <DEDENT> self._data = data <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, PiggyBankSingle): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, PiggyBankSingle): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
6259903d30c21e258be99a49
class DictTree(dict, Named): <NEW_LINE> <INDENT> IGNORE_CHILD='IGNORE_CHILD' <NEW_LINE> def __init__( self, name, children=() ): <NEW_LINE> <INDENT> Named.__init__(self, name) <NEW_LINE> for i,c in enumerate(children): <NEW_LINE> <INDENT> c_name= c.name if isinstance(c, DictTree) else self._leaf_name(c, i) <NEW_LINE> self[c_name]=c <NEW_LINE> <DEDENT> <DEDENT> def _leaf_name(self, leaf, default=""): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return leaf.name <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def intersect( cls, *trees ): <NEW_LINE> <INDENT> recursive=True <NEW_LINE> common_names= reduce(set.intersection, [set(f.keys()) for f in trees]) <NEW_LINE> def get_childs( child_name ): <NEW_LINE> <INDENT> childs= [tree[child_name] for tree in trees] <NEW_LINE> if recursive and childs and isinstance(childs[0], DictTree): <NEW_LINE> <INDENT> return cls.getCommonFeatures( childs ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return childs <NEW_LINE> <DEDENT> <DEDENT> all_childs= zip(*map( get_childs, common_names )) <NEW_LINE> return [cls(tree.name, childs) for tree,childs in zip(trees, all_childs)] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _isleave( x ): <NEW_LINE> <INDENT> return not isinstance(x, DictTree) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def map( cls, f_leave, *trees ): <NEW_LINE> <INDENT> def map_child( *c ): <NEW_LINE> <INDENT> return f_leave(*c) if DictTree._isleave(c[0]) else cls.map( f_leave, *c) <NEW_LINE> <DEDENT> old_children= zip(*(t.values() for t in trees)) <NEW_LINE> children= [map_child(*cs) for cs in old_children ] <NEW_LINE> filtered_children= filter( lambda x: x is not cls.IGNORE_CHILD, children ) <NEW_LINE> return cls( trees[0].name, filtered_children ) <NEW_LINE> <DEDENT> def reduce( self, reduce_f ): <NEW_LINE> <INDENT> return reduce(reduce_f, (c if DictTree._isleave(c) else c.reduce(reduce_f) for c in self.values())) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "{}({})".format(self.__class__.__name__, self.name)
A dict that can have other DictTree objects as values. Basically, an arbitrary tree that can have any object as a leaf.
6259903d07d97122c4217edb
class EnumerationEventFormatterHelperTest(test_lib.EventFormatterTestCase): <NEW_LINE> <INDENT> def testInitialization(self): <NEW_LINE> <INDENT> event_formatter_helper = interface.EnumerationEventFormatterHelper() <NEW_LINE> self.assertIsNotNone(event_formatter_helper) <NEW_LINE> <DEDENT> def testFormatEventValues(self): <NEW_LINE> <INDENT> event_formatter_helper = interface.EnumerationEventFormatterHelper() <NEW_LINE> event_values = {} <NEW_LINE> event_formatter_helper.FormatEventValues(event_values)
Tests for the enumeration event formatter helper.
6259903dd10714528d69efaa
class TestMultiClassMeasure(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> torch.manual_seed(1001) <NEW_LINE> base = -15 * torch.ones(16, 4, 256, 256) <NEW_LINE> base[:, 0] *= -1 <NEW_LINE> negative_base = torch.randn(16, 4, 256, 256) <NEW_LINE> negative_base[:, 0] = negative_base.min() - 15 * torch.ones(16, 256, 256) <NEW_LINE> self.target_class = torch.argmax(base, axis=1) <NEW_LINE> self.negative_target_class = torch.argmax(negative_base, axis=1) <NEW_LINE> self.half_negative_target_class = self.target_class.clone() <NEW_LINE> self.half_negative_target_class[8:] = self.negative_target_class[8:] <NEW_LINE> self.seg_mask_logits = base.clone() <NEW_LINE> if torch.cuda.is_available(): <NEW_LINE> <INDENT> self.target_class = self.target_class.cuda() <NEW_LINE> self.negative_target_class = self.negative_target_class.cuda() <NEW_LINE> self.half_negative_target_class = self.half_negative_target_class.cuda() <NEW_LINE> self.seg_mask_logits = self.seg_mask_logits.cuda() <NEW_LINE> <DEDENT> <DEDENT> def _test_worstcase(self): <NEW_LINE> <INDENT> loss = self.measure(self.seg_mask_logits, self.negative_target_class) <NEW_LINE> assert np.isclose(loss.item(), self.worstcase_target, atol=self.worstcase_tol) <NEW_LINE> <DEDENT> def _test_bestcase(self): <NEW_LINE> <INDENT> loss = self.measure(self.seg_mask_logits, self.target_class) <NEW_LINE> assert np.isclose(loss.item(), self.bestcase_target, atol=self.bestcase_tol) <NEW_LINE> <DEDENT> def _test_order(self): <NEW_LINE> <INDENT> negative = self.measure(self.seg_mask_logits, self.negative_target_class) <NEW_LINE> similar = self.measure(self.seg_mask_logits, self.half_negative_target_class) <NEW_LINE> equal = self.measure(self.seg_mask_logits, self.target_class) <NEW_LINE> if self.higher_is_better: <NEW_LINE> <INDENT> assert negative < similar < equal <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert negative > similar > equal
Base class for testing losses. It provides dummy images with varying similarity, such that various degrees of the loss functions / metrics can be emulated.
6259903dc432627299fa421f
class Scraper(object): <NEW_LINE> <INDENT> def __init__(self, conf, **kwargs): <NEW_LINE> <INDENT> self.conf = conf <NEW_LINE> self.kwarg = kwargs <NEW_LINE> self.last_status_code = None <NEW_LINE> self.last_exception = None <NEW_LINE> if 'timeout' not in self.kwarg: <NEW_LINE> <INDENT> self.kwarg['timeout'] = self.conf['timeout'] <NEW_LINE> <DEDENT> <DEDENT> def connect(self, url): <NEW_LINE> <INDENT> retries = self.conf['retries'] <NEW_LINE> while retries > 0: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response = requests.get(url, **self.kwarg) <NEW_LINE> return response <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.last_exception = e <NEW_LINE> retries -= 1 <NEW_LINE> sleep(self.conf['sleep']) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise exceptions.ConnectionError(url) <NEW_LINE> <DEDENT> <DEDENT> def get_response(self, url): <NEW_LINE> <INDENT> response = self.connect(url) <NEW_LINE> self.last_status_code = response.status_code <NEW_LINE> if self.last_status_code == 200: <NEW_LINE> <INDENT> return response <NEW_LINE> <DEDENT> elif self.conf['ignore_status_code']: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exceptions.StatusCodeError( "{} from {}".format(self.last_status_code, url)) <NEW_LINE> <DEDENT> <DEDENT> def get_json(self, url): <NEW_LINE> <INDENT> response = self.get_response(url) <NEW_LINE> if response is not None: <NEW_LINE> <INDENT> return response.json() <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def get_text(self, url): <NEW_LINE> <INDENT> response = self.get_response(url) <NEW_LINE> if response is not None: <NEW_LINE> <INDENT> return response.text <NEW_LINE> <DEDENT> return None
This class enables easy usage of the requests module. Its main feature is retrying the connection multiple times before raising an exception.
6259903dbe383301e0254a55
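A usage sketch showing the configuration keys the class reads (retries, sleep, timeout, ignore_status_code); it assumes the module-level imports the snippet relies on (requests, time.sleep and its local exceptions module) are in place, and the URL is a placeholder:

conf = {"retries": 3, "sleep": 2, "timeout": 10, "ignore_status_code": True}
scraper = Scraper(conf, headers={"User-Agent": "example-bot"})  # extra kwargs go to requests.get
payload = scraper.get_json("https://example.com/api/data.json")
if payload is None:   # with ignore_status_code, non-200 responses yield None
    print("last status code:", scraper.last_status_code)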
class Ghost(Entity): <NEW_LINE> <INDENT> ghosts = [] <NEW_LINE> def __init__(self, id): <NEW_LINE> <INDENT> super(Ghost, self).__init__(Constants.TYPE_GHOST) <NEW_LINE> self.value = Constants.VALUE_GHOST_BASIC <NEW_LINE> self.id = id <NEW_LINE> self._generate_random_ghost_position() <NEW_LINE> self.alive = True <NEW_LINE> self.captured = False <NEW_LINE> self._add_ghost(self) <NEW_LINE> <DEDENT> def _generate_random_ghost_position(self): <NEW_LINE> <INDENT> generated = False <NEW_LINE> while not generated: <NEW_LINE> <INDENT> self.x = random.randint(0, Constants.MAP_WIDTH) <NEW_LINE> self.y = random.randint(0, Constants.MAP_HEIGHT) <NEW_LINE> if not (self.is_in_team_0_base or self.is_in_team_1_base): <NEW_LINE> <INDENT> generated = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def _add_ghost(cls, obj): <NEW_LINE> <INDENT> cls.ghosts.append(obj) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def reset_ghost(cls): <NEW_LINE> <INDENT> cls.ghosts = [] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_ghost(cls, ids): <NEW_LINE> <INDENT> for ghost in cls.ghosts: <NEW_LINE> <INDENT> if ghost.id == ids: <NEW_LINE> <INDENT> return ghost <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def run_away(self, busters): <NEW_LINE> <INDENT> closest = [] <NEW_LINE> max_distance = Constants.MAP_MAX_DISTANCE <NEW_LINE> for buster in busters: <NEW_LINE> <INDENT> temp_dist = MathUtility.distance(self.x, self.y, buster.x, buster.y) <NEW_LINE> if temp_dist < max_distance and temp_dist < Constants.ENTITY_RANGE_VISION: <NEW_LINE> <INDENT> closest = [buster] <NEW_LINE> max_distance = temp_dist <NEW_LINE> <DEDENT> elif temp_dist == max_distance and temp_dist < Constants.ENTITY_RANGE_VISION: <NEW_LINE> <INDENT> closest.append(buster) <NEW_LINE> <DEDENT> <DEDENT> if closest: <NEW_LINE> <INDENT> new_x, new_y = MathUtility.opposite_direction(self.x, self.y, closest[0].x, closest[0].y, Constants.GHOST_RUN_WAY) <NEW_LINE> self.x = new_x <NEW_LINE> self.y = new_y <NEW_LINE> <DEDENT> <DEDENT> def being_captured(self, buster): <NEW_LINE> <INDENT> self.updating_position(buster) <NEW_LINE> self.captured = True <NEW_LINE> self.value = 1 <NEW_LINE> <DEDENT> def kill(self): <NEW_LINE> <INDENT> self.alive = False <NEW_LINE> <DEDENT> def being_released(self, buster): <NEW_LINE> <INDENT> self.captured = False <NEW_LINE> self.updating_position(buster) <NEW_LINE> self.angle = buster.angle <NEW_LINE> self.value = Constants.VALUE_GHOST_BASIC <NEW_LINE> <DEDENT> def busting_cancelled(self): <NEW_LINE> <INDENT> print("Ghost {} cancelling busting".format(self.id)) <NEW_LINE> self.captured = False <NEW_LINE> self.value = Constants.VALUE_GHOST_BASIC <NEW_LINE> <DEDENT> def updating_position(self, buster): <NEW_LINE> <INDENT> self.x = buster.x <NEW_LINE> self.y = buster.y <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Ghost {}, X: {}, Y: {}, Value: {}, State: {}, Captured: {}, Alive: {}'.format(self.id, self.x, self.y, self.value, self.state, self.captured, self.alive)
Class that will handle the ghost entity
6259903d07f4c71912bb066f
class SupervisedTrainer(Trainer): <NEW_LINE> <INDENT> def __init__(self, model, optimizer='adam', loss='cross_entropy', metrics=None): <NEW_LINE> <INDENT> from magnet.nodes.functional import wiki <NEW_LINE> if isinstance(optimizer, str): optimizer = optimizer_wiki[optimizer.lower()](model.parameters()) <NEW_LINE> if isinstance(loss, str): loss = wiki['losses'][loss.lower()] <NEW_LINE> if metrics is None: metrics = [] <NEW_LINE> if not isinstance(metrics, (tuple, list)): metrics = [metrics] <NEW_LINE> for i, metric in enumerate(metrics): <NEW_LINE> <INDENT> if isinstance(metric, str): metrics[i] = (metric, wiki['metrics'][metric.lower()]) <NEW_LINE> <DEDENT> super().__init__([model], [optimizer]) <NEW_LINE> self.loss = loss <NEW_LINE> self.metrics = metrics <NEW_LINE> <DEDENT> def optimize(self): <NEW_LINE> <INDENT> optimizer = self.optimizers[0] <NEW_LINE> loss = self.get_loss(self.dataloader) <NEW_LINE> if self.is_training(): <NEW_LINE> <INDENT> loss.backward() <NEW_LINE> self.callbacks('gradient', trainer=self, models=self.models) <NEW_LINE> optimizer.step() <NEW_LINE> optimizer.zero_grad() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def validate(trainer, dataloader): <NEW_LINE> <INDENT> trainer.get_loss(dataloader, validation=True) <NEW_LINE> <DEDENT> def get_loss(self, dataloader, validation=False): <NEW_LINE> <INDENT> def write_stats(key, value): <NEW_LINE> <INDENT> self.callbacks('write_stats', trainer=self, key=key, value=value, validation=validation, buffer_size=len(dataloader)) <NEW_LINE> <DEDENT> model = self.models[0] <NEW_LINE> x, y = next(dataloader) <NEW_LINE> y_pred = model(x) <NEW_LINE> loss = self.loss(y_pred, y) <NEW_LINE> write_stats('loss', loss.item()) <NEW_LINE> for metric in self.metrics: write_stats(metric[0], metric[1](y_pred, y).item()) <NEW_LINE> return loss
A simple trainer that implements a supervised approach where a simple model :math:`\hat{y} = f(x)` is trained to map :math:`\hat{y}` to ground-truth :math:`y` according to some specified loss. This is the training routine that most high-level deep learning frameworks implement. Args: model (``nn.Module``): The model that needs to be trained optimizer (str or optim.Optimzer): The optimizer used to train the model. Default: ``'adam'`` loss (str or ``callable``): A loss function that gives the objective to be minimized. Default: ``'cross_entropy'`` metrics (list): Any other metrics that need to be monitored. Default: ``None`` * :attr:`optimizer` can be an actual ``optim.Optimizer`` instance or the name of a popular optimzizer (eg. ``'adam'``). * :attr:`loss` can be a function or the name of a popular loss function (eg. ``'cross_entropy'``). It should accept 2 arguments (:math:`\hat{y}`, :math:`y`). * :attr:`metrics` should contain a list of functions which accept 2 arguments (:math:`\hat{y}`, :math:`y`), like the loss function. .. note:: A static :py:meth:`validate` function is provided for the validation callback .. note:: The :attr:`metrics` is of no use unless there is some callback (eg.``callbacks.Monitor``) to receive the metrics Examples:: >>> import magnet as mag >>> import magnet.nodes as mn >>> from magnet.data import Data >>> from magnet.training import callbacks, SupervisedTrainer >>> data = Data.get('mnist') >>> model = mn.Linear(10, act=None) >>> model.build(x=next(data())[0]) >>> trainer = SupervisedTrainer(model) >>> callbacks=[callbacks.Monitor(), callbacks.Validate(data(64, mode='val'), SupervisedTrainer.validate)] >>> trainer.train(data(64, shuffle=True), 1, callbacks)
6259903d10dbd63aa1c71e14
class MySQLParams(Params): <NEW_LINE> <INDENT> def __init__(self, params): <NEW_LINE> <INDENT> if 'name' not in params: <NEW_LINE> <INDENT> raise MySQLParseException('"name" is a required parameter') <NEW_LINE> <DEDENT> if 'user' not in params: <NEW_LINE> <INDENT> raise MySQLParseException('"user" is a required parameter') <NEW_LINE> <DEDENT> super(self.__class__, self).__init__( host=params['host'] if 'host' in params else 'localhost', port=params['port'] if 'port' in params else 3306, name=params['name'] if 'name' in params else None, user=params['user'] if 'user' in params else None, password=params['password'] if 'password' in params else None) <NEW_LINE> <DEDENT> def get_dump_args(self, tables=[]): <NEW_LINE> <INDENT> extra_args = '' <NEW_LINE> if self.user is not None: <NEW_LINE> <INDENT> extra_args += '-u {} '.format(self.user) <NEW_LINE> <DEDENT> if self.password is not None: <NEW_LINE> <INDENT> extra_args += '-p{} '.format(self.password) <NEW_LINE> <DEDENT> if self.name is not None or len(tables) > 0: <NEW_LINE> <INDENT> extra_args += '{}'.format(' '.join([self.name] + tables)) <NEW_LINE> <DEDENT> return '-h {} -P {} {}'.format(self.host, self.port, extra_args) <NEW_LINE> <DEDENT> def get_mysql_args(self): <NEW_LINE> <INDENT> extra_args = '' <NEW_LINE> if self.user is not None: <NEW_LINE> <INDENT> extra_args += '-u {} '.format(self.user) <NEW_LINE> <DEDENT> if self.password is not None: <NEW_LINE> <INDENT> extra_args += '-p{} '.format(self.password) <NEW_LINE> <DEDENT> if self.name is not None: <NEW_LINE> <INDENT> extra_args += '{}'.format(self.name) <NEW_LINE> <DEDENT> return '-h {} -P {} {}'.format(self.host, self.port, extra_args) <NEW_LINE> <DEDENT> def load_db(self): <NEW_LINE> <INDENT> from MySQLdb import connect <NEW_LINE> return connect(host=self.host, user=self.user, passwd=self.password, db=self.name, port=self.port)
Container for MySQL parameters. Has some helpful methods to get args for the `mysql` and `mysqldump` commands, as well as a method to load the MySQLdb database.
6259903d30c21e258be99a4a
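An illustrative parameter dict (credentials invented), assuming the Params base class simply stores the keyword arguments it receives as attributes:

params = MySQLParams({"name": "mydb", "user": "backup", "password": "secret"})
print(params.get_dump_args(tables=["orders"]))
# -h localhost -P 3306 -u backup -psecret mydb orders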
class Teacher(models.Model): <NEW_LINE> <INDENT> org = models.ForeignKey(CourseOrg, verbose_name="所属机构") <NEW_LINE> name = models.CharField(max_length=50, verbose_name="教师名称") <NEW_LINE> work_years = models.IntegerField(default=0, verbose_name="工作年限", null=True, blank=True) <NEW_LINE> work_company = models.CharField(max_length=50, verbose_name="就职公司", null=True, blank=True) <NEW_LINE> work_position = models.CharField(max_length=50, verbose_name="公司职位", null=True, blank=True) <NEW_LINE> points = models.CharField(max_length=50, verbose_name="教学特点", null=True, blank=True) <NEW_LINE> click_nums = models.IntegerField(default=0, verbose_name="点击数", null=True, blank=True) <NEW_LINE> fav_nums = models.IntegerField(default=0, verbose_name="收藏数", null=True, blank=True) <NEW_LINE> add_time = models.DateTimeField(default=datetime.now) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "教师" <NEW_LINE> verbose_name_plural = verbose_name <NEW_LINE> ordering = ('-id',) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Lecturer table.
6259903d287bf620b6272e29
class IPv4Address(_BaseV4, _BaseIP): <NEW_LINE> <INDENT> def __init__(self, address): <NEW_LINE> <INDENT> _BaseIP.__init__(self, address) <NEW_LINE> _BaseV4.__init__(self, address) <NEW_LINE> if isinstance(address, int): <NEW_LINE> <INDENT> self._ip = address <NEW_LINE> if address < 0 or address > self._ALL_ONES: <NEW_LINE> <INDENT> raise AddressValueError(address) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> if _compat_has_real_bytes: <NEW_LINE> <INDENT> if isinstance(address, bytes) and len(address) == 4: <NEW_LINE> <INDENT> self._ip = struct.unpack('!I', address)[0] <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> addr_str = str(address) <NEW_LINE> if not self._is_valid_ip(addr_str): <NEW_LINE> <INDENT> raise AddressValueError(addr_str) <NEW_LINE> <DEDENT> self._ip = self._ip_int_from_string(addr_str)
Represent and manipulate single IPv4 Addresses.
6259903d507cdc57c63a5fd9
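Both accepted input forms described in __init__, using a documentation address; this assumes the _BaseV4 and _BaseIP mixins from the same module are available:

addr = IPv4Address("192.0.2.1")   # dotted-quad string
same = IPv4Address(3221225985)    # 192 * 2**24 + 2 * 2**8 + 1, the packed integer form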
class connection(): <NEW_LINE> <INDENT> def add(self,a,b): <NEW_LINE> <INDENT> return a+b <NEW_LINE> <DEDENT> def setConnectionString(self,connectionString): <NEW_LINE> <INDENT> self.connectionString = connectionString <NEW_LINE> return self <NEW_LINE> <DEDENT> def getConnectionString(self): <NEW_LINE> <INDENT> return self.connectionString
Simple connection helper that stores and returns a connection string.
6259903d287bf620b6272e2a
class TestAzAz(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.fake = Faker("az") <NEW_LINE> Faker.seed(0) <NEW_LINE> <DEDENT> def test_first_name(self): <NEW_LINE> <INDENT> name = self.fake.first_name() <NEW_LINE> assert name <NEW_LINE> self.assertIsInstance(name, str) <NEW_LINE> assert name in AzAzProvider.first_names <NEW_LINE> name = self.fake.first_name_female() <NEW_LINE> assert name <NEW_LINE> self.assertIsInstance(name, str) <NEW_LINE> assert name in AzAzProvider.first_names <NEW_LINE> assert name in AzAzProvider.first_names_female <NEW_LINE> name = self.fake.first_name_male() <NEW_LINE> assert name <NEW_LINE> self.assertIsInstance(name, str) <NEW_LINE> assert name in AzAzProvider.first_names <NEW_LINE> assert name in AzAzProvider.first_names_male <NEW_LINE> <DEDENT> def test_last_name(self): <NEW_LINE> <INDENT> name = self.fake.last_name() <NEW_LINE> assert name <NEW_LINE> self.assertIsInstance(name, str) <NEW_LINE> assert name in AzAzProvider.last_names <NEW_LINE> name = self.fake.last_name_female() <NEW_LINE> assert name <NEW_LINE> self.assertIsInstance(name, str) <NEW_LINE> assert name in AzAzProvider.last_names_female + AzAzProvider.last_names_unisex <NEW_LINE> name = self.fake.last_name_unique_to_female() <NEW_LINE> assert name <NEW_LINE> self.assertIsInstance(name, str) <NEW_LINE> assert name in AzAzProvider.last_names_female <NEW_LINE> name = self.fake.last_name_male() <NEW_LINE> assert name <NEW_LINE> self.assertIsInstance(name, str) <NEW_LINE> assert name in AzAzProvider.last_names_male + AzAzProvider.last_names_unisex <NEW_LINE> name = self.fake.last_name_unique_to_male() <NEW_LINE> assert name <NEW_LINE> self.assertIsInstance(name, str) <NEW_LINE> assert name in AzAzProvider.last_names_male
Tests for az_AZ locale person provider
6259903d6e29344779b01890
class TestWriteCCNoSop(TestWriteCCFull): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> cdl_convert.ColorCorrection.members = {} <NEW_LINE> self.cdl = cdl_convert.ColorCorrection("burp_200.x15", '') <NEW_LINE> self.cdl.sat = 1.0128109381 <NEW_LINE> self.cdl.sat_node.desc = ['I am a lovely sat node'] <NEW_LINE> self.target_xml_root = enc(CC_NO_SOP_WRITE) <NEW_LINE> self.target_xml = enc('\n'.join(CC_NO_SOP_WRITE.split('\n')[1:]))
Tests writing a CC XML with no SOP node
6259903d91af0d3eaad3b074
class NetworkInterfaceDnsSettings(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'applied_dns_servers': {'readonly': True}, 'internal_fqdn': {'readonly': True}, 'internal_domain_name_suffix': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'dns_servers': {'key': 'dnsServers', 'type': '[str]'}, 'applied_dns_servers': {'key': 'appliedDnsServers', 'type': '[str]'}, 'internal_dns_name_label': {'key': 'internalDnsNameLabel', 'type': 'str'}, 'internal_fqdn': {'key': 'internalFqdn', 'type': 'str'}, 'internal_domain_name_suffix': {'key': 'internalDomainNameSuffix', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(NetworkInterfaceDnsSettings, self).__init__(**kwargs) <NEW_LINE> self.dns_servers = kwargs.get('dns_servers', None) <NEW_LINE> self.applied_dns_servers = None <NEW_LINE> self.internal_dns_name_label = kwargs.get('internal_dns_name_label', None) <NEW_LINE> self.internal_fqdn = None <NEW_LINE> self.internal_domain_name_suffix = None
DNS settings of a network interface. Variables are only populated by the server, and will be ignored when sending a request. :param dns_servers: List of DNS servers IP addresses. Use 'AzureProvidedDNS' to switch to azure provided DNS resolution. 'AzureProvidedDNS' value cannot be combined with other IPs, it must be the only value in dnsServers collection. :type dns_servers: list[str] :ivar applied_dns_servers: If the VM that uses this NIC is part of an Availability Set, then this list will have the union of all DNS servers from all NICs that are part of the Availability Set. This property is what is configured on each of those VMs. :vartype applied_dns_servers: list[str] :param internal_dns_name_label: Relative DNS name for this NIC used for internal communications between VMs in the same virtual network. :type internal_dns_name_label: str :ivar internal_fqdn: Fully qualified DNS name supporting internal communications between VMs in the same virtual network. :vartype internal_fqdn: str :ivar internal_domain_name_suffix: Even if internalDnsNameLabel is not specified, a DNS entry is created for the primary NIC of the VM. This DNS name can be constructed by concatenating the VM name with the value of internalDomainNameSuffix. :vartype internal_domain_name_suffix: str
6259903d63b5f9789fe863ab
class FlatMenuTestCase(TestCase): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def create_test_menus_for_site(site, count=3, set_option_vals=False): <NEW_LINE> <INDENT> for i in range(1, count + 1): <NEW_LINE> <INDENT> obj = FlatMenu.objects.create( site=site, handle='test-%s' % i, title='Test Menu %s' % i ) <NEW_LINE> if set_option_vals: <NEW_LINE> <INDENT> obj._option_vals = utils.make_optionvals_instance() <NEW_LINE> <DEDENT> yield obj <NEW_LINE> <DEDENT> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.site = Site.objects.get(is_default_site=True) <NEW_LINE> self.menus = tuple( self.create_test_menus_for_site(self.site, set_option_vals=True) ) <NEW_LINE> <DEDENT> def get_test_menu_instance(self): <NEW_LINE> <INDENT> return self.menus[0]
A base TestCase class for testing FlatMenu model class methods
6259903dd164cc61758221b5
class RedisDict(dict): <NEW_LINE> <INDENT> def __init__(self, redis_url: Union[str, 'StrictRedis'], key_id: str, seq: Optional[Iterable] = None, **kwargs): <NEW_LINE> <INDENT> self._redis = redis_from_url_or_object(redis_url) <NEW_LINE> self.key_id = key_id <NEW_LINE> args = [] if seq is None else [seq] <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> if not self: <NEW_LINE> <INDENT> self.read() <NEW_LINE> <DEDENT> <DEDENT> def read(self): <NEW_LINE> <INDENT> self.clear() <NEW_LINE> self.update(json.loads(self._redis.get(self.key_id) or '{}')) <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> obj = prepare_obj_for_json(self) <NEW_LINE> self._redis.set(self.key_id, json.dumps(obj))
Dictionary, that store in Redis as one solid json by his own redis key (key_id) for save dict in redis it's need to call :meth:`telegram.ext.redis_util.RedisDict.flush` read object from redis on initialization :meth:`telegram.ext.redis_util.RedisDict.__init__`
6259903d50485f2cf55dc1c2
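A minimal sketch of the pattern RedisDict implements — a plain dict cached in Redis as one JSON blob under a single key — written directly against redis-py. The URL and key name are illustrative, and a local Redis server is assumed.

import json
from redis import StrictRedis

r = StrictRedis.from_url("redis://localhost:6379/0")   # assumed local server
key_id = "session:42:state"                            # assumed key name

state = json.loads(r.get(key_id) or "{}")              # what read() does on init
state["counter"] = state.get("counter", 0) + 1         # mutate the local dict
r.set(key_id, json.dumps(state))                       # what flush() does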
class WrongPresentationType(ValueError): <NEW_LINE> <INDENT> pass
The file read was not a valid file for this type.
6259903d3c8af77a43b6885b
class Grades(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.students = [] <NEW_LINE> self.grades = {} <NEW_LINE> self.isSorted = True <NEW_LINE> <DEDENT> def addStudent(self, student): <NEW_LINE> <INDENT> if student in self.students: <NEW_LINE> <INDENT> raise ValueError("Duplicate student") <NEW_LINE> <DEDENT> self.students.append(student) <NEW_LINE> self.grades[student.getIdNum()] = [] <NEW_LINE> self.isSorted = False <NEW_LINE> <DEDENT> def addGrade(self, student, grade): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.grades[student.getIdNum()].append(grade) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise ValueError("Student not in grade book") <NEW_LINE> <DEDENT> <DEDENT> def getGrades(self, student): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.grades[student.getIdNum()][:] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise ValueError("Student not in grade book") <NEW_LINE> <DEDENT> <DEDENT> def allStudents(self): <NEW_LINE> <INDENT> if not self.isSorted: <NEW_LINE> <INDENT> self.students.sort() <NEW_LINE> self.isSorted = True <NEW_LINE> <DEDENT> for s in self.students: <NEW_LINE> <INDENT> yield s
A mapping from Students to a list of grades
6259903d45492302aabfd718
class ResourceDomainObjectTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_construction_positional(self): <NEW_LINE> <INDENT> resource = Resource('foo', 'bar') <NEW_LINE> self.assertEqual('foo', resource.type) <NEW_LINE> self.assertEqual('bar', resource.id) <NEW_LINE> <DEDENT> def test_construction_named(self): <NEW_LINE> <INDENT> resource = Resource(resource_id='bar', resource_type='foo') <NEW_LINE> self.assertEqual('foo', resource.type) <NEW_LINE> self.assertEqual('bar', resource.id)
Tests the construction of the snaps.domain.Resource class
6259903d66673b3332c31637
class CycleInfo: <NEW_LINE> <INDENT> def __init__(self, active_time, inactive_time, quantity): <NEW_LINE> <INDENT> self.active_time = active_time <NEW_LINE> self.inactive_time = inactive_time <NEW_LINE> self.quantity = quantity <NEW_LINE> <DEDENT> @property <NEW_LINE> def average_time(self): <NEW_LINE> <INDENT> return self.active_time + self.inactive_time <NEW_LINE> <DEDENT> def _get_cycle_quantity(self): <NEW_LINE> <INDENT> return self.quantity <NEW_LINE> <DEDENT> def _get_time(self): <NEW_LINE> <INDENT> return (self.active_time + self.inactive_time) * self.quantity <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> spec = ['active_time', 'inactive_time', 'quantity'] <NEW_LINE> return make_repr_str(self, spec)
Holds information about cycle sequence. This class is used only when all cycles in sequence have the same parameters. Attributes: active_time: How long this effect is active. inactive_time: How long this effect is inactive after its activity. quantity: Defines how many times cycle should be repeated.
6259903dd6c5a102081e3365
class TestDestinyDefinitionsDestinyTalentGridDefinition(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testDestinyDefinitionsDestinyTalentGridDefinition(self): <NEW_LINE> <INDENT> pass
DestinyDefinitionsDestinyTalentGridDefinition unit test stubs
6259903d0fa83653e46f6119
class GenericStringSimilarityFeatureSpace: <NEW_LINE> <INDENT> def __init__(self, alphabet, n, Y, is_normalized, gs_kernel): <NEW_LINE> <INDENT> self.n = int(n) <NEW_LINE> self.is_normalized = is_normalized <NEW_LINE> self._y_lengths = numpy.array([len(y) for y in Y]) <NEW_LINE> self.max_train_length = numpy.max(self._y_lengths) <NEW_LINE> self.gs_kernel = gs_kernel <NEW_LINE> self._Y_int = transform_strings_to_integer_lists(Y, alphabet) <NEW_LINE> self._n_grams_int = transform_strings_to_integer_lists(get_n_grams(alphabet, n), alphabet) <NEW_LINE> self._n_gram_similarity_matrix = gs_kernel.get_alphabet_similarity_matrix() <NEW_LINE> if is_normalized: <NEW_LINE> <INDENT> self._normalization = numpy.sqrt(gs_kernel.element_wise_kernel(Y)) <NEW_LINE> <DEDENT> <DEDENT> def compute_weights(self, y_weights, y_length): <NEW_LINE> <INDENT> normalized_weights = numpy.copy(y_weights) <NEW_LINE> max_length = max(y_length, self.max_train_length) <NEW_LINE> if self.is_normalized: <NEW_LINE> <INDENT> normalized_weights *= 1. / self._normalization <NEW_LINE> <DEDENT> n_partitions = y_length - self.n + 1 <NEW_LINE> position_matrix = self.gs_kernel.get_position_matrix(max_length) <NEW_LINE> gs_weights = compute_gs_similarity_weights(n_partitions, self._n_grams_int, self._Y_int, normalized_weights, self._y_lengths, position_matrix, self._n_gram_similarity_matrix) <NEW_LINE> return numpy.array(gs_weights)
Output space for the Generic String kernel with position and n-gram similarity. Doesn't use a sparse matrix representation because it takes in account the similarity between the n-grams. This is used to compute the weights of the graph during the inference phase. Attributes ---------- n : int N-gram length. is_normalized : bool True if the feature space should be normalized, False otherwise. max_train_length : int Length of the longest string in the training dataset. gs_kernel : GenericStringKernel Generic string kernel.
6259903d30c21e258be99a4c
class AnalyticsConfig(AppConfig): <NEW_LINE> <INDENT> name = 'analytics'
Configs for analytics app.
6259903d8c3a8732951f7797
class Readings(pygame.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self, orientation = None, width = 10, height = 10): <NEW_LINE> <INDENT> pygame.sprite.Sprite.__init__(self) <NEW_LINE> self.image = pygame.image.load("reading.png") <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.orientation = orientation <NEW_LINE> self.tick_lim = random.randint(3, 9) <NEW_LINE> self.ticks = 0 <NEW_LINE> self.in_air = True <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> if self.orientation == "north" and self.ticks < self.tick_lim: <NEW_LINE> <INDENT> self.rect.y -= READINGS_SPEED <NEW_LINE> <DEDENT> elif self.orientation == "northeast" and self.ticks < self.tick_lim: <NEW_LINE> <INDENT> self.rect.y -= READINGS_SPEED <NEW_LINE> self.rect.x += READINGS_SPEED <NEW_LINE> <DEDENT> elif self.orientation == "east" and self.ticks < self.tick_lim: <NEW_LINE> <INDENT> self.rect.x += READINGS_SPEED <NEW_LINE> <DEDENT> elif self.orientation == "southeast" and self.ticks < self.tick_lim: <NEW_LINE> <INDENT> self.rect.x += READINGS_SPEED <NEW_LINE> self.rect.y += READINGS_SPEED <NEW_LINE> <DEDENT> elif self.orientation == "south" and self.ticks < self.tick_lim: <NEW_LINE> <INDENT> self.rect.y += READINGS_SPEED <NEW_LINE> <DEDENT> elif self.orientation == "southwest" and self.ticks < self.tick_lim: <NEW_LINE> <INDENT> self.rect.y += READINGS_SPEED <NEW_LINE> self.rect.x -= READINGS_SPEED <NEW_LINE> <DEDENT> elif self.orientation == "west" and self.ticks < self.tick_lim: <NEW_LINE> <INDENT> self.rect.x -= READINGS_SPEED <NEW_LINE> <DEDENT> elif self.orientation == "northwest" and self.ticks < self.tick_lim: <NEW_LINE> <INDENT> self.rect.y -= READINGS_SPEED <NEW_LINE> self.rect.x -= READINGS_SPEED <NEW_LINE> <DEDENT> elif self.ticks == self.tick_lim: <NEW_LINE> <INDENT> self.in_air = False <NEW_LINE> <DEDENT> self.ticks += 1
This class represents a piece of reading material.
6259903dd4950a0f3b111760
class WishListCreateView(PageTitleMixin, CreateView): <NEW_LINE> <INDENT> model = WishList <NEW_LINE> template_name = 'customer/wishlists/wishlists_form.html' <NEW_LINE> active_tab = "wishlists" <NEW_LINE> page_title = _('Create a new wish list') <NEW_LINE> form_class = WishListForm <NEW_LINE> product = None <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if 'product_pk' in kwargs: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.product = Product.objects.get(pk=kwargs['product_pk']) <NEW_LINE> <DEDENT> except ObjectDoesNotExist: <NEW_LINE> <INDENT> messages.error( request, _("The requested product no longer exists")) <NEW_LINE> return redirect('wishlists-create') <NEW_LINE> <DEDENT> <DEDENT> return super(WishListCreateView, self).dispatch( request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> ctx = super(WishListCreateView, self).get_context_data(**kwargs) <NEW_LINE> ctx['product'] = self.product <NEW_LINE> return ctx <NEW_LINE> <DEDENT> def get_form_kwargs(self): <NEW_LINE> <INDENT> kwargs = super(WishListCreateView, self).get_form_kwargs() <NEW_LINE> kwargs['user'] = self.request.user <NEW_LINE> return kwargs <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> wishlist = form.save() <NEW_LINE> if self.product: <NEW_LINE> <INDENT> wishlist.add(self.product) <NEW_LINE> msg = _("Your wishlist has been created and '%(name)s " "has been added") % {'name': self.product.get_title()} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = _("Your wishlist has been created") <NEW_LINE> <DEDENT> messages.success(self.request, msg) <NEW_LINE> return redirect(wishlist.get_absolute_url())
Create a new wishlist. If a product ID is passed as a kwarg, this product will be added to the wishlist.
6259903d379a373c97d9a269
class RecognizeCarRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ImageUrl = None <NEW_LINE> self.ImageBase64 = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.ImageUrl = params.get("ImageUrl") <NEW_LINE> self.ImageBase64 = params.get("ImageBase64") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
RecognizeCar request parameter structure
6259903d73bcbd0ca4bcb4ca
class VWAPEventWindow(EventWindow): <NEW_LINE> <INDENT> def __init__(self, market_aware=True, window_length=None, delta=None): <NEW_LINE> <INDENT> EventWindow.__init__(self, market_aware, window_length, delta) <NEW_LINE> self.fields = ('price', 'volume') <NEW_LINE> self.flux = 0.0 <NEW_LINE> self.totalvolume = 0.0 <NEW_LINE> <DEDENT> def handle_add(self, event): <NEW_LINE> <INDENT> self.assert_required_fields(event) <NEW_LINE> self.flux += event.volume * event.price <NEW_LINE> self.totalvolume += event.volume <NEW_LINE> <DEDENT> def handle_remove(self, event): <NEW_LINE> <INDENT> self.flux -= event.volume * event.price <NEW_LINE> self.totalvolume -= event.volume <NEW_LINE> <DEDENT> def get_vwap(self): <NEW_LINE> <INDENT> if len(self.ticks) == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (self.flux / self.totalvolume) <NEW_LINE> <DEDENT> <DEDENT> def assert_required_fields(self, event): <NEW_LINE> <INDENT> for field in self.fields: <NEW_LINE> <INDENT> if field not in event: <NEW_LINE> <INDENT> raise WrongDataForTransform( transform="VWAPEventWindow", fields=self.fields)
Iteratively maintains a vwap for a single sid over a given timedelta.
6259903d07f4c71912bb0672
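A standalone sketch of the bookkeeping VWAPEventWindow performs: each added tick contributes price * volume to flux, each expired tick subtracts it, and the VWAP is flux / totalvolume. The fixed-size deque below is only a stand-in for the time-based event window.

from collections import deque, namedtuple

Tick = namedtuple("Tick", "price volume")

window = deque(maxlen=3)           # stand-in for the rolling event window
flux = totalvolume = 0.0

for tick in [Tick(10.0, 100), Tick(10.5, 50), Tick(11.0, 200), Tick(9.5, 100)]:
    if len(window) == window.maxlen:        # oldest tick is about to fall out
        old = window[0]
        flux -= old.price * old.volume      # handle_remove
        totalvolume -= old.volume
    window.append(tick)
    flux += tick.price * tick.volume        # handle_add
    totalvolume += tick.volume
    print(round(flux / totalvolume, 4))     # get_vwap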
class CountyShape: <NEW_LINE> <INDENT> def __init__(self, county): <NEW_LINE> <INDENT> shp_fp = "Bay_Area_County_Boundaries/ark28722-s7hs4j-shapefile/s7hs4j.shp" <NEW_LINE> shp = gpd.read_file(DATA_DIR + shp_fp, encoding='UTF-8') <NEW_LINE> if any(shp['COUNTY'] == county): <NEW_LINE> <INDENT> self.boundary = shp[shp['COUNTY'] == county]['geometry'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Shape file not found!')
Get county boundaries. The tracts near the border of the counties in the US Census are very rough, especially if the county borders water. To nicely plot the tracts, a `shp` file with more precise boundary definitions is intersected with the tract polygons defined in the US Census. The tract boundaries are adjusted to the border coordinates in the `shp` file for tract boundary coordinates found to be outside of the border defined in the `shp` file.
6259903de76e3b2f99fd9c4c
class Scalpel(MakefilePackage, SourceforgePackage): <NEW_LINE> <INDENT> homepage = "http://scalpel.sourceforge.net/index.html" <NEW_LINE> sourceforge_mirror_path = "scalpel/scalpel-0.5.4.tar.gz" <NEW_LINE> version('0.5.4', sha256='506f731b3886def158c15fd8b74fa98390f304a507d2040972e6b09ddefac8f0') <NEW_LINE> version('0.5.3', sha256='d45b569fe3aa5934883bc7216c243d53168351c23e020d96a46fa77a1563b65e') <NEW_LINE> depends_on('[email protected]:') <NEW_LINE> parallel = False <NEW_LINE> @run_before('install') <NEW_LINE> def filter_sbang(self): <NEW_LINE> <INDENT> with working_dir(self.stage.source_path): <NEW_LINE> <INDENT> kwargs = {'ignore_absent': True, 'backup': False, 'string': False} <NEW_LINE> match = '^#!/usr/bin/env perl' <NEW_LINE> perl = self.spec['perl'].command <NEW_LINE> substitute = "#!{perl}".format(perl=perl) <NEW_LINE> files = ['FindDenovos.pl', 'scalpel-export', 'scalpel-discovery', 'FindVariants.pl', 'FindSomatic.pl'] <NEW_LINE> filter_file(match, substitute, *files, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def install(self, spec, prefix): <NEW_LINE> <INDENT> destdir = prefix.bin <NEW_LINE> mkdirp(destdir) <NEW_LINE> files = ['FindSomatic.pl', 'HashesIO.pm', 'MLDBM.pm', 'scalpel-export', 'Utils.pm', 'FindDenovos.pl', 'FindVariants.pl', 'scalpel-discovery', 'SequenceIO.pm', 'Usage.pm'] <NEW_LINE> for f in files: <NEW_LINE> <INDENT> install(f, destdir) <NEW_LINE> <DEDENT> dirs = ['Text', 'MLDBM', 'Parallel', ] <NEW_LINE> for d in dirs: <NEW_LINE> <INDENT> install_tree(d, join_path(destdir, d)) <NEW_LINE> <DEDENT> install_tree('bamtools-2.3.0/bin', join_path(destdir, 'bamtools-2.3.0', 'bin')) <NEW_LINE> install_tree('bamtools-2.3.0/lib', join_path(destdir, 'bamtools-2.3.0', 'lib')) <NEW_LINE> mkdirp(join_path(destdir, 'bcftools-1.1')) <NEW_LINE> install('bcftools-1.1/bcftools', join_path(destdir, 'bcftools-1.1')) <NEW_LINE> mkdirp(join_path(destdir, 'Microassembler')) <NEW_LINE> install('Microassembler/Microassembler', join_path(destdir, 'Microassembler')) <NEW_LINE> mkdirp(join_path(destdir, 'samtools-1.1')) <NEW_LINE> install('samtools-1.1/samtools', join_path(destdir, 'samtools-1.1'))
Scalpel is a software package for detecting INDELs (INsertions and DELetions) mutations in a reference genome which has been sequenced with next-generation sequencing technology.
6259903db57a9660fecd2cbb
@public <NEW_LINE> class ISteamNews(SteamWebAPI): <NEW_LINE> <INDENT> def GetNewsForApp(self, appid, maxlength='', enddate='', count='', feeds='', method_version=2): <NEW_LINE> <INDENT> parameters = { 'appid': appid, 'maxlength': maxlength, 'enddate': enddate, 'count': count, 'feeds': feeds, } <NEW_LINE> return self.api_query( interface=self.__class__.__name__, method='GetNewsForApp', method_version=method_version, httpmethod='GET', parameters=parameters, )
Methods relating to Steam News.
6259903d76d4e153a661db94
class ObjectName(TraitType): <NEW_LINE> <INDENT> info_text = "a valid object identifier in Python" <NEW_LINE> if sys.version_info[0] < 3: <NEW_LINE> <INDENT> _name_re = re.compile(r"[a-zA-Z_][a-zA-Z0-9_]*$") <NEW_LINE> def isidentifier(self, s): <NEW_LINE> <INDENT> return bool(self._name_re.match(s)) <NEW_LINE> <DEDENT> def coerce_str(self, obj, value): <NEW_LINE> <INDENT> if isinstance(value, str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return str(value) <NEW_LINE> <DEDENT> except UnicodeEncodeError: <NEW_LINE> <INDENT> self.error(obj, value) <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> isidentifier = staticmethod(lambda s: s.isidentifier()) <NEW_LINE> coerce_str = staticmethod(lambda _,s: s) <NEW_LINE> <DEDENT> def validate(self, obj, value): <NEW_LINE> <INDENT> value = self.coerce_str(obj, value) <NEW_LINE> if isinstance(value, str) and self.isidentifier(value): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> self.error(obj, value)
A string holding a valid object name in this version of Python. This does not check that the name exists in any scope.
6259903d96565a6dacd2d8ab
class ForwardInstance(Instance): <NEW_LINE> <INDENT> __slots__ = ('resolve', 'args', 'kwargs') <NEW_LINE> def __init__(self, resolve, args=None, kwargs=None, factory=None): <NEW_LINE> <INDENT> self.resolve = resolve <NEW_LINE> self.args = args <NEW_LINE> self.kwargs = kwargs <NEW_LINE> if factory is not None: <NEW_LINE> <INDENT> self.set_default_value_mode(DefaultValue.CallObject, factory) <NEW_LINE> <DEDENT> elif args is not None or kwargs is not None: <NEW_LINE> <INDENT> mode = DefaultValue.MemberMethod_Object <NEW_LINE> self.set_default_value_mode(mode, "default") <NEW_LINE> <DEDENT> self.set_validate_mode(Validate.MemberMethod_ObjectOldNew, "validate") <NEW_LINE> <DEDENT> def default(self, owner): <NEW_LINE> <INDENT> kind = self.resolve() <NEW_LINE> args = self.args or () <NEW_LINE> kwargs = self.kwargs or {} <NEW_LINE> factory = lambda: kind(*args, **kwargs) <NEW_LINE> self.set_default_value_mode(DefaultValue.CallObject, factory) <NEW_LINE> return kind(*args, **kwargs) <NEW_LINE> <DEDENT> def validate(self, owner, old, new): <NEW_LINE> <INDENT> kind = self.resolve() <NEW_LINE> self.set_validate_mode(Validate.Instance, kind) <NEW_LINE> return self.do_validate(owner, old, new) <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> clone = super(ForwardInstance, self).clone() <NEW_LINE> clone.resolve = self.resolve <NEW_LINE> clone.args = self.args <NEW_LINE> clone.kwargs = self.kwargs <NEW_LINE> return clone
An Instance which delays resolving the type definition. The first time the value is accessed or modified, the type will be resolved and the forward instance will behave identically to a normal instance.
6259903d23e79379d538d740
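A short sketch of how a forward instance member like this is typically used, assuming the nucleic atom package (where this member lives): the resolve callable lets a class reference a type that is not defined yet, such as itself.

from atom.api import Atom, ForwardInstance

class Node(Atom):
    # Node is not fully defined at class-body time, so the type is resolved
    # lazily the first time `parent` is accessed or assigned.
    parent = ForwardInstance(lambda: Node)

root = Node()
child = Node(parent=root)   # validated against Node once the type is resolved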
class TestV1beta1StorageClassList(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testV1beta1StorageClassList(self): <NEW_LINE> <INDENT> pass
V1beta1StorageClassList unit test stubs
6259903d3c8af77a43b6885c
@renderer <NEW_LINE> class MIDI7Render(Render): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(7)
convert int <-> bytes with 0 msb
6259903e73bcbd0ca4bcb4cb
class ComputerVisionClient(ComputerVisionClientOperationsMixin, SDKClient): <NEW_LINE> <INDENT> def __init__( self, endpoint, credentials): <NEW_LINE> <INDENT> self.config = ComputerVisionClientConfiguration(endpoint, credentials) <NEW_LINE> super(ComputerVisionClient, self).__init__(self.config.credentials, self.config) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self.api_version = '3.0' <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._deserialize = Deserializer(client_models)
The Computer Vision API provides state-of-the-art algorithms to process images and return information. For example, it can be used to determine if an image contains mature content, or it can be used to find all the faces in an image. It also has other features like estimating dominant and accent colors, categorizing the content of images, and describing an image with complete English sentences. Additionally, it can also intelligently generate images thumbnails for displaying large images effectively. :ivar config: Configuration for client. :vartype config: ComputerVisionClientConfiguration :param endpoint: Supported Cognitive Services endpoints. :type endpoint: str :param credentials: Subscription credentials which uniquely identify client subscription. :type credentials: None
6259903ec432627299fa4221
class UserUpdateJob(UpdateView): <NEW_LINE> <INDENT> def get_object(self, queryset=None): <NEW_LINE> <INDENT> return UserJob.objects.get(pk=self.kwargs['job']) <NEW_LINE> <DEDENT> model = UserJob <NEW_LINE> fields = '__all__' <NEW_LINE> section = "Job Profile" <NEW_LINE> title = 'update' <NEW_LINE> button = 'Update' <NEW_LINE> def get_success_url(self): <NEW_LINE> <INDENT> return reverse('listing_job', kwargs={ 'pk': self.object.user_id.pk, })
Update job details for a user
6259903e287bf620b6272e2e
class ProductionConfig(BaseConfig): <NEW_LINE> <INDENT> SECRET_KEY = 'my_precious' <NEW_LINE> DEBUG = False <NEW_LINE> SQLALCHEMY_DATABASE_URI = database_url
Production configuration.
6259903eac7a0e7691f7372b
class Journal(Component): <NEW_LINE> <INDENT> TYPE = 'journal' <NEW_LINE> def __init__(self, target, device): <NEW_LINE> <INDENT> Component.__init__(self, target.fs, target.server, target.action_enabled, target._mode) <NEW_LINE> self.target = target <NEW_LINE> self.dev = device <NEW_LINE> <DEDENT> @property <NEW_LINE> def label(self): <NEW_LINE> <INDENT> return self.uniqueid() <NEW_LINE> <DEDENT> def uniqueid(self): <NEW_LINE> <INDENT> return "%s_jdev" % self.target.uniqueid() <NEW_LINE> <DEDENT> def longtext(self): <NEW_LINE> <INDENT> return "%s journal (%s)" % (self.target.get_id(), self.dev) <NEW_LINE> <DEDENT> def full_check(self, mountdata=True): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> info = os.stat(self.dev) <NEW_LINE> <DEDENT> except OSError as exp: <NEW_LINE> <INDENT> raise ComponentError(self, str(exp)) <NEW_LINE> <DEDENT> if not stat.S_ISBLK(info[stat.ST_MODE]): <NEW_LINE> <INDENT> raise ComponentError(self, "bad journal device") <NEW_LINE> <DEDENT> <DEDENT> def lustre_check(self): <NEW_LINE> <INDENT> pass
Manage a target external journal device.
6259903e26068e7796d4db89
class AuthApi(Resource): <NEW_LINE> <INDENT> def post(self): <NEW_LINE> <INDENT> args = AuthParser.post.parse_args() <NEW_LINE> user = User.query.get(args['user_no']) <NEW_LINE> if not user: <NEW_LINE> <INDENT> abort(400, message="用户名不存在或者密码错误", code=ErrorCode.user_not_found.value) <NEW_LINE> <DEDENT> if not user.check_password(args["password"]): <NEW_LINE> <INDENT> abort(400, message="用户名不存在或密码错误", code=ErrorCode.user_not_found.value) <NEW_LINE> <DEDENT> s = Serializer( current_app.config['SECRET_KEY'], expires_in=current_app.config['TOKEN_EXPIRES_IN'] or 600 ) <NEW_LINE> return {'Token': s.dumps({"user_no": user.user_no})} <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def auth_required(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def _warpper(*args, **kwargs): <NEW_LINE> <INDENT> r_args = AuthParser.auth.parse_args() <NEW_LINE> token = r_args['Token'] <NEW_LINE> if not token: <NEW_LINE> <INDENT> abort(400, message="未检测到token", code=ErrorCode.signature_required.value) <NEW_LINE> <DEDENT> user_no = "" <NEW_LINE> s = Serializer(current_app.config['SECRET_KEY']) <NEW_LINE> try: <NEW_LINE> <INDENT> data = s.loads(token) <NEW_LINE> user_no = data['user_no'] <NEW_LINE> <DEDENT> except SignatureExpired: <NEW_LINE> <INDENT> abort(403, message="token已过期", code=ErrorCode.signature_expired.value) <NEW_LINE> <DEDENT> except BadSignature: <NEW_LINE> <INDENT> abort(401, message="token不合法", code=ErrorCode.signature_required.value) <NEW_LINE> <DEDENT> user = User.query.get(user_no) <NEW_LINE> if not user: <NEW_LINE> <INDENT> abort(400, message="用户{}不存在".format(user_no), code=ErrorCode.user_not_found.value) <NEW_LINE> <DEDENT> if not user.isvalid(): <NEW_LINE> <INDENT> abort(400, message="用户{}无权限,请联系管理员开通".format(user_no), code=ErrorCode.permission_not_allowed.value) <NEW_LINE> <DEDENT> kwargs.update({'user': user}) <NEW_LINE> return func(*args, **kwargs) <NEW_LINE> <DEDENT> return _warpper
User authentication
6259903e15baa723494631d4
class ImEncoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_size, zsize=32, use_res=False, use_bn=False, depth=0, colors=3): <NEW_LINE> <INDENT> a, b, c = 16, 64, 128 <NEW_LINE> p, q, r = 4, 4, 4 <NEW_LINE> super().__init__() <NEW_LINE> self.zsize = zsize <NEW_LINE> modules = [ util.Block(colors, a, use_res=use_res, batch_norm=use_bn), nn.MaxPool2d((p, p)), util.Block(a, b, use_res=use_res, batch_norm=use_bn), nn.MaxPool2d((q, q)), util.Block(b, c, use_res=use_res, batch_norm=use_bn), nn.MaxPool2d((r, r)), ] <NEW_LINE> for i in range(depth): <NEW_LINE> <INDENT> modules.append( util.Block(c, c, use_res=use_res, batch_norm=use_bn)) <NEW_LINE> <DEDENT> modules.extend([ util.Flatten(), nn.Linear((in_size[0] // (p*q*r)) * (in_size[1] // (p*q*r)) * c, zsize * 2) ]) <NEW_LINE> self.encoder = nn.Sequential(*modules) <NEW_LINE> <DEDENT> def forward(self, image): <NEW_LINE> <INDENT> zcomb = self.encoder(image) <NEW_LINE> return zcomb[:, :self.zsize], zcomb[:, self.zsize:]
Encoder for a VAE
6259903e16aa5153ce401730
class ImgurScrapeMIMEParser(BaseMIMEParser): <NEW_LINE> <INDENT> pattern = re.compile(r'https?://(w+\.)?(m\.)?imgur\.com/[^.]+$') <NEW_LINE> @staticmethod <NEW_LINE> def get_mimetype(url): <NEW_LINE> <INDENT> page = requests.get(url) <NEW_LINE> soup = BeautifulSoup(page.content, 'html.parser') <NEW_LINE> tag = soup.find('meta', attrs={'name': 'twitter:image'}) <NEW_LINE> if tag: <NEW_LINE> <INDENT> url = tag.get('content') <NEW_LINE> if GifvMIMEParser.pattern.match(url): <NEW_LINE> <INDENT> return GifvMIMEParser.get_mimetype(url) <NEW_LINE> <DEDENT> <DEDENT> return BaseMIMEParser.get_mimetype(url)
The majority of imgur links don't point directly to the image, so we need to open the provided url and scrape the page for the link. Scrape the actual image url from an imgur landing page. Imgur intentionally obscures this on most reddit links in order to draw more traffic for their advertisements. There are a couple of <meta> tags that supply the relevant info: <meta name="twitter:image" content="https://i.imgur.com/xrqQ4LEh.jpg"> <meta property="og:image" content="http://i.imgur.com/xrqQ4LE.jpg?fb"> <link rel="image_src" href="http://i.imgur.com/xrqQ4LE.jpg">
6259903e26238365f5fadd9a
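A minimal, offline illustration of the scraping step described above — pulling the twitter:image meta tag out of the markup with BeautifulSoup — run against an inline HTML snippet rather than a live imgur page.

from bs4 import BeautifulSoup

html = ('<html><head>'
        '<meta name="twitter:image" content="https://i.imgur.com/xrqQ4LEh.jpg">'
        '</head></html>')
soup = BeautifulSoup(html, 'html.parser')
tag = soup.find('meta', attrs={'name': 'twitter:image'})
print(tag.get('content'))   # https://i.imgur.com/xrqQ4LEh.jpg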
class HTTP01DualNetworkedServersTest(unittest.TestCase): <NEW_LINE> <INDENT> _multiprocess_can_split_ = True <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.account_key = jose.JWK.load( test_util.load_vector('rsa1024_key.pem')) <NEW_LINE> self.resources = set() <NEW_LINE> from acme.standalone import HTTP01DualNetworkedServers <NEW_LINE> self.servers = HTTP01DualNetworkedServers(('', 0), resources=self.resources) <NEW_LINE> self.port = self.servers.getsocknames()[0][1] <NEW_LINE> self.servers.serve_forever() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.servers.shutdown_and_server_close() <NEW_LINE> <DEDENT> def test_index(self): <NEW_LINE> <INDENT> response = requests.get( 'http://localhost:{0}'.format(self.port), verify=False) <NEW_LINE> self.assertEqual( response.text, 'ACME client standalone challenge solver') <NEW_LINE> self.assertTrue(response.ok) <NEW_LINE> <DEDENT> def test_404(self): <NEW_LINE> <INDENT> response = requests.get( 'http://localhost:{0}/foo'.format(self.port), verify=False) <NEW_LINE> self.assertEqual(response.status_code, http_client.NOT_FOUND) <NEW_LINE> <DEDENT> def _test_http01(self, add): <NEW_LINE> <INDENT> chall = challenges.HTTP01(token=(b'x' * 16)) <NEW_LINE> response, validation = chall.response_and_validation(self.account_key) <NEW_LINE> from acme.standalone import HTTP01RequestHandler <NEW_LINE> resource = HTTP01RequestHandler.HTTP01Resource( chall=chall, response=response, validation=validation) <NEW_LINE> if add: <NEW_LINE> <INDENT> self.resources.add(resource) <NEW_LINE> <DEDENT> return resource.response.simple_verify( resource.chall, 'localhost', self.account_key.public_key(), port=self.port) <NEW_LINE> <DEDENT> def test_http01_found(self): <NEW_LINE> <INDENT> self.assertTrue(self._test_http01(add=True)) <NEW_LINE> <DEDENT> def test_http01_not_found(self): <NEW_LINE> <INDENT> self.assertFalse(self._test_http01(add=False))
Tests for acme.standalone.HTTP01DualNetworkedServers.
6259903e07d97122c4217ee1
class Example(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def getGithubUrl(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): <NEW_LINE> <INDENT> return grpc.experimental.unary_unary(request, target, '/example.Example/getGithubUrl', example__pb2.request.SerializeToString, example__pb2.userGithub.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getMessage(request, target, options=(), channel_credentials=None, call_credentials=None, insecure=False, compression=None, wait_for_ready=None, timeout=None, metadata=None): <NEW_LINE> <INDENT> return grpc.experimental.unary_unary(request, target, '/example.Example/getMessage', example__pb2.request.SerializeToString, example__pb2.userGithubMessage.FromString, options, channel_credentials, insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
Missing associated documentation comment in .proto file.
6259903ed10714528d69efad
class Player (models.Model): <NEW_LINE> <INDENT> firstName = models.CharField(max_length=70) <NEW_LINE> lastName = models.CharField(max_length=70) <NEW_LINE> birth = models.DateField() <NEW_LINE> vk_link = models.CharField(max_length=30, null=True) <NEW_LINE> basePosition = models.CharField(max_length=1) <NEW_LINE> image = models.ImageField(upload_to="./players", default="./404/profile.jpg", **NULLABLE) <NEW_LINE> history = models.ManyToManyField('Team', through='RecOfTeam') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "Игрок" <NEW_LINE> verbose_name_plural = "Игроки" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s %s" % (self.firstName, self.lastName) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return unicode(self.__str__()) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return unicode(self.__str__()) <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse("player", args=(self.id,))
Simple model of player.
6259903e23e79379d538d742
@Predictor.register("sentence-tagger") <NEW_LINE> class SentenceTaggerPredictor(Predictor): <NEW_LINE> <INDENT> def __init__( self, model: Model, dataset_reader: DatasetReader, language: str = "en_core_web_sm", ) -> None: <NEW_LINE> <INDENT> super().__init__(model, dataset_reader) <NEW_LINE> self._tokenizer = SpacyTokenizer(language=language, pos_tags=True) <NEW_LINE> <DEDENT> def predict(self, sentence: str) -> JsonDict: <NEW_LINE> <INDENT> return self.predict_json({"sentence": sentence}) <NEW_LINE> <DEDENT> @overrides <NEW_LINE> def _json_to_instance(self, json_dict: JsonDict) -> Instance: <NEW_LINE> <INDENT> sentence = json_dict["sentence"] <NEW_LINE> tokens = self._tokenizer.tokenize(sentence) <NEW_LINE> return self._dataset_reader.text_to_instance(tokens) <NEW_LINE> <DEDENT> @overrides <NEW_LINE> def predictions_to_labeled_instances( self, instance: Instance, outputs: Dict[str, numpy.ndarray] ) -> List[Instance]: <NEW_LINE> <INDENT> predicted_tags = outputs["tags"] <NEW_LINE> predicted_spans = [] <NEW_LINE> i = 0 <NEW_LINE> while i < len(predicted_tags): <NEW_LINE> <INDENT> tag = predicted_tags[i] <NEW_LINE> if tag[0] == "U": <NEW_LINE> <INDENT> current_tags = [ t if idx == i else "O" for idx, t in enumerate(predicted_tags) ] <NEW_LINE> predicted_spans.append(current_tags) <NEW_LINE> <DEDENT> elif tag[0] == "B": <NEW_LINE> <INDENT> begin_idx = i <NEW_LINE> while tag[0] != "L": <NEW_LINE> <INDENT> i += 1 <NEW_LINE> tag = predicted_tags[i] <NEW_LINE> <DEDENT> end_idx = i <NEW_LINE> current_tags = [ t if begin_idx <= idx <= end_idx else "O" for idx, t in enumerate(predicted_tags) ] <NEW_LINE> predicted_spans.append(current_tags) <NEW_LINE> <DEDENT> i += 1 <NEW_LINE> <DEDENT> instances = [] <NEW_LINE> for labels in predicted_spans: <NEW_LINE> <INDENT> new_instance = deepcopy(instance) <NEW_LINE> text_field: TextField = instance["tokens"] <NEW_LINE> new_instance.add_field( "tags", SequenceLabelField(labels, text_field), self._model.vocab ) <NEW_LINE> instances.append(new_instance) <NEW_LINE> <DEDENT> instances.reverse() <NEW_LINE> return instances
Predictor for any model that takes in a sentence and returns a single set of tags for it. In particular, it can be used with the [`CrfTagger`](../models/crf_tagger.md) model and also the [`SimpleTagger`](../models/simple_tagger.md) model.
6259903e3c8af77a43b6885d
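A self-contained sketch of the BIOUL walk inside predictions_to_labeled_instances: every "U" tag and every "B…L" run becomes its own tag sequence with all other positions zeroed to "O". The tag list below is made up for illustration.

predicted_tags = ["O", "U-PER", "O", "B-LOC", "I-LOC", "L-LOC"]

spans = []
i = 0
while i < len(predicted_tags):
    tag = predicted_tags[i]
    if tag[0] == "U":                     # single-token span
        spans.append([t if j == i else "O" for j, t in enumerate(predicted_tags)])
    elif tag[0] == "B":                   # multi-token span: walk forward to the "L" tag
        begin = i
        while predicted_tags[i][0] != "L":
            i += 1
        spans.append([t if begin <= j <= i else "O"
                      for j, t in enumerate(predicted_tags)])
    i += 1

print(spans[0])   # ['O', 'U-PER', 'O', 'O', 'O', 'O']
print(spans[1])   # ['O', 'O', 'O', 'B-LOC', 'I-LOC', 'L-LOC']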
class RenderParams(OrderedDict): <NEW_LINE> <INDENT> def __init__(self, render_id="00", folder='~', filename='sample.wav', duration=20.0, wait=0.0): <NEW_LINE> <INDENT> OrderedDict.__init__(self, [ ('render_id', render_id), ('folder', folder), ('filename', filename), ('duration', duration), ('wait', wait) ]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_json(cls, dct): <NEW_LINE> <INDENT> return RenderParams(**dct)
Keep track of render params.
6259903ebe383301e0254a5b
class size_spider(scrapy.Spider): <NEW_LINE> <INDENT> name = "akc" <NEW_LINE> allowed_domains = ["akc.org"] <NEW_LINE> start_urls = ["http://www.akc.org/dog-breeds/"] <NEW_LINE> def parse(self, response): <NEW_LINE> <INDENT> top_atoz = response.xpath('//ul[@class="pagination"]')[0] <NEW_LINE> atoz = top_atoz.xpath(".//li/a/@href").extract() <NEW_LINE> for letter in atoz: <NEW_LINE> <INDENT> url = response.urljoin(letter) <NEW_LINE> yield scrapy.Request(url, callback=self.breeds_on_page) <NEW_LINE> <DEDENT> <DEDENT> def breeds_on_page(self,response): <NEW_LINE> <INDENT> rurls = response.xpath("//h2/a/@href").extract() <NEW_LINE> for rurl in rurls: <NEW_LINE> <INDENT> url = response.urljoin(rurl) <NEW_LINE> yield scrapy.Request(url, callback=self.extract_breed_info) <NEW_LINE> <DEDENT> <DEDENT> def extract_breed_info(self,response): <NEW_LINE> <INDENT> breed = response.xpath("//h1/text()").extract()[0] <NEW_LINE> try: <NEW_LINE> <INDENT> rank_node = response.xpath('//div[@class="bigrank"]')[0] <NEW_LINE> rank = int( rank_node.xpath("text()").extract()[0] ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> rank = -1 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> mytype_node = response.xpath('//div[@class="type"]') <NEW_LINE> mytype = mytype_node.xpath("img/@alt").extract()[0] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> mytype = "unknown" <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> detail_node = response.xpath('//div[@class="description"]') <NEW_LINE> energy = detail_node.xpath('span[@class="energy_levels"]/text()').extract()[0] <NEW_LINE> energy = energy.replace("\n","").replace("ENERGY","").strip() <NEW_LINE> mysize = detail_node.xpath('span[@class="size"]/text()').extract()[0] <NEW_LINE> mysize = mysize.replace("\n","").replace("size","").strip() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> energy = "unknown" <NEW_LINE> mysize = "unknown" <NEW_LINE> <DEDENT> entry = dog_item() <NEW_LINE> entry["breed"] = breed <NEW_LINE> entry["rank"] = rank <NEW_LINE> entry["group"] = mytype <NEW_LINE> entry["size"] = mysize <NEW_LINE> entry["energy"]= energy <NEW_LINE> yield entry
use `scrapy crawl akc -o akc.json` to run this spider
6259903e4e696a045264e743
class CommandToolBitLoad: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def GetResources(self): <NEW_LINE> <INDENT> return { "Pixmap": "Path_ToolBit", "MenuText": QT_TRANSLATE_NOOP("Path_ToolBitLoad", "Load Tool"), "ToolTip": QT_TRANSLATE_NOOP( "Path_ToolBitLoad", "Load an existing ToolBit object from a file" ), } <NEW_LINE> <DEDENT> def selectedTool(self): <NEW_LINE> <INDENT> sel = FreeCADGui.Selection.getSelectionEx() <NEW_LINE> if 1 == len(sel) and isinstance( sel[0].Object.Proxy, PathScripts.PathToolBit.ToolBit ): <NEW_LINE> <INDENT> return sel[0].Object <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def IsActive(self): <NEW_LINE> <INDENT> return FreeCAD.ActiveDocument is not None <NEW_LINE> <DEDENT> def Activated(self): <NEW_LINE> <INDENT> if PathScripts.PathToolBitGui.LoadTools(): <NEW_LINE> <INDENT> FreeCAD.ActiveDocument.recompute()
Command used to load an existing Tool from a file into the current document.
6259903ed6c5a102081e3369
class InfoBox(Gtk.Box): <NEW_LINE> <INDENT> def __init__(self, all_elements, uuid=None): <NEW_LINE> <INDENT> Gtk.Box.__init__(self, halign=Gtk.Align.CENTER, height_request=90, spacing=5, margin_top=10, margin_left=10, margin_right=10) <NEW_LINE> actions.destroy_children(self) <NEW_LINE> element = get_by_uuid(uuid, all_elements) <NEW_LINE> if element: <NEW_LINE> <INDENT> vbox_keys = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=0) <NEW_LINE> self.pack_start(vbox_keys, False, True, 0) <NEW_LINE> vbox_values = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=0) <NEW_LINE> self.pack_start(vbox_values, False, True, 0) <NEW_LINE> for text in ['Name:', 'Type:', 'Size:', 'Content:']: <NEW_LINE> <INDENT> self.add_label(text, vbox_keys, 1) <NEW_LINE> <DEDENT> name = '<b>%s</b>' %element['label']['name'] <NEW_LINE> self.add_label(name, vbox_values, 0) <NEW_LINE> text_type = element['label']['type']['long'] <NEW_LINE> if element['type'] in ['pv', 'lv'] and element['vg_name']: <NEW_LINE> <INDENT> text_type += ' - in volume group %s' %element['vg_name'] <NEW_LINE> <DEDENT> self.add_label(text_type, vbox_values, 0) <NEW_LINE> self.add_label(element['label']['size'], vbox_values) <NEW_LINE> self.add_label(element['label']['content'], vbox_values) <NEW_LINE> <DEDENT> self.show_all() <NEW_LINE> <DEDENT> def add_label(self, text, box, xalign=0): <NEW_LINE> <INDENT> label = Gtk.Label(xalign = xalign) <NEW_LINE> label.set_markup(text) <NEW_LINE> box.pack_start(label, False, True, 0)
Box with information about the selected storage element.
6259903e10dbd63aa1c71e1a
class BaseNotificationStoreProvider(object): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> @abc.abstractmethod <NEW_LINE> def get_notification_message_by_id(self, msg_id, options=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def save_notification_message(self, msg): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def save_user_notification(self, user_msg): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def bulk_create_user_notification(self, user_msgs): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_notification_type(self, name): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_all_notification_types(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def save_notification_type(self, msg_type): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_notification_for_user(self, user_id, msg_id): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_num_notifications_for_user(self, user_id, filters=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_notifications_for_user(self, user_id, filters=None, options=None): <NEW_LINE> <INDENT> raise NotImplementedError()
The base abstract class for all notifications data providers, such as MySQL/Django-ORM backed. IMPORTANT: NotificationStoreProvider is assumed to be a singleton, therefore there must be no state stored in the instance of the provider class.
6259903e0fa83653e46f611d
class UnifiedDiffParser(t_diff.TextDiffParser): <NEW_LINE> <INDENT> diff_format = "unified" <NEW_LINE> BEFORE_FILE_CRE = re.compile(r"^--- ({0})(\s+{1})?(.*)$".format(pd_utils.PATH_RE_STR, t_diff.EITHER_TS_RE_STR)) <NEW_LINE> AFTER_FILE_CRE = re.compile(r"^\+\+\+ ({0})(\s+{1})?(.*)$".format(pd_utils.PATH_RE_STR, t_diff.EITHER_TS_RE_STR)) <NEW_LINE> HUNK_DATA_CRE = re.compile(r"^@@\s+-(\d+)(,(\d+))?\s+\+(\d+)(,(\d+))?\s+@@\s*(.*)$") <NEW_LINE> @staticmethod <NEW_LINE> def get_hunk_at(lines, index): <NEW_LINE> <INDENT> match = UnifiedDiffParser.HUNK_DATA_CRE.match(lines[index]) <NEW_LINE> if not match: <NEW_LINE> <INDENT> return (None, index) <NEW_LINE> <DEDENT> start_index = index <NEW_LINE> before_length = int(match.group(3)) if match.group(3) is not None else 1 <NEW_LINE> after_length = int(match.group(6)) if match.group(6) is not None else 1 <NEW_LINE> index += 1 <NEW_LINE> before_count = after_count = 0 <NEW_LINE> try: <NEW_LINE> <INDENT> while before_count < before_length or after_count < after_length: <NEW_LINE> <INDENT> if lines[index].startswith("-"): <NEW_LINE> <INDENT> before_count += 1 <NEW_LINE> <DEDENT> elif lines[index].startswith("+"): <NEW_LINE> <INDENT> after_count += 1 <NEW_LINE> <DEDENT> elif lines[index].startswith(" "): <NEW_LINE> <INDENT> before_count += 1 <NEW_LINE> after_count += 1 <NEW_LINE> <DEDENT> elif not lines[index].startswith("\\"): <NEW_LINE> <INDENT> raise t_diff.ParseError(_("Unexpected end of unified diff hunk."), index) <NEW_LINE> <DEDENT> index += 1 <NEW_LINE> <DEDENT> if index < len(lines) and lines[index].startswith("\\"): <NEW_LINE> <INDENT> index += 1 <NEW_LINE> <DEDENT> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise t_diff.ParseError(_("Unexpected end of patch text.")) <NEW_LINE> <DEDENT> before_chunk = _CHUNK(int(match.group(1)), before_length) <NEW_LINE> after_chunk = _CHUNK(int(match.group(4)), after_length) <NEW_LINE> return (UnifiedDiffHunk(lines[start_index:index], before_chunk, after_chunk), index)
Class to parse "unified" diffs
6259903e23e79379d538d743
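A quick check of the hunk-header regex used above against a typical unified-diff line; the printed groups follow the pattern's capture groups.

import re

HUNK_DATA_CRE = re.compile(r"^@@\s+-(\d+)(,(\d+))?\s+\+(\d+)(,(\d+))?\s+@@\s*(.*)$")

m = HUNK_DATA_CRE.match("@@ -12,3 +12,4 @@ def main():")
print(m.group(1), m.group(3))   # before start/length: 12 3
print(m.group(4), m.group(6))   # after start/length:  12 4
print(m.group(7))               # trailing context:    def main():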
class NamedUnit(UnitBase): <NEW_LINE> <INDENT> def __init__(self, st, register=False, doc=None, format=None): <NEW_LINE> <INDENT> UnitBase.__init__(self) <NEW_LINE> if isinstance(st, (bytes, unicode)): <NEW_LINE> <INDENT> self._names = [st] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if len(st) == 0: <NEW_LINE> <INDENT> raise ValueError( "st list must have at least one entry") <NEW_LINE> <DEDENT> self._names = st[:] <NEW_LINE> <DEDENT> if format is None: <NEW_LINE> <INDENT> format = {} <NEW_LINE> <DEDENT> self._format = format <NEW_LINE> if doc is None: <NEW_LINE> <INDENT> doc = self._generate_doc() <NEW_LINE> <DEDENT> doc = textwrap.dedent(doc) <NEW_LINE> doc = textwrap.fill(doc) <NEW_LINE> self.__doc__ = doc <NEW_LINE> self._register_unit(register) <NEW_LINE> <DEDENT> def _generate_doc(self): <NEW_LINE> <INDENT> names = self.names <NEW_LINE> if len(self.names) > 1: <NEW_LINE> <INDENT> return "{1} ({0})".format(*names[:2]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return names[0] <NEW_LINE> <DEDENT> <DEDENT> def get_format_name(self, format): <NEW_LINE> <INDENT> return self._format.get(format, self.name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def names(self): <NEW_LINE> <INDENT> return self._names <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._names[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def aliases(self): <NEW_LINE> <INDENT> return self._names[1:] <NEW_LINE> <DEDENT> def _register_unit(self, register): <NEW_LINE> <INDENT> if not self._names: <NEW_LINE> <INDENT> raise UnitsException("unit has no string representation") <NEW_LINE> <DEDENT> for st in self._names: <NEW_LINE> <INDENT> if not re.match("^[A-Za-z_]+$", st): <NEW_LINE> <INDENT> raise ValueError( "Invalid unit name {0!r}".format(st)) <NEW_LINE> <DEDENT> if register: <NEW_LINE> <INDENT> if st in self._namespace: <NEW_LINE> <INDENT> raise ValueError( "Object with name {0!r} already exists " "in namespace".format(st)) <NEW_LINE> <DEDENT> self._namespace[st] = self <NEW_LINE> <DEDENT> self._registry[st] = self
The base class of units that have a name. Parameters ---------- st : str or list of str The name of the unit. If a list, the first element is the canonical (short) name, and the rest of the elements are aliases. register : boolean, optional When `True`, also register the unit in the standard unit namespace. Default is `False`. doc : str, optional A docstring describing the unit. format : dict, optional A mapping to format-specific representations of this unit. For example, for the ``Ohm`` unit, it might be nice to have it displayed as ``\Omega`` by the ``latex`` formatter. In that case, `format` argument should be set to:: {'latex': r'\Omega'} Raises ------ ValueError If any of the given unit names are already in the registry. ValueError If any of the given unit names are not valid Python tokens.
6259903e30c21e258be99a51
class SimulationRunMethod(Waveform): <NEW_LINE> <INDENT> def runSim(self, sim): <NEW_LINE> <INDENT> duration = randrange(1, 300) <NEW_LINE> while sim.run(duration, quiet=QUIET): <NEW_LINE> <INDENT> duration = randrange(1, 300)
Basic test of run method of Simulation object
6259903e50485f2cf55dc1c7
@ClassFactory.register(ClassType.NETWORK) <NEW_LINE> class AdaptiveAvgPool2d(nn.AdaptiveAvgPool2d, OperatorSerializable): <NEW_LINE> <INDENT> def __init__(self, output_size=(1, 1)): <NEW_LINE> <INDENT> super(AdaptiveAvgPool2d, self).__init__(output_size) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> return super().forward(x)
AdaptiveAvgPool2d Module inherit nn.AdaptiveAvgPool2d.
6259903e16aa5153ce401732
class TestTimeField(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls) -> None: <NEW_LINE> <INDENT> init_testing() <NEW_LINE> <DEDENT> def test_basic(self) -> None: <NEW_LINE> <INDENT> mtd = application.PROJECT.conn_manager.manager().metadata("flupdates") <NEW_LINE> self.assertTrue(mtd) <NEW_LINE> if mtd is not None: <NEW_LINE> <INDENT> field = mtd.field("hora") <NEW_LINE> self.assertNotEqual(field, None) <NEW_LINE> if field is not None: <NEW_LINE> <INDENT> self.assertEqual(field.name(), "hora") <NEW_LINE> self.assertEqual(field.alias(), "Hora") <NEW_LINE> self.assertEqual(field.isPrimaryKey(), False) <NEW_LINE> self.assertEqual(field.isCompoundKey(), False) <NEW_LINE> self.assertEqual(field.length(), 0) <NEW_LINE> self.assertEqual(field.type(), "time") <NEW_LINE> self.assertEqual(field.allowNull(), False) <NEW_LINE> self.assertEqual(field.visibleGrid(), True) <NEW_LINE> self.assertEqual(field.visible(), True) <NEW_LINE> self.assertEqual(field.editable(), False) <NEW_LINE> self.assertEqual(field.defaultValue(), None) <NEW_LINE> self.assertEqual(field.regExpValidator(), None) <NEW_LINE> self.assertEqual(field.flDecodeType("time"), "time")
Test time field.
6259903ebaa26c4b54d504ed
class Plants: <NEW_LINE> <INDENT> lookup = {} <NEW_LINE> def __init__(self, name, emoji, rarity, level_up_boost): <NEW_LINE> <INDENT> self.name = name.capitalize() <NEW_LINE> self.emoji = lookup_emoji(emoji) <NEW_LINE> self.rarity = rarity <NEW_LINE> self.single = False <NEW_LINE> self.pet_multiplyer = level_up_boost <NEW_LINE> self.utilities = ["food"] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> text = f"{self.emoji} {self.name}" <NEW_LINE> return text
All the plants you can collect. These can be stacked.
6259903ed99f1b3c44d068e1
class TestNcValue(BaseTestCase): <NEW_LINE> <INDENT> __metaclass__ = TestNcValueMeta <NEW_LINE> def test_singleton(self): <NEW_LINE> <INDENT> value = TEST_VALUES[1] <NEW_LINE> meta_val_a = noca.nc_value.value(value, METADATAS[0]) <NEW_LINE> meta_val_b = noca.nc_value.value(value, METADATAS[1]) <NEW_LINE> self.assertIs(meta_val_a.basetype, meta_val_b.basetype) <NEW_LINE> <DEDENT> def test_basetype_consistency(self): <NEW_LINE> <INDENT> value = TEST_VALUES[1] <NEW_LINE> value_type = type(value) <NEW_LINE> meta_val_a = noca.nc_value.value(value, METADATAS[0]) <NEW_LINE> meta_val_b = noca.nc_value.value(meta_val_a, METADATAS[1]) <NEW_LINE> self.assertIs(meta_val_a.basetype, value_type) <NEW_LINE> self.assertIs(meta_val_b.basetype, value_type) <NEW_LINE> <DEDENT> def test_inheritance(self): <NEW_LINE> <INDENT> value = TEST_VALUES[1] <NEW_LINE> value_type = type(value) <NEW_LINE> meta_val_a = noca.nc_value.value(value, METADATAS[0]) <NEW_LINE> self.assertTrue(isinstance(meta_val_a, value_type)) <NEW_LINE> self.assertTrue(isinstance(meta_val_a, numbers.Real)) <NEW_LINE> self.assertTrue(isinstance(meta_val_a, noca.nc_value.NcValue)) <NEW_LINE> self.assertTrue(isinstance(meta_val_a, noca.nc_value.NcIntValue)) <NEW_LINE> <DEDENT> def test_metadata_concatenation(self): <NEW_LINE> <INDENT> value = TEST_VALUES[1] <NEW_LINE> meta_val_a = noca.nc_value.value(value, METADATAS[0]) <NEW_LINE> meta_val_calc = (meta_val_a + 2) / 2 <NEW_LINE> self.assertEqual(meta_val_calc.metadata, "({} + 2) / 2".format(METADATAS[0]))
The TestNcValueMeta metaclass instantiates this test case with all tests
6259903e24f1403a926861ef
class RaycastInterceptor(b2.rayCastCallback): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.__type = None <NEW_LINE> self.__point = None <NEW_LINE> <DEDENT> def ReportFixture(self, fixture, point, normal, fraction): <NEW_LINE> <INDENT> self.__type = fixture.body.userData <NEW_LINE> self.__point = point <NEW_LINE> return fraction <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self.__type <NEW_LINE> <DEDENT> @property <NEW_LINE> def point(self): <NEW_LINE> <INDENT> return self.__point
Raycast callback used in laser targeting. Returns the userData of the intersected fixture's body, which identifies it as a wall or a target
6259903e07f4c71912bb0677
class RandomCrop(object): <NEW_LINE> <INDENT> def __init__(self, min_ious=(0.1, 0.3, 0.5, 0.7, 0.9), min_crop_size=0.3): <NEW_LINE> <INDENT> self.sample_mode = (1, *min_ious, 0) <NEW_LINE> self.min_crop_size = min_crop_size <NEW_LINE> <DEDENT> def __call__(self, img, boxes, labels): <NEW_LINE> <INDENT> h, w, c = img.shape <NEW_LINE> while True: <NEW_LINE> <INDENT> mode = random.choice(self.sample_mode) <NEW_LINE> if mode == 1: <NEW_LINE> <INDENT> return img, boxes, labels <NEW_LINE> <DEDENT> min_iou = mode <NEW_LINE> for i in range(50): <NEW_LINE> <INDENT> new_w = random.uniform(self.min_crop_size * w, w) <NEW_LINE> new_h = random.uniform(self.min_crop_size * h, h) <NEW_LINE> if new_h / new_w < 0.5 or new_h / new_w > 2: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> left = random.uniform(w - new_w) <NEW_LINE> top = random.uniform(h - new_h) <NEW_LINE> patch = np.array((int(left), int(top), int(left + new_w), int(top + new_h))) <NEW_LINE> overlaps = bbox_overlaps( patch.reshape(-1, 4), boxes.reshape(-1, 4)).reshape(-1) <NEW_LINE> if overlaps.min() < min_iou: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> center = (boxes[:, :2] + boxes[:, 2:]) / 2 <NEW_LINE> mask = (center[:, 0] > patch[0]) * ( center[:, 1] > patch[1]) * (center[:, 0] < patch[2]) * ( center[:, 1] < patch[3]) <NEW_LINE> if not mask.any(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> boxes = boxes[mask] <NEW_LINE> labels = labels[mask] <NEW_LINE> img = img[patch[1]:patch[3], patch[0]:patch[2]] <NEW_LINE> boxes[:, 2:] = boxes[:, 2:].clip(max=patch[2:]) <NEW_LINE> boxes[:, :2] = boxes[:, :2].clip(min=patch[:2]) <NEW_LINE> boxes -= np.tile(patch[:2], 2) <NEW_LINE> return img, boxes, labels
Random crop. The randomness shows up in two ways. First, the crop IoU threshold is sampled at random (from 0/0.1/0.3/0.5/0.7/0.9/1), i.e. the minimum IoU between the cropped image and the gt bboxes must exceed the sampled value; so although every crop is random, the gt bbox coverage differs each time while still including at least part of a box (since IoU > 0.1). Second, a random w and h define the crop, and the IoU between that crop and the gt bboxes is computed (part of the step above). In addition, the crop is forced to contain the bbox center points, ensuring at least 1/4 of a gt bbox lies inside the cropped image; otherwise the box would be too small to be useful for training.
6259903e21a7993f00c671b3
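A self-contained illustration of the acceptance test the docstring describes: a candidate patch is kept only if its minimum IoU against the gt bboxes exceeds the sampled threshold and it contains at least one bbox center. The bbox_overlaps helper below is a minimal stand-in for the one the class imports.

import numpy as np

def bbox_overlaps(patch, boxes):
    # pairwise IoU between patch rows and box rows (both (N, 4) arrays)
    x1 = np.maximum(patch[:, None, 0], boxes[None, :, 0])
    y1 = np.maximum(patch[:, None, 1], boxes[None, :, 1])
    x2 = np.minimum(patch[:, None, 2], boxes[None, :, 2])
    y2 = np.minimum(patch[:, None, 3], boxes[None, :, 3])
    inter = np.clip(x2 - x1, 0, None) * np.clip(y2 - y1, 0, None)
    area_p = (patch[:, 2] - patch[:, 0]) * (patch[:, 3] - patch[:, 1])
    area_b = (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])
    return inter / (area_p[:, None] + area_b[None, :] - inter)

boxes = np.array([[30, 30, 70, 70]], dtype=float)      # one gt bbox
patch = np.array([[20, 20, 80, 80]], dtype=float)      # candidate crop
min_iou = 0.3                                          # sampled threshold

ious = bbox_overlaps(patch, boxes).reshape(-1)
centers = (boxes[:, :2] + boxes[:, 2:]) / 2
inside = ((centers[:, 0] > patch[0, 0]) & (centers[:, 1] > patch[0, 1]) &
          (centers[:, 0] < patch[0, 2]) & (centers[:, 1] < patch[0, 3]))
print(ious.min() >= min_iou and inside.any())          # True -> patch accepted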
class TestBuildCQM(unittest.TestCase): <NEW_LINE> <INDENT> def test_build_cqm1(self): <NEW_LINE> <INDENT> cqm = build_knapsack_cqm([10, 1], [5, 7], 10) <NEW_LINE> self.assertEqual(cqm.objective.linear, {0: -10.0, 1: -1.0}) <NEW_LINE> self.assertEqual(cqm.constraints["capacity"].lhs.linear, {0: 5.0, 1: 7.0}) <NEW_LINE> self.assertEqual(cqm.constraints["capacity"].rhs, 10)
Verify correct construction of CQM for very_small.csv data with maximum weight 10.
6259903e10dbd63aa1c71e1c
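The build_knapsack_cqm helper under test is not shown here; the following is a hedged sketch of what it presumably does with dimod's ConstrainedQuadraticModel, consistent with the assertions above (objective linear {0: -10.0, 1: -1.0} and a "capacity" constraint 5*x0 + 7*x1 <= 10).

from dimod import Binary, ConstrainedQuadraticModel

def build_knapsack_cqm(values, weights, capacity):
    x = [Binary(i) for i in range(len(values))]
    cqm = ConstrainedQuadraticModel()
    cqm.set_objective(-sum(v * xi for v, xi in zip(values, x)))   # maximize total value
    cqm.add_constraint(sum(w * xi for w, xi in zip(weights, x)) <= capacity,
                       label="capacity")
    return cqm

cqm = build_knapsack_cqm([10, 1], [5, 7], 10)
print(cqm.objective.linear)              # {0: -10.0, 1: -1.0}
print(cqm.constraints["capacity"].rhs)   # 10.0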
class Network(object): <NEW_LINE> <INDENT> def __init__(self, id): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.populations = {} <NEW_LINE> self.projections = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def location(self): <NEW_LINE> <INDENT> location_sum = mathutils.Vector((0,0,0)) <NEW_LINE> for _, population in self.populations.items(): <NEW_LINE> <INDENT> location_sum += population.location <NEW_LINE> <DEDENT> return location_sum / len(self.populations) <NEW_LINE> <DEDENT> def highlightPopulationsAll(self): <NEW_LINE> <INDENT> for population in self.populations: <NEW_LINE> <INDENT> self.highlightPopulation(population) <NEW_LINE> <DEDENT> <DEDENT> def highlightPopulation(self, population_id): <NEW_LINE> <INDENT> if not population_id in self.populations: <NEW_LINE> <INDENT> raise ValueError('Population ID not found') <NEW_LINE> <DEDENT> random_color = (random.random(), random.random(), random.random() ) <NEW_LINE> self.populations[population_id].setColor(random_color) <NEW_LINE> <DEDENT> def removeHighlightAll(self): <NEW_LINE> <INDENT> for _,population in self.populations.items(): <NEW_LINE> <INDENT> population.removeColor() <NEW_LINE> <DEDENT> <DEDENT> def pullProjectionsAll(self, strength): <NEW_LINE> <INDENT> for id1, id2 in self.projections: <NEW_LINE> <INDENT> if id1 != id2: <NEW_LINE> <INDENT> self.pullProjections(id1, id2, strength) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def pullProjections(self, populationID_1, populationID_2, strength): <NEW_LINE> <INDENT> projections = self.projections[(populationID_1,populationID_2)] <NEW_LINE> middle = mathutils.Vector((0,0,0)) <NEW_LINE> for p in projections: <NEW_LINE> <INDENT> middle += p.middle <NEW_LINE> <DEDENT> for p in projections: <NEW_LINE> <INDENT> p.pullCenterTo(middle / len(projections), strength) <NEW_LINE> <DEDENT> <DEDENT> def animateSpikes(self, spikes, colorMap='jet', animateAxons=True): <NEW_LINE> <INDENT> for cell_id in spikes: <NEW_LINE> <INDENT> for _, population in self.populations.items(): <NEW_LINE> <INDENT> if cell_id in population.cells: <NEW_LINE> <INDENT> population.cells[cell_id].setSpikes(spikes[cell_id], colorMap) <NEW_LINE> continue
This class represents a network. A network consists of populations and the projections between them.
6259903e23849d37ff8522ff
class ShowIpBgpAllDampeningParameters(ShowIpBgpAllDampeningParameters_iosxe): <NEW_LINE> <INDENT> pass
Parser for show ip bgp all dampening parameters
6259903e287bf620b6272e31
class ApiType(Enum): <NEW_LINE> <INDENT> exec = 'exec' <NEW_LINE> cloudflare = 'cloudflare'
The API scheme.
6259903e507cdc57c63a5fe1
class AndExpression(SimpleComparison, Binary): <NEW_LINE> <INDENT> def __init__(self, ebpf, left, right): <NEW_LINE> <INDENT> Binary.__init__(self, ebpf, left, right, Opcode.AND) <NEW_LINE> SimpleComparison.__init__(self, ebpf, left, right, Opcode.JSET) <NEW_LINE> self.opcode = (Opcode.JSET, None, Opcode.JSET, None) <NEW_LINE> self.invert = None <NEW_LINE> <DEDENT> def compare(self, negative): <NEW_LINE> <INDENT> super().compare(False) <NEW_LINE> if negative: <NEW_LINE> <INDENT> origin = len(self.ebpf.opcodes) <NEW_LINE> self.ebpf.opcodes.append(None) <NEW_LINE> self.target() <NEW_LINE> self.origin = origin <NEW_LINE> self.opcode = Opcode.JMP <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, exc, etype, tb): <NEW_LINE> <INDENT> super().__exit__(exc, etype, tb) <NEW_LINE> if self.invert is not None: <NEW_LINE> <INDENT> olen = len(self.ebpf.opcodes) <NEW_LINE> assert self.ebpf.opcodes[self.invert].opcode == Opcode.JMP <NEW_LINE> self.ebpf.opcodes[self.invert:self.invert] = self.ebpf.opcodes[self.else_origin+1:] <NEW_LINE> del self.ebpf.opcodes[olen-1:] <NEW_LINE> op, dst, src, off, imm = self.ebpf.opcodes[self.invert - 1] <NEW_LINE> self.ebpf.opcodes[self.invert - 1] = Instruction(op, dst, src, len(self.ebpf.opcodes) - self.else_origin + 1, imm) <NEW_LINE> <DEDENT> <DEDENT> def Else(self): <NEW_LINE> <INDENT> op, dst, src, off, imm = self.ebpf.opcodes[self.origin] <NEW_LINE> if op is Opcode.JMP: <NEW_LINE> <INDENT> self.invert = self.origin <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ebpf.opcodes[self.origin] = Instruction(op, dst, src, off+1, imm) <NEW_LINE> <DEDENT> self.else_origin = len(self.ebpf.opcodes) <NEW_LINE> self.ebpf.opcodes.append(None) <NEW_LINE> return self
The & operator may also be used as a comparison
6259903e8da39b475be04434
class AbilityName(TableBase): <NEW_LINE> <INDENT> __tablename__ = 'ability_names' <NEW_LINE> language_id = sa.Column(sa.Integer, sa.ForeignKey('languages.id'), primary_key=True) <NEW_LINE> ability_id = sa.Column(sa.Integer, sa.ForeignKey('abilities.id'), primary_key=True) <NEW_LINE> name = sa.Column(sa.Text, nullable=False)
An ability's name in a particular language.
6259903e711fe17d825e15bf
class ExternalSite(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'gcd_external_site' <NEW_LINE> app_label = 'gcd' <NEW_LINE> ordering = ('site',) <NEW_LINE> verbose_name_plural = 'External Sites' <NEW_LINE> <DEDENT> site = models.CharField(max_length=255) <NEW_LINE> matching = models.CharField(max_length=50) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return str(self.site)
Pre-approved external sites that can be linked to.
6259903e8e05c05ec3f6f77e
class IndexView(HTMLMixin, SimpleView): <NEW_LINE> <INDENT> methods = ['GET'] <NEW_LINE> @classmethod <NEW_LINE> def get_view_name(cls): <NEW_LINE> <INDENT> return 'index' <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_view_icon(cls): <NEW_LINE> <INDENT> return 'module-lab' <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_view_title(cls, **kwargs): <NEW_LINE> <INDENT> return lazy_gettext('Welcome to my laboratory') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_menu_title(cls, **kwargs): <NEW_LINE> <INDENT> return lazy_gettext('Lab')
View presenting the home page.
6259903e94891a1f408ba01a
class GetLatestDeliveryTaskAPI(APIView): <NEW_LINE> <INDENT> permission_classes = (AllowAny,) <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> if queue.read_data_from_queue() is False: <NEW_LINE> <INDENT> return Response({"status": False, "message": "No New Order", "data": None}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> result = DeliveryTask.objects.filter(id=queue.read_data_from_queue()["id"]).first() <NEW_LINE> if result.last_known_state == "CANCELLED" or result.last_known_state == "DECLINED": <NEW_LINE> <INDENT> queue.read_data_from_queue(acknowledge=True) <NEW_LINE> return Response({"status": False, "message": "No New Order", "data": None}, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response({"status": True, "message": None, "data": queue.read_data_from_queue()}, status=status.HTTP_200_OK)
This endpoint returns the latest delivery task and is used by the delivery boys to fetch it.
6259903e63b5f9789fe863b3
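A hypothetical client-side call (the route path is an assumption; only the response shape follows from the view above):

import requests

# The URL below is illustrative; the real route is defined in the project's urls.py.
resp = requests.get("http://localhost:8000/api/delivery-task/latest/")
payload = resp.json()
if payload["status"]:
    task = payload["data"]        # the queued delivery task returned by the view
else:
    print(payload["message"])     # e.g. "No New Order"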