code | docstring | _id |
---|---|---|
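Each row below pairs a tokenized Python snippet with its docstring and a record `_id`. The code column encodes structure with `<NEW_LINE>`, `<INDENT>`, and `<DEDENT>` markers instead of literal whitespace, a common preprocessing convention for code corpora. A minimal detokenizer sketch, assuming those three markers are the only structural tokens:

```python
def detokenize(code: str, indent: str = "    ") -> str:
    """Expand <NEW_LINE>/<INDENT>/<DEDENT> markers back into Python source."""
    depth = 0
    lines = []
    for chunk in code.split("<NEW_LINE>"):
        chunk = chunk.strip()
        # Indentation markers appear at the start of a logical line.
        while chunk.startswith("<INDENT>"):
            depth, chunk = depth + 1, chunk[len("<INDENT>"):].strip()
        while chunk.startswith("<DEDENT>"):
            depth, chunk = depth - 1, chunk[len("<DEDENT>"):].strip()
        if chunk:
            lines.append(indent * depth + chunk)
    return "\n".join(lines)

print(detokenize("class LexicalSeparator(LexicalToken): <NEW_LINE> <INDENT> pass"))
# class LexicalSeparator(LexicalToken):
#     pass
```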
class LexicalSeparator(LexicalToken): <NEW_LINE> <INDENT> pass | Signifies a generic separator, given by a ",".
Examples:
[1, 2, 3]
does add with number a, number b | 62599057a219f33f346c7d91 |
class VamasHeader(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.formatID = 'VAMAS Surface Chemical Analysis Standard Data Transfer Format 1988 May 4' <NEW_LINE> self.instituteID = 'Not Specified' <NEW_LINE> self.instrumentModelID = 'Not Specified' <NEW_LINE> self.operatorID = 'Not Specified' <NEW_LINE> self.experimentID = 'Not Specified' <NEW_LINE> self.noCommentLines = '2' <NEW_LINE> self.commentLines = 'Casa Info Follows CasaXPS Version 2.3.22PR1.0\n0' <NEW_LINE> self.expMode = 'NORM' <NEW_LINE> self.scanMode = 'REGULAR' <NEW_LINE> self.nrRegions = '0' <NEW_LINE> self.nrExpVar = '1' <NEW_LINE> self.expVarLabel = 'Exp Variable' <NEW_LINE> self.expVarUnit = 'd' <NEW_LINE> self.unknown3 = '0' <NEW_LINE> self.unknown4 = '0' <NEW_LINE> self.unknown5 = '0' <NEW_LINE> self.unknown6 = '0' <NEW_LINE> self.noBlocks = '1' | An object to store the Vamas header information. | 625990574428ac0f6e659ac7 |
class PoliticalUnit(): <NEW_LINE> <INDENT> def __init__(self, id, votes, minalgn=1, maxalgn=6): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> self._votes = votes <NEW_LINE> self._algn = None <NEW_LINE> self._minalgn = minalgn <NEW_LINE> self._maxalgn = maxalgn <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return (self._id) <NEW_LINE> <DEDENT> @property <NEW_LINE> def votes(self): <NEW_LINE> <INDENT> return (self._votes) <NEW_LINE> <DEDENT> @property <NEW_LINE> def algn(self): <NEW_LINE> <INDENT> return (self._algn) <NEW_LINE> <DEDENT> @property <NEW_LINE> def alignments(self): <NEW_LINE> <INDENT> return (self._algn.alignments) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> retstr = "ID: {0}, vote count: {1}".format(self._id, self._votes) <NEW_LINE> retstr += "\t" + str(self._algn) <NEW_LINE> return (retstr) <NEW_LINE> <DEDENT> def randomalignments(self, algns=None): <NEW_LINE> <INDENT> self._algn = algn.Alignment(algns, self._minalgn, self._maxalgn) <NEW_LINE> self._algn.randassgn(algns) | PoliticalUnit represents a single division of an overall political
body, e.g. a province or state within a nation, or a county or
district within a state. | 6259905799cbb53fe683246b |
class SynchronousTcpClient(Runner, unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.initialize(["../tools/reference/diagslave", "-m", "tcp", "-p", "12345"]) <NEW_LINE> self.client = ModbusClient(port=12345) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.client.close() <NEW_LINE> self.shutdown() | These are the integration tests for the synchronous
tcp client. | 62599057596a897236129076 |
class ParentFacilityUserResource(ModelResource): <NEW_LINE> <INDENT> def _get_facility_users(self, bundle): <NEW_LINE> <INDENT> zone_id = bundle.request.GET.get('zone_id') <NEW_LINE> zone_ids = bundle.request.GET.get('zone_ids') <NEW_LINE> facility_id = bundle.request.GET.get('facility_id') <NEW_LINE> group_id = bundle.request.GET.get('group_id') <NEW_LINE> facility_user_objects = [] <NEW_LINE> if group_id: <NEW_LINE> <INDENT> facility_user_objects = FacilityUser.objects.filter(group__id=group_id) <NEW_LINE> <DEDENT> elif facility_id: <NEW_LINE> <INDENT> facility_user_objects = FacilityUser.objects.filter(facility__id=facility_id) <NEW_LINE> <DEDENT> elif zone_id: <NEW_LINE> <INDENT> facility_user_objects = FacilityUser.objects.by_zone(get_object_or_None(Zone, id=zone_id)) <NEW_LINE> <DEDENT> elif zone_ids: <NEW_LINE> <INDENT> zone_ids = zone_ids.split(",") <NEW_LINE> facility_user_objects = [] <NEW_LINE> for zone_id in zone_ids: <NEW_LINE> <INDENT> facility_user_objects += FacilityUser.objects.by_zone(get_object_or_None(Zone, id=zone_id)) <NEW_LINE> <DEDENT> <DEDENT> facility_user_dict = {} <NEW_LINE> for user in facility_user_objects: <NEW_LINE> <INDENT> facility_user_dict[user.id] = user <NEW_LINE> <DEDENT> return facility_user_dict <NEW_LINE> <DEDENT> def create_response(self, request, data, response_class=HttpResponse, **response_kwargs): <NEW_LINE> <INDENT> response = super(ParentFacilityUserResource, self).create_response(request, data, response_class=response_class, **response_kwargs) <NEW_LINE> if response["Content-Type"].startswith("text/csv"): <NEW_LINE> <INDENT> params = ["%s-%s" % (k,str(v)[0:8]) for (k,v) in request.GET.items() if v and k not in ["format", "limit"]] <NEW_LINE> response["Content-Disposition"] = "filename=%s__%s__exported_at-%s.csv" % (request.path.strip("/").split("/")[-1], "__".join(params), datetime.now().strftime("%Y%m%d_%H%M%S")) <NEW_LINE> <DEDENT> return response | A class with helper methods for getting facility users for data export requests | 62599057baa26c4b54d50831 |
class LibvirtdSession(aexpect.Tail): <NEW_LINE> <INDENT> def _output_handler(self, line): <NEW_LINE> <INDENT> time_pattern = r'[-\d]+ [.:+\d]+ [:\d]+ ' <NEW_LINE> debug_pattern = time_pattern + 'debug :' <NEW_LINE> result = re.match(debug_pattern, line) <NEW_LINE> params = self.debug_params + (line,) <NEW_LINE> if self.debug_func and result: <NEW_LINE> <INDENT> self.debug_func(*params) <NEW_LINE> <DEDENT> info_pattern = time_pattern + 'info :' <NEW_LINE> result = re.match(info_pattern, line) <NEW_LINE> params = self.info_params + (line,) <NEW_LINE> if self.info_func and result: <NEW_LINE> <INDENT> self.info_func(*params) <NEW_LINE> <DEDENT> warning_pattern = time_pattern + 'warning :' <NEW_LINE> result = re.match(warning_pattern, line) <NEW_LINE> params = self.warning_params + (line,) <NEW_LINE> if self.warning_func and result: <NEW_LINE> <INDENT> self.warning_func(*params) <NEW_LINE> <DEDENT> error_pattern = time_pattern + 'error :' <NEW_LINE> result = re.match(error_pattern, line) <NEW_LINE> params = self.error_params + (line,) <NEW_LINE> if self.error_func and result: <NEW_LINE> <INDENT> self.error_func(*params) <NEW_LINE> <DEDENT> <DEDENT> def _termination_handler(self, status): <NEW_LINE> <INDENT> if self.was_running: <NEW_LINE> <INDENT> logging.debug('Restarting libvirtd service') <NEW_LINE> self.libvirtd.start() <NEW_LINE> <DEDENT> <DEDENT> def _wait_for_start(self, timeout=60): <NEW_LINE> <INDENT> def _check_start(): <NEW_LINE> <INDENT> virsh_cmd = "virsh list" <NEW_LINE> try: <NEW_LINE> <INDENT> utils.run(virsh_cmd, timeout=2) <NEW_LINE> return True <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return utils_misc.wait_for(_check_start, timeout=timeout) <NEW_LINE> <DEDENT> def __init__(self, debug_func=None, debug_params=(), info_func=None, info_params=(), warning_func=None, warning_params=(), error_func=None, error_params=(), ): <NEW_LINE> <INDENT> self.debug_func = debug_func <NEW_LINE> self.debug_params = debug_params <NEW_LINE> self.info_func = info_func <NEW_LINE> self.info_params = info_params <NEW_LINE> self.warning_func = warning_func <NEW_LINE> self.warning_params = warning_params <NEW_LINE> self.error_func = error_func <NEW_LINE> self.error_params = error_params <NEW_LINE> self.libvirtd = utils_libvirtd.Libvirtd() <NEW_LINE> self.was_running = self.libvirtd.is_running() <NEW_LINE> if self.was_running: <NEW_LINE> <INDENT> logging.debug('Stopping libvirtd service') <NEW_LINE> self.libvirtd.stop() <NEW_LINE> <DEDENT> aexpect.Tail.__init__( self, "LIBVIRT_DEBUG=1 /usr/sbin/libvirtd", output_func=self._output_handler, termination_func=self._termination_handler) | Class to generate a libvirtd process and handle all the logging info. | 625990572ae34c7f260ac673 |
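The `_output_handler` above routes each log line to a level-specific callback by matching a timestamp prefix followed by the level name. A quick illustration with `re` (the sample line is hypothetical, shaped to match the pattern):

```python
import re

time_pattern = r'[-\d]+ [.:+\d]+ [:\d]+ '
sample = "2023-01-01 12:00:00.000+0000 1234: debug : hypothetical libvirtd message"
print(bool(re.match(time_pattern + 'debug :', sample)))   # True
print(bool(re.match(time_pattern + 'error :', sample)))   # False
```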
class BOT_913: <NEW_LINE> <INDENT> pass | Demonic Project | 62599057ac7a0e7691f73a6d |
class NH3(raw_data_util.RawDataFile): <NEW_LINE> <INDENT> def __init__(self, directory, zero=-0.8, window=5): <NEW_LINE> <INDENT> self.directory = directory <NEW_LINE> self.files = listdir(directory) <NEW_LINE> self.zero = zero <NEW_LINE> self.window = window <NEW_LINE> <DEDENT> def _read_files(self, file): <NEW_LINE> <INDENT> df = pd.read_csv(self.directory + file, encoding='latin1', skiprows=1, delimiter=';') <NEW_LINE> df['dateTimes'] = pd.to_datetime((df['DateTime'] - 25569) * 86400.0, unit='s') <NEW_LINE> df['dateTimes'] = df['dateTimes'].dt.round('min') <NEW_LINE> df = df[['dateTimes', 'NH3 [ppb]']] <NEW_LINE> df.rename(columns={'NH3 [ppb]': 'NH3_raw'}, inplace=True) <NEW_LINE> return df <NEW_LINE> <DEDENT> def _subtract_offset(self): <NEW_LINE> <INDENT> self.data['NH3_raw'] -= self.zero <NEW_LINE> <DEDENT> def _rolling_mean(self): <NEW_LINE> <INDENT> self.data['NH3_ppb'] = self.data['NH3_raw'].rolling(self.window).mean() <NEW_LINE> <DEDENT> def load_data(self): <NEW_LINE> <INDENT> frames = [*map(self._read_files, self.files)] <NEW_LINE> data = pd.concat(frames) <NEW_LINE> self.data = data <NEW_LINE> self._subtract_offset() <NEW_LINE> self._rolling_mean() <NEW_LINE> <DEDENT> def plot_nh3(self, plot_raw=False, tval='dateTimes', tspan=None): <NEW_LINE> <INDENT> fig, ax = plt.subplots(figsize=(12, 6)) <NEW_LINE> if plot_raw: <NEW_LINE> <INDENT> ax.plot(self.data[[tval]], self.data[['NH3_raw']], color='red') <NEW_LINE> <DEDENT> ax.plot(self.data[[tval]], self.data[['NH3_ppb']], color='blue') <NEW_LINE> if tspan: <NEW_LINE> <INDENT> ax.set_xlim(tspan[0], tspan[1]) <NEW_LINE> <DEDENT> elif tval=='relTime': <NEW_LINE> <INDENT> ax.set_xlim(-1, 3) <NEW_LINE> <DEDENT> ax.set_ylim(-1.0) <NEW_LINE> ax.set_xlabel('Time [hr]') <NEW_LINE> ax.set_ylabel('NH$_{3}$ ppb') <NEW_LINE> fig.tight_layout() <NEW_LINE> return fig, ax | Class for processing raw nh3 monitor data files | 625990578da39b475be04779 |
@python_2_unicode_compatible <NEW_LINE> class Category(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=100) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name | Models must inherit from the models.Model class. | 6259905732920d7e50bc75d3 |
class ConfigValueAssociation(Base): <NEW_LINE> <INDENT> __tablename__ = 'config_value_association' <NEW_LINE> config_id = Column(ForeignKey('config.id'), primary_key=True) <NEW_LINE> config_value_id = Column(ForeignKey('config_value.id'), primary_key=True) <NEW_LINE> config_value = relationship("ConfigValue", lazy="joined", innerjoin=True) <NEW_LINE> def __init__(self, config_value): <NEW_LINE> <INDENT> self.config_value = config_value <NEW_LINE> <DEDENT> def new_version(self, session): <NEW_LINE> <INDENT> session.expire(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.config_value.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self.config_value.value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value): <NEW_LINE> <INDENT> if value != self.config_value.value: <NEW_LINE> <INDENT> self.config_data.elements[self.name] = ConfigValueAssociation( ConfigValue(self.config_value.name, value) ) | Relate ConfigData objects to associated ConfigValue objects. | 625990574a966d76dd5f047e |
class FakeHost(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def create_one_host(attrs=None): <NEW_LINE> <INDENT> if attrs is None: <NEW_LINE> <INDENT> attrs = {} <NEW_LINE> <DEDENT> host_info = { "id": 1, "service_id": 1, "host": "host1", "uuid": 'host-id-' + uuid.uuid4().hex, "vcpus": 10, "memory_mb": 100, "local_gb": 100, "vcpus_used": 5, "memory_mb_used": 50, "local_gb_used": 10, "hypervisor_type": "xen", "hypervisor_version": 1, "hypervisor_hostname": "devstack1", "free_ram_mb": 50, "free_disk_gb": 50, "current_workload": 10, "running_vms": 1, "cpu_info": "", "disk_available_least": 1, "host_ip": "10.10.10.10", "supported_instances": "", "metrics": "", "pci_stats": "", "extra_resources": "", "stats": "", "numa_topology": "", "ram_allocation_ratio": 1.0, "cpu_allocation_ratio": 1.0 } <NEW_LINE> host_info.update(attrs) <NEW_LINE> host = fakes.FakeResource( info=copy.deepcopy(host_info), loaded=True) <NEW_LINE> return host | Fake one host. | 625990576e29344779b01bd8 |
class HexExternalViewerCommand(sublime_plugin.TextCommand): <NEW_LINE> <INDENT> def run(self, edit): <NEW_LINE> <INDENT> viewer = common.hv_settings("external_viewer", {}).get("viewer", "") <NEW_LINE> if not exists(viewer): <NEW_LINE> <INDENT> error("Can't find the external hex viewer!") <NEW_LINE> return <NEW_LINE> <DEDENT> file_name = self.view.file_name() <NEW_LINE> if file_name is not None and exists(file_name): <NEW_LINE> <INDENT> cmd = [viewer] + common.hv_settings("external_viewer", {}).get("args", []) <NEW_LINE> for x in range(0, len(cmd)): <NEW_LINE> <INDENT> cmd[x] = cmd[x].replace("${FILE}", file_name) <NEW_LINE> <DEDENT> subprocess.Popen(cmd) <NEW_LINE> <DEDENT> <DEDENT> def is_enabled(self): <NEW_LINE> <INDENT> viewer = common.hv_settings("external_viewer", {}).get("viewer", "") <NEW_LINE> return exists(viewer) and self.view.file_name() is not None | Open hex data in external hex program. | 625990571f037a2d8b9e5332 |
class ExperienceBatcher(object): <NEW_LINE> <INDENT> def __init__(self, experience_collector, run_inference, get_q_values, state_normalize_factor): <NEW_LINE> <INDENT> self.experience_collector = experience_collector <NEW_LINE> self.run_inference = run_inference <NEW_LINE> self.get_q_values = get_q_values <NEW_LINE> self.state_normalize_factor = state_normalize_factor <NEW_LINE> <DEDENT> def get_batches_stepwise(self): <NEW_LINE> <INDENT> cache = [] <NEW_LINE> for batches in self.get_batches(): <NEW_LINE> <INDENT> cache.append(batches) <NEW_LINE> if len(cache) >= BATCHES_KEEP_CONSTANT: <NEW_LINE> <INDENT> for cached_batches in cache: <NEW_LINE> <INDENT> yield cached_batches <NEW_LINE> <DEDENT> cache = [] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_batches(self): <NEW_LINE> <INDENT> print("Initializing memory...") <NEW_LINE> memory = replay_memory.ReplayMemory() <NEW_LINE> while not memory.is_full(): <NEW_LINE> <INDENT> for experience in self.experience_collector.collect(play.random_strategy): <NEW_LINE> <INDENT> memory.add(experience) <NEW_LINE> <DEDENT> <DEDENT> memory.print_stats() <NEW_LINE> for i in itertools.count(): <NEW_LINE> <INDENT> if i < START_DECREASE_EPSILON_GAMES: <NEW_LINE> <INDENT> epsilon = 1.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> epsilon = max(MIN_EPSILON, 1.0 - (i - START_DECREASE_EPSILON_GAMES) / DECREASE_EPSILON_GAMES) <NEW_LINE> <DEDENT> strategy = play.make_epsilon_greedy_strategy(self.get_q_values, epsilon) <NEW_LINE> for experience in self.experience_collector.collect(strategy): <NEW_LINE> <INDENT> memory.add(experience) <NEW_LINE> batch_experiences = memory.sample(BATCH_SIZE) <NEW_LINE> yield self.experiences_to_batches(batch_experiences) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def experiences_to_batches(self, experiences): <NEW_LINE> <INDENT> batch_size = len(experiences) <NEW_LINE> state_batch = np.zeros((batch_size, 16), dtype=np.float) <NEW_LINE> next_state_batch = np.zeros((batch_size, 16), dtype=np.float) <NEW_LINE> actions = np.zeros((batch_size,), dtype=np.int) <NEW_LINE> reward_batch = np.zeros((batch_size,), dtype=np.float) <NEW_LINE> bad_action_batch = np.zeros((batch_size,), dtype=np.bool) <NEW_LINE> available_actions_batch = np.zeros((batch_size, 4), dtype=np.bool) <NEW_LINE> merged = np.zeros((batch_size,), dtype=np.float) <NEW_LINE> for i, experience in enumerate(experiences): <NEW_LINE> <INDENT> state_batch[i, :] = (experience.state.flatten() * self.state_normalize_factor) <NEW_LINE> next_state_batch[i, :] = (experience.next_state.flatten() * self.state_normalize_factor) <NEW_LINE> actions[i] = experience.action <NEW_LINE> reward_batch[i] = experience.reward <NEW_LINE> bad_action_batch[i] = experience.game_over or experience.not_available <NEW_LINE> available_actions_batch[i, experience.next_state_available_actions] = True <NEW_LINE> merged[i] = (np.count_nonzero(experience.state) - np.count_nonzero(experience.next_state) + 1) <NEW_LINE> <DEDENT> targets = target_batch_computer.TargetBatchComputer(self.run_inference).compute( reward_batch, bad_action_batch, next_state_batch, available_actions_batch, merged) <NEW_LINE> return state_batch, targets, actions | Builds experience batches using an ExperienceCollector. | 62599057a79ad1619776b584 |
class Linkinfo: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.project = None <NEW_LINE> self.package = None <NEW_LINE> self.xsrcmd5 = None <NEW_LINE> self.lsrcmd5 = None <NEW_LINE> self.srcmd5 = None <NEW_LINE> self.error = None <NEW_LINE> self.rev = None <NEW_LINE> self.baserev = None <NEW_LINE> <DEDENT> def read(self, linkinfo_node): <NEW_LINE> <INDENT> if linkinfo_node is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.project = linkinfo_node.get('project') <NEW_LINE> self.package = linkinfo_node.get('package') <NEW_LINE> self.xsrcmd5 = linkinfo_node.get('xsrcmd5') <NEW_LINE> self.lsrcmd5 = linkinfo_node.get('lsrcmd5') <NEW_LINE> self.srcmd5 = linkinfo_node.get('srcmd5') <NEW_LINE> self.error = linkinfo_node.get('error') <NEW_LINE> self.rev = linkinfo_node.get('rev') <NEW_LINE> self.baserev = linkinfo_node.get('baserev') <NEW_LINE> <DEDENT> def islink(self): <NEW_LINE> <INDENT> if self.xsrcmd5 or self.lsrcmd5: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def isexpanded(self): <NEW_LINE> <INDENT> if self.lsrcmd5 and not self.xsrcmd5: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def haserror(self): <NEW_LINE> <INDENT> if self.error: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.islink() and not self.isexpanded(): <NEW_LINE> <INDENT> return 'project %s, package %s, xsrcmd5 %s, rev %s' % (self.project, self.package, self.xsrcmd5, self.rev) <NEW_LINE> <DEDENT> elif self.islink() and self.isexpanded(): <NEW_LINE> <INDENT> if self.haserror(): <NEW_LINE> <INDENT> return 'broken link to project %s, package %s, srcmd5 %s, lsrcmd5 %s: %s' % (self.project, self.package, self.srcmd5, self.lsrcmd5, self.error) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'expanded link to project %s, package %s, srcmd5 %s, lsrcmd5 %s' % (self.project, self.package, self.srcmd5, self.lsrcmd5) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return 'None' | linkinfo metadata (which is part of the xml representing a directory)
| 62599058d6c5a102081e36ad |
class DomesticMelonOrder(AbstractMelonOrder): <NEW_LINE> <INDENT> order_type = "domestic" <NEW_LINE> def __init__(self, species, qty): <NEW_LINE> <INDENT> super().__init__(species, qty) <NEW_LINE> self.tax = .08 <NEW_LINE> <DEDENT> def get_total(self): <NEW_LINE> <INDENT> return super().get_total() <NEW_LINE> <DEDENT> def mark_shipped(self): <NEW_LINE> <INDENT> super().mark_shipped() | A melon order within the USA. | 6259905807d97122c4218238 |
class OBJECT_PT_Isometrify(bpy.types.Panel): <NEW_LINE> <INDENT> bl_label = "Isometrify" <NEW_LINE> bl_idname = "OBJECT_PT_Isometrifys" <NEW_LINE> bl_space_type = 'PROPERTIES' <NEW_LINE> bl_region_type = 'WINDOW' <NEW_LINE> bl_context = "render" <NEW_LINE> def draw(self, context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> scene = context.scene <NEW_LINE> layout.operator("render.isometrify", text="Generate Isometric Renders") <NEW_LINE> layout.prop_search(scene, "worldCamera", scene, "objects") <NEW_LINE> layout.prop_search(scene, "armature", scene, "objects") <NEW_LINE> layout.prop(scene, "SettleFrames") | Creates a Panel in the Object properties window | 62599058097d151d1a2c25f9 |
class CrossentropyNDTopK(torch.nn.CrossEntropyLoss): <NEW_LINE> <INDENT> def forward(self, inp, target): <NEW_LINE> <INDENT> target = target.long() <NEW_LINE> num_classes = inp.size()[1] <NEW_LINE> i0 = 1 <NEW_LINE> i1 = 2 <NEW_LINE> while i1 < len(inp.shape): <NEW_LINE> <INDENT> inp = inp.transpose(i0, i1) <NEW_LINE> i0 += 1 <NEW_LINE> i1 += 1 <NEW_LINE> <DEDENT> inp = inp.contiguous() <NEW_LINE> inp = inp.view(-1, num_classes) <NEW_LINE> with torch.no_grad(): <NEW_LINE> <INDENT> prob = torch.softmax(inp, -1) <NEW_LINE> <DEDENT> target = target.view(-1,) <NEW_LINE> return super(CrossentropyNDTopK, self).forward(inp, target) | Network has to have NO NONLINEARITY! | 62599058507cdc57c63a6333 |
class ProactorSelectorThreadWarning(RuntimeWarning): <NEW_LINE> <INDENT> pass | Warning class for notifying about the extra thread spawned by tornado
We automatically support proactor via tornado's AddThreadSelectorEventLoop | 62599058cc0a2c111447c57a |
class BitBoost(BaseEstimator): <NEW_LINE> <INDENT> numt = RawBitBoost.numt <NEW_LINE> numt_p = RawBitBoost.numt_p <NEW_LINE> __init__ = gen_init_fun(RawBitBoost.config_params, __file__) <NEW_LINE> def fit(self, X, y): <NEW_LINE> <INDENT> X, y = check_X_y(X, y, accept_sparse=False, dtype=self.numt, order="F") <NEW_LINE> nexamples, nfeatures = X.shape <NEW_LINE> self._bitboost = RawBitBoost(nfeatures, nexamples) <NEW_LINE> self._bitboost.set_config(self.get_params()) <NEW_LINE> self._bitboost.set_data(X, self.categorical_features) <NEW_LINE> self._bitboost.set_target(y) <NEW_LINE> self._bitboost.train() <NEW_LINE> self._is_fitted = True <NEW_LINE> return self <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> X = check_array(X, accept_sparse=False) <NEW_LINE> check_is_fitted(self, "_is_fitted") <NEW_LINE> self._bitboost.set_data(X) <NEW_LINE> return self._bitboost.predict() <NEW_LINE> <DEDENT> def _check_sklearn_estimator(self): <NEW_LINE> <INDENT> check_estimator(BitBoost) | BitBoost base estimator.
| 62599058009cb60464d02ac3 |
class ConfigurationMixIn(OptionsManagerMixIn, OptionsProviderMixIn): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if not args: <NEW_LINE> <INDENT> kwargs.setdefault('usage', '') <NEW_LINE> <DEDENT> kwargs.setdefault('quiet', 1) <NEW_LINE> OptionsManagerMixIn.__init__(self, *args, **kwargs) <NEW_LINE> OptionsProviderMixIn.__init__(self) <NEW_LINE> if not getattr(self, 'option_groups', None): <NEW_LINE> <INDENT> self.option_groups = [] <NEW_LINE> for option, optdict in self.options: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> gdef = (optdict['group'], '') <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not gdef in self.option_groups: <NEW_LINE> <INDENT> self.option_groups.append(gdef) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.register_options_provider(self, own_group=0) <NEW_LINE> <DEDENT> def register_options(self, options): <NEW_LINE> <INDENT> options_by_group = {} <NEW_LINE> for optname, optdict in options: <NEW_LINE> <INDENT> options_by_group.setdefault(optdict.get('group', self.name.upper()), []).append((optname, optdict)) <NEW_LINE> <DEDENT> for group, options in list(options_by_group.items()): <NEW_LINE> <INDENT> self.add_option_group(group, None, options, self) <NEW_LINE> <DEDENT> self.options += tuple(options) <NEW_LINE> <DEDENT> def load_defaults(self): <NEW_LINE> <INDENT> OptionsProviderMixIn.load_defaults(self) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return getattr(self.config, self.option_name(key)) <NEW_LINE> <DEDENT> except (OptionValueError, AttributeError): <NEW_LINE> <INDENT> raise KeyError(key) <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> self.set_option(self.option_name(key), value) <NEW_LINE> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return getattr(self.config, self.option_name(key)) <NEW_LINE> <DEDENT> except (OptionError, AttributeError): <NEW_LINE> <INDENT> return default | basic mixin for simple configurations which don't need the
manager / providers model | 6259905816aa5153ce401a72 |
class Execution(resource.Resource): <NEW_LINE> <INDENT> id = wtypes.text <NEW_LINE> workflow_name = wtypes.text <NEW_LINE> params = wtypes.text <NEW_LINE> state = wtypes.text <NEW_LINE> state_info = wtypes.text <NEW_LINE> input = wtypes.text <NEW_LINE> output = wtypes.text <NEW_LINE> created_at = wtypes.text <NEW_LINE> updated_at = wtypes.text <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> d = super(Execution, self).to_dict() <NEW_LINE> if d.get('input'): <NEW_LINE> <INDENT> d['input'] = json.loads(d['input']) <NEW_LINE> <DEDENT> if d.get('output'): <NEW_LINE> <INDENT> d['output'] = json.loads(d['output']) <NEW_LINE> <DEDENT> if d.get('params'): <NEW_LINE> <INDENT> d['params'] = json.loads(d['params']) <NEW_LINE> <DEDENT> return d <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, d): <NEW_LINE> <INDENT> e = cls() <NEW_LINE> for key, val in d.items(): <NEW_LINE> <INDENT> if hasattr(e, key): <NEW_LINE> <INDENT> if key in ('input', 'output') and val is not None: <NEW_LINE> <INDENT> val = json.dumps(val) <NEW_LINE> <DEDENT> setattr(e, key, val) <NEW_LINE> <DEDENT> <DEDENT> params = d.get('start_params', {}) <NEW_LINE> if params: <NEW_LINE> <INDENT> setattr(e, 'params', json.dumps(params)) <NEW_LINE> <DEDENT> return e <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def sample(cls): <NEW_LINE> <INDENT> return cls(id='123e4567-e89b-12d3-a456-426655440000', workflow_name='flow', state='SUCCESS', input='{}', output='{}', params='{"env": {"k1": "abc", "k2": 123}}', created_at='1970-01-01T00:00:00.000000', updated_at='1970-01-01T00:00:00.000000') | Execution resource. | 625990589c8ee82313040c52 |
class Writer(object): <NEW_LINE> <INDENT> def __init__(self, handle): <NEW_LINE> <INDENT> self.handle = handle <NEW_LINE> self.wrapper = TextWrapper(width=70) <NEW_LINE> <DEDENT> def write(self, seq_record): <NEW_LINE> <INDENT> if seq_record.description: <NEW_LINE> <INDENT> print(">%s" % seq_record.description, file=self.handle) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(">%s" % (seq_record.id, ), file=self.handle) <NEW_LINE> <DEDENT> print("\n".join(self.wrapper.wrap(seq_record.seq)), file=self.handle) | Writes `SeqRecord` objects in FASTA format to a given file handle. | 6259905876e4537e8c3f0b1b |
class SimTypeDouble(SimTypeFloat): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(SimTypeDouble, self).__init__(64) <NEW_LINE> <DEDENT> sort = claripy.FSORT_DOUBLE <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return 'double' | An IEEE754 double-precision floating point number | 6259905823849d37ff852653 |
class VIEW3D_TP_Yz_Mod_Mirror(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "tp_ops.mod_mirror_yz" <NEW_LINE> bl_label = "Mirror Yz" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> scene = bpy.context.scene <NEW_LINE> selected = bpy.context.selected_objects <NEW_LINE> object = bpy.ops.object <NEW_LINE> for obj in selected: <NEW_LINE> <INDENT> scene.objects.active = obj <NEW_LINE> object.modifier_add(type = "MIRROR") <NEW_LINE> for mod in obj.modifiers: <NEW_LINE> <INDENT> if mod.type == "MIRROR": <NEW_LINE> <INDENT> bpy.context.object.modifiers["Mirror"].use_x = False <NEW_LINE> bpy.context.object.modifiers["Mirror"].use_y = True <NEW_LINE> bpy.context.object.modifiers["Mirror"].use_z = True <NEW_LINE> bpy.context.object.modifiers["Mirror"].show_on_cage = True <NEW_LINE> bpy.context.object.modifiers["Mirror"].use_clip = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return {'FINISHED'} | Add a yz mirror modifier with cage and clipping | 62599058adb09d7d5dc0baf9 |
class MplRectangularROI(AbstractMplRoi): <NEW_LINE> <INDENT> def __init__(self, ax): <NEW_LINE> <INDENT> AbstractMplRoi.__init__(self, ax) <NEW_LINE> self._xi = None <NEW_LINE> self._yi = None <NEW_LINE> self._scrubbing = False <NEW_LINE> self.plot_opts = {'edgecolor': PATCH_COLOR, 'facecolor': PATCH_COLOR, 'alpha': 0.3} <NEW_LINE> self._patch = Rectangle((0., 0.), 1., 1.) <NEW_LINE> self._patch.set_zorder(100) <NEW_LINE> self._setup_patch() <NEW_LINE> <DEDENT> def _setup_patch(self): <NEW_LINE> <INDENT> self._ax.add_patch(self._patch) <NEW_LINE> self._patch.set_visible(False) <NEW_LINE> self._sync_patch() <NEW_LINE> <DEDENT> def _roi_factory(self): <NEW_LINE> <INDENT> return RectangularROI() <NEW_LINE> <DEDENT> def start_selection(self, event): <NEW_LINE> <INDENT> if event.inaxes != self._ax: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._xi = event.xdata <NEW_LINE> self._yi = event.ydata <NEW_LINE> if self._roi.defined() and self._roi.contains(event.xdata, event.ydata): <NEW_LINE> <INDENT> self._scrubbing = True <NEW_LINE> self._cx, self._cy = self._roi.center() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._scrubbing = False <NEW_LINE> self._roi.reset() <NEW_LINE> self._roi.update_limits(event.xdata, event.ydata, event.xdata, event.ydata) <NEW_LINE> <DEDENT> self._mid_selection = True <NEW_LINE> self._sync_patch() <NEW_LINE> <DEDENT> def update_selection(self, event): <NEW_LINE> <INDENT> if not self._mid_selection or event.inaxes != self._ax: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self._scrubbing: <NEW_LINE> <INDENT> self._roi.move_to(self._cx + event.xdata - self._xi, self._cy + event.ydata - self._yi) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._roi.update_limits(min(event.xdata, self._xi), min(event.ydata, self._yi), max(event.xdata, self._xi), max(event.ydata, self._yi)) <NEW_LINE> <DEDENT> self._sync_patch() <NEW_LINE> <DEDENT> def finalize_selection(self, event): <NEW_LINE> <INDENT> self._mid_selection = False <NEW_LINE> self._patch.set_visible(False) <NEW_LINE> self._draw() <NEW_LINE> <DEDENT> def _sync_patch(self): <NEW_LINE> <INDENT> if self._roi.defined(): <NEW_LINE> <INDENT> corner = self._roi.corner() <NEW_LINE> width = self._roi.width() <NEW_LINE> height = self._roi.height() <NEW_LINE> self._patch.set_xy(corner) <NEW_LINE> self._patch.set_width(width) <NEW_LINE> self._patch.set_height(height) <NEW_LINE> self._patch.set(**self.plot_opts) <NEW_LINE> self._patch.set_visible(True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._patch.set_visible(False) <NEW_LINE> <DEDENT> self._draw() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "MPL Rectangle: %s" % self._patch | A subclass of RectangularROI that also renders the ROI to a plot
*Attributes*:
plot_opts:
Dictionary instance
A dictionary of plot keywords that are passed to
the patch representing the ROI. These control
the visual properties of the ROI | 6259905832920d7e50bc75d5 |
class XmlWriter(WearNowXmlWriter): <NEW_LINE> <INDENT> def __init__(self, dbase, user, strip_photos, compress=1): <NEW_LINE> <INDENT> WearNowXmlWriter.__init__( self, dbase, strip_photos, compress, VERSION, user) <NEW_LINE> self.user = user <NEW_LINE> <DEDENT> def write(self, filename): <NEW_LINE> <INDENT> ret = 0 <NEW_LINE> try: <NEW_LINE> <INDENT> ret = WearNowXmlWriter.write(self, filename) <NEW_LINE> <DEDENT> except DbWriteFailure as msg: <NEW_LINE> <INDENT> (m1, m2) = msg.messages() <NEW_LINE> self.user.notify_error("%s\n%s" % (m1, m2)) <NEW_LINE> <DEDENT> return ret | Writes a database to the XML file. | 62599058fff4ab517ebcedb3 |
class Solution: <NEW_LINE> <INDENT> def levelOrder(self, root): <NEW_LINE> <INDENT> results = [] <NEW_LINE> q = [root] <NEW_LINE> if root is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> while q: <NEW_LINE> <INDENT> q_new = [] <NEW_LINE> results.append([n.val for n in q]) <NEW_LINE> for node in q: <NEW_LINE> <INDENT> if node.left: <NEW_LINE> <INDENT> q_new.append(node.left) <NEW_LINE> <DEDENT> if node.right: <NEW_LINE> <INDENT> q_new.append(node.right) <NEW_LINE> <DEDENT> <DEDENT> q = q_new <NEW_LINE> <DEDENT> return results | @param root: A Tree
@return: Level order traversal as a list of lists of integers | 6259905832920d7e50bc75d6 |
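A quick usage sketch of the level-order traversal above, with a hypothetical minimal `TreeNode` (the row itself doesn't define the tree class):

```python
class TreeNode:
    def __init__(self, val):
        self.val = val
        self.left = None
        self.right = None

root = TreeNode(3)
root.left, root.right = TreeNode(9), TreeNode(20)
root.right.left, root.right.right = TreeNode(15), TreeNode(7)

print(Solution().levelOrder(root))  # [[3], [9, 20], [15, 7]]
```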
class LegacyDeviceNoLongerSupportedMessage(Message): <NEW_LINE> <INDENT> __is_visible = False <NEW_LINE> def __init__(self) -> None: <NEW_LINE> <INDENT> super().__init__( text = I18N_CATALOG.i18nc("@info:status", "You are attempting to connect to a printer that is not " "running Ultimaker Connect. Please update the printer to the " "latest firmware."), title = I18N_CATALOG.i18nc("@info:title", "Update your printer"), lifetime = 10, message_type = Message.MessageType.WARNING ) <NEW_LINE> <DEDENT> def show(self) -> None: <NEW_LINE> <INDENT> if LegacyDeviceNoLongerSupportedMessage.__is_visible: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> super().show() <NEW_LINE> LegacyDeviceNoLongerSupportedMessage.__is_visible = True <NEW_LINE> <DEDENT> def hide(self, send_signal = True) -> None: <NEW_LINE> <INDENT> super().hide(send_signal) <NEW_LINE> LegacyDeviceNoLongerSupportedMessage.__is_visible = False | Message shown when trying to connect to a legacy printer device. | 62599058498bea3a75a590b6 |
class SlideText(object): <NEW_LINE> <INDENT> def __init__(self, content, from_file=False, text_size=12, text_color=None, font="Arial", horizontal="C", bold=False, underline=False, italics=False): <NEW_LINE> <INDENT> if from_file: <NEW_LINE> <INDENT> with open(content, 'r') as file: <NEW_LINE> <INDENT> self.content = file.read() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.content = content <NEW_LINE> <DEDENT> self.text_size = text_size <NEW_LINE> self.text_color = text_color <NEW_LINE> self.font = font <NEW_LINE> self.vertical_value = 50 <NEW_LINE> self.horizontal = horizontal <NEW_LINE> self.bold = bold <NEW_LINE> self.underline = underline <NEW_LINE> self.italics = italics | Represents the normal text inside the slide | 6259905815baa72349463522 |
class Review(models.Model): <NEW_LINE> <INDENT> response = models.ForeignKey(Response) <NEW_LINE> comment = models.TextField(blank=True,null=True) <NEW_LINE> creation_time = models.DateTimeField(auto_now_add=True) <NEW_LINE> complete = models.BooleanField(default=False) <NEW_LINE> @property <NEW_LINE> def review_template(self): <NEW_LINE> <INDENT> abstract() <NEW_LINE> <DEDENT> def review_template_input(self): <NEW_LINE> <INDENT> abstract() | When a user demonstrates a lack of understanding of the tagging
guidelines on a specific task, it is useful for a merger to flag
the item for the worker to review, showing both the tagger's
response and the merger's final selection and comment. Reviews
are created in the task_view/submit handler, and presented to
the user as additional 'things to complete' by the UI. | 62599058462c4b4f79dbcf94 |
class ValueIterationAgent(ValueEstimationAgent): <NEW_LINE> <INDENT> def __init__(self, mdp, discount = 0.9, iterations = 100): <NEW_LINE> <INDENT> self.mdp = mdp <NEW_LINE> self.discount = discount <NEW_LINE> self.iterations = iterations <NEW_LINE> self.values = util.Counter() <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> states = self.mdp.getStates() <NEW_LINE> for i in range(self.iterations): <NEW_LINE> <INDENT> valuesCopy = self.values.copy() <NEW_LINE> for state in states: <NEW_LINE> <INDENT> if not self.mdp.isTerminal(state): <NEW_LINE> <INDENT> action = self.computeActionFromValues(state) <NEW_LINE> qvalue = self.computeQValueFromValues(state, action) <NEW_LINE> valuesCopy[state] = qvalue <NEW_LINE> <DEDENT> <DEDENT> self.values = valuesCopy <NEW_LINE> <DEDENT> <DEDENT> def getValue(self, state): <NEW_LINE> <INDENT> return self.values[state] <NEW_LINE> <DEDENT> def computeQValueFromValues(self, state, action): <NEW_LINE> <INDENT> qvalue = 0.0 <NEW_LINE> transitionFunction = self.mdp.getTransitionStatesAndProbs(state, action) <NEW_LINE> for nextState, prob in transitionFunction: <NEW_LINE> <INDENT> qvalue += prob * (self.mdp.getReward(state, action, nextState) + (self.discount*self.getValue(nextState))) <NEW_LINE> <DEDENT> return qvalue <NEW_LINE> <DEDENT> def computeActionFromValues(self, state): <NEW_LINE> <INDENT> resultAction = None <NEW_LINE> actions = self.mdp.getPossibleActions(state) <NEW_LINE> if len(actions) == 0: <NEW_LINE> <INDENT> return resultAction <NEW_LINE> <DEDENT> qvalue = -math.inf <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> tempQValue = self.computeQValueFromValues(state, action) <NEW_LINE> if tempQValue > qvalue: <NEW_LINE> <INDENT> qvalue = tempQValue <NEW_LINE> resultAction = action <NEW_LINE> <DEDENT> <DEDENT> return resultAction <NEW_LINE> <DEDENT> def getPolicy(self, state): <NEW_LINE> <INDENT> return self.computeActionFromValues(state) <NEW_LINE> <DEDENT> def getAction(self, state): <NEW_LINE> <INDENT> return self.computeActionFromValues(state) <NEW_LINE> <DEDENT> def getQValue(self, state, action): <NEW_LINE> <INDENT> return self.computeQValueFromValues(state, action) | * Please read learningAgents.py before reading this.*
A ValueIterationAgent takes a Markov decision process
(see mdp.py) on initialization and runs value iteration
for a given number of iterations using the supplied
discount factor. | 62599058be8e80087fbc0612 |
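The update implemented by `computeQValueFromValues`, combined with the max over actions in the `__init__` sweep, is the standard Bellman backup: for iteration k,

```latex
Q_{k+1}(s, a) = \sum_{s'} P(s' \mid s, a)\,\bigl[R(s, a, s') + \gamma\, V_k(s')\bigr],
\qquad V_{k+1}(s) = \max_{a} Q_{k+1}(s, a)
```

where \gamma is the `discount` parameter and the backup is applied to every non-terminal state on each of the `iterations` sweeps.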
class TestReverseLetter(unittest.TestCase): <NEW_LINE> <INDENT> def test_reverse_letter(self): <NEW_LINE> <INDENT> self.assertEqual(reverse_letter("krishan"), "nahsirk") <NEW_LINE> self.assertEqual(reverse_letter("ultr53o?n"), "nortlu") <NEW_LINE> self.assertEqual(reverse_letter("ab23c"), "cba") <NEW_LINE> self.assertEqual(reverse_letter("krish21an"), "nahsirk") | Class to test 'reverse_letter' function | 6259905899cbb53fe683246e |
class SystemSettings(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def http_proxy(self): <NEW_LINE> <INDENT> system_proxies = urllib.getproxies() <NEW_LINE> proxy = system_proxies.get("http") <NEW_LINE> if proxy: <NEW_LINE> <INDENT> proxy = proxy.replace("http://", "", 1) <NEW_LINE> <DEDENT> return proxy | Handles loading the system settings. | 625990584428ac0f6e659acb |
class PhaseState(mutablerecords.Record('PhaseState', [ 'name', 'phase_record', 'measurements'])): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_descriptor(cls, phase_desc, notify_cb): <NEW_LINE> <INDENT> return cls( phase_desc.name, test_record.PhaseRecord.from_descriptor(phase_desc), {measurement.name: copy.deepcopy(measurement).set_notification_callback(notify_cb) for measurement in phase_desc.measurements}) <NEW_LINE> <DEDENT> def _asdict(self): <NEW_LINE> <INDENT> return { 'name': self.name, 'codeinfo': self.phase_record.codeinfo, 'start_time_millis': 0, 'attachments': { name: attachment.sha1 for name, attachment in self.attachments.iteritems() }, 'measurements': self.measurements, } <NEW_LINE> <DEDENT> @property <NEW_LINE> def result(self): <NEW_LINE> <INDENT> return self.phase_record.result <NEW_LINE> <DEDENT> @result.setter <NEW_LINE> def result(self, result): <NEW_LINE> <INDENT> self.phase_record.result = result <NEW_LINE> <DEDENT> @property <NEW_LINE> def attachments(self): <NEW_LINE> <INDENT> return self.phase_record.attachments <NEW_LINE> <DEDENT> def attach(self, name, data, mimetype=None): <NEW_LINE> <INDENT> if name in self.phase_record.attachments: <NEW_LINE> <INDENT> raise DuplicateAttachmentError('Duplicate attachment for %s' % name) <NEW_LINE> <DEDENT> if mimetype and not mimetypes.guess_extension(mimetype): <NEW_LINE> <INDENT> _LOG.warning('Unrecognized MIME type: "%s" for attachment "%s"', mimetype, name) <NEW_LINE> <DEDENT> self.phase_record.attachments[name] = test_record.Attachment(data, mimetype) <NEW_LINE> <DEDENT> def attach_from_file(self, filename, name=None, mimetype=None): <NEW_LINE> <INDENT> with open(filename, 'rb') as f: <NEW_LINE> <INDENT> self.attach( name if name is not None else filename, f.read(), mimetype=mimetype if mimetype is not None else mimetypes.guess_type( filename)[0]) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> @contextlib.contextmanager <NEW_LINE> def record_timing_context(self): <NEW_LINE> <INDENT> self.phase_record.start_time_millis = util.time_millis() <NEW_LINE> try: <NEW_LINE> <INDENT> yield <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> validated_measurements = { name: measurement for name, measurement in self.measurements.iteritems() if measurement.outcome is not measurements.Outcome.PARTIALLY_SET } <NEW_LINE> validated_measurements.update({ name: measurement.validate() for name, measurement in self.measurements.iteritems() if measurement.outcome is measurements.Outcome.PARTIALLY_SET }) <NEW_LINE> self.phase_record.measurements = validated_measurements <NEW_LINE> self.phase_record.end_time_millis = util.time_millis() | Data type encapsulating interesting information about a running phase.
Attributes:
phase_record: A test_record.PhaseRecord for the running phase.
attachments: Convenience accessor for phase_record.attachments.
measurements: A dict mapping measurement name to its declaration; this
dict can be passed to measurements.Collection to initialize a user-
facing Collection for setting measurements.
result: Convenience getter/setter for phase_record.result. | 62599058596a897236129078 |
class DirectionIdentity(object): <NEW_LINE> <INDENT> _prefix = 'target' <NEW_LINE> _revision = '2015-04-07' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.ietf._meta import _ietf_diffserv_target as meta <NEW_LINE> return meta._meta_table['DirectionIdentity']['meta_info'] | This is identity of traffic direction | 625990583c8af77a43b68a08 |
class UserFilterBackend(filters.BaseFilterBackend): <NEW_LINE> <INDENT> def filter_queryset(self, request, queryset, view): <NEW_LINE> <INDENT> user = request.query_params.get('user', None) <NEW_LINE> if user: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user_uuid = uuid.UUID(user) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise exceptions.ParseError(_('Invalid value in filter %(filter)s') % {'filter': 'user'}) <NEW_LINE> <DEDENT> queryset = queryset.filter(user__uuid=user_uuid) <NEW_LINE> <DEDENT> if not request.user.is_authenticated: <NEW_LINE> <INDENT> return queryset <NEW_LINE> <DEDENT> is_own = request.query_params.get('is_own', None) <NEW_LINE> if is_own is not None: <NEW_LINE> <INDENT> is_own = is_own.lower() <NEW_LINE> if is_own in ('true', 't', 'yes', 'y', '1'): <NEW_LINE> <INDENT> queryset = queryset.filter(user=request.user) <NEW_LINE> <DEDENT> elif is_own in ('false', 'f', 'no', 'n', '0'): <NEW_LINE> <INDENT> queryset = queryset.exclude(user=request.user) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exceptions.ParseError(_('Invalid value in filter %(filter)s') % {'filter': 'is_own'}) <NEW_LINE> <DEDENT> <DEDENT> return queryset | Filter by user uuid and by is_own. | 6259905807d97122c421823b |
class PasswordInput(TextInput): <NEW_LINE> <INDENT> pass | Single-line password input widget.
This widget hides the input value so that it is not visible in the browser.
.. warning::
Secure transmission of the password to Bokeh server application code
requires configuring the server for SSL (i.e. HTTPS) termination. | 625990587b25080760ed87a7 |
class IgnoreRuleSet: <NEW_LINE> <INDENT> def __init__(self, name, uri): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.uri = uri <NEW_LINE> self.rules = [] <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> def build_rules(s): <NEW_LINE> <INDENT> for k, v in s.items(): <NEW_LINE> <INDENT> self.rules.append(IgnoreRule(self, k, v.get('rationale', '???'), v.get('type', 'ignore'), v.get('hostmasks', []), v.get('accountnames', []), v.get('patterns', []))) <NEW_LINE> <DEDENT> <DEDENT> def test_load_cb(payload): <NEW_LINE> <INDENT> build_rules(yaml.load(payload)) <NEW_LINE> <DEDENT> if weechat_is_fake: <NEW_LINE> <INDENT> d = open(self.uri, 'r') <NEW_LINE> return test_load_cb(d.read()) <NEW_LINE> <DEDENT> <DEDENT> def install(self): <NEW_LINE> <INDENT> [r.install() for r in self.rules] <NEW_LINE> <DEDENT> def uninstall(self): <NEW_LINE> <INDENT> [r.uninstall() for r in self.rules] | A downloaded collection of rules.
Handles merging updates vs current state, and so on. | 6259905855399d3f05627aaf |
class prepend(Stream): <NEW_LINE> <INDENT> def __call__(self, iterator): <NEW_LINE> <INDENT> return itertools.chain(self.iterator, iterator) | Inject values at the beginning of the input stream.
>>> seq(7, 7) >> prepend(xrange(0, 10, 2)) >> item[:10]
[0, 2, 4, 6, 8, 7, 14, 21, 28, 35] | 6259905801c39578d7f141ff |
class KNearestNeighbor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def train(self, X, y): <NEW_LINE> <INDENT> self.X_train = X <NEW_LINE> self.y_train = y <NEW_LINE> <DEDENT> def predict(self, X, k=1, num_loops=0): <NEW_LINE> <INDENT> if num_loops == 0: <NEW_LINE> <INDENT> dists = self.compute_distances_no_loops(X) <NEW_LINE> <DEDENT> elif num_loops == 1: <NEW_LINE> <INDENT> dists = self.compute_distances_one_loop(X) <NEW_LINE> <DEDENT> elif num_loops == 2: <NEW_LINE> <INDENT> dists = self.compute_distances_two_loops(X) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Invalid value %d for num_loops' % num_loops) <NEW_LINE> <DEDENT> return self.predict_labels(dists, k=k) <NEW_LINE> <DEDENT> def compute_distances_two_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> for j in xrange(num_train): <NEW_LINE> <INDENT> dists[i, j] = np.linalg.norm(X[i]-self.X_train[j], 2) <NEW_LINE> <DEDENT> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_one_loop(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> dists[i] = np.sqrt(np.sum(np.square(X[i] - self.X_train), axis=1)) <NEW_LINE> <DEDENT> return dists <NEW_LINE> <DEDENT> def compute_distances_no_loops(self, X): <NEW_LINE> <INDENT> num_test = X.shape[0] <NEW_LINE> num_train = self.X_train.shape[0] <NEW_LINE> dists = np.zeros((num_test, num_train)) <NEW_LINE> test_norm = np.sum(np.square(X), axis=1) <NEW_LINE> train_norm = np.sum(np.square(self.X_train), axis=1) <NEW_LINE> cross_prod = -2*X.dot(self.X_train.T) <NEW_LINE> dists = np.sqrt(cross_prod + test_norm.reshape((-1,1)) + train_norm.reshape((1,-1))) <NEW_LINE> return dists <NEW_LINE> <DEDENT> def predict_labels(self, dists, k=1): <NEW_LINE> <INDENT> num_test = dists.shape[0] <NEW_LINE> y_pred = np.zeros(num_test) <NEW_LINE> for i in xrange(num_test): <NEW_LINE> <INDENT> closest_y = [] <NEW_LINE> closest_y = self.y_train[np.argsort(dists[i, :])][: k] <NEW_LINE> import collections <NEW_LINE> y_pred[i] = collections.Counter(closest_y).most_common(1)[0][0] <NEW_LINE> <DEDENT> return y_pred | a kNN classifier with L2 distance | 62599058a17c0f6771d5d669 |
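`compute_distances_no_loops` vectorizes the distance matrix via the expansion ||x - y||^2 = ||x||^2 + ||y||^2 - 2 x·y, so a single matrix product plus broadcasting replaces both loops. A small numpy check of the identity:

```python
import numpy as np

X = np.random.rand(3, 5)  # 3 "test" points
Y = np.random.rand(4, 5)  # 4 "train" points

# ||x||^2 as a column + ||y||^2 as a row - 2 * X @ Y.T, then elementwise sqrt.
d_fast = np.sqrt((X**2).sum(1)[:, None] + (Y**2).sum(1)[None, :] - 2 * X @ Y.T)
d_slow = np.sqrt(((X[:, None, :] - Y[None, :, :]) ** 2).sum(-1))
print(np.allclose(d_fast, d_slow))  # True
```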
class VersionsTest(base.IsolatedUnitTest): <NEW_LINE> <INDENT> def test_get_version_list(self): <NEW_LINE> <INDENT> req = webob.Request.blank('/', base_url="http://0.0.0.0:9292/") <NEW_LINE> req.accept = "application/json" <NEW_LINE> conf = utils.TestConfigOpts({ 'bind_host': '0.0.0.0', 'bind_port': 9292 }) <NEW_LINE> res = req.get_response(versions.Controller(conf)) <NEW_LINE> self.assertEqual(res.status_int, 300) <NEW_LINE> self.assertEqual(res.content_type, "application/json") <NEW_LINE> results = json.loads(res.body)["versions"] <NEW_LINE> expected = [ { "id": "v1.1", "status": "CURRENT", "links": [ { "rel": "self", "href": "http://0.0.0.0:9292/v1/"}]}, { "id": "v1.0", "status": "SUPPORTED", "links": [ { "rel": "self", "href": "http://0.0.0.0:9292/v1/"}]}] <NEW_LINE> self.assertEqual(results, expected) <NEW_LINE> <DEDENT> def test_client_handles_versions(self): <NEW_LINE> <INDENT> api_client = client.Client("0.0.0.0", doc_root="") <NEW_LINE> self.assertRaises(exception.MultipleChoices, api_client.get_images) | Test the version information returned from
the API service | 625990588da39b475be0477d |
class RePinForm(Form): <NEW_LINE> <INDENT> pin = forms.IntegerField(widget=forms.HiddenInput()) <NEW_LINE> def clean_pin(self): <NEW_LINE> <INDENT> pin_id = self.cleaned_data['pin'] <NEW_LINE> try: <NEW_LINE> <INDENT> pin = Pin._default_manager.get(id=pin_id) <NEW_LINE> <DEDENT> except Pin.DoesNotExist: <NEW_LINE> <INDENT> raise forms.ValidationError( "Invalid pin" ) <NEW_LINE> <DEDENT> return pin | Pin repinning form. | 62599058adb09d7d5dc0bafb |
class UnbiasedRandomController(): <NEW_LINE> <INDENT> def __init__(self,speed_limit=0,speed_limit_buffer=0,accel_range=[-5,5],yaw_rate_range=[0,0],**kwargs): <NEW_LINE> <INDENT> self.accel_range = accel_range <NEW_LINE> self.yaw_rate_range = yaw_rate_range <NEW_LINE> <DEDENT> def setup(self,**kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def selectAction(self,state,lim_accel_range,lim_yaw_rate_range): <NEW_LINE> <INDENT> accel_range = list(self.accel_range) <NEW_LINE> yaw_rate_range = list(self.yaw_rate_range) <NEW_LINE> if lim_accel_range[0] is not None and lim_accel_range[0]>accel_range[0]: accel_range[0] = lim_accel_range[0] <NEW_LINE> if lim_accel_range[1] is not None and lim_accel_range[1]<accel_range[1]: accel_range[1] = lim_accel_range[1] <NEW_LINE> if lim_yaw_rate_range[0] is not None and lim_yaw_rate_range[0]>yaw_rate_range[0]: yaw_rate_range[0] = lim_yaw_rate_range[0] <NEW_LINE> if lim_yaw_rate_range[1] is not None and lim_yaw_rate_range[1]<yaw_rate_range[1]: yaw_rate_range[1] = lim_yaw_rate_range[1] <NEW_LINE> if accel_range[0]!=0 and accel_range[1]/accel_range[0] <0: <NEW_LINE> <INDENT> if random.random()<abs(accel_range[0])/(abs(accel_range[1]-accel_range[0])): <NEW_LINE> <INDENT> accel = np.random.uniform(0,accel_range[1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> accel = np.random.uniform(accel_range[0],0) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> accel = np.random.uniform(accel_range[0],accel_range[1]) <NEW_LINE> <DEDENT> if yaw_rate_range[0]!=0 and yaw_rate_range[1]/yaw_rate_range[0]<0: <NEW_LINE> <INDENT> if random.random()<abs(yaw_rate_range[0])/(abs(yaw_rate_range[1]-yaw_rate_range[0])): <NEW_LINE> <INDENT> yaw_rate = np.random.uniform(0,yaw_rate_range[1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yaw_rate = np.random.uniform(yaw_rate_range[0],0) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> yaw_rate = np.random.uniform(yaw_rate_range[0],yaw_rate_range[1]) <NEW_LINE> <DEDENT> return accel,yaw_rate | Random controller that samples actions in such a way that the expected value of the acceleration and yaw rate is 0
i.e. if the magnitude of the lower bound of the range is greater than the magnitude of the upper bound (and they have opposite sign)
then we increase the probability of sampling a value greater than 0 to capture this. | 62599058379a373c97d9a5b6 |
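The split-range sampling in `selectAction` is what makes the controller unbiased: when the range straddles zero, the positive side is chosen with probability |lo|/(hi - lo), which exactly cancels the asymmetry of the bounds. A quick Monte Carlo check of the zero-mean property:

```python
import numpy as np

rng = np.random.default_rng(0)
lo, hi, n = -10.0, 5.0, 200_000

# With probability |lo|/(hi - lo) sample from [0, hi], otherwise from [lo, 0].
pick_pos = rng.random(n) < abs(lo) / (hi - lo)
samples = np.where(pick_pos, rng.uniform(0, hi, n), rng.uniform(lo, 0, n))
print(round(samples.mean(), 2))  # ~0.0
```

Analytically: P(pos)·E[pos] + P(neg)·E[neg] = (2/3)(2.5) + (1/3)(-5) = 0.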
class ObservationFactory(factory.django.DjangoModelFactory): <NEW_LINE> <INDENT> satellite = factory.SubFactory(SatelliteFactory) <NEW_LINE> author = factory.SubFactory(UserFactory) <NEW_LINE> start = fuzzy.FuzzyDateTime(now() - timedelta(days=3), now() + timedelta(days=3)) <NEW_LINE> end = factory.LazyAttribute( lambda x: x.start + timedelta(hours=random.randint(1, 8)) ) <NEW_LINE> transponder = factory.SubFactory(TransponderFactory) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Observation | Observation model factory. | 62599058dc8b845886d54b57 |
class Logistics(StateModel,TimeModel,SortModel): <NEW_LINE> <INDENT> name = models.CharField(default=u'',max_length=128,verbose_name=u'名称') <NEW_LINE> address = models.CharField(default=u'',max_length=512,verbose_name=u'地址') <NEW_LINE> contact = models.CharField(default=u'',max_length=20,verbose_name=u'联系人') <NEW_LINE> tel = models.CharField(default=u'',max_length=20,verbose_name=u'电话') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = u'物流公司' <NEW_LINE> verbose_name_plural = u'b-物流公司' <NEW_LINE> app_label = 'system' | Logistics company | 62599058d6c5a102081e36b1 |
class NotStoppedError(Exception): <NEW_LINE> <INDENT> def __init__(self, message=None): <NEW_LINE> <INDENT> super(NotStoppedError, self).__init__(message) | Indicates a call to an object that should have been stopped before. | 625990587cff6e4e811b6fd4 |
class ExactInference(InferenceModule): <NEW_LINE> <INDENT> def initializeUniformly(self, gameState): <NEW_LINE> <INDENT> self.beliefs = DiscreteDistribution() <NEW_LINE> for p in self.legalPositions: <NEW_LINE> <INDENT> self.beliefs[p] = 1.0 <NEW_LINE> <DEDENT> self.beliefs.normalize() <NEW_LINE> <DEDENT> def observeUpdate(self, observation, gameState): <NEW_LINE> <INDENT> distance = DiscreteDistribution() <NEW_LINE> pacmanPosition = gameState.getPacmanPosition() <NEW_LINE> jailPosition = self.getJailPosition() <NEW_LINE> for ghostpositons in self.allPositions: <NEW_LINE> <INDENT> prob = self.getObservationProb(observation, pacmanPosition, ghostpositons, jailPosition) <NEW_LINE> aaa = prob * self.beliefs[ghostpositons] <NEW_LINE> distance[ghostpositons] = aaa <NEW_LINE> <DEDENT> distance.normalize() <NEW_LINE> self.beliefs = distance <NEW_LINE> <DEDENT> def elapseTime(self, gameState): <NEW_LINE> <INDENT> copy = self.beliefs.copy() <NEW_LINE> distributions = dict() <NEW_LINE> for position in self.allPositions: <NEW_LINE> <INDENT> newposdist = self.getPositionDistribution(gameState, position) <NEW_LINE> distributions[position] = newposdist <NEW_LINE> <DEDENT> for pos in self.allPositions: <NEW_LINE> <INDENT> probability = 0 <NEW_LINE> for firstPosition in self.allPositions: <NEW_LINE> <INDENT> probability = probability + self.beliefs[firstPosition] * distributions[firstPosition][pos] <NEW_LINE> <DEDENT> copy[pos] = probability <NEW_LINE> <DEDENT> self.beliefs = copy <NEW_LINE> <DEDENT> def getBeliefDistribution(self): <NEW_LINE> <INDENT> return self.beliefs | The exact dynamic inference module should use forward algorithm updates to
compute the exact belief function at each time step. | 6259905882261d6c52730993 |
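The two methods above implement the standard forward-algorithm recursion: `elapseTime` applies the time update, and `observeUpdate` reweights by the sensor model before normalizing,

```latex
B'(s') = \sum_{s} P(s' \mid s)\, B(s),
\qquad B(s') \propto P(e \mid s')\, B'(s')
```

where B is the belief distribution over ghost positions and e is the current noisy distance reading.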
class Chessboard(Feature): <NEW_LINE> <INDENT> _spcorners = None <NEW_LINE> _dims = None <NEW_LINE> def __init__(self, img, dim, subpixelscorners): <NEW_LINE> <INDENT> self._dims = dim <NEW_LINE> self._spcorners = subpixelscorners <NEW_LINE> x = np.average(np.array(self._spcorners)[:, 0]) <NEW_LINE> y = np.average(np.array(self._spcorners)[:, 1]) <NEW_LINE> posdiagsorted = sorted(self._spcorners, key=lambda corner: corner[0] + corner[1]) <NEW_LINE> negdiagsorted = sorted(self._spcorners, key=lambda corner: corner[0] - corner[1]) <NEW_LINE> points = (posdiagsorted[0], negdiagsorted[-1], posdiagsorted[-1], negdiagsorted[0]) <NEW_LINE> super(Chessboard, self).__init__(img, x, y, points) <NEW_LINE> <DEDENT> def draw(self, no_need_color=None): <NEW_LINE> <INDENT> cv.DrawChessboardCorners(self._image.bitmap, self._dims, self._spcorners, 1) <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> sqform = spsd.squareform(spsd.pdist(self._points, "euclidean")) <NEW_LINE> a = sqform[0][1] <NEW_LINE> b = sqform[1][2] <NEW_LINE> c = sqform[2][3] <NEW_LINE> d = sqform[3][0] <NEW_LINE> p = sqform[0][2] <NEW_LINE> q = sqform[1][3] <NEW_LINE> s = (a + b + c + d) / 2.0 <NEW_LINE> return 2 * sqrt((s - a) * (s - b) * (s - c) * (s - d) - (a * c + b * d + p * q) * (a * c + b * d - p * q) / 4) | Used for Calibration, it uses a chessboard to calibrate from pixels
to real world measurements. | 62599058097d151d1a2c25fd |
class ManholeTests(TestCase): <NEW_LINE> <INDENT> def test_interface(self): <NEW_LINE> <INDENT> self.assertTrue(verifyObject(ITerminalServerFactory, TerminalManhole())) <NEW_LINE> <DEDENT> def test_buildTerminalProtocol(self): <NEW_LINE> <INDENT> store = Store() <NEW_LINE> factory = TerminalManhole(store=store) <NEW_LINE> viewer = object() <NEW_LINE> protocol = factory.buildTerminalProtocol(viewer) <NEW_LINE> self.assertTrue(isinstance(protocol, ColoredManhole)) <NEW_LINE> self.assertEqual(protocol.namespace, {'db': store, 'viewer': viewer}) | Tests for L{TerminalManhole} which provides an L{ITerminalServerFactory}
for a protocol which gives a user an in-process Python REPL. | 6259905845492302aabfda69 |
class TrustchainBaseEndpoint(resource.Resource): <NEW_LINE> <INDENT> def __init__(self, session): <NEW_LINE> <INDENT> resource.Resource.__init__(self) <NEW_LINE> self.session = session | This class represents the base class of the trustchain community. | 6259905807f4c71912bb09cc |
class TrainingParams(object): <NEW_LINE> <INDENT> def __init__(self, flags=None): <NEW_LINE> <INDENT> if flags: <NEW_LINE> <INDENT> self.learning_rate = flags.learning_rate <NEW_LINE> self.lr_decay_factor = flags.learning_rate_decay_factor <NEW_LINE> self.max_gradient_norm = flags.max_gradient_norm <NEW_LINE> self.batch_size = flags.batch_size <NEW_LINE> self.size = flags.size <NEW_LINE> self.num_layers = flags.num_layers <NEW_LINE> self.steps_per_checkpoint = flags.steps_per_checkpoint <NEW_LINE> self.max_steps = flags.max_steps <NEW_LINE> self.optimizer = flags.optimizer <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.learning_rate = 0.5 <NEW_LINE> self.lr_decay_factor = 0.99 <NEW_LINE> self.max_gradient_norm = 5.0 <NEW_LINE> self.batch_size = 64 <NEW_LINE> self.size = 64 <NEW_LINE> self.num_layers = 2 <NEW_LINE> self.steps_per_checkpoint = 200 <NEW_LINE> self.max_steps = 0 <NEW_LINE> self.optimizer = "sgd" | Class with training parameters. | 625990587b25080760ed87a8 |
class Home(views.View): <NEW_LINE> <INDENT> def dispatch_request(self): <NEW_LINE> <INDENT> return Response( render_template('home.html'), status=200 ) | Load our home page
N.B. - could be served as static | 625990584e4d56256637399a
class ImageModel(ZooModel): <NEW_LINE> <INDENT> def __init__(self, bigdl_type="float"): <NEW_LINE> <INDENT> super(ImageModel, self).__init__(None, bigdl_type) <NEW_LINE> <DEDENT> def predict_image_set(self, image, configure=None): <NEW_LINE> <INDENT> res = callBigDlFunc(self.bigdl_type, "imageModelPredict", self.value, image, configure) <NEW_LINE> return ImageSet(res) <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> config = callBigDlFunc(self.bigdl_type, "getImageConfig", self.value) <NEW_LINE> return ImageConfigure(jvalue=config) | The basic class for image model. | 62599058b57a9660fecd300d |
class Graph: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._nodes = set() <NEW_LINE> self._edges = [] <NEW_LINE> self._cache = {} <NEW_LINE> <DEDENT> def add_edge(self, from_node, to_node): <NEW_LINE> <INDENT> self._edges.append(Edge(from_node, to_node)) <NEW_LINE> from_node.add_connection(to_node) <NEW_LINE> <DEDENT> def create_from_dict(self, data): <NEW_LINE> <INDENT> for key, value in data.items(): <NEW_LINE> <INDENT> if key not in self._cache: <NEW_LINE> <INDENT> from_node = Node(key) <NEW_LINE> self._cache[key] = from_node <NEW_LINE> self._nodes.add(from_node) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> from_node = self._cache[key] <NEW_LINE> <DEDENT> for item in value: <NEW_LINE> <INDENT> if item not in self._cache: <NEW_LINE> <INDENT> to_node = Node(item) <NEW_LINE> self._cache[item] = to_node <NEW_LINE> self._nodes.add(to_node) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> to_node = self._cache[item] <NEW_LINE> <DEDENT> self.add_edge(from_node, to_node) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def traverse_df(self): <NEW_LINE> <INDENT> for node in self._nodes: <NEW_LINE> <INDENT> if not node.visited(): <NEW_LINE> <INDENT> self._traverse_df(node) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _traverse_df(self, node): <NEW_LINE> <INDENT> node.visit() <NEW_LINE> for connection in node.get_connections(): <NEW_LINE> <INDENT> if not connection.visited(): <NEW_LINE> <INDENT> self._traverse_df(connection) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def is_connection(self, from_node, to_node): <NEW_LINE> <INDENT> return self._is_connection(from_node, to_node, None) <NEW_LINE> <DEDENT> def _is_connection(self, from_node, to_node, path): <NEW_LINE> <INDENT> if path is None: <NEW_LINE> <INDENT> path = [from_node] <NEW_LINE> <DEDENT> if from_node.is_leaf() and from_node.eq(to_node): <NEW_LINE> <INDENT> path.append(to_node) <NEW_LINE> return path <NEW_LINE> <DEDENT> for node in from_node.get_connections(): <NEW_LINE> <INDENT> if to_node.eq(path[-1]): <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> path.append(node) <NEW_LINE> if to_node.eq(node): <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._is_connection(node, to_node, path) <NEW_LINE> <DEDENT> <DEDENT> if not to_node.eq(path[-1]): <NEW_LINE> <INDENT> path.pop() <NEW_LINE> <DEDENT> return path | Graph structure | 6259905855399d3f05627ab2
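A usage sketch for the Graph above; it assumes the Node and Edge helpers it references (providing visit/visited/add_connection/get_connections) are defined in the same module:

g = Graph()
g.create_from_dict({'a': ['b', 'c'], 'b': ['d'], 'c': [], 'd': []})
g.traverse_df()  # depth-first traversal marks every reachable node as visited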
class UpcomingEventRetriever: <NEW_LINE> <INDENT> def __init__(self, icalendar_url): <NEW_LINE> <INDENT> self.icalendar_url = icalendar_url <NEW_LINE> self.error_count = 0 <NEW_LINE> self.refresh_calendar() <NEW_LINE> <DEDENT> def refresh_calendar(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> calendar_contents = urllib.request.urlopen(self.icalendar_url).read() <NEW_LINE> self.calendar = Calendar.from_ical(calendar_contents) <NEW_LINE> self.error_count = 0 <NEW_LINE> <DEDENT> except URLError: <NEW_LINE> <INDENT> if self.error_count < 3: <NEW_LINE> <INDENT> self.error_count += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_upcoming_event_list(self, timedelta_in_days): <NEW_LINE> <INDENT> now = datetime.now(timezone.utc) <NEW_LINE> end_date = now + timedelta(days=timedelta_in_days) <NEW_LINE> events = [] <NEW_LINE> for component in self.calendar.walk(): <NEW_LINE> <INDENT> if component.name == "VEVENT": <NEW_LINE> <INDENT> summary = component.get('summary') <NEW_LINE> description = component.get('description') <NEW_LINE> dtstart = component.get('dtstart').dt <NEW_LINE> if dtstart > now and dtstart < end_date: <NEW_LINE> <INDENT> events.append(UpcomingEvent(summary, description, dtstart)) <NEW_LINE> <DEDENT> elif component.get('RRULE'): <NEW_LINE> <INDENT> rruleset = rrule.rruleset() <NEW_LINE> rruleset.rrule( rrule.rrulestr(component.get('RRULE').to_ical().decode('utf-8'), dtstart=dtstart) ) <NEW_LINE> recurring_dates = rruleset.between(now, end_date) <NEW_LINE> if len(recurring_dates) > 0: <NEW_LINE> <INDENT> events.append(UpcomingEvent(summary, description, recurring_dates[0])) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return sorted(events, key=lambda event: event.dtstart) | Retrieves upcoming events from an icalendar url | 6259905824f1403a92686398 |
class TestPresentation(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.prs = Presentation() <NEW_LINE> <DEDENT> def test__blob_rewrites_sldIdLst(self): <NEW_LINE> <INDENT> relationships = RelationshipCollectionBuilder() .with_tuple_targets(2, RT_SLIDEMASTER) .with_tuple_targets(3, RT_SLIDE) .with_ordering(RT_SLIDEMASTER, RT_SLIDE) .build() <NEW_LINE> prs = Presentation() <NEW_LINE> prs._relationships = relationships <NEW_LINE> prs.partname = '/ppt/presentation.xml' <NEW_LINE> path = os.path.join(thisdir, 'test_files/presentation.xml') <NEW_LINE> prs._element = etree.parse(path).getroot() <NEW_LINE> blob = prs._blob <NEW_LINE> presentation = etree.fromstring(blob) <NEW_LINE> sldIds = presentation.xpath('./p:sldIdLst/p:sldId', namespaces=nsmap) <NEW_LINE> expected = ['rId3', 'rId4', 'rId5'] <NEW_LINE> actual = [sldId.get(qtag('r:id')) for sldId in sldIds] <NEW_LINE> msg = "expected ordering %s, got %s" % (expected, actual) <NEW_LINE> self.assertEqual(expected, actual, msg) <NEW_LINE> <DEDENT> def test_slidemasters_property_empty_on_construction(self): <NEW_LINE> <INDENT> self.assertIsSizedProperty(self.prs, 'slidemasters', 0) <NEW_LINE> <DEDENT> def test_slidemasters_correct_length_after_pkg_open(self): <NEW_LINE> <INDENT> pkg = Package(test_pptx_path) <NEW_LINE> prs = pkg.presentation <NEW_LINE> slidemasters = prs.slidemasters <NEW_LINE> self.assertLength(slidemasters, 1) <NEW_LINE> <DEDENT> def test_slides_property_empty_on_construction(self): <NEW_LINE> <INDENT> self.assertIsSizedProperty(self.prs, 'slides', 0) <NEW_LINE> <DEDENT> def test_slides_correct_length_after_pkg_open(self): <NEW_LINE> <INDENT> pkg = Package(test_pptx_path) <NEW_LINE> prs = pkg.presentation <NEW_LINE> slides = prs.slides <NEW_LINE> self.assertLength(slides, 1) | Test Presentation | 62599058097d151d1a2c25fe |
class SchemaValidator: <NEW_LINE> <INDENT> __schema_path = os.path.join('src', 'validation', 'schema') <NEW_LINE> @allure.step('Validating actual response body against the schema {schema_name}') <NEW_LINE> def validate_json(self, schema_name, actual_json): <NEW_LINE> <INDENT> logger.info(f'Starting to validate actual response body against the schema {schema_name}') <NEW_LINE> schema_path = os.path.join(self.__schema_path, schema_name) <NEW_LINE> try: <NEW_LINE> <INDENT> with open(schema_path, 'r') as f: <NEW_LINE> <INDENT> schema = json.loads(f.read()) <NEW_LINE> validate(actual_json, schema) <NEW_LINE> <DEDENT> logger.info('Done. The JSON is OK') <NEW_LINE> <DEDENT> except exceptions.ValidationError as err: <NEW_LINE> <INDENT> message = err.args[0] <NEW_LINE> log_exception('Failed validating the JSON: ' + message) <NEW_LINE> raise AssertionError('JSON schema validation failed. See log for details.') | This class reads all schemas in the src/validation/schema folder,
and then validates the given JSON against the chosen schema. | 62599058004d5f362081fab6
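A usage sketch; the schema file name is hypothetical and must exist under src/validation/schema:

validator = SchemaValidator()
validator.validate_json('user.json', {'id': 1, 'name': 'Ada'})
# Raises AssertionError (after logging the cause) if the JSON does not conform.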
class TasteMovie(ModelUtils, ndb.Model): <NEW_LINE> <INDENT> movie = ndb.KeyProperty(Movie) <NEW_LINE> taste = ndb.FloatProperty(required=True) <NEW_LINE> added = ndb.BooleanProperty(default=False) <NEW_LINE> def add_movie(self, movie): <NEW_LINE> <INDENT> self.movie = movie.key <NEW_LINE> self.put() <NEW_LINE> <DEDENT> def update_taste(self, taste): <NEW_LINE> <INDENT> self.taste += taste <NEW_LINE> self.put() | This model represents the taste of a user about a movie. | 625990586e29344779b01bde |
class PeriodicReportViewSet(mixins.CreateModelMixin, mixins.RetrieveModelMixin, mixins.ListModelMixin, GenericViewSet): <NEW_LINE> <INDENT> def get_queryset(self): <NEW_LINE> <INDENT> return PeriodicReport.objects.filter(task__user=self.request.user) <NEW_LINE> <DEDENT> def perform_create(self, serializer): <NEW_LINE> <INDENT> task = PeriodicTask.objects.get( id=serializer.validated_data['task'].id ) <NEW_LINE> serializer.save(total_rules=task.get_all_rules()) <NEW_LINE> <DEDENT> serializer_class = PeriodicReportSerializer | ViewSet for ``PeriodicReport`` | 62599058d486a94d0ba2d55b |
class PythonLinter(linter.Linter): <NEW_LINE> <INDENT> def context_sensitive_executable_path(self, cmd): <NEW_LINE> <INDENT> success, executable = super().context_sensitive_executable_path(cmd) <NEW_LINE> if success: <NEW_LINE> <INDENT> return success, executable <NEW_LINE> <DEDENT> python = self.settings.get('python', None) <NEW_LINE> self.logger.info( "{}: wanted python is '{}'".format(self.name, python) ) <NEW_LINE> cmd_name = cmd[0] <NEW_LINE> if python: <NEW_LINE> <INDENT> python = str(python) <NEW_LINE> if VERSION_RE.match(python): <NEW_LINE> <INDENT> python_bin = find_python_version(python) <NEW_LINE> if python_bin is None: <NEW_LINE> <INDENT> self.logger.error( "{} deactivated, cannot locate '{}' " "for given python '{}'" .format(self.name, cmd_name, python) ) <NEW_LINE> return True, None <NEW_LINE> <DEDENT> self.logger.info( "{}: Using '{}' for given python '{}'" .format(self.name, python_bin, python) ) <NEW_LINE> return True, [python_bin, '-m', cmd_name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not os.path.exists(python): <NEW_LINE> <INDENT> self.logger.error( "{} deactivated, cannot locate '{}'" .format(self.name, python) ) <NEW_LINE> return True, None <NEW_LINE> <DEDENT> return True, [python, '-m', cmd_name] <NEW_LINE> <DEDENT> <DEDENT> executable = self._ask_pipenv(cmd_name) <NEW_LINE> if executable: <NEW_LINE> <INDENT> self.logger.info( "{}: Using {} according to 'pipenv'" .format(self.name, executable) ) <NEW_LINE> return True, executable <NEW_LINE> <DEDENT> self.logger.info( "{}: trying to use globally installed {}" .format(self.name, cmd_name) ) <NEW_LINE> executable = util.which(cmd_name) <NEW_LINE> if executable is None: <NEW_LINE> <INDENT> self.logger.warning( "cannot locate '{}'. Fill in the 'python' or " "'executable' setting." .format(self.name) ) <NEW_LINE> <DEDENT> return True, executable <NEW_LINE> <DEDENT> def _ask_pipenv(self, linter_name): <NEW_LINE> <INDENT> cwd = self.get_working_dir() <NEW_LINE> if cwd is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> pipfile = os.path.join(cwd, 'Pipfile') <NEW_LINE> if not os.path.exists(pipfile): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> venv = ask_pipenv_for_venv(linter_name, cwd) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> executable = find_script_by_python_env(venv, linter_name) <NEW_LINE> if not executable: <NEW_LINE> <INDENT> self.logger.info( "{} is not installed in the virtual env at '{}'." .format(linter_name, venv) ) <NEW_LINE> return None <NEW_LINE> <DEDENT> return executable | This Linter subclass provides Python-specific functionality.
Linters that check python should inherit from this class.
By doing so, they automatically get the following features:
- Automatic discovery of virtual environments using `pipenv`
- Support for a "python" setting.
- Support for a "executable" setting. | 62599058435de62698e9d397 |
class NECapital(ABOUT): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Capital' <NEW_LINE> verbose_name_plural = 'Capitals' <NEW_LINE> app_label = 'RDb' <NEW_LINE> db_table = 'RDb_necapital' | NECapital: An adjacency table that associates NEProcesses with NEResources.
:param resource: The capital resource associated with the process.
:param process: The process enabled by this capital. | 6259905882261d6c52730994 |
class Encoder(object): <NEW_LINE> <INDENT> def __init__(self, params, cohort, secret, irr_rand): <NEW_LINE> <INDENT> self.params = params <NEW_LINE> self.cohort = cohort <NEW_LINE> self.secret = secret <NEW_LINE> self.irr_rand = irr_rand <NEW_LINE> <DEDENT> def _internal_encode_bits(self, bits): <NEW_LINE> <INDENT> uniform, f_mask = get_prr_masks( self.secret, int2bytes(bits), self.params.prob_f, self.params.num_bloombits) <NEW_LINE> prr = (bits & ~f_mask) | (uniform & f_mask) <NEW_LINE> p_bits = self.irr_rand.p_gen() <NEW_LINE> q_bits = self.irr_rand.q_gen() <NEW_LINE> irr = (p_bits & ~prr) | (q_bits & prr) <NEW_LINE> return prr, irr <NEW_LINE> <DEDENT> def _internal_encode(self, word): <NEW_LINE> <INDENT> bloom_bits = get_bloom_bits(word, self.cohort, self.params.num_hashes, self.params.num_bloombits) <NEW_LINE> bloom = 0 <NEW_LINE> for bit_to_set in bloom_bits: <NEW_LINE> <INDENT> bloom |= (1 << bit_to_set) <NEW_LINE> <DEDENT> prr, irr = self._internal_encode_bits(bloom) <NEW_LINE> print(dec2bin(bloom,self.params.num_bloombits), dec2bin(prr,self.params.num_bloombits), irr) <NEW_LINE> return dec2bin(bloom,self.params.num_bloombits), dec2bin(prr,self.params.num_bloombits), irr <NEW_LINE> <DEDENT> def encode_bits(self, bits): <NEW_LINE> <INDENT> _, irr = self._internal_encode_bits(bits) <NEW_LINE> return irr <NEW_LINE> <DEDENT> def encode(self, word): <NEW_LINE> <INDENT> bloom, prr, irr = self._internal_encode(word) <NEW_LINE> return bloom, prr, irr | Obfuscates values for a given user using the RAPPOR privacy algorithm. | 6259905899cbb53fe6832472 |
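The report's privacy comes from the two randomization layers above. A self-contained sketch of the instantaneous randomized response (IRR) step, with plain random coin flips standing in for irr_rand.p_gen / q_gen:

import random

def irr_step(prr, num_bits, prob_p, prob_q):
    # One biased coin per bit position for the p and q masks.
    p_bits = sum(int(random.random() < prob_p) << i for i in range(num_bits))
    q_bits = sum(int(random.random() < prob_q) << i for i in range(num_bits))
    # PRR bits equal to 0 are reported as 1 with probability prob_p;
    # PRR bits equal to 1 are reported as 1 with probability prob_q.
    return (p_bits & ~prr) | (q_bits & prr)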
class SpriteSheet: <NEW_LINE> <INDENT> def __init__(self, img, color_key=-1, has_alpha=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> img = _convert_to_pygame_surface(img) <NEW_LINE> self.sheet = img.convert_alpha() if has_alpha else img.convert() <NEW_LINE> <DEDENT> except pygame.error as error: <NEW_LINE> <INDENT> print("Unable to load sprite sheet file: ", str(img), file=sys.stderr) <NEW_LINE> raise <NEW_LINE> <DEDENT> self.color_key = color_key <NEW_LINE> self.has_alpha = has_alpha <NEW_LINE> self.cache = {} <NEW_LINE> <DEDENT> def get_image(self, rect): <NEW_LINE> <INDENT> if not isinstance(rect, pygame.Rect): <NEW_LINE> <INDENT> rect = pygame.Rect(rect) <NEW_LINE> <DEDENT> key = str(rect) <NEW_LINE> if key in self.cache: <NEW_LINE> <INDENT> result = self.cache[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = _extract_sprite_image(self.sheet, rect, self.color_key, self.has_alpha) <NEW_LINE> self.cache[key] = result <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def get_image_list(self, *rects): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for r in rects: <NEW_LINE> <INDENT> result.append(self.get_image(r)) <NEW_LINE> <DEDENT> return result | Class used to retrieve individual images from a sprite sheet. | 62599058a219f33f346c7d99 |
@dataclass(eq=False) <NEW_LINE> class CandeProbBase(CandeFormattableMixin, CandeReadableMixin, abc.ABC): <NEW_LINE> <INDENT> method_: InitVar[Method] <NEW_LINE> mode_: InitVar[Mode] <NEW_LINE> level_: InitVar[Level] = Level.THREE <NEW_LINE> heading_: str = "From `candemachine` by: Rick Teachey, [email protected]" <NEW_LINE> iterations_: int = field(default=-99, init=False, repr=False) <NEW_LINE> pipe_groups: List[PipeGroup] = field(default_factory=list, init=False, repr=False) <NEW_LINE> materials: List[Material] = field(default_factory=list, init=False, repr=False) <NEW_LINE> def __post_init__(self, method_, mode_, level_): <NEW_LINE> <INDENT> self._cande = CandeMain(Method(method_), Mode(mode_), Level(level_)) <NEW_LINE> self.truncate_heading() <NEW_LINE> <DEDENT> @property <NEW_LINE> def method(self): <NEW_LINE> <INDENT> return self._cande.method <NEW_LINE> <DEDENT> @property <NEW_LINE> def mode(self): <NEW_LINE> <INDENT> return self._cande.mode <NEW_LINE> <DEDENT> @property <NEW_LINE> def level(self): <NEW_LINE> <INDENT> return self._cande.level <NEW_LINE> <DEDENT> @property <NEW_LINE> def heading(self): <NEW_LINE> <INDENT> return self._cande.heading <NEW_LINE> <DEDENT> @property <NEW_LINE> def iterations(self): <NEW_LINE> <INDENT> return self._cande.iterations <NEW_LINE> <DEDENT> def cid_format(self): <NEW_LINE> <INDENT> result_strs = [] <NEW_LINE> result_strs.append(f'{self.mode.value: <8}{self.level.value: >2d}{self.method.value: >2d}{self.n_pipe_groups: >3d}{self.heading: <60}{self.iterations: >5d}') <NEW_LINE> return ''.join(result_strs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def n_pipe_groups(self): <NEW_LINE> <INDENT> return len(self.pipe_groups) <NEW_LINE> <DEDENT> def write(self, p: Path, output="cid", mode="x"): <NEW_LINE> <INDENT> with p.open(mode=mode) as fout: <NEW_LINE> <INDENT> fout.write("\n".join(serialize(output))) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def from_candemain(cls, main): <NEW_LINE> <INDENT> if main.heading is None: <NEW_LINE> <INDENT> main.heading = cls.__dataclass_fields__["heading_"].default <NEW_LINE> <DEDENT> if main.iterations is None: <NEW_LINE> <INDENT> main.iterations = cls.__dataclass_fields__["iterations_"].default <NEW_LINE> <DEDENT> obj = cls.__new__(cls) <NEW_LINE> obj._cande = main <NEW_LINE> obj.truncate_heading() <NEW_LINE> return obj <NEW_LINE> <DEDENT> def truncate_heading(self): <NEW_LINE> <INDENT> if len(self.heading)>60: <NEW_LINE> <INDENT> logging.debug(f"Truncated heading {len(self._cande.heading)-60} characters") <NEW_LINE> self._cande.heading = self._cande.heading[:60] <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def read(cls, p: Path): <NEW_LINE> <INDENT> if p.suffix != ".cid": <NEW_LINE> <INDENT> raise CandeReadError(f"File type {p.suffix!r} not supported.") <NEW_LINE> <DEDENT> with p.open(mode="r") as fin: <NEW_LINE> <INDENT> return deserialize(fin, p.suffix) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> yield from (self.pipe_groups, *self.problem_contents, self.materials) <NEW_LINE> <DEDENT> @property <NEW_LINE> @abc.abstractmethod <NEW_LINE> def problem_contents(self): <NEW_LINE> <INDENT> ... | Base class for CandeL1, CandeL2, and CandeL3 problems | 62599058d7e4931a7ef3d613
class TestInsert(unittest.TestCase): <NEW_LINE> <INDENT> def test_insert_duplicate_root(self): <NEW_LINE> <INDENT> root = bst.BinaryNode(1) <NEW_LINE> self.assertFalse(root.insert(1)) <NEW_LINE> self.assertEqual(root.size, 1) <NEW_LINE> <DEDENT> def test_insert_duplicate_leaf(self): <NEW_LINE> <INDENT> root = bst.BinaryNode.build(1, 2) <NEW_LINE> self.assertFalse(root.insert(2)) <NEW_LINE> self.assertEqual(root.size, 2) <NEW_LINE> self.assertEqual(root.value, 1) <NEW_LINE> <DEDENT> def test_insert_left(self): <NEW_LINE> <INDENT> root = bst.BinaryNode(2) <NEW_LINE> self.assertTrue(root.insert(1)) <NEW_LINE> self.assertEqual(root.size, 2) <NEW_LINE> self.assertEqual(root.value, 2) <NEW_LINE> self.assertEqual(root.inorder, [1, 2]) <NEW_LINE> <DEDENT> def test_insert_right(self): <NEW_LINE> <INDENT> root = bst.BinaryNode(2) <NEW_LINE> self.assertTrue(root.insert(3)) <NEW_LINE> self.assertEqual(root.size, 2) <NEW_LINE> self.assertEqual(root.value, 2) <NEW_LINE> self.assertEqual(root.inorder, [2, 3]) <NEW_LINE> <DEDENT> def test_bulk_insert(self): <NEW_LINE> <INDENT> root = bst.BinaryNode(4) <NEW_LINE> new_root, results = root.insert(5, 2, 1, 2, 3) <NEW_LINE> self.assertEqual(root, new_root) <NEW_LINE> self.assertEqual(root.size, 5) <NEW_LINE> self.assertTrue(results[0]) <NEW_LINE> self.assertTrue(results[1]) <NEW_LINE> self.assertTrue(results[2]) <NEW_LINE> self.assertFalse(results[3]) <NEW_LINE> self.assertTrue(results[4]) <NEW_LINE> self.assertEqual(root.inorder, [1, 2, 3, 4, 5]) | Tests insertion into a binary search tree. | 625990587b25080760ed87a9 |
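One minimal BinaryNode sketch consistent with the assertions above (single-value insert returns a bool; bulk insert returns the root plus a per-value result list):

class BinaryNode:
    def __init__(self, value):
        self.value, self.left, self.right, self.size = value, None, None, 1

    @classmethod
    def build(cls, *values):
        root = cls(values[0])
        for value in values[1:]:
            root._insert_one(value)
        return root

    def insert(self, *values):
        results = [self._insert_one(v) for v in values]
        return results[0] if len(values) == 1 else (self, results)

    def _insert_one(self, value):
        if value == self.value:
            return False                      # duplicates are rejected
        side = 'left' if value < self.value else 'right'
        child = getattr(self, side)
        if child is None:
            setattr(self, side, BinaryNode(value))
            inserted = True
        else:
            inserted = child._insert_one(value)
        if inserted:
            self.size += 1
        return inserted

    @property
    def inorder(self):
        left = self.left.inorder if self.left else []
        right = self.right.inorder if self.right else []
        return left + [self.value] + right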
class TestTeacherEpic(TestCaseDatabase): <NEW_LINE> <INDENT> def test_teacher(self): <NEW_LINE> <INDENT> user_student_1 = UserShop() <NEW_LINE> user_student_2 = UserShop() <NEW_LINE> user_teacher = UserShop() <NEW_LINE> stub_library = LibraryShop() <NEW_LINE> url = url_for('userview') <NEW_LINE> response = self.client.post( url, data=stub_library.user_view_post_data_json, headers=user_teacher.headers ) <NEW_LINE> self.assertEqual(response.status_code, 200, response) <NEW_LINE> library_id_teacher = response.json['id'] <NEW_LINE> for user in [user_student_1, user_student_2]: <NEW_LINE> <INDENT> url = url_for('libraryview', library=library_id_teacher) <NEW_LINE> with MockSolrBigqueryService(number_of_bibcodes=0) as BQ, MockEndPoint([ user_teacher, user_student_1, user_student_2 ]) as EP: <NEW_LINE> <INDENT> response = self.client.get( url, headers=user.headers ) <NEW_LINE> <DEDENT> self.assertEqual( response.status_code, NO_PERMISSION_ERROR['number'] ) <NEW_LINE> self.assertEqual( response.json['error'], NO_PERMISSION_ERROR['body'] ) <NEW_LINE> <DEDENT> for user in [user_student_1, user_student_2]: <NEW_LINE> <INDENT> url = url_for('permissionview', library=library_id_teacher) <NEW_LINE> with MockEmailService(user): <NEW_LINE> <INDENT> response = self.client.post( url, data=user.permission_view_post_data_json({'read': True, 'write': False, 'admin': False, 'owner': False}), headers=user_teacher.headers ) <NEW_LINE> <DEDENT> self.assertEqual(response.status_code, 200) <NEW_LINE> url = url_for('libraryview', library=library_id_teacher) <NEW_LINE> with MockSolrBigqueryService(number_of_bibcodes=0) as BQ, MockEndPoint([ user_teacher, user_student_1, user_student_2 ]) as EP: <NEW_LINE> <INDENT> response = self.client.get( url, headers=user.headers ) <NEW_LINE> <DEDENT> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertIn('documents', response.json) <NEW_LINE> <DEDENT> url = url_for('permissionview', library=library_id_teacher) <NEW_LINE> with MockEmailService(user_student_2): <NEW_LINE> <INDENT> response = self.client.post( url, data=user_student_2.permission_view_post_data_json({'read': False, 'write': False, 'admin': False, 'owner': False}), headers=user_teacher.headers ) <NEW_LINE> <DEDENT> self.assertEqual(response.status_code, 200) <NEW_LINE> url = url_for('libraryview', library=library_id_teacher) <NEW_LINE> with MockSolrBigqueryService(number_of_bibcodes=0) as BQ, MockEndPoint([ user_teacher, user_student_1, user_student_2 ]) as EP: <NEW_LINE> <INDENT> response = self.client.get( url, headers=user_student_2.headers ) <NEW_LINE> <DEDENT> self.assertEqual(response.status_code, NO_PERMISSION_ERROR['number']) <NEW_LINE> self.assertEqual(response.json['error'], NO_PERMISSION_ERROR['body']) <NEW_LINE> url = url_for('libraryview', library=library_id_teacher) <NEW_LINE> with MockSolrBigqueryService(number_of_bibcodes=0) as BQ, MockEndPoint([ user_teacher, user_student_1, user_student_2 ]) as EP: <NEW_LINE> <INDENT> response = self.client.get( url, headers=user_student_1.headers ) <NEW_LINE> <DEDENT> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertIn('documents', response.json) | Base class used to test the Teacher Epic | 62599058ac7a0e7691f73a75 |
@inside_glslc_testsuite('OptionCapD') <NEW_LINE> class TestMultipleDashCapDOfSameName(expect.ValidObjectFile): <NEW_LINE> <INDENT> shader = FileShader('#version 150\nvoid main(){X Y a=Z;}', '.vert') <NEW_LINE> glslc_args = ['-c', '-DX=main', '-DY=int', '-DZ=(1+2)', '-DX', shader] | Tests multiple -D occurrences with same macro name. | 6259905894891a1f408ba1c0 |
class DynamicProxmoxInventory(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.inventory = {} <NEW_LINE> self.read_cli_args() <NEW_LINE> if self.args.list: <NEW_LINE> <INDENT> self.inventory = self.example_inventory() <NEW_LINE> <DEDENT> elif self.args.host: <NEW_LINE> <INDENT> self.inventory = self.empty_inventory() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.inventory = self.empty_inventory() <NEW_LINE> <DEDENT> print(json.dumps(self.inventory, indent=2)) <NEW_LINE> <DEDENT> def paramiko_connection(self, server, username, password): <NEW_LINE> <INDENT> ssh = paramiko.SSHClient() <NEW_LINE> ssh.load_system_host_keys() <NEW_LINE> ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) <NEW_LINE> ssh.connect(server, username=username, password=password) <NEW_LINE> return ssh <NEW_LINE> <DEDENT> def exec_ssh_command(self, paramikoClient, command): <NEW_LINE> <INDENT> stdin, stdout, stderr = paramikoClient.exec_command(command) <NEW_LINE> return { "stdin": stdin, "stdout": stdout, "stderr": stderr } <NEW_LINE> <DEDENT> def example_inventory(self): <NEW_LINE> <INDENT> ssh = self.paramiko_connection('homelab', 'root', os.getenv('PVE_ROOT_PASSWORD')) <NEW_LINE> allContainersCommand = self.exec_ssh_command(ssh, "pct list | grep 'running' | awk '{print $1, $3}'") <NEW_LINE> inventory = ProxmoxNodeInventory.createFromContainerResponse(allContainersCommand) <NEW_LINE> for host in inventory.container: <NEW_LINE> <INDENT> command = f"pct exec {host.id} -- hostname -I | awk '{{print $1}}'" <NEW_LINE> hostIpCommand = self.exec_ssh_command(ssh, command) <NEW_LINE> host.setIpAddress(hostIpCommand) <NEW_LINE> <DEDENT> swarm_manager_containers = inventory.getContainersByHostnameStartsWith('swarm-manager') <NEW_LINE> swarm_node_containers = inventory.getContainersByHostnameStartsWith('swarm-node') <NEW_LINE> hostVars = PveVmHostVars.create(inventory) <NEW_LINE> return { 'swarmmanagers': { 'hosts': [host.ip for host in swarm_manager_containers.container], 'vars': { } }, 'swarmnodes': { 'hosts': [host.ip for host in swarm_node_containers.container], 'vars': { } }, '_meta': { 'hostvars': hostVars.toJson() } } <NEW_LINE> <DEDENT> def empty_inventory(self): <NEW_LINE> <INDENT> return {'_meta': {'hostvars': {}}} <NEW_LINE> <DEDENT> def read_cli_args(self): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser() <NEW_LINE> parser.add_argument('--list', action = 'store_true') <NEW_LINE> parser.add_argument('--host', action = 'store') <NEW_LINE> self.args = parser.parse_args() | Args:
--list Returns a list of all inventory items
--host Returns a specific hostname | 6259905823e79379d538da91 |
class DisplaySaleTarget(View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> getParams = request.GET <NEW_LINE> natural_week = getParams.get('natural_week', '') <NEW_LINE> __match = re.compile(r'^\d{4}-\d{2}').match(natural_week) <NEW_LINE> if __match: <NEW_LINE> <INDENT> natural_week = __match.group() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> natural_week = get_day_of_week() <NEW_LINE> <DEDENT> data_dict = SaleTarget.objects.filter(natural_week=natural_week).values("id", "phase_name__phase_name", "natural_week", "target", "phase_count", "sale_target_remark", 'sale_target_owner__chinese_name') .order_by('-natural_week', 'sale_target_owner__chinese_name', 'phase_name__phase_name') <NEW_LINE> result_dict = list(data_dict) <NEW_LINE> content = dict_to_json(result_dict) <NEW_LINE> response = my_response(code=0, msg=u"查询成功", content=content) <NEW_LINE> return response | Display all sales targets. | 625990588e71fb1e983bd05e
class NSNitroNserrCachegroupHostNreq(NSNitroCrErrors): <NEW_LINE> <INDENT> pass | Nitro error code 577
Host not required | 62599058004d5f362081fab7 |
class Circle(object): <NEW_LINE> <INDENT> def __init__(self, x, y, radius): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.radius = radius <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ('%s(x=%r, y=%r, radius=%r)' % (self.__class__.__name__, self.x, self.y, self.radius)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def perimeter(self): <NEW_LINE> <INDENT> return 2*np.pi*self.radius <NEW_LINE> <DEDENT> @property <NEW_LINE> def centroid(self): <NEW_LINE> <INDENT> return np.array((self.x, self.y)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def area(self): <NEW_LINE> <INDENT> return np.pi*self.radius**2 <NEW_LINE> <DEDENT> @property <NEW_LINE> def bounds(self): <NEW_LINE> <INDENT> r = self.radius <NEW_LINE> return Rectangle(self.x - r, self.y - r, 2*r, 2*r) <NEW_LINE> <DEDENT> def get_theta(self, x, y): <NEW_LINE> <INDENT> return np.arctan2(y - self.y, x - self.x) <NEW_LINE> <DEDENT> def get_point(self, angle): <NEW_LINE> <INDENT> x = self.x + self.radius * np.cos(angle) <NEW_LINE> y = self.y + self.radius * np.sin(angle) <NEW_LINE> return np.squeeze(np.c_[x, y]) <NEW_LINE> <DEDENT> def get_points(self, spacing=1): <NEW_LINE> <INDENT> num_points = max(4, int(self.perimeter/spacing)) <NEW_LINE> theta = np.linspace(0, 2*np.pi, num_points, endpoint=False) <NEW_LINE> return self.get_point(theta) <NEW_LINE> <DEDENT> def get_tangent(self, angle): <NEW_LINE> <INDENT> return np.squeeze(np.c_[np.cos(angle), np.sin(angle)]) | Class that represents a circle. | 62599058435de62698e9d398
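A usage sketch (bounds is skipped because the Rectangle helper it returns is not shown here):

import numpy as np

c = Circle(0.0, 0.0, 1.0)
print(c.perimeter)               # 2*pi
print(c.area)                    # pi
print(c.get_point(np.pi / 2))    # approximately (0.0, 1.0)
pts = c.get_points(spacing=0.5)  # ~12 evenly spaced boundary points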
class TeamProfile(models.Model): <NEW_LINE> <INDENT> team = models.OneToOneField(Team, blank=True, null=True) <NEW_LINE> languages_spoken = models.CharField(max_length=100) <NEW_LINE> presentation_text = models.TextField(max_length=4000) <NEW_LINE> application_letter = models.FileField(verbose_name="Upload your motivation letter", upload_to='applications/teams/motivationletter/', blank=True, null=True) <NEW_LINE> application_video = models.FileField(verbose_name="Upload Application Video", blank=True, null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.team.university | Stores all relevant information about the Team as the object of an Application.
Relevant information about the applicants as part of the team is stored in the
'Student Profile' and associated with the Team instance. | 6259905810dbd63aa1c72144
class BottleneckProjectionsWriter(api.RecordWriter): <NEW_LINE> <INDENT> def __init__(self, context): <NEW_LINE> <INDENT> super(BottleneckProjectionsWriter, self).__init__(context) <NEW_LINE> self.logger = LOGGER.getChild("BottleneckProjectionsWriter") <NEW_LINE> self.d, self.bn = context.get_default_work_file().rsplit("/", 1) <NEW_LINE> self.fcache = OutFileCache() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> for f in self.fcache.values(): <NEW_LINE> <INDENT> f.close() <NEW_LINE> <DEDENT> <DEDENT> def emit(self, key, value): <NEW_LINE> <INDENT> path = hdfs.path.join(self.d, key, self.bn) <NEW_LINE> checksum, bneck = value <NEW_LINE> self.fcache[path].write(checksum + bneck.tobytes()) | Write out a binary record for each bottleneck. Expects a bytes object (md5
digest of the JPEG data) as the key and a numpy array (the bottleneck) as
the value. | 62599058462c4b4f79dbcf9a |
class AllocatorParser: <NEW_LINE> <INDENT> def __init__(self, basename): <NEW_LINE> <INDENT> self.defaults = {} <NEW_LINE> self.args = [] <NEW_LINE> self.args = self.parseArgs(basename) <NEW_LINE> <DEDENT> def parseArgs(self, basename): <NEW_LINE> <INDENT> parser = argparse.ArgumentParser(prog=basename) <NEW_LINE> parser.add_argument("platform", help="node allocation platform") <NEW_LINE> parser.add_argument("-n", "--node-count", action="store", default=None, dest="nodeCount", help="number of nodes to use", type=int, required=True) <NEW_LINE> parser.add_argument("-c", "--cpus", action="store", default=None, dest="cpus", help="cpus per node (WAS '-s' (--slots) option)", type=int, required=True) <NEW_LINE> parser.add_argument("-m", "--maximum-wall-clock", action="store", dest="maximumWallClock", default=None, help="maximum wall clock time", type=str, required=True) <NEW_LINE> parser.add_argument("-N", "--node-set", action="store", dest="nodeSet", default=None, help="node set name") <NEW_LINE> parser.add_argument("-q", "--queue", action="store", dest="queue", default="debug", help="queue name") <NEW_LINE> parser.add_argument("-e", "--email", action="store_true", dest="email", default=None, help="email notification flag") <NEW_LINE> parser.add_argument("-O", "--output-log", action="store", dest="outputLog", default=None, help="Output log filename") <NEW_LINE> parser.add_argument("-E", "--error-log", action="store", dest="errorLog", default=None, help="Error log filename") <NEW_LINE> parser.add_argument("-g", "--glidein-shutdown", action="store", dest="glideinShutdown", type=int, default=None, help="glide-in inactivity shutdown time in seconds") <NEW_LINE> parser.add_argument("-v", "--verbose", action="store_true", dest="verbose", help="verbose") <NEW_LINE> parser.add_argument("-r", "--reservation", action="store", dest="reservation", default=None, help="run id") <NEW_LINE> parser.add_argument("-d", "--dynamic", const='__default__', nargs='?', action="store", dest="dynamic", type=str, default=None, help="configure to use dynamic slots") <NEW_LINE> self.args = parser.parse_args() <NEW_LINE> return self.args <NEW_LINE> <DEDENT> def getArgs(self): <NEW_LINE> <INDENT> return self.args <NEW_LINE> <DEDENT> def getPlatform(self): <NEW_LINE> <INDENT> return self.args.platform | An argument parser for node allocation requests.
Parameters
----------
basename : `str`
The name used to identify the running program | 62599058be8e80087fbc0618 |
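A usage sketch; the program name, platform, and option values are illustrative:

import sys

sys.argv = ['allocateNodes', 'slurm', '-n', '4', '-c', '16', '-m', '02:00:00']
args = AllocatorParser(sys.argv[0]).getArgs()
print(args.platform, args.nodeCount, args.cpus, args.maximumWallClock)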
class SingleArgSubgroup_Meta(SingleArgSubgroup): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> desc = self.__doc__.strip() <NEW_LINE> keys = [Key.meta("id", "some sort of unique identifier for the EDU")] <NEW_LINE> super(SingleArgSubgroup_Meta, self).__init__(desc, keys) <NEW_LINE> <DEDENT> def fill(self, current, arg, target=None): <NEW_LINE> <INDENT> vec = self if target is None else target <NEW_LINE> vec["id"] = spans_to_str(arg.span) | arg-identification features | 6259905838b623060ffaa319 |
class DistributionLineSegment(ACLineSegment): <NEW_LINE> <INDENT> conductor_info = db.ReferenceProperty(ConductorInfo, collection_name="conductor_segments") <NEW_LINE> sequence_impedance = db.ReferenceProperty(PerLengthSequenceImpedance, collection_name="conductor_segments") <NEW_LINE> phase_impedance = db.ReferenceProperty(PerLengthPhaseImpedance, collection_name="conductor_segments") | Extends ACLineSegment with references to a library of standard types from which electrical parameters can be calculated, as follows: - calculate electrical parameters from asset data, using associated ConductorInfo, with values then multiplied by Conductor.length to produce a matrix model. - calculate unbalanced electrical parameters from associated PerLengthPhaseImpedance, then multiplied by Conductor.length to produce a matrix model. - calculate transposed electrical parameters from associated PerLengthSequenceImpedance, then multiplied by Conductor.length to produce a sequence model. For symmetrical, transposed 3ph lines, it is sufficient to use inherited ACLineSegment attributes, which describe sequence impedances and admittances for the entire length of the segment. Known issue: Attributes expressing impedances and admittances in PerLengthSequenceImpedance and PhaseImpedanceData use Resistance, etc., which describe pre-calculated, full length of segment, while we should have a longitudinal unit, per length. Taking 'r' as example, its 'unit'=Ohm, but the value is effectively in Ohm/m, so the value needs to be multiplied by Conductor.length. This is against the whole idea of unit data types and is semantically wrong, but base CIM does not have the required data types at this moment. Until the revision of unit modelling in CIM, applications need to deduce and locally handle appending '/m' for units and ensure they multiply the values by Conductor.length. At least one of the Associations must exist.
| 6259905807d97122c4218240 |
class Game(object): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> WIN_TITLE: str = 'RL Tutorial' <NEW_LINE> SCR_W, SCR_H = 64, 36 <NEW_LINE> self.scene: Scene = RoguelikeScene(SCR_W, SCR_H) <NEW_LINE> tcod.console_set_custom_font( contents.Fonts.ARIAL_10_10, tcod.FONT_TYPE_GREYSCALE | tcod.FONT_LAYOUT_TCOD) <NEW_LINE> tcod.console_init_root(SCR_W, SCR_H, title=WIN_TITLE, fullscreen=False) <NEW_LINE> <DEDENT> def run(self) -> None: <NEW_LINE> <INDENT> key, mouse = tcod.Key(), tcod.Mouse() <NEW_LINE> while not tcod.console_is_window_closed(): <NEW_LINE> <INDENT> tcod.sys_check_for_event(tcod.EVENT_KEY_PRESS, key, mouse) <NEW_LINE> self.scene.render() <NEW_LINE> self.scene.handle_imput(key, mouse) | Provides game loop. | 62599058d99f1b3c44d06c36 |
class TypeTypeComplex(TypeType): <NEW_LINE> <INDENT> def node_type(self): <NEW_LINE> <INDENT> return self.__class__ | A type for complex objects. | 62599058009cb60464d02aca |
class scheduleEventSearchType (rangeMatcherType): <NEW_LINE> <INDENT> _TypeDefinition = None <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_ELEMENT_ONLY <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'scheduleEventSearchType') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('https://rs-test.poms.omroep.nl/v1/schema/urn:vpro:api:2013', 248, 2) <NEW_LINE> _ElementMap = rangeMatcherType._ElementMap.copy() <NEW_LINE> _AttributeMap = rangeMatcherType._AttributeMap.copy() <NEW_LINE> __begin = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'begin'), 'begin', '__urnvproapi2013_scheduleEventSearchType_urnvproapi2013begin', False, pyxb.utils.utility.Location('https://rs-test.poms.omroep.nl/v1/schema/urn:vpro:api:2013', 252, 10), ) <NEW_LINE> begin = property(__begin.value, __begin.set, None, None) <NEW_LINE> __end = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'end'), 'end', '__urnvproapi2013_scheduleEventSearchType_urnvproapi2013end', False, pyxb.utils.utility.Location('https://rs-test.poms.omroep.nl/v1/schema/urn:vpro:api:2013', 253, 10), ) <NEW_LINE> end = property(__end.value, __end.set, None, None) <NEW_LINE> __channel = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'channel'), 'channel', '__urnvproapi2013_scheduleEventSearchType_urnvproapi2013channel', False, pyxb.utils.utility.Location('https://rs-test.poms.omroep.nl/v1/schema/urn:vpro:api:2013', 254, 10), ) <NEW_LINE> channel = property(__channel.value, __channel.set, None, None) <NEW_LINE> __net = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'net'), 'net', '__urnvproapi2013_scheduleEventSearchType_urnvproapi2013net', False, pyxb.utils.utility.Location('https://rs-test.poms.omroep.nl/v1/schema/urn:vpro:api:2013', 255, 10), ) <NEW_LINE> net = property(__net.value, __net.set, None, None) <NEW_LINE> __rerun = pyxb.binding.content.ElementDeclaration(pyxb.namespace.ExpandedName(Namespace, 'rerun'), 'rerun', '__urnvproapi2013_scheduleEventSearchType_urnvproapi2013rerun', False, pyxb.utils.utility.Location('https://rs-test.poms.omroep.nl/v1/schema/urn:vpro:api:2013', 256, 10), ) <NEW_LINE> rerun = property(__rerun.value, __rerun.set, None, None) <NEW_LINE> _ElementMap.update({ __begin.name() : __begin, __end.name() : __end, __channel.name() : __channel, __net.name() : __net, __rerun.name() : __rerun }) <NEW_LINE> _AttributeMap.update({ }) | Complex type {urn:vpro:api:2013}scheduleEventSearchType with content type ELEMENT_ONLY | 62599058507cdc57c63a633b |
class FileSourceInfo(SourceInfo): <NEW_LINE> <INDENT> def is_my_business(self, action, **keywords): <NEW_LINE> <INDENT> status = SourceInfo.is_my_business(self, action, **keywords) <NEW_LINE> if status: <NEW_LINE> <INDENT> file_name = keywords.get("file_name", None) <NEW_LINE> if file_name: <NEW_LINE> <INDENT> if is_string(type(file_name)): <NEW_LINE> <INDENT> file_type = find_file_type_from_file_name(file_name, action) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise IOError("Wrong file name") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> file_type = keywords.get("file_type") <NEW_LINE> <DEDENT> status = self.can_i_handle(action, file_type) <NEW_LINE> <DEDENT> return status <NEW_LINE> <DEDENT> def can_i_handle(self, action, file_type): <NEW_LINE> <INDENT> raise NotImplementedError("") | Plugin description for a file source | 62599058a219f33f346c7d9b |
class NrpeCheck(object): <NEW_LINE> <INDENT> OK = 0 <NEW_LINE> WARNING = 1 <NEW_LINE> CRITICAL = 2 <NEW_LINE> UNKNOWN = 3 <NEW_LINE> MAX_MESSAGE_LENGTH = 4096 <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.status = None <NEW_LINE> self.output = None <NEW_LINE> self.perfdata = None <NEW_LINE> <DEDENT> def format_message(self): <NEW_LINE> <INDENT> if self.output is None: <NEW_LINE> <INDENT> self.output = '' <NEW_LINE> <DEDENT> if self.perfdata is None: <NEW_LINE> <INDENT> self.perfdata = '' <NEW_LINE> <DEDENT> output_lines = self.output.split('\n') <NEW_LINE> perfdata_lines = self.perfdata.split('\n') <NEW_LINE> message = output_lines[0] <NEW_LINE> if perfdata_lines[0]: <NEW_LINE> <INDENT> message += u'|' + perfdata_lines[0] <NEW_LINE> <DEDENT> if output_lines[1:] or perfdata_lines[1:]: <NEW_LINE> <INDENT> message += u'\n' <NEW_LINE> <DEDENT> if output_lines[1:]: <NEW_LINE> <INDENT> message += u'\n'.join(output_lines[1:]) <NEW_LINE> <DEDENT> if perfdata_lines[1:]: <NEW_LINE> <INDENT> message += u'|' + u'\n'.join(perfdata_lines[1:]) <NEW_LINE> <DEDENT> return message <NEW_LINE> <DEDENT> def check(self): <NEW_LINE> <INDENT> raise RuntimeError("Subclasses must define the check() method.") <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.check() <NEW_LINE> if self.status not in [NrpeCheck.OK, NrpeCheck.WARNING, NrpeCheck.CRITICAL]: <NEW_LINE> <INDENT> self.status = NrpeCheck.UNKNOWN <NEW_LINE> <DEDENT> sys.stdout.write(self.format_message()[:self.MAX_MESSAGE_LENGTH]) <NEW_LINE> sys.exit(self.status) | Base class for running NRPE checks.
Reference: https://assets.nagios.com/downloads/nagioscore/docs/nagioscore/3/en/pluginapi.html | 625990583c8af77a43b68a0b |
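A sketch of a concrete check built on NrpeCheck; the load source and thresholds are illustrative:

import os

class LoadCheck(NrpeCheck):
    def check(self):
        load1 = os.getloadavg()[0]
        if load1 > 8:
            self.status = NrpeCheck.CRITICAL
        elif load1 > 4:
            self.status = NrpeCheck.WARNING
        else:
            self.status = NrpeCheck.OK
        label = {0: 'OK', 1: 'WARNING', 2: 'CRITICAL'}[self.status]
        self.output = 'LOAD %s - load1=%.2f' % (label, load1)
        self.perfdata = 'load1=%.2f' % load1

LoadCheck().run()  # prints "output|perfdata" and exits with the status code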
@inside_spirv_testsuite('SpirvOptFlags') <NEW_LINE> class TestValidPassFlags(expect.ValidObjectFile1_5, expect.ExecutedListOfPasses): <NEW_LINE> <INDENT> flags = [ '--wrap-opkill', '--ccp', '--cfg-cleanup', '--combine-access-chains', '--compact-ids', '--convert-local-access-chains', '--copy-propagate-arrays', '--eliminate-dead-branches', '--eliminate-dead-code-aggressive', '--eliminate-dead-const', '--eliminate-dead-functions', '--eliminate-dead-inserts', '--eliminate-dead-variables', '--eliminate-insert-extract', '--eliminate-local-multi-store', '--eliminate-local-single-block', '--eliminate-local-single-store', '--flatten-decorations', '--fold-spec-const-op-composite', '--freeze-spec-const', '--if-conversion', '--inline-entry-points-exhaustive', '--loop-fission', '20', '--loop-fusion', '5', '--loop-unroll', '--loop-unroll-partial', '3', '--loop-peeling', '--merge-blocks', '--merge-return', '--loop-unswitch', '--private-to-local', '--reduce-load-size', '--redundancy-elimination', '--remove-duplicates', '--replace-invalid-opcode', '--ssa-rewrite', '--scalar-replacement', '--scalar-replacement=42', '--strength-reduction', '--strip-debug', '--strip-reflect', '--vector-dce', '--workaround-1209', '--unify-const', '--graphics-robust-access', '--wrap-opkill', '--amd-ext-to-khr' ] <NEW_LINE> expected_passes = [ 'wrap-opkill', 'ccp', 'cfg-cleanup', 'combine-access-chains', 'compact-ids', 'convert-local-access-chains', 'copy-propagate-arrays', 'eliminate-dead-branches', 'eliminate-dead-code-aggressive', 'eliminate-dead-const', 'eliminate-dead-functions', 'eliminate-dead-inserts', 'eliminate-dead-variables', 'simplify-instructions', 'ssa-rewrite', 'eliminate-local-single-block', 'eliminate-local-single-store', 'flatten-decorations', 'fold-spec-const-op-composite', 'freeze-spec-const', 'if-conversion', 'inline-entry-points-exhaustive', 'loop-fission', 'loop-fusion', 'loop-unroll', 'loop-unroll', 'loop-peeling', 'merge-blocks', 'merge-return', 'loop-unswitch', 'private-to-local', 'reduce-load-size', 'redundancy-elimination', 'remove-duplicates', 'replace-invalid-opcode', 'ssa-rewrite', 'scalar-replacement=100', 'scalar-replacement=42', 'strength-reduction', 'strip-debug', 'strip-reflect', 'vector-dce', 'workaround-1209', 'unify-const', 'graphics-robust-access', 'wrap-opkill', 'amd-ext-to-khr' ] <NEW_LINE> shader = placeholder.FileSPIRVShader(empty_main_assembly(), '.spvasm') <NEW_LINE> output = placeholder.TempFileName('output.spv') <NEW_LINE> spirv_args = [shader, '-o', output, '--print-all'] + flags <NEW_LINE> expected_object_filenames = (output) | Tests that spirv-opt accepts all valid optimization flags. | 625990582ae34c7f260ac67d |
@total_ordering <NEW_LINE> class ResultCode(Enum): <NEW_LINE> <INDENT> OK = 1 <NEW_LINE> CANCELLED = 2 <NEW_LINE> WARNING = 3 <NEW_LINE> ERROR = 4 <NEW_LINE> INSPECT = 5 <NEW_LINE> def __hash__(self) -> int: <NEW_LINE> <INDENT> return self.value <NEW_LINE> <DEDENT> def __eq__(self, other: object) -> bool: <NEW_LINE> <INDENT> if isinstance(other, ResultCode): <NEW_LINE> <INDENT> return self.value == other.value <NEW_LINE> <DEDENT> elif other is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> def __gt__(self, other: object) -> bool: <NEW_LINE> <INDENT> if isinstance(other, ResultCode): <NEW_LINE> <INDENT> return self.value > other.value <NEW_LINE> <DEDENT> elif other is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented | Result codes for tasks and jobs.
Result codes can be compared to each other and to None;
the more urgent result is considered greater and
all results are greater than None. | 6259905807f4c71912bb09d1 |
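A usage sketch of the ordering contract: the most urgent result wins, and anything beats None:

results = [ResultCode.OK, None, ResultCode.WARNING, ResultCode.ERROR]
worst = max(results)  # ResultCode.ERROR, the most urgent outcome
assert ResultCode.ERROR > ResultCode.WARNING > ResultCode.OK > None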
class ConnectionProxy(object): <NEW_LINE> <INDENT> def __init__(self, pool=None, client_proxy=None, connected=True): <NEW_LINE> <INDENT> self.client = client_proxy <NEW_LINE> self._pool = weakref.ref(pool) <NEW_LINE> self.ready_callbacks = [] <NEW_LINE> self._connected = connected <NEW_LINE> self.info = {'db': -1} <NEW_LINE> <DEDENT> @property <NEW_LINE> def pool(self): <NEW_LINE> <INDENT> return self._pool() <NEW_LINE> <DEDENT> def connected(self): <NEW_LINE> <INDENT> return self._connected <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> if not self._connected: <NEW_LINE> <INDENT> self.pool.reconnect(self) <NEW_LINE> self._connected = True <NEW_LINE> <DEDENT> <DEDENT> def ready(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def wait_until_ready(self, callback=None): <NEW_LINE> <INDENT> if callback: <NEW_LINE> <INDENT> self.ready_callbacks.append(callback) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def execute_pending_command(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def assign_connection(self, connection): <NEW_LINE> <INDENT> if self.ready_callbacks: <NEW_LINE> <INDENT> connection.ready_callbacks += self.ready_callbacks <NEW_LINE> self.ready_callbacks = [] <NEW_LINE> <DEDENT> connection._event_handler = self.client <NEW_LINE> self.client.connection = connection <NEW_LINE> self.pool.release(self) <NEW_LINE> if connection.connected(): <NEW_LINE> <INDENT> connection.fire_event('on_connect') <NEW_LINE> <DEDENT> connection.execute_pending_command() | A stub object to replace a client's connection until one is available. | 62599058b57a9660fecd3011 |
@LOSSES.register_module() <NEW_LINE> class IoULoss(nn.Module): <NEW_LINE> <INDENT> def __init__(self, linear=False, eps=1e-6, reduction='mean', loss_weight=1.0, mode='log'): <NEW_LINE> <INDENT> super(IoULoss, self).__init__() <NEW_LINE> assert mode in ['linear', 'square', 'log'] <NEW_LINE> if linear: <NEW_LINE> <INDENT> mode = 'linear' <NEW_LINE> warnings.warn('DeprecationWarning: Setting "linear=True" in ' 'IOULoss is deprecated, please use "mode=`linear`" ' 'instead.') <NEW_LINE> <DEDENT> self.mode = mode <NEW_LINE> self.linear = linear <NEW_LINE> self.eps = eps <NEW_LINE> self.reduction = reduction <NEW_LINE> self.loss_weight = loss_weight <NEW_LINE> <DEDENT> def forward(self, pred, target, weight=None, avg_factor=None, reduction_override=None, **kwargs): <NEW_LINE> <INDENT> assert reduction_override in (None, 'none', 'mean', 'sum') <NEW_LINE> reduction = ( reduction_override if reduction_override else self.reduction) <NEW_LINE> if (weight is not None) and (not torch.any(weight > 0)) and ( reduction != 'none'): <NEW_LINE> <INDENT> if pred.dim() == weight.dim() + 1: <NEW_LINE> <INDENT> weight = weight.unsqueeze(1) <NEW_LINE> <DEDENT> return (pred * weight).sum() <NEW_LINE> <DEDENT> if weight is not None and weight.dim() > 1: <NEW_LINE> <INDENT> assert weight.shape == pred.shape <NEW_LINE> weight = weight.mean(-1) <NEW_LINE> <DEDENT> loss = self.loss_weight * iou_loss( pred, target, weight, mode=self.mode, eps=self.eps, reduction=reduction, avg_factor=avg_factor, **kwargs) <NEW_LINE> return loss | IoULoss.
Computing the IoU loss between a set of predicted bboxes and target bboxes.
Args:
linear (bool): If True, use linear scale of loss else determined
by mode. Default: False.
eps (float): Eps to avoid log(0).
reduction (str): Options are "none", "mean" and "sum".
loss_weight (float): Weight of loss.
mode (str): Loss scaling mode, including "linear", "square", and "log".
Default: 'log' | 6259905876e4537e8c3f0b22 |
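The registered iou_loss helper that forward delegates to is not shown above; a standalone sketch of the underlying computation for mode='linear' (loss = 1 - IoU) on (x1, y1, x2, y2) boxes:

import torch

def linear_iou_loss(pred, target, eps=1e-6):
    lt = torch.max(pred[:, :2], target[:, :2])   # intersection top-left
    rb = torch.min(pred[:, 2:], target[:, 2:])   # intersection bottom-right
    wh = (rb - lt).clamp(min=0)
    overlap = wh[:, 0] * wh[:, 1]
    area_pred = (pred[:, 2] - pred[:, 0]) * (pred[:, 3] - pred[:, 1])
    area_target = (target[:, 2] - target[:, 0]) * (target[:, 3] - target[:, 1])
    ious = overlap / (area_pred + area_target - overlap + eps)
    return 1 - ious  # per-box loss, before weighting and reduction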
class CoverLocale: <NEW_LINE> <INDENT> __instance = None <NEW_LINE> class __impl: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Locale = self._enum( RB='rhythmbox', LOCALE_DOMAIN='alternative-toolbar') <NEW_LINE> <DEDENT> def switch_locale(self, locale_type): <NEW_LINE> <INDENT> locale.setlocale(locale.LC_ALL, '') <NEW_LINE> locale.bindtextdomain(locale_type, RB.locale_dir()) <NEW_LINE> locale.textdomain(locale_type) <NEW_LINE> gettext.bindtextdomain(locale_type, RB.locale_dir()) <NEW_LINE> gettext.textdomain(locale_type) <NEW_LINE> gettext.install(locale_type) <NEW_LINE> <DEDENT> def get_locale(self): <NEW_LINE> <INDENT> return locale.getdefaultlocale()[0] <NEW_LINE> <DEDENT> def _enum(self, **enums): <NEW_LINE> <INDENT> return type('Enum', (), enums) <NEW_LINE> <DEDENT> def get_translation(self, value): <NEW_LINE> <INDENT> return gettext.gettext(value) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> if CoverLocale.__instance is None: <NEW_LINE> <INDENT> CoverLocale.__instance = CoverLocale.__impl() <NEW_LINE> <DEDENT> self.__dict__['_CoverLocale__instance'] = CoverLocale.__instance <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> return getattr(self.__instance, attr) <NEW_LINE> <DEDENT> def __setattr__(self, attr, value): <NEW_LINE> <INDENT> return setattr(self.__instance, attr, value) | This class manages the locale | 62599058b7558d58954649f6 |
@adapter_config(required=(IAlchemyEngineUtility, IAdminLayer, AlchemyEngineEditForm), provides=IAJAXFormRenderer) <NEW_LINE> class AlchemyEngineEditFormAJAXRenderer(ContextRequestViewAdapter): <NEW_LINE> <INDENT> def render(self, changes): <NEW_LINE> <INDENT> if not changes: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> manager = get_parent(self.context, IAlchemyManager) <NEW_LINE> return { 'callbacks': [ get_json_table_row_refresh_callback(manager, self.request, AlchemyManagerEnginesTable, self.context) ] } | SQLAlchemy engine edit form AJAX renderer | 625990588e71fb1e983bd060 |
class TestMediaTitleProperty: <NEW_LINE> <INDENT> def test_set_title(self): <NEW_LINE> <INDENT> mkv = MKV(test_paths['default'], stages.STAGE_0) <NEW_LINE> mkv._analyze() <NEW_LINE> assert mkv.media_title == 'Default Test' <NEW_LINE> assert mkv.state.clean_name == 'Default Test' <NEW_LINE> <DEDENT> def test_colon_in_title(self): <NEW_LINE> <INDENT> mkv = MKV(test_paths['default'], stages.STAGE_0) <NEW_LINE> mkv.media_title = 'Title 2: Revenge of the Colon' <NEW_LINE> assert mkv.media_title == 'Title 2: Revenge of the Colon' <NEW_LINE> assert mkv.state.clean_name == 'Title 2 Revenge of the Colon' | Tests various aspects of file name generation | 62599058adb09d7d5dc0bb01 |
class FlowCompiler(object): <NEW_LINE> <INDENT> def __init__(self, deep_compiler_func): <NEW_LINE> <INDENT> self._deep_compiler_func = deep_compiler_func <NEW_LINE> <DEDENT> def compile(self, flow, parent=None): <NEW_LINE> <INDENT> graph = gr.DiGraph(name=flow.name) <NEW_LINE> graph.add_node(flow, kind=FLOW, noop=True) <NEW_LINE> tree_node = tr.Node(flow, kind=FLOW, noop=True) <NEW_LINE> if parent is not None: <NEW_LINE> <INDENT> parent.add(tree_node) <NEW_LINE> <DEDENT> if flow.retry is not None: <NEW_LINE> <INDENT> tree_node.add(tr.Node(flow.retry, kind=RETRY)) <NEW_LINE> <DEDENT> decomposed = dict( (child, self._deep_compiler_func(child, parent=tree_node)[0]) for child in flow) <NEW_LINE> decomposed_graphs = list(six.itervalues(decomposed)) <NEW_LINE> graph = gr.merge_graphs(graph, *decomposed_graphs, overlap_detector=_overlap_occurrence_detector) <NEW_LINE> for u, v, attr_dict in flow.iter_links(): <NEW_LINE> <INDENT> u_graph = decomposed[u] <NEW_LINE> v_graph = decomposed[v] <NEW_LINE> _add_update_edges(graph, u_graph.no_successors_iter(), list(v_graph.no_predecessors_iter()), attr_dict=attr_dict) <NEW_LINE> <DEDENT> if flow.retry is not None: <NEW_LINE> <INDENT> graph.add_node(flow.retry, kind=RETRY) <NEW_LINE> _add_update_edges(graph, [flow], [flow.retry], attr_dict={LINK_INVARIANT: True}) <NEW_LINE> for node in graph.nodes_iter(): <NEW_LINE> <INDENT> if node is not flow.retry and node is not flow: <NEW_LINE> <INDENT> graph.node[node].setdefault(RETRY, flow.retry) <NEW_LINE> <DEDENT> <DEDENT> from_nodes = [flow.retry] <NEW_LINE> attr_dict = {LINK_INVARIANT: True, LINK_RETRY: True} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> from_nodes = [flow] <NEW_LINE> attr_dict = {LINK_INVARIANT: True} <NEW_LINE> <DEDENT> _add_update_edges(graph, from_nodes, [ node for node in graph.no_predecessors_iter() if node is not flow ], attr_dict=attr_dict) <NEW_LINE> flow_term = Terminator(flow) <NEW_LINE> graph.add_node(flow_term, kind=FLOW_END, noop=True) <NEW_LINE> _add_update_edges(graph, [ node for node in graph.no_successors_iter() if node is not flow_term ], [flow_term], attr_dict={LINK_INVARIANT: True}) <NEW_LINE> return graph, tree_node | Recursive compiler of flows. | 625990588e7ae83300eea624 |
class Red305(object): <NEW_LINE> <INDENT> def __init__(self, concentration): <NEW_LINE> <INDENT> self.name = 'BASF Lumogen F Red 305' <NEW_LINE> self.quantum_efficiency = 0.95 <NEW_LINE> self.concentration = concentration <NEW_LINE> self.logger = logging.getLogger('pvtrace.red305') <NEW_LINE> self.logger.info('concentration at red305 is ' + str(concentration*1000) + ' ppm') <NEW_LINE> <DEDENT> def description(self): <NEW_LINE> <INDENT> return self.name + ' (Concentration : ' + str(self.concentration) + 'mg/g)' <NEW_LINE> <DEDENT> def absorption(self): <NEW_LINE> <INDENT> if self.concentration is None: <NEW_LINE> <INDENT> raise Exception('Missing data for dye absorption. Concentration unknown') <NEW_LINE> <DEDENT> absorption_data = np.loadtxt(os.path.join(PVTDATA, 'dyes', 'Red305_010mg_g_1m-1.txt')) <NEW_LINE> phi = 1.006732182 * self.concentration/0.10 <NEW_LINE> self.logger.info('phi equals ' + str(phi) + ' (this should approximately be target concentration / 100 ppm') <NEW_LINE> absorption_data[:, 1] = absorption_data[:, 1] * phi <NEW_LINE> return Spectrum(x=absorption_data[:, 0], y=absorption_data[:, 1]) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def emission(): <NEW_LINE> <INDENT> emission_data = np.loadtxt(os.path.join(PVTDATA, "dyes", 'Red305_ems_spectrum.txt')) <NEW_LINE> return Spectrum(x=emission_data[:, 0], y=emission_data[:, 1]) | Class to generate spectra for Red305-based devices | 6259905829b78933be26ab90 |
class DiamondTest3(DiamondTest): <NEW_LINE> <INDENT> k = 3 <NEW_LINE> k_c = (0.1, 0, 0) <NEW_LINE> test8 = False | Compare this (krhf_slow) @3kp@Gamma vs reference (krhf_slow_supercell). | 62599058462c4b4f79dbcf9c |
class TransformerEncoder(BaseTransformerEncoder): <NEW_LINE> <INDENT> def __init__(self, attention_cell='multi_head', num_layers=2, units=512, hidden_size=2048, max_length=50, num_heads=4, scaled=True, dropout=0.0, use_residual=True, output_attention=False, weight_initializer=None, bias_initializer='zeros', prefix=None, params=None): <NEW_LINE> <INDENT> super(TransformerEncoder, self).__init__(attention_cell=attention_cell, num_layers=num_layers, units=units, hidden_size=hidden_size, max_length=max_length, num_heads=num_heads, scaled=scaled, dropout=dropout, use_residual=use_residual, output_attention=output_attention, weight_initializer=weight_initializer, bias_initializer=bias_initializer, prefix=prefix, params=params, positional_weight='sinusoidal', use_bert_encoder=False, use_layer_norm_before_dropout=False, scale_embed=True) | Structure of the Transformer Encoder.
Parameters
----------
attention_cell : AttentionCell or str, default 'multi_head'
Arguments of the attention cell.
Can be 'multi_head', 'scaled_luong', 'scaled_dot', 'dot', 'cosine', 'normed_mlp', 'mlp'
num_layers : int
Number of attention layers.
units : int
Number of units for the output.
hidden_size : int
Number of units in the hidden layer of the position-wise feed-forward networks.
max_length : int
Maximum length of the input sequence
num_heads : int
Number of heads in multi-head attention
scaled : bool
Whether to scale the softmax input by the sqrt of the input dimension
in multi-head attention
dropout : float
Dropout probability of the attention probabilities.
use_residual : bool
Whether to use residual connections.
output_attention : bool
Whether to output the attention weights
weight_initializer : str or Initializer
Initializer for the input weights matrix, used for the linear
transformation of the inputs.
bias_initializer : str or Initializer
Initializer for the bias vector.
prefix : str, default None.
Prefix for name of `Block`s. (and name of weight if params is `None`).
params : Parameter or None
Container for weight sharing between cells. Created if `None`.
Inputs:
- **inputs** : input sequence of shape (batch_size, length, C_in)
- **states** : list of tensors for initial states and masks.
- **valid_length** : valid lengths of each sequence. Usually used when part of sequence
has been padded. Shape is (batch_size, )
Outputs:
- **outputs** : the output of the encoder. Shape is (batch_size, length, C_out)
- **additional_outputs** : list of tensors.
Either be an empty list or contains the attention weights in this step.
The attention weights will have shape (batch_size, length, mem_length) or
(batch_size, num_heads, length, mem_length) | 6259905891af0d3eaad3b3bf |
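A hedged usage sketch based only on the shapes documented above; it assumes an MXNet/GluonNLP environment where this class and its base are importable, and the exact call signature may differ across versions:

import mxnet as mx  # assumed available

encoder = TransformerEncoder(num_layers=2, units=512, hidden_size=2048, num_heads=4)
encoder.initialize()
inputs = mx.nd.random.normal(shape=(8, 20, 512))  # (batch_size, length, C_in)
valid_length = mx.nd.array([20] * 8)              # (batch_size,)
outputs, additional_outputs = encoder(inputs, None, valid_length)
print(outputs.shape)  # expected (8, 20, 512), i.e. (batch_size, length, C_out)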
class Processor: <NEW_LINE> <INDENT> def __init__(self, text): <NEW_LINE> <INDENT> if type(text) is not str: <NEW_LINE> <INDENT> raise TextProcError("Processors require strings") <NEW_LINE> <DEDENT> self.text = text <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.text) <NEW_LINE> <DEDENT> def count(self): <NEW_LINE> <INDENT> return len(self) <NEW_LINE> <DEDENT> def count_alpha(self): <NEW_LINE> <INDENT> alpha = re.compile(r'[a-zA-Z]') <NEW_LINE> return len(alpha.findall(self.text)) <NEW_LINE> <DEDENT> def count_numeric(self): <NEW_LINE> <INDENT> numeric = re.compile(r'[0-9]') <NEW_LINE> return len(numeric.findall(self.text)) <NEW_LINE> <DEDENT> def count_vowels(self): <NEW_LINE> <INDENT> vowels = re.compile(r'[aeiou]', re.IGNORECASE) <NEW_LINE> return len(vowels.findall(self.text)) <NEW_LINE> <DEDENT> def is_phonenumber(self): <NEW_LINE> <INDENT> phonenum = re.compile(r'^\(?\d{3}\)?[\s.-]?\d{3}[\s.-]?\d{4}$') <NEW_LINE> if phonenum.match(self.text) is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True | Class for Processing Strings | 62599058be8e80087fbc061a
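With the regex fixes above in place, a short usage check (TextProcError and the re import are assumed to be defined alongside the class):

p = Processor('Call me at (555) 867-5309')
print(len(p), p.count_alpha(), p.count_numeric(), p.count_vowels())  # 25 8 10 3
print(Processor('555-867-5309').is_phonenumber())   # True
print(Processor('not a number').is_phonenumber())   # False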
class Splash(object): <NEW_LINE> <INDENT> def __init__(self, container_id, port, splash_container): <NEW_LINE> <INDENT> self.splash_container = splash_container <NEW_LINE> self.id = container_id <NEW_LINE> self.port = port <NEW_LINE> <DEDENT> def get_id(self): <NEW_LINE> <INDENT> return self.id <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return self.splash_container.get_name() <NEW_LINE> <DEDENT> def get_description(self): <NEW_LINE> <INDENT> return self.splash_container.get_description() <NEW_LINE> <DEDENT> def get_proxy_dir(self): <NEW_LINE> <INDENT> return self.splash_container.get_proxy_dir() <NEW_LINE> <DEDENT> def get_proxy_name(self): <NEW_LINE> <INDENT> return self.splash_container.get_proxy_name() <NEW_LINE> <DEDENT> def get_cpu_limit(self): <NEW_LINE> <INDENT> return self.splash_container.get_cpu_limit() <NEW_LINE> <DEDENT> def get_memory_limit(self): <NEW_LINE> <INDENT> return self.splash_container.get_memory_limit() <NEW_LINE> <DEDENT> def get_maxrss(self): <NEW_LINE> <INDENT> return self.splash_container.get_maxrss() <NEW_LINE> <DEDENT> def kill(self): <NEW_LINE> <INDENT> docker_id = cmd_kill_docker(self.id) <NEW_LINE> return docker_id <NEW_LINE> <DEDENT> def restart(self, soft=True): <NEW_LINE> <INDENT> if soft: <NEW_LINE> <INDENT> docker_id = cmd_restart_docker(self.id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> docker_id = cmd_kill_docker(self.id) <NEW_LINE> if docker_id: <NEW_LINE> <INDENT> new_docker_id = cmd_launch_docker(self.port, self.get_proxy_dir(), self.get_proxy_name(), self.get_cpu_limit(), self.get_memory_limit(), self.get_maxrss()) <NEW_LINE> if new_docker_id: <NEW_LINE> <INDENT> self.id = new_docker_id <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self.id | Splash. | 625990583eb6a72ae038bbf7
class Matching(object): <NEW_LINE> <INDENT> def __init__(self, pairs): <NEW_LINE> <INDENT> self.sthlm = set() <NEW_LINE> self.ldn = set() <NEW_LINE> self.neighbors = {} <NEW_LINE> self.match = {} <NEW_LINE> self.dist = {} <NEW_LINE> self.q = collections.deque() <NEW_LINE> for i, j in pairs: <NEW_LINE> <INDENT> self.sthlm.add(i) <NEW_LINE> self.ldn.add(j) <NEW_LINE> self.neighbors.setdefault(i, set()).add(j) <NEW_LINE> self.neighbors.setdefault(j, set()).add(i) <NEW_LINE> <DEDENT> <DEDENT> def koenig(self): <NEW_LINE> <INDENT> self.t = set() <NEW_LINE> for i in self.sthlm: <NEW_LINE> <INDENT> if self.match[i] is None: <NEW_LINE> <INDENT> self.t.add(i) <NEW_LINE> self.vertex_cover(i) <NEW_LINE> <DEDENT> <DEDENT> return list((self.sthlm - self.t) | (self.ldn & self.t)) <NEW_LINE> <DEDENT> def vertex_cover(self, v): <NEW_LINE> <INDENT> for u in self.neighbors[v]: <NEW_LINE> <INDENT> if u in self.t: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> m = self.match[u] <NEW_LINE> if m is not None and m != v: <NEW_LINE> <INDENT> self.t.add(u) <NEW_LINE> self.t.add(m) <NEW_LINE> self.vertex_cover(m) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def hopcroft_karp(self): <NEW_LINE> <INDENT> for employee in (self.sthlm | self.ldn): <NEW_LINE> <INDENT> self.match[employee] = None <NEW_LINE> self.dist[employee] = None <NEW_LINE> <DEDENT> matches = 0 <NEW_LINE> while self.breadth_first_search(): <NEW_LINE> <INDENT> for i in self.sthlm: <NEW_LINE> <INDENT> if self.match[i] is None and self.depth_first_search(i): <NEW_LINE> <INDENT> matches += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return matches <NEW_LINE> <DEDENT> def breadth_first_search(self): <NEW_LINE> <INDENT> for i in self.sthlm: <NEW_LINE> <INDENT> if self.match[i] is None: <NEW_LINE> <INDENT> self.dist[i] = 0 <NEW_LINE> self.q.append(i) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dist[i] = None <NEW_LINE> <DEDENT> <DEDENT> self.dist[None] = None <NEW_LINE> while self.q: <NEW_LINE> <INDENT> i = self.q.popleft() <NEW_LINE> if i is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for j in self.neighbors[i]: <NEW_LINE> <INDENT> m = self.match[j] <NEW_LINE> if self.dist[m] is None: <NEW_LINE> <INDENT> self.dist[m] = self.dist[i] + 1 <NEW_LINE> self.q.append(m) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self.dist[None] is not None <NEW_LINE> <DEDENT> def depth_first_search(self, i): <NEW_LINE> <INDENT> if i is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> for j in self.neighbors[i]: <NEW_LINE> <INDENT> p = self.match[j] <NEW_LINE> if self.dist[p] == self.dist[i] + 1 and self.depth_first_search(p): <NEW_LINE> <INDENT> self.match[i], self.match[j] = j, i <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> self.dist[i] = None <NEW_LINE> return False | I compute a minimum vertex cover of the employee graph.
I'm based on:
* http://en.wikipedia.org/wiki/Koenig's_theorem_(graph_theory)
* http://en.wikipedia.org/wiki/Hopcroft-Karp_algorithm | 62599058379a373c97d9a5bc |
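End-to-end check (assumes the class above plus import collections): hopcroft_karp() must run first so match/dist are populated; koenig() then reads the matching off to produce a minimum vertex cover whose size equals the maximum matching, as Koenig's theorem guarantees.

pairs = [('s1', 'l1'), ('s2', 'l1'), ('s2', 'l2')]
m = Matching(pairs)
print(m.hopcroft_karp())   # 2 -- size of a maximum matching
print(sorted(m.koenig()))  # ['s1', 's2'] -- a vertex cover of the same size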
class HexException(Exception): <NEW_LINE> <INDENT> def __init__(self, hex_file, message): <NEW_LINE> <INDENT> super(HexException, self).__init__(message) <NEW_LINE> self.hex_file = hex_file <NEW_LINE> self.message = message | Exception raised for problems with the hex file.
Attributes:
hex_file -- the file that caused the error
message -- explanation of the error | 6259905845492302aabfda70 |
class INT( int ): <NEW_LINE> <INDENT> def __repr__( self ): <NEW_LINE> <INDENT> return '%s(%s)' % ( self.__class__.__name__, int.__repr__( self )) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return int.__str__( int( self )) <NEW_LINE> <DEDENT> def loc( self ): <NEW_LINE> <INDENT> return _loc_repr( self ) | AST: INT( number ) | 62599058d53ae8145f9199fb |
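INT only changes how the value prints; arithmetic falls back to plain int and returns plain ints (loc() is omitted here since _loc_repr is defined elsewhere):

n = INT(42)
print(repr(n))      # INT(42)
print(str(n))       # 42
print(type(n + 1))  # <class 'int'> -- operations decay to the builtin type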
class Field(object): <NEW_LINE> <INDENT> def __init__(self, name, storage_type="unknown", analytical_type="typeless", concrete_storage_type=None, missing_values=None, label=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.label = label <NEW_LINE> self.storage_type = storage_type <NEW_LINE> self.analytical_type = analytical_type <NEW_LINE> self.concrete_storage_type = concrete_storage_type <NEW_LINE> self.missing_values = missing_values <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> d = { "name": self.name, "label": self.label, "storage_type": self.storage_type, "analytical_type": self.analytical_type, "concrete_storage_type": self.concrete_storage_type, "missing_values": self.missing_values } <NEW_LINE> return d <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s(%s)>" % (self.__class__.__name__, self.to_dict()) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self is other: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.name != other.name or self.label != other.label: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif self.storage_type != other.storage_type or self.analytical_type != other.analytical_type: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif self.concrete_storage_type != other.concrete_storage_type: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> elif self.missing_values != other.missing_values: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) | Metadata - information about a field in a dataset or in a datastream.
:Attributes:
* `name` - field name
* `label` - optional human readable field label
* `storage_type` - Normalized data storage type. The data storage type
is abstracted
* `concrete_storage_type` (optional, recommended) - Data store/database
dependent storage type - this is the real name of data type as used
in a database where the field comes from or where the field is going
to be created (this might be null if unknown)
* `analytical_type` - data type used in data mining algorithms
* `missing_values` (optional) - Array of values that represent missing
values in the dataset for given field | 6259905801c39578d7f14203 |
class SimulationProblem: <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> def __init__(self, model, solverId, method): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> self.solverId = solverId <NEW_LINE> self.method = method <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def simulate(self, overrideSimulProblem=None): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def get_model(self): <NEW_LINE> <INDENT> return self.model <NEW_LINE> <DEDENT> def get_solver_id(self): <NEW_LINE> <INDENT> return self.solverId <NEW_LINE> <DEDENT> def get_method(self): <NEW_LINE> <INDENT> return self.method | Abstract class of simulation problem | 62599058627d3e7fe0e08426 |
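A hedged sketch of a concrete subclass (the solver id and return value are hypothetical stand-ins); note the Python-2-style __metaclass__ attribute only enforces abstractness under Python 2:

class StubSimulationProblem(SimulationProblem):
    def simulate(self, overrideSimulProblem=None):
        # a real subclass would hand self.model to the solver named by solverId
        problem = overrideSimulProblem or self
        return {'solver': problem.get_solver_id(), 'status': 'ok'}

result = StubSimulationProblem(model=None, solverId='glpk', method='FBA').simulate()
print(result)  # {'solver': 'glpk', 'status': 'ok'}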
class ChompConstants(object): <NEW_LINE> <INDENT> RANDOM_PLAYER = "_random" <NEW_LINE> MINIMAL_STEP_PLAYER = "_minimal_step" <NEW_LINE> ACTUAL_PLAYER = '_person' <NEW_LINE> ALPHABETA_PLAYER = '_alpha_beta' <NEW_LINE> CHOCOLATE_ATE_VALUE = '.' <NEW_LINE> CHOCOLATE_POISON_VALUE = 'X' <NEW_LINE> CHOCOLATE_SWEET_VALUE = 'O' | Holds all of the game's constants in one place, to avoid typos and to make refactoring easier. | 625990586e29344779b01be4